Dataset columns:
query: string (lengths 9 to 3.4k)
document: string (lengths 9 to 87.4k)
metadata: dict
negatives: sequence (lengths 4 to 101)
negative_scores: sequence (lengths 4 to 101)
document_score: string (lengths 3 to 10)
document_rank: string (102 distinct values)
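Each row pairs a natural-language query with one positive document (a code snippet), a list of mined negative documents, per-negative scores, the positive's own score, and its rank. Below is a minimal sketch of reading rows with this schema using the Hugging Face datasets library; the dataset identifier is a placeholder, not the real name:

from datasets import load_dataset

# "org/code-retrieval-triplets" is a hypothetical identifier; substitute the real one.
rows = load_dataset("org/code-retrieval-triplets", split="train")

for row in rows:
    query = row["query"]              # natural-language description
    positive = row["document"]        # matching code snippet
    negatives = row["negatives"]      # non-matching snippets
    scores = row["negative_scores"]   # one score per negative
    print(query, len(negatives), row["document_score"], row["document_rank"])
    break

The sample rows below illustrate the format.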
Function to test add furniture functionality.
def test_add_furniture(self):
    add_furniture('invoice.csv', 'Elisa Miles', 'LR04', 'Leather Sofa', 25)
    add_furniture('invoice.csv', 'Edward Data', 'KT78', 'Kitchen Table', 10)
    add_furniture('invoice.csv', 'Alex Gonzales', 'BR02', 'Queen Mattress', 17)

    # Generate list of rentals
    with open('invoice.csv', 'r') as csvfile:
        rentals = []
        for row in csvfile:
            rentals.append(row)
    print(rentals)

    # Assert statements
    self.assertEqual(rentals[0], 'Elisa Miles,LR04,Leather Sofa,25\n')
    self.assertEqual(rentals[1], 'Edward Data,KT78,Kitchen Table,10\n')
    self.assertEqual(rentals[2], 'Alex Gonzales,BR02,Queen Mattress,17\n')
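The test above exercises an add_furniture helper that appends one rental record per call to a CSV invoice file. A minimal sketch of a function consistent with the calls and assertions in the test; the signature and CSV handling are assumed from the test, not taken from the module under test:

import csv

def add_furniture(invoice_file, customer_name, item_code, item_description, item_monthly_price):
    # Append one rental row; the test expects lines such as
    # 'Elisa Miles,LR04,Leather Sofa,25\n' in the invoice file.
    with open(invoice_file, 'a', newline='') as csvfile:
        csv.writer(csvfile).writerow(
            [customer_name, item_code, item_description, item_monthly_price])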
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_add_data():\n add_furniture(\"invoice_file.csv\", \"Elisa Miles\", \"LR04\", \"Leather Sofa\", 25.00)\n add_furniture(\"invoice_file.csv\", \"Edward Data\", \"KT78\", \"Kitchen Table\", 10.00)\n add_furniture(\"invoice_file.csv\", \"Alex Gonzales\", \"BR02\", \"Queen Mattress\", 17.00)", "def setUp(self):\n self.item = Furniture('11', 'sofa', '4', '5', 'suede', 'xl')", "def test_add_stock_item(self):\n pass", "def test_add_new_furniture(self):\n input_vars = ['4', 'Rug', '1', 'y', 'Berber', 's']\n inventory = {}\n with patch('builtins.input', side_effect=input_vars):\n main.add_new_item(inventory)\n self.assertEqual(inventory['4'],\n {\n 'product_code': '4',\n 'description': 'Rug',\n 'market_price': 24,\n 'rental_price': '1',\n 'material': 'Berber',\n 'size': 's'\n })", "def test_api_can_add_food_to_a_meal(self):\n response = self.client.post(f'/api/v1/meals/{self.breakfast.id}/foods/{self.oatmeal.id}')\n # import code; code.interact(local=dict(globals(), **locals()))\n\n self.assertEqual(response.data['message'], \"Successfully added oatmeal to breakfast\")", "def add_furniture():\n print(\"Attempting to seed the furniture collection.....\")\n print()\n\n chair_path = Path(\"chair.png\")\n\n couch = FurnitureItem(\n \"Comfy couch\",\n \"Well loved, but still in pretty good condition\",\n 60.00,\n 40,\n \"swiessle@stevens.edu\",\n \"Couch\",\n \"beige\",\n [50, 20, 10],\n )\n couch.set_image_filepath(chair_path)\n Database.add_item(couch)\n print(\"couch has been successfully added\")\n\n table = FurnitureItem(\n \"Dining room table\",\n \"Wooden dining room table. Has a few scuffs, but not bad!\",\n 30.00,\n 15,\n \"gracem730@gmail.com\",\n \"Table\",\n \"wood\",\n [40, 20, 40],\n )\n table.set_image_filepath(chair_path)\n Database.add_item(table)\n print(\"table has been successfully added\")\n\n bed = FurnitureItem(\n \"Bed Frame\",\n \"Just selling the bed frame, you'll have \\\n to get your own mattress\",\n 55.00,\n 50,\n \"erotside@stevens.edu\",\n \"Bed\",\n \"white\",\n [10, 20, 10],\n )\n bed.set_image_filepath(chair_path)\n Database.add_item(bed)\n print(\"bed has been successfully added\")\n\n desk = FurnitureItem(\n \"Ikea desk, no longer need it\",\n \"In great condition, this is truly a steal\",\n 60.00,\n 35,\n \"jlora@stevens.edu\",\n \"Ikea Desk\",\n \"navy\",\n [20, 20, 30],\n )\n desk.set_image_filepath(chair_path)\n Database.add_item(desk)\n print(\"desk has been successfully added\")\n\n shelf = FurnitureItem(\n \"Book shelf, never used\",\n \"Brand new\",\n 110.00,\n 25,\n \"dcarpent@stevens.edu\",\n \"Book Shelf\",\n \"black\",\n [10, 20, 100],\n )\n shelf.set_image_filepath(chair_path)\n Database.add_item(shelf)\n print(\"shelf has been successfully added\")\n\n print()\n print(\"Done seeding the furniture collection!\")\n print(\"----------------------------------------------\")", "def test_add(self):\n # Everything added will be deleted later in test_delete.\n first_name = 'Trevor'\n last_name = 'Harvey'\n entry_date = '04/19/2012'\n title = 'Test'\n minutes = 34\n notes = 'testing entries. 
and regex (555) 555-3425'\n self.data.add(first_name, last_name, entry_date, title, minutes, notes)\n # second test add\n first_name = 'Nik'\n last_name = 'Silver'\n entry_date = '01/14/1827'\n title = 'random@mail.com'\n minutes = 34\n notes = 'This is an email test.'\n\n self.data.add(first_name, last_name, entry_date, title, minutes, notes)", "def test_add_new_in_stock(add):\n length = len(STOCK)\n # here first parameter is for quantity and second for price while flower name is initialised already\n add[1].add_new_in_stock(10, 4.5)\n assert len(STOCK) == length + 1\n assert STOCK[-1] == {'flower_name': \"Sunflower\", 'quantity': 10, \"price\": 4.5}\n STOCK.pop()", "def add_furniture(itemcode, description, marketprice, rentalprice):\n\n material = input(\"Enter item material: \")\n size = input(\"Enter item size (S,M,L,XL): \")\n newitem = Furniture(itemcode, description,\n marketprice, rentalprice\n , material, size)\n FULLINVENTORY[itemcode] = newitem.returnasdictionary()\n print(\"New inventory item added\")", "def test_add_to_stock(add):\n assert STOCK[0]['quantity'] == 20\n add[0].add_to_stock(10)\n assert STOCK[0]['quantity'] == 30\n STOCK[0]['quantity'] = 20", "def test_add_meal(self):\n with self.client:\n response = self.add_meal(\"pilawo\", 15000)\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertEqual(data.get('message'), \"Meal successfully created\")", "def test_add_one_more_test(self):\n self.assertTrue(True)", "def test_form_submition_and_product_creation(user_company, client, authenticated_user):\n add_product_url = reverse('add-product')\n response = client.post(add_product_url, {\n 'name': 'Test_product_name',\n 'serial_number': 'XZ001', \n 'manufacturer': 'Test company',\n 'price_net': 415.26,\n 'description': fake.paragraph(),\n 'stock': 16\n })\n assert response.status_code == 302\n product = Product.objects.get(name='Test_product_name')\n assert response.url == reverse('product-detail',kwargs={'pk': product.pk}) \n assert product.user == authenticated_user\n assert product in Product.objects.all()", "def test_add_item_to_cart(client):\n raise NotImplemented('Acceptance test failed')", "def test_add_new_in_stock_negative(add):\n # here first parameter is for quantity and second for price while flower name is initialised already\n\n for i in [(0, 1.1), (\"we\", \"EW\"), (0, 0)]:\n add[1].add_new_in_stock(10, 4.5), i\n assert not STOCK[-1] == {'flower_name': \"Sunflower\", 'quantity': 10, \"price\": 4.5}\n STOCK.pop()", "def test_foodtrucks_create(self):\n\t\tprint 'API Test: create a new foodtruck'\n\t\turl = reverse('foodtruck_list')\n\t\tdata = {\"status\" : \"APPROVED\",\\\n\t\t \"expirationdate\" : \"2015-03-15T00:00:00\",\\\n\t\t \"permit\" : \"14MFF-0107\",\\\n\t\t \"block\" : \"3794\",\\\n\t\t \"received\" : \"Jun 24 2014 1:49PM\",\\\n\t\t \"facilitytype\" : \"Truck\",\\\n\t\t \"blocklot\" : \"3794002A\",\\\n\t\t \"locationdescription\" : \"02ND ST: TOWNSEND ST to KING ST (700 - 799)\",\\\n\t\t \"cnn\" : 148000,\\\n\t\t \"priorpermit\" : 1,\\\n\t\t \"approved\" : \"2014-06-24T13:55:30\",\\\n\t\t \"noisent\" : \"2013-07-25T00:00:00\",\\\n\t\t \"schedule\" : \"http://bsm.sfdpw.org/PermitsTracker/reports/report.aspx?title=schedule&report=rptSchedule&params=permit=14MFF-0107&ExportPDF=1&Filename=14MFF-0107_schedule.pdf\",\\\n\t\t \"address\" : \"750 02ND ST\",\\\n\t\t \"applicant\" : \"Steve's Mobile Deli\",\\\n\t\t \"lot\" : \"002A\",\\\n\t\t \"fooditems\" : \"Cold Truck: Pre-packaged sandwiches: Burgers: Hot 
Dogs: Muffin Sandwiches: Enchiladas: Bagels: Burritos: Salads: Snacks: Beverages\",\\\n\t\t \"longitude\" : -122.402978526686,\\\n\t\t \"latitude\" : 37.7302216813049, \\\n\t\t \"y\" : 2093947.369,\\\n\t\t \"x\" : 6011371.493,\\\n\t\t \"objectid\" : 554527}\n\t\t\n\t\tresponse = self.client.post(url, data, format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_201_CREATED)\n\t\t\n\t\tquant = '1.000000'\n\t\tfor k, v in data.iteritems():\n\t\t\tif v is not None and (k is \"y\" or k is \"x\" or k is \"latitude\" or k is \"longitude\"):\n\t\t\t\tself.assertEqual(response.data[k].quantize(Decimal(quant)), Decimal(v).quantize(Decimal(quant)))\n\t\t\telif v is not None and (k is \"approved\" or k is \"received\" or k is \"expirationdate\" or k is \"noisent\"):\n\t\t\t\tself.assertEqual(response.data[k], parse(v))\n\t\t\telse:\n\t\t\t\tself.assertEqual(response.data[k], v)\n\t\t\n\t\tresponse = self.client.get(url, format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_200_OK)\n\n\t\tfor k, v in data.iteritems():\n\t\t\tif v is not None and (k is \"y\" or k is \"x\" or k is \"latitude\" or k is \"longitude\"):\n\t\t\t\tself.assertEqual(response.data[0][k].quantize(Decimal(quant)), Decimal(v).quantize(Decimal(quant)))\n\t\t\telif v is not None and (k is \"approved\" or k is \"received\" or k is \"expirationdate\" or k is \"noisent\"):\n\t\t\t\tself.assertEqual(response.data[0][k], parse(v))\n\t\t\telse:\n\t\t\t\tself.assertEqual(response.data[0][k], v)\n\t\tprint 'pass'", "def setUp(self):\n self.client = APIClient()\n self.apple = Food.objects.create(name=\"apple\", calories=50)\n self.oatmeal = Food.objects.create(name=\"oatmeal\", calories=400)\n self.breakfast = Meal.objects.create(name=\"breakfast\")\n self.snack = Meal.objects.create(name=\"snack\")\n self.lunch = Meal.objects.create(name=\"lunch\")\n self.dinner = Meal.objects.create(name=\"dinner\")\n self.breakfast.foods.add(self.apple)", "def get_furniture():", "def test_add_item_adds_single_entry():\n sc.menu = sc.default_menu\n sc.current.add_item('Coffee', 1)\n assert sc.current.receipt == {'subtotal': 1.59, 'Coffee': 1}", "def test_add_to_fav_(self):\n result = self.client.post(\"/add_to_fav\", data={\"yelp_biz_id\":\"JA_V9TqDCrkgknqrcUndIQ\", \n \"yelp_rest_name\":\"Siam\", \"yelp_rating\":\"4\", \n \"yelp_category\":\"Thai\", \"yelp_price\":\"$$\", \n \"yelp_image_url\":\"https://s3-media2.fl.yelpcdn.com/bphoto/1SkZwZrRZkQSzRMn_Trs3w/o.jpg\" })\n\n DB_result = Restaurant_details.query.filter_by(biz_id = \"JA_V9TqDCrkgknqrcUndIQ\").first()\n self.assertIsNotNone(DB_result) #testing that the returned result is not NONE\n self.assertEqual(DB_result.restaurant_name, 'Siam') #testing restaurant name is what it should be\n \n self.assertIn(b\"Your Favourite has been saved\", result.data)", "def test_create_ingredient_successful(self):\n payload = {'name':'Cabbage'}\n self.client.post(INGREDIENTS_URL, payload)\n exists = Ingredient.objects.all().filter(user=self.user, name=payload['name']).exists\n self.assertTrue(exists)", "def test_get_food(self):\n pass", "def test_add_item_using_post(self):\n pass", "def test_create_ingredient(self):\n\n ingredient_payload = {'name': 'Test Ingredient'}\n self.client.post(URL_INGREDIENTS, ingredient_payload)\n\n is_ingredient_created = Ingredient.objects.filter(\n user=self.user,\n name=ingredient_payload['name']\n ).exists()\n\n self.assertTrue(is_ingredient_created)", "def test_create_shelf(self, *_):\n form = forms.ShelfForm()\n form.data[\"user\"] = 
self.local_user.id\n form.data[\"name\"] = \"new shelf name\"\n form.data[\"description\"] = \"desc\"\n form.data[\"privacy\"] = \"unlisted\"\n request = self.factory.post(\"\", form.data)\n request.user = self.local_user\n\n views.create_shelf(request)\n\n shelf = models.Shelf.objects.get(name=\"new shelf name\")\n self.assertEqual(shelf.privacy, \"unlisted\")\n self.assertEqual(shelf.description, \"desc\")\n self.assertEqual(shelf.user, self.local_user)", "def test_add_book(self):\n\n first_book_list = BookList()\n first_book = Book()\n\n first_book.create_book({\n \"title\": \"First Man\",\n \"author\": \"James R. Hansen\",\n \"year\": 2005,\n \"publisher_name\": \"Simon & Schuster\",\n \"publication_date\": \"01/01/2018\",\n \"num_copies\": 1\n })\n\n assert first_book_list.add_book(first_book)\n assert first_book_list.find_book(\"First Man\")\n assert first_book_list.num_books() == 1", "def test_add_furniture_write(_customers_to_add):\n\n test_invoice = \"../data/test-invoice.csv\"\n csv_contents = []\n\n if Path(test_invoice).exists():\n remove(test_invoice)\n\n for customer in _customers_to_add:\n l.add_furniture(\n test_invoice, customer[0], customer[1], customer[2], customer[3]\n )\n\n with open(test_invoice, \"r\") as csv_file:\n contents = reader(csv_file, delimiter=',')\n for line in contents:\n if line != []:\n csv_contents += [line]\n\n csv_contents += contents\n\n assert _customers_to_add == csv_contents", "def test_add_to_cart(self):\n\n # test sale item that can be sold\n response = self.client.get(\n '/self.base_url/sales/3/2',\n headers=dict(Authorization=\"Bearer \" + self.attendant_token),\n content_type = 'application/json'\n )\n response_data = json.loads(response.data)\n self.assertEqual(response_data['message'],\"These are the items on your Cart\")\n self.assertEqual(response.status_code,200)", "def test_shoppingitems_creation(self):\n # register and login a user\n self.app.post('/register', data=self.user_reg_details)\n self.app.post('/login', data=self.user_login_details)\n # create a shopping list\n self.shopping_class_obj.create_list(\n 'Easter', 'maina@gmail.com')\n # make a post request to add an item\n res = self.app.post(\n '/shoppingitems/Easter', data={'item-name': 'Bread'})\n self.assertEqual(res.status_code, 200)\n response = self.item_class_obj.add_item(\n 'Easter', 'Bread', 'maina@gmail.com')\n self.assertIsInstance(response, list)\n # check if item was successfully created\n self.assertIn(\"Bread\", str(res.data))", "def test_add_item_adds_multiple_entries():\n sc.menu = sc.default_menu\n sc.current.add_item('Coffee', 2)\n sc.current.add_item('Coffee', 1)\n sc.current.add_item('Tea', 1)\n assert sc.current.receipt == {'subtotal': 6.36, 'Coffee': 3, 'Tea': 1}", "def test_add_feature_view(self):\n print 'Running %s ...' 
% getName()\n \n seq = self.sequenceListingFixture.create_sequence_instance(self.sequenceListing)\n# test that URL resolves to correct views function\n found = resolve('/sequencelistings/sl%d/seq%d/add_feature/' % (self.sequenceListing.id, seq.id))\n self.assertEqual(found.func, views.add_feature)\n \n response = self.client.get(reverse('sequencelistings:add_feature', \n args=[self.sequenceListing.id, seq.id]))\n self.assertEqual(response.status_code, 200)\n# test that the page returns expected html contents\n self.assertContains(response, \"Feature key\")\n self.assertContains(response, \"Submit\")", "def test_add(self):\n # add a todo\n self.add(title=\"Sample task todo\", description=\"for sample\", state=\"todo\")\n task = Task.query.filter_by(title='Sample task todo').first()\n self.assertEqual(task.description, 'for sample')\n self.assertEqual(task.state, 'todo')\n\n # add a doing\n self.add(title=\"Sample task doing\", description=\"for sample\", state=\"doing\")\n task = Task.query.filter_by(title=\"Sample task doing\").first()\n self.assertEqual(task.description, 'for sample')\n self.assertEqual(task.state, 'doing')\n\n # add a done\n self.add(title=\"Sample task done\", description=\"for sample\", state=\"done\")\n task = Task.query.filter_by(title='Sample task done').first()\n self.assertEqual(task.description, 'for sample')\n self.assertEqual(task.state, 'done')", "def test_add(self):\n print('test_add')\n \n self.assertEqual(120, add(100, 20))\n self.assertNotEqual(3, add(10, 10))", "def test_add_furniture_append(_customers_to_add):\n\n test_invoice = \"../data/test-invoice.csv\"\n csv_contents = []\n\n if Path(test_invoice).exists():\n remove(test_invoice)\n\n if not Path(test_invoice).exists():\n open(test_invoice, \"a\").close()\n\n for customer in _customers_to_add:\n l.add_furniture(\n test_invoice, customer[0], customer[1], customer[2], customer[3]\n )\n\n with open(test_invoice, \"r\") as csv_file:\n contents = reader(csv_file, delimiter=',')\n for line in contents:\n if line != []:\n csv_contents += [line]\n\n csv_contents += contents\n\n assert _customers_to_add == csv_contents", "def test_add_to_cart_item_not_in_system(self):\n # test sale products not in db\n\n response = self.client.get(\n '/self.base_url/sales/1999/2',\n headers=dict(Authorization=\"Bearer \" + self.attendant_token),\n content_type = 'application/json'\n )\n\n response_data = json.loads(response.data)\n self.assertEqual(response_data['message'],\"This product does not exist\")\n self.assertEqual(response.status_code,200)\n\n\n # test add item which is at minimum stock", "def test_add_with_end_shelf_life(self):\n good = GoodInfo(\"яйцо 1 кат.\", \"-30\", \"40\", \"2020-12-1\", \n \"3\", \"2020-12-1\")\n check_product_data = self.database.add(good)\n\n self.assertFalse(check_product_data)", "def test_add_product(self):\n view = ProductCreateListView.as_view({'post': 'create'})\n uri = reverse('products:create/list-products')\n data = {\n \"name\": \"Iphone 7\",\n \"description\": \"Mobile phone\",\n \"price\": 200,\n \"is_available\": True\n }\n request = self.factory.post(uri, data, HTTP_AUTHORIZATION='Token {}'.format(self.token_admin.key))\n request.user = self.user['admin']\n response = view(request)\n self.assertEqual(response.status_code, 201,\n f'Expected Response Code 201, received {response.status_code} instead.')", "def test_ingredients_create(self):\n app = self.create_app()\n c = app.test_client()\n\n # test if authorization is required to create an ingredient\n rv = 
c.get('/ingredients/create')\n self.assertRedirects(rv, \"/auth/login\")\n\n register(c, app.config[\"USERNAME\"], app.config[\"PASSWORD\"])\n login(c, app.config[\"USERNAME\"], app.config[\"PASSWORD\"])\n c.get('/ingredients/create')\n self.assert_template_used(\"ingredients/create.html\")\n\n # tests if ingredient already in database\n create_ingredient(c, {'id': 1, 'name': \"ing_unittest1_liquid\", 'portion_size': 4, 'portion_size_unit': \"cup\",\n 'protein': 5.5, 'fat': 7.1, 'carbs': 20.5, 'calories': 98, 'price': 0,\n 'price_size': 0.01, 'price_size_unit': \"gal\", 'tag': \"dairy\", 'notes': \"no notes\"})\n self.assert_message_flashed(\"Ingredient already in the database.\")\n\n # tests inserting new ingredient\n create_ingredient(c, {'id': 1, 'name': \"XXXXX\", 'portion_size': 4, 'portion_size_unit': \"cup\",\n 'protein': 5.5, 'fat': 7.1, 'carbs': 20.5, 'calories': 98, 'price': 0,\n 'price_size': 0.01, 'price_size_unit': \"gal\", 'tag': \"dairy\", 'notes': \"no notes\"})\n self.assert_template_used(\"ingredients/index.html\")", "def test_create_pizza(self):\n url = reverse('pizzas-list')\n data = {'name': 'Quattro Formaggio'}\n response = self.client.post(url, data, format='json')\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n self.assertEqual(Pizza.objects.count(), 1)\n self.assertEqual(Pizza.objects.get().name, 'Quattro Formaggio')", "def test_add_new_product(self):\n self._require_login(self.user1)\n post_data = {\n \"category\": {\n \"name\": \"deportes\",\n \"index\": 1\n },\n \"price\": \"4500.0\",\n \"name\": \"Producto 3\",\n \"description\": \"Descripcion de producto 3\"\n }\n\n response = self.client.post('/api/1.0/products/', data=post_data, format='json')\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n self.assertNotEqual(response.data['published_date'], '')\n self.assertEqual(response.data['name'], 'Producto 3')\n self.assertEqual(response.data['description'], 'Descripcion de producto 3')\n self.assertEqual(response.data['selling'], True)\n self.assertEqual(response.data['price'], '4500.0')\n self.assertEqual(response.data['seller']['user']['username'], self.username)\n self.assertEqual(response.data['category']['name'], 'deportes')", "def test_add_new_sale(self):\n self.register_admin_test_account()\n token = self.login_admin_test() \n\n response = self.app_test_client.post('{}/saleorder'.format(\n self.base_url), json=self.SaleOrder, headers=dict(Authorization=token),\n content_type='application/json')\n\n self.assertEqual(response.status_code, 201)\n\n self.assertEqual(general_helper_functions.convert_json(\n response)['saleorder']['name'], self.SaleOrder['name'])\n\n self.assertEqual(general_helper_functions.convert_json(\n response)['saleorder']['price'], self.SaleOrder['price'])\n\n self.assertEqual(general_helper_functions.convert_json(\n response)['saleorder']['quantity'], self.SaleOrder['quantity'])\n\n self.assertEqual(general_helper_functions.convert_json(\n response)['saleorder']['totalamt'], self.SaleOrder['totalamt'])\n\n self.assertEqual(general_helper_functions.convert_json(\n response)['message'], 'Sale successfully made')", "def test_create_ingredient_succesfull(self):\n\n payload = {'name': 'Cabbage'}\n self.client.post(INGREDIENTS_URL, payload)\n\n exist = Ingredient.objects.filter(\n user=self.user,\n name=payload['name'],\n ).exists()\n self.assertTrue(exist)", "def test_add_feature(self):\n fc1 = self.read_feature()\n fc2 = self.read_feature('Aegean_Sea')\n\n # add a feature already in the feature 
collection\n fc1.add_feature(fc1.features[0])\n assert len(fc1.features) == 1\n\n # add a new feature to the feature collection\n fc1.add_feature(fc2.features[0])\n assert len(fc1.features) == 2\n\n self.check_feature(fc1.features[0])\n self.check_feature(fc1.features[1], expected_name='Aegean Sea')", "def test_detail_view_after_add_feature(self):\n print 'Running %s ...' % getName()\n \n s1 = self.sequenceListingFixture.create_sequence_instance(self.sequenceListing)\n f = s1.feature_set.all()\n self.assertEqual(1, len(f), 'Expected 1 feature.')\n \n# create feature\n f2 = Feature.objects.create(sequence=s1, \n featureKey='allele', \n location='4')\n self.assertEqual('allele', f2.featureKey)\n self.assertEqual('4', f2.location)\n \n f = s1.feature_set.all()\n self.assertEqual(2, len(f), 'Expected 2 features.')\n self.assertEqual('source', f[0].featureKey)\n \n response = self.client.get(reverse('sequencelistings:detail', args=[self.sequenceListing.id]))\n self.assertEqual(response.status_code, 200)\n# test that the page returns expected html contents\n self.assertContains(response, \"source\")\n self.assertContains(response, \"1..18\")\n self.assertContains(response, \"allele\")\n self.assertContains(response, \"4\")", "def test_add_yet_one_more_test(self):\n self.assertTrue(True)", "def test_valid_addition(self):\n\n test_name = sys._getframe().f_code.co_name\n\n log.info(\"###### TEST EXECUTION STARTED :: \" + test_name + \" ######\")\n\n num1 = data_reader.get_data(test_name, \"Number_A\")\n num2 = data_reader.get_data(test_name, \"Number_B\")\n expected_text = data_reader.get_data(test_name, \"Expected\")\n\n with allure.step(\"Verify valid addition functionality\"):\n result = self.main_page.verify_addition_functionality(num1, num2, expected=expected_text)\n self.exe_status.mark_final(test_step=test_name, result=result)", "def test_blog_add():", "def test_add(self):\n self.assertEqual(3, add(1, 2))\n self.assertNotEqual(3, add(2, 2))", "def test_create_recipe_with_ingredient(self):\n ingredient1 = sample_ingredient(user=self.user, name='Prawns')\n ingrident2 = sample_ingredient(user=self.user, name ='Ginger')\n\n payload = {\n 'title': 'Thai prawn and curry',\n 'ingredient': [ingredient1.id,ingrident2.id],\n 'time_minuts':60,\n 'price': 250\n }\n res = self.client.post(RECIPE_URL,payload)\n\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n\n recipe = Recipe.objects.get(id=res.data['id'])\n ingredients = recipe.ingredient.all()\n self.assertEqual(ingredients.count(),2)\n self.assertIn(ingredient1, ingredients)\n self.assertIn(ingrident2,ingredients)", "def test_adding_many_todos(self):\n event = Event.objects.filter(slug__endswith=\"-upcoming\") \\\n .order_by(\"-pk\")[0]\n event.end = event.start + datetime.timedelta(days=2)\n event.save()\n\n # check if the event has 0 todos\n assert event.todoitem_set.all().count() == 0\n\n # add standard todos\n ident = event.get_ident()\n url, form = self._get_initial_form('todos_add', ident)\n\n # fix: turn Nones into empty strings\n for key, value in form.items():\n if value is None:\n form[key] = ''\n\n rv = self.client.post(reverse('todos_add', args=[ident]), form)\n\n # let's check if the form passes\n assert rv.status_code == 302\n\n # finally let's check there are some new todos\n assert event.todoitem_set.all().count() == 9", "def test_add(self):\n self.client.login(username='admin', password='admin')\n response = self.client.post('/add/', {'url': 'http://example.com'}, follow=True)\n self.assertShortURLCreated(response)", 
"def test_add(self):\n self.assertEqual(3, foo.add(1, 2))\n self.assertNotEqual(3, foo.add(2, 2))", "def test_beneficiaries_create_callback_that_will_pass(self):\n post_body = {\n 'lastname': 'Doe',\n 'lastname2': '',\n 'middlename': '',\n 'firstname': 'Jane',\n 'nativename': '',\n 'nationality_country_iso_code': 'FRA',\n 'code': '',\n 'date_of_birth': '1970-07-01',\n 'country_of_birth_iso_code': 'FRA',\n 'gender': 'Male',\n 'address': '42 Rue des fleurs',\n 'postal_code': '75000',\n 'city': 'Paris',\n 'country_iso_code': 'FRA',\n 'msisdn': '1123131413',\n 'email': 'kzhang@microfocus.com',\n 'id_type': 'PASSPORT',\n 'id_country_iso_code': '',\n 'id_number': '1123131413',\n 'occupation': 'Teacher',\n 'bank_accout_holder_name': '',\n 'province_state': ''\n }\n print('the test function name: {}'.format(sys._getframe().f_code.co_name))\n url = reverse('beneficiary:beneficiaries-create')\n response = self.client.post(url, data=post_body, content_type='application/json')\n return self.assertTrue(response.status_code, 201)", "def test_post_foods(self):\n pass", "def test_create_basic_recipe(self):\n payload = {\"title\": \"Vietnamese Cake\",\n \"time_minutes\": 45,\n \"price\": 5.55}\n res = self.client.post(RECIPE_URL, payload)\n recipe = Recipe.objects.get(id=res.data['id'])\n for key in payload.keys():\n if key == \"price\":\n self.assertEqual(round(Decimal(payload[key]), 2), getattr(recipe, key))\n else:\n self.assertEqual(payload[key], getattr(recipe, key))\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)", "def test_add_to_cart(self):\n\n # Log the user in that is not the seller\n self.client.login(username=\"test_user\", password=\"secret\")\n\n # Confirm that product title appears in cart\n response = self.client.get(reverse('website:cart'))\n\n # Check that the response is 200 ok\n self.assertEqual(response.status_code, 200)\n\n # Ensure that the cart displays product title, but not the title for product2\n self.assertIn('<h6 class=\"mr-auto p-2\">Test Product</h6>'.encode(), response.content)\n self.assertNotIn('<h6 class=\"mr-auto p-2\">Test Product2</h6>'.encode(), response.content)\n\n # Confirm that the post returns a response of 302\n response = self.client.get(reverse(\"website:add_to_cart\", args=(1,)))\n self.assertEqual(response.status_code, 302)", "def test_add_item_returns_string():\n assert sc.current.receipt == {'subtotal': 0}", "def test_create_warranty(self):\n pass", "def add_new_flower() -> Union[str, Response]:\n if request.method == \"POST\":\n flower_name = request.form[\"flower_name\"]\n quantity = request.form[\"quantity\"]\n price = request.form[\"price\"]\n valid_quantity = validate_int(quantity)\n valid_price = validate_float(price)\n if not valid_quantity or not valid_price:\n flash(\"Invalid entry\", \"danger\")\n return render_template(\"add_new_flower.html\")\n add = AddFlower(flower_name)\n add.add_new_in_stock(valid_quantity, valid_price)\n return redirect(url_for(\"add_flower\", items=STOCK))\n return render_template(\"add_new_flower.html\")", "def test_adding_a_new_item_with_no_supply(self): \n print '\\n'\n logger.debug('Add a new item to a current PO via PUT')\n print '\\n'\n \n #Verifying po in database\n self.assertEqual(self.po.id, 1)\n self.assertEqual(self.po.items.count(), 1)\n self.assertEqual(self.po.grand_total, Decimal('129.58'))\n self.assertEqual(timezone('Asia/Bangkok').normalize(self.po.order_date).date(), datetime.datetime.now().date())\n item = self.po.items.all()[0]\n self.assertEqual(item.id, 1)\n 
self.assertEqual(item.quantity, 10)\n self.assertEqual(item.total, Decimal('121.1'))\n \n modified_po_data = copy.deepcopy(base_purchase_order)\n modified_po_data['items'][1]['unit_cost'] = Decimal('11.99')\n modified_po_data['items'][1]['comments'] = 'test change'\n modified_po_data['items'][1]['description'] = \"test description change\"\n del modified_po_data['items'][1]['supply']\n resp = self.client.put('/api/v1/purchase-order/1/',\n format='json',\n data=modified_po_data)\n \n #Verify the response\n self.assertEqual(resp.status_code, 200, msg=resp)\n po = resp.data\n self.assertEqual(po['id'], 1)\n self.assertEqual(po['supplier']['id'], 1)\n self.assertEqual(po['vat'], 7)\n #self.assertEqual(Decimal(po['grand_total']), Decimal('74.85'))\n self.assertEqual(po['discount'], 0)\n self.assertEqual(po['revision'], 1)\n self.assertEqual(len(po['items']), 2)\n #self.assertEqual(po['status'], 'PAID')\n #Check the new pdf\n #webbrowser.get(\"open -a /Applications/Google\\ Chrome.app %s\").open(po['pdf']['url'])\n \n item1 = po['items'][0]\n logger.debug(item1)\n self.assertEqual(item1['id'], 2)\n self.assertEqual(item1['quantity'], Decimal('10.0000000000'))\n self.assertEqual(item1['description'], u'Pattern: Maxx, Col: Blue')\n self.assertEqual(Decimal(item1['unit_cost']), Decimal('12.1100'))\n self.assertEqual(Decimal(item1['total']), Decimal('121.10'))\n\n item2 = po['items'][1]\n logger.debug(item2)\n self.assertEqual(item2['id'], 3)\n self.assertEqual(item2['quantity'], Decimal('3.0000000000'))\n self.assertEqual(item2['comments'], 'test change')\n self.assertEqual(item2['description'], 'test description change')\n self.assertEqual(Decimal(item2['unit_cost']), Decimal('11.99'))\n self.assertEqual(Decimal(item2['total']), Decimal('35.97'))\n \n #Verify database record\n po = PurchaseOrder.objects.get(pk=1)\n \n self.assertEqual(po.supplier.id, 1)\n #self.assertEqual(timezone('Asia/Bangkok').normalize(po.order_date), datetime.datetime.now().date())\n self.assertEqual(po.vat, 7)\n self.assertEqual(po.grand_total, Decimal('168.06'))\n self.assertEqual(po.items.count(), 2)\n \n # Check new item in the database\n item2_d = po.items.all().order_by('id')[1]\n self.assertEqual(item2_d.id, 3)\n self.assertEqual(item2_d.description, 'test description change')\n self.assertEqual(item2_d.comments, 'test change')\n self.assertEqual(item2_d.quantity, 3)\n self.assertEqual(item2_d.unit_cost, Decimal('11.99'))\n self.assertEqual(item2_d.total, Decimal('35.97'))\n\n # Check new supply product in the database\n products = SupplyProduct.objects.filter(supply=item2_d.supply, supplier=self.po.supplier)\n self.assertEqual(products.count(), 1)\n product = products.all()[0]\n self.assertEqual(product.supply.id, item2_d.supply.id)\n self.assertEqual(product.supplier.id, self.po.supplier.id)\n self.assertEqual(product.cost, Decimal('11.99'))", "def xtest_adding_a_new_item_with_no_supply(self): \n print '\\n'\n logger.debug('Add a new item to a current PO via PUT')\n print '\\n'\n \n #Verifying po in database\n self.assertEqual(self.po.id, 1)\n self.assertEqual(self.po.items.count(), 1)\n self.assertEqual(self.po.grand_total, Decimal('129.58'))\n self.assertEqual(timezone('Asia/Bangkok').normalize(self.po.order_date).date(), datetime.datetime.now().date())\n item = self.po.items.all()[0]\n self.assertEqual(item.id, 1)\n self.assertEqual(item.quantity, 10)\n self.assertEqual(item.total, Decimal('121.1'))\n \n modified_po_data = copy.deepcopy(base_purchase_order)\n modified_po_data['items'][1]['unit_cost'] = 
Decimal('11.99')\n modified_po_data['items'][1]['comments'] = 'test change'\n modified_po_data['items'][1]['description'] = \"test description change\"\n modified_po_data['status'] = 'PROCESSED'\n\n logger.debug(modified_po_data)\n\n resp = self.client.put('/api/v1/purchase-order/1/',\n format='json',\n data=modified_po_data)\n \n #Verify the response\n self.assertEqual(resp.status_code, 200, msg=resp)\n po = resp.data\n self.assertEqual(po['id'], 1)\n self.assertEqual(po['supplier']['id'], 1)\n self.assertEqual(po['vat'], 7)\n #self.assertEqual(Decimal(po['grand_total']), Decimal('74.85'))\n self.assertEqual(po['discount'], 0)\n self.assertEqual(po['revision'], 1)\n self.assertEqual(len(po['items']), 2)\n #self.assertEqual(po['status'], 'PAID')\n #Check the new pdf\n #webtbrowser.get(\"open -a /Applications/Google\\ Chrome.app %s\").open(po['pdf']['url'])\n \n item1 = po['items'][0]\n logger.debug(item1)\n self.assertEqual(item1['id'], 2)\n self.assertEqual(item1['quantity'], '10.0000000000')\n self.assertEqual(item1['description'], u'Pattern: Maxx, Col: Blue')\n self.assertEqual(Decimal(item1['unit_cost']), Decimal('12.1100'))\n self.assertEqual(Decimal(item1['total']), Decimal('121.10'))\n\n item2 = po['items'][1]\n logger.debug(item2)\n self.assertEqual(item2['id'], 3)\n self.assertEqual(item2['quantity'], '3.0000000000')\n self.assertEqual(item2['comments'], 'test change')\n self.assertEqual(item2['description'], 'test description change')\n self.assertEqual(Decimal(item2['unit_cost']), Decimal('11.99'))\n self.assertEqual(Decimal(item2['total']), Decimal('35.97'))\n \n #Verify database record\n po = PurchaseOrder.objects.get(pk=1)\n \n self.assertEqual(po.supplier.id, 1)\n self.assertEqual(po.status, 'PROCESSED')\n #self.assertEqual(timezone('Asia/Bangkok').normalize(po.order_date), datetime.datetime.now().date())\n self.assertEqual(po.vat, 7)\n self.assertEqual(po.grand_total, Decimal('168.07'))\n self.assertEqual(po.items.count(), 2)\n \n # Check new item in the database\n item2_d = po.items.all().order_by('id')[1]\n self.assertEqual(item2_d.id, 203)\n self.assertEqual(item2_d.description, 'test description change')\n self.assertEqual(item2_d.comments, 'test change')\n self.assertEqual(item2_d.quantity, 3)\n self.assertEqual(item2_d.unit_cost, Decimal('11.99'))\n self.assertEqual(item2_d.total, Decimal('35.97'))\n\n # Check new supply product in the database\n products = SupplyProduct.objects.filter(supply=item2_d.supply, supplier=self.po.supplier)\n self.assertEqual(products.count(), 1)\n product = products.all()[0]\n self.assertEqual(product.supply.id, item2_d.supply.id)\n self.assertEqual(product.supplier.id, self.po.supplier.id)\n self.assertEqual(product.cost, Decimal('11.99'))", "def test_create_recipe_category(self):\n self.signup('Bo', 'Theo', 'Bo_theo5@example.com', 'Bo1995', 'Bo1995')\n self.login('Bo_theo5@example.com', 'Bo1995')\n self.dashboard()\n self.category('JunkFood')\n self.dashboard()\n self.recipe_dashboard()\n rv = self.create_recipe('cakes', 'blah, blah, blah....mix ingredient, heat')\n self.assertIn(b'Recipe created', rv.data)", "def test_api_for_real_stock(self):\n ticker = \"googl\"\n name = \"Google\"\n data = {'name': name, 'ticker': ticker}\n request = self.client.post('/stocks/addstock/', data, follow=True, secure=True)\n self.assertEqual(request.status_code, 200)\n data = Stock.objects.all()\n self.assertEqual(len(data), 1)", "def test_create_ingredient_successful(self):\n payload = {\n 'name': 'turmeric'\n }\n\n self.client.post(INGREDIENT_URL, 
payload)\n\n exists = Ingredients.objects.filter(\n user=self.user,\n name=payload['name']\n ).exists()\n\n self.assertTrue(exists)", "def test_adding_item_to_list(create_shopping_item, create_shopping_list):\n shopping_list = create_shopping_list\n items_before = shopping_list.items.values_list().count()\n new_item = create_shopping_item\n shopping_list.items.add(new_item)\n items_after = shopping_list.items.values_list().count()\n assert items_after > items_before\n assert items_before == 0\n assert items_after == 1", "def test_create_recipe_with_ingredients(self):\n ing1 = sample_ingredient(user=self.user,name=\"ginger\")\n ing2 = sample_ingredient(user=self.user, name=\"Prawn\")\n payload = {\n 'title':'Prawn curry',\n 'ingredient':[ing1.id,ing2.id],\n 'time_minutes':60,\n 'price':10.00,\n }\n res = self.client.post(RECIPE_URL,payload)\n self.assertEqual(res.status_code,status.HTTP_201_CREATED)\n recipe = Recipe.objects.get(id=res.data['id'])\n ingredients = recipe.ingredient.all()\n self.assertEqual(ingredients.count(),2)\n self.assertIn(ing1,ingredients)\n self.assertIn(ing2,ingredients)", "def test_add_without_name(self):\n good = GoodInfo(\"\", \"30\", \"40\", \"2020-12-30\", \n \"14\", \"2020-12-30\")\n check_product_data = self.database.add(good)\n\n self.assertFalse(check_product_data)", "def test_new(self):", "def test_new(self):", "def test_create_product_success(self):\n res = self.client.post(PRODUCTS_URL, PRODUCT_PAYLOAD)\n\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n self.assertEqual(res.data['supplier_id'], self.user.id)\n self.assertEqual(res.data['name'], PRODUCT_PAYLOAD['name'])\n self.assertEqual(res.data['price'], PRODUCT_PAYLOAD['price'])", "def test_add(self):\n # This checks for a correct vlaue.\n with unittest.mock.patch('builtins.input', return_value='Snow'):\n user_date, title, minutes, notes, stop = self.ec.add()\n self.assertEqual(user_date, '01/04/1995')\n self.assertEqual(title, 'Snow')\n self.assertEqual(minutes, 15)\n self.assertEqual(notes, 'Snow')\n self.assertFalse(stop)\n # This checks for an incorrect vlaue.\n with unittest.mock.patch('builtins.input', return_value='q'):\n user_date, title, minutes, notes, stop = self.ec.add()\n self.assertFalse(stop)", "def test_create_recipe_with_ingredients(self):\n ingredient1 = sample_ingredient(user=self.user, name = 'bla')\n ingredient2 = sample_ingredient(user=self.user, name = 'blaa')\n payload = {\n 'title': 'red curry',\n 'ingredients': [ingredient1.id, ingredient2.id],\n 'time_minutes': 30,\n 'price': 30.00\n }\n res = self.client.post(RECIPE_URL, payload)\n\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n recipe = Recipe.objects.get(id=res.data['id'])\n ingredients = recipe.ingredients.all()\n\n self.assertEqual(ingredients.count(), 2)\n self.assertIn(ingredient1, ingredients)\n self.assertIn(ingredient2, ingredients)", "def test_households_in_admin_unit(self):", "def test_create_new_recipe(self):\n payload = {\n 'title': 'Cheescake',\n 'time_taken': 35,\n 'price': 5\n }\n\n res = self.client.post(RECIPE_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n\n recipe = Recipe.objects.get(id=res.data['id'])\n for key in payload.keys():\n self.assertEqual((payload)[key], getattr(recipe, key))\n\n # recipe = get_sample_recipe(self.sample_user)\n # db_recipe =\n\n # self.assertEqual(recipe.title, )", "def test_expenses_creation(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n 
res = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token), data=self.expense)\n self.assertEqual(res.status_code, 201)\n results = json.loads(res.data)\n self.assertEqual('snacks', results['name'])", "def test_add_category(self):\n self.signup('Bo', 'Theo', 'Bo_theo5@example.com', 'Bo1995', 'Bo1995')\n self.login('Bo_theo5@example.com', 'Bo1995')\n self.dashboard()\n rv = self.category('Breakfast')\n self.assertIn(b'Category created', rv.data)", "def test_add(self):\n self.assertEqual(work_file.add(10, 5), 15)\n self.assertEqual(work_file.add(-1, 1), 0)\n self.assertEqual(work_file.add(-1, -1), -2)", "def test_add_all(self): #SAUCE-LAB-7\n login = LoginPage(self.driver)\n login.open()\n inventory_page = login.login(_DEF_USER, _DEF_PASSWORD)\n first_item = inventory_page.products\n first_item: InventoryItem\n for item in first_item:\n item.add_to_cart()\n if inventory_page.header.get_total_cart_items() == 6:\n print('\\n')\n print(f'Total of products {inventory_page.header.get_total_cart_items()}')\n else:\n print('\\n')\n print('Not all items were added')", "def test_registred(\n self, mock_get_ai_details, mock_get_ai, mock_get_purchased, mock_get_categories\n ):\n\n # We mock API calls\n mock_get_ai.return_value = self.ai\n mock_get_ai_details.return_value = self.ai_details\n mock_get_purchased.return_value.json.return_value = [\n factory.build(dict, FACTORY_CLASS=AiFactory),\n factory.build(dict, FACTORY_CLASS=AiFactory),\n factory.build(dict, FACTORY_CLASS=AiFactory)\n ]\n\n response = self.client.get(reverse(\n 'studio:skills',\n kwargs={\n 'aiid': self.ai['aiid']\n }\n ))\n self.assertEqual(response.status_code, 200)", "def test_add1(self):\n self.assertEqual(15, add(10 , 5), \"should be 15\")", "def test_add_event(self):\n self.test_create_organization()\n url = reverse('MGA:add_event')\n data = {'org_id': 1, 'title': 'first', 'capacity': 5, 'description': 'nothing!', 'date': now()}\n response = self.client.post(url, data, format='json')\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def test_create_new_shopping_list(create_shopping_list):\n shopping_list = create_shopping_list\n assert shopping_list.items.values_list().count() == 0\n assert shopping_list.budget == 0", "def test_successful_add_instructor():\n assert add_instructor('mary', 'jones', 'instructor')", "def test_creating_recipe_with_ingredients(self):\n ingredient1 = create_sample_ingredient(user=self.user, name=\"Paprika\")\n ingredient2 = create_sample_ingredient(user=self.user, name=\"Salad\")\n\n payload = {\n \"title\": \"Green Salad\",\n \"time_minutes\": 34,\n \"price\": 4.66,\n \"ingredients\": [ingredient1.id, ingredient2.id]\n }\n res = self.client.post(RECIPE_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n recipe = Recipe.objects.get(id=res.data['id'])\n ingredients = recipe.ingredients.all()\n self.assertEqual(ingredients.count(), 2)\n self.assertIn(ingredient1, ingredients)\n self.assertIn(ingredient2, ingredients)", "def test_funders_created(self):\n # Currently, there is 1 ProjectFunding object in the database\n org_existing = OrganizationFactory(name='Existing Organization')\n funder_existing = ProjectFundingFactory()\n funder_existing.sources.add(org_existing)\n self.assertEqual(ProjectFunding.objects.count(), 1)\n\n self.call_command(filename='power_plant_import/tests/data/six_rows.csv')\n\n # Get the Projects that were created during the import\n (project_ouessant1, project_ouessant2, project_liaoning) = 
self.get_created_projects()\n # The project_ouessant1 has 2 funders, and the project_ouessant2 has 1 funder.\n # The funder_existing also still exists.\n self.assertEqual(ProjectFunding.objects.count(), 4)\n self.assertEqual(project_ouessant1.funding.count(), 2)\n self.assertEqual(\n ProjectFunding.objects.filter(\n project=project_ouessant1,\n amount=100,\n currency='USD',\n ).count(),\n 1\n )\n self.assertEqual(\n ProjectFunding.objects.filter(\n project=project_ouessant1,\n amount=200,\n currency='RUB',\n ).count(),\n 1\n )\n self.assertEqual(project_ouessant2.funding.count(), 1)\n self.assertEqual(\n ProjectFunding.objects.filter(\n project=project_ouessant2,\n amount=None,\n currency=None,\n ).count(),\n 1\n )\n self.assertEqual(project_liaoning.funding.count(), 0)\n # The org_existing is funding 3 Projects: the one for the funder_existing,\n # the project_ouessant1 and the project_ouessant2\n self.assertEqual(org_existing.projectfunding_set.count(), 3)", "def test_create(self):\n self.assertEqual(Exercise.objects.count(), 2)\n payload = {\n 'name': 'Pecho plano',\n 'description': 'Some description',\n 'muscle_group': 'pecho'\n }\n self.client.post('/exercises/', data=payload)\n self.assertEqual(Exercise.objects.count(), 3)", "def test_create_ingredient_successful(self):\n payload = {'name': \"Test ingredient\"}\n\n # in order to create user must be already authenticated\n self.client.post(INGREDIENTS_URL, payload)\n\n exists = Ingredient.objects.filter(\n user=self.user,\n name=payload['name'],\n ).exists()\n\n self.assertTrue(exists)", "def test_basicSalePC(self):\n # Basic price check\n self.log.info(\"Price checking Generic Item via speedkey\")\n pos.click(\"Price Check\")\n pos.click_speed_key(\"Generic Item\")\n \n # Confirm the right item, at the right price\n self.read_price_check(\"Generic Item\", \"$0.01\")\n # Add the item\n pos.click(\"Sell Item\")\n \n # Confirm we added the item\n ret = self.confirm_line(-1, \"Generic Item\", \"$0.01\")\n if ret == True:\n self.log.info(\"Confirmed item added\")\n else:\n self.tc_fail(ret)\n \n # Setup for next test\n self.recover()", "def test_add_new_product(self):\n response=self.add_new_product()\n result = json.loads(response.data.decode('utf-8'))\n self.assertEqual(response.status_code, 201, result['New Product'])", "def test_phonebook_add_names_and_numbers(self):\n self.phonebook.add(\"Sue\", \"12345\")\n self.assertIn(\"Sue\", self.phonebook.get_names())\n self.assertIn(\"12345\", self.phonebook.get_numbers())", "def test_add(self):\n r = main.List.connection()\n main.List.add(r, \"ToDo\", 1, \"Buy apples\", 2, \"20.05.2015\")\n task = r.get(\"ToDo\")\n self.assertTrue(task, \"No such entry in DB. 
Adding failed.\")", "def test_create(self):\n self.assertEqual(Routine.objects.count(), 2)\n payload = {\n 'name': 'Monday routine',\n }\n self.client.post('/routines/', data=payload)\n self.assertEqual(Routine.objects.count(), 3)", "def test_create(self):\n pass", "def test_create_ingredient_successful(self):\n\n payload = {'name': 'Salt'}\n\n res = self.client.post(INGREDIENTS_URL, payload)\n\n exists = Ingredient.objects.filter(\n user=self.user,\n name=payload['name']\n ).exists()\n\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n self.assertTrue(exists)", "def test_price_return(self, basic_factory, garment_factory, affiliate_item_factory):\n basic = basic_factory()\n garment = garment_factory(basic=basic)\n affiliate_item_factory(garment=garment, price=Decimal(100))\n\n budget_end, luxury_start = update_basic_price_points(basic)\n\n assert budget_end == Decimal(100)\n assert luxury_start == Decimal(100)", "def test_create_recipe_with_ingredients(self):\n ingred1 = sample_ingredient(self.user)\n ingred2 = sample_ingredient(self.user, name='sugar')\n payload = {\n 'title': 'cake',\n 'time_minutes': 39,\n 'price': 39,\n 'ingredients': [ingred1.id, ingred2.id]\n }\n res = self.client.post(RECIPE_URL, payload)\n\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n recipe = Recipe.objects.get(id=res.data['id'])\n ingredients = recipe.ingredients.all()\n\n self.assertEqual(ingredients.count(), 2)\n self.assertIn(ingred1, ingredients)\n self.assertIn(ingred2, ingredients)", "def test_add(self):\n query_string = [('x', 56),\n ('y', 56)]\n response = self.client.open('/addition-api/1.0.0/add',\n method='GET',\n query_string=query_string)\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))", "def test_add_with_not_right_shelf_life(self):\n good = GoodInfo(\"яйцо 1 кат.\", \"-30\", \"40\", \"2020-12-30\", \n \"-14\", \"2020-12-30\")\n check_product_data = self.database.add(good)\n\n self.assertFalse(check_product_data)", "def test_get_meals(self):\n with self.client:\n self.add_meal(\"fries\", 10000)\n response = self.get_meals()\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertIn(u\"fries\",\n data['meal_items'][0]['meal_name'])", "def test_add(self):\r\n # the same url works for changing the whole grading model (graceperiod, cutoffs, and grading types) when\r\n # the grading_index is None; thus, using None to imply adding a grading_type doesn't work; so, it uses an\r\n # index out of bounds to imply create item.\r\n grader = {\r\n \"type\": \"manual\",\r\n \"min_count\": 5,\r\n \"drop_count\": 10,\r\n \"short_label\": \"yo momma\",\r\n \"weight\": 17.3,\r\n }\r\n resp = self.client.ajax_post('{}/{}'.format(self.url, len(self.starting_graders) + 1), grader)\r\n self.assertEqual(resp.status_code, 200)\r\n obj = json.loads(resp.content)\r\n self.assertEqual(obj['id'], len(self.starting_graders))\r\n del obj['id']\r\n self.assertEqual(obj, grader)\r\n current_graders = CourseGradingModel.fetch(self.course.id).graders\r\n self.assertEqual(len(self.starting_graders) + 1, len(current_graders))" ]
[ "0.80027395", "0.70377046", "0.6918816", "0.68016565", "0.6707079", "0.66921055", "0.66690326", "0.6534658", "0.6481919", "0.6451947", "0.64004886", "0.63858724", "0.63445914", "0.6246608", "0.6238948", "0.62372917", "0.6225444", "0.62142056", "0.6196843", "0.61662406", "0.61525416", "0.61463815", "0.61367375", "0.6128628", "0.6087897", "0.6084826", "0.60704744", "0.60496575", "0.60485226", "0.60328287", "0.6025724", "0.60199904", "0.6004244", "0.5999555", "0.59929633", "0.5988957", "0.5986267", "0.59768784", "0.5976332", "0.5962955", "0.5945178", "0.59450984", "0.593694", "0.59238064", "0.59057254", "0.59030885", "0.59022737", "0.5897166", "0.58935285", "0.5880571", "0.58789814", "0.5875115", "0.58590674", "0.5858464", "0.5857371", "0.5848306", "0.58449346", "0.58428025", "0.5836767", "0.58358836", "0.5835537", "0.5831131", "0.5824135", "0.58231074", "0.58134985", "0.58119506", "0.5809898", "0.5805821", "0.5805821", "0.58055645", "0.5801179", "0.5800207", "0.57987607", "0.57877415", "0.5781426", "0.578013", "0.57766265", "0.5776084", "0.5775225", "0.57722926", "0.57646465", "0.57624316", "0.57509786", "0.5750392", "0.57465154", "0.5741696", "0.57389975", "0.5735282", "0.57325673", "0.57261366", "0.5722593", "0.5721424", "0.57185876", "0.5712181", "0.5711897", "0.5710253", "0.57065535", "0.56974274", "0.56951314", "0.5694064" ]
0.7485811
1
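The metadata field marks each row for a triplet objective over (query, document, negatives). A minimal sketch of expanding one row into (anchor, positive, negative) training triplets, keeping the highest-scoring (hardest) negatives first; this is illustrative only, not the dataset's prescribed preprocessing:

def row_to_triplets(row, max_negatives=4):
    # Pair the query and its positive document with each of the
    # hardest negatives, ranked by the provided negative_scores.
    ranked = sorted(zip(row["negatives"], row["negative_scores"]),
                    key=lambda pair: float(pair[1]), reverse=True)
    return [(row["query"], row["document"], negative)
            for negative, _score in ranked[:max_negatives]]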
Tests single customer functionality.
def test_single_customer(self):
    create_invoice = single_customer("Susan Wong", "invoice.csv")
    create_invoice("test_items.csv")

    # Generate list of rentals
    with open('invoice.csv', 'r') as csvfile:
        rentals = []
        for row in csvfile:
            rentals.append(row)
    print(rentals)

    # Assert statements
    self.assertEqual(rentals[3], 'Susan Wong,AT92,Office Chair,13\n')
    self.assertEqual(rentals[4], 'Susan Wong,KE25,Espresso Machine,30\n')
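This test obtains a closure from single_customer("Susan Wong", "invoice.csv") and calls it with a rental-items CSV, after which every item in that file appears in the invoice under the fixed customer name. A minimal sketch of such a function, assuming it builds on the add_furniture helper sketched earlier and that the items file has three columns (code, description, monthly price):

from functools import partial
import csv

def single_customer(customer_name, invoice_file):
    # Fix the invoice file and customer name, then add every item
    # listed in a rental-items CSV on that customer's behalf.
    def add_rentals(rental_items_file):
        add_item = partial(add_furniture, invoice_file, customer_name)
        with open(rental_items_file, 'r', newline='') as csvfile:
            for item_code, description, price in csv.reader(csvfile):
                add_item(item_code, description, price)
    return add_rentals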
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_customer(self):\n # get the id of a customer\n test_customer = self._create_customers(\"Alex\")\n logging.debug(test_customer)\n test_customer.create() \n resp = self.app.get(\n \"/customers/{}\".format(test_customer.id), content_type=\"application/json\"\n )\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n data = resp.get_json()\n self.assertEqual(data[\"name\"], test_customer.name)", "def test_get_customer(self):\n get_customer_url = reverse(\"customer_detail\", kwargs={\"pk\": 2})\n response = self.client.get(get_customer_url)\n\n customer_expected_json = {\n \"first_name\": \"Veronica\",\n \"last_name\": \"Ajiambo\",\n \"is_active\": True,\n }\n\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(response.data, customer_expected_json)", "def test_customer_detail(self):\n # first performing create\n id = self._create_model(\"customer\", self.customer_data, [\"name\", \"email\", \"phone\"])\n if id:\n # then performing detail\n self._detail_model(\"customer\", self.customer_data, id, [\"name\", \"email\", \"phone\"])\n \n self.assertIsNotNone(id)", "def test_add_customer(self):\n set_up_db()\n add_customer(*self.test_customer)\n test_customer = Customer.get_by_id(1)\n self.assertEqual(\"Bruce\", test_customer.name)\n self.assertEqual(\"Wayne\", test_customer.last_name)\n self.assertEqual(\"1007 Mountain Drive, Gotham\", test_customer.home_address)\n self.assertEqual(\"228-626-7699\", test_customer.phone_number)\n self.assertEqual(\"b_wayne@gotham.net\", test_customer.email)\n self.assertEqual(True, test_customer.status)\n self.assertEqual(200000.00, test_customer.credit_limit)", "def test_create_customer(self):\n url = reverse('customers-list')\n data = {\n 'first_name': self.customer_first_name,\n 'last_name': self.customer_last_name,\n 'email': self.customer_email\n }\n response = self.client.post(url, data, format='json')\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n self.assertEqual(Customer.objects.count(), 1)\n self.assertEqual(Customer.objects.get().first_name, 'John')", "def test_data_when_import_customer_with_data(self):\n\n customer = self.customers[0]\n self.assertEqual(\"Jimena\", customer.get_first_name())\n self.assertEqual(\"Sanabria\", customer.get_last_name())\n self.assertEqual(\"21-08-1980\", customer.get_date_of_birth())\n self.assertEqual([\"Nueva Granada #1837\"], customer.get_addresses())\n self.assertEqual([4244270,70759942], customer.get_phones())\n self.assertEqual(\"giovi_times@hotmail.com\", customer.get_email())\n self.assertEqual(\"Gold\", customer.get_membership())\n self.assertEqual(\"Active\", customer.get_status())", "def test_activate_customer(self):\n # create a customer to activate\n body = {\n \"name\": \"Kendall\",\n \"address\": \"333 Bedford Street\",\n \"phone_number\": \"555-555-3333\",\n \"email\": \"ktoole@peloton.com\",\n \"credit_card\": \"VISA\"\n }\n resp_create = self.app.post('/customers',\n json=body,\n content_type='application/json')\n self.assertEqual(resp_create.status_code, status.HTTP_201_CREATED)\n self.assertEqual(resp_create.get_json()['active'], True)\n customer_id = resp_create.get_json()[\"id\"]\n\n # deactivate the customer\n logging.debug(customer_id)\n resp_deactivate = self.app.put(\"/customers/{}/deactivate\".format(customer_id),\n json=body,\n content_type=\"application/json\")\n self.assertEqual(resp_deactivate.status_code, status.HTTP_200_OK)\n self.assertEqual(resp_deactivate.get_json()[\"active\"], False)\n\n # activate the customer\n 
logging.debug(customer_id)\n resp_activate = self.app.put(\"/customers/{}/activate\".format(customer_id),\n json=body,\n content_type=\"application/json\")\n self.assertEqual(resp_activate.status_code, status.HTTP_200_OK)\n self.assertEqual(resp_activate.get_json()[\"active\"], True)", "def test_get_customer_by_name(self):\n test_customer = self._create_customers(\"Alex\")\n test_customer.create()\n test_customer = self._create_customers(\"Sally\")\n test_customer.create()\n test_customer = self._create_customers(\"John\")\n test_customer.create()\n resp = self.app.get(\"/customers?name={}\".format(\"John\"))\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n data = resp.get_json()\n self.assertEqual(len(data), 1)\n self.assertEqual(data[0][\"name\"], test_customer.name)", "def test_create_customer(self):\n create_customer_url = reverse(\"customer_list\")\n\n customer_info = {\"first_name\": \"Denny\", \"last_name\": \"Wayne\"}\n\n response = self.client.post(\n create_customer_url, data=customer_info, format=\"json\"\n )\n\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n self.assertEqual(Customer.objects.count(), 5)\n self.assertEqual(Customer.objects.get(pk=5).first_name, \"Denny\")\n self.assertEqual(Customer.objects.get(pk=5).last_name, \"Wayne\")", "def test_customer_list(self):\n self.url = reverse(\"customer-list\")\n response = self.client.get(self.url, **self.auth_headers)\n self.assertEqual(200, response.status_code)", "def test_customer_create(self):\n self._create_model(\"customer\", self.customer_data, [\"name\", \"email\", \"phone\"])", "def test_newCustomer(self):\n\t\tdashboardPage = DashboardPage(self.driver)\n\t\tdashboardPage.goToOnboard()\n\n\n\t\tdashboardPage.createCustomer(USER_NAME, S3FOLDER)\n\t\tdashboardPage.goToCustomerList()\n\t\tdashboardPage.sortRecentCustomer()\n\n\t\tinitialId = dashboardPage.getId()\n\t\teditPage = dashboardPage.goToEditPage() \n\t\tcheckId, checkName, checkS3Folder, maxSize, panoMaxSize, checkBox = editPage.getParameters()\n\n\n\t\tself.assertEqual(initialId, checkId)\n\t\tself.assertEqual(checkName, USER_NAME)\n\t\tself.assertEqual(checkS3Folder, S3FOLDER)\n\t\tself.assertEqual(maxSize, MAX_SIZE)\n\t\tself.assertEqual(panoMaxSize, PANO_MAX_SIZE)\n\t\tself.assertEqual(CHECK_BOX, checkBox)", "def test_display_customers(self):\r\n create_empty_db()\r\n self.assertEqual([], display_customers())\r\n add_customer(**user_1)\r\n add_customer(**user_2)\r\n add_customer(**user_3)\r\n self.assertEqual(['Post Malone', 'Howard Moon', 'Vince Noir'],\r\n display_customers())\r\n drop_db()", "def _create_customers(self, customer_name=\"Alex\"):\n test_customer = Customer(\n name=customer_name,\n address=\"Washington Square Park\",\n phone_number=\"555-555-1234\",\n email=\"alex@jr.com\",\n credit_card=\"VISA\",\n active = True\n )\n return test_customer", "def test_available_customer():\n rep = RentRepository()\n rep.store( '23','12', '1', '1')\n try:\n\n idBook = '13'\n idCustomer = '23'\n flag = '1'\n id = '1'\n Validator.available_customer(rep.get_all(), idCustomer)\n assert False\n\n except RepositoryExceptionRent as msg:\n assert True", "def test_search_customer(self):\n expected_result = {\"name\": \"Bruce\", \"last_name\": \"Wayne\", \"email\": \"b_wayne@gotham.net\",\n \"phone_number\": \"228-626-7699\"}\n set_up_db()\n add_customer(*self.test_customer)\n self.assertDictEqual(expected_result, search_customer(1))", "def test_delete_customer(self):\n set_up_db()\n add_customer(*self.test_customer)\n delete_customer(1)\n 
try:\n Customer.get_by_id(1)\n except DoesNotExist:\n LOGGER.info(\"Customer was deleted.\")", "def test_create_new_customer(client, db_session):\n # Arrange\n customer_data = {\"name\": \"Customer 1\", \"isActive\": True}\n\n # Act\n response = client.post(\"api/customers/\", json=customer_data)\n response_data = response.get_json()\n\n # Assert\n assert response.status_code == status.HTTP_201_CREATED\n assert response_data[\"name\"] == customer_data[\"name\"]", "def test_search_customer(self):\r\n create_empty_db()\r\n add_customer(**user_1)\r\n test_map = {'name': user_1['name'], 'lastname': user_1['lastname'],\r\n 'email': user_1['email_address'],\r\n 'phone_number': user_1['phone_number']}\r\n self.assertEqual(test_map, search_customer(user_1['customer_id']))\r\n\r\n # Non-existant Customer Test\r\n self.assertEqual({}, search_customer('99999'))\r\n drop_db()", "def test_set_existing_customer(self):\n request = self.factory.get('/', follow=True)\n request.user = self.bart\n request.session = {'session_key': 'bart1234'}\n self.cm.process_request(request)\n self.assertEqual(request.customer, self.bart.customer)", "def test_create_customers(self):\n test_customer = self._create_customers(\"Alex\")\n logging.debug(test_customer)\n resp = self.app.post(\n \"/customers\", json=test_customer.serialize(), content_type=\"application/json\"\n )\n self.assertEqual(resp.status_code, status.HTTP_201_CREATED)\n\n # # # Make sure location header is set\n location = resp.headers.get(\"Location\", None)\n self.assertIsNotNone(location)\n # # Check the data is correct\n new_customer = resp.get_json()\n self.assertEqual(\n new_customer[\"name\"], test_customer.name, \"Names do not match\"\n )\n self.assertEqual(\n new_customer[\"address\"], test_customer.address, \"Addresses do not match\"\n )\n self.assertEqual(\n new_customer[\"phone_number\"], test_customer.phone_number, \"Phone number does not match\"\n )\n self.assertEqual(\n new_customer[\"email\"], test_customer.email, \"Email does not match\"\n )\n self.assertEqual(\n new_customer[\"credit_card\"], test_customer.credit_card, \"Credit card does not match\"\n )\n self.assertEqual(\n new_customer['active'], True, \"active status not match\"\n )\n # # # Check that the location header was correct\n resp = self.app.get(location, content_type=\"application/json\")\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n new_customer = resp.get_json()\n self.assertEqual(new_customer[\"name\"], test_customer.name)\n self.assertEqual(new_customer[\"address\"], test_customer.address)\n self.assertEqual(new_customer[\"phone_number\"], test_customer.phone_number)\n self.assertEqual(new_customer[\"email\"], test_customer.email)\n self.assertEqual(new_customer[\"credit_card\"], test_customer.credit_card)\n self.assertEqual(new_customer['active'], True, \"active status not match\")", "def test_3(self):\n c1 = Store.Customer(\"harold\", \"qcf\", True)\n self.assertTrue(c1.is_premium_member(), \"not premium member\")", "def test_get_order_by_customer(self):\n test_order = self._create_orders(1)[0]\n print(test_order.customer_id)\n resp = self.app.get('/orders/customers/{}'.format(test_order.customer_id),\n content_type='application/json')\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n data = resp.get_json()[0]\n self.assertEqual(data['uuid'], test_order.uuid)", "def test_customer_delete(self):\n # first performe create\n id = self._create_model(\"customer\", self.customer_data, [\"name\", \"email\", \"phone\"])\n if id:\n # then performe delete\n 
self._delete_model(\"customer\", id)\n self.assertIsNotNone(id)", "def test_case_customer_part_orders(self):\n pass", "def test_create_new_auth_customer(self):\n request = self.factory.get('/', follow=True)\n request.user = self.lisa\n request.session = {'session_key': 'lisa1234'}\n self.cm.process_request(request)\n self.assertEqual(request.customer.user, self.lisa)", "def test_case_user_not_yet_customer(self):\n pass", "def test_delete_customer(self):\r\n create_empty_db()\r\n add_customer(**user_1)\r\n delete_customer(user_1['customer_id'])\r\n self.assertEqual({}, search_customer(user_1['customer_id']))\r\n drop_db()", "def test_customer_creation():\n agent = AgentFactory()\n customer = CustomerFactory(agent=agent)\n assert agent == customer.agent\n\n customer.name = 'customer test name 1'\n customer.customer_type = 'hom'\n customer.save()\n assert customer.name == 'customer test name 1'\n\n customer.name = 'customer test name 2'\n customer.customer_type = 'oth'\n customer.save()\n assert customer.name == 'customer test name 2'", "def test_addCustomerAddsACustomer(self):\n customersBeforeAddingCustomer = DataManagerUnitTest.dm.getCustomers()\n DataManagerUnitTest.dm.addCustomer(\"John Doe\", \"johndoe@email.com\")\n customersAfterAddingCustomer = DataManagerUnitTest.dm.getCustomers()\n self.assertEqual(1, len(customersAfterAddingCustomer) - len(customersBeforeAddingCustomer))", "def test_create_customer_success(self):\n customer = Customer.objects.create(**customer_data)\n\n self.assertTrue(isinstance(customer, Customer))\n self.assertEqual(customer.__str__(), customer_data['email'])", "def test_valid_customer(self):\n request = MockRequest()\n\n key_list = list_customer_keys(self._connection, _test_username)\n self.assertEqual(len(key_list), 1)\n key_id, key_value = key_list[0]\n\n authentication_string = compute_authentication_string(\n key_id,\n key_value,\n _test_username,\n _test_method,\n current_timestamp(),\n _test_uri\n )\n request.__dict__[\"authorization\"] = authentication_string.split()\n request.__dict__[\"method\"] = _test_method\n request.__dict__[\"headers\"] = {\n 'x-nimbus-io-timestamp' : str(current_timestamp())\n } \n request.__dict__[\"path_qs\"] = _test_uri\n\n authenticator = SqlAuthenticator()\n authenticated = authenticator.authenticate(\n self._connection, _test_username, request\n )\n self.assertTrue(authenticated)", "def test_deactivate_customer(self):\n #create a customer to deactivate\n body = {\n \"name\": \"Robin\",\n \"address\": \"222 Bleeker Street\",\n \"phone_number\": \"555-555-2222\",\n \"email\": \"rarzon@peloton.com\",\n \"credit_card\": \"VISA\"\n }\n \n #test_customer = self._create_customers(\"Alex\")\n #logging.debug(test_customer)\n #test_customer.create() \n resp_create = self.app.post('/customers',\n json=body,\n content_type='application/json')\n self.assertEqual(resp_create.status_code, status.HTTP_201_CREATED)\n self.assertEqual(resp_create.get_json()['active'], True)\n customer_id = resp_create.get_json()[\"id\"]\n\n # deactivate the customer\n logging.debug(customer_id)\n resp_deactivate = self.app.put(\"/customers/{}/deactivate\".format(customer_id),\n content_type=\"application/json\")\n self.assertEqual(resp_deactivate.status_code, status.HTTP_200_OK)\n self.assertEqual(resp_deactivate.get_json()[\"active\"], False)", "def test_get_customer_rental(self):\n get_customer_rentals_url = reverse(\n \"customer_rental_list\", kwargs={\"pk\": self.user1.pk}\n )\n response = self.client.get(get_customer_rentals_url)\n 
self.assertEqual(len(response.data), 1)", "def test_get_customers(self):\n get_customers_url = reverse(\"customer_list\")\n response = self.client.get(get_customers_url)\n\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n # get data from db\n customers = Customer.objects.all()\n serializer = CustomerSerializer(customers, many=True)\n self.assertEqual(response.data, serializer.data)\n\n self.assertEqual(len(response.data), 4)", "def customer():\n customer = stripe.Customer.create(\n description=\"User created by pytest test_payments.py\",\n email=generate_random_email(),\n address={\"country\": \"DK\"},\n )\n yield customer\n customer.delete()", "def test_get_customer_list(self):\n customer = self._create_customers(\"Alex\")\n customer.create()\n customer = self._create_customers(\"Sally\")\n customer.create()\n customer = self._create_customers(\"John\")\n customer.create()\n resp = self.app.get(\"/customers\")\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n data = resp.get_json()\n self.assertEqual(len(data), 3)", "def test_associate_customer_on_signup(self):\n # is this necessary, or is it handled by login logic anyway?\n pass", "def test_update_customer(self):\n # create a customer to update \n test_customer = self._create_customers(\"Alex\")\n resp = self.app.post(\n \"/customers\", json=test_customer.serialize(), content_type=\"application/json\"\n )\n self.assertEqual(resp.status_code, status.HTTP_201_CREATED)\n \n # update the customer\n new_customer = resp.get_json()\n logging.debug(new_customer)\n new_customer[\"address\"] = \"unknown\"\n resp = self.app.put(\n \"/customers/{}\".format(new_customer[\"id\"]),\n json=new_customer,\n content_type=\"application/json\",\n )\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n updated_customer = resp.get_json()\n self.assertEqual(updated_customer[\"address\"], \"unknown\")", "def test_update_stripe_customer_id(self):\n pass", "def test_verify_attrs_customer(self):\n self.assertEqual(self.customer.name, 'Customer Test')\n self.assertEqual(self.customer.document, '000.000.000-00')", "def test_customer_access(self):\n # not logged-in\n for url in self.urls_get:\n response = self.client.get(url, follow=True)\n self.assertTrue(is_login_page(response))\n\n for url in self.urls_post:\n response = self.client.post(url['url'], url['data'], follow=True)\n self.assertTrue(is_login_page(response))\n\n # logged-in. 
Should throw a 403 or redirect to login\n self.client.login(self.user.email)\n for url in self.urls_get:\n response = self.client.get(url, follow=True)\n if response.status_code != 403:\n self.assertRedirects(response, reverse('wl_dashboard:tables_customer'), status_code=302,\n target_status_code=200)\n for url in self.urls_post:\n response = self.client.post(url['url'], url['data'], follow=True)\n if response.status_code != 403:\n self.assertRedirects(response, reverse('wl_dashboard:tables_customer'), status_code=302,\n target_status_code=200)", "def test_create_import_customer_object(self):\n\n self.assertIsInstance(self.import_customer, ImportCustomer)", "def test_4(self):\n c1 = Store.Customer(\"harold\", \"qcf\", False)\n self.assertFalse(c1.is_premium_member(), \"IS premium member\")", "def test_customer_update(self):\n # first performe create\n id = self._create_model(\"customer\", self.customer_data, [\"name\", \"email\", \"phone\"])\n if id:\n # then performe update\n data = { \n \"name\": \"Changed the name\",\n \"email\": self.customer_data[\"email\"],\n \"phone\": self.customer_data[\"phone\"]\n }\n self._update_model(\"customer\", id, data, [\"name\"])\n self.assertIsNotNone(id)", "def test_import_csv_file_return_customer_object(self):\n\n self.assertIsInstance(self.customers[0], Customer)", "def test_import_customer_without_data(self):\n\n customer = self.import_customer.create_customer_object(\"cust002010\", {})\n self.assertIsInstance(customer, Customer)", "def test_adv_w_customer_ad_rep(self):\n ad_rep = AdRep.objects.get(id=1000)\n ad_rep.rank = 'CUSTOMER'\n ad_rep.save()\n self.prep_ad_rep(ad_rep)\n UnqualifiedConsumerEmailTask().run(test_mode=self.consumer)\n self.common_asserts()", "def test_get_orders_by_caterer(self):\n\n\t\tres = self.login_user()\n\t\taccess_token = json.loads(res.data.decode())['access_token']\n\n\t\tresponse = self.client().post(\n\t\t\t'/api/v2/orders',\n\t\t\theaders={\"x-access-token\": access_token},\n\t\t\tdata = json.dumps(\n\t\t\t\tself.order_data) , content_type = 'application/json')\n\t\tself.assertEqual(response.status_code, 201)\n\n\t\tres = self.login_admin_user()\n\t\taccess_token = json.loads(res.data.decode())['access_token']\n\n\t\tresponse = self.client().get(\n\t\t\t'/api/v2/orders',\n\t\t\theaders={\"x-access-token\": access_token})\n\t\tself.assertEqual(response.status_code, 200)", "def test_single_customer(_full_invoice):\n\n test_invoice = \"../data/test-invoice.csv\"\n items_to_insert = \"../data/items.csv\"\n csv_contents = []\n\n function = l.single_customer(\"Kyouma Hououin\", test_invoice)\n function(items_to_insert)\n\n with open(test_invoice, \"r\") as csv_file:\n contents = reader(csv_file, delimiter=',')\n for line in contents:\n if line != []:\n csv_contents += [line]\n\n csv_contents += contents\n\n assert _full_invoice == csv_contents", "def test_list_active_customers(self):\r\n create_empty_db()\r\n add_customer(**user_1)\r\n add_customer(**user_2)\r\n add_customer(**user_3)\r\n self.assertEqual(2, list_active_customers())\r\n drop_db()", "def test_search_no_customer(self):\n set_up_db()\n self.assertEqual({}, search_customer(1))", "def test_list_active_customers(self):\n set_up_db()\n add_customer(*self.test_customer)\n add_customer(customer_id=2, name=\"Clark\", last_name=\"Kent\", home_address=None,\n phone_number=\"228-626-7899\", email=\"ckent@dailyplanet.com\",\n status=True, credit_limit=200.00)\n add_customer(customer_id=3, name=\"Diana\", last_name=\"Prince\", home_address=None,\n 
phone_number=\"587-8423\", email=\"ww@justiceleague.net\",\n status=False, credit_limit=100.00)\n self.assertEqual(2, list_active_customers())", "def test_delete_customer_success(self):\n customer = Customer.objects.create(**customer_data)\n\n self.assertTrue(isinstance(customer, Customer))\n\n customer.delete()\n\n with self.assertRaises(Customer.DoesNotExist):\n Customer.objects.get(email=customer_data['email'])", "def test_associate_customer_on_login(self):\n request = self.factory.post('/shop/auth/login/', follow=True)\n request.user = self.lisa\n customer = Customer()\n customer.save()\n request.session = {\n 'session_key': 'lisa_swap',\n SESSION_KEY: customer.pk,\n }\n request.customer = Customer.objects.get_customer(request)\n user_logged_in.send(sender=self.lisa.__class__, request=request, user=self.lisa)\n self.assertEqual(request.customer, customer)\n self.assertEqual(request.customer.user, self.lisa)", "def test_customer_search(_customers_to_add, _customer_search_christina):\n\n test_invoice = \"../data/test-invoice.csv\"\n csv_contents = []\n\n if Path(test_invoice).exists():\n remove(test_invoice)\n\n for customer in _customers_to_add:\n l.add_furniture(\n test_invoice, customer[0], customer[1], customer[2], customer[3]\n )\n\n func = l.single_customer_search(\"Christina\", test_invoice)\n assert func(\"FG88\") == _customer_search_christina\n assert func(\"FG204\") == \"Nothing Found\"", "def test_addsNewCustomersFromAPIs(self):\n DataManagerUnitTest.dm.reload()\n result = DataManagerUnitTest.dm.onlineStoreDatabase.getCustomers()\n customers = set([key['user']['email'] for key in DataManagerUnitTest.dm.getAllOrders()])\n self.assertEqual(len(customers), len(result))", "def store_customer(self, name):\n pass", "def fake_customer_list(customer_list):\n return fake_generic_listing(customer_list, 'customer')", "def test_update_customer(self):\n set_up_db()\n add_customer(*self.test_customer)\n update_customer_credit(1, 500000.00)\n self.assertEqual(500000.00, Customer.get_by_id(1).credit_limit)", "def test_add_customer(self):\r\n create_empty_db()\r\n add_customer(**user_1)\r\n query = Customer.get(Customer.customer_id == user_1['customer_id'])\r\n self.assertEqual(user_1['name'], query.customer_name)\r\n self.assertEqual(user_1['lastname'], query.customer_last_name)\r\n self.assertEqual(user_1['home_address'], query.customer_address)\r\n self.assertEqual(user_1['phone_number'], query.customer_phone)\r\n self.assertEqual(user_1['email_address'], query.customer_email)\r\n self.assertEqual(user_1['status'], query.customer_status)\r\n self.assertEqual(user_1['credit_limit'], query.customer_limit)\r\n\r\n # add another person\r\n add_customer(**user_2)\r\n query = Customer.get(Customer.customer_id == user_2['customer_id'])\r\n self.assertEqual(user_2['name'], query.customer_name)\r\n self.assertEqual(user_2['lastname'], query.customer_last_name)\r\n self.assertEqual(user_2['home_address'], query.customer_address)\r\n self.assertEqual(user_2['phone_number'], query.customer_phone)\r\n self.assertEqual(user_2['email_address'], query.customer_email)\r\n self.assertEqual(user_2['status'], query.customer_status)\r\n self.assertEqual(user_2['credit_limit'], query.customer_limit)\r\n\r\n # add a duplicate person\r\n with self.assertRaises(ValueError):\r\n add_customer(**user_2)\r\n drop_db()", "def test_create_new_anonymous_customer(self):\n request = self.factory.get('/', follow=True)\n request.user = AnonymousUser()\n request.session = {'session_key': 'anon1234'}\n 
self.cm.process_request(request)\n self.assertTrue(request.customer)\n self.assertEqual(request.customer.user, None)", "def getCustomer(self):\n if self.__orderinfo is None:\n return False\n else:\n self.__customer.getCustomer(self.__orderinfo['customerID'])", "def is_customer(self):\n return self.user_type == 'C'", "def test_it_all(self):\n LOGGER.info(\"test_it_all\")\n\n # Make sure a Customer table exists and make sure it is empty.\n create_tables()\n delete_customer_table()\n\n # Add three rows to the Customer table from the CUSTOMERS data, above.\n add_customers(CUSTOMERS)\n\n # Delete a row containing a customer whose status is active.\n customer_id_deleted = -1 # will be used in blocks below\n customer_id_credit_updated = -1 # will be used in blocks below\n\n with DATABASE.transaction():\n query = Customer.select().where(Customer.status == True) # pylint: disable=E1111,C0121\n self.assertTrue(len(query) == 2)\n customer_id_deleted = query[0].customer_id\n delete_customer(customer_id_deleted)\n # update credit for the other active customer\n customer_id_credit_updated = query[1].customer_id\n update_customer_credit(customer_id_credit_updated, 1300.01)\n\n # Verify that there is 1 active customer in the db.\n self.assertEqual(list_active_customers(), 1)\n\n # Search the remaining customers and verify their data\n customer_lookup = {\"Beat\": \"beat_choonz\",\n \"Shady\": \"shady_flava\",\n \"Vegeta\": \"vegeta_colt\"}\n\n for customer_id in range(1, 4):\n if customer_id is customer_id_deleted:\n continue\n\n acustomer = search_customer(customer_id)\n original_customer_key = customer_lookup[acustomer[\"name\"]]\n original_customer = CUSTOMERS[original_customer_key]\n\n self.assertEqual(acustomer[\"name\"], original_customer[\"first_name\"])\n self.assertEqual(acustomer[\"last_name\"], original_customer[\"last_name\"])\n self.assertEqual(acustomer[\"email_address\"], original_customer[\"email_address\"])\n self.assertEqual(acustomer[\"phone_number\"], original_customer[\"phone\"])\n\n # Verify the new credit limit for the customer whose credit was updated\n if customer_id is customer_id_credit_updated:\n with DATABASE.transaction():\n query = Customer.select().where(Customer.customer_id == customer_id) # pylint: disable=E1111\n self.assertEqual(float(query[0].credit_limit), 1300.01)\n\n # Try to update the deleted customer's credit\n with self.assertRaises(ValueError):\n update_customer_credit(customer_id_deleted, 12345.00)\n\n # Try to delete the deleted customer\n with self.assertRaises(ValueError):\n delete_customer(customer_id_deleted)\n\n # Try to search for deleted customer\n acustomer = search_customer(customer_id_deleted)\n self.assertDictEqual(acustomer, {})\n\n # Update customer credit in batch\n ids_credit = tuple((id, 21000) for id in range(1, 4))\n\n # Check all results are True, meaning all the customers' credit was updated successfully,\n # except for the customer that is not in the db, which should have a False result.\n for result in update_multiple_customers_credit(ids_credit):\n if result[0] is customer_id_deleted:\n self.assertFalse(result[1])\n else:\n self.assertTrue(result[1])\n\n # Check the ones that got updated that they were updated to 21000.00.\n for customer_id in range(1, 4):\n with DATABASE.transaction():\n try:\n acustomer = Customer.get(Customer.customer_id == customer_id)\n self.assertEqual(float(acustomer.credit_limit), 21000.00)\n except DoesNotExist:\n pass", "def test_client_post():\n test_data = {\"email\": \"test-pymango@example.org\", \"name\": 
\"Test Customer\"}\n response = client.req(TEST_API_KEY, \"post\", \"v1/customers/\", data=test_data)\n for k in test_data.keys():\n eq_(test_data.get(k), response.get(k))", "def test_create_customer_rental(self):\n create_rental_url = reverse(\n \"customer_rental_list\", kwargs={\"pk\": self.user1.pk}\n )\n\n data = {\"book\": self.book1.pk}\n response = self.client.post(create_rental_url, data=data, format=\"json\")\n\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def is_customer(self) -> bool:\n return self.customer_id is not None", "def test_get_non_existent_customer_returns_404(self):\n get_customer_url = reverse(\"customer_detail\", kwargs={\"pk\": 1002})\n response = self.client.get(get_customer_url)\n\n self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)", "def main():\n test_customer = Customer('Jake', 'Robin', 25, 'Male')\n\n print_title(test_customer)\n print_object_details(test_customer)", "def test_comicscreators_get(self):\n pass", "def test_get_order_by_customer_fail(self):\n test_order = self._create_orders(1)[0]\n print(test_order.customer_id)\n resp = self.app.get('/orders/customers/{}'.format(0),\n content_type='application/json')\n print(resp.status_code)\n print(resp.get_json())\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n self.assertEqual([], resp.get_json())", "def test_update_customer_success(self):\n customer = Customer.objects.create(**customer_data)\n\n self.assertTrue(isinstance(customer, Customer))\n\n email= 'doe2018@john.com'\n customer.email = email\n customer.save()\n\n self.assertEqual(customer.__str__(), email)", "def customer(self, customer_id=None):\r\n return customers.Customer(self, customer_id)", "def create_customer(cls, api, **data):\n return api.create_customer(**data)", "def test_get_list_customers_with_filters(client, db_session, make_customer_list):\n # Arange\n customers = make_customer_list(10)\n\n # Act\n response = client.get(\"api/customers/?name=customer\")\n response_data = response.get_json()\n\n # Assert\n assert response.status_code == status.HTTP_200_OK\n assert len(response_data[\"items\"]) == len(customers)", "def test_delete_customer_fails(self):\n customer = Customer.objects.create(**customer_data)\n\n self.assertTrue(isinstance(customer, Customer))\n\n with self.assertRaises(Customer.DoesNotExist):\n new_customer = Customer.objects.get(email='mac@jordan.com')\n new_customer.delete()", "def get(self, customer_id):\n customer = get_a_customer(customer_id)\n if not customer:\n api.abort(404)\n else:\n return customer", "def has_customer(self):\n return self.customer is not None", "def test_list_active_users(_list_active_customers):\n for customer in _list_active_customers:\n bo.add_customer(\n customer[0],\n customer[1],\n customer[2],\n customer[3],\n customer[4],\n customer[5],\n customer[6],\n customer[7]\n )\n\n assert bo.list_active_customers() == 4\n\n for customer in _list_active_customers:\n bo.delete_customer(customer[0])\n \n assert bo.list_active_customers() == 0", "def test_get_order_buyer_info(self):\n pass", "def get_customer(self) -> djstripe.models.Customer:\n if self.customer_id:\n return self.customer\n\n name = self.display_name or self.name or \"\"\n email = self.billing_email or self.email or \"\"\n\n if stripe.api_key != \"sk_test_xxxx\":\n try:\n customer = stripe.Customer.create(name=name, email=email)\n self.customer = djstripe.models.Customer.sync_from_stripe_data(customer)\n except Exception:\n logger.exception(\"Error creating customer on Stripe\")\n 
else:\n self.customer = djstripe.models.Customer.objects.create(\n id=shortuuid.uuid(), name=name, email=email\n )\n\n self.save()\n return self.customer", "def customer_get_one(user_id):\n return customer_get(user_id)", "def customer(self):\n return self.__customer", "def get_new_customer() -> Customer:\r\n print(\"\\n-- PERSONAL INFORMATION --\")\r\n print(\"To start an order you must provide the following details.\\n\")\r\n\r\n print(\"- NAME -\")\r\n first_name = get_valid_input(\"Please type your FIRST NAME: \", validate_name)\r\n last_name = get_valid_input(\"Please type your LAST NAME: \", validate_name)\r\n\r\n print(\"\\n- CONTACT -\")\r\n email = get_valid_input(\"Please type your EMAIL address: \", validate_email)\r\n phone = get_valid_input(\"Please type your PHONE NUMBER: \", validate_phone).replace(\"-\",\"\").replace(\"(\", \"\").replace(\")\", \"\")\r\n\r\n print(\"\\n- ADDRESS -\")\r\n print(\"Please type your ADDRESS using the following form.\")\r\n print(\"HOUSE # Street Name, City, State/Province, ZIP/Postal Code\")\r\n print(\"EXAMPLE: 700 Pennsylvania Avenue NW, Washington, DC, 20408\")\r\n\r\n address = get_valid_input(\"ADDRESS: \", validate_address)\r\n\r\n customer = Customer(last_name, first_name, email, phone, address)\r\n return customer", "def get_customer_id_by_sale_id(sale_id):\n\n # your code", "def is_customer(self):\n return self.rol == ProfileRoles.CUSTOMER", "def test_update_customer_credit(self):\r\n create_empty_db()\r\n add_customer(**user_1)\r\n update_customer_credit(user_1['customer_id'], 5000.00)\r\n query = Customer.get(Customer.customer_id == user_1['customer_id'])\r\n self.assertEqual(5000.00, query.customer_limit)\r\n\r\n # Test for non-existant customer\r\n with self.assertRaises(ValueError):\r\n update_customer_credit('456879', 5000.00)\r\n\r\n # Test for non-float value inputted\r\n with self.assertRaises(TypeError):\r\n update_customer_credit(user_1['customer_id'], '$20')\r\n drop_db()", "def test_invoice_create(self):\n # first we create a customer\n id = self._create_model(\"customer\", self.customer_data, [\"name\", \"email\", \"phone\"])\n if id:\n # then we can create the invoice\n data = self.invoice_data\n data[\"customer_id\"] = id\n self._create_model(\"invoice\", data, [])\n self.assertIsNotNone(id)", "def test_client_retrieve(self):\n pass", "def customer_created_handler(event):\n obj = event.obj\n\n # submit customer after creation\n obj.workflow.submit()", "def test_get_sale_record(self):\n reply = self.admin_add_product()\n\n resp = self.admin_create_user()\n reply = self.attendant_login()\n token = reply['token']\n sale = dict(products = [\n {\n \"prod_name\":\"NY_denims\", \n \"quantity\":10\n }\n\t ])\n resp = self.client.post(\n '/api/v1/sales',\n content_type='application/json',\n data=json.dumps(sale),\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n \n self.assertEqual(reply['message'], 'Sale record created')\n self.assertEqual(resp.status_code, 200)\n\n resp = self.client.get(\n '/api/v1/sales/1',\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n \n self.assertEqual(reply['message'], 'Sale fetched sucessfully!')\n self.assertEqual(resp.status_code, 200)", "def customer(self):\n return Customer(self._dict.get('customer'))", "def test_get_specific_sale_record(self):\n \n self.register_admin_test_account()\n token = self.login_admin_test()\n\n response = self.app_test_client.get(\n 
'{}/saleorder'.format(self.base_url), json={\n 'sale_id': 1,\n 'name': \"Sample Bags\",\n 'price': 20,\n 'quantity': 1,\n 'totalamt': 20\n },\n headers=dict(Authorization=token),\n content_type='application/json')\n\n response = self.app_test_client.get(\n '{}/saleorder/1'.format(self.base_url),\n headers=dict(Authorization=token),\n content_type='application/json'\n )\n \n self.assertEqual(response.status_code, 200)", "def test_case_customer_complete_courseevent_order(self):", "def test_comicscreators_id_get(self):\n pass", "def add_customer(customer_id, first_name, last_name, home_address,\n phone_number, email_address, status, credit_limit):\n print('Adding new customer, Customer ID {}...'.format(customer_id))\n try:\n Customer.get_by_id(customer_id)\n print('Customer ID {} is already in use'.format(customer_id))\n except Exception as ex:\n if \"instance matching query does not exist\" in str(ex):\n try:\n new_customer = Customer.create(customer_ID=customer_id,\n first_name=first_name,\n last_name=last_name,\n home_address=home_address,\n phone_number=phone_number,\n email_address=email_address,\n status=status,\n credit_limit=credit_limit)\n new_customer.save()\n LOGGER.info('Added new customer, Customer ID %s', customer_id)\n except IntegrityError:\n print('Incorrect format, customer {} not saved'\n .format(customer_id))", "def test_get_order(self):\n # get the id of a order\n order = Order.find_by_customer_id(1)[0]\n resp = self.app.get('/orders/{}'.format(order.id),\n content_type='application/json')\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n data = json.loads(resp.data)\n self.assertEqual(data['customer_id'], order.customer_id)", "def test_invoice_detail(self):\n # first we create a customer\n id = self._create_model(\"customer\", self.customer_data, [\"name\", \"email\", \"phone\"])\n if id:\n # then we can create the invoice\n data = self.invoice_data\n data[\"customer_id\"] = id\n id_inv = self._create_model(\"invoice\", data, [])\n if id_inv:\n # then performing detail\n self._detail_model(\"invoice\", self.invoice_data, id, [])\n self.assertIsNotNone(id_inv)\n self.assertIsNotNone(id)", "def add_customer(customer_id, name, lastname, home_address,\n phone_number, email_address, status, credit_limit):\n try:\n with database.transaction():\n customer = Customer.create(\n customer_id=customer_id,\n name=name,\n lastname=lastname,\n home_address=home_address,\n phone_number=phone_number,\n email_address=email_address,\n status=status,\n credit_limit=credit_limit,\n )\n customer.save()\n except Exception as unknown_error:\n print(unknown_error)" ]
[ "0.79642403", "0.76802677", "0.75478774", "0.7539025", "0.73661083", "0.7350192", "0.7332087", "0.72675115", "0.72450405", "0.7234708", "0.72308517", "0.71525866", "0.7051875", "0.6995165", "0.6978112", "0.69741607", "0.69428545", "0.6925339", "0.6922476", "0.6905763", "0.6901384", "0.68974566", "0.6896882", "0.68801117", "0.6844671", "0.68427277", "0.68323416", "0.6829282", "0.67786074", "0.6774265", "0.67398727", "0.67255276", "0.66943866", "0.6693104", "0.6683042", "0.6681061", "0.66773295", "0.6672892", "0.66204876", "0.66084325", "0.6578653", "0.6568171", "0.65654814", "0.65473306", "0.6519365", "0.64796406", "0.6454954", "0.6450067", "0.64324594", "0.6428295", "0.64156675", "0.6396947", "0.63918823", "0.6390269", "0.6339305", "0.63291734", "0.6315576", "0.6314174", "0.63080347", "0.6293471", "0.629316", "0.62900394", "0.62559944", "0.6209446", "0.6206534", "0.62042344", "0.6162169", "0.61349165", "0.6129715", "0.6124896", "0.61239773", "0.61197245", "0.6111891", "0.60799384", "0.60657233", "0.6022006", "0.5994098", "0.5992547", "0.59916055", "0.59852225", "0.59812796", "0.5978196", "0.59757555", "0.59631705", "0.5959458", "0.5942916", "0.5942294", "0.59406936", "0.593012", "0.5923498", "0.5880987", "0.5880455", "0.587846", "0.5874933", "0.58732593", "0.58730406", "0.58654284", "0.586368", "0.5855174", "0.5854856" ]
0.7136031
12
Return the match method once, then stop
def __iter__(self):
    yield self.match
    raise StopIteration
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def match(self) -> bool:", "def handleMatch(self, m):\r\n pass", "def match(self) -> \"MatchResult\":\n raise NotImplementedError", "def continue_running(self, method):", "def __iter__(self): \n yield self.match \n raise StopIteration", "def __iter__(self):\n yield self.match\n raise StopIteration", "def continue_search( self ):\n return True;", "def matchResult(self, method, path):\n pass", "def find_matches(self, match_fn):\n return\n yield # Turns this function into a generator but that is empty", "def find_matches(self, match_fn):\n return\n yield # Turns this function into a generator but that is empty", "def matches(self):\n pass", "def _match(self) -> None:\n self.matched = [i for i in self.data if self.match(i)]\n self.unmatched = [i for i in self.data if not self.match(i)]", "def greedy(self) -> Action:\n return NotImplemented", "def match(self, ctx):\n pass", "def on_match_start(self, *args, **kwargs):\n self._match = list()", "def __iter__(self):\n yield self.match", "def __iter__(self):\n self.currentMatchIndex = -1\n return self", "def test_match_or_none2():\r\n runmatch(lcode)", "def rpc_match():", "def test_match_or_none():\r\n runmatch(lcode)", "def matching_function(self):\n return self.matching", "def process_match_result(self, match):\n raise NotImplementedError()", "def matches(self):\n return False", "def match(self, item):", "def route_match(self):\n if self.whole_word_var.get():\n self.whole_word_matches()\n else:\n self.partial_word_matches()", "def route_match(self):\n if self.whole_word_var.get():\n self.whole_word_matches()\n else:\n self.partial_word_matches()", "def null_match():\n return Self._.match(\n lambda e=Example: e\n )", "def go_again(self):\n return True", "def _stop(self):", "def go_again(self):\n return False", "def match(self, *args): \n if self.fall or not args: \n return True \n elif self.value in args: # changed for v1.5, see below \n self.fall = True \n return True \n else: \n return False \n\n pass", "def run(self):\n # Handle a mismatch with the lock token.\n if not self.regex.match(self.locktoken): return 0\n\n # Execute the child actions.\n return super(FilterLockToken, self).run()", "def __checkForPattern(self):\n if self._keyCode in self._patterns:\n assert(self.notify.debug(\"Pattern Match: \" + self._keyCode))\n messenger.send(KeyCodes.PATTERN_MATCH_EVENT, [self._keyCode])\n self.reset()\n \n # If the key code is longer than the longest pattern possible,\n # Then reset! 
\n elif self._keyCodeCount == self._patternLimit or len(self.getPossibleMatchesList()) == 0:\n assert(self.notify.debug(\"No pattern match!\"))\n messenger.send(KeyCodes.PATTERN_NO_MATCH_EVENT)\n self.reset()", "def matches(self, target):\n raise NotImplementedError()", "def run(self):\n # If the path name doesn't match, do nothing.\n if not self.regex.search(self.path): return\n\n # Execute the child actions.\n return super(FilterPath, self).run()", "def _internal_match(self, pattern):\n compiled_re = re.compile(pattern)\n for word in self.words:\n if compiled_re.fullmatch(word) is not None:\n yield word", "def test(self, parent, block):\n\n self.match = self.pattern.match(block) if self.pattern is not None else None\n return self.match is not None", "def test_matcher_called(self):\n\n skill = _TestSkill(None, None)\n message = Mock()\n skill.hello_skill(message)\n\n self.assertTrue(message.respond.called_once)", "def _consume(self, pattern):\n if self.is_finished:\n raise StopIteration()\n found = re.match(pattern, self.text[self.pos:])\n if found is None:\n return None\n self.pos += found.end()\n return found.group()", "def test_match_right_none():\r\n runmatch(lcode_right_none)", "def do_match(self, context):\n\t\treturn self.extract(context) is not None", "def ask_find_match(self, event=None):\n term = self.text_find.get()\n if term == '':\n return\n if self.term != term:\n self.term = term\n self.chars = len(term)\n self.text.tag_remove('found', '1.0', tk.END)\n self.route_match()\n self.highlight_next_match()", "def ask_find_match(self, event=None):\n term = self.text_find.get()\n if term == '':\n return\n if self.term != term:\n self.term = term\n self.chars = len(term)\n self.text.tag_remove('found', '1.0', tk.END)\n self.route_match()\n self.highlight_next_match()", "def _on_go(self) -> None:\n if self._is_ringing_rounds:\n self._should_start_method = True", "def matches(self, change):\n\n return False", "def __gotoNextMethodClass(self):\n self.activeWindow().gotoMethodClass(False)", "def match(self, *args):\n if self.fall or not args:\n return True\n elif self.value in args: # changed for v1.5, see below\n self.fall = True\n return True\n else:\n return False", "def match(self, *args):\n if self.fall or not args:\n return True\n elif self.value in args: # changed for v1.5\n self.fall = True\n return True\n else:\n return False", "def hit(self):", "def FindNext():\r\n return _hiew.HiewGate_FindNext()", "def _play_match(self, team, opponent, point, mode, match_id):", "def do_matchmaking(self):\n # Remove seeks, offers, etc for lagged-out players\n now = time.time()\n\n to_remove = {}\n for pname in self.last_ping:\n elapsed = now - self.last_ping[pname]\n logging.debug('Time since last ping: %f' % elapsed)\n if elapsed > TIMEOUT:\n msg = 'Lost contact with %s' % pname\n to_remove[pname] = msg\n \n for pname in to_remove:\n msg = to_remove[pname]\n self._rem_player(pname, msg)\n\n # Remove outstanding (and now expired) match offers\n #for match in self.offers.values():\n # msg = 'Match offer %s expired' % match.matchid\n # self._rem_offer(match, msg)\n\n # Generate new match offers\n for m in self.matchmaker.generate_matches(self.seeks.values()):\n self.offers[m.matchid] = m\n for s in m.seeks:\n msg = 'Seek %s matched Match %s' % (s.seekid, m.matchid)\n self.seeks.pop(s.seekid, None)\n self.comm.offer_match(m)", "def __next__(self):\n\t\treturn next()", "def _stop(self):\n return True", "def stop() -> None:", "def match(self, *args):\n if self.fall or not args:\n return True\n if 
self.value in args:\n self.fall = True\n return True\n return False", "def stop():\n raise StopIteration", "def stop(self):", "def stop(self):", "def stop(self) -> None:", "def stop(self) -> None:", "def _stop_all(self, method_name):\n for proc in self.get_all():\n if proc.alive:\n getattr(proc, method_name)()", "def match(self, other):", "def match(self, sentence) -> bool:\r\n pass", "def next():", "def next():", "def skip_until_re(self, r, flags=0, timeout=None):\n match = self.read_cond(lambda x: re.search(r, x.buf, flags=flags), timeout)\n self.buf = self.buf[match.start():]\n return match if len(match.groups()) > 1 else match.group(len(match.groups()))", "def _result_already_returned(self):\n return self.deferred.called", "def match(self, *args):\n if self.fall or not args:\n return True\n elif self.value in args: # changed for v1.5, see below\n self.fall = True\n return True\n else:\n return False", "def match(self, *args):\n if self.fall or not args:\n return True\n elif self.value in args: # changed for v1.5, see below\n self.fall = True\n return True\n else:\n return False", "def match(self, *args):\n if self.fall or not args:\n return True\n elif self.value in args: # changed for v1.5, see below\n self.fall = True\n return True\n else:\n return False", "def match(self, *args):\n if self.fall or not args:\n return True\n elif self.value in args: # changed for v1.5, see below\n self.fall = True\n return True\n else:\n return False", "def match(self, *args):\n if self.fall or not args:\n return True\n elif self.value in args: # changed for v1.5, see below\n self.fall = True\n return True\n else:\n return False", "def test_match_regexp_including_start():\r\n runmatch(lcode)", "def fuzz():\n if FUZZ:\n time.sleep(random.random())", "def test_match_right_regexp_to_none():\r\n runmatch(lcode)", "def test_match_left_none():\r\n runmatch(lcode_left_none)", "def match(self, *args):\n if self.fall or not args:\n return True\n elif self.value in args: # changed for v1.5, see below\n self.fall = True\n return True\n else:\n return False", "def match(self, inp):\n return 0", "def _do_iteration(self):\n return True", "def match(self, operation: str, path: str):\n for reg, handler in self.handlers:\n if re.match(reg, path):\n print(\"Found matching handler for\", operation, path)\n method = getattr(handler, operation)\n return method\n raise Exception(f\"No handler for {operation} at {path}\")", "def _uncached_match(self, text, pos, cache):\n m = self.re.match(text, pos)\n if m is not None:\n span = m.span()\n node = RegexNode(self.name, text, pos, pos + span[1] - span[0])\n node.match = m # TODO: A terrible idea for cache size?\n return node", "def matches(self, accession):\n pass", "def run(self):\n # If the change type doesn't match, do nothing.\n if not self.regex.match(self.chgtype): return 0\n\n # Perform the child actions.\n return super(FilterChgType, self).run()", "def next(self):\n while True:\n try:\n return super(Camera, self).next()\n except ids_core.IDSCaptureStatus:\n self._check_capture_status()", "def restart(self, relay):\n if self.stop():\n return self.start(relay)\n return False", "def match(self, item):\n if self._position == len(self._matchers):\n raise RuntimeError('Matcher exhausted, no more matchers to use')\n\n matcher = self._matchers[self._position]\n if matcher(item):\n self._position += 1\n\n if self._position == len(self._matchers):\n # All patterns have been matched\n return True\n\n return False", "def stop(self):\n return self", "def match(self, match: 
CallMatch, matcher: CallMatcher):\n\n if match.terminated:\n raise SyntaxError(f\"Tried matching {self.node_name} after match was terminated\")", "def __call__(self):\r\n return self.next()", "def restart(self):\n\t\treturn self.reset().start()", "def does_match(self):\n if self.stopped:\n return False\n return self.current_state == self.q3", "def stop(self):\n return", "def match(self, target, guess):\r\n return guess == target" ]
[ "0.6595691", "0.6266482", "0.62587726", "0.621846", "0.61685926", "0.60233533", "0.5958962", "0.5932863", "0.5927277", "0.5927277", "0.5921931", "0.5910934", "0.590172", "0.5840986", "0.5780704", "0.57200015", "0.5675008", "0.5612232", "0.5565016", "0.55173606", "0.5446127", "0.5437482", "0.5409024", "0.53881294", "0.5359487", "0.5359487", "0.53517526", "0.5351686", "0.5332766", "0.5313133", "0.5307114", "0.5234572", "0.5227728", "0.5215304", "0.5211191", "0.5211141", "0.52007794", "0.5193343", "0.5187756", "0.51677376", "0.51665854", "0.51598436", "0.51598436", "0.51474106", "0.5129905", "0.51257116", "0.51225", "0.5120055", "0.5112271", "0.5107232", "0.51025057", "0.5100631", "0.5074669", "0.5073873", "0.50729114", "0.5067626", "0.50567305", "0.50537556", "0.50537556", "0.50492984", "0.50492984", "0.504853", "0.503409", "0.5033252", "0.5018043", "0.5018043", "0.50071394", "0.5001559", "0.49967107", "0.49967107", "0.49967107", "0.49967107", "0.49967107", "0.4995168", "0.4990144", "0.4989171", "0.4988502", "0.49860117", "0.4977551", "0.49635053", "0.49617335", "0.49603036", "0.49588615", "0.4956027", "0.49487078", "0.49481225", "0.49437177", "0.4942265", "0.49399132", "0.49379936", "0.49347872", "0.4922619", "0.4912662", "0.49111554" ]
0.5906426
18
Indicate whether or not to enter a case suite
def match(self, *args):
    if self.fall or not args:
        return True
    elif self.value in args:  # changed for v1.5, see below
        self.fall = True
        return True
    else:
        return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_case(self) -> bool:\n return pulumi.get(self, \"test_case\")", "def should_run(self, case: Tuple[Dict[str, Any], ...]) -> bool:\n return True", "def CASE10( self, main ):\n import time\n from tests.CHOTestMonkey.dependencies.events.Event import EventType\n from tests.CHOTestMonkey.dependencies.EventScheduler import EventScheduleMethod\n\n main.log.report( \"Run all enabled checks\" )\n main.log.report( \"__________________________________________________\" )\n main.case( \"Run all enabled checks\" )\n main.step( \"Run all enabled checks\" )\n main.caseResult = main.TRUE\n main.eventGenerator.triggerEvent( EventType().CHECK_ALL, EventScheduleMethod().RUN_BLOCK )\n # Wait for the scheduler to become idle before going to the next testcase\n with main.eventScheduler.idleCondition:\n while not main.eventScheduler.isIdle():\n main.eventScheduler.idleCondition.wait()\n utilities.assert_equals( expect=main.TRUE,\n actual=main.caseResult,\n onpass=\"All enabled checks passed\",\n onfail=\"Not all enabled checks passed\" )\n time.sleep( main.caseSleep )", "def test_modes(self):\n step = self.run_step('S01-first.py')\n self.assertTrue(step.success)\n self.assertTrue(step.local.is_testing)\n self.assertFalse(step.local.is_interactive)\n self.assertFalse(step.local.is_single_run)", "def enter_case(self, case):\n print \" \" + case.how_to_behave + '.'", "def startOfTestcase(self):\n pass # nothing to do here. Hence pass statement is called.", "def is_test(self):\r\n return self.has_label('tests')", "def in_suite(cls, method, suite_name):\n return suite_name in getattr(method, '_suites', set())", "def requires_case(self):\n return any(not action.auto_select for action in self.actions.load_update_cases)", "def entrance_exam(self):\n status = False\n tool = ProgrammingTool.create(self.PROGRAMMING_TOOL)\n if tool.connect(self.target_name):\n status = entrance_exam(tool, self.register_map)\n tool.disconnect()\n\n return status == EntranceExamErrors.OK", "def runTest(self):\n return True", "def CASE51( self, main ):\n import time\n from tests.CHOTestMonkey.dependencies.events.Event import EventType\n from tests.CHOTestMonkey.dependencies.EventScheduler import EventScheduleMethod\n\n main.log.report( \"Set FlowObjective to False\" )\n main.log.report( \"__________________________________________________\" )\n main.case( \"Set FlowObjective to False\" )\n main.step( \"Set FlowObjective to False\" )\n main.caseResult = main.TRUE\n main.eventGenerator.triggerEvent( EventType().ONOS_SET_FLOWOBJ, EventScheduleMethod().RUN_BLOCK, 'false' )\n with main.eventScheduler.idleCondition:\n while not main.eventScheduler.isIdle():\n main.eventScheduler.idleCondition.wait()\n utilities.assert_equals( expect=main.TRUE,\n actual=main.caseResult,\n onpass=\"Set FlowObjective test passed\",\n onfail=\"Set FlowObjective test failed\" )\n time.sleep( main.caseSleep )", "def run_suite(case, config, summary):\n config[\"name\"] = case\n model_dir = os.path.join(livvkit.model_dir, config['data_dir'], case)\n bench_dir = os.path.join(livvkit.bench_dir, config['data_dir'], case)\n tabs = []\n case_summary = LIVVDict()\n model_cases = functions.collect_cases(model_dir)\n bench_cases = functions.collect_cases(bench_dir)\n\n for subcase in sorted(six.iterkeys(model_cases)):\n bench_subcases = bench_cases[subcase] if subcase in bench_cases else []\n case_sections = []\n for mcase in sorted(model_cases[subcase], key=functions.sort_processor_counts):\n bpath = (os.path.join(bench_dir, subcase, mcase.replace(\"-\", os.path.sep))\n 
if mcase in bench_subcases else \"\")\n mpath = os.path.join(model_dir, subcase, mcase.replace(\"-\", os.path.sep))\n case_result = _analyze_case(mpath, bpath, config)\n case_sections.append(elements.section(mcase, case_result))\n case_summary[subcase] = _summarize_result(case_result,\n case_summary[subcase])\n tabs.append(elements.tab(subcase, section_list=case_sections))\n\n result = elements.page(case, config[\"description\"], tab_list=tabs)\n summary[case] = case_summary\n _print_summary(case, summary[case])\n functions.create_page_from_template(\"verification.html\",\n os.path.join(livvkit.index_dir,\n \"verification\",\n case + \".html\")\n )\n functions.write_json(result, os.path.join(livvkit.output_dir, \"verification\"), case+\".json\")", "def in_test_mode(mode: str) -> bool:\n return mode == TEST", "def match(self):\n return 'test' in self.name", "def CASE50( self, main ):\n import time\n from tests.CHOTestMonkey.dependencies.events.Event import EventType\n from tests.CHOTestMonkey.dependencies.EventScheduler import EventScheduleMethod\n\n main.log.report( \"Set FlowObjective to True\" )\n main.log.report( \"__________________________________________________\" )\n main.case( \"Set FlowObjective to True\" )\n main.step( \"Set FlowObjective to True\" )\n main.caseResult = main.TRUE\n main.eventGenerator.triggerEvent( EventType().ONOS_SET_FLOWOBJ, EventScheduleMethod().RUN_BLOCK, 'true' )\n with main.eventScheduler.idleCondition:\n while not main.eventScheduler.isIdle():\n main.eventScheduler.idleCondition.wait()\n utilities.assert_equals( expect=main.TRUE,\n actual=main.caseResult,\n onpass=\"Set FlowObjective test passed\",\n onfail=\"Set FlowObjective test failed\" )\n time.sleep( main.caseSleep )", "def test_single_test_case():\n pass", "def suite_started(self, module):", "def test_enable_maintence_mode1(self):\n pass", "def CASE32( self, main ):\n import time\n from tests.CHOTestMonkey.dependencies.events.Event import EventType\n from tests.CHOTestMonkey.dependencies.EventScheduler import EventScheduleMethod\n\n main.log.report( \"Install point intents and check intent states and ping\" )\n main.log.report( \"__________________________________________________\" )\n main.case( \"Install point intents and check intent states and ping\" )\n main.step( \"Install point intents and check intent states and ping\" )\n main.caseResult = main.TRUE\n main.eventGenerator.triggerEvent( EventType().APP_INTENT_POINT_ADD_ALL, EventScheduleMethod().RUN_BLOCK )\n with main.eventScheduler.idleCondition:\n while not main.eventScheduler.isIdle():\n main.eventScheduler.idleCondition.wait()\n utilities.assert_equals( expect=main.TRUE,\n actual=main.caseResult,\n onpass=\"Install point intents test passed\",\n onfail=\"Install point intents test failed\" )\n time.sleep( main.caseSleep )", "def test_case_01(self):\n if True:\n self.fail()", "def test_is_active_of_homework_positive():\n assert oop_hw.is_active()", "def test_enabled(self):\n # OSA script should have been installed in setUp function, which sets\n # enabled to True by default.\n self.assertTrue(self.run_function(\"assistive.enabled\", [OSA_SCRIPT]))\n # Disable OSA Script\n self.run_function(\"assistive.enable\", [OSA_SCRIPT, False])\n # Assert against new disabled status\n self.assertFalse(self.run_function(\"assistive.enabled\", [OSA_SCRIPT]))", "def suite_ended(self, module):", "def i_see_the_active_cases_page(browser):\n assert browser.find_by_text('Active Cases')", "def should_display(self, source: TestGroupReport) -> bool:\n style = 
self.get_style(source)\n if source.category == ReportCategories.TESTSUITE:\n return style.display_testsuite\n elif source.category == ReportCategories.PARAMETRIZATION:\n return style.display_testcase\n return style.display_test", "def test_contains_true(self):\n self.assertTrue('DEPTH' in self.tester)\n self.assertTrue('depth' in self.tester)", "def test_passed():\n pass", "def CASE30( self, main ):\n import time\n from tests.CHOTestMonkey.dependencies.events.Event import EventType\n from tests.CHOTestMonkey.dependencies.EventScheduler import EventScheduleMethod\n\n main.log.report( \"Install host intents and check intent states and ping\" )\n main.log.report( \"__________________________________________________\" )\n main.case( \"Install host intents and check intent states and ping\" )\n main.step( \"Install host intents and check intent states and ping\" )\n main.caseResult = main.TRUE\n main.eventGenerator.triggerEvent( EventType().APP_INTENT_HOST_ADD_ALL, EventScheduleMethod().RUN_BLOCK )\n with main.eventScheduler.idleCondition:\n while not main.eventScheduler.isIdle():\n main.eventScheduler.idleCondition.wait()\n utilities.assert_equals( expect=main.TRUE,\n actual=main.caseResult,\n onpass=\"Install host intents test passed\",\n onfail=\"Install host intents test failed\" )\n time.sleep( main.caseSleep )", "def test_if(self):", "def CASE41( self, main ):\n import time\n import random\n from tests.CHOTestMonkey.dependencies.events.Event import EventType\n from tests.CHOTestMonkey.dependencies.EventScheduler import EventScheduleMethod\n\n main.log.report( \"Randomly bring up one ONOS node that is down\" )\n main.log.report( \"__________________________________________________\" )\n main.case( \"Randomly bring up one ONOS node that is down\" )\n main.step( \"Randomly bring up one ONOS node that is down\" )\n main.caseResult = main.TRUE\n targetControllers = []\n for controller in main.controllers:\n if not controller.isUp():\n targetControllers.append( controller.index )\n if len( targetControllers ) == 0:\n main.log.warn( \"All controllers are up\" )\n main.caseResult = main.FALSE\n else:\n index = random.sample( targetControllers, 1 )\n main.eventGenerator.triggerEvent( EventType().ONOS_ONOS_UP, EventScheduleMethod().RUN_BLOCK, index[ 0 ] )\n with main.eventScheduler.idleCondition:\n while not main.eventScheduler.isIdle():\n main.eventScheduler.idleCondition.wait()\n utilities.assert_equals( expect=main.TRUE,\n actual=main.caseResult,\n onpass=\"Randomly bring up ONOS test passed\",\n onfail=\"Randomly bring up ONOS test failed\" )\n time.sleep( main.caseSleep )", "def spec(self) -> bool:\n\t\treturn True", "def CASE90( self, main ):\n import time\n from tests.CHOTestMonkey.dependencies.events.Event import EventType\n from tests.CHOTestMonkey.dependencies.EventScheduler import EventScheduleMethod\n\n main.log.report( \"Sleep for some time\" )\n main.log.report( \"__________________________________________________\" )\n main.case( \"Sleep for some time\" )\n main.step( \"Sleep for some time\" )\n main.caseResult = main.TRUE\n sleepSec = int( main.params[ 'CASE90' ][ 'sleepSec' ] )\n main.eventGenerator.triggerEvent( EventType().TEST_SLEEP, EventScheduleMethod().RUN_BLOCK, sleepSec )\n with main.eventScheduler.idleCondition:\n while not main.eventScheduler.isIdle():\n main.eventScheduler.idleCondition.wait()\n utilities.assert_equals( expect=main.TRUE,\n actual=main.caseResult,\n onpass=\"Sleep test passed\",\n onfail=\"Sleep test failed\" )\n time.sleep( main.caseSleep )", "def 
test_enable(self):\n # OSA script should have been installed and enabled in setUp function\n # Now let's disable it, which should return True.\n self.assertTrue(self.run_function(\"assistive.enable\", [OSA_SCRIPT, False]))\n # Double check the script was disabled, as intended.\n self.assertFalse(self.run_function(\"assistive.enabled\", [OSA_SCRIPT]))\n # Now re-enable\n self.assertTrue(self.run_function(\"assistive.enable\", [OSA_SCRIPT]))\n # Double check the script was enabled, as intended.\n self.assertTrue(self.run_function(\"assistive.enabled\", [OSA_SCRIPT]))", "def func_case(self):\n test.success(\"\")", "def ok_to_run(self):\n # READING DOC STRING, LOOKING FOR VERSION\n doc_dict = self.doc_dict\n skip_test = False\n msg = ''\n if 'deprecated' in doc_dict:\n msg = \"This test has been deprecated\"\n skip_test = True\n elif 'version' in doc_dict and int(self.core.config.get('TestRun', 'driver_version')) < doc_dict['version']:\n msg = \"Features unavailable in this version: {}\".format(doc_dict['version'])\n skip_test = True\n elif 'datacenters' in doc_dict and len([s for s in doc_dict['datacenters'] if s in self.core.config.get('TestRun', 'datacenters')]) == 0:\n msg = \"Test only works in {}\".format(doc_dict['datacenters'])\n skip_test = True\n elif 'no_environment' in doc_dict and self.core.config.get('TestRun', 'datacenters').upper() in doc_dict['no_environment']:\n msg = \"Test does not work in {}\".format(doc_dict['no_environment'])\n skip_test = True\n if skip_test:\n self.core.write(\"\\n\" + \"_\" * 40 + \"\\n{}\".format(msg), level='error')\n if self.core.driver is not None:\n self.core.driver.close_driver()\n self.core.driver_state = False\n self.skipTest(msg)", "def list(self):\n print \"\\nAvailable Test Cases\"\n print \"====================\"\n for case in self.cases:\n print case.__name__", "def test1(self):\n self.assertTrue(True)", "def is_experiment(cfg):\n if CONDITIONS in list(cfg.keys()):\n return True\n else:\n return False", "def test_2nd_scenario():\n start_entered_scenario(config.MERAKI_CAMERAS[0][\"serial\"])\n return \"ok\"", "def test_test_case_name_step(self, pytester: pytest.Pytester, adaptavist_mock: AdaptavistMock):\n pytester.makepyfile(\n \"\"\"\n import pytest\n\n def test_TEST_T123_1():\n assert True\n \"\"\"\n )\n _, _, etss = adaptavist_mock\n pytester.runpytest(\"--adaptavist\")\n etss.assert_called_once_with(\n test_run_key=\"TEST-C1\",\n test_case_key=\"TEST-T123\",\n step=1,\n status=\"Pass\",\n comment=\"\",\n environment=None,\n executor=getpass.getuser().lower(),\n assignee=getpass.getuser().lower(),\n )", "def decide_to_test(identifier, name, listing):\n if not listing:\n return False\n elif identifier in listing and \\\n question(_(\"Do you want to test the %s now?\" % name), True):\n return True\n else:\n return False", "def test_enable_maintence_mode(self):\n pass", "def pytest_addoption(parser):\n parser.addoption(\"--cases\", help=\"Test cases to run\")", "def test(self):\n\n return True", "def run_case(self, **kwargs):\n module_name = kwargs.get('module_name', None)\n if self.result:\n self.success_msg.append('>>>%s PASSED' % module_name or sys.modules[__name__])\n else:\n self.fail_msg.insert(0, '>>>%s FAILED' % module_name or sys.modules[__name__])", "def setUp(self):\n if not self.flag:\n self.fail(self.err_msg)", "def test_start(self):\n self.fail(\"write a test\")", "def spec_tests():\n pass", "def testControlEnvironment(video1, video2):\n try:\n control.main(video1, video2, Verbose=True, Testing=True)\n return True\n except 
ValueError:\n return False", "def course_tester(courses):\n\n return False", "def endOfTestcase(self):\n pass # nothing to do here. Hence pass statement is called.", "def uses_usercase(self):\n return any(form.uses_usercase() for form in self.get_forms())", "def test_win(self):\n self.choice.return_value = \"ant\"\n self.input.side_effect = list(\"ant\" \"n\")\n\n gallows.main()\n\n self.xprint.assert_any_call('Yes! The secret word is \"ant\"! '\n 'You have won!')", "def on_testcase_choice(self, event):\n\n self.log.trace(\"Handle test case selection by user.\")\n result = {'successful': False}\n\n try:\n # determine test selected\n case_name = event.GetString()\n\n if case_name.lower() == CUSTOM_SEL.lower():\n # set custom flag\n self.custom_testcase = True\n # show custom input\n self.input_custom_testcase.Show()\n\n else:\n # clear custom flag\n self.custom_testcase = False\n # make sure custom field is hidden\n self.input_custom_testcase.Hide()\n if case_name.lower() != BLANK_SEL.lower():\n # determine test case ID\n self.case_id = self.database.return_testcase_id(case_name, self.test_id)['id']\n # return data\n data = self.database.return_testcase_data(self.case_id)\n testcase_data = data['testcase data']\n\n # set class\n self.input_testcase_class.SetSelection(\n self.input_testcase_class.FindString(str(testcase_data['class'])))\n\n # set minimum version\n self.input_minversion.SetValue(str(testcase_data['minimum version']))\n\n # parse procedure into IDs\n step_ids = testcase_data['procedure'].replace(' ', '').split(',')\n # update base step with first step\n #self.log.trace(\"Updating base step ...\")\n #if len(step_ids) >= 1:\n # step_data = self.database.return_procedure_step_data(step_ids[0])['step data']\n # select_id = self.input_base_step.FindString(str(step_data['name']))\n # self.input_base_step.SetSelection(select_id)\n #else: self.log.trace(\"No steps found in test case.\")\n # update additional steps\n if len(step_ids) > 0:\n for step_id in step_ids:\n step_data = self.database.return_procedure_step_data(step_id)['step data']\n # determine if procedure step input already exists\n index = step_ids.index(step_id)\n if len(self.procedure_steps) >= index + 1:\n input = self.procedure_steps[index]\n else:\n # create new step\n input = self.create_new_input_step()['step']\n\n # set step input selection\n select_id = input.FindString(str(step_data['name']))\n input.SetSelection(select_id)\n else: self.log.trace(\"No steps found in test case.\")\n\n result['successful'] = True\n except BaseException, e:\n self.handle_exception(e, \"handle test case selection by user\")\n\n # return\n return result", "def testing(self):\n print('test successful')", "def test_if_demo_state_shows_by_default(self):\n demo.setup(self.hass, {demo.DOMAIN: {}})\n\n self.assertIsNotNone(self.hass.states.get('a.Demo_Mode'))", "def startTestRun(self):", "def test_break_security_group_usual_case():", "def test_default(self):\r\n self.assertEqual(self.option.default, 'testing')", "def test_contains_true(self):\n self.assertTrue('BarcodeSequence' in self.tester)\n self.assertTrue('barcodesequence' in self.tester)", "def test_use_case_glue(self):\n\n uc1 = self.create(UseCaseItem, UML.UseCase)\n include = self.create(IncludeItem)\n\n glued = self.allow(include, include.head, uc1)\n assert glued", "def test_completed(self):\n return False", "def test_func(self):\n self.rol_nu = rol_get_huidige(self.request)\n return self.rol_nu in (Rollen.ROL_BB, Rollen.ROL_BKO, Rollen.ROL_RKO, Rollen.ROL_RCL, Rollen.ROL_HWL)", 
"def test_func(self):\n self.rol_nu = rol_get_huidige(self.request)\n return self.rol_nu in (Rollen.ROL_BB, Rollen.ROL_BKO, Rollen.ROL_RKO, Rollen.ROL_RCL, Rollen.ROL_HWL)", "def test_func(self):\n self.rol_nu = rol_get_huidige(self.request)\n return self.rol_nu in (Rollen.ROL_BB, Rollen.ROL_BKO, Rollen.ROL_RKO, Rollen.ROL_RCL, Rollen.ROL_HWL)", "async def contest(self, ctx):\n\t\tawait ctx.send_help('contest')", "def suite():\n return unittest.makeSuite(OpenedTestCase)", "def test_valid(self):\n args = [SIMPLE_TEMPLATE, SIMPLE_CANDIDATE]\n result = self.runner.invoke(main, args)\n self.assertEqual(0, result.exit_code)", "def should_show():", "def test_execution(self):\n self.assertTrue(True)", "def test_input2(self):\n in1 =\"aple\"\n result = options.checkw(in1)\n self.assertEqual(result,False)", "def gen_suite(tests):\n cases = [gen_case(test) for test in tests]\n return {\n 'cases': cases,\n 'scored': True,\n 'setup': '',\n 'teardown': '',\n 'type': 'doctest'\n }", "def testbed_name(self): \n return \"C-Lab\"", "def should_fake_it(self):\n try:\n environment.get(\"FakeIt\")\n return True\n except KeyError:\n return False", "def testable(self):\n\t\treturn True", "def test_contains_true(self):\n self.assertTrue('1.SKM7.640188' in self.tester)", "def test_contains_true(self):\n self.assertTrue('1.SKM7.640188' in self.tester)", "def is_successful(self):\n for item in self.summary:\n if item['task_status'] is False:\n return testcase.TestCase.EX_TESTCASE_FAILED\n\n return super().is_successful()", "def main():\n print(\"Welcome to the Cryptography Suite!\")\n run_suite()\n while should_continue():\n run_suite()\n print(\"Goodbye!\")", "def parseTestcase(file_,dict_info,user_ini):\n \n reinsid = re.compile('QC_*?([\\d-]+)')\n try:\n suite = TestData(source = \"%s\" %(file_))\n except Exception:\n recordLogsToList('Warning: TestData analyze file [%s] Failed' % file_)\n return False\n for mytestcase in suite.testcase_table:\n if not mytestcase.tags.value:\n recordLogsToList('%s QCID is missed,Please input it in your script!' %file_)\n return False\n for tag in mytestcase.tags.value:\n if 'QC_' in tag:\n insid = reinsid.findall(tag)\n #print insid\n #break\n if dict_info.has_key(int(insid[0])):\n tester = dict_info[int(insid[0])]\n print \"test:\",file_\n print 'tester:',tester\n full_name = get_full_name(tester,user_ini)\n if full_name == 'NONE':\n break\n \n email_name ='Owner-%s@nokia.com' % full_name\n for ftag in suite.setting_table.force_tags.value:\n #print ftag\n if 'Owner-'in ftag or 'owner-' in ftag:\n owner = re.findall(r'[O|o]wner-(.*)@.*.com',ftag)\n if owner[0]!=full_name:\n flag = suite.setting_table.force_tags.value.index(ftag)\n #print flag\n suite.setting_table.force_tags.value[flag] = email_name\n suite.save()\n recordLogsToList('%s'%file_)\n recordLogsToList('%s-----> %s' %(ftag,email_name))\n global STAMP\n STAMP = False\n global COUNT\n COUNT = COUNT+1\n recordLogsToList('=========%d cases has modified==========' %COUNT)\n else:\n recordLogsToList('%s'%file_)\n recordLogsToList('Warning: ID %s not find in excel,please check it!' 
% int(insid[0]))\n recordLogsToList('==========================================')\n global ID_NOT_FOUND\n ID_NOT_FOUND = ID_NOT_FOUND+1\n \n return True", "def func(self):\n if (not self.switches or \"online\" in self.switches) and not self.args:\n self.display_lists()\n return\n if \"claim\" in self.switches or (not self.switches and self.args):\n self.claim_scene()\n return\n if \"validate\" in self.switches:\n self.validate_scene()\n return\n if \"viewrequests\" in self.switches:\n self.view_requests()\n return\n self.msg(\"Invalid switch.\")", "def test_create_scenario1(self):\n pass", "def test1(self):\n reporter.testStart('Test 1');\n oVM = self.getVmByName('tst-bs-pae');\n\n for cCpus in self.acCpus:\n if cCpus == 1: reporter.testStart('1 cpu');\n else: reporter.testStart('%u cpus' % (cCpus));\n\n for sVirtMode in self.asVirtModes:\n if sVirtMode == 'raw' and cCpus > 1:\n continue;\n\n hsVirtModeDesc = {};\n hsVirtModeDesc['raw'] = 'Raw-mode';\n hsVirtModeDesc['hwvirt'] = 'HwVirt';\n hsVirtModeDesc['hwvirt-np'] = 'NestedPaging';\n reporter.testStart(hsVirtModeDesc[sVirtMode]);\n\n fHwVirt = sVirtMode != 'raw';\n fNestedPaging = sVirtMode == 'hwvirt-np';\n self.test1OneCfg(oVM, cCpus, fHwVirt, fNestedPaging);\n\n reporter.testDone();\n reporter.testDone();\n\n return reporter.testDone()[1] == 0;", "def add_case(self, name):\n mod = self._mod\n std = mod.give_aster_study()\n prefs = aster_s_gui.AsterPreferences()\n \n case = std.add_case(self.find_new_name(std, name))\n case.use(aster_s.CommFile(self.get_str(\"command-file\")))\n case.use(aster_s.SMeshEntry(self.give_field(\"mesh\").node.entry))\n if prefs.get(aster_s_gui.InteractiveFollowUp):\n case.use(aster_s.InteractivFollowUp())\n if prefs.get(aster_s_gui.SaveBaseResult):\n case.use(aster_s.HasBaseResult())\n mod.update()\n #salome.sg.updateObjBrowser(0)", "def test_activate_login(self):\r\n pass", "def test_02_pass(self):\n if x==1:\n pass", "def test_01_pass(self):\n if x==1:\n pass", "def test_01_pass(self):\n if x==1:\n pass", "def check_should_run_suite(suite, diff_files):\n\n if suite in always_run_jobs:\n # you gotta do what you gotta do\n return True\n\n all_ignore = True\n any_docs = False\n all_docs = True\n any_console = False\n all_console = True\n any_java = False\n any_testable_script = False\n all_testable_script = True\n\n # go over all of the files in the diff and collect some information about the diff contents, we'll use this later\n # to decide whether or not to run the suite\n for f in diff_files:\n is_ignore = check_ignore(f)\n all_ignore = all_ignore and is_ignore\n is_docs = check_docs(f)\n any_docs = any_docs or is_docs\n all_docs = all_docs and is_docs\n is_console = check_console(f)\n any_console = any_console or is_console\n all_console = all_console and is_console\n is_script = check_testable_script(f)\n any_testable_script = any_testable_script or is_script\n all_testable_script = all_testable_script and is_script\n any_java = any_java or (not is_ignore and not is_docs and not is_console and not is_script)\n\n # if everything is ignorable, we can skip this suite\n if all_ignore:\n return False\n # if the test suite is a doc job, return true if any of the files changed were docs\n if suite in docs_jobs:\n return any_docs\n # if all of the changes are docs paths, but the current suite is not a docs job, we can skip\n if all_docs:\n return False\n if suite in web_console_still_run_for_java_jobs:\n return any_console or any_java\n # if the test suite is a web console job, return true if any of the 
changes are web console files\n if suite in web_console_jobs:\n return any_console\n # if all of the changes are web console paths, but the current suite is not a web console job, we can skip\n if all_console:\n return False\n if suite in script_job:\n return any_testable_script\n if all_testable_script:\n return False\n\n # if all of the files belong to known non-java groups, we can also skip java\n # note that this should probably be reworked to much more selectively run the java jobs depending on the diff\n if not any_java:\n return False\n\n # we don't know we can skip for sure, so lets run it\n return True", "def is_shed_tool_conf(self):", "def test_break_security_group_usual_case_specify_sg():", "def test_is_advancing_to_next_stage_yes(self):\n\n # test_input_cases =\n # [(die_a_value, die_b_value, stage, ok_output),]\n test_input_cases = [\n (\"1\", \"2\", 1, True),\n (\"2\", \"1\", 1, True),\n (\"ANGRY\", \"4\", 2, True),\n (\"4\", \"ANGRY\", 2, True),\n ]\n\n for test_io in test_input_cases:\n self._test_is_game_over(*test_io)", "def get_prog_runatstart(self):\n #en = self._get_prop(\"runAtStartup\")\n #return bool( en == \"true\" )\n return bool(self._mydict['runAtStartup'] == \"true\")", "def test_verification_status_visible(self):\r\n self.client.login(username=\"jack\", password=\"test\")\r\n self.check_verification_status_on('verified', 'You\\'re enrolled as a verified student')\r\n self.check_verification_status_on('honor', 'You\\'re enrolled as an honor code student')\r\n self.check_verification_status_on('audit', 'You\\'re auditing this course')", "def tests():", "def state_chosen_enter(cfg, app, win):", "def startMode(self):\n return True, None", "def test_get_scenario(self):\n pass", "def verify_try_demoMode_screenname():\r\n msg = \"\"\r\n try:\r\n sleep(3)\r\n 'Getting Create Password text_view object'\r\n flag = ui_controls.ui_element(get_obj_identifier('demo_demoMode_textview'))\r\n\r\n if g.platform == 'ios':\r\n flag2,msg = label_textvalidation('demo_demoMode_textview','Demo Mode')\r\n \r\n flag = flag2\r\n if flag:\r\n print \"Demo Mode Screen Name is displayed properly\"\r\n return True, msg\r\n else:\r\n print \"Demo Mode Screen Name is not displayed properly\"\r\n\r\n except Exception as excp:\r\n traceback.print_exc()\r\n msg += str(excp)\r\n return True, msg", "def TestOneStep(self):\n pass" ]
[ "0.7348757", "0.64489675", "0.6420823", "0.6398555", "0.63852155", "0.6344665", "0.6292689", "0.6013752", "0.594612", "0.59133625", "0.5888049", "0.58311087", "0.5821088", "0.58115166", "0.5791672", "0.5789133", "0.57623816", "0.5759933", "0.5748338", "0.5702276", "0.5696946", "0.5647437", "0.56395656", "0.562595", "0.5622106", "0.56209075", "0.5607384", "0.56065255", "0.55919296", "0.55812854", "0.55646586", "0.55376375", "0.5520023", "0.55164725", "0.55146325", "0.55128455", "0.5487366", "0.54838836", "0.54822284", "0.5474697", "0.54652566", "0.54543465", "0.54470205", "0.543067", "0.5416217", "0.5410337", "0.53954226", "0.53946", "0.5383388", "0.5379843", "0.5376668", "0.53678155", "0.534548", "0.5314182", "0.5307601", "0.529985", "0.52962923", "0.5293024", "0.5289332", "0.5287723", "0.52862746", "0.5274682", "0.52671194", "0.5266313", "0.5266313", "0.5266313", "0.5260066", "0.5259636", "0.5258887", "0.5256274", "0.524546", "0.52431095", "0.52429545", "0.5239987", "0.5239416", "0.52353567", "0.5231108", "0.5231108", "0.5219662", "0.52188313", "0.52180004", "0.5213661", "0.521363", "0.52091646", "0.5208184", "0.52078366", "0.52022725", "0.52007705", "0.52007705", "0.51987416", "0.5198607", "0.5196055", "0.5189688", "0.51880115", "0.5187637", "0.518421", "0.51826775", "0.5181493", "0.51808923", "0.51795155", "0.517902" ]
0.0
-1
Runs an automatic check to see if any transcriptions need to be started or are already finished and need to be re-uploaded.\n\n Needs a dbConnection and an integer representing the maximum number of concurrent transcriptions that can run at a time.\n\n Note: this is not the function to use for parsing and uploading files from the 'transcripts' folder, because there is no reliable way to tell which of those files are still in progress; that will be fixed later.
def runAutoCheck(dbConnection, maxConcurrent):\n    # checks if any shows are pending.\n    fileContent = DatabaseInteract.checkPre(dbConnection)\n    if(len(fileContent) > 0 and Tools.numRunningProcesses() < maxConcurrent):\n        cursor = dbConnection.cursor()\n        cursor.execute("UPDATE transcriptions SET pending = TRUE WHERE id = '" + str(fileContent[1]) + "';")\n        dbConnection.commit()\n        cursor.close()\n        url = fileContent[0]\n        indexID = str(fileContent[1])  # get the ID instead of the filename\n        service = str(fileContent[3])\n        # podcastName = fileContent[2]\n        Tools.transcribeAll(service, url, indexID)  # download the mp3 will print when done
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parseUpload(dbconnection, fileName):\n nhContent = ParseText.nohupTranscriptionContent(fileName)\n count = 0\n while count < len(nhContent[0]):\n try:\n rtf = nhContent[0][count]\n transcription = nhContent[1][count].replace(\"'\", \"''\").replace(\"_\", \"\")\n dbID = nhContent[2][count].replace(\".\", \"\")\n duration = nhContent[3][count]\n DatabaseInteract.insertTranscription(dbconnection, rtf, transcription, duration, dbID)\n count += 1\n except:\n print(\"couldnt upload one at index \" + str(count))\n count += 1", "def check_transcripts(request):\r\n transcripts_presence = {\r\n 'html5_local': [],\r\n 'html5_equal': False,\r\n 'is_youtube_mode': False,\r\n 'youtube_local': False,\r\n 'youtube_server': False,\r\n 'youtube_diff': True,\r\n 'current_item_subs': None,\r\n 'status': 'Error',\r\n }\r\n try:\r\n __, videos, item = _validate_transcripts_data(request)\r\n except TranscriptsRequestValidationException as e:\r\n return error_response(transcripts_presence, e.message)\r\n\r\n transcripts_presence['status'] = 'Success'\r\n\r\n filename = 'subs_{0}.srt.sjson'.format(item.sub)\r\n content_location = StaticContent.compute_location(item.location.course_key, filename)\r\n try:\r\n local_transcripts = contentstore().find(content_location).data\r\n transcripts_presence['current_item_subs'] = item.sub\r\n except NotFoundError:\r\n pass\r\n\r\n # Check for youtube transcripts presence\r\n youtube_id = videos.get('youtube', None)\r\n if youtube_id:\r\n transcripts_presence['is_youtube_mode'] = True\r\n\r\n # youtube local\r\n filename = 'subs_{0}.srt.sjson'.format(youtube_id)\r\n content_location = StaticContent.compute_location(item.location.course_key, filename)\r\n try:\r\n local_transcripts = contentstore().find(content_location).data\r\n transcripts_presence['youtube_local'] = True\r\n except NotFoundError:\r\n log.debug(\"Can't find transcripts in storage for youtube id: %s\", youtube_id)\r\n\r\n # youtube server\r\n youtube_text_api = copy.deepcopy(settings.YOUTUBE['TEXT_API'])\r\n youtube_text_api['params']['v'] = youtube_id\r\n youtube_response = requests.get('http://' + youtube_text_api['url'], params=youtube_text_api['params'])\r\n\r\n if youtube_response.status_code == 200 and youtube_response.text:\r\n transcripts_presence['youtube_server'] = True\r\n #check youtube local and server transcripts for equality\r\n if transcripts_presence['youtube_server'] and transcripts_presence['youtube_local']:\r\n try:\r\n youtube_server_subs = get_transcripts_from_youtube(\r\n youtube_id,\r\n settings,\r\n item.runtime.service(item, \"i18n\")\r\n )\r\n if json.loads(local_transcripts) == youtube_server_subs: # check transcripts for equality\r\n transcripts_presence['youtube_diff'] = False\r\n except GetTranscriptsFromYouTubeException:\r\n pass\r\n\r\n # Check for html5 local transcripts presence\r\n html5_subs = []\r\n for html5_id in videos['html5']:\r\n filename = 'subs_{0}.srt.sjson'.format(html5_id)\r\n content_location = StaticContent.compute_location(item.location.course_key, filename)\r\n try:\r\n html5_subs.append(contentstore().find(content_location).data)\r\n transcripts_presence['html5_local'].append(html5_id)\r\n except NotFoundError:\r\n log.debug(\"Can't find transcripts in storage for non-youtube video_id: %s\", html5_id)\r\n if len(html5_subs) == 2: # check html5 transcripts for equality\r\n transcripts_presence['html5_equal'] = json.loads(html5_subs[0]) == json.loads(html5_subs[1])\r\n\r\n command, subs_to_use = _transcripts_logic(transcripts_presence, videos)\r\n 
transcripts_presence.update({\r\n 'command': command,\r\n 'subs': subs_to_use,\r\n })\r\n return JsonResponse(transcripts_presence)", "def insertTranscription(dbConnection, realtimefactor, transcription, duration, dbID):\n try:\n cursor = dbConnection.cursor()\n cursor.execute(\"UPDATE transcriptions SET realtimefactor = '\" + realtimefactor + \"', transcription = '\" + transcription + \"', datetranscribed = now(), duration = '\" + duration + \"' WHERE id = '\" + str(dbID) + \"';\")\n dbConnection.commit()\n cursor.close()\n return True\n except Exception as e:\n Tools.writeException(\"uploadTranscriptionData\", e)\n return False", "def transcript_sequence(species,aceVersion,log=0):\n \n os.chdir(os.environ['PYDATA']+'/%s/log'%species)\n logFile=open('%s_ace_transcripts.txt'%species,'w')\n t1=time.time()\n #create ace transcript_sequence\n path=os.environ['PYDATA']+\"/\"+species+\"/aceview/\"+species+\"_transcript_sequence.bkdb\"\n if os.path.exists(path):\n os.remove(path)\n transcriptDB=bsddb.btopen(path,'w')\n \n #test if mRNAs sequences are in one file or in several chromosome files\n try:\n sequenceFile = open('%s/%s_%s/AceView.ncbi_37.all_mrnas_dna.fasta' %(os.environ['ACEDATA'],species,aceVersion.lower()),'r')\n chrFlag=0 \n except: \n chrFlag=1 \n \n if chrFlag: \n #open database for relation between chromosome and Ensembl region\n path=os.environ['PYDATA']+'/'+species+'/ensembl/'+species+'_region_by_chromosome.bkdb'\n chrDB=bsddb.btopen(path,'r')\n chromosomes=chrDB.keys()\n tscriptNb=0 \n for chromosome in chromosomes:\n print 'processing chromosome: '+chromosome\n try: \n sequenceFile = open('%s/%s_%s/x1.all_mrnas_fasta.%s.fasta' %(os.environ['ACEDATA'],species,aceVersion.lower(),chromosome),'r') \n region=chrDB[chromosome] \n geneName='' \n transcriptName=''\n sequence='' \n for lines in sequenceFile:\n tscriptNb=tscriptNb+1 \n line = lines.split('\\n')[0]\n if not line:\n #save last transcript\n if geneName != '': \n #save previous transcript\n transcriptDB[transcriptName]=cPickle.dumps(ps_class.Transcript(ID=transcriptName,geneID=geneName,chromosome=chromosome,region=region,sequence=sequence),protocol=-1)\n break\n # get some informations \n if line[0]=='>': \n if geneName != '': \n #save previous transcript\n transcriptDB[transcriptName]=cPickle.dumps(ps_class.Transcript(ID=transcriptName,geneID=geneName,chromosome=chromosome,region=region,sequence=sequence),protocol=-1)\n transcriptName = line.split(':')[1] \n shortName=transcriptName.split(aceVersion)[0] \n transcriptLetter=shortName.split('.')[-1]\n geneName=shortName.split('.'+transcriptLetter)[0] \n sequence='' \n else:\n # Construct sequence\n sequence=sequence+line\n except:\n logFile.write('no AceView files %s/x1.all_mrnas_fasta.%s.fasta' %(os.environ['ACEDATA'],chromosome)) \n transcriptDB.close()\n chrDB.close()\n else: \n tscriptNb=0 \n sequenceFile = open('%s/%s_%s/AceView.ncbi_37.all_mrnas_dna.fasta' %(os.environ['ACEDATA'],species,aceVersion.lower()),'r') \n geneName='' \n transcriptName=''\n sequence='' \n for lines in sequenceFile:\n tscriptNb=tscriptNb+1 \n line = lines.split('\\n')[0]\n if not line:\n #save last transcript\n if geneName != '': \n #save previous transcript\n transcriptDB[transcriptName]=cPickle.dumps(ps_class.Transcript(ID=transcriptName,geneID=geneName,sequence=sequence),protocol=-1)\n break\n # get some informations \n if line[0]=='>': \n if geneName != '': \n #save previous transcript\n 
transcriptDB[transcriptName]=cPickle.dumps(ps_class.Transcript(ID=transcriptName,geneID=geneName,sequence=sequence),protocol=-1)\n transcriptName = line.split(':')[1] \n shortName=transcriptName.split(aceVersion)[0] \n transcriptLetter=shortName.split('.')[-1]\n geneName=shortName.split('.'+transcriptLetter)[0] \n sequence='' \n else:\n # Construct sequence\n sequence=sequence+line \n transcriptDB.close()\n \n t2=time.time()\n if log!=0:\n log.write('\\t%u\\t%.2f\\n'%(tscriptNb,t2-t1))", "async def test_get_transcripts_from_gene(test_db):\n resp = await test_db.get_transcripts_from_gene(\"BRAF\", 2145, 2145)\n assert len(resp) == 32\n\n resp = await test_db.get_transcripts_from_gene(\"BRAF\", 140453136,\n 140453136)\n assert len(resp) == 0", "def test_process_file(self):\n # 1\n self.assertEqual(get_file_reply(files[0][0], files[0][1]), \"Inserted 4 Records\")\n results = self.database_connection.select('''SELECT COUNT(*) FROM ''' + table_name)[0][0]\n # 2\n self.assertEqual(results, 4)\n # csv, renewing connection\n self.database_connection.connect()\n # 3\n self.assertEqual(get_file_reply(files[1][0], files[1][1]), \"Inserted 4 Records\")\n results = self.database_connection.select('''SELECT COUNT(*) FROM ''' + table_name)[0][0]\n # 4\n self.assertEqual(results, 8)\n self.database_connection.connect()\n # 5\n self.assertFalse(get_file_reply(files[0][0], files[1][1]))", "def run_tximport():\n eligible_experiments = (\n Experiment.objects.annotate(num_organisms=Count(\"organisms\"))\n .filter(num_organisms=1, technology=\"RNA-SEQ\", num_processed_samples=0)\n .prefetch_related(\"samples__results\")\n )\n\n paginator = Paginator(eligible_experiments, PAGE_SIZE)\n page = paginator.page()\n\n # Next is to figure out how many samples were processed for\n # each experiment. Should be able to reuse code from salmon\n # cause it does this stuff.\n tximport_pipeline = ProcessorPipeline.TXIMPORT\n\n while True:\n creation_count = 0\n\n for experiment in page.object_list:\n quant_results = get_quant_results_for_experiment(experiment)\n\n if should_run_tximport(experiment, quant_results, True):\n processor_job = ProcessorJob()\n processor_job.pipeline_applied = tximport_pipeline.value\n processor_job.ram_amount = 8192\n # This job doesn't need to run on a specific volume\n # but it uses the same Nomad job as Salmon jobs which\n # do require the volume index.\n processor_job.volume_index = random.choice(list(get_active_volumes()))\n processor_job.save()\n\n assoc = ProcessorJobOriginalFileAssociation()\n # Any original file linked to any sample of the\n # experiment will work. Tximport is somewhat special\n # in that it doesn't actuallhy use original files so\n # this is just used to point to the experiment.\n assoc.original_file = experiment.samples.all()[0].original_files.all()[0]\n assoc.processor_job = processor_job\n assoc.save()\n\n creation_count += 1\n\n try:\n send_job(tximport_pipeline, processor_job)\n except Exception:\n # If we cannot queue the job now the Foreman will do\n # it later.\n pass\n\n logger.info(\"Created %d tximport jobs for experiments past the thresholds.\", creation_count)\n\n if not page.has_next():\n break\n else:\n page = paginator.page(page.next_page_number())", "def start_transcribing():\n transcribe.main()", "def subprocess_transcribe_function( fname, voicenote_filename_regex ):\n if not hasattr( subprocess_transcribe_function, \"client\" ):\n # Init function failed.\n return None\n if subprocess_transcribe_function.verbose:\n # TODO: We should (probably?) 
queue these messages and print() on a single thread/process...but....\n print( \"Transcribing {}...\".format( fname ) )\n try:\n ret = ( recording_date_from_full_path( fname, voicenote_filename_regex ), fname, transcribe_wav( fname, client=subprocess_transcribe_function.client ) )\n except BaseException as e:\n # Do NOT kill the program. We'll leave the audio file in the unprocessed directory.\n print( \"ERROR:\" )\n print( e )\n ret = None\n return ret", "def docxProcessing():\n DOCUMENT_ORIGIN_CODE = \"RADIOLOGIE_SOFTWARE\"\n global DATABASE\n conn = db.create_connection(DATABASE)\n pathFolder = \"fichiers source/\"\n extension = \".docx\"\n docxFileArrayPath = glob.glob(pathFolder + \"*\" + extension)\n print(\" - Processing docx\", end=\"\") \n for file in docxFileArrayPath:\n text = readFile.readDocxFile(file)\n query = getDocumentQuery(text, DOCUMENT_ORIGIN_CODE, file, pathFolder, extension)\n db.insert_document(conn, query) \n print(\".\", end = '')\n #commit the changes to db\t\t\t\n conn.commit()\n #close the connection\n conn.close()\n print(\"\\n\")", "def gbk_upload(self):\n t_count = 0\n os.chdir(self.path)\n print(os.getcwd())\n if os.path.isdir(self.path + '/Databases') is False:\n os.mkdir('Databases')\n for tier in os.listdir(os.getcwd()):\n if tier == 'Databases':\n continue\n db_name = str(tier) + '.db'\n if os.path.isfile(self.path + '/Databases/' + db_name) is False:\n print('Copying Template BioSQL Database... '\n 'This may take a few minutes...')\n shutil.copy2(where.Templates + '/Template_BioSQL_DB.db',\n self.path + '/Databases/%s' % db_name)\n else:\n os.remove(self.path + '/Databases/' + db_name)\n print('Copying Template BioSQL Database... '\n 'This may take a few minutes...')\n shutil.copy2(where.Templates + '/Template_BioSQL_DB.db',\n self.path + '/Databases/%s' % db_name)\n\n server = BioSeqDatabase.open_database(\n driver='sqlite3', db=(\n self.path + '/Databases/' + db_name))\n os.chdir(tier)\n for gene in os.listdir(os.getcwd()):\n os.chdir(gene)\n sub_db_name = gene\n for file in os.listdir(os.getcwd()):\n try:\n if sub_db_name not in server.keys():\n server.new_database(sub_db_name)\n db = server[sub_db_name]\n count = db.load(SeqIO.parse(file, 'genbank'))\n server.commit()\n print('Server Commited %s' % sub_db_name)\n print('%s database loaded with %s.' % (db.dbid, file))\n print(\n \"That file contains %s genbank records.\" %\n str(count))\n t_count = t_count + count\n print(\n 'The total number of files loaded so far is %i.' %\n t_count)\n except BaseException:\n server.rollback()\n try:\n del server[sub_db_name]\n server.commit()\n except BaseException:\n raise\n raise\n os.chdir('..')\n os.chdir('..')", "def check_active_requests():\n\n active_requests = jobtracker.query(\"SELECT * FROM requests \" \\\n \"WHERE status='waiting'\")\n for request in active_requests:\n\n\t# Check requested status \n\tif DownloaderSPAN512.check_request_done(request):\n\t dlm_cout.outs(\"Restore (GUID: %s) has succeeded. Will create file entries.\\n\" % request['guid'])\n\t create_file_entries(request)\n\n\telse:\n#\t dlm_cout.outs(\"Request (GUID: %s) has failed.\\n\" \\\n#\t \"\\tDatabase failed to report the data as restored.\" % request['guid'])\n#\t jobtracker.query(\"UPDATE requests SET status='failed', \" \\\n# \"details='Request failed. 
Why ?', \" \\\n# \"updated_at='%s' \" \\\n# \"WHERE guid='%s'\" % (jobtracker.nowstr(), request['guid']))\n\n query = \"SELECT (TO_SECONDS('%s')-TO_SECONDS(created_at)) \" \\\n \"AS deltaT_seconds \" \\\n \"FROM requests \" \\\n \"WHERE guid='%s'\" % \\\n (jobtracker.nowstr(), request['guid'])\n row = jobtracker.query(query, fetchone=True)\n #if row['deltaT_seconds']/3600. > config.download.request_timeout:\n if row/3600. > config.download.request_timeout:\n dlm_cout.outs(\"Restore (GUID: %s) is over %d hr old \" \\\n \"and still not ready. Marking \" \\\n \"it as failed.\" % \\\n (request['guid'], config.download.request_timeout))\n jobtracker.query(\"UPDATE requests \" \\\n \"SET status='failed', \" \\\n \"details='Request took too long (> %d hr)', \" \\\n \"updated_at='%s' \" \\\n \"WHERE guid='%s'\" % \\\n (config.download.request_timeout, jobtracker.nowstr(), \\\n request['guid']))", "def checkFiles(self): \r\n mdate_filenames_list = []\r\n mdate_filenames_tuple = {}\r\n last24 = []\r\n now = datetime.datetime.now() \r\n noise,ft = file_type.split('.')\r\n ## note can do an entry bg color stoplight thing >24 hrs = red, 12-24 hrs = yellow < 12 = green nice little if loop\r\n for f in filenames_list:\r\n if os.path.isfile(f):\r\n lastmod_date = datetime.datetime.fromtimestamp(os.path.getmtime(f))\r\n mdate_filenames_tuple = lastmod_date, f\r\n mdate_filenames_list.append(mdate_filenames_tuple)\r\n \r\n if now - lastmod_date < file_age:\r\n \r\n #print (\"{} was last modified on {:%a %b %d %Y, %H:%M:%S, %Z}. Moving to 'destinaiton' transfer folder.\".format(f, lastmod_date))\r\n last24.append(f)\r\n shutil.copy2(f, destination)\r\n xferTime=time.time()\r\n \r\n fa = str(file_age) \r\n with sqlite3.connect('fileTransfer.db') as connection:\r\n c = connection.cursor()\r\n c.execute(\"INSERT INTO tbl_lastRun(col_timestamp, col_source, col_destination, col_file_type, col_file_age) VALUES (?,?,?,?,?)\",(xferTime, source, destination, ft, hrs))\r\n connection.commit()\r\n connection.close \r\n\r\n clear(self)\r\n ask_quit(self)", "def database_script_check(table, bs_id, attempt_num):\n con = lite.connect(DB_FILE, timeout = TIMEOUT)\n con.row_factory = lite.Row\n with con:\n cur = con.cursor()\n #get script data\n cur.execute(\"SELECT * FROM {0} WHERE AttemptNum=? 
AND BSID=?\".format(table),\n (attempt_num, bs_id))\n rows = cur.fetchall()\n\n error_data = []\n for row in rows:\n if row['Started'] == None or row['Ended'] == None or row['Exit'] != 0:\n error_data.append([row['Command'], row['Arguments'], row['ExpProc']])\n return error_data", "def createStructuredTranscript_Non_Core_Doc():\n\n #create a temporary folder that will hold the data transformed from doc to docx\n os.system('mkdir ' + INPUT_FOLDER+'temp')\n\n core_doc_asset = []\n missing_count = 0\n missing_files=[]\n # get all the docx files that are part of the core asset\n for file in glob.glob(INPUT_FOLDER+\"*.doc\"):\n\n # RG numbers for the core asset\n if (\"RG-50.030\" not in file and\n \"RG-50.106\" not in file and\n \"RG-50.549\" not in file):\n \n\n \n # convert file to docx, storing it in an untracked folder called temp\n file_docx = file + 'x'\n command = 'textutil -convert docx ' + file + ' -output ' + INPUT_FOLDER+'temp/'+ file_docx.split('/')[-1]\n call(command, shell=True)\n\n # append to the array\n core_doc_asset.append(file_docx)\n \n\n \n\n # get the units for each file, store them and update tracker\n core_doc_asset=create_dictionary_of_file_list(core_doc_asset)\n \n not_processed=0\n processed_doc=0\n \n # get the units for each file, store them and update tracker \n for mongo_rg in core_doc_asset:\n # get text units for this entry\n processed=[]\n result=[]\n \n for file in core_doc_asset[mongo_rg]:\n \n \n \n units = getTextUnits(INPUT_FOLDER+'temp/'+file.split('/')[-1])\n \n if units:\n #replace white spaces\n for i,element in enumerate(units):\n units[i]['unit']=' '.join(element['unit'].split())\n result.extend(units)\n \n processed.append(True)\n else:\n #check if processed\n processed.append(False)\n\n #set the method used to transform the transcript\n h.update_field(DB, TRACKER, \"rg_number\", mongo_rg, \"method\", \"transcribe_non_core_doc\")\n\n not_processed=not_processed+1\n\n if False in processed:\n\n h.update_field(DB, TRACKER, \"rg_number\", mongo_rg, \"status\", \"Unprocessed\")\n not_processed=not_processed+1\n missing_files.append(' '.join(core_doc_asset[mongo_rg]))\n else:\n # insert units on the output collection\n h.update_field(DB, OUTPUT, \"shelfmark\", 'USHMM '+mongo_rg, \"structured_transcript\", result)\n\n \n # update status on the stracker\n \n h.update_field(DB, TRACKER, \"rg_number\", mongo_rg, \"status\", \"Processed\")\n processed_doc=processed_doc+1\n \n\n #delete the temporary folder\n os.system('rm -r ' + INPUT_FOLDER+'temp')\n\n \n #write the missing files to text file\n file = open(OUTPUT_FOLDER_USHMM_PROCESSING_LOGS+'transcribe_non_core_doc_failed.txt','w')\n file.write('\\n'.join(missing_files))\n\n \n # success\n pprint.pprint(\"Non-core doc files were successfully processed, but there are \" + str(missing_count) + \" missing\")", "def verify_count(upload_id, localstore, language):\n reader = csv.DictReader(open(localstore, 'r'))\n\n if len(reader.fieldnames) < 2:\n msg = _('There are missing columns in the uploaded Subject file')\n\n return {'task_id': None, 'success': False, 'messages': [msg]}\n\n upload = SubjectUpload.objects.get(id=upload_id)\n upload.subject_name = reader.fieldnames[1][0:50]\n upload.save()\n\n logger.debug('Created new SubjectUpload transaction record for \"%s\".',\n upload.subject_name)\n\n # do this in bulk!\n # insert upload_id, portable_id, number\n sql = 'INSERT INTO \"%s\" (\"%s\",\"%s\",\"%s\") VALUES (%%(upload_id)s, %%(geoid)s, %%(number)s)' % (\n SubjectStage._meta.db_table, 
SubjectStage._meta.fields[1].attname,\n SubjectStage._meta.fields[2].attname,\n SubjectStage._meta.fields[3].attname)\n args = []\n\n try:\n for row in reader:\n args.append({\n 'upload_id': upload.id,\n 'geoid': row[reader.fieldnames[0]].strip(),\n 'number': row[reader.fieldnames[1]].strip()\n })\n # django ORM takes about 320s for 280K geounits\n #SubjectStage(upload=upload, portable_id=row[reader.fieldnames[0]],number=row[reader.fieldnames[1]]).save()\n\n # direct access to db-api takes about 60s for 280K geounits\n cursor = connection.cursor()\n cursor.executemany(sql, tuple(args))\n\n logger.debug('Bulk loaded CSV records into the staging area.')\n except AttributeError:\n msg = _('There are an incorrect number of columns in the uploaded '\n 'Subject file')\n\n return {'task_id': None, 'success': False, 'messages': [msg]}\n except Exception:\n msg = _('Invalid data detected in the uploaded Subject file')\n\n return {'task_id': None, 'success': False, 'messages': [msg]}\n\n nlines = upload.subjectstage_set.all().count()\n geolevel, nunits = LegislativeLevel.get_basest_geolevel_and_count()\n\n prev_lang = None\n if not language is None:\n prev_lang = get_language()\n activate(language)\n\n # Validation #1: if the number of geounits in the uploaded file\n # don't match the geounits in the database, the content is not valid\n if nlines != nunits:\n # The number of geounits in the uploaded file do not match the base geolevel geounits\n msg = _(\n 'There are an incorrect number of geounits in the uploaded Subject file. '\n )\n if nlines < nunits:\n missing = nunits - nlines\n msg += _n('There is %(count)d geounit missing.',\n 'There are %(count)d geounits missing.', missing) % {\n 'count': missing\n }\n else:\n extra = nlines - nunits\n msg += _n('There is %(count)d extra geounit.',\n 'There are %(count)d extra geounits.', extra) % {\n 'count': extra\n }\n\n # since the transaction was never committed after all the inserts, this nullifies\n # all the insert statements, so there should be no quarantine to clean up\n\n logger.debug(msg)\n\n upload.status = 'ER'\n upload.save()\n\n status = {'task_id': None, 'success': False, 'messages': [msg]}\n\n else:\n # The next task will preload the units into the quarintine table\n task = verify_preload.delay(upload_id, language=language).task_id\n\n status = {\n 'task_id': task,\n 'success': True,\n 'messages': [_('Verifying consistency of uploaded geounits ...')]\n }\n\n # reset language to default\n if not prev_lang is None:\n activate(prev_lang)\n\n return status", "def run():\n check_active_requests()\n start_downloads()\n check_download_attempts()\n numsuccess = verify_files()\n recover_failed_downloads()\n check_downloading_requests()\n acknowledge_downloaded_files()\n if can_request_more():\n make_request()\n return numsuccess", "def verify_files():\n toverify = jobtracker.query(\"SELECT * FROM files \" \\\n \"WHERE status='unverified'\")\n\n numverified = 0\n for file in toverify:\n\n actualsize = pipeline_utils.get_file_size(file['filename'])\n\n expectedsize = file['size']\n\n last_attempt_id = jobtracker.query(\"SELECT id \" \\\n \"FROM download_attempts \" \\\n \"WHERE file_id=%s \" \\\n \"ORDER BY id DESC \" % file['id'], \\\n fetchone=True)\n \n queries = []\n if actualsize == expectedsize:\n dlm_cout.outs(\"Download of %s is complete and verified.\" % \\\n os.path.split(file['filename'])[-1])\n # Everything checks out!\n queries.append(\"UPDATE files \" \\\n \"SET status='downloaded', \" \\\n \"details='Download is complete and 
verified', \" \\\n \"updated_at='%s'\" \\\n \"WHERE id=%d\" % \\\n (jobtracker.nowstr(), file['id']))\n queries.append(\"UPDATE download_attempts \" \\\n \"SET status='downloaded', \" \\\n \"details='Download is complete and verified', \" \\\n \"updated_at='%s'\" \\\n \"WHERE id=%d\" % \\\n (jobtracker.nowstr(), last_attempt_id))\n\n\t # Mark the beam as downloaded in the main database\n\t #mark_beam_downloaded(os.path.split(file['filename'])[-1]))\n\n numverified += 1\n else:\n dlm_cout.outs(\"Verification of %s failed. \\n\" \\\n \"\\tActual size (%d bytes) != Expected size (%d bytes)\" % \\\n (os.path.split(file['filename'])[-1], actualsize, expectedsize))\n \n # Boo... verification failed.\n queries.append(\"UPDATE files \" \\\n \"SET status='failed', \" \\\n \"details='Downloaded file failed verification', \" \\\n \"updated_at='%s'\" \\\n \"WHERE id=%d\" % \\\n (jobtracker.nowstr(), file['id']))\n queries.append(\"UPDATE download_attempts \" \\\n \"SET status='verification_failed', \" \\\n \"details='Downloaded file failed verification', \" \\\n \"updated_at='%s'\" \\\n \"WHERE id=%d\" % \\\n (jobtracker.nowstr(), last_attempt_id))\n jobtracker.query(queries)\n return numverified", "def resetScript(dbConnection, maxConcurrent):\n while (Tools.numRunningProcesses() != 0): # wait for the transcriptions to end. Pings every 2 mins\n time.sleep(120)\n emptyPodcastFolder = Tools.cleanupFolder(\"podcasts\")\n DatabaseInteract.refreshDatabase(dbConnection)", "def process_files(file_list, mdb_database, mdb_user, mdb_pwd, mdb_server, mdb_auth):\n uri = \"mongodb://{0}:{1}@{2}/{3}\".format(mdb_user, mdb_pwd, mdb_server, mdb_auth)\n db_name = '{0}'.format(mdb_database)\n client = pymongo.MongoClient(uri)\n db = client[db_name]\n raw_msg = db['raw_messages']\n total_processed = 0\n total_queries = 0\n total_error = 0\n unique_queries_to_add = []\n queries_hash = set()\n\n for file_name in file_list:\n print('--> processing file: {0}'.format(file_name))\n with open(file_name) as f:\n lines = f.readlines()\n total_queries += len(lines)\n for i, line in enumerate(lines):\n try:\n jsonn = json.loads(line)\n jsonn['insertTime'] = get_timestamp()\n doc_hash = calculate_hash(jsonn)\n if doc_hash not in queries_hash:\n unique_queries_to_add.append(jsonn)\n queries_hash.add(doc_hash)\n total_processed += 1\n except Exception as e:\n print('ERROR: file {0} line {1} --- {2}'.format(file_name, i, e))\n total_error += 1\n\n total_unique_queries = len(unique_queries_to_add)\n print('- Total processed: {0} from {1}'.format(total_processed, total_queries))\n print('- Total unique queries: {0} '.format(total_unique_queries))\n print('- Total errors: {0}'.format(total_error))\n print('- Adding queries to MongoDB')\n for jsonn in tqdm(unique_queries_to_add):\n raw_msg.insert_one(jsonn)", "def ifAlreadyDone(self, cxRepo, schemaRepo, schema, tablename):\n logging.debug(f\"\"\"check if {schema}.{tablename} has been analyzed\"\"\")\n conn = self.connect(cxRepo)\n sql = f\"\"\"select table_name from {schemaRepo}.tablediff where lower\n (table_name) = lower('{tablename}') and schema1 = '{schema}' and\n server1_status = 'ready' and server1_status = 'ready' and result in\n ('ready', 'init')\"\"\"\n with conn:\n with conn.cursor() as curs:\n curs.execute(sql)\n row = curs.fetchone()\n if row is None:\n return 1\n else:\n return 0", "def should_start_analysis(self):\n return len(self.task_queue) >= self.bulk_size", "def validate_and_submit(self, filename):\n\n matches = [p for p in self.process_list[self.name] if filename == 
p.source]\n if filename not in self.transfer_queue[self.name] and not matches:\n t = threading.Thread(target=self.is_stable, args=(filename,))\n t.setDaemon(True)\n t.start()", "def processFile(fileName):\n\n cursor = db.cursor()\n cursor.execute(\"BEGIN\")\n institutionCounter = 0\n\n def submitInstitute(bankCode, bankName, bic):\n try:\n cursor.execute(\"INSERT INTO institutions (bankCode, bic, name) VALUES(?,?,?)\", (bankCode, bic, bankName))\n except sqlite3.Error as e:\n print(\"Sorry , Error: {0} while inserting {1} ({2})\".format(e.args[0], bankCode, bic))\n\n book = xlrd.open_workbook(fileName, 'r')\n sheet = book.sheet_by_index(0)\n\n for row_index in range(2, sheet.nrows):\n submitInstitute(sheet.cell(row_index,0).value, sheet.cell(row_index,2).value, sheet.cell(row_index,1).value)\n institutionCounter += 1\n\n return institutionCounter", "def task_run_core():\n\n ## initialize parameters\n if task_get_option('format'):\n fmts = task_get_option('format')\n else:\n fmts = 'HB' # default value if no format option given\n for fmt in fmts.split(','):\n last_updated = fetch_last_updated(fmt)\n write_message(\"last stored run date is %s\" % last_updated)\n\n sql = {\n \"all\" : \"\"\"SELECT br.id FROM bibrec AS br, bibfmt AS bf\n WHERE bf.id_bibrec = br.id AND bf.format = '%s'\"\"\" % fmt,\n \"last\": \"\"\"SELECT br.id FROM bibrec AS br\n INNER JOIN bibfmt AS bf ON bf.id_bibrec = br.id\n WHERE br.modification_date >= '%(last_updated)s'\n AND bf.format='%(format)s'\n AND bf.last_updated < br.modification_date\"\"\" \\\n % {'format': fmt,\n 'last_updated': last_updated.strftime('%Y-%m-%d %H:%M:%S')},\n \"missing\" : \"\"\"SELECT br.id\n FROM bibrec as br\n LEFT JOIN bibfmt as bf\n ON bf.id_bibrec = br.id AND bf.format ='%s'\n WHERE bf.id_bibrec IS NULL\n AND br.id BETWEEN %%s AND %%s\n \"\"\" % fmt,\n }\n sql_queries = []\n cds_query = {}\n if task_has_option(\"all\"):\n sql_queries.append(sql['all'])\n if task_has_option(\"last\"):\n sql_queries.append(sql['last'])\n if task_has_option(\"collection\"):\n cds_query['collection'] = task_get_option('collection')\n else:\n cds_query['collection'] = \"\"\n\n if task_has_option(\"field\"):\n cds_query['field'] = task_get_option('field')\n else:\n cds_query['field'] = \"\"\n\n if task_has_option(\"pattern\"):\n cds_query['pattern'] = task_get_option('pattern')\n else:\n cds_query['pattern'] = \"\"\n\n if task_has_option(\"matching\"):\n cds_query['matching'] = task_get_option('matching')\n else:\n cds_query['matching'] = \"\"\n\n if task_has_option(\"recids\"):\n recids = split_cli_ids_arg(task_get_option('recids'))\n else:\n recids = []\n\n ### sql commands to be executed during the script run\n ###\n bibreformat_task(fmt, sql, sql_queries, cds_query, task_has_option('without'), not task_has_option('noprocess'), recids)\n return True", "def is_ready_to_recognize():\n time.sleep(0.01)\n ready_to_recognize_file = open(\"varThread\\\\ready_recognize.txt\", \"r\")\n if int(ready_to_recognize_file.read(1)) == 1:\n ready_to_recognize_file.close()\n is_recognized_file = open('varThread\\\\recognized.check', 'r')\n is_recognized = is_recognized_file.read(1)\n if is_recognized == '0':\n is_recognized_file.close()\n return True \n else:\n is_recognized_file.close()\n return False \n else:\n ready_to_recognize_file.close()\n return False", "def abortStageIn(dbh, lfns, DBReleaseIsAvailable):\n\n numberOfFiles = len(lfns)\n numberOfDBReleaseFiles = 0\n\n if DBReleaseIsAvailable:\n for lfn in lfns:\n if isDBReleaseFile(dbh, lfn): # multi-trf jobs will 
have more than one DBRelease file\n numberOfDBReleaseFiles += 1\n\n if numberOfDBReleaseFiles < numberOfFiles:\n tolog(\"Number of locally available DBRelease files = %d (%d files in total), continue with stage-in\" % (numberOfDBReleaseFiles, numberOfFiles))\n status = False # do not abort stage-in\n else:\n tolog(\"Number of locally available DBRelease files = %d (%d files in total), abort stage-in\" % (numberOfDBReleaseFiles, numberOfFiles))\n status = True # abort stage-in\n\n return status", "def insert_files(db: DB, files: list, user_id: int,) -> bool:\n db_updated = False\n for file in files:\n\n with open(file, newline='') as csv_file:\n rows = list(csv.reader(csv_file))\n\n row = 1 if \"Apple\" in file else 4\n\n for i in range(row, len(rows)):\n receipt = None\n\n if \"Apple\" in file:\n receipt = AppleReceipt(db, rows[i], user_id)\n elif \"ESL\" in file:\n receipt = ESLReceipt(db, rows[i], user_id)\n\n if receipt is not None and not receipt.exists_in_db():\n receipt.insert_to_db()\n db_updated = True\n\n if db_updated:\n log(\"The database has been updated.\", level=\"debug\")\n else:\n log(\"No updates made to the database.\", level=\"debug\")\n\n return db_updated", "def transcribe_proc():\n while True:\n # Get result of transcription\n transcribe_result = transcriber.transcribe_stream(\n audio_stream(), sample_rate, sample_width, channels\n )\n\n _LOGGER.debug(\"Transcription result: %s\", transcribe_result)\n\n transcribe_result = transcribe_result or Transcription.empty()\n transcribe_dict = dataclasses.asdict(transcribe_result)\n transcribe_dict[\"timeout\"] = is_timeout\n\n print_json(transcribe_dict)\n transcription_printed.set()", "def perform_process(transformer: transformer_class.Transformer, check_md: dict) -> dict:\n # Process each CSV file into BETYdb\n start_timestamp = datetime.datetime.now()\n files_count = 0\n files_csv = 0\n lines_read = 0\n error_count = 0\n files_loaded = []\n for one_file in check_md['list_files']():\n files_count += 1\n if os.path.splitext(one_file)[1].lower() == '.csv':\n files_csv += 1\n\n # Make sure we can access the file\n if not os.path.exists(one_file):\n msg = \"Unable to access csv file '%s'\" % one_file\n logging.debug(msg)\n return {'code': -1000,\n 'error': msg}\n\n try:\n # Read in the lines from the file\n with open(one_file, 'r') as in_file:\n reader = csv.DictReader(in_file)\n files_loaded.append(one_file)\n for row in reader:\n centroid_lonlat = [row['lon'], row['lat']]\n time_fmt = row['dp_time']\n timestamp = row['timestamp']\n dp_metadata = {\n \"source\": row['source'],\n \"value\": row['value']\n }\n trait = row['trait']\n\n __internal__.create_datapoint_with_dependencies(transformer.args.clowder_url, transformer.args.clowder_key,\n trait, (centroid_lonlat[1], centroid_lonlat[0]), time_fmt,\n time_fmt, dp_metadata, timestamp)\n lines_read += 1\n\n except Exception:\n logging.exception(\"Error reading CSV file '%s'. Continuing processing\", os.path.basename(one_file))\n error_count += 1\n\n if files_csv <= 0:\n logging.info(\"No CSV files were found in the list of files to process\")\n if error_count > 0:\n logging.error(\"Errors were found during processing\")\n return {'code': -1001, 'error': \"Too many errors occurred during processing. 
Please correct and try again\"}\n\n return {\n 'code': 0,\n configuration.TRANSFORMER_NAME: {\n 'version': configuration.TRANSFORMER_VERSION,\n 'utc_timestamp': datetime.datetime.utcnow().isoformat(),\n 'processing_time': str(datetime.datetime.now() - start_timestamp),\n 'num_files_received': str(files_count),\n 'num_csv_files': str(files_csv),\n 'lines_loaded': str(lines_read),\n 'files_processed': str(files_loaded)\n }\n }", "async def check_migration(migration):\n try:\n count = await db.Migrations.count_documents({migration: True})\n except Exception as e:\n print(e)\n return count > 0", "def main():\n conn = psycopg2.connect(\"host=127.0.0.1 dbname=sparkifydb user=student password=student\")\n cur = conn.cursor()\n\n process_data(cur, conn, filepath='data/song_data', func=process_song_file)\n print('song file processing is complete')\n process_data(cur, conn, filepath='data/log_data', func=process_log_file)\n print('log file processing is complete')\n conn.close()", "def fileTranscriptionContent(filePath):\n try:\n continu = True\n f = open(filePath, 'r')\n fileContent = \"\"\n while (continu):\n temp = f.readline(300000)\n if(len(temp) == 0):\n continu = False\n else:\n fileContent += temp\n results = []\n f.close()\n url = re.findall(r'URL:(.*?)\\n', fileContent)\n results.append(url)\n realTimeFactor = re.findall(r'Timing stats: real-time factor for offline decoding was (.*?) = ', fileContent)\n results.append(realTimeFactor)\n transcription = re.findall(r'utterance-id1 (.*?)\\n', fileContent)\n for item in transcription:\n if(len(item) > 500):\n results.append(item.replace(\"'\", \"''\"))\n if((len(results[0]) > 0) and (len(results[1]) > 0) and (len(results[2]) > 0)):\n return results\n else:\n Tools.writeException(\"fileTranscriptionContent\", \"ERROR attempted to parse \" + filePath + \" but got \" + str(results))\n return False\n except Exception as e:\n Tools.writeException(\"fileTranscriptionContent\", e)", "def transcribe(config):\n\n long_mode = True\n\n if 'audio_data' not in config:\n raise KeyError(\"`audio_data` not specified for transcription operation.\")\n\n if 'timeout' not in config:\n raise KeyError(\"`timeout` not specified for transcription operation.\")\n\n try:\n if config.pop('audio_duration') < 60: \n long_mode = False\n except KeyError:\n pass\n\n if long_mode:\n print(\"Running in long audio duration mode (audio is >60 seconds duration)...\")\n print(\"Uploading file...\")\n remote_object = gcloud_upload_file(config['audio_data'], config['storage_bucket'])\n file_name = remote_object.rsplit('/', 1)[-1]\n\n config['audio_data'] = \"gs://%s/%s\" % (config['storage_bucket'], file_name)\n storage_bucket = config.pop('storage_bucket')\n\n print(\"Transcribing file...\")\n result = gcloud_transcribe_long(config)\n\n print(\"Transcription successful, cleaning up...\")\n print(\"Deleting uploaded GCS file...\")\n gcloud_delete_file(file_name, storage_bucket)\n else:\n print(\"Transcribing file...\")\n config.pop('timeout')\n config.pop('storage_bucket')\n result = gcloud_transcribe_short(config)\n\n return result", "def _parse_transcription_file(self, root: str, name: str) -> None:\n trans_path = os.path.join(root, name)\n with open(trans_path, \"r\", encoding=\"utf-8\") as trans:\n # Each line has the form \"ID THE TARGET TRANSCRIPTION\"\n for line in trans:\n id_, transcript = line.split(maxsplit=1)\n dropped = self._process_audio(root, id_)\n if not dropped:\n self._process_transcript(transcript)", "def cdb_check():\n logfile = 'janusess'\n logger = 
logging.getLogger(logfile)\n\n check_time = 0.5\n\n log = 'Checking CouchDB every {0} sec until operational.'.format(check_time)\n logger.debug(log)\n\n count = 1\n while True:\n\n # Issue CouchDB GET request and process result\n http_resp = requests.get('http://127.0.0.1:5984/')\n\n # Successful GET request\n if http_resp.status_code == 200:\n log = 'CouchDB is operational.'\n logger.info(log)\n MPQ_ACT.put_nowait([\n datetime.now().isoformat(' '),\n 'INFO',\n log\n ])\n MPQ_STAT.put_nowait([\n 'base',\n [\n 'couchdb',\n STAT_LVL['op']\n ]\n ])\n break\n\n # All GET errors\n else:\n log = 'CouchDB is not operational, failed with http ' +\\\n 'response {0}. Making another attempt.'.format(http_resp.status_code)\n logger.warning(log)\n MPQ_ACT.put_nowait([\n datetime.now().isoformat(' '),\n 'WARNING',\n log\n ])\n MPQ_STAT.put_nowait([\n 'base',\n [\n 'couchdb',\n STAT_LVL['cfg_err']\n ]\n ])\n\n count += count\n time.sleep(check_time)", "def check(self):\n self.logger.info(\"Performing check ... (database file: '%s')\" % self.config.database)\n # read the database file\n try:\n f = open(self.config.database)\n data = json.load(f)\n f.close()\n except Exception, ex:\n self.logger.error(\"Could not read database file, reason: %s\" % ex)\n return 1\n # perform actual check against the database file\n # data: {file_path: {last_modif: <value>, last_modif_human: <value>}}\n for file_name, values in data.items():\n try:\n dt = os.path.getmtime(file_name)\n if dt != values[\"last_modif\"]:\n self._modified.append(file_name)\n except OSError:\n self._removed.append(file_name)\n # check actual files in the directory tree - check for newly\n # added files\n # get files currently in the directory - returns full file paths\n curr_file_names = helpers.get_files(path=self.config.watched_dir,\n file_mask=self.config.watch_masks,\n recursive=True)\n for file_name in curr_file_names:\n if file_name in self.config.ignore_list:\n continue\n encoded_file_name = unicode(file_name, \"utf-8\")\n if encoded_file_name not in data.keys():\n self._added.append(file_name)\n self.summarize()\n return 0", "def has_finished():", "def check_processed(args):\n\n unknown_fns = glob.glob(\"*M.txt\") + glob.glob(\"unknowns/*M.txt\")\n unknown_fns = [os.path.basename(fn) for fn in unknown_fns]\n\n with sqlite3.connect(args.search_db) as conn:\n conn.row_factory = sqlite3.Row\n sql_ranges = conn.execute('SELECT * FROM range ORDER BY p, d, m_start').fetchall()\n\n # At some later point maybe don't load all (group by thousands or something)\n conn.row_factory = None\n results = conn.execute('SELECT P, D, m FROM result').fetchall()\n print(f\"\\tLoaded {len(results):,} results\")\n\n # ---- Add file only ranges\n ranges, lookup = sql_and_file_ranges(sql_ranges, unknown_fns)\n\n # ---- Find results not belonging to any range\n build_and_count_pd_results(results, ranges, lookup)\n\n print_results(conn, ranges, lookup)", "def ready_to_export_transaction(self):\n try:\n SubjectReferral = models.get_model('bcpp_subject', 'subjectreferral')\n subject_referral = SubjectReferral.objects.get(subject_visit=self.subject_visit)\n if subject_referral.referral_code:\n return True\n except SubjectReferral.DoesNotExist:\n pass\n return False", "def checkforDB(self):\r\n #This is a hardisk operation idk why im doing it in the networking class lol\r\n #Check for folder\r\n cDir = os.getcwd()\r\n u = \" \\ \"\r\n u = u.replace(' ', '')\r\n\r\n #Format from \\ to / because windows >:C seriously ima kill which ever windows dev decided \\ is better 
than /\r\n\r\n cDirF = cDir.replace(u, '/') # if on any system that uses / for files, this statement'll pass... unless they used \\ in a file name... then we're boned.\r\n xmlFileSys = cDirF+\"/runnerData\"\r\n check = os.path.isdir(cDirF+\"/runnerData\")\r\n if(check == True):\r\n #check for any xml files.\r\n #first change dir lol, it won't work if you're not in the directory.\r\n os.chdir(xmlFileSys)\r\n listXml = 0\r\n for files in glob.glob(\"*.xml\"):\r\n listXml = listXml + 1\r\n if(listXml != 0):\r\n return 0 #True -- The DB is there\r\n if(listXml == 0):\r\n return 1 # False -- The files and or the folder isnt there\r\n\r\n if(check == False):\r\n try:\r\n os.makedirs('runnerData') # Gonna go ahead and make the dir here so we can populate it later.\\\r\n return 1 #False\r\n except:\r\n #Meaning the folder is there yet the files arent...\r\n pass\r\n return 1 #False\r", "def get_candidate_queries(num_candidate, file_path):\n try:\n # TO COMPLETE\n except IOError:\n # TO COMPLETE", "def database_search_done_check(obsid, pointing):\n con = lite.connect(DB_FILE, timeout = TIMEOUT)\n con.row_factory = lite.Row\n with con:\n cur = con.cursor()\n #get script data\n cur.execute(\"SELECT Ended FROM PulsarSearch WHERE Obsid=? AND Pointing=?\",\n (obsid, pointing))\n endtime = cur.fetchall()\n searched_check = False\n for e in endtime:\n if e[0] is not None:\n searched_check = True\n return searched_check", "def nohupTranscriptionContent(filePath):\n try:\n continu = True\n fileContent = \"\"\n f = open(filePath, 'r')\n while (continu):\n temp = f.readline(900000)\n if(len(temp) == 0):\n continu = False\n else:\n fileContent += temp\n results = []\n realTimeFactor = re.findall(r'Timing stats: real-time factor for offline decoding was (.*?) = ', fileContent)\n results.append(realTimeFactor)\n transcription = re.findall(r'utterance-id(.*?) (.*?)\\n', fileContent)\n transcriptionList = []\n transcriptionIDList = []\n for item in transcription:\n if(len(item[1]) > 1000):\n transcriptionIDList.append(item[0])\n transcriptionList.append(item[1])\n results.append(transcriptionList)\n results.append(transcriptionIDList)\n transcriptionTime = re.findall(r'seconds / (.*?) 
seconds\\.', fileContent)\n results.append(transcriptionTime)\n return results\n except Exception as e:\n Tools.writeException(\"nohupTranscriptionContent\", e)\n return False", "def check_backup(self):\n res = 0\n sql = '''select status\n FROM v$rman_backup_job_details\n WHERE start_time > SYSDATE - 1\n ORDER BY END_TIME '''\n self.cur.execute(sql)\n curres = self.cur.fetchall()\n rescount = (self.cur.rowcount)\n if rescount == 0:\n res = 99\n print(res)\n else:\n for i in curres:\n if re.search('FAILED|ERROR', i[0]):\n res = res + 1\n print(res)", "def step060():\n logger.logMessage('Begin: updating database')\n update_sql = 'update weather_work set tsa=$1, esDocId = $2 where time = $3;'\n pgConn = pg.connect(host=host,user=user,password=password,database=database) \n c = pgConn.cursor()\n# c.execute('drop table weather_work')\n# c.execute('create table weather_work (like weather excluding constraints)')\n# c.execute('insert into weather_work select * from weather_dupes')\n# c.execute('create index weather_work_time on weather_work(time)')\n pgConn.commit()\n c.execute('prepare updtDocid as {0}'.format(update_sql))\n numUpdates = 0\n with open(renumFile,'r') as f:\n line = f.readline().rstrip()\n while line != '':\n fields = line.split(';')\n tsa = int(fields[0])\n time = fields[1].rstrip() \n docid = fields[2].rstrip()\n try:\n dic = { 'esDocId': docid, 'tsa': tsa , 'time': time+\"+00:00\" }\n c.execute('execute updtDocid (%(tsa)s,%(esDocId)s,%(time)s)',dic)\n numUpdates += 1\n if numUpdates % 250 == 0:\n pgConn.commit()\n logger.logMessage(level='DEBUG',message=\"{0:9d} commited updates\".format(numUpdates))\n except:\n logger.logException('Exception while updating database')\n pgConn.rollback()\n raise\n line = f.readline().rstrip()\n pgConn.commit()\n logger.logMessage(\"Total updates: {0:d}\".format(numUpdates))\n c.close()\n pgConn.close()\n logger.logMessage('End : updating database')", "def check_upload(job_id, file, mainchain = None):\n ## NOTE:\n ## - Requires uploaded structures to be X-ray EXPDTA\n ## - Checks if the PDB file contains valid aa/na residues\n ## - PDB file must have at least 30 ATOMs\n ## - PDB file can not have lowercase alt. res. numbers\n ## - Checks standard deviation of temp. factors\n ## - Checks that not all occupancies are 0.00\n ## - Checks for properly formatted ATOM lines\n tmpfile = None ## this is the second part of the return\n atom_num = []\n res_type = []\n res_num = []\n chain = []\n temp_factors = []\n bad_std = -1\n num_total = 0\n num_good = 0\n occupancy = 0.0\n ignore = 0\n line_num = 0\n\n for line in file:\n line_num += 1\n\n if line.startswith('HEADER'):\n header_id = re.sub(r\"^HEADER.{56}(....)\", '\\\\1', line).strip()\n ## FIXME: Calls to MySQL can not be made in this def, 2009-06-16\n #mysql.job_set_header_id(job_id, str(header_id))\n\n #if line.startswith('EXPDTA NMR') or \\\n # line.startswith('EXPDTA SOLUTION NMR'):\n # ## TODO: Might need to add \"SOLID-STATE NMR\", 2009-11-10\n # msg = \"NMR structure! \"\n # msg += \"Please do not submit NMR structures, theoretical models, \"\n # msg += \"or any PDB file with unrefined Bs.\"\n # return msg\n\n elif line.startswith('EXPDTA') and line.find('X-RAY DIFFRACTION') == -1:\n msg = \"Not an X-ray diffraction structure. TLSMD currently only \"\n msg += \"performs analysis on X-ray models. Will not proceed.\"\n return msg, tmpfile\n\n elif re.match(r'^REMARK 2 RESOLUTION\\. ([0-9\\.]{1,}) ANGSTROMS.*', line):\n resolution = re.sub(r'^REMARK 2 RESOLUTION\\. 
([0-9\\.]{1,}) ANGSTROMS.*', '\\\\1', line).strip()\n ## FIXME: Calls to MySQL can not be made in this def, 2009-06-16\n #mysql.job_set_resolution(job_id, resolution)\n\n elif re.match('^ATOM.....................[0-9][a-z]', line):\n ## E.g., Don't allow \"100b\". Force it to be \"100B\"\n example = re.sub(r'^ATOM.....................([0-9][a-z]).*', '\\\\1', line).strip()\n msg = \"Please change lowercase to uppercase for alternate \"\n msg += \"residue numbers. (E.g., change \\\" %s \\\" to \\\" %s \\\")\" % (\n example, example.upper())\n return msg, tmpfile\n\n elif mainchain == True and line.startswith('ATOM') and \\\n const.RE_MAINCHAIN_ATOMS.match(line) and \\\n Library.library_is_standard_residue(line[17:20].strip()):\n ## Only pass mainchain atoms to the running_stddev() function\n tmpfile = misc.generate_security_code()\n num_total += 1\n\n try:\n int(line[7:11].strip())\n int(line[23:26].strip())\n float(line[56:60].strip())\n float(line[60:66].strip())\n except:\n return \"Not a proper ATOM line: <pre>%s</pre>\" % line, tmpfile\n\n if float(line[56:60].strip()) < 1.00:\n ## ignore occupancies < 1.00\n ignore += 1\n continue\n else:\n num_good += 1\n atom_num.append(int(line[7:11].strip()))\n res_type.append(line[17:20].strip())\n res_num.append(int(line[23:26].strip()))\n chain.append(line[21:22])\n occupancy += float(line[56:60].strip())\n temp_factors.append(float(line[60:66].strip()))\n\n elif mainchain == False and line.startswith('ATOM') and (\n Library.library_is_standard_residue(line[17:20].strip())):\n tmpfile = job_id\n num_total += 1\n\n try:\n int(line[7:11].strip())\n int(line[23:26].strip())\n float(line[56:60].strip())\n float(line[60:66].strip())\n except:\n return \"Not a proper ATOM line: <pre>%s</pre>\" % line, tmpfile\n\n if float(line[56:60].strip()) < 1.00:\n ## ignore occupancies < 1.00\n ignore += 1\n continue\n else:\n num_good += 1\n atom_num.append(int(line[7:11].strip()))\n res_type.append(line[17:20].strip())\n res_num.append(int(line[23:26].strip()))\n chain.append(line[21:22])\n occupancy += float(line[56:60].strip())\n temp_factors.append(float(line[60:66].strip()))\n\n else:\n continue\n\n #return \"Number of atoms: %s (%s) (%s)\" % (num_total, len(temp_factors), num_good)\n\n ## TODO: Add check for ANISOU that are pure ISOT, 2010-03-23\n\n ## FIXME: This does not work yet.\n #if(ignore == num_total):\n # return \"All occupancies are less than 1.0, so all atoms will be ignored. Nothing to do.\"\n\n msg = \"Not a PDB structure or has unrecognized residue names.\"\n if mainchain and num_good < 5:\n return msg, tmpfile\n elif not mainchain and num_good < 30:\n return msg, tmpfile\n\n if(occupancy / num_good == 0.0):\n return \"All occupancies are 0.0. 
TLSMD won't run on this structure.\", tmpfile\n\n bad_std, tmpfile = running_stddev(tmpfile, atom_num, res_type, res_num, \n chain, temp_factors)\n if bad_std > 0:\n ## If there are a string of \"bad\" B-factors, return a plot showing the\n ## \"bad\" regions and do not proceed any further in the analysis.\n f = open('%s/%s.gnu' % (conf.WEBTMP_PATH, tmpfile), 'w')\n\n ## modify script template\n script = _STDDEV_FOR_BAD_TFACT_TEMPLATE\n script = script.replace(\"<webtmp_path>\", conf.WEBTMP_PATH)\n script = script.replace(\"<tmpfile>\", tmpfile)\n script = script.replace(\"<gnuplot_font>\", conf.GNUPLOT_FONT)\n #script = script.replace(\"<min_stddev_bfact>\", conf.MIN_STDDEV_BFACT)\n #script = script.replace(\"<max_stddev_bfact>\", conf.MAX_STDDEV_BFACT)\n\n f.write(script)\n f.close()\n subprocess.Popen([r\"%s\" % conf.GNUPLOT, \"%s/%s.gnu\" % (\n conf.WEBTMP_PATH, tmpfile)]).wait()\n\n return_string = \"Standard deviation of temperature factors is less \"\n return_string += \"than %s or greater than %s for those residues in \" % (\n conf.MIN_STDDEV_BFACT, conf.MAX_STDDEV_BFACT)\n return_string += \"the shaded regions below:<br>\"\n return_string += \"<center><img src='%s/%s.png'/></center>\" % (\n conf.WEBTMP_URL, tmpfile)\n return_string += \"<br><h3>NOTE: Your structure was run through a \"\n return_string += \"sanity check twice: (1) using all atoms in your \"\n return_string += \"structure; and (2) using only the mainchain atoms \"\n return_string += \"({N,CA,C,O,CB} or {P,O5*,C5*,C4*,C3*,O3*}). \"\n return_string += \"Both sanity checks failed.</h3>\"\n return return_string, tmpfile\n\n return '', tmpfile", "def main():\n conn = psycopg2.connect(f\"host=127.0.0.1 dbname=sparkifydb user={username} password={password}\")\n cur = conn.cursor()\n conn.set_session(autocommit=True)\n\n artists_data, songs_data = process_song_file()\n songplays_help_df, time_data, users_data = process_log_file()\n songplays_data = process_songplays_data(artists_data, songs_data, songplays_help_df)\n\n data_list = [songplays_data, users_data, songs_data, artists_data, time_data]\n for idx, (data, query) in enumerate(zip(data_list, insert_table_queries), start=1):\n print(f\"inserting file {idx}/{len(data_list)}\")\n for row in data:\n try:\n cur.execute(query, row)\n except psycopg2.Error as error:\n print(f\"Psychog2 error @ file {idx} row {row}: {error} NOTE: this file will not be inserted.\")\n\n conn.close()", "def import_queued_submissions(conn, limit=50):\n query = schema.submission.select(schema.submission.c.handled == False).limit(limit)\n count = 0\n for submission in conn.execute(query):\n import_submission(conn, submission)\n count += 1\n logger.debug(\"Imported %d submissions\", count)", "def firsttime_run(filedir='recount-methylation-files', \n run_timestamp=gettime_ntp()):\n print(\"Beginning first time server run...\")\n equery_dest = settings.equerypath; temppath = settings.temppath\n gse_query(); gsm_query()\n gseqfile = getlatest_filepath(equery_dest,'gse_edirectquery') \n gsmqfile = getlatest_filepath(equery_dest,'gsm_edirectquery')\n gsequery_filter()\n gsefiltpath = getlatest_filepath(equery_dest,'gsequery_filt')\n if gsefiltpath:\n gsefiltd = querydict(querypath=gsefiltpath,splitdelim=' ')\n gseidlist = list(gsefiltd.keys())\n print(\"GSE id list of len \"+str(len(gseidlist))+\" found. Returning...\")\n return gseidlist\n else:\n print(\"Error retrieving gse query filtered file. 
Returning...\")\n return None\n return None", "def jobs_validator(config_file):\n\n config = load_configuration.load_configuration(config_file)\n config = load_configuration.affix_production_tag(config, ['db_collection', 'db_production_files_collection'])\n\n database = MongoDbUtil('admin', db_server=config['db_server'], db_name=config['db_name']).database()\n\n # spawn a stats heartbeat\n stats_heartbeat = StatsHeartbeat(config['heartbeat_interval'],\n database[config['db_collection']],\n accum_stats={'completed_job': 0, 'completed_muDst': 0, 'failed_job':0, 'failed_muDst': 0, 'timeout_job': 0},\n stats={'total_in_queue': 0, 'running': 0, 'running_bfc': 0, 'pending': 0, 'completing': 0, 'unknown': 0})\n logging.info(\"Heartbeat daemon spawned\")\n\n # loop over queued jobs and update status\n files_coll = database[config['db_production_files_collection']]\n\n while True:\n\n try:\n slurm_jobs = slurm_utility.get_queued_jobs(config['slurm_user'])\n stats = {'total_in_queue': len(slurm_jobs), 'running': 0, 'running_bfc': 0, 'pending': 0, 'completing': 0, 'unknown': 0}\n\n for job in files_coll.find({'$or': [{'status': 'PENDING'}, {'status': 'RUNNING'}]}):\n\n #job is still in queue, update info\n if job['slurm_id'] in slurm_jobs:\n\n state = slurm_jobs[job['slurm_id']]\n if state == 'PENDING':\n stats['pending'] += 1\n elif state == 'RUNNING':\n stats['running'] += 1\n stats['running_bfc'] += job['number_of_cores']\n if state != job['status']:\n job['status'] = 'RUNNING'\n files_coll.update_one({'_id':job['_id']}, {'$set': job}, upsert=False)\n elif state == 'COMPLETING':\n stats['completing'] += 1\n else:\n stats['unknown'] += 1\n\n #job is out of queue, check status\n else:\n\n try:\n job_stats = slurm_utility.get_job_stats(job['slurm_id'])\n except slurm_utility.Error:\n logging.warning('Slurm is not available...')\n continue\n\n state = job_stats['state']\n if state == 'COMPLETED':\n job['status'] = 'COMPLETED'\n stats_heartbeat.accum_stats['completed_job'] += 1\n\n if not pass_qa(job):\n job['failed'] += 1\n stats_heartbeat.accum_stats['failed_muDst'] += 1\n else:\n stats_heartbeat.accum_stats['completed_muDst'] += 1\n\n job['Elapsed'] = job_stats['Elapsed']\n job['CPUTime'] = job_stats['CPUTime']\n job['CpuEff'] = job_stats['CpuEff']\n job['MaxRSS'] = job_stats['MaxRSS']\n job['MaxVMSize'] = job_stats['MaxVMSize']\n job['Reserved'] = job_stats['Reserved']\n files_coll.update_one({'_id':job['_id']}, {'$set': job}, upsert=False)\n elif state == 'FAILED':\n stats_heartbeat.accum_stats['failed_job'] += 1\n job['failed'] += 1\n job['status'] = 'FAILED'\n files_coll.update_one({'_id':job['_id']}, {'$set': job}, upsert=False)\n elif state == 'TIMEOUT':\n stats_heartbeat.accum_stats['timeout_job'] += 1\n job['failed'] += 1\n job['status'] = 'TIMEOUT'\n files_coll.update_one({'_id':job['_id']}, {'$set': job}, upsert=False)\n else:\n stats['unknown'] += 1\n\n stats_heartbeat.stats = stats\n\n except slurm_utility.Error:\n logging.warning('Slurm is not available...')\n\n time.sleep(config['recheck_sleep_interval'])", "def main():\n # Declaration\n cnt_errors = 0\n\n # Get the script path, all DBF files inside this path will be converted into CSV.\n script_path = os.path.dirname(__file__)\n\n # Clear the console screen\n clear()\n\n # Script is starting to find all DBF files.\n print('Script is searching for DBF files.')\n\n # Search for DBF files inside the script path.\n for dirpath, dirname, filenames in os.walk(script_path):\n for filename in filenames:\n if filename.endswith(\".dbf\"):\n 
print(\"Convert: {filename} to .csv\".format(filename=filename))\n\n # Combine both strings\n full_path = dirpath + \"\\\\\" + filename\n\n # Try to load the DBF file\n try:\n table = dbfread.DBF(full_path, encoding=\"windows-1252\", ignore_missing_memofile=False)\n except dbfread.exceptions.DBFNotFound as dbf_exc:\n print(\"Error occurred: \\n{file} \\n{error}\".format(file=filename, error=dbf_exc))\n cnt_errors += 1\n continue\n\n # Load data from table into an DataFrame.\n df = pd.DataFrame(iter(table))\n\n # Remove last four characters.\n csv_file = filename[:-4] + \".csv\"\n\n # Join the script path.\n output_path_csv = os.path.join(script_path, csv_file)\n\n # Print a message and create the csv file.\n print(\"Convert: {filename} to .csv\".format(filename=filename))\n df.to_csv(output_path_csv, sep=';')\n\n # Print out amount of not converted DBF files.\n if cnt_errors > 0:\n print('Amount of not converted files: {}'.format(cnt_errors))", "def multipleValidTxTest(self):\n log.info(\"--------------------Multiple valid Tx tests now started-------------------\")\n\n self.mvb.txWaitingPool += self.readTxFromFile('./TxFiles/MultipleValidTestTx.json')\n self.mvb.broadcastTxPools()", "def checkPre(dbConnection):\n cursor = dbConnection.cursor()\n cursor.execute(\"SELECT audiourl, T.id, podcastName, source FROM transcriptions AS T JOIN podcasts as P ON P.name = T.podcastname WHERE COALESCE(T.transcription, '') = '' AND pending = FALSE LIMIT 1;\")\n entry = cursor.fetchone()\n cursor.close()\n return entry", "def checkRuns( self, db_files, db_run_override, db ):\n\t\tto_keep = []\n\t\tensemble_info_collection = db.ensemble_info\n\t\tfor i in db_files:\n\t\t\ttry:\n\t\t\t\tconn = sqlite3.connect( i )\n\t\t\t\tc = conn.cursor()\n\t\t\t\tc.execute(\"SELECT num_iterations FROM run_infos;\")\n\t\t\t\trun_info = c.fetchone()\n\t\t\t\tif run_info is None:\n\t\t\t\t\tpass\n\t\t\t\telif run_info[0] < 1000:\n\t\t\t\t\t# incomplete runs\n\t\t\t\t\tpass\n\t\t\t\telse:\n\t\t\t\t\tif db_run_override == None:\n\t\t\t\t\t\t# do not include runs that are already in the database\n\t\t\t\t\t\t# check for existence\n\t\t\t\t\t\trun_name = i.split(\"/\")[-2]\n\t\t\t\t\t\tif ensemble_info_collection.find( { \"run_name\": run_name } ).count() > 0:\n\t\t\t\t\t\t\tpass\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tto_keep.append( i )\n\t\t\t\t\telse:\n\t\t\t\t\t\tto_keep.append( i )\n\t\t\texcept Exception:\n\t\t\t\tpass\n\t\treturn to_keep", "def execute():\r\n arcpy.AddMessage(\"START BCA Processing\")\r\n arcpy.env.workspace = config.temp_data_gdb\r\n arcpy.env.overwriteOutput = True\r\n sys.path.append(config.notif_system_script_folder)\r\n\r\n # Other Variables\r\n arcpy.AddMessage(\"Import toolbox\")\r\n arcpy.ImportToolbox(config.notif_toolbox)\r\n REGEX_FOR_INVALID_CHARS = re.compile(r'[^0-9a-zA-Z]+')\r\n todayDate = datetime.datetime.now().strftime(\"%Y%m%d\")\r\n logFile = file(\r\n config.report_processing_log + \"\\\\\" + todayDate + \"_NotificationSystemLog\" + \".txt\", \"a\")\r\n\r\n\r\n # get all unzipped files uploaded to shared folder\r\n configfiles = [os.path.join(dirpath, f)\r\n for dirpath, dirnames, files in os.walk(config.SharedFolder)\r\n for f in files if f.endswith('.csv') or f.endswith('.xls') or f.endswith('.xlsx') or f.endswith('.XLS')]\r\n\r\n correct_config_files = [f for f in configfiles if \"\\BCAWeeklyPermitReport\\\\\" in f]\r\n\r\n # PREPARE workspace\r\n arcpy.AddMessage(\"Preparing workspace...\")\r\n for BCAreport in correct_config_files:\r\n\r\n input_file_name = 
BCAreport.split(\"\\\\\")[-1]\r\n\r\n MukimConstruct = arcpy.SearchCursor(config.MukimConstructSource)\r\n PermitDateExists = False\r\n\r\n for row in MukimConstruct:\r\n aux = input_file_name[:8]\r\n if \"CORRECTED\" not in BCAreport.upper():\r\n filedate = datetime.datetime.strptime(aux, \"%Y%m%d\")\r\n else:\r\n clean_filename = input_file_name.split(\".\")[0]\r\n filedate = datetime.datetime.strptime(clean_filename[-8:], \"%Y%m%d\")\r\n if filedate == row.PERMIT_DATE and \"CORRECTED\" not in BCAreport.upper():\r\n PermitDateExists = True\r\n break\r\n if PermitDateExists and \"CORRECTED\" not in BCAreport.upper():\r\n PermitDateExistsLog = file(\r\n config.ErrorLogFolder + \"\\\\\" + input_file_name.split(\".\")[0] +\r\n \" file's Permit Date already exists\" + \".log\",\r\n \"a\")\r\n PermitDateExistsLog.write(\r\n \"Permit Date for the file \" + input_file_name + \" already exists in Mukim Construct at \" + str(\r\n datetime.datetime.now()))\r\n logFile.writelines(\r\n \"Permit Date for the file \" + input_file_name + \" already exists in Mukim Construct at \" + str(\r\n datetime.datetime.now()) + \"\\n\")\r\n\r\n else:\r\n\r\n # 00. Creation of geodatabases that will serve as workspaces\r\n logFile.writelines(\"00 Creation of temp gdb starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n if arcpy.Exists(config.TempDataGDB):\r\n arcpy.Delete_management(config.TempDataGDB)\r\n arcpy.CreateFileGDB_management(config.Notification, \"Temp_data.gdb\")\r\n else:\r\n arcpy.CreateFileGDB_management(config.Notification, \"Temp_data.gdb\")\r\n\r\n if arcpy.Exists(config.SDEDataGDB):\r\n arcpy.Delete_management(config.SDEDataGDB)\r\n arcpy.CreateFileGDB_management(config.Notification, \"Source.gdb\")\r\n else:\r\n arcpy.CreateFileGDB_management(config.Notification, \"Source.gdb\")\r\n\r\n if arcpy.Exists(config.CurrentMukimConstructDataGDB):\r\n arcpy.Delete_management(config.CurrentMukimConstructDataGDB)\r\n arcpy.CreateFileGDB_management(config.Notification, \"Final_data.gdb\")\r\n else:\r\n arcpy.CreateFileGDB_management(config.Notification, \"Final_data.gdb\")\r\n\r\n logFile.writelines(\"00 Creation of temp gdb ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 01. 
Import the base data\r\n logFile.writelines(\"01 Import of base data starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n arcpy.FeatureClassToFeatureClass_conversion(config.MukimConstructSource, config.CurrentMukimConstructDataGDB,\r\n \"MUKIM_CONSTRUCT\", \"\", \"\", \"\")\r\n arcpy.FeatureClassToFeatureClass_conversion(config.MukimConstructByProjSource, config.CurrentMukimConstructDataGDB,\r\n \"MUKIM_CONSTRUCT_BYPROJ\", \"\", \"\", \"\")\r\n arcpy.FeatureClassToFeatureClass_conversion(config.DepotSource, config.SDEDataGDB, \"DepotBoundary\", \"\", \"\", \"\")\r\n arcpy.FeatureClassToFeatureClass_conversion(config.CatchmentSource, config.SDEDataGDB, \"CatchmentBoundary\", \"\", \"\", \"\")\r\n arcpy.FeatureClassToFeatureClass_conversion(config.LandlotSource, config.TempDataGDB, \"Land_lot\", \"\", \"\", \"\")\r\n # Calculate the lot key without letter\r\n arcpy.AddField_management(config.LandLot, \"Lotkey_wo_letter\", \"TEXT\", \"\", \"\", \"10\", \"\", \"NULLABLE\", \"NON_REQUIRED\",\r\n \"\")\r\n arcpy.CalculateField_management(config.LandLot, \"Lotkey_wo_letter\", \"!lot_key![:10]\", \"PYTHON\", \"\")\r\n\r\n logFile.writelines(\"01 Import of base data ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n\r\n # START THE LOOP TO PROCESS ALL THE FILES\r\n clcounter = 0\r\n\r\n if len(correct_config_files) == 0:\r\n logFile.writelines(\"No BCA report to process at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n arcpy.AddMessage(\"Processing files...\")\r\n for BCAreport in configfiles:\r\n\r\n clcounter += 1\r\n arcpy.AddMessage(BCAreport)\r\n input_file_name = BCAreport.split(\"\\\\\")[-1]\r\n MukimConstruct = arcpy.SearchCursor(config.MukimConstructSource)\r\n PermitDateExists = False\r\n\r\n # CHEKC FILE DATE EXISTS\r\n for row in MukimConstruct:\r\n aux = input_file_name[:8]\r\n if \"CORRECTED\" not in BCAreport.upper():\r\n filedate = datetime.datetime.strptime(aux, \"%Y%m%d\")\r\n else:\r\n clean_filename = input_file_name.split(\".\")[0]\r\n filedate = datetime.datetime.strptime(clean_filename[-8:], \"%Y%m%d\")\r\n if filedate == row.PERMIT_DATE and \"CORRECTED\" not in input_file_name.upper():\r\n PermitDateExists = True\r\n break\r\n\r\n HEADERVALID = True\r\n with xlrd.open_workbook(BCAreport) as wb:\r\n sh = wb.sheet_by_index(0)\r\n for r in range(sh.nrows):\r\n colcount = 0\r\n if sh.row_values(r)[colcount] == 'Error_Message':\r\n HEADERVALID = True\r\n elif sh.row_values(r)[colcount] == 'Project Ref No' or sh.row_values(r)[colcount] == 'Project_Ref_No':\r\n HEADERVALID = True\r\n else:\r\n PermitDateExistsLog = file(config.ErrorLogFolder + \"\\\\\" + input_file_name.split(\".\")[\r\n 0] + \" file's header format is not acceptable for processing\" + \".log\", \"a\")\r\n PermitDateExistsLog.write(\r\n \"The header format for the file \" + input_file_name + \" is not acceptable for processing at \" + str(\r\n datetime.datetime.now()))\r\n logFile.writelines(\r\n \"The header format for the file \" + input_file_name + \" is not acceptable for processing at \" + str(\r\n datetime.datetime.now()) + \"\\n\")\r\n HEADERVALID = False\r\n break\r\n\r\n if not PermitDateExists and HEADERVALID:\r\n logFile.writelines(\"Starts processing \" + BCAreport + \" at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # Status update to run/not run the SiteInspection Update\r\n Log_SiteInspectionUpdate = file(config.SiteInspectionUpdate, \"w\")\r\n Log_SiteInspectionUpdate.writelines(\"NO\")\r\n Log_SiteInspectionUpdate.close()\r\n\r\n # 02. 
Import the BCA report to a geodatabase table\r\n logFile.writelines(\"02 Import of table to gdb starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n try:\r\n if arcpy.Exists(config.TempDataGDB + \"\\\\ConvertedBCAreport_02\"):\r\n arcpy.Delete_management(config.TempDataGDB + \"\\\\ConvertedBCAreport_02\")\r\n arcpy.CreateTable_management(config.TempDataGDB, \"ConvertedBCAreport_02\", config.TemplateConvertedBCAreport)\r\n else:\r\n arcpy.CreateTable_management(config.TempDataGDB, \"ConvertedBCAreport_02\", config.TemplateConvertedBCAreport)\r\n if arcpy.Exists(BCAreport[:-5] + '_err' + '.csv'):\r\n # rename old error report\r\n os.remove(BCAreport[:-5] + '_err' + '.csv')\r\n else:\r\n result = \"Error file does not exist\"\r\n if BCAreport.endswith('.xls') or BCAreport.endswith('.xlsx') or BCAreport.endswith('.XLS'):\r\n rows_out = arcpy.InsertCursor(config.BCAReportGDBTable)\r\n fldlist = arcpy.ListFields(config.BCAReportGDBTable)\r\n fldlist.pop(0)\r\n with xlrd.open_workbook(BCAreport) as wb:\r\n sh = wb.sheet_by_index(0)\r\n for r in range(sh.nrows):\r\n colcount = 0\r\n if sh.row_values(r)[colcount] != 'Error_Message':\r\n colcount = 0\r\n else:\r\n colcount = 1\r\n break\r\n for r in range(sh.nrows):\r\n colcounter = colcount\r\n if r > 0:\r\n new_row_out = rows_out.newRow()\r\n for efld in fldlist:\r\n if efld.name <> 'OBJECTID' and efld.name <> 'ConcatFields':\r\n new_row_out.setValue(efld.name, sh.row_values(r)[colcounter])\r\n colcounter += 1\r\n\r\n logFile.writelines(\"Inserting: \" + str(new_row_out) + \"\\n\")\r\n rows_out.insertRow(new_row_out)\r\n del rows_out, new_row_out\r\n\r\n elif BCAreport.endswith('.csv'):\r\n\r\n BCAreportread = csv.DictReader(open(BCAreport, 'rb'), delimiter=',', quotechar='\"')\r\n rows_out = arcpy.InsertCursor(config.BCAReportGDBTable)\r\n for attribute in BCAreportread:\r\n new_row_out = rows_out.newRow()\r\n new_row_out.Project_Ref_No = attribute['Project_Ref_No']\r\n new_row_out.Project_Title = attribute['Project_Title']\r\n new_row_out.House_Blk_No = attribute['House_Blk_No']\r\n new_row_out.Road_Name = attribute['Road_Name']\r\n new_row_out.Level_No = attribute['Level_No']\r\n new_row_out.Unit_No = attribute['Unit_No']\r\n new_row_out.Building_Name = attribute['Building_Name']\r\n new_row_out.Postal_Code = attribute['Postal_Code']\r\n new_row_out.Project_Mukim_nos = attribute['Project_Mukim_nos']\r\n new_row_out.Project_Lot_nos = attribute['Project_Lot_nos']\r\n new_row_out.Permit_Type_of_Work = attribute['Permit_Type_of_Work']\r\n new_row_out.Type_of_Work = attribute['Type_of_Work']\r\n new_row_out.Owner_s_name = attribute['Owners_name']\r\n new_row_out.Owner_s_firm_name = attribute['Owners_firm_name']\r\n new_row_out.Owner_s_address = attribute['Owners_address']\r\n new_row_out.Owner_s_Tel_No = attribute['Owners_Tel_No']\r\n new_row_out.Owner_s_Email_address = attribute['Owners_Email_address']\r\n new_row_out.Builder_s_name = attribute['Builders_name']\r\n new_row_out.Builder_s_firm_name = attribute['Builders_firm_name']\r\n new_row_out.Builder_s_address = attribute['Builders_address']\r\n new_row_out.Builder_s_Tel_No = attribute['Builders_Tel_No']\r\n new_row_out.Builder_s_email_address = attribute['Builders_email_address']\r\n new_row_out.PE_s_name = attribute['PEs_name']\r\n new_row_out.PE_s_firm_name = attribute['PEs_firm_name']\r\n new_row_out.PE_s_address = attribute['PEs_address']\r\n new_row_out.PE_s_Tel_No = attribute['PEs_Tel_No']\r\n new_row_out.PE_s_Email_address = attribute['PEs_Email_address']\r\n 
new_row_out.Architect_s_name = attribute['Architects_name']\r\n new_row_out.Architect_s_firm_name = attribute['Architects_firm_name']\r\n new_row_out.Architect_s_address = attribute['Architects_address']\r\n new_row_out.Architect_s_Tel_No = attribute['Architects_Tel_No']\r\n new_row_out.Architect_s_Email_address = attribute['Architects_Email_address']\r\n new_row_out.Project_Cost = attribute['Project_Cost']\r\n new_row_out.Project_Duration = attribute['Project_Duration']\r\n new_row_out.Approval_Date_DD_MM_YYYY_ = attribute['Approval_Date']\r\n rows_out.insertRow(new_row_out)\r\n if new_row_out:\r\n del new_row_out\r\n if rows_out:\r\n del rows_out\r\n\r\n except:\r\n log_error(\"Error in 02 Import of table to gdb: \", logFile)\r\n logFile.writelines(\"02 Import of table to gdb ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 03. Remove spaces in key fields for the concatenation\r\n logFile.writelines(\"03 Removing of spaces starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n try:\r\n rowsSpace = arcpy.UpdateCursor(config.BCAReportGDBTable)\r\n\r\n for row in rowsSpace:\r\n ProjRef = row.Project_Ref_No.strip()\r\n ProjMukim = row.Project_Mukim_nos.strip()\r\n ProjLot = row.Project_Lot_nos.strip()\r\n BuilderN = row.Builder_s_name.strip()\r\n row.Project_Ref_No = ProjRef\r\n row.Project_Mukim_nos = ProjMukim\r\n row.Project_Lot_nos = ProjLot\r\n row.Builder_s_name = BuilderN\r\n rowsSpace.updateRow(row)\r\n if row:\r\n del row\r\n if rowsSpace:\r\n del rowsSpace\r\n except:\r\n log_error(\"Error in 03 Removing of spaces: \", logFile)\r\n logFile.writelines(\"03 Removing of spaces ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 04. Concatenate Project_Ref_No, Project_Mukim_nos, Project_Lot_nos, Builder_s_name\r\n logFile.writelines(\"04 Concatenate the three fields starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n try:\r\n rows = arcpy.UpdateCursor(config.BCAReportGDBTable)\r\n for row in rows:\r\n expression = str(row.Project_Ref_No) + \"-\" + str(row.Project_Mukim_nos) + \"-\" + str(\r\n row.Project_Lot_nos) + \"-\" + str(row.Builder_s_name)\r\n row.ConcatFields = expression\r\n rows.updateRow(row)\r\n if row:\r\n del row\r\n if rows:\r\n del rows\r\n\r\n except:\r\n log_error(\"Error in 04 Concatenate the three fields: \", logFile)\r\n logFile.writelines(\"04 Concatenate the three fields ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 05. Create temporary tables for Unique and Duplicate records\r\n logFile.writelines(\"05 Create temporary tables starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n try:\r\n if arcpy.Exists(config.TempDataGDB + \"\\\\Uniquerows\"):\r\n arcpy.Delete_management(config.TempDataGDB + \"\\\\Uniquerows\")\r\n arcpy.CreateTable_management(config.TempDataGDB, \"Uniquerows\", config.TemplateConcat, \"\")\r\n else:\r\n arcpy.CreateTable_management(config.TempDataGDB, \"Uniquerows\", config.TemplateConcat, \"\")\r\n\r\n if arcpy.Exists(config.TempDataGDB + \"\\\\Duplicaterows\"):\r\n arcpy.Delete_management(config.TempDataGDB + \"\\\\Duplicaterows\")\r\n arcpy.CreateTable_management(config.TempDataGDB, \"Duplicaterows\", config.TemplateConcat, \"\")\r\n else:\r\n arcpy.CreateTable_management(config.TempDataGDB, \"Duplicaterows\", config.TemplateConcat, \"\")\r\n except:\r\n log_error(\"Error in 05 Create temporary tables: \", logFile)\r\n logFile.writelines(\"05 Create temporary tables ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 06. 
Separate unique and duplicate records\r\n logFile.writelines(\"06 Separate unique and duplicate rows starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n try:\r\n print \"Start step 06\"\r\n rows_inCB02 = arcpy.UpdateCursor(config.BCAReportGDBTable)\r\n rows_outUnique = arcpy.InsertCursor(config.UniqueRecords)\r\n # print rows_outUnique\r\n rows_outDuplicate = arcpy.InsertCursor(config.DuplicateRecords)\r\n\r\n rows_unique = []\r\n rows_duplicates = []\r\n for row in rows_inCB02:\r\n if row.ConcatFields not in rows_unique:\r\n rows_unique = rows_unique + [row.ConcatFields]\r\n else:\r\n rows_duplicates = rows_duplicates + [row.ConcatFields]\r\n\r\n print \"Start step 06 1\"\r\n for item in rows_unique:\r\n print \"clcounter: \" + str(clcounter)\r\n print \"item: \" + str(item)\r\n newrow = rows_outUnique.newRow()\r\n newrow.Concat = item\r\n # print newrow\r\n rows_outUnique.insertRow(newrow)\r\n\r\n print \"Start step 06 2\"\r\n for item in rows_duplicates:\r\n print \"clcounter: \" + str(clcounter)\r\n print \"item: \" + str(item)\r\n newrow = rows_outDuplicate.newRow()\r\n newrow.Concat = item\r\n rows_outDuplicate.insertRow(newrow)\r\n\r\n print \"Start step 06 3\"\r\n\r\n if rows_inCB02:\r\n del rows_inCB02\r\n if rows_outUnique:\r\n del rows_outUnique\r\n if rows_outDuplicate:\r\n del rows_outDuplicate\r\n if row:\r\n del row\r\n except:\r\n log_error(\"Error in 06 Separate unique and duplicate rows: \", logFile)\r\n logFile.writelines(\"06 Separate unique and duplicate rows ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 07. Get the rest of the fields for Uniquerows table\r\n logFile.writelines(\r\n \"07 Get the rest of the fields for unique rows starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n arcpy.env.workspace = config.TempDataGDB\r\n arcpy.AddMessage(\"Starting toolbox JoinUniqueRestofFields\")\r\n\r\n try:\r\n arcpy.JoinUniqueRestofFields()\r\n except:\r\n log_error(\"Error in 07 Get the rest of the fields for unique rows: \", logFile)\r\n logFile.writelines(\r\n \"07 Get the rest of the fields for unique rows ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 08. Get the rest of the fields for Duplicaterows table\r\n logFile.writelines(\r\n \"08 Get the rest of the fields for duplicate rows starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n arcpy.AddMessage(\"START toolbox JoinDuplicateRestofFields\")\r\n try:\r\n arcpy.JoinDuplicateRestofFields()\r\n\r\n except:\r\n log_error(\"Error in 08 Get the rest of the fields for duplicate rows: \", logFile)\r\n\r\n logFile.writelines(\r\n \"08 Get the rest of the fields for duplicate rows ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 09. 
Log duplicate records\r\n logFile.writelines(\"09 Log duplicate records starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n arcpy.AddMessage(\"Logging duplicate records\")\r\n try:\r\n # Initialize the error log\r\n wbk = xlwt.Workbook()\r\n sheet = wbk.add_sheet('Book 1')\r\n row_count = 0\r\n col_count = 0\r\n header = ['Error_Message', 'Project_Ref_No', 'Project_Title', 'House_Blk_No', 'Road_Name', 'Level_No',\r\n 'Unit_No', 'Building_Name', 'Postal_Code', 'Project_Mukim_nos', 'Project_Lot_nos',\r\n 'Permit_Type_of_Work', 'Type_of_Work', 'Owners_name', 'Owners_firm_name', 'Owners_address',\r\n 'Owners_Tel_No', 'Owners_Email_address', 'Builders_name', 'Builders_firm_name',\r\n 'Builders_address', 'Builders_Tel_No', 'Builders_email_address', 'PEs_name', 'PEs_firm_name',\r\n 'PEs_address', 'PEs_Tel_No', 'PEs_Email_address', 'Architects_name', 'Architects_firm_name',\r\n 'Architects_address', 'Architects_Tel_No', 'Architects_Email_address', 'Project_Cost',\r\n 'Project_Duration', 'Approval_Date']\r\n for fieldname in header:\r\n sheet.write(row_count, col_count, fieldname)\r\n col_count += 1\r\n wbk.save(config.ErrorLogFolder + \"\\\\\" + input_file_name.split(\".\")[0] + \"_err\" + \".xls\")\r\n\r\n # Log duplicate records\r\n rows = arcpy.SearchCursor(config.DuplicateRows)\r\n\r\n row_count = 1\r\n col_count = 0\r\n row = None\r\n for row in rows:\r\n message = ['Duplicate record in the BCA report', row.Project_Ref_No, row.Project_Title,\r\n row.House_Blk_No, row.Road_Name, row.Level_No, row.Unit_No, row.Building_Name,\r\n row.Postal_Code, row.Project_Mukim_nos, row.Project_Lot_nos, row.Permit_Type_of_Work,\r\n row.Type_of_Work, row.Owner_s_name, row.Owner_s_firm_name, row.Owner_s_address,\r\n row.Owner_s_Tel_No, row.Owner_s_Email_address, row.Builder_s_name,\r\n row.Builder_s_firm_name, row.Builder_s_address, row.Builder_s_Tel_No,\r\n row.Builder_s_email_address, row.PE_s_name, row.PE_s_firm_name, row.PE_s_address,\r\n row.PE_s_Tel_No, row.PE_s_Email_address, row.Architect_s_name, row.Architect_s_firm_name,\r\n row.Architect_s_address, row.Architect_s_Tel_No, row.Architect_s_Email_address,\r\n row.Project_Cost, row.Project_Duration, row.Approval_Date_DD_MM_YYYY_]\r\n col_count = 0\r\n for element in message:\r\n sheet.write(row_count, col_count, element)\r\n col_count += 1\r\n row_count += 1\r\n wbk.save(config.ErrorLogFolder + \"\\\\\" + input_file_name.split(\".\")[0] + \"_err\" + \".xls\")\r\n if row:\r\n del row\r\n if rows:\r\n del rows\r\n except:\r\n log_error(\"Error in 09 Log duplicate records: \", logFile)\r\n\r\n logFile.writelines(\"09 Log duplicate records ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 10. 
Split rows based on Mukim numbers\r\n logFile.writelines(\"10 Splitting of rows based on mukim starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n try:\r\n if arcpy.Exists(config.SplittedMukimRows):\r\n arcpy.Delete_management(config.SplittedMukimRows)\r\n arcpy.CreateTable_management(config.TempDataGDB, \"Splitted_rows_mukim_03\", config.TemplateBCAReport, \"\")\r\n else:\r\n arcpy.CreateTable_management(config.TempDataGDB, \"Splitted_rows_mukim_03\", config.TemplateBCAReport, \"\")\r\n\r\n if arcpy.Exists(config.SplittedProjLotRows):\r\n arcpy.Delete_management(config.SplittedProjLotRows)\r\n arcpy.CreateTable_management(config.TempDataGDB, \"Splitted_rows_projlot_04\", config.TemplateBCAReport, \"\")\r\n else:\r\n arcpy.CreateTable_management(config.TempDataGDB, \"Splitted_rows_projlot_04\", config.TemplateBCAReport, \"\")\r\n\r\n rows_in = arcpy.SearchCursor(config.UniqueRows)\r\n rows_out = arcpy.InsertCursor(config.SplittedMukimRows)\r\n\r\n for row in rows_in:\r\n list_mukim_nos = row.Project_Mukim_nos.split(\",\")\r\n for proj_mukim_nos_id in list_mukim_nos:\r\n new_row_out = rows_out.newRow()\r\n new_row_out.Project_Mukim_nos = proj_mukim_nos_id\r\n new_row_out.PROJECTMUKIM_RAW = row.Project_Mukim_nos\r\n new_row_out.Project_Ref_No = row.Project_Ref_No\r\n new_row_out.Project_Title = row.Project_Title\r\n new_row_out.House_Blk_No = row.House_Blk_No\r\n new_row_out.Road_Name = row.Road_Name\r\n new_row_out.Level_No = row.Level_No\r\n new_row_out.Unit_No = row.Unit_No\r\n new_row_out.Building_Name = row.Building_Name\r\n new_row_out.Postal_Code = row.Postal_Code\r\n new_row_out.Project_Lot_nos = row.Project_Lot_nos\r\n new_row_out.Permit_Type_of_Work = row.Permit_Type_of_Work\r\n new_row_out.Type_of_Work = row.Type_of_Work\r\n new_row_out.Owner_s_name = row.Owner_s_name\r\n new_row_out.Owner_s_firm_name = row.Owner_s_firm_name\r\n new_row_out.Owner_s_address = row.Owner_s_address\r\n new_row_out.Owner_s_Tel_No = row.Owner_s_Tel_No\r\n new_row_out.Owner_s_Email_address = row.Owner_s_Email_address\r\n new_row_out.Builder_s_name = row.Builder_s_name\r\n new_row_out.Builder_s_firm_name = row.Builder_s_firm_name\r\n new_row_out.Builder_s_address = row.Builder_s_address\r\n new_row_out.Builder_s_Tel_No = row.Builder_s_Tel_No\r\n new_row_out.Builder_s_email_address = row.Builder_s_email_address\r\n new_row_out.PE_s_name = row.PE_s_name\r\n new_row_out.PE_s_firm_name = row.PE_s_firm_name\r\n new_row_out.PE_s_address = row.PE_s_address\r\n new_row_out.PE_s_Tel_No = row.PE_s_Tel_No\r\n new_row_out.PE_s_Email_address = row.PE_s_Email_address\r\n new_row_out.Architect_s_name = row.Architect_s_name\r\n new_row_out.Architect_s_firm_name = row.Architect_s_firm_name\r\n new_row_out.Architect_s_address = row.Architect_s_address\r\n new_row_out.Architect_s_Tel_No = row.Architect_s_Tel_No\r\n new_row_out.Architect_s_Email_address = row.Architect_s_Email_address\r\n new_row_out.Project_Cost = row.Project_Cost\r\n new_row_out.Project_Duration = row.Project_Duration\r\n new_row_out.Approval_Date_DD_MM_YYYY_ = row.Approval_Date_DD_MM_YYYY_\r\n rows_out.insertRow(new_row_out)\r\n if row:\r\n del row\r\n if new_row_out:\r\n del new_row_out\r\n if rows_in:\r\n del rows_in\r\n if rows_out:\r\n del rows_out\r\n except:\r\n log_error(\"Error in 10 Splitting of rows based on mukim: \", logFile)\r\n\r\n logFile.writelines(\"10 Splitting of rows based on mukim ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 11.Split rows based on Project lot numbers\r\n arcpy.AddMessage(\"Splitting rows 
based on project lots\")\r\n\r\n logFile.writelines(\r\n \"11 Splitting of rows based on project lot starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n try:\r\n rows_in03 = arcpy.SearchCursor(config.SplittedMukimRows)\r\n rows_out04 = arcpy.InsertCursor(config.SplittedProjLotRows)\r\n\r\n for row in rows_in03:\r\n list_proj_lot_nos = row.Project_Lot_nos.split(\",\")\r\n print list_proj_lot_nos\r\n for proj_lot_nos_id in list_proj_lot_nos:\r\n print proj_lot_nos_id\r\n new_row_out = rows_out04.newRow()\r\n new_row_out.Project_Lot_nos = proj_lot_nos_id\r\n new_row_out.PROJECTMUKIM_RAW = row.PROJECTMUKIM_RAW\r\n new_row_out.PROJECTLOT_RAW = row.Project_Lot_nos\r\n new_row_out.Project_Ref_No = row.Project_Ref_No\r\n new_row_out.Project_Title = row.Project_Title\r\n new_row_out.House_Blk_No = row.House_Blk_No\r\n new_row_out.Road_Name = row.Road_Name\r\n new_row_out.Level_No = row.Level_No\r\n new_row_out.Unit_No = row.Unit_No\r\n new_row_out.Building_Name = row.Building_Name\r\n new_row_out.Postal_Code = row.Postal_Code\r\n new_row_out.Project_Mukim_nos = row.Project_Mukim_nos\r\n new_row_out.Permit_Type_of_Work = row.Permit_Type_of_Work\r\n new_row_out.Type_of_Work = row.Type_of_Work\r\n new_row_out.Owner_s_name = row.Owner_s_name\r\n new_row_out.Owner_s_firm_name = row.Owner_s_firm_name\r\n new_row_out.Owner_s_address = row.Owner_s_address\r\n new_row_out.Owner_s_Tel_No = row.Owner_s_Tel_No\r\n new_row_out.Owner_s_Email_address = row.Owner_s_Email_address\r\n new_row_out.Builder_s_name = row.Builder_s_name\r\n new_row_out.Builder_s_firm_name = row.Builder_s_firm_name\r\n new_row_out.Builder_s_address = row.Builder_s_address\r\n new_row_out.Builder_s_Tel_No = row.Builder_s_Tel_No\r\n new_row_out.Builder_s_email_address = row.Builder_s_email_address\r\n new_row_out.PE_s_name = row.PE_s_name\r\n new_row_out.PE_s_firm_name = row.PE_s_firm_name\r\n new_row_out.PE_s_address = row.PE_s_address\r\n new_row_out.PE_s_Tel_No = row.PE_s_Tel_No\r\n new_row_out.PE_s_Email_address = row.PE_s_Email_address\r\n new_row_out.Architect_s_name = row.Architect_s_name\r\n new_row_out.Architect_s_firm_name = row.Architect_s_firm_name\r\n new_row_out.Architect_s_address = row.Architect_s_address\r\n new_row_out.Architect_s_Tel_No = row.Architect_s_Tel_No\r\n new_row_out.Architect_s_Email_address = row.Architect_s_Email_address\r\n new_row_out.Project_Cost = row.Project_Cost\r\n new_row_out.Project_Duration = row.Project_Duration\r\n new_row_out.Approval_Date_DD_MM_YYYY_ = row.Approval_Date_DD_MM_YYYY_\r\n rows_out04.insertRow(new_row_out)\r\n\r\n if row:\r\n del row\r\n if new_row_out:\r\n del new_row_out\r\n if rows_in03:\r\n del rows_in03\r\n if rows_out04:\r\n del rows_out04\r\n # print int(arcpy.GetCount_management(SplittedProjLotRows).getOutput(0))\r\n except:\r\n log_error(\"Error in 11 Splitting of rows based on project lot: \", logFile)\r\n logFile.writelines(\r\n \"11 Splitting of rows based on project lot ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 12. 
Remove spaces in Mukim and Project lot values\r\n logFile.writelines(\r\n \"12 Removing of spaces in mukim and project lot starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n arcpy.AddMessage(\"Cleaning project lots\")\r\n try:\r\n\r\n rowsSpaces = arcpy.UpdateCursor(config.SplittedProjLotRows)\r\n\r\n for row in rowsSpaces:\r\n lot_no_spaces = row.Project_Lot_nos.strip()\r\n mukim_no_spaces = row.Project_Mukim_nos.strip()\r\n row.Project_Lot_nos = lot_no_spaces\r\n row.Project_Mukim_nos = mukim_no_spaces\r\n rowsSpaces.updateRow(row)\r\n if row:\r\n del row\r\n if rowsSpaces:\r\n del rowsSpaces\r\n except:\r\n log_error(\"Error in 12 Removing of spaces in mukim and project lot: \", logFile)\r\n logFile.writelines(\r\n \"12 Removing of spaces in mukim and project lot ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 13. Log empty Mukimlot or date fields\r\n logFile.writelines(\r\n \"13 Log empty mukim and project lot nos starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n try:\r\n rowsEmpty = arcpy.UpdateCursor(config.SplittedProjLotRows)\r\n\r\n for row in rowsEmpty:\r\n message = ['Missing Project lot or Mukim numbers', row.Project_Ref_No, row.Project_Title,\r\n row.House_Blk_No, row.Road_Name, row.Level_No, row.Unit_No, row.Building_Name,\r\n row.Postal_Code, row.Project_Mukim_nos, row.Project_Lot_nos, row.Permit_Type_of_Work,\r\n row.Type_of_Work, row.Owner_s_name, row.Owner_s_firm_name, row.Owner_s_address,\r\n row.Owner_s_Tel_No, row.Owner_s_Email_address, row.Builder_s_name,\r\n row.Builder_s_firm_name, row.Builder_s_address, row.Builder_s_Tel_No,\r\n row.Builder_s_email_address, row.PE_s_name, row.PE_s_firm_name, row.PE_s_address,\r\n row.PE_s_Tel_No, row.PE_s_Email_address, row.Architect_s_name, row.Architect_s_firm_name,\r\n row.Architect_s_address, row.Architect_s_Tel_No, row.Architect_s_Email_address,\r\n row.Project_Cost, row.Project_Duration, row.Approval_Date_DD_MM_YYYY_]\r\n message2 = ['Missing Project duration or Approval date', row.Project_Ref_No, row.Project_Title,\r\n row.House_Blk_No, row.Road_Name, row.Level_No, row.Unit_No, row.Building_Name,\r\n row.Postal_Code, row.Project_Mukim_nos, row.Project_Lot_nos, row.Permit_Type_of_Work,\r\n row.Type_of_Work, row.Owner_s_name, row.Owner_s_firm_name, row.Owner_s_address,\r\n row.Owner_s_Tel_No, row.Owner_s_Email_address, row.Builder_s_name,\r\n row.Builder_s_firm_name, row.Builder_s_address, row.Builder_s_Tel_No,\r\n row.Builder_s_email_address, row.PE_s_name, row.PE_s_firm_name, row.PE_s_address,\r\n row.PE_s_Tel_No, row.PE_s_Email_address, row.Architect_s_name,\r\n row.Architect_s_firm_name, row.Architect_s_address, row.Architect_s_Tel_No,\r\n row.Architect_s_Email_address, row.Project_Cost, row.Project_Duration,\r\n row.Approval_Date_DD_MM_YYYY_]\r\n if row.Project_Mukim_nos is None or (len(row.Project_Mukim_nos) < 4):\r\n col_count = 0\r\n for element in message:\r\n sheet.write(row_count, col_count, element)\r\n col_count += 1\r\n row_count += 1\r\n rowsEmpty.deleteRow(row)\r\n elif row.Project_Lot_nos is None or (len(row.Project_Lot_nos) == 0):\r\n col_count = 0\r\n for element in message:\r\n sheet.write(row_count, col_count, element)\r\n col_count += 1\r\n row_count += 1\r\n rowsEmpty.deleteRow(row)\r\n if row.Project_Duration is None or (len(row.Project_Duration) < 1):\r\n col_count = 0\r\n for element in message2:\r\n sheet.write(row_count, col_count, element)\r\n col_count += 1\r\n row_count += 1\r\n rowsEmpty.deleteRow(row)\r\n\r\n elif row.Approval_Date_DD_MM_YYYY_ is None or 
(len(row.Approval_Date_DD_MM_YYYY_) < 1):\r\n col_count = 0\r\n for element in message2:\r\n sheet.write(row_count, col_count, element)\r\n col_count += 1\r\n row_count += 1\r\n rowsEmpty.deleteRow(row)\r\n wbk.save(config.ErrorLogFolder + \"\\\\\" + input_file_name.split(\".\")[0] + \"_err\" + \".xls\")\r\n if row:\r\n del row\r\n if rowsEmpty:\r\n del rowsEmpty\r\n except:\r\n log_error(\"Error in 13 Log for empty mukim and project lot nos: \", logFile)\r\n logFile.writelines(\"13 Log empty mukim and project lot nos ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 14. Error log for those with bad values\r\n arcpy.AddMessage(\"14 Logging bad values\")\r\n logFile.writelines(\"14 Log if bad values exist starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n rowsBadValues = arcpy.UpdateCursor(config.SplittedProjLotRows)\r\n\r\n for row in rowsBadValues:\r\n message = ['Mukim or Project lot numbers have bad values', row.Project_Ref_No, row.Project_Title,\r\n row.House_Blk_No, row.Road_Name, row.Level_No, row.Unit_No, row.Building_Name,\r\n row.Postal_Code, row.Project_Mukim_nos, row.Project_Lot_nos, row.Permit_Type_of_Work,\r\n row.Type_of_Work, row.Owner_s_name, row.Owner_s_firm_name, row.Owner_s_address,\r\n row.Owner_s_Tel_No, row.Owner_s_Email_address, row.Builder_s_name,\r\n row.Builder_s_firm_name, row.Builder_s_address, row.Builder_s_Tel_No,\r\n row.Builder_s_email_address, row.PE_s_name, row.PE_s_firm_name, row.PE_s_address,\r\n row.PE_s_Tel_No, row.PE_s_Email_address, row.Architect_s_name, row.Architect_s_firm_name,\r\n row.Architect_s_address, row.Architect_s_Tel_No, row.Architect_s_Email_address,\r\n row.Project_Cost, row.Project_Duration, row.Approval_Date_DD_MM_YYYY_]\r\n if len(REGEX_FOR_INVALID_CHARS.findall(row.Project_Lot_nos)) > 0:\r\n col_count = 0\r\n for element in message:\r\n sheet.write(row_count, col_count, element)\r\n col_count += 1\r\n row_count += 1\r\n rowsBadValues.deleteRow(row)\r\n elif len(REGEX_FOR_INVALID_CHARS.findall(row.Project_Mukim_nos)) > 0:\r\n col_count = 0\r\n for element in message:\r\n sheet.write(row_count, col_count, element)\r\n col_count += 1\r\n row_count += 1\r\n rowsBadValues.deleteRow(row)\r\n elif len(uptodigit(row.Project_Lot_nos)) > 0:\r\n col_count = 0\r\n for element in message:\r\n sheet.write(row_count, col_count, element)\r\n col_count += 1\r\n row_count += 1\r\n rowsBadValues.deleteRow(row)\r\n wbk.save(config.ErrorLogFolder + \"\\\\\" + input_file_name.split(\".\")[0] + \"_err\" + \".xls\")\r\n\r\n if row:\r\n del row\r\n if rowsBadValues:\r\n del rowsBadValues\r\n except:\r\n log_error(\"Error in 14 Log if bad values exist: \", logFile)\r\n logFile.writelines(\"14 Log if bad values exist ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 15. Add zeros for Project Lot numbers\r\n logFile.writelines(\"15 Add zeros starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n rowsZeros = arcpy.UpdateCursor(config.SplittedProjLotRows)\r\n letters = string.ascii_letters\r\n for row in rowsZeros:\r\n letter_count = len(filter(functools.partial(operator.contains, letters), row.Project_Lot_nos))\r\n filled_string = row.Project_Lot_nos.zfill(5 + letter_count)\r\n row.Project_Lot_nos = filled_string\r\n rowsZeros.updateRow(row)\r\n if row:\r\n del row\r\n if rowsZeros:\r\n del rowsZeros\r\n except:\r\n log_error(\"Error in 15 Add zeros: \", logFile)\r\n logFile.writelines(\"15 Add zeros ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 16. 
Add and populate fields Mukim_Lot_No, Mukimlot_wo_letter, and Permit_date\r\n logFile.writelines(\"16 Add and populate fields starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n rowsPop = arcpy.UpdateCursor(config.SplittedProjLotRows)\r\n for row in rowsPop:\r\n expression = str(row.Project_Mukim_nos) + \"-\" + str(row.Project_Lot_nos)\r\n row.Mukim_Lot_No = expression\r\n date = filedate.strftime(\"%Y%m%d\")\r\n year = int(date[:4])\r\n month = int(date[4:6])\r\n day = int(date[6:8])\r\n permit_date = datetime.datetime(year, month, day)\r\n row.Permit_date = permit_date\r\n rowsPop.updateRow(row)\r\n if row:\r\n del row\r\n if rowsPop:\r\n del rowsPop\r\n # Calculate Mukimlot_wo_letter\r\n arcpy.CalculateField_management(config.SplittedProjLotRows, \"Mukimlot_wo_letter\", \"!Mukim_Lot_No![:10]\",\r\n \"PYTHON_9.3\", \"\")\r\n\r\n except:\r\n log_error(\"Error in 16 Add and populate fields: \", logFile)\r\n logFile.writelines(\"16 Add and populate fields ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 17.Match mukim lot and land lot\r\n logFile.writelines(\"17 Match mukim lot with landlot starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n arcpy.MatchMukimLandLot()\r\n except:\r\n log_error(\"Error in 17 Match mukim lot with landlot: \", logFile)\r\n logFile.writelines(\"17 Match mukim lot with landlot ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 18.Get unmatched mukim lot with land lot\r\n logFile.writelines(\"18 Get unmatched mukim lot starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n arcpy.AddMessage(\"18 Get unmatched mukim lot\")\r\n try:\r\n arcpy.GetUnmatchedMukimLot()\r\n\r\n except:\r\n log_error(\"Error in 18 Get unmatched mukim lot: \", logFile)\r\n\r\n logFile.writelines(\"18 Get unmatched mukim lot ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 19. 
Log errors for unmatched mukim lots\r\n logFile.writelines(\"19 Log unmatched mukim lot starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n rowsUnmatched = arcpy.SearchCursor(config.UnmatchedMukimLot)\r\n row = None\r\n\r\n for row in rowsUnmatched:\r\n message = ['Unmatched mukim lot with the land lot', row.Project_Ref_No, row.Project_Title,\r\n row.House_Blk_No, row.Road_Name, row.Level_No, row.Unit_No, row.Building_Name,\r\n row.Postal_Code, row.Project_Mukim_nos, row.Project_Lot_nos, row.Permit_Type_of_Work,\r\n row.Type_of_Work, row.Owner_s_name, row.Owner_s_firm_name, row.Owner_s_address,\r\n row.Owner_s_Tel_No, row.Owner_s_Email_address, row.Builder_s_name,\r\n row.Builder_s_firm_name, row.Builder_s_address, row.Builder_s_Tel_No,\r\n row.Builder_s_email_address, row.PE_s_name, row.PE_s_firm_name, row.PE_s_address,\r\n row.PE_s_Tel_No, row.PE_s_Email_address, row.Architect_s_name, row.Architect_s_firm_name,\r\n row.Architect_s_address, row.Architect_s_Tel_No, row.Architect_s_Email_address,\r\n row.Project_Cost, row.Project_Duration, row.Approval_Date_DD_MM_YYYY_]\r\n col_count = 0\r\n for element in message:\r\n sheet.write(row_count, col_count, element)\r\n col_count += 1\r\n row_count += 1\r\n wbk.save(config.ErrorLogFolder + \"\\\\\" + input_file_name.split(\".\")[0] + \"_err\" + \".xls\")\r\n if row:\r\n del row\r\n if rowsUnmatched:\r\n del rowsUnmatched\r\n\r\n with xlrd.open_workbook(config.ErrorLogFolder + \"\\\\\" + input_file_name.split(\".\")[0] + \"_err\" + \".xls\") as wb:\r\n sh = wb.sheet_by_index(0)\r\n if sh.nrows == 1:\r\n os.remove(config.ErrorLogFolder + \"\\\\\" + input_file_name.split(\".\")[0] + \"_err\" + \".xls\")\r\n\r\n except arcpy.ExecuteError:\r\n log_error(\"Error in 19 Log unmatched mukim lot: \", logFile)\r\n logFile.writelines(\"19 Log unmatched mukim lot ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 20. Prepare the table for MukimConstruct matching (add required fields)\r\n logFile.writelines(\"20 Add fields to be used for matching starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n if arcpy.Exists(config.MUKIMCONSTRUCTImport):\r\n arcpy.Delete_management(config.MUKIMCONSTRUCTImport)\r\n arcpy.FeatureClassToFeatureClass_conversion(config.MukimConstructSource, config.TempDataGDB,\r\n \"MUKIM_CONSTRUCT_Import\")\r\n else:\r\n arcpy.FeatureClassToFeatureClass_conversion(config.MukimConstructSource, config.TempDataGDB,\r\n \"MUKIM_CONSTRUCT_Import\")\r\n\r\n arcpy.AddField_management(config.MatchedMukimLot, \"Concat_4fields\", \"Text\", \"\", \"\", \"\")\r\n arcpy.AddField_management(config.MUKIMCONSTRUCTImport, \"Concat_4fields\", \"Text\", \"\", \"\", \"\")\r\n arcpy.AddField_management(config.MatchedMukimLot, \"PROJ_DURATION_MTHS2\", \"Double\", \"\", \"\", \"\")\r\n except:\r\n log_error(\"Error in 20 Add fields to be used for matching: \", logFile)\r\n logFile.writelines(\"20 Add fields to be used for matching ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 21. 
Calculate Project Duration as months\r\n logFile.writelines(\"21 Calculate PROJ_DURATION as months starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n rowsProjDur = arcpy.UpdateCursor(config.MatchedMukimLot)\r\n\r\n for row in rowsProjDur:\r\n durationstr = row.PROJ_DURATION_MTHS\r\n if \"Month\" in row.PROJ_DURATION_MTHS:\r\n durationintmth = int(durationstr.split(' ')[0])\r\n row.PROJ_DURATION_MTHS2 = durationintmth\r\n elif \"Year\" in row.PROJ_DURATION_MTHS:\r\n durationintyr = int(durationstr.split(' ')[0]) * 12\r\n row.PROJ_DURATION_MTHS2 = durationintyr\r\n rowsProjDur.updateRow(row)\r\n if rowsProjDur:\r\n del rowsProjDur\r\n if row:\r\n del row\r\n\r\n arcpy.DeleteField_management(config.MatchedMukimLot, \"PROJ_DURATION_MTHS\")\r\n arcpy.AddField_management(config.MatchedMukimLot, \"PROJ_DURATION_MTHS\", \"Double\")\r\n arcpy.CalculateField_management(config.MatchedMukimLot, \"PROJ_DURATION_MTHS\", \"[PROJ_DURATION_MTHS2]\")\r\n except:\r\n log_error(\"Error in 21 Calculate PROJ_DURATION as months: \", logFile)\r\n logFile.writelines(\"21 Calculate PROJ_DURATION as months ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 22. Concatenate 4 fields to be used in checking if mukimlot already exists in MUKIMCONSTRUCT\r\n logFile.writelines(\"22 Concatenate 4 fields starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n rowsConcat1 = arcpy.UpdateCursor(config.MUKIMCONSTRUCTImport)\r\n\r\n for row in rowsConcat1:\r\n expression = str(row.PROJ_REF_NO) + \"-\" + str(row.BUILDER_NAME) + \"-\" + str(\r\n row.LOT_KEY) + \"-\" + str(row.PERMIT_DATE)\r\n row.Concat_4fields = expression\r\n rowsConcat1.updateRow(row)\r\n if row:\r\n del row\r\n if rowsConcat1:\r\n del rowsConcat1\r\n\r\n rowsConcat2 = arcpy.UpdateCursor(config.MatchedMukimLot)\r\n\r\n for row in rowsConcat2:\r\n expression = str(row.PROJ_REF_NO) + \"-\" + str(row.BUILDER_NAME) + \"-\" + str(\r\n row.LOT_KEY) + \"-\" + str(row.PERMIT_DATE)\r\n row.Concat_4fields = expression\r\n rowsConcat2.updateRow(row)\r\n if row:\r\n del row\r\n if rowsConcat2:\r\n del rowsConcat2\r\n except:\r\n log_error(\"Error in 22 Concatenate 4 fields: \", logFile)\r\n logFile.writelines(\"22 Concatenate 4 fields ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 23.Match mukim lot with mukim construct\r\n logFile.writelines(\"23 Match mukimlot with mukim construct at \" + str(datetime.datetime.now()) + \"\\n\")\r\n arcpy.env.workspace = config.TempDataGDB # \"G:\\\\Project\\\\GERIUPGRADE\\\\GPTools\\\\NotificationSysTools\\\\BCAReportProcessing\\\\Temp_data.gdb\"\r\n try:\r\n arcpy.MatchedMukimlotMukimConstruct()\r\n except:\r\n log_error(\"Error in 23 Match mukimlot with mukim construct: \", logFile)\r\n logFile.writelines(\"23 Match mukimlot with mukim construct ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 24.Copy raw values to project lot and project mukim columns and delete the 2 fields\r\n logFile.writelines(\"24 Recalculate projlot and projmukim based on original values starts at \" + str(\r\n datetime.datetime.now()) + \"\\n\")\r\n try:\r\n rowsRaw = arcpy.UpdateCursor(config.MatchedMukimLot)\r\n\r\n for row in rowsRaw:\r\n row.PROJ_MUKIM_NOS = row.PROJECTMUKIM_RAW\r\n row.PROJ_LOT_NOS = row.PROJECTLOT_RAW\r\n rowsRaw.updateRow(row)\r\n if row:\r\n del row\r\n if rowsRaw:\r\n del rowsRaw\r\n except:\r\n log_error(\"Error in 24 Recalculate projlot and projmukim based on original values:\", logFile)\r\n logFile.writelines(\"24 Recalculate projlot and projmukim based on 
original values ends at \" + str(\r\n datetime.datetime.now()) + \"\\n\")\r\n\r\n # 25. Export Cleaned BCA Permit report for CWD\r\n logFile.writelines(\r\n \"25 Export of Cleaned BCA Permit report starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n # Initialize the file\r\n CleanedBCAPermitReport = xlwt.Workbook()\r\n book = CleanedBCAPermitReport.add_sheet('Book 1')\r\n countrow = 0\r\n countcol = 0\r\n fields = ['Project Ref No', 'Project Title', 'House Blk No', 'Road Name', 'Level No', 'Unit No',\r\n 'Building Name', 'Postal Code', 'Project Mukim nos', 'Project Lot nos', 'Permit Type of Work',\r\n 'Type of Work', \"Owner's name\", \"Owner's firm name\", \"Owner's address\", \"Owner's Tel No\",\r\n \"Owner's Email address\", \"Builder's name\", \"Builder's firm name\", \"Builder's address\",\r\n \"Builder's Tel No\", \"Builder's email address\", \"PE's name\", \"PE's firm name\", \"PE's address\",\r\n \"PE's Tel No\", \"PE's Email address\", \"Architect's name\", \"Architect's firm name\",\r\n \"Architect's address\", \"Architect's Tel No\", \"Architect's Email address\", 'Project Cost',\r\n 'Project Duration', 'Approval Date(DD/MM/YYYY)']\r\n for fieldname in fields:\r\n book.write(countrow, countcol, fieldname)\r\n countcol += 1\r\n CleanedBCAPermitReport.save(config.CleanedBCAPermitFolder + \"\\\\\" + input_file_name.split(\".\")[0] + \".xls\")\r\n\r\n # Copy the data to Excel File\r\n data = arcpy.SearchCursor(config.MatchedMukimLot)\r\n\r\n countrow = 1\r\n countcol = 0\r\n for row in data:\r\n message = [row.PROJ_REF_NO, row.PROJ_TITLE, row.HOUSE_BLK_NO, row.ROAD_NAME, row.LEVEL_NO,\r\n row.UNIT_NO, row.BUILDING_NAME, row.POSTAL_CODE, row.PROJ_MUKIM_NOS, row.PROJ_LOT_NOS,\r\n row.PERMIT_WORK_TYPE, row.WORK_TYPE, row.OWNER_NAME, row.OWNER_FIRM_NAME, row.OWNER_ADDR,\r\n row.OWNER_TEL, row.OWNER_EMAIL, row.BUILDER_NAME, row.BUILDER_FIRM_NAME,\r\n row.BUILDER_ADDR, row.BUILDER_TEL, row.BUILDER_EMAIL, row.PE_NAME, row.PE_FIRM_NAME,\r\n row.PE_ADDR, row.PE_TEL, row.PE_EMAIL, row.ARCHITECT_NAME, row.ARCHITECT_FIRM_NAME,\r\n row.ARCHITECT_ADDR, row.ARCHITECT_TEL, row.ARCHITECT_EMAIL, row.PROJ_COST,\r\n row.PROJ_DURATION_MTHS, row.PROJ_APPROVAL_DATE]\r\n countcol = 0\r\n for element in message:\r\n book.write(countrow, countcol, element)\r\n countcol += 1\r\n countrow += 1\r\n CleanedBCAPermitReport.save(config.CleanedBCAPermitFolder + \"\\\\\" + input_file_name.split(\".\")[0] + \".xls\")\r\n if row:\r\n del row\r\n if data:\r\n del data\r\n except:\r\n log_error(\"Error in 25 Export of Cleaned BCA Permit Report: Error in 26 Catchment calculation: \", logFile)\r\n logFile.writelines(\"25 Export of Cleaned BCA Permit Report ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 26. Catchment calculation\r\n arcpy.env.workspace = config.TempDataGDB\r\n logFile.writelines(\"26 Catchment calculation starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n arcpy.CatchmentCalculation()\r\n except:\r\n log_error(\"Error in 26 Catchment calculation: \", logFile)\r\n logFile.writelines(\"26 Catchment calculation ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 27. Depot calculation\r\n logFile.writelines(\"27 Depot calculation starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n arcpy.DepotCalculation()\r\n except:\r\n log_error(\"Error in 27 Depot calculation: \", logFile)\r\n logFile.writelines(\"27 Depot calculation ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 28. 
Re-add date fields and populate\r\n logFile.writelines(\"28 Re-add date fields and populate starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n arcpy.AddField_management(config.MUKIMCONSTRUCT_Temp, \"PERMIT_DATE\", \"Date\")\r\n arcpy.AddField_management(config.MUKIMCONSTRUCT_Temp, \"PROJ_APPROVAL_DATE2\", \"Date\")\r\n arcpy.AddField_management(config.MUKIMCONSTRUCT_Temp, \"PROJ_END_DATE\", \"Date\")\r\n\r\n rows = arcpy.UpdateCursor(config.MUKIMCONSTRUCT_Temp)\r\n\r\n for row in rows:\r\n date = filedate.strftime(\"%Y%m%d\")\r\n year = int(date[:4])\r\n month = int(date[4:6])\r\n day = int(date[6:8])\r\n permit_date = datetime.datetime(year, month, day)\r\n row.PERMIT_DATE = permit_date\r\n row.PROJ_APPROVAL_DATE2 = datetime.datetime.strptime(row.PROJ_APPROVAL_DATE, '%d/%m/%Y')\r\n rows.updateRow(row)\r\n if row:\r\n del row\r\n if rows:\r\n del rows\r\n except:\r\n log_error(\"Error in 28 Re-add fields and populate: \", logFile)\r\n logFile.writelines(\"28 Re-add fields and populate ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 29. Calculate the end date field\r\n logFile.writelines(\"29 Calculate the end date field starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n\r\n rowsEndDate = arcpy.UpdateCursor(config.MUKIMCONSTRUCT_Temp)\r\n\r\n for row in rowsEndDate:\r\n sourcedate = row.PROJ_APPROVAL_DATE2\r\n # sourcedate = datetime.datetime.strptime(row.PROJ_APPROVAL_DATE2 , '%d/%m/%Y')\r\n months = int(row.PROJ_DURATION_MTHS)\r\n d = add_months(sourcedate, months)\r\n row.PROJ_END_DATE = d\r\n rowsEndDate.updateRow(row)\r\n if row:\r\n del row\r\n if rowsEndDate:\r\n del rowsEndDate\r\n except:\r\n log_error(\"Error in 29 Calculate the end date field: \", logFile)\r\n logFile.writelines(\"29 Calculate the end date field ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 30. Calculate Project Total Area\r\n logFile.writelines(\"30 Project total area calculation starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n arcpy.ProjectTotalArea()\r\n except:\r\n log_error(\"Error in 30 Project total area calculation: \", logFile)\r\n logFile.writelines(\"30 Project total area calculation ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 31. Calculate the BCA_CORRECTED_BY\r\n logFile.writelines(\"31 Calculate the BCA_CORRECTED_BY starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n rows_BCA_CB = arcpy.UpdateCursor(config.MUKIMCONSTRUCT_Temp)\r\n\r\n for row in rows_BCA_CB:\r\n if \"\\WSN\\\\\" in BCAreport:\r\n row.BCA_CORRECTED_BY = \"WSN\"\r\n elif \"\\WRN\\\\\" in BCAreport:\r\n row.BCA_CORRECTED_BY = \"WRN\"\r\n elif \"\\CWD\\\\\" in BCAreport:\r\n row.BCA_CORRECTED_BY = \"CWD\"\r\n rows_BCA_CB.updateRow(row)\r\n if row:\r\n del row\r\n if rows_BCA_CB:\r\n del rows_BCA_CB\r\n except:\r\n log_error(\"Error in 31 Calculate the BCA_CORRECTED_BY: \", logFile)\r\n\r\n # 32. 
Remove spaces in PROJ_REF_NO\r\n logFile.writelines(\r\n \"32 Removing of spaces in mukim and project lot starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n try:\r\n rowsSpaces = arcpy.UpdateCursor(config.MUKIMCONSTRUCT_Temp)\r\n\r\n for row in rowsSpaces:\r\n lot_no_spaces = row.PROJ_REF_NO.strip()\r\n row.PROJ_REF_NO = lot_no_spaces\r\n rowsSpaces.updateRow(row)\r\n if row:\r\n del row\r\n if rowsSpaces:\r\n del rowsSpaces\r\n except:\r\n log_error(\"Error in 32 Removing of spaces in mukim and project lot: \", logFile)\r\n logFile.writelines(\r\n \"32 Removing of spaces in mukim and project lot ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 33. Process the Mukim Construct by Project\r\n logFile.writelines(\r\n \"33 Process the Mukim Construct by Project starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n arcpy.env.overwriteOutput = True\r\n try:\r\n MUKIM_CONSTRUCT_BYPROJ_IMPORT = config.TempDataGDB + \"\\\\MUKIM_CONSTRUCT_BYPROJ_IMPORT\"\r\n MUKIMCONBYPROJ_SORT = config.TempDataGDB + \"\\\\MUKIMCONBYPROJ_SORT\"\r\n MUKIM_CONSTRUCT_BYPROJ_DISS = config.TempDataGDB + \"\\\\MUKIM_CONSTRUCT_BYPROJ_DISS\"\r\n MUKIM_CONSTRUCT_BYPROJ_DISS__2_ = config.TempDataGDB + \"\\\\MUKIM_CONSTRUCT_BYPROJ_DISS\"\r\n\r\n if arcpy.Exists(MUKIM_CONSTRUCT_BYPROJ_IMPORT):\r\n arcpy.Delete_management(MUKIM_CONSTRUCT_BYPROJ_IMPORT)\r\n if arcpy.Exists(MUKIMCONBYPROJ_SORT):\r\n arcpy.Delete_management(MUKIMCONBYPROJ_SORT)\r\n if arcpy.Exists(MUKIM_CONSTRUCT_BYPROJ_DISS):\r\n arcpy.Delete_management(MUKIM_CONSTRUCT_BYPROJ_DISS)\r\n\r\n arcpy.MUKIMCONBYPROJ()\r\n # arcpy.MUKIMCONSTRUCTBYPROJProcess2()\r\n\r\n arcpy.Sort_management(MUKIM_CONSTRUCT_BYPROJ_IMPORT, MUKIMCONBYPROJ_SORT, \"PROJ_END_DATE DESCENDING\",\r\n \"UR\")\r\n arcpy.Dissolve_management(MUKIMCONBYPROJ_SORT, MUKIM_CONSTRUCT_BYPROJ_DISS, \"PROJ_REF_NO\",\r\n \"LOT_KEY FIRST;PROJ_REF_NO FIRST;PROJ_TITLE FIRST;HOUSE_BLK_NO FIRST;ROAD_NAME FIRST;POSTAL_CODE FIRST;LEVEL_NO FIRST;UNIT_NO FIRST;BUILDING_NAME FIRST;PROJ_MUKIM_NOS FIRST;PROJ_LOT_NOS FIRST;PERMIT_WORK_TYPE FIRST;WORK_TYPE FIRST;OWNER_NAME FIRST;OWNER_FIRM_NAME FIRST;OWNER_ADDR FIRST;OWNER_TEL FIRST;OWNER_EMAIL FIRST;BUILDER_NAME FIRST;BUILDER_FIRM_NAME FIRST;BUILDER_ADDR FIRST;BUILDER_TEL FIRST;BUILDER_EMAIL FIRST;PE_NAME FIRST;PE_FIRM_NAME FIRST;PE_ADDR FIRST;PE_TEL FIRST;PE_EMAIL FIRST;ARCHITECT_NAME FIRST;ARCHITECT_FIRM_NAME FIRST;ARCHITECT_ADDR FIRST;ARCHITECT_TEL FIRST;ARCHITECT_EMAIL FIRST;PROJ_TOT_AREA FIRST;PROJ_PARENT_CWDCATCHMENT FIRST;PROJ_PARENT_WSNDEPOT FIRST;PROJ_PARENT_WRPCATCHMENT FIRST;BCA_CORRECTED_BY FIRST;PROJ_DURATION_MTHS FIRST;PROJ_COST FIRST\",\r\n \"MULTI_PART\", \"DISSOLVE_LINES\")\r\n arcpy.JoinField_management(MUKIM_CONSTRUCT_BYPROJ_DISS, \"FIRST_PROJ_REF_NO\", MUKIMCONBYPROJ_SORT,\r\n \"PROJ_REF_NO\", \"PROJ_APPROVAL_DATE;PROJ_END_DATE;PERMIT_DATE\")\r\n arcpy.CalculateField_management(MUKIM_CONSTRUCT_BYPROJ_DISS__2_, \"FIRST_PROJ_TOT_AREA\",\r\n \"[Shape_Area]/10000\", \"VB\", \"\")\r\n\r\n except:\r\n log_error(\"Error in 33 Process the Mukim Construct by Project: \", logFile)\r\n logFile.writelines(\r\n \"33 Process the Mukim Construct by Project ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n arcpy.AddMessage(\"33 END process MUKIM CONSTRUCT\")\r\n\r\n # 34. 
Filter on-going projects\r\n\r\n logFile.writelines(\"34 Filter on-going projects starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n # TempDataGDB = \"G:\\\\Project\\\\GERIUPGRADE\\\\GPTools\\\\NotificationSysTools\\\\BCAReportProcessing\\\\Temp_data.gdb\"\r\n MUKIM_CONSTRUCT_BYPROJ_DISS = config.TempDataGDB + \"\\\\MUKIM_CONSTRUCT_BYPROJ_DISS\"\r\n rowsIn = arcpy.UpdateCursor(MUKIM_CONSTRUCT_BYPROJ_DISS)\r\n\r\n row = None\r\n for row in rowsIn:\r\n strdays = str(row.PROJ_END_DATE.date() - datetime.date.today())\r\n splitDays = strdays.split()\r\n if splitDays[0] == '0:00:00':\r\n result = \"On-going project (but will end today)\"\r\n else:\r\n if int(splitDays[0]) < 0:\r\n rowsIn.deleteRow(row)\r\n else:\r\n result = \"On-going project\"\r\n if rowsIn:\r\n del rowsIn\r\n if row:\r\n del row\r\n\r\n except:\r\n log_error(\"Error in 34 Filter on-going projects: \", logFile)\r\n logFile.writelines(\"34 Filter on-going projects ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # 35. Append the new data to MUKIM_CONSTRUCT\r\n logFile.writelines(\r\n \"35 Append the new data to MUKIM_CONSTRUCT starts at \" + str(datetime.datetime.now()) + \"\\n\")\r\n try:\r\n arcpy.AppendNewData()\r\n except:\r\n log_error(\"Error in 35 Append the new data to MUKIM_CONSTRUCT: \", logFile)\r\n logFile.writelines(\r\n \"35 Append the new data to MUKIM_CONSTRUCT ends at \" + str(datetime.datetime.now()) + \"\\n\")\r\n\r\n # Clean the memory and the schema lock\r\n arcpy.RefreshCatalog(config.Notification)\r\n arcpy.Compact_management(config.TempDataGDB)\r\n gc.collect()\r\n\r\n # Status update to run/not run the SiteInspection Update\r\n Log_SiteInspectionUpdate = file(config.SiteInspectionUpdate, \"w\")\r\n Log_SiteInspectionUpdate.writelines(\"YES\")\r\n Log_SiteInspectionUpdate.close()\r\n\r\n arcpy.AddMessage(\"END BCA Processing\")\r\n arcpy.AddMessage(\"Passing file date to other functions: \" + repr(filedate))\r\n\r\n # Generate Report\r\n import ReportGeneration_Adhoc_WithProjects as gen_report\r\n gen_report.run(filedate)\r\n #\r\n # # Send email to departments\r\n # import EmailGenerationCompletion_adhoc as send_dept_notification\r\n # if \"CORRECTED\" in BCAreport.upper():\r\n # send_dept_notification.run(filedate, corrected=True)\r\n # else:\r\n # send_dept_notification.run(filedate)\r\n\r\n # Generate advisory letters\r\n import LetterGeneration as letter_gen\r\n letter_gen.run(filedate)\r\n #\r\n # # Send letters to project team\r\n # import EmailGeneration as send_advisory_email\r\n # send_advisory_email.run(filedate)\r\n\r\n\r\n # 36. 
Move the BCAReport in the backup folder\r\n for BCAreport in correct_config_files:\r\n\r\n input_file_name = BCAreport.split(\"\\\\\")[-1]\r\n bk_file_path = os.path.join(config.BCAreportBackupFolder, input_file_name)\r\n\r\n # if the same file name exists in the backup folder, rename the new file with timestamp and move\r\n if os.path.exists(bk_file_path):\r\n\r\n new_filename = datetime.datetime.now().strftime(\"%Y%m%d-%H%M\") + input_file_name\r\n new_filepath = os.path.join(config.BCAreportBackupFolder, new_filename)\r\n shutil.copy(BCAreport, new_filepath)\r\n os.remove(BCAreport)\r\n\r\n # if the filename does not exist in the backup folder, move the file to backup\r\n else:\r\n shutil.move(BCAreport, config.BCAreportBackupFolder)\r\n\r\n logFile.writelines(\"Moved the BCA report to the backup folder at \" + str(datetime.datetime.now()) + \"\\n\")\r\n logFile.close()", "def variant_call_single_end(sam_file):\n\n\ttotal_reads_number = wccount(sam_file)\n\tpercentage_of_total_file = 0\n\n\tchr_seq = get_ref_geno(chr_name)\n\n\tglobal table_name\n\tcon = lite.connect(db_name)\n\twith con:\n\t\tcur = con.cursor()\n\n\t\tinputfile_sam = open(currentPath + sam_file, \"r\")\n\t\tsam_line_first = inputfile_sam.readline() # the first read line in a pair\n\t\ttotal_reads_num = 0\n\t\tcovered_snp_total_number = 0\n\n\t\tinsert_size_lower_bond = 0\n\t\tinsert_size_upper_bond = 1000\n\n\t\twhile sam_line_first != '':\n\t\t\tif not sam_line_first.startswith(\"@\"):\n\t\t\t\tcurrent_percent = int(float(total_reads_number * percentage_of_total_file) / 100)\n\t\t\t\tif total_reads_num == current_percent:\n\t\t\t\t\tprint \"current progress: \", percentage_of_total_file\n\t\t\t\t\tpercentage_of_total_file += 10\n\n\t\t\t\ttotal_reads_num += 1\n\t\t\t\telements_first = sam_line_first.strip().split()\n\t\t\t\ttry:\n\t\t\t\t\tread_ID_first = elements_first[0].strip()\n\t\t\t\t\tchrName_first = elements_first[2].strip()\n\t\t\t\t\tinsert_size_first = abs(int(elements_first[8].strip())) # insert_size for second read is negative\n\t\t\t\texcept:\n\t\t\t\t\tprint \"error in first read:\", sam_line_first\n\t\t\t\t#print \"this is a new read\"\t\n\t\t\t\tif (insert_size_first >= insert_size_lower_bond) and (insert_size_first <= insert_size_upper_bond):\n\t\t\t\t\tif True:\n\t\t\t\t\t\tif chrName_first.startswith(chr_name):\n\t\t\t\t\t\t\t# first read\n\t\t\t\t\t\t\tqName_first = elements_first[0].strip()\n\t\t\t\t\t\t\tflag_first = elements_first[1].strip()\n\t\t\t\t\t\t\tstart_position_first = int(elements_first[3].strip())\n\t\t\t\t\t\t\tread_sequence_first = elements_first[9].strip()\n\t\t\t\t\t\t\tread_length_first = len(read_sequence_first)\n\t\t\t\t\t\t\tquality_score_sequence_first = elements_first[10].strip()\n\n\t\t\t\t\t\t\tif len(read_sequence_first)\t== len(quality_score_sequence_first):\n\t\t\t\t\t\t\t\tfor i in range(read_length_first):\n\t\t\t\t\t\t\t\t\tcurrent_base_position = start_position_first + i\n\t\t\t\t\t\t\t\t\tA_depth = 0\n\t\t\t\t\t\t\t\t\tT_depth = 0\n\t\t\t\t\t\t\t\t\tC_depth = 0\n\t\t\t\t\t\t\t\t\tG_depth = 0\n\n\t\t\t\t\t\t\t\t\tcovered_snp = read_sequence_first[i] # ith position is the covered snp\n\t\t\t\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t\t\t\tquality_score_symbol = quality_score_sequence_first[i]\n\t\t\t\t\t\t\t\t\texcept:\n\t\t\t\t\t\t\t\t\t\tprint \"error in\", sam_line_first\n\t\t\t\t\t\t\t\t\t\tquality_score_symbol = 'N'\n\t\t\t\t\t\t\t\t\tif (not covered_snp == 'N') and (\n\t\t\t\t\t\t\t\t\t\t(ord(quality_score_symbol) - 33) > quality_score_threshold): # check 
quality_score\n\t\t\t\t\t\t\t\t\t\tif covered_snp == \"A\":\n\t\t\t\t\t\t\t\t\t\t\tA_depth += 1\n\t\t\t\t\t\t\t\t\t\telif covered_snp == \"T\":\n\t\t\t\t\t\t\t\t\t\t\tT_depth += 1\n\t\t\t\t\t\t\t\t\t\telif covered_snp == \"C\":\n\t\t\t\t\t\t\t\t\t\t\tC_depth += 1\n\t\t\t\t\t\t\t\t\t\telif covered_snp == \"G\":\n\t\t\t\t\t\t\t\t\t\t\tG_depth += 1\n\n\t\t\t\t\t\t\t\t\t\tcur.execute(\"SELECT * from \" + table_name + \" where position=\" + str(\n\t\t\t\t\t\t\t\t\t\t\tcurrent_base_position))\n\t\t\t\t\t\t\t\t\t\trow = cur.fetchone()\n\t\t\t\t\t\t\t\t\t\tif row == None:\n\t\t\t\t\t\t\t\t\t\t\tinset_querry = \"INSERT INTO \" + table_name + \\\n\t\t\t\t\t\t\t\t\t\t\t \" (position, chr, ref_allele, A_depth, T_depth, C_depth, G_depth ) VALUES (\" + \\\n\t\t\t\t\t\t\t\t\t\t\t str(current_base_position) + \\\n\t\t\t\t\t\t\t\t\t\t\t \",'\" + chrName_first + \"','\" + chr_seq[\n\t\t\t\t\t\t\t\t\t\t\t\t current_base_position - 1] + \"',\" + str(A_depth) + \",\" + str(\n\t\t\t\t\t\t\t\t\t\t\t\tT_depth) \\\n\t\t\t\t\t\t\t\t\t\t\t + \",\" + str(C_depth) + \",\" + str(G_depth) + \")\"\n\t\t\t\t\t\t\t\t\t\t\t#print inset_querry\n\t\t\t\t\t\t\t\t\t\t\tcur.execute(inset_querry)\n\t\t\t\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t\t\t\tA_depth += int(row[3])\n\t\t\t\t\t\t\t\t\t\t\tT_depth += int(row[4])\n\t\t\t\t\t\t\t\t\t\t\tC_depth += int(row[5])\n\t\t\t\t\t\t\t\t\t\t\tG_depth += int(row[6])\n\t\t\t\t\t\t\t\t\t\t\tupdate_querry = \"UPDATE \" + table_name + \" set A_depth=\" + str(A_depth) + \\\n\t\t\t\t\t\t\t\t\t\t\t \", T_depth=\" + str(T_depth) + \", C_depth=\" + str(\n\t\t\t\t\t\t\t\t\t\t\t\tC_depth) + \", G_depth=\" + \\\n\t\t\t\t\t\t\t\t\t\t\t str(G_depth) + \" where position=\" + str(current_base_position)\n\t\t\t\t\t\t\t\t\t\t\t#print update_querry\n\t\t\t\t\t\t\t\t\t\t\tcur.execute(update_querry)\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tprint \"different in read length and quality length\", sam_line_first\n\t\t\t\t\telse:\n\t\t\t\t\t\tprint \"first and second read ID do not match\", read_ID_first\n\t\t\tsam_line_first = inputfile_sam.readline()\n\t\tinputfile_sam.close()\n\treturn total_reads_num", "def bulk_insert(self, file):\n \n self.feed_type.ad_mapper.iter_from_file(file)\n\n max_pending = 10000 # Max INSERTs pending to commit\n current_pending = 0 # count the number of ads processing from the xml\n inserted_ads = 0\n\n info = {'status': None, 'file': file, 'inserted': None, 'e_msg': None}\n pending_raw_ads = []\n record_ids = []\n old_ads = 0\n repeated_ads = 0\n while True:\n try:\n raw_ad = RawAd()\n raw_ad.raw_content = self.feed_type.ad_mapper.get_raw_content()\n raw_ad.feed_in = self\n\n ######################## Begin - Filter section ################################\n # @TODO: Filters should be dinamic. E.g: implement some kind of observer pattern\n date_info = self.feed_type.ad_mapper.exec_method(\"FECHA\", raw_ad = raw_ad)\n days = (dtt.today() - dtt.strptime(date_info[\"date\"], date_info[\"_format\"])).days \n ######################## End - Filter section ################################\n\n\n if days > 30:\n old_ads += 1\n continue # It skips the remaining code in the loop. \n # This way we don't call to database in each iteration \n\n\n ######################## Begin - Filter section ################################\n # @TODO: Filters should be dinamic. 
E.g: implement some kind of observer pattern\n id = self.feed_type.ad_mapper.exec_method(\"ID\", raw_ad = raw_ad)[\"_id_in_feed\"]\n record_id = id + \",\" + self.feed_type.ad_mapper.exec_method(\"URL\", raw_ad = raw_ad)[\"link\"]\n ad_exists = DBSession.execute(\"SELECT 1 FROM fp_feeds_in_records WHERE id = :id\", {\"id\": record_id}).first()\n ######################## End - Filter section ################################\n if ad_exists:\n repeated_ads += 1\n else:\n pending_raw_ads.append(\n {\n \"raw_ad\": raw_ad.raw_content,\n \"feed_in_id\": self.id\n })\n\n record_ids.append({\"id\": record_id})\n\n current_pending += 1\n \n if( current_pending == max_pending):\n self.__insert(pending_raw_ads, record_ids)\n\n inserted_ads += current_pending\n current_pending = 0\n\n except StopIteration:\n if(current_pending != 0):\n self.__insert(pending_raw_ads, record_ids)\n \n inserted_ads += current_pending\n current_pending = 0\n\n # It updates the processed date's feed\n self.last_processed_date = date.today()\n DBSession.commit()\n \n info['status'] = 'ok'\n info['inserted'] = inserted_ads\n info['repeated_ads'] = repeated_ads\n info['old_ads'] = old_ads\n\n return info\n\n except Exception as e:\n info['status'] = type(e).__name__\n info['inserted'] = inserted_ads\n info['e_msg'] = str(e)\n info['repeated_ads'] = repeated_ads\n info['old_ads'] = old_ads\n\n return info", "def need_completion_refresh(queries):\n for query in sqlparse.split(queries):\n try:\n first_token = query.split()[0]\n if first_token.lower() in ('alter', 'create', 'use', '\\\\r',\n '\\\\u', 'connect', 'drop'):\n return True\n except Exception:\n return False", "def run_test(self):\n\n with open(self.file, \"r\", encoding=\"utf-8\") as file_stream, open(\n self.construct_and_get_shadow_file(file_stream), \"r\", encoding=\"utf-8\"\n ) as shadow_file:\n tracker = PyFunceble.engine.HashesTracker(shadow_file.name)\n\n with Manager() as manager:\n self.__run_multiprocess_test(shadow_file, manager, tracker=tracker)\n\n shadow_file_name = shadow_file.name\n\n if PyFunceble.CONFIGURATION.shadow_file:\n PyFunceble.helpers.File(shadow_file_name).delete()\n\n if self.autocontinue.is_empty():\n with open(self.file, \"r\", encoding=\"utf-8\") as file_stream, open(\n self.construct_and_get_shadow_file(\n file_stream, ignore_inactive_db_check=True\n ),\n \"r\",\n encoding=\"utf-8\",\n ) as shadow_file:\n with Manager() as manager:\n self.__run_multiprocess_test(\n file_stream, manager, ignore_inactive_db_check=True\n )\n\n shadow_file_name = shadow_file.name\n\n if PyFunceble.CONFIGURATION.shadow_file:\n PyFunceble.helpers.File(shadow_file_name).delete()\n\n with Manager() as manager:\n self.__run_multiprocess_test(\n chain(self.inactive_db.get_to_retest()), manager\n )\n\n with Manager() as manager:\n self.complements_test_started = True\n self.__run_multiprocess_test(\n self.get_complements(self.autocontinue), manager\n )\n self.complements_test_started = False\n\n with Manager() as manager:\n self.__run_multiprocess_test(chain(self.mining.list_of_mined()), manager)\n\n tracker.reset_position()\n self.cleanup(self.autocontinue, self.autosave, test_completed=True)", "def test_autocommit_on(self):\n res = self.dbh.run_autocommit(True)\n self.assertIsNotNone(res)\n self.assertEqual(res[0], 4)", "def parse_documents():\n\n\tcount_before = control.find().count()\n\n\tprint \"There are currently %i unprocessed records.\" % count_before\n\n\t#dispatch\n\t# executor = concurrent.futures.ThreadPoolExecutor(10)\n\t# futures = 
[executor.submit(analyze_message, document) for document in control.find()]\n\t# concurrent.futures.wait(futures)\n\n\tfor document in control.find():\n\t\tanalyze_message(document)\n\n\tcount_after = control.count()\n\tprint \"There are now %i stored records.\" % control.count()", "def pdfProcessing():\n global DATABASE\n conn = db.create_connection(DATABASE)\n DOCUMENT_ORIGIN_CODE = \"DOSSIER_PATIENT\"\n\n pathFolder = \"fichiers source/\"\n extension = \".pdf\"\n pdfFileArrayPath = glob.glob(pathFolder + \"*\" + extension)\n print(\" - Processing pdf\", end=\"\")\n for file in pdfFileArrayPath:\n text = readFile.readPdfFile(file)\n query = getDocumentQuery(text, DOCUMENT_ORIGIN_CODE, file, pathFolder, extension)\n \n db.insert_document(conn, query)\n print(\".\", end = '')\n #commit the changes to db\n conn.commit()\n #close the connection\n conn.close()\n print(\"\\n\")", "def refreshDatabase(dbConnection):\n try:\n cursor = dbConnection.cursor()\n cursor.execute(\"UPDATE transcriptions SET pending = FALSE WHERE COALESCE(transcription, '') = '';\")\n dbConnection.commit()\n cursor.close()\n except Exception as e:\n Tools.writeException(\"refreshDatabase\", e)", "def syncDatabase(self, event):\n \n # Check if empty/already synced\n try:\n self.file_len('I:\\\\Scanned Barcodes\\\\BARCODES.txt')\n except UnboundLocalError:\n error = wx.MessageDialog(self, \n 'Scanner is empty or has already been synced.',\n 'Error', style=wx.STAY_ON_TOP|wx.OK|wx.ICON_ERROR)\n error.ShowModal()\n error.Destroy()\n \n return\n \n with open('I:\\\\Scanned Barcodes\\\\BARCODES.txt') as f:\n unknown = set()\n try:\n m = shelve.open('Database\\\\master\\\\master.db')\n reader = csv.reader(f)\n length = self.file_len('I:\\\\Scanned Barcodes\\\\BARCODES.txt')\n progress = wx.ProgressDialog('Please wait',\n 'Syncing Database, Please Wait...', \n length, None, style=wx.PD_REMAINING_TIME|\n wx.PD_SMOOTH|wx.PD_AUTO_HIDE|\n wx.PD_CAN_ABORT)\n keepGoing = True\n count = 0\n for line in reader:\n count += 1\n date = line[0].replace('/','-')\n if m.has_key(line[3]):\n n = shelve.open('Database\\\\Dailies\\\\Daily'+date+'.db',\n writeback=True)\n if n.has_key(line[3]):\n n[line[3]] += 1\n \n else:\n n.setdefault(line[3], 1)\n n.close()\n \n else:\n unknown.add(line[3])\n o = shelve.open('Database\\\\Unknown\\\\unknown'\n +date+'.db', writeback=True)\n \n if o.has_key(line[3]):\n o[line[3]] += 1\n \n else:\n o.setdefault(line[3], {1, date})\n o.close()\n \n (keepGoing, skip) = progress.Update(count)\n if not keepGoing:\n progress.Destroy()\n \n progress.Destroy() \n m.close()\n \n except:\n print 'Unexpected error:', sys.exc_info()[0]\n raise\n \n if len(unknown) >= 1:\n s = 'During sync, the compiler found unknown barcodes'\n ' Would you like to manually enter them into the database?'\n \n q = wx.MessageDialog(self, s, 'Notice', \n wx.STAY_ON_TOP|wx.ICON_QUESTION|wx.YES_NO)\n \n result = q.ShowModal()\n \n if result == 5103:\n compiler = bcompiler.Compiler(None, 'Barcode Entry')\n compiler.iterationStation()\n \n else:\n pass\n \n with open('I:\\\\Scanned Barcodes\\\\BARCODES.txt','w') as q:\n q.write('')\n \n return", "def database_script_list(bs_id, command, arguments_list, threads, expe_proc_time,\n attempt=1):\n #works out the table from the command\n if command == 'make_beam':\n table = 'Beamform'\n if command == 'prepsubband':\n table = 'Prepdata'\n elif command == 'realfft':\n table = 'FFT'\n elif command == 'accelsearch':\n table = 'Accel'\n elif command == 'prepfold':\n table = 'Fold'\n con = 
lite.connect(DB_FILE, timeout = TIMEOUT)\n with con:\n cur = con.cursor()\n for ai, arguments in enumerate(arguments_list):\n cur.execute(\"INSERT OR IGNORE INTO {0} (Rownum, AttemptNum, BSID, Command, Arguments, CPUs, ExpProc) VALUES(?, ?, ?, ?, ?, ?, ?)\".format(table), (ai, attempt, bs_id, command, arguments, threads, expe_proc_time))\n #update expected jobs\n if attempt == 1:\n cur.execute(\"UPDATE PulsarSearch SET {0}JobExp=? WHERE Rownum=?\".format(table), (len(arguments_list),bs_id))\n else:\n cur.execute(\"SELECT {0}JobExp FROM PulsarSearch WHERE Rownum=?\".format(table), (bs_id,))\n table_job_exp = cur.fetchone()[0]\n cur.execute(\"UPDATE PulsarSearch SET {0}JobExp=? WHERE Rownum=?\".format(table), (len(arguments_list) + table_job_exp, bs_id))\n cur.execute(\"SELECT TotalJobExp FROM PulsarSearch WHERE Rownum=?\", (bs_id,))\n search_job_exp = cur.fetchone()[0]\n if search_job_exp is None:\n search_job_exp = 0\n cur.execute(\"UPDATE PulsarSearch SET TotalJobExp=? WHERE Rownum=?\", (len(arguments_list) + search_job_exp, bs_id))\n\n return", "def Do_RT():\r\n \"\"\"Prases through each line of to-date stream log\"\"\"\r\n \"\"\"Rudundancy check to make sure tweet ID has not already been tweeted\"\"\"\r\n \r\n config = config_create()\r\n x1 = config.get('Latest_Log', 'currentstreamlog')\r\n x2 = config.get('Latest_Log', 'currenttweetlog')\r\n x3 = config.get('Latest_Log', 'overalllog')\r\n \r\n DoRT = True\r\n twi = open(x2).read() #To date's Tweet log\r\n twilog = open(x3).read() #To date Overal Tweeted Log\r\n with open(x1) as f:\r\n for line in f:\r\n \r\n one = line.split(\",\")\r\n \r\n #if any(x in twi for x in one) or one[0] in twilog or ' ' in one[0]:\r\n if one[0] in twi or one[0] in twilog or ' ' in one[0]:\r\n \r\n DoRT = False\r\n \r\n else:\r\n DoRT = True\r\n \r\n if DoRT == True:\r\n RT(one[0], one[1])\r\n \r\n return", "def run_sql_transformations(self): \n conn = pg2.connect(user='postgres', dbname='penny', host='localhost', port='5432', password='password')\n for d in self.get_list_of_dates():\n print(d) \n df = pd.read_sql(\"Select count(*) as acount from auctions where auctiontime < '\" + d + \"' and qauctionID not in (SELECT DISTINCT AuctionID from bid_transform)\", conn)\n print (df.acount[0])\n if (df.acount[0] > 0):\n bashCommand = \"sudo -u postgres psql -d penny -f new_transformations.sql -v auction_date='\" + d + \"'\"\n process = subprocess.Popen(bashCommand.split())\n output, error = process.communicate()\n conn.close", "def verify_preload(upload_id, language=None):\n prev_lang = None\n if not language is None:\n prev_lang = get_language()\n activate(language)\n\n upload = SubjectUpload.objects.get(id=upload_id)\n geolevel, nunits = LegislativeLevel.get_basest_geolevel_and_count()\n\n # This seizes postgres -- probably small memory limits.\n #aligned_units = upload.subjectstage_set.filter(portable_id__in=permanent_units).count()\n\n permanent_units = geolevel.geounit_set.all().order_by(\n 'portable_id').values_list(\n 'portable_id', flat=True)\n temp_units = upload.subjectstage_set.all().order_by(\n 'portable_id').values_list(\n 'portable_id', flat=True)\n\n # quick check: make sure the first and last items are aligned\n ends_match = (permanent_units[0] == temp_units[0] and\n permanent_units[permanent_units.count()\n - 1] == temp_units[temp_units.count() - 1])\n msg = _(\n 'There are a correct number of geounits in the uploaded Subject file, '\n )\n if not ends_match:\n msg += _(\n 'but the geounits do not have the same portable ids as those in the 
database.'\n )\n\n # python foo here: count the number of zipped items in the\n # permanent_units and temp_units lists that do not have the same portable_id\n # thus counting the portable_ids that are not mutually shared\n aligned_units = len(\n filter(lambda x: x[0] == x[1], zip(permanent_units, temp_units)))\n\n if ends_match and nunits != aligned_units:\n # The number of geounits in the uploaded file match, but there are some mismatches.\n mismatched = nunits - aligned_units\n msg += _n(\n 'but %(count)d geounit does not match the geounits in the database.',\n 'but %(count)d geounits do not match the geounits in the database.',\n mismatched) % {\n 'count': mismatched\n }\n\n if not ends_match or nunits != aligned_units:\n logger.debug(msg)\n\n upload.status = 'ER'\n upload.save()\n upload.subjectstage_set.all().delete()\n\n status = {'task_id': None, 'success': False, 'messages': [msg]}\n\n else:\n try:\n # The next task will load the units into the characteristic table\n task = copy_to_characteristics.delay(\n upload_id, language=language).task_id\n\n status = {\n 'task_id': task,\n 'success': True,\n 'messages': [_('Copying records to characteristic table ...')]\n }\n\n except:\n logger.error(\n \"Couldn't copy characteristics: %s\" % traceback.format_exc())\n # reset the language back to the default\n if not prev_lang is None:\n activate(prev_lang)\n\n return status", "def run_sql_file(self, sqlfile):\n try:\n queries = self.get_queries_from(sqlfile)\n queries_executed = 0\n for query in queries:\n if self._execute_query(query, values=None): # execute each query\n queries_executed += 1\n print(\"{} Executed queries from {}\".format(queries_executed, sqlfile))\n except pymysql.InternalError as error:\n print(error.args[1])", "def execute_sql_files(connection, sql_files):\n for filename in sql_files:\n statement = resource_text(filename)\n for sub_statement in statement.split(\";\"):\n if sub_statement.strip():\n connection.execute(text(sub_statement))", "def process_data(cur, conn, filepath, func):\r\n # get all files matching extension from directory\r\n all_files = []\r\n for root, dirs, files in os.walk(filepath):\r\n files = glob.glob(os.path.join(root, '*.json'))\r\n for f in files:\r\n all_files.append(os.path.abspath(f))\r\n\r\n\r\n\r\n # iterate over files and process\r\n for datafile in all_files:\r\n func(cur, datafile) ######### de function zy procces song file bta5od l filepath w currsor\r\n conn.commit()\r\n\r\n return all_files", "def get_transcription(self):\n q_tscript1 = prefixes + \"\"\"\n SELECT ?tfName ?targetName ?stmt ?tf ?target ?rel\n WHERE {\n ?stmt a belvoc:Statement .\n ?stmt belvoc:hasRelationship ?rel .\n ?stmt belvoc:hasSubject ?subject .\n ?stmt belvoc:hasObject ?target .\n ?subject a belvoc:AbundanceActivity .\n ?subject belvoc:hasActivityType belvoc:Transcription .\n ?subject belvoc:hasChild ?tf .\n ?tf a belvoc:ProteinAbundance .\n ?tf belvoc:hasConcept ?tfName .\n ?target a belvoc:RNAAbundance .\n ?target belvoc:hasConcept ?targetName .\n }\n \"\"\"\n q_tscript2 = prefixes + \"\"\"\n SELECT ?tfName ?targetName ?stmt ?tf ?target ?rel\n WHERE {\n ?stmt a belvoc:Statement .\n ?stmt belvoc:hasRelationship ?rel .\n ?stmt belvoc:hasSubject ?tf .\n ?stmt belvoc:hasObject ?target .\n ?tf a belvoc:ProteinAbundance .\n ?tf belvoc:hasConcept ?tfName .\n ?target a belvoc:RNAAbundance .\n ?target belvoc:hasConcept ?targetName .\n }\n \"\"\"\n q_tscript3 = prefixes + \"\"\"\n SELECT ?tfName ?targetName ?stmt ?tf ?target ?rel ?mod ?pos\n WHERE {\n ?stmt a 
belvoc:Statement .\n ?stmt belvoc:hasRelationship ?rel .\n ?stmt belvoc:hasSubject ?subject .\n ?stmt belvoc:hasObject ?target .\n ?subject a belvoc:ModifiedProteinAbundance .\n ?subject belvoc:hasModificationType ?mod .\n ?subject belvoc:hasChild ?tf .\n ?tf belvoc:hasConcept ?tfName .\n ?target a belvoc:RNAAbundance .\n ?target belvoc:hasConcept ?targetName .\n OPTIONAL { ?subject belvoc:hasModificationPosition ?pos . }\n }\n \"\"\"\n for q_tscript in (q_tscript1, q_tscript2, q_tscript3):\n res_tscript = self.g.query(q_tscript)\n for stmt in res_tscript:\n # Get modifications on the subject, if any\n if q_tscript == q_tscript1:\n tf = self._get_agent(stmt[0], stmt[3])\n tf.activity = ActivityCondition('transcription', True)\n elif q_tscript == q_tscript3:\n mod = term_from_uri(stmt[6])\n mod_pos = term_from_uri(stmt[7])\n mc = self._get_mod_condition(mod, mod_pos)\n if mc is None:\n continue\n tf = self._get_agent(stmt[0], stmt[3])\n tf.mods = mods=[mc]\n else:\n tf = self._get_agent(stmt[0], stmt[3])\n # Parse out the elements of the query\n evidence = self._get_evidence(stmt[2])\n target = self._get_agent(stmt[1], stmt[4])\n stmt_str = strip_statement(stmt[2])\n # Get the relationship (increases/decreases, etc.)\n rel = term_from_uri(stmt[5])\n if rel == 'DirectlyIncreases' or rel == 'DirectlyDecreases':\n is_direct = True\n else:\n is_direct = False\n # Build the INDRA statement\n stmt = None\n if rel == 'DirectlyIncreases' or rel == 'Increases':\n stmt = IncreaseAmount(tf, target, evidence)\n elif rel == 'DirectlyDecreases' or rel == 'Decreases':\n stmt = DecreaseAmount(tf, target, evidence)\n # If we've matched a pattern, mark this as a converted statement\n if stmt is not None:\n if is_direct:\n self.statements.append(stmt)\n self.converted_direct_stmts.append(stmt_str)\n else:\n self.indirect_stmts.append(stmt)\n self.converted_indirect_stmts.append(stmt_str)", "def __is_complete__(self,configs,*args,**kwargs):\n current_dir = self.output_dir\n if GenericProcess.__is_complete__(self,*args,**kwargs):\n return True\n elif not os.path.isfile(self.complete_file):\n if hasattr(self,\"upload_dir\"):\n current_dir = self.upload_dir\n if not os.path.isfile(self.complete_file.replace(self.output_dir,self.upload_dir)): #If the output directory has already been cleaned, check the upload dir.\n return False\n else: \n return False\n if hasattr(self, \"snp_path\") and not self.snp_path is None and hasattr(self,\"analysis_ready_bam_path\") and not self.analysis_ready_bam_path is None:\n if not os.path.isdir(os.path.dirname(self.snp_path)) or not os.path.dirname(os.path.isfile(self.analysis_ready_bam_path)):\n return False\n if not os.path.isfile(self.snp_path) or not os.path.isfile(self.analysis_ready_bam_path):\n snp_file = False\n bam_file = False\n return False\n if not self.upload_dir is None:\n for file in os.listdir(os.path.join(self.upload_dir,self.description)):\n if file.endswith('.vcf'):\n snp_file = True \n if file.endswith('.bam'):\n bam_file = True \n if not snp_file or not bam_file:\n if configs[\"system\"].get(\"Logging\",\"debug\") is \"True\":\n print \"At least one of the output files is missing for sample \" + str(self.sample_key) + \":\"\n if not os.path.isfile(self.snp_path):\n print \"Missing \"+ self.snp_path\n if not os.path.isfile(self.analysis_ready_bam_path):\n print \"Missing \"+ self.analysis_ready_bam_path\n #os.remove(self.complete_file)\n #template_dir = configs['system'].get('Common_directories','template')\n #qsub_template = 
os.path.join(template_dir,configs['pipeline'].get('Template_files','bcbio_no_postprocess'))\n #self.__fill_template__(qsub_template,os.path.join(self.output_dir,\"bcbio_no_postprocess.sh\"))\n #self.__launch__(configs['system'],os.path.join(self.output_dir,\"bcbio_no_postprocess.sh\"))\n return False\n else:\n check_file = os.path.join(current_dir,'project-summary.csv')\n #If the process is complete, check to make sure that the check file is created. If not, send email once.\n if not os.path.isfile(check_file) and configs['pipeline'].has_option('Template_files','bcbio_no_postprocess') and current_dir==self.output_dir:\n #subject, body = self.__generate_general_error_text__(config)\n #send_email(subject,body)\n #self.fail_reported = True\n os.remove(self.complete_file)\n template_dir = configs['system'].get('Common_directories','template')\n qsub_template = os.path.join(template_dir,configs['pipeline'].get('Template_files','bcbio_no_postprocess'))\n self.__fill_template__(qsub_template,os.path.join(self.output_dir,\"bcbio_no_postprocess.sh\"))\n self.__launch__(configs['system'],os.path.join(self.output_dir,\"bcbio_no_postprocess.sh\"))\n return False\n #store_stats_in_db(self)\n self.__finish__(*args,**kwargs)\n return True", "def missing_checker(self):\n self.verification_attempt += 1\n if self.verification_attempt >= 6:\n self.finish(False, \"After 5 failed attempts to verify the download,\"\n \" it is apparent that some accession numbers cannot be \"\n \"matched to the FASTA titles. If you can perform manual\"\n \" validation, consider using the '-nv' switch to skip \"\n \" the automatic verification step.\")\n print(\"Checking for sequences that did not download... Please wait.\")\n ver_ids = self.fasta_parser(self.outfile)\n if self.accn_cache.tell() != 0:\n # If accecsion numbers are cached, just load them instead of\n # downloading them again\n self.accn_cache.seek(0)\n ncbi_accn_set = pickle.load(self.accn_cache)\n print(\"Using cached accession numbers.\")\n else:\n retmax = 50000\n if self.original_count <= retmax:\n retmax = self.original_count\n ncbi_accn_set = set()\n for i in range(0, self.original_count, retmax):\n if i + retmax < self.original_count:\n end = i + retmax\n else:\n end = self.original_count\n print(\"Downloading accession %i to %i of \"\n \"%i\" % (i + 1, end, self.original_count))\n\n subset = set(self.ncbi_search(self.database,\n self.term,\n \"n\",\n retmax=retmax,\n retstart=i)[\"accn\"])\n ncbi_accn_set = ncbi_accn_set.union(subset)\n\n # Remove any Master records from the accn set:\n # See https://www.biostars.org/p/305310/#305317\n master_records = set()\n for accn in ncbi_accn_set:\n if bool(re.search('[A-Z]{4}0+(\\.\\d){0,}$', accn)):\n master_records.add(accn)\n print(\"WARNING: Master record found and \"\n \"removed: %s.\" % (accn))\n ncbi_accn_set = ncbi_accn_set - master_records\n\n # Create an accecsion number cache. 
This should avoid subsequent\n # accession number downloads.\n pickle.dump(ncbi_accn_set, self.accn_cache, pickle.HIGHEST_PROTOCOL)\n\n missing_ids = ncbi_accn_set - ver_ids\n\n if missing_ids != set():\n not_missing = self.check_unconformant(missing_ids, ver_ids)\n missing_ids = missing_ids - not_missing\n\n if missing_ids == set():\n self.finish(success=True)\n\n return missing_ids", "def process_files(user, application, complete_path, init_es, tool, scan_name, user_host, to_name,hook_log=None):\n try:\n application = Application.objects.get(id=application)\n scan = Scan.objects.get(name=scan_name)\n scan.scanlog.status = 'Initiated'\n scan.scanlog.save()\n scan_log = scan.scanlog\n scan_log.status = 'In Progress'\n scan_log.save()\n try:\n if tool == 'Burp': \n parse_burp(complete_path,user,init_es)\n elif tool == 'ZAP':\n ext = complete_path.split('.')[-1]\n if ext == 'json':\n parse_zap_json(complete_path,user,init_es)\n elif ext == 'xml':\n parse_zap(complete_path,user,init_es)\n elif tool == 'AppSpider':\n parse_appspider(complete_path,user,init_es)\n elif tool == 'Arachni':\n parse_arachni(complete_path,user,init_es)\n elif tool == 'Bandit':\n parse_bandit(complete_path,user,init_es)\n elif tool == 'Checkmarx':\n parse_checkmarx(complete_path,user,init_es)\n elif tool == 'AppScan - DAST':\n parse_appscan_dast(complete_path,user,init_es)\n elif tool == 'AppScan - SAST':\n parse_appscan_sast(complete_path,user,init_es)\n elif tool == 'OWASP Dependency Checker':\n parse_owasp_dep_checker(complete_path,user,init_es)\n elif tool == 'w3af':\n w = W3afParser(complete_path,user,init_es,tool)\n w.parse_xml()\n elif tool == \"HP Fortify\":\n parse_hp_fortify(complete_path,user,init_es)\n elif tool == \"Xanitizer\":\n parse_xanitizer(complete_path,user,init_es)\n elif tool == \"FindSecBugs\":\n parser_findsecbug(complete_path,user,init_es) \n info_debug_log(ip=user_host,user=user,event='XML Parsing',status='success')\n if hook_log:\n hook_log.scan_process_event = True\n hook_log.scan_process_exception = ''\n hook_log.scan_process_datetime = timezone.now()\n hook_log.scan_id = scan.name\n hook_log.vul_process_event = True\n hook_log.vul_process_exception = ''\n hook_log.vul_process_datetime = timezone.now()\n hook_log.save()\n scan_log.status = 'Completed'\n scan_log.save()\n except BaseException as e:\n scan_log.status = 'Killed'\n scan_log.save()\n scan.delete()\n log_exception(e)\n if hook_log:\n hook_log.vul_process_event = False\n hook_log.vul_process_exception = e\n hook_log.vul_process_datetime = timezone.now()\n hook_log.scan_process_event = False\n hook_log.scan_process_exception = e\n hook_log.scan_process_datetime = timezone.now()\n hook_log.scan_id = ''\n hook_log.save()\n # general_error_messages.delay(path='process_files function',msg=log_exception(e))\n critical_debug_log(ip=user_host,user=user,event=e,status='failure')\n except BaseException as e:\n log_exception(e)\n scan_log.status = 'Killed'\n scan_log.save()\n critical_debug_log(ip=user_host,user=user,event=e,status='failure')\n if hook_log:\n hook_log.scan_process_event = False\n hook_log.scan_process_exception = e\n hook_log.scan_process_datetime = timezone.now()\n hook_log.scan_id = ''\n hook_log.save() \n finally:\n info_debug_log(ip=user_host,user=user,event='Remove file after XML parsing',status='success')\n remove_file(complete_path)", "def test_check_cds_10(self):\n self.cds1.translation_table = 1\n import_genome.check_cds(self.cds1, self.eval_flags)\n count = count_status(self.cds1, \"error\", \"warning\")\n 
self.assertEqual(count, 1)", "def watcher():\n # checking if logger is working or not\n # if not then handle exception otherwise watcher thread would stop\n create_logs = True\n try:\n logger = Logger()\n logger.pipeline_logs(\"Watcher Started\")\n except pymongo.errors.ServerSelectionTimeoutError:\n create_logs = False\n except Exception as e:\n print(e)\n create_logs = False\n\n cloud = Cloud()\n db = DbConnector()\n current_file_count = None\n total_file_count = None\n print('WATCHER THREAD STARTED')\n while True:\n if total_file_count is None or current_file_count is None:\n filenames = cloud.get_file_names('wafer/data/prediction/')\n filecount = len(filenames)\n current_file_count = filecount\n total_file_count = filecount\n elif current_file_count < total_file_count:\n total_file_count = current_file_count\n else:\n time.sleep(120)\n filenames = cloud.get_file_names('wafer/data/prediction/')\n current_file_count = len(filenames)\n if current_file_count != total_file_count and current_file_count != 0:\n print('TRIGGERED PREDICTION')\n if create_logs is True:\n logger.pipeline_logs('TRIGGER : PREDICTION TRIGGERED : New Files Found For Prediction')\n\n prepare_data = PreparePredictionData(logger, cloud, db)\n prepare_data.prepare()\n\n predictor = Predictor(logger, cloud, db)\n predictor.predict()\n if create_logs is True:\n logger.pipeline_logs('TRIGGER : PROCESS COMPLETED : PREDICTIONS SAVED TO DATABASE ')\n total_file_count = current_file_count", "def confirmSharding(filename, shards):\n #shardfilenames = splitFileIntoShards(filename, shardsize)[:-1]\n print('Start confirmation')\n filelength = findFLength(filename)\n shardsize = int(np.ceil(filelength / shards))\n splitFileIntoShards(filename, shardsize)\n #fd = os.popen('wc -l < {}'.format(filename))\n totalwordcount = filelength #int(fd.read())\n runcount = 0\n cnt = 0\n print(\"Starting Shard counting\")\n shardfilenames = [\"x{:04d}_shard\".format(i) for i in range(shards)]\n for sf in shardfilenames:\n try:\n fd = os.popen('wc -l < {}'.format(sf))\n count = fd.read()\n print(\"current: {}\".format(count))\n runcount = runcount + int(count)\n except OSError:\n print(\"Something unexpected happened in the OS. 
Moving on...\")\n finally:\n cnt += 1\n print(\"current run cnt {}: {}\".format(cnt, runcount))\n print(\"TotalWordCount = {}\".format(totalwordcount))\n print(\"RunCount = {}\".format(runcount))\n return totalwordcount==runcount", "def _submit_to_queue(self, script_file):", "def isTCPRunning():\r\n running = True\r\n for file in LogTypes.getLogFileNames():\r\n print(file)\r\n with open(file, 'r') as f:\r\n try:\r\n last_line = f.read().splitlines()[-1]\r\n except:\r\n last_line = \"iperf3: exiting\"\r\n print(\"file is empty\")\r\n\r\n if last_line != \"iperf3: exiting\" and last_line != \"iperf3: error - unable to connect to server: Cannot assign requested address\":\r\n print (last_line)\r\n speed = re.findall(r\"\\d+.?\\d+ [A-Z]?bits/sec\", last_line)\r\n print(speed)\r\n number = re.findall(r\"\\d+.?\\d+\", speed[-1])\r\n print(number)\r\n if float(number[-1]) > EXPECTED_SPEED:\r\n print(file[0:-4] + \" 2-way TCP test is running\")\r\n else:\r\n print(file[0:-4] + \" 2-way TCP test is not running well\")\r\n else:\r\n print(file[0:-4] + \" 2-way TCP test is not running\")\r\n time.sleep(1)\r\n clearFileContents(file)\r\n running = False\r\n #break\r\n\r\n return running", "def dataverse_check(self):\n # get all the objects in the archive query\n success, query_list = ArchiveQueryJob.get_all_objects()\n if not success:\n self.query_error = True\n self.error_list.append(query_list)\n # get the latest version ID of dataset\n version_id_obj = GetDataSetFileInfo()\n success_version_id, version_number = version_id_obj.get_version_number()\n if not success_version_id:\n self.add_err_msg(version_number)\n\n # get all files in laterst version ( not in draft )\n list_obj = ListFilesInDataset(version_number)\n success_dataverse_files, file_list = list_obj.return_status()\n if not success_dataverse_files:\n self.dataverse_error = True\n self.error_list.append(file_list)\n\n if not self.query_error:\n for query_obj in query_list:\n obj = query_obj.as_dict()\n self.query_list.append(obj['datafile_id'])\n\n if not self.dataverse_error:\n for dataverse_obj in file_list['data']:\n self.dataverse_list.append(dataverse_obj['dataFile']['id'])\n\n self.check_result = self.compare(self.query_list, self.dataverse_list)\n\n print(\"query_list\", self.query_list)\n print(\"dataverse_list\", self.dataverse_list)\n\n print('errors', self.error_list)\n print('result', self.check_result)", "def automatic_checking(files):\n for i in range(10):\n fft_checking(files[i])", "def upload_transcripts(request):\r\n response = {\r\n 'status': 'Unknown server error',\r\n 'subs': '',\r\n }\r\n\r\n locator = request.POST.get('locator')\r\n if not locator:\r\n return error_response(response, 'POST data without \"locator\" form data.')\r\n\r\n try:\r\n item = _get_item(request, request.POST)\r\n except (InvalidKeyError, ItemNotFoundError):\r\n return error_response(response, \"Can't find item by locator.\")\r\n\r\n if 'transcript-file' not in request.FILES:\r\n return error_response(response, 'POST data without \"file\" form data.')\r\n\r\n video_list = request.POST.get('video_list')\r\n if not video_list:\r\n return error_response(response, 'POST data without video names.')\r\n\r\n try:\r\n video_list = json.loads(video_list)\r\n except ValueError:\r\n return error_response(response, 'Invalid video_list JSON.')\r\n\r\n source_subs_filedata = request.FILES['transcript-file'].read().decode('utf8')\r\n source_subs_filename = request.FILES['transcript-file'].name\r\n\r\n if '.' 
not in source_subs_filename:\r\n return error_response(response, \"Undefined file extension.\")\r\n\r\n basename = os.path.basename(source_subs_filename)\r\n source_subs_name = os.path.splitext(basename)[0]\r\n source_subs_ext = os.path.splitext(basename)[1][1:]\r\n\r\n if item.category != 'video':\r\n return error_response(response, 'Transcripts are supported only for \"video\" modules.')\r\n\r\n # Allow upload only if any video link is presented\r\n if video_list:\r\n sub_attr = source_subs_name\r\n try:\r\n # Generate and save for 1.0 speed, will create subs_sub_attr.srt.sjson subtitles file in storage.\r\n generate_subs_from_source({1: sub_attr}, source_subs_ext, source_subs_filedata, item)\r\n\r\n for video_dict in video_list:\r\n video_name = video_dict['video']\r\n # We are creating transcripts for every video source, if in future some of video sources would be deleted.\r\n # Updates item.sub with `video_name` on success.\r\n copy_or_rename_transcript(video_name, sub_attr, item, user=request.user)\r\n\r\n response['subs'] = item.sub\r\n response['status'] = 'Success'\r\n except Exception as ex:\r\n return error_response(response, ex.message)\r\n else:\r\n return error_response(response, 'Empty video sources.')\r\n\r\n return JsonResponse(response)", "def shouldPFC4TURLsBeCreated(analysisJob, transferType, eventService):\n\n status = False\n\n# allowDirectAccess = readpar('allowdirectaccess').lower()\n# if allowDirectAccess.lower() == \"true\":\n if analysisJob:\n # get the file access info\n useCT, oldPrefix, newPrefix, useFileStager, directIn = getFileAccessInfo()\n\n # forced TURL (only if copyprefix has enough info)\n #_oldPrefix, _newPrefix = getPlainCopyPrefices()\n if directIn: # and (_oldPrefix != \"\" and _newPrefix != \"\" and _oldPrefix != \"dummy\" and _newPrefix != \"dummy\"):\n tolog(\"Reset old/newPrefix (forced TURL mode)\")\n oldPrefix = \"\"\n newPrefix = \"\"\n\n tolog(\"use copytool = %s (should be false for file stager)\" % str(useCT))\n tolog(\"useFileStager = %s (should be true for file stager)\" % str(useFileStager))\n tolog(\"directIn = %s (should be true for file stager)\" % str(directIn))\n tolog(\"oldPrefix = %s (should be empty if TURL based PFC is required)\" % (oldPrefix))\n tolog(\"newPrefix = %s (should be empty if TURL based PFC is required)\" % (newPrefix))\n\n # PFC should be TURL based for file stager or for direct i/o if old/new prefices are not specified\n if not useCT and directIn and oldPrefix == \"\" and newPrefix == \"\":\n # useFileStager thus need not be set (or used here), but directIn must be True\n # if not useCT and directIn and oldPrefix == \"\" and newPrefix == \"\":\n status = True\n else:\n if transferType == \"direct\":\n tolog(\"Will attempt to create a TURL based PFC (for transferType %s)\" % (transferType))\n status = True\n# else:\n# if allowDirectAccess == \"\":\n# tolog(\"This site has not set allowDirectAccess - direct access/file stager not allowed\")\n# else:\n# tolog(\"This site has allowDirectAccess = %s - direct access/file stager not allowed\" % (allowDirectAccess))\n\n # override if necessary for event service\n if eventService:\n if not 'HPC_HPC' in readpar('catchall'):\n status = True\n\n if status:\n tolog(\"TURL based PFC required\")\n else:\n tolog(\"TURL based PFC not required\")\n\n return status", "def run_dbtask():\n pick = '../pickle/filePathList.pickle'\n with open(pick, 'rb') as p:\n filelist = pickle.load(p)\n\n ogsample = dict()\n\n panelDict = {\n '9b': 'panel9b',\n '3b': 'panel3b',\n '11b': 
'panel11b',\n '1b': 'panel1b',\n 'M7': 'lungcancer7',\n 'M50': 'cancer50',\n 'M78': 'cancer78',\n 'Mb': 'brca',\n '14b': 'panel14b',\n '2b': 'panel2b',\n '8b': 'panel8b',\n 'unknow': 'unknow'\n }\n\n tissueDict = {\n 'CFD': 'cfDNA',\n 'FFPED': 'FFPE',\n 'FNAD': 'FFPE',\n 'LEUD': 'Normal',\n 'FRED': 'FFPE',\n 'HYTD': 'FFPE',\n 'HYCFD': 'cfDNA',\n 'GD': 'gDNA',\n }\n exceptList = []\n for file in filelist:\n dirname, basename = os.path.split(file)\n try:\n fullid, suffix = basename.split('_R', 1)\n except BaseException:\n exceptList.append(file)\n print(file, \"Cannot be splited\")\n continue\n\n if 'test' in basename:\n exceptList.append(file)\n continue\n try:\n ogid = re.search('OG[\\d]{5,}|OG[\\d]+OL[\\d]+|HD[\\d]+|1G[\\d]+', fullid).group(0)\n capm = re.search('(CA-PM-[\\d]+|CA_PM_[\\d]+)', dirname)\n if capm is None:\n capm = 'CA-PM-Lost'\n else:\n capm = capm.group(0)\n tissue = re.search('CFD|FFPED|FNAD|LEUD|FRED|HYTD|HYCFD|GD', fullid).group(0)\n try:\n panel = re.search('[1-9]+b|M[b\\d]+', fullid).group(0)\n except BaseException:\n panel = 'unknow'\n\n tissue = tissueDict[tissue]\n panel = panelDict[panel]\n if 'R1' in basename:\n r1 = file\n r1_size = os.path.getsize(file)\n r1_size = \"%d\" % (r1_size / 1000000)\n r1_createtime = datetime.utcfromtimestamp(os.path.getctime(file))\n r2 = None\n if 'R2' in basename:\n r2 = file\n r2_size = os.path.getsize(file)\n r2_size = \"%d\" % (r2_size / 1000000)\n r2_createtime = datetime.utcfromtimestamp(os.path.getctime(file))\n r1 = None\n\n if fullid not in ogsample:\n ogsample[fullid] = dict()\n ogsample[fullid]['ogid'] = ogid\n ogsample[fullid]['tissue'] = tissue\n ogsample[fullid]['panel'] = panel\n ogsample[fullid]['capm'] = capm\n if r1:\n ogsample[fullid]['r1'] = r1\n ogsample[fullid]['r1_size'] = r1_size\n ogsample[fullid]['createtime'] = r1_createtime\n if r2:\n ogsample[fullid]['r2'] = r2\n ogsample[fullid]['r2_size'] = r2_size\n ogsample[fullid]['createtime'] = r2_createtime\n except BaseException:\n exceptList.append(file)\n print(file, 'cannot be parsed!')\n continue\n\n # write out parse error\n with open('../pickle/ogsample.pickle', 'wb') as og:\n pickle.dump(ogsample, og)\n print(\"%d file cannot be parse,saved to pickle/exceptList.pickle\" % len(exceptList))\n with open('../pickle/exceptList.pickle', 'wb') as ep:\n pickle.dump(exceptList, ep)\n\n # store to db\n dberrorlist = []\n for fullid in ogsample.keys():\n idtitle = ['ogid', 'capm', 'r1', 'r2', 'tissue', 'panel', 'r1_size', 'r2_size', 'createtime']\n infolist = [ogsample[fullid][x] for x in idtitle]\n infolist = [fullid] + infolist\n try:\n store2db.save2db(infolist)\n except BaseException as e:\n print(e)\n dberrorlist.append(fullid)\n continue\n # write out store to db error\n print(\"%d record cannot be store to db,saved to dberrorlist.pickle\" % (len(dberrorlist)))\n with open('pickle/dberrorlist.pickle', 'wb') as dp:\n pickle.dump(dberrorlist, dp)\n with open('pickle/filelist.xls', 'w') as fx:\n for file in filelist:\n fx.write(file + '\\n')", "def _process(run):\n logger.info('Checking run {}'.format(run.id))\n t_file = os.path.join(CONFIG['analysis']['status_dir'], 'transfer.tsv')\n if run.is_transferred(t_file):\n # In this case I am either processing a run that is in transfer\n # or that has been already transferred. 
Do nothing.\n # time to time this situation is due to runs that are copied back from NAS after a reboot.\n # This check avoid failures\n logger.info('Run {} already transferred to analysis server, skipping it'.format(run.id))\n return\n\n if run.get_run_status() == 'SEQUENCING':\n # Check status files and say i.e Run in second read, maybe something\n # even more specific like cycle or something\n logger.info('Run {} is not finished yet'.format(run.id))\n elif run.get_run_status() == 'TO_START':\n if run.get_run_type() == 'NON-NGI-RUN':\n # For now MiSeq specific case. Process only NGI-run, skip all the others (PhD student runs)\n logger.warn(\"Run {} marked as {}, \"\n \"TACA will skip this and move the run to \"\n \"no-sync directory\".format(run.id, run.get_run_type()))\n # Archive the run if indicated in the config file\n if 'storage' in CONFIG:\n run.archive_run(CONFIG['storage']['archive_dirs'])\n return\n # Otherwise it is fine, process it\n logger.info((\"Starting BCL to FASTQ conversion and demultiplexing for run {}\".format(run.id)))\n try:\n run.demultiplex_run()\n except:\n logger.info((\"Error demultiplexing for run {}\".format(run.id)))\n pass\n elif run.get_run_status() == 'IN_PROGRESS':\n logger.info((\"BCL conversion and demultiplexing process in \"\n \"progress for run {}, skipping it\".format(run.id)))\n elif run.get_run_status() == 'COMPLETED':\n logger.info((\"Preprocessing of run {} is finished, transferring it\".format(run.id)))\n\n # Transfer to analysis server if flag is True\n if run.transfer_to_analysis_server:\n logger.info('Transferring run {} to {} into {}'\n .format(run.id,\n run.CONFIG['analysis_server']['host'],\n run.CONFIG['analysis_server']['sync']['data_archive']))\n run.transfer_run(t_file)\n\n # Archive the run if indicated in the config file\n if 'storage' in CONFIG:\n run.archive_run(CONFIG['storage']['archive_dirs'])", "def run_necessary_migrations(sql_migrations: List[str], english_migrations: List[str]):\n\n\n con = sqlite3.connect(DB_NAME)\n cur = con.cursor()\n\n cur.execute('''\n SELECT name FROM sqlite_master WHERE type='table' AND name = '__plainapi_migrations';\n ''')\n rows = cur.fetchall()\n existing_migrations: List[Any] = []\n if len(rows) == 0:\n # create the table\n cur.execute('''\n CREATE TABLE __plainapi_migrations (\n id INTEGER PRIMARY KEY AUTOINCREMENT, \n sql_query VARCHAR(500) NOT NULL, \n english_query VARCHAR(500) NOT NULL\n );\n ''')\n else:\n cur.execute('''\n SELECT sql_query, english_query FROM __plainapi_migrations ORDER BY id ASC;\n ''')\n for sql_query, english_query in cur.fetchall():\n existing_migrations.append({'sql': sql_query, 'english': english_query})\n\n # ensure the existing migrations are correct\n for a, b in zip(existing_migrations, english_migrations):\n if a['english'] != b:\n raise ValueError(f'Invalid previously applied migration (it has been changed):\\n \"{a[\"english\"]}\" -> \"{b}\"')\n\n if len(sql_migrations) != len(english_migrations):\n raise ValueError('Internal: There are more SQL migrations than original English migrations')\n\n if len(existing_migrations) < len(sql_migrations):\n print('Running migrations...')\n for idx, (sql, english) in enumerate(zip(sql_migrations, english_migrations)):\n if idx < len(existing_migrations):\n pass\n else:\n print(f' ...{english}')\n cur.execute(sql)\n cur.execute('''\n INSERT INTO __plainapi_migrations (sql_query, english_query) VALUES (?, ?);\n ''', (sql, english,))\n print('All up to date.')\n else:\n print('No migrations to run.')\n\n con.commit()", 
"def makeLargeTracts(input_queue, output_queue, config, db_config):\n\n \n # capture the process name\n my_name = mp.current_process().name\n my_ip_address = socket.gethostbyname(socket.gethostname())\n\n while True:\n try:\n # get the next element out of the queue\n inputs = input_queue.get()\n try:\n if inputs[0] is None: break\n\n # extract the terms from the queue list\n numprov_path = inputs[0] \n blockm_df = inputs[1] \n out_tract_path = inputs[2] \n out_county_path = inputs[3] \n out_tract_df = inputs[4]\n out_county_df = inputs[5] \n start_time = inputs[6] \n worker_speed = inputs[7]\n config = inputs[8]\n geom = 'geoid%s' % config['census_vintage'][2:]\n\n continue_run, block_numprov = openNumprovFile(numprov_path, geom, \n my_name, my_ip_address, worker_speed, \n start_time, output_queue)\n\n if continue_run:\n continue_run, block_numprov = mergeWithDataFrame(\n block_numprov, blockm_df, geom, my_name, \n my_ip_address, worker_speed, start_time, \n output_queue) \n\n if continue_run:\n for geo in ['tract', 'county']:\n continue_run, out_df = findPerCapitaProviders(my_name, \n my_ip_address, geo, block_numprov, \n output_queue, start_time, config, \n worker_speed, eval('out_%s_df' % geo))\n \n if continue_run:\n continue_run = outputGeoData(out_df, \n eval('out_%s_path' % geo), my_name, \n my_ip_address, geo, worker_speed, \n start_time, output_queue)\n\n except:\n pass\n\n except:\n # nothing in the queue, wait and check again\n time.sleep(1)\n\n return True", "def runtask(self): \n self.status = 1 #Declare Task as Running.\n \n #Get Ready to Capture Encountered Errors for Mailing\n globs.curErrBlock = ErrorBlock(\"Error Analysis for Task: \"+self.Action) \n\n \"\"\"\n You can practically do anything while a task is running.\n\n Here are some helper functions:\n (Go to these functions for more info)\n 1. runSQLQuery : Executes any sql script.\n 2. find_errors/findErrorsInFiles : Checks a file list for errors and report them.\n 3. Popen : Inbuilt function for executing batch scripts.\n 4. 
safecopy : copies a file to its destination, reports if file not found.\n\n \"\"\"\n\n if self.op == 1:\n #Task for Gathering Stats\n #Execute Script from the log folder\n os.chdir(globs.ARCHIVEFOLDER)\n os.chdir(self.phase)\n os.chdir(self.schema)\n #The following statement generates a string which contains the absolute path of the sql script and any parameters\n sqlcommand = bytes('@'+globs.props['JDA_HOME']+'\\\\config\\\\database\\\\scpoweb\\\\gather_db_stats '+self.schema, 'utf-8')\n #The following function automatically executes the sqlcommand given above\n runSQLQuery(sqlcommand, globs.props['System_Username'], globs.LogPipe)\n #The following code is used for handling error inside a single file\n log_file = \"\\\\\".join([globs.ARCHIVEFOLDER, self.phase, self.schema, \"gather_db_stats.log\"])\n errFound = find_errors(log_file, [\"ORA-\", \"PLS-\"])\n if errFound:\n self.status = 4\n os.chdir(globs.PROGDIR)\n elif self.op == 2:\n #Task for Counting Rows\n os.chdir(globs.ARCHIVEFOLDER)\n os.chdir(self.phase)\n os.chdir(self.schema)\n sqlcommand = bytes(\"@'%s\\\\sqls\\\\CountRows'\"%globs.PROGDIR+ self.schema, 'utf-8')\n runSQLQuery(sqlcommand, self.schema, sys.__stdout__)\n os.chdir(globs.PROGDIR)\n \n elif self.op == 3:\n #Task for Counting Invalid Objects\n os.chdir(globs.ARCHIVEFOLDER)\n os.chdir(self.phase)\n os.chdir(self.schema)\n sqlcommand = bytes(\"@'%s\\\\sqls\\\\InvalidObjects'\"%globs.PROGDIR+ self.schema, 'utf-8')\n runSQLQuery(sqlcommand, self.schema, sys.__stdout__)\n os.chdir(globs.PROGDIR)\n \n elif self.op == 4:\n #Task for WWFMGR Premigration Script\n progPath = os.getcwd()\n #Store location of the batch scriptfolder\n scriptFolder = globs.props['JDA_HOME']+'\\\\config\\\\database\\\\platform\\\\migration\\\\'\n #Switch Current Working Directory to the Script Folder\n os.chdir(scriptFolder)\n #Use Popen built-in command to execute required script\n #stdout is set to where you want to display the output, LogPipe is our custom console\n session = Popen(['premigrate_webworks.cmd', globs.props['WebWORKS_Password'], globs.props['System_Username'], globs.props['System_Password']], stdin=PIPE, stdout=globs.LogPipe)\n #Wait until Script Finishes Executing\n session.communicate()\n #Move to the Log Folder\n os.chdir(globs.ARCHIVEFOLDER)\n os.chdir(\"Premigration\")\n #Prepare a list of files that need to be backed up\n BACKUPFILES = ['premigrate.log', 'gen_refschema.log', 'platform_db_creation.log', 'refsch_check.log', 'r_query.log']\n for f in BACKUPFILES:\n #Copy Files one by one\n safecopy(scriptFolder+f, self.schema)\n #Check All Files for Errrors\n if findErrorsInFiles(BACKUPFILES, self):\n self.status = 4\n os.chdir(globs.PROGDIR)\n elif self.op == 5:\n #Task for WWFMGR migration scripts\n scriptFolder = globs.props['JDA_HOME']+'\\\\config\\\\database\\\\platform\\\\migration\\\\'\n os.chdir(scriptFolder)\n session = Popen(['migrate_webworks.cmd', globs.props['WebWORKS_Password'], globs.props['System_Username'], globs.props['System_Password']], stdin=PIPE, stdout=globs.LogPipe)\n session.communicate()\n os.chdir(globs.ARCHIVEFOLDER)\n os.chdir(\"Migration\")\n BACKUPFILES = ['migrate_webworks.log', 'platform_db_creation.log', 'gen_refschema.log']\n for f in BACKUPFILES:\n safecopy(scriptFolder+f, self.schema)\n if findErrorsInFiles(BACKUPFILES, self):\n self.status = 4\n os.chdir(globs.PROGDIR)\n\n elif self.op == 6:\n #Task for Monitor Premigration Scripts\n scriptFolder = globs.props['JDA_HOME']+'\\\\config\\\\database\\\\monitor\\\\migration\\\\'\n 
os.chdir(scriptFolder)\n session = Popen(['premigrate_monitor.cmd', globs.props['Monitor_Password'], globs.props['WebWORKS_Password'], globs.props['System_Username'], globs.props['System_Password']], stdin=PIPE, stdout=globs.LogPipe)\n session.communicate()\n os.chdir(globs.ARCHIVEFOLDER)\n os.chdir(\"Premigration\")\n BACKUPFILES = ['premigrate.log', 'platform_db_creation.log', 'gen_refschema.log', 'refsch_check.log']\n for f in BACKUPFILES:\n safecopy(scriptFolder+f, self.schema)\n if findErrorsInFiles(BACKUPFILES, self):\n self.status = 4 \n os.chdir(globs.PROGDIR)\n elif self.op == 7:\n #Task for Monitor Migration Scripts\n scriptFolder = globs.props['JDA_HOME']+'\\\\config\\\\database\\\\monitor\\\\migration\\\\'\n os.chdir(scriptFolder)\n session = Popen(['migrate_monitor.cmd', globs.props['Monitor_Password'], globs.props['WebWORKS_Password'], globs.props['System_Username'], globs.props['System_Password']], stdin=PIPE, stdout=globs.LogPipe)\n session.communicate()\n os.chdir(globs.ARCHIVEFOLDER)\n os.chdir(\"Migration\")\n BACKUPFILES = ['migrate_monitor.log', 'platform_db_creation.log', 'gen_refschema.log', 'ema_populate_wwf.log', 'enroll_app_schema.log']\n for f in BACKUPFILES:\n safecopy(scriptFolder+f, self.schema)\n if findErrorsInFiles(BACKUPFILES, self):\n self.status = 4\n os.chdir(globs.PROGDIR)\n \n elif self.op == 13:\n #Task for SCPOMGR Premigration Scripts\n d = globs.saveDir()\n scriptFolder = globs.props['JDA_HOME']+'\\\\config\\\\database\\\\scpoweb\\\\migration\\\\'\n os.chdir(scriptFolder)\n session = Popen(['premigrate_scpo.cmd', globs.props['SCPO_Password'], globs.props['WebWORKS_Password'], globs.props['System_Username'], globs.props['System_Password']], stdin=PIPE, stdout=sys.__stdout__)\n session.communicate()\n os.chdir(globs.ARCHIVEFOLDER)\n os.chdir(self.phase)\n BACKUPFILES = ['create_scporefschema.log', 'create_wwfrefschema.log', 'grant_manu_privs.log', 'premigrate_scpo.log', 'show_badrows.log']\n for f in BACKUPFILES:\n safecopy(scriptFolder+f, self.schema)\n found = findErrorsInFiles(BACKUPFILES, self)\n globs.SignalObj.updateErrorSignal.emit(\"Review show_badrows.log in %s before proceeding\"%(\"\\\\\".join([globs.ARCHIVEFOLDER, self.phase, self.schema])))\n self.status = 4\n d.restore()\n elif self.op == 9:\n #Task for SCPOMGR Migration Scripts\n d = globs.saveDir()\n scriptFolder = globs.props['JDA_HOME']+'\\\\config\\\\database\\\\scpoweb\\\\migration\\\\'\n os.chdir(scriptFolder)\n session = Popen(['migrate_scpo.cmd', globs.props['SCPO_Password'], globs.props['WebWORKS_Password'], globs.props['System_Username'], globs.props['System_Password']], stdin=PIPE, stdout=sys.__stdout__)\n session.communicate()\n os.chdir(globs.ARCHIVEFOLDER)\n os.chdir(self.phase)\n BACKUPFILES = ['create_scporefschema.log', 'create_wwfrefschema.log', 'grant_manu_privs.log', 'migrate_scpo.log']\n for f in BACKUPFILES:\n safecopy(scriptFolder+f, self.schema)\n if findErrorsInFiles(BACKUPFILES, self):\n self.status = 4\n d.restore()\n elif self.op == 10:\n #Task for Checking Row Count Matching\n phase = \"Premigration\"\n predct = getattr(globs,'RowCountDict'+phase)\n phase = \"Postmigration\"\n postdct = getattr(globs,'RowCountDict'+phase)\n res = (predct == postdct)\n if not res:\n globs.SignalObj.updateErrorSignal.emit(\"Row Count Matching Failed!\")\n self.status = 4\n elif self.op == 11:\n #Task for Invalid Object Count Matching\n phase = \"Premigration\"\n predct = getattr(globs,'InvalidCountDict'+phase)\n phase = \"Postmigration\"\n postdct = 
getattr(globs,'InvalidCountDict'+phase)\n res = (predct == postdct)\n if not res:\n globs.SignalObj.updateErrorSignal.emit(\"Invalid Object Count Matching Failed!\")\n self.status = 4\n elif self.op == 103:\n #Task for Creating Manguistics Package in JDA_SYSTEM\n os.chdir(globs.ARCHIVEFOLDER)\n os.chdir(self.phase)\n user = globs.props['JDA_SYSTEM_Username']\n print(\"Creating the ManugisticsPkg table in the JDA System schema\")\n sqlcommand = bytes('@'+globs.props['JDA_HOME']+'\\\\config\\\\database\\\\platform\\\\ManugisticsPkg '+user, 'utf-8')\n stdout, stdin = runSQLQuery(sqlcommand, user, globs.LogPipe)\n log_file = \"\\\\\".join([globs.ARCHIVEFOLDER, self.phase, \"ManugisticsPkg.log\"])\n errFound = find_errors(log_file, [\"ORA-\", \"PLS-\"])\n if errFound:\n self.status = 4\n os.chdir(globs.PROGDIR)\n elif self.op == 104:\n #Task for Creating ABPP Schema if it doesn't exist \n progPath = os.getcwd()\n scriptFolder = globs.props['JDA_HOME']+'\\\\config\\\\database\\\\platform\\\\'\n os.chdir(scriptFolder)\n session = Popen(['createAbppSchema.cmd'], stdin=PIPE, stdout=globs.LogPipe)\n session.communicate()\n os.chdir(progPath)\n\n elif self.op == 105:\n #Task for Providing ABPP necessary Grants\n sqlcommand = bytes('@sqls/ABPP_GRANTS', 'utf-8')\n runSQLQuery(sqlcommand, globs.props['System_Username'], globs.LogPipe)\n elif self.op == 106:\n #Task for Updating ABPP Schema\n progPath = os.getcwd()\n scriptFolder = globs.props['JDA_HOME']+'\\\\config\\\\database\\\\platform\\\\'\n os.chdir(scriptFolder)\n session = Popen(['updateAbppSchema.cmd', '-coreServices'], stdout=globs.LogPipe, stdin = PIPE)\n session.communicate()\n os.chdir(progPath)\n elif self.op == 107:\n #Premigration Custom Script\n os.chdir(globs.ARCHIVEFOLDER)\n os.chdir(self.phase)\n sqlcommand = bytes(\"@'%s\\\\sqls\\\\custompremgr'\"%globs.PROGDIR, 'utf-8')\n runSQLQuery(sqlcommand, 'JDA_SYSTEM', sys.__stdout__)\n os.chdir(globs.PROGDIR)\n elif self.op == 202:\n #Sample Task Error\n log_file = globs.PROGDIR+'\\\\tmp\\\\sample.log'\n errFound = find_errors(log_file, [\"ORA-\", \"PLS-\"])\n if errFound:\n self.status = 4\n globs.curErrBlock.finalize()", "def take_action(self, parsed_args):\n if parsed_args.file:\n for file in parsed_args.file:\n if not os.path.exists(file):\n self.logger.error('Specified file does not exist: {}'.format(file))\n continue\n self.logger.info('File uploading is started: {}'.format(file))\n file_id = self.app.metagen.upload_files(file)\n if not file_id:\n return False\n self.logger.info('File {} has been sent to analysis.'.format(file))\n self.logger.info('Use File ID to get Analysis Result: {}'.format(file_id))\n self.logger.info('Task Done')", "def main():\n st.info(\n \"This webpage lets you upload wav audio file and transribe it to Amharic, CHECK THAT OUT !!\")\n st.markdown(STYLE, unsafe_allow_html=True)\n st.header(\"Upload audio file\")\n file = st.file_uploader(\"Audio file\", type=FILE_TYPES)\n show_file = st.empty()\n if not file:\n show_file.info(\"Please upload a file of type: \" +\n \", \".join(FILE_TYPES))\n return\n\n file_type = get_file_type(file)\n if file_type == FileType.PYTHON:\n st.code(file.getvalue())\n\n elif file_type == FileType.SOUND:\n # st.code(file.getvalue())\n audio_bytes = file.read()\n st.audio(audio_bytes, format=\"audio/ogg\")\n\n else:\n data = pd.read_csv(file)\n st.dataframe(data.head(10))\n\n with open(os.path.join(\"./tempfile\", file.name), \"wb\") as f:\n f.write(file.getbuffer())\n st.success(\"Processing File..\")\n\n st.header(\"Transcribe 
audio\")\n if st.button('Transcribe'):\n st.write(\"\")\n with st.spinner('wait for it ...'):\n time.sleep(60)\n st.success('Done!')\n else:\n st.write('')\n\n # if file:\n # token, t_id = upload_file(file)\n # result = {}\n # #polling\n # sleep_duration = 1\n # percent_complete = 0\n # progress_bar = st.progress(percent_complete)\n # st.text(\"Currently in queue\")\n # while result.get(\"status\") != \"processing\":\n # percent_complete += sleep_duration\n # time.sleep(sleep_duration)\n # progress_bar.progress(percent_complete/10)\n # result = get_text(token,t_id)\n\n # sleep_duration = 0.01\n\n # for percent in range(percent_complete,101):\n # time.sleep(sleep_duration)\n # progress_bar.progress(percent)\n\n # with st.spinner(\"Processing.....\"):\n # while result.get(\"status\") != 'completed':\n # result = get_text(token,t_id)\n\n # st.balloons()\n # st.header(\"Transcribed Text\")\n # st.subheader(result['text'])\n\n file.close()", "def check_job(self, a_thread, _):\n if not a_thread.isAlive():\n self.close_button.disabled = False\n self.popup_label.text = \"Process finished. Processed records:\" + str(self.count_funct())\n return False", "def score_transcriptions(transcriber: Transcriber):\n if transcriber.evaluation_mode:\n best_wer = 10000\n best = None\n for lmwt in range(\n transcriber.min_language_model_weight, transcriber.max_language_model_weight\n ):\n for wip in transcriber.word_insertion_penalties:\n transcriber.transcribe_config.language_model_weight = lmwt\n transcriber.transcribe_config.word_insertion_penalty = wip\n os.makedirs(transcriber.evaluation_log_directory, exist_ok=True)\n\n jobs = [x.score_arguments(transcriber) for x in transcriber.corpus.jobs]\n if transcriber.transcribe_config.use_mp:\n run_mp(score_func, jobs, transcriber.evaluation_log_directory)\n else:\n run_non_mp(score_func, jobs, transcriber.evaluation_log_directory)\n ser, wer = transcriber.evaluate()\n if wer < best_wer:\n best = (lmwt, wip)\n transcriber.transcribe_config.language_model_weight = best[0]\n transcriber.transcribe_config.word_insertion_penalty = best[1]\n for j in transcriber.corpus.jobs:\n score_args = j.score_arguments(transcriber)\n for p in score_args.tra_paths.values():\n shutil.copyfile(\n p,\n p.replace(transcriber.evaluation_directory, transcriber.transcribe_directory),\n )\n else:\n jobs = [x.score_arguments(transcriber) for x in transcriber.corpus.jobs]\n if transcriber.transcribe_config.use_mp:\n run_mp(score_func, jobs, transcriber.working_log_directory)\n else:\n run_non_mp(score_func, jobs, transcriber.working_log_directory)", "def detect_completion(self):\n results_dir = glob.glob(f\"{self.production.rundir}\")\n if len(results_dir)>0: # dynesty_merge_result.json\n if len(glob.glob(os.path.join(results_dir[0], f\"extrinsic_posterior_samples.dat\"))) > 0:\n return True\n else:\n return False\n else:\n return False", "def perform_filecheck():\n\n\t# Open files\n\ttrain = open('train_aae_final', 'r')\n\ttest = open('test_aae_final', 'r')\n\n\n\t# Check number of training and testing samples\n\tprint (\"\")\n\tprint (\"Number of training samples =\", len(train.readlines()))\n\tprint (\"Number of testing samples =\", len(test.readlines()))\n\tprint (\"\")\n\n\ttrain.close()\n\ttest.close()", "def syntax_check(self, working_dir, disco_ver):\n\n bar = ''\n spinner = ''\n if progressbar:\n progressbar.streams.flush()\n progressbar.streams.wrap_stdout()\n widgets = [progressbar.AnimatedMarker(),\n ' Checking syntax. 
',\n progressbar.Timer()]\n bar = progressbar.ProgressBar(widgets=widgets,\n max_value=progressbar.UnknownLength,\n redirect_stdout=True)\n else:\n spinner = itertools.cycle(['-', '/', '|', '\\\\'])\n log.debug(\"Module progressbar2 is not installed, will show progress in usual manner.\")\n pass\n\n tpl_mod_dir = os.path.abspath(os.path.join(__file__ , \"../..\"))\n tplint_exe_path = tpl_mod_dir + '\\\\tplint\\\\tplint.exe'\n tplint_tax_path = tpl_mod_dir+'\\\\taxonomy\\\\00taxonomy.xml'\n syntax_passed = False\n\n # errors_re = re.compile(\"\\s+Errors:\\s+(.+)\")\n # mod_re = re.compile(\"Module:\\s+(.+)\")\n match_result = re.compile(\"(?P<error>\\w+\\s\\w+) at or near '(?P<near>\\S+)', \"\n \"line (?P<line>\\d+), in (?P<module>\\S+)\")\n\n log.debug(\"Syntax: Will check_ide all files in path: \" + str(working_dir))\n\n if disco_ver not in self.SYNTAX_SUPPORTED:\n log.info(\"NOTE: tplint was updated in 2016 last time, \"\n \"so we can use only version which is not greater then 11.0\")\n disco_ver = 11.0\n\n # Lines will be collected in list:\n result_out = []\n if os.path.exists(tplint_exe_path) and os.path.exists(tplint_tax_path):\n cmd = \" --discovery-versions=\"+str(disco_ver)+\" --loglevel=WARN -t \"+tplint_tax_path\n\n # noinspection PyBroadException\n try:\n open_path = subprocess.Popen(tplint_exe_path+cmd, cwd=working_dir, stdout=subprocess.PIPE)\n\n # Show progress with fancy progressbar:\n while open_path.stdout is not None:\n if progressbar:\n bar.update()\n else:\n sys.stdout.write(next(spinner))\n sys.stdout.flush()\n sys.stdout.write('\\b') # Working fine in win CMD but not in PyCharm.\n\n out = open_path.stdout.readline()\n result_out.append(out.decode('UTF-8').rstrip('\\r'))\n\n if not out:\n break\n time.sleep(0.01)\n # Final result:\n result = ''.join(result_out)\n if \"No issues found!\" in result:\n # Close bar, do not forget to.\n if progressbar:\n bar.finish()\n log.info(\"Build OK: Syntax: PASSED!\")\n syntax_passed = True\n sys.stdout.flush()\n\n elif match_result.findall(result):\n # Close bar, do not forget to.\n if progressbar:\n bar.finish()\n # error_modules = mod_re.findall(result)\n # errors = errors_re.findall(result)\n log.error(\"Syntax: ERROR: Some issues found!\"\"\\n\" + str(result))\n sys.stdout.flush()\n else:\n log.error(\"Syntax: Something is not OK \\n\" + str(result))\n except:\n log.error(\"Syntax: Tplint cannot run, check_ide if working dir is present!\")\n log.error(\"Syntax: Tplint use path: \" + tpl_mod_dir)\n else:\n log.warning(\"Path to tplint module is not exist. 
Please check_ide this: \"\n \"https://github.com/trianglesis/BMC_TPL_IDE#syntax-check_ide\")\n # noinspection PyPep8\n log.debug(\"Those paths expected: \"\n \"\\ntplint_exe_path - \"+str(tplint_exe_path) +\n \"\\ntplint_tax_path - \"+str(tplint_tax_path))\n\n return syntax_passed", "def checkUpstreamScheduler():", "def test_03(self):\n e = Emulator()\n e.init()\n e.make_transfer_prepare_condition()\n\n Emulator.run_transfer_prepare()\n qs = TransferPrepare.objects.filter(is_processed=False)\n assert qs.count() > 0\n\n Emulator.run_transfer_donkies_prepare()\n\n qs = TransferPrepare.objects.filter(is_processed=False)\n assert qs.count() == 0", "def __look__missing_termcount_info(self):\n logging.debug('Starting method that looks for missing Term Count data.')\n counter = 0\n max_vids_to_process = self.num_vids_to_use\n logging.info('Examining ' + str(max_vids_to_process) + ' records.')\n list_vids_no_tc_data = []\n percent_tracker = PercentTracker(max_vids_to_process, int_output_every_x_percent=10)\n for vid_id in self.transcripts_ds:\n execution_should_continue = self.var_mgr.var_retrieve(my_globals.str_execution_may_go_on)\n if (not execution_should_continue) or (counter >= max_vids_to_process):\n break\n transcript = Transcript(vid_id)\n transcript.set_transcript_directory(self.str_path_to_transcripts_files)\n transcript.load_transcript_object_from_dictionary(self.transcripts_ds.fetch_data(vid_id))\n has_tc_data = transcript.is_termcount_filename_populated()\n if not has_tc_data:\n # we are here if the video has a transcript (it exists in the transcripts SimpleDS),\n # but the field for the filename of the Term Count file has never been populated.\n list_vids_no_tc_data.append(vid_id)\n counter += 1\n percent_tracker.update_progress(counter,\n str_description_to_include_in_logging='Finding missing term-count files.')\n return list_vids_no_tc_data" ]
[ "0.5762406", "0.56196094", "0.5543294", "0.54251677", "0.54135394", "0.53959346", "0.5356444", "0.5315741", "0.5314573", "0.5207271", "0.5122057", "0.510715", "0.50821364", "0.50786346", "0.5072173", "0.50637865", "0.50570357", "0.505323", "0.50277853", "0.5014339", "0.4977006", "0.49643832", "0.49452335", "0.49304596", "0.4917264", "0.4916763", "0.4914915", "0.49081287", "0.4903229", "0.49031702", "0.4890435", "0.48690873", "0.48651877", "0.48555124", "0.48408136", "0.48399135", "0.48322514", "0.4820463", "0.48203355", "0.48112044", "0.48110497", "0.4810885", "0.4809977", "0.4800055", "0.47990304", "0.47973442", "0.47871467", "0.47858542", "0.47849768", "0.47839737", "0.4772871", "0.47711796", "0.47652525", "0.47632053", "0.47616357", "0.47363317", "0.4730852", "0.47205716", "0.47190285", "0.4717215", "0.47066104", "0.4699173", "0.46974173", "0.4693714", "0.46861756", "0.4685887", "0.46849436", "0.46844232", "0.46801686", "0.46791542", "0.4675756", "0.4674492", "0.4669862", "0.4667408", "0.46658474", "0.4665639", "0.46635085", "0.46502176", "0.46472764", "0.4641228", "0.4636296", "0.46326813", "0.46308634", "0.4626586", "0.4626269", "0.46253967", "0.46252882", "0.4621409", "0.46132806", "0.46076792", "0.46076393", "0.46044186", "0.4600304", "0.45954362", "0.4591859", "0.4583793", "0.4581557", "0.45772773", "0.45735943", "0.45723042" ]
0.7170837
0
scans all rss feeds for new
def updateScript(dbconnection):
    cursor = dbconnection.cursor()
    cursor.execute("select rss, name, source from podcasts;")
    rssArray = cursor.fetchall()
    for rss in rssArray:
        print("checking name " + str(rss[1]))
        url = str(rss[0])
        name = str(rss[1])
        source = str(rss[2])
        rssArray = DatabaseInteract.rssCheck(name, source, url)
        for item in rssArray:
            if(DatabaseInteract.checkIfExists(dbconnection, item[0]) == False):
                DatabaseInteract.insertClip(dbconnection, item[2], name, item[3], item[1], item[0])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def scanFeedList(self): \r\n data = self.feed_handler.listScanFeeds()\r\n data = data[:MAX_FEEDS_SCAN]\r\n for idx, feed in enumerate(data):\r\n print \"feeds ... / [%s/%s] (%s docs:%s passed)\" % (idx, len(data),self.feed_item_ctr, self.feed_passed)\r\n try:\r\n baseURL = feed.mainUrl\r\n self.processData(baseURL) \r\n self.createFeedItems()\r\n except Exception, ex:\r\n print(\"ERR: failed to process data and create feed item=%s\" % ex)\r\n print \"done\"", "def check_feeds(self):\n lst = []\n for feed in self.feeds:\n feed.update()\n if feed.get_new_entries():\n lst.append(feed)\n return lst", "def watch(self):\n all_rss_feeds = [feed for feed in models.RSSFeed.query.all()]\n\n for rss_feed in all_rss_feeds:\n rss_feed.aggregate()", "def process_all_rss(reprocess=False):\n sources = list()\n logger.debug(\"Collecting sources\")\n monitors = mongo.db[app.config['MONITORS_COLLECTION']]\n for item in monitors.find({'active': True}):\n sources.append(item['metadata'].get('rss_link'))\n\n contents = [feedparser.parse(x) for x in sources]\n logger.debug(\"Processing sources\")\n for source in contents:\n for idx, item in enumerate(source.get('entries')):\n response = get_article(item, source['href'], reprocess)\n if response['from_store'] or reprocess:\n continue\n clean_link = response['article']['feed_source']\n monitors.update({'metadata.rss_link': clean_link},\n {'$set': {'checked': now_time()}})\n correct_counts()", "def run_rss(self):\n\n pass", "def check_for_new_links(feed):\n #read the feed\n feed_url = feed[\"feed_url\"]\n feed_data = feedparser.parse(feed_url)\n\n #parse out entries in the feed for the information we want\n entries = []\n for entry in feed_data.entries:\n parsed_entry = {}\n parsed_entry[\"title\"] = entry[\"title\"]\n parsed_entry[\"link\"] = entry[\"link\"]\n parsed_entry[\"published\"] = entry[\"published\"]\n parsed_entry[\"feed_url\"] = feed_url\n entries.append(parsed_entry)\n\n #check for new entries since the last known entry\n #chop off all entries starting at the last_seen_link\n if \"last_seen_link\" in feed:\n last_link = feed[\"last_seen_link\"]\n idx = -1\n for cidx in range(len(entries)):\n if entries[cidx][\"link\"] == last_link:\n idx = cidx\n break\n #else is a new link\n entries = entries[:idx]\n\n return list(reversed(entries))", "def parse_rss(link, mode):\n\n one_feed = []\n news_counter = 0\n app.logger.info(f'Parsing feed: {link}')\n # Get file from internet, open it with xml-parser\n rss = feedparser.parse(link)\n\n for entry in rss.entries:\n\n if mode == 'latest':\n news_item_date = get_timestamp(entry.published)\n\n # Stop reading RSS if current news is already older than time\n # when user last got the news feed\n if news_item_date < last_time_user_got_news:\n return one_feed\n\n post = {'title': entry.title,\n 'published': get_timestamp(entry.published)}\n\n # Try to get link to image from one of a place where it can be\n try:\n pic = entry.enclosures[0].href\n except(IndexError, AttributeError):\n pic = get_img_source(entry.summary)\n\n post['image'] = pic if pic else url_for('static',\n filename=\"400x400.jpg\")\n\n link = entry.link\n post['link'] = link\n domain_name = re.search(r'://(.+?)/', link).group(1)\n post['domain_name'] = domain_name if domain_name else 'unknown'\n\n one_feed.append(post)\n\n if mode != 'latest':\n return one_feed\n else:\n print('There are no new news at all.')\n return []", "def add_rss(url):", "def updateOneFeed(self):\n feeds = backend.Feed.query.order_by(\"check_date\").limit(1).all()\n if feeds:\n 
feed = feeds[0]\n print feed.check_date\n # Only check if it has not been checked in at least 10 minutes\n if (datetime.datetime.now() - feed.check_date).seconds > 600:\n print \"Scheduled update of: \",feed.xmlurl\n fetcher_in.put(['update', feed.xmlurl, feed.etag, feed.check_date])", "def download_feed_return_objects(rss_url):\r\n try:\r\n feed_obj = rss_exists(rss_url)\r\n except:\r\n yield None\r\n return\r\n\r\n feed_obj_found = False\r\n feed_parser_results, success = get_rss(rss_url)\r\n\r\n if feed_parser_results is None:\r\n error_reporter.captureMessage(u'Feed Parser results is None', **dict(rss_url=rss_url))\r\n yield None\r\n return\r\n\r\n if feed_obj is None:\r\n feed_obj = create_new_feed(feed_parser_results, rss_url)\r\n else:\r\n feed_obj_found = True\r\n\r\n feed_id = feed_obj.id\r\n feed_obj.title = feed_parser_results.get(\"title\", \"\") or \"\"\r\n max_length_field(feed_obj, 'title', 100)\r\n\r\n feed_obj.status_code = feed_parser_results.get(\"status\", \"\") or 200\r\n feed_obj.status = find_feed_status_from_scode(feed_obj)\r\n\r\n feed_obj.etag = cut_clean_etag(feed_parser_results.get(\"etag\", \"\"))\r\n\r\n updated_date = feed_parser_results.get(\"updated_parsed\")\r\n feed_obj.updated = dt.fromtimestamp(mktime(updated_date)) if updated_date is not None else dt.utcnow()\r\n #\tfeed_obj.published = dt.fromtimestamp(mktime(published_date)) if published_date is not None else None\r\n feed_obj.last_check = dt.utcnow()\r\n\r\n # We could be creating a new feed, or updating the existing one.\r\n yield feed_obj\r\n rss_posts = []\r\n\r\n for feed_article in feed_parser_results.get(\"entries\", []):\r\n ptime = feed_article.get(\"published_parsed\", None)\r\n post_date = dt.fromtimestamp(mktime(ptime)) if ptime is not None else dt.utcnow()\r\n #\t\tprint \"%r\" % post\r\n p = Post(\r\n id=uuid.uuid1(),\r\n title=feed_article.get(\"title\", \"\"),\r\n author=feed_article.get(\"author\", \"\"),\r\n href=feed_article.get(\"href\", \"\"),\r\n post_id=feed_article.get(\"id\", \"\"),\r\n published_at=post_date,\r\n feed_id=feed_id\r\n )\r\n\r\n p.original_title = max_length_field(p, 'title', 200)\r\n p.original_author = max_length_field(p, 'author', 200)\r\n\r\n p.content_html = feed_article.get(\"content\", \"\") or \"\"\r\n\r\n if feed_article.has_key(\"media_content\"):\r\n media_contents = feed_article.get(\"media_content\", []) or []\r\n if media_contents is not None and (not isinstance(media_contents, basestring)) and isinstance(\r\n media_contents, collections.Iterable):\r\n p.media = [media.get(\"url\") for media in media_contents]\r\n\r\n hasHash = False\r\n\r\n if feed_article.has_key(\"feedburner_origlink\"):\r\n p.original_link = feed_article.get(\"feedburner_origlink\", \"\")\r\n if non_empty_str(p.original_link):\r\n p.link_hash = url_hash(safe_str(p.original_link))\r\n hasHash = True\r\n\r\n if feed_article.has_key(\"link\"):\r\n p.href = feed_article.get(\"link\", \"\")\r\n if not hasHash and non_empty_str(p.href):\r\n p.link_hash = url_hash(safe_str(p.href))\r\n hasHash = True\r\n\r\n if not hasHash:\r\n print \"Post don't have any hash\"\r\n\r\n p.title_hash = url_hash(safe_str(p.title)) if non_empty_str(p.title) else \"\"\r\n p.post_id_hash = url_hash(safe_str(p.post_id)) if non_empty_str(p.post_id) else \"\"\r\n\r\n if feed_article.has_key(\"tags\"):\r\n if isinstance(feed_article['tags'], collections.Iterable):\r\n p.tags = [pst.get(\"term\") for pst in feed_article['tags']]\r\n\r\n rss_posts.append(p)\r\n\r\n has_posts = len(rss_posts) > 0\r\n 
post_id_hashes = [p.post_id_hash for p in rss_posts]\r\n #\tpost_title_hashes = [p.title_hash for p in rss_posts]\r\n post_link_hashes = [p.link_hash for p in rss_posts]\r\n\r\n found_posts_id_hashes = []\r\n found_posts_link_hashes = []\r\n\r\n if feed_obj_found and has_posts:\r\n existing_posts = find_existing_posts(feed_id, post_id_hashes, post_link_hashes)\r\n\r\n for ex_post_id_hash, ex_link_hash in existing_posts:\r\n found_posts_id_hashes.append(ex_post_id_hash)\r\n found_posts_link_hashes.append(ex_link_hash)\r\n\r\n has_existing_posts = len(found_posts_id_hashes) > 0 or len(found_posts_link_hashes) > 0\r\n\r\n new_post_count = 0\r\n if has_posts:\r\n for rss_post in rss_posts:\r\n should_skip = False\r\n\r\n if has_existing_posts:\r\n if non_empty_str(rss_post.post_id_hash) and rss_post.post_id_hash in found_posts_id_hashes:\r\n should_skip = True\r\n elif rss_post.link_hash in found_posts_link_hashes:\r\n should_skip = True # \"Link Hash found in existing records\"\r\n\r\n if not should_skip:\r\n new_post_count += 1\r\n yield rss_post\r\n\r\n feed_history = FeedHistory(id=uuid.uuid1(),\r\n feed_id=feed_obj.id,\r\n timestamp=dt.utcnow(),\r\n status=feed_obj.status_code,\r\n post_count=new_post_count,\r\n etag=feed_obj.etag)\r\n yield feed_history", "def parse_rss(database, feed, depth=1):\n # Get the updates article count, and article urls and publish dates.\n rss_a = rss_feed(feed)\n \n # Get all (article urls, publish dates) pairs\n articles = []\n pairs = rss_a[1].items()\n for url, pubdate in pairs: \n articles += crawl_url(database, url, date=pubdate, depth=depth)\n \n return articles", "def _generate_feeds_once(use_batching: bool, mark_tweets_as_rss_fed: bool = True) -> int:\n all_new_tweets = db.get_tweets_to_rss_feed(-1 if use_batching else Config.RSS_MAX_ITEMS)\n if len(all_new_tweets) > 0:\n username_to_tweets = defaultdict(list)\n for tweet in all_new_tweets:\n if _should_include_tweet(tweet):\n username_to_tweets[tweet.user.screen_name].append(tweet)\n for username, tweets in username_to_tweets.items():\n logging.info('Updating RSS feed of %s with %s tweets.', username, len(tweets))\n _update_feed(username, tweets, use_batching)\n if mark_tweets_as_rss_fed:\n db.mark_tweets_as_rss_fed(\n username, tweets[0].user.name, [tweet.id for tweet in tweets], Config.TTL_SECONDS\n )\n _update_feeds_html()\n return len(all_new_tweets)", "def generate_feeds():\n os.makedirs(Config.FEED_ROOT_PATH, exist_ok=True)\n use_batching = Config.DAILY_DIGEST is not None\n\n while True:\n _generate_feeds_once(use_batching=use_batching)\n interval = _interval_between_generating_feeds(Config.REFRESH_INTERVAL_SECONDS, Config.DAILY_DIGEST)\n logging.info('Sleeping %ss before attempting to generate feeds again.', interval)\n time.sleep(interval)", "def get_rss(self):\r\n rssfiles = []\r\n \r\n rssfiles.append(feedparser.parse(self.url))\r\n return rssfiles", "def zhihu_rss_fetcher(ctx):\n URL = 'http://www.zhihu.com/rss'\n coll = ctx.get_mongo_collection()\n\n for entry in fetch_rss(URL).entries:\n try:\n coll.insert({'_id': entry.link})\n except DuplicateKeyError:\n continue\n ctx.new_item(TextOnlyItem(entry.title, entry.description), ['zhihu'],\n parse_entry_time(entry),\n {'id': entry.link})\n log_info(u'zhihu: new entry: {} {}'.format(entry.link,\n entry.title))", "def scrape(self):\n\n for feed in self.id_list:\n\n try:\n d = self.g.get_connections(feed, 'feed')\n except Exception as e:\n print(\"Error reading feed id %s, exception: %s\" % (feed, e))\n continue\n\n no_messages = 0\n 
self.no_messages = no_messages\n count = 1\n print(\"Scraping page %s of feed id %s\" % (count, feed))\n self.write_data(d)\n\n try:\n paging = d['paging']\n if 'next' in paging:\n next_page_url = paging['next']\n\n while next_page_url:\n\n count += 1\n print(\"Scraping page %s\" % count)\n\n try:\n # convert json into nested dicts and lists\n with urlopen(next_page_url) as url:\n read_url = url.read()\n d = simplejson.loads(read_url)\n except Exception as e:\n print(\"Error reading id %s, exception: %s\" % (feed, e))\n continue\n\n if len(d['data']) == 0:\n print(\"There aren't any other posts. Scraping of feed id %s is done! \" % feed)\n break\n\n self.write_data(d)\n\n if 'paging' in d:\n if 'next' in d['paging']:\n next_page_url = d['paging']['next']\n else:\n break\n\n except:\n if self.no_messages > 0:\n print(\"There aren't any other pages. Scraping of feed id %s is done! \" % feed)\n else:\n print(\"There is nothing to scrape. Perhaps the id you provided is a personal page.\")\n continue\n\n self.con.close()", "def _process_feeds(self):\n if self._feeds is None:\n return\n try:\n for feed_parser in self._feed_parsers:\n # all of the nested try excepts\n try:\n for article in feed_parser.get_new_articles():\n self._downloader.queue_article(article)\n for article in self._recursive_source.get_new_articles():\n self._downloader.queue_article(article)\n except Exception as e:\n logging.exception(e)\n\n except TypeError:\n raise ValueError(\"'feeds' must be a list of RSS feed URLs to process.\")", "def __update_feed(feed_obj):\n\n url = feed_obj.url\n feed = feedparser.parse(url)\n\n try:\n feed.feed.title\n except AttributeError:\n return\n\n # List of new entries in downloaded XML\n new_entries = feed.entries\n new_entries_titles = [entry.title for entry in new_entries]\n\n # List of current entries in database\n old_entries = Entry.objects.filter(feed=feed_obj)\n old_entries_titles = [entry.title for entry in old_entries]\n\n # Check what old entries arn't in new entries\n # They will be deleted\n for entry_title in old_entries_titles:\n if entry_title not in new_entries_titles:\n Entry.objects.get(title=entry_title, feed=feed_obj).delete()\n\n # Add all new entries\n __add_entries(new_entries, feed_obj)\n\n # Update time and save\n feed_obj.time = datetime.now()\n feed_obj.save()", "async def checkNew(self):\n items = self.source.getRecent()\n items.reverse()\n if items:\n for item in items:\n if item.title not in self.cache:\n print(f'{str(self.source)}: {item.title}')\n self.cache.append(item.title)\n for itemList in self.list:\n if item.title == itemList['title'] or item.title == itemList['title_english']:\n await self.sendPing(item.title, item.progress, item.link, itemList['image_url'])\n else:\n print(f'Failed retrieving from {str(self.source)}')", "def storeFeeds(self, url, feeds):\n for feed in feeds:\n _date = time.localtime()\n if 'published_parsed' in feed:\n _date = feed['published_parsed']\n date = datetime(_date.tm_year, _date.tm_mon, _date.tm_mday)\n doc = {\n '_id': md5_new(feed.id).hexdigest(),\n 'title': feed.title,\n 'date': date,\n 'link': feed.link,\n 'summary': feed.summary,\n 'type': url,\n 'status': 'new',\n }\n try:\n self.feedsCol.insert(doc)\n except DuplicateKeyError:\n pass", "def check_feeds():\n\n session = oercloud.Session()\n\n # load the entry point handlers for different feed types\n handlers = aggregator.handlers.get()\n\n for feed in session.query(oercloud.Feed):\n\n if (time.time() - feed.last_import) > feed.update_interval:\n\n # this feed needs 
updated -- call the appropriate handler\n aggregator.LOG.info(\"Updating %s\" % feed)\n\n if feed.feed_type in handlers:\n handlers[feed.feed_type].load()(feed)\n else:\n # no handler... log a warning\n aggregator.LOG.warning(\"No handler for feed type %s\" % \n feed.feed_type)", "def retrieveFeed(self, rss_url):\n url = 'http://{}'.format(rss_url)\n result = feedparser.parse(url)\n if result.status != 200:\n sys.stdout.write('request failed for retrieve this RSS ({})\\n'.format(url))\n else:\n self.storeFeeds(url, result['items'])", "def update_db_from_rss():\n today = date.today()\n url = 'https://newyork.craigslist.org/search/jjj?query=unpaid&sort=rel&format=rss'\n\n cached_feed = CachedFeed.query.filter_by(rss_url=url, date=today).first()\n if not cached_feed:\n resp = requests.get(url)\n cached_feed = CachedFeed(rss_url=url, text=resp.text)\n db.session.add(cached_feed)\n db.session.commit()\n\n feed = feedparser.parse(cached_feed.text)\n\n for entry in feed.entries:\n link = entry['link']\n\n # Skip postings that already exist when scanning\n posting = Posting.query.filter_by(url=link, rss_url=url).first()\n if posting:\n continue\n\n posting_resp = requests.get(link)\n posting_soup = BeautifulSoup(posting_resp.text)\n\n replylink = posting_soup.find(id=\"replylink\")\n contact_href = replylink.get('href') if replylink else None\n\n contact_url = urljoin(url, contact_href)\n contact_resp = requests.get(contact_url)\n contact_soup = BeautifulSoup(contact_resp.text)\n\n anonemail_el = contact_soup.find(class_=\"anonemail\")\n title = posting_soup.find('title').text\n\n posting = Posting(title=title,\n url=link,\n rss_url=url,\n text=unicode(posting_soup.find(id='postingbody')),\n region='nyc',\n posted_at = datetime.fromtimestamp(mktime(entry.published_parsed)),\n email=anonemail_el.text if anonemail_el else None,\n email_subject=title,\n email_body=current_app.config['EMAIL_DEFAULT_BODY']\n )\n\n db.session.add(posting)\n\n print(u\"finished {}, sleeping\".format(link))\n time.sleep(15)\n\n db.session.commit()", "def get_rss_feed(feed_key):\n\n if rss_feeds[feed_key]['updated'] is None:\n # Update Cache\n entries = update_cache(feed_key)\n elif (datetime.datetime.today() - rss_feeds[feed_key]['updated']).seconds > (60 * 5):\n # Update Cache\n entries = update_cache(feed_key)\n else:\n # Read Cache\n entries = get_cache(feed_key)\n\n return entries", "def rss_feed(rss_url):\n try:\n # Use feedparser to analyze given RSS feed, if it is valid RSS.\n d = feedparser.parse(rss_url)\n except:\n return \"Sorry, invalid RSS feed. Please check and try again later.\"\n \n total = len(d['entries'])\n updates = dict()\n for index, item in enumerate(d['entries']):\n # Convert publish time from ctime format to iso-time format.\n a_time = time_convert(item.published)\n # Set article url ad dictionary key, with publish date as value. 
\n updates[str(item.link)] = a_time \n return (total, updates)", "def update_rss_feed(torrent_dir, suggested_name, url, download_url, tree_size, torrents):\n # Fetching the existing feed, if possible\n filepath = os.path.join(torrent_dir, '{}.rss'.format(suggested_name))\n try:\n with open(filepath, 'rb') as fd:\n doc = xml.dom.minidom.parse(fd)\n\n except IOError:\n # The RSS file does not exist; it is probably a first run\n doc = None\n\n # Fixing download URL, if need be, such that it ends with a slash\n if download_url[-1] != '/':\n download_url += '/'\n\n # Building/Verifying the XML structure\n try:\n chan = check_rss_dom_structure(doc)\n except:\n doc, chan = init_rss_dom_structure(url)\n\n for torrent_data in torrents:\n item = doc.createElement('item')\n chan.appendChild(item)\n\n title_elmt = doc.createElement('title')\n title_txt = doc.createTextNode('Package {} for tree_size {}'.format(torrent_data[2], tree_size))\n title_elmt.appendChild(title_txt)\n item.appendChild(title_elmt)\n\n desc_elmt = doc.createElement('description')\n desc_txt = doc.createTextNode(\n 'Comment: {} Creation Date: {}'.format(torrent_data[0]['comment'], torrent_data[0]['creation date'])\n )\n desc_elmt.appendChild(desc_txt)\n item.appendChild(desc_elmt)\n\n guid_elmt = doc.createElement('guid')\n fp = codecs.getencoder('hex')(torrent_data[1])[0]\n guid_txt = doc.createTextNode(fp.decode('UTF-8'))\n guid_elmt.appendChild(guid_txt)\n item.appendChild(guid_elmt)\n\n enclosure_elmt = doc.createElement('enclosure')\n enclosure_elmt.setAttribute('url', download_url + build_torrent_name(url, torrent_data[2], tree_size))\n enclosure_elmt.setAttribute('type', 'application/x-bittorrent')\n enclosure_elmt.setAttribute('len', str(torrent_data[3]))\n item.appendChild(enclosure_elmt)\n\n with open(filepath, 'wb') as fd:\n fd.write(doc.toxml('UTF-8'))", "def main():\n # Construct the feed generator\n f = LogBufferFeed(FEED_DIR)\n f.MAX_AGE = 24 * 60 * 60 # 1 day\n f.FEED_META['feed.title'] = '%s Referrering Links' % SITE_NAME\n f.FEED_META['feed.tagline'] = \\\n 'New referring links from Apache access.log on %s' % SITE_NAME\n \n # Load up tail of access log, parse, and filter\n new_lines = bookmark_tailgrep(ACCESS_LOG, max_initial_lines=100000)\n all_events = parse_access_log(new_lines)\n events = [ x for x in all_events if event_filter(x) ]\n \n # Scan through latest events for new referrers\n referrers_seen = shelve.open(REFER_SEEN)\n new_referrers = []\n for evt in events:\n k = '%(referrer)s -> %(path)s' % evt\n if not referrers_seen.has_key(k):\n referrers_seen[k] = 1\n new_referrers.append( (evt['referrer'], evt['path']) )\n referrers_seen.close()\n \n # If there were new referrers found, insert a new entry.\n if len(new_referrers) > 0:\n \n # Build a list of hyperlinks for referrers\n links_out = [\n LINK_TMPL % {\n 'SITE_ROOT' : SITE_ROOT,\n 'referrer' : x[0],\n 'path' : x[1],\n }\n for x in new_referrers\n ]\n \n # Build a summary for this entry.\n summary = SUMMARY_TMPL % { \n 'count' : len(new_referrers), \n 'links' : \"\\n\".join(links_out)\n }\n \n # Construct and append a new entry\n entry = FeedEntryDict({\n 'title' : '%s new referrers' % len(new_referrers),\n 'link' : '',\n 'summary' : summary\n })\n f.append_entry(entry)\n\n # Output the current feed entries as both RSS and Atom\n open(FEED_NAME_FN % 'rss', 'w').write(f.scrape_rss())\n open(FEED_NAME_FN % 'atom', 'w').write(f.scrape_atom())", "def update_feeds(self) -> tuple[set[str], set[str]]:\n\n updated_urls = set()\n error_urls = set()\n 
self.sync_reader()\n with make_reader(self.reader_db_file) as reader:\n for (url, value) in reader.update_feeds_iter():\n if isinstance(value, UpdatedFeed):\n logger.info(f'Got updated feed for {url} with {value.new} new entries '\n f'and {value.modified} updated entries.')\n if value.new:\n updated_urls.add(url)\n elif isinstance(value, ReaderError):\n logger.error(f'Got error when updating {url}')\n error_urls.add(url)\n return updated_urls, error_urls", "def get_news(url):\r\n \r\n # parse RSS feed into list of dictionaries\r\n feed = feedparser.parse(url)\r\n\r\n # no RSS feed articles for url\r\n if len(feed['entries']) == 0:\r\n return []\r\n \r\n # get first ten articles from the RSS feed\r\n news = []\r\n i = 0\r\n while True:\r\n if i == len(feed['entries']) or i > 30:\r\n break\r\n \r\n try:\r\n # get link to article\r\n link = feed[\"entries\"][i][\"link\"]\r\n\r\n # get title of article\r\n title = feed[\"entries\"][i][\"title\"]\r\n \r\n try:\r\n # get raw summary of article\r\n summary_raw = feed[\"entries\"][i][\"summary\"]\r\n \r\n # format summary\r\n summary = \"\"\r\n for c in summary_raw:\r\n if c == \"<\":\r\n summary += \"...\"\r\n break\r\n summary += c\r\n except KeyError as e:\r\n logging.error(\"no summary for RSS feed article: {}\".format(link))\r\n summary = \"read more here...\"\r\n \r\n # get raw date \r\n date_raw = feed[\"entries\"][i][\"published_parsed\"]\r\n \r\n if date_raw is None:\r\n date = feed[\"entries\"][i][\"published\"]\r\n \r\n else:\r\n # format date\r\n year = str(date_raw.tm_year)\r\n months = [\"January\", \"February\", \"March\", \"April\", \"May\", \"June\", \"July\", \"August\", \"September\", \"October\", \"November\", \"December\"]\r\n month = months[date_raw.tm_mon - 1]\r\n day = str(date_raw.tm_mday)\r\n weekdays = [\"Monday\", \"Tuesday\", \"Wednesday\", \"Thursday\", \"Friday\", \"Saturday\", \"Sunday\"]\r\n wday = weekdays[date_raw.tm_wday]\r\n hour = str(date_raw.tm_hour)\r\n hour = \"{:2}\".format(hour).format(' ','0')\r\n min = str(date_raw.tm_min)\r\n min = \"{:2}\".format(min).replace(' ','0')\r\n date = hour + \":\" + min + \" - \" + wday + \" \" + month + \" \" + day + \", \" + year\r\n \r\n # compile entry and append to news list\r\n entry = {\"link\":link, \"title\":title, \"date\":date, \"summary\":summary}\r\n \r\n # sanitize entry\r\n for key in entry:\r\n # apostrophe\r\n entry[key] = entry[key].replace(\"&#39;\", \"'\")\r\n # right single quotation mark\r\n entry[key] = entry[key].replace(\"’\", \"&#8217;\")\r\n # left single quotation mark\r\n entry[key] = entry[key].replace('\"', \"&#8216;\")\r\n # right double quotation mark\r\n entry[key] = entry[key].replace(\"'\", \"&#8221;\")\r\n # left double quotation mark\r\n entry[key] = entry[key].replace(\"'\", \"&#8220;\")\r\n # Weird ampersand formatting\r\n entry[key] = entry[key].replace(\"&amp;\", \"&\")\r\n \r\n # prepare entry for sqlite queries\r\n entry[key] = surround(entry[key])\r\n \r\n # add entry to news list\r\n news.append(entry)\r\n \r\n # max 10 entries\r\n if len(news) == 10:\r\n break\r\n i += 1\r\n \r\n except Exception as e:\r\n logging.error(e)\r\n i += 1\r\n pass\r\n \r\n # success\r\n return news", "def process_refresh_watchpost(self, rssis_list):\n try:\n for eddy_namespace in rssis_list:\n refresh = self.refresh_watchpost(eddy_namespace, rssis_list[eddy_namespace])\n if refresh:\n print(\"\\t...atualizado\\n\\teddy_namespace{}\\n\\tstatus {}\".format(refresh.eddy_namespace,\n refresh.status))\n except Exception as e:\n print('process refresh 
watchpost', e)", "def crawl(self):\n self._process_urls()\n self._process_feeds()\n self._downloader.process_all()\n return self._feeds is not None", "def get_feed(self):\n possible_endings = ('rss', 'rss/')\n if not self.url or not self.url.endswith(possible_endings):\n print('Please check URL(is RSS?) and Internet connection')\n sys.exit()\n try:\n data = feedparser.parse(self.url)\n except urllib.error.URLError:\n print('Please input correct URL')\n sys.exit()\n self.get_content(data)\n return self.items", "def request_rss(self, url):\n return feedparser.parse(url)", "def feed_read(self, feed):\n if feed != self.current_feed:\n return\n self.action_mark_all_read.setDisabled(True)\n for item in self.current_feed.get_items():\n self.update_item(item)", "def test_update_new_no_last_updated(reader):\n parser = Parser()\n reader._parser = parser\n\n feed = parser.feed(1, datetime(2010, 1, 1))\n\n reader.add_feed(feed.url)\n # updated must be None if last_updated is None\n reader._storage.update_feed(\n FeedUpdateIntent(feed.url, None, feed=feed._replace(updated=None))\n )\n\n reader.update_feeds(new=True)\n\n parser.entry(1, 1, datetime(2010, 1, 1))\n reader.update_feeds(new=True)\n\n # the entry isn't added because feed is not new on the second update_feeds\n assert len(list(reader.get_entries(feed=feed.url))) == 0", "def get_rss_infos():\n\n url_rss_lib = \"http://www.liberation.fr/rss\"\n soup = utils.recovery_flux_url_rss(url_rss_lib)\n\n rss_items = soup.find_all(\"li\")\n\n rss_list = []\n\n link_rss = []\n\n for ri in rss_items:\n if ri.get(\"class\") == ['rss-item']:\n rss_list.append(ri.a.get('href'))\n\n for rl in rss_list:\n soup = utils.recovery_flux_url_rss(rl)\n entre = soup.find_all('entry')\n for e in entre:\n link_rss.append(e.link.get('href'))\n\n return link_rss", "def fetch_feeds(self):\n feed_list = []\n rss_list = self.__data_adapter.fetch_rss()\n for rss in rss_list:\n rss_href = rss.get('url', None)\n rss_title = rss.get('title', '-')\n if rss_href is not None:\n feed = feedparser.parse(rss_href)\n feed_list.append({\n 'title':rss_title,\n 'href':rss_href,\n 'status': feed.get('status', 400),\n 'updated': feed.get('updated', None),\n 'updated_parsed': feed.get('updated_parsed', None),\n 'encoding': feed.get('encoding', None),\n 'bozo': feed.get('bozo', None),\n 'headers': feed.get('headers', {}),\n 'etag': feed.get('etag', None),\n 'version': feed.get('version', None),\n 'entries': feed.get('entries', []),\n 'namespaces': feed.get('namespaces', None)\n })\n\n return feed_list", "async def process(self, timeout=60):\n\n previous_date = self.previous_date()\n new_date = previous_date\n last_sent_message_date = previous_date\n now = pendulum.now('UTC')\n\n self.log.info(\"Begining processing feed %s, previous date %s\",\n self.name, previous_date)\n\n for entry in await self.fetch_and_parse(timeout):\n\n pubdate = dateutil.parser.parse(entry.published, tzinfos=rssalertbot.BOGUS_TIMEZONES)\n entry.published = pendulum.from_timestamp(pubdate.timestamp())\n # also save a prettified string format\n entry.datestring = self.format_timestamp_local(entry.published)\n\n # skip anything that's stale\n if entry.published <= previous_date:\n continue\n\n event_id = md5((entry.title + entry.description).encode()).hexdigest()\n last_sent = self.storage.load_event(self.feed, event_id)\n re_alert = self.cfg.get('re_alert', rssalertbot.RE_ALERT_DEFAULT)\n should_delete_message = False\n\n if entry.published > now:\n if last_sent and now < last_sent.add(hours=re_alert):\n continue\n 
self.storage.save_event(self.feed, event_id, now)\n else:\n if entry.published > new_date:\n new_date = entry.published\n should_delete_message = last_sent\n\n self.log.debug(\"Found new entry %s\", entry.published)\n\n # alert on it\n await self.alert(entry)\n if new_date > last_sent_message_date:\n self.storage.save_date(self.feed, new_date)\n last_sent_message_date = new_date\n\n if should_delete_message:\n self.log.debug(f\"Deleting stored date for message {event_id}\")\n self.storage.delete_event(self.feed, event_id)\n\n self.log.info(\"End processing feed %s, previous date %s\", self.name, new_date)", "def run(self):\n while self.i < len(self.series):\n # Grab line + RSS\n s = self.series[self.i]\n rss = self.request_rss(s.feedUrl)\n\n # Compose Episodes\n ep_dicts = []\n for entry in rss['entries']:\n ep_dicts.append(Episode(s, entry).__dict__)\n\n # Build result dict\n result_dict = dict()\n result_dict['series'] = deepcopy(s.__dict__)\n result_dict['series']['genres'] = \\\n result_dict['series']['genres'].split(';')\n result_dict['series']['type'] = 'series'\n result_dict['episodes'] = ep_dicts\n\n # Store podcast\n self.storer.store(result_dict)\n\n # Move onto the next one\n self.i += 20\n print(\"Retrieved \" + str(s.id))", "def rss_fetch():\n items = {}\n\n def add_item(pubDate, title, link):\n nonlocal items\n idx = float(parsedate_to_datetime(pubDate).timestamp())\n while idx in items:\n idx = idx + 0.1\n dbg(\"Adding item: %11.1f \\\"%s\\\" %s\" % (idx, title, link))\n items[idx] = {}\n items[idx]['title'] = title\n items[idx]['link'] = link\n\n state = \"\" # state parser is in (\"\", \"item\", \"title\", \"link\", \"pubDate\")\n title = \"\" # Currently parsing this title.\n link = \"\" # \" \" \" link\n pubDate = \"\" # \" \" \" pubDate (index)\n\n def start_element(name, attrs):\n nonlocal state\n nonlocal title\n nonlocal link\n nonlocal pubDate\n dbg(\"Start: %s %s %s\" %(name, str(attrs), str((state, title, link, pubDate))))\n if state == \"\":\n if name == \"item\":\n state = \"item\"\n elif state == \"item\":\n if name == \"title\":\n state = \"title\"\n if title:\n prn(\"Two titles?\")\n sys.exit(1)\n elif name == \"link\":\n state = \"link\"\n if link:\n prn(\"Two links?\")\n sys.exit(1)\n elif name == \"pubDate\":\n state = \"pubDate\"\n if pubDate:\n prn(\"Two pubDates?\")\n sys.exit(1)\n\n\n def end_element(name):\n nonlocal state\n nonlocal title\n nonlocal pubDate\n nonlocal link\n dbg(\"End: %s %s\" % (name, str((state, title, link, pubDate))))\n if state == \"item\":\n if name == \"item\":\n if title == \"\":\n prn(\"No title at end item.\")\n sys.exit(1)\n if link == \"\":\n prn(\"No link at end item.\")\n sys.exit(1)\n if pubDate == \"\":\n prn(\"No pubDate at end item.\")\n sys.exit(1)\n else:\n add_item(pubDate, title, link)\n state = \"\"\n title = \"\"\n link = \"\"\n pubDate = \"\"\n elif state == \"title\":\n if name == \"title\":\n state = \"item\"\n elif state == \"link\":\n if name == \"link\":\n state = \"item\"\n elif state == \"pubDate\":\n if name == \"pubDate\":\n state = \"item\"\n\n def char_data(data):\n nonlocal state\n nonlocal title\n nonlocal pubDate\n nonlocal link\n dbg(\"Data: %s %s)\" % (str(data), str((state, title, link, pubDate))))\n if state == \"title\":\n title = title + data\n elif state == \"link\":\n link = link + data\n elif state == \"pubDate\":\n pubDate = pubDate + data\n\n\n p = xml.parsers.expat.ParserCreate(\"UTF-8\")\n\n p.StartElementHandler = start_element\n p.EndElementHandler = end_element\n 
p.CharacterDataHandler = char_data\n\n with urllib.request.urlopen('https://news.ycombinator.com/rss') as f:\n xml_file = b\"\"\n while True:\n r = f.read(255)\n if r:\n xml_file = xml_file + r\n else:\n break\n\n try:\n p.Parse(xml_file.decode(\"UTF-8\"), True)\n except:\n dbg(\"Writing fetched RSS feed to file...\")\n err_f = open(parse_error_output_file, \"ab\")\n err_f.write(b\"GET URL: \")\n err_f.write(f.geturl().encode(\"UTF-8\"))\n err_f.write(b\"\\nReturn Code: \")\n err_f.write((\"%d\\n\" % (f.getcode(), )).encode(\"UTF-8\"))\n err_f.write(b\"Meta Info:\\n\")\n err_f.write(f.info().as_bytes(unixfrom=True))\n err_f.write(b\"XML output:\\n\")\n err_f.write(xml_file)\n err_f.close()\n dbg(\"Done.\")\n raise\n\n return items", "def get_rss(address, website):\n #print address\n try:\n results = pattern.web.Newsfeed().search(address, count=100,\n cached=False, timeout=30)\n logger.debug('There are {} results from {}'.format(len(results),\n website))\n \n #print \"Results found\"\n except Exception as e:\n print 'There was an error. Check the log file for more information.'\n logger.warning('Problem fetching RSS feed for {}. {}'.format(address,\n e))\n results = None\n\n return results", "def getFeedFromXXX(RSSlink):\n summary =\"\"\n link =\"\"\n if \"packetstormsecurity\" in RSSlink:\n link =\"link\"\n summary=\"summary_detail\"\n elif \"jetlib\" in RSSlink:\n link=\"id\"\n summary=\"summary\"\n myFeed=\"\"\n try:\n myFeed = feedparser.parse(RSSlink)\n except:\n print(\"problem with the db website.try to change the source db in option !\")\n return None\n entries = [item for item in myFeed.items() if \"entries\" in item]\n tupleInsideEntries =entries[0]\n #print len(tupleInsideEntries[1])#show the number of result founded\n for dicItem in tupleInsideEntries[1]:\n if dicItem.get(\"title\")==\"No Results Found\":\n return False #break from this loop if theres no result\n print (\"Title : \"+dicItem.get(\"title\"))#title\n if summary ==\"summary_detail\": #packetstormsecurity\n print (\"Description : \"+str(dicItem.get(summary).get(\"value\")))#description\n else:\n print (\"Description : \"+str(dicItem.get(summary)))\n print (\"Date : \"+dicItem.get(\"published\"))#date\n print (\"Link : \"+dicItem.get(link)) #link\n print (\"#################################################################################\")\n return True", "def alert_new_posts(self):\n\n for ind, post in enumerate(self.parsed_feed['items']):\n # Record when we match the last-seen post. 
We will send alerts for\n # all posts occuring after match.\n if not self.is_new_post(post):\n cutoff = ind\n break\n item_list = list(reversed(self.parsed_feed['items'][:ind]))\n if len(item_list) == 0:\n return\n print '%d posts to send alerts for' % len(item_list)\n for post in item_list:\n if self.last_post is None or self.is_new_post(post):\n # Set text body\n tiny_url = tinyurl.create_one(str(post['id']))\n text_body = str(post['title']) + ' - ' + tiny_url\n self.send_sms(text_body)\n print 'Sent text for %s' % tiny_url\n break\n self.set_last_post(post)", "async def check():\r\n while True:\r\n if rss.check_new():\r\n item = rss.most_recent()\r\n queue = format_message.format_notes(item)\r\n for message in queue:\r\n await client.send_message(client.get_channel(\"350634825516056577\"), message)\r\n await asyncio.sleep(28800) # Check every 8 hours\r", "def test_get_feeds_order_added(reader):\n parser = Parser()\n reader._parser = parser\n\n reader._now = lambda: naive_datetime(2010, 1, 1)\n feed1 = parser.feed(1, datetime(2010, 1, 2))\n reader.add_feed(feed1.url)\n\n reader._now = lambda: naive_datetime(2010, 1, 2)\n feed2 = parser.feed(2, datetime(2010, 1, 1))\n reader.add_feed(feed2.url)\n\n reader._now = lambda: naive_datetime(2009, 12, 31)\n feed3 = parser.feed(3, datetime(2010, 1, 3))\n reader.add_feed(feed3.url)\n\n assert list(f.url for f in reader.get_feeds(sort='added')) == '2 1 3'.split()\n\n reader.update_feeds()\n\n assert list(f.url for f in reader.get_feeds(sort='added')) == '2 1 3'.split()", "def execute_task(self, *args):\n from flankers.scrawler import Scrawler\n\n RSS_FEEDS_CACHE = memcache.get('RSS_FEEDS_CACHE')\n if not RSS_FEEDS_CACHE or len(RSS_FEEDS_CACHE) == 0:\n RSS_FEEDS_CACHE = Scrawler.load_links()\n memcache.set('RSS_FEEDS_CACHE', RSS_FEEDS_CACHE)\n\n print len(RSS_FEEDS_CACHE)\n\n l = RSS_FEEDS_CACHE.pop()\n print l\n entries = Scrawler.read_feed(l)\n if entries:\n for entry in entries:\n #\n # Store feed\n #\n store_feed(entry)\n memcache.set('RSS_FEEDS_CACHE', RSS_FEEDS_CACHE)\n return None\n\n memcache.set('RSS_FEEDS_CACHE', RSS_FEEDS_CACHE)\n print \"This Feed has no entries\"\n return None", "def parse_and_alert(self):\n self.parse_feed()\n self.alert_new_posts()", "def parse_feed(feed, last_update, entry, get_updated = lambda e: e.updated_parsed[:6]):\n\n entries = []\n for e in feed.entries:\n if datetime(*get_updated(e)) > last_update:\n new = entry(e)\n if new != None:\n entries.append(new)\n return entries", "def update(self):\n feed = feedparser.parse(self._schema % self.project)\n added = []\n for entry in feed['entries']:\n if entry['id'] not in self.entries:\n self.entries[entry['id']] = entry\n added.append(entry)\n return added", "def feed() -> None:\n ...", "def latestEntriesRss():\n now = datetime.now()\n latestEntries = session.query(Pokemon).order_by(desc(Pokemon.date_entered))\\\n .limit(20)\n rss = render_template('rss.xml', lastBuildDate=now, entries=latestEntries)\n response = make_response(rss)\n response.headers[\"Content-Type\"] = \"application/xml\"\n return response", "def test_feed_generator(self):\n moksha.feed_cache = FakeCache()\n feed = Feed(url='http://lewk.org/rss')\n iter = feed.iterentries()\n data = iter.next()\n assert iter.next()", "def test_update_new_not_modified(reader):\n parser = NotModifiedParser()\n reader._parser = parser\n\n feed = parser.feed(1, naive_datetime(2010, 1, 1))\n\n reader.add_feed(feed.url)\n reader._storage.update_feed(FeedUpdateIntent(feed.url, None, feed=feed))\n\n 
reader.update_feeds(new=True)\n\n parser = Parser.from_parser(parser)\n reader._parser = parser\n\n parser.entry(1, 1, naive_datetime(2010, 1, 1))\n reader.update_feeds(new=True)\n\n # the entry isn't added because feed is not new on the second update_feeds\n assert len(list(reader.get_entries(feed=feed.url))) == 0", "def find_feeds(keyword):\n feeds = []\n\n for url in google_find_urls(keyword):\n feeds.extend(find_rss_feeds(url))\n\n return feeds", "def parse_feed(self):\n parsed_feed = feedparser.parse(self.rss_url)\n # Check for malformed feed\n if parsed_feed['bozo']:\n raise Exception('malformed rss feed!')\n self.parsed_feed = parsed_feed", "def createFeedItems(self):\r\n for item in self.item_data:\r\n self.initCreateFeedItem(item)\r\n self.createItem(item)", "def getOldEpisodes(config, rss, chan, namespaces):\n # Indicates items are to be added. Needed to know whether or not to\n # manually add namespaces. Yes, it is wonky. A side effect of the way\n # ElementTree adds namespaces.\n itemsAdded = False\n # Return value for the old episode elements which can be empty\n # if no old episodes exist\n items = None\n # Return value for the first year of publication as indicated by the\n # `pubDate` on the earliest episode. Used for generating the copyright\n # string. Can be empty if no old episodes exist.\n firstYear = None\n\n xmlFilepath = config['xmlFilepath']\n\n if os.path.isfile(xmlFilepath):\n # Load and strip the XML\n with open(xmlFilepath, 'r') as f:\n xmlStr = ''\n for line in f:\n # strip leading and trailing whitespace so minidom can prettify\n # without adding extraenous new lines\n xmlStr += line.lstrip().rstrip()\n\n # Parse the XML\n rssPrev = ET.ElementTree()\n\n try:\n rssPrev = ET.ElementTree(ET.fromstring(xmlStr))\n except:\n logger.fatal(\"Unable to parse \\'\" + xmlFilepath + \"\\'\")\n exit(1)\n\n # Find all the items and append them to the new tree\n items = rssPrev.getroot().findall('channel/item', namespaces)\n\n # Append found items and add appropriate namespaces\n if items:\n # Indicate items are to be added\n itemsAdded = True\n\n # Items do not carry an Atom namespace element, so add it manually\n rss.set(\"xmlns:atom\", \"http://www.w3.org/2005/Atom\")\n\n # Find the earliest `lastBuildDate` to determine copyright\n pubDates = rssPrev.getroot().findall('channel/item/pubDate',\n namespaces)\n\n for pubDate in pubDates:\n # Parse out the year\n year = re.findall(r\" \\d{4} \", pubDate.text)[0].lstrip().rstrip()\n\n # Set the year if empty or lower\n if not firstYear:\n firstYear = year\n else:\n if int(year) < int(firstYear):\n firstYear = year\n\n # No items were added, then add all namespace attributes manually.\n if not itemsAdded:\n for prefix, uri in namespaces.iteritems():\n rss.set(\"xmlns:\" + prefix, uri)\n\n return items, firstYear", "def reprocess_all_feeds():\n logger.debug(\"Executing the heartbeat task and returning\")\n celery.send_task('process_all_rss', kwargs={'reprocess': True})\n return render_template('index.html', name=\"HEARTBEAT\")", "def getRSS(self):\n return [rss for rss in self.rssCol.find()]", "def update():\n\n # load the OPML file and update any feeds\n for o in oercloud.Session().query(oercloud.Feed).filter_by(\n feed_type=oercloud.feed.OPML):\n \n aggregator.LOG.info(\"Loading OPML from %s\" % o.url)\n update_feed_list(opml.parse(o.url))\n\n # check each feed and see if it should be polled\n check_feeds()", "def download_filings(feedpath,args=None):\n\tlogger.info(\"Processing RSS feed %s\",feedpath)\n\n\tdir = 
filings_dir(feedpath)\n\tos.makedirs(dir,exist_ok=True)\n\n\tfiling_urls = []\n\tfor filing in feed_tools.read_feed(feedpath):\n\t\tif args:\n\t\t\tif args.company_re and not bool(args.company_re.match(filing['companyName'])):\n\t\t\t\tcontinue\n\t\t\tif args.cik and args.cik != filing['cikNumber']:\n\t\t\t\tcontinue\n\t\t\tif args.sic and args.sic != filing['assignedSic']:\n\t\t\t\tcontinue\n\t\t\tif args.form_type and args.form_type != filing['formType']:\n\t\t\t\tcontinue\n\t\tif 'enclosureUrl' in filing and not exists_filing(dir,filing['enclosureUrl'],filing['enclosureLength']):\n\t\t\tfiling_urls.append(filing['enclosureUrl'])\n\t\tif args and getattr(args,'with_exhibits',False):\n\t\t\tfiling_urls.extend( filing.get( 'exhibitList', [] ) )\n\n\tlogger.info(\"Start downloading %d new filings\",len(filing_urls))\n\twith concurrent.futures.ThreadPoolExecutor(max_workers=args.max_threads) as executor:\n\t\tfutures = [executor.submit(download_filing,dir,url,args.max_retries) for url in filing_urls]\n\t\tfor future in concurrent.futures.as_completed(futures):\n\t\t\ttry:\n\t\t\t\tfuture.result()\n\t\t\texcept Exception as e:\n\t\t\t\tprint(e)", "def test_rss_is_parseable(self):\r\n [make_bookmark() for i in range(10)]\r\n transaction.commit()\r\n\r\n res = self.app.get('/rss')\r\n\r\n self.assertEqual(\r\n res.status,\r\n \"200 OK\",\r\n msg='recent status is 200, ' + res.status)\r\n\r\n # http://packages.python.org/feedparser/\r\n # introduction.html#parsing-a-feed-from-a-string\r\n parsed = feedparser.parse(res.body)\r\n links = []\r\n for entry in parsed.entries:\r\n links.append({\r\n 'title': entry.title,\r\n 'category': entry.category,\r\n 'date': time.strftime('%d %b %Y', entry.updated_parsed),\r\n 'description': entry.description,\r\n 'link': entry.link,\r\n })\r\n\r\n self.assertTrue(links, 'The feed should have a list of links.')\r\n self.assertEqual(10, len(links), 'There are 10 links in the feed.')\r\n\r\n sample_item = links[0]\r\n self.assertTrue(sample_item['title'], 'Items have a title.')\r\n self.assertTrue(\r\n sample_item['link'],\r\n 'Items have a link to reach things.')\r\n self.assertTrue(\r\n 'description' in sample_item,\r\n 'Items have a description string.')", "def index_news_articles(self):\n # Get the RSS feed\n print('Fetching the RSS feed')\n item_list = rss_fetch.get_all_feed_urls(self.rss_url_file)\n # Index all the feed items into ES\n print('Going to index {0} news articles...'.format(len(item_list)))\n drop_count=0\n for item in item_list:\n try:\n # Use item specific id while indexing to avoid duplication\n self.es.index(index=self.index, doc_type=self.doc_type, id=item['id'], body=item)\n except KeyError:\n drop_count += 1\n traceback.print_exc()\n except elasticsearch.exceptions.RequestError:\n drop_count += 1\n traceback.print_exc()\n\n print('Indexed {0} Dropped {1}'.format(len(item_list)-drop_count, drop_count))\n print('Current index size {0}'.format(self.get_index_size()))", "def save_news_to_cache(self, rss_feed):\n\n self.print_if_verbose(\n f\"Method 'save_news_to_cache' is working: \\n\"\n f\"Saving news to cache... 
\\n\"\n )\n\n rss_feed_to_cache_title = self.source\n\n if not os.path.exists(\"cache\"):\n os.mkdir(\"cache\")\n os.chdir(\"cache\")\n\n if not os.path.exists(\"image_cache\"):\n os.mkdir(\"image_cache\")\n os.chdir(\"image_cache\")\n self.full_path_to_image_cache = os.getcwd()\n os.chdir(\"..\")\n\n try:\n with open(\"rss_reader_cache.json\", \"r\", encoding=\"utf-8\") as cache_file:\n data_from_cache = json.load(cache_file)\n except:\n data_from_cache = [{rss_feed_to_cache_title: []}]\n self.print_if_verbose(f\"Cache file does not exist, a new one will be created. \\n\")\n\n is_append_flag = False\n\n for feed in data_from_cache:\n if rss_feed_to_cache_title in feed.keys():\n for news in rss_feed[\"News\"]:\n if news not in feed[rss_feed_to_cache_title]:\n feed[rss_feed_to_cache_title].append(news)\n self.save_image_to_image_cache(news[\"ImageLink\"], f\"{news['ImageCacheName']}\")\n is_append_flag = True\n\n if not is_append_flag:\n data_from_cache.append({rss_feed_to_cache_title: rss_feed[\"News\"]})\n for news in rss_feed[\"News\"]:\n self.save_image_to_image_cache(news[\"ImageLink\"], f\"{news['ImageCacheName']}\")\n\n with open(\"rss_reader_cache.json\", \"w\", encoding=\"utf-8\") as cache_file:\n json.dump(data_from_cache, cache_file, indent=3)\n\n os.chdir(\"..\")\n\n self.print_if_verbose(\n f\"News were added to cache successfully. \\n\"\n f\"Method 'save_news_to_cache' is finished. \\n\"\n )\n\n return data_from_cache", "def fetch_entries(self):\n entries = []\n rss_list = self.__data_adapter.fetch_rss()\n for rss in rss_list:\n rss_href = rss.get('url', None)\n if rss_href is not None:\n feed = feedparser.parse(rss_href)\n [entries.append(FeedDocument(entry.get('title', ''), entry.get('summary', ''))) for entry in feed.get('entries', [])]\n return entries", "def collect():\n\n stats = {}\n for feed in Feed.objects:\n try:\n logger.info('Fetching from {0}...'.format(feed.ext_url))\n new_articles = fetch(feed)\n stats[feed.ext_url] = len(new_articles)\n\n except SAXException as e:\n if feed.errors is None:\n feed.errors = 0\n\n # Error with the feed, make a note.\n logger.info('Error fetching from {0}.'.format(feed.ext_url))\n feed.errors += 1\n feed.save()\n pretty_stats = json.dumps(stats, sort_keys=True, indent=4)\n notify('Corpora collection complete.', 'Total article count: {0}\\n\\nResults for this pass:\\n{1}'.format(len(Article.objects), pretty_stats))", "def get_rss(limit):\n rss_data = feedparser.parse(URL)\n if limit == 1:\n title = rss_data.entries[0].title\n link = rss_data.entries[0].link\n rss_print(title, link)\n else:\n for i in range(0, limit):\n title = rss_data.entries[i].title\n link = rss_data.entries[i].link\n\n print(Back.CYAN + str(i + 1) + \"\\t\")\n rss_print(title, link)", "def process_content(self, channel) -> dict:\n\n self.print_if_verbose(f\"Method 'process_content' is working:\")\n\n if self.limit is None or self.limit >= self.news_amount:\n self.limit = self.news_amount\n\n rss_feed = {}\n rss_feed[\"Feed\"] = channel.findtext('title')\n rss_feed[\"Description\"] = channel.findtext('description')\n rss_feed[\"Link\"] = channel.findtext('link')\n rss_feed[\"Language\"] = channel.findtext('language')\n rss_feed[\"News\"] = []\n\n append_news_to_rss_feed = 0\n\n self.print_if_verbose(f\"Adding data to the work dict 'rss_feed'...\")\n\n POSSIBLE_IMAGE_TAGS = (\"content\", \"thumbnail\", \"image\")\n POSSIBLE_IMAGE_ATTR = (\"url\", \"href\")\n\n for item in channel.iterfind(\"item\"):\n child_news = {}\n child_news[\"Title\"] = 
item.findtext(\"title\")\n child_news[\"Link\"] = item.findtext(\"link\")\n child_news[\"PubDate\"] = self.get_formatted_date(item.findtext(\"pubDate\"))\n child_news[\"Source\"] = item.findtext(\"source\")\n child_news[\"ImageLink\"] = None\n child_news[\"ImageCacheName\"] = None\n\n for tag in POSSIBLE_IMAGE_TAGS:\n for item_field in item:\n if tag in item_field.tag:\n for attr in POSSIBLE_IMAGE_ATTR:\n if attr in item_field.attrib:\n child_news[\"ImageLink\"] = item_field.attrib[attr]\n child_news[\"ImageCacheName\"] = \\\n f\"{''.join(char for char in child_news['Link'] if char.isalnum())}.jpg\"\n break\n if child_news[\"ImageLink\"]:\n break\n if child_news[\"ImageLink\"]:\n break\n\n rss_feed[\"News\"].append(child_news)\n\n append_news_to_rss_feed += 1\n if append_news_to_rss_feed == self.limit:\n break\n\n self.print_if_verbose(\n f\"{append_news_to_rss_feed} news were added. \\n\"\n f\"Method 'process_content' is finished. \\n\"\n )\n\n return rss_feed", "def _read_new_entries(self, is_first_read: bool) -> bool:\n new_text = self.watched_file.read()\n if new_text:\n for entry_txt in new_text.split(\"\\n\"):\n if entry_txt:\n self._register_entry(entry_txt, is_first_read)\n return len(new_text) > 0", "def add_new_posts(last_updated=None):\n for blog in Blog.objects.all():\n try:\n document = feedparser.parse(blog.feed_url)\n except:\n print \"error parsing\"\n continue\n\n if last_updated is None:\n print(\"- Adding %i articles from %s\" % (len(document['entries']), blog.title))\n\n for entry in document['entries']:\n # now we create a new post\n post = Post()\n post.blog = blog\n post.title = entry['title']\n\n if 'summary' in entry:\n post.content = entry['summary']\n if 'content' in entry:\n post.content = entry['content']\n\n post.link = entry['link']\n post.save()\n else:\n # TODO: only parse from a date\n pass", "def update_feed_list(opml):\n\n session = oercloud.Session()\n\n # see if this needs handled\n for item in opml:\n\n # see if this is an inclusion\n if item.type == 'link':\n\n # see if it's an OPML inclusion\n if item.url[-5:] == '.opml':\n # its OPML -- follow the link\n aggregator.LOG.debug(\"Following OPML inclusion to %s\" %\n item.url)\n update_feed_list(opml.parse(item.url))\n\n else:\n # not an inclusion -- add it to our feed list if needed\n if session.query(oercloud.Feed).filter_by(\n url = item.xmlUrl).count() == 0:\n\n # new feed -- find the appropriate user\n user = oercloud.User.by_name_url(\n item.text, item.xmlUrl)\n\n aggregator.LOG.info(\"Adding feed: %s\" % item.xmlUrl)\n\n session.save(\n oercloud.Feed(item.xmlUrl, user.uId, 0, item.type)\n )\n\n session.commit()\n\n # finally, recurse to check for sub-elements\n update_feed_list(item)", "def updateNewsFeed(self):\n try:\n news, events, categories, eventCategories = self.requestData()\n for language in NEWSFEED_LANGUAGES:\n self.newsFeedModel.update(news[language], events[language], categories[language],\n eventCategories[language], language=language)\n except Exception as e:\n print(\"there was a problem while updating the news feed\")\n raise e", "def check_news_site(sitename):\n delay = 10\n maxcount = 5\n stories = jsoninterchange.loader(sitename)\n for i in range(maxcount):\n print('Checking {} {}'.format(sitename, i + 1))\n dp.news_scraper(soupify(urls[sitename]), sitename, stories)\n time.sleep(delay)\n jsoninterchange.dumper(stories, sitename)", "def gnews(self):\n\t\tfeed_url = self.get_feed()\n\t\tfeed_data = feedparser.parse(feed_url)\n\t\tprint(\"\")\n\t\ttype_tiny = 
pyshorteners.Shortener()\n\t\tfor data in feed_data[\"items\"]:\n\t\t\ttiny_url = type_tiny.tinyurl.short(data[\"link\"])\n\t\t\t#tiny_url = tinyurl.create_one(data[\"link\"])\n\t\t\tprint('\\033[33m' + data[\"title\"] + \" : \" + Style.RESET_ALL + tiny_url)\n\t\t\tprint(\"\")", "def rssCheck(podcastName, source, url):\n try:\n headers = {'Accept':'text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8' ,'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/18.17763'}\n req = requests.get(url, headers=headers)\n root = etree.fromstring(req.text)\n rssArray = []\n for element in root[0].iter('item'):\n try:\n title = element.find(\"title\").text.replace(\"''\", \"'\")\n description = element.find(\"description\").text.replace(\"<strong>\", \"\").replace(\"</strong>\", \"\").replace(\"&amp;\", \"and\").replace(\"'\",\"''\")\n date = element.find(\"pubDate\").text\n date = date.split(\" \")\n date = datetime.strptime(date[1] + date[2] + date[3], \"%d%b%Y\")\n dateString = str(date.month) + \"-\" + str(date.day) + \"-\" + str(date.year)\n url = ResolveRouter.urlRouter(podcastName, source, element)\n except:\n print(\"error in XMLDetailsDebug parsing issue\")\n if(len(title) > 0 and len(description) > 0 and len(dateString) > 0 and len(url) > 0):\n rssArray.append([title, dateString, url, description])\n else:\n print(\"error in XMLDetailsDebug parsing issue\")\n return rssArray\n except Exception as e:\n print(e)\n Tools.writeException(\"getXMLDetailsDebug\", e)", "def get_feed(self):\n\t\turl=\"http://news.google.com/news?ned=%s&topic=%s&output=rss\"\n\t\tlinks=[{\"ned\":\"us\", \"type\":\"h\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"w\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"n\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"n\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"n\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"n\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"nz\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"sa\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"n\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"n\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"b\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"t\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"m\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"s\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"e\"},\n\t\t\t ]\n\t\tfeed = links[self.get_input()]\n\t\treturn url%(feed[\"ned\"],feed[\"type\"])", "def subscribe(self, *rss_feeds):\n valid_feeds = [i for i in rss_feeds if i not in self.subscriptions] # Removes any duplicate subscriptions\n self.subscriptions.extend(valid_feeds)\n self.save()\n # import ipdb; ipdb.set_trace()", "def get_links_all(self, number_links):\r\n podcast_data = []\r\n\r\n for entry in self.rss[0].entries: \r\n try:\r\n podcast_data = [entry.published, entry.title, \r\n entry.enclosures[0]['href'], \r\n self.rss[0].feed.title\r\n ]\r\n except IOError as err:\r\n print err\r\n except UnicodeDecodeError as err:\r\n print err\r\n else:\r\n self.podcast_list.append(podcast_data)\r\n if number_links != 0:\r\n if len(self.podcast_list) == number_links: \r\n return None\r\n return None", "def read_data(self, token, trigger_id, date_triggered):\n now = arrow.utcnow().to(settings.TIME_ZONE)\n published = ''\n my_feeds = []\n\n # get the URL from the trigger id\n rss = Rss.objects.get(trigger_id=trigger_id)\n\n logger.debug(\"RSS Feeds from %s : url %s\", rss.name, rss.url)\n\n # retrieve the data\n feeds = Feeds(**{'url_to_parse': rss.url}).datas()\n\n if hasattr(feeds.feed, 'published_parsed'):\n published = 
datetime.datetime.utcfromtimestamp(\n time.mktime(feeds.feed.published_parsed))\n elif hasattr(feeds.feed, 'updated_parsed'):\n published = datetime.datetime.utcfromtimestamp(\n time.mktime(feeds.feed.updated_parsed))\n\n if published == '':\n published = now\n else:\n published = arrow.get(str(published),\n 'YYYY-MM-DD HH:mm:ss').to(settings.TIME_ZONE)\n\n date_triggered = arrow.get(\n str(date_triggered),\n 'YYYY-MM-DD HH:mm:ss').to(settings.TIME_ZONE)\n\n if date_triggered is not None and\\\n published is not None and\\\n now >= published and\\\n published >= date_triggered:\n my_feeds = feeds.entries\n cache.set('th_rss_' + str(trigger_id), my_feeds)\n cache.set('th_rss_uuid_{}'.format(rss.uuid), my_feeds)\n # return the data\n return my_feeds", "def _get_current_rss_items(feed_path: str) -> List[str]:\n if os.path.isfile(feed_path):\n with open(feed_path) as xfd:\n feed_str = xfd.read()\n items = ['<item>{}'.format(ip) for ip in feed_str.split('<item>')[1:]]\n if len(items) > 0:\n items[-1] = items[-1].replace('</channel>', '').replace('</rss>', '')\n return items\n return []", "def _updateFeed(pk):\n feed = get_object_or_404(Feed, pk=pk)\n\n rawFeed, entries = feed._fetch_feed() \n\n feed.title = rawFeed.get('title', None)\n feed.subtitle = rawFeed.get('subtitle', None)\n feed.copyright = rawFeed.get('rights', None)\n feed.ttl = rawFeed.get('ttl', None)\n feed.atomLogo = rawFeed.get('logo', None)\n\n # Try to find the updated time\n updated = rawFeed.get(\n 'updated_parsed',\n rawFeed.get('published_parsed', None),\n )\n\n if updated:\n updated = datetime.datetime.fromtimestamp(\n time.mktime(updated)\n )\n\n feed.pubdate = updated\n\n super(Feed, feed).save()\n\n if entries:\n dbEntriesCreate = []\n dbEntriesupdate = []\n for raw_entry in entries:\n entry = Entry.objects.parseFromFeed(raw_entry)\n entry.feed = feed\n\n try:\n newEntry = Entry.objects.get(guid=entry.guid, feed=feed)\n except:\n newEntry = None\n\n \n if newEntry:\n # if it was updated, then mark it as unread, otherwise no need to do anything\n if newEntry.date > entry.date:\n entry.state = ENTRY_UNREAD\n id = newEntry.id\n newEntry = entry\n newEntry.id = id\n dbEntriesupdate.append(newEntry)\n else:\n dbEntriesCreate.append(entry)\n\n with transaction.atomic():\n if len(dbEntriesCreate)>0:\n Entry.objects.bulk_create(dbEntriesCreate)\n if len(dbEntriesupdate)>0:\n fields = ['feed', 'state', 'title' , 'content', 'date', 'author', 'url' ,'comments_url']\n Entry.objects.bulk_update(dbEntriesupdate, fields)\n\n return", "def addNewFeed(feed):\n # config exist?\n configfile_path = confighome+\"config\"\n print(\"::checking for config\")\n if fileAccessible(configfile_path,'r'):\n print(\"::reading config\")\n appendFeed(feed,configfile_path)\n elif fileAccessible(configfile_path,'w'):\n createNewConfig(feed,configfile_path)\n else:\n print(\"::unable to read\")", "def get_news(rss_feed):\r\n\r\n class _CurrentData(object):\r\n \"\"\"Class holding a set of current attributes.\"\"\"\r\n item = None\r\n text = None\r\n\r\n def _start_element_handler(name, attrs):\r\n \"\"\"Handle XML start-elements.\"\"\"\r\n if name == 'item':\r\n # Allocate a new item.\r\n current.item = NewsItem()\r\n\r\n def _end_element_handler(name):\r\n \"\"\"Handle XML end-elements.\"\"\"\r\n if name == 'item':\r\n news_items.append(current.item)\r\n elif name in ('title', 'description', 'link', 'category'):\r\n try:\r\n setattr(current.item, name, current.text)\r\n except AttributeError:\r\n # The parser has run into a non-news item.\r\n 
pass\r\n\r\n def _char_data_handler(data):\r\n \"\"\"Handle XML element character data.\"\"\"\r\n current.text = data\r\n\r\n news_items = list()\r\n current = _CurrentData()\r\n\r\n parser = expat.ParserCreate()\r\n parser.StartElementHandler = _start_element_handler\r\n parser.EndElementHandler = _end_element_handler\r\n parser.CharacterDataHandler = _char_data_handler\r\n\r\n news_handle = urllib2.urlopen(rss_feed)\r\n xml_data = news_handle.read()\r\n \r\n parser.Parse(xml_data)\r\n\r\n return news_items", "def get_feeds():\n feeds = {}\n for _configuration_key, _configuration in blogs.all():\n if not _configuration.use_generic_feeds:\n continue\n\n class EntryFeed(Feed):\n configuration = _configuration\n configuration_key = _configuration_key\n\n title_template = _configuration.feed_title_template_name\n description_template = \\\n _configuration.feed_description_template_name\n\n feed_type = feedgenerator.Rss201rev2Feed\n\n def get_site(self):\n if not hasattr(self, '_current_site'):\n self._current_site = Site.objects.get_current()\n return self._current_site\n\n def title(self):\n if self.configuration.feed_title is not None:\n return self.configuration.feed_title\n return self.get_site().name\n \n def link(self):\n if self.configuration.feed_link is not None:\n return self.configuration.feed_link\n return \"http://%s/\" % (self.get_site().domain)\n \n def description(self):\n if self.configuration.feed_description is not None:\n return self.configuration.feed_description\n return \"Latest entries on %s\" % self.get_site().name\n \n def items(self):\n items = self.configuration.model.live.all()\n return items[:self.configuration.feed_limit]\n \n def item_pubdate(self, obj):\n return obj.pub_date\n\n def item_link(self, obj):\n return self.configuration.get_entry_absolute_url(obj)\n\n if _configuration.feed_format == feed_formats.ATOM:\n # Alter the class to support Atom feeds instead of RSS.\n EntryFeed.feed_type = feedgenerator.Atom1Feed\n EntryFeed.subtitle = EntryFeed.description\n\n feeds[_configuration_key] = EntryFeed\n return feeds", "def get_feed_entries_task():\n get_feed_entries()\n logger.info(\"Entries for Feed\")", "def update_handler(sender, update, **kwargs):\n\n feeds = Feed.objects.filter(feed_url=sender.topic)\n\n for feed in feeds:\n for entry in update.entries:\n r = requests.get(entry['link'])\n\n kippt = feed.created_by.kippt_client()\n\n clip = kippt.clips(params={'url': r.url})\n\n if clip['meta']['total_count'] == 0:\n if feed.list_id:\n list_id = feed.list_id\n else:\n list_id = feed.created_by.list_id\n\n kippt.clips.create(\n r.url,\n list_id,\n title=entry['title'],\n notes=entry['summary']\n )", "def test_feed_subclassing(self):\n moksha.feed_cache = FakeCache()\n class MyFeed(Feed):\n url = 'http://lewk.org/rss'\n feed = MyFeed()\n assert feed.url == 'http://lewk.org/rss'\n assert feed.num_entries() > 0\n for entry in feed.iterentries():\n pass\n for entry in feed.get_entries():\n pass", "def add_talks_from_rss(self, feed_url):\r\n plugin = self.plugman.get_plugin_by_name(\"Rss FeedParser\", \"Importer\")\r\n feedparser = plugin.plugin_object\r\n presentations = feedparser.get_presentations(feed_url)\r\n\r\n if presentations:\r\n for presentation in presentations:\r\n talk = Presentation(presentation[\"Title\"],\r\n presentation[\"Speaker\"],\r\n presentation[\"Abstract\"], # Description\r\n presentation[\"Level\"],\r\n presentation[\"Event\"],\r\n presentation[\"Room\"],\r\n presentation[\"Time\"],\r\n presentation[\"Time\"])\r\n 
self.insert_presentation(talk)\r\n\r\n else:\r\n log.info(\"RSS: No data found.\")", "def test_rss_added(self):\r\n body_str = \"application/rss+xml\"\r\n res = self.app.get('/recent')\r\n\r\n self.assertEqual(\r\n res.status,\r\n \"200 OK\",\r\n msg='recent status is 200, ' + res.status)\r\n self.assertTrue(\r\n body_str in res.body,\r\n msg=\"Request should contain rss str: \" + res.body)", "def test_feed(app, status, warning):\n app.build()\n assert app.statuscode == 0\n\n feed_path = app.outdir / \"blog/atom.xml\"\n assert (feed_path).exists()\n\n with feed_path.open() as feed_opened:\n feed_tree = lxml.etree.parse(feed_opened)\n entries = feed_tree.findall(\"{http://www.w3.org/2005/Atom}entry\")\n assert len(entries) == 2\n\n entry = entries[0]\n title = entry.find(\"{http://www.w3.org/2005/Atom}title\")\n assert title.text == \"Foo Post Title\"\n summary = entry.find(\"{http://www.w3.org/2005/Atom}summary\")\n assert summary.text == \"Foo post description with link.\"\n categories = entry.findall(\"{http://www.w3.org/2005/Atom}category\")\n assert len(categories) == 2\n assert categories[0].attrib[\"label\"] == \"BarTag\"\n assert categories[0].attrib[\"term\"] == \"BarTag\"\n assert categories[1].attrib[\"label\"] == \"Foo Tag\"\n assert categories[1].attrib[\"term\"] == \"FooTag\"\n content = entry.find(\"{http://www.w3.org/2005/Atom}content\")\n assert \"Foo post content.\" in content.text\n update_time = entry.find(\"{http://www.w3.org/2005/Atom}updated\")\n first_entry_date = datetime.strptime(update_time.text, POST_DATETIME_FMT)\n\n empty_entry = entries[1]\n title = empty_entry.find(\"{http://www.w3.org/2005/Atom}title\")\n assert title.text == \"Foo Empty Post\"\n summary = empty_entry.find(\"{http://www.w3.org/2005/Atom}summary\")\n assert summary is None\n categories = empty_entry.findall(\"{http://www.w3.org/2005/Atom}category\")\n assert len(categories) == 0\n content = empty_entry.find(\"{http://www.w3.org/2005/Atom}content\")\n assert 'id=\"foo-empty-post\"' in content.text\n update_time = empty_entry.find(\"{http://www.w3.org/2005/Atom}updated\")\n second_entry_date = datetime.strptime(update_time.text, POST_DATETIME_FMT)\n\n # check order of post based on their dates\n assert first_entry_date > second_entry_date\n\n social_path = app.outdir / \"blog/social.xml\"\n assert (social_path).exists()\n\n with social_path.open() as social_opened:\n social_tree = lxml.etree.parse(social_opened)\n social_entries = social_tree.findall(\"{http://www.w3.org/2005/Atom}entry\")\n assert len(social_entries) == len(entries)\n\n social_entry = social_entries[0]\n title = social_entry.find(\"{http://www.w3.org/2005/Atom}title\")\n assert title.text == \"Foo Post Title\"\n summary = social_entry.find(\"{http://www.w3.org/2005/Atom}summary\")\n assert summary.text == \"Foo post description with link.\"\n categories = social_entry.findall(\"{http://www.w3.org/2005/Atom}category\")\n assert len(categories) == 2\n assert categories[0].attrib[\"label\"] == \"BarTag\"\n assert categories[1].attrib[\"label\"] == \"Foo Tag\"\n content = social_entry.find(\"{http://www.w3.org/2005/Atom}content\")\n assert \"Foo Post Title\" in content.text", "def on_button_click(self):\r\n rss = self.feedURLEdit.text()\r\n feed = feedparser.parse(str(rss))\r\n\r\n website = feed[\"feed\"][\"title\"]\r\n for key in feed[\"entries\"]:\r\n title = key[\"title\"]\r\n link = key[\"link\"]\r\n summary = key[\"summary\"]\r\n self.data.append([title, website, summary, link])\r\n\r\n self.rssModel.update(self.data)\r\n 
self.rssTable.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)", "def __add_entries(entries, feed):\n\n for entry in entries:\n try:\n # If there is entry with such title in this feed\n Entry.objects.get(title=entry.title, feed=feed)\n continue\n except Entry.DoesNotExist:\n pass\n\n # Try to find another entries with such title\n e = Entry.objects.filter(title=entry.title)\n # If found\n if len(e) != 0:\n e = e[0]\n # Copy all containing\n entry_obj = Entry(title=e.title,\n description=e.description,\n entry=e.entry, feed=feed)\n entry_obj.save()\n # Or create new Entry from scratch\n else:\n entry_name = entry.title + '.html'\n # If bad link or entry name\n try:\n urlretrieve(entry.link, entry_name)\n\n entry_file = open(entry_name)\n entry_file = File(entry_file)\n\n entry_obj = Entry(title=entry.title,\n description=entry.description,\n entry=entry_file, feed=feed)\n entry_obj.save()\n\n os.remove(entry_name)\n except:\n # Go to next entry\n continue", "def article_extractor(rss_feed_link):\n user_agent = {\"user-agent\": \"Mozilla/5.0 (Windows NT 6.2; Win64;\\\n x64; rv:16.0.1) Gecko/20121011 Firefox/16.0.1\"}\n try:\n feed = requests.get(rss_feed_link, headers=user_agent)\n except requests.exceptions.ConnectionError:\n print(\"No internet connection\")\n exit()\n\n dirty_content = BeautifulSoup(feed.text, \"xml\")\n return dirty_content", "def refresh_all(self) -> None:\n self._update_thread.force_refresh_folder(self.feed_cache)", "def iter_feed(gd_client):\n feed = gd_client.GetContactsFeed()\n while feed:\n for entry in feed.entry:\n yield entry\n # Check whether there is another page and if yes\n next_link = feed.GetNextLink()\n feed = None\n if next_link:\n feed = gd_client.GetContactsFeed(uri=next_link.href)", "def feed(self):\n HTMLParser.feed(self, self.dirty_html)", "def _retrieveFeed(self):\n url = self.url\n if url!='':\n self._last_update_time_in_minutes = time.time()/60\n self._last_update_time = DateTime()\n d = feedparser.parse(url)\n if getattr(d, 'bozo', 0) == 1 and not isinstance(d.get('bozo_exception'),\n ACCEPTED_FEEDPARSER_EXCEPTIONS):\n self._loaded = True # we tried at least but have a failed load\n self._failed = True\n return False\n self._title = d.feed.title\n self._siteurl = d.feed.link\n self._items = []\n for item in d['items']:\n try:\n link = item.links[0]['href']\n itemdict = {\n 'title': item.title,\n 'url': link,\n 'summary': item.get('description', ''),\n }\n if hasattr(item, \"updated\"):\n try:\n itemdict['updated'] = DateTime(item.updated)\n except DateTimeError:\n # It's okay to drop it because in the\n # template, this is checked with\n # ``exists:``\n pass\n except AttributeError:\n continue\n self._items.append(itemdict)\n self._loaded = True\n self._failed = False\n return True\n self._loaded = True\n self._failed = True # no url set means failed\n return False # no url set, although that actually should not really happen", "def parse_medium_rss_export(rss_file):\n\n rss_file.seek(0)\n root = etree.parse(rss_file).getroot()\n items = root.find(\"channel\").findall(\"item\")\n for item in items:\n url = item.find(\"link\").text\n title = item.find(\"title\").text.strip()\n ts_str = item.find(\"pubDate\").text\n time = datetime.strptime(ts_str, \"%a, %d %b %Y %H:%M:%S %Z\")\n \n yield {\n 'url': url,\n 'timestamp': str(time.timestamp()),\n 'title': title or None,\n 'tags': '',\n 'sources': [rss_file.name],\n }", "def __writeRSS2file(inputitems):\n feed_items = []\n for item in inputitems:\n print \"appending... 
\", item[1]\n feed_items.append(RSS2format(item[1]))\n rss = PyRSS2Gen.RSS2(\n title = \"A File-based RSS Feed Generator\",\n link = \"lphiri.cs.uct.ac.za/simplyct\",\n description = \"A File-based RSS Feed Generator\",\n lastBuildDate = datetime.utcnow(),\n items = feed_items\n )\n print feed_items\n rss.write_xml(open(\"new-simplyctrss2.xml\", \"w\"))\n print \"END: FEED GENERATOR[WRITING]: \", time.time()", "async def _scan(self, url: str, parent: str):\n\n Reporter.scan(parent, url)\n try:\n res = await self.session.get(url)\n except aiohttp.ClientError as e:\n Reporter.error(parent, url, e)\n return\n\n if res.status >= 400:\n Reporter.broken(parent, url, res.status)\n return\n\n for link in await self._find_links(res):\n if link not in self.visited:\n self.visited.add(link)\n self.q.put_nowait((link, url))", "def get_rss_links(url):\n\n import re\n import requests \n\n # Use the requests module to get page source\n page_source = requests.get(url)\n\n # Find all of the RSS links according to some pattern.\n # The pattern that is searched for can be changed, and will probably\n # be site specific. You may also want to look for multiple patterns.\n # We use set() here to eliminate duplicate links. \n\n return set(re.findall(r'http.*\\.xml', page_source.text))" ]
[ "0.7721408", "0.7200292", "0.6963098", "0.67966145", "0.67705613", "0.6618283", "0.6454477", "0.6439002", "0.6414561", "0.63198656", "0.62931746", "0.6283974", "0.6249403", "0.62478334", "0.62359893", "0.6225703", "0.6203846", "0.6194489", "0.6180839", "0.6162735", "0.6159219", "0.61492455", "0.6148878", "0.61383635", "0.61282915", "0.61046094", "0.603862", "0.60312885", "0.6005676", "0.5992563", "0.59382915", "0.593058", "0.59235907", "0.58883756", "0.5887721", "0.58820176", "0.5843492", "0.58247054", "0.5813597", "0.5801687", "0.5796318", "0.5785903", "0.5761476", "0.5755789", "0.57463527", "0.5740552", "0.57397383", "0.57393956", "0.57220674", "0.57141644", "0.5709192", "0.56978333", "0.56935805", "0.5687803", "0.5676066", "0.5659735", "0.5656119", "0.56306815", "0.56138307", "0.5602647", "0.5601682", "0.5588675", "0.5582147", "0.55668265", "0.556524", "0.5551567", "0.55387604", "0.5536515", "0.55273885", "0.55245155", "0.55230486", "0.5520396", "0.55119795", "0.54982984", "0.5491077", "0.5488211", "0.5481909", "0.5480239", "0.5477944", "0.54680455", "0.5465082", "0.5464397", "0.5453885", "0.5451659", "0.54499376", "0.54416245", "0.5438593", "0.5435099", "0.5417965", "0.5404914", "0.53763187", "0.5375266", "0.53728926", "0.53701854", "0.53613997", "0.53323835", "0.5332167", "0.53315836", "0.53272223", "0.53163", "0.5304086" ]
0.0
-1
Waits for the running transcription processes to end (2 min intervals). \n Then deletes everything in the 'podcasts' folder, parses all transcripts, and updates the databases
def resetScript(dbConnection, maxConcurrent):
    while (Tools.numRunningProcesses() != 0):  # wait for the transcriptions to end. Pings every 2 mins
        time.sleep(120)
    emptyPodcastFolder = Tools.cleanupFolder("podcasts")
    DatabaseInteract.refreshDatabase(dbConnection)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def task_queue_podcasts():\n data = get_task_arguments()\n user_uid = data[\"user_uid\"]\n\n client = google.cloud.storage.Client()\n bucket = client.get_bucket(settings.PODCAST_STORAGE_BUCKET)\n podcasts = Podcast.get_user_podcasts(user_uid)\n for podcast in podcasts:\n old_entries = [entry for entry in podcast.feed.entries\n if entry.published + datetime.timedelta(settings.EPISODE_EXPIRATION_DAYS) <\n datetime.datetime.utcnow()]\n\n for old_entry in old_entries:\n link = old_entry.link\n path = urllib.parse.urlparse(link).path\n bucket_relative_path = os.path.sep.join(path.split(os.path.sep)[2:])\n blob = bucket.blob(bucket_relative_path)\n blob.delete()\n podcast.feed.remove(old_entry)\n podcast.save()\n\n # determine if the podcast has been used in recent enough time\n if podcast.last_accessed + datetime.timedelta(settings.PODCAST_EXPIRATION_DAYS) < \\\n datetime.datetime.utcnow():\n podcast.delete()\n else:\n add_task(url_for(\"task_recursive_download_podcast\"),\n {\"user_uid\": user_uid, \"podcast_id\": podcast.id})\n return OK_RESPONSE", "def transcribe_proc():\n while True:\n # Get result of transcription\n transcribe_result = transcriber.transcribe_stream(\n audio_stream(), sample_rate, sample_width, channels\n )\n\n _LOGGER.debug(\"Transcription result: %s\", transcribe_result)\n\n transcribe_result = transcribe_result or Transcription.empty()\n transcribe_dict = dataclasses.asdict(transcribe_result)\n transcribe_dict[\"timeout\"] = is_timeout\n\n print_json(transcribe_dict)\n transcription_printed.set()", "def cleanup(self):\n self.all_wav_to_mp3()\n self.past_songs_db.close()\n self.move_tracks_to_music_folder( )\n self.delete_leftovers()\n print \"Cleanup finished\"", "def parseUpload(dbconnection, fileName):\n nhContent = ParseText.nohupTranscriptionContent(fileName)\n count = 0\n while count < len(nhContent[0]):\n try:\n rtf = nhContent[0][count]\n transcription = nhContent[1][count].replace(\"'\", \"''\").replace(\"_\", \"\")\n dbID = nhContent[2][count].replace(\".\", \"\")\n duration = nhContent[3][count]\n DatabaseInteract.insertTranscription(dbconnection, rtf, transcription, duration, dbID)\n count += 1\n except:\n print(\"couldnt upload one at index \" + str(count))\n count += 1", "def transcribeAll(service, url, fileName):\n if(service == \"omny.fm\"):\n url = url.replace(\".mp3\",\"\") + \".mp3\"\n subprocess.Popen(\"wget -c -O ./podcasts/\" + fileName + \".mp3 \" + url + \" && sleep 40 && ffmpeg -i ./podcasts/\"\n + fileName + \".mp3 -acodec pcm_s16le -ac 1 -ar 8000 ./podcasts/\" + fileName + \".wav && sleep 10 && rm ./podcasts/\" \n + fileName + \".mp3 && nohup ./online2-wav-nnet3-latgen-faster --online=false --do-endpointing=false \"\n + \"--frame-subsampling-factor=3 --config=online.conf --max-mem=2000000000 --max-active=7000 --beam=15.0 --lattice-beam=6.0 \"\n + \"--acoustic-scale=1.0 --word-symbol-table=words.txt final.mdl HCLG.fst 'ark:echo utterance-id\" + fileName \n + \" utterance-id\" + fileName + \"|' 'scp:echo utterance-id\" + fileName + \" ./podcasts/\" + fileName + \".wav|' 'ark:/dev/null' &\", shell=True)", "def wait(self):\n num_pings = 0\n # Some streams seem to start fine with up to 4 pings before beginning download?\n # More investigation is needed\n max_pings = 1 + self._pingouts\n # timeout after 1 minute\n timeout = datetime.datetime.now() + datetime.timedelta(minutes=1)\n try:\n for line in self._process.stderr:\n # TODO: add mpegts or other variants depending on the container settings? 
or no?\n # if \"Output #0, mp4\" in line:\n if \"Output #0\" in line:\n self._process.communicate()\n self.move_to_dest()\n self._pingouts = 0\n break\n elif \"HandleCtrl, Ping\" in line:\n num_pings += 1\n if num_pings > max_pings:\n # The main issue with this is that the slain processes will not have their files moved\n # But I think this is preferable to the other solutions I've come up with.\n # For future reference, those were:\n #\n # 1) Sending SIGINT then continuing to read stderr until it exited (sometimes it doesn't)\n # 2) Sending SIGINT, storing a reference to the process, then restarting the download.\n # This prevents the process from being garbage collected until the Watcher is\n # 3) Sending SIGINT, then storing info about src and dest paths for the stopped download.\n # If a reference to the process is NOT stored, there's no way to be sure it has finished writing\n # (if it's writing at all). The only way was to give them a grace period and then just start\n # moving, but this adds undesirable time to the cleanup phase, when we may want to restart\n # a falsely completed Watcher asap.\n # 4) Just moving the file straightaway. This is obviously bad since ffmpeg takes a few moments to\n # finish.\n # NOTE: only option #1 was actually tried, the others were partially written before being\n # abandoned as their problems became clear\n #\n # Two additional options exist (not mutually exclusive):\n # 1) Passing the dead processes off to a queue and having another thread clean up.\n # 2) Having regular maintenance sweep the active folder and move files it can be sure are done\n # to their proper folders.\n #\n # I *probably* need to use 1) eventually, especially once I figure out how to actually end\n # stuck processes without killing the parent. But it requires a lot more code.\n # Until then let's just see how this works.\n #\n # When that time does come, a Downloader copy constructor may be useful.\n download_logger.debug(\"Download pinged {} times: Stopping\".format(num_pings))\n self._pingouts += 1\n self.stop()\n\n # close stderr to force the loop to exit\n time.sleep(0.1)\n self._process.stderr.close()\n time.sleep(0.1)\n # process will be garbage collected when the next one is started, or the Watcher dies\n # self._process = None\n # This *should* work for newer builds of FFmpeg without librtmp.\n # Only question is whether 1 minute is too long (or too short).\n # UPDATE: Why doesn't this ever seem to work?\n # is it because FFmpeg freezes output and hangs now? 
so we're never getting another line to iterate over\n # elif datetime.datetime.now() > timeout:\n # download_logger.debug(\"Download of {} timed out\".format(self.outfile))\n # self.stop()\n # time.sleep(0.1)\n # self._process.stderr.close()\n # time.sleep(0.1)\n else:\n time.sleep(0.2)\n\n except ValueError:\n download_logger.debug('ffmpeg stderr closed unexpectedly')\n\n # Is it possible for the process to end prematurely?\n return self._process.returncode", "def _cleanUp(self):\r\n limit = datetime.now() - timedelta(seconds=self._timeout)\r\n\r\n toClean = [msg for msg in self._incompleteMsgs if msg.older(limit)]\r\n\r\n if toClean:\r\n for msg in toClean:\r\n self._incompleteMsgs.remove(msg)\r\n\r\n log.msg('{0} incomplete messages have been dropped '\r\n 'from assembler.'.format(len(toClean)))\r\n\r\n toClean = [uri for uri, (_, timestamp) in self._binaries.iteritems()\r\n if timestamp < limit]\r\n\r\n if toClean:\r\n for uri in toClean:\r\n del self._binaries[uri]\r\n\r\n log.msg('{0} unused binaries have been dropped '\r\n 'from assembler.'.format(len(toClean)))", "def Main(self):\n while not self.IsStopping():\n if not self.args.truncate_interval:\n # Truncating is disabled. But we should keep the main thread running,\n # or else PluginSandbox will assume the plugin has crashed, and will\n # take the plugin down.\n # TODO(kitching): Consider altering PluginSandbox to allow Main to\n # return some particular value which signifies \"I am\n # exiting of my own free will and I should be allowed to\n # continue running normally.\"\n self.Sleep(100)\n continue\n\n self.info('Truncating database...')\n self.buffer_file.Truncate()\n self.info('Truncating complete. Sleeping %d secs...',\n self.args.truncate_interval)\n self.Sleep(self.args.truncate_interval)", "def finish(self):\n old_message = None\n cooldown = 5\n while not self.queue_manager.check_finished():\n status = self.get_upload_status(0)\n datagen_workers = f\"{status.sets_being_generated} data generators, \"\n msg = f\"Waiting for {datagen_workers}{status.sets_being_loaded} uploads to finish\"\n if old_message != msg or cooldown < 1:\n old_message = msg\n self.logger.info(msg)\n self.update_running_totals()\n self.print_running_totals()\n cooldown = 5\n else:\n cooldown -= 1\n time.sleep(WAIT_TIME)\n\n self.log_failures()\n\n self.logger.info(\"\")\n self.logger.info(\" == Results == \")\n self.update_running_totals()\n self.print_running_totals()\n elapsed = format_duration(timedelta(seconds=time.time() - self.start_time))\n\n if self.run_until.sobject_name:\n result_msg = f\"{self.sobject_counts[self.run_until.sobject_name].successes} {self.run_until.sobject_name} records and associated records\"\n else:\n result_msg = f\"{self.run_until.target:,} iterations\"\n\n self.logger.info(f\"☃ Snowfakery created {result_msg} in {elapsed}.\")", "def finish_stager_tasks(self):\n\n update_files = {}\n messages = []\n while not self.finished_queue.empty():\n file = self.finished_queue.get()\n update_files[file['content_id']] = {'status': ContentStatus.AVAILABLE,\n 'pfn_size': file['pfn_size'],\n 'pfn': file['pfn']}\n msg = {'event_type': 'FILE_AVAILABLE',\n 'payload': {'scope': file['scope'],\n 'name': file['name'],\n 'startEvent': file['min_id'],\n 'lastEvent': file['max_id'],\n 'pfn': file['pfn']},\n 'created_at': date_to_str(datetime.datetime.utcnow())}\n messages.append(msg)\n\n self.logger.info('Got %s staged outputs' % len(update_files))\n update_contents_by_id(update_files)\n\n if self.send_messaging:\n for msg in messages:\n 
self.messaging_queue.put(msg)", "def cleaner():\n session = Session()\n while True:\n _database_operations.purge_old_jobs(session)\n time.sleep(30)", "def main(self):\n no_posts_found = 0\n while True:\n print(f\"...Searching for posts to cleanse..\")\n for post in self.reddit.subreddit(self.subreddit).stream.submissions(pause_after=1):\n if post is None:\n no_posts_found += 1\n print(f\".....Will run through {self.subreddit} one final time\")\n break\n else:\n if post.locked:\n post.mod.remove()\n else:\n post.mod.lock()\n post.mod.remove()\n print(f\"Post removed: {post.id}\")\n if no_posts_found == 2:\n print(f\"{self.subreddit} has been successfully cleansed.\")\n break\n print(f\"...Taking a small break! Be back in {self.delay} seconds\")\n time.sleep(self.delay)", "def process_transcript(transcript_label):\n transcript_key = f\"{transcript_label}.json\"\n\n # Load Transcribe output from S3.\n raw_transcript = get_transcribe_output(transcript_key)\n\n # Parse to assign speaker parts.\n speaker_parts = assign_speakers(raw_transcript)\n\n # Identify Karen and Georgia.\n assigned = karen_or_georgia(speaker_parts)\n\n # Update the full transcript.\n build_transcript(assigned)\n\n # Upload the latest transcript to S3.\n s3 = boto3.resource(\"s3\")\n s3.Bucket(os.getenv(\"S3_BUCKET\")).upload_file(\"main_transcript.txt\", \"main_transcript.txt\")", "def clean_old_data():\n logger.info('Cleaning standalone files on disk...')\n for absolute_path in glob.glob(MEDIA_URL + '*'):\n file_name = os.path.basename(absolute_path)\n try:\n relative_path = os.path.join(AUDIOS_URL, file_name)\n audio = Audio.objects.get(filename=relative_path)\n if audio.get_type() == 'episode':\n try:\n # If there are inactive audios on its being\n for e in audio.podcast.episode_set.exclude(pk=audio.podcast.active_episode.pk):\n if not e.is_active():\n logger.info('Inactive audio found in podcast set. Erasing files.')\n e.delete_files()\n except Exception, e:\n logger.exception(e.message)\n except ObjectDoesNotExist, e:\n logger.info('A file with no audio registered in database')\n if os.path.isfile(relative_path):\n logger.info('Erasing: %s' % relative_path)\n os.remove(relative_path)\n logger.info('... 
Done.')", "async def clean_up():\n # Load Settings\n settings = await fetch_settings()\n\n try:\n if settings[\"previous version\"] == settings[\"version\"]:\n await upgrade()\n except KeyError:\n await upgrade()\n\n old_version = settings[\"previous version\"]\n new_version = settings[\"version\"]\n\n if float(new_version) <= 1.2:\n # Deleting repeats\n connection = await connect()\n repeats = await connection.fetch(f\"\"\"\n SELECT * FROM \"{settings[\"table\"]}\"\n WHERE \"UID\" IN (SELECT \"UID\" FROM \"{settings[\"table\"]}\" GROUP BY \"UID\" HAVING COUNT(*) > 1);\n \"\"\")\n\n uniques = {}\n removed = []\n\n for article in repeats:\n if article[\"UID\"] in uniques.keys():\n removed.append(uniques[article[\"UID\"]][\"ID\"])\n uniques[article[\"UID\"]] = article\n\n for article_id in removed:\n await connection.execute(f\"\"\"\n DELETE FROM \"{settings[\"table\"]}\"\n WHERE \"ID\" = {article_id};\n \"\"\")\n\n # Fixing IDs\n all_articles = await connection.fetch(f\"\"\"\n SELECT * FROM \"{settings[\"table\"]}\";\n \"\"\")\n\n transaction = connection.transaction()\n await transaction.start()\n\n try:\n # Empty Table\n await connection.execute(f\"\"\"\n DELETE FROM \"{settings[\"table\"]}\";\n \"\"\")\n\n # Reset ID Column\n await connection.execute(f\"\"\"\n ALTER SEQUENCE \"{settings[\"table\"]}_ID_seq\"\n RESTART WITH 1\n \"\"\")\n\n # Reinsert Articles\n for article in all_articles:\n text = unquote(article[\"Text\"].replace(\"'\", \"''\"))\n\n date_released = article[\"dateReleased\"]\n if date_released.year >= 3300:\n date_released = date_released.replace(year=(article[\"dateReleased\"].year - GAME_YEAR_OFFSET))\n\n title = article[\"Title\"].strip().replace(\"'\", \"''\")\n if title == \"\" or title is None:\n title = \"No Title Available\"\n\n await connection.execute(f\"\"\"\n INSERT INTO \"{settings[\"table\"]}\" (\"Title\", \"UID\", \"dateReleased\", \"dateAdded\", \"Text\")\n VALUES ($1, $2, $3, $4, $5);\n \"\"\", title, article[\"UID\"], date_released, article[\"dateAdded\"], text)\n except Exception as e:\n print(\"\\n\\nProcess failed due to exception. 
Reverting.\\n\\n\")\n await transaction.rollback()\n raise e\n\n else:\n await transaction.commit()\n\n await connection.close()\n\n settings = await fetch_settings()\n settings[\"previous version\"] = settings[\"version\"]\n\n with open(\"Settings.json\", \"w\") as file:\n json.dump(settings, file, indent=2)", "def min_cleanup(self):\n self.past_songs_db.close()", "def runAutoCheck(dbConnection, maxConcurrent):\n # checks if any shows are pending.\n fileContent = DatabaseInteract.checkPre(dbConnection)\n if(len(fileContent) > 0 and Tools.numRunningProcesses() < maxConcurrent):\n cursor = dbConnection.cursor()\n cursor.execute(\"UPDATE transcriptions SET pending = TRUE WHERE id = '\" + str(fileContent[1]) + \"';\")\n dbConnection.commit()\n cursor.close()\n url = fileContent[0]\n indexID = str(fileContent[1]) # get the ID instead of the filename\n service = str(fileContent[3])\n # podcastName = fileContent[2]\n Tools.transcribeAll(service, url, indexID) # download the mp3 will print when done", "async def process(self, timeout=60):\n\n previous_date = self.previous_date()\n new_date = previous_date\n last_sent_message_date = previous_date\n now = pendulum.now('UTC')\n\n self.log.info(\"Begining processing feed %s, previous date %s\",\n self.name, previous_date)\n\n for entry in await self.fetch_and_parse(timeout):\n\n pubdate = dateutil.parser.parse(entry.published, tzinfos=rssalertbot.BOGUS_TIMEZONES)\n entry.published = pendulum.from_timestamp(pubdate.timestamp())\n # also save a prettified string format\n entry.datestring = self.format_timestamp_local(entry.published)\n\n # skip anything that's stale\n if entry.published <= previous_date:\n continue\n\n event_id = md5((entry.title + entry.description).encode()).hexdigest()\n last_sent = self.storage.load_event(self.feed, event_id)\n re_alert = self.cfg.get('re_alert', rssalertbot.RE_ALERT_DEFAULT)\n should_delete_message = False\n\n if entry.published > now:\n if last_sent and now < last_sent.add(hours=re_alert):\n continue\n self.storage.save_event(self.feed, event_id, now)\n else:\n if entry.published > new_date:\n new_date = entry.published\n should_delete_message = last_sent\n\n self.log.debug(\"Found new entry %s\", entry.published)\n\n # alert on it\n await self.alert(entry)\n if new_date > last_sent_message_date:\n self.storage.save_date(self.feed, new_date)\n last_sent_message_date = new_date\n\n if should_delete_message:\n self.log.debug(f\"Deleting stored date for message {event_id}\")\n self.storage.delete_event(self.feed, event_id)\n\n self.log.info(\"End processing feed %s, previous date %s\", self.name, new_date)", "def start_queue(self):\n working_list = self.generate_tweets_queue()\n tweet_list = working_list[\"tweets\"]\n padding_list = working_list[\"padding\"]\n\n for tweet in tweet_list:\n counter = PADDING_RATIO\n # main tweet\n post = self.tdata.post_update(tweet[1])\n if post:\n print \"\\\"\" + tweet[1] + \"\\\" tweet updated successfully.\"\n self.tdata.send_tweet(tweet[0], self.user_data[\"uid\"])\n else:\n print \"Failed to send... exiting.\"\n sys.exit(1)\n # padding updates\n while(counter > 0):\n sleep(BASE_DELAY)\n pad_tweet = padding_list.pop()\n post = self.tdata.post_update(pad_tweet[1])\n if post:\n print \"\\\"\" + pad_tweet[1] + \"\\\" padding tweet updated successfully.\"\n self.tdata.send_padding_tweet(pad_tweet[0], self.user_data[\"uid\"])\n counter -= 1\n else:\n print \"Failed to update padding tweet... 
exiting.\"\n sys.exit(1)", "def __removing_loop(self) -> None:\r\n\r\n # repeat until stop flag is set\r\n while not self.__stopper.wait(self.CLEANUP_EXPIRED_INTERVAL):\r\n now = int(datetime.now(self.__tz).timestamp())\r\n log.debug('Removing...')\r\n\r\n # iterate through database and remove expired encounters\r\n for enc_id, despawn_time in self.__pokes_db.copy().items():\r\n if despawn_time - now < 5:\r\n del self.__pokes_db[enc_id]", "def run(self):\n logic.remove_movie_medias()\n (quote, movie_name) = get_quote_and_movie_name()\n print('Chosen movie: {}'.format(movie_name))\n movie_poster_url = get_movie_poster_url(movie_name)\n self.sending_process(quote, movie_poster_url, movie_name)\n logic.remove_movie_medias()\n sys.exit()", "def startScandir(self):\n while self.isAlive:\n files = self.getNewFiles(self.inbox)\n while len(files) > 0:\n for full_filename in files:\n try:\n self.workflow.processFile(full_filename, 'new')\n except:\n et, ev, tb = sys.exc_info()\n serviceconfig.logger.error('got exception during the processing of the new file \"%s\"\\n\"%s\"' % (full_filename, str(ev)))\n serviceconfig.logger.error('%s' % str(traceback.format_exception(et, ev, tb)))\n serviceconfig.sendMail('ERROR', 'File Processing FAILURE: %s' % str(et), 'Exception generated during the processing of the new file \"%s\":\\n%s\\n%s' % (full_filename, str(ev), ''.join(traceback.format_exception(et, ev, tb))))\n self.reportAction(full_filename, 'failure', str(et))\n files = self.getNewFiles(self.inbox)\n if self.timeout > 0:\n count = (self.timeout*60) / 10\n i = 0\n try:\n while self.isAlive:\n time.sleep(10)\n i = i+1\n if i >= count:\n break\n except:\n et, ev, tb = sys.exc_info()\n serviceconfig.logger.error('got Sleep exception \"%s\"' % str(ev))\n serviceconfig.logger.error('%s' % str(traceback.format_exception(et, ev, tb)))\n serviceconfig.sendMail('ERROR', 'Sleep Processing FAILURE: %s' % str(et), 'Exception generated during the sleep process:\\n%s\\n%s' % (str(ev), ''.join(traceback.format_exception(et, ev, tb))))\n else:\n self.isAlive = False\n serviceconfig.logger.info('No more files to process. 
Exiting...')", "def cleanup(self):\n log = logging.getLogger('mailman.runner')\n # Send SIGTERMs to all the child processes and wait for them all to\n # exit.\n for pid in self._kids:\n try:\n os.kill(pid, signal.SIGTERM)\n except OSError as error:\n if error.errno == errno.ESRCH:\n # The child has already exited.\n log.info('ESRCH on pid: %d', pid)\n # Wait for all the children to go away.\n while self._kids:\n try:\n pid, status = os.wait()\n self._kids.drop(pid)\n except OSError as error:\n if error.errno == errno.ECHILD:\n break\n elif error.errno == errno.EINTR:\n continue\n raise", "def clean_documents():\n start = datetime.now()\n for i, raw_filename in enumerate(os.listdir(RAW_DIR)):\n fullpath = os.path.join(RAW_DIR, raw_filename)\n if os.path.isfile(fullpath):\n print(\"Cleaning {0} {1}\".format(i, fullpath), file=stderr)\n try:\n with open(fullpath, \"r\") as f:\n text = f.read()\n text = clean(text)\n soup = BeautifulSoup(text, \"html.parser\")\n cleaned = visible_text(soup)\n score = germanwings_score(cleaned)\n if not score:\n print(\"not germanwings: {0}\".format(raw_filename))\n else:\n clean_filename = os.path.join(CLEAN_DIR, raw_filename)\n with open(clean_filename, \"w\") as f:\n f.write(cleaned.encode(\"ascii\", \"ignore\"))\n except Exception as exc:\n print(\"{0}: {1}\".format(fullpath, exc), file=stderr)\n end = datetime.now()\n print(\"Elapsed time to clean: {0}\".format(end - start), file=stderr)", "def _parse_transcription_file(self, root: str, name: str) -> None:\n trans_path = os.path.join(root, name)\n with open(trans_path, \"r\", encoding=\"utf-8\") as trans:\n # Each line has the form \"ID THE TARGET TRANSCRIPTION\"\n for line in trans:\n id_, transcript = line.split(maxsplit=1)\n dropped = self._process_audio(root, id_)\n if not dropped:\n self._process_transcript(transcript)", "def __cleanup(self, ttl_in_sec):\n ttl_in_ms = ttl_in_sec * 1000\n while True:\n logging.debug(\"cleanup action...\")\n current_ts = self.__current_timestamp_in_ms()\n self.lock.acquire()\n for key, value in self.orderedDict.items():\n if value[1] > current_ts - ttl_in_ms:\n break\n else:\n self.orderedDict.pop(key, None)\n self.lock.release()\n time.sleep(ttl_in_sec)", "def main_loop(bot):\n # Start looping\n i = 0\n bot.tick()\n for comment in bot.r_all.stream.comments():\n # Check if comment is and iambic pentameter\n done = bot.process_comment(comment)\n # If enough commebts have been processed, kill the procgram\n if done:\n exit()\n # Increment counter\n i += 1\n # Report periodically\n if i >= bot.options.report_every:\n # Print infos\n percent_length_removed = (bot.n_length_removed) / bot.options.report_every * 100\n print('Analyzed %d comments, ' % i +\n '%.2f%% too short/long, ' % percent_length_removed +\n 'found %d iambic pentameters ' % bot.n_pentameters_epoch +\n '(total: %d), ' % bot.n_pentameters +\n '%.1f comments/s' % (i / bot.tick()))\n sys.stdout.flush()\n # Sleep a bit\n time.sleep(bot.options.sleep_for) # Reset periodic counters\n # Reset periodic counters\n bot.n_length_removed = 0\n bot.n_pentameters_epoch = 0\n i = 0\n # Occasionally tweet a quatrain\n try:\n bot.tweet_quatrain()\n except Exception as e:\n print(\"Failed to tweet \" + str(e), file=sys.stderr)", "def parse_transcript(self):\n\t\t\n\t\toutput_text = tempfile.NamedTemporaryFile(mode = 'r')\n\t\twith tempfile.NamedTemporaryFile(delete=False) as input_text:\n\t\t\tinput_text.write(self.transcript_string.encode('utf-8'))\n\t\t\t#to write to the file, convert to utf-8; to use for jinja, convert it 
back to unicode\n\n\t\tos.popen(\"python vocab_resources/splitta/sbd.py -m vocab_resources/splitta/model_nb -t \" + input_text.name +\" -o \" + output_text.name)\n\t\tos.remove(input_text.name)\n\n\t\twith open(output_text.name) as parsed_text:\n\t\t\tsentence_index = {}\n\t\t\tfor index, sentence in enumerate(parsed_text):\n\t\t\t\tsentence = sentence.rstrip()\n\t\t\t\tsentence_index[index] = sentence\n\n\t\tsentence_index[len(sentence_index)] = \"Unable_to_find_matching_sentence\" #avoid outliers\n\t\tself.sentence_index = sentence_index", "def main():\n exit_if_already_started()\n while True:\n for timeframe in ['all', 'month', 'week']:\n subreddits = load_list('subs.txt')\n while subreddits:\n # Grab all images/comments from sub, remove from list\n parse_subreddit(subreddits.pop(0), timeframe)", "def podcast_download(self):\r\n warnings.filterwarnings(\"ignore\", category=UnicodeWarning)\r\n now = datetime.datetime.now()\r\n\r\n for podcast_file in self.podcast_list:\r\n published, name, link, title = podcast_file\r\n if self.podcast_list != []:\r\n line_file = (published + ';' + title + ';' + name + ';' + link).encode(\"utf-8\") \r\n if line_file in open(self.download_log).read():\r\n pass\r\n else:\r\n title = unicodedata.normalize('NFKD', title).encode('ascii', 'ignore')\r\n download_folder = os.path.join('downloads', title)\r\n if not os.path.exists(download_folder): \r\n os.makedirs(download_folder)\r\n try:\r\n published = str(parser.parse(published))[:10]\r\n except IOError as error:\r\n print 'Error' + (error) + ': File - ' + str(title)\r\n download_folder = os.path.join(download_folder, published)\r\n if not os.path.exists(download_folder): \r\n os.makedirs(download_folder)\r\n namefile_unicode = link[link.rfind('/')+1:]\r\n namefile_str = unicodedata.normalize('NFKD', \r\n namefile_unicode).encode('ascii', 'ignore')\r\n namefile_str = namefile_str.decode('utf-8', 'ignore').encode(\"utf-8\")\r\n if '.mp3' in namefile_str:\r\n len_name = namefile_str.index('.mp3')\r\n elif '.MP3' in namefile_str:\r\n len_name = namefile_str.index('.MP3')\r\n namefile_str = namefile_str[:len_name + 4]\r\n fileoutput = os.path.join(download_folder, namefile_str)\r\n name = unicodedata.normalize('NFKD', name).encode('ascii', 'ignore')\r\n print str(published) + '; ' + name\r\n ## downlink\r\n download_file(link, fileoutput) \r\n ## tagging\r\n mp3_tagging(fileoutput, podcast_file)\r\n ## write log\r\n write_file(self.download_log, line_file)\r\n end = datetime.datetime.now()\r\n print '\\r' + 'Download Time = ' + str(end-now) + '\\r'\r\n return None", "def convert_thread(self):\n thread_mp3 = Thread(target=self.convert_text)\n thread_mp3.setDaemon(True)\n thread_mp3.start()\n thread_words = Thread(target=self.find_5_words)\n thread_words.setDaemon(True)\n thread_words.start()\n thread_clean_5_words = Thread(target=self.clean_5_words)\n thread_clean_5_words.setDaemon(True)\n thread_clean_5_words.start()", "def cleanup(self):\n process_handler.terminate_root_and_child_processes(self._proc.pid)\n self._read_thread.join()\n if self._data_dir:\n shutil.rmtree(self._data_dir, ignore_errors=True)", "def _execute( self ):\n \n while True:\n \n try:\n file = self.toBeCopied.get_nowait()\n except Queue.Empty:\n return S_OK()\n \n gLogger.verbose( '%s - %s being processed' % ( file[ 'lfn' ], file[ 'pfn' ] ) )\n\n ### Upload file to SE and register it in DIRAC\n dirac = Dirac()\n initialTime = time.time()\n uploadStatus = dirac.addFile(file[ 'lfn' ], file[ 'pfn' ], self.CopyToSE)\n elapsedTime = time.time() - 
initialTime\n \n if uploadStatus['OK']:\n gLogger.info('File {} upload took {} s. Now deleting local file.'.format(file[ 'lfn' ], round(elapsedTime,2)))\n \n ### If file has metadata then register it in the respective dir.\n ### It is safe to re-register the meta data\n if 'metaData' in file :\n # register this metadata\n if file['metaData']:\n res = self.registerDirMetaData(file[ 'lfn' ], file['metaData'])\n if not res['OK']:\n ### If registering meta data failed, then finish the thread gracefully and go to next thread\n self.toBeCopied.task_done()\n continue\n\n else:\n gLogger.error('Meta Data for this dir (%s) was not found.' %(lpn))\n \n ### Now remove the file\n self.removeLocalFile(file[ 'pfn' ])\n \n else:\n gLogger.error('Failed to upload file (%s). Message is (%s)' %(file[ 'lfn' ], uploadStatus['Message']))\n\n # Used together with join !\n self.toBeCopied.task_done()", "def short():\n countneg = 0\n countpos = 0\n testset_id = 4\n\n testfiles = db.session.query(evaluation.Testfile).filter(evaluation.Testfile.testset_id==testset_id)\n print \"Number testfiles: %s\" % testfiles.count()\n for i, tf in enumerate(testfiles):\n if i % 100 == 0:\n print i\n with audioread.audio_open(tf.file.path.encode(\"utf-8\")) as f:\n duration = f.duration\n if duration < 60.0:\n if tf.file.negative:\n countneg+=1\n else:\n countpos+=1\n print \"Removing short duration file: %s (%s)\" % (tf.file.path.encode(\"utf-8\"), duration)\n cur = db.session.query(evaluation.Result).filter(evaluation.Result.testfile_id==tf.id)\n print \"%d results to remove\" % cur.count()\n cur.delete()\n db.session.query(evaluation.Testfile).filter(evaluation.Testfile.id==tf.id).delete()\n db.session.commit()\n testfiles = db.session.query(evaluation.Testfile).filter(evaluation.Testfile.testset_id==testset_id)\n print \"New number testfiles: %s\" % testfiles.count()\n print \"deleted negative: %s\" % countneg\n print \"deleted positive: %s\" % countpos", "def endTasks():\n __terminalState.bufferedReaderTask.cancel()", "async def _async_perform_unsubscribes(self) -> None:\n if not self._pending_unsubscribes:\n return\n\n topics = list(self._pending_unsubscribes)\n self._pending_unsubscribes = set()\n\n async with self._paho_lock:\n result, mid = await self.hass.async_add_executor_job(\n self._mqttc.unsubscribe, topics\n )\n _raise_on_error(result)\n for topic in topics:\n _LOGGER.debug(\"Unsubscribing from %s, mid: %s\", topic, mid)\n\n await self._wait_for_mid(mid)", "def cleanup(self) -> None:\n\n self._update_thread.requestInterruption()\n self._update_thread.schedule_update_event.set()\n if self._update_thread.wait(1) is False:\n logging.info(\"not enough time to stop thread 0.5\")\n self._save_feeds()\n self._sqlite_connection.close()", "def main():\n if __debug__:\n start_time = timeit.default_timer()\n\n at.delete_expired_tokens()\n\n if __debug__:\n end_time = timeit.default_timer()\n print(\"\\nProgram completed in {} \"\n \"seconds.\".format(end_time - start_time))", "def cleanup(self):\n logger.debug('Beginning cleanup ...')\n self.stop()\n\n #Clear subscriptions\n for sub, dev in self.subs.items():\n dev.clear_sub(sub)\n\n #Clear databases\n self.subs.clear()\n self.cmds.clear()\n\n #Remove pcaspy internals\n self._thread = None\n self.driver = None\n self.server = None\n logger.info('Cleanup finished')", "def processText(self, text: str, filename: str) :\n execution_time = 0.\n\n directory = os.path.join(self.execution_time_dir, AUDIO_DIR, self.getTTS().getName())\n make_dir(directory)\n 
time_for_generating_audio_fpath = os.path.join(directory, filename + \".txt\")\n \n audio_fpath = self.getTTS().getAudioPath(\n text=text, audio_dir=self.audio_dir, filename=filename)\n \n if self.recompute or not os.path.exists(audio_fpath):\n # print(audio_fpath)\n start_time = time.time()\n self.getTTS().generateAudio(text=text, audio_fpath=audio_fpath)\n save_execution_time(fpath=time_for_generating_audio_fpath, execution_time=time.time() - start_time)\n \n ## add execution time for generating audio\n execution_time += get_execution_time(\n fpath=time_for_generating_audio_fpath) \n \n transcription_dir = os.path.join(self.transcription_dir, self.getTTS().getName())\n \n transcriptions = {}\n for asr in self.asrs :\n directory = os.path.join(\n self.execution_time_dir, TRANSCRIPTION_DIR, self.getTTS().getName(), asr.getName())\n make_dir(directory)\n time_for_recognizing_audio_fpath = os.path.join(\n directory, filename + \".txt\")\n\n if self.recompute :\n start_time = time.time()\n # TODO: \n # change recognize audio -> input audio instead of fpath\n # audio = asr.loadAudio(audio_fpath=audio_fpath)\n # transcription = asr.recognizeAudio(audio=audio)\n # asr.saveTranscription(transcription_fpath, transcription)\n transcription = asr.recognizeAudio(audio_fpath=audio_fpath)\n asr.setTranscription(transcription)\n asr.saveTranscription(transcription_dir=transcription_dir, filename=filename)\n save_execution_time(fpath=time_for_recognizing_audio_fpath, execution_time=time.time() - start_time)\n \n transcription = asr.loadTranscription(\n transcription_dir=transcription_dir, filename=filename)\n num_retry = 0\n while transcription == \"\" and num_retry < self.max_num_retry :\n start_time = time.time()\n asr.recognizeAudio(audio_fpath=audio_fpath)\n asr.saveTranscription(\n transcription_dir=transcription_dir, filename=filename)\n save_execution_time(\n fpath=time_for_recognizing_audio_fpath, execution_time=time.time() - start_time)\n transcription = asr.loadTranscription(\n transcription_dir=transcription_dir, filename=filename)\n\n if asr.getName() == \"wit\" :\n random_number = float(random.randint(9, 47))/10.\n time.sleep(random_number)\n\n num_retry += 1\n\n transcriptions[asr.getName()] = preprocess_text(transcription)\n\n ## add execution time for generating audio\n execution_time += get_execution_time(\n fpath=time_for_recognizing_audio_fpath) \n \n\n cases = self.caseDeterminer(text, transcriptions)\n # if sum(cases.values()) == 0 :\n # print(text)\n # print(transcriptions[\"wav2vec2\"])\n # print(cases)\n # print()\n \n for asr_name, case in cases.items() :\n self.saveCase(self.case_dir, self.getTTS().getName(), asr_name, filename, str(case))\n\n # print(f\"Execution time: {execution_time}\")\n return cases, execution_time", "def re_process(self):\n rmtree(self.processed_dir)\n os.makedirs(self.processed_dir)\n self.process()\n\n print('Done!')", "def upload_cleanup(ip_addr, songs, delete_paths, tzinfo):\n log = logging.getLogger(__name__)\n files_attrs = [j[:3] for j in sorted((s.attrs for s in songs), key=lambda i: i[-1])]\n\n # Lock card to prevent host from making changes and copy helper Lua script.\n try:\n log.info('Preparing FlashAir card for changes.')\n initialize_upload(ip_addr, tzinfo)\n if delete_paths:\n log.info('Deleting %d file(s)/dir(s) on the FlashAir card.', len(delete_paths))\n delete_files_dirs(ip_addr, delete_paths)\n if songs:\n log.info('Uploading %d song(s).', len(songs))\n upload_files(ip_addr, files_attrs)\n except FlashAirURLTooLong:\n log.exception('Lua 
script path is too long for some reason???')\n except FlashAirNetworkError:\n raise # To be handled in caller.\n except FlashAirError:\n log.exception('Unexpected exception.')", "def cleanup():\n if config.get('global').get('no_cleanup'):\n return\n logging.info('Cleaning up temp directories')\n try:\n tmp_path = config.get('global').get('tmp_path')\n if os.path.exists(tmp_path):\n rmtree(tmp_path)\n except Exception as e:\n logging.error(format_debug(e))\n print_message('Error removing temp directories')\n\n try:\n archive_path = os.path.join(\n config.get('global').get('output_path'),\n 'script_archive',\n time.strftime(\"%Y-%m-%d-%I-%M\"))\n if not os.path.exists(archive_path):\n os.makedirs(archive_path)\n run_script_path = config.get('global').get('run_scripts_path')\n if os.path.exists(run_script_path):\n move(run_script_path, archive_path)\n except Exception as e:\n logging.error(format_debug(e))\n logging.error('Error archiving run_scripts directory')", "def cleanup(self):\n deletes = []\n for item in self._collect.find({'status': 'started'}, {'_id': True}):\n deletes.append(pymongo.DeleteOne(item))\n # Remove them\n if len(deletes):\n print(\"Delete\", self._collect.bulk_write(deletes).deleted_count)", "async def disconnect_bg_task(self):\n while True:\n for sid in self.voice_states:\n state = self.voice_states[sid]\n music = state.songs._queue\n if not state.current:\n if not music:\n state.speech_player.cancel()\n state.audio_player.cancel()\n del self.voice_states[sid]\n await state.voice.disconnect()\n await asyncio.sleep(100)", "async def cleanup_messages(\n messages: List[discord.Message], sec_delay: int = 10\n) -> None:\n for message in messages:\n # Adding `delay` kwarg spawns a task, so wrapping that task in a task is redundant...\n # These tasks cause dpytest to break, and py-cord supposedly has \"sane rate-limiting\"\n # So tasks here are being removed all together.\n # Another mention, we could/should leverage channel.delete_messages() for bulk cleanup, however\n # dpytest doesn't support it yet either lol.\n await asyncio.sleep(sec_delay)\n await message.delete()", "async def backgroundLoop(self):\n while True:\n self.logger.debug(\"Checking to see if we need to garbage collect\")\n await self.checkGarbageCollect()\n\n self.logger.debug(\"Executing auto-purge\")\n await self.doAutoPurge()\n\n await asyncio.sleep(SLEEP_TIME)", "def async_transcribe(audio_file_paths,\n bucket_name,\n output_tsv_path,\n sample_rate,\n language_code,\n speaker_count=0,\n begin_sec=0.0):\n tmp_audio_file = tempfile.mktemp(suffix=\".flac\")\n print(\"Temporary audio file: %s\" % tmp_audio_file)\n audio_duration_s = concatenate_audio_files(audio_file_paths, tmp_audio_file)\n\n storage_client = storage.Client()\n bucket = storage_client.bucket(bucket_name)\n destination_blob_name = os.path.basename(tmp_audio_file)\n blob = bucket.blob(destination_blob_name)\n print(\"Uploading %s to GCS bucket %s\" % (tmp_audio_file, bucket_name))\n blob.upload_from_filename(tmp_audio_file)\n gcs_uri = \"gs://%s/%s\" % (bucket_name, destination_blob_name)\n print(\"Uploaded to GCS URI: %s\" % gcs_uri)\n\n client = speech.SpeechClient()\n audio = speech.RecognitionAudio(uri=gcs_uri)\n enable_speaker_diarization = speaker_count > 0\n config = speech.RecognitionConfig(\n encoding=speech.RecognitionConfig.AudioEncoding.FLAC,\n sample_rate_hertz=sample_rate,\n language_code=language_code,\n enable_speaker_diarization=enable_speaker_diarization,\n diarization_speaker_count=speaker_count)\n\n operation = 
client.long_running_recognize(config=config, audio=audio)\n timeout_s = int(audio_duration_s * 0.25)\n print(\n \"Waiting for async ASR operation to complete \"\n \"(audio duration: %.3f s; ASR timeout: %d s)...\" %\n (audio_duration_s, timeout_s))\n response = operation.result(timeout=timeout_s)\n blob.delete()\n os.remove(tmp_audio_file)\n\n utterances = []\n for result in response.results:\n # The first alternative is the most likely one for this portion.\n alt = result.alternatives[0]\n utterances.append(alt.transcript)\n print(u\"Transcript: {}\".format(alt.transcript))\n diarized_words = [(\n word.word, word.speaker_tag, word.start_time.total_seconds(),\n word.end_time.total_seconds()) for word in alt.words]\n # print(\"Confidence: {}\".format(result.alternatives[0].confidence))\n\n regrouped_utterances = regroup_utterances(utterances, diarized_words)\n with open(output_tsv_path, \"w\" if not begin_sec else \"a\") as f:\n if not begin_sec:\n # Write the TSV header.\n f.write(tsv_data.HEADER + \"\\n\")\n utterance_counter = 0\n for (regrouped_utterance,\n speaker_index, start_time_sec, end_time_sec) in regrouped_utterances:\n utterance_counter += 1\n line = \"%.3f\\t%.3f\\t%s\\t%s [U%d] [Speaker #%d]\" % (\n start_time_sec + begin_sec,\n end_time_sec + begin_sec,\n tsv_data.SPEECH_TRANSCRIPT_TIER,\n regrouped_utterance,\n utterance_counter,\n speaker_index)\n print(line)\n f.write(line + \"\\n\")", "def _run(self):\n\t\tself._trigger_manager = UnplugTrigger(self._audio_manager,\n\t\t self._logger)\n\t\tself._trigger_manager.start_listening()\n\t\twhile not self._trigger_manager.is_triggered():\n\t\t\ttime.sleep(0.1)\n\t\tself.is_running = False\n\t\tself._logger.info('Waiting on saves to complete...')\n\t\tself._video_thread.join()\n\t\tself._audio_thread.join()\n\t\tself.mix_audio_and_video()", "def schedule_cleanup(config):\n LOGGER.debug(\"Starting cleanup scheduler\")\n cleanup = Cleanup(config)\n cleanup.start()\n LOGGER.debug(\"Running initial cleanup\")\n cleanup.cleanup()", "def process_data():\n for message in get_messages_from_sqs():\n try:\n message_content = json.loads(message.body)\n input_file = urllib.unquote_plus(message_content\n ['Records'][0]['s3']['object']\n ['key']).encode('utf-8')\n s3.download_file(input_bucket_name, input_file, input_file)\n output_file = os.path.join(output_dir, os.path.splitext(input_file)[0]+'.csv')\n parse_patient_data(input_file, output_file)\n upload_data(output_file)\n cleanup_files(input_file, output_file)\n except:\n message.change_visibility(VisibilityTimeout=0)\n continue\n else:\n message.delete()", "def tidyUp(self):\n shutil.rmtree(self.tmpDirectory)\n self.console.accept()\n self.threadPool.waitForDone()", "def loop(self):\n while True:\n self.maybe_disconnect()\n\n # Grab any new events\n item_ids = []\n events = []\n come_back_soon = False\n try:\n while True:\n item = self.queuedir.pop()\n if not item:\n break\n if len(events) > 50:\n come_back_soon = True\n break\n\n try:\n item_id, fp = item\n item_ids.append(item_id)\n log.debug(\"Loading %s\", item)\n events.extend(json.load(fp))\n except:\n log.exception(\"Error loading %s\", item_id)\n raise\n finally:\n fp.close()\n log.info(\"Loaded %i events\", len(events))\n self.send(events)\n for item_id in item_ids:\n log.info(\"Removing %s\", item_id)\n try:\n self.queuedir.remove(item_id)\n except OSError:\n # Somebody (re-)moved it already, that's ok!\n pass\n except:\n log.exception(\"Error processing messages\")\n # Don't try again soon, something has gone horribly wrong!\n 
come_back_soon = False\n for item_id in item_ids:\n self.queuedir.requeue(item_id, self.retry_time, self.max_retries)\n\n if come_back_soon:\n # Let's do more right now!\n log.info(\"Doing more!\")\n continue\n\n # Wait for more\n # don't wait more than our max_idle/max_connect_time\n now = time.time()\n to_wait = None\n if self._disconnect_timer:\n to_wait = self._disconnect_timer - now\n if to_wait < 0:\n to_wait = None\n log.info(\"Waiting for %s\", to_wait)\n self.queuedir.wait(to_wait)", "def task_clean_tmp_files():\n client = google.cloud.storage.Client()\n blobs = client.list_blobs(settings.PODCAST_STORAGE_BUCKET,\n prefix=settings.PODCAST_TMP_STORAGE_DIRECTORY)\n for blob in blobs:\n if blob.time_created.replace(tzinfo=None) + datetime.timedelta(1) <= datetime.datetime.now():\n blob.delete()\n\n return OK_RESPONSE", "def finalize(self):\n for p in self._processes:\n if p.join(30) is None and p.exitcode is None:\n p.kill()", "def cleanup(self):\n\n # NOTE(jbresnah) call stop on each of the servers instead of\n # checking the pid file. stop() will wait until the child\n # server is dead. This eliminates the possibility of a race\n # between a child process listening on a port actually dying\n # and a new process being started\n servers = [self.api_server, self.conductor_server, ]\n for s in servers:\n try:\n s.stop()\n except Exception:\n pass\n\n for f in self.files_to_destroy:\n if os.path.exists(f):\n os.unlink(f)", "def cleanup(cloud, prefix, timeout=600, interval=10):\n global RECYCLE\n\n # Get nodes with prefix\n procs, nodes = [], cloud.get_nodes_by_prefix(prefix)\n if nodes:\n log.info(f\"Nodes with prefix '{prefix}' are {', '.join(nodes)}\")\n else:\n log.error(f\"No nodes are available with prefix '{prefix}'\")\n\n # Start deleting nodes in parallel\n for node in nodes:\n proc = mp.Process(target=delete_node, args=(node, cloud, timeout, interval))\n proc.start()\n procs.append(proc)\n\n # Wait till all nodes gets cleaned\n [p.join() for p in procs]\n\n # Get nodes woth prefix\n procs, volumes = [], cloud.get_volumes_by_prefix(prefix)\n if volumes:\n log.info(f\"Volumes with prefix '{prefix}' are {', '.join(volumes)}\")\n else:\n log.error(f\"No volumes available with prefix '{prefix}'.\")\n\n # Start deleting volumes in parallel\n for volume in volumes:\n proc = mp.Process(target=delete_volume, args=(volume, cloud, timeout, interval))\n proc.start()\n procs.append(proc)\n\n # Wait till all volumes gets cleaned\n [p.join() for p in procs]\n\n # Check if any resource deletion failed\n stale = [r.name for r in RECYCLE if r.state]\n if stale:\n msg = f\"Failed to clean resources {', '.join(stale)}\"\n log.error(msg)\n raise OperationFailedError(msg)\n\n return True", "def _finalize_iteration(self, verbose: bool):\n super().delete_remote_files()\n self.comm.storyteller.document_task(task=\"adam_documentation\")", "def cleanUp(self):\n self.dirMonitor.stop()\n self.filesList.cleanUp()", "def finalize_results(self, traversed_path: List[str]):\n self.set_end_time()\n self.set_no_longer_active()\n self.results = traversed_path\n app = get_celery_app()\n if not TESTING:\n app.control.revoke(\n self.task_id, terminate=True, signal=\"SIGKILL\"\n ) # pragma: no cover", "def autovacuum(self):\n deadline = datetime.now() - timedelta(days=self._removal_interval)\n jobs = self.with_context(active_test=False).search(\n [('date_done', '<=', fields.Datetime.to_string(deadline))],\n )\n jobs.unlink()\n return True", "def recv_loop():\n\n if not os.path.isdir(tq_dir):\n os.mkdir(tq_dir)\n if not 
os.path.isdir(rq_dir):\n os.mkdir(rq_dir)\n\n while True:\n time.sleep(1)\n #print ('polling')\n try:\n files = os.listdir(rq_dir)\n except:\n print ('Could not get listing of directory ' + rq_dir + '\\n')\n quit()\n\n files.sort()\n for f in files:\n fname = rq_dir + '/' + f\n #print (fname)\n if os.path.isfile(fname):\n print ('---')\n print ('Processing ' + fname + ' ...')\n with open (fname, 'r') as h:\n for m in h:\n m.rstrip('\\n')\n parse_aprs (m.rstrip('\\n'))\n os.remove(fname)\n else:\n \t\t#print (fname + ' is not an ordinary file - ignore')\n pass", "async def cleanup(self) -> None:\n if self.args.sync:\n self.stop_events_sync()\n self._processing.join()\n else:\n await self.stop_events_async()\n await self._processing\n try:\n raise self._error\n except TypeError:\n pass", "def run_tximport():\n eligible_experiments = (\n Experiment.objects.annotate(num_organisms=Count(\"organisms\"))\n .filter(num_organisms=1, technology=\"RNA-SEQ\", num_processed_samples=0)\n .prefetch_related(\"samples__results\")\n )\n\n paginator = Paginator(eligible_experiments, PAGE_SIZE)\n page = paginator.page()\n\n # Next is to figure out how many samples were processed for\n # each experiment. Should be able to reuse code from salmon\n # cause it does this stuff.\n tximport_pipeline = ProcessorPipeline.TXIMPORT\n\n while True:\n creation_count = 0\n\n for experiment in page.object_list:\n quant_results = get_quant_results_for_experiment(experiment)\n\n if should_run_tximport(experiment, quant_results, True):\n processor_job = ProcessorJob()\n processor_job.pipeline_applied = tximport_pipeline.value\n processor_job.ram_amount = 8192\n # This job doesn't need to run on a specific volume\n # but it uses the same Nomad job as Salmon jobs which\n # do require the volume index.\n processor_job.volume_index = random.choice(list(get_active_volumes()))\n processor_job.save()\n\n assoc = ProcessorJobOriginalFileAssociation()\n # Any original file linked to any sample of the\n # experiment will work. 
Tximport is somewhat special\n # in that it doesn't actuallhy use original files so\n # this is just used to point to the experiment.\n assoc.original_file = experiment.samples.all()[0].original_files.all()[0]\n assoc.processor_job = processor_job\n assoc.save()\n\n creation_count += 1\n\n try:\n send_job(tximport_pipeline, processor_job)\n except Exception:\n # If we cannot queue the job now the Foreman will do\n # it later.\n pass\n\n logger.info(\"Created %d tximport jobs for experiments past the thresholds.\", creation_count)\n\n if not page.has_next():\n break\n else:\n page = paginator.page(page.next_page_number())", "def get_transcription(url):\n\n # Checks the format of the URL\n if \"https://www.youtube.com/watch?v=\" in url:\n input_url_id = url.replace(\"https://www.youtube.com/watch?v=\", \"\")\n elif \"https://youtu.be/\" in url:\n input_url_id = url.replace(\"https://youtu.be/\", \"\")\n\n # Creates a blank list to iterate over\n text_parts = []\n\n # Gets a list of all available transcripts\n try:\n\n list_of_transcripts = YouTubeTranscriptApi.list_transcripts(input_url_id)\n print(\"Checking for Transcriptions...\")\n\n # Checks to see if a manual transcript is created if not, checks to see if a generated one is created\n if 'en-US' in list_of_transcripts._manually_created_transcripts:\n print(\"Manual Transcription Found.\")\n transcript = list_of_transcripts.find_manually_created_transcript(['en-US'])\n elif 'en' in list_of_transcripts._manually_created_transcripts:\n print(\"Manual Transcription Found.\")\n transcript = list_of_transcripts.find_manually_created_transcript(['en'])\n elif 'en' in list_of_transcripts._generated_transcripts:\n print(\"Auto-Generated Transcription Found.\")\n transcript = list_of_transcripts.find_generated_transcript(['en'])\n\n # Saves the transcript into a variable to iterate over\n raw_transcription = transcript.fetch()\n\n # Indexing of raw transcripts\n iteration_of_raw = 0\n\n # Iterates over each dictionary and extracts 'text' key then appends the blank text_parts list\n for i in raw_transcription:\n indexed_dictionary = raw_transcription[iteration_of_raw]\n text_from_dictionary = indexed_dictionary['text']\n text_parts.append(text_from_dictionary)\n iteration_of_raw += 1\n # Defines how we want each text element to be separated with\n separator_for_each_text = \" \"\n\n # Joins the separator with the text_parts\n clean_transcription = separator_for_each_text.join(text_parts)\n\n # Returns the cleaned transcripts\n return clean_transcription\n\n except:\n print(\"No Transcriptions Found\")\n clean_transcription = \"No Transcriptions Found\"\n return clean_transcription", "def run_publisher_async(args1, stop_event):\n global cml_adapter\n global subscriptions\n\n while(not stop_event.isSet()):\n try:\n publisher_loop()\n time.sleep(publishQuotesInterval)\n except Exception as e:\n logError({\"Msg\": str(e), \"Method\": \"run_publisher_async\"})", "def _cleanup_crawl(self, crawl_id):\n self.crawlQueue.remove(crawl_id)\n self.cleanQueue.append(crawl_id) # monitor cleaning\n _mark_timer_complete(crawl_id, self.engine_redis)\n cmd_line = self._cleanup_command(crawl_id)\n if self.test:\n self.clean_command = cmd_line\n else:\n p = subprocess.Popen(cmd_line, shell=True)\n if self.debug:\n self.logger.debug(\"cmd_line: %s\", cmd_line, extra=self.log_header)", "async def cleanup(self):\n pass", "def do_latest_job(self):\n self.symlinks.remove_symlinks_from_old_runs(self.wiki.date)\n self.feeds.cleanup_feeds()", "def cleanup(self):\n files = 
self.nlst()\n latest = self.latest_filename\n for filename in files:\n if filename != latest:\n result = self.delete(filename)\n logger.info(f\"Deleted old export from FTP: {result}\")", "def process_files(audio_files, context=[]):\n\n results = []\n bar_limit = len(audio_files)\n client = speech.SpeechClient()\n with Bar('Processing:', max=bar_limit) as bar:\n for audio in audio_files:\n response = convert_speech_to_text(client, audio, context)\n (transcription, confidence) = transcript(response)\n results.append({\n \"path\": audio,\n \"transcription\": transcription,\n \"confidence\": confidence\n })\n bar.next()\n return results", "def main():\n # transcribe_audio()\n summarize()", "def tokenize_podcast_transcript(args):\n DATA_DIR = os.path.join(os.getcwd(), 'data', args.project_id)\n story_file = os.path.join(DATA_DIR, 'podcast-transcription.txt')\n\n # Read all words and tokenize them\n with open(story_file, 'r') as fp:\n data = fp.readlines()\n\n data = [item.split(' ') for item in data]\n data = [\n item[:-2] + [' '.join(item[-2:])] if item[-1] == '\\n' else item\n for item in data\n ]\n data = [item for sublist in data for item in sublist]\n\n df = pd.DataFrame(data, columns=['word'])\n df['conversation_id'] = 1\n\n return df", "def transcribe_gcs(gcs_uri):\n from google.cloud import speech\n from google.cloud.speech import enums\n from google.cloud.speech import types\n client = speech.SpeechClient()\n\n audio = types.RecognitionAudio(uri=gcs_uri)\n config = types.RecognitionConfig(\n encoding=enums.RecognitionConfig.AudioEncoding.FLAC,\n enable_word_time_offsets=True,\n #sample_rate_hertz=32000,\n language_code='en-US')\n\n operation_start_time = time.time()\n operation = client.long_running_recognize(config, audio)\n\n\n print('Waiting for operation to complete...')\n response = operation.result(timeout=None)\n operation_end_time = time.time()\n operation_elapsed_time = operation_end_time - operation_start_time\n operation_time_string = format_time_string(operation_elapsed_time)\n\n last_result_index = len(response.results)-1\n last_word_index = len(response.results[last_result_index].alternatives[0].words)-1\n audio_duration = response.results[last_result_index].alternatives[0].words[last_word_index].end_time.seconds\n audio_duration_string = format_time_string(audio_duration)\n\n counter = 1\n srt_file_name = gcs_uri[gcs_uri.rfind(\"/\")+1:gcs_uri.rfind(\".mp4-audio.\")]+\".srt\"\n srt_file = open(srt_file_name, \"w\")\n\n srt_file_name2 = gcs_uri[gcs_uri.rfind(\"/\") + 1:gcs_uri.rfind(\".mp4-audio.\")] + \"2.srt\"\n srt_file2 = open(srt_file_name2, \"w\")\n\n transcription_file_name = gcs_uri[gcs_uri.rfind(\"/\") + 1:gcs_uri.rfind(\"-audio.\")] + \"-transcription.txt\"\n transcription_file = open(transcription_file_name, \"w\")\n\n word_list = concat_word_list(response.results)\n phrase_list = make_phrase_list(word_list)\n write_srt_file(srt_file2, phrase_list)\n\n # Print the first alternative of all the consecutive results.\n for result in response.results:\n transcript = result.alternatives[0].transcript.strip()\n seconds = result.alternatives[0].words[0].start_time.seconds\n last_word_index = len(result.alternatives[0].words)-1\n end_seconds = result.alternatives[0].words[last_word_index].end_time.seconds\n outstring = format_time_string(seconds) + \" - \" +transcript\n print(outstring + \"\\n\")\n transcription_file.write(outstring + \"\\n\\n\")\n\n # now write to srt file\n srt_file.write(str(counter)+\"\\n\")\n start_time_code = format_time_string(seconds) + 
\",000\"\n\n end_time_code = format_time_string(end_seconds) + \",000\"\n time_code = start_time_code + \" --> \" + end_time_code\n srt_file.write(time_code + \"\\n\")\n srt_file.write(transcript + \"\\n\\n\")\n counter += 1\n #print('Confidence: {}'.format(result.alternatives[0].confidence))\n srt_file.close()\n srt_file2.close()\n transcription_file.close()\n print(\"\\n------------------------------------------------\")\n print(\"Audio file length: \" + audio_duration_string)\n print(\"Transcribe operation running time: \" + operation_time_string)\n print(\"------------------------------------------------\")", "def updateScript(dbconnection):\n cursor = dbconnection.cursor()\n cursor.execute(\"select rss, name, source from podcasts;\")\n rssArray = cursor.fetchall()\n for rss in rssArray:\n print(\"chekcing name \" + str(rss[1]))\n url = str(rss[0])\n name = str(rss[1])\n source = str(rss[2])\n rssArray = DatabaseInteract.rssCheck(name, source, url)\n for item in rssArray:\n if(DatabaseInteract.checkIfExists(dbconnection, item[0]) == False):\n DatabaseInteract.insertClip(dbconnection, item[2], name, item[3], item[1], item[0])", "def parse_documents():\n\n\tcount_before = control.find().count()\n\n\tprint \"There are currently %i unprocessed records.\" % count_before\n\n\t#dispatch\n\t# executor = concurrent.futures.ThreadPoolExecutor(10)\n\t# futures = [executor.submit(analyze_message, document) for document in control.find()]\n\t# concurrent.futures.wait(futures)\n\n\tfor document in control.find():\n\t\tanalyze_message(document)\n\n\tcount_after = control.count()\n\tprint \"There are now %i stored records.\" % control.count()", "def check_running(self):\n remove = []\n\n # iterate over all \"running\" processes\n for proc in self.processes:\n # if the process has stopped\n if proc['proc'].poll() is not None:\n if proc['type'] == 'rtmpdump':\n self.logger.info(\n proc['model'] + \" is no longer being captured\")\n if os.path.isfile(proc['filename']):\n proc_stats = self.get_proc_stats(proc)\n if proc_stats['file_size'] == 0:\n self.logger.warning(\"Capture size is 0kb, deleting.\")\n os.remove(proc['filename'])\n else:\n self.move_to_complete(proc)\n message = (\"Finished:\" +\n proc['model'] + \" - \" +\n \"Started at \" +\n proc_stats['started_at'] + \" | \" +\n \"Size:\" +\n proc_stats['formatted_file_size'] + \" | \" +\n \"Duration:\" +\n proc_stats['recording_time'])\n self.logger.info(message)\n if self.push_bullet is not None:\n self.push_bullet.push_note(\"Chaturbate\", message)\n elif proc['type'] == 'ffmpeg':\n if proc['proc'].poll() == 0:\n os.remove(proc['source'])\n else:\n self.logger.warning(\"Something went wrong with ffmpeg, not deleting\")\n\n remove.append(proc['id'])\n\n # remove all items in remove from self.processes\n procs = self.processes\n for item in remove:\n procs = [f for f in procs if f['id'] != item]\n self.processes = procs", "def cleanup():\n dist.destroy_process_group()", "def run(self):\n run1=0\n while (run1==0):\n Publisher().sendMessage(\"updatetext\", \"\")\n time.sleep(3)", "def wait_for_audio_to_complete(self):\n # Wait for the text queue to drain in the \"run\" method.\n self.text_queue.join()\n # Wait for the last queued audio to complete.\n self._wait_for_player_to_complete()", "def start_transcribing():\n transcribe.main()", "async def cleanup(self, ctx):\r\n msgs = await ctx.message.channel.history(limit=100).flatten()\r\n msgs = [msg for msg in msgs if msg.author.id == self.amethyst.user.id]\r\n\r\n if (len(msgs) > 0 and\r\n 
ctx.me.permissions_in(ctx.channel).manage_messages):\r\n await ctx.channel.delete_messages(msgs)\r\n elif len(msgs) > 0:\r\n for msg in msgs:\r\n await msg.delete()\r\n else:\r\n return\r\n\r\n msg = await ctx.send(\"Cleaned `{}`\".format(len(msgs)))\r\n await asyncio.sleep(2.5)\r\n await msg.delete()", "def main_eventlog(config, output_fname):\n\n loop = asyncio.get_event_loop()\n\n resources = loop.run_until_complete(do_startup(config, output_fname, loop))\n loop.run_forever()\n\n log.info(\"Running cleaunup tasks ...\")\n loop.run_until_complete(do_cleanup(*resources))\n\n pending_tasks = asyncio.Task.all_tasks()\n for task in pending_tasks:\n try:\n loop.run_until_complete(task)\n except asyncio.CancelledError:\n pass\n loop.close()", "def process_ingestion_emails():\n processor = CalendarInteractionEmailProcessor()\n\n for message in get_mail_docs_in_bucket():\n source = message['source']\n try:\n documents.delete_document(bucket_id=BUCKET_ID, document_key=message['source'])\n except Exception as e:\n logger.exception('Error deleting message: \"%s\", error: \"%s\"', source, e)\n continue\n\n try:\n email = mailparser.parse_from_bytes(message['content'])\n processed, reason = processor.process_email(message=email)\n if not processed:\n logger.error('Error parsing message: \"%s\", error: \"%s\"', source, reason)\n else:\n logger.info(reason)\n except Exception as e:\n logger.exception('Error processing message: \"%s\", error: \"%s\"', source, e)\n\n logger.info(\n 'Successfully processed message \"%s\" and deleted document from bucket \"%s\"',\n source,\n BUCKET_ID,\n )", "async def cleanup_file(self, short_text, file_name):\n await asyncio.sleep(1)\n # cache short texts\n if len(short_text) > self.config.get('cache_max_letters', 100):\n os.remove(file_name)", "def end(self):\n pids_to_kill = self.get_all_pids()\n if pids_to_kill:\n kill_child_processes_by_pids(pids_to_kill)", "async def clean_up(self) -> None:", "def sync_entries():\n import time\n\n while True:\n try:\n update_pending_scripts(settings['api_handler'])\n except:\n logging.exception(\"Error occured during synchronisation\")\n time.sleep(60)", "def clean_up_for_next_cycle(self):\n shutil.copy(self.pr.config.releaseItemsFilePath, self.pr.config.backupFilesPath + '_'\n + str(datetime.now().strftime('%m-%d-%Y:%I.%M%p')) + '.txt') # take backup before clearing\n clear_file(self.pr.config.releaseItemsFileMergedBy)\n clear_file(self.pr.config.releaseItemsFilePath) # clear file for next release content\n # NOTE: user has to manually delete data added when in debug mode", "def translate_phrases(translator, phrases, language):\n for phrase in phrases:\n translator.type_phrase_to_translate(phrase)\n sleep(0.5)\n translated_phrase = translator.read_translated_phrase()\n add_translation_to_file(language, translated_phrase)", "def clear_subs_content(self):\r\n for youtube_id in self.get_youtube_ids().values():\r\n filename = 'subs_{0}.srt.sjson'.format(youtube_id)\r\n content_location = StaticContent.compute_location(self.course.id, filename)\r\n try:\r\n content = contentstore().find(content_location)\r\n contentstore().delete(content.get_id())\r\n except NotFoundError:\r\n pass", "def cleanUp(self):\n\n tapeList = sorted(glob.glob('TAPE?'))\n tapeList = ['TAPE%d' % num for num in [1, 2, 5, 6, 7, 10]]\n for tape in tapeList:\n if os.path.isfile(tape): os.remove(tape)\n # end TAPE loop", "def _cleanup_ffmpeg(self) -> None:\r\n self.ffmpeg_proc.communicate()\r\n self.ffmpeg_proc = None", "def cleanup(self,replicas):\n\t\n\t# write the 
last of the trj and ene buffers \n for rep in range(0,len(replicas)):\t\n self.dump_trjqueue(replicas[rep])\n self.dump_enequeue(replicas[rep])\n\n\t# close any open file handles\n for i in range(0,len(self.repfiles_trj)):\n\t self.repfiles_trj[i].close() \t# file handles for coord trajs for each replica\n self.repfiles_ene[i].close()\t# file handles for ene trajs for each replica\n\t\n self.bytemp_trj[i].close()\t\t# file handles for coord trajs arranged by temp\n self.bytemp_ene[i].close()\t\t# file handles for ene trajs arranged by temp\n self.bytemp_replica[i].close()\t# file handle - keeps track of replica number\n\t\n self.byreplica_trj[i].close()\t# file handles for coord trajs arranged by temp\n self.byreplica_ene[i].close()\t# file handles for ene trajs arranged by temp\n self.byreplica_temp[i].close()\t# file handle - keeps track of temp number", "def start_process(self):\n print 50 * '*' + '\\n' + 10 * '*' + ' STARTING SCANNING PROCESS ' + 10 * '*' + '\\n' + 50 * '*'\n\n while True:\n print str(self.stream_list) + str(self.end_times_list)\n\n self.check_if_stream_should_end()\n\n if self.is_time_to_get_game_data_for_day:\n self.write_days_games_data()\n\n # Read in file to see if it is time to analyze twitter\n read_path = self.get_write_path_for_days_games()\n\n try:\n with open(read_path) as f:\n data = json.load(f)\n current_time = datetime.datetime.now().strftime('%H:%M')\n for idx, game in enumerate(data):\n game_time = dateutil.parser.parse(game['start_time']).strftime('%H:%M')\n if game_time == current_time and not game['being_streamed']:\n # TODO - Figure out how to call a fork or child process for a certain amount of time\n # TODO - Refactor this\n self.update_is_streamed_json(index=idx)\n print 'Time to get twitter data.'\n\n search_terms_home = self.keyword_generator.generate_search_terms(game['home_team_id'])\n search_terms_away = self.keyword_generator.generate_search_terms(game['away_team_id'])\n keyword_string_home = ','.join(search_terms_home)\n keyword_string_away = ','.join(search_terms_away)\n\n keyword_string = keyword_string_home + ',' + keyword_string_away\n game_name = datetime.datetime.now().strftime('%Y-%m-%d') + '-' + game['title'].replace(' ', '-')\n\n data_gatherer = DataGatherer()\n stream = data_gatherer.get_tweet_stream(keyword_string, game['uuid'], game_name)\n self.stream_list.append(stream)\n self.end_times_list.append(self.get_time_to_end_stream(1))\n\n except IOError:\n print 'File not found'\n\n # restart loop after sleep, given by our tick_time\n self.sleep_for(self.tick_time_in_seconds)", "def task_recursive_download_podcast():\n data = get_task_arguments()\n user_uid = data[\"user_uid\"]\n podcast_id = data[\"podcast_id\"]\n\n podcast = Podcast.load(user_uid, podcast_id)\n new_feed = podcast.load_feed()\n\n # update the feed data (e.g. 
title, image, etc.)\n podcast.feed.title = new_feed.title\n podcast.feed.description = new_feed.description\n podcast.feed.image_url = new_feed.image_url\n podcast.save()\n\n new_entry = None\n for e in new_feed.entries:\n if e not in podcast.feed.entries and e.published + datetime.timedelta(settings.EPISODE_EXPIRATION_DAYS) > \\\n datetime.datetime.utcnow():\n new_entry = e\n if new_entry is None:\n return OK_RESPONSE\n\n podcast_type = PODCAST_TYPES[podcast.podcast_type]\n downloader = podcast_type.downloader()\n try:\n blob = downloader.download(new_entry.link)\n # update the entry to have our location and new\n new_entry.link = blob.public_url\n new_entry.bytes = blob.size\n new_entry.mimetype = blob.content_type\n except DownloadException as e:\n # no ability to download, so keep the original URL and move on.\n raise e\n # update feed and save (recall feed has pointers to the updated entry)\n # NOTE: We reload the podcast here before running `save` in case\n # another task updated this podcast while we were downloading and\n # writing the blob.\n if new_entry not in podcast.feed.entries:\n podcast.feed.insert(new_entry)\n podcast.feed.last_updated = datetime.datetime.utcnow()\n podcast.save()\n # call this task again. this ensures the system (serially) downloads\n # all the content for this URL\n # NOTE: Because we return earlier if no new_entry is found, this ensures\n # that we only re-queue download tasks in the event of new entries.\n add_task(url_for(\"task_recursive_download_podcast\"),\n {\"user_uid\": user_uid, \"podcast_id\": podcast_id})\n return OK_RESPONSE", "def wait_recording_done(hass):\n trigger_db_commit(hass)\n hass.block_till_done()\n hass.data[recorder.DATA_INSTANCE].block_till_done()\n hass.block_till_done()", "def pytest_sessionfinish(session, exitstatus):\n\n # dat files are created when using attachements\n print(\"\\n-------------------------\\nClean dpytest_*.dat files\")\n fileList = glob.glob('./dpytest_*.dat')\n for filePath in fileList:\n try:\n os.remove(filePath)\n except Exception:\n print(\"Error while deleting file : \", filePath)", "def clean_quarantined(upload_id, language=None):\n upload = SubjectUpload.objects.get(id=upload_id)\n quarantined = upload.subjectstage_set.all()\n\n # The upload succeeded\n upload.status = 'OK'\n upload.save()\n\n logger.debug('Set upload status for SubjectUpload %d to \"complete\".',\n upload_id)\n\n # delete the quarantined items out of the quarantine table\n quarantined.delete()\n\n logger.debug('Removed quarantined subject data.')\n\n prev_lang = None\n if not language is None:\n prev_lang = get_language()\n activate(language)\n\n try:\n Plan.objects.all().update(is_valid=False)\n except Exception:\n logger.warn('Could not reset the is_valid flag on all plans.')\n\n status = {\n 'task_id':\n None,\n 'success':\n True,\n 'messages': [\n _('Upload complete. Subject \"%(subject_name)s\" added.') % {\n 'subject_name': upload.subject_name\n }\n ],\n 'subject':\n Subject.objects.get(name=upload.subject_name).id\n }\n\n # reset language back to default\n if not prev_lang is None:\n activate(prev_lang)\n\n return status", "def cleanup():" ]
[ "0.5957652", "0.5755388", "0.54659915", "0.5391661", "0.5354948", "0.5351097", "0.5270798", "0.5212245", "0.5207515", "0.5192871", "0.5167002", "0.5137688", "0.5125688", "0.51148933", "0.5111066", "0.5101877", "0.5099718", "0.509771", "0.50841075", "0.5051348", "0.5043869", "0.50335795", "0.50252724", "0.50173515", "0.50056624", "0.5000102", "0.49870256", "0.49866924", "0.49820483", "0.49778092", "0.49765405", "0.49659654", "0.49530524", "0.49434373", "0.4938437", "0.4933625", "0.4930372", "0.49279597", "0.48860618", "0.4883314", "0.48783615", "0.4876732", "0.48765278", "0.4875195", "0.4857336", "0.48470074", "0.4842338", "0.48400575", "0.48369488", "0.4836541", "0.48301843", "0.4815994", "0.48051226", "0.48030582", "0.47970024", "0.47856358", "0.47801486", "0.47790107", "0.4773955", "0.47709054", "0.4763704", "0.47573885", "0.47542572", "0.47525784", "0.47474197", "0.47436348", "0.47394857", "0.47357357", "0.47277817", "0.4726005", "0.47168797", "0.47054696", "0.4701236", "0.47007754", "0.46997005", "0.46987173", "0.46976772", "0.46955517", "0.46952254", "0.46863917", "0.46835414", "0.46822396", "0.46764332", "0.4673789", "0.46673867", "0.46622714", "0.46514255", "0.46504268", "0.46491462", "0.4643209", "0.46387458", "0.46360618", "0.4628845", "0.4628005", "0.462255", "0.4621095", "0.46197638", "0.46181834", "0.46174446", "0.46156207" ]
0.67138106
0
Requires dbconnection and the filename (location) of the file being parsed
def parseUpload(dbconnection, fileName):
    nhContent = ParseText.nohupTranscriptionContent(fileName)
    count = 0
    while count < len(nhContent[0]):
        try:
            rtf = nhContent[0][count]
            transcription = nhContent[1][count].replace("'", "''").replace("_", "")
            dbID = nhContent[2][count].replace(".", "")
            duration = nhContent[3][count]
            DatabaseInteract.insertTranscription(dbconnection, rtf, transcription, duration, dbID)
            count += 1
        except:
            print("couldnt upload one at index " + str(count))
            count += 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, db_file):\n pass", "def import_db(import_file):\n import_data(import_file)", "def load_file():\n global list_of_table, data_base, new_data\n open_name = askopenfilename()\n\n if Path(open_name).suffix == '.db':\n data_base = open_name\n data_base = str(data_base)\n new_data_base = parse(data_base)\n new_data = update_list_tables(new_data_base)\n new_data.clear()\n\n else:\n mistake_db_file()", "def __init__(self, db_file, name):\n self._db_file = db_file\n self._name = name\n self._dbconnect = None\n self._cursor = None", "def read_relations(db, openfile):\n pass", "def _open_sql_file(dbname):\n try:\n dbpath = pathlib.Path(dbname).resolve()\n conn = sqlite3.connect(f\"{dbpath.as_uri()}?mode=ro\", timeout=1, uri=True)\n c = conn.cursor()\n except sqlite3.Error as e:\n sys.exit(f\"An error occurred opening sqlite file: {e.args[0]} {dbname}\")\n return (conn, c)", "def __init__(self, filename):\n self._filename = filename\n if os.path.exists(filename):\n self._connect()\n else:\n self._create_database()", "def execute_script(file_name):\n conn = psycopg2.connect(config['SQLALCHEMY_DATABASE_URI'])\n cur = conn.cursor()\n sql_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), file_name)\n cur.execute(open(sql_file, 'r').read())\n conn.commit()\n cur.close()\n conn.close()", "def read_locations(db, openfile):\n pass", "def read_sql(self):\n pass", "def execute_queries_from_file(self, file_name, file_path=test_data_path):\n if file_path:\n with open(file_path + file_name, 'rb') as file:\n query = sqlalchemy.sql.text(file)\n else:\n with open(file_name, 'rb') as file:\n query = sqlalchemy.sql.text(file)\n self.execute_query(query)\n return self", "def db_file():\n return abspath('vmchecker.db')", "def open (self, sql_file):\n fd = open(sql_file, 'r')\n sql = fd.read()\n fd.close()\n self.sql = sql.replace(UTF_8_STR, \"\")", "def execute_script_from_file(self, filename):\n filename = os.path.join(self.curr_dir, filename)\n # Connect to db\n conn = sqlite3.connect(self.db_path)\n cursor = conn.cursor()\n with open(filename, \"r\", encoding=\"utf-8\") as sql_file:\n sql_script = sql_file.read()\n\n # all SQL commands (split on ';')\n sql_commands = filter(None, sql_script.split(\";\"))\n # Execute every command from the input file\n for command in sql_commands:\n # This will skip and report errors\n # For example, if the tables do not yet exist, this will skip over\n # the DROP TABLE commands\n try:\n cursor.execute(command)\n except OperationalError as msg:\n print(\"Command skipped: \", msg)\n conn.commit()\n conn.close()", "def __init__(self, db_filename):\n self._conn = sqlite3.connect(db_filename)\n self._conn.text_factory = str\n self._cursor = self._conn.cursor()", "def __init__(self, filename=None):\r\n BaseDB.__init__(self, filename, \"verifier\")", "def read_sql_from_file(self, filename):\n tmpLines = ''\n logger.info(\"Reading from {}\".format(filename))\n\n with open(filename, 'r') as fh:\n tmpLines = fh.readlines()\n \n sqlquery = \"\".join(tmpLines)\n cursor = self.conn.cursor()\n\n try:\n cursor.execute(sqlquery)\n except Exception as e:\n logger.info(e)\n sys.exit(1)\n return", "def __init__(self, in_db_path, in_db_name):\n if not os.path.isfile(in_db_path + in_db_name):\n self._connection, self._cursor = None, None\n raise Exception('PASSED IN DATABASE PATH IS NOT VALID.')\n else:\n self._connection = sqlite3.connect(in_db_path + in_db_name)\n self._cursor = self._connection.cursor()", "def read(self):\n file_path = os.path.join(self.query_path, 
self.filename + '.sql')\n with open(file_path, 'r') as f:\n self.raw_sql = f.read()", "def __init__(self, file, check, ddl):\n # print(file, check, ddl)\n io_helper.ensure_path(Path(file).parent)\n self.__db = sqlite3.connect(file)\n self.ensure(check, ddl)", "def read_sql_from_file(path, conn):\n with open(path, 'r', encoding='utf-8') as f:\n qu = f.read()\n \n df = read_sql(qu, conn)\n \n return df", "def __init__(self, db_file):\n self.db = TinyDB(db_file)", "def test_get_parsed_files(self):\n files = Historical_ROAs_Parser()._get_parsed_files()\n with Historical_ROAs_Parsed_Table() as t:\n for f in files:\n sql = f\"SELECT * FROM {t.name} WHERE file = '{f}'\"\n assert len(t.execute(sql)) == 1", "def test_add_parsed_files(self):\n file_name = 'a_test_file'\n Historical_ROAs_Parser()._add_parsed_files([file_name])\n with Historical_ROAs_Parsed_Table() as t:\n sql = f\"SELECT * FROM {t.name} WHERE file = '{file_name}'\"\n assert len(t.execute(sql)) == 1\n sql = f\"DELETE FROM {t.name} WHERE file = '{file_name}'\"\n t.execute(sql)", "def parse_file(filename, db_factory, load_job_id, error_handler):\n db = db_factory()\n log = parse_file.get_logger()\n log.info('loading from %s', filename)\n try:\n num_cases = 0\n with codecs.open(filename, 'r', encoding='utf-8') as f:\n parser = vtr.Parser()\n for case in parser.parse(f):\n log.info('New case: %s/%s', case['book'], case['number'])\n num_cases += 1\n\n # Store the book\n try:\n db.books.update({'_id': case['book']},\n {'$set': {'year': int(case['book'].split('/')[0]),\n 'number': case['book'].split('/')[1],\n },\n '$addToSet': {'load_jobs': load_job_id,\n },\n },\n upsert=True,\n )\n except Exception as err:\n log.error('Could not store book %s: %s', case['book'], err)\n error_handler(unicode(err))\n\n # Store the case\n case['_id'] = '%s/%s' % (case['book'], case['number'])\n # associate the case record with the job for auditing\n case['load_job_id'] = load_job_id\n # pick a \"date\" for the case\n case['date'] = case.get('hearing_date') or case.get('arrest_date')\n try:\n db.cases.update({'_id': case['_id']},\n case,\n upsert=True,\n )\n except Exception as err:\n log.error('Could not store case %s: %s', case['_id'], err)\n error_handler(unicode(err))\n\n # Add participant info\n\n # index for upsert\n for p in get_encoded_participants(case, error_handler):\n #log.info('new participant: %r', p)\n p['case_id'] = case['_id']\n p['case_number'] = case['number']\n p['date'] = case['date']\n try:\n db.participants.update(\n {'case': case['_id'],\n 'encoding': p['encoding'],\n 'full_name': p['full_name'],\n 'role': p['role'],\n },\n p,\n upsert=True,\n )\n except Exception as err:\n log.error('Could not store participant %s for case %s: %s',\n p['_id'], case['_id'], err)\n error_handler(unicode(err))\n\n # Handle errors that did not result in new case records.\n errors = ['Parse error at %s:%s \"%s\" (%s)' % \\\n (filename, num, line, err)\n for num, line, err in parser.errors\n ]\n for e in errors:\n error_handler(e)\n except (OSError, IOError) as err:\n msg = unicode(err)\n errors = [msg]\n error_handler(msg)\n return {'errors': errors,\n 'num_cases': num_cases,\n }", "def report(db, openfile):\n pass", "def __init__(self, dbfile):\n self.dbfile = dbfile\n self.cxn = sqlite3.connect(dbfile)\n self.cur = self.cxn.cursor()", "def main(csvfile, dbfile, verbose=False):\n CONN = sqlite3.connect(dbfile)\n cursor = CONN.cursor()\n create_schema(cursor)\n process_data(cursor, csvfile, verbose=verbose)\n CONN.commit()\n CONN.close()", "def 
run_sql_from_file(conn, path, replace={}):\n with open(path, 'r') as f:\n query = [s.strip() + ';' for s in f.read().split(';')[:-1]]\n for s in query:\n for k, v in replace.items():\n s = s.replace(k, v)\n run_sql_from_string(conn, s)", "def process_song_file(cur, filepath):\n # open song file\n df = get_file_df(filepath)\n\n # insert song record\n song_data = songs_data = [df.loc[0].song_id, df.loc[0].title, df.loc[0].artist_id, int(df.loc[0].year), int(df.loc[0].duration)]\n cur.execute(song_table_insert, song_data)\n \n # insert artist record\n artist_data = [df.loc[0].artist_id, df.loc[0].artist_name, df.loc[0].artist_location, df.loc[0].artist_latitude, df.loc[0].artist_longitude] \n\n cur.execute(artist_table_insert, artist_data)", "def open_sql_script(script_filename):\n dir = os.path.dirname(__file__)\n relative_filename = os.path.join(dir, 'sql', script_filename)\n\n file_obj = open(relative_filename, 'r')\n file_contents = file_obj.read()\n file_obj.close()\n\n return file_contents", "def set_db_file():\n\n return os.path.join(db_path, db_file)", "def load_file(self):\n\n self.df = self.sqlContext.read.csv(self.source, sep=self.sep, header=True, inferSchema=True)", "def database_file(file):\r\n fpath = path.join('databases', '{0}'.format(file))\r\n db_path = path.join(mod_path, fpath)\r\n return db_path", "def __init__(self, sql_file, engine=\"SQLite\", user=None, password=None,\n host=\"localhost\", LOG=None, attach=None):\n\n # attach cases\n if attach is None:\n attach = {}\n else:\n attach = attach.copy()\n\n if isinstance(sql_file, str):\n\n for e in DatabaseCore._engines:\n if sql_file.startswith(e + \":::\"):\n engine = e\n sql_file = sql_file[len(e) + 3:]\n if \"###\" in sql_file:\n host, sql_file = sql_file.split(\"###\")\n break\n\n if \";\" in sql_file:\n li = [s.strip() for s in sql_file.split(\";\")]\n sql_file = li[0]\n rest = li[1:]\n for s in rest:\n ok = s.split(\",\")\n if len(ok) != 2:\n raise DBException( # pragma: no cover\n \"unable to find an alias in %r\" % s)\n nick = ok[0].strip()\n file = \",\".join(ok[1:])\n attach[nick] = file.strip()\n elif sql_file.startswith(\":\"):\n if sql_file != \":memory:\":\n raise FileNotFoundError( # pragma: no cover\n \"unable to interpret file: %r\" % sql_file)\n\n # some initialization\n self._password = password\n self._user = user\n self._host = host\n\n # the rest\n if LOG is None:\n def blind(*li, **p): # pragma: no cover\n pass\n LOG = blind # pragma: no cover\n self.LOG = LOG\n\n if isinstance(LOG, dict):\n raise TypeError( # pragma: no cover\n \"fLOG should be a function, not a dictionary\")\n if isinstance(self.LOG, dict):\n raise TypeError( # pragma: no cover\n \"LOG should be a function, not a dictionary\")\n\n if engine == \"SQLite\":\n self._sql_file = sql_file\n self._engine = engine\n\n elif engine == \"ODBCMSSQL\":\n raise DBException( # pragma: no cover\n \"Unable to connect to a SQL server.\")\n\n else:\n raise DBException( # pragma: no cover\n \"unfounded engine %s in %s\" %\n (engine, \", \".join(\n DatabaseCore._engines)))\n\n # write a file able to build a database summary\n if isinstance(sql_file, str) and not self.isMemory():\n folder = os.path.split(sql_file)[0]\n if len(folder) > 0 and not os.path.exists(folder):\n os.makedirs(folder)\n summary = os.path.join(folder, \"temp_quick_look_up.py\")\n if not os.path.exists(summary):\n #cwd = os.path.join (os.path.abspath (os.path.split (__file__) [0]), \"..\", \"..\")\n #fi = os.path.split (sql_file) [1]\n\n if hasattr(DatabaseCore, 
\"SCRIPT_LOOKUP\"):\n script = DatabaseCore.SCRIPT_LOOKUP\n lines = script.split(\"\\n\")\n lines = [li if \"__CWD__ =\" not in li else\n li.replace(\n \"(__file__)\",\n \"(r'%s')\" %\n os.path.realpath(__file__))\n for li in lines]\n script = \"\\n\".join(lines)\n script = script.replace(\n \"python quick_look_up.py\",\n \"%s quick_look_up.py\" %\n sys.executable)\n self.LOG(\"creating script \", summary)\n try:\n f = open(summary, \"w\")\n f.write(script)\n f.close()\n except IOError:\n self.LOG(\"unable to write \", summary)\n\n self._attach = attach\n self._buffer_insert = []\n self._buffer_insert_s = 0\n\n if isinstance(sql_file, str) and self.isMemory():\n self._connection = SQLite.connect(self._sql_file)\n elif isinstance(sql_file, SQLite.Connection):\n self._connection = sql_file\n self._sql_file = \":memory:\"", "def read_gp_dbid(self):\n INFO = self.logger.info\n INFO('%s - read_gp_dbid' % self.filepath)\n\n with open(self.filepath) as f:\n self.parse(f)", "def db(filename = 'P51-11'):\n import pdb\n sys.argv[1:] = ['-v', filename]\n pdb.run('extract.main()')", "def read_db():\n # read config file\n config = configparser.ConfigParser()\n config.read_file(open(\"options.cfg\"))\n\n return config['DEFAULT']['DatabaseFilename']", "def importToSQLITE(self, db_file, sqlite_db_name):\n\n command = \"{} {} {} {}\".format('cat', db_file, '| sqlite3', sqlite_db_name)\n call(command, shell = True)", "def connect(filename=DATABASE_FILENAME):\n if not path.exists(filename):\n raise FileNotFoundError(\"Database file not found: \" + filename)\n with open(filename, 'r', encoding=\"utf-8\") as f:\n return Database(json.load(f))", "def loadDB(self,dbfilename):\n \n db=[]\n with open(dbfilename,'r',encoding='ISO-8859-1') as dbfilename:\n dbreader= csv.reader(dbfilename,delimiter=self.sDelimiter )\n for lFields in dbreader:\n db.append(lFields)\n\n return db", "def processFile(fileName):\n\n cursor = db.cursor()\n cursor.execute(\"BEGIN\")\n institutionCounter = 0\n\n def submitInstitute(bankCode, bankName, bic):\n try:\n cursor.execute(\"INSERT INTO institutions (bankCode, bic, name) VALUES(?,?,?)\", (bankCode, bic, bankName))\n except sqlite3.Error as e:\n print(\"Sorry , Error: {0} while inserting {1} ({2})\".format(e.args[0], bankCode, bic))\n\n book = xlrd.open_workbook(fileName, 'r')\n sheet = book.sheet_by_index(0)\n\n for row_index in range(2, sheet.nrows):\n submitInstitute(sheet.cell(row_index,0).value, sheet.cell(row_index,2).value, sheet.cell(row_index,1).value)\n institutionCounter += 1\n\n return institutionCounter", "def __init__(self, db_file):\n self.db = TinyDB(db_file)\n # TODO: implement db files rotation, for now just replace all data\n self.db.truncate()\n # self.db_path = pathlib.Path(db_file)\n # self.db_file_name = self.db_path.name\n # self.db_dir = self.db_path.parent", "def read(self, database ='project'):\n\t\tfile = open(self.file_name, \"r\")\n\n\t\ti = 1\n\t\tseptics = []\n\t\tfor line in file:\n\t\t\tif i > 2:\n\t\t\t\tval = line.split()\n\t\t\t\tself.check_cols(val, 13, 'septic')\n\n\t\t\t\tsep = {\n\t\t\t\t\t'name': val[0].lower(),\n\t\t\t\t\t'q_rate': val[1],\n\t\t\t\t\t'bod': val[2],\n\t\t\t\t\t'tss': val[3],\n\t\t\t\t\t'nh4_n': val[4],\n\t\t\t\t\t'no3_n': val[5],\n\t\t\t\t\t'no2_n': val[6],\n\t\t\t\t\t'org_n': val[7],\n\t\t\t\t\t'min_p': val[8],\n\t\t\t\t\t'org_p': val[9],\n\t\t\t\t\t'fcoli': val[10],\n\t\t\t\t\t'description': val[12] if val[12] != 'null' else None # 12 index because extra column\n\t\t\t\t}\n\t\t\t\tseptics.append(sep)\n\t\t\ti += 1\n\n\t\tif 
database == 'project':\n\t\t\tdb_lib.bulk_insert(project_base.db, project_parmdb.Septic_sep, septics)\n\t\telse:\n\t\t\tdb_lib.bulk_insert(datasets_base.db, datasets_parmdb.Septic_sep, septics)", "def pg(file):\n global_config = get_config(file)\n config = global_config.get(\"postgres\")\n databases = config.get(\"databases\")\n for db in databases:\n result = postgres.load(config, db)\n print_result(db, result)", "def process_files(conn: Connection, path: Path) -> None:\n sql = \"INSERT OR IGNORE INTO Files (filename) VALUES (?)\"\n run_sql_on_csv(conn, path, sql, (str,))", "def _create_sqlite_file_engine( conn=next( file_path_generator ), echo=True ):\n print( \"creating connection: %s \" % conn )\n return create_engine( conn, echo=echo )", "def import_file(filepath, db):\n # Logging\n log_main = logging.getLogger(__name__)\n log_import = log_main.getChild('import_files')\n log_import = log_import.getChild(filepath.split('/')[-1])\n log_import.info('started')\n start = time()\n\n # Variables used in data processing\n memory_buff = StringIO()\n curr = None\n cols = ['tweetID', 'date', 'message', 'username', 'userID', 'language',\n 'longitude', 'latitude', 'retweet']\n sql = \"\"\"COPY \"raw_tweets\" (\"tweetID\", \"date\", \"message\", \"username\", \"userID\", \"language\", \"longitude\", \"latitude\", \"retweet\") \n FROM STDIN \n WITH (FORMAT CSV, HEADER TRUE, DELIMITER '\\t');\n \"\"\"\n \n # Try reading the file\n try:\n df = pd.read_csv(filepath, \n usecols=cols, engine='c', \n memory_map=True, low_memory=False,\n dtype={'userID': np.int64, 'tweetID': np.int64})\n except Exception as e:\n log_import.warn('error on read_csv')\n memory_buff.close()\n print (e)\n return\n\n # Attempt to open up a connection to database.\n try:\n connn = db.connect()\n conn = db.raw_connection()\n curr = conn.cursor()\n except (Exception) as e:\n log_import.warn('error on server connection')\n memory_buff.close()\n if curr is not None:\n curr.close()\n print (e)\n return\n\n # Try copying the files to table.\n try:\n # Save to our buffer\n df[cols].to_csv(memory_buff, sep='\\t',\n header=True, index=False, encoding='utf-8')\n\n # Point buffer to start of memory block\n memory_buff.seek(0)\n\n # Copy records using native Postgres COPY command (FAST)\n curr.copy_expert(sql, memory_buff)\n\n # Save transaction and commit to DB\n conn.commit()\n except (Exception) as e:\n log_import.warn('error while copying to database')\n memory_buff.close()\n if curr is not None:\n curr.close()\n print (e)\n return\n finally:\n memory_buff.close()\n if curr is not None:\n curr.close()\n log_import.info('finished ({:.2f})'.format(time() - start))\n return", "def sqlfile(path, **kw):\n sql = path.read_text()\n return sql.format(**kw)", "def process_song_file(cur, filepath):\n # open song file\n df = pd.read_json(filepath, lines=True)\n\n # insert song record\n song_data = list(df[[\"song_id\", \"title\", \"artist_id\", \"year\", \"duration\"]].values[0])\n try:\n cur.execute(song_table_insert, song_data)\n except psycopg2.Error as e:\n print(\"Error: Unable to insert record in songs table\")\n print(e)\n\n # insert artist record\n artist_data = list(df[[\"artist_id\", \"artist_name\", \"artist_location\", \"artist_latitude\", \"artist_longitude\"]].values[0])\n try:\n cur.execute(artist_table_insert, artist_data)\n except psycopg2.Error as e:\n print(\"Error: Unable to insert record in artists table\")\n print(e)", "def read_stock(db, openfile):\n pass", "def __init__(self, input_filename, config):\n\t\tself.input_filename 
= input_filename\n\t\tself.db = mysql.connect(config['mysql_host'], config['mysql_user'], config['mysql_pass'], config['mysql_db'])\n\t\tself.cursor = self.db.cursor()\n\t\t\"\"\"These are foreign keys that link the reports to their metadata and policy information\"\"\"\n\t\tself.metadata_fk = \"\"\n\t\tself.policy_fk = \"\"", "def read_database(db_path, db_file, *args):\n\n db_filepath = os.path.join(db_path, db_file)\n\n # list to store loaded data\n data_imported = []\n conn = sqlite3.connect(db_filepath)\n\n for data_name in args:\n\n\n info = f'Reading {data_name} from database................'\n print(info, end=\"\")\n data_name_in_db = conn.execute(\n f\"\"\"SELECT name FROM sqlite_master WHERE type='table' \n AND name='{data_name}'; \"\"\").fetchall()\n if data_name_in_db:\n df = pd.read_sql(f\"select * from {data_name}\", con=conn)\n substitute_names(df)\n # revert single column DataFrame to Series\n if 'index' in df.columns:\n df.set_index('index', inplace=True)\n df = df.squeeze('columns')\n data_imported.append(df)\n print('ok')\n else:\n data_imported.append(None)\n print('no data')\n conn.close()\n return data_imported #if len(data_imported)>1 else data_imported[0]", "def __init__(self, database_name):\n self.conn = sqlite3.connect(\"output/%s.db\" % database_name)", "def create_datastructure_from_sql_file(self):\n\n ## check whether the connection is OK and create db if OK\n try:\n db = MySQLdb.connect(host=self.host, user=self.user,passwd=self.password,port=self.port)\n cursor = db.cursor() \n cursor.execute(\"SELECT VERSION()\")\n results = cursor.fetchone()\n if results:\n ## note that databases are often created within the sql file\n sql = \"CREATE DATABASE IF NOT EXISTS %s\" % self.database;\n cursor.execute(sql)\n cdb = cursor.fetchone()\n cursor.execute('use ' + self.database) \n cursor.execute(file(self.source).read())\n results = cursor.fetchone()\n status = 'succeeded'\n else:\n msg = \"ERROR IN CONNECTION\"\n raise createDataStructureException(msg)\n status = 'failed'\n except MySQLdb.Error, e:\n msg = \"ERROR %d IN CONNECTION: %s\" % (e.args[0], e.args[1]) + \"\\nThe data structure \" + self.structureName + \" was not created. Make sure you have the permissions to the DB and that your data structure configuration files are correctly shaped. 
See cgs-data repository for more details.\"\n raise createDataStructureException(msg)\n status = 'failed'\n \n return(status)", "def process_song_file(cur, filepath):\n \n # open song file\n \n df = pd.read_json(filepath,lines=True)\n \n # insert song record\n song_data = df[['song_id', 'title', 'artist_id','year',\n 'duration']].values[0].tolist()\n cur.execute(song_table_insert, song_data)\n \n # insert artist record\n artist_data = df[['artist_id','artist_name',\n 'artist_location', 'artist_latitude',\n 'artist_longitude']].values[0].tolist()\n cur.execute(artist_table_insert, artist_data)", "def __init__(self, filepath):\n self.filepath = filepath\n self._conn = None\n self.conn", "def parse_file_into_db(self, filename: str, limit: Optional[int] = None):\n return parse_file(filename, self._session, self._engine, self._raw_ftrace_entry_filter, limit)", "def cursor(file_name):\n con = sql.connect(file_name)\n con.row_factory = sql.Row\n return con.cursor()", "def database_open(self):\n\t\n\t\tfilename = tkFileDialog.askopenfilename(multiple=False)\n\n\t\tif filename:\n\t\t\n\t\t\ttry:\n\t\t\t\tself.log.info(\"Trying to load file {0}\".format(filename))\n\t\t\t\tself.source = yahoo.LocalSource(filename)\n\t\t\texcept Exception, ex:\n\t\t\t\tself.log.critical(\"Cannot open file {0} as local database\".format(filename))\n\t\t\t\tself.log.error(ex.message)\n\t\t\t\n\t\t\tself.refresh_all()", "def validate_database(self, con, filename):\n # Ensure that we have avalid connection\n con.execute('CREATE TABLE t(id INTEGER NOT NULL)')\n con.close()\n # Make sure that the default database file was created (and clean up)\n assert os.path.isfile(filename)\n os.remove(filename)", "def test_get_db_list_from_file(): # ***Incomplete test\n ##########################\n # Arrange.\n infp = \"infp\"\n\n ##########################\n # Act.\n #x = get_db_list_from_file(infp)\n\n ##########################\n # Assert.\n assert True == True # ***Temporary.", "def _fromFile(self,filepath, filename):\n pass", "def __init__(self, data_path='data', db_params=ideagens):\n my_path = path.abspath(data_path)\n self.path = my_path\n\n self.db_params = db_params\n self.db = get_db(self.db_params)", "def connect(self):\n should_load_schema = False\n if not os.path.exists(self.filename):\n should_load_schema = True\n\n self._connect()\n\n if should_load_schema:\n self._load_schema()\n else:\n self._load_database()", "def process_song_file(cur, filepath):\n # open song file\n data_frame = pd.read_json(filepath, lines=True)\n\n # insert song record\n song_data = list(data_frame[['song_id', 'title', 'artist_id', 'year', 'duration']].values[0])\n cur.execute(song_table_insert, song_data)\n\n # insert artist record\n artist_data = list(\n data_frame[['artist_id', 'artist_name', 'artist_location',\n 'artist_latitude', 'artist_longitude']].values[0])\n cur.execute(artist_table_insert, artist_data)", "def process_song_file(cur, filepath):\n\n # open song file\n df = pd.read_json(filepath, lines=True)\n\n # insert song record\n song_data = list(\n df[['song_id', 'artist_id', 'title', 'year', 'duration']].values[0])\n cur.execute(song_table_insert, song_data)\n\n # insert artist record\n artist_data = list(df[['artist_id', 'artist_name', 'artist_location',\n 'artist_latitude', 'artist_longitude']].values[0])\n cur.execute(artist_table_insert, artist_data)", "def __init__(self, filename):\n\n doorControllerDB.__init__(self, \"sqlite:///\" + filename,\n echo = False,\n pool_recycle = 3600\n )", "def process_file(cur, conn, table, 
filepath):\n\n taxi_table_insert = (\"\"\"\n INSERT INTO {} (trip_id, taxi_id, trip_sec, trip_mile)\n VALUES (%s, %s, %s, %s);\n \"\"\".format(table))\n\n # open csv file\n # https://stackoverflow.com/questions/17444679/reading-a-huge-csv-file\n df = pd.read_csv(filepath)\n\n df = df[['Trip ID', 'Taxi ID', 'Trip Seconds', 'Trip Miles']]\n\n df.dropna(inplace=True)\n\n # insert trip records\n for index, row in df.iterrows():\n cur.execute(taxi_table_insert, row)\n conn.commit()", "def get_data_query(file_name):\n with open(file_name, 'r') as graphql_query:\n return graphql_query.read()", "def import_datafile(db, infile):\n res = stat(infile)\n mtime = datetime.utcfromtimestamp(res.st_mtime)\n\n hash = md5hash(infile)\n\n data_file = db.model.data_file\n\n # Should maybe make sure error is not set\n rec = db.get(data_file, hash)\n # We are done if we've already imported\n if rec is not None:\n return False\n\n # Values to insert\n cols = dict(\n file_hash=hash,\n file_mtime=mtime,\n basename=infile.stem,\n csv_data=None)\n\n try:\n cols['csv_data'] = extract_datatable(infile)\n except NotImplementedError as e:\n secho(str(e), fg='red', dim=True)\n\n tbl = data_file.__table__\n sql = (insert(tbl)\n .values(file_path=str(infile), **cols)\n .on_conflict_do_update(\n index_elements=[tbl.c.file_path],\n set_=dict(**cols)))\n db.session.execute(sql)\n return True", "def __init__(self):\r\n assert isfile(DBClass.db_name), \"Database doesn't exists!\"\r\n\r\n self.conn = self.create_connection()\r\n self.cursor = self.conn.cursor()", "def call_command(self, filename):\n import_csv_to_database.import_csv_to_database(\n 'filename={}'.format(filename),\n '--no_output=True'\n )", "def read_sql(name=\"total_trips.sql\"):\n template = pkg_resources.resource_filename(\"gojek\", join(\"sql\", name))\n with open(template, 'r') as myfile:\n query = myfile.read()\n return query", "def parse(database_path: str) -> str:\n new_path = database_path.split(\"/\")\n database_file_name = './' + new_path[-1]\n return database_file_name", "def schema_load(filename):\n print(uc.schema_load(filename))", "def __load_handler(self):\n with open(self.path) as file:\n for line in file:\n if line.startswith(\"\"\"# TABLE: \"\"\"):\n self.columndefinition = (line.strip('\\n')\n .replace(\"\"\"# TABLE: \"\"\", ''))\n self.tablename = self.name.replace('.', '_')\n self.tablename = self.tablename.replace('-', '_')\n self.md5_tablename = (hashlib.md5(self.tablename)\n .hexdigest()[:30])\n for columnelement in self.columndefinition.split(','):\n column = columnelement.split(':')[0].strip()\n self.columnnames.append(column)\n\n self.is_mime_handler = True", "def _read_query(self):\n try:\n # Open Google Drive and read the sql file\n self.query = GDrive().read_drive_file(self.input_source_id)\n except Exception as e:\n raise e", "def get_file_contents(self):\n with open(self.sql_file, 'r') as sql:\n text = sql.read()\n # text = text.replace('\\n', '\\n\\n')\n # text=sql.read()\n # TODO: fix some text replacement issues here\n # https://github.com/andialbrecht/sqlparse/issues/313\n return self.filter_text(text)", "def process_song_file(cursor, filepath):\n # open song file\n df = pd.read_json(filepath, lines=True)\n\n # insert artist record\n artist_columns = ['artist_id', 'artist_name', 'artist_location', 'artist_latitude', 'artist_longitude']\n artist_data = df[artist_columns].values[0].tolist()\n cursor.execute(artist_table_insert, artist_data)\n\n # insert song record\n song_columns = ['song_id', 'title', 'artist_id', 'year', 
'duration']\n song_data = df[song_columns].values[0].tolist()\n cursor.execute(song_table_insert, song_data)", "def process_song_file(cur, filepath):\n\n df = pd.read_json(filepath, lines=True)\n\n song_data = df[['song_id', 'title',\n 'artist_id', 'year', 'duration']].values[0]\n cur.execute(song_table_insert, song_data)\n\n artist_data = df[['artist_id', 'artist_name', 'artist_location',\n 'artist_latitude', 'artist_longitude']].values[0]\n cur.execute(artist_table_insert, artist_data)", "def loadSQL_beddays(filepath='O:\\Administration\\\\02 - Økonomi og PDK\\Medarbejdermapper\\Kasper\\Focus1 - Ad hoc opgaver\\Lungemed sengedage og visitationer\\Lungemed.sql'):\n content = open(filepath, 'r').read()\n return content", "def read_db(self):\n with open(self.filename, 'r') as database:\n data = json.load(database)\n self.data = data", "def __init__(self, path, db_backend_factory=sqlite3.connect):\n path = path.decode('utf8')\n self._dbfile = path\n empty = not os.path.exists(path)\n self._dbcon = db_backend_factory(path)\n self._dbopen = True\n if empty:\n self._create_db()", "def parse(self, sql):\n # Get a temporary file name for sqlite\n db_file = tempfile.NamedTemporaryFile('w')\n # Connect to the temporary file.\n self.db = sqlite3.connect(db_file.name)\n # Enable foreign keys.\n self.db.execute('pragma foreign_keys=ON')\n # Get a cursor instance.\n self.cursor = self.db.cursor()\n\n # If sql is not a string assume it is a file.\n if not isinstance(sql, str):\n # Read the file into sql.\n sql = str(sql.read())\n\n # Execute the SQL statements from the input.\n self.cursor.executescript(sql)\n\n # Get all table names.\n self.cursor.execute(\n \"SELECT name FROM sqlite_master WHERE type='table';\")\n tables = self.cursor.fetchall()\n\n # Initialise the variable containing the parsed tables.\n self.tables = OrderedDict()\n # Run through all tables.\n for table in tables:\n # Create an entry for each table.\n self.tables[table[0]] = OrderedDict()\n\n # Get info on columns and primary keys.\n self.cursor.execute('PRAGMA table_info({})'.format(table[0]))\n # For each column\n for sql_column in self.cursor.fetchall():\n # Create an empty column entry.\n column = dict()\n # Set the name.\n column['name'] = sql_column[1]\n # Set the type\n column['type'] = sql_column[2]\n # Determine if this is a primary key\n column['primary'] = False\n if sql_column[5] == 1:\n column['primary'] = True\n # We do not know if this key has a reference yet.\n column['foreign'] = False\n\n # Add the column to the table.\n self.tables[table[0]][sql_column[1]] = column\n\n # Get information on foreign keys.\n self.cursor.execute('PRAGMA foreign_key_list({});'.format(table[0]))\n # Run through all foreign keys\n for foreign_key in self.cursor.fetchall():\n # Find the column by its name.\n for name, column in self.tables[table[0]].items():\n # Search for the name of the source column.\n if name == foreign_key[3]:\n # Add the referenced table and column in dot notation.\n self.tables[table[0]][name]['foreign'] = '{}.{}'.format(foreign_key[2], foreign_key[4])\n\n # Close the database connection\n self.db.close()\n # Make the cursor unusable for good measure.\n self.cursor = None\n\n # Run through the parsed tables and dispatch to the related call backs.\n for table_name, columns in self.tables.items():\n # New table.\n self.add_table(table_name)\n\n # Table columns.\n for column in columns.values():\n # Primary key.\n if column['primary'] is True:\n self.add_column_primary(column['name'], column['type'])\n # Foreign 
key.\n if column['foreign'] is not False:\n self.add_column_foreign(column['name'], column['type'], column['foreign'])\n # Just a column.\n if ((column['primary'] is not True) and\n (column['foreign'] is False)):\n self.add_column(column['name'], column['type'])", "def load_db(file):\n if os.path.isfile(file):\n try:\n start = time.time()\n db = []\n with open(file, 'r') as f:\n for item in json_lines.reader(f):\n db.append(item)\n stop = time.time() - start\n print(\"load_db time: \", stop, 'sec')\n return db\n except Exception as e:\n print(file, \"is probably corrupted. Creating empty db now...\")\n DbManager.erase_db(file)\n raise e\n\n else:\n # corrupt...\n print(\"database not found. creating new\")\n DbManager.new_db(file)", "def parse(_log, _config, file_path, db_path, atomic_properties, molecular_properties):\n output_dir = os.path.dirname(db_path)\n create_dirs(_log=_log, output_dir=output_dir)\n generate_db(file_path, db_path, atomic_properties, molecular_properties)", "def main():\n \n conn = psycopg2.connect(\"host=127.0.0.1 dbname=sparkifydb user=student password=student\")\n \n cur = conn.cursor()\n process_data(cur, conn, filepath='data/song_data',\n func=process_song_file) \n \n process_data(cur, conn, filepath='data/log_data',\n func=process_log_file)\n \n conn.close()", "def process_song_file(cur, filepath):\n df = pd.read_json(filepath, lines=True)\n \n # insert artist record\n artist_data = df[['artist_id', 'artist_name', 'artist_location',\n 'artist_latitude', 'artist_longitude']].values[0].tolist()\n cur.execute(artist_table_insert, artist_data)\n \n # insert song record\n song_data = df[['song_id', 'title', 'artist_id',\n 'year', 'duration']].values[0].tolist()\n cur.execute(song_table_insert, song_data)", "def __init__(self, dbfile=\"eom_default_db.sqlite\", init_db=False):\n self.sql = None\n missing = not os.path.exists(dbfile)\n self.sql = sqlite3.connect(dbfile, detect_types = sqlite3.PARSE_DECLTYPES)\n self.sql.text_factory = str\n if missing or init_db:\n self.init_rpki_rtr_tables()\n self.init_rib_tables()\n self.init_analysis_tables()", "def test_get_infile(self):\r\n pass # not practically testable, but obvious file I/O\r", "def test_process_file(self):\n # 1\n self.assertEqual(get_file_reply(files[0][0], files[0][1]), \"Inserted 4 Records\")\n results = self.database_connection.select('''SELECT COUNT(*) FROM ''' + table_name)[0][0]\n # 2\n self.assertEqual(results, 4)\n # csv, renewing connection\n self.database_connection.connect()\n # 3\n self.assertEqual(get_file_reply(files[1][0], files[1][1]), \"Inserted 4 Records\")\n results = self.database_connection.select('''SELECT COUNT(*) FROM ''' + table_name)[0][0]\n # 4\n self.assertEqual(results, 8)\n self.database_connection.connect()\n # 5\n self.assertFalse(get_file_reply(files[0][0], files[1][1]))", "def load_document(self, file_type, file_name):\n\n status, output = commands.getstatusoutput(\"mongoimport -h %s -p %s -u %s -p %s -d %s -c %s --type %s --file %s --headerline\" % (self.host, self.port, self.username, self.password, self.db_name, self.collection_name, file_type, file_name)) \n print \"status is\", status\n print \"output is\", output", "def test_import_process(self):\r\n good_file = self._get_file()\r\n imp = Importer(good_file, username=u\"admin\")\r\n imp.process()\r\n\r\n # now let's do some db sanity checks\r\n self._firefox_data_test()", "def run_sql_file(filename, connection, version, lastversion):\n cursor = connection.cursor()\n for line in open(filename):\n cursor.execute(line)\n 
connection.commit()\n cursor.execute(\n \"update ecs.versionTable SET ecs.versionTable.version='{}' \"\n \"where ecs.versionTable.version ='{}';\".format(version, lastversion))\n connection.commit()\n print(\"VersionTable updated. Current version is now: {}\".format(version))", "def ReadPDB (self, pdb_path, db_path):\n\n ReadPDBFile (pdb_path, db_path)\t#", "def test_import_process(self):\r\n good_file = self._get_file()\r\n imp = Importer(good_file, username=u\"admin\")\r\n imp.process()\r\n\r\n # now let's do some db sanity checks\r\n self._chrome_data_test()", "def loadSQL_visitations(filepath='O:\\Administration\\\\02 - Økonomi og PDK\\Medarbejdermapper\\Kasper\\Focus1 - Ad hoc opgaver\\Lungemed sengedage og visitationer\\Lungemed_visitationsoprindelse_nogroup.sql'):\n content = open(filepath, 'r').read()\n return content", "def __init__(self, db_path, db_name):\n self.db_path = db_path\n self.db_name = db_name", "def main():\n conn = psycopg2.connect(\"host=127.0.0.1 dbname=hallo user=hallo password=hallo\")\n cur = conn.cursor()\n\n process_data(cur, conn, filepath='data/song_data', func=process_song_file)\n process_data(cur, conn, filepath='data/log_data', func=process_log_file)\n\n conn.close()", "def parse(self, infile):\r\n raise NotImplementedError()" ]
[ "0.67936474", "0.6777492", "0.6385129", "0.6375183", "0.63454866", "0.6284782", "0.6269853", "0.6250333", "0.6244061", "0.6234011", "0.6226429", "0.6214333", "0.6204602", "0.61976486", "0.61935747", "0.61779267", "0.61766285", "0.6129", "0.6126773", "0.60768723", "0.60636026", "0.5999145", "0.5995669", "0.5950325", "0.5941797", "0.59370166", "0.5924094", "0.59149104", "0.5914893", "0.590297", "0.5897313", "0.588348", "0.58811146", "0.58701766", "0.58689415", "0.583607", "0.5834846", "0.5831355", "0.58120614", "0.5805212", "0.58035356", "0.58006996", "0.57966864", "0.57701993", "0.5763655", "0.5741577", "0.57280904", "0.5726529", "0.5712195", "0.57018065", "0.5700416", "0.5693599", "0.5690187", "0.5680671", "0.5679258", "0.5675436", "0.567238", "0.5668563", "0.5666807", "0.5663417", "0.56353694", "0.5632762", "0.56310296", "0.56199116", "0.56179386", "0.5615591", "0.5614094", "0.56068593", "0.5606327", "0.5604421", "0.5600804", "0.5593738", "0.55803007", "0.55746865", "0.5568844", "0.5567395", "0.5558725", "0.5555439", "0.55542827", "0.55534923", "0.55495065", "0.5541978", "0.5539855", "0.5538974", "0.55338144", "0.55231357", "0.5521545", "0.5514793", "0.55121803", "0.5510875", "0.54975426", "0.5496655", "0.5495226", "0.5493093", "0.549075", "0.54807997", "0.5479144", "0.5477439", "0.5475435", "0.5473132", "0.54729974" ]
0.0
-1
This parses the content of nohup. The size of nohup is basically unlimited but each line has to be under 300000 characters(?). This then returns the following...\n\n index 0 a list of all the occurrences of realTimeFactor\n index 1 a list of all the occurrences of transcriptions\n index 2 a list of all the occurrences of the transcription ID\n index 3 a list of all the occurrences of the total transcription time.\n\n \n\n \Example usage\n parsedContent = nohupTranscriptionContent("ok.txt")
def nohupTranscriptionContent(filePath):
    try:
        continu = True
        fileContent = ""
        f = open(filePath, 'r')
        while (continu):
            temp = f.readline(900000)
            if(len(temp) == 0):
                continu = False
            else:
                fileContent += temp
        results = []
        realTimeFactor = re.findall(r'Timing stats: real-time factor for offline decoding was (.*?) = ', fileContent)
        results.append(realTimeFactor)
        transcription = re.findall(r'utterance-id(.*?) (.*?)\n', fileContent)
        transcriptionList = []
        transcriptionIDList = []
        for item in transcription:
            if(len(item[1]) > 1000):
                transcriptionIDList.append(item[0])
                transcriptionList.append(item[1])
        results.append(transcriptionList)
        results.append(transcriptionIDList)
        transcriptionTime = re.findall(r'seconds / (.*?) seconds\.', fileContent)
        results.append(transcriptionTime)
        return results
    except Exception as e:
        Tools.writeException("nohupTranscriptionContent", e)
        return False
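A minimal usage sketch of the returned value (the index layout follows the query above; "ok.txt" is just the placeholder path from the example usage, and the function returns False on failure):

# Illustrative sketch only: unpack the four parallel result lists described above.
parsedContent = nohupTranscriptionContent("ok.txt")
if parsedContent:
    # index 0: real-time factors, 1: transcriptions, 2: transcription IDs, 3: transcription times
    realTimeFactors, transcriptions, transcriptionIDs, transcriptionTimes = parsedContent
    print(len(realTimeFactors), "real-time factor entries")
    print(len(transcriptions), "transcriptions longer than 1000 characters")
    print(len(transcriptionTimes), "transcription time entries")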
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fileTranscriptionContent(filePath):\n try:\n continu = True\n f = open(filePath, 'r')\n fileContent = \"\"\n while (continu):\n temp = f.readline(300000)\n if(len(temp) == 0):\n continu = False\n else:\n fileContent += temp\n results = []\n f.close()\n url = re.findall(r'URL:(.*?)\\n', fileContent)\n results.append(url)\n realTimeFactor = re.findall(r'Timing stats: real-time factor for offline decoding was (.*?) = ', fileContent)\n results.append(realTimeFactor)\n transcription = re.findall(r'utterance-id1 (.*?)\\n', fileContent)\n for item in transcription:\n if(len(item) > 500):\n results.append(item.replace(\"'\", \"''\"))\n if((len(results[0]) > 0) and (len(results[1]) > 0) and (len(results[2]) > 0)):\n return results\n else:\n Tools.writeException(\"fileTranscriptionContent\", \"ERROR attempted to parse \" + filePath + \" but got \" + str(results))\n return False\n except Exception as e:\n Tools.writeException(\"fileTranscriptionContent\", e)", "def parseUpload(dbconnection, fileName):\n nhContent = ParseText.nohupTranscriptionContent(fileName)\n count = 0\n while count < len(nhContent[0]):\n try:\n rtf = nhContent[0][count]\n transcription = nhContent[1][count].replace(\"'\", \"''\").replace(\"_\", \"\")\n dbID = nhContent[2][count].replace(\".\", \"\")\n duration = nhContent[3][count]\n DatabaseInteract.insertTranscription(dbconnection, rtf, transcription, duration, dbID)\n count += 1\n except:\n print(\"couldnt upload one at index \" + str(count))\n count += 1", "def parser_txt_file(self, content):\n ai_cpu_str = str(content.replace(b'\\n\\x00', b' ___ ').replace(b'\\x00', b' ___ '))[2:-1]\n ai_cpu_lines = ai_cpu_str.split(\" ___ \")\n result_list = list()\n ai_cpu_total_time_summary = 0\n # Node serial number.\n serial_number = 1\n for i in range(len(ai_cpu_lines) - 1):\n node_line = ai_cpu_lines[i]\n thread_line = ai_cpu_lines[i + 1]\n if \"Node\" in node_line and \"Thread\" in thread_line:\n # Get the node data from node_line\n result = self._get_kernel_result(\n serial_number,\n node_line.split(','),\n thread_line.split(',')\n )\n\n if result is None:\n continue\n\n result_list.append(result)\n # Calculate the total time.\n total_time = result[2]\n ai_cpu_total_time_summary += total_time\n # Increase node serial number.\n serial_number += 1\n elif \"Node\" in node_line and \"Thread\" not in thread_line:\n node_type_name = node_line.split(',')[0].split(':')[-1]\n logger.warning(\"The node type:%s cannot find thread data\", node_type_name)\n return ai_cpu_total_time_summary, result_list", "def _parse(self, verbose=False):\n instructions = json.load(open(self.filename, 'rb'))\n self.bpm = instructions['header']['bpm']\n self.ticks_per_beat = instructions['header']['PPQ']\n self.song_length = instructions['duration']\n self.phraseLength = instructions['phraseLength']\n\n print ('Parsing file:', self.filename)\n print ('Title', instructions['header']['name']) \n print ('BPM', self.bpm) \n\n EIGHTH_NOTE_INTERVAL_S = 60 / (2*self.bpm)\n\n # Parse the messages into buckets for each half-beat. 
Put them in 32-beat chunks\n chunks = []\n current_chunk = []\n index = 0\n for time in np.arange(0, self.song_length, EIGHTH_NOTE_INTERVAL_S):\n for message in instructions['tracks'][1]['notes']:\n if (message['time'] >= time and message['time'] < time + EIGHTH_NOTE_INTERVAL_S):\n current_chunk.append(str(message['midi']))\n chunks.append(current_chunk)\n index += 1\n current_chunk = []\n\n # For each bucktet, create parsed messages\n phrases = []\n current_phrase = []\n current_phrase_parsed = []\n for phrase_index in range(self.phraseLength):\n current_phrase = chunks[phrase_index*self.phraseLength:(phrase_index+1)*self.phraseLength]\n index_word = 0\n for word in current_phrase:\n word_parsed = str(index_word) + ',' + ','.join(word)\n if index_word == 0:\n self.initial_notes.append(word_parsed)\n current_phrase_parsed.append(word_parsed)\n index_word += 1\n phrases.append(current_phrase_parsed)\n current_phrase_parsed = []\n current_phrase=[]\n\n # Put them in the markov-chain\n for phrase in phrases:\n self._sequence(phrase)\n \n # Print out the resulting chunks\n if verbose:\n print ('Initial notes', self.initial_notes)\n print ('Matrix')\n self.markov_chain.print_as_matrix(20)", "def parse_headlines(self):\n headlines = re.findall(r\"^\\.\\.\\.(.*?)\\.\\.\\.[ ]?\\n\\n\", self.unixtext,\n re.M | re.S)\n headlines = [\" \".join(h.replace(\"...\",\n \", \").replace(\"\\n\", \" \").split())\n for h in headlines]\n return headlines", "def readTotitle(fh):\n preLines = []\n while True:\n l = fh.readline().strip()\n if l.startswith('>'):\n return (preLines,l)\n elif l == '':\n return preLines,None\n else:\n preLines.append(l)", "def parse(lines: List[str]):\n\n len_lines = len(lines)\n i = 0\n\n node_list = []\n \n while i < len_lines:\n line = lines[i]\n l = line.strip()\n if len(l) == 0:\n i += 1\n continue\n ls = l.split(\"\\t\")\n nlines = int(ls[0])\n content_lines = lines[i: i + nlines + 1]\n node = _build_node(content_lines)\n node_list.append(node)\n\n i = i + nlines + 1\n \n return node_list", "def read_subtitles(self):\n\n # Group 1: index, Group 2: Start Time, Group 3: End Time, Group 4: Text\n\n patterns = [\n r\"(\\d+)\\n(\\d\\d:\\d\\d:\\d\\d,\\d\\d\\d) --> (\\d\\d:\\d\\d:\\d\\d,\\d\\d\\d)\\n((?:.+\\n)*.+)\",\n r\"(\\d+)\\r\\n(\\d\\d:\\d\\d:\\d\\d,\\d\\d\\d) --> (\\d\\d:\\d\\d:\\d\\d,\\d\\d\\d)\\r\\n((?:.+\\r\\n)*.+)\",\n # Reports pattern\n r\"(\\d+)\\r(\\d\\d:\\d\\d:\\d\\d,\\d\\d\\d) --> (\\d\\d:\\d\\d:\\d\\d,\\d\\d\\d)\\n((?:.+\\r)*.+)\"\n ]\n\n for pattern in patterns:\n re_subs = re.findall(pattern, self.subtitles, re.M | re.I)\n if(len(re_subs) > 1):\n self.re_subs = re_subs\n return\n\n raise Exception(\n f're_subs length is {len(re_subs)}. 
Maybe the regex pattern is falty?')", "def parse_story_file(content):\n content_raw = content.split(\"@highlight\")[0]\n content = \" \".join(filter(None, [x.strip() for x in content_raw.split(\"\\n\")]))\n return content", "def pre_process(fname, num_ex, alt_speaker):\n conversation = []\n with PathManager.open(fname) as f:\n lines = f.readlines()\n random.shuffle(lines)\n lines = lines[:num_ex]\n for line in lines:\n data = json.loads(line)\n dialogue = data[\"dialog\"]\n for item in dialogue:\n speaker = item[0]['id']\n text = item[0]['text']\n conversation += [(speaker, text)]\n conversation += [(END_OF_CONVO, END_OF_CONVO)]\n\n return conversation", "def parse_transcript(self):\n\t\t\n\t\toutput_text = tempfile.NamedTemporaryFile(mode = 'r')\n\t\twith tempfile.NamedTemporaryFile(delete=False) as input_text:\n\t\t\tinput_text.write(self.transcript_string.encode('utf-8'))\n\t\t\t#to write to the file, convert to utf-8; to use for jinja, convert it back to unicode\n\n\t\tos.popen(\"python vocab_resources/splitta/sbd.py -m vocab_resources/splitta/model_nb -t \" + input_text.name +\" -o \" + output_text.name)\n\t\tos.remove(input_text.name)\n\n\t\twith open(output_text.name) as parsed_text:\n\t\t\tsentence_index = {}\n\t\t\tfor index, sentence in enumerate(parsed_text):\n\t\t\t\tsentence = sentence.rstrip()\n\t\t\t\tsentence_index[index] = sentence\n\n\t\tsentence_index[len(sentence_index)] = \"Unable_to_find_matching_sentence\" #avoid outliers\n\t\tself.sentence_index = sentence_index", "def haikus_for_document(filename):\n text = get_text(filename)\n haikus = []\n # SpaCy has a maximum text size of 1,000,000 characters.\n # Let's use one fewer to be on the safe side.\n for chunk in chunks(text,999_999): # this underscore syntax was introduced in Python 3.6\n doc = nlp(chunk)\n for sent in doc.sents:\n haiku = check_haiku(sent)\n if haiku:\n haikus.append(haiku)\n return haikus", "def get926Monologue(filename):\n doc = Document(filename)\n monologue = \"\"\n o = re.compile('track [0-9][0-9]')\n\n # iterate over all paragraphs to get text units\n for para in doc.paragraphs:\n paragraph = para.text\n\n # timestamp e.g 15:01:27\n isHeader = o.match(paragraph.lower()) \n # ensure paragraph is not just empty line\n hasText = paragraph.lstrip()\n\n # ensure it is not an empty line\n if hasText and not isHeader:\n monologue += paragraph\n \n return [{'unit': monologue}]", "def read_transcription_file(file_path, audio_file_path):\n with open(file_path) as in_file:\n last_timestamp = 0\n res = []\n transcription = \"\"\n for line in in_file:\n time_stamp_match = re.match(\"\\[([0-9\\]+\\.[0-9]+)\\]\", line)\n #if this regex matched then the line is a timestamp\n if time_stamp_match:\n timestamp = float(time_stamp_match.group(1))\n if transcription and transcription.strip() not in ['(())', \"<no-speech>\"]:\n single_instance = {\"start_time\": last_timestamp, \n \"end_time\": timestamp,\n \"transcription\": transcription,\n \"audio_file\" : audio_file_path}\n res.append(single_instance)\n last_timestamp = timestamp\n else:\n last_timestamp = timestamp # this handles silence at beginning\n else:\n transcription = line.strip()\n \n return res", "def parse_sambamba_output(self):\r\n exons = []\r\n with open (self.file_path, \"r\") as sambamba_output:\r\n for line in sambamba_output:\r\n if line.startswith('#'):\r\n fields = line.strip().split()\r\n else:\r\n description = list(line.strip().split())\r\n i = 0\r\n exon_dict = {}\r\n while i<len(fields):\r\n exon_dict[fields[i]] = description[i]\r\n i += 
1\r\n exons.append(exon_dict)\r\n return exons", "def analyse(self):\n logging.info(\"transferring text to CorpusCook...\")\n\n paragraphs = self.text.split('\\n\\n')\n print(\"mean length of splitted lines\", (mean([len(p) for p in paragraphs])))\n\n # If TIKA resolved '\\n'\n if (mean([len(p) for p in paragraphs])) > 80:\n paragraphs = [re.sub(r\"- *\\n\", '', p) for p in paragraphs]\n paragraphs = [p.replace('\\n', \" \") for p in paragraphs]\n paragraphs = [p.replace(';', \" \") for p in paragraphs]\n joiner = \" \"\n else:\n # If TIKA did not\n joiner = \" \"\n\n processed_text = joiner.join([p\n for p in paragraphs\n if\n p and\n ks_2samp(self.normal_data, list(p)).pvalue > self.threshold\n ]\n )\n\n return processed_text.strip()[:self.length_limit]", "def create_hn_text(self):\n text_list = [f\"Top {STORIES_NUMBER} from HackerNews:\"]\n sorted_stories = self.get_top_stories()\n # Format slack text\n for story in sorted_stories:\n text_list.append(\n \"*<{}|{}>* - <{}|{}>\".format(\n \"{}/item?id={}\".format(HN_URL, story[\"id\"]),\n story[\"score\"],\n # Ask HN type posts do not have 'url' key, so using get to return None\n story.get('url'),\n story[\"title\"],\n )\n )\n self.logger.debug(text_list)\n return \"\\n>\".join(text_list)", "def parseOutText(f):\n\n\n f.seek(0) ### go back to beginning of file (annoying)\n all_text = f.read()\n ### split off metadata\n \n content = re.split(\"X-FileName:.*$\", all_text, flags=re.MULTILINE, maxsplit=1)\n words = \"\"\n if len(content) > 1:\n text_string = content[1]\n\n ## remove mails that are forwarded or to which are responded\n # e.g. ---------------------- Forwarded\"\n text_string = re.split(\"-*\\sForwarded\", text_string, maxsplit=1)[0]\n\n # -----Original Message-----\n text_string = re.split(\"-*\\Original\\sMessage\", text_string, maxsplit=1)[0]\n\n # Vince J Kaminski@ECT\n # 04/30/2001 02:28 PM\n # To:\tStanley Horton/Corp/Enron@Enron, Danny McCarty/ET&S/Enron@Enron\n # cc:\tVince J Kaminski/HOU/ECT@ECT \n # or\n # Vince J Kaminski@ECT\n # 04/30/2001 02:28 PM\n # to:\tStanley Horton/Corp/Enron@Enron, Danny McCarty/ET&S/Enron@Enron\n # cc:\tVince J Kaminski/HOU/ECT@ECT \n \n text_string = re.split(\"((.*\\n){2})[Tt]o:\\s\", text_string, maxsplit=1)[0]\n\n ### remove punctuation\n # should be autopmatically by scikit learn\n #text_string = text_string.translate(string.maketrans(\"\", \"\"), string.punctuation)\n\n ### project part 2: comment out the line below\n #words = text_string\n\n ### split the text string into individual words, stem each word,\n ### and append the stemmed word to words (make sure there's a single\n ### space between each stemmed word)\n from nltk.stem.snowball import SnowballStemmer\n\n stemmer = SnowballStemmer(\"english\")\n words = [stemmer.stem(word) for word in text_string.split()]\n\n\n\n return \" \".join(words)", "def transcript_lines(transcript_text):\n lines = []\n for line in transcript_text.splitlines():\n if line.strip() and line.strip()[0] != '#':\n split = line.split(':')\n speaker = split[0][-1]\n utterance = ' '.join(split[1:]).strip()\n lines.append((speaker, utterance))\n return lines", "def parse(text, showToc=True):\n p = Parser(show_toc=showToc)\n return p.parse(text)", "def transcribe(self, fp):\n\n fp.seek(44, os.SEEK_SET)\n\n # FIXME: Can't use the Decoder.decode_raw() here, because\n # pocketsphinx segfaults with tempfile.SpooledTemporaryFile()\n data = fp.read()\n transcribed = []\n while True:\n try:\n self._decoder.start_utt()\n self._decoder.process_raw(data, False, True)\n 
self._decoder.end_utt()\n hyp = self._decoder.hyp()\n result = hyp.hypstr if hyp is not None else ''\n transcribed = [result] if result != '' else []\n self._logger.info('Transcribed: %r', transcribed)\n break\n except RuntimeError:\n self.reinit()\n\n if self._logfile is not None:\n with open(self._logfile, 'r+') as f:\n for line in f:\n self._logger.debug(line.strip())\n if self._logger.getEffectiveLevel() == logging.DEBUG:\n print(line.strip())\n f.truncate()\n\n return transcribed", "def ParseSeqFile(FilePath):\n SeqFile = rSeqFile(FilePath)\n TidyFile = TidyLines(SeqFile)\n \n result = []\n\n for line in TidyFile:\n t = ( ProcessLine(line) )\n result.append(t)\n return(result)", "def parse_fastqc_report(self, file_contents, s_name=None, root=None):\n \n section_headings = {\n 'sequence_quality': r'>>Per base sequence quality\\s+(pass|warn|fail)',\n 'per_seq_quality': r'>>Per sequence quality scores\\s+(pass|warn|fail)',\n 'sequence_content': r'>>Per base sequence content\\s+(pass|warn|fail)',\n 'gc_content': r'>>Per sequence GC content\\s+(pass|warn|fail)',\n 'n_content': r'>>Per base N content\\s+(pass|warn|fail)',\n 'seq_length_dist': r'>>Sequence Length Distribution\\s+(pass|warn|fail)',\n 'seq_dup_levels': r'>>Sequence Duplication Levels\\s+(pass|warn|fail)',\n 'adapter_content': r'>>Adapter Content\\s+(pass|warn|fail)',\n }\n stats_regexes = {\n 'total_sequences': r\"Total Sequences\\s+(\\d+)\",\n 'sequence_length': r\"Sequence length\\s+([\\d-]+)\",\n 'percent_gc': r\"%GC\\s+(\\d+)\",\n 'percent_dedup': r\"#Total Deduplicated Percentage\\s+([\\d\\.]+)\",\n 'percent_duplicates': r\"#Total Duplicate Percentage\\s+([\\d\\.]+)\", # old versions of FastQC\n }\n \n # Make the sample name from the input filename if we find it\n fn_search = re.search(r\"Filename\\s+(.+)\", file_contents)\n if fn_search:\n s_name = self.clean_s_name(fn_search.group(1) , root)\n \n # Throw a warning if we already have this sample and remove prev data\n # Unzipped reports means that this can be quite frequent\n # This gives a good idea of how horribly messy this module has become\n # TODO: Refactorrrr!\n if s_name in self.fastqc_stats:\n for k in self.fastqc_data:\n if k == 'sequence_quality':\n for j in self.fastqc_data[k]:\n self.fastqc_data[k][j].pop(s_name, None)\n elif k == 'adapter_content':\n aks = self.fastqc_data[k].keys()\n for s in aks:\n sn, _ = s.split(' - ')\n if sn == s_name:\n self.fastqc_data[k].pop(s, None)\n else :\n self.fastqc_data[k].pop(s_name, None)\n \n for k in self.fastqc_statuses:\n self.fastqc_statuses[k].pop(s_name, None)\n self.fastqc_stats.pop(s_name, None)\n log.debug(\"Duplicate sample name found! 
Overwriting: {}\".format(s_name))\n \n s = defaultdict(lambda: dict())\n s['seq_len_bp'] = 0\n s['seq_len_read_count'] = 0\n self.seq_lengths = set()\n adapter_types = []\n in_module = None\n for l in file_contents.splitlines():\n \n # Search for general stats\n for k, r in stats_regexes.items():\n r_search = re.search(r, l)\n if r_search:\n try:\n s[k] = float(r_search.group(1))\n except ValueError:\n s[k] = r_search.group(1) \n \n # Parse modules\n if in_module is not None:\n if l == \">>END_MODULE\":\n in_module = None\n else:\n \n if in_module == 'sequence_quality':\n quals = re.search(\"([\\d-]+)\\s+([\\d\\.]+)\\s+([\\d\\.]+)\\s+([\\d\\.]+)\\s+([\\d\\.]+)\\s+([\\d\\.]+)\\s+([\\d\\.]+)\", l)\n if quals:\n bp = self.avg_bp_from_range(quals.group(1))\n groups = ['base', 'mean', 'median', 'lower_quart', 'upper_quart', '10_percentile', '90_percentile']\n for idx, g in enumerate(groups):\n try:\n self.fastqc_data['sequence_quality'][g][s_name][bp] = float(quals.group( idx + 1 ))\n except:\n self.fastqc_data['sequence_quality'][g][s_name][bp] = quals.group( idx + 1 )\n \n \n if in_module == 'per_seq_quality' or in_module == 'n_content':\n sections = l.split()\n try:\n self.fastqc_data[in_module][s_name][float(sections[0])] = float(sections[1])\n except ValueError:\n pass # First line - headers\n \n if in_module == 'sequence_content':\n l.replace('NaN','0')\n seq_matches = re.search(\"([\\d-]+)\\s+([\\d\\.]+)\\s+([\\d\\.]+)\\s+([\\d\\.]+)\\s+([\\d\\.]+)\", l)\n if seq_matches:\n bp = self.avg_bp_from_range(seq_matches.group(1))\n groups = ['base', 'G', 'A', 'T', 'C']\n for idx, g in enumerate(groups):\n if idx == 0:\n self.fastqc_data['sequence_content'][s_name][bp][g] = seq_matches.group( idx + 1 )\n else:\n self.fastqc_data['sequence_content'][s_name][bp][g] = float(seq_matches.group( idx + 1 ))\n \n if in_module == 'gc_content':\n gc_matches = re.search(\"([\\d]+)\\s+([\\d\\.E]+)\", l)\n if gc_matches:\n self.fastqc_data['gc_content'][s_name][int(gc_matches.group(1))] = float(gc_matches.group(2))\n \n if in_module == 'seq_length_dist':\n len_matches = re.search(\"([\\d-]+)\\s+([\\d\\.E]+)\", l)\n if len_matches:\n bp = self.avg_bp_from_range(len_matches.group(1))\n self.fastqc_data['seq_length_dist'][s_name][bp] = float(len_matches.group(2))\n self.seq_lengths.add(bp)\n s['seq_len_bp'] += float(len_matches.group(2)) * bp\n s['seq_len_read_count'] += float(len_matches.group(2))\n \n if in_module == 'seq_dup_levels':\n if l[:1] == '#':\n # Start of module - replace default dict with an OrderedDict\n self.fastqc_data['seq_dup_levels'][s_name] = OrderedDict()\n continue # Skip header line\n sections = l.split()\n try:\n # Version 11 of FastQC\n # #Duplication Level\tPercentage of deduplicated\tPercentage of total\n self.fastqc_data['seq_dup_levels_dedup'][s_name][sections[0]] = float(sections[1])\n self.fastqc_data['seq_dup_levels'][s_name][sections[0]] = float(sections[2])\n except IndexError:\n # Version 10 of FastQC and below just gives percentage, no % of dedup\n # #Duplication Level\tRelative count\n self.fastqc_data['seq_dup_levels'][s_name][sections[0]] = float(sections[1])\n \n if in_module == 'adapter_content':\n if l[:1] == '#':\n adapter_types = l[1:].split(\"\\t\")[1:]\n else:\n cols = l.split(\"\\t\")\n pos = int(cols[0].split('-', 1)[0])\n for idx, val in enumerate(cols[1:]):\n a = adapter_types[idx]\n k = \"{} - {}\".format(s_name, a)\n self.fastqc_data['adapter_content'][k][pos] = float(val)\n \n else:\n # See if this is the start of a new section\n for k, r in 
section_headings.items():\n r_search = re.search(r, l)\n if r_search:\n in_module = k\n # Add to the global statuses dict\n self.fastqc_statuses[k][s_name] = r_search.group(1)\n \n # Work out the average sequence length\n if s['seq_len_read_count'] > 0:\n s['avg_sequence_length'] = s['seq_len_bp'] / s['seq_len_read_count']\n \n # Get percent duplicates (percent unique given)\n if 'percent_dedup' in s:\n s['percent_duplicates'] = 100 - s['percent_dedup']\n \n # Add parsed stats to dicts\n self.fastqc_stats[s_name] = s", "def parse_text(self):\n text = self.get_data()\n line1 = text[0]\n index_list = [0]\n start_index = 3\n for i in range(1, len(text)):\n\n if line1.startswith('*'):\n index_list, start_index = self.star_parser(index_list, line1)\n elif line1.startswith('.'):\n start_index = self.dot_parser(start_index, line1, text, i)\n else:\n print \"\".rjust(start_index) + line1\n line1 = text[i]\n # Parse the last line\n if text[-1].startswith('*'):\n self.star_parser(index_list, text[-1])\n elif text[-1].startswith('.'):\n print '-'.rjust(start_index) + text[-1].lstrip('.')\n else:\n print \"\".rjust(start_index) + text[-1]", "def entrez_fasta_parser(handleFasta):\n fullList = handleFasta.read().split(\"\\n\") \n resL = []\n seqFlag = False\n for fullLine in fullList:\n if fullLine == \"\":\n seqFlag = False\n continue\n elif fullLine[0] == \">\":\n resL.append(fullLine + \"\\n\")\n seqFlag = True\n elif seqFlag:\n resL[-1] += fullLine \n return resL", "def parse_file(self, file_path) -> list:\n data = []\n with open(file_path, 'rb') as f:\n lines = pickle.load(f)\n for line in lines:\n input, output = line\n if input.strip() == \"\" or output.strip() == \"\":\n continue\n input_len = len(input.split())\n output_len = len(output.split())\n if input_len > 50 or output_len > 50:\n continue\n data_item = Text2TextDataItem(input_text=input, output_text=output, tokenizer=self.tokenizer,\n share_vocab=self.share_vocab)\n data.append(data_item)\n return data", "def hyou_reader():\n with open(HYOU_FILE_PATH, 'r') as voc_file:\n\n voc_list = []\n lesson_list = []\n\n voc_match = [\n re.compile(r\"^(\\S+)\\s*((\\S*))\\s*〔(\\S*)〕\\s*(\\S+)\"),\n re.compile(r\"^(\\S+)\\s*((\\S*))\\s*(\\S+)\"),\n re.compile(r\"^(\\S+)\\s*〔(\\S+)〕\\s*(\\S+)\"),\n re.compile(r\"^(\\S+)\\s*(\\S+)\")\n ]\n\n voc_key = [\n {\"Voc\": 1, \"Ext\": 2, \"Type\": 3, \"Meaning\": 4},\n {\"Voc\": 1, \"Ext\": 2, \"Type\": 0, \"Meaning\": 3},\n {\"Voc\": 1, \"Ext\": 0, \"Type\": 2, \"Meaning\": 3},\n {\"Voc\": 1, \"Ext\": 0, \"Type\": 0, \"Meaning\": 2},\n ]\n\n match_count = len(voc_match)\n voc_count = 0\n lesson_count = 0\n\n for voc_line in voc_file:\n if voc_line.find(\"第\") != -1 and voc_line.find(\"课\") != -1:\n voc_len = len(voc_list)\n if voc_len > 0:\n lesson_list.append(voc_list)\n\n voc_list = []\n voc_count = 0\n lesson_count = lesson_count + 1\n sound_list = sound_reader(lesson_count)\n elif not voc_line.find(\"----\") != -1 and voc_line != \"\\n\":\n voc_line.strip()\n\n voc_dict = {}\n for i in range(0, match_count):\n voc_group = voc_match[i].match(voc_line)\n if voc_group:\n for key, value in voc_key[i].items():\n if value != 0:\n voc_dict[key] = voc_group.group(value)\n else:\n voc_dict[key] = \"\"\n break\n\n if not voc_dict.has_key(\"Voc\"):\n print voc_line\n continue\n\n voc_dict[\"Time\"] = sound_list[voc_count]\n voc_count = voc_count + 1\n voc_list.append(voc_dict)\n\n voc_len = len(voc_list)\n if voc_len > 0:\n lesson_list.append(voc_list)\n\n return lesson_list", "def process_test(self, data):\n 
new_utts = []\n for l in data:\n tem = []\n for sent in l:\n tem.append([\"<s>\"] + sent + [\"</s>\"])\n new_utts.append(tem)\n return new_utts # 以输入的测试标题为topic,四句空诗", "def parser_binary_file(self, content):\n result_list = list()\n ai_cpu_total_time_summary = 0\n # Node serial number.\n serial_number = 1\n\n i = 0\n ai_cpu_format = StructType.format(DataPreProcessParser.AI_CPU_STRUCT.values())\n ai_cpu_size = StructType.sizeof(DataPreProcessParser.AI_CPU_STRUCT.values())\n while i < len(content):\n ai_cpu_data = struct.unpack(ai_cpu_format, content[i:i + ai_cpu_size])\n ai_cpu = DataPreProcessParser.AiCpuStruct(*ai_cpu_data)\n if ai_cpu.task_id < self._task_id_threshold:\n node_type_name = f'{ai_cpu.stream_id}_{ai_cpu.task_id}'\n if self._op_task_dict and node_type_name in self._op_task_dict:\n node_type_name = self._op_task_dict[node_type_name].split('/')[-1]\n else:\n logger.warning(\"[profiler] the op name of %s cannot be found.\", node_type_name)\n exe_time = (float(ai_cpu.run_end) - float(ai_cpu.run_start)) / self._ms_unit\n total_time = ai_cpu.total_time / self._ms_unit\n result_list.append([serial_number, node_type_name, total_time, ai_cpu.dispatch_time / self._ms_unit,\n exe_time, ai_cpu.run_start_counter / self._us_unit,\n ai_cpu.run_end_counter / self._us_unit])\n\n ai_cpu_total_time_summary += total_time\n # Increase node serial number.\n serial_number += 1\n\n i = i + self._ai_cpu_len\n\n return ai_cpu_total_time_summary, result_list", "def get_messages(message_count):\r\n\r\n file = open('messages.htm', encoding='UTF-8')\r\n\r\n html = file.read().split('</p>')\r\n file.close()\r\n\r\n TOTAL[0] = len(html) - 1\r\n\r\n # Gets rid of formatting at the beginning\r\n start = html[0].find('<div class=\"message\">')\r\n while not html[0][start].isnumeric():\r\n start += 1\r\n html[0] = html[0][start:]\r\n\r\n html.pop()\r\n\r\n threads = []\r\n\r\n que = Queue(maxsize=50)\r\n for line in html:\r\n try:\r\n clean_line = BeautifulSoup(line, 'lxml').getText()\r\n except Exception:\r\n print('Install lxml')\r\n #print(line)\r\n if len(clean_line) != 0:\r\n t = threading.Thread(target=add_option,\r\n args=(message_count, que, threads))\r\n que.put(clean_line)\r\n\r\n t.daemon = True\r\n t.start()\r\n threads.append(t)\r\n\r\n que.join()", "def parse(self):\n\n lines = self._get_file_lines( )\n\n message, translation = None, None\n comment, status, sources = None, None, None\n temp_msgid, temp_msgstr = None, None\n previous, current = None, None\n\n tstore = UT3Store( )\n\n for curl in lines:\n\n curl = curl.strip( )\n\n if len(curl) == 0:\n current = LINE_EMPTY\n elif curl[0] == '#':\n current = LINE_COMMENT\n status, comment, sources = \\\n _extract_comment_values(curl, status, comment, sources)\n else:\n keyword, message = _parse_line(curl)\n if keyword is not None:\n if keyword == \"msgid\":\n current = LINE_MSGID\n # and now initialise them for later use\n temp_msgid = message\n temp_msgstr = \"\"\n elif keyword == \"msgstr\":\n current = LINE_MSGSTR\n temp_msgstr = message\n else:\n current = LINE_UNKNOWN\n logging.error(\"unknown keyword: %s\" % (keyword))\n else:\n if message is not None:\n if current == LINE_MSGID:\n temp_msgid = temp_msgid + message\n elif current == LINE_MSGSTR:\n temp_msgstr = temp_msgstr + message\n else:\n logging.error(\"unknown mode\")\n\n if previous == LINE_MSGSTR and current != LINE_MSGSTR:\n # we're not in msgstr mode anymore --> save the current entry\n entry = _make_item(message, translation, \\\n sources, comment, status)\n if entry is not None:\n 
tstore.append(entry)\n\n # reset the item values\n message, translation = None, None\n comment, status, sources = None, None, None\n\n # save msgid and msgstr for storing them later\n message = temp_msgid\n translation = temp_msgstr\n # save line state\n previous = current\n\n # finally append the last pair\n if previous == LINE_MSGSTR:\n entry = _make_item(message, translation, sources, comment, status)\n if entry is not None:\n tstore.append(entry)\n\n return tstore", "def parse_zhuyin(line, length):\n END = '˙ˊˇˋ-'\n line = re.findall('zhuyin: .*, origin: ', line)[0][8:-10]\n zhuyin = []\n idx = 0\n for num in length:\n now = ''\n now_length = 0\n for i in range(idx, len(line)):\n now += line[i]\n\n if line[i] in END:\n now_length += 1\n if now_length == num:\n zhuyin.append(now)\n idx = i + 1\n break\n return zhuyin", "def get_texts(book: TextIO) -> list:\n content = book.read()\n chars_limit = 970\n texts = [content[i:i + chars_limit] for i in range(0, len(content), chars_limit)]\n return [\"...\" + t + \"...\" if t != texts[0] else t + \"...\" for t in texts]", "def parse_contents(self):\n self.parsed_contents = tokenize(self.contents)[0]", "def parse_text(self):\n self.text={}\n for i, lang in enumerate(LANGS):\n text=file(self.src).read()\n self.text[lang]=\"\"\n extracted, finish = \"\", 0\n start_string, stop_string = r\"<!--%s-->\" % lang, r\"<!--/%s-->\" % lang\n # Iterates to check multiple blocks of text within the file!\n # Pay attention to infinite loops!\n # AttributeError exception raised when no more blocks to extract exist\n while True:\n try:\n start=re.compile(start_string, re.IGNORECASE).search(text).span()[1]\n finish=re.compile(stop_string, re.IGNORECASE).search(text).span()[0]\n extracted+=text[start:finish]\n text=text[finish+1:]\n except AttributeError:\n break\n self.text[lang]+=extracted", "def parseOutText(f):\n\n\n f.seek(0) ### go back to beginning of file (annoying)\n all_text = f.read()\n\n ### split off metadata\n content = all_text.split(\"X-FileName:\")\n words = \"\"\n if len(content) > 1:\n ### remove punctuation\n text_string = content[1].translate(str.maketrans(\"\", \"\", string.punctuation))\n\n ### split the text string into individual words\n words = text_string.split()\n\n return words", "def parse_txt(self, path):\n with open(path) as f:\n data_txt = f.read()\n\n #Parseo bien ese txt con formato de mierda\n pattern = re.compile('(\\d{1,2}\\/\\d{1,2}\\/\\d{2}\\s\\d{2}:\\d{2})\\s-\\s(?:\\u200e*)')\n lista = pattern.split(data_txt)\n \n df = pd.DataFrame(data={\n 'timestamp':[lista[index] for index in range(len(lista)) if index%2==1],\n 'temp':[lista[index] for index in range(len(lista)) if (not index%2==1 and index!=0)]\n })\n\n df['timestamp'] = pd.to_datetime(df['timestamp'] , format='%d/%m/%y %H:%M')\n\n df[['user','message']] = df['temp'].str.split(':', 1, expand=True)\n df['message'] = df['message'].str.replace('\\\\n', ' ')\n df = df.drop(columns = 'temp')\n \n #si no hay mensaje, es porque no fue un mensaje si no una accion. 
Lo pongo como tal\n df['action'] = df[df['message'].isna()]['user'].str.replace('\\\\n', ' ')\n df.loc[df.loc[:,'message'].isna(), 'user'] = None\n\n self.__users = df.user.dropna().unique()\n\n df.loc[df.loc[:,'message'].isna(), 'user'] = df['action'].apply(self.__which_user)\n\n #Saco signos de puntuacion y saco mayusculas\n df['message'] = df['message'].str.replace('[{}]'.format(self.__punctuation), '').str.lower().str.strip()\n\n df = df.set_index('timestamp')\n\n return df", "def down_text(self, last_page):\n html_content = 3\n while html_content > 0:\n html_content -= 1\n try:\n\n html_content =urllib.urlopen(self.pageurl).read()\n break\n except Exception as e:\n print('Unable to download data [Time:%d][%s]' % (html_content, self.pageurl))\n insert_log('Unable to download data [Time:%d][%s]' % (html_content, self.pageurl))\n\n if isinstance(html_content, int):\n print('Unable to save data [%s]' % self.pageurl)\n insert_log('Unable to save data [%s]' % self.pageurl)\n return False\n\n print 'downling successfully from %s' % self.pageurl\n soup = BeautifulSoup(html_content)\n print soup\n #\n #对于帖子不存在了的情况进行判断\n titlep = soup.title.string\n if titlep == '403 Forbidden':\n print '出错了:%s' % self.pageurl\n insert_log('出错了:%s' % self.pageurl)\n return []\n\n alldata =[]\n audic = {}\n\n \"\"\"\n 由于第一页有主帖,因此第一页的解析会不同,要把主帖的所有信息保存下来,跟帖的信息也是如此\n \"\"\"\n #主帖的部分解析\n if((self.num == 1) and (self.rc == 0)):\n title = soup.find('div', id=\"content\").find(\"h1\").get_text().strip(\"\\r\\t\\n\")\n #section = soup.find('p', class_='crumbs').find_all('a')[1].get_text()\n auname = soup.find('div', id='content').find('span',class_=\"from\").find('a').get_text().strip(\"\\r\\t\\n\")\n auid = soup.find('div', id='content').find('span',class_=\"from\").find('a')['href'].split(\"/\")[-2]\n ctime = soup.find('div', id='content').find(\"h3\").find_all('span')[-1].get_text()\n\n audic['title'] = title\n audic['sec'] = self.section\n audic['uname'] = auname\n self.author = auname\n audic['uid'] = auid\n audic['ctime'] = ctime\n audic['ro'] = 0\n\n mydic = soup.find('div',id=\"content\").find_all(\"li\",class_=\"clearfix comment-item\")\n mi = 0\n #soup2 = BeautifulSoup((str)(htstr))\n #如果把self.rc放在外层if中用于判断是否没有记录,则对于有少量回复,而又有更新的情况,便不能跳过第一页的主帖\n #解析主帖部分内容\n if((mi == 0) and (self.num == 1)):\n mi += 1\n if(self.rc == 0):\n imgSrc = \"\"\n try:\n imgsrc = soup.find(\"div\",class_=\"topic-content\").find_all(\"div\",class_=\"topic-figure cc\")\n for img in imgsrc:\n try:\n for im in img.find_all(\"img\"):\n imgSrc += im[\"src\"] +\"\\n\"\n except:\n pass\n except:\n pass\n strtext = soup.find('div', id='link-report').find(\"div\",class_=\"topic-content\").get_text()\n\n audic['text'] = strtext.strip() + \"\\n\" + imgSrc\n\n alldata.append(audic)\n count = 0\n for htstr in mydic:\n soup2 = BeautifulSoup((str)(htstr))\n\n #解析跟帖内容\n redic = {}\n try:\n # irroder = (self.num-1)*100 +count\n #\n rtime = soup2.find('div', class_='bg-img-green').find('h4').find(\"span\").get_text()\n\n timeArray = time.strptime(rtime, \"%Y-%m-%d %H:%M:%S\")\n irorder = int(time.mktime(timeArray))\n redic['ro'] = irorder\n #update the reply count\n if (last_page and (irorder > self.rc)):\n self.rc = irorder\n imgSrc = \"\"\n try:\n imgsrc = soup2.find(\"div\",class_=\"topic-content\").find_all(\"div\",class_=\"topic-figure cc\")\n for img in imgsrc:\n try:\n for im in img.find_all(\"img\"):\n imgSrc += im[\"src\"] +\"\\n\"\n except:\n pass\n except:\n pass\n strtext = soup2.find('div', class_='reply-doc content').find(\"p\").get_text()\n 
redic['text'] = strtext.strip() +\"\\n\"+imgSrc\n\n\n\n uname = soup2.find('div', class_='bg-img-green').find('h4').find(\"a\").get_text()\n redic['un'] = uname\n\n if(uname != self.author):\n redic['au'] = False\n else:\n redic['au'] = True\n\n uid = soup2.find('div', class_='bg-img-green').find('h4').find(\"a\")[\"href\"].split(\"/\")[-2]\n\n redic['uid'] = uid\n\n\n redic['time'] = rtime\n\n alldata.append(redic)\n except AttributeError as e:\n alldata.append(redic)\n continue\n print 'All:%s' % self.pageurl\n return alldata", "def _parse_hdus(cls, hdulist):\n header = MetaDict(OrderedDict(hdulist[0].header))\n if len(hdulist) == 4:\n if is_time_in_given_format(hdulist[0].header['DATE-OBS'], '%d/%m/%Y'):\n start_time = Time.strptime(hdulist[0].header['DATE-OBS'], '%d/%m/%Y')\n elif is_time_in_given_format(hdulist[0].header['DATE-OBS'], '%d/%m/%y'):\n start_time = Time.strptime(hdulist[0].header['DATE-OBS'], '%d/%m/%y')\n else:\n raise ValueError(\"Date not recognized\")\n xrsb = hdulist[2].data['FLUX'][0][:, 0]\n xrsa = hdulist[2].data['FLUX'][0][:, 1]\n seconds_from_start = hdulist[2].data['TIME'][0]\n elif 1 <= len(hdulist) <= 3:\n start_time = parse_time(header['TIMEZERO'], format='utime')\n seconds_from_start = hdulist[0].data[0]\n xrsb = hdulist[0].data[1]\n xrsa = hdulist[0].data[2]\n else:\n raise ValueError(\"Don't know how to parse this file\")\n\n times = start_time + TimeDelta(seconds_from_start*u.second)\n times.precision = 9\n\n # remove bad values as defined in header comments\n xrsb[xrsb == -99999] = np.nan\n xrsa[xrsa == -99999] = np.nan\n\n # fix byte ordering\n newxrsa = xrsa.byteswap().newbyteorder()\n newxrsb = xrsb.byteswap().newbyteorder()\n\n data = DataFrame({'xrsa': newxrsa, 'xrsb': newxrsb},\n index=times.isot.astype('datetime64'))\n data.sort_index(inplace=True)\n\n # Add the units\n units = OrderedDict([('xrsa', u.W/u.m**2),\n ('xrsb', u.W/u.m**2)])\n return data, header, units", "def _extract_content(lines: list[Strip]) -> list[str]:\n content = [\"\".join(segment.text for segment in line) for line in lines]\n return content", "def parse(intLanguageName, content, formatDetails, threadstop):\r\n\r\n if len(content) == 0:\r\n return buildSyntaxNode([], 0, \"text\")\r\n\r\n if formatDetails.noFormat:\r\n return buildSyntaxNode([buildSyntaxNode(content, 0, \"plainText\")],\r\n 0, \"text\")\r\n\r\n baseDict = _buildBaseDict(formatDetails=formatDetails)\r\n\r\n## _prof.start()\r\n try:\r\n print content\r\n print baseDict\r\n t = text.parseString(content, parseAll=True, baseDict=baseDict,\r\n threadstop=threadstop)\r\n print t\r\n t = buildSyntaxNode(t, 0, \"text\")\r\n print t\r\n\r\n finally:\r\n## _prof.stop()\r\n pass\r\n\r\n return t", "def parse_trflp(lines):\r\n\r\n sample_ids = []\r\n otu_ids = []\r\n data = []\r\n non_alphanum_mask = re.compile('[^\\w|^\\t]')\r\n # not sure why the above regex doesn't cover the following regex...\r\n dash_space_mask = re.compile('[_ -]')\r\n\r\n for i, line in enumerate(lines):\r\n elements = line.strip('\\n').split('\\t')\r\n\r\n # special handling for the first line only\r\n if i == 0:\r\n # validating if the file has a header\r\n if elements[0] == '':\r\n for otu_id in elements[1:]:\r\n otu_ids.append(non_alphanum_mask.sub('_', otu_id))\r\n continue\r\n else:\r\n for j, otu_id in enumerate(elements[1:]):\r\n otu_ids.append(non_alphanum_mask.sub('_', 'Bin%3d' % j))\r\n\r\n # handling of all other lines\r\n current_row = []\r\n\r\n # converting each value in the row to int\r\n for count in elements[1:]:\r\n try:\r\n 
current_row.append(int(round(float(count), 0)))\r\n except ValueError:\r\n current_row.append(0)\r\n\r\n # if the sum of all the values is equial to 0 ignore line\r\n if sum(current_row) == 0:\r\n continue\r\n\r\n # adding sample header to list\r\n sample_ids.append(non_alphanum_mask.sub('.',\r\n dash_space_mask.sub('.', elements[0])))\r\n\r\n # validating the size of the headers to add missing columns\r\n # this is only valid when there is no header\r\n if len(current_row) > len(otu_ids):\r\n # modify header data\r\n extra_cols = []\r\n for j in range(len(otu_ids), len(current_row)):\r\n extra_cols.append(non_alphanum_mask.sub('_', 'Bin%3d' % j))\r\n # modify data\r\n for j in range(len(data)):\r\n data[j].extend([0] * (len(current_row) - len(otu_ids)))\r\n\r\n otu_ids.extend(extra_cols)\r\n elif len(current_row) < len(otu_ids):\r\n # modify data\r\n current_row.extend([0] * (len(otu_ids) - len(current_row)))\r\n\r\n data.append(current_row)\r\n\r\n return sample_ids, otu_ids, asarray(data).transpose()", "def getSongTextInfo():\n sids = []\n documents = []\n sFile = open('../txt/two__Lastfm_song_Docs.txt')\n lines = sFile.readlines()\n index = 0\n for line in lines:\n line.strip('\\n')\n line.strip('\\r\\n')\n items = line.split('>>')\n sid = int(items[0])\n text = items[1]\n documents.append(text)\n sids.append(sid)\n sFile.close()\n print 'len = ',len(sids)\n print 'len = ',len(documents)\n return sids,documents", "def process_input(fname,onlynugget,onlyarg):\n content=utils.readFileEncode(fname,'utf8')\n lines = content.split('\\n')[:-1]\n sentences=[]\n labels=[]\n sent=[]\n label=[]\n for i in range(len(lines)):\n if len(lines[i])>3:\n words=lines[i].split('\\t')\n word={'originalText':words[0],'offset':int(words[1])}\n sent.append(word)\n if onlynugget:\n if words[2] in NuggetList10:\n label.append(words[2]) \n else:\n label.append('O')\n elif onlyarg:\n if words[2] in ArgumentList:\n\n if 'Software' in words[2]:\n label.append(words[2][0:2]+'System')\n else:\n label.append(words[2])\n else:\n label.append('O')\n else:\n if len(sent)>0 and len(label)>0: \n sentences.append(sent)\n labels.append(label) \n sent=[]\n label=[]\n elif len(sent)==0 and i < len(lines)-1:\n sentences.append([])\n labels.append([])\n \n return sentences,labels", "def _parse_racon_hax_output(self, racon_hax_output_path):\n\n references, pileups = list(), list()\n with Pool(self.num_threads) as pool:\n with open(racon_hax_output_path) as f:\n while True: # loop for multiple contigs\n reference = f.readline().strip()\n if len(reference) == 0: # EOF\n break\n\n lines = [f.readline() for _ in range(5)]\n pileup = np.array(pool.map(self.parse_line, lines))\n\n references.append(reference)\n pileups.append(pileup)\n\n return references, pileups", "def run(self):\n # Send number of lines to the GUI\n self.total_count_signal.emit(self.number_of_lines)\n logging.info(\"Total number of lines in file %d\" % self.number_of_lines)\n\n self.result_string = \"\"\n self.token_array = []\n if self.file_type == settings.APACHE_COMMON:\n regex = re.compile(settings.APACHE_COMMON_LOG_RE)\n # Arg 2 should be list\n self.result_string = settings.APACHE_COMMON_HEADING\n for i, line in enumerate(self.file):\n item = regex.match(line)\n if not item:\n logging.error(\n \"Couldn't tokenize the following line\\n\" + line)\n continue\n token_object = TokenCommon(item.groups())\n self.token_array.append(token_object)\n self.send_result_signal(i, item.groups())\n\n elif self.file_type == settings.APACHE_COMBINED:\n regex = 
re.compile(settings.APACHE_COMBINED_LOG_RE)\n self.result_string = settings.APACHE_COMBINED_HEADING\n for i, line in enumerate(self.file):\n item = regex.match(line)\n if not item:\n logging.error(\n \"Couldn't tokenize the following line\\n\" + line)\n continue\n token_object = TokenCombined(item.groups())\n self.token_array.append(token_object)\n self.send_result_signal(i, item.groups())\n\n elif self.file_type == settings.SQUID:\n regex = re.compile(settings.SQUID_LOG_RE)\n self.result_string = settings.SQUID_HEADING\n for i, line in enumerate(self.file):\n item = regex.match(line)\n if not item:\n logging.error(\n \"Couldn't tokenize the following line\\n\" + line)\n continue\n token_object = TokenSquid(item.groups())\n self.token_array.append(token_object)\n self.send_result_signal(i, item.groups())\n\n self.update_progress_signal.emit(\n self.number_of_lines - 1, self.result_string)\n self.session.bulk_save_objects(self.token_array)\n self.session.commit()\n logging.info(\"All tokens inserted into database\")\n settings.Session.remove()", "def extract_page_text(self, bs_object):\n\n # kill all script and style elements\n for script in bs_object([\"script\", \"style\", \"head\"]):\n script.extract() # rip it out\n\n # get text\n text = bs_object.get_text()\n\n # break into lines and remove leading and trailing space on each\n lines = (line.strip() for line in text.splitlines())\n # break multi-headlines into a line each\n chunks = (phrase.strip() for line in lines for phrase in line.split(\" \"))\n # drop blank lines\n text_list_gen = (chunk for chunk in chunks if chunk)\n text_list = list(text_list_gen)\n # print \"TEXT LIST >>>\\n\", text_list\n \n return text_list", "def parser(in_file,verbose):\n\n # perform high-level parsing into sections\n res_file_lines = [row for row in in_file]\n tokenized_lines = tools.split_and_prune_lines(res_file_lines)\n sections = tools.extracted_sections(tokenized_lines)\n\n # split out common sections and subsequent groups of results sections\n def is_results_sentinel_section(section):\n \"\"\" Identify mesh point separator \"pseudo-section\" header.\n\n (Helper function for res_parser_spncci.)\n \"\"\"\n (section_name,_) = section\n return (section_name == \"RESULTS\")\n\n grouped_sections = tools.split_when(is_results_sentinel_section,sections)\n common_sections = list(next(grouped_sections))\n grouped_results_sections = [list(section_group) for section_group in grouped_sections]\n\n if (verbose):\n print(\"Section counts\")\n print(\" Common sections:\",len(common_sections))\n for results_section_group in grouped_results_sections:\n print(\" Results sections (by group):\",len(results_section_group))\n\n # generate results objects by mesh point\n mesh_data = []\n if (grouped_results_sections):\n # there are results sections: actual mesh, not counting run\n for results_section_group in grouped_results_sections:\n full_section_group = common_sections + results_section_group\n results = spncci_results_data.SpNCCIResultsData()\n parse_mesh_point(results,full_section_group,section_handlers)\n mesh_data.append(results)\n else:\n # no results sections: counting run\n results = spncci_results_data.SpNCCIResultsData()\n parse_mesh_point(results,common_sections,section_handlers)\n mesh_data.append(results)\n\n return mesh_data", "def _process_recog(self):\n\t\trecog_list = list()\n\t\tself.fake_start_offset = -1\n\t\tself.fake_end_offset = -1\n\t\t# with open(self.recog_file_path, 'r', encoding='utf-8') as int_f:\n\t\twith open(self.recog_file_path, 'r') as 
int_f:\n\t\t\tf = iter(int_f)\n\t\t\tfor line in f:\n\t\t\t\tpart = line.split()\n\t\t\t\tif not (float(part[3]) == 0 or self._none_word(part[4]) == True):\n\t\t\t\t\ttime_global = float(part[2])\n\t\t\t\t\tif self.fake_start_offset == -1:\n\t\t\t\t\t\tself.fake_start_offset = time_global\n\t\t\t\t\tself.fake_end_offset = time_global\n\t\t\t\t\t# only ignore the time stamps before and start offset and the end offset\t\t\t\n\t\t\t\t\tif len(self.noise_itv) >= 2 and (time_global < self.noise_itv[0][1] or time_global > self.noise_itv[-1][0]):\n\t\t\t\t\t\tcontinue\n\t\t\t\t\trecog_list.append([part[4], time_global, float(part[3])]) # [word, time_global, time_inv]\n\t\treturn recog_list", "def parse(self):\n i = 0\n while i < len(self.__lines):\n line = self.__lines[i]\n dt = re.match(r\"(\\d{4}-\\d{1,2}-\\d{1,2}\\s\\d{1,2}:\\d{1,2}:\\d{1,2})\", line)\n if not dt:\n i += 1\n continue\n log = {\n \"datetime\": dt.group()\n }\n line = line[dt.end()+1:].rstrip(\"\\n\")[::-1]\n qq_flag = line.find(\"(\")\n log[\"qq\"] = line[qq_flag-1:0:-1]\n log[\"name\"] = line[:qq_flag:-1].strip(\" \")\n i += 1\n log[\"content\"] = self.__lines[i].rstrip(\"\\n\")\n while self.__lines[i+1] != \"\\n\":\n i += 1\n log[\"content\"] += \" \" + self.__lines[i].rstrip(\"\\n\")\n self.__logs.append(log)\n i += 2", "def process_data(file_object: TextIO) -> list:\n text_list = [line.replace('\\n', '').split(' ') for line in file_object]\n return text_list", "def tokenize_podcast_transcript(args):\n DATA_DIR = os.path.join(os.getcwd(), 'data', args.project_id)\n story_file = os.path.join(DATA_DIR, 'podcast-transcription.txt')\n\n # Read all words and tokenize them\n with open(story_file, 'r') as fp:\n data = fp.readlines()\n\n data = [item.split(' ') for item in data]\n data = [\n item[:-2] + [' '.join(item[-2:])] if item[-1] == '\\n' else item\n for item in data\n ]\n data = [item for sublist in data for item in sublist]\n\n df = pd.DataFrame(data, columns=['word'])\n df['conversation_id'] = 1\n\n return df", "def parse_text(self):\n\n line_number = 0\n line_min = 0\n \n while line_number < self.line_count:\n \n if self.indentation(self.text[line_number]): \n self.tab_index.append(self.indent)\n self.text[line_number] = self.text[line_number].strip() \n line_number += 1 \n\n else:\n line_min = line_number", "def output_parse(self):\n\n off_target = open(self.ot_out, 'r')\n for x in off_target:\n print(x)\n if x[0] != '0':\n if x[0] == \"D\":\n continue\n split_1 = x.replace(\"\\n\",\"\")\n split_1 = split_1.replace(\" \",\"\")\n split_1 = split_1.split(\":\") # split_1[0] holds sequence, split_1[1] holds average OT score\n self.crRNA_dict[str(split_1[0])][1] = split_1[1]\n else:\n split_2 = x.replace(\"\\n\",\"\")\n split_2 = split_2.split(\",\") # split_2[0] holds off-target score, split_2[1] holds index in index_file\n ### output list = sequence, off-target score, off-target organism, distance, gene, location, PAM, strand, on-target score\n self.output.append([str(split_1[0]), split_2[0], split_2[1], \"distance\", self.crRNA_dict[split_1[0]][0], self.crRNA_dict[split_1[0]][3], self.crRNA_dict[split_1[0]][4], self.crRNA_dict[split_1[0]][5], self.crRNA_dict[split_1[0]][2]])\n\n ## Error generated if off-target output file shows all scores as 0\n if not self.output:\n QtWidgets.QMessageBox.question(self, \"Error!\",\"Check off-target output file\",QtWidgets.QMessageBox.Ok)\n\n with open(self.index, 'rb') as f:\n index_file = gzip.GzipFile(fileobj=f)\n index_dict = {}\n\n for x in index_file:\n x = x.decode(\"utf-8\")\n if 
x[0] == '>':\n line = x.replace(\">\",\"\")\n line = line.replace(\"(\",\"\")\n line = line.replace(\")\",\"\")\n line = line.replace(\"\\n\",\"\")\n line = line.replace(\",\",\"\")\n hold = line.split(\" \")\n num = hold[len(hold)-1]\n id = hold[0]\n name = \"\"\n i = 1\n while (hold[i] != \"complete\"):\n name += hold[i] + \" \"\n i += 1\n num = num.replace(\"\\r\",\"\")\n index_dict[num] = [id, name]\n f.close()\n\n for x in self.output:\n name = index_dict[x[2]][1]\n x[3] = self.mash_all[index_dict[x[2]][0]][1] # Distance\n x[2] = name # Off-target organism name\n\n ### Populates table with average off-target scores\n for index, (x,y) in enumerate(self.crRNA_dict.items()):\n avg_off = QtWidgets.QTableWidgetItem(str(y[1]))\n self.crRNA_table.setItem(index, 2, avg_off)\n self.crRNA_table.resizeColumnsToContents()\n\n\n #with open('./test_files/example_plottable.csv', 'r') as f:\n\n # count = 0\n # for x in f:\n # if count > 0:\n # line = x.replace('\\n', '')\n # arr = line.split(',')\n # self.output.append(arr)\n # count += 1\n\n markers = ['.', 'o', 'v', '^', '<', '>', '1', '2', '3', '4']\n\n organismList = {}\n\n for row in self.output:\n if row[2] != '':\n if not row[2] in organismList:\n organismList[row[2]] = []\n organismList[row[2]].append((row[1], row[3]))\n\n\n fig, axs = plt.subplots()\n\n count = 0\n for key in organismList:\n x_vals = []\n y_vals = []\n for i in range(0, len(organismList[key])):\n x_vals.append(float(organismList[key][i][0]))\n y_vals.append(float(organismList[key][i][1]))\n\n scatter = axs.scatter(x_vals, y_vals, s = 30, label = key, marker=markers[count])\n\n count += 1\n\n # produce a legend with the unique colors from the scatter\n fig.set_size_inches(3, 3, forward=True)\n legend1 = axs.legend(loc=\"upper right\", title=\"Off Target Organism\")\n legend1 = axs.legend( prop={'size': 4})\n axs.set_ylabel('Organism Distance')\n axs.set_xlabel('Off-Target Score')\n axs.set_title('gRNA selection')\n axs.add_artist(legend1)\n plt.tight_layout()\n\n self.plotWidget = FigureCanvas(fig)\n lay = QtWidgets.QVBoxLayout(self.total_crRNA)\n lay.setContentsMargins(0,0,0,0)\n lay.addWidget(self.plotWidget)\n\n\n ## self.output ---list of lists containing data for plotting\n ## Format for each index in the list: (sequence, off-target score, off-target organism, distance, gene, location, PAM, strand, on-target score", "def process_wiki_file(args: Tuple[str, str, int]) -> str:\n filepath, language, min_sent_word_count = args\n with bz2.open(filepath, \"rt\", encoding=\"utf8\") as bz2_file:\n\n # Extract text between <doc> xml tags\n soup = BeautifulSoup(bz2_file.read(), \"lxml\")\n docs = soup.find_all(\"doc\")\n wiki_dump_content = \"\"\n for i, doc in enumerate(docs):\n processed_text = process_wiki_doc_text(\n doc.text, language, min_sent_word_count\n )\n if len(processed_text) == 0:\n continue\n\n # Append to result\n if i > 0 and len(wiki_dump_content) > 0:\n wiki_dump_content += \"\\n\"\n wiki_dump_content += processed_text\n\n return wiki_dump_content", "def tailParser(inLoc):\n f = open(inLoc, 'r')\n tails = f.readlines()\n f.close()\n\n tailList = []\n\n for i in range(len(tails)):\n if i==0: continue #skips the header\n line = tails[i].rstrip().split(',')\n tailList.append(line)\n return tailList", "def generate_text_chunks(subtitle_file, chunk_size, min_chunk_size):\n\n text_chunks = list()\n chunk_start_times = list()\n chunk_end_times = list()\n\n if subtitle_file[-4:] == \".vtt\":\n # Subtitle file is in vtt format.\n\n words, word_end_times = 
get_words_with_end_times(subtitle_file)\n\n if words is None:\n print(\"Could not generate text chunks for file: \" + subtitle_file)\n return None, None, None\n\n # Generate text chunks of desired size\n text_chunks, chunk_start_times, chunk_end_times = generate_text_chunks_from_word_list(words, word_end_times,\n chunk_size)\n elif subtitle_file[-4:] == \".txt\":\n # Subtitle file is a plain text.\n # Possibly approximate timestamps?\n pass\n\n # Discard last chunk if too small\n if len(text_chunks[-1]) < min_chunk_size:\n text_chunks.pop()\n chunk_start_times.pop()\n chunk_end_times.pop()\n\n return text_chunks, chunk_start_times, chunk_end_times", "def homonymic_list_generator(content):\n\t# Making it global so that it can be used in other functions too.\n\tglobal verseDetails\n\t# Initialize a VerseInfo class instance.\n\tverseDetails = VerseInfo()\n\t# Result will store tuples (headword, meaning, verse)\n\tresult = []\n\t# Initialize blank verse\n\tverse = ''\n\t# lineType list holds 'h', 'm', 'v' for headword, meaning and verse lines.\n\tlineType = []\n\t# Read the content into list of lines.\n\tlines = content.split('\\n')\n\t# A temporary placeholder which will be emptied into result list\n\t# whenever the verse is allocated to it.\n\twordsOnHand = []\n\tfor line in lines:\n\t\t# If the line is headword line,\n\t\tif line.startswith('$'):\n\t\t\t# If the preceding line was a verse, and current a headword,\n\t\t\t# time to add to result list\n\t\t\tif lineType[-1] == 'v':\n\t\t\t\tverseDetails.update_verseNum(verse)\n\t\t\t\t(verse, wordsOnHand, result) = putVerse(verse, wordsOnHand, result)\n\t\t\t# Extract the headword and gender from headword line.\n\t\t\t# Typical headword line is `$headword;gender`\n\t\t\theadword, gender = line.rstrip().lstrip('$').split(';')\n\t\t\t# lineType is appended with 'h' for headword.\n\t\t\tlineType.append('h')\n\t\t# If the line is a meaning line,\n\t\telif line.startswith('#'):\n\t\t\t# typical meaning line is `#meaning1,meaning2,meaning3,...`\n\t\t\tmeanings = line.rstrip().lstrip('#').split(',')\n\t\t\t# Store the (headword, meaning) tuples in temporary wordsOnHand list.\n\t\t\t# They will keep on waiting for the verse.\n\t\t\t# Once verse is added, and a new headword starts, this will be added to result list.\n\t\t\twordsOnHand.append((headword, meanings))\n\t\t\t# lineType is marked 'm' for meaning.\n\t\t\tlineType.append('m')\n\t\telif line.startswith(';'):\n\t\t\t(tag, value) = utils.extract_tag(line)\n\t\t\tif tag == 'p':\n\t\t\t\tverseDetails.update_pageNum(value)\n\t\t\tif tag == 'k':\n\t\t\t\tverseDetails.update_kanda(value)\n\t\t\tif tag == 'v':\n\t\t\t\tverseDetails.update_varga(value)\n\t\t\tif tag == 'vv':\n\t\t\t\tverseDetails.update_subvarga(value)\n\t\t# Pass the lines having some other markers like ;k for kanda, ;v for varga etc.\n\t\telif line.startswith(';end'):\n\t\t\t# Put the last verse, as there will not be any next headword.\n\t\t\tputVerse(verse, wordsOnHand, result)\n\t\t# Lines which are unmarked are verses.\n\t\t# The verses may span more than one line too. 
Therefore adding them up.\n\t\telse:\n\t\t\tverse += line + '<BR>'\n\t\t\t# Mark lineType 'v' for verse.\n\t\t\tlineType.append('v')\n\treturn result", "def extractTextWithFullLayout(analyzed_data):\r\n\r\n data = []\r\n for page in analyzed_data:\r\n if not page:\r\n continue\r\n\r\n data.append([])\r\n for lt_obj in page:\r\n if isinstance(lt_obj, LTTextBox) or isinstance(lt_obj, LTTextLine):\r\n data[-1].append({\r\n 'type': 'text', # Might support more types (e.g. figures) in the future.\r\n 'text': lt_obj.get_text().split(\"\\n\"),\r\n 'layout': {\r\n 'x0': lt_obj.x0,\r\n 'x1': lt_obj.x1,\r\n 'y0': lt_obj.y0,\r\n 'y1': lt_obj.y1\r\n }\r\n })\r\n\r\n return data", "def extractMarkup(text):\n\n groups = []\n indent = None\n buff = []\n comment = None\n i = len(\"'''harbor: \")\n for line in text:\n line = line + ' '\n\n if (line.lstrip())[:i] in [\"'''harbor: \",'\"\"\"harbor: '] and indent == None:\n indent = len(line) - len(line.lstrip())\n comment = (line.lstrip())[:3]\n buff = Markup(line[indent+i:].strip(),[])\n elif line.strip() == comment and indent != None:\n indent = None\n groups.append(buff)\n buff = []\n elif indent != None:\n buff = Markup(buff.dest,buff.contents + [line[indent:]])\n return groups", "def parse(self):\n try:\n self.open_file()\n lines = list(self._file)\n\n if len(lines) > 0:\n text = ''.join(lines)\n regex = 'Song \\d+\\nStart (\\d+:\\d+:\\d+)\\nEnd (\\d+:\\d+:\\d+)\\nLength (\\d+.\\d+)'\n match = re.findall(regex, text)\n if len(match):\n starts = []\n ends = []\n lengths = []\n\n for i in range(len(match)):\n starts.append(match[i][0])\n ends.append(match[i][1])\n lengths.append(float(match[i][2]))\n\n for i in range(len(match)):\n self.debug_data.append({\n 'start':starts[i],'end':ends[i],'length':lengths[i]})\n\n match = re.search('T\\d_S(\\d{4})_.*.txt', self._filepath)\n if match:\n self._experiment_metadata['session_id'] = int(match.groups()[0])\n else:\n raise EIMParsingError(\"No valid session id found in filename %s\" % self._filepath)\n\n finally:\n if self._file and not self._file.closed:\n self.close_file()", "def parse_chunks(self):\n logger.info('parse_chunks()')\n\n while (self.replay.pos < len(self.replay)):\n chunk_type = self.replay.read_uint32()\n chunk_size = self.replay.read_int32()\n offset = self.replay.bytepos\n\n if chunk_type == ChunkTypes.CHECKPOINT.value:\n self.parse_checkpoint()\n\n elif chunk_type == ChunkTypes.EVENT.value:\n self.parse_event()\n\n elif chunk_type == ChunkTypes.REPLAYDATA.value:\n self.parse_replaydata()\n\n elif chunk_type == ChunkTypes.HEADER.value:\n self.parse_header(chunk_size)\n\n self.replay.bytepos = offset + chunk_size", "def get_large_audio_transcription(path):\n # open the audio file using pydub\n r = sr.Recognizer()\n sound = AudioSegment.from_mp3(path)\n sound.export(\"tmp.wav\", format=\"wav\")\n sound = AudioSegment.from_wav('tmp.wav')\n # split audio sound where silence is 700 miliseconds or more and get chunks\n chunks = split_on_silence(sound,\n # experiment with this value for your target audio file\n min_silence_len = 500,\n # adjust this per requirement\n silence_thresh = sound.dBFS-14,\n # keep the silence for 1 second, adjustable as well\n keep_silence=500,\n )\n folder_name = \"audio-chunks\"\n # create a directory to store the audio chunks\n if not os.path.isdir(folder_name):\n os.mkdir(folder_name)\n whole_text = \"\"\n\n chapter=(str(path.split('/')[-1])).split('_')[3]\n # if chapter == '01':\n # target=2\n # else:\n # target=1\n target=2\n # process each chunk\n for i, audio_chunk 
in enumerate(chunks, start=1):\n # export audio chunk and save it in\n # the `folder_name` directory.\n if i==1:\n chunk_filename = os.path.join(folder_name, f\"chunk{i}.wav\")\n audio_chunk.export(chunk_filename, format=\"wav\")\n # recognize the chunk\n with sr.AudioFile(chunk_filename) as source:\n audio_listened = r.record(source)\n # try converting it to text\n try:\n text = r.recognize_google(audio_listened,language=\"en-US\")\n\n except sr.UnknownValueError as e:\n print(\"Error:\", str(e))\n else:\n #text = f\"{text.capitalize()}. \"\n #print(chunk_filename, \":\", text)\n whole_text += text\n # return the text for all chunks detected\n else:\n chunk_filename = os.path.join(folder_name, f\"chunk{i}.wav\")\n audio_chunk.export(chunk_filename, format=\"wav\")\n # recognize the chunk\n with sr.AudioFile(chunk_filename) as source:\n audio_listened = r.record(source)\n # try converting it to text\n try:\n text = r.recognize_google(audio_listened, language=\"en-US\")\n\n except sr.UnknownValueError as e:\n print(\"Error:\", str(e))\n else:\n #text = f\"{text.capitalize()}. \"\n # print(chunk_filename, \":\", text)\n if chapter == '01':\n whole_text += ' ' +text\n if str(text).isalnum():\n if str(text).split(' ')[0]==' ':\n whole_text += text\n else: whole_text += ' '+text\n # return the text for all chunks detected\n\n if i==target:\n break\n if os.path.isfile('tmp.wav') :os.remove('tmp.wav')\n subprocess.run([\"rm\", \"-rf\", folder_name])\n return whole_text", "def parsing():\n # User Agents\n user_agents = '''\nAvailable User-Agents:\n winxpie60 Internet Explorer 6.0 (Windows XP)\n winxpie61 Internet Explorer 6.1 (Windows XP)\n winxpie70 Internet Explorer 7.0 (Windows XP)\n winxpie80 Internet Explorer 8.0 (Windows XP)\n winxpchrome20 Chrome 20.0.1132.47 (Windows XP)\n winxpfirefox12 Firefox 12.0 (Windows XP)\n winxpsafari5 Safari 5.1.7 (Windows XP)\n win2kie60 Internet Explorer 6.0 (Windows 2000)\n win2kie80 Internet Explorer 8.0 (Windows 2000)\n win7ie80 Internet Explorer 8.0 (Windows 7)\n win7ie90 Internet Explorer 9.0 (Windows 7)\n win7chrome20 Chrome 20.0.1132.47 (Windows 7)\n win7firefox3 Firefox 3.6.13 (Windows 7)\n win7safari5 Safari 5.1.7 (Windows 7)\n osx10safari5 Safari 5.1.1 (MacOS X 10.7.2)\n osx10chrome19 Chrome 19.0.1084.54 (MacOS X 10.7.4)\n galaxy2chrome18 Chrome 18.0.1025.166 (Samsung Galaxy S II,\\\nAndroid 4.0.3)\n galaxy2chrome25 Chrome 25.0.1364.123 (Samsung Galaxy S II,\\\nAndroid 4.0.3)\n linuxchrome26 Chrome 26.0.1410.19 (Linux)\n linuxfirefox19 Firefox 19.0 (Linux)\n '''\n\n # Description of Command Line arguments\n parser = argparse.ArgumentParser(description='Distributed Pure Python \\\nHoneyclient Implementation',\n formatter_class=argparse.RawDescriptionHelpFormatter,\n usage='python %(prog)s [ thug-options ] url',\n fromfile_prefix_chars='@',\n epilog=user_agents)\n\n def link(urls):\n links = urls.strip().split(',')\n for url in links:\n try:\n if 'http://' not in url:\n url = 'http://' + url\n urlopen(url)\n except:\n raise argparse.ArgumentTypeError(\"%s doesn't exist\"%url)\n return urls\n\n def link_file(fn):\n fobj = open(fn, 'r')\n url = fobj.readline().strip()\n urls = []\n while url:\n try:\n if 'http://' not in url:\n url = 'http://' + url\n urlopen(url)\n urls.append(url)\n except:\n raise argparse.ArgumentTypeError(\"%s doesn't exist\"%url)\n url = fobj.readline().strip()\n return urls\n\n # Mutually Exclusive Group for URL's\n links = parser.add_argument_group('URL Options')\n url = links.add_mutually_exclusive_group(required=True)\n 
url.add_argument('-U', '--url',\n metavar='',\n type=link,\n nargs='+',\n help=\"Enter Single/Multiple URL's to Analyze\")\n url.add_argument('-uf', '--url-file',\n metavar='',\n type=link_file,\n help=\"File containing bunch of URL's(1 per line)\")\n\n def qfile(fn):\n fobj = open(fn, 'r')\n queues = fobj.readlines()\n queues = map((lambda x: x.replace('\\n', '')), queues)\n return queues\n\n # ThugD Options\n thugd = parser.add_argument_group('Thug Distributed Options')\n thugd.add_argument('-ia', '--include-agent',\n action='store_const',\n const=agents_priority,\n help='Display Thug Version')\n # Queues Mutually Exclusive Group\n queue = thugd.add_mutually_exclusive_group(required=False)\n queue.add_argument('-qu', '--queue',\n nargs='+',\n metavar='',\n default='generic',\n help=\"Specify Queue/Queues to route URL's \\\n(*Single Queue: URL's will be routed to specified Queue, \\\n*Multiple Queues: URL's will be routed to ALL specified Queues)\")\n queue.add_argument('-qf', '--queue-file',\n metavar='',\n type=qfile,\n help=\"Specify File name containing Queue names(1 per \\\nline)\")\n\n # Thug Options\n thug = parser.add_argument_group('Thug Options')\n thug.add_argument('-V', '--version',\n action='store_true',\n help='Display Thug Version')\n thug.add_argument('-u', '--useragent',\n metavar='',\n default='winxpie60',\n help='Select a user agent(see below for values, \\\ndefault: winxpie60)')\n thug.add_argument('-e', '--events',\n metavar='',\n help='Enable comma-separated specified DOM events \\\nhandling')\n thug.add_argument('-w', '--delay',\n metavar='',\n help='Set a maximum setTimeout/setInterval delay value \\\n(in milliseconds)')\n thug.add_argument('-n', '--logdir',\n metavar='',\n help='Set the log output directory')\n thug.add_argument('-o', '--output',\n metavar='',\n help='Log to a specified file')\n thug.add_argument('-r', '--referer',\n metavar='',\n help='Specify a referer')\n thug.add_argument('-p', '--proxy',\n metavar='',\n help='Specify a proxy (see below for format and \\\nsupported schemes)')\n thug.add_argument('-l', '--local',\n action='store_true',\n help='Analyze a locally saved page')\n thug.add_argument('-x', '--local-nofetch',\n action='store_true',\n help='Analyze a locally saved page and prevent remote\\\ncontent fetching')\n thug.add_argument('-v', '--verbose',\n action='store_true',\n help='Enable verbose mode')\n thug.add_argument('-d', '--debug',\n action='store_true',\n help='Enable debug mode')\n thug.add_argument('-q', '--quiet',\n action='store_true',\n help='Disable console logging')\n thug.add_argument('-m', '--no-cache',\n action='store_true',\n help='Disable local web cache')\n thug.add_argument('-a', '--ast-debug',\n action='store_true',\n help='Enable AST debug mode (requires \\\ndebug mode)')\n thug.add_argument('-t', '--threshold',\n metavar='',\n help='Maximum pages to fetch')\n thug.add_argument('-E', '--extensive',\n action='store_true',\n help='Extensive fetch of linked pages')\n thug.add_argument('-T', '--timeout',\n metavar='',\n help='Timeout in minutes')\n\n # Plugins\n plugin = parser.add_argument_group('Plugins')\n plugin.add_argument('-A', '--adobepdf',\n metavar='',\n default='9.1.0',\n help='Specify the Adobe Acrobat Reader version \\\n(default: 9.1.0)')\n plugin.add_argument('-P', '--no-adobepdf',\n action='store_true',\n help='Disable Adobe Acrobat Reader Plugin')\n plugin.add_argument('-S', '--shockwave',\n metavar='',\n default='10.0.64.0',\n help='Specify the Shockwave Flash version \\\n(default: 10.0.64.0)')\n 
plugin.add_argument('-R', '--no-shockwave',\n action='store_true',\n help='Disable Shockwave Flash Plugin')\n plugin.add_argument('-J', '--javaplugin',\n metavar='',\n default='1.6.0.32',\n help='Specify the Java Plugin version (default: \\\n1.6.0.32)')\n plugin.add_argument('-K', '--no-javaplugin',\n action='store_true',\n help='Disable Java Plugin')\n\n # Classifier\n classifier = parser.add_argument_group('Classifiers')\n classifier.add_argument('-Q', '--urlclassifier',\n metavar='',\n help='Specify a list of additional (comma \\\nseparated) URL classifier rule files')\n classifier.add_argument('-W', '--jsclassifier',\n metavar='',\n help='Specify a list of additional (comma \\\nseparated) JS classifier rule files')\n\n return parser.parse_args()", "def _parse_tour(self):\n\n tour = []\n\n while True:\n try:\n s = int(self._lines.current)\n if s == -1:\n return tour\n tour.append(s)\n except ValueError:\n break\n\n try:\n next(self._lines)\n except StopIteration:\n break\n\n return tour", "def read_corpus_3(corpus_path):\n idx = 0\n data = []\n with open(corpus_path, encoding='utf-8') as fr:\n lines = fr.readlines()\n sent_, tag_h_, tag_p_ = [], [], []\n\n for line in lines:\n idx += 1\n if line.find(\"DOC-ID\") < 0 and line != '\\n':\n try:\n [char, tag_h, tag_p] = line.strip().split()\n sent_.append(char)\n tag_h_.append(tag_h)\n tag_p_.append(tag_p)\n except:\n print(line)\n else:\n # print(line)\n if idx > 1:\n data.append((sent_, tag_h_, tag_p_))\n sent_, tag_h_, tag_p_ = [], [], []\n\n return data", "def parse_pht(file_bytes):\n ph_off = file_bytes[E_PHO[0]:E_PHO[1]] #offset of PHT\n ph_siz = file_bytes[E_PSZ[0]:E_PSZ[1]] #size of PHT\n ph_num = file_bytes[E_PHN[0]:E_PHN[1]] #number of PHTs\n for i in range(ph_num):\n p_addr = file_bytes[(ph_off + (ph_siz * i) + P_OFF[0]):P_OFF[1]] #address of program within the file image\n p_size = file_bytes[(ph_off + (ph_siz * i) + P_FSZ[0]):P_FSZ[1]] #size of the program within the file image \n v_addr = file_bytes[(ph_off + (ph_siz * i) + P_VAD[0]):P_VAD[1]] #virtual address of the program out in RAM\n v_size = file_bytes[(ph_off + (ph_siz * i) + P_MSZ[0]):P_MSZ[1]] #size of the program out in RAM\n simRAM.memory[v_addr:(v_addr + v_size)] = file_bytes[p_addr:(p_addr + p_size)]", "def __parse(self):\n lines = self.data.readlines()\n for i in range(0, len(lines)):\n line = lines[i]\n if line[0] == '#':\n continue\n tokens = line.split(\"\\t\")\n time_str = tokens[self.timecol]\n if time_str.find('start:') != -1:\n time_str = time_str.split()[1] + \" \" + time_str.split()[2]\n self.calls.append((0, 0, 0))\n self.durations.append(0.0)\n elif time_str.find('end:') != -1:\n time_str = time_str.split()[1] + \" \" + time_str.split()[2]\n time = datetime.strptime(time_str, \"%Y-%m-%d %H:%M:%S\")\n self.times.append(time)\n self.calls.append((0, 0, 0))\n self.durations.append(0.0)\n break\n else:\n duration = float(tokens[6])\n fms = int(tokens[2])\n hfms = int(tokens[3])\n svs = int(tokens[4])\n self.calls.append((fms, hfms, svs))\n self.durations.append(duration)\n time = datetime.strptime(time_str, \"%Y-%m-%d %H:%M:%S\")\n self.times.append(time)\n self.length = (self.times[len(self.times) - 1] -\\\n self.times[0]).seconds", "def get_processed_content(self, fn):\n fin = open(os.path.join(self.wiki_path, fn), 'rb')\n text = fin.read()\n fin.close()\n return (x for x in gensim.utils.tokenize(text, lowercase=True, deacc=True, errors=\"ignore\") if x not in STOPLIST)", "def makeSong(text):\n song = []\n text = text.replace(\"\\n\", \";\")\n songData = 
text.split(\";\")\n lineNumber = 1\n for line in songData:\n _parseSongLine(song, line, lineNumber, \"text\")\n lineNumber += 1\n return song", "def get_data(path=\"/content/drive/My Drive/colab_data/WhatsApp Chat with YJHD 😂.txt\"):\n ls_rows = []\n try:\n with open(path) as f:\n for line in tqdm(f):\n message_from = None\n message_text = None\n media = False\n emojis = []\n clean_text = \"\"\n mention = None\n list_to_exclude = [\"https\",\n \"This message was deleted\",\n \"<Media omitted>\"]\n split_line = line.split(\" - \")\n try:\n date = datetime.strptime(split_line[0], \"%d/%m/%y, %H:%M\")\n except ValueError as e:\n logging.debug(\"Not a Date: \" + split_line[0] + \" Exception: \" + str(e))\n continue\n message_split = split_line[1].split(\":\")\n if len(message_split) > 1:\n message_from = message_split[0]\n message_text = message_split[1].strip()\n if \"<Media omitted>\" in message_text:\n media = True\n if any(exclude in message_text for exclude in list_to_exclude):\n message_text = None\n else:\n if \"@\" in message_text:\n new_message = \"\"\n for word in message_text.split():\n if word.startswith(\"@\"):\n mention = word\n continue\n new_message += word\n message_text = new_message\n for character in message_text:\n if character in UNICODE_EMOJI:\n emojis.append(character)\n else:\n clean_text += character\n clean_text = None if clean_text.strip() == \"\" else clean_text\n emojis = None if len(emojis) < 1 else ','.join(emojis)\n POS = __get_relevant_words(clean_text)\n ls_rows.append((date, message_from, message_text, media, emojis, clean_text, mention, POS))\n df = pd.DataFrame(ls_rows, columns=[\"time\", \"from\", \"text\", \"media\", \"emojis\", \"clean_text\", \"mention\", \"POS\"])\n df.dropna(subset=['text'], inplace=True)\n return df\n except Exception as e:\n print(\"Critical Exception \" + str(e))\n return", "def read_transcript_data(fn):\n\n def _read_lines(fn):\n # NC_000007.13\tRefSeq\tcDNA_match\t50344265\t50344518\t254\t+\t.\tID=aln58042;Target=NM_001220765.2 1 254 +;gap_count=0;identity=0.0691326;idty=1;num_ident=428;num_mismatch=0;pct_coverage=6.91326;pct_identity_gap=100;pct_identity_ungap=100;score=254\n # NC_000002.11 RefSeq cDNA_match 179671939 179672150 212 - . 
ID=ed951d46-194c-477a-a480-4bc64530c5ba;Target=NM_001267550.2 1 212 +;gap_count=0;identity=0.999991;idty=1;num_ident=109223;num_mismatch=1;pct_coverage=100;pct_identity_gap=99.9991;pct_identity_ungap=99.9991\n line_re = re.compile(\n \"(?P<ref_ac>\\S+)\\s+(?P<origin>\\S+)\\s+(?P<match_type>\\S+)\\s+\"\n \"(?P<g_start>\\d+)\\s+(?P<g_end>\\d+)\\s+(?P<score>\\S+)\\s+\"\n \"(?P<strand>[-+])\\s+\\.\\s+ID=(?P<aln>[^;]+);Target=(?P<tx_ac>\\S+)\"\n \"\\s+(?P<tx_start>\\d+)\\s+(?P<tx_end>\\d+).+?\"\n \"pct_coverage=(?P<pct_coverage>[^;]+);\"\n \"pct_identity_gap=(?P<pct_identity_gap>[^;]+);\"\n \"pct_identity_ungap=(?P<pct_identity_ungap>[^;]+)\"\n )\n fh = io.open(fn, \"rb\")\n while fh.peek(1)[0] == \"#\":\n fh.readline()\n while fh.peek(3)[0:3] != \"###\":\n line = fh.readline()\n try:\n yield line_re.match(line).groupdict()\n except AttributeError:\n raise Exception(\"Failed at\", line)\n raise StopIteration\n def _key(e):\n return (e[\"tx_ac\"], not e[\"ref_ac\"].startswith(\"NC_\"), e[\"ref_ac\"], e[\"aln\"])\n return itertools.groupby(sorted(_read_lines(fn), key=_key), key=_key)", "def parse(self, text, showToc=True):\n return self.parser.parse(text, showToc, attributes=ALLOWED_ATTRIBUTES)", "def parse_output(result):\n output = result['output']\n parsed = output.split('\\n')\n output = []\n for _line in parsed:\n output.append(_line.strip())\n log.debug(_line)\n return output", "def _process(self, file: bytes) -> List[Tuple[str]]:\n decoded_text = file.decode('utf-8')\n decoded_lines = decoded_text.split('\\n')\n\n # Remove titles of Wikipedia articles if desired\n if self.remove_headers:\n filtered_lines = []\n for line in decoded_lines:\n line_strip = line.strip()\n if len(line_strip) > 0:\n if line_strip[0] != '=' and line_strip[-1] != '=':\n filtered_lines.append(line)\n decoded_lines = filtered_lines\n\n eol = self.eol or ''\n if self.split_by_line:\n text = [(line.lstrip() + eol,) for line in decoded_lines]\n else:\n text = [(eol.join(decoded_lines),)]\n\n return text", "def parse_messages(self, orig):\n data=orig[1:len(orig)-1]\n output=[]\n for i in range(0, len(data), 3):\n message_data=data[i].split(',')\n message_text=data[i+1]\n output.append({'status':message_data[1], 'number':message_data[2],'date':message_data[4],'time':message_data[5],'text':message_text})\n return output", "def parsing_no_hits_data(global_avg_trimmed_length):\n\n #No Hit Counter\n no_hit_counter = 0\n\n #Totally trimmed counter\n totally_trimmed_counter = 0\n\n #No hits results\n average_trimmed_no_hit_length=[]\n\n #Opening and Parsing blast_no_hits_report.txt\n no_hit_results = open('blast_no_hits_report.txt', 'r')\n for line in no_hit_results:\n data = line.split(\"\\t\")\n \n if line.startswith('SeqID'):\n continue\n else:\n average_trimmed_no_hit_length.append(float(data[4]))\n global_avg_trimmed_length.append(float(data[4]))\n \n no_hit_counter +=1\n \n if float(data[4]) == 0:\n totally_trimmed_counter +=1\n continue\n no_hit_results.close\n\n\n return {'no_hit_counter':no_hit_counter, 'totally_trimmed_counter':totally_trimmed_counter, \n 'average_trimmed_no_hit_length':average_trimmed_no_hit_length, 'global_avg_trimmed_length':global_avg_trimmed_length}", "def read_txt(filename):\n content = [] # list with word index : word count for each track\n string = '%'\n find = False \n words = [] \n track_id = [] # list with track ID's from the MSD\n mxm_tid = [] # track ID's from musiXmatch\n str_data = []\n\n read_file = open(filename, \"r\")\n \n for line in read_file:\n if find:\n line = line.strip() # 
converting line into list\n index1 = line.find(',') # finds index of 1st comma\n index2 = line.find(',', index1+1) # finds index of 2nd comma\n track_id.append(line[:index1]) # appends track id to list \n mxm_tid.append(line[:index2]) # appends track id to list \n res = '{' + line[index2+1:] + '}' # simulates dictionary with string\n d = eval(res) # converts string to actual dictionary \n content.append(d) # appends track data to content list\n else:\n # obtaining line with 5,000 words \n if line.startswith(string):\n line = line[1:] # getting rid of %\n words = [word.strip() for word in line.split(',')]\n find = True # already found list of words \n read_file.close() \n \n\n return (words, content, track_id, mxm_tid)", "def read_content(path):\n input_file = open(path, \"r\")\n file_handler = input_file.readlines()\n input_file.close()\n list = []\n\n for line in file_handler:\n line = line.split('\\n')\n without_empty_strings = []\n for string in line:\n if (string != \"\"):\n without_empty_strings.append(string)\n line = without_empty_strings\n line = \"\".join(line)\n list.append(line)\n return list", "def testParseUtmpFile(self):\n parser = utmp.UtmpParser()\n storage_writer = self._ParseFile([u'utmp'], parser)\n\n self.assertEqual(storage_writer.number_of_events, 14)\n\n events = list(storage_writer.GetEvents())\n\n event = events[0]\n self.assertEqual(event.terminal, u'system boot')\n self.assertEqual(event.status, u'BOOT_TIME')\n\n event = events[1]\n self.assertEqual(event.status, u'RUN_LVL')\n\n event = events[2]\n\n expected_timestamp = timelib.Timestamp.CopyFromString(\n u'2013-12-13 14:45:09')\n self.assertEqual(event.timestamp, expected_timestamp)\n\n self.assertEqual(event.user, u'LOGIN')\n self.assertEqual(event.computer_name, u'localhost')\n self.assertEqual(event.terminal, u'tty4')\n self.assertEqual(event.status, u'LOGIN_PROCESS')\n self.assertEqual(event.exit, 0)\n self.assertEqual(event.pid, 1115)\n self.assertEqual(event.terminal_id, 52)\n expected_message = (\n u'User: LOGIN '\n u'Computer Name: localhost '\n u'Terminal: tty4 '\n u'PID: 1115 '\n u'Terminal_ID: 52 '\n u'Status: LOGIN_PROCESS '\n u'IP Address: localhost '\n u'Exit: 0')\n expected_short_message = (\n u'User: LOGIN')\n self._TestGetMessageStrings(event, expected_message, expected_short_message)\n\n event = events[12]\n\n expected_timestamp = timelib.Timestamp.CopyFromString(\n u'2013-12-18 22:46:56.305504')\n self.assertEqual(event.timestamp, expected_timestamp)\n\n self.assertEqual(event.user, u'moxilo')\n self.assertEqual(event.computer_name, u'localhost')\n self.assertEqual(event.terminal, u'pts/4')\n self.assertEqual(event.status, u'USER_PROCESS')\n self.assertEqual(event.exit, 0)\n self.assertEqual(event.pid, 2684)\n self.assertEqual(event.terminal_id, 13359)\n expected_message = (\n u'User: moxilo '\n u'Computer Name: localhost '\n u'Terminal: pts/4 '\n u'PID: 2684 '\n u'Terminal_ID: 13359 '\n u'Status: USER_PROCESS '\n u'IP Address: localhost '\n u'Exit: 0')\n expected_short_message = (\n u'User: moxilo')\n self._TestGetMessageStrings(event, expected_message, expected_short_message)", "def truncate_content(s):\n dic = json.loads(s) \n dic['truncated'] = False\n dic['records_count'] = 1\n dic['record_index'] = 0\n filesize = sys.getsizeof(s)\n if filesize <= THERESHOLD:\n return [dic]\n else:\n l = len(dic['text'])\n contentsize = sys.getsizeof(dic['text'])\n pieces = math.ceil(contentsize / (THERESHOLD - (filesize - contentsize)))\n piece_size = int(l / pieces)\n dic['truncated'] = True\n dics = 
[]\n last = 0\n ind = 0\n while (last < l):\n cur_dic = copy.deepcopy(dic)\n if last + piece_size >= l:\n cur_dic['text'] = cur_dic['text'][last:]\n else:\n cur_dic['text'] = cur_dic['text'][last:last+piece_size]\n last += piece_size\n cur_dic['record_index'] = ind\n dics.append(cur_dic)\n ind += 1\n no_records = len(dics)\n for dic in dics:\n dic['records_count'] = no_records\n return dics", "def _parse_tx_infos(self, gtf_path):\n if os.path.exists('_tx_cache.bin'):\n with open('_tx_cache.bin', 'rb') as f:\n return pickle.load(f)\n result = []\n with gzip.open(gtf_path, 'rt') as f:\n for i, line in enumerate(f):\n if i % 1000 == 0:\n print('processed {}'.format(i), file=sys.stderr)\n if line.startswith('#'):\n continue\n if line.split('\\t', 3)[2] != 'transcript':\n continue\n record = GTFFeature.parse(line)\n if record.feature != 'transcript':\n continue\n result.append(\n TranscriptInfo(record.attrs['gene_id'],\n record.attrs['transcript_id'],\n record.attrs['transcript_type'],\n record.seqname,\n record.start,\n record.end))\n with open('_tx_cache.bin', 'wb') as g:\n pickle.dump(result, g)\n print(len(result), file=sys.stderr)\n return result", "def parse_records(self):\n for record in sp.parse(gzip.open(\n \"./human_uniprot_04_07_20.gz\", 'rt')):\n # print(record.taxonomy_id)\n # if record.organism != \"Homo sapiens\":\n # continue\n # print(record.features[0])\n # for comment in record.comments:\n # if comment.startswith(\"SUBCELLULAR LOCATION\"):\n # print(comment)\n self.extract_features_to_dict(record)\n self.extract_localization(record)", "def get_parsed_data():\n\n echonest_data_files = [f for f in os.listdir('.') if re.match(\"^echonest_[\\w]+.txt$\", f)]\n\n # Setting up header with user id and attributes\n header = ['user_id']\n header.extend(ATTRIBUTES)\n\n # printing header to standard out\n print \",\".join(header) \n\n # Processing each file to obtain parsed data\n for data_file in echonest_data_files:\n user_id = data_file[9:-4] # strip file prefix/suffix to get username/id\n parse_echonest_data_file(data_file, user_id)", "def main ():\n\n\tfio = fileIo('input.txt')\n text = fio.getInput()\n\n\tp = re.compile(r'#?\\d[\\s\\.]?[\\s]?')\n\tout = filter(None, p.split(text))\n\ti = 0\n\tlistOfLists = []\n\t\n\n\tfor s in out:\n\t\ti += 1\n\t\ttext = nltk.word_tokenize(s)\n\t\tpos = nltk.pos_tag(text)\n\t\tpattern = \"NP: {<DT>?<JJ>*<NN>}\"\n\t\tNPChunker = nltk.RegexpParser(pattern)\n\t\tresult = NPChunker.parse(pos)\n\t\tlistOfLists.append( result )\n\n\tprint \"Noun Count:\\n\" + str(countNouns( listOfLists ))\n\tprint \"Verb Count:\\n\" + str(countVerbs( listOfLists ))\n\tprint \"Adjective Count:\\n\" + str(countAdjectives( listOfLists ))", "def parse_nodes(self):\r\n for line in self._ansys_file:\r\n node_match = self._node_pattern.match(line)\r\n if node_match: # if we find a node\r\n # save the first 4 entries (drop last 3: thxy, thyz, thzx)\r\n (node_num, x, y, z) = line.strip().split()[:-3]\r\n n = Node(node_num, x, y, z)\r\n self.list_of_nodes.append(n)\r\n self.number_of_nodes = len(self.list_of_nodes)", "def parse_lines(lines):\n image_ids = []\n cleaned_captions = []\n\n # QUESTION 1.1\n\n for line in lines:\n # first we split the image id from caption text based on \\t\n id = line.split('\\t')[0]\n # then we extract remove .jpg#x part from image id (where x = 1 to 5)\n id = id.split('.')[0]\n # finally we extract raw text caption\n raw_caption = line.split('\\t')[1]\n # and forward to other function for cleaning the text\n caption = clean_caption(raw_caption)\n\n 
image_ids.append(id)\n cleaned_captions.append(caption)\n\n return image_ids, cleaned_captions", "def get_tweet_object_from_tweet_js(seq, num_of_tweet_block):\r\n\r\n data = \"\"\r\n res = []\r\n curr = 0\r\n start_flag = False\r\n \r\n for line in seq:\r\n line = line.rstrip()\r\n \r\n if \"\\\"tweet\\\"\" in line:\r\n start_flag = True\r\n if line != \"}, {\" and start_flag:\r\n if 'full_text' in line:\r\n line = line.replace('full_text', 'text')\r\n data += line\r\n if line == \"}, {\":\r\n start_flag = False\r\n curr += 1\r\n # remove the extra \"tweet\" in front\r\n res.append(data.split(\"\\\"tweet\\\" : \")[1])\r\n data = \"\"\r\n if curr >= num_of_tweet_block:\r\n return res\r\n \r\n return res # in case we have parsed all lines but still fewer than `num_of_tweet_block`, return the result anyways\r", "def BuildTailList(all_file_contents):\n tail_list = []\n list_all_file_contents = (all_file_contents)\n tail_start = False\n for line in list_all_file_contents:\n word = line[0:3]\n if word == \"TER\":\n tail_start = True\n if tail_start == True:\n tail_list.append(line)\n if word == \"END\":\n break\n \n return tail_list", "def readtxt(obslog):\n\n logger = log.getLogger('obslog.readtxt')\n\n if not os.path.exists(obslog):\n logger.error('Cannot access %s', obslog)\n raise SystemExit\n\n logger.info('Reading %s', obslog)\n\n with open(obslog) as f: # Since we will have to go through the data twice, read the whole file at once.\n data = f.readlines()\n\n header = ['Observation ID', 'Data Labels', 'File Numbers', 'Dataset UT', 'Target Name', 'Filters', 'Slit',\n 'Grating/Wavelength', 'Camera/Prism', 'ExpTime/LNR/Coadds', 'ACQ']\n\n pattern = dict() # Enforce formatting rules to avoid parsing comments as data:\n pattern['Observation ID'] = re.compile(r'^G[NS]-[0-9]{4}[AB]-([CQ]|DD|FT|LP|SV)-[0-9]{0,3}-[0-9]+$')\n pattern['Data Labels'] = re.compile(r'[0-9]+-*[0-9]*') # 1, 2-3, 45-67, 890-1234\n pattern['File Numbers'] = re.compile(r'[0-9]+-*[0-9]*') # 1, 2-3, 45-67, 890-1234\n pattern['Dataset UT'] = re.compile(r'^[0-9]{2}:[0-9]{2}:[0-9]{2}$') # 09:58:15\n pattern['Target Name'] = re.compile(r'[a-zA-Z0-9_-]+') # Match any string\n pattern['Filters'] = re.compile(r'[A-Z0-9\\-]+') # H, XD, H2, X, J, H\n pattern['Slit'] = re.compile(r'[a-zA-Z0-9]+') # 0.675, ACQ, LgPin\n pattern['Grating/Wavelength'] = re.compile(r'[0-9]{2,3}/[0-9]\\.[0-9]{2}') # 32/1.65, 111/1.68\n pattern['Camera/Prism'] = re.compile(r'[A-Z]{2}/[A-Z]{3}') # LB/MIR, SB/SXD\n pattern['ExpTime/LNR/Coadds'] = re.compile(r'[0-9]+\\.[0-9]/[0-9]+/[0-9]+') # 0.2/1/25, 300.0/32/1\n pattern['ACQ'] = re.compile(r'^Y*$') # Y or ''\n\n indx = {}\n for line in data:\n if 'Electronic Observing Log' in line:\n date = line.split()[-1][7:]\n logger.debug('Log date: %s', date)\n if line[0:14] == 'Observation ID': # This defines the start of the header row\n for h in header:\n indx[h] = line.find(h) # Find where each column starts\n break # No need to go farther\n\n width = {} # Find the width of each row\n for i in range(len(header) - 1): # This requires that 'header' be an ordered array (not a dictionary)\n width[header[i]] = indx[header[i + 1]] - indx[header[i]]\n width[header[i+1]] = 1 # The ACQ field is either 'Y' or blank\n\n val = {}\n match = {}\n info = {}\n for line in data:\n logger.debug('\\n%s', line)\n files = []\n for h in header:\n val[h] = line[indx[h]: indx[h] + width[h]].strip()\n match[h] = re.match(pattern[h], val[h])\n logger.debug('%s: \"%s\" %s' % (h, val[h], match[h]))\n\n # Maybe throw a warning if only match 1 
fails; indicating a likely bad pattern specification?\n\n if None in match.values():\n logger.debug('Failed to match all patterns -> This is a comment')\n continue\n\n if '-' in val['File Numbers']:\n start, stop = val['File Numbers'].split('-')\n for i in range(int(start), int(stop)+1):\n files.append(i)\n else:\n files.append(int(val['File Numbers']))\n\n for filenum in files:\n f = 'N%sS%04d.fits' % (date, filenum)\n logger.debug('File: %s', f)\n info[f] = {}\n for h in [header[0]] + header[3:]: # Skip 'Data Labels' and \"File Numbers'\n info[f][h] = val[h]\n\n logger.debug('info: %s', info)\n return info", "def __readCONTINoutput(self):\n\n titleline = 'OBJ. FCTN. VARIANCE STD. DEV.'\n chunkTitle = re.compile('OBJ. FCTN. VARIANCE STD. DEV. ')\n\n alldata = []\n\n with open(self.outputfile, 'r') as f:\n\n for line in f:\n if chunkTitle.search(line) is not None:\n\n alphadic = {}\n\n # gets the header\n alphaLine = next(f)\n if '*' in alphaLine:\n alphadic['marked'] = True\n\n alphaLine = alphaLine.replace('*', '')\n alphaParam = np.fromstring(alphaLine, sep=' ')\n\n # reduce the header line to string seperated text\n line = re.sub('\\s\\s\\s+', ' ', line).strip()\n for key, value in zip(line.split(' '), alphaParam):\n alphadic[key] = value\n # skip a line then get the data\n next(f)\n # alldata.append((alphadic, readblock(f)))\n alldata.append(\n (alphadic, readblock(f), readSummaryData(f)))\n\n # skip a line then get the data\n # print(next(f))\n\n return alldata", "def generate_metadata_queue_messages(event_time: str, metadata_file_content: str) -> List[str]:\n ingest_queue_msg_list = []\n current_part_num = 100000 #Max part number\n global MAX_COMPACT_FILE_RECORDS\n\n for line in reversed(metadata_file_content.splitlines()):\n logging.info(f\"{HEADER} Processing metadata line content: {line}\")\n if not is_json(line):\n logging.info(f\"{HEADER} Skip non JSON line content: {line}\")\n continue\n\n pnum = get_part_number(line)\n\n if pnum > current_part_num:\n break # Reached files in previous batch, stop parsing\n\n current_part_num = pnum\n\n split_output_file_json = json.loads(line)\n output_abfss_path = split_output_file_json[\"path\"]\n output_file_size = split_output_file_json[\"size\"]\n output_file_modification_time = split_output_file_json[\"modificationTime\"]\n\n try:\n https_url = convert_abfss_path_to_https(output_abfss_path)\n except Exception: # pylint: disable=bare-except\n logging.warning(f\"{HEADER} Skip invalid abfss path {output_abfss_path}\", exc_info=True)\n continue\n\n queue_msg = INGEST_QUEUE_MSG_TEMPLATE.format(blob_size=output_file_size,\n blob_url=https_url,\n event_time=event_time,\n modification_time=output_file_modification_time)\n minify_msg = json.dumps(json.loads(queue_msg))\n ingest_queue_msg_list.append(minify_msg)\n\n MAX_COMPACT_FILE_RECORDS = max(len(ingest_queue_msg_list), MAX_COMPACT_FILE_RECORDS)\n return ingest_queue_msg_list", "def create_ph_text(self):\n text_list = [f\"Top {STORIES_NUMBER} from Product Hunt:\"]\n query = {\n \"query\": \"\"\"\n query todayPosts {\n posts {\n edges {\n node {\n name\n tagline\n votesCount\n website\n url\n }\n }\n }\n }\n \"\"\"\n }\n headers = {\n \"Accept\": \"application/json\",\n \"Content-Type\": \"application/json\",\n \"Authorization\": \"Bearer \" + PH_API_TOKEN,\n }\n response = self.run_graphql_query(query, headers)\n today_posts = [\n post[\"node\"] for post in response[\"data\"][\"posts\"][\"edges\"]]\n top_posts = sorted(\n today_posts, key=lambda k: k[\"votesCount\"], reverse=True)\n # Format 
slack text\n for post in top_posts[:STORIES_NUMBER]:\n text_list.append(\n \"*<{}|{}>* - <{}|{} - {}>\".format(\n post[\"url\"],\n post[\"votesCount\"],\n post[\"website\"],\n post[\"name\"],\n post[\"tagline\"],\n )\n )\n self.logger.debug(text_list)\n return \"\\n>\".join(text_list)", "def read_hypnogram(hypno_file, epochlen = 30, epochlen_infile=None, mode='auto', exp_seconds=None):\n assert str(type(epochlen)()) == '0'\n assert epochlen_infile is None or str(type(epochlen_infile)()) == '0'\n\n with open(hypno_file, 'r') as file:\n content = file.read()\n content = content.replace('\\r', '') # remove windows style \\r\\n\n \n #conversion dictionary\n conv_dict = {'WAKE':0, 'WACH':0, 'WK':0, 'NWAKE': 0,\n 'N1': 1, 'NREM1': 1,\n 'N2': 2, 'NREM2': 2,\n 'N3': 3, 'NREM3': 3,\n 'N4':3, 'NREM4': 3,\n 'REM': 4,\n 0:0, 1:1, 2:2, 3:3, 4:4, -1:5, 5:5,\n 'ART': 5, 'A':5, 'ARTEFAKT':5, '8': 5,\n 'MT':5, 'BEWEGUNG':5, '9':5, '?': 5, ' ': 5, 'NAN': 5,\n 'UNSCORED': 5}\n \n lines = content.split('\\n')\n if mode=='auto':\n if lines[0].startswith('*'): # if there is a star, we assume it's the visbrain type\n mode = 'visbrain'\n elif lines[0].replace('-', '').isnumeric():\n mode = 'csv'\n elif lines[0].startswith('[HypnogramAASM]'):\n mode = 'dreams'\n elif lines[0].startswith(' Epoch Number ,Start Time ,Sleep Stage'):\n mode = 'alice'\n elif 'abstime' in lines[0]:\n mode = 'dat'\n elif lines[0].startswith('Signal ID:'):\n mode = 'somnoscreen'\n elif any(['luna-' in x for x in lines[:5]]):\n mode = 'luna'\n elif hypno_file.endswith('.eannot'):\n mode = 'csv'\n else :\n mode==None\n\n # reading file in format as used by Nihon Koden\n # files with a datestamp per stage annotation\n if mode=='dat':\n\n if epochlen_infile is not None:\n warnings.warn('epochlen_infile has been supplied, but hypnogram is' \n 'time based, will be ignored')\n elif exp_seconds and not epochlen_infile:\n epochlen_infile=exp_seconds//len(lines)\n print('[INFO] Assuming csv annotations with one entry per {} seconds'.format(epochlen_infile))\n\n stages = []\n for line1, line2 in zip(lines[1:-1], lines[2:]):\n if len(line1.strip())==0: continue # skip empty lines\n if len(line2.strip())==0: continue # skip empty lines\n\n curr_t, _, stage, *_ = line1.split('\\t')\n next_t,*_ = line2.split('\\t')\n curr_t = datetime.strptime(curr_t, '%Y-%m-%d %H:%M:%S')\n next_t = datetime.strptime(next_t, '%Y-%m-%d %H:%M:%S')\n assert next_t > curr_t, 'timestamp 2 is smaller than 1? 
{} < {}'.format(next_t, curr_t)\n \n sec_diff = (next_t - curr_t).seconds\n if exp_seconds and epochlen_infile!=sec_diff: \n warnings.warn('Epochlen in file is {} but {} would be selected'.format(sec_diff, epochlen_infile))\n \n stage = conv_dict[stage.upper()]\n stages.extend([stage]*sec_diff)\n \n elif mode=='somnoscreen':\n if epochlen_infile is not None:\n warnings.warn('epochlen_infile has been supplied, but information is in file, will be ignored')\n \n epochlen_infile = int(lines[5].replace('Rate: ', '').replace('s',''))\n stages = []\n for line in lines[6:]:\n if len(line.strip())==0: continue # skip empty lines\n \n _,stage = line.split('; ')\n stage = conv_dict[stage.upper()]\n stages.extend([stage]*epochlen_infile)\n\n # read hypnogram as written by visbrain (time based)\n elif mode=='visbrain':\n if epochlen_infile is not None:\n warnings.warn('epochlen_infile has been supplied, but hypnogram is time based,'\n 'will be ignored')\n stages = []\n prev_t = 0\n for line in lines:\n if len(line.strip())==0: continue\n if line[0] in '*#%/\\\\\"\\'': continue # this line seems to be a comment\n s, t = line.split('\\t')\n t = float(t)\n s = conv_dict[s.upper()]\n l = int(np.round((t-prev_t))) # length of this stage\n stages.extend([s]*l)\n prev_t = t\n \n # read hypnogram as simple CSV file, number based or string based\n elif mode=='csv':\n if exp_seconds and not epochlen_infile:\n epochlen_infile=exp_seconds//len(lines)\n print('[INFO] Assuming csv annotations with one entry per {} seconds'.format(epochlen_infile))\n\n elif epochlen_infile is None: \n if len(lines) < 2500: # we assume no recording is longer than 21 hours\n epochlen_infile = 30\n else:\n epochlen_infile = 1\n print('[INFO] Assuming csv annotations are per second')\n lines = [conv_dict[l.upper()] if isinstance(l, str) else int(l) for l in lines if len(l)>0]\n lines = [[line]*epochlen_infile for line in lines]\n stages = np.array(lines).flatten()\n \n # for the Dreams Database \n # http://www.tcts.fpms.ac.be/~devuyst/Databases/DatabaseSubjects/ \n elif mode=='dreams':\n epochlen_infile = 5\n conv_dict = {-2:5,-1:5, 0:5, 1:3, 2:2, 3:1, 4:4, 5:0} \n lines = [[int(line)] for line in lines[1:] if len(line)>0]\n lines = [[line]*epochlen_infile for line in lines]\n stages = np.array([conv_dict[l.upper()] for l in np.array(lines).flatten()])\n \n # for hypnogram created with Alice 5 software\n elif mode=='alice':\n epochlen_infile = 30\n lines = [line.split(',')[-1] for line in lines[1:] if len(line)>0]\n lines = [[line]*epochlen_infile for line in lines]\n try: stages = np.array([conv_dict[l] for l in np.array(lines).flatten()])\n except KeyError as e:\n print('Unknown sleep stage in file')\n raise e\n\n elif mode=='luna':\n # hypnograms created by Luna software from sleepdata.org\n if epochlen_infile is not None:\n warnings.warn('epochlen_infile has been supplied, but information is in file, will be ignored')\n import xml.etree.ElementTree as ET\n root = ET.fromstringlist(lines)\n # we don't actually properly parse it as it is intended, just\n # assume that it always contains the same labels\n instances = root[-1]\n stages = []\n for instance in instances:\n stage_str = instance.attrib['class']\n try: stage_nr = conv_dict[stage_str.upper()]\n except KeyError as e:\n print(f'Unknown sleep stage in file {hypno_file} : {stage_str}')\n raise e\n duration = int(instance.find('Duration').text)\n if duration!=30:\n raise ValueError(f'Duration!=30, not expected: {duration}')\n stages.extend([stage_nr]*duration)\n stages = 
np.array(stages)\n else:\n raise ValueError('This is not a recognized hypnogram: {}'.format(hypno_file))\n \n stages = stages[::epochlen]\n if len(stages)==0:\n print('[WARNING] hypnogram loading failed, len == 0')\n return np.array(stages)", "def create_content_list(contents: List[Text]) -> Text:\n # print(contents)\n return '\\n'.join(\n [template.LIST_TEMPLATE.format(\n level='',\n content=item\n ) for item in contents if item.strip()])", "def return_text_without_headlines(text):\n\n text = text.replace('\\\\n', '\\n')\n text = text.replace('\\\\r', '\\r')\n text = re.sub('h1. (.*)\\r', '', text)\n text = re.sub('h2. (.*)\\r', '', text)\n text = re.sub('h2. (.*)', '', text)\n text = re.sub('h3. (.*)\\r', '', text)\n text = re.sub('h4. (.*)\\r', '', text)\n text = text.replace('*acceptance criteria:*', \"\")\n text = text.replace('*acceptance criteria*:', \"\")\n text = text.replace('*acceptance criteria*', \"\")\n text = text.replace('*story:*', \"\")\n text = text.replace('*story*:', \"\")\n text = text.replace('*story*', \"\")\n text = text.replace('*stories:*', \"\")\n text = text.replace('*questions:*', \"\")\n text = text.replace('*questions*:', \"\")\n text = text.replace('*questions*', \"\")\n text = text.replace('*implementation notes:*', \"\")\n text = text.replace('*implementation notes*:', \"\")\n text = text.replace('*implementation notes*', \"\")\n text = text.replace('*notes:*', \"\")\n text = text.replace('*notes*:', \"\")\n text = text.replace('*notes*', \"\")\n text = text.replace('*Acceptance Criteria:*', \"\")\n text = text.replace('*Acceptance Criteria*:', \"\")\n text = text.replace('*Acceptance Criteria*', \"\")\n text = text.replace('*Story:*', \"\")\n text = text.replace('*Story*:', \"\")\n text = text.replace('*Story*', \"\")\n text = text.replace('*Stories:*', \"\")\n text = text.replace('*Questions:*', \"\")\n text = text.replace('*Questions*:', \"\")\n text = text.replace('*Questions*', \"\")\n text = text.replace('*Implementation Notes:*', \"\")\n text = text.replace('*Implementation Notes*:', \"\")\n text = text.replace('*Implementation Notes*', \"\")\n text = text.replace('*Notes:*', \"\")\n text = text.replace('*Notes*:', \"\")\n text = text.replace('*Notes*', \"\")\n text = text.replace('*Acceptance criteria:*', \"\")\n text = text.replace('*Acceptance criteria*:', \"\")\n text = text.replace('*Acceptance criteria*', \"\")\n text = text.replace('*Implementation notes:*', \"\")\n text = text.replace('*Implementation notes*:', \"\")\n text = text.replace('*Implementation notes*', \"\")\n text = text.replace('*Acceptance Criteria:*', \"\")\n text = text.replace('*Acceptance Criteria*:', \"\")\n text = text.replace('*Acceptance Criteria*', \"\")\n text = text.replace('*Implementation Notes:*', \"\")\n text = text.replace('*Implementation Notes*:', \"\")\n text = text.replace('*Implementation Notes*', \"\")\n text = text.replace(':\\r\\n****', \" \")\n text = text.replace('\\r\\n****', \". \")\n text = text.replace(':\\n****', \" \")\n text = text.replace('\\n****', \". \")\n text = text.replace(':\\r\\n***', \" \")\n text = text.replace('\\r\\n***', \". \")\n text = text.replace(':\\n***', \" \")\n text = text.replace('\\n***', \". \")\n text = text.replace(':\\r\\n**', \" \")\n text = text.replace('\\r\\n**', \". \")\n text = text.replace(':\\n**', \" \")\n text = text.replace('\\n**', \". \")\n text = text.replace(':\\r\\n*', \" \")\n text = text.replace('\\r\\n*', \". \")\n text = text.replace(':\\n*', \" \")\n text = text.replace('\\n*', \". 
\")\n text = text.replace(':\\r\\n\\r\\n', \" \")\n text = text.replace('\\r\\n\\r\\n', \". \")\n text = text.replace(':\\r\\n', \" \")\n text = text.replace('\\r\\n', \". \")\n text = text.replace('.\\n', \". \")\n text = text.replace('\\n', \" \")\n text = text.replace('.\\r', \". \")\n text = text.replace('\\r', \" \")\n text = text.replace('\\\\n', '\\n')\n text = text.replace('\\\\t', '\\t')\n text = text.replace('\\\\r', '\\r')\n text = text.replace('\\n', \" \")\n text = text.replace('\\r', \" \")\n text = text.replace('\\t', \" \")\n text = ' '.join(text.split())\n return text", "def tokenize(current_file_path):\n command_line = shell.get_command_line_from_argument_list(\n [get_ipc_message_util_executable(), '--dump', current_file_path])\n _, _, output = process_handler.run_process(\n command_line, testcase_run=False, timeout=IPCDUMP_TIMEOUT)\n output_lines = output.splitlines()\n if not output_lines:\n return []\n\n # Each output line starts with the message index followed by a \".\", but\n # we are only interested in the total number of messages in the file. To\n # find this, we add one to the index of the final message.\n try:\n last_index = int(output_lines[-1].split('.')[0])\n except ValueError:\n return []\n\n return list(range(last_index + 1))", "def parse_results(variants):\n out = []\n\n # set header\n lines = variants[0].get_output().split('\\n')\n for line in lines[:-1]:\n out.append(line.split('\\t')[0])\n\n # append output for all variants to single list\n for var in variants:\n lines = var.get_output().split('\\n')\n for i in range(0, len(lines) - 1):\n out[i] += '\\t{}'.format(lines[i].split()[1])\n\n return out", "def parse_fastq (rec_lines):\n data = []\n data.append(rec_lines[0][1:])\n data.append(rec_lines[1])\n data.append(rec_lines[3])\n return data", "def create_corpus(crawled_lyrics_file, save=False):\n\n # generating cleaned lyrics corpus from crawled data\n clean_lyrics(crawled_lyrics_file) # the corpus is one sequence of characters per line\n subprocess.call('kytea < ./data/cleaned_lyrics.txt > ./data/kytea_out.txt', shell=True) # processing with kytea\n logger.info(\" Done kytea processing! \")\n\n pron = []\n unk_pat = re.compile(u\"/補助記号/UNK\")\n slash_pat = re.compile(ur\"\\\\\")\n\n with codecs.open(\"data/kytea_out.txt\", 'UTF-8') as f:\n for line in f:\n line = line.decode(encoding=\"utf-8\").strip()\n line = unk_pat.sub(u\"\", line)\n line = slash_pat.sub(u\"\", line)\n\n triplets = line.split(u\" \") # take a look at Kytea output: https://github.com/chezou/Mykytea-python\n seq = []\n for item in triplets:\n try:\n # hir = item.split(u\"/\")[2]\n # if hir != \"UNK\":\n hir = item.split(u\"/\")[0]\n if hir != \"\\\\\":\n seq.append(hir)\n except IndexError:\n continue\n\n candidate_line = unicodedata.normalize(\"NFKC\", u\" \".join(seq))\n candidate_line = re.sub(u\"[A-Za-z]\", u\"\", candidate_line)\n candidate_line = re.sub(u\"\\s+\", u\"\", candidate_line)\n candidate_line = re.sub(u\"\\d+\", u\"5\", candidate_line)\n\n if len(candidate_line) > 10:\n pron.append(candidate_line)\n\n\n NN_input = u\"\\n\".join(pron)\n return NN_input" ]
[ "0.5664039", "0.5516081", "0.520067", "0.50546473", "0.5002102", "0.49736872", "0.48318633", "0.47706318", "0.47663316", "0.4757304", "0.47494912", "0.47263288", "0.47130182", "0.47129261", "0.4693467", "0.46848562", "0.46799994", "0.4672537", "0.46613166", "0.4635302", "0.4634309", "0.46326125", "0.462585", "0.46196043", "0.46092904", "0.45994005", "0.45774668", "0.45719868", "0.45629025", "0.4558935", "0.45548105", "0.45534828", "0.45469537", "0.4544055", "0.45434105", "0.45433837", "0.45271054", "0.45256305", "0.4514322", "0.4505823", "0.45011315", "0.44880214", "0.44787934", "0.447201", "0.44513723", "0.44346637", "0.4422559", "0.4403084", "0.43885127", "0.43827844", "0.43709755", "0.43669757", "0.43479347", "0.43454897", "0.43454665", "0.43447983", "0.4339739", "0.4338415", "0.43240526", "0.4322529", "0.4321053", "0.43132603", "0.43090945", "0.42976066", "0.4296532", "0.42950025", "0.4294731", "0.42936477", "0.42848867", "0.42827138", "0.4282309", "0.42807394", "0.4280721", "0.42771763", "0.42770776", "0.42742968", "0.42741993", "0.42732885", "0.426794", "0.4266031", "0.42634606", "0.4260744", "0.42607364", "0.42598", "0.42595172", "0.4253055", "0.42527252", "0.42505962", "0.42479166", "0.42412516", "0.42401996", "0.42396563", "0.42383358", "0.4238025", "0.42379278", "0.42352292", "0.4227945", "0.42274418", "0.42253393", "0.42245182" ]
0.754473
0
Parses the content of the transcription file. The file size is effectively unlimited, but each line is expected to be under 300000 characters. Returns a list with the following entries:\n\n index 0 url\n index 1 realTimeFactor\n index 2 transcription\n
def fileTranscriptionContent(filePath):\n    try:\n        continu = True\n        f = open(filePath, 'r')\n        fileContent = ""\n        while (continu):\n            temp = f.readline(300000)\n            if(len(temp) == 0):\n                continu = False\n            else:\n                fileContent += temp\n        results = []\n        f.close()\n        url = re.findall(r'URL:(.*?)\\n', fileContent)\n        results.append(url)\n        realTimeFactor = re.findall(r'Timing stats: real-time factor for offline decoding was (.*?) = ', fileContent)\n        results.append(realTimeFactor)\n        transcription = re.findall(r'utterance-id1 (.*?)\\n', fileContent)\n        for item in transcription:\n            if(len(item) > 500):\n                results.append(item.replace("'", "''"))\n        if((len(results[0]) > 0) and (len(results[1]) > 0) and (len(results[2]) > 0)):\n            return results\n        else:\n            Tools.writeException("fileTranscriptionContent", "ERROR attempted to parse " + filePath + " but got " + str(results))\n            return False\n    except Exception as e:\n        Tools.writeException("fileTranscriptionContent", e)
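An illustrative usage sketch for the fileTranscriptionContent document above (not part of the dataset row): the log path and the Tools.writeException stand-in are hypothetical, and the sketch assumes the function is defined in the same module alongside the re import.

import re

class Tools:
    # Minimal stand-in for the original project's logging helper (hypothetical).
    @staticmethod
    def writeException(source, message):
        print("[" + source + "] " + str(message))

# Assumes fileTranscriptionContent from the document field above is defined in this module.
parsed = fileTranscriptionContent("decode_logs/nohup_transcription.txt")
if parsed:
    url, real_time_factor, transcription = parsed[0], parsed[1], parsed[2]
    print("URL:", url[0])
    print("Real-time factor:", real_time_factor[0])
    print("Transcription (first 80 chars):", transcription[:80])
else:
    print("Parsing failed; see the logged error for details.")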
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def nohupTranscriptionContent(filePath):\n try:\n continu = True\n fileContent = \"\"\n f = open(filePath, 'r')\n while (continu):\n temp = f.readline(900000)\n if(len(temp) == 0):\n continu = False\n else:\n fileContent += temp\n results = []\n realTimeFactor = re.findall(r'Timing stats: real-time factor for offline decoding was (.*?) = ', fileContent)\n results.append(realTimeFactor)\n transcription = re.findall(r'utterance-id(.*?) (.*?)\\n', fileContent)\n transcriptionList = []\n transcriptionIDList = []\n for item in transcription:\n if(len(item[1]) > 1000):\n transcriptionIDList.append(item[0])\n transcriptionList.append(item[1])\n results.append(transcriptionList)\n results.append(transcriptionIDList)\n transcriptionTime = re.findall(r'seconds / (.*?) seconds\\.', fileContent)\n results.append(transcriptionTime)\n return results\n except Exception as e:\n Tools.writeException(\"nohupTranscriptionContent\", e)\n return False", "def read_transcription_file(file_path, audio_file_path):\n with open(file_path) as in_file:\n last_timestamp = 0\n res = []\n transcription = \"\"\n for line in in_file:\n time_stamp_match = re.match(\"\\[([0-9\\]+\\.[0-9]+)\\]\", line)\n #if this regex matched then the line is a timestamp\n if time_stamp_match:\n timestamp = float(time_stamp_match.group(1))\n if transcription and transcription.strip() not in ['(())', \"<no-speech>\"]:\n single_instance = {\"start_time\": last_timestamp, \n \"end_time\": timestamp,\n \"transcription\": transcription,\n \"audio_file\" : audio_file_path}\n res.append(single_instance)\n last_timestamp = timestamp\n else:\n last_timestamp = timestamp # this handles silence at beginning\n else:\n transcription = line.strip()\n \n return res", "def _parse_transcription_file(self, root: str, name: str) -> None:\n trans_path = os.path.join(root, name)\n with open(trans_path, \"r\", encoding=\"utf-8\") as trans:\n # Each line has the form \"ID THE TARGET TRANSCRIPTION\"\n for line in trans:\n id_, transcript = line.split(maxsplit=1)\n dropped = self._process_audio(root, id_)\n if not dropped:\n self._process_transcript(transcript)", "def parse_file(self, file_lines):\n # separate the file into chunks of text\n chunks, chunk = [], []\n # check to see what format the corpus is in, we assume that the headers are the same for all\n # texts in the file... (maybe not safe?)\n if re.match('Primary publication:', file_lines[0]):\n header = re.compile('Primary publication:')\n else:\n header = re.compile(r'&?P\\d{6}')\n for line in file_lines:\n if header.match(line):\n if len(chunk) > 0: # pylint: disable=len-as-condition\n chunks.append(chunk)\n chunk = [line]\n else:\n if len(line) > 0: # pylint: disable=len-as-condition\n chunk.append(line)\n chunks.append(chunk)\n self.chunks = chunks\n # create a rich catalog from the chunks\n re_translit = re.compile(r'(\\d+\\'?\\.) 
?(.*)')\n re_normaliz = re.compile(r'(#tr\\.ts:) ?(.*)')\n re_translat = re.compile(r'(#tr\\.en:) ?(.*)')\n for chunk in self.chunks:\n text = chunk\n if chunk[0].startswith('Primary publication:'):\n # we've got full metadata, add additional parsing later\n metadata = chunk[:25]\n text = chunk[26:]\n else: # no metadata\n metadata = []\n pnum = ''.join([c for c in text[0].split('=')[0] if c != '&']).rstrip()\n edition = text[0].split('=')[1].lstrip()\n text = text[3:]\n translit = []\n normaliz = []\n translat = []\n for line in text:\n if re.match(r'\\d+\\'?\\.', line):\n translit.append(re_translit.match(line).groups()[1])\n if line.startswith('#tr.ts:'):\n normaliz.append(re_normaliz.match(line).groups()[1])\n if line.startswith('#tr.en:'):\n translat.append(re_translat.match(line).groups()[1])\n self.catalog[pnum] = {'metadata': metadata,\n 'pnum': pnum,\n 'edition': edition,\n 'raw_text': text,\n 'transliteration': translit,\n 'normalization': normaliz,\n 'translation': translat}", "def read_transcript_data(fn):\n\n def _read_lines(fn):\n # NC_000007.13\tRefSeq\tcDNA_match\t50344265\t50344518\t254\t+\t.\tID=aln58042;Target=NM_001220765.2 1 254 +;gap_count=0;identity=0.0691326;idty=1;num_ident=428;num_mismatch=0;pct_coverage=6.91326;pct_identity_gap=100;pct_identity_ungap=100;score=254\n # NC_000002.11 RefSeq cDNA_match 179671939 179672150 212 - . ID=ed951d46-194c-477a-a480-4bc64530c5ba;Target=NM_001267550.2 1 212 +;gap_count=0;identity=0.999991;idty=1;num_ident=109223;num_mismatch=1;pct_coverage=100;pct_identity_gap=99.9991;pct_identity_ungap=99.9991\n line_re = re.compile(\n \"(?P<ref_ac>\\S+)\\s+(?P<origin>\\S+)\\s+(?P<match_type>\\S+)\\s+\"\n \"(?P<g_start>\\d+)\\s+(?P<g_end>\\d+)\\s+(?P<score>\\S+)\\s+\"\n \"(?P<strand>[-+])\\s+\\.\\s+ID=(?P<aln>[^;]+);Target=(?P<tx_ac>\\S+)\"\n \"\\s+(?P<tx_start>\\d+)\\s+(?P<tx_end>\\d+).+?\"\n \"pct_coverage=(?P<pct_coverage>[^;]+);\"\n \"pct_identity_gap=(?P<pct_identity_gap>[^;]+);\"\n \"pct_identity_ungap=(?P<pct_identity_ungap>[^;]+)\"\n )\n fh = io.open(fn, \"rb\")\n while fh.peek(1)[0] == \"#\":\n fh.readline()\n while fh.peek(3)[0:3] != \"###\":\n line = fh.readline()\n try:\n yield line_re.match(line).groupdict()\n except AttributeError:\n raise Exception(\"Failed at\", line)\n raise StopIteration\n def _key(e):\n return (e[\"tx_ac\"], not e[\"ref_ac\"].startswith(\"NC_\"), e[\"ref_ac\"], e[\"aln\"])\n return itertools.groupby(sorted(_read_lines(fn), key=_key), key=_key)", "def transcribe(self, fp):\n\n fp.seek(44, os.SEEK_SET)\n\n # FIXME: Can't use the Decoder.decode_raw() here, because\n # pocketsphinx segfaults with tempfile.SpooledTemporaryFile()\n data = fp.read()\n transcribed = []\n while True:\n try:\n self._decoder.start_utt()\n self._decoder.process_raw(data, False, True)\n self._decoder.end_utt()\n hyp = self._decoder.hyp()\n result = hyp.hypstr if hyp is not None else ''\n transcribed = [result] if result != '' else []\n self._logger.info('Transcribed: %r', transcribed)\n break\n except RuntimeError:\n self.reinit()\n\n if self._logfile is not None:\n with open(self._logfile, 'r+') as f:\n for line in f:\n self._logger.debug(line.strip())\n if self._logger.getEffectiveLevel() == logging.DEBUG:\n print(line.strip())\n f.truncate()\n\n return transcribed", "def parse_transcript(self):\n\t\t\n\t\toutput_text = tempfile.NamedTemporaryFile(mode = 'r')\n\t\twith tempfile.NamedTemporaryFile(delete=False) as input_text:\n\t\t\tinput_text.write(self.transcript_string.encode('utf-8'))\n\t\t\t#to write to the file, convert to utf-8; 
to use for jinja, convert it back to unicode\n\n\t\tos.popen(\"python vocab_resources/splitta/sbd.py -m vocab_resources/splitta/model_nb -t \" + input_text.name +\" -o \" + output_text.name)\n\t\tos.remove(input_text.name)\n\n\t\twith open(output_text.name) as parsed_text:\n\t\t\tsentence_index = {}\n\t\t\tfor index, sentence in enumerate(parsed_text):\n\t\t\t\tsentence = sentence.rstrip()\n\t\t\t\tsentence_index[index] = sentence\n\n\t\tsentence_index[len(sentence_index)] = \"Unable_to_find_matching_sentence\" #avoid outliers\n\t\tself.sentence_index = sentence_index", "def ParseSeqFile(FilePath):\n SeqFile = rSeqFile(FilePath)\n TidyFile = TidyLines(SeqFile)\n \n result = []\n\n for line in TidyFile:\n t = ( ProcessLine(line) )\n result.append(t)\n return(result)", "def _process(self, file: bytes) -> List[Tuple[str]]:\n decoded_text = file.decode('utf-8')\n decoded_lines = decoded_text.split('\\n')\n\n # Remove titles of Wikipedia articles if desired\n if self.remove_headers:\n filtered_lines = []\n for line in decoded_lines:\n line_strip = line.strip()\n if len(line_strip) > 0:\n if line_strip[0] != '=' and line_strip[-1] != '=':\n filtered_lines.append(line)\n decoded_lines = filtered_lines\n\n eol = self.eol or ''\n if self.split_by_line:\n text = [(line.lstrip() + eol,) for line in decoded_lines]\n else:\n text = [(eol.join(decoded_lines),)]\n\n return text", "def parse_text(self):\n self.text={}\n for i, lang in enumerate(LANGS):\n text=file(self.src).read()\n self.text[lang]=\"\"\n extracted, finish = \"\", 0\n start_string, stop_string = r\"<!--%s-->\" % lang, r\"<!--/%s-->\" % lang\n # Iterates to check multiple blocks of text within the file!\n # Pay attention to infinite loops!\n # AttributeError exception raised when no more blocks to extract exist\n while True:\n try:\n start=re.compile(start_string, re.IGNORECASE).search(text).span()[1]\n finish=re.compile(stop_string, re.IGNORECASE).search(text).span()[0]\n extracted+=text[start:finish]\n text=text[finish+1:]\n except AttributeError:\n break\n self.text[lang]+=extracted", "def _process(self, file: bytes) -> List[Tuple[str]]:\n decoded_text = file.decode('utf-8')\n # Replace end of line tokens\n if self.eol is not None and not self.split_by_sentence:\n decoded_text = decoded_text.replace('\\n', self.eol)\n\n # Split by sentence or unroll\n if self.split_by_sentence:\n nltk.download('punkt', quiet=True)\n text = [(sent.strip(),) for sent in nltk.tokenize.sent_tokenize(decoded_text)]\n else:\n text = [(decoded_text,)]\n\n return text", "def parse_plain_text_export(text_file):\n\n text_file.seek(0)\n for line in text_file.readlines():\n urls = re.findall(URL_REGEX, line) if line.strip() else ()\n for url in urls:\n yield {\n 'url': url,\n 'timestamp': str(datetime.now().timestamp()),\n 'title': None,\n 'tags': '',\n 'sources': [text_file.name],\n }", "def process_raw_phrases(file_path):", "def read_corpus(file_path, source):\n data = []\n for line in open(file_path):\n sent = line.strip().split(' ')\n # only append <s> and </s> to the target sentence\n if source == 'tgt':\n sent = ['<s>'] + sent + ['</s>'] #TODO: Change\n data.append(sent)\n return data", "def get_transcription(url):\n\n # Checks the format of the URL\n if \"https://www.youtube.com/watch?v=\" in url:\n input_url_id = url.replace(\"https://www.youtube.com/watch?v=\", \"\")\n elif \"https://youtu.be/\" in url:\n input_url_id = url.replace(\"https://youtu.be/\", \"\")\n\n # Creates a blank list to iterate over\n text_parts = []\n\n # Gets a list of all available 
transcripts\n try:\n\n list_of_transcripts = YouTubeTranscriptApi.list_transcripts(input_url_id)\n print(\"Checking for Transcriptions...\")\n\n # Checks to see if a manual transcript is created if not, checks to see if a generated one is created\n if 'en-US' in list_of_transcripts._manually_created_transcripts:\n print(\"Manual Transcription Found.\")\n transcript = list_of_transcripts.find_manually_created_transcript(['en-US'])\n elif 'en' in list_of_transcripts._manually_created_transcripts:\n print(\"Manual Transcription Found.\")\n transcript = list_of_transcripts.find_manually_created_transcript(['en'])\n elif 'en' in list_of_transcripts._generated_transcripts:\n print(\"Auto-Generated Transcription Found.\")\n transcript = list_of_transcripts.find_generated_transcript(['en'])\n\n # Saves the transcript into a variable to iterate over\n raw_transcription = transcript.fetch()\n\n # Indexing of raw transcripts\n iteration_of_raw = 0\n\n # Iterates over each dictionary and extracts 'text' key then appends the blank text_parts list\n for i in raw_transcription:\n indexed_dictionary = raw_transcription[iteration_of_raw]\n text_from_dictionary = indexed_dictionary['text']\n text_parts.append(text_from_dictionary)\n iteration_of_raw += 1\n # Defines how we want each text element to be separated with\n separator_for_each_text = \" \"\n\n # Joins the separator with the text_parts\n clean_transcription = separator_for_each_text.join(text_parts)\n\n # Returns the cleaned transcripts\n return clean_transcription\n\n except:\n print(\"No Transcriptions Found\")\n clean_transcription = \"No Transcriptions Found\"\n return clean_transcription", "def parse_file(self, file_path) -> list:\n data = []\n with open(file_path, 'rb') as f:\n lines = pickle.load(f)\n for line in lines:\n input, output = line\n if input.strip() == \"\" or output.strip() == \"\":\n continue\n input_len = len(input.split())\n output_len = len(output.split())\n if input_len > 50 or output_len > 50:\n continue\n data_item = Text2TextDataItem(input_text=input, output_text=output, tokenizer=self.tokenizer,\n share_vocab=self.share_vocab)\n data.append(data_item)\n return data", "def read_corpus(file_path, source):\n data = []\n for line in open(file_path, encoding='utf-8'):\n sent = line.strip().split(' ')\n # only append <s> and </s> to the target sentence\n if source == 'tgt':\n sent = ['<s>'] + sent + ['</s>']\n data.append(sent)\n\n return data", "def parse(self):\n\n lines = self._get_file_lines( )\n\n message, translation = None, None\n comment, status, sources = None, None, None\n temp_msgid, temp_msgstr = None, None\n previous, current = None, None\n\n tstore = UT3Store( )\n\n for curl in lines:\n\n curl = curl.strip( )\n\n if len(curl) == 0:\n current = LINE_EMPTY\n elif curl[0] == '#':\n current = LINE_COMMENT\n status, comment, sources = \\\n _extract_comment_values(curl, status, comment, sources)\n else:\n keyword, message = _parse_line(curl)\n if keyword is not None:\n if keyword == \"msgid\":\n current = LINE_MSGID\n # and now initialise them for later use\n temp_msgid = message\n temp_msgstr = \"\"\n elif keyword == \"msgstr\":\n current = LINE_MSGSTR\n temp_msgstr = message\n else:\n current = LINE_UNKNOWN\n logging.error(\"unknown keyword: %s\" % (keyword))\n else:\n if message is not None:\n if current == LINE_MSGID:\n temp_msgid = temp_msgid + message\n elif current == LINE_MSGSTR:\n temp_msgstr = temp_msgstr + message\n else:\n logging.error(\"unknown mode\")\n\n if previous == LINE_MSGSTR and current != 
LINE_MSGSTR:\n # we're not in msgstr mode anymore --> save the current entry\n entry = _make_item(message, translation, \\\n sources, comment, status)\n if entry is not None:\n tstore.append(entry)\n\n # reset the item values\n message, translation = None, None\n comment, status, sources = None, None, None\n\n # save msgid and msgstr for storing them later\n message = temp_msgid\n translation = temp_msgstr\n # save line state\n previous = current\n\n # finally append the last pair\n if previous == LINE_MSGSTR:\n entry = _make_item(message, translation, sources, comment, status)\n if entry is not None:\n tstore.append(entry)\n\n return tstore", "def parse_lines(filename):\n line_counter = 0\n with open(filename, 'r') as rf:\n for line_txt in rf:\n try:\n d = json.loads(line_txt)\n tup = (\n d['attributed_to'],\n int(d['date_time'][8:10]),\n d.get('used_first_time_today', False),\n d.get('first_utm_source', 'unknown') \n )\n except:\n print('Error parsing line_txt:', line_txt)\n line_counter += 1\n if line_counter % 10 ** 6 == 0:\n print('read %dM lines' % (line_counter // 10 ** 6))\n yield tup # yield: https://stackoverflow.com/a/231855", "def _get_transcript_entries(transcript_directory):\n transcript_files = iglob_recursive(transcript_directory, '*.trans.txt')\n for transcript_file in transcript_files:\n with open(transcript_file, 'r') as f:\n for line in f:\n # Strip included new line symbol\n line = line.rstrip('\\n')\n\n # Each line is in the form\n # 00-000000-0000 WORD1 WORD2 ...\n splitted = line.split(' ', 1)\n yield splitted", "def _read_txt(file_path):\n translation_pairs = []\n with file_path.open() as f:\n for line in f:\n translation_pairs.append(\n evaluation.TranslationPair(source=None, translation=line.strip())\n )\n return translation_pairs", "def parseOutText(f):\n\n\n f.seek(0) ### go back to beginning of file (annoying)\n all_text = f.read()\n ### split off metadata\n \n content = re.split(\"X-FileName:.*$\", all_text, flags=re.MULTILINE, maxsplit=1)\n words = \"\"\n if len(content) > 1:\n text_string = content[1]\n\n ## remove mails that are forwarded or to which are responded\n # e.g. 
---------------------- Forwarded\"\n text_string = re.split(\"-*\\sForwarded\", text_string, maxsplit=1)[0]\n\n # -----Original Message-----\n text_string = re.split(\"-*\\Original\\sMessage\", text_string, maxsplit=1)[0]\n\n # Vince J Kaminski@ECT\n # 04/30/2001 02:28 PM\n # To:\tStanley Horton/Corp/Enron@Enron, Danny McCarty/ET&S/Enron@Enron\n # cc:\tVince J Kaminski/HOU/ECT@ECT \n # or\n # Vince J Kaminski@ECT\n # 04/30/2001 02:28 PM\n # to:\tStanley Horton/Corp/Enron@Enron, Danny McCarty/ET&S/Enron@Enron\n # cc:\tVince J Kaminski/HOU/ECT@ECT \n \n text_string = re.split(\"((.*\\n){2})[Tt]o:\\s\", text_string, maxsplit=1)[0]\n\n ### remove punctuation\n # should be autopmatically by scikit learn\n #text_string = text_string.translate(string.maketrans(\"\", \"\"), string.punctuation)\n\n ### project part 2: comment out the line below\n #words = text_string\n\n ### split the text string into individual words, stem each word,\n ### and append the stemmed word to words (make sure there's a single\n ### space between each stemmed word)\n from nltk.stem.snowball import SnowballStemmer\n\n stemmer = SnowballStemmer(\"english\")\n words = [stemmer.stem(word) for word in text_string.split()]\n\n\n\n return \" \".join(words)", "def loadTIText(self, file):\n next = 1\n startAddr = 0\n segmentdata = []\n #Convert data for MSP430, TXT-File is parsed line by line\n while next >= 1:\n #Read one line\n l = file.readline()\n if not l: break #EOF\n l = l.strip()\n if l[0] == 'q': break\n elif l[0] == '@': #if @ => new address => send frame and set new addr.\n #create a new segment\n if segmentdata:\n self.segments.append( Segment(startAddr, ''.join(segmentdata)) )\n startAddr = int(l[1:],16)\n segmentdata = []\n else:\n for i in l.split():\n segmentdata.append(chr(int(i,16)))\n if segmentdata:\n self.segments.append( Segment(startAddr, ''.join(segmentdata)) )", "def read_txt(filename):\n content = [] # list with word index : word count for each track\n string = '%'\n find = False \n words = [] \n track_id = [] # list with track ID's from the MSD\n mxm_tid = [] # track ID's from musiXmatch\n str_data = []\n\n read_file = open(filename, \"r\")\n \n for line in read_file:\n if find:\n line = line.strip() # converting line into list\n index1 = line.find(',') # finds index of 1st comma\n index2 = line.find(',', index1+1) # finds index of 2nd comma\n track_id.append(line[:index1]) # appends track id to list \n mxm_tid.append(line[:index2]) # appends track id to list \n res = '{' + line[index2+1:] + '}' # simulates dictionary with string\n d = eval(res) # converts string to actual dictionary \n content.append(d) # appends track data to content list\n else:\n # obtaining line with 5,000 words \n if line.startswith(string):\n line = line[1:] # getting rid of %\n words = [word.strip() for word in line.split(',')]\n find = True # already found list of words \n read_file.close() \n \n\n return (words, content, track_id, mxm_tid)", "def parse_transcripts(transcript_lines):\n LOG.info(\"Parsing transcripts\")\n transcripts = parse_ensembl_transcripts(transcript_lines)\n\n # Since there can be multiple lines with information about the same transcript\n # we store transcript information in a dictionary for now\n parsed_transcripts = {}\n # Loop over the parsed transcripts\n for tx in transcripts:\n tx_id = tx[\"ensembl_transcript_id\"]\n ens_gene_id = tx[\"ensembl_gene_id\"]\n\n # Check if the transcript has been added\n # If not, create a new transcript\n if not tx_id in parsed_transcripts:\n tx_info = {\n \"chrom\": 
tx[\"chrom\"],\n \"transcript_start\": tx[\"transcript_start\"],\n \"transcript_end\": tx[\"transcript_end\"],\n \"mrna\": set(),\n \"mrna_predicted\": set(),\n \"nc_rna\": set(),\n \"ensembl_gene_id\": ens_gene_id,\n \"ensembl_transcript_id\": tx_id,\n }\n parsed_transcripts[tx_id] = tx_info\n\n tx_info = parsed_transcripts[tx_id]\n # Add the ref seq information\n if tx.get(\"refseq_mrna_predicted\"):\n tx_info[\"mrna_predicted\"].add(tx[\"refseq_mrna_predicted\"])\n if tx.get(\"refseq_mrna\"):\n tx_info[\"mrna\"].add(tx[\"refseq_mrna\"])\n if tx.get(\"refseq_ncrna\"):\n tx_info[\"nc_rna\"].add(tx[\"refseq_ncrna\"])\n\n return parsed_transcripts", "def index_file(self, file_name):\n self.contents = []\n article_text = \"\"\n article_annots = [] # for annot-only index\n\n f = open(file_name, \"r\")\n for line in f:\n line = line.replace(\"#redirect\", \"\")\n # ------ Reaches the end tag for an article ---------\n if re.search(r'</doc>', line):\n # ignores null titles\n if wiki_uri is None:\n print \"\\tINFO: Null Wikipedia title!\"\n # ignores disambiguation pages\n elif (wiki_uri.endswith(\"(disambiguation)>\")) or \\\n ((len(article_text) < 200) and (\"may refer to:\" in article_text)):\n print \"\\tINFO: disambiguation page \" + wiki_uri + \" ignored!\"\n # ignores list pages\n elif (wiki_uri.startswith(\"<wikipedia:List_of\")) or (wiki_uri.startswith(\"<wikipedia:Table_of\")):\n print \"\\tINFO: List page \" + wiki_uri + \" ignored!\"\n # adds the document to the index\n else:\n self.__add_to_contents(Lucene.FIELDNAME_ID, wiki_uri, Lucene.FIELDTYPE_ID)\n if self.annot_only:\n self.__add_to_contents(Lucene.FIELDNAME_CONTENTS, article_annots, Lucene.FIELDTYPE_ID_TV)\n else:\n self.__add_to_contents(Lucene.FIELDNAME_CONTENTS, article_text, Lucene.FIELDTYPE_TEXT_TVP)\n self.lucene.add_document(self.contents)\n self.contents = []\n article_text = \"\"\n article_annots = []\n\n # ------ Process other lines of article ---------\n tag_iter = list(self.tagRE.finditer(line))\n # adds line to content if there is no annotation\n if len(tag_iter) == 0:\n article_text += line\n continue\n # A tag is detected in the line\n for t in tag_iter:\n tag = t.group(3)\n if tag == \"doc\":\n doc_title = self.titleRE.search(t.group(2))\n wiki_uri = WikipediaUtils.wiki_title_to_uri(doc_title.group(1)) if doc_title else None\n if tag == \"a\":\n article_text += t.group(1) + t.group(4) # resolves annotations and replace them with mention\n # extracts only annotations\n if self.annot_only:\n link_title = self.linkRE.search(t.group(2))\n link_uri = WikipediaUtils.wiki_title_to_uri(unquote(link_title.group(1))) if link_title else None\n if link_uri is not None:\n article_annots.append(link_uri)\n else:\n print \"\\nINFO: link to the annotation not found in \" + file_name\n last_span = tag_iter[-1].span()\n article_text += line[last_span[1]:]\n f.close()", "def read_data(self, filePath):\n with open(filePath, 'r', encoding='iso-8859-1') as f:\n for sentence in f.readlines():\n sentence = sentence.replace('\\n', '')\\\n .replace('\"', '')\\\n .replace('\\'', '')\\\n .replace('.', '')\\\n .replace(',', '')\\\n .replace('[', '')\\\n .replace(']', '')\\\n .replace('(', '')\\\n .replace(')', '')\\\n .replace(':', '')\\\n .replace('--', '')\\\n .replace('-', '')\\\n .replace('\\\\', '')\\\n .replace('0', '')\\\n .replace('1', '')\\\n .replace('2', '')\\\n .replace('3', '')\\\n .replace('4', '')\\\n .replace('5', '')\\\n .replace('6', '')\\\n .replace('7', '')\\\n .replace('8', '')\\\n .replace('9', '')\\\n .replace('`', 
'')\\\n .replace('=', '')\\\n .replace('$', '')\\\n .replace('/', '')\\\n .replace('*', '')\\\n .replace(';', '')\\\n .replace('<b>', '')\\\n .replace('%', '')\n sentence = sentence.split(' ')\n sentence = list(filter(lambda x: x, sentence))\n if sentence:\n self.word_num += len(sentence)\n self.maxlen = self.maxlen if self.maxlen >= len(\n sentence) else len(sentence)\n self.minlen = self.minlen if self.minlen <= len(\n sentence) else len(sentence)\n if 'pos' in filePath:\n self.Pos.append([sentence, self.feelMap['pos']])\n else:\n self.Neg.append([sentence, self.feelMap['neg']])", "def process_corpus(self):\n sentences = []\n sentence = []\n with open(str(self.file), encoding=self.encoding) as f:\n\n line = f.readline()\n\n while line:\n\n if line.startswith(\"#\"):\n line = f.readline()\n continue\n\n if line.strip().replace(\"\", \"\") == \"\":\n if len(sentence) > 0:\n self.infer_space_after(sentence)\n if self.tagging_scheme is not None:\n self.convert_tag_scheme(\n sentence, target_scheme=\"iobes\"\n )\n\n sentences.append(sentence)\n sentence = []\n\n else:\n fields = re.split(r\"\\s+\", line)\n token = fields[0] # text column\n token_tags = {\n v: fields[k]\n for k, v in self.columns.items()\n if v != \"text\"\n }\n sentence.append({\"name\": token, \"tags\": token_tags})\n\n line = f.readline()\n\n return sentences", "def parseUpload(dbconnection, fileName):\n nhContent = ParseText.nohupTranscriptionContent(fileName)\n count = 0\n while count < len(nhContent[0]):\n try:\n rtf = nhContent[0][count]\n transcription = nhContent[1][count].replace(\"'\", \"''\").replace(\"_\", \"\")\n dbID = nhContent[2][count].replace(\".\", \"\")\n duration = nhContent[3][count]\n DatabaseInteract.insertTranscription(dbconnection, rtf, transcription, duration, dbID)\n count += 1\n except:\n print(\"couldnt upload one at index \" + str(count))\n count += 1", "def tokenize(self, path):\n assert os.path.exists(path)\n # Add words to the dictionary\n with open(path, 'r') as f:\n tokens = 0\n lines_c = 0\n for line in f:\n words = ['<start>'] + line.split() + ['<eos>']\n len_ = len(words)\n tokens += len_\n if(self.max_sent_length <len_): self.max_sent_length = len_\n lines_c+=1\n for word in words:\n self.dictionary.add_word(word)\n\n # Tokenize file content\n with open(path, 'r') as f:\n #print('Creating tensor of size: ', lines_c, self.max_sent_length)\n print('Reading files: ', path)\n ids = [] # torch.LongTensor(lines_c, self.max_sent_length)\n target_vecs = [] # torch.LongTensor(lines_c, self.max_sent_length)\n line_c = 0\n count =0\n for line in f:\n words = ['<start>'] + line.split() + ['<eos>']\n sentence_len = len(words)\n if(sentence_len>self.max_length): \n #print (\"sen len: \", sentence_len, ' exceed limit: ', self.max_length, ' skipped!!', count)\n count+=1\n continue\n ids.append([])\n target_vecs.append([])\n #if(self.max_sent_length<sentence_len): self.max_sent_length = sentence_len\n token = 0\n for word in words:\n if(token<sentence_len-1 ): ids[line_c].append( self.dictionary.word2idx[word])\n if(token>0): target_vecs[line_c].append( self.dictionary.word2idx[word] )\n token += 1\n \n line_c +=1\n\n return ids, target_vecs", "def txtread(path):\n try:\n with open(path) as fp:\n for line in fp:\n line = line.rstrip().split()\n ns, ne, tr = line[0], line[1], ' '.join(line[2:])\n transcrpt = (int(ns), int(ne)), tr\n return transcrpt\n except FileNotFoundError:\n print(f\"{path} does not exist!\")\n return None", "def read_corpus_3(corpus_path):\n idx = 0\n data = []\n with 
open(corpus_path, encoding='utf-8') as fr:\n lines = fr.readlines()\n sent_, tag_h_, tag_p_ = [], [], []\n\n for line in lines:\n idx += 1\n if line.find(\"DOC-ID\") < 0 and line != '\\n':\n try:\n [char, tag_h, tag_p] = line.strip().split()\n sent_.append(char)\n tag_h_.append(tag_h)\n tag_p_.append(tag_p)\n except:\n print(line)\n else:\n # print(line)\n if idx > 1:\n data.append((sent_, tag_h_, tag_p_))\n sent_, tag_h_, tag_p_ = [], [], []\n\n return data", "def read_subtitles(self):\n\n # Group 1: index, Group 2: Start Time, Group 3: End Time, Group 4: Text\n\n patterns = [\n r\"(\\d+)\\n(\\d\\d:\\d\\d:\\d\\d,\\d\\d\\d) --> (\\d\\d:\\d\\d:\\d\\d,\\d\\d\\d)\\n((?:.+\\n)*.+)\",\n r\"(\\d+)\\r\\n(\\d\\d:\\d\\d:\\d\\d,\\d\\d\\d) --> (\\d\\d:\\d\\d:\\d\\d,\\d\\d\\d)\\r\\n((?:.+\\r\\n)*.+)\",\n # Reports pattern\n r\"(\\d+)\\r(\\d\\d:\\d\\d:\\d\\d,\\d\\d\\d) --> (\\d\\d:\\d\\d:\\d\\d,\\d\\d\\d)\\n((?:.+\\r)*.+)\"\n ]\n\n for pattern in patterns:\n re_subs = re.findall(pattern, self.subtitles, re.M | re.I)\n if(len(re_subs) > 1):\n self.re_subs = re_subs\n return\n\n raise Exception(\n f're_subs length is {len(re_subs)}. Maybe the regex pattern is falty?')", "def transform(self, email_path):\n mail = open(email_path, 'r')\n content = mail.read(self.max_read_len)\n i = 0\n while not(content[i] == '\\n' and content[i + 1] == '\\n') and i < len(content) - self.ngram:\n i += 1\n header = content[:i]\n # TODO find a smarter way deal with the header-body problem\n body = content[i + 2:]\n if len(body) + len(header) > self.max_read_len:\n body = body[:max(1000, self.max_read_len - len(header))]\n header_set = self.tokenize(header)\n body_set = self.tokenize(body)\n mail.close()\n return (header_set, body_set)", "def _read_data(cls, input_file): # 这里是对文件的处理\r\n with open(input_file, encoding='utf-8') as f:\r\n lines = []\r\n\r\n for line in f:\r\n line = json.loads(line)\r\n words = ' '.join(list(line['natural']))\r\n labels = ' '.join(line['tag_seq'])\r\n poss = line['pos_seq']\r\n dps = line['dp_seq']\r\n head = line['head_seq']\r\n lines.append([labels, words, poss, dps, head])\r\n\r\n return lines", "def _process_trans(self):\n\t\tt_word = list()\n\t\t# with open(self.trans_file_path, 'r', encoding='utf-8') as in_f:\n\t\twith open(self.trans_file_path, 'r') as in_f:\n\t\t\tf = iter(in_f)\n\t\t\tfor line in f:\n\t\t\t\tword = line.lower().split();\n\t\t\t\tif len(word) == 0 or len(word) == 1:\n\t\t\t\t\tcontinue\n\t\t\t\t'''\n\t\t\t\tif w_0[len(w_0)-1] == '+': # ignore the label starting with #pat+ || #doc+\n\t\t\t\t\tcontinue;\n\t\t\t\t'''\n\t\t\t\tfor i in range(len(word)):\n\t\t\t\t\tif i != 0: # exclude the channel\n\t\t\t\t\t\tw = word[i]\n\t\t\t\t\t\tif w == \"<name>\":\n\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tt_word.append([w, 0, 0])\n\t\treturn t_word", "def read_processed_data_from_file(file, encoding='latin1'):\n\n with open(file, encoding=encoding) as f:\n raw = f.read()\n\n lines = raw.split('\\n')\n labeled_texts = []\n n = len(lines) - 1\n for i, line in enumerate(lines):\n print(f'\\rLoading review {i} of {n}', end='')\n if line == '':\n continue\n tagged_words = re.findall(r'(.+?\\\\.+?) 
', line)\n label = re.findall(r'#(\\d+.\\d)#', line)[0]\n labeled_texts.append((tagged_words, label))\n print()\n return labeled_texts", "def tokenize_podcast_transcript(args):\n DATA_DIR = os.path.join(os.getcwd(), 'data', args.project_id)\n story_file = os.path.join(DATA_DIR, 'podcast-transcription.txt')\n\n # Read all words and tokenize them\n with open(story_file, 'r') as fp:\n data = fp.readlines()\n\n data = [item.split(' ') for item in data]\n data = [\n item[:-2] + [' '.join(item[-2:])] if item[-1] == '\\n' else item\n for item in data\n ]\n data = [item for sublist in data for item in sublist]\n\n df = pd.DataFrame(data, columns=['word'])\n df['conversation_id'] = 1\n\n return df", "def get_parsed_paragraphs_from_file(self, processed_path):\n with open(processed_path, \"r\") as f:\n sent_len = json.loads(f.readline())['sentence_lens']\n paragraphs = list()\n line_no = 1\n para_idx = 0\n while para_idx < len(sent_len):\n paragraph = list()\n end_no = sent_len[para_idx]\n while line_no < end_no:\n sent = json.loads(f.readline())\n sent[\"sid\"] = self.generate_sid(sent, processed_path, line_no)\n paragraph.append(sent)\n line_no += 1\n para_idx += 1\n paragraphs.append(paragraph)\n return paragraphs", "def read_transcript(transcript_id):\n with open(transcript_path_for_id(transcript_id)) as f:\n return transcript_lines(f.read())", "def parse_ensembl_transcripts(lines):\n header = []\n LOG.info(\"Parsing ensembl transcripts from file\")\n for index, line in enumerate(lines):\n # File allways start with a header line\n if index == 0:\n header = line.rstrip().split(\"\\t\")\n # After that each line represents a transcript\n else:\n yield parse_ensembl_line(line, header)", "def parse(self, file_name):\n try:\n fp = open(file_name, encoding='utf-8')\n trigrams = []\n\n for line in fp.readlines():\n for i in range(len(line) - 2):\n trigrams.append(line[i : i + 3].lower())\n\n fp.close()\n return trigrams\n except UnicodeDecodeError:\n print(\"Skipping file: \", file_name)\n return []", "def read_file(filename):\n contents = []\n temp = []\n\n with open(filename, \"r\") as f:\n lines = f.readlines()\n\n for line in lines:\n if line != \"\\n\":\n temp.append(line)\n else:\n if len(temp) > 0:\n contents.append(temp)\n temp = []\n\n if len(temp) > 0:\n contents.append(temp) # appends the last subject\n\n for i, content in enumerate(contents):\n contents[i] = \" \".join(content)\n\n return contents", "def read_article_2(filename):\n file = open(filename, \"r\")\n filedata = file.readlines()\n sentences = sent_tokenize(filedata[0])\n return sentences", "def transcript_lines(transcript_text):\n lines = []\n for line in transcript_text.splitlines():\n if line.strip() and line.strip()[0] != '#':\n split = line.split(':')\n speaker = split[0][-1]\n utterance = ' '.join(split[1:]).strip()\n lines.append((speaker, utterance))\n return lines", "def loadTIText(self, file):\n startAddr = 0\n segmentdata = b''\n # Convert data for MSP430, TXT-File is parsed line by line\n while True:\n # Read one line\n l = file.readline()\n if not l:\n break # EOF\n l = l.strip()\n if l[0] == 'q':\n break\n elif l[0] == '@': #if @ => new address => send frame and set new addr.\n #create a new segment\n if segmentdata:\n self.segments.append(Segment(startAddr, segmentdata))\n startAddr = int(l[1:], 16)\n segmentdata = b''\n else:\n for i in l.split():\n segmentdata += bytes([int(i, 16)])\n if segmentdata:\n self.segments.append( Segment(startAddr, segmentdata) )", "def read_corpus_2(corpus_path):\n idx = 0\n data = []\n 
with open(corpus_path, encoding='utf-8') as fr:\n lines = fr.readlines()\n sent_, tag_ = [], []\n\n for line in lines:\n idx += 1\n if line.find(\"DOC-ID\") < 0 and line != '\\n':\n try:\n [char, label] = line.strip().split()\n sent_.append(char)\n tag_.append(label)\n except:\n print(line)\n else:\n # print(line)\n if idx > 1:\n data.append((sent_, tag_))\n sent_, tag_ = [], []\n\n return data", "def parse_file(self, filepath):\n\n xml_file = open(filepath, \"r\")\n xml = xml_file.read()\n content = \"\"\n\n xml_file.close()\n\n for line in xml.replace(\"&amp;\", \"&\").split(\"\\n\"):\n if content != \"\":\n content += \" \"\n content += re.sub(\"(<(P|F).*?>)|(<\\\\/P>)\", \"\", line).strip()\n # XML cleanning\n\n start_offset = \"<START_OFFSET_DUCFileRep>\"\n content = start_offset + content\n content = content.replace(\"</LP>\", \"</LP>%s\"%start_offset)\n content = content.replace(\"</TEXT>\", \"</TEXT>%s\"%start_offset)\n content = re.sub(\"%s.*?<LP>(.*?)<\\\\/LP>\"%start_offset, \"\\\\1\", content)\n content = re.sub(\"%s.*?<TEXT>(.*?)<\\\\/TEXT>\"%start_offset, \"\\\\1\", content)\n content = re.sub(\"%s.*\"%start_offset, \"\", content)\n\n self.set_content(content)", "def read_po(self, inputfile):\n is_index = False\n lines = inputfile.readlines()\n index = ''\n value = ''\n for line in lines:\n if line.startswith('#'):\n continue\n elif line.startswith('msgid'):\n is_index = True\n self.translations[index] = value\n index = ''\n value = ''\n elif line.startswith('msgstr'):\n is_index = False\n\n v = re.match('.*\"(.*)\".*', line)\n if v:\n if is_index:\n index += ''.join(v.groups())\n else:\n value += ''.join(v.groups())", "def read_corpus(corpus_path):\n idx = 0\n data = []\n with open(corpus_path, encoding='utf-8') as fr:\n lines = fr.readlines()\n sent_, tag_, pos_, ner_ = [], [], [], []\n\n for line in lines:\n idx += 1\n if line.find(\"DOC-ID\") < 0 and line != '\\n':\n try:\n [char, label, pos_tag, ner_tag] = line.strip().split()\n sent_.append(char)\n tag_.append(label)\n pos_.append(pos_tag)\n ner_.append(ner_tag)\n except:\n print(line)\n else:\n # print(line)\n if idx > 1:\n data.append((sent_, tag_, pos_, ner_))\n sent_, tag_, pos_, ner_ = [], [], [], []\n\n return data", "def parse_txt(self, path):\n with open(path) as f:\n data_txt = f.read()\n\n #Parseo bien ese txt con formato de mierda\n pattern = re.compile('(\\d{1,2}\\/\\d{1,2}\\/\\d{2}\\s\\d{2}:\\d{2})\\s-\\s(?:\\u200e*)')\n lista = pattern.split(data_txt)\n \n df = pd.DataFrame(data={\n 'timestamp':[lista[index] for index in range(len(lista)) if index%2==1],\n 'temp':[lista[index] for index in range(len(lista)) if (not index%2==1 and index!=0)]\n })\n\n df['timestamp'] = pd.to_datetime(df['timestamp'] , format='%d/%m/%y %H:%M')\n\n df[['user','message']] = df['temp'].str.split(':', 1, expand=True)\n df['message'] = df['message'].str.replace('\\\\n', ' ')\n df = df.drop(columns = 'temp')\n \n #si no hay mensaje, es porque no fue un mensaje si no una accion. 
Lo pongo como tal\n df['action'] = df[df['message'].isna()]['user'].str.replace('\\\\n', ' ')\n df.loc[df.loc[:,'message'].isna(), 'user'] = None\n\n self.__users = df.user.dropna().unique()\n\n df.loc[df.loc[:,'message'].isna(), 'user'] = df['action'].apply(self.__which_user)\n\n #Saco signos de puntuacion y saco mayusculas\n df['message'] = df['message'].str.replace('[{}]'.format(self.__punctuation), '').str.lower().str.strip()\n\n df = df.set_index('timestamp')\n\n return df", "def analyse(self):\n logging.info(\"transferring text to CorpusCook...\")\n\n paragraphs = self.text.split('\\n\\n')\n print(\"mean length of splitted lines\", (mean([len(p) for p in paragraphs])))\n\n # If TIKA resolved '\\n'\n if (mean([len(p) for p in paragraphs])) > 80:\n paragraphs = [re.sub(r\"- *\\n\", '', p) for p in paragraphs]\n paragraphs = [p.replace('\\n', \" \") for p in paragraphs]\n paragraphs = [p.replace(';', \" \") for p in paragraphs]\n joiner = \" \"\n else:\n # If TIKA did not\n joiner = \" \"\n\n processed_text = joiner.join([p\n for p in paragraphs\n if\n p and\n ks_2samp(self.normal_data, list(p)).pvalue > self.threshold\n ]\n )\n\n return processed_text.strip()[:self.length_limit]", "def parse_file(self):\n for num, line in enumerate(self._text):\n if \"CRYSTAL STRUCTURE SOLUTION\" in line:\n line = line.strip().strip('+').strip()\n if 'SHELXTL' in line:\n self.version = 'SHELXT ' + line.split()[-1]\n if line.strip().startswith('R1 Rweak Alpha'):\n for n in range(100):\n if not self._text[num + 1 + n]:\n break\n if self._text[num + 1]:\n self.solutions[self._text[num + 1 + n][58:76].strip()] = self._text[num + 1 + n][37:51].strip()", "def read_text_file(file_name):\n \n file_data = {}\n \n with open(file_name) as fp:\n lines = fp.readlines()\n for line in lines:\n lineno = line.strip().split(':')[0].strip()\n #here we are checking whether a particluar line in the file contains a valid data [i.e line number and content]\n try:\n content = line.strip().split(':')[1].strip()\n file_data[lineno] = content\n except IndexError:\n pass\n \n return file_data", "def parse_results_file(filename):\n\tfile = open(filename, 'r')\n\tpretext=[line for line in file.readlines() if line.strip()]\n\tfile.close()\n\n\ttext = []\n\tprocessed = []\n\tlanguages = 'NONE'\n\tID = 'NONE'\n\t\n\tmoreheader = raw_input('Extra header labels from question field (e.g.: item,condition,factor1,factor2): ')\n\tstim_type = raw_input('What type are your stims? (i.e. AcceptabilityJudgment): ')\n\toutput_loc = raw_input('Where would you like to put your parsed file? 
(enter filename path): ')\n\t\n\t#takes out comments\n\tfor line in pretext:\n\t\tif re.match('#', line):\n\t\t\tcontinue\n\t\telse:\n\t\t\ttext.append(line)\n\n\tfirst = 1;\n\n\tfor line in range(len(text)):\n\t\t#get their info\n\t\tif re.search('Form', text[line]):\n\t\t\tif re.search('number', text[line]):\n\t\t\t\tID = re.split('number,', text[line])[1].strip()\n\t\t\telif re.search('age', text[line]):\n\t\t\t\tlanguages = re.split('age,', text[line])[1].strip()\n\n\t\t#looks for the main stimulus type, as entered earlier\t\t\n\t\tif re.search(stim_type, text[line]):\n\t\t\tif first:\n\t\t\t\t#print 'first'\n\t\t\t\tprocessed.append(str(ID+ ','+languages+','+text[line]))\n\t\t\t\tfirst=0\n\t\t\telse:\n\t\t\t\ttoAmend = processed.pop()\n\t\t\t\t#print str('toAmend: ' + toAmend)\n\t\t\t\ttoAdd=''\n\t\t\t\tsplits = re.split('NULL,', text[line])\n\t\t\t\tfor thing in splits[1:]:\n\t\t\t\t\tif thing is not '':\n\t\t\t\t\t\ttoAdd = str(toAdd + ',' + thing.strip(','))\n\t\t\t\t#print str('toAdd: ' + toAdd)\n\t\t\t\tprocessed.append(str(toAmend.strip()+ toAdd))\n\t\t\t\tfirst = 1\n\n\t\t#if the line is a question line, there's more to append\n\t\tif re.search('Question', text[line]):\n\t\t\ttoAmend = processed.pop()\n\t\t\tpart = re.split('\\$', text[line])[1]\n\t\t\tpart.strip('$')\n\t\t\tparts = part.split('%2C')\n\t\t\tprocessed.append(str(toAmend.strip()+ ','+ string.join(parts, ',')+'\\n'))\n\t\t\t\n\toutput = open(output_loc, 'w')\n\n\theader = 'ID,Languages,Time sent,MD5 Hash of IP Address,Controller,Item Number,Element Number,Type,Group,Stimulus,Answer,RT,'\n\n\toutput.write(str(header+moreheader+'\\n'))\n\n\t#put it all into a text file\n\tfor line in processed:\n\t\toutput.write(line)\n\toutput.close()", "def _parse_tx_infos(self, gtf_path):\n if os.path.exists('_tx_cache.bin'):\n with open('_tx_cache.bin', 'rb') as f:\n return pickle.load(f)\n result = []\n with gzip.open(gtf_path, 'rt') as f:\n for i, line in enumerate(f):\n if i % 1000 == 0:\n print('processed {}'.format(i), file=sys.stderr)\n if line.startswith('#'):\n continue\n if line.split('\\t', 3)[2] != 'transcript':\n continue\n record = GTFFeature.parse(line)\n if record.feature != 'transcript':\n continue\n result.append(\n TranscriptInfo(record.attrs['gene_id'],\n record.attrs['transcript_id'],\n record.attrs['transcript_type'],\n record.seqname,\n record.start,\n record.end))\n with open('_tx_cache.bin', 'wb') as g:\n pickle.dump(result, g)\n print(len(result), file=sys.stderr)\n return result", "def parse(self):\n count = [] #count for trainset_size\n with open(self.file) as f:\n for line in f:\n data = line.split(\" \")[0]\n filename = data[:-1]\n id = data[-1:]\n if (filename not in count):\n count.append(filename)\n\n acid = \"\"\n structure = \"\"\n with open(self.directory+\"/\"+filename+\".dssp\") as dssp:\n for i in range(28): #skip lines we don't need\n next(dssp)\n for line in dssp:\n if (line[9] != \" \" and line[10] == \" \" and line[11] == id and line[13] not in (\"*\",\"!\",\"B\",\"Z\",\"X\")):\n #amino acid sequence\n if (line[13].islower()):\n acid += \"C\"\n else:\n acid += line[13]\n\n #sequence of the structure\n if (line[16] in (\"H\",\"G\",\"I\")):\n structure += \"H\"\n elif (line[16] in (\"E\",\"B\")):\n structure += \"E\"\n else:\n structure += \"C\"\n\n if (len(count) > self.trainset_size):\n self.testset.append((acid,structure))\n else:\n self.trainset.append((acid,structure))", "def analysis(file):\n\n fields = []\n\n with open(file) as f:\n lines = f.readlines()\n rows = len(lines)\n 
filesize = sum([len(line) for line in lines])\n\n tmp = []\n\n for line in lines[8:len(lines)-1]:\n fs = line.strip().split('\\t')\n\n \"\"\"\n fields:\n ts\n uid\n id.orig_h\n id.orig_p\n id.resp_h\n id.resp_p\n proto\n trans_id\n query\n qclass\n qclass_name\n qtype\n qtype_name\n rcode\n rcode_name\n AA\n TC\n RD\n RA\n Z\n answersTTLs\n rejected\n \"\"\"\n\n tmp.append(fs[N])\n\n #print(log, rows, ','.join(methods))\n\n # time intervals\n #tss_sorted = sorted(map(float,tmp))\n #tss_sorted = map(float, tmp)\n #intervals = map(int,[tss_sorted[i+1]-tss_sorted[i] for i in range(len(tss_sorted)-1)])\n #print('%s %s' % (log, ' '.join(map(str,intervals))))\n #file = urlparse(fs[N]).path.split('/')[-1].split('.')\n #if len(file)>1:\n # tmp.append(file[-1])\n #tmp.append(urlparse(fs[N]).path.split('/')[-1])\n #tmp.append(urlparse(fs[N]).path)\n\n #fields.append(set(tmp))\n #fields.append(intervals)\n fields.append(tmp)\n\n\n dic = {}\n for i in fields:\n for j in i:\n if j in dic:\n dic[j] += 1\n else:\n dic[j] = 1\n ls = sorted(dic.items(), lambda x,y: cmp(x[1], y[1]), reverse = True)\n for i in range(len(ls)):\n print('%s\\t%s' %(ls[i][0], ls[i][1]))\n #print('%s' % join(ls[i][1]))", "def parse_trflp(lines):\r\n\r\n sample_ids = []\r\n otu_ids = []\r\n data = []\r\n non_alphanum_mask = re.compile('[^\\w|^\\t]')\r\n # not sure why the above regex doesn't cover the following regex...\r\n dash_space_mask = re.compile('[_ -]')\r\n\r\n for i, line in enumerate(lines):\r\n elements = line.strip('\\n').split('\\t')\r\n\r\n # special handling for the first line only\r\n if i == 0:\r\n # validating if the file has a header\r\n if elements[0] == '':\r\n for otu_id in elements[1:]:\r\n otu_ids.append(non_alphanum_mask.sub('_', otu_id))\r\n continue\r\n else:\r\n for j, otu_id in enumerate(elements[1:]):\r\n otu_ids.append(non_alphanum_mask.sub('_', 'Bin%3d' % j))\r\n\r\n # handling of all other lines\r\n current_row = []\r\n\r\n # converting each value in the row to int\r\n for count in elements[1:]:\r\n try:\r\n current_row.append(int(round(float(count), 0)))\r\n except ValueError:\r\n current_row.append(0)\r\n\r\n # if the sum of all the values is equial to 0 ignore line\r\n if sum(current_row) == 0:\r\n continue\r\n\r\n # adding sample header to list\r\n sample_ids.append(non_alphanum_mask.sub('.',\r\n dash_space_mask.sub('.', elements[0])))\r\n\r\n # validating the size of the headers to add missing columns\r\n # this is only valid when there is no header\r\n if len(current_row) > len(otu_ids):\r\n # modify header data\r\n extra_cols = []\r\n for j in range(len(otu_ids), len(current_row)):\r\n extra_cols.append(non_alphanum_mask.sub('_', 'Bin%3d' % j))\r\n # modify data\r\n for j in range(len(data)):\r\n data[j].extend([0] * (len(current_row) - len(otu_ids)))\r\n\r\n otu_ids.extend(extra_cols)\r\n elif len(current_row) < len(otu_ids):\r\n # modify data\r\n current_row.extend([0] * (len(otu_ids) - len(current_row)))\r\n\r\n data.append(current_row)\r\n\r\n return sample_ids, otu_ids, asarray(data).transpose()", "def parse_txt(root, filename):\n\n root_filename = os.path.join(root, filename)\n\n match_date = regex_date.findall(filename)\n if not match_date:\n raise Exception(root_filename, 'r')\n\n year = int(match_date[0][0])\n month = int(match_date[0][1])\n day = int(match_date[0][2])\n\n file = open(root_filename)\n lines = file.readlines()\n\n i = 0\n while i < len(lines):\n match_time = regex_txt_time.match(lines[i])\n if match_time:\n hour = int(match_time.group(1))\n minute = 
int(match_time.group(2))\n second = int(match_time.group(3))\n time = datetime.datetime(year, month, day, hour, minute, second)\n timestamp = calendar.timegm(time.utctimetuple())\n\n match_txt = regex_txt.match(lines[i])\n if match_txt:\n\n name = match_txt.group(1)\n message_text = match_txt.group(2).strip()\n\n i += 1\n if i < len(lines):\n match_time = regex_txt_time.match(lines[i])\n while not match_time and i < len(lines):\n message_text += \"\\n\" + lines[i].strip()\n i += 1\n if i < len(lines):\n match_time = regex_txt_time.match(lines[i])\n\n add_message(name, timestamp, message_text, root)\n else:\n match_rest = regex_txt_rest.match(lines[i])\n message_text = None\n if match_rest:\n message_text = match_rest.group(1)\n add_message(None, timestamp, message_text, root)\n i += 1\n else:\n i += 1", "def read_file(filename):\n\n all_documents = []\n document = []\n with tf.gfile.GFile(filename, \"r\") as reader:\n for line in reader:\n line = line.strip()\n line = tokenization.convert_to_unicode(line)\n line = line.replace(u\"\\u2018\", \"'\").replace(u\"\\u2019\", \"'\")\n sents = split_line_by_sentences(line)\n for sent_line in sents:\n if not sent_line or len(sent_line) < 4: # Arbitrary min length for line\n continue\n if sent_line.lower()[:7] == \"chapter\":\n if document:\n all_documents.append(document)\n document = []\n else:\n document.append(sent_line)\n if len(document) == FLAGS.max_para_length:\n all_documents.append(document)\n document = []\n if document:\n all_documents.append(document)\n\n # Remove small documents\n all_documents = [x for x in all_documents if len(x) >= 8]\n\n return all_documents", "def get_processed_content(self, fn):\n fin = open(os.path.join(self.wiki_path, fn), 'rb')\n text = fin.read()\n fin.close()\n return (x for x in gensim.utils.tokenize(text, lowercase=True, deacc=True, errors=\"ignore\") if x not in STOPLIST)", "def readText(self, filename, firstLine = 0, lastLine = None):\n \n assert filename.endswith('.txt')\n file = open(filename, 'r')\n self.samples = []\n\n li = 0\n while li < firstLine:\n if not file.readline():\n return\n li += 1\n\n while lastLine == None or li < lastLine:\n line = file.readline()\n if not line:\n return\n li += 1\n line = line.strip()\n if line:\n columns = line.split('|')\n if columns[1] == 'client-fps':\n self.samples.append(Sample(line, columns))", "def _parse_ach_file(self, contents):\n file_length = len(contents)\n\n for index in range(0, file_length, self.LINE_LENGTH):\n line = contents[index:index + self.LINE_LENGTH]\n\n if line.startswith('1'):\n self._read_header(line)\n elif line.startswith('5'):\n self._read_batch_header(line)\n elif line.startswith('6'):\n self._read_entry_detail(line)\n elif line.startswith('7'):\n self._read_addenda_record(line)\n elif line.startswith('8'):\n self._read_batch_control_record(line)\n elif line.startswith('9'):\n if line == '9' * 94:\n continue\n self._read_file_control_record(line)", "def __read_data__(self):\n with open(self.file, 'r') as data:\n sentence = []\n tags = []\n for line in data:\n terms = line.rstrip().split(WHITESPACE)\n for term in terms:\n word_tag = tuple(term.split(TAGCHAR))\n word = word_tag[0]\n tag = word_tag[1]\n self.word_tag_dict[word_tag] += 1\n self.tag_dict[tag] += 1\n self.__add_to_word_dict__(word, tag)\n if self.isNumberWord(word):\n self.numbers += 1\n if word[0].isupper() and len(sentence) > 0:\n self.cap_no_start += 1\n sentence.append(word)\n tags.append(tag)\n if tag == ENDOFSENTENCE:\n self.sentences.append(tuple(sentence))\n 
self.tags.append(tuple(tags))\n sentence = []\n tags = []", "def get_large_audio_transcription(path):\n # open the audio file using pydub\n r = sr.Recognizer()\n sound = AudioSegment.from_mp3(path)\n sound.export(\"tmp.wav\", format=\"wav\")\n sound = AudioSegment.from_wav('tmp.wav')\n # split audio sound where silence is 700 miliseconds or more and get chunks\n chunks = split_on_silence(sound,\n # experiment with this value for your target audio file\n min_silence_len = 500,\n # adjust this per requirement\n silence_thresh = sound.dBFS-14,\n # keep the silence for 1 second, adjustable as well\n keep_silence=500,\n )\n folder_name = \"audio-chunks\"\n # create a directory to store the audio chunks\n if not os.path.isdir(folder_name):\n os.mkdir(folder_name)\n whole_text = \"\"\n\n chapter=(str(path.split('/')[-1])).split('_')[3]\n # if chapter == '01':\n # target=2\n # else:\n # target=1\n target=2\n # process each chunk\n for i, audio_chunk in enumerate(chunks, start=1):\n # export audio chunk and save it in\n # the `folder_name` directory.\n if i==1:\n chunk_filename = os.path.join(folder_name, f\"chunk{i}.wav\")\n audio_chunk.export(chunk_filename, format=\"wav\")\n # recognize the chunk\n with sr.AudioFile(chunk_filename) as source:\n audio_listened = r.record(source)\n # try converting it to text\n try:\n text = r.recognize_google(audio_listened,language=\"en-US\")\n\n except sr.UnknownValueError as e:\n print(\"Error:\", str(e))\n else:\n #text = f\"{text.capitalize()}. \"\n #print(chunk_filename, \":\", text)\n whole_text += text\n # return the text for all chunks detected\n else:\n chunk_filename = os.path.join(folder_name, f\"chunk{i}.wav\")\n audio_chunk.export(chunk_filename, format=\"wav\")\n # recognize the chunk\n with sr.AudioFile(chunk_filename) as source:\n audio_listened = r.record(source)\n # try converting it to text\n try:\n text = r.recognize_google(audio_listened, language=\"en-US\")\n\n except sr.UnknownValueError as e:\n print(\"Error:\", str(e))\n else:\n #text = f\"{text.capitalize()}. 
\"\n # print(chunk_filename, \":\", text)\n if chapter == '01':\n whole_text += ' ' +text\n if str(text).isalnum():\n if str(text).split(' ')[0]==' ':\n whole_text += text\n else: whole_text += ' '+text\n # return the text for all chunks detected\n\n if i==target:\n break\n if os.path.isfile('tmp.wav') :os.remove('tmp.wav')\n subprocess.run([\"rm\", \"-rf\", folder_name])\n return whole_text", "def get_text(self, file_number):\n\n with io.open(self.file_name.format(file_number), 'rb') as audio_file:\n content = audio_file.read()\n audio = types.RecognitionAudio(content=content)\n response = self.client.recognize(self.config, audio)\n texts = self._format_response(response)\n return texts", "def parseOutText(f):\n\n\n f.seek(0) ### go back to beginning of file (annoying)\n all_text = f.read()\n\n ### split off metadata\n content = all_text.split(\"X-FileName:\")\n words = \"\"\n if len(content) > 1:\n ### remove punctuation\n text_string = content[1].translate(str.maketrans(\"\", \"\", string.punctuation))\n\n ### split the text string into individual words\n words = text_string.split()\n\n return words", "def parse_text_file(file_path):\n with open(file_path, 'r') as f:\n for line in f:\n line = line.rstrip()\n if not line:\n continue\n yield line", "def convert_tcr(self):\n\n def read_text(file_name, event_a_id, event_b_id):\n idx_val = {\"span1\": [], \"span2\": [], \"signal\": []}\n parsed_doc = minidom.parse(self.dir_path + \"tcr/TemporalPart/{}\".format(file_name))\n elements = parsed_doc.getElementsByTagName('TEXT')\n text = \"\"\n token_index = 0\n tagxid = {\"EVENT\": \"eid\", \"TIMEX3\": \"tid\"}\n for element in elements:\n if element.tagName == \"TEXT\":\n for item in element.childNodes:\n if item.nodeName == \"#text\":\n text += item.wholeText\n token_index += len(item.wholeText)\n elif item.nodeName == \"EVENT\" or item.nodeName == \"TIMEX3\":\n item_text = ' '.join([child_node.wholeText for child_node in item.childNodes])\n text += item_text\n start_end = [token_index, token_index + len(item_text)]\n token_index += len(item_text)\n\n if item.attributes[tagxid[item.nodeName]].value == event_a_id:\n idx_val[\"span1\"].append(start_end)\n event_a_text = item_text\n elif item.attributes[tagxid[item.nodeName]].value == event_b_id:\n idx_val[\"span2\"].append(start_end)\n event_b_text = item_text\n return text, idx_val, [event_a_text, event_b_text]\n\n mismatch = 0\n data = pd.DataFrame(columns=self.scheme_columns)\n\n test_files = [\"2010.01.08.facebook.bra.color\", \"2010.01.12.haiti.earthquake\", \"2010.01.12.turkey.israel\",\n \"2010.01.13.google.china.exit\", \"2010.01.13.mexico.human.traffic.drug\"]\n\n with open(self.dir_path + \"tcr/CausalPart/allClinks.txt\", 'r') as in_file:\n lines = in_file.readlines()\n\n annotations = [line.strip().split('\\t') for line in lines]\n\n for annotation in annotations:\n file_path = annotation[0] + \".tml\"\n text, idx_val, events_text = read_text(file_path, annotation[1], annotation[2])\n direction = 1 if annotation[3] == \"caused_by\" else 0\n\n split = 2 if annotation[0] in test_files else 1\n\n # saving the sample\n new_row = {\"original_id\": '', \"span1\": [events_text[0]], \"span2\": [events_text[1]], \"signal\": [],\n \"context\": text,\n \"idx\": idx_val, \"label\": 1, \"direction\": direction,\n \"source\": self.namexid[\"tcr\"],\n \"ann_file\": file_path,\n \"split\": split}\n\n if self.check_span_indexes(new_row):\n data = data.append(new_row, ignore_index=True)\n else:\n mismatch += 1\n return data, mismatch", "def 
parse_story_file(content):\n content_raw = content.split(\"@highlight\")[0]\n content = \" \".join(filter(None, [x.strip() for x in content_raw.split(\"\\n\")]))\n return content", "def parseFile(self, filename):\n\n f = open(filename, \"r\")\n s = f.read()\n f.close()\n\n logging.log(10, 'parsing filename %s: %d lines' % (filename, len(s)))\n\n self.parseString(s)", "def get_text_list_from_raw_txt_file(data_root=\"MP4_download\"):\n data_root = pathlib.Path(data_root)\n all_txt_data_paths = [str(path) for path in\n list(data_root.glob('*/*/*.txt'))] # [MP4_download/360VR/89422838/89422838.txt,...]\n text_list = []\n for text_data_path in all_txt_data_paths:\n description_information_dict = eval(open(text_data_path).read())\n txt_brief = description_information_dict['mp4_txt_brief']\n text_list.append(txt_brief)\n return text_list", "def read_file_pretty(input_file):\n\n\ttext = open(input_file)\n\traw = text.readlines()\n\tdecoded = [line.decode('utf8') for line in raw]\n\tlines = [line.strip() for line in decoded if line.strip() != '']\n\tlines = [(\"<p>\" + line + \"</p>\") for line in lines]\n\tlines.insert(0, '<meta charset=\"UTF-8\">')\n\n\treturn lines", "def read_filename(self, filename):\r\n self.text_lines = task3.read_text_file(filename)", "def readFile(self):\n with pdfplumber.open(self.path) as pdf:\n first_page = pdf.pages[0]\n text = first_page.extract_text()\n text = text.split('\\n')\n return processText(text)", "def __parse(self):\n lines = self.file.readlines()\n for i in range(0, len(lines)):\n line = lines[i]\n tokens = line.split()\n if tokens[0] == \"#start\":\n trial_name = tokens[1]\n trial = Trial(trial_name)\n self.trials[trial_name] = trial\n elif tokens[0] == \"#end\":\n continue\n else:\n date_str = tokens[0] + \" \" + tokens[1]\n date = datetime.strptime(date_str, \"%m/%d/%y %H:%M:%S\")\n sound_file = line[18:-1].strip()\n event = Event(date, sound_file, 0)\n trial.addevent(event)", "def read_data(cls, input_file):\n with tf.gfile.Open(input_file, \"r\") as f:\n lines = []\n for line in f:\n line = line.strip()\n if line.startswith('-DOCSTART-'):\n continue\n else:\n word_labels = line.split('-seq-')\n assert len(word_labels) == 2\n\n words = word_labels[0]\n labels = word_labels[1]\n lines.append([words, labels])\n\n return lines", "def readFileToCorpus(f):\n if os.path.isfile(f):\n file = open(f, \"r\") # open the input file in read-only mode\n i = 0 # this is just a counter to keep track of the sentence numbers\n corpus = [] # this will become a list of sentences\n print(\"Reading file \", f)\n for line in file:\n i += 1\n sentence = line.split() # split the line into a list of words\n #append this lis as an element to the list of sentences\n corpus.append(sentence)\n if i % 1000 == 0:\n #print a status message: str(i) turns int i into a string\n #so we can concatenate it\n sys.stderr.write(\"Reading sentence \" + str(i) + \"\\n\")\n #endif\n #endfor\n return corpus\n else:\n #ideally we would throw an exception here, but this will suffice\n print(\"Error: corpus file \", f, \" does not exist\")\n sys.exit() # exit the script\n #endif", "def parse(self, lines):\n # convert file to string deleting end of line charcters\n citations_string = self.prepare_file(lines)\n # extract the entries from the string\n entries = list(self.find_entries(citations_string))\n entries.append(len(citations_string))\n # parse each entry to generate a citation\n for idx, jdx in zip(entries[:-1], entries[1:]):\n self.parse_entry(citations_string[idx:jdx])\n return 
self.force_field.citations", "def translationFile(language, filename):\n txt = open(language+\"File.txt\", mode=\"r\").readlines()\n translatedFileName = \"\"\n for line in txt:\n if line.split()[0] == filename:\n translatedFileName = line.split()[1]\n if translatedFileName == \"\":\n return [translatedFileName, 0, \"\"]\n translatedFileData = open(translatedFileName, mode=\"rb\").read()\n print(\"Translated filename: \", translatedFileName)\n print(\"Tranlated filesize: \", len(translatedFileData))\n #print(\"Translated file data: \", translatedFile)\n return [translatedFileName, len(translatedFileData), translatedFileData]", "def read_txt(cls, input_file):\n return open(input_file, \"r\", encoding=\"utf-8\").readlines()", "def read_txt(cls, input_file):\n return open(input_file, \"r\", encoding=\"utf-8\").readlines()", "def read_messages_from_file(file_path):\n # We don't currently enable customization of the from_shortcode via file upload.\n # Just use the default.\n from_shortcode = None\n with open(file_path, encoding='utf-8') as f:\n reader = csv.reader(f)\n for row in reader:\n if any(row):\n line = Line._make(row)\n number = int(line.number)\n yield number, line.message, from_shortcode\n os.remove(file_path)", "def fasta(file_path):\n \n print(f\"Parsing fasta '{file_path}'\")\n data = {\n 'ur_up_': [], 'accession': [],\n 'entry_name': [], 'offset': [],\n 'taxonomy': [], 'sequence': []\n }\n\n with open(file_path, 'r') as f:\n for i, line in enumerate(f):\n line = line.strip()\n \n if line[0] == '>':\n key = line[1:]\n \n if i == 0:\n name, offset = key.split(\"/\")\n ur_up_, acc = None, None\n else:\n ur_up_, acc, name_offset = key.split(\"|\")\n name, offset = name_offset.split('/')\n \n data['ur_up_'].append(ur_up_)\n data['accession'].append(acc)\n data['entry_name'].append(name)\n data['offset'].append(offset)\n data['sequence'].append('')\n data['taxonomy'].append(name.split('_')[1])\n else:\n data['sequence'][-1] += line\n \n if i and (i % 50000 == 0):\n print(f\"Reached: {i}\")\n\n return pd.DataFrame(data=data)", "def read_file(filename):\r\n\r\n print(\"Reading TextFile \" + filename)\r\n text = []\r\n with open(filename, encoding=\"utf8\") as file:\r\n lines = file.readlines()\r\n for line in lines:\r\n line = line.strip()\r\n text.append(line)\r\n return text", "def loadText(self,textFileName):\n #--Text File\n infoKey = None\n text = None\n texts = {}\n reHeader = re.compile('^#')\n reInfo = re.compile('@ +(\\d) +\"(.+?)\" +(\\d+)')\n reSingleQuote = re.compile('[\\x91\\x92]')\n reDoubleQuote = re.compile('[\\x93\\x94]')\n reEllipsis = re.compile('\\x85')\n reEolSpaces = re.compile(r' +\\r\\n')\n reExtraSpaces = re.compile(r' +')\n reIllegalChars = re.compile(r'[@#]')\n #--Read file\n textFile = file(textFileName,'rb')\n for line in textFile:\n if reHeader.match(line): continue\n maInfo = reInfo.match(line)\n if maInfo:\n infoKey = (int(maInfo.group(1)),maInfo.group(2),maInfo.group(3))\n texts[infoKey] = text = []\n else:\n text.append(line)\n textFile.close()\n #--Strip and clean texts\n updated = []\n unmatched = []\n trimmed = {}\n for infoKey in texts.keys():\n if infoKey not in self.infos:\n unmatched.append(infoKey)\n continue\n text = ''.join(texts[infoKey])\n #--Required Subs\n text = text.strip(' \\r\\n')\n text = reSingleQuote.sub('\\'',text)\n text = reDoubleQuote.sub('\"',text)\n text = reEllipsis.sub('...',text)\n text = reIllegalChars.sub('',text)\n #--Optional subs\n text = reEolSpaces.sub('\\r\\n',text)\n text = reExtraSpaces.sub(' ',text)\n #--Trim?\n 
if len(text) > 511:\n trimmed[infoKey] = (text[:511],text[511:])\n text = text[:511]\n info = self.infos[infoKey]\n if text != info.text:\n info.text = text\n info.setChanged()\n updated.append(infoKey)\n #--Report\n buff = cStringIO.StringIO()\n for header,infoKeys in ((_('Updated'),updated),(_('Unmatched'),unmatched)):\n if infoKeys:\n buff.write('=== %s\\n' % (header,))\n for infoKey in infoKeys:\n buff.write('* %s\\n' % (infoKey,))\n if trimmed:\n buff.write('=== %s\\n' % (_('Trimmed'),))\n for infoKey,(preTrim,postTrim) in trimmed.items():\n buff.write(`infoKey`+'\\n'+preTrim+'<<<'+postTrim+'\\n\\n')\n return buff.getvalue()", "def process_wiki_file(args: Tuple[str, str, int]) -> str:\n filepath, language, min_sent_word_count = args\n with bz2.open(filepath, \"rt\", encoding=\"utf8\") as bz2_file:\n\n # Extract text between <doc> xml tags\n soup = BeautifulSoup(bz2_file.read(), \"lxml\")\n docs = soup.find_all(\"doc\")\n wiki_dump_content = \"\"\n for i, doc in enumerate(docs):\n processed_text = process_wiki_doc_text(\n doc.text, language, min_sent_word_count\n )\n if len(processed_text) == 0:\n continue\n\n # Append to result\n if i > 0 and len(wiki_dump_content) > 0:\n wiki_dump_content += \"\\n\"\n wiki_dump_content += processed_text\n\n return wiki_dump_content", "def parse_text(text=None, file=None):\n if not text:\n text = open(file).readlines()\n parsed_text = re.split(ARTICLE_TOKEN, text)\n return parsed_text", "def transcribe_file_with_word_time_offsets(speech_file):\n from google.cloud import speech\n from google.cloud.speech import enums\n from google.cloud.speech import types\n client = speech.SpeechClient()\n\n with io.open(speech_file, 'rb') as audio_file:\n content = audio_file.read()\n\n audio = types.RecognitionAudio(content=content)\n config = types.RecognitionConfig(\n encoding=enums.RecognitionConfig.AudioEncoding.LINEAR16,\n language_code='en-US',\n enable_word_time_offsets=True)\n\n response = client.recognize(config, audio)\n\n word_with_ts = []\n for result in response.results:\n #print result\n alternative = result.alternatives[0]\n print('Transcript: {}'.format(alternative.transcript))\n\n for word_info in alternative.words:\n word = word_info.word\n start_time = word_info.start_time\n end_time = word_info.end_time\n word_with_ts.append((word ,start_time.seconds + start_time.nanos * 1e-9, end_time.seconds + end_time.nanos * 1e-9))\n #print('Word: {}, start_time: {}, end_time: {}'.format(\n # word,\n # start_time.seconds + start_time.nanos * 1e-9,\n # end_time.seconds + end_time.nanos * 1e-9))\n return word_with_ts", "def get_sample_sent(file_path,sent_number):\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n \n with codecs.open(file_path, encoding='utf_8') as f:\n for sentence in it.islice(f,sent_number, sent_number+1):\n return sentence.replace('\\n', '')", "def _parse(self, verbose=False):\n instructions = json.load(open(self.filename, 'rb'))\n self.bpm = instructions['header']['bpm']\n self.ticks_per_beat = instructions['header']['PPQ']\n self.song_length = instructions['duration']\n self.phraseLength = instructions['phraseLength']\n\n print ('Parsing file:', self.filename)\n print ('Title', instructions['header']['name']) \n print ('BPM', self.bpm) \n\n EIGHTH_NOTE_INTERVAL_S = 60 / (2*self.bpm)\n\n # Parse the messages into buckets for each half-beat. 
Put them in 32-beat chunks\n chunks = []\n current_chunk = []\n index = 0\n for time in np.arange(0, self.song_length, EIGHTH_NOTE_INTERVAL_S):\n for message in instructions['tracks'][1]['notes']:\n if (message['time'] >= time and message['time'] < time + EIGHTH_NOTE_INTERVAL_S):\n current_chunk.append(str(message['midi']))\n chunks.append(current_chunk)\n index += 1\n current_chunk = []\n\n # For each bucktet, create parsed messages\n phrases = []\n current_phrase = []\n current_phrase_parsed = []\n for phrase_index in range(self.phraseLength):\n current_phrase = chunks[phrase_index*self.phraseLength:(phrase_index+1)*self.phraseLength]\n index_word = 0\n for word in current_phrase:\n word_parsed = str(index_word) + ',' + ','.join(word)\n if index_word == 0:\n self.initial_notes.append(word_parsed)\n current_phrase_parsed.append(word_parsed)\n index_word += 1\n phrases.append(current_phrase_parsed)\n current_phrase_parsed = []\n current_phrase=[]\n\n # Put them in the markov-chain\n for phrase in phrases:\n self._sequence(phrase)\n \n # Print out the resulting chunks\n if verbose:\n print ('Initial notes', self.initial_notes)\n print ('Matrix')\n self.markov_chain.print_as_matrix(20)", "def tsv_generator(file):\n for line in fileinput.input(file):\n article, summary = line.strip().split(\"\\t\")\n yield (article, summary)", "def read_file(filename):\n\n sentences = open(filename).read().strip().split(\"\\n\\n\") #separate tweets\n ret = []\n for sent in sentences:\n lines = sent.split(\"\\n\") #each word in the tweet\n pairs = [L.split(\"\\t\") for L in lines] #Funniest O\n tokens = [tok for tok,tag in pairs]\n tags = [tag for tok,tag in pairs]\n ret.append( (tokens,tags) )\n return ret", "def read_input(input_file):\n \n logging.info(\"reading file {0}...this may take a while\".format(input_file))\n with gzip.open(input_file, 'rb') as f:\n for i, line in enumerate(f):\n \n if (i % 10000 == 0):\n logging.info(\"read {0} reviews\".format(i))\n # do some pre-processing and return list of words for each review\n # text\n yield gensim.utils.simple_preprocess(line)", "def load2TexAS(data):\n # State global variable\n global cache_stanza, cache_spacy, cache_udpipe, cache_trankit\n\n # Collect the data\n string = data['text']\n lang = data['lang']\n packages = data['packages']\n\n hash_string = hashlib.sha1(string.encode()).hexdigest()\n\n final_HTML = \"\"\n message_HTML = \"<div class=\\'message\\'>\"\n isMessage = False\n header_input = []\n log_row = [datetime.now().strftime('%Y-%m-%d %H:%M:%S'), lang]\n\n if \"stanza\" in packages:\n # Initialize the TexAS document\n mydoc = tx.Document(string)\n mydoc.meta().set(\"authors\",\"hegler,yiwen,celine,yuqian\")\n mydoc.date().setTimestamp(\"2021-01-19T14:44\")\n\n ## If cache is full, reload the cache.\n if cache.count(cache_stanza) > 100:\n cache.write(cache_stanza, \"stanza\")\n cache_stanza = cache.load(\"stanza\")\n \n ## Check text whether is already in cache\n if hash_string in cache_stanza[lang].keys():\n tokens, end_pos, lemma, pos, nlpWordsList, hasCompoundWords, cache_stanza = cache.read(\"stanza\", cache_stanza, lang, string) #The output cache_stanza has 'count' been updated.\n else:\n tokens, end_pos, lemma, pos, nlpWordsList, hasCompoundWords, cache_stanza = cache.add(\"stanza\", cache_stanza, lang, string, get_services_stanza)\n \n mydoc.setTokenList(tokens, indexed=True)\n mydoc.views().get(\"TOKENS\").meta().set(\"generator\", \"stanza\")\n mydoc.views().get(\"TOKENS\").meta().set(\"model\", \"stanza\" + \"-\" + lang)\n 
mydoc.setSentenceList(end_pos)\n\n if hasCompoundWords:\n mydoc.addTokenView( \"WORDS\", nlpWordsList )\n mydoc.addTokenView(\"LEMMA\", lemma)\n mydoc.addTokenView(\"POS\", pos)\n \n # Extract HTML View\n myTabView = tx.UITabularView(mydoc)\n if hasCompoundWords:\n myTabView.showView(\"WORDS\")\n myTabView.showView(\"LEMMA\", labelCSS=False)\n myTabView.showView(\"POS\")\n\n # concatenate the myTabView.HTML()\n header_input.append((\"Stanza\", str(len(end_pos)) , str(len(tokens)), str(get_tokens_per_sents(end_pos))))\n final_HTML += \"<div class='subtitle'>Stanza</div> <br>\" + myTabView.HTML().replace(\"\\n\", \"\") + \"<br>\"\n log_row.append(\"stanza\")\n \n else:\n log_row.append(\"\")\n\n if \"spacy\" in packages:\n # SpaCy does not support Arabic and Russian\n if lang == 'ara' or lang == 'rus':\n message_HTML += \"SpaCy does not support Arabic or Russian. <br>\"\n isMessage = True\n\n else:\n mydoc = tx.Document(string)\n mydoc.meta().set(\"authors\",\"hegler,yiwen,celine,yuqian\")\n mydoc.date().setTimestamp(\"2021-01-19T14:44\")\n \n ## If cache is full, reload the cache.\n if cache.count(cache_spacy) > 100:\n cache.write(cache_spacy, \"spacy\")\n cache_spacy = cache.load(\"spacy\")\n \n ## Check text whether is already in cache\n if hash_string in cache_spacy[lang].keys():\n tokens, end_pos, lemma, pos, cache_spacy = cache.read(\"spacy\", cache_spacy, lang, string)\n else:\n tokens, end_pos, lemma, pos, cache_spacy = cache.add(\"spacy\", cache_spacy, lang, string, get_services_spacy)\n \n mydoc.setTokenList(tokens, indexed=True)\n mydoc.views().get(\"TOKENS\").meta().set(\"generator\", \"spacy\")\n mydoc.views().get(\"TOKENS\").meta().set(\"model\", \"spacy\" + \"-\" + lang )\n mydoc.setSentenceList(end_pos)\n mydoc.addTokenView(\"LEMMA\", lemma)\n mydoc.addTokenView(\"POS\", pos)\n \n # Extract HTML View\n myTabView = tx.UITabularView(mydoc)\n myTabView.showView(\"LEMMA\", labelCSS=False)\n myTabView.showView(\"POS\")\n\n # concatenate the myTabView.HTML()\n header_input.append((\"SpaCy\", str(len(end_pos)) , str(len(tokens)), str(get_tokens_per_sents(end_pos))))\n final_HTML += \"<div class='subtitle'>\" + \"SpaCy\" + \"</div><br>\" + myTabView.HTML().replace(\"\\n\", \"\") + \"<br>\"\n log_row.append(\"spacy\")\n \n else:\n log_row.append(\"\")\n\n if \"udpipe\" in packages: \n ## If cache is full, reload the cache.\n if cache.count(cache_udpipe) > 100:\n cache.write(cache_udpipe, \"udpipe\")\n cache_udpipe = cache.load(\"udpipe\")\n \n ## Check text whether is already in cache\n if hash_string in cache_udpipe[lang].keys():\n tokens, end_pos, lemma, pos, cache_udpipe = cache.read(\"udpipe\", cache_udpipe, lang, string)\n else:\n tokens, end_pos, lemma, pos, cache_udpipe = cache.add(\"udpipe\", cache_udpipe, lang, string, get_services_udpipe)\n \n string_udpipe = \" \".join(tokens)\n\n # Initialize the TexAS document\n mydoc = tx.Document(string_udpipe)\n mydoc.meta().set(\"authors\",\"hegler,yiwen,celine,yuqian\")\n mydoc.date().setTimestamp(\"2021-01-19T14:44\")\n\n mydoc.setTokenList(tokens, indexed=True)\n mydoc.views().get(\"TOKENS\").meta().set(\"generator\", \"udpipe\")\n mydoc.views().get(\"TOKENS\").meta().set(\"model\", \"udpipe\" + \"-\" + lang )\n mydoc.setSentenceList(end_pos)\n mydoc.addTokenView(\"LEMMA\", lemma)\n mydoc.addTokenView(\"POS\", pos)\n \n # Extract HTML View\n myTabView = tx.UITabularView(mydoc)\n myTabView.showView(\"LEMMA\", labelCSS=False)\n myTabView.showView(\"POS\")\n\n # concatenate the myTabView.HTML()\n header_input.append((\"UDpipe\", 
str(len(end_pos)) , str(len(tokens)), str(get_tokens_per_sents(end_pos))))\n final_HTML += \"<div class='subtitle'>UDpipe</div> <br>\" + myTabView.HTML().replace(\"\\n\", \"\") + \"<br>\"\n log_row.append(\"udpipe\")\n \n else:\n log_row.append(\"\")\n \n if \"trankit\" in packages:\n # trankit temporarily only support english\n if lang == 'eng':\n mydoc = tx.Document(string)\n mydoc.meta().set(\"authors\",\"hegler,yiwen,celine,yuqian\")\n mydoc.date().setTimestamp(\"2021-01-19T14:44\")\n \n ## If cache is full, reload the cache.\n if cache.count(cache_trankit) > 100:\n cache.write(cache_trankit, \"trankit\")\n cache_trankit = cache.load(\"trankit\")\n \n ## Check text whether is already in cache\n if hash_string in cache_trankit[lang].keys():\n tokens, end_pos, lemma, pos, cache_trankit = cache.read(\"trankit\", cache_trankit, lang, string)\n else:\n tokens, end_pos, lemma, pos, cache_trankit = cache.add(\"trankit\", cache_trankit, lang, string, get_services_trankit)\n \n mydoc.setTokenList(tokens, indexed=True)\n mydoc.views().get(\"TOKENS\").meta().set(\"generator\", \"spacy\")\n mydoc.views().get(\"TOKENS\").meta().set(\"model\", \"spacy\" + \"-\" + lang )\n mydoc.setSentenceList(end_pos)\n mydoc.addTokenView(\"LEMMA\", lemma)\n mydoc.addTokenView(\"POS\", pos)\n \n # Extract HTML View\n myTabView = tx.UITabularView(mydoc)\n myTabView.showView(\"LEMMA\", labelCSS=False)\n myTabView.showView(\"POS\")\n\n # concatenate the myTabView.HTML()\n header_input.append((\"Trankit\", str(len(end_pos)) , str(len(tokens)), str(get_tokens_per_sents(end_pos))))\n final_HTML += \"<div class='subtitle'>\" + \"Trankit\" + \"</div><br>\" + myTabView.HTML().replace(\"\\n\", \"\") + \"<br>\"\n log_row.append(\"trankit\")\n\n else:\n message_HTML += \"Trankit temporarily only supports English. 
<br>\"\n isMessage = True \n \n else:\n log_row.append(\"\")\n\n message_HTML += \"</div>\"\n if isMessage:\n return message_HTML + get_header_table(header_input) + \"<br><br>\" + final_HTML\n\n writeLog(log_row)\n return get_header_table(header_input) + \"<br><br>\" + final_HTML", "def parse_text(self, source):\r\n\r\n global word_set\r\n line_count = 0\r\n word_count = 0\r\n self.vowels = self.analyse_vowels(source)\r\n\r\n with open(source) as f:\r\n for line in f:\r\n # Detect end of paragraph\r\n if line_count and not line.strip() or line.startswith(\"\\t\"):\r\n self.paragraph_sizes.add(line_count)\r\n line_count = 0\r\n \r\n words = line.split()\r\n for word in words:\r\n if not word:\r\n continue\r\n self.word_sizes.add(len(word))\r\n construction = self.calculate_construction(word)\r\n self.word_constructions.add(construction)\r\n word_count += 1\r\n\r\n # Check if this is the end of a line.\r\n if word[-1] in self.ENDING_PUNCTUATION:\r\n line_count += 1\r\n self.sentence_sizes.add(word_count)\r\n word_count = 0\r\n\r\n \r\n if not self.paragraph_sizes.is_empty():\r\n # Liable to not parse in certain sources.\r\n self.paragraph_sizes = probabilities.PARAGRAPH_SIZES", "def vtt_to_txt(filepath):\n filename = filepath.split('/')[-1]\n if filename.startswith('.'):\n return\n try:\n captions = open(filepath, 'r').readlines()\n except UnicodeError:\n print(\"Couldn't convert: {}\".format(filepath))\n return\n \n video_id = filename.split('.')[0]\n # only hold onto english subtitles (easier on text processing)\n language = captions[2].split('Language: ')[1].strip()\n\n if language != 'en':\n return\n \n result = []\n drop_regex = re.compile(r'-->')\n corrupted_regex = re.compile(r'<c>')\n for line in captions[5:]:\n # if it's a timestamp line or empty skip it\n if drop_regex.search(line) or not line.strip():\n continue\n if corrupted_regex.search(line):\n return\n else:\n result.append(line.encode('utf-8', 'ignore').decode('utf-8').lower())\n \n with open(os.path.join('derived_data/captions_clean', video_id + '.txt'), 'w') as f:\n f.write(\" \".join(result))", "def read_from_file(self, file_type):\n\n doc_idx, sentence_list_idx, tag_idx = [], [], []\n\n file_name = self.config.parser[file_type + '_dir']\n with open(file_name, 'r') as f:\n\n new_batch_doc_idx, new_batch_sentence_list_idx,\\\n new_batch_tag_idx = [], [], []\n new_sentence_idx = [] # for a sentence\n # temprate variable to store current batch data\n\n for idx, line in enumerate(f.readlines()):\n if idx == 95:\n xu = 1\n contents = line.strip().split(' ')\n # an empty line, means seperator for two batch\n # doc id, means a new batch whose `docid` is doc id\n # a word and its tag sepaerated by a blank\n if len(contents) >= 2:\n word, role = contents[0], contents[1]\n new_batch_doc_idx.append(\n self.word_dict.get_word_index(word)\n )\n new_batch_tag_idx.append(\n self.tag_dict.get_word_index(role)\n )\n new_sentence_idx.append(\n self.word_dict.get_word_index(word)\n )\n if word is '.':\n # default: '.' 
is the seperator for two sentences.\n new_batch_sentence_list_idx.append(new_sentence_idx)\n new_sentence_idx = []\n elif len(contents) == 1 and contents[0] != '':\n\n new_batch_doc_idx, new_batch_sentence_list_idx,\\\n new_batch_tag_idx = [], [], []\n new_sentence_idx = [] # for a sentence\n # temprate variable to store current batch data\n\n elif len(contents) == 1 and contents[0] == ''\\\n and len(new_batch_doc_idx) < self.config.parser['HP_max_len']:\n\n # Sometimes a sentence is not terminated by `.`\n # It will cause bug without this judgement.\n if len(new_sentence_idx) > 0:\n new_batch_sentence_list_idx.append(new_sentence_idx)\n new_sentence_idx = []\n doc_idx.append(new_batch_doc_idx)\n sentence_list_idx.append(new_batch_sentence_list_idx)\n tag_idx.append(new_batch_tag_idx)\n\n return doc_idx, sentence_list_idx, tag_idx", "def load_sentences(path):\n sentences = []\n sentence = []\n num = 0\n with codecs.open(path, 'r', 'utf8') as fread:\n # n_lines = len(fread)\n print(\"Read from {:s}\".format(path))\n # pbar = progressbar.ProgressBar(max_value=n_lines)\n for line_idx, line in enumerate(fread):\n assert line_idx==num,'ER'\n num += 1\n\n line = line.rstrip()\n # print(list(line))\n if not line: #Update: only deal with space between sentences\n if len(sentence) > 0:\n if 'DOCSTART' not in sentence[0][0]:# remove the DOCstart\n sentences.append(sentence)\n sentence = []\n else:\n if line[0] == \" \":#Update: this part is never used in Chinese ner!\n line = \"$\" + line[1:]\n word = line.split()\n # word[0] = \" \"\n else:\n word= line.split()\n assert len(word) >= 2, ([word[0]])\n sentence.append(word)\n if len(sentence) > 0:\n if 'DOCSTART' not in sentence[0][0]:\n sentences.append(sentence)\n\n return sentences" ]
[ "0.67881304", "0.626287", "0.6229017", "0.615032", "0.61068505", "0.61058336", "0.60824823", "0.6019963", "0.59705794", "0.5939803", "0.5923286", "0.5885868", "0.5883228", "0.583081", "0.5810255", "0.5807429", "0.5763637", "0.5748194", "0.5746666", "0.5737573", "0.5734013", "0.5723646", "0.5666927", "0.56596035", "0.56577843", "0.5635724", "0.5623955", "0.5588269", "0.5586113", "0.55837935", "0.557844", "0.55777436", "0.5573707", "0.55293864", "0.5514775", "0.54959124", "0.5490837", "0.5489965", "0.5488057", "0.54806656", "0.5464746", "0.5463507", "0.54438347", "0.54420805", "0.5437007", "0.54257256", "0.54176295", "0.5395161", "0.53929174", "0.539052", "0.5388132", "0.5387661", "0.5379559", "0.5368968", "0.5364812", "0.5357296", "0.5335494", "0.53314155", "0.5328753", "0.5324171", "0.5301955", "0.5294926", "0.5293585", "0.52923965", "0.5288716", "0.5286396", "0.5277398", "0.5270318", "0.52687836", "0.5254254", "0.52533597", "0.52395606", "0.5206125", "0.5204372", "0.5192795", "0.5191742", "0.5191421", "0.51834935", "0.5179035", "0.51774824", "0.5174642", "0.51712847", "0.51712847", "0.5167996", "0.516602", "0.51614106", "0.5156985", "0.5155315", "0.51531893", "0.5146368", "0.51457405", "0.514107", "0.5137597", "0.51362497", "0.5127559", "0.51273376", "0.5111818", "0.510867", "0.51063186", "0.5105278" ]
0.7538108
0
Deletes all contents of the specified folder (but not the folder itself).\n Returns True if successful, False if an error was thrown or the number of running processes is not 0
def cleanupFolder(folderName):
    try:
        if(Tools.numRunningProcesses() == 0):
            process = subprocess.call('rm -r ./' + folderName + '/*', shell=True)
            return True
        else:
            return False
    except Exception as e:
        Tools.writeException("cleanupFolder", e)
        return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_folder(path: str):\n try:\n if os.path.exists(path):\n shutil.rmtree(path)\n return True\n except:\n print(\"An error occured.\")", "def _rm(folder):\n import os\n import shutil\n for the_file in os.listdir(folder):\n file_path = os.path.join(folder, the_file)\n try:\n if os.path.isfile(file_path):\n os.unlink(file_path)\n elif os.path.isdir(file_path):\n shutil.rmtree(file_path)\n except Exception as e:\n print(e)\n return", "def cleanup(folder):\n os.system('rm -rf %s/*' % folder)", "def empty_trash():\n drive_service().files().emptyTrash().execute()\n\n return True", "def rmdirtree(path):\r\n\r\n path = encode(path)\r\n if path.lower().startswith(\"smb://\"):\r\n try:\r\n for raiz, subcarpetas, ficheros in walk(path, topdown=False):\r\n samba.delete_files(ficheros, raiz)\r\n for s in subcarpetas:\r\n samba.delete_directory(s, raiz)\r\n except:\r\n pass\r\n else:\r\n import shutil\r\n shutil.rmtree(path, ignore_errors=True)\r\n\r\n if exists(path): # No se ha eliminado\r\n return False\r\n\r\n return True", "def clear_cache(directory):\n if os.path.exists(directory):\n list_dirs = os.listdir(directory)\n try:\n for direct in list_dirs:\n shutil.rmtree(directory + direct)\n return True\n except:\n return False\n else:\n return False", "def delete_directory_contents(conn_obj, path, device=\"dut\"):\n command = \"rm -rf {}/*\".format(path.rstrip(\"/\"))\n if device == \"dut\":\n st.config(conn_obj, command)\n else:\n conn_obj.execute_command(conn_obj, command)\n return True", "def rmdir_empty(f):\n if not f.is_dir():\n return 0\n removable = True\n result = 0\n for i in f.iterdir():\n if i.is_dir():\n result += rmdir_empty(i)\n removable = removable and not i.exists()\n else:\n removable = removable and (i.name == '.DS_Store')\n if removable:\n items = list(f.iterdir())\n assert not items or items[0].name == '.DS_Store'\n print(f)\n shutil.rmtree(f)\n result += 1\n\n return result", "def delete_all(self, prog:progress=None): \n\t\tself.__output_status('Delete all files')\n\t\tif (self.__check_terminated()):\n\t\t\treturn;\t\n\t\tdelete_dir(self.root)\n\t\ttime.sleep(0.3)", "def delete_folder(path):\n command = ['rm', '-rf', TEST_DIR]\n file_operation(path, command)", "def __rmtree(path):\n logger.info(\"rmtree: %s\" % path)\n try:\n shutil.rmtree(path)\n return True\n except Exception as e:\n logger.error(\"rmtree: %s failed! 
Error: %s\" % (path, e))\n return False", "def _safe_clear_dirflow(path):\n print(\"Clearing {}...\".format(path))\n assert os.path.isdir(path), \"Didn't pass a folder to be cleaned\"\n list_dir = [f for f in os.listdir(path) if not f.startswith('.')]\n for folder in list_dir:\n cat_folder = os.path.join(path, folder)\n assert os.path.isdir(cat_folder), \\\n \"Dir contains Non-Folder File!\"\n cat_folder_item = [f for f in os.listdir(cat_folder)\n if not f.startswith('.')]\n for file in cat_folder_item:\n # For every file, confirm is PNG or error.\n # DONT DELETE YET, IN CASE OF ERRORS!\n assert \".png\" in file, \"Folder has Non PNG Contents!\"\n # If we got though that with no error, then now we can delete!\n # for folder in os.listdir(the_path):\n # cat_folder = os.path.join(the_path, folder)\n # for file in os.listdir(cat_folder):\n # os.remove(os.path.join(cat_folder, file))\n # os.rmdir(cat_folder)\n # os.rmdir(the_path)\n return True", "def rmEmptyDir(path):\n try:\n os.rmdir(path)\n except OSError as exc:\n return False\n return True", "def clear_directory(folder_path):\n for the_file in os.listdir(folder_path):\n try:\n file_path = os.path.join(folder_path, the_file)\n if os.path.isfile(file_path) \\\n and the_file != RefreshListScript \\\n and not the_file.endswith(('.txt', 'py')):\n os.unlink(file_path)\n except Exception as e:\n print(e)", "def clear_directory(folder_name):\n for the_file in os.listdir(folder_name):\n file_path = os.path.join(folder_name, the_file)\n try:\n if os.path.isfile(file_path):\n os.unlink(file_path)\n except Exception as e:\n print(e)", "def deleteImageFolder(pause=5):\n try:\n shutil.rmtree(imageFolder)\n except PermissionError:\n # Still busy creating the montage or something. Try once more\n time.sleep(pause)\n shutil.rmtree(imageFolder)\n except FileNotFoundError:\n # Folder already gone\n pass", "def delete_with_retry(folder):\n\n for _i in range(0, 5):\n try:\n if os.path.exists(folder):\n shutil.rmtree(folder)\n\n return\n except:\n time.sleep(0.1)\n\n print(\"Could not delete directory after 5 attempts: %s\" % folder)\n sys.exit(1)", "def clear_cache():\n if not(os.path.isdir(cache_dir)):\n return True\n try:\n for f in os.listdir(cache_dir):\n os.remove(cache_dir + f)\n return True\n except:\n return False", "async def remove_data(folder: Path) -> None:\n try:\n proc = await asyncio.create_subprocess_exec(\n \"rm\", \"-rf\", str(folder), stdout=asyncio.subprocess.DEVNULL\n )\n\n _, error_msg = await proc.communicate()\n except OSError as err:\n error_msg = str(err)\n else:\n if proc.returncode == 0:\n return\n\n _LOGGER.error(\"Can't remove Add-on Data: %s\", error_msg)", "def eraseDatas(folderToRemove='datas'):\n directoryToRemove = os.path.join(pathtofolder(), folderToRemove)\n for i in os.listdir(directoryToRemove):\n os.remove(os.path.join(directoryToRemove, i))\n os.rmdir(directoryToRemove) # Now the folder is empty of files\n pass", "def cleanup(self):\n\t\tfor filename in self.cfg_files:\n\t\t\tif os.path.isfile(filename):\n\t\t\t\tsize = os.stat(filename)[6]\n\t\t\t\tif size == 0:\n\t\t\t\t\tos.remove(filename)\n\n\t\treturn True", "def delete(self):\n return self.client._perform_empty(\n \"DELETE\", \"/project-folders/%s\" % self.project_folder_id)", "def flush_outputs():\n try:\n shutil.rmtree(ROOT_OUTPUT_DIR)\n print(\"Removed directory '{}'!\".format(ROOT_OUTPUT_DIR))\n return True\n except FileNotFoundError:\n print(\"Directory '{}' already removed!\".format(ROOT_OUTPUT_DIR))\n return False", "def DeleteFolderContents(dir):\n 
create_dir(dir)\n shutil.rmtree(dir)\n create_dir(dir)", "def DeleteCheckedFiles(self):\n nb = self._model.delete_checked_files()\n if nb > 0:\n self.__refresh()\n return True\n return False", "def purge_workflow_file(path):\n logger = fsurfer.log.get_logger()\n if not os.path.exists(path):\n return True\n try:\n if os.path.isfile(path):\n os.unlink(path)\n elif os.path.isdir(path):\n os.rmdir(path)\n return True\n except OSError as e:\n logger.exception(\"Exception: {0}\".format(str(e)))\n return False", "def removeDir(directory):\n if os.path.exists(directory):\n statusDeletion = shutil.rmtree(directory)\n else:\n statusDeletion = 2\n return statusDeletion", "def delete_file_or_folder(file_id: str) -> bool:\n # TODO Create unit test for this delete_file_or_folder\n try:\n drive_service().files().delete(fileId=file_id).execute()\n file_deleted_status = True\n\n except errors.HttpError:\n file_deleted_status = False\n\n return file_deleted_status", "def delete_temp_folder():\n\n tempFolder = os.path.join(os.getenv(\"APPDATA\"), \"GARI\\Temp\")\n\n if os.path.exists(tempFolder):\n for file in os.listdir(tempFolder):\n arcpy.Delete_management(os.path.join(tempFolder, file))", "def clean_all_folder():\n LOGGER.warning('removal of old files has been temporarily disabled')\n # paths_to_clean = CFG.remove_files\n # if paths_to_clean: # pylint: disable=using-constant-test\n # for remove_config in paths_to_clean: # pylint: disable=not-an-iterable\n # name = tuple(remove_config.keys())[0]\n # LOGGER.info(f'processing: {name}')\n # remove_config = remove_config[name]\n # if 'folder' not in remove_config.keys():\n # LOGGER.error(f'missing \"folder\" in {name}')\n # return\n # if 'age' not in remove_config.keys():\n # LOGGER.error(f'missing \"age\" in {name}')\n # return\n # if not os.path.exists(remove_config['folder']):\n # LOGGER.error(f'path does not exist: {remove_config[\"folder\"]}')\n # return\n # _remove_old_files_from_folder(**remove_config)\n # else:\n # LOGGER.debug('no folder to clean')", "def delete_folder(folder_path):\r\n if os.path.exists(folder_path):\r\n shutil.rmtree(folder_path)", "def clean(self):\n \n with current_context() as ctx:\n project_outputs = ctx.get('current.project_outputs')\n if project_outputs is not None:\n if self._project in project_outputs:\n del project_outputs[self._project]\n \n path = self.path\n if os.path.isfile(path):\n args = [self.command, '-f', path, '-t', 'clean', '-g']\n try:\n check_call(args)\n except CalledProcessError as ex:\n return ex.returncode\n self.remove()\n return 0", "def remove(path):\n try:\n os.remove(path)\n return True\n except FileNotFoundError:\n return False", "def clean():\n folders = ['utils_dfn/temp', 'utils_dfn/img', 'utils_dfn/mask', 'utils_dfn/output']\n for folder in folders:\n for item in os.listdir(folder):\n item_path = os.path.join(folder, item)\n if os.path.isdir(item_path):\n shutil.rmtree(item_path)\n elif os.path.isfile(item_path):\n os.remove(item_path)", "def delete_tempfolder(path):\n try:\n rmtree(path)\n except:\n pass", "def filedelete(fname):\n\n if os.path.exists(fname):\n try:\n if os.path.isdir(fname):\n # delete folder\n shutil.rmtree(fname)\n return\n else:\n # delete file\n os.remove(fname)\n return\n except:\n return\n else:\n return", "def rmdir(folder_name: str) -> None:\n if not exist(folder_name):\n print(\"The folder does not exist\")\n return\n \n os.rmdir(folder_name)", "def rm(path):\n try:\n shutil.rmtree(path)\n except Exception as e:\n print(\"* [Error] occured: {}\\n\".format(e))\n else:\n 
print(\"* Done.\\n\")", "def clean(working_directory):\n\n clean_directories(working_directory, (\"temp\", \"_build\"))\n clean_files(working_directory, \".DS_Store\")\n return 0", "def delete(self): # DirObj.delete\n self.deleted=True\n for name, d in self.subdirs.iteritems():\n d.delete()\n for name, f in self.files.iteritems():\n f.delete()", "def unmanaged_delete(task_id, url):\n\n PoolManager.db.query('DELETE FROM `unmanaged_deletions` WHERE `id` = %s', task_id)\n\n try:\n stat_result = gfal_exec('stat', (url,), return_value = True)\n except:\n return 0, None, None, 'stat error', ''\n\n if stat.S_ISDIR(stat_result.st_mode):\n # this is a directory\n result = gfal_exec('rmdir', (url,))\n else:\n result = gfal_exec('unlink', (url,))\n\n return (0,) + rmdir_result[1:]", "def check_existed(folder):\n cond_check = True\n while cond_check:\n input_option = input('Are you sure you want to replace ' + folder + '? [Y/N]\\n')\n if input_option == 'Y':\n os.rmdir(folder)\n create_folder(folder)\n return 1\n elif input_option == 'N':\n return 0\n else:\n print('Invalid Option')\n continue", "def clear_dir(dir_path: str) -> None:\n files = os.listdir(dir_path)\n\n if len(files) != 0:\n while True:\n print(f\"\\nThe directory '{dir_path}' is not empty.\")\n answer = input(\n \"Do you wish to delete all files located here? (Y/N)\")\n\n if answer.upper() == 'Y':\n for f in files:\n os.remove(os.path.join(dir_path, f))\n break\n\n elif answer.upper() == 'N':\n raise DirectoryNotEmptyError()\n\n else:\n continue", "def delete_folder_content(folder_name):\n for file_to_delete in os.listdir(\"../\" + folder_name):\n os.remove(\"../\" + folder_name + '/' + file_to_delete)", "def _delete_dir_content(dir_path):\n\n for item in os.listdir(dir_path):\n path = os.path.join(dir_path, item)\n try:\n os.remove(path)\n\n except PermissionError:\n shutil.rmtree(path)", "def rm_rf(path, max_retries=5, trash=True, clean_empty_parents=False, *args, **kw):\n try:\n path = abspath(path)\n if isdir(path) and not islink(path):\n rmtree(path)\n elif lexists(path):\n unlink_or_rename_to_trash(path)\n finally:\n if lexists(path):\n return False\n if clean_empty_parents:\n remove_empty_parent_paths(path)\n return True", "def cleanup(self):\n\n # check if the directory exists\n if not os.path.exists(self.path):\n return\n\n # check if the directory is a directory\n if not os.path.isdir(self.path):\n return\n\n # loop over content of directory and remove it\n for the_file in os.listdir(self.path):\n file_path = os.path.join(self.path, the_file)\n try:\n if os.path.isfile(file_path):\n os.unlink(file_path)\n elif os.path.isdir(file_path):\n shutil.rmtree(file_path)\n except Exception as e:\n pass", "def file_delete(file_name:str, folder_path: List[str]): \n drive = _drive_gen()\n deleted = False\n folder_id, files = _list_file(folder_path, drive)\n for file in files:\n if file['title'] == file_name:\n file.Trash()\n deleted = True\n if not deleted:\n raise FileNotFoundError(f\"file {file_name} is not found under /{'/'.join(folder_path)}\")", "def delete_files_from_folder(path: str) -> None:\n for the_file in os.listdir(path):\n file_path = os.path.join(path, the_file)\n try:\n if os.path.isfile(file_path):\n os.unlink(file_path)\n except Exception as e:\n print(e)", "def delete_folder(folder_path):\n shutil.rmtree(folder_path)", "def delete_folder_from_datastore(content, datacenter_name, folder):\n datacenter = get_obj(content, [vim.Datacenter], datacenter_name)\n task = vim.FileManager.DeleteDatastoreFile_Task(\n 
content.fileManager,\n folder,\n datacenter\n )\n wait_for_task(task)", "def delete(self):\n if self.dir in self.filelist:\n self.remove(self.dir)", "def check_directory_permissions(directory) :\n\n return_string = False\n\n test_file = directory + \"/testFile.Remove\"\n\n try :\n file_pointer = open(test_file, \"w\")\n file_pointer.write(\"TEST\")\n file_pointer.close()\n os.remove(test_file)\n except IOError as io_error:\n return_string = \"directory not accessible. Error -> '%s'\" % (\n str(io_error) )\n\n return return_string", "def _delete_file(self, path):\n if not self.mount():\n return False\n uri = self.path_to_uri(path)\n return self.gvfs.delete_file(uri)", "def remove(self):\n path = os.path.abspath(path)\n if path in self.files:\n del self.files[path]\n return True\n return False", "def delete_folder(path: str) -> None:\n\tuux.show_info(\"Deleting \" + path)\n\n\tif not os.path.exists(path):\n\t\t# Path does not exist\n\t\treturn\n\n\ttry:\n\t\tshutil.rmtree(path, True)\n\texcept OSError as ex:\n\t\tuux.show_warning(\"Failed to delete directory, \" + os.strerror(ex.errno))", "def remove_temp_folders(self) -> None:\n if self.args.deletefolders:\n time.sleep(2)\n for f in self.created_folders:\n shutil.rmtree(path.join(self.args.output, f))\n print(f\"{self.args.output}/{f} was deleted\")", "def _delete_root_dir(self):\n\n staf_request = ('DELETE ENTRY \"{0}\" RECURSE '\n 'CONFIRM '.format(unix_style_path(self._sut.bespoke_root)))\n\n result = self._staf_handle.submit(self._sut.network_address, 'fs', staf_request)\n\n if result.rc not in [result.Ok, result.DoesNotExist]:\n raise CoreError(result.result)", "def delete_dir(name):\n root_dir = get_data_dir()\n target_dir = root_dir / name\n if not is_relative_to(target_dir, root_dir) or target_dir == root_dir:\n return False\n try:\n shutil.rmtree(target_dir)\n return True\n except FileNotFoundError:\n return False", "def del_contents_of_dir(directory):\n\n for file in os.listdir(directory):\n file_path = os.path.join(directory, file)\n if os.path.isfile(file_path):\n os.unlink(file_path)\n elif os.path.isdir(file_path):\n shutil.rmtree(file_path)", "def deleteFolder(self, folder, recursive=False):\n\n state = False\n self.deleteFolderContent(folder)\n res = self.delRequest(folder.selfUrl)\n\n if res == 200 or res == 204:\n state = True\n else:\n if recursive:\n folders = self.getContainedFolders(folder)\n for f in folders:\n return self.deleteFolder(f, recursive=recursive)\n return state", "def is_download_finished(folder):\n firefox_temp_file = sorted(pathlib.Path(folder).glob('*.part'))\n chrome_temp_file = sorted(pathlib.Path(folder).glob('*.crdownload'))\n downloaded_files = sorted(pathlib.Path(folder).glob('*.*'))\n if (len(firefox_temp_file) == 0) and \\\n (len(chrome_temp_file) == 0) and \\\n (len(downloaded_files) >= 1):\n return True\n else:\n return False", "def gitclean(self):\n clean_cmd = ['git', 'clean', '-X', '-d']\n if 'CI' not in self.env_vars:\n print('The following files/directories will be deleted:')\n print('')\n print(check_output(clean_cmd + ['-n']))\n if not strtobool(input('Proceed?: ')):\n return False\n check_call(clean_cmd + ['-f'])\n empty_dirs = self.get_empty_dirs(self.env_root)\n if empty_dirs != []:\n print('Now removing empty directories:')\n for directory in empty_dirs:\n print(\"Removing %s/\" % directory)\n shutil.rmtree(os.path.join(self.env_root, directory))\n return True", "def list_for_clean(self,path):\n nu = -1\n \n dirs_and_files = [os.path.join(path, d) for d in os.listdir(path) if not 
os.path.basename(d).startswith('.')]\n dirs_and_files.sort(key = lambda d: self.cache[d])\n while nu != 0:\n if nu>0:\n d = dirs_and_files[nu-1]\n rep = 'o'\n while rep not in ['y','n']:\n rep = input(\"Do you want to delete {}? y/n\\n\".format(d))\n if rep == 'y':\n #dd = os.path.join(path,d)\n if os.path.isfile(d):\n os.remove(d)\n del self.cache[d]\n else:\n shutil.rmtree(d)\n del self.cache[d]\n dirs_and_files = [os.path.join(path, d) for d in os.listdir(path)\n if not os.path.basename(d).startswith('.')]\n dirs_and_files.sort(key = lambda d: self.cache[d])\n t_size = sum ([self.cache[d].val for d in dirs_and_files])\n self.cache[path] = HumanReadableSize(t_size)\n \n print (\"\\t\\t-------------Cleaning {} - total size of{}\".\\\n format(path, self.cache[path]))\n count = 0\n if not dirs_and_files:\n print (\"\\t\\t|Empty directory!!!!!!!!!!\")\n break\n else:\n for d in dirs_and_files:\n count += 1\n if os.path.isfile(d):\n print (\"\\t\\t|F\\t{}\\t{}\\t{}\".\\\n format(count,os.path.basename(d), self.cache[d]))\n else:\n print (\"\\t\\t|D\\t{}\\t{}\\t{}\".\\\n format(count,os.path.basename(d),self.cache[d]))\n rep = -1\n while rep not in {str(i) for i in range(count+1)}:\n rep = input(\"Enter number to delete file or dir, 0 to quit cleaning\\n\")\n nu = int(rep)", "def remove_contents_of_dir(dir_path):\n # try:\n for filename in os.listdir(dir_path):\n file_path = os.path.join(dir_path, filename)\n try:\n if os.path.isfile(file_path) or os.path.islink(file_path):\n os.unlink(file_path)\n elif os.path.isdir(file_path):\n shutil.rmtree(file_path)\n except Exception as e:\n warnings.warn(\"Failed to delete %s. Reason: %s\" % (file_path, e))", "def remove_contents_of_dir(dir_path):\n # try:\n for filename in os.listdir(dir_path):\n file_path = os.path.join(dir_path, filename)\n try:\n if os.path.isfile(file_path) or os.path.islink(file_path):\n os.unlink(file_path)\n elif os.path.isdir(file_path):\n shutil.rmtree(file_path)\n except Exception as e:\n warnings.warn(\"Failed to delete %s. 
Reason: %s\" % (file_path, e))", "def delete_files(file_prototype):\n from os import remove\n folder = getFolder()\n if folder != '/':\n for f in list_files(folder, file_prototype):\n print('Deleting: '+f)\n remove(f)\n\n return()", "def delete_folder(self, name):\n return self.DeleteFolder(name, 0)", "def cleanup(self):\n process_handler.terminate_root_and_child_processes(self._proc.pid)\n self._read_thread.join()\n if self._data_dir:\n shutil.rmtree(self._data_dir, ignore_errors=True)", "def deleteFolderContent(self, folder):\n\n state = False\n\n folder.populate(containedObjects=None)\n\n res = self.putRequest('folders', data=folder.to_struct())", "def exit_on_exception(self):\n shutil.rmtree(str(self.proj_folder.absolute()))", "def test_subdirectory_deleted(self):\n path = self._create_directory('test')\n sub_path = self._create_directory('test/test2')\n self._watcher.start()\n\n self.assertEqual(\n set([sub_path]),\n self._watcher._directory_to_subdirs[path])\n os.rmdir(sub_path)\n self.assertEqual(\n set([sub_path]),\n self._watcher.changes())\n self.assertEqual(\n set(),\n self._watcher._directory_to_subdirs[path])\n\n os.rmdir(path)\n self.assertEqual(\n set([path]),\n self._watcher.changes())", "def delete_folder_from_s3(s3_folder, connection=None):\n if connection:\n run_out = connection.run(f\"aws s3 rm --recursive {s3_folder}\")\n else:\n run_out = run(f\"aws s3 rm --recursive {s3_folder}\")\n\n return run_out.return_code", "def clean_batch_folder(folder, is_remote=False):\n\n try:\n log.msg(\"Removing remote batch folder\", folder, \"...\")\n self.utility.remove_folder(path=folder, is_remote=is_remote, sftp_client=self.sftp)\n except Exception as e:\n log.msg(str(e))\n log.err(\"Cannot remove folder {}\".format(folder), str(e))\n raise Exception('Cannot remove batch folder, ' + folder)", "def removeFolderIfExists(directory):\n try:\n shutil.rmtree(directory)\n except OSError as e: \n if e.errno != errno.ENOENT: # Continue if the error is \"no such file or directory\"\n raise # Re-raise the exception if a different error occured", "def cleanup(destination_subdir):\n sp.check_call(f\"rm {destination_subdir}/*.bam\", shell=True)\n sp.check_call(f\"rm {destination_subdir}/*.sam\", shell=True)\n sp.check_call(f\"rm -rf ./index_files\", shell=True)", "def clean_build(build_dir):\n\n err = False\n try:\n print \"Cleaning %s...\" % build_dir\n shutil.rmtree(build_dir)\n except:\n print >> sys.stderr, \"Failed to clean %s!\" % build_dir\n err = True\n return err", "def rmtree(dirname):\n try:\n shutil.rmtree(dirname)\n except PermissionError:\n if sys.platform == \"win32\":\n subprocess.check_call([\"del\", \"/F/S/Q\", dirname], shell=True)\n else:\n raise", "def rmtree(dirname):\n try:\n shutil.rmtree(dirname)\n except PermissionError:\n if sys.platform == \"win32\":\n subprocess.check_call([\"del\", \"/F/S/Q\", dirname], shell=True)\n else:\n raise", "def delete_all():\n if os.path.exists(DATA_DIR):\n shutil.rmtree(DATA_DIR)", "def rmtree_if_can_lock(root):\n lock_file_path = os.path.join(root, BUCK_LOCK_FILE_NAME)\n lock_file = None\n if os.path.exists(lock_file_path):\n lock_file = open(lock_file_path, \"a+\")\n if not acquire_exclusive_lock(lock_file):\n lock_file.close()\n return\n for name in os.listdir(root):\n p = os.path.join(root, name)\n if os.path.isdir(p):\n rmtree_if_can_lock(p)\n else:\n try:\n os.unlink(p)\n except (IOError, OSError):\n # Ignore errors like shutil.rmtree\n pass\n try:\n os.rmdir(root)\n except (IOError, OSError):\n # Ignore errors like shutil.rmtree\n 
pass\n if lock_file is not None:\n lock_file.close()", "def clear_dir(self, subdir=''):\n if not os.path.isdir(self.file_system.mount_point):\n raise exceptions.ValidationError(\"mount_point %s is not a directory\" % self.file_system.mount_point)\n if not os.path.isdir(self.full_path()):\n raise exceptions.ValidationError(\"project %s is not a directory\" % self.full_path())\n path = self.subdir(subdir)\n if not os.path.isdir(path):\n raise exceptions.ValidationError(\"%s is not a directory\" % path)\n for root, dirs, files in os.walk(path):\n for f in files:\n os.unlink(os.path.join(root, f))\n for d in dirs:\n shutil.rmtree(os.path.join(root, d))\n return True", "def del_tmp() -> None:\n for elem in os.listdir('./tmp'):\n path = f\"./tmp/{elem}\"\n if os.path.isfile(path):\n os.remove(path)\n else:\n shutil.rmtree(path)", "def RemoveCheckedFiles(self):\n nb = self._model.remove_checked_files()\n if nb > 0:\n self.__refresh()\n return True\n return False", "def clear_dir(directory):\n for f in os.listdir(directory):\n path = os.path.join(directory, f)\n try:\n if os.path.isfile(path):\n os.unlink(path)\n elif os.path.isdir(path):\n shutil.rmtree(path)\n except Exception as e:\n print(e)", "def test_subdirectory_deleted(self):\n path = self._create_directory('test')\n sub_path = self._create_directory('test/test2')\n self._watcher.start()\n\n self.assertEqual(\n set([sub_path]),\n self._watcher._directory_to_subdirs[path])\n os.rmdir(sub_path)\n self.assertEqual(\n set([sub_path]),\n self._watcher._get_changed_paths())\n self.assertEqual(\n set(),\n self._watcher._directory_to_subdirs[path])\n\n os.rmdir(path)\n self.assertEqual(\n set([path]),\n self._watcher._get_changed_paths())", "def RemoveDirectory(*path):\n file_path = os.path.join(*path)\n if not os.path.exists(file_path):\n return\n\n if sys.platform == 'win32':\n # Give up and use cmd.exe's rd command.\n file_path = os.path.normcase(file_path)\n for _ in xrange(3):\n print('RemoveDirectory running %s' % (' '.join(\n ['cmd.exe', '/c', 'rd', '/q', '/s', file_path])))\n if not subprocess.call(['cmd.exe', '/c', 'rd', '/q', '/s', file_path]):\n break\n print(' Failed')\n time.sleep(3)\n return\n\n def RemoveWithRetry_non_win(rmfunc, path):\n if os.path.islink(path):\n return os.remove(path)\n else:\n return rmfunc(path)\n\n remove_with_retry = RemoveWithRetry_non_win\n\n def RmTreeOnError(function, path, excinfo):\n r\"\"\"This works around a problem whereby python 2.x on Windows has no ability\n to check for symbolic links. os.path.islink always returns False. But\n shutil.rmtree will fail if invoked on a symbolic link whose target was\n deleted before the link. E.g., reproduce like this:\n > mkdir test\n > mkdir test\\1\n > mklink /D test\\current test\\1\n > python -c \"import chromium_utils; chromium_utils.RemoveDirectory('test')\"\n To avoid this issue, we pass this error-handling function to rmtree. If\n we see the exact sort of failure, we ignore it. All other failures we re-\n raise.\n \"\"\"\n\n exception_type = excinfo[0]\n exception_value = excinfo[1]\n # If shutil.rmtree encounters a symbolic link on Windows, os.listdir will\n # fail with a WindowsError exception with an ENOENT errno (i.e., file not\n # found). We'll ignore that error. 
Note that WindowsError is not defined\n # for non-Windows platforms, so we use OSError (of which it is a subclass)\n # to avoid lint complaints about an undefined global on non-Windows\n # platforms.\n if (function is os.listdir) and issubclass(exception_type, OSError):\n if exception_value.errno == errno.ENOENT:\n # File does not exist, and we're trying to delete, so we can ignore the\n # failure.\n print('WARNING: Failed to list %s during rmtree. Ignoring.\\n' % path)\n else:\n raise\n else:\n raise\n\n for root, dirs, files in os.walk(file_path, topdown=False):\n # For POSIX: making the directory writable guarantees removability.\n # Windows will ignore the non-read-only bits in the chmod value.\n os.chmod(root, 0o770)\n for name in files:\n remove_with_retry(os.remove, os.path.join(root, name))\n for name in dirs:\n remove_with_retry(lambda p: shutil.rmtree(p, onerror=RmTreeOnError),\n os.path.join(root, name))\n\n remove_with_retry(os.rmdir, file_path)", "def RemoveDirectory(*path):\n file_path = os.path.join(*path)\n if not os.path.exists(file_path):\n return\n\n if sys.platform == 'win32':\n # Give up and use cmd.exe's rd command.\n file_path = os.path.normcase(file_path)\n for _ in xrange(3):\n print 'RemoveDirectory running %s' % (' '.join(\n ['cmd.exe', '/c', 'rd', '/q', '/s', file_path]))\n if not subprocess.call(['cmd.exe', '/c', 'rd', '/q', '/s', file_path]):\n break\n print ' Failed'\n time.sleep(3)\n return\n\n def RemoveWithRetry_non_win(rmfunc, path):\n if os.path.islink(path):\n return os.remove(path)\n else:\n return rmfunc(path)\n\n remove_with_retry = RemoveWithRetry_non_win\n\n def RmTreeOnError(function, path, excinfo):\n r\"\"\"This works around a problem whereby python 2.x on Windows has no ability\n to check for symbolic links. os.path.islink always returns False. But\n shutil.rmtree will fail if invoked on a symbolic link whose target was\n deleted before the link. E.g., reproduce like this:\n > mkdir test\n > mkdir test\\1\n > mklink /D test\\current test\\1\n > python -c \"import chromium_utils; chromium_utils.RemoveDirectory('test')\"\n To avoid this issue, we pass this error-handling function to rmtree. If\n we see the exact sort of failure, we ignore it. All other failures we re-\n raise.\n \"\"\"\n\n exception_type = excinfo[0]\n exception_value = excinfo[1]\n # If shutil.rmtree encounters a symbolic link on Windows, os.listdir will\n # fail with a WindowsError exception with an ENOENT errno (i.e., file not\n # found). We'll ignore that error. Note that WindowsError is not defined\n # for non-Windows platforms, so we use OSError (of which it is a subclass)\n # to avoid lint complaints about an undefined global on non-Windows\n # platforms.\n if (function is os.listdir) and issubclass(exception_type, OSError):\n if exception_value.errno == errno.ENOENT:\n # File does not exist, and we're trying to delete, so we can ignore the\n # failure.\n print 'WARNING: Failed to list %s during rmtree. 
Ignoring.\\n' % path\n else:\n raise\n else:\n raise\n\n for root, dirs, files in os.walk(file_path, topdown=False):\n # For POSIX: making the directory writable guarantees removability.\n # Windows will ignore the non-read-only bits in the chmod value.\n os.chmod(root, 0770)\n for name in files:\n remove_with_retry(os.remove, os.path.join(root, name))\n for name in dirs:\n remove_with_retry(lambda p: shutil.rmtree(p, onerror=RmTreeOnError),\n os.path.join(root, name))\n\n remove_with_retry(os.rmdir, file_path)", "def _delete_folder(folder_path, warn=False):\n try:\n if os.path.exists(folder_path):\n # This can fail under windows,\n # but will succeed when called by atexit\n shutil.rmtree(folder_path)\n except OSError:\n if warn:\n warnings.warn(\"Could not delete temporary folder %s\" % folder_path)", "def emptydir(directory):\n for path in os.listdir(directory):\n path = os.path.join(directory, path)\n if os.path.isfile(path):\n os.unlink(path)\n else:\n shutil.rmtree(path)", "def del_files_from_disk(path):\n\n shutil.rmtree(path) #,ignore_errors=True)", "def clean_and_exit(clone_path, msg):\n print msg\n if exists(clone_path):\n rmtree(clone_path)\n sys.exit(1)", "def DeleteFile(*path):\n\n for line in path:\n if os.path.isdir(line):\n shutil.rmtree(line)\n if os.path.isfile(line):\n os.remove(line)\n\n return 0", "def pytest_sessionfinish(session, exitstatus):\n\n # dat files are created when using attachements\n print(\"\\n-------------------------\\nClean dpytest_*.dat files\")\n fileList = glob.glob('./dpytest_*.dat')\n for filePath in fileList:\n try:\n os.remove(filePath)\n except Exception:\n print(\"Error while deleting file : \", filePath)", "def delete_folder(self, path):\n if not path_exists(path, self._store_folder):\n raise NotFoundException(\"\")\n rmdir(path)", "def remove_empty ( self ):\n with self._lock:\n for key in tuple ( self._subdirs.keys() ):\n if self._subdirs [key].check_empty():\n del self._subdirs [key]", "def delete(self,result):\n path = self.get_archive_file_path(result) if isinstance(result,RunResults) else result\n if os.path.exists(path):\n os.remove(path)", "def destroy(self):\n res = subprocess.run(\"{} rm {}\".format(self.binary,\n self.args['name']))\n if res.returncode != 0:\n sys.exit(2)\n return res", "def delete(self):\n return self.client._perform_empty(\n \"DELETE\", \"/projects/%s/managedfolders/%s\" % (self.project_key, self.odb_id))", "def remover(file_path):\n if os.path.isfile(file_path):\n os.remove(file_path)\n return True\n elif os.path.isdir(file_path):\n shutil.rmtree(file_path)\n return True\n else:\n return False" ]
[ "0.65608245", "0.6207403", "0.60895413", "0.6053084", "0.60113263", "0.5985281", "0.5914889", "0.58811086", "0.58790207", "0.5868364", "0.5852713", "0.5824594", "0.5818425", "0.58163047", "0.5810179", "0.57717085", "0.5769049", "0.5744798", "0.5744438", "0.5740823", "0.5726497", "0.5724047", "0.5721212", "0.5715005", "0.56995", "0.5688822", "0.56862587", "0.5661347", "0.56559616", "0.56542426", "0.56540006", "0.56255764", "0.56184536", "0.5605676", "0.56012887", "0.5589407", "0.5586569", "0.5563927", "0.556027", "0.55579644", "0.5557406", "0.5544624", "0.55331516", "0.55288476", "0.55260015", "0.55024064", "0.54809165", "0.54774344", "0.5468308", "0.5465174", "0.5460778", "0.54605126", "0.54070634", "0.5404265", "0.54017913", "0.54008836", "0.5395223", "0.5392247", "0.53873426", "0.53855956", "0.53820896", "0.5373048", "0.5372616", "0.5369691", "0.5354362", "0.5354362", "0.5352812", "0.53514636", "0.5345994", "0.5344797", "0.53439045", "0.5343371", "0.53320163", "0.53286546", "0.5326612", "0.5317576", "0.5316392", "0.5313815", "0.5313815", "0.53125864", "0.53016174", "0.52930117", "0.5285414", "0.5281963", "0.5279697", "0.5276356", "0.526593", "0.52614975", "0.52613807", "0.5261108", "0.5253804", "0.52531", "0.525222", "0.5248423", "0.52455854", "0.52428627", "0.5242744", "0.52393913", "0.5233252", "0.52324915" ]
0.70612246
0
gets the number of running transcription processes
def numRunningProcesses():
    try:
        proc = subprocess.run("ps -Af|grep -i \"online2-wav-nnet3-latgen-faster\"", stdout=subprocess.PIPE, shell=True)
        np = (len(str(proc.stdout).split("\\n")) - 3)
        if(np == None):
            np = 0
        return np
    except Exception as e:
        Tools.writeException("numRunningProcesses", e)
        return -1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def num_processes():\n return 1", "def num_processes(self):\n return 1", "def num_processes(self, new_value):", "def n_worker(self):\n return self.redis.pubsub_numsub(MSG)[0][-1]", "def GetNumberOfResultsProcessed(self) -> int:\n return self.i", "def numRunning(self):\n #with self.__queueLock:\n # The size of the list does not change, only its contents, so I don't\n # think there should be any conflict if we are reading a variable from\n # one thread and updating it on the other thread.\n activeRuns = sum(run is not None for run in self.__running)\n\n return activeRuns", "def number_of_jobs_in_queue():\n\n # Initialize #\n user_name = get_username()\n\n process = subprocess.check_output([\"squeue\", \"-u\", user_name])\n\n return len([line for line in process.split(\"\\n\") if user_name in line])", "def _n_workers(self, processes: int = 2) -> int:\n if 2 <= processes <= cpu_count():\n n_workers = processes\n else:\n n_workers = cpu_count()\n return n_workers", "def get_count(bam, max_workers):\n print (\"Count total number of paired reads in %s ...\"%bam)\n cmd = ['samtools','view','-c','-f', '3','-@',str(max_workers),bam]\n out, err = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout=subprocess.PIPE).communicate()\n return int(out.split()[0])", "def processes(self):\n return self._getint('processes')", "def numRunningTotal(self):\n activeRuns = sum(run is not None for run in self.__running + self.__clientRunning)\n return activeRuns", "def numprocesses(self):\n info = self.info()\n return info['max_processes']", "def count(self):\n return len(self._runs)", "def get_num_sequence(self):\n return len(self.study_list)", "def num_run_trajs(self, run_idx):\n return len(self._h5['{}/{}/{}'.format(RUNS, run_idx, TRAJECTORIES)])", "def get_num_processes(profileDict):\n assert isinstance(profileDict, dict)\n\n return profileDict[\"info\"][\"number_of_processes\"]", "def total_nt(self) -> int:\n return self.sequence.length", "def processor_count(self):\n return self._processor_count", "def number_of_sequences(self):\n return self.sequence_last() + 1", "def get_num_jobs(self):\n return str(self.num_jobs)", "def number_of_workers():\n return (multiprocessing.cpu_count() * 2) + 1", "def count():", "def get_number_executors(self):\n with self.__threads_lock:\n return self.__number_executors", "def execution_count(self) -> int:\n return pulumi.get(self, \"execution_count\")", "def number_of_workers():\n return (cpu_count() * 2) + 1", "def get_procs_count(proc_name):\n procs = subprocess.check_output(['ps','-ef']).splitlines()\n name_procs = [proc for proc in procs if proc_name.encode() in proc]\n return len(name_procs)", "def n_tasks(self) -> int:\n pass", "def num_trials(self):", "def procs_running():\n \n return __proc_stat('procs_running')", "def numProcs(reportname):\n with open(reportname, \"rb\") as f:\n data = json.load(f)\n numProcesses = len(data[\"behavior\"][\"processes\"])\n return numProcesses", "def get_ncores(self):\n return self._ncores", "def get_text_queue_count(self):\n return self.db.llen(\"soq_texts\")", "def number_of_running_consumers(self):\n try:\n return len(self.get_classads(\"OSGRSV==\\\"consumers\\\"\"))\n except TypeError:\n self.rsv.log(\"ERROR\", \"Classad parsing failed, unable to count running consumers\")", "def get_ncpu():\n from multiprocessing import cpu_count\n return cpu_count()", "def num_worker(self):\n return self.config.get(\"jobs\", 4)", "def getProcessLength(self):\n return self.length", "def __number_of_jobs__(self):\n # | - __number_of_jobs__\n 
num_jobs = 0\n\n # Regular jobs\n if self.job_var_lst is not None:\n num_jobs = len(self.job_var_lst)\n\n # Individual dir jobs\n if self.indiv_dir_lst is not None:\n num_jobs += len(self.indiv_dir_lst)\n\n\n return(num_jobs)\n # __|", "def get_n_workers(self):\n return self.df.worker.nunique()", "def __get_thread_count(self, conf):\n return conf[self.conf_item.get_thread_count()]", "def Numtrials(self):\n\t\treturn self._get_attribute('numtrials')", "def getThreads():\r\n return multiprocessing.cpu_count()", "def _get_count(results):\n return len(results)", "def num_pending(status=\"pending\", username=\"garrett.wilson\"):\n cmd = [\n \"squeue\",\n \"-u\", username,\n \"-t\", status,\n \"-h\"\n ]\n\n output = subprocess.check_output(cmd)\n output = output.decode(\"utf-8\").strip()\n\n if output == \"\":\n return 0\n\n return len(output.split(\"\\n\"))", "def number_of_launches(self):\n return self._number_of_launches", "def num_running(self):\n return sum(cmd.is_running for id, cmd in self.commands)", "def count(self) -> int:\n return pulumi.get(self, \"count\")", "def get_num_of_sessions(self):\n return len(self.current_sessions)", "def nworkers(self):\n return len(self._workers)", "def num_tasks(self) -> int:\n return 1", "def n_trials(self):\n return self.getGlobalVariableByName(\"ntrials\")", "def gatherReadCounts(samplesList, scriptsDir, threads, alignmentPath, outRoot, stype, mode):\n reads = 0\n ext = \".pruned.bam\"\n if mode == \"all_reads\":\n ext = \".bam\"\n for i in range(len(samplesList)):\n bam = os.path.join(alignmentPath, outRoot) + \".\" + stype + \".\" + str(i) + ext\n reads += int(subprocess.run([os.path.join(scriptsDir, \"get_readcount.sh\"), bam, str(threads)], capture_output=True, text=True).stdout.strip(\"\\n\"))\n return reads", "def num_trajs(self):\n return len(list(self.run_traj_idx_tuples()))", "def num_runs(self):\n return len(self._h5[RUNS])", "def get_num_instances(self):\n return len( self.get_instances_ids() )", "def number_of_running_metrics(self):\n try:\n return len(self.get_classads(\"OSGRSV==\\\"metrics\\\"\"))\n except TypeError:\n self.rsv.log(\"ERROR\", \"Classad parsing failed, unable to count running metrics\")", "def number_processors(self):\n if self._number_processors:\n return self._number_processors\n\n uniques = set()\n\n for task in self.tasks.values():\n for job in task.jobs.values():\n for start in job.starts:\n uniques.add(start['processor_id'])\n for finish in job.finishes:\n uniques.add(finish['processor_id'])\n\n self._number_processors = len(uniques)\n return self._number_processors", "def count_processes(pid=None, name=None):\n counter = 0\n for process in win32com.client.GetObject('winmgmts:').InstancesOf('Win32_Process'):\n if ((pid is None or process.Properties_(\"ProcessID\").Value == pid) and\n (name is None or process.Properties_(\"Name\").Value == name)):\n counter += 1\n return counter", "def get_terminals_count(self, sequence: str) -> int:\n\n res = 0\n\n for terminal in self._terminals:\n if terminal != '':\n res += sequence.count(terminal)\n\n return res", "def count() -> int:\n pass", "def ntasks(self):\n return len(self.tasks)", "def get_total_session_count(self) -> int:\n return self.streams_count", "def nprograms(self):\n return len(self.__programs)", "def pcp_process_count(self):\n\n\t\tif self.PCPConnectionStatus() != ConnStateType.OK:\n\t\t\tself.pcp_internal_error('invalid PCP connection')\n\t\t\treturn None\n\t\t\n\t\tself._PCPWrite('N'.encode(), 1)\n\t\twsize = self.int_to_bytes(4)\n\t\tself._PCPWrite(wsize, 
4)\n\t\tif self.PCPFlush() < 0:\n\t\t\treturn None\n\t\tif self.Pfdebug:\n\t\t\tself.Pfdebug.write(f'DEBUG: send: tos=\"N\", length={self.bytes_to_int(wsize)}\\n')\n\n\t\treturn self._process_pcp_response('N')", "def task_count(self) -> Optional[int]:\n return pulumi.get(self, \"task_count\")", "def count(self):\n with self.pdq:\n (count,)=self.pdq.cursor().execute('select count(*) from pdq').next()\n return count", "def _get_n_jobs(self):\n self._validate_n_jobs()\n return deepcopy(self.n_jobs)", "def get_total_num_clients(task):\n if task == 'stackoverflow_lr':\n return 342477\n else:\n raise ValueError(f'Unsupported task: {task}')", "def instance_count(self) -> int:\n return pulumi.get(self, \"instance_count\")", "def Count(self, limit=None):\n if limit is None:\n count = 0\n for i in self.Run():\n count += 1\n return count\n else:\n return len(self.Get(limit))", "def get_total_n_events(self):\n\n return self._total_n_processed_events", "def participant_count(self) -> int:\n return self.participants.count() + 1", "def get_number_of_submissions():\n\n start = time.time()\n print(\"counting submissions in\", TEST_SUBREDDIT, 'between', TEST_START_DATE, 'and', TEST_END_DATE)\n threads = list(get_submissions(TEST_SUBREDDIT, TEST_START_DATE, TEST_END_DATE, TEST_MAX))\n end = time.time()\n print('time elapsed: ', end - start)\n print('total submissions:', len(threads))\n print(TEST_MAX)", "def count(self):\n return int()", "def count(self):\n return int()", "def count_samples(self):\n return sum(SEQ_LENGTHS)", "def n_in_progress(self) -> int:\n return self.n_tasks() - self.qsize()", "def get_count(self):\n\n\t\treturn self.__count", "def getNumTasks(self):\n with self._lock_c:\n return len(self.taskset)", "def count(self):\n # TODO not implemented yet\n return 0", "def get_n_jobs(self):\n return self.n_jobs", "def tally(self):\n return self.count", "def get_num_sync_workers(self, comm):\n if self.synchronous:\n return int( math.ceil( 0.95 * (comm.Get_size() - 1) ) )\n return 1", "def num_cpus(self):\n if 'QUTIP_NUM_PROCESSES' in os.environ:\n num_cpus = int(os.environ['QUTIP_NUM_PROCESSES'])\n else:\n num_cpus = available_cpu_count()\n os.environ['QUTIP_NUM_PROCESSES'] = str(num_cpus)\n return num_cpus", "def get_num_parallel_workers():\n return _config.get_num_parallel_workers()", "def num_of_rabbit_running_nodes(remote):\n result = remote.execute('pcs status --full | '\n 'grep p_rabbitmq-server | '\n 'grep ocf | '\n 'grep -c -E \"Master|Started\"', verbose=False)\n count = result['stdout'][0].strip()\n if count.isdigit():\n return int(count)\n else:\n return 0", "def num_instances_msp(infile_name):\n\tinfile = open(infile_name)\n\tnum_instances = 0\n\tfor line in infile:\n\t\tif line.startswith(\"Name: \"):\n\t\t\tnum_instances += 1\n\treturn(num_instances)", "def next_run_idx(self):\n return self.num_runs", "def counter(self) -> int:", "def counter(self) -> int:", "def usage(self):\n self.process = subprocess.Popen(\n# \"ps -u %s -o rss | awk '{sum+=$1} END {print sum}'\" % self.username,\n \"ps -p %s -o rss | awk '{sum+=$1} END {print sum}'\" % self.pid,\n shell=True, stdout=subprocess.PIPE)\n self.stdout_list = self.process.communicate()[0].split('\\n')\n return int(self.stdout_list[0])", "def get_control_count(cmd):\n return len(cmd.control_qubits)", "def num_launches(self):\n return len(self.launches)", "def count_tasks(self):\n return len(self.tasks)", "def concurrency(self):\n return multiprocessing.cpu_count()", "def get_program_counter(self):\n return 
self.get_thread().program_counter", "def nThreads(self):\n return self._c_param.n_threads", "def numpsus():\n click.echo(_wrapper_get_num_psus())", "def get_num_chunks(self) -> int:", "def instance_count(self) -> pulumi.Output[Optional[int]]:\n return pulumi.get(self, \"instance_count\")", "def numcpu () :\n import multiprocessing\n return multiprocessing.cpu_count()" ]
[ "0.7056694", "0.6808125", "0.6725539", "0.66032845", "0.64222586", "0.6406968", "0.6369945", "0.6350684", "0.6319151", "0.6316894", "0.631224", "0.62300086", "0.62215966", "0.6156739", "0.6135421", "0.6128718", "0.61258286", "0.611203", "0.6101045", "0.60927176", "0.6082708", "0.6049304", "0.60454756", "0.60439026", "0.6039683", "0.60033643", "0.5998477", "0.5984397", "0.59553885", "0.5938129", "0.59370285", "0.5936558", "0.59325796", "0.5909512", "0.5904209", "0.5897413", "0.5869352", "0.5866081", "0.5856901", "0.5846686", "0.58405286", "0.58399993", "0.58393407", "0.5832995", "0.5810171", "0.58065224", "0.58039594", "0.5802933", "0.5800605", "0.5797286", "0.57956487", "0.579134", "0.5791171", "0.57887304", "0.57841325", "0.5781453", "0.57733184", "0.576775", "0.5758019", "0.57528937", "0.57497746", "0.5740515", "0.5739031", "0.5738996", "0.5732542", "0.5732152", "0.5710152", "0.57050925", "0.5705082", "0.5702669", "0.57019097", "0.5697904", "0.56965244", "0.56965244", "0.5695253", "0.5690452", "0.5687921", "0.56861955", "0.56823105", "0.5679357", "0.5679086", "0.5678883", "0.56785685", "0.5672724", "0.56617975", "0.56609404", "0.5660355", "0.5660047", "0.5660047", "0.56594527", "0.56498516", "0.5646297", "0.5639846", "0.5639362", "0.56381696", "0.5634514", "0.56303287", "0.5620227", "0.5618513", "0.5615571" ]
0.73324955
0
Writes an exception to the error log, given the class name as a string and the 'e' from an 'except Exception as e' clause
def writeException(className, exceptionString):
    errorFile = open("error.log", 'a')
    errorFile.write("ERROR occured in " + className + " at " + str(datetime.now()) + " with the following message\n" + str(exceptionString) + "\n\n")
    errorFile.close()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def log_format_error(caught_exception, event_str):\n\tcheck_type(caught_exception, Exception)\n\tcheck_type(event_str, StringType)\n\t\n\treturn '{0}, Class: {1}:{2}'.format(event_str, str(type(caught_exception)), caught_exception)", "def create_exception(self, msg: str):", "def format_exception(exception_type, class_name = 'No classname', message = 'Formated exception', debug_info = {}):\n\tcheck_class(exception_type, Exception)\n\tcheck_type(class_name, StringType)\n\tcheck_type(message, StringType)\n\tcheck_type(debug_info, DictType)\n\n\tdebug = []\n\tfor k in debug_info:\n\t\tdebug.append('{0}: {1}'.format(k, debug_info[k]))\n\texc = exception_type('{0}, \"{1}\" - debug: ({2})'.format(class_name, message, ', '.join(debug)))\n\treturn exc", "def format_exception_only(exc):\r\n exc_type = type(exc)\r\n\r\n stype = exc_type.__qualname__\r\n smod = exc_type.__module__\r\n if smod not in (\"__main__\", \"builtins\"):\r\n stype = smod + '.' + stype\r\n try:\r\n _str = str(exc)\r\n except:\r\n _str = \"<unprintable {} object>\".format(exc_type.__name__)\r\n\r\n if _str == \"None\" or not _str:\r\n line = \"{}\\n\".format(stype)\r\n else:\r\n line = \"{}: {}\\n\".format(stype, _str)\r\n return line", "def e(msg):\n raise Exception(repr(msg))", "def _print_exception(self, s, e):\n \n # Output exception message\n sys.stdout.write(\"\\n{0}: {1} \\n\\n\".format(s, e))\n \n if self.model.debug:\n with open(self.logfile,'a') as f:\n f.write(\"\\n{0}: {1} \\n\\n\".format(s, e))", "def print_exception(masking=None, file=sys.stderr):\n ty, val, tb = sys.exc_info()\n string = format_exception(tb, masking=masking)\n file.write(string)", "def test_friendly_exception_formatting_exc_with_str_overload():\n ex = InsufficientSignatures(1, 3)\n\n formatted_exception = friendlyEx(ex)\n\n assert formatted_exception == '{}'.format(ex.reason)", "def format_exc(etype, evalue, etb, context=5, tb_offset=0):\r\n # some locals\r\n try:\r\n etype = etype.__name__\r\n except AttributeError:\r\n pass\r\n\r\n # Header with the exception type, python version, and date\r\n pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable\r\n date = time.ctime(time.time())\r\n pid = 'PID: %i' % os.getpid()\r\n\r\n head = '%s%s%s\\n%s%s%s' % (etype, ' ' * (75 - len(str(etype)) - len(date)),\r\n date, pid, ' ' * (75 - len(str(pid)) - len(pyver)),\r\n pyver)\r\n\r\n # Flush cache before calling inspect. This helps alleviate some of the\r\n # problems with python 2.3's inspect.py.\r\n linecache.checkcache()\r\n # Drop topmost frames if requested\r\n try:\r\n records = _fixed_getframes(etb, context, tb_offset)\r\n except:\r\n raise\r\n print('\\nUnfortunately, your original traceback can not be '\r\n 'constructed.\\n')\r\n return ''\r\n\r\n # Get (safely) a string form of the exception info\r\n try:\r\n etype_str, evalue_str = map(str, (etype, evalue))\r\n except:\r\n # User exception is improperly defined.\r\n etype, evalue = str, sys.exc_info()[:2]\r\n etype_str, evalue_str = map(str, (etype, evalue))\r\n # ... 
and format it\r\n exception = ['%s: %s' % (etype_str, evalue_str)]\r\n frames = format_records(records)\r\n return '%s\\n%s\\n%s' % (head, '\\n'.join(frames), ''.join(exception[0]))", "def formatException(self, exc_info):\n keys = [\"type\", \"value\", \"frame\", \"filename\", \"lineno\", \"function\", \"text\"]\n type_, value, trcbk = exc_info\n rows = []\n\n for pos, frame in enumerate(traceback.extract_tb(trcbk)):\n values = [\n type_.__name__,\n value,\n pos,\n frame.filename,\n frame.lineno,\n frame.name,\n frame.line,\n ]\n rows.append(dict(zip(keys, values)))\n\n return str(CustomEncoder().encode(rows))", "def exc_log_str(exception) -> str:\n return \"{}: {!s}\".format(type(exception).__name__, exception)", "def error(self, e):\n return \"{}: {} ({})\".format(e.__class__.__name__, e.__doc__, e.message)", "def test_friendly_exception_formatting_exc_without_str_overload():\n ex = SigningException()\n\n formatted_exception = friendlyEx(ex)\n\n assert formatted_exception == '{}'.format(ex)", "def FormatException(message):\n message = ('Exception Type: %s\\n'\n 'Details: %s\\n'\n 'Message: %s\\n') % (sys.exc_type, traceback.format_exc(), message)\n return message", "def create_exception_report(exc_type, exc_value, tb, output_format, storage_backend, data_processor=None, get_full_tb=False):\n exception_data = get_exception_data(exc_type, exc_value, tb, get_full_tb=get_full_tb)\n if data_processor:\n exception_data = data_processor(exception_data)\n\n if output_format == \"html\":\n text = render_exception_html(exception_data)\n elif output_format == \"json\":\n text = render_exception_json(exception_data)\n else:\n raise TypeError(\"Exception report format not correctly specified\")\n\n filename = gen_error_filename(extension=output_format)\n\n report_location = storage_backend.write(filename, text)\n\n return report_location", "def pretty_exception(err: Exception, message: str = \"\"):\n return f\"{message} ({err.__module__}.{err.__class__.__name__}: {err!s})\"", "def make_exception_message(exc):\n if str(exc):\n return '%s: %s\\n' % (exc.__class__.__name__, exc)\n else:\n return '%s\\n' % (exc.__class__.__name__)", "def exception_alias():\n try:\n #result=1/0\n raise Exception\n except ZeroDivisionError, e:\n print(\"ZeroDivisionError\")\n print(e.message if e.message != \"\" else 'no message')\n except Exception, e:\n print(\"Exception\")\n print(type(e.message)) # <type 'str'>\n print(e.message if e.message != \"\" else 'no message')", "def format_exception_only(etype, value):\n # Gracefully handle (the way Python 2.4 and earlier did) the case of\n # being called with (None, None).\n if etype is None:\n return [_format_final_exc_line(etype, value)]\n\n stype = etype.__name__\n smod = etype.__module__\n if smod not in (\"__main__\", \"builtins\", \"exceptions\"):\n stype = smod + '.' 
+ stype\n\n if not issubclass(etype, SyntaxError):\n return [_format_final_exc_line(stype, value)]\n\n # It was a syntax error; show exactly where the problem was found.\n lines = []\n filename = value.filename or \"<string>\"\n lineno = str(value.lineno) or '?'\n lines.append(' File \"%s\", line %s\\n' % (filename, lineno))\n badline = value.text\n offset = value.offset\n if badline is not None:\n lines.append(' %s\\n' % badline.strip())\n if offset is not None:\n caretspace = badline.rstrip('\\n')[:offset].lstrip()\n # non-space whitespace (likes tabs) must be kept for alignment\n caretspace = ((c.isspace() and c or ' ') for c in caretspace)\n # only three spaces to account for offset1 == pos 0\n lines.append(' %s^\\n' % ''.join(caretspace))\n msg = value.msg or \"<no detail available>\"\n lines.append(\"%s: %s\\n\" % (stype, msg))\n return lines", "def exception(self, msg, *args, **kwargs):\n if args:\n try:\n msg = msg % args\n except TypeError:\n log.exception_orig(_('Wrong format of a log message'))\n\n (exc_type, exc_value, exc_tb) = sys.exc_info()\n bugdialog.ShowEI(exc_type, exc_value, exc_tb, msg)\n if compat.PYTHON2: sys.exc_clear()", "def _FormatException(exc):\n return ''.join(traceback.format_exception_only(type(exc), exc))", "def create_log(self, exc):\n return self.formatter.formatException(exc)", "def save_exception(exc):\n LOG.error(\"Error - %s\", str(exc))\n hour = time.strftime(\"_%H_%M_%S\")\n today = time.strftime(\"_%d_%m_%Y\")\n data = (str(exc)+traceback.format_exc())\n\n file = open(\"./logs/ERROR_\"+threading.currentThread().getName()+today+\".log\",'a+') #Replace to fix OSError\n file.write(\"\\n==\"+hour+\"==\\n\")\n file.write(Parser.parse_text(data))\n file.write(\"=====================================\\n\")\n file.close()", "def rewrite_exception(old_name, new_name):\n try:\n yield\n except Exception as e:\n msg = e.args[0]\n msg = msg.replace(old_name, new_name)\n args = (msg,)\n if len(e.args) > 1:\n args = args + e.args[1:]\n e.args = args\n raise", "def exception(self, e):\n pass", "def exception(self, msg, *args, **kwargs):\n logger = self.__get_logger()\n logger.exception(str(msg), *args, **kwargs)", "def print_exception(self, output=None):\r\n\r\n if not output:\r\n output = sys.stderr\r\n\r\n text = u\"stream failed. 
reason: %s\\n\" % self.message\r\n text += u\"exception: %s: \\n\" % self.exception.__class__.__name__\r\n\r\n text += u\"node: %s\\n\" % self.node\r\n\r\n try:\r\n text += unicode(self.exception)\r\n except Exception, e:\r\n text += u\"<unable to get exception string: %s>\" % e\r\n\r\n text += \"\\ntraceback\\n\"\r\n\r\n try:\r\n l = traceback.format_list(traceback.extract_tb(self.traceback))\r\n text += \"\".join(l)\r\n except Exception as e:\r\n text += \"<unable to get traceback string: %s>\" % e\r\n\r\n text += \"\\n\"\r\n\r\n if self.inputs:\r\n for i, fields in enumerate(self.inputs):\r\n text += \"input %i:\\n\" % i\r\n input_text = \"\"\r\n for (index, field) in enumerate(fields):\r\n input_text += u\"% 5d %s (storage:%s analytical:%s)\\n\" \\\r\n % (index, field.name, field.storage_type, field.analytical_type)\r\n text += unicode(input_text)\r\n else:\r\n text += \"input: none\"\r\n\r\n text += \"\\n\"\r\n\r\n if self.output:\r\n text += \"output:\\n\"\r\n for field in self.output:\r\n text += u\" %s (storage:%s analytical:%s)\\n\" \\\r\n % (field.name, field.storage_type, field.analytical_type)\r\n else:\r\n text += \"ouput: none\"\r\n\r\n text += \"\\n\"\r\n\r\n if self.attributes:\r\n text += \"attributes:\\n\"\r\n for name, attribute in self.attributes.items():\r\n try:\r\n value = unicode(attribute)\r\n except Exception, e:\r\n value = \"unable to convert to string (exception: %s)\" % e\r\n text += \" %s: %s\\n\" % (name, value)\r\n else:\r\n text += \"attributes: none\"\r\n\r\n output.write(text)", "def _error_handling(self,e,func):\n print(self.type, \" sufferred exception in \" , func , \":\" , e)", "def exception_description(err):\n result = ''\n if isinstance(err, str):\n result = err\n elif isinstance(err, Exception):\n result = \"Exception class: %s.%s\\n\" % (err.__class__.__module__, \\\n err.__class__.__name__)\n if len(err.args) > 0:\n result += \"Args:\\n\"\n arg_num = 0\n for arg in err.args:\n if not isinstance(arg, str):\n arg = str(arg)\n\n arg = arg.replace('\\n', '\\n\\t' + ' '*(len(str(arg_num)) + 3))\n\n result += \"\\t%s : %s\\n\" % (arg_num, arg)\n arg_num += 1\n else:\n result = str(err)\n return result", "def test_friendly_exception_formatting_multiple_exceptions():\n ex1 = InsufficientCorrectSignatures(1, 2, {'6ouriXMZkLeHsuXrN1X1fd': '3GoEPiwhJUjALzrXmmE9tFTXAi7Emv8Y8jjSxQyQB'})\n ex2 = InsufficientSignatures(1, 3)\n ex2.__cause__ = ex1\n ex3 = SigningException()\n ex3.__cause__ = ex2\n\n expected = '{} [caused by {} [caused by {}]]'.format(ex3, ex2.reason, ex1.reason)\n formatted_exception = friendlyEx(ex3)\n\n assert formatted_exception == expected", "def handle_exception(e):\n maps = {\n exp.ServiceExp: api_exceptions.ServiceException,\n exp.PermissionExp: api_exceptions.ForbiddenException,\n exp.NotFoundExp: api_exceptions.NotFoundException,\n exp.ValueExp: api_exceptions.BadRequestException,\n exp.BadRequestExp: api_exceptions.BadRequestException,\n }\n raise maps[e.__class__](e.message)", "def produceExceptionClass(theName, theList):\n TheException = type(theName,\n (Exception, object,),\n dict(_errorMessages={},\n __init__=_myEx_init,\n __str__=_myEx_str,\n errCode=_myEx_errCode,\n messages=_myEx_messages))\n for x in theList:\n (errCode, errName, errMsg) = x\n setattr(TheException, errName, errCode)\n TheException._errorMessages[errCode] = errMsg\n return TheException", "def format_exc():\n from traceback import format_exc\n return format_exc().decode('utf-8', 'surrogateescape')", "def exception(msg):\n log('EXCEPTION', msg)", "def 
__as_unicode(self):\n # WARNING: Do not change this string - it is used to extract error from log\n strg = WMEXCEPTION_START_STR\n strg += \"\\nException Class: %s\\n\" % self.name\n strg += \"Message: %s\\n\" % self._message\n for key, value in viewitems(self.data):\n strg += \"\\t%s : %s\\n\" % (key, value,)\n strg += \"\\nTraceback: \\n\"\n strg += self.traceback\n strg += '\\n'\n strg += WMEXCEPTION_END_STR\n return strg", "def test_print_exception() -> None:\n try:\n raise ValueError(\"foo\")\n except Exception as ex:\n print_exception(ex, \"Message\")", "def err_str(err):\n return \"\".join(format_exception_only(type(err), err))", "def exception_hook(cls, etype, value, tb):\n import traceback\n\n # Print exception\n traceback.print_exception(etype, value, tb)\n\n # Log exception\n stacktrace_msg = ''.join(traceback.format_tb(tb))\n if etype:\n exception_msg = '{0}: {1}'.format(etype, value)\n else:\n exception_msg = 'Exception: {}'.format(value)\n\n LOGGER.critical(stacktrace_msg)\n LOGGER.critical(exception_msg)\n\n # Write to exception log file\n exception_file_name = datetime.now().strftime('RenderKnecht_Exception_%Y-%m-%d_%H%M%S.log')\n exception_file = Path(get_settings_dir()) / exception_file_name\n\n with open(exception_file, 'w') as f:\n traceback.print_exception(etype, value, tb, file=f)\n\n # Inform GUI of exception if QApplication set\n if cls.app:\n gui_msg = f'{stacktrace_msg}\\n{exception_msg}'\n cls.send_exception_signal(gui_msg)", "def reraise(exception_class, e, traceback=None):\n try:\n e = exception_class(\"\" if e is None else e)\n if e.__traceback__ is not traceback:\n raise e.with_traceback(traceback)\n raise e\n finally:\n e = None\n traceback = None", "def print_exception_formatted(type, value, tb):\n\n tbtext = \"\".join(traceback.format_exception(type, value, tb))\n lexer = get_lexer_by_name(\"pytb\", stripall=True)\n formatter = TerminalFormatter()\n sys.stderr.write(highlight(tbtext, lexer, formatter))", "def _format_exception(message: str) -> str:\n exc_inf = sys.exc_info()\n if all(exc_inf):\n (etype, value, trace) = exc_inf\n if message:\n return f'{message}, {traceback.format_exception(etype, value, trace)}'\n else:\n return f'{traceback.format_exception(etype, value, trace)}'", "def exception(self, level, *args):\r\n if level < self.level:\r\n return\r\n\r\n message = []\r\n if args:\r\n message.append(args[0].format(*args[1:]))\r\n message.append('\\n')\r\n\r\n message.append(traceback.format_exc())\r\n\r\n message = ''.join(message)\r\n self._raw_insert(level, message)", "def exception_to_string(excp: Exception) -> str:\n stack = traceback.extract_stack()[:-3] + traceback.extract_tb(\n excp.__traceback__\n ) # add limit=??\n pretty = traceback.format_list(stack)\n return \"\".join(pretty) + f\"\\n {excp.__class__} {excp}\"", "def creation_error(src_dict: Dict[str, List[Union['Repeater', 'Step']]], e: str) -> str:\n return \"Sequencer error in %s: %s\\n\" % (json.dumps(src_dict), e)", "def formatException(self, exc_info):\n traces = traceback.format_exception(*exc_info)\n return \"\\n\".join(traces)", "def syntaxError (self, s) :\r\n report = self.generateReport() + s\r\n raise Exception, report", "def formatException(self, exc_info):\n type_, value, trcbk = exc_info\n\n for pos, frame in enumerate(traceback.extract_tb(trcbk)):\n row = [\n type_.__name__,\n value,\n pos,\n frame.filename,\n frame.lineno,\n frame.name,\n frame.line,\n ]\n self.writer.writerow(row)\n\n data = self.output.getvalue()\n self.output.truncate(0)\n self.output.seek(0)\n 
return data.strip()", "def __ex(exception_string, internal=False):\n ex = str(exception_string).strip()\n while \" \" * 2 in ex:\n ex = ex.replace((\" \" * 2), \" \")\n if internal:\n ex = \"PaVal: \" + ex\n raise Exception(ex)", "def exception(self, *args, **kwargs):\n\n message = self.get_message(*args, **kwargs)\n self.logger.exception(message)", "def exception(self, *args, **kwargs):", "def creation_error(src_dict: Dict[str, List[str]], e: str):\n return \"LED Group error in %s: %s\\n)\" % (json.dumps(src_dict), e)", "def test_are_chained_exceptions_printed(self):\n\n io = BufferedSystemIO()\n\n try:\n try:\n raise IndexError('Invalid index 5')\n except IndexError as index_exc:\n raise Exception('There was an error with index') from index_exc\n\n except Exception as exc:\n output_formatted_exception(exc, ':my-test-task', io)\n\n self.assertIn('(Caused by) IndexError:', io.get_value())\n self.assertIn('Exception:', io.get_value())\n self.assertIn('There was an error with index', io.get_value())", "def format_exception(self):\n if isinstance(self.message, dict):\n return self.message, self.status_code\n return Request.format_exception(self.message, self.status_code)", "def addExceptionMessage(self, q, inst, traceback):\n self.fail('FAIL: Exception raised: %s' % inst)\n self.addMessage('')\n for line in traceback.format_exc().split('\\n'):\n self.addMessage(line)", "def parse_error(self, message, exc_cls=VisualizerParseError):\n raise exc_cls(\"Error parsing %s '%s' (%s:%i): %s\" % \n (self.tag, self.ref, self.filename, self.lineno, message))", "def formatException(self, exc_info):\n result = super(OneLineExceptionFormatter, self).formatException(exc_info)\n return repr(result) # or format into one line however you want to", "def print_exception(begin: str = '') -> str:\n et, ev, tb = sys.exc_info()\n exc = begin + \"Exception was thrown: {}\\n\".format(ev)\n for l in traceback.format_exception(et, ev, tb):\n exc += l\n return print_warning(exc)", "def _raise_format_error(self, name: str, format_str: str, source_format: str):\n\n raise ValueError(f\"The '{ name }' should be { format_str }, rather than { source_format }\")", "def __str__(self):\n return \"ParseException: %s\" % self.__msg", "def ExceptionAppend(e, msg):\n if not e.args:\n e.args = (msg,)\n elif len(e.args) == 1:\n e.args = (str(e.args[0]) + ' ' + msg,)\n else:\n e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]", "def _exceptions_formatter(field, description):\n heads = ['throws']\n types = _or_types(field)\n if types:\n heads.append(types)\n tail = description\n return heads, tail", "def exception_handler(exctype, val, trace):\n logger.info(\n ''.join(traceback.format_exception(exctype, val, trace)))", "def exceptions(e):\n ts = strftime('[%Y-%b-%d %H:%M]')\n tb = format_exc()\n app.logger.error('%s %s %s %s %s 5xx INTERNAL SERVER ERROR\\n%s',\n ts,\n request.remote_addr,\n request.method,\n request.scheme,\n request.full_path,\n tb)\n return jsonify(message=\"Internal Server Error\"), 500", "def exceptions(e):\n ts = strftime('[%Y-%b-%d %H:%M]')\n tb = traceback.format_exc()\n logger.error('%s %s %s %s %s 5xx INTERNAL SERVER ERROR\\n%s',\n ts,\n request.remote_addr,\n request.method,\n request.scheme,\n request.full_path,\n tb)\n return \"Internal Server Error\", 500", "def handle_exception(exc_type, exc_value, exc_traceback):\n exc_msg = traceback.format_exception(exc_type, exc_value, exc_traceback)\n exc_msg.insert(0, 'Uncaught exception on processor {}\\n'.format(mpiops.chunk_index))\n exc_msg = \"\".join(exc_msg)\n 
print(exc_msg, file=sys.stderr)", "def exception(self) -> str:\n return pulumi.get(self, \"exception\")", "def handleException(self,e):\n # print (\"Handling Exception %s %s\" % (e.__class__.__name__, e.args))\n #\n # general pre-processing\n #\n # add filename to EnvironmentError for printout\n if isinstance(e,EnvironmentError):\n fn = e.filename\n if fn is not None and fn not in e.args: e.args += (fn,)\n #\n # specific processing\n #\n if isinstance(e,TransformError): return self.handleTransformError(e)\n elif isinstance(e,IncludeError): return self.handleIncludeError(e)\n elif isinstance(e,SystemExit): return self.handleSystemExit(e)\n elif isinstance(e,KeyboardInterrupt): return self.handleKeyboardInterrupt(e)\n elif isinstance(e,RuntimeError): return self.handleRuntimeError(e)\n elif type(e) in (AttributeError,NameError,TypeError,SyntaxError):\n return self.handlePythonSyntaxError(e)\n elif isinstance(e,Exception):\n if hasattr(e,'args') and type(e.args) == list and e.args:\n args0 = e.args[0]\n # test for some known strings\n if isinstance(args0, str):\n if args0.find('Failed to load DLL') != -1:\n return self.handleDllLoadError(e)\n # error was not handled\n return None", "def GetLastExceptionString():\n (exc, error) = sys.exc_info()[0:2]\n exc_msg = str(exc)\n if '<class' in exc_msg:\n exc_msg = exc_msg.split(\"'\")[1]\n\n exc_msg = exc_msg.replace('dns.exception.', '')\n error = '%s %s' % (exc_msg, error)\n # We need to remove the trailing space at some point.\n return error.rstrip()", "def formatException(cls, instance, trcback, context=1):\n\n\tstack = extractStack(getInnerMostFrame(trcback), context=context)\n\toutput = []\n\toutput.append(\"Traceback (most recent call last):\")\n\tfor frame, fileName, lineNumber, name, context, index in stack:\n\t\toutput.append(\" File \\\"{0}\\\", line {1}, in {2}\".format(fileName, lineNumber, name))\n\t\tfor line in context:\n\t\t\toutput.append(\" {0}\".format(line.strip()))\n\tfor line in traceback.format_exception_only(cls, instance):\n\t\toutput.append(\"{0}\".format(line))\n\treturn output", "def exception(self, msg, *args, **kwargs):\n ex = sys.exc_info()[1]\n\n if hasattr(ex, '_monocle'):\n args = args + (format_tb(ex),)\n self.logger.error('%s\\n%%s' % msg, *args, **kwargs)\n else:\n super(Adapter, self).exception(msg, *args, **kwargs)", "def get_exception():\n trace = ''\n exception = ''\n exc_list = traceback.format_exception_only(sys.exc_info()[0],\n sys.exc_info()[1])\n for entry in exc_list:\n exception += entry\n tb_list = traceback.format_tb(sys.exc_info()[2])\n for entry in tb_list:\n trace += entry\n return '%s\\n%s' % (exception, trace)", "def logError(e):\r\n print(e)", "def otherError(caller, exception):\n m = str(exception)\n if len(m) > 0:\n m = \": \" + m\n _log.error(\n \"- ERROR {} {}{}\".format(\n impl.util.encode2(caller),\n impl.util.encode1(type(exception).__name__),\n impl.util.encode1(m),\n )\n )\n if django.conf.settings.DEBUG:\n raise exception\n else:\n _notifyAdmins(\n \"Exception raised in {}:\\n{}{}\\n\\n{}\".format(\n caller, type(exception).__name__, m, traceback.format_exc()\n )\n )", "def exception_class(self, exception):\n\n\t\tcls = type(exception)\n\t\tif cls.__module__ == 'exceptions': # Built-in exception.\n\t\t\treturn cls.__name__\n\t\treturn \"%s.%s\" % (cls.__module__, cls.__name__)", "def __str__(self) -> str:\n message = (\n f\"ERROR: Registration Exception.\\n\"\n f\" - Internal error code: {str(self.code)}\\n\"\n f\" - Internal error message: {str(self.message)}\"\n )\n return message", 
"def _exception_dispatcher(self, e):\n # TODO Currently not doing anything\n raise e", "def __str__(self) -> str:\n message = (\n f\"ERROR: Backend Exception.\\n\"\n f\" - Internal error code: {str(self.code)}\\n\"\n f\" - Internal error message: {str(self.message)}\"\n )\n return message", "def __init__(self, module, message, _type, exc_message=None, *args, **kwargs):\n logger.error(\"[{}] {} {} {}\".format(module,\n _type,\n '<{}>'.format(exc_message) if exc_message else '',\n message))\n super(CliException, self).__init__(message, *args)\n self.message = message\n self.type = _type\n self.exc_message = exc_message\n self.str_at_error = kwargs.get('str_at_error', None)", "def exception(self):\n exc_type, exc_value, exc_tb = sys.exc_info()\n cui.message(traceback.format_exception_only(exc_type, exc_value)[-1],\n log_message=traceback.format_exc())", "def log_exception(*args, **kwds):\n cls, err = sys.exc_info()[:2]\n logging.exception('Exception in request: %s: %s', cls.__name__, err)", "def error(msg, ex=None):\n log_error(msg, ex)\n if ex == None:\n exMsg = \"\"\n else:\n exMsg = \" \\n \" + repr(ex)\n if ex == None:\n raise Exception(exMsg)\n else:\n raise ex", "def exception_hash(err, traceback = None):\n result = ''\n if isinstance(err, str):\n result = \"str: %s\" % err\n else:\n if traceback == None:\n traceback = \"\\nNone\\n\"\n else:\n traceback = '\\n' + traceback\n result = \"%s.%s: %s%s\" % (err.__class__.__module__, \\\n err.__class__.__name__, \\\n str(err), traceback)\n return result", "def write_error(self, status_code, **kwargs):\n reason = \"Unknown Error\"\n\n # Get information about the triggered exception\n self.application.gs_globals[\"exception_fulltext\"] = repr(sys.exc_info())\n\n # Get the status code and error reason\n if status_code in list(ERROR_CODES):\n reason = ERROR_CODES[status_code]\n try:\n if \"exc_info\" in kwargs:\n _, error, _ = kwargs[\"exc_info\"]\n reason = error.reason\n except AttributeError:\n pass\n\n # Return JSON if this is an API call\n if \"/api/v1/\" in self.request.uri:\n jsondict = {\n \"page_title\": \"Error {}: {}\".format(status_code, reason),\n \"error_status\": status_code,\n \"error_reason\": reason,\n \"error_exception\": self.application.gs_globals[\"exception_fulltext\"],\n }\n self.set_header(\"Content-type\", \"application/json\")\n self.write(json.dumps(jsondict))\n\n # Render the error template\n else:\n t = self.application.loader.load(\"error_page.html\")\n self.write(\n t.generate(\n gs_globals=self.application.gs_globals,\n status=status_code,\n reason=reason,\n user=self.get_current_user(),\n )\n )", "def log_error(e):\n\tprint(e)", "def log_error(e):\n\tprint(e)", "def _exc_info_to_string(self, err, test):\n\t\texctype, value, tb = err\n\t\t# Skip test runner traceback levels\n\t\twhile tb and self._is_relevant_tb_level(tb):\n\t\t\ttb = tb.tb_next\n\n\t\tif exctype is test.failureException:\n\t\t\t# Skip assert*() traceback levels\n\t\t\tlength = self._count_relevant_tb_levels(tb)\n\t\t\tmsgLines = traceback.format_exception(exctype, value, tb, length)\n\t\telse:\n\t\t\tmsgLines = traceback.format_exception(exctype, value, tb)\t\t\n\t\treturn ''.join(msgLines)", "def get_exception():\n raise Exception(\"example\")", "def log_error(e):\r\n print(e)", "def log_error(e):\r\n print(e)", "def _publish_error(self, exc_info, parent=None):\n exc_type, exception, traceback = exc_info\n\n content = {\n \"ename\": exc_type.__name__,\n \"evalue\": str(exception),\n \"traceback\": format_tb(traceback),\n }\n self.session.send(\n 
self.iopub_socket,\n \"error\",\n content,\n parent=parent,\n ident=self._topic(\"error\"),\n )", "def send_error(self, e):\n self._send({'error': e.payload})", "def _catch_exceptions(self, exctype, value, tb):\n\n # Now we log it.\n self.error(\"Uncaught exception\", exc_info=(exctype, value, tb))\n\n # First, we print to stdout with some colouring.\n print_exception_formatted(exctype, value, tb)", "def test_K_str_no_args(self):\n r = Rectangle(5, 2)\n with self.assertRaises(TypeError) as e:\n Rectangle.__str__()\n s = \"__str__() missing 1 required positional argument: 'self'\"\n self.assertEqual(str(e.exception), s)", "def write_event_error(\n writer,\n raw_event,\n error_message,\n error_code,\n parsed_event=None\n):\n try:\n writer.send(\n create_event_error(\n raw_event,\n error_message,\n error_code,\n parsed_event\n )\n )\n except Exception as e:\n logging.error('Unable to create EventError object: %s' % str(e))", "def print_exception(etype, value, tb, limit=None, file=None):\n\n if file is None:\n file = sys.stderr\n if tb:\n tbi = TracebackInfo.from_traceback(tb, limit)\n print(str(tbi), end='', file=file)\n\n for line in format_exception_only(etype, value):\n print(line, end='', file=file)", "def serialize_known_exception(e, formatted_traceback=None):\n if formatted_traceback is None:\n tb = StringIO()\n traceback.print_exc(file=tb)\n trace_out = tb.getvalue()\n else:\n trace_out = formatted_traceback\n\n # Needed because HttpException constructor sucks\n append_message = False\n # Convert exception to a know exception type that can be deserialized\n # by the calling process\n known_exception_type_args = []\n if isinstance(e, exceptions.HttpException):\n known_exception_type = exceptions.HttpException\n known_exception_type_args = [e.url, e.code]\n append_message = True\n elif isinstance(e, exceptions.NonRecoverableError):\n known_exception_type = exceptions.NonRecoverableError\n elif isinstance(e, exceptions.OperationRetry):\n known_exception_type = exceptions.OperationRetry\n known_exception_type_args = [e.retry_after]\n trace_out = None\n elif isinstance(e, exceptions.RecoverableError):\n known_exception_type = exceptions.RecoverableError\n known_exception_type_args = [e.retry_after]\n elif isinstance(e, exceptions.StopAgent):\n known_exception_type = exceptions.StopAgent\n elif isinstance(e, exceptions.WorkflowFailed):\n known_exception_type = exceptions.WorkflowFailed\n trace_out = None\n else:\n # convert pure user exceptions to a RecoverableError\n known_exception_type = exceptions.RecoverableError\n\n try:\n causes = e.causes\n except AttributeError:\n causes = []\n\n payload = {\n 'exception_type': type(e).__name__,\n 'message': format_exception(e),\n 'known_exception_type': known_exception_type.__name__,\n 'known_exception_type_args': known_exception_type_args,\n 'known_exception_type_kwargs': {'causes': causes or []},\n 'append_message': append_message,\n }\n if trace_out:\n payload['traceback'] = trace_out\n return payload", "def exceptions(e):\n # NOTE: add log entry\n str(getattr(e, \"code\", \"unavailable\"))\n log_error_code = str(getattr(e, \"code\", \"unavailable\"))\n service_log.error(\n f\"{request.remote_addr} {request.method} {request.scheme} {request.full_path}\\n\"\n f\"Error code: {log_error_code}\\n\"\n f\"Stack trace: {traceback.format_exc()}\"\n )\n\n # NOTE: craft user messages\n if hasattr(e, \"code\"):\n code = int(e.code)\n\n # NOTE: return an http error for methods with no body allowed. 
This prevents undesired exceptions.\n NO_PAYLOAD_METHODS = \"HEAD\"\n if request.method in NO_PAYLOAD_METHODS:\n return Response(status=code)\n\n error: ServiceError\n if code == 400:\n error = ProgramHttpRequestError(e)\n elif code == 404:\n error = ProgramHttpMissingError(e)\n elif code == 405:\n error = ProgramHttpMethodError(e)\n elif code == 408:\n error = ProgramHttpTimeoutError(e)\n else:\n error = ProgramHttpServerError(e, code)\n\n return error_response(error)\n\n # NOTE: Werkzeug exceptions should be covered above, the following line is for\n # unexpected HTTP server errors.\n return error_response(e)", "def handle_exception(e):\n print(e)\n return error()", "def sample_exception(\n self, el: Any, exc_info: Any, transform_id: str,\n instruction_id: str) -> None:\n with self._samples_lock:\n err_string = ''.join(traceback.format_exception(*exc_info))\n self._exceptions.append(\n (el, ExceptionMetadata(err_string, transform_id, instruction_id)))", "def displayException(originator: object, parent: java.awt.Component, title: unicode, message: unicode, throwable: java.lang.Throwable) -> None:\n ..." ]
[ "0.6879372", "0.66124326", "0.66071355", "0.6428653", "0.6300448", "0.62846094", "0.617269", "0.60398024", "0.60369265", "0.5990471", "0.59839076", "0.5983831", "0.5976452", "0.5947421", "0.59470266", "0.5942072", "0.5938443", "0.58877414", "0.58695066", "0.58431464", "0.5832342", "0.58168536", "0.57758707", "0.5734521", "0.56904227", "0.56871927", "0.567612", "0.5669497", "0.5664345", "0.56610864", "0.5641102", "0.56133986", "0.5610479", "0.5605608", "0.5590604", "0.55894727", "0.558237", "0.55781007", "0.5563872", "0.55587953", "0.55390054", "0.5536554", "0.55319977", "0.55270916", "0.5518697", "0.55045545", "0.550135", "0.54943603", "0.5492988", "0.5486604", "0.54794854", "0.5478286", "0.547285", "0.5471677", "0.5459014", "0.5453535", "0.54460275", "0.5444364", "0.54368764", "0.54302937", "0.5429283", "0.54288155", "0.5424072", "0.5408912", "0.5405651", "0.53855073", "0.5383503", "0.5373259", "0.53728884", "0.53727186", "0.5371558", "0.53687173", "0.53661007", "0.5365406", "0.5362357", "0.53546727", "0.53517354", "0.5337355", "0.53295267", "0.5314", "0.5309905", "0.5301295", "0.52964187", "0.52910584", "0.52910584", "0.5283177", "0.52711403", "0.5269229", "0.5269229", "0.52539366", "0.5249502", "0.52285445", "0.5222403", "0.5217569", "0.5210889", "0.5207773", "0.5198409", "0.51847285", "0.51810104", "0.5169347" ]
0.65568435
3
Returns the filename of the first file in the given directory. Just provide the directory's name with no leading './'
def getFirstFile(folderName):
    listFiles = subprocess.run("ls ./" + folderName, shell=True, stdout=subprocess.PIPE)
    fileName = re.search(r"b'(.*?)\\n", str(listFiles.stdout))[1]
    if(len(fileName) > 0):
        return fileName
    else:
        return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_filename(self):\n \n for f in os.listdir(self.get_directory()):\n if os.path.isfile(os.path.join(self.get_directory(), f)):\n return f\n \n return None", "def get_first_file(cmds):\n for cmd in cmds:\n all_files = glob.glob(cmd)\n if all_files:\n for f in all_files:\n if not os.path.isdir(f):\n return f\n return ''", "def get_dir_for_fname(directory, filename):\r\n for fname, dirpath in get_all_files(directory):\r\n if fname == filename:\r\n return dirpath\r\n return None", "def get_file_name_from_cd(cd):\n if not cd: \n return None\n fname = re.findall('filename=(.+)', cd) \n if (fname) == 0: \n return None\n return fname[0]", "def get_file_name_from_directory(file_dir):\n L = []\n for root, dirs, files in os.walk(file_dir):\n for file in files:\n if os.path.splitext(file):\n L.append(os.path.join(root, file))\n return L", "def getDirectoryFilename(path):\n\tfrom os.path import splitext\n\tpath = normalizePath(path)\n\treturn splitext(path)[0]", "def get_filename(file_path):\n\n # Get rid of directories and etc\n just_file = os.path.basename(file_path)\n\n # Now we return just the base name\n return os.path.splitext(just_file)[0]", "def extract_dir_name(input_file):\r\n fname = PurePath(input_file).__str__()\r\n s = fname.split('.')\r\n name = '.'.join(s[:-1])\r\n return name", "def file_name(path):\n return os.path.basename(path).split('.')[0]", "def get_first_formatted_dir_in_dir(folder, fmt):\n first_elem = None\n root_elements = sorted(os.listdir(folder))\n for root_element in root_elements:\n if os.path.isdir(os.path.join(folder, root_element)):\n if parser.validate(fmt, root_element):\n first_elem = root_element\n break\n return first_elem", "def get_file_name(file_path):\n full_file_name = file_path.split(os.sep)[-1]\n file_name = full_file_name.split(\".\")[0]\n return file_name", "def find_file(directory, extensions):\n for filename in os.listdir(directory):\n if filename.endswith(extensions):\n return \"{}/{}\".format(directory, filename)\n return None", "def Dir(path=None):\n global _last_files\n if path:\n _last_files = glob.glob(path)\n if _last_files:\n return os.path.split(_last_files.pop(0))[1] # VB just returns the filename, not full path\n else:\n return \"\"", "def get_filename(filename):\n\tpath, filename = os.path.split(filename)\n\tpaths = ['', ] + list(filter(None, path.split(os.path.sep)))\n\tpaths = list(map(lambda t: os.path.join(*paths[:len(paths)-t[0]], filename), enumerate(paths)))\n\tpaths = list(filter(os.path.exists, paths))\n\treturn paths[0] if paths else None", "def getfilename(path):\r\n return path.split('\\\\').pop().split('/').pop().rsplit('.', 1)[0]", "def get_file_name(path: str) -> str:\n return os.path.splitext(os.path.basename(path))[0]", "def get_filename(path):\n return path.split('/')[-1]", "def dir_fname(directory,nametype):\n\tfnames = directory+os.sep+nametype\n\tfnames = glob.glob(fnames)\n\tfnames = np.sort(fnames) # order files from 0 to last\n\treturn fnames", "def getFileName(filepath):\n return os.path.splitext(os.path.basename(filepath))[0]", "def get_full_path(file_extension=True) -> str:\n return get_directory() + \"/\" + get_filename(file_extension=file_extension)", "def get_file(dir, filename):\n return os.path.join(os.path.dirname(__file__), dir, filename)", "def find_file_directory(data_dir, _format=None):\n if _format is not None:\n files = [f for f in data_dir.glob(\"*{}\".format(_format))]\n if len(files) > 1:\n print_choices([f.resolve().expanduser() for f in files])\n file_index = click.prompt(\"Select the 
file\",\n type=click.Choice(range(1, len(files) + 1)),\n show_choices=True,\n value_proc=parse\n )\n click.secho(\"Selected from your computer {}\".format(files[file_index-1]))\n return str(files[file_index-1])\n elif len(files) == 1:\n click.secho(\"Selected from your computer {}\".format(files[0]))\n return str(files[0])\n else:\n click.secho(\"There is not files with format {} in the directory {}\".format(_format, data_dir))\n return None", "def generate_file_path(directory: str, file_name: str):\n return os.path.join(os.getcwd(), directory, file_name)", "def get_file_name(file):\n return os.path.splitext(os.path.basename(file))[0]", "def file_directory(file):\n return os.path.dirname(os.path.realpath(file))", "def getFilename(path):\n\tfrom os.path import split\n\tpath = normalizePath(path)\n\treturn split(path)[1]", "def get_directory(path):\n return mangle_path(path).rsplit('/',1)[0]", "def GetFile(file):\n\n file = file.replace(\"/\", \"\\\\\").strip(\"\\\\\")\n new = list(file)\n new.reverse()\n if \"\\\\\" not in new:\n return None, file # Don't raise an error, but there isn't any folder\n indx = new.index(\"\\\\\")\n return file[:-indx], file[-indx:] # Full path and file name", "def get_fname(a_file):\r\n fname, fext = os.path.splitext(a_file)\r\n return os.path.basename(fname)", "def get_fname(a_file):\r\n fname, fext = os.path.splitext(a_file)\r\n return os.path.basename(fname)", "def get_file_name(path):\n return os.path.basename(path)", "def extract_file_name(file_path):\n # ファイルパスからファイル名(拡張子含む)を取り出す\n file_name = file_path.split('/')[-1]\n # 拡張子を取り除く\n return file_name.split('.')[0]", "def just_the_name(path):\n return os.path.splitext(os.path.basename(path))[0]", "def abs_path(file_name: str, directory: str) -> str:\r\n return os.path.join(directory, file_name)", "def get_filename(filepath):\n return os.path.basename(filepath)", "def find_specific_file_extension_in_dir(dir_path, extension):\r\n return glob.glob(os.path.join(dir_path, r'*{}'.format(extension)))[0].replace('\\\\', '/').split('/')[-1]", "def filename_from_path(filepath: str) -> str:\n return filepath.split(\"/\")[-1]", "def get_name_from_file(filename):\n return filename.split(\".\")[0]", "def get_full_filename(dirname, name, ext, tmstamp=False):\n fill = '_' + str_current_time() if tmstamp else ''\n fmt = '/{}{}{}' if ext.startswith('.') else '/{}{}.{}'\n return resolve(dirname) + fmt.format(name, fill, ext)", "def get_directory() -> str:\n return directory", "def get_filename(extended_slug):\n user, project, build_id, job_id = split_extended_slug(extended_slug)\n\n if None in (user, project, build_id, job_id): # todo; remove this\n return\n\n filename_glob = os.path.join(\n test_data_dir,\n user, project,\n '{0}.{1}-*.txt'.format(build_id, job_id))\n filenames = glob.glob(filename_glob)\n if filenames:\n return filenames[0]\n else:\n return None", "def GetOriginalFilename(name):\n if not name.endswith(\".py\"):\n name = name + \".py\"\n\n # Stop looking for views and widgets in the top folder, except for Main\n if name == \"Main.py\":\n if os.path.isfile(name):\n return name\n\n originalDir = os.getcwd()\n listDir = os.listdir(originalDir)\n # Loop over the content of the demo directory\n for item in listDir:\n if not os.path.isdir(item):\n # Not a directory, continue\n continue\n dirFile = os.listdir(item)\n # See if a file called \"name\" is there\n if name in dirFile:\n return os.path.join(item, name)\n\n # We must return a string...\n return \"\"", "def get_filename(name):\n return 
osp.join(osp.dirname(osp.abspath(__file__)), name)", "def find(name):\n\n if os.path.exists(name):\n return name\n\n path = os.path.dirname(__file__) or '.'\n filename = os.path.abspath(os.path.join(path,name))\n if os.path.exists(filename):\n return filename\n\n for d in os.listdir(path):\n fullpath = os.path.abspath(os.path.join(path,d))\n if os.path.isdir(fullpath):\n filename = os.path.abspath(os.path.join(fullpath,name))\n if os.path.exists(filename):\n return filename\n return None", "def get_dir_path(file_name=\"\"):\n prog_path = sys.argv[0].replace(sys.argv[0].split(\"/\")[-1],\n file_name)\n return os.path.abspath(prog_path)", "def path(filename: str) -> str:\n path = os.path.dirname(sys.argv[0])\n if not path:\n path = '.'\n return path + '/' + filename", "def _lookupFileName (self,\n enclosingDirectoryName : String,\n originalFileName : String) -> String:\n\n Logging.trace(\">>: directory = %r, file = %r\",\n enclosingDirectoryName, originalFileName)\n\n cls = self.__class__\n result = None\n separator = OperatingSystem.pathSeparator\n simpleFileName = OperatingSystem.basename(originalFileName)\n searchPathList = list(cls._searchPathList)\n searchPathList.append(enclosingDirectoryName)\n\n for directoryName in searchPathList:\n fileName = iif(directoryName == \".\", originalFileName,\n directoryName + separator + simpleFileName)\n isFound = OperatingSystem.hasFile(fileName)\n Logging.trace(\"--: %r -> found = %r\", fileName, isFound)\n\n if isFound:\n result = fileName\n break\n\n Logging.trace(\"<<: %r\", result)\n return result", "def path_to_file(fname, dirs):\n for each in dirs:\n path = '/'.join([each, fname])\n if os.path.exists(path):\n return path\n return None", "def filename(self, url, default_file = \"index.html\"):\n purl = urlparse(url)\n file_name = purl[1] + purl[2] \n folder_name = (purl[1] + purl[2])\n \n if purl[2] == '':\n folder_name += ('/' + default_file)\n file_name += ('/' + default_file)\n elif purl[2] == '/':\n folder_name += default_file\n file_name += default_file\n elif (purl[2])[-1] == '/':\n file_name += ('/' + default_file)\n\n folder_path = dirname(folder_name)\n \n if not isdir(folder_path): # create archive dir if nec.\n if not exists(folder_path): \n makedirs(folder_path)\n return file_name", "def auto_file(filename, where='.') -> str:\n\n if os.path.isabs(filename):\n return filename\n\n prob = os.path.join(where, filename)\n if os.path.exists(prob) and os.path.isfile(prob):\n return prob\n\n files = list(glob.iglob(os.path.join(where, '**', filename), recursive=True))\n if len(files) == 0:\n raise FileNotFoundError('Given file could not be found with recursive search:' + filename)\n\n if len(files) > 1:\n raise FileNotFoundError('More than one file matches given filename. 
Please specify it explicitly' + filename)\n\n return files[0]", "def get_path(filename):\n\tif filename != \"\":\n\t\treturn filename\n\telse:\n\t\tfilename = \".\"", "def get_filename(filepath):\n # Get only the name of the file\n filename_ext = os.path.basename(filepath)\n # Get the name of the file without the extnesion\n filename = os.path.splitext(filename_ext)[0]\n\n return filename", "def joplin_file_name(joplin_dir: str) -> str:\n for (_, _, files) in os.walk(joplin_dir):\n for filename in files:\n file_full_name = os.path.join(joplin_dir, filename)\n if os.path.isfile(file_full_name):\n yield file_full_name", "def result_file(file_path: str) -> Union[str, None]:\n if not os.path.isdir(file_path):\n return None\n else:\n file_list = list()\n for file in os.listdir(file_path):\n file_list.append(file)\n if not file_list or len(file_list) > 1:\n # it should be just one file per file_id directory\n return None\n else:\n return file_list[0]", "def root_name(file_name, file_id):\n if file_id is not None:\n return \"{}{}\".format(R_DIR, file_name.format(file_id))\n else:\n return \"{}{}\".format(R_DIR, file_name)", "def get_filename(target_dir, filename_prefix):\n # this whole function is not the nicest thing, but isolating it makes\n # things clearer , a good refactoring would be to get\n # the info from the video_url or the current output, to avoid the\n # iteration from the current dir\n filenames = os.listdir(target_dir)\n subs_filename = filename_prefix\n for name in filenames: # Find the filename of the downloaded video\n if name.startswith(filename_prefix):\n (basename, ext) = os.path.splitext(name)\n return basename", "def search_file(directory: Path, filename=None, recursive=True, extension=None):\n assert Path(directory).exists(), f\"Path({directory}) is not existence\"\n assert Path(directory).is_dir(), f\"Path({directory}) should be direcory\"\n\n target_file = directory + \"/**\"\n if filename:\n target_file = target_file + \"/\" + filename\n\n if extension:\n target_file = target_file + \"/*.\" + extension\n\n files = glob.glob(target_file, recursive=recursive)\n if len(files) == 1:\n return files[0]\n\n assert len(files), f\"No files is not founded : {target_file}\"\n return files", "def get_filename(target_dir, filename_prefix):\n # This whole function is not the nicest thing, but isolating it makes\n # things clearer. 
A good refactoring would be to get the info from the\n # video_url or the current output, to avoid the iteration from the\n # current dir.\n filenames = os.listdir(target_dir)\n for name in filenames: # Find the filename of the downloaded video\n if name.startswith(filename_prefix):\n (basename, ext) = os.path.splitext(name)\n return basename\n return None", "def filePathToFileName(path):\n return os.path.splitext(os.path.basename(path))[0]", "def just_the_name(path):\n name = os.path.splitext(os.path.basename(path))[0]\n return name", "def dirname(path):\r\n return split(path)[0]", "def get_file(self):\n return self.dir + self.file_name + self.extension", "def listdir_full_path(directory):\n for f in os.listdir(directory):\n if not os.path.isdir(f):\n yield os.path.abspath(os.path.join(directory, f))", "def module_name_from_dir(dirname, err=True, files=None):\r\n if files is None:\r\n files = os.listdir(dirname)\r\n names = [file for file in files\r\n if file.endswith('.so') or file.endswith('.pyd')]\r\n if len(names) == 0 and not err:\r\n return None\r\n elif len(names) == 1:\r\n return os.path.join(dirname, names[0])\r\n else:\r\n raise ValueError(\"More than 1 compiled module in this directory:\" +\r\n dirname)", "def GetInputFilename(fname):\n if not indir or fname[:1] == '/':\n return fname\n for dirname in indir:\n pathname = os.path.join(dirname, fname)\n if os.path.exists(pathname):\n return pathname\n\n raise ValueError(\"Filename '%s' not found in input path (%s) (cwd='%s')\" %\n (fname, ','.join(indir), os.getcwd()))", "def get_filename(img_path):\n filename = os.path.splitext(img_path)\n return os.path.basename(filename[0])", "def root_name(filename: str):\n basename = os.path.basename(filename)\n basename_split = os.path.splitext(basename)\n root = None\n if len(basename_split) == 2:\n root = basename_split[0]\n return root", "def get_filename(filepath):\n return filepath.replace(\"{}\\\\\".format(RES_DIR), \"\")", "def Filename(fname):\n if fname.startswith('##/'):\n if chroot_path:\n fname = os.path.join(chroot_path, fname[3:])\n else:\n return None\n\n # Search for a pathname that exists, and return it if found\n if fname and not os.path.exists(fname):\n for path in search_paths:\n pathname = os.path.join(path, os.path.basename(fname))\n if os.path.exists(pathname):\n return pathname\n\n # If not found, just return the standard, unchanged path\n return fname", "def get_file_name(filepath): # need pytest\n filename, extension = os.path.splitext(filepath.split('/')[-1])\n return filename, extension", "def _unpacked_toplevel(self, dir):\n unpacked = glob.glob('%s/*' % dir)\n unpacked.extend(glob.glob(\"%s/.*\" % dir)) # include hidden files and folders\n # Check that dir contains nothing but a single folder:\n if len(unpacked) == 1 and os.path.isdir(unpacked[0]):\n return unpacked[0]\n else:\n return dir", "def directory_path(directory, file):\n try:\n file_name, file_extension = os.path.splitext(file)\n except Exception:\n file_extension = ''\n new_file_name = str(uuid.uuid4()) + file_extension\n return '{}/{}'.format(directory, new_file_name)", "def get_filename(file_extension=False) -> str:\n if file_extension == False:\n return filename[0:filename.find(\".json\")]\n else:\n return filename", "def build_file_path(dir_name, file_name, ext):\n return os.path.join(dir_name, os.path.extsep.join((file_name, ext)))", "def _possible_dir_name(contents):\n top_level_dirs = _find_top_level_directories(contents, sep='/')\n if len(top_level_dirs) == 0:\n raise InvalidFile, 'has no 
contents'\n elif len(top_level_dirs) > 1:\n raise MultipleTopLevels, 'more than one top levels: %s' % top_level_dirs\n d = abspath(top_level_dirs[0])\n assert exists(d), 'missing dir: %s' % d\n if not isdir(d):\n # eg: http://pypi.python.org/pypi/DeferArgs/0.4\n raise SingleFile, 'contains a single file: %s' % d\n return d", "def extract_file_name_from_source_full_path(source_full_path):\n destination_file_name = os.path.basename(source_full_path)\n return destination_file_name", "def find_template_filename(self, template_name):\n\n def next_file():\n filename = self.path / template_name\n yield filename\n try:\n exts = self.default_file_extensions\n except AttributeError:\n return\n\n strfilename = str(filename)\n for ext in exts:\n yield Path(strfilename + ext)\n\n for filename in next_file():\n if filename.is_file():\n return filename", "def get_filename(pattern, dir_, mode='last'):\n if isinstance(pattern, re.Pattern):\n files_found = filter(pattern.search, os.listdir(dir_))\n elif isinstance(pattern, str):\n files_found = [fn for fn in os.listdir(dir_) if pattern in fn]\n try:\n if mode == 'first':\n return min(files_found)\n elif mode == 'last':\n return max(files_found)\n else:\n raise ValueError('only first and last are valid modes for file '\n 'selection')\n except ValueError:\n return None", "def _get_file_name(url: str) -> str:\n url = url.strip('/')\n result = findall(r'/(\\w+\\.\\w+)[?|$]', url)\n if result:\n return result[-1]\n return url.split('/')[-1]", "def getDirectory(path):\n\tfrom os.path import split\n\tpath = normalizePath(path)\n\treturn split(path)[0]", "def rootname(filename):\n if os.path.sep not in filename:\n return ''\n else:\n file_root, _ = filename.split(os.path.sep, 1)\n return file_root", "def get_filename(self, base_filename: str) -> str:\n folder = self.prepare_folder()\n i = 0\n cartridge_number = self.config['info']['cartridge_number']\n while os.path.isfile(os.path.join(folder, base_filename.format(\n cartridge_number=cartridge_number,\n i=i))):\n i += 1\n\n return os.path.join(folder, base_filename.format(cartridge_number=cartridge_number, i=i))", "def obtain_filename(folder):\n fname_list = [f for f in os.listdir(folder) if os.path.isfile(os.path.join(folder, f))]\n if '.DS_Store' in fname_list: fname_list.remove('.DS_Store')\n fname_list.sort()\n return fname_list", "def file_path(file_name, path):\n return path.rstrip('\\/') + \"/{0}\".format(file_name) if path else os.getcwd() + \"/{0}\".format(file_name)", "def get_file_name(path):\n logger.debug('Function Successful: % s',\n 'get_file_name: get_file_name successfully called from local_save', extra=d)\n logger.info('Extracting file name...')\n\n split_path = path.split(\"/\")\n file_name = split_path[len(split_path) - 1]\n\n logger.debug('Returning: %s',\n 'get_file_name: returning the file name', extra=d)\n logger.info('File name extracted')\n\n return file_name", "def basefname(fname):\n return os.path.splitext(fname.split(\"\\\\\")[-1])[0]", "def base_filename(self):\n return self.filename.split('.')[0]", "def get_file_name(replay_dir, template_name):\n suffix = '.json' if not template_name.endswith('.json') else ''\n file_name = f'{template_name}{suffix}'\n return os.path.join(replay_dir, file_name)", "def get_parent_directory(src: str) -> str:\n return src[: src.rfind(os.path.sep)]", "def file_title(self):\n basename = os.path.basename(self.__path)\n index_dot = basename.rfind(\".\")\n if index_dot == 0:\n return basename[1:]\n return basename if index_dot < 0 else basename[:index_dot]", 
"def _getFileName(self, filePath):\r\n\t\thead, tail = ntpath.split(filePath)\r\n\t\treturn tail or ntpath.basename(head)", "def getFileDir(filepath):\n return os.path.dirname(filepath)", "def out_filename(self, filetype, dir, format='old'):\n filename = self.filename(filetype=filetype, format=format)\n return Path(dir) / filename", "def getFilePath(self, filename):\n idx = self._soundfiles.index(filename)\n return \"{}/{}\".format(self._soundpaths[idx], filename)", "def get_relative_file(in_file, directory, ext):\n filename_w_ext = os.path.basename(in_file)\n filename, file_extension = os.path.splitext(filename_w_ext)\n return os.path.join(directory, filename + '.' + ext)", "def infer_module_name(filename, fspath):\n filename, _ = os.path.splitext(filename)\n for f in fspath:\n short_name = f.relative_path(filename)\n if short_name:\n # The module name for __init__.py files is the directory.\n if short_name.endswith(os.path.sep + \"__init__\"):\n short_name = short_name[:short_name.rfind(os.path.sep)]\n return short_name.replace(os.path.sep, '.')\n # We have not found filename relative to anywhere in pythonpath.\n return ''", "def rootname(filename):\n name = os.path.basename(filename)\n root, ext = os.path.splitext(name)\n while ext:\n root, ext = os.path.splitext(root)\n return root", "def get_dir(path):\n extension = path.suffix\n if extension == '':\n return path\n else:\n return path.parent", "def getFirst(self):\n if self.first != None:\n return self.first.filename\n else:\n return None", "def format_path(file: str) -> str:\n return os.path.abspath([file.replace('/', os.path.sep)][0])" ]
[ "0.7349085", "0.7295504", "0.7234868", "0.6979272", "0.6957857", "0.68544894", "0.6841921", "0.67179877", "0.66709983", "0.66419125", "0.65900755", "0.65814984", "0.65654624", "0.65107995", "0.6479264", "0.64599794", "0.6448274", "0.64449316", "0.6437284", "0.6429767", "0.6404433", "0.63651747", "0.633397", "0.6325977", "0.6325803", "0.63227165", "0.63141817", "0.6297338", "0.6246351", "0.6246351", "0.6245149", "0.6229492", "0.6223425", "0.6204151", "0.61736596", "0.61543787", "0.61540455", "0.61415786", "0.61367214", "0.6118616", "0.6102335", "0.6093273", "0.6090041", "0.60792834", "0.60774696", "0.6077258", "0.60749525", "0.6072929", "0.6065325", "0.6064362", "0.6054854", "0.60531765", "0.604949", "0.60426414", "0.6039568", "0.6037753", "0.6035929", "0.60295403", "0.60260564", "0.60118526", "0.5978118", "0.59624314", "0.5944507", "0.59430766", "0.5936306", "0.59337485", "0.59325534", "0.5931486", "0.59268", "0.5923753", "0.59126246", "0.59123933", "0.5911418", "0.5903878", "0.59037596", "0.5890635", "0.5889453", "0.58830607", "0.5882757", "0.5873942", "0.5869314", "0.5864118", "0.5861192", "0.5860652", "0.58441836", "0.5831674", "0.58312166", "0.58272517", "0.58232594", "0.5813865", "0.57992023", "0.5798714", "0.57959443", "0.5793658", "0.5785412", "0.5784442", "0.5783249", "0.5782714", "0.5780334", "0.57790875" ]
0.6773866
7
Does everything you need to transcribe a podcast given the filename\n Download podcast, wait 40 seconds, change podcast to .wav, wait 10 seconds, remove the .mp3 file, run the transcription
def transcribeAll(service, url, fileName): if(service == "omny.fm"): url = url.replace(".mp3","") + ".mp3" subprocess.Popen("wget -c -O ./podcasts/" + fileName + ".mp3 " + url + " && sleep 40 && ffmpeg -i ./podcasts/" + fileName + ".mp3 -acodec pcm_s16le -ac 1 -ar 8000 ./podcasts/" + fileName + ".wav && sleep 10 && rm ./podcasts/" + fileName + ".mp3 && nohup ./online2-wav-nnet3-latgen-faster --online=false --do-endpointing=false " + "--frame-subsampling-factor=3 --config=online.conf --max-mem=2000000000 --max-active=7000 --beam=15.0 --lattice-beam=6.0 " + "--acoustic-scale=1.0 --word-symbol-table=words.txt final.mdl HCLG.fst 'ark:echo utterance-id" + fileName + " utterance-id" + fileName + "|' 'scp:echo utterance-id" + fileName + " ./podcasts/" + fileName + ".wav|' 'ark:/dev/null' &", shell=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def transcribe_audio_file(filename):\n url = 'https://api.nexiwave.com/SpeechIndexing/file/storage/' + USERNAME +'/recording/?authData.passwd=' + PASSWORD + '&auto-redirect=true&response=application/json'\n\n # To receive transcript in plain text, instead of html format, comment this line out (for SMS, for example)\n #url = url + '&transcriptFormat=html'\n\n\n # Ready to send:\n sys.stderr.write(\"Send audio for transcript with \" + url + \"\\n\")\n r = requests.post(url, files={'mediaFileData': open(filename,'rb')})\n data = r.json()\n transcript = data['text']\n foo = data['text']\n f = open('newf.txt', 'w')\n f.write(foo)\n f.close() \n # Perform your magic here:\n print \"Transcript for \"+filename+\"=\" + transcript", "async def download_audio(event):\n url = event.pattern_match.group(1)\n rmsg = await event.get_reply_message()\n if not url and rmsg:\n myString = rmsg.text\n url = re.search(\"(?P<url>https?://[^\\s]+)\", myString).group(\"url\")\n if not url:\n return await edit_or_reply(event, \"`What I am Supposed to find? Give link`\")\n codevent = await edit_or_reply(event, \"`Preparing to download...`\")\n reply_to_id = await reply_id(event)\n ytdl_data = await ytdl_down(codevent, audio_opts, url)\n if ytdl_data is None:\n return\n await codevent.edit(\n f\"`Preparing to upload song:`\\\n \\n**{ytdl_data['title']}**\\\n \\nby *{ytdl_data['uploader']}*\"\n )\n f = pathlib.Path(f\"{ytdl_data['title']}.mp3\".replace(\"|\", \"_\"))\n codthumb = pathlib.Path(f\"{ytdl_data['title']}.mp3.jpg\".replace(\"|\", \"_\"))\n if not os.path.exists(codthumb):\n codthumb = pathlib.Path(f\"{ytdl_data['title']}.mp3.webp\".replace(\"|\", \"_\"))\n if not os.path.exists(codthumb):\n codthumb = None\n c_time = time.time()\n ul = io.open(f, \"rb\")\n uploaded = await event.client.fast_upload_file(\n file=ul,\n progress_callback=lambda d, t: asyncio.get_event_loop().create_task(\n progress(d, t, codevent, c_time, \"upload\", file_name=f)\n ),\n )\n ul.close()\n attributes, mime_type = await fix_attributes(f, ytdl_data, supports_streaming=True)\n media = types.InputMediaUploadedDocument(\n file=uploaded,\n mime_type=mime_type,\n attributes=attributes,\n thumb=await event.client.upload_file(codthumb) if codthumb else None,\n )\n await event.client.send_file(\n event.chat_id,\n file=media,\n reply_to=reply_to_id,\n caption=ytdl_data[\"title\"],\n supports_streaming=True,\n force_document=False,\n )\n os.remove(f)\n if codthumb:\n os.remove(codthumb)\n await codevent.delete()", "def podcast_download(self):\r\n warnings.filterwarnings(\"ignore\", category=UnicodeWarning)\r\n now = datetime.datetime.now()\r\n\r\n for podcast_file in self.podcast_list:\r\n published, name, link, title = podcast_file\r\n if self.podcast_list != []:\r\n line_file = (published + ';' + title + ';' + name + ';' + link).encode(\"utf-8\") \r\n if line_file in open(self.download_log).read():\r\n pass\r\n else:\r\n title = unicodedata.normalize('NFKD', title).encode('ascii', 'ignore')\r\n download_folder = os.path.join('downloads', title)\r\n if not os.path.exists(download_folder): \r\n os.makedirs(download_folder)\r\n try:\r\n published = str(parser.parse(published))[:10]\r\n except IOError as error:\r\n print 'Error' + (error) + ': File - ' + str(title)\r\n download_folder = os.path.join(download_folder, published)\r\n if not os.path.exists(download_folder): \r\n os.makedirs(download_folder)\r\n namefile_unicode = link[link.rfind('/')+1:]\r\n namefile_str = unicodedata.normalize('NFKD', \r\n namefile_unicode).encode('ascii', 
'ignore')\r\n namefile_str = namefile_str.decode('utf-8', 'ignore').encode(\"utf-8\")\r\n if '.mp3' in namefile_str:\r\n len_name = namefile_str.index('.mp3')\r\n elif '.MP3' in namefile_str:\r\n len_name = namefile_str.index('.MP3')\r\n namefile_str = namefile_str[:len_name + 4]\r\n fileoutput = os.path.join(download_folder, namefile_str)\r\n name = unicodedata.normalize('NFKD', name).encode('ascii', 'ignore')\r\n print str(published) + '; ' + name\r\n ## downlink\r\n download_file(link, fileoutput) \r\n ## tagging\r\n mp3_tagging(fileoutput, podcast_file)\r\n ## write log\r\n write_file(self.download_log, line_file)\r\n end = datetime.datetime.now()\r\n print '\\r' + 'Download Time = ' + str(end-now) + '\\r'\r\n return None", "def main():\n # transcribe_audio()\n summarize()", "def subprocess_transcribe_function( fname, voicenote_filename_regex ):\n if not hasattr( subprocess_transcribe_function, \"client\" ):\n # Init function failed.\n return None\n if subprocess_transcribe_function.verbose:\n # TODO: We should (probably?) queue these messages and print() on a single thread/process...but....\n print( \"Transcribing {}...\".format( fname ) )\n try:\n ret = ( recording_date_from_full_path( fname, voicenote_filename_regex ), fname, transcribe_wav( fname, client=subprocess_transcribe_function.client ) )\n except BaseException as e:\n # Do NOT kill the program. We'll leave the audio file in the unprocessed directory.\n print( \"ERROR:\" )\n print( e )\n ret = None\n return ret", "def main():\n st.info(\n \"This webpage lets you upload wav audio file and transribe it to Amharic, CHECK THAT OUT !!\")\n st.markdown(STYLE, unsafe_allow_html=True)\n st.header(\"Upload audio file\")\n file = st.file_uploader(\"Audio file\", type=FILE_TYPES)\n show_file = st.empty()\n if not file:\n show_file.info(\"Please upload a file of type: \" +\n \", \".join(FILE_TYPES))\n return\n\n file_type = get_file_type(file)\n if file_type == FileType.PYTHON:\n st.code(file.getvalue())\n\n elif file_type == FileType.SOUND:\n # st.code(file.getvalue())\n audio_bytes = file.read()\n st.audio(audio_bytes, format=\"audio/ogg\")\n\n else:\n data = pd.read_csv(file)\n st.dataframe(data.head(10))\n\n with open(os.path.join(\"./tempfile\", file.name), \"wb\") as f:\n f.write(file.getbuffer())\n st.success(\"Processing File..\")\n\n st.header(\"Transcribe audio\")\n if st.button('Transcribe'):\n st.write(\"\")\n with st.spinner('wait for it ...'):\n time.sleep(60)\n st.success('Done!')\n else:\n st.write('')\n\n # if file:\n # token, t_id = upload_file(file)\n # result = {}\n # #polling\n # sleep_duration = 1\n # percent_complete = 0\n # progress_bar = st.progress(percent_complete)\n # st.text(\"Currently in queue\")\n # while result.get(\"status\") != \"processing\":\n # percent_complete += sleep_duration\n # time.sleep(sleep_duration)\n # progress_bar.progress(percent_complete/10)\n # result = get_text(token,t_id)\n\n # sleep_duration = 0.01\n\n # for percent in range(percent_complete,101):\n # time.sleep(sleep_duration)\n # progress_bar.progress(percent)\n\n # with st.spinner(\"Processing.....\"):\n # while result.get(\"status\") != 'completed':\n # result = get_text(token,t_id)\n\n # st.balloons()\n # st.header(\"Transcribed Text\")\n # st.subheader(result['text'])\n\n file.close()", "def main():\n\n # Parse arguments\n parser = OptionParser()\n parser.add_option('-n', '--subscription_key', dest='subscription_key',\n help='subscription_key for authentication')\n parser.add_option('-t', '--text', dest='text',\n help='text 
to synthesize')\n parser.add_option('-l', '--language', dest='language',\n help='language')\n parser.add_option('-g', '--gender', dest='gender',\n help='gender')\n parser.add_option('-d', '--directory', dest='directory',\n help='directory to store the file')\n (options, args) = parser.parse_args()\n subscription_key = options.subscription_key\n text = options.text\n language = options.language\n gender = options.gender\n directory = options.directory\n\n # Perform sanity checks on options\n validate_options(subscription_key, text)\n\n if not directory:\n directory = default_directory\n\n if not language:\n language = default_language\n\n if not gender:\n gender = default_gender\n\n # format = 'riff-16khz-16bit-mono-pcm'\n format = 'riff-8khz-8bit-mono-mulaw'\n\n # lang = 'en-AU'\n # gender = 'Female'\n tts_msspeak = MSSpeak(subscription_key, '/tmp/')\n tts_msspeak.set_cache(False)\n output_filename = tts_msspeak.speak(text, language, gender, format)\n\n print 'Recorded TTS to %s%s' % (directory, output_filename)", "def process_speak_listen(device_index, mp3_filename, text, record, flag):\n\n mp3_filename = mp3_filename + \".mp3\"\n try:\n tts = gTTS(text=text, lang='en', slow=False)\n tts.save(mp3_filename)\n playsound(mp3_filename)\n os.remove(mp3_filename)\n\n if flag != 1:\n with sr.Microphone(device_index=device_index) as source:\n record.adjust_for_ambient_noise(source, duration=1)\n print(\"Speak:\")\n os.system(\"zenity --progress --width=400 --height=200 --title='Speak Now' \"\n \"--text='Speak Now......No need to click OK button' --no-cancel &\")\n try:\n audio = record.listen(source, timeout=5)\n text = record.recognize_google(audio)\n os.system(\"ps -ef|grep zenity|awk '{print $2}'|head -1|xargs kill -9\")\n print(text)\n except LookupError:\n os.system(\"ps -ef|grep zenity|awk '{print $2}'|head -1|xargs kill -9\")\n print(\"ERROR : LookupError - Could not able to understand\")\n text = None\n except speech_recognition.WaitTimeoutError:\n os.system(\"ps -ef|grep zenity|awk '{print $2}'|head -1|xargs kill -9\")\n print(\"ERROR : WaitTimeoutError - Could not able to listen anything for 5 seconds\")\n text = None\n except speech_recognition.UnknownValueError:\n os.system(\"ps -ef|grep zenity|awk '{print $2}'|head -1|xargs kill -9\")\n print(\"ERROR : UnknownValueError - Could not able to listen anything for 5 seconds\")\n text = None\n except gtts.tts.gTTSError:\n print(\"ERROR : Connection Error : No internet connection.\")\n exit_program()\n except PermissionError:\n print(\"ERROR : No permission\")\n exit_program()\n\n return text", "def execute(self, **kwargs):\n if \"text\" not in kwargs:\n return ''\n phrase = str(kwargs[\"text\"])\n \n names = {\n \"callie\": \"6.5\",\n \"lawrence\": \"8.5\"\n }\n name = \"callie\"\n\n #TODO find a better way of implementing TTS\n ttsfd, ttsfile = tempfile.mkstemp(\".wav\")\n outfile, outname = tempfile.mkstemp(\".wav\")\n try:\n \n tts = sp.Popen(['/opt/swift/bin/swift', '-o', ttsfile, '-n', name, phrase], stdout=sp.PIPE, stderr=sp.PIPE)\n# cmd = ('/opt/swift/bin/swift \"' + phrase + '\" -o ' + ttsname + ' && sox -V1 ' +\n# tmp + ' -t wav ' + tmp2 + ' trim 8 ;')\n# p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE, shell=True)\n# out, err = p.communicate()\n# if len(err) > 0:\n# return err\n\n out, err = tts.communicate()\n if not err:\n sox = sp.Popen(['sox', '-V1', ttsfile, '-t', 'wav', outname, 'trim', names[name]], stdout=sp.PIPE, stderr=sp.PIPE)\n out, err = sox.communicate()\n\n player = gst.element_factory_make(\"playbin2\", 
\"player\")\n bus = player.get_bus()\n bus.add_signal_watch()\n\n mainloop = gobject.MainLoop()\n\n def quit(bus, message):\n mainloop.quit()\n\n bus.connect(\"message::eos\", quit)\n bus.connect(\"message::error\", quit)\n player.set_property(\"uri\", 'file://' + outname)\n player.set_state(gst.STATE_PLAYING)\n\n try:\n mainloop.run()\n finally:\n player.set_state(gst.STATE_NULL)\n\n finally:\n try:\n os.remove(ttsfile)\n except OSError as err:\n print e\n try:\n os.remove(outname)\n except IOError as err:\n print err", "def transcribe_streaming_voice_activity_timeouts(\n project_id: str,\n speech_start_timeout: int,\n speech_end_timeout: int,\n audio_file: str,\n) -> cloud_speech.StreamingRecognizeResponse:\n # Instantiates a client\n client = SpeechClient()\n\n # Reads a file as bytes\n with open(audio_file, \"rb\") as f:\n content = f.read()\n\n # In practice, stream should be a generator yielding chunks of audio data\n chunk_length = len(content) // 20\n stream = [\n content[start : start + chunk_length]\n for start in range(0, len(content), chunk_length)\n ]\n audio_requests = (\n cloud_speech.StreamingRecognizeRequest(audio=audio) for audio in stream\n )\n\n recognition_config = cloud_speech.RecognitionConfig(\n auto_decoding_config=cloud_speech.AutoDetectDecodingConfig(),\n language_codes=[\"en-US\"],\n model=\"long\",\n )\n\n # Sets the flag to enable voice activity events and timeout\n speech_start_timeout = duration_pb2.Duration(seconds=speech_start_timeout)\n speech_end_timeout = duration_pb2.Duration(seconds=speech_end_timeout)\n voice_activity_timeout = (\n cloud_speech.StreamingRecognitionFeatures.VoiceActivityTimeout(\n speech_start_timeout=speech_start_timeout,\n speech_end_timeout=speech_end_timeout,\n )\n )\n streaming_features = cloud_speech.StreamingRecognitionFeatures(\n enable_voice_activity_events=True, voice_activity_timeout=voice_activity_timeout\n )\n\n streaming_config = cloud_speech.StreamingRecognitionConfig(\n config=recognition_config, streaming_features=streaming_features\n )\n\n config_request = cloud_speech.StreamingRecognizeRequest(\n recognizer=f\"projects/{project_id}/locations/global/recognizers/_\",\n streaming_config=streaming_config,\n )\n\n def requests(config: cloud_speech.RecognitionConfig, audio: list) -> list:\n yield config\n for message in audio:\n sleep(0.5)\n yield message\n\n # Transcribes the audio into text\n responses_iterator = client.streaming_recognize(\n requests=requests(config_request, audio_requests)\n )\n\n responses = []\n for response in responses_iterator:\n responses.append(response)\n if (\n response.speech_event_type\n == cloud_speech.StreamingRecognizeResponse.SpeechEventType.SPEECH_ACTIVITY_BEGIN\n ):\n print(\"Speech started.\")\n if (\n response.speech_event_type\n == cloud_speech.StreamingRecognizeResponse.SpeechEventType.SPEECH_ACTIVITY_END\n ):\n print(\"Speech ended.\")\n for result in response.results:\n print(f\"Transcript: {result.alternatives[0].transcript}\")\n\n return responses", "def play_audio(filename):\n os.system(AUDIOPLAYER + ' ' + filename)", "def do_play(*_args):\n print(last_wav_path)\n if last_wav_path and last_wav_path.is_file():\n threading.Thread(\n target=lambda: subprocess.check_call(\n [\"aplay\", \"-q\", str(last_wav_path)]\n )\n ).start()", "async def play(self, ctx, *, filename: str):\r\n if not ctx.voice_client:\r\n await self.connect(ctx)\r\n if filename not in self.audio_files:\r\n await ctx.send(\"File {0} not found\".format(filename))\r\n await self.audiofiles(ctx)\r\n else:\r\n 
ctx.voice_client.play(discord.FFmpegPCMAudio(source=\"{0}{1}.mp3\".format(self.audio_base_dir, filename)))\r\n await ctx.message.delete()", "def playMessage(msg):\n tts = gTTS(msg, lang=\"pt-br\")\n file = \"./audios/temp.mp3\"\n\n tts.save(file)\n player = MediaPlayer(file)\n player.play()\n sleep(10)\n os.remove(file)", "def transcribe(config):\n\n long_mode = True\n\n if 'audio_data' not in config:\n raise KeyError(\"`audio_data` not specified for transcription operation.\")\n\n if 'timeout' not in config:\n raise KeyError(\"`timeout` not specified for transcription operation.\")\n\n try:\n if config.pop('audio_duration') < 60: \n long_mode = False\n except KeyError:\n pass\n\n if long_mode:\n print(\"Running in long audio duration mode (audio is >60 seconds duration)...\")\n print(\"Uploading file...\")\n remote_object = gcloud_upload_file(config['audio_data'], config['storage_bucket'])\n file_name = remote_object.rsplit('/', 1)[-1]\n\n config['audio_data'] = \"gs://%s/%s\" % (config['storage_bucket'], file_name)\n storage_bucket = config.pop('storage_bucket')\n\n print(\"Transcribing file...\")\n result = gcloud_transcribe_long(config)\n\n print(\"Transcription successful, cleaning up...\")\n print(\"Deleting uploaded GCS file...\")\n gcloud_delete_file(file_name, storage_bucket)\n else:\n print(\"Transcribing file...\")\n config.pop('timeout')\n config.pop('storage_bucket')\n result = gcloud_transcribe_short(config)\n\n return result", "def main():\n destination = Path(argv[1])\n source_files = destination.glob(\"**/*.wma\")\n for file in source_files:\n new_name = file.name.rsplit(\".\", maxsplit=1)[0] + \".flac\"\n dest = str(file.parent / new_name)\n cmd = list(map(str, [\"avconv\", \"-i\", file, dest]))\n if platform == \"win32\":\n print(\"Running on windows... on Unix I'd run the following command:\")\n print(cmd)\n else:\n that = Popen(cmd)\n that.wait()", "def act(self, audio_file=None):\n #file as source\n if self.src == 'file':\n if audio_file is None:\n raise ValueError(\"Please provide a audio_file\")\n return None\n elif not os.path.exists(audio_file):\n raise FileNotFoundError(\"Specified file not found\")\n return None\n else:\n file = speech_recognition.AudioFile(audio_file)\n with file:\n speech = self.recog_obj.record(file)\n \n #mic as source\n elif self.src == 'mic':\n if audio_file is not None:\n print(\"WARNING: source is set to device microphone. Audio file will be ignored\\n\")\n \n try:\n with self.mic_obj:\n print(\"Speak into the mic....\\n\")\n self.recog_obj.adjust_for_ambient_noise(self.mic_obj)\n speech = self.recog_obj.listen(self.mic_obj)\n #if microphone is not detected\n except OSError:\n print(\"Error: Microphone not detected\")\n return None\n \n \n try:\n print(\"Please wait while we transcribe...\\n\")\n text = self.recog_obj.recognize_google(speech, language='en', show_all=self.debug)\n \n #if audio is not detected\n except speech_recognition.UnknownValueError:\n print(\"Error: Sorry audio not detected by device microphone\")\n return None\n \n #if there is connection issue or api issue\n except speech_recognition.RequestError:\n print(\"Error: API for transcription is not reachable. 
There may be some connection issue or server side issue\")\n return None\n \n #for imposing various rules to text \n #But if debug mode is enabled, transcript variable will store a dictionary of various transcriptions \n #along with their confidence probabilities, so conversion rules are disabled meanwhile \n transcript = self.tcr.deconcat(text) if not self.debug else text\n return transcript", "def main(speech_files, output_directory, lexical = False, enable_proxy = False, *argv):\n speech_config = speechsdk.SpeechConfig(subscription = pa.stt_key, region = pa.stt_region)\n # If necessary, you can enable a proxy here: \n # set_proxy(hostname: str, port: str, username: str, password: str)\n if enable_proxy: \n speech_config.set_proxy(argv[0], argv[1], argv[2], argv[3])\n # Set speech service properties, requesting the detailed response format to make it compatible with lexical format, if wanted\n speech_config.set_service_property(name='format', value='detailed', channel=speechsdk.ServicePropertyChannel.UriQueryParameter)\n if pa.stt_endpoint != \"\": \n speech_config.endpoint_id = pa.stt_endpoint\n logging.info(f'[INFO] - Starting to transcribe {len(next(os.walk(speech_files))[2])} audio files')\n results = []\n filenames = []\n for index, audio in enumerate(glob.iglob(f'{speech_files}*av')):\n result, filename = request_endpoint(audio, speech_config, output_directory, lexical)\n results.append(result)\n filenames.append(filename)\n # Check the result\n return zip(filenames, results)", "async def transcribe_stream(args: argparse.Namespace, core: Voice2JsonCore) -> None:\n from rhasspyasr import Transcription\n from rhasspysilence import VoiceCommand, VoiceCommandResult\n\n # Make sure profile has been trained\n assert core.check_trained(), \"Not trained\"\n\n wav_sink = None\n wav_dir = None\n if args.wav_sink:\n wav_sink_path = Path(args.wav_sink)\n if wav_sink_path.is_dir():\n # Directory to write WAV files\n wav_dir = wav_sink_path\n else:\n # Single WAV file to write\n wav_sink = open(args.wav_sink, \"wb\")\n\n event_sink = None\n if args.event_sink:\n if args.event_sink == \"-\":\n event_sink = sys.stdout\n else:\n event_sink = open(args.event_sink, \"w\")\n\n # Record command\n recorder = core.get_command_recorder()\n recorder.start()\n\n voice_command: typing.Optional[VoiceCommand] = None\n\n # Expecting raw 16-bit, 16Khz mono audio\n audio_source = await core.make_audio_source(args.audio_source)\n\n # Audio settings\n sample_rate = int(pydash.get(core.profile, \"audio.format.sample-rate-hertz\", 16000))\n sample_width = (\n int(pydash.get(core.profile, \"audio.format.sample-width-bits\", 16)) // 8\n )\n channels = int(pydash.get(core.profile, \"audio.format.channel-count\", 1))\n\n # Get speech to text transcriber for profile\n transcriber = core.get_transcriber(open_transcription=args.open, debug=args.debug)\n\n # Set after a transcription has been printed\n transcription_printed = threading.Event()\n\n # Run transcription in separate thread\n frame_queue: \"Queue[typing.Optional[bytes]]\" = Queue()\n\n def audio_stream() -> typing.Iterable[bytes]:\n \"\"\"Read audio chunks from queue and yield.\"\"\"\n frames = frame_queue.get()\n while frames:\n yield frames\n frames = frame_queue.get()\n\n def transcribe_proc():\n \"\"\"Transcribe live audio stream indefinitely.\"\"\"\n while True:\n # Get result of transcription\n transcribe_result = transcriber.transcribe_stream(\n audio_stream(), sample_rate, sample_width, channels\n )\n\n _LOGGER.debug(\"Transcription result: %s\", 
transcribe_result)\n\n transcribe_result = transcribe_result or Transcription.empty()\n transcribe_dict = dataclasses.asdict(transcribe_result)\n transcribe_dict[\"timeout\"] = is_timeout\n\n print_json(transcribe_dict)\n transcription_printed.set()\n\n threading.Thread(target=transcribe_proc, daemon=True).start()\n\n # True if current voice command timed out\n is_timeout = False\n\n # Number of events for pending voice command\n event_count = 0\n\n # Number of transcriptions that have happened\n num_transcriptions = 0\n\n print(\"Ready\", file=sys.stderr)\n\n try:\n chunk = await audio_source.read(args.chunk_size)\n while chunk:\n # Reset event\n transcription_printed.clear()\n\n # Look for speech/silence\n voice_command = recorder.process_chunk(chunk)\n\n if event_sink:\n # Print outstanding events\n for event in recorder.events[event_count:]:\n print_json(dataclasses.asdict(event), out_file=event_sink)\n\n event_count = len(recorder.events)\n\n if voice_command:\n is_timeout = voice_command.result == VoiceCommandResult.FAILURE\n\n # Force transcription\n frame_queue.put(None)\n\n # Reset\n audio_data = recorder.stop()\n if wav_dir:\n # Write WAV to directory\n wav_path = (wav_dir / time.strftime(args.wav_filename)).with_suffix(\n \".wav\"\n )\n wav_bytes = core.buffer_to_wav(audio_data)\n wav_path.write_bytes(wav_bytes)\n _LOGGER.debug(\"Wrote %s (%s byte(s))\", wav_path, len(wav_bytes))\n elif wav_sink:\n # Write to WAV file\n wav_bytes = core.buffer_to_wav(audio_data)\n wav_sink.write(wav_bytes)\n _LOGGER.debug(\n \"Wrote %s (%s byte(s))\", args.wav_sink, len(wav_bytes)\n )\n\n num_transcriptions += 1\n\n # Wait for transcription to be printed\n transcription_printed.wait(timeout=args.timeout)\n\n # Check exit count\n if (args.exit_count is not None) and (\n num_transcriptions >= args.exit_count\n ):\n _LOGGER.debug(\"Exit count reached\")\n break\n\n recorder.start()\n else:\n # Add to current command\n frame_queue.put(chunk)\n\n # Next audio chunk\n chunk = await audio_source.read(args.chunk_size)\n finally:\n transcriber.stop()\n\n try:\n await audio_source.close()\n except Exception:\n pass", "def main():\n\n start_program()\n yes_syn_words, no_syn_words, stop_words, record, mp3_filename, text, device_index, output_file = \\\n process_parameter_set()\n stand_alone_flag = process_check_input_argument()\n process_speak_listen(device_index, mp3_filename, text, record, flag=1)\n text = process_name(device_index, mp3_filename, record)\n input_details = process_speak_listen(device_index, mp3_filename, text, record, flag=0)\n response = process_input_details(device_index, input_details, mp3_filename, record, yes_syn_words, no_syn_words,\n stop_words)\n process_output_file_write(output_file, response)\n process_delete_mp3_output_files(stand_alone_flag)\n exit_program()", "def pron(word):\n\n return send_from_directory('prons', word + \".mp3\", mimetype=\"audio/mpeg\")", "def transcribe_gcs(gcs_uri):\n from google.cloud import speech\n from google.cloud.speech import enums\n from google.cloud.speech import types\n client = speech.SpeechClient()\n\n audio = types.RecognitionAudio(uri=gcs_uri)\n config = types.RecognitionConfig(\n encoding=enums.RecognitionConfig.AudioEncoding.FLAC,\n enable_word_time_offsets=True,\n #sample_rate_hertz=32000,\n language_code='en-US')\n\n operation_start_time = time.time()\n operation = client.long_running_recognize(config, audio)\n\n\n print('Waiting for operation to complete...')\n response = operation.result(timeout=None)\n operation_end_time = 
time.time()\n operation_elapsed_time = operation_end_time - operation_start_time\n operation_time_string = format_time_string(operation_elapsed_time)\n\n last_result_index = len(response.results)-1\n last_word_index = len(response.results[last_result_index].alternatives[0].words)-1\n audio_duration = response.results[last_result_index].alternatives[0].words[last_word_index].end_time.seconds\n audio_duration_string = format_time_string(audio_duration)\n\n counter = 1\n srt_file_name = gcs_uri[gcs_uri.rfind(\"/\")+1:gcs_uri.rfind(\".mp4-audio.\")]+\".srt\"\n srt_file = open(srt_file_name, \"w\")\n\n srt_file_name2 = gcs_uri[gcs_uri.rfind(\"/\") + 1:gcs_uri.rfind(\".mp4-audio.\")] + \"2.srt\"\n srt_file2 = open(srt_file_name2, \"w\")\n\n transcription_file_name = gcs_uri[gcs_uri.rfind(\"/\") + 1:gcs_uri.rfind(\"-audio.\")] + \"-transcription.txt\"\n transcription_file = open(transcription_file_name, \"w\")\n\n word_list = concat_word_list(response.results)\n phrase_list = make_phrase_list(word_list)\n write_srt_file(srt_file2, phrase_list)\n\n # Print the first alternative of all the consecutive results.\n for result in response.results:\n transcript = result.alternatives[0].transcript.strip()\n seconds = result.alternatives[0].words[0].start_time.seconds\n last_word_index = len(result.alternatives[0].words)-1\n end_seconds = result.alternatives[0].words[last_word_index].end_time.seconds\n outstring = format_time_string(seconds) + \" - \" +transcript\n print(outstring + \"\\n\")\n transcription_file.write(outstring + \"\\n\\n\")\n\n # now write to srt file\n srt_file.write(str(counter)+\"\\n\")\n start_time_code = format_time_string(seconds) + \",000\"\n\n end_time_code = format_time_string(end_seconds) + \",000\"\n time_code = start_time_code + \" --> \" + end_time_code\n srt_file.write(time_code + \"\\n\")\n srt_file.write(transcript + \"\\n\\n\")\n counter += 1\n #print('Confidence: {}'.format(result.alternatives[0].confidence))\n srt_file.close()\n srt_file2.close()\n transcription_file.close()\n print(\"\\n------------------------------------------------\")\n print(\"Audio file length: \" + audio_duration_string)\n print(\"Transcribe operation running time: \" + operation_time_string)\n print(\"------------------------------------------------\")", "def process_transcript(transcript_label):\n transcript_key = f\"{transcript_label}.json\"\n\n # Load Transcribe output from S3.\n raw_transcript = get_transcribe_output(transcript_key)\n\n # Parse to assign speaker parts.\n speaker_parts = assign_speakers(raw_transcript)\n\n # Identify Karen and Georgia.\n assigned = karen_or_georgia(speaker_parts)\n\n # Update the full transcript.\n build_transcript(assigned)\n\n # Upload the latest transcript to S3.\n s3 = boto3.resource(\"s3\")\n s3.Bucket(os.getenv(\"S3_BUCKET\")).upload_file(\"main_transcript.txt\", \"main_transcript.txt\")", "def wavplay(filename):\n\tif (os.path.isfile(filename) == False): # raise error if wrong input file\n\t\tprint(\"Input file does not exist. 
Make sure you computed the analysis/synthesis\")\n\telse:\n\t\tif sys.platform == \"linux\" or sys.platform == \"linux2\":\n\t\t # linux\n\t\t subprocess.call([\"aplay\", filename])\n\n\t\telif sys.platform == \"darwin\":\n\t\t\t# OS X\n\t\t\tsubprocess.call([\"afplay\", filename])\n\t\telse:\n\t\t\tprint(\"Platform not recognized\")", "def gravar():\n frase = input(\"Digite a frase a ser gravada: \")\n filename = frase.replace(\" \", \"\").lower() + '.mp3'\n txt = \"{};{}\\n\".format(frase, filename)\n\n # adiciona texto ao arquivo\n with open('frases', 'a') as file:\n file.write(txt)\n\n play_async(text_to_file(frase, filename))", "def track_01():\n sonos.play_uri('http://mp3stream1.apasf.apa.at:8000', title='FM4.ORF.AT', force_radio=True)\n return \"Ok\"", "def transcribe_recording(file_name, transcript_label):\n s3_key = f\"recordings/{file_name}\"\n\n # Load to S3.\n load_recording_to_s3(file_name)\n\n # Start the transcription job.\n start_transcribe_recording_job(s3_key, transcript_label)", "def synthesize_text_file(text_file):\n from google.cloud import texttospeech\n client = texttospeech.TextToSpeechClient()\n\n with open(text_file, 'r') as f:\n text = f.read()\n input_text = texttospeech.types.SynthesisInput(text=text)\n\n # Note: the voice can also be specified by name.\n # Names of voices can be retrieved with client.list_voices().\n voice = texttospeech.types.VoiceSelectionParams(\n language_code='en-AU',\n name='en-AU-Wavenet-C',\n ssml_gender=texttospeech.enums.SsmlVoiceGender.NEUTRAL)\n\n audio_config = texttospeech.types.AudioConfig(\n audio_encoding=texttospeech.enums.AudioEncoding.MP3,\n speaking_rate=0.80)\n\n response = client.synthesize_speech(input_text, voice, audio_config)\n\n # The response's audio_content is binary.\n filename = text_file\n try:\n filename = filename.replace('.txt', '.mp3')\n filename = filename.replace('../Articles/', '')\n filename = filename.replace(';', ' ')\n filename = filename.replace(\"'\", \" \")\n except Exception as e:\n print(e)\n print('Check replace command in synthesize_file.py file')\n\n with open(filename, 'wb') as out:\n out.write(response.audio_content)\n print(f'Audio content written to file: \\n{filename}\\n')", "def async_transcribe(audio_file_paths,\n bucket_name,\n output_tsv_path,\n sample_rate,\n language_code,\n speaker_count=0,\n begin_sec=0.0):\n tmp_audio_file = tempfile.mktemp(suffix=\".flac\")\n print(\"Temporary audio file: %s\" % tmp_audio_file)\n audio_duration_s = concatenate_audio_files(audio_file_paths, tmp_audio_file)\n\n storage_client = storage.Client()\n bucket = storage_client.bucket(bucket_name)\n destination_blob_name = os.path.basename(tmp_audio_file)\n blob = bucket.blob(destination_blob_name)\n print(\"Uploading %s to GCS bucket %s\" % (tmp_audio_file, bucket_name))\n blob.upload_from_filename(tmp_audio_file)\n gcs_uri = \"gs://%s/%s\" % (bucket_name, destination_blob_name)\n print(\"Uploaded to GCS URI: %s\" % gcs_uri)\n\n client = speech.SpeechClient()\n audio = speech.RecognitionAudio(uri=gcs_uri)\n enable_speaker_diarization = speaker_count > 0\n config = speech.RecognitionConfig(\n encoding=speech.RecognitionConfig.AudioEncoding.FLAC,\n sample_rate_hertz=sample_rate,\n language_code=language_code,\n enable_speaker_diarization=enable_speaker_diarization,\n diarization_speaker_count=speaker_count)\n\n operation = client.long_running_recognize(config=config, audio=audio)\n timeout_s = int(audio_duration_s * 0.25)\n print(\n \"Waiting for async ASR operation to complete \"\n \"(audio duration: 
%.3f s; ASR timeout: %d s)...\" %\n (audio_duration_s, timeout_s))\n response = operation.result(timeout=timeout_s)\n blob.delete()\n os.remove(tmp_audio_file)\n\n utterances = []\n for result in response.results:\n # The first alternative is the most likely one for this portion.\n alt = result.alternatives[0]\n utterances.append(alt.transcript)\n print(u\"Transcript: {}\".format(alt.transcript))\n diarized_words = [(\n word.word, word.speaker_tag, word.start_time.total_seconds(),\n word.end_time.total_seconds()) for word in alt.words]\n # print(\"Confidence: {}\".format(result.alternatives[0].confidence))\n\n regrouped_utterances = regroup_utterances(utterances, diarized_words)\n with open(output_tsv_path, \"w\" if not begin_sec else \"a\") as f:\n if not begin_sec:\n # Write the TSV header.\n f.write(tsv_data.HEADER + \"\\n\")\n utterance_counter = 0\n for (regrouped_utterance,\n speaker_index, start_time_sec, end_time_sec) in regrouped_utterances:\n utterance_counter += 1\n line = \"%.3f\\t%.3f\\t%s\\t%s [U%d] [Speaker #%d]\" % (\n start_time_sec + begin_sec,\n end_time_sec + begin_sec,\n tsv_data.SPEECH_TRANSCRIPT_TIER,\n regrouped_utterance,\n utterance_counter,\n speaker_index)\n print(line)\n f.write(line + \"\\n\")", "def main():\n input_video = sys.argv[1]\n input_audio = sys.argv[2]\n output_video = sys.argv[3]\n set_audio(input_video, input_audio, output_video)", "def play_audio():\n directory = os.fsencode(MINI_PATH)\n print(directory)\n adp= []\n # lst = os.listdir(directory)\n # lst.sort()\n for file in os.listdir(directory):\n filename = os.fsdecode(file)\n #print(file)\n\n if filename.endswith(\".mp3\"): \n adp.append(MINI_PATH+filename)\n #print(adp)\n adp.sort()\n print(\"ADP: \", adp)\n x = \"|\".join(adp)\n print( f'concat:{x}')\n subprocess.call(['ffmpeg', '-i', f'concat:{x}', '-acodec', 'copy', RESULT_PATH])\n \n for file in os.listdir(directory):\n filename = os.fsdecode(file)\n print(filename)\n if filename.endswith(\".mp3\"):\n os.remove(MINI_PATH+filename)", "def get_large_audio_transcription(path):\n # open the audio file using pydub\n r = sr.Recognizer()\n sound = AudioSegment.from_mp3(path)\n sound.export(\"tmp.wav\", format=\"wav\")\n sound = AudioSegment.from_wav('tmp.wav')\n # split audio sound where silence is 700 miliseconds or more and get chunks\n chunks = split_on_silence(sound,\n # experiment with this value for your target audio file\n min_silence_len = 500,\n # adjust this per requirement\n silence_thresh = sound.dBFS-14,\n # keep the silence for 1 second, adjustable as well\n keep_silence=500,\n )\n folder_name = \"audio-chunks\"\n # create a directory to store the audio chunks\n if not os.path.isdir(folder_name):\n os.mkdir(folder_name)\n whole_text = \"\"\n\n chapter=(str(path.split('/')[-1])).split('_')[3]\n # if chapter == '01':\n # target=2\n # else:\n # target=1\n target=2\n # process each chunk\n for i, audio_chunk in enumerate(chunks, start=1):\n # export audio chunk and save it in\n # the `folder_name` directory.\n if i==1:\n chunk_filename = os.path.join(folder_name, f\"chunk{i}.wav\")\n audio_chunk.export(chunk_filename, format=\"wav\")\n # recognize the chunk\n with sr.AudioFile(chunk_filename) as source:\n audio_listened = r.record(source)\n # try converting it to text\n try:\n text = r.recognize_google(audio_listened,language=\"en-US\")\n\n except sr.UnknownValueError as e:\n print(\"Error:\", str(e))\n else:\n #text = f\"{text.capitalize()}. 
\"\n #print(chunk_filename, \":\", text)\n whole_text += text\n # return the text for all chunks detected\n else:\n chunk_filename = os.path.join(folder_name, f\"chunk{i}.wav\")\n audio_chunk.export(chunk_filename, format=\"wav\")\n # recognize the chunk\n with sr.AudioFile(chunk_filename) as source:\n audio_listened = r.record(source)\n # try converting it to text\n try:\n text = r.recognize_google(audio_listened, language=\"en-US\")\n\n except sr.UnknownValueError as e:\n print(\"Error:\", str(e))\n else:\n #text = f\"{text.capitalize()}. \"\n # print(chunk_filename, \":\", text)\n if chapter == '01':\n whole_text += ' ' +text\n if str(text).isalnum():\n if str(text).split(' ')[0]==' ':\n whole_text += text\n else: whole_text += ' '+text\n # return the text for all chunks detected\n\n if i==target:\n break\n if os.path.isfile('tmp.wav') :os.remove('tmp.wav')\n subprocess.run([\"rm\", \"-rf\", folder_name])\n return whole_text", "def transcribe_proc():\n while True:\n # Get result of transcription\n transcribe_result = transcriber.transcribe_stream(\n audio_stream(), sample_rate, sample_width, channels\n )\n\n _LOGGER.debug(\"Transcription result: %s\", transcribe_result)\n\n transcribe_result = transcribe_result or Transcription.empty()\n transcribe_dict = dataclasses.asdict(transcribe_result)\n transcribe_dict[\"timeout\"] = is_timeout\n\n print_json(transcribe_dict)\n transcription_printed.set()", "def speech(self, audio_file, verbose=None, headers=None):\n params = {}\n headers = headers or {}\n if verbose:\n params['verbose'] = True\n resp = req(self.logger, self.access_token, 'POST', '/speech', params,\n data=audio_file, headers=headers)\n return resp", "async def transcribe_wav(args: argparse.Namespace, core: Voice2JsonCore) -> None:\n from rhasspyasr import Transcription\n\n # Make sure profile has been trained\n assert core.check_trained(), \"Not trained\"\n\n # Get speech to text transcriber for profile\n transcriber = core.get_transcriber(open_transcription=args.open, debug=args.debug)\n\n # Directory to report WAV file names relative to\n relative_dir = (\n None if args.relative_directory is None else Path(args.relative_directory)\n )\n\n try:\n if args.wav_file or args.stdin_files:\n # Read WAV file paths\n wav_files = args.wav_file\n if args.stdin_files:\n _LOGGER.debug(\"Reading file paths from stdin\")\n wav_files = itertools.chain(wav_files, sys.stdin)\n\n for wav_path_str in wav_files:\n wav_path_str = wav_path_str.strip()\n\n # Load and convert\n wav_path = Path(wav_path_str)\n _LOGGER.debug(\"Transcribing %s\", wav_path)\n\n wav_data = await core.maybe_convert_wav(wav_path.read_bytes())\n\n # Transcribe\n transcription = (\n transcriber.transcribe_wav(wav_data) or Transcription.empty()\n )\n result = dataclasses.asdict(transcription)\n\n if relative_dir is None:\n # Add name of WAV file to result\n result[\"wav_name\"] = wav_path.name\n else:\n # Make relative to some directory\n result[\"wav_name\"] = str(\n wav_path.absolute().relative_to(relative_dir.absolute())\n )\n\n print_json(result)\n else:\n # Read WAV data from stdin\n _LOGGER.debug(\"Reading WAV data from stdin\")\n\n if args.input_size:\n # Number of bytes is on separate line\n line = sys.stdin.buffer.readline().strip()\n if not line:\n return\n\n num_bytes = int(line)\n while num_bytes > 0:\n # Read in WAV\n wav_data = sys.stdin.buffer.read(num_bytes)\n while len(wav_data) < num_bytes:\n wav_data = sys.stdin.buffer.read(num_bytes - len(wav_data))\n\n # Transcribe\n wav_data = await 
core.maybe_convert_wav(wav_data)\n transcription = (\n transcriber.transcribe_wav(wav_data) or Transcription.empty()\n )\n result = dataclasses.asdict(transcription)\n\n print_json(result)\n\n # Next WAV\n line = sys.stdin.buffer.readline().strip()\n if not line:\n break\n\n num_bytes = int(line)\n else:\n # Load and convert entire input\n wav_data = await core.maybe_convert_wav(sys.stdin.buffer.read())\n\n # Transcribe\n transcription = (\n transcriber.transcribe_wav(wav_data) or Transcription.empty()\n )\n result = dataclasses.asdict(transcription)\n\n print_json(result)\n finally:\n transcriber.stop()", "def speak():\n sentences = ['DESTROY ALL HU- I MEAN GREETINGS MEAT BAG',\n 'She sells sea shells by the sea shore', 'Other sentence']\n while True:\n AUDIO.speak(sentences[randint(0, 2)])\n sleep(15)", "def main():\n #follow_line()\n #data, samplerate = sf.read('youtube_8660.wav')\n #sf.write('new_file.ogg', data, samplerate)\n beep_for_color()", "def runAutoCheck(dbConnection, maxConcurrent):\n # checks if any shows are pending.\n fileContent = DatabaseInteract.checkPre(dbConnection)\n if(len(fileContent) > 0 and Tools.numRunningProcesses() < maxConcurrent):\n cursor = dbConnection.cursor()\n cursor.execute(\"UPDATE transcriptions SET pending = TRUE WHERE id = '\" + str(fileContent[1]) + \"';\")\n dbConnection.commit()\n cursor.close()\n url = fileContent[0]\n indexID = str(fileContent[1]) # get the ID instead of the filename\n service = str(fileContent[3])\n # podcastName = fileContent[2]\n Tools.transcribeAll(service, url, indexID) # download the mp3 will print when done", "def demonstrate():\n\n api = authenticate()\n\n # Demonstrate upload feature.\n # Create a list of one or more file paths of the mp3s you would like \n # to upload\n filepaths = []\n filepaths.append('./song1.mp3')\n\n # Upload an mp3 to your library. upload() returns a tuple of information\n # about the success or failure of uploads\n print(\"Beginning upload...\\n\")\n uploaded = api.upload(filepaths) \n\n # Print all successfully uploaded songs\n if len(uploaded[0]) > 0:\n print(\"Successfully uploaded:\")\n i = 1\n for key in uploaded[0]:\n print(\"%d. %s\" % (i, key))\n i += 1\n\n # Print all unsuccessfully uploaded songs and a description of why\n # songs weren't uploaded\n if len(uploaded[2]) == 0:\n print(\"\\nAll songs successfully uploaded.\")\n else:\n print(\"Not all songs were successfully uploaded:\")\n i = 1\n for key in uploaded[2]:\n print(\"%d. %s not uploaded: %s\" % (i, key, uploaded[2][key]))\n i += 1\n\n\n # Demonstrate download feature\n # Get information about songs previously uploaded that are available\n # to be downloaded\n uploaded_songs = api.get_uploaded_songs()\n\n if len(uploaded_songs) == 0:\n print(\"There are no songs currently available for download\")\n else:\n # Print songs that are available for download and store their ids\n # so we can download them\n song_ids = []\n print(\"\\nThe following songs are available for download\")\n for i in range(len(uploaded_songs)):\n song_ids.append(uploaded_songs[i]['id'])\n print(\"%d. %s\" % (i+1, uploaded_songs[i]['title']))\n\n # Download uploaded songs from your library\n print(\"\\nBeginning download...\")\n for i in range(len(song_ids)):\n filename, audio = api.download_song(song_ids[i])\n\n # Write song to disk\n with open(filename, 'wb') as f:\n f.write(audio)\n\n print(\"%d. 
Written to ./%s\" % (i + 1, filename))\n print(\"\\nDownload complete.\")\n\n # It's good practice to logout when finished\n api.logout()", "def play(filename):\n SoundClient(blocking=True).playWave(filename)", "def track_04():\n sonos.play_uri('http://stream.sunshine-live.de/live/mp3-192', title='Sunshine Live', force_radio=True)\n return \"Ok\"", "def processText(self, text: str, filename: str) :\n execution_time = 0.\n\n directory = os.path.join(self.execution_time_dir, AUDIO_DIR, self.getTTS().getName())\n make_dir(directory)\n time_for_generating_audio_fpath = os.path.join(directory, filename + \".txt\")\n \n audio_fpath = self.getTTS().getAudioPath(\n text=text, audio_dir=self.audio_dir, filename=filename)\n \n if self.recompute or not os.path.exists(audio_fpath):\n # print(audio_fpath)\n start_time = time.time()\n self.getTTS().generateAudio(text=text, audio_fpath=audio_fpath)\n save_execution_time(fpath=time_for_generating_audio_fpath, execution_time=time.time() - start_time)\n \n ## add execution time for generating audio\n execution_time += get_execution_time(\n fpath=time_for_generating_audio_fpath) \n \n transcription_dir = os.path.join(self.transcription_dir, self.getTTS().getName())\n \n transcriptions = {}\n for asr in self.asrs :\n directory = os.path.join(\n self.execution_time_dir, TRANSCRIPTION_DIR, self.getTTS().getName(), asr.getName())\n make_dir(directory)\n time_for_recognizing_audio_fpath = os.path.join(\n directory, filename + \".txt\")\n\n if self.recompute :\n start_time = time.time()\n # TODO: \n # change recognize audio -> input audio instead of fpath\n # audio = asr.loadAudio(audio_fpath=audio_fpath)\n # transcription = asr.recognizeAudio(audio=audio)\n # asr.saveTranscription(transcription_fpath, transcription)\n transcription = asr.recognizeAudio(audio_fpath=audio_fpath)\n asr.setTranscription(transcription)\n asr.saveTranscription(transcription_dir=transcription_dir, filename=filename)\n save_execution_time(fpath=time_for_recognizing_audio_fpath, execution_time=time.time() - start_time)\n \n transcription = asr.loadTranscription(\n transcription_dir=transcription_dir, filename=filename)\n num_retry = 0\n while transcription == \"\" and num_retry < self.max_num_retry :\n start_time = time.time()\n asr.recognizeAudio(audio_fpath=audio_fpath)\n asr.saveTranscription(\n transcription_dir=transcription_dir, filename=filename)\n save_execution_time(\n fpath=time_for_recognizing_audio_fpath, execution_time=time.time() - start_time)\n transcription = asr.loadTranscription(\n transcription_dir=transcription_dir, filename=filename)\n\n if asr.getName() == \"wit\" :\n random_number = float(random.randint(9, 47))/10.\n time.sleep(random_number)\n\n num_retry += 1\n\n transcriptions[asr.getName()] = preprocess_text(transcription)\n\n ## add execution time for generating audio\n execution_time += get_execution_time(\n fpath=time_for_recognizing_audio_fpath) \n \n\n cases = self.caseDeterminer(text, transcriptions)\n # if sum(cases.values()) == 0 :\n # print(text)\n # print(transcriptions[\"wav2vec2\"])\n # print(cases)\n # print()\n \n for asr_name, case in cases.items() :\n self.saveCase(self.case_dir, self.getTTS().getName(), asr_name, filename, str(case))\n\n # print(f\"Execution time: {execution_time}\")\n return cases, execution_time", "def start_transcribing():\n transcribe.main()", "def play_podcast(url, name):\n\n player.play(url, name)", "async def prog(ctx, note:str,amount=3):\n answer = Tempo.getNoteProg(note,amount)\n solution = 'Generated Key Progression 
'+str(answer)\n await ctx.send(solution)\n if ctx.author.voice is not None:\n vc = await ctx.author.voice.channel.connect()\n for i in range(len(answer)):\n source = discord.PCMVolumeTransformer(discord.FFmpegPCMAudio('sounds/'+str(answer[i])+'.mp3'))\n ctx.voice_client.play(source, after=lambda e: print('Player error: %s' % e) if e else None)\n time.sleep(1)\n await vc.disconnect()", "async def generate_audio(self, site, text, payload):\n cache_path = self.config['services']['Pico2wavTtsService'].get(\n 'cache_path', '/tmp/tts_cache')\n value = payload.get('id', 'no_id')\n\n if text:\n short_text = text[0:100].replace(' ', '_').replace(\".\", \"\")\n # speakable and limited\n say_text = text[0:300].replace('(', '').replace(')', '')\n short_file_name = clean_filename('tts-' + str(short_text)) + '.wav'\n file_name = os.path.join(cache_path, short_file_name)\n\n # generate if file doesn't exist in cache\n if not os.path.isfile(file_name):\n path = self.config['services']['Pico2wavTtsService']['binary_path']\n command = path + ' -w=' + file_name + ' \"{}\" '.format(say_text)\n executor = concurrent.futures.ProcessPoolExecutor(\n max_workers=1,\n )\n await self.loop.run_in_executor(executor, os_system, command)\n\n async with aiofiles.open(file_name, mode='rb') as send_file:\n audio_file = await send_file.read()\n await self.client.subscribe('hermod/{}/speaker/finished'.format(site))\n if site in self.clients and self.clients[site].get(\n 'platform', '') == \"web\" and self.clients[site].get('url', False):\n await self.client.publish(\\\n 'hermod/{}/speaker/play/{}'.format(site, value), payload=json.dumps({\n \"url\": self.clients[site].get('url') + \"/tts/\" + short_file_name\n }), qos=0)\n else:\n slice_length = 2048\n\n def chunker(seq, size):\n \"\"\" return chunks\"\"\"\n return (seq[pos:pos + size] for pos in range(0, len(seq), size))\n for chunk in chunker(audio_file, slice_length):\n await self.client.publish('hermod/{}/speaker/cache/{}'.format(site, value)\\\n , payload=bytes(chunk), qos=0)\n\n # finally send play message with empty payload\n await self.client.publish(\n 'hermod/{}/speaker/play/{}'.format(site, value), payload=None, qos=0)\n\n await self.cleanup_file(short_text, file_name)", "def transcribe_file(speech_file):\n from google.cloud import speech\n from google.cloud.speech import enums\n from google.cloud.speech import types\n client = speech.SpeechClient()\n\n # [START migration_async_request]\n with io.open(speech_file, 'rb') as audio_file:\n content = audio_file.read()\n\n audio = types.RecognitionAudio(content=content)\n config = types.RecognitionConfig(\n #encoding=enums.RecognitionConfig.AudioEncoding.LINEAR16,\n enable_word_time_offset=True,\n encoding=enums.RecognitionConfig.AudioEncoding.FLAC,\n #sample_rate_hertz=32000,\n language_code='en-US')\n\n # [START migration_async_response]\n operation = client.long_running_recognize(config, audio)\n # [END migration_async_request]\n\n print('Waiting for operation to complete...')\n response = operation.result(timeout=90)\n\n # Print the first alternative of all the consecutive results.\n for result in response.results:\n print('Transcript: {}'.format(result.alternatives[0].transcript))\n print('Confidence: {}'.format(result.alternatives[0].confidence))\n # [END migration_async_response]", "def track_02():\n sonos.play_uri('http://streams.radiopsr.de/psr-live/mp3-192/mediaplayer', title='Radio PSR Live', force_radio=True)\n return \"Ok\"", "def on_complete(stream, filepath):\n global cli, video_path, audio_path\n\n if 
cli.audio_only:\n print('Converting audio to mp3. This might take some time.\\n')\n mp4_to_mp3(filepath)\n\n if adaptive:\n if '_video.mp4' in filepath:\n video_path = filepath\n if '_audio.mp4' in filepath:\n audio_path = filepath\n if os.path.exists(video_path) and os.path.exists(audio_path):\n merge(video_path, audio_path)\n print(\"\\n\\nDownload has completed.\\n\")", "def generate_audio():\n text, lang = introduction()\n ses = boto3.Session(profile_name=\"default\")\n pol = ses.client(\"polly\")\n res = pol.synthesize_speech(Text=text, LanguageCode=lang, OutputFormat=\"mp3\", VoiceId=VOICE)\n return res", "def transcribe_file_ret(speech_file):\n from google.cloud import speech\n from google.cloud.speech import enums\n from google.cloud.speech import types\n client = speech.SpeechClient()\n\n # [START migration_sync_request]\n # [START migration_audio_config_file]\n with io.open(speech_file, 'rb') as audio_file:\n content = audio_file.read()\n\n audio = types.RecognitionAudio(content=content)\n config = types.RecognitionConfig(\n encoding=enums.RecognitionConfig.AudioEncoding.LINEAR16,\n sample_rate_hertz=8000,\n language_code='en-US',\n speech_contexts=[types.SpeechContext(\n phrases=phrases,\n )])\n #use_enhanced=True,\n # model='phone_call',)\n # [END migration_audio_config_file]\n\n # [START migration_sync_response]\n response = client.recognize(config, audio)\n # [END migration_sync_request]\n # Each result is for a consecutive portion of the audio. Iterate through\n # them to get the transcripts for the entire audio file.\n return(response)", "def transcribe_file(speech_file):\n client = speech.SpeechClient()\n # [START speech_python_migration_sync_request]\n # [START speech_python_migration_config]\n with io.open(speech_file, 'rb') as audio_file:\n content = audio_file.read()\n\n audio = types.RecognitionAudio(content=content)\n config = types.RecognitionConfig(\n encoding=enums.RecognitionConfig.AudioEncoding.LINEAR16,\n sample_rate_hertz=44100,\n language_code='en-US')\n # [END speech_python_migration_config]\n\n # [START speech_python_migration_sync_response]\n response = client.recognize(config, audio)\n # [END speech_python_migration_sync_request]\n # Each result is for a consecutive portion of the audio. Iterate through\n # them to get the transcripts for the entire audio file.\n to_return = \"\"\n for result in response.results:\n # The first alternative is the most likely one for this portion.\n to_return += result.alternatives[0].transcript\n return to_return\n # [END speech_python_migration_sync_response]", "def main():\n if (len(sys.argv) == 1):\n song = (\n ('c', 4), ('c*', 4), ('eb', 4),\n ('g#', 4), ('g*', 2), ('g5', 4),\n ('g5*', 4), ('r', 4), ('e5', 16),\n ('f5', 16), ('e5', 16), ('d5', 16),\n ('e5*', 4)\n )\n song = (\n ('a3',4), ('b3',4),('c4',4),('d4',4)\n )\n outputSongFile = \"testSong.wav\"\n timing = 4\n\n createMelody(song, outputSongFile, timing)\n playMelody(outputSongFile)\n\n else:\n song = str(sys.argv[1])\n outputSongFile = str(sys.argv[2])\n createMelody(song, outputSongFile, timing)", "def play_audio():\n play_file = input(\"Would you like to play the file we created (y/n)? 
\")\n if play_file == \"y\":\n os.system(\"open output_audio.mp3\")\n else:\n print(\"Thanks for using our service, the file exists in your directory where you ran this file.\")", "async def ttscmd(self, message):\r\n reply = await message.get_reply_message()\r\n text = utils.get_args_raw(message.message)\r\n\r\n if not text:\r\n if message.is_reply:\r\n text = (await message.get_reply_message()).message\r\n else:\r\n return await utils.answer(message, self.strings(\"no_text\", message))\r\n\r\n if message.out:\r\n await message.delete()\r\n\r\n tts = await utils.run_sync(gTTS, text, lang=self.config[\"TTS_LANG\"])\r\n voice = io.BytesIO()\r\n await utils.run_sync(tts.write_to_fp, voice)\r\n voice.seek(0)\r\n voice.name = \"voice.mp3\"\r\n\r\n if self.is_ffmpeg:\r\n voice, duration = to_voice(voice)\r\n else:\r\n duration = None\r\n\r\n await message.client.send_file(message.chat_id, voice, voice_note=True, reply_to=reply, duration=duration)", "def main():\n opts = docopt(__doc__, version=\"cast 0.1\")\n\n cast = pychromecast.PyChromecast(CHROMECAST_HOST)\n ramp = cast.get_protocol(pychromecast.PROTOCOL_RAMP)\n\n # Wait for ramp connection to be initted.\n time.sleep(SLEEP_TIME)\n\n if ramp is None:\n print 'Chromecast is not up or current app does not handle RAMP.'\n return 1\n\n if opts['next']:\n ramp.next()\n elif opts['pause']:\n ramp.pause()\n elif opts['play']:\n ramp.play()\n elif opts['toggle']:\n ramp.playpause()\n elif opts['seek']:\n ramp.seek(opts['<second>'])\n elif opts['rewind']:\n ramp.rewind()\n elif opts['status']:\n _status_command(cast, ramp)\n elif opts['volume']:\n _volume_command(ramp, opts['<value>'])\n\n # Wait for command to be sent.\n time.sleep(SLEEP_TIME)", "async def stream(self, ctx, *, url: str):\r\n if not ctx.voice_client:\r\n await self.connect(ctx)\r\n with youtube_dl.YoutubeDL(YDL_OPTS) as ydl:\r\n try:\r\n info = ydl.extract_info(url, download=False)\r\n except youtube_dl.utils.DownloadError:\r\n return await ctx.send(\"Error, cannot play audio from this URL\")\r\n URL = info['formats'][0]['url']\r\n ctx.voice_client.play(discord.FFmpegPCMAudio(URL, **FFMPEG_OPTIONS))\r\n await ctx.message.delete()\r\n await ctx.send(\"Playing {0}\".format(url))", "def transcribe_wav( local_file_path, gcp_credentials_path=None, language_code=\"en-US\", client=None ):\n SEGMENT_SIZE = 55 * 1000 # 55 seconds\n OVERLAP_SIZE = 5 * 1000 # 5 seconds\n\n #\n # Instantiate a client\n #\n if client is None:\n client = create_api_client( gcp_credentials_path )\n\n #\n # Build the request. Because we only support WAV, don't need to define encoding\n # or sample rate.\n #\n config = {\n \"model\": \"video\", # premium model, but cost is basically nothing for single user anyway. Works MUCH better.\n \"language_code\": language_code,\n \"enable_word_time_offsets\": True,\n }\n\n #\n # GCP inline audio is restricted to just one minute. To avoid needing to setup\n # a GCP bucket, we'll split any provided audio files into 55-second chunks with\n # 5 seconds of overlap (since we'll probably split a word). 
IE, chunk 1 is from\n # 0:00 to 0:55, two is from 0:50 to 1:45, etc...\n #\n full_text = \"\"\n time_map = []\n full_recording = pydub.AudioSegment.from_file( local_file_path, format=\"wav\" )\n full_duration_ms = len( full_recording )\n offset = 0\n while offset < full_duration_ms:\n\n # If we're splitting into chunks, insert a hint\n if offset > 0:\n full_text += \" \" + SPLICE_STR + \" \"\n time_map.append( ( int( offset / 1000 ), SPLICE_STR ) )\n\n # Segment the clip into a RAM file\n this_clip = full_recording[ offset : min( offset + SEGMENT_SIZE, full_duration_ms ) ]\n segment_wav = io.BytesIO()\n this_clip.export( segment_wav, format=\"wav\" )\n segment_wav.seek(0)\n audio = { \"content\": segment_wav.read() }\n\n #\n # Submit the request & wait synchronously\n #\n operation = client.long_running_recognize( config, audio )\n response = operation.result()\n\n #\n # Process the response. Only take the first alternative.\n #\n for result in response.results:\n if len( result.alternatives ) < 1:\n continue\n best_guess = result.alternatives[0]\n full_text += best_guess.transcript\n time_map.extend( [ ( x.start_time.seconds + int( offset / 1000 ), x.word ) for x in best_guess.words ] )\n\n # Next offset\n offset += ( SEGMENT_SIZE - OVERLAP_SIZE )\n\n return ( full_text, time_map )", "def main(path):\n logger.info(f'Processing video file {path}')\n # Extract audio\n audio_file = extract_audio(path, pipeline_config.audio_target_dir)\n\n # Generate sound classification results and speech recogniser results\n sound_results = SoundRecogniser().process_file(audio_file)\n sound_results = process_overlap(sound_results)\n speech_results = SpeechRecogniser().process_file(audio_file)\n\n # NLP\n wrds = get_words(speech_results)\n nlp = SpaCyNaturalLanguageProcessor(pipeline_config.spacy_model)\n custom_nlp = SpaCyNaturalLanguageProcessor(pipeline_config.custom_spacy_model)\n processor = nlp.get_spacy_results_processor(wrds, speech_results)\n custom_processor = custom_nlp.get_spacy_results_processor(wrds, speech_results)\n chunk_results = processor.process_speech_results_chunk()\n ner_results = processor.process_speech_results_ner()\n ner_results.extend(custom_processor.process_speech_results_ner())\n match_results = processor.process_speech_results_match()\n speech_results = nlp.process_spurious_words(speech_results, chunk_results)\n\n # Add Speech recogniser results, sound classification results and NLP results to a subtitle file\n subs_1 = save_to_subtitles(speech_results,\n lambda speech_result: speech_result['word'])\n subs_1 = compress_subs(subs_1)\n subs_2 = save_to_subtitles(sound_results,\n lambda sound_result: sound_result['class'])\n subs_2 = flatten_subs(subs_2)\n subs_3 = save_to_subtitles(chunk_results,\n lambda chunk_result: f'{chunk_result[\"word\"]} ({chunk_result[\"head\"]})')\n subs_4 = save_to_subtitles(ner_results,\n lambda ner_result: f'{ner_result[\"type\"]} {ner_result[\"word\"]}')\n subs_5 = save_to_subtitles(match_results,\n lambda match_result: match_result[\"word\"])\n\n combined_subs = append_subs(None, subs_1, style='bottom')\n combined_subs = append_subs(combined_subs, subs_2, exclude=['bottom'], style='top', formatter=lambda x: f'({x})')\n combined_subs = append_subs(combined_subs, subs_3, style='left')\n combined_subs = append_subs(combined_subs, subs_4, style='right')\n combined_subs = append_subs(combined_subs, subs_5, style='bottom_left_pred')\n combined_subs = remove_tiny_subs(combined_subs, duration_millis=1000, left_millis=None,\n right_millis=None, 
style='top')\n subtitle_file_name = os.path.splitext(path)[0] + '.ass'\n create_styles(combined_subs)\n combined_subs.save(subtitle_file_name)\n\n # Burn to a video\n burn_subtitles_into_video(path, subtitle_file_name, pipeline_config.audio_target_dir)\n logger.info(f'Done processing {audio_file}')", "def output_sound():\n try:\n subprocess.call(['ffplay', '-nodisp', '-autoexit', SOUND_FILE])\n except:\n pass", "def do_actions(cast, action, url):\n if action == 'play_media':\n print()\n print(\"=> Sending non-blocking play_media command\")\n cast.play_media((str(url)), \"video/mp4\")\n elif action == 'pause':\n print()\n print(\"=> Sending non-blocking pause command\")\n cast.media_controller.pause()\n elif action == 'play':\n print()\n print(\"=> Sending non-blocking play command\")\n cast.media_controller.play()\n elif action == 'stop':\n print()\n print(\"=> Sending non-blocking stop command\")\n cast.media_controller.stop()\n elif action == 'quit_app':\n print()\n print(\"=> Sending non-blocking quit_app command\")\n cast.quit_app()", "def detect_netease_music_name(file_path, dist_path, KEEP_SOURCE=True):\n headers = {\n \"User-Agent\": \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0\"\n }\n url_base = \"http://music.163.com/api/song/detail/?id={}&ids=[{}]\"\n\n if not os.path.exists(dist_path):\n os.mkdir(dist_path)\n\n for file_name in os.listdir(file_path):\n if not file_name.endswith(\".mp3\"):\n continue\n if not len(file_name.split(\"-\")) == 3:\n print(\n \">>>> File %s not in format <song id>-<bite rate>-<random number>.mp3\"\n % (file_name)\n )\n continue\n\n try:\n song_id = file_name.split(\"-\")[0]\n url_target = url_base.format(song_id, song_id)\n resp = requests.get(url_target, headers=headers)\n rr = json.loads(resp.text)\n\n tt = eyed3.load(os.path.join(file_path, file_name))\n tt.tag.title = rr[\"songs\"][0][\"name\"].replace(\"\\xa0\", \" \")\n tt.tag.artist = rr[\"songs\"][0][\"artists\"][0][\"name\"]\n tt.tag.album = rr[\"songs\"][0][\"album\"][\"name\"]\n tt.tag.album_artist = rr[\"songs\"][0][\"album\"][\"artists\"][0][\"name\"]\n print(\n \"song_id = %s, tt.tag title = %s, artist = %s, album = %s, album_artist = %s\"\n % (\n song_id,\n tt.tag.title,\n tt.tag.artist,\n tt.tag.album,\n tt.tag.album_artist,\n )\n )\n tt.tag.save()\n except UnicodeEncodeError as e:\n print(\n \">>>> UnicodeEncodeError, try again later: file_name = %s, error = %s\"\n % (file_name, str(e))\n )\n continue\n except:\n print(\">>>> Some other error happens: file_name = %s\" % (file_name))\n continue\n\n dist_name = (\n os.path.join(\n dist_path,\n \"%s - %s\"\n % (tt.tag.artist.replace(\"/\", \" \"), tt.tag.title.replace(\"/\", \" \")),\n )\n + \".mp3\"\n )\n \n if KEEP_SOURCE == True:\n shutil.copyfile(os.path.join(file_path, file_name), dist_name)\n else:\n os.rename(os.path.join(file_path, file_name), dist_name)", "def start(self):\n\tglobal mode\n\tmode=\"./music/\"\n\tglobal message\n\tif message!=2:\n\t\tmessage=1\n\t\tbot.loop.create_task(play())", "def convert_to_mp3(filename: str, title: str, start: int=None, end: int=None) -> list:\n\t# setup args for ffmpeg\n\tfile_a = f\"{path_to_wrk_dir}{filename}.mp4\" # input file\n\tfile_b = f\"{path_to_wrk_dir}{title}.mp3\" # output file\n\tfiles_b = [] # this list need if file more than 30 mb\n\targs = [\n\t\t\"/usr/bin/ffmpeg\", # path to ffmpeg\n\t\t\"-i\", # flag for input file\n\t\tfile_a, # input file\n\t\t\"-acodec\", # setup codec\n\t\t\"libmp3lame\", # codec name\n\t\t]\n\n\t# now need setup timings 
for target encode\n\tif start is not None and start != 0:\n\t\targs = args + [\"-ss\", str(start)]\n\tif end is not None and end != 0:\n\t\targs = args + [\"-t\", str(end - start)]\n\n\t# and last part for args to ffmpeg\n\targs = args + [\n\t\t\"-metadata\", # setup metadata for file\n\t\tf\"title={title}\", # title\n\t\t\"-metadata\",\n\t\tf\"artist={title}\", # and artist\n\t\t\"-b:a\", # setup bitrate\n\t\t\"320k\", # setup max bitrate\n\t\tfile_b,\n\t\t]\n\tprint(f\"{args}\")\n\t# start subprocess for encoding\n\tpopen = subprocess.Popen(args)\n\tpopen.wait()\n\n\t# check size file. if he more than 30 mb, bot need split him to chunks.\n\tsize = getsize(file_b) / 1024 / 1024\n\tif size > 30 and ( start or end is None ):\n\t\t# setup args for split to chunks\n\t\targs = [\n\t\t\t\"ffprobe\",\n\t\t\t\"-show_entries\",\n\t\t\t\"format=duration\",\n\t\t\t\"-i\",\n\t\t\tfile_b,\n\t\t\t]\n\n\t\t# get duration video.\n\t\tpopen = subprocess.Popen(args, stdout=subprocess.PIPE)\n\t\tpopen.wait()\n\t\toutput = popen.stdout.read()\n\t\t# now we know how long this audio file\n\t\t# split to 10 min chunks\n\t\tdur = re.findall(r\"\\d{1,10}\", str(output))\n\t\t# get chunks count for loop\n\t\tcount_chunks = (int(dur[0]) // 600) + 1\n\t\tfor chunk_start_time in range(0, count_chunks):\n\t\t\t# setup args for split\n\t\t\t# big parts of args the same for encode\n\t\t\targs = [\n\t\t\t\t\"/usr/bin/ffmpeg\",\n\t\t\t\t\"-i\",\n\t\t\t\tfile_b,\n\t\t\t\t\"-ss\",\n\t\t\t\tf\"{chunk_start_time * 600}\", # when start chunk\n\t\t\t\t\"-t\",\n\t\t\t\t\"600\", # 10 mints duration\n\t\t\t\t\"-acodec\",\n\t\t\t\t\"copy\", # copy\n\t\t\t\t\"-b:a\",\n\t\t\t\t\"320k\",\n\t\t\t\tf\"{path_to_wrk_dir}{title}_{chunk_start_time}.mp3\", # now we have path to video with chunk number.\n\t\t\t]\n\t\t\ttry:\n\t\t\t\t# start process for cut chunk\n\t\t\t\tpopen = subprocess.Popen(args, stdout=subprocess.PIPE)\n\t\t\t\tpopen.wait()\n\t\t\t# handle except.\n\t\t\texcept Exception as e:\n\t\t\t\tprint(f\"Exception - {e}\")\n\t\t\tfiles_b.append(f\"{path_to_wrk_dir}{title}_{chunk_start_time}.mp3\") # append name of file in list\n\t\tremove(file_b)\n\ttry:\n\t\t# remove tmp file\n\t\tremove(file_a)\n\t# handle except\n\texcept FileNotFoundError:\n\t\tfiles = get_file_list(path_to_wrk_dir)\n\t\tfor i in files:\n\t\t\tif -1 != f\"{path_to_wrk_dir}{i}\".find(f\"{filename}\") and f\"{i}\".find(f\".mp3\") == -1:\n\t\t\t\ttry:\n\t\t\t\t\tremove(f\"{path_to_wrk_dir}{i}\")\n\t\t\t\texcept FileNotFoundError:\n\t\t\t\t\tprint(f\"can't remove file {path_to_wrk_dir}{i}\")\n\tif len(files_b) == 0:\n\t\treturn [file_b]\n\telse:\n\t\treturn files_b", "def _interactive_training(filebasename, cluster, identifier):\n info = None\n prc = None\n if identifier == \"unknown\":\n info = \"\"\"The system has not identified this speaker!\"\"\"\n else:\n info = \"The system identified this speaker as '\" + identifier + \"'!\"\n print info\n while True:\n try:\n char = raw_input(\"\\n 1) Listen\\n 2) Set \" + \n \" name\\n Press enter to skip\\n> \")\n except EOFError:\n print ''\n continue\n print ''\n if prc != None and prc.poll() == None:\n prc.kill()\n if char == \"1\":\n videocluster = str(filebasename + \"/\" + cluster)\n listwaves = os.listdir(videocluster)\n listw = [os.path.join(videocluster, f) for f in listwaves]\n wrd = \" \".join(listw)\n commandline = \"play \" + str(wrd)\n if sys.platform == 'win32':\n commandline = \"vlc \" + str(wrd)\n commandline = commandline.replace('\\\\', '\\\\\\\\')\n print \" Listening %s...\" % cluster\n args = 
shlex.split(commandline)\n prc = subprocess.Popen(args, stdin=CONFIGURATION.output_redirect,\n stdout=CONFIGURATION.output_redirect,\n stderr=CONFIGURATION.output_redirect)\n time.sleep(1)\n continue\n if char == \"2\":\n menu = False\n while not menu:\n name = raw_input(\"Type speaker name \"\n + \"or leave blank for unknown speaker: \")\n while True:\n if len(name) == 0:\n name = \"unknown\"\n if not name.isalnum():\n print 'No blank, dash or special chars allowed! Retry'\n# menu = True\n break\n okk = raw_input(\"Save as '\" + name + \"'? [Y/n/m] \")\n if okk in ('y', 'ye', 'yes', ''):\n return name\n if okk in ('n', 'no', 'nop', 'nope'):\n break\n if okk in ('m', \"menu\"):\n menu = True\n break\n if not menu:\n print \"Yes, no or menu, please!\"\n continue\n if char == \"\":\n return identifier\n print \"Type 1, 2 or enter to skip, please\"", "def main():\n #\n # Parse CLI\n #\n parser = argparse.ArgumentParser( description=\"Transcribe a directory of wav files into a single Emacs org-mode file.\" )\n parser.add_argument( \"--voice_notes_dir\", type=str, help=\"Directory of WAV files which will be searched non-recursively.\" )\n parser.add_argument( \"--archive_dir\", type=str, help=\"Directory where WAV files will be placed after transcription.\" )\n parser.add_argument( \"--org_transcript_file\", type=str, help=\"Org file where transcription headings will be appended. Will be created if it doesn't exist.\" )\n parser.add_argument( \"--just_copy\", type=bool, help=\"If True, don't remove files from voice_notes_dir. Default is False.\" )\n parser.add_argument( \"--gcp_credentials_path\", type=str, help=\"Path to GCP credentials JSON, if environment variables are unconfigured.\" )\n parser.add_argument( \"--verbose\", type=bool, help=\"Prints out which WAV we're working on.\" )\n parser.add_argument( \"--max_concurrent_requests\", type=int, help=\"Maximum number of concurrent transcription requests.\" )\n parser.add_argument( \"--voicenote_filename_regex_path\", type=str, help=\"Path to a text file containing a Python regex, which will be used to match \"\n \"and parse voice note filenames. It MUST contain named groups for year, month, day, hour, minute, and ampm. All but ampm \"\n \"are local date/time (or, whatever you want, really), 12 hour clock. 
ampm should be either literally am or pm.\")\n cli_kwargs = { k: v for k, v in vars( parser.parse_args() ).items() if v is not None }\n\n #\n # If a config file exists, find anything missing there\n #\n config_file_path = os.path.join( appdirs.user_config_dir( \"voicenotes2org\", \"voicenotes2org\" ), \"default.toml\" )\n kwargs = {}\n if os.path.exists( config_file_path ):\n with open( config_file_path, \"r\" ) as fin:\n try:\n\n # Read the kwargs from the TOML\n kwargs = toml.load( fin )\n\n # Check args and expand paths (Like ~ and $VAR\n # also convert any relative paths to be relative /to the config file/, not CWD\n path_args = [ \"voice_notes_dir\", \"archive_dir\", \"org_transcript_file\", \"voicenote_filename_regex_path\" ]\n for p in path_args:\n if p in kwargs:\n kwargs[ p ] = os.path.expanduser( os.path.expandvars( kwargs[ p ] ) )\n if not os.path.isabs( kwargs[ p ] ):\n kwargs[ p ] = os.path.join( os.path.dirname( config_file_path ), kwargs[ p ] )\n\n except toml.decoder.TomlDecodeError as e:\n print( \"\\nInvalid config file at {}!\".format( config_file_path ) )\n print( str( e ) )\n print( )\n exit( -1 )\n\n #\n # Determine final kwargs -- CLI always overwrites config file\n #\n kwargs.update( cli_kwargs )\n\n #\n # If user supplied a voicenote_filename_regex_path, replace it with a compiled regex.\n #\n if \"voicenote_filename_regex_path\" in kwargs:\n with open( kwargs[ \"voicenote_filename_regex_path\" ], \"r\" ) as fin:\n content = [ line for line in fin.readlines() if not line.startswith( \"#\" ) ]\n content = \"\".join( content )\n try:\n regex = re.compile( content )\n del kwargs[ \"voicenote_filename_regex_path\" ] # Not valid to org_transcribe\n except re.error as e:\n print( \"Invalid regex!\" )\n print( str( e ) )\n exit( -1 )\n\n #\n # Explain ourselves\n #\n if \"verbose\" in kwargs and kwargs[ \"verbose\" ]:\n print()\n print( \"Config Options:\" )\n pprint.pprint( kwargs )\n print()\n\n #\n # Go!\n #\n org_transcribe( **kwargs )", "def play(self, filename, callback) :\n raise NotImplementedError(\"play not implemented\")", "def main(args=None):\n\n parser = argparse.ArgumentParser(\n description=\"Coqui STT (file-polling)\",\n prog=\"stt_transcribe_poll\",\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n parser.add_argument('--model', metavar=\"FILE\", type=str, required=True, help='The tflite model to use.')\n parser.add_argument('--prediction_in', help='Path to the input audio files', required=True, default=None)\n parser.add_argument('--prediction_out', help='Path to the output files folder', required=True, default=None)\n parser.add_argument('--prediction_tmp', help='Path to the temporary files folder', required=False, default=None)\n parser.add_argument('--poll_wait', type=float, help='poll interval in seconds when not using watchdog mode', required=False, default=1.0)\n parser.add_argument('--continuous', action='store_true', help='Whether to continuously process audio files and perform prediction', required=False, default=False)\n parser.add_argument('--use_watchdog', action='store_true', help='Whether to react to file creation events rather than performing fixed-interval polling', required=False, default=False)\n parser.add_argument('--watchdog_check_interval', type=float, help='check interval in seconds for the watchdog', required=False, default=10.0)\n parser.add_argument('--delete_input', action='store_true', help='Whether to delete the input files rather than move them to --prediction_out directory', required=False, default=False)\n 
parser.add_argument(\"--scorer\", metavar=\"FILE\", required=False, help=\"Path to the external scorer file\")\n parser.add_argument(\"--beam_width\", metavar=\"INT\", type=int, help=\"Beam width for the CTC decoder\")\n parser.add_argument(\"--lm_alpha\", metavar=\"NUM\", type=float, help=\"Language model weight (lm_alpha). If not specified, use default from the scorer package.\")\n parser.add_argument(\"--lm_beta\", metavar=\"NUM\", type=float, help=\"Word insertion bonus (lm_beta). If not specified, use default from the scorer package.\")\n parser.add_argument(\"--candidate_transcripts\", metavar=\"INT\", type=int, default=None, help=\"Number of candidate transcripts to include in JSON output\")\n parser.add_argument(\"--hot_words\", metavar=\"WORD:BOOST[,WORD:BOOST...]\", type=str, help=\"Hot-words and their boosts.\")\n parser.add_argument('--verbose', action='store_true', help='Whether to output more logging info', required=False, default=False)\n parser.add_argument('--quiet', action='store_true', help='Whether to suppress output', required=False, default=False)\n parsed = parser.parse_args(args=args)\n\n transcribe_audio_files(parsed.model, parsed.prediction_in, parsed.prediction_out, tmp_dir=parsed.prediction_tmp,\n poll_wait=parsed.poll_wait, continuous=parsed.continuous, use_watchdog=parsed.use_watchdog,\n watchdog_check_interval=parsed.watchdog_check_interval, delete_input=parsed.delete_input,\n scorer=parsed.scorer, beam_width=parsed.beam_width, lm_alpha=parsed.lm_alpha,\n lm_beta=parsed.lm_beta, candidate_transcripts=parsed.candidate_transcripts,\n hot_words=parsed.hot_words, verbose=parsed.verbose, quiet=parsed.quiet)", "def download(self, language, filename, filetype):\n if language not in self.languages.keys():\n print \"Theres's no subtitle in this language\"\n sys.exit()\n url = \"http://www.youtube.com/api/timedtext?v={0}&lang={1}\".format(self.video_id, language)\n self.subtitle = urllib2.urlopen(url)\n if filetype == \"srt\":\n self.writeSRTFile(filename)\n else:\n self.writeXMLFile(filename)", "def create_audiobook():\n\n f = open(\"static/files/book.txt\", \"r\", encoding=\"utf-8\")\n summary = f.read()\n print('total chars: ', len(summary))\n all_words = summary.split('.')\n aflr.api_key = \"b6b1434676d14bdfbf9f50ca2157ed5c\"\n VOICE=\"Matthew\"\n current, total_chars, chunk_num, TEXT = 0,0,0,[]\n while current < len(all_words) - 1:\n while total_chars <= 4999:\n TEXT.append(all_words[current])\n total_chars += len(all_words[current]) + 1\n current += 1\n if current == len(all_words):\n break\n \n if current < len(all_words):\n TEXT.pop()\n current -= 1\n total_chars = 0\n\n TEXT = \".\".join(TEXT)\n\n SPEED=80\n script = aflr.Script().create(\n scriptText=TEXT,\n projectName=\"may_the_4th\",\n moduleName=\"evil\",\n scriptName=f\"{chunk_num}_evil_{VOICE}\",\n )\n print(f\"Connect to the dev star: \\n {script} \\n\")\n\n scriptId = script[\"scriptId\"]\n\n response = aflr.Speech().create(\n scriptId=scriptId, voice=VOICE, speed=SPEED, #effect=EFFECT\n )\n # print(f\"Response from dev star: \\n {response} \\n\")\n # mastering current\n response = aflr.Mastering().create(\n scriptId=scriptId, #backgroundTrackId=BACKGROUNDTRACK\n )\n # print(f\"Using the force: \\n {response} \\n\")\n\n url = aflr.Mastering().retrieve(scriptId=scriptId)\n #print(f\"url to download the track: \\n {url} \\n\")\n\n # or download\n file = aflr.Mastering().download(\n scriptId=scriptId, destination=MINI_PATH\n )\n # print(f\"Listen to the results of the force: \\n {file} \\n\")\n\n 
print(\"finished\",chunk_num)\n\n TEXT = []\n chunk_num += 1\n\n play_audio()", "def studio_transcript(self, request, dispatch):\r\n _ = self.runtime.service(self, \"i18n\").ugettext\r\n\r\n if dispatch.startswith('translation'):\r\n language = dispatch.replace('translation', '').strip('/')\r\n\r\n if not language:\r\n log.info(\"Invalid /translation request: no language.\")\r\n return Response(status=400)\r\n\r\n if request.method == 'POST':\r\n subtitles = request.POST['file']\r\n save_to_store(subtitles.file.read(), unicode(subtitles.filename), 'application/x-subrip', self.location)\r\n generate_sjson_for_all_speeds(self, unicode(subtitles.filename), {}, language)\r\n response = {'filename': unicode(subtitles.filename), 'status': 'Success'}\r\n return Response(json.dumps(response), status=201)\r\n\r\n elif request.method == 'GET':\r\n\r\n filename = request.GET.get('filename')\r\n if not filename:\r\n log.info(\"Invalid /translation request: no filename in request.GET\")\r\n return Response(status=400)\r\n\r\n content = Transcript.get_asset(self.location, filename).data\r\n response = Response(content, headerlist=[\r\n ('Content-Disposition', 'attachment; filename=\"{}\"'.format(filename.encode('utf8'))),\r\n ('Content-Language', language),\r\n ])\r\n response.content_type = Transcript.mime_types['srt']\r\n\r\n else: # unknown dispatch\r\n log.debug(\"Dispatch is not allowed\")\r\n response = Response(status=404)\r\n\r\n return response", "def main():\n\tif not len(sys.argv) in (3, 4):\n\t\tprint \"usage: nowplaying \\\"Interpreter\\\" \\\"Title\\\" [\\\"link\\\"]\"\n\t\tsys.exit(1)\n\tinterpreter = sys.argv[1]\n\ttitle = sys.argv[2]\n\tif len(sys.argv) == 4:\n\t\tlink = sys.argv[3]\n\telse:\n\t\tlink = None\n\t\n\tclient = NowPlayingClient(APIKEY)\n\tret = client.sendTrack(interpreter, title, link)\n\tif ret == \"Done.\\n\":\n\t\tsys.exit(0)\n\telse:\n\t\t# some debugging\n\t\tprint \"ERROR!\\nServer response below:\\n\\n--BEGIN SERVER RESPONSE--\"\n\t\tprint ret\n\t\tprint \"--END SERVER RESPONSE--\"\n\t\tsys.exit(1)", "def publish_files():\n print(\"Publishing files to the internet...\", end=\"\", flush=True)\n import subprocess\n try:\n subprocess.run(\"./upload.sh\", timeout=120.0)\n print(\"done.\\n\")\n except:\n print(\"failed.\\n\")", "def transcribe_gcs(gcs_uri):\n if not gcs_uri:\n print('You need to specify Google Cloud Storage URL for your audio file')\n return\n print('Transcribe', gcs_uri)\n audio_file_name = gcs_uri.split('/')[-1]\n if not audio_file_name:\n audio_file_name = 'out'\n audio_file_name = audio_file_name.replace('.flac', '')\n client = speech.SpeechClient()\n\n audio = types.RecognitionAudio(uri=gcs_uri)\n config = types.RecognitionConfig(\n encoding=enums.RecognitionConfig.AudioEncoding.FLAC,\n sample_rate_hertz=16000,\n language_code='en-US')\n\n operation = client.long_running_recognize(config, audio)\n\n print('Waiting for operation to complete...')\n response = operation.result(timeout=36000)\n\n # Each result is for a consecutive portion of the audio. 
Iterate through\n # them to get the transcripts for the entire audio file.\n # save_pickle(response.results, './results/' + audio_file_name + '.pkl')\n lines = []\n for result in response.results:\n # The first alternative is the most likely one for this portion.\n lines.append(result.alternatives[0].transcript)\n print(u'Transcript: {}'.format(lines[-1]))\n print('Confidence: {}'.format(result.alternatives[0].confidence))\n\n if lines:\n fout_file = './results/' + audio_file_name + '.txt'\n print('Write', fout_file)\n with open(fout_file, 'w') as fout:\n fout.write('\\n'.join(lines))", "def run(self):\n self.start_recording()\n time.sleep(self.duration)\n self.stop_recording()\n self.start_playback()\n time.sleep(self.duration)\n self.stop_playback()\n self.free()", "def upload_youtube(msg):\n url = \"https://www.youtube.com/watch?v=%s\" % msg['yt_videoid']\n fname = \"%s/download_file/%s.mp4\" % (TOP_LEVEL_DIR, randint(1, 1000000000))\n cmd = \"/home/hadn/py4code/bin/youtube-dl -o %s %s\" % (fname, url)\n cmd = shlex.split(cmd)\n up = Popen(cmd, stdout=PIPE)\n temp = up.communicate()\n\n cmd_upload = \"/home/hadn/py4code/bin/python %s/flask_app/crawler/upload_youtube.py --file %s --title '%s'\" % (\n TOP_LEVEL_DIR, fname, msg['title'])\n cmd_upload = shlex.split(cmd_upload)\n up_youtube = Popen(cmd_upload, stdout=PIPE)\n temp_upload = up_youtube.communicate()\n\n print(temp_upload)\n return msg", "def transcribe_audio_files(model, input_dir, output_dir, tmp_dir=None,\n poll_wait=1.0, continuous=False, use_watchdog=False, watchdog_check_interval=10.0,\n delete_input=False, beam_width=None, scorer=None, lm_alpha=None, lm_beta=None,\n hot_words=None, candidate_transcripts=None, verbose=False, quiet=False):\n\n if verbose:\n print(\"Loading model: %s\" % model)\n ds = load_model(model, beam_width=beam_width, scorer=scorer, lm_alpha=lm_alpha, lm_beta=lm_beta, hot_words=hot_words)\n\n poller = Poller()\n poller.input_dir = input_dir\n poller.output_dir = output_dir\n poller.tmp_dir = tmp_dir\n poller.extensions = SUPPORTED_EXTS\n poller.delete_input = delete_input\n poller.verbose = verbose\n poller.progress = not quiet\n poller.check_file = None\n poller.process_file = process_audio\n poller.poll_wait = poll_wait\n poller.continuous = continuous\n poller.use_watchdog = use_watchdog\n poller.watchdog_check_interval = watchdog_check_interval\n poller.params.model = ds\n poller.params.candidate_transcripts = candidate_transcripts\n poller.poll()", "def make_movie(processed_files_directory='files/', WITH_SUBTITLES=False, WITH_AUDIO=False):\r\n # Declare the text for sub-titles\r\n\r\n if WITH_SUBTITLES: # if the user is willing to have subtitles in the movie\r\n with open(processed_files_directory+'subtitles.txt', 'r', encoding='utf8') as f:\r\n txt = f.read() # read the subtitles file\r\n # Split text to lines.\r\n subtitles = txt.split('\\n')\r\n # Declare VideoFileClip from the movie that I already have.\r\n clip = VideoFileClip(processed_files_directory + \"initial.avi\")\r\n # Declare duration of one sub-title as total duration of the video divided by number of lines.\r\n duration = clip.duration/len(subtitles)\r\n # Set start to zero.\r\n start=0\r\n # Set container for the clips.\r\n videos=[]\r\n # Loop all sub-titles\r\n for line in subtitles:\r\n # Make text clip from the reversed Hebrew text\r\n txt_clip = TextClip(line[::-1], fontsize=30, color='yellow', font='Calibri')\r\n # Set position to the bottom of screen.\r\n txt_clip = 
txt_clip.set_position('bottom').set_duration(duration)\r\n # Make sub clip of the movie with same duration as text clip.\r\n sub_clip = clip.subclip(start,start+duration)\r\n # Set CompositeVideoClip from the text clip and sub clip.\r\n video = CompositeVideoClip([sub_clip, txt_clip])\r\n # Insert the video to the clips container\r\n videos.append(video)\r\n # Set start time for next sub-title.\r\n start+=duration\r\n # Concatenate all clips of the container.\r\n res = concatenate_videoclips(videos)\r\n clip = res # now the clip is res\r\n else:\r\n clip = VideoFileClip(processed_files_directory+ \"initial.avi\") # the clip won't have subtitles\r\n\r\n\r\n # Set audio clip from mp3 file.\r\n if WITH_AUDIO: # if the user has chosen to include soundtrack in the movie\r\n f = 'audio.mp3' # change to mp3 soundtrack file of the movie\r\n # set the duration of the audioclip to max(duration of clip), even if the audioclip is longer\r\n audioclip = AudioFileClip(processed_files_directory+f)\r\n\r\n # check if the clip length is bigger than the\r\n if clip.duration > audioclip.duration:\r\n number_of_duplicated = int(np.ceil(clip.duration/audioclip.duration))\r\n # duplicate the audioclip in order to later fit the movie's duration\r\n audioclip = concatenate_audioclips([AudioFileClip(processed_files_directory+f) for i in range(number_of_duplicated)])\r\n\r\n # Now fit the audioclip duration to the movie's\r\n audioclip = audioclip.set_duration(clip.duration)\r\n\r\n # Set audio for the container.\r\n if not WITH_SUBTITLES: # if the user wanted to have audio included without subtitles\r\n videoclip = clip.set_audio(audioclip)\r\n else: # if the user wanted to have both audio and subtitles\r\n videoclip = res.set_audio(audioclip)\r\n else:\r\n videoclip = clip # if the user didn't want audio in the movie\r\n\r\n # Write the video file.\r\n f = 'final_movie.mp4' # change to the desired movie filename\r\n videoclip.write_videofile(processed_files_directory+f)", "def speak(audio):\n engine.say(audio)\n engine.runAndWait()", "def speak(audio):\n engine.say(audio)\n engine.runAndWait()", "def gcloud_transcribe_short(config):\n try:\n audio_data = config.pop('audio_data')\n except KeyError:\n raise KeyError(\"`audio_data` not specified for transcription operation.\")\n\n # Read file into memory before uploading\n audio = types.RecognitionAudio(content=audio_data)\n \n # Detects speech in the audio file\n return gce_speech_client.recognize(config, audio)", "def setup_audio(self):\n\t\t\n\t\tpath_to_file = '/var/lib/snips/skills/snips_app_pilldispenser/settings/setup_audio.sh'\n\t\tsubprocess.call([path_to_file])", "def synthesize_text(text, output_filename, output_dir, voice=None):\n from google.cloud import texttospeech_v1beta1 as texttospeech\n client = texttospeech.TextToSpeechClient()\n\n input_text = texttospeech.types.SynthesisInput(text=text)\n\n genders = (texttospeech.enums.SsmlVoiceGender.FEMALE, texttospeech.enums.SsmlVoiceGender.MALE)\n if not voice:\n gender = genders[random.randrange(0, 2)]\n else:\n gender = genders[voice]\n\n # Note: the voice can also be specified by name.\n # Names of voices can be retrieved with client.list_voices().\n voice = texttospeech.types.VoiceSelectionParams(\n language_code='en-US',\n ssml_gender=gender)\n\n audio_config = texttospeech.types.AudioConfig(\n audio_encoding=texttospeech.enums.AudioEncoding.MP3)\n\n response = client.synthesize_speech(input_text, voice, audio_config)\n\n # The response's audio_content is binary.\n mp3_filepath = 
os.path.join(output_dir, \"%s.mp3\" % output_filename)\n with open(mp3_filepath, 'wb') as out:\n out.write(response.audio_content)\n print('Audio content written to file %s' % mp3_filepath)\n \n wav_name = os.path.join(output_dir, \"%s.wav\" % output_filename)\n print('Audio content re-written to file %s' % wav_name)\n os.system(\"mpg321 -w %s %s\" % (wav_name, mp3_filepath))\n print('Deleting mp3')\n os.remove(mp3_filepath)", "def stt_google_wav(audio_fname):\n\n print \"Sending \", audio_fname\n #Convert to flac first\n filename = audio_fname\n del_flac = False\n if 'flac' not in filename:\n del_flac = True\n print \"Converting to flac\"\n print FLAC_CONV + filename\n os.system(FLAC_CONV + ' ' + filename)\n filename = filename.split('.')[0] + '.flac'\n\n f = open(filename, 'rb')\n flac_cont = f.read()\n f.close()\n \n req = urllib2.Request(GOOGLE_SPEECH_URL, data=flac_cont, headers={'Content-type': 'audio/x-flac; rate=44100;'})\n\n try:\n\tret = urllib2.urlopen(req)\n except urllib2.URLError:\n print \"Error Transcribing Voicemail\"\n sys.exit(1)\n\n responses=[]\n responses = ret.read()\n #print responses\n text = json.loads(json.dumps(responses))\n\n if del_flac:\n os.remove(filename) # Remove temp file\n\n return text", "async def play(client, message, voicePlayerList):\r\n #Check to ensure that a valid voice client was passed\r\n voiceConnectionExists = False\r\n voice = None\r\n for connection in client.voice_clients:\r\n if connection.server == message.server:\r\n voiceConnectionExists = True\r\n voice = connection\r\n # If there is a valid voice client, try to create an audio player\r\n if voiceConnectionExists:\r\n #Create a filepath with the users input\r\n playFilePath = 'audio/'\r\n messageContentList = message.content.split(' ')\r\n if not len(messageContentList) > 1:\r\n # This is not a valid command, notify the user\r\n playError = 'I\\'m not sure what you want me to do. 
'\r\n playError += 'Please use the format:\\n```\\n'\r\n playError += 'play {songtitle}\\n```'\r\n await client.send_message(message.channel, playError)\r\n return\r\n playFilePath += messageContentList[1] # Index 1 contains the song\r\n playFilePath += '.mp3'\r\n #Check if the file exists\r\n if os.path.isfile(playFilePath):\r\n #Create a list to be appended to the queue\r\n #List will contain ['local', local_mp3_id]\r\n #Will be used by songFinished to identify the type of player needed\r\n playerListAppend = []\r\n playerListAppend.append('local')\r\n playerListAppend.append(playFilePath)\r\n voicePlayerList.append(playerListAppend)\r\n if len(voicePlayerList) == 1:\r\n #There is nothing currently playing\r\n #Display a currently playing message first\r\n nowPlaying = 'Now Playing:```prolog\\n'\r\n nowPlaying += playFilePath\r\n nowPlaying += '\\n```'\r\n await client.send_message(message.channel, nowPlaying)\r\n #Start a new player\r\n mp3Player = voice.create_ffmpeg_player(playFilePath,\r\n options='-loglevel panic -hide_banner',\t\t\t\t\t\t\t\t after=lambda: songFinished(client, message, voice, voicePlayerList))\r\n #Before starting it, replace the 0 index of the queue\r\n #With the player so it can be stopped if needed\r\n voicePlayerList[0] = mp3Player\r\n mp3Player.start()\r\n else:\r\n #This has been added the queue\r\n #Send a notification\r\n notification = 'I\\'ve added '\r\n notification += playFilePath\r\n notification += ' to the queue!'\r\n await client.send_message(message.channel, notification)\r\n\r\n else: \r\n #No file was found, notify the user\r\n playError = 'I can\\'t find a song with the name \\''\r\n playError += messageContentList[1]\r\n playError += '\\'!'\r\n await client.send_message(message.channel, playError)\r\n return\r\n else:\r\n playError = 'I have to be connected to a voice channel to do that!\\n'\r\n playError += 'Use the \\'connect\\' command to summon me!'\r\n await client.send_message(message.channel, playError)\r\n return", "def play(song):\n # Show the metadata\n if (verbose==True):\n for s in song.keys():\n print s, \":\", \n print song[s]\n else:\n print \"Title:\", song[\"title\"]\n print \"Artisit:\", song[\"artist\"]\n print \"Album:\", song[\"albumtitle\"]\n print \"Year\", song[\"public_time\"]\n print \"Company:\", song[\"company\"]\n print \"Length\", song[\"length\"]\n print \"Playing...\"\n mp3_url = song[\"url\"]\n song_length = song[\"length\"]\n p = subprocess.Popen([\"mplayer\", \"-msglevel\", \"all=0\", mp3_url])\n\n # At the same time, download the song:\n u = urllib2.urlopen(mp3_url)\n local_mp3 = open(song[\"title\"] + \"-\" + song[\"artist\"] + \".mp3\", \"w\")\n local_mp3.write(u.read())\n local_mp3.close()\n # time.sleep(song_length)\n i = 0\n while(True):\n time.sleep(1)\n i += 1\n if i == song_length:\n # Kill the process when the song is finished.\n p.terminate()\n print \"#\" * 80\n break", "def play_audio_file(self, fname=DETECT_DONG):\n ding_wav = wave.open(fname, 'rb')\n ding_data = ding_wav.readframes(ding_wav.getnframes())\n # with no_alsa_error():\n audio = pyaudio.PyAudio()\n stream_out = audio.open(\n format=audio.get_format_from_width(ding_wav.getsampwidth()),\n channels=ding_wav.getnchannels(),\n rate=ding_wav.getframerate(), input=False, output=True)\n stream_out.start_stream()\n stream_out.write(ding_data)\n time.sleep(0.2)\n stream_out.stop_stream()\n stream_out.close()\n audio.terminate()", "def prototype_asr2():\n if request.form['ta3'] == 'start/stop_button2':\n if not record[0]:\n\n timings['S2'] = 
time.time()\n\n record[0] = True\n\n # Create an instance of PyAudio\n p = pyaudio.PyAudio()\n\n stream = p.open(format=pyaudio.paInt16,\n channels=2,\n rate=44100,\n input=True,\n frames_per_buffer=1024)\n\n i = int(44100 / 1024 * 300) # Arbitrary duration set at 300 seconds\n while i > 0:\n data = stream.read(1024)\n frames.append(data)\n i -= 1\n if not record[0]:\n break\n\n stream.stop_stream()\n stream.close()\n p.terminate()\n\n elapsed_time = round(time.time() - timings['S2'], 2)\n timings['S2'] = str(elapsed_time) + ' seconds'\n\n return render_template('Prototype2.html', utterance3=display_string(3))\n else:\n stop_recording_asr2()\n return render_template('Prototype2.html', utterance3=display_string(3))\n elif request.form['ta3'] == 'clear_button2':\n clear_text_area(3)\n return render_template('Prototype2.html', utterance3=display_string(3))\n elif request.form['ta3'] == 'play_button1':\n th = Thread(target=play_audio, args=('Test1.wav',))\n th.start()\n return render_template('Prototype2.html', utterance3=display_string(3),\n audio3='Playing file 1...')\n elif request.form['ta3'] == 'play_button2':\n th = Thread(target=play_audio, args=('Test1.wav',))\n th.start()\n return render_template('Prototype2.html', utterance3=display_string(3),\n audio3='Playing file 2...')\n elif request.form['ta3'] == 'asr_button1':\n utterance = 'You said: ' + rsp1('Test1.wav')\n store_utterance(3, utterance)\n # elapsed_time = round(time.time() - start, 2)\n # timings['S2'] = str(elapsed_time) + ' seconds'\n return render_template('Prototype2.html', utterance3=display_string(3), asr3='Recognised file 1')\n elif request.form['ta3'] == 'asr_button2':\n utterance = 'You said: ' + rsp1('Test1.wav')\n store_utterance(3, utterance)\n # elapsed_time = round(time.time() - start, 2)\n # timings['P1'] = str(elapsed_time) + ' seconds'\n return render_template('Prototype2.html', utterance3=display_string(3), asr3='Recognised file 2')", "def text_to_mp3(client: texttospeech.TextToSpeechClient,\n voice: texttospeech.VoiceSelectionParams,\n audio_config: texttospeech.AudioConfig,\n text: str,\n output_file_path: Path) -> None:\n lines = text.splitlines()\n\n logger.info(f'Synthesising {len(lines)} lines ...')\n\n output_file_log = output_file_path.parent / (output_file_path.stem + '_log.json')\n\n with output_file_path.open(mode='wb') as output_file:\n for (i, text_chunk) in enumerate(lines):\n # skip empty lines\n if len(text_chunk) > 0:\n input_text = texttospeech.SynthesisInput(text=text_chunk)\n try:\n logger.info(f'Synthesising speech for chunk `{i}`, size: `{len(text_chunk)}`')\n response = client.synthesize_speech(input=input_text, voice=voice, audio_config=audio_config)\n except Exception as e:\n # If a line could not be synthesised properly, return it along with the error message\n # It is possible that textract could not extract the text properly.\n logger.error(f'Speech synthesising failed! 
Chunk text: `{input_text}`\\nError: {e}\\n')\n _error_log = {\n 'chunk_number': i,\n 'chunk_length': len(text_chunk),\n 'chunk_text': str(text_chunk),\n 'Error message': traceback.format_exc()\n }\n with open(f'{output_file_log}', 'w') as log_out:\n json.dump(_error_log, log_out)\n continue\n output_file.write(response.audio_content)\n logger.info(f'Audio content written to `{output_file_path}`!')\n\n logger.info(f'Output saved to `{output_file_path}`')\n logger.info(f'logs at `{output_file_log}`')", "def _download(song, filename, url=None, audio=False, allow_transcode=True):\n # pylint: disable=R0914\n # too many local variables\n # Instance of 'bool' has no 'url' member (some types not inferable)\n\n if not url:\n stream = streams.select(streams.get(song), audio=audio, m4a_ok=True)\n url = stream['url']\n\n # if an external download command is set, use it\n if Config.DOWNLOAD_COMMAND.get:\n title = c.y + os.path.splitext(os.path.basename(filename))[0] + c.w\n xprint(\"Downloading %s using custom command\" % title)\n external_download(song, filename, url)\n return None\n\n if not Config.OVERWRITE.get:\n if os.path.exists(filename):\n xprint(\"File exists. Skipping %s%s%s ..\\n\" % (c.r, filename, c.w))\n time.sleep(0.2)\n return filename\n\n xprint(\"Downloading to %s%s%s ..\" % (c.r, filename, c.w))\n status_string = (' {0}{1:,}{2} Bytes [{0}{3:.2%}{2}] received. Rate: '\n '[{0}{4:4.0f} kbps{2}]. ETA: [{0}{5:.0f} secs{2}]')\n\n resp = urlopen(url)\n total = int(resp.info()['Content-Length'].strip())\n chunksize, bytesdone, t0 = 16384, 0, time.time()\n outfh = open(filename, 'wb')\n\n while True:\n chunk = resp.read(chunksize)\n outfh.write(chunk)\n elapsed = time.time() - t0\n bytesdone += len(chunk)\n rate = (bytesdone / 1024) / elapsed\n eta = (total - bytesdone) / (rate * 1024)\n stats = (c.y, bytesdone, c.w, bytesdone * 1.0 / total, rate, eta)\n\n if not chunk:\n outfh.close()\n break\n\n status = status_string.format(*stats)\n sys.stdout.write(\"\\r\" + status + ' ' * 4 + \"\\r\")\n sys.stdout.flush()\n\n active_encoder = g.encoders[Config.ENCODER.get]\n ext = filename.split(\".\")[-1]\n valid_ext = ext in active_encoder['valid'].split(\",\")\n\n if audio and g.muxapp:\n remux_audio(filename, song.title)\n\n if Config.ENCODER.get != 0 and valid_ext and allow_transcode:\n filename = transcode(filename, active_encoder)\n\n return filename", "def create_movie():\r\n st.title(\"Create The Movie\")\r\n files = sort_files('files/')\r\n total_frames = np.sum([True if file.split(\".\")[-1]=='jpg' else False for file in files])\r\n st.sidebar.write(\"Total frames detected: %s\"%total_frames)\r\n\r\n with_subtitles = st.checkbox(\"Enable Subtitles\")\r\n subtitles_selected = False\r\n\r\n # If the user wants to include subtitles in the movie\r\n if with_subtitles:\r\n st.write(\"---------\")\r\n # Allow him to choose to create subtitles or import them\r\n option = st.radio(\"\",[\"Write your subtitles\",\"Upload subtitles\"])\r\n if option == \"Upload subtitles\":\r\n # Upload txt file\r\n txt = st.file_uploader(\"Upload\", type=[\"txt\"])\r\n if txt:\r\n subtitles = txt.read()\r\n if subtitles: # if it was read successfully\r\n subtitles_selected = True # subtitles loaded successfully\r\n st.success(\"Subtitles were loaded successfully\")\r\n elif option == \"Write your subtitles\":\r\n st.write(\"Please write your wanted subtitles in עברית only.\")\r\n st.write(\"Separate each line by pressing Enter.\")\r\n subtitles = st.text_area(\"Write the subtitles here:\")\r\n if subtitles:\r\n # 
Write the subtitles hard-coded to the file subtitles.txt\r\n with open('files/subtitles.txt','w', encoding='utf-8') as f: #\r\n f.write(str(subtitles))\r\n subtitles_selected = True # subtitles loaded successfully\r\n st.success(\"Subtitles were loaded successfully\")\r\n st.write(\"---------\")\r\n\r\n # If the user has chosen to add audio to the movie\r\n with_audio = st.checkbox(\"Enable Audio\")\r\n audio_selected = False\r\n\r\n if with_audio:\r\n st.write(\"---------\")\r\n # Allow him to choose to extract audio from youtube video or to upload a mp3 file\r\n select_action = st.radio(\"Choose one of the following options:\",[\"Upload mp3\",\"Extract from Youtube video\"])\r\n if select_action == \"Upload mp3\":\r\n # Receive the mp3 file from the user\r\n audio_file = st.file_uploader(\"Upload\", type=[\"mp3\"])\r\n if audio_file:\r\n with open('files/audio.mp3','wb') as f:\r\n f.write(audio_file.getbuffer()) # write it in the processed files directory\r\n audio_selected = True # Audio imported successfully\r\n st.success(\"Audio was imported successfully\")\r\n elif select_action == \"Extract from Youtube video\":\r\n # Receive youtube link from the user\r\n youtube_link = st.text_input(\"Youtube link:\")\r\n process_button = st.button(\"Process\")\r\n if process_button or youtube_link:\r\n audio_selected = True\r\n if process_button:\r\n # check if the youtube link is correct\r\n if youtube_link[:32] == 'https://www.youtube.com/watch?v=' and len(youtube_link)==43:\r\n download_audio_from_youtube(youtube_link)\r\n audio_selected = True\r\n else: # if it's not correct, let the user know\r\n st.error(\"Invalid youtube link, please try again\")\r\n st.write(\"---------\")\r\n\r\n # Allow the user to choose the frame rate\r\n fps = st.slider(\"Frames per second:\",0.5,20.0,3.0,0.5)\r\n\r\n if st.button(\"Start!\"):\r\n if total_frames > 0:\r\n # if subtitles or audio are selected, ensure we have the info for processing them\r\n if with_audio and not audio_selected:\r\n st.error(\"You have chosen to include audio, but haven't included any. Please try again\")\r\n st.stop()\r\n if with_subtitles and not subtitles_selected:\r\n st.error(\"You have chosen to include subtitles, but haven't included any. Please try again.\")\r\n st.stop()\r\n with st.spinner(\"Creating raw movie...\"):\r\n make_seret(fps=fps)\r\n st.success(\"Raw movie created successfully!\")\r\n with st.spinner(\"Creating final movie...\"):\r\n make_movie(WITH_SUBTITLES=with_subtitles,WITH_AUDIO=with_audio)\r\n st.success(\"The movie has been created successfully!\")\r\n else: # no frames were detected!\r\n st.warning(\"0 Frames were detected. 
Please process some pictures before using this screen!\")", "def remux_audio(filename, title):\n dbg(\"starting remux\")\n temp_file = filename + \".\" + str(random.randint(10000, 99999))\n os.rename(filename, temp_file)\n meta = extract_metadata(title)\n metadata = [\"title=%s\" % meta[\"title\"]]\n\n if meta[\"artist\"]:\n metadata = [\"title=%s\" % meta[\"title\"], \"-metadata\",\n \"artist=%s\" % meta[\"artist\"]]\n\n cmd = [g.muxapp, \"-y\", \"-i\", temp_file, \"-acodec\", \"copy\", \"-metadata\"]\n cmd += metadata + [\"-vn\", filename]\n dbg(cmd)\n\n try:\n with open(os.devnull, \"w\") as devnull:\n subprocess.call(cmd, stdout=devnull, stderr=subprocess.STDOUT)\n\n except OSError:\n dbg(\"Failed to remux audio using %s\", g.muxapp)\n os.rename(temp_file, filename)\n\n else:\n os.unlink(temp_file)\n dbg(\"remuxed audio file using %s\" % g.muxapp)", "def text_to_speech(entry):\n text = entry.get_text()\n if text:\n subprocess.call([\"milena_say\", text])", "def transcribe_audio_to_tsv(input_audio_paths,\n output_tsv_path,\n sample_rate,\n language_code,\n begin_sec=0.0):\n client = speech.SpeechClient()\n config = speech.RecognitionConfig(\n encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16,\n sample_rate_hertz=sample_rate,\n audio_channel_count=1,\n language_code=language_code)\n streaming_config = speech.StreamingRecognitionConfig(\n config=config, interim_results=False)\n requests = audio_data_generator(input_audio_paths, config)\n responses = client.streaming_recognize(streaming_config, requests)\n\n with open(output_tsv_path, \"w\" if not begin_sec else \"a\") as f:\n if not begin_sec:\n # Write the TSV header.\n f.write(tsv_data.HEADER + \"\\n\")\n\n for response in responses:\n if not response.results:\n continue\n results = [result for result in response.results if result.is_final]\n max_confidence = -1\n best_transcript = None\n result_end_time = None\n for result in results:\n for alt in result.alternatives:\n if alt.confidence > max_confidence:\n max_confidence = alt.confidence\n best_transcript = alt.transcript.strip()\n result_end_time = result.result_end_time\n if not best_transcript:\n continue\n end_time_sec = result_end_time.total_seconds()\n # TODO(cais): The default transcript result doesn't include the start\n # time stamp, so we currently pretend that each recognizer output phrase\n # is exactly 1 second.\n # TODO(cais): Should we use absolute timestamps such as epoch time, instead of\n # time relative to the beginning of the first file?\n start_time_sec = end_time_sec - 1\n line = \"%.3f\\t%.3f\\t%s\\t%s\" % (\n start_time_sec + begin_sec,\n end_time_sec + begin_sec,\n tsv_data.SPEECH_TRANSCRIPT_TIER,\n best_transcript)\n print(line)\n f.write(line + \"\\n\")", "def downloadSong(self, song, outputFolder=\"\", outputFunction=None, writeJSON=True):\n\n \"\"\"\n -x: Extract audio\n --audio-format: sets the audio format from ogg to mp3\n --audio-quality: 0 is best\n --write-info-json: Writes the DASH information for the downloaded video to the filename with .info.json appended\n \"\"\"\n audioOptions = self.audioOptions.copy() # Generate a shallow copy of our object's options\n if writeJSON:\n audioOptions[\"--write-info-json\"] = True\n audioOptions.update(settings[\"youtubeSettings\"])\n\n self.youtubeLock.acquire() # Wait the requisite amount of time\n log.debug(\"Downloading Song '{}'\".format(song))\n obj = subprocess.Popen(\n [settings[\"youtube_dl\"]] + # Executable\n self.flattenDict(audioOptions) + #Turn the dict items into a list where key is before 
value. Bools are special. If false, not added, otherwise only key\n [\"-o\", os.path.join(outputFolder, settings[\"formatString\"])] + #Output format and folder\n [\"--\", song], #Then add song as input\n **settings[\"pipeOptions\"], #Add in subprocess options\n stdout=subprocess.PIPE #Also this for now\n )\n \n outputText = \"\"\n for line in obj.stdout:\n outputText += line\n match = re.match(r\"\\[download\\]\\s+([\\d.]+)% of \\S+ at\\s+([\\d.]+\\S+)\", line) #Matches the download update lines\n if match:\n percent, downloadRate = match.group(1, 2)\n if callable(outputFunction):\n outputFunction(song, float(percent), downloadRate) #Update this if we have items\n return obj.wait(), outputText # Wait for process to complete and get return code. Also return the whole output printed to stdout", "def play(filename):\n if sys.platform == \"win32\":\n os.startfile(filename)\n else:\n opener =\"open\" if sys.platform == \"darwin\" else \"xdg-open\"\n subprocess.call([opener, filename])", "async def yt(self, ctx, *, url):\r\n if ctx.message.channel.id != 701868237771505695:\r\n return await ctx.send(\"**Error:** Music Bot commands are only available in <#701868237771505695>\")\r\n if self.music_off:\r\n return await ctx.send(\"**Error:** Music Bot features are currently off\")\r\n if ctx.voice_client is None:\r\n return await ctx.send(\"**Error:** The bot is not connected to the voice channel you are in\")\r\n if ctx.voice_client.channel is not ctx.message.author.voice.channel:\r\n return await ctx.send(\"**Error:** You must be connected to the voice channel.\")\r\n if len(self.songs) + self.processing_songs >= 30:\r\n return await ctx.send(\"**Error:** There can only be a maximum of 30 items in the queue\")\r\n if self.processing_songs >= 3:\r\n return await ctx.send(\"**Error:** Please wait until some of the other songs are finished processing\")\r\n\r\n self.played_time = time.time()\r\n self.voice = ctx.voice_client\r\n self.voice_channel = ctx.message.channel\r\n async with ctx.message.channel.typing():\r\n self.processing_songs = self.processing_songs + 1\r\n try:\r\n player = await YTDLSource.from_url(url, loop=self.bot.loop)\r\n except:\r\n self.processing_songs = self.processing_songs - 1\r\n return await ctx.send(\"Error processing song. 
Invalid URL or no matching videos using that search term\")\r\n if player is None:\r\n self.processing_songs = self.processing_songs - 1\r\n return await ctx.send(\"**Error:** Song file too large!\")\r\n self.processing_songs = self.processing_songs - 1\r\n self.songs.append([player, ctx.message.author.id, ctx.voice_client, ctx.message.channel, []])\r\n\r\n shortened_title = self.title_shorten(player.title)\r\n await ctx.send('Queued: `{}`'.format(shortened_title))", "def transcribe_audio_to_tsv_with_diarization(input_audio_paths,\n output_tsv_path,\n sample_rate,\n language_code,\n speaker_count,\n begin_sec=0.0):\n client = speech.SpeechClient()\n enable_speaker_diarization = speaker_count > 0\n config = speech.RecognitionConfig(\n encoding=speech.RecognitionConfig.AudioEncoding.LINEAR16,\n sample_rate_hertz=sample_rate,\n audio_channel_count=1,\n enable_separate_recognition_per_channel=False,\n language_code=language_code,\n enable_speaker_diarization=enable_speaker_diarization,\n diarization_speaker_count=speaker_count)\n streaming_config = speech.StreamingRecognitionConfig(\n config=config, interim_results=False)\n requests = audio_data_generator(input_audio_paths, config)\n responses = client.streaming_recognize(streaming_config, requests)\n\n with open(output_tsv_path, \"w\" if not begin_sec else \"a\") as f:\n if not begin_sec:\n # Write the TSV header.\n f.write(tsv_data.HEADER + \"\\n\")\n utterances = []\n for response in responses:\n if not response.results:\n continue\n results = [result for result in response.results if result.is_final]\n max_confidence = -1\n best_transcript = None\n result_end_time = None\n for result in results:\n for alt in result.alternatives:\n if alt.confidence > max_confidence:\n max_confidence = alt.confidence\n best_transcript = alt.transcript.strip()\n diarized_words = [(\n word.word, word.speaker_tag, word.start_time.total_seconds(),\n word.end_time.total_seconds()) for word in alt.words]\n result_end_time = result.result_end_time\n if not best_transcript:\n continue\n end_time_sec = result_end_time.total_seconds()\n utterances.append(best_transcript)\n\n regrouped_utterances = regroup_utterances(utterances, diarized_words)\n utterance_counter = 0\n for (regrouped_utterance,\n speaker_index, start_time_sec, end_time_sec) in regrouped_utterances:\n utterance_counter += 1\n line = \"%.3f\\t%.3f\\t%s\\t%s [U%d] [Speaker #%d]\" % (\n start_time_sec + begin_sec,\n end_time_sec + begin_sec,\n tsv_data.SPEECH_TRANSCRIPT_TIER,\n regrouped_utterance,\n utterance_counter,\n speaker_index)\n print(line)\n f.write(line + \"\\n\")", "def transcribe_file(speech_file):\n client = speech.SpeechClient()\n prediccion = \" \"\n with io.open(speech_file, 'rb') as audio_file:\n content = audio_file.read()\n\n audio = types.RecognitionAudio(content=content)\n config = types.RecognitionConfig(\n encoding=enums.RecognitionConfig.AudioEncoding.LINEAR16,\n sample_rate_hertz=44100,\n language_code='en-US')\n\n response = client.recognize(config, audio)\n # Each result is for a consecutive portion of the audio. Iterate through\n # them to get the transcripts for the entire audio file.\n for result in response.results:\n # The first alternative is the most likely one for this portion.\n prediccion = prediccion + \" \" + result.alternatives[0].transcript\n return prediccion", "def copy_file_to_server():\r\n utils.system_output('mv /home/chronos/user/Downloads/* /usr/local/autotest/results/default/',ignore_status=True)\r\n logging.info(\"Video Copied to Log location\")" ]
[ "0.66069883", "0.6560382", "0.6439472", "0.626135", "0.61909765", "0.6083509", "0.6073147", "0.5968924", "0.5962577", "0.5942388", "0.58873165", "0.58815217", "0.5879562", "0.5855016", "0.5853225", "0.5847647", "0.58247757", "0.5802723", "0.5779541", "0.5740425", "0.57340455", "0.57230544", "0.5722402", "0.57103133", "0.5694708", "0.5692375", "0.5677796", "0.56541234", "0.56260026", "0.56227624", "0.56178206", "0.56134844", "0.56080973", "0.56080234", "0.5590664", "0.5576966", "0.5551137", "0.55406153", "0.5530741", "0.5527357", "0.5516737", "0.5508744", "0.5489633", "0.5488604", "0.5469471", "0.54563653", "0.5449022", "0.54385304", "0.5430804", "0.54303306", "0.54181975", "0.5416227", "0.5414287", "0.54057074", "0.5395867", "0.53913724", "0.5368805", "0.53642756", "0.53607273", "0.5357918", "0.5355665", "0.53526753", "0.5350884", "0.5330155", "0.5322208", "0.5313824", "0.530532", "0.52995235", "0.5288137", "0.5287488", "0.5276053", "0.52652246", "0.5264946", "0.52635646", "0.5257746", "0.5256551", "0.5254999", "0.52534395", "0.5243568", "0.5243568", "0.524062", "0.5236136", "0.52142596", "0.52050215", "0.5198045", "0.51975495", "0.5194525", "0.5194316", "0.5179324", "0.5178896", "0.51743823", "0.5168506", "0.5166242", "0.5160829", "0.5158854", "0.5153601", "0.5151691", "0.51497644", "0.51486987", "0.5145815" ]
0.7504941
0
HomePage > The homepage of the podcast (NOT NULL)\n Name > The name of the podcast (NOT NULL)\n Description > A short description of the podcast\n Category > The category of the podcast\n Source > The service through which the podcast is being accessed\n ImageURI > Podcast cover art\n Web > The website of the podcaster\n Twitter > The Twitter account of the podcaster\n Facebook > The Facebook account of the podcaster\n LastUpdated > The date this entry was last updated\n RSS > The URL of the podcast's RSS feed\n If you don't have a value for a certain field, just pass it in as an empty string
def uploadPodcast(dbConnection, homepage, name, description, category, source, imageurl, web, twitter, facebook, rss):
    try:
        cursor = dbConnection.cursor()
        # Double any single quotes in the free-text fields
        name = name.replace("'", "''")
        description = description.replace("'", "''")
        cursor.execute("""INSERT INTO podcasts(homepage, name, description, category, source, imageuri, web, twitter, Facebook, rss)
                          VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);""",
                       (homepage, name, description, category, source, imageurl, web, twitter, facebook, rss))
        dbConnection.commit()
        cursor.close()
        return True
    except Exception as e:
        # Log the actual exception against the correct function name
        Tools.writeException("uploadPodcast", e)
        return False
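A minimal usage sketch for the function above. The psycopg2 connection parameters and all field values are illustrative placeholders, not values from the source; it assumes uploadPodcast and its Tools helper are already importable in the caller's scope.

import psycopg2

# Hypothetical connection details - replace with real credentials.
conn = psycopg2.connect(dbname="podcasts", user="app", password="secret", host="localhost")

ok = uploadPodcast(
    conn,
    "https://example.com/podcast",        # homepage (NOT NULL)
    "Example Podcast",                    # name (NOT NULL)
    "A short description of the show.",   # description
    "Technology",                         # category
    "RSS",                                # source
    "https://example.com/cover.png",      # imageurl
    "https://example.com",                # web
    "",                                   # twitter - unknown fields are passed as empty strings
    "",                                   # facebook
    "https://example.com/feed.xml",       # rss
)
print("inserted" if ok else "insert failed")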
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def homepage():\n return (\n f\"Welcome to Hawaii - Climate Page<br/>\"\n f\"<br/>\"\n f\"This site has data from 01-01-2010 to 08-23-2017<br/>\"\n f\"<br/>\"\n f\"Available Pages:<br/>\"\n f\"<br/>\"\n f\"<br/>\"\n f\" Station Information<br/>\"\n f\" /api/v1.0/stations<br/>\"\n f\"<br/>\"\n f\" Percipitation Information<br/>\"\n f\" /api/v1.0/percipitation<br/>\"\n f\"<br/>\"\n f\" Temperature Observations<br/>\"\n f\" /api/v1.0/tobs<br/>\"\n f\"<br/>\"\n f\" Start Date information - complete url is '/api/v1.0//yyyy-mm-dd'<br/>\"\n f\" /api/v1.0/start<br/>\"\n f\"<br/>\"\n f\" Start and End Date information - complete url is '/api/v1.0/yyyy-mm-dd/yyyy-mm-dd'<br/>\"\n f\" /api/v1.0/start/end\"\n )", "def _parse_entry(self,entry):\n item_meta={'title':entry.title,\n 'description':entry.description,\n 'category':entry.category,\n 'tags':entry.tags,\n 'page_url':entry.url,\n 'lq_url':None,\n 'hq_url':None,\n 'hd_url':None,\n 'search-id':self.search_id,\n 'source':'4',}\n self._logger.debug('Video Metadata: %s',item_meta)\n return item_meta", "def __init__(self):\n self.title_text = ''\n self.meta_text = ''\n self.url_text = ''\n self.heading_text = ['','','','','','']\n self.body_text = ''", "def __init__(self, pubDate, text, link):\n self.pubDate = pubDate\n self.text = text\n self.link = link\n self.guid = link.split('/')[-1:][0]", "def make_entry(line):\n #focus on relevant parts\n parts = line.split(\" - \")\n visitor_id = parts[0]\n subparts = parts[1].split('\"')\n method_and_uri = subparts[1]\n method_and_uri_parts = method_and_uri.split(\" \")\n method = method_and_uri_parts[0]\n uri = method_and_uri_parts[1]\n d = dict()\n d[\"visitor_id\"] = visitor_id\n d[\"method\"] = method\n d[\"uri\"] = uri\n return d", "def home():\n return\"\"\"<!DOCTYPE><html><h1>List of all available Honolulu, HI API routes</h1><ul>\n <li>List of precipitation scores from the last year:<a href=\"/api/v1.0/precipitation\">/api/v1.0/precipitation</a></li>\n <li>List of stations:<a href=\"/api/v1.0/stations\">/api/v1.0/stations</a></li>\n <li>List of temp observations from the last year:<a href=\"/api/v1.0/tobs\">/api/v1.0/tobs</a></li>\n <li>List of minimum, maximum, and average temperatures for the date provided (replace &ltstart&gt with a date in 'yyyy-mm-dd' format: <a href=\"/api/v1.0/<start>\">/api/v1.0/<start></a></li>\n <li>List of minimum, maximum, and average temperatures for the dates in range provided (replace &ltstart&gt and &ltend&gt with dates in 'yyyy-mm-dd' format): <a href=\"/api/v1.0/<start>/<end>\">/api/v1.0/<start>/<end></a></li>\n </ul></html>\"\"\"", "def parse_list_page_html(html):\n episode_list = []\n\n lines = html.split('\\n')\n for line in lines:\n if 'class=\"topictitle\"' in line and ' - ' in line and 'x' in line:\n datum = {}\n query = line.split('/viewtopic.php?f=177&amp;t=')[1].split('&amp;')[0]\n episode_season_str = line.split('class=\"topictitle\">')[1].split(' - ')[0]\n season_str = episode_season_str.split('x')[0]\n episode_str = episode_season_str.split('x')[1]\n datum['query'] = query\n datum['season'] = int(season_str)\n datum['episode'] = int(episode_str)\n episode_list.append(datum)\n\n return episode_list", "def home_page():\n return (\n f\"Available Routes:<br/>\"\n f\"/api/v1.0/precipitation<br/>\"\n f\"/api/v1.0/stations<br/>\"\n f\"/api/v1.0/tobs<br/>\"\n f\"/api/v1.0/start<br/>\"\n f\"/api/v1.0/start/end<br/>\"\n )", "def headline(self):\r\n return '%s%s %s%s' % (BLUE, self.title,\r\n NORMAL, self.link)", "def displayMeta(ctx, data, vid):\n\t\n\ts = 
\"\"\n\ts += \"Title: %s \" % data.getElementsByTagName(\"title\")[0].firstChild.data\n\ts += \" • By: %s\" % data.getElementsByTagName(\"author\")[0].getElementsByTagName(\"name\")[0].firstChild.data\n\n\tshowRest = True\n\n\tr = data.getElementsByTagName(\"yt:state\")\n\tif len(r):\n\t\tr = r[0]\n\t\tif r.getAttribute(\"name\") == \"restricted\":\n\t\t\tshowRest = r.getAttribute(\"reasonCode\") == \"limitedSyndication\"\n\t\t\tif showRest:\n\t\t\t\ts += \" • Syndication Limited.\"\n\t\t\telse:\n\t\t\t\ts += \" • Video is unavailable: %s\" % r.firstChild.data\n\n\tif showRest:\n\t\ts += \" • Length: %s\" % prettyTime(data.getElementsByTagName(\"yt:duration\")[0].getAttribute(\"seconds\"))\n\t\ts += \" • View Count: %s\" % prettyNumber(data.getElementsByTagName(\"yt:statistics\")[0].getAttribute(\"viewCount\"))\n\n\t\tr = data.getElementsByTagName(\"gd:rating\")\n\t\tif len(r):\n\t\t\tr = r[0]\n\t\t\ts += \" • Average Rating: %1.2f/5 over %s people\" % (\n\t\t\t\tfloat(r.getAttribute(\"average\")),\n\t\t\t\tprettyNumber(r.getAttribute(\"numRaters\"))\n\t\t\t\t)\n\t\telse:\n\t\t\ts += \" • No ratings\"\n\t\n\ts += \" • https://youtu.be/%s\" % vid\n\tctx.reply(s, \"YouTube\")", "def get_bare_file(filename):\n \"\"\" for a given entry, finds all of the info we want to display \"\"\"\n f = open(filename, 'r')\n str = f.read()\n str = str.decode('utf-8')\n e = {}\n try: e['title'] = re.search('(?<=title:)(.)*', str).group()\n except: pass\n try: e['slug'] = re.search('(?<=slug:)(.)*', str).group()\n except: pass\n try: e['summary'] = re.search('(?<=summary:)(.)*', str).group()\n except: pass\n try:\n e['content'] =re.search('(?<=content:)((?!category:)(?!published:)(.)|(\\n))*', str).group()\n if e['content'] == None:\n e['content'] = re.search('(?<=content:)((.)|(\\n))*$', str).group()\n except:\n pass\n try:\n e['published'] = re.search('(?<=published:)(.)*', str).group()\n except: pass\n try: e['author'] = re.search('(?<=author:)(.)*', str).group()\n except: pass\n try: e['category'] = re.search('(?<=category:)(.)*', str).group()\n except: pass\n try: e['url'] = re.search('(?<=url:)(.)*', str).group()\n except: pass\n try:\n e['uid'] = re.search('(?<=u-uid:)(.)*', str)\n if e['uid']:\n e['uid'] = e['uid'].group()\n else:\n e['uid'] = re.search('(?<=u-uid)(.)*', str).group()\n except: pass\n try: e['time-zone'] = re.search('(?<=time-zone:)(.)*', str).group()\n except: pass\n try: e['location'] = re.search('(?<=location:)(.)*', str).group()\n except: pass\n try: e['syndication'] = re.search('(?<=syndication:)(.)*', str).group()\n except: pass\n try: e['location_name'] = re.search('(?<=location-name:)(.)*', str).group()\n except: pass\n try: e['in_reply_to'] = re.search('(?<=in-reply-to:)(.)*', str).group()\n except:pass\n return e", "def print_entry(item):\n print('Date: ', item[\"Date\"])\n print('Task: ', item[\"Task\"])\n print('Time Spent: ', item[\"Time\"])\n print('Notes: ', item[\"Notes\"], '\\n')", "def print_feed(list_with_items):\n result_str = list_with_items[0].name + '\\n'\n for item in list_with_items:\n item_as_str = (f'Title: {item.title}\\nLink: {item.link}\\n'\n f'Date: {time.strftime(\"%y-%m-%d %H:%M\", tuple(item.date))}')\n result_str += item_as_str\n result_str += string_handlers.get_str_content(item.content)\n result_str += string_handlers.get_img_as_str(item.img)\n result_str += string_handlers.get_links_as_str(item.links) + '\\n\\n'\n return result_str", "def format_for_website(line):\n\n full_submission = \"Full submission\" in str(\n line.submission_type\n ) or 
\"Full submission\" in str(line.track)\n if full_submission:\n if line.decision == \"Accept (Contributed Talk)\":\n line[\"presentation_type\"] = \"talk\"\n elif line.decision == \"Accept (Poster)\":\n line[\"presentation_type\"] = \"poster\"\n else:\n raise ValueError(line.decision)\n line[\"category\"] = \"full\"\n else:\n line[\"presentation_type\"] = \"poster\"\n line[\"category\"] = \"abstract\"\n\n line[\"authors\"] = line[\"authors\"].replace(\"|\", \", \")\n line[\"formatted\"] = \"{title}.\\n\\t{authors}.\".format(**line)\n return line", "def home():\n return (\n f\"/api/v1.0/precipitation<br/>\"\n f\"/api/v1.0/stations<br/>\"\n f\"/api/v1.0/tobs<br/>\"\n f\"/api/v1.0/<start><br/>\"\n f\"/api/v1.0/<start>/<end>\"\n \n )", "def ATTRIBUTE():\n return \"author\", \"title\", \"publisher\", \"shelf\", \"category\", \"subject\"", "def parse_text(self, page):\n text = page.find(self.tag_prefix + self.revision_tag).find(self.tag_prefix + self.text_tag).text\n title = page.find(self.tag_prefix + self.title_tag).text\n categories = []\n #\n text = self.parse_archivo(text)\n text = self.parse_foto(text)\n text = self.parse_by_line(text)\n text = self.parse_link(text)\n text = self.parse_url(text)\n text = self.parse_fecha(text)\n text = self.parse_bracketed_word(text)\n #\n if text:\n categories = re.findall(self.category_finder_regex, text)\n #\n text = self.parse_category(text)\n text = self.parse_other_language(text)\n text = self.parse_table_regex(text)\n text = self.parse_ver_fuente(text)\n text = self.remove_extra_text(text)\n text = self.remove_extra_characters(text)\n\n categorias = []\n for cat in categories:\n categorias.append(cat[6])\n\n if text:\n if 'REDIRECT' in text or 'redirect' in text:\n return None\n\n return Article(title=title, content=text, categories=categorias)", "def Home():\n return(\n f\"Hawaii Climate Routes:<br/>\"\n f\"/api/v1.0/precipitation<br/>\"\n f\"/api/v1.0/stations<br/>\"\n f\"/api/v1.0/tobs<br/>\"\n f\"/api/v1.0/<start><br/>\"\n f\"and<br/>\"\n f\"/api/v1.0/<start>/<end>\"\n )", "def cmd_devopsme(self, msg, args):\n\n data = htmlparse(\"http://devopsreactions.tumblr.com/random\", \".item_content\")\n\n title = data.find(\".post_title\").find(\"a\").text()\n img = data.find(\"p\").find(\"img\").attr(\"src\")\n\n self.send(msg['from'], img, msg['type'])\n\n return str(title)", "def blosxom_entry_parser(filename, request):\n config = request.getConfiguration()\n\n entryData = {}\n\n try:\n story = open(filename).readlines()\n except IOError:\n raise IOError\n\n if len(story) > 0:\n entryData['title'] = story.pop(0).strip()\n\n # this handles properties of the entry that are between\n # the title and the body and start with a #\n while len(story) > 0:\n match = re.match(r'^#(\\w+)\\s+(.*)', story[0])\n if match:\n story.pop(0)\n entryData[match.groups()[0]] = match.groups()[1].strip()\n else:\n break\n\n # Call the preformat function\n entryData['body'] = tools.run_callback('preformat',\n {'parser': (entryData.get('parser', '') \n or config.get('parser', 'plain')),\n 'story': story,\n 'request': request},\n donefunc = lambda x:x != None,\n defaultfunc = lambda x: ''.join(x['story']))\n\n # Call the postformat callbacks\n tools.run_callback('postformat',\n {'request': request,\n 'entry_data': entryData})\n \n return entryData", "def _parse_result_entry(result):\n entry = ParsedEntry()\n\n if \"content\" in result and len(result.content) > 0:\n entry.content = result.content[0].value\n # if not html, have to escape\n if result.content[0].type not in 
HTML_MIME_TYPES:\n entry.content = cgi.escape(entry.content)\n elif \"summary_detail\" in result:\n entry.content = result.summary_detail.value\n # if not html, have to escape\n if result.summary_detail.type not in HTML_MIME_TYPES:\n entry.content = cgi.escape(entry.content)\n else:\n entry.content = \"\"\n entry.link = result.get(\"link\", None)\n entry.title = result.get(\"title\", None)\n if \"author_detail\" in result and \"name\" in result.author_detail:\n entry.author = result.author_detail.name\n else:\n entry.author = None\n if \"updated_parsed\" in result and result.updated_parsed is not None:\n entry.date = int(calendar.timegm(result.updated_parsed))\n elif \"published_parsed\" in result and result.published_parsed is not None:\n entry.date = int(calendar.timegm(result.published_parsed))\n else:\n entry.date = int(time.time())\n # try to find something to use as GUID, or fall back to static string\n guid_content = result.get(\"id\", entry.title)\n if guid_content is None:\n guid_content = \"None\"\n entry.guid = hashlib.sha1(guid_content.encode('utf-8')).hexdigest()\n return entry", "def __init__(self, title='', link='', desc=''):\n self.__title = title\n self.__link = link\n self.__desc = desc", "def homepage():\n return {'sample': 'ADAL'}", "def met(r):\n image_url = r.get(\"image\")\n if image_url is None:\n if r.get(\"source\") is not None:\n image_url = r.get(\"source\").get(\"href\")\n image_name = r.get(\"name\")\n image_artist = r.get(\"Who\")\n return image_url, image_name, image_artist", "def extractInfo(Link):\r\n response = urlopen(Link)\r\n html = response.read()\r\n #LinkInfo = ds.Links()\r\n #html = refinehtmltags(html)\r\n pagetitle = html[html.find('<title>') + 7 : html.find('</title>')]\r\n startindex = html.find('<meta name=\"description\" content=\"')\r\n desc = html[startindex + 34 : html.find('\"',startindex + 38)]\r\n print pagetitle\r\n print desc\r\n #### Use the links to\r\n #### Extract the information as\r\n #### pagetitle\r\n #### description\r\n #return LinkInfo\r", "def home():\n return (\n f\"Available Routes:<br/>\"\n f\"/api/v1.0/precipitation<br/>\"\n f\"/api/v1.0/stations<br/>\"\n f\"/api/v1.0/tobs<br/>\"\n f\"/api/v1.0/start_date(yyyy-mm-dd)<br/>\"\n f\"/api/v1.0/start_date(yyyy-mm-dd)/end_date(yyyy-mm-dd)<br/>\")", "def create_hn_text(self):\n text_list = [f\"Top {STORIES_NUMBER} from HackerNews:\"]\n sorted_stories = self.get_top_stories()\n # Format slack text\n for story in sorted_stories:\n text_list.append(\n \"*<{}|{}>* - <{}|{}>\".format(\n \"{}/item?id={}\".format(HN_URL, story[\"id\"]),\n story[\"score\"],\n # Ask HN type posts do not have 'url' key, so using get to return None\n story.get('url'),\n story[\"title\"],\n )\n )\n self.logger.debug(text_list)\n return \"\\n>\".join(text_list)", "def parseline(self, line):\n line = line.strip()\n if not line:\n return None, None, line\n elif line[0] == '?':\n line = 'help ' + line[1:]\n elif line[0] == '!':\n if hasattr(self, 'do_shell'):\n line = 'shell ' + line[1:]\n else:\n return None, None, line\n return '', '', line", "def __init__(self, url):\n self.time = datetime.now().time()\n self.url = url\n self.location = url.split(\"/\")[2]\n self.category = url.split(\"/\")[3]\n self.name = url.split(\"/\")[4]\n self.price = \"NULL\"\n self.listDate = \"NULL\"\n self.na = False", "def home():\n return (\n f\"Welcome to the Hawaii Weather API<br/>\"\n \"<br/>\"\n f\"Available Routes:<br/>\"\n f\"/api/v1.0/precipitation<br/>\"\n f\"/api/v1.0/stations<br/>\"\n f\"/api/v1.0/tobs<br/>\"\n 
f\"/api/v1.0/start_date<br/>\"\n f\"/api/v1.0/start_date/end_date<br/>\"\n \"<br/>\"\n f\"Date format: YYYY-MM-DD\"\n )", "def parse_news(news):\n default_value = '---'\n\n news_list = []\n for entry in news:\n title = entry.get('title', default_value)\n link = entry.get('link', default_value)\n published = entry.get('published', default_value)\n source = entry.get('source', default_value)\n description = entry.get('description', default_value)\n media_content = entry.get('media_content', default_value)\n\n source_title = default_value\n if source != default_value:\n source_title = source['title']\n\n image = default_value\n if media_content != image:\n image = media_content[0]['url']\n\n article = Article(title, link, published, source_title, description, image)\n news_list.append(article)\n\n return news_list", "def __init__(self, movie_title, release_date, movie_storyline, poster_image,\n trailer_youtube, more_link):\n\n self.title = movie_title\n self.date = release_date\n self.storyline = movie_storyline\n self.poster_image_url = poster_image\n self.trailer_youtube_url = trailer_youtube\n self.more_url = more_link", "def _parse_title(self, item):\n title = item[\"Title\"]\n return title", "def home():\n return(\n f\"Available Routes:<br/>\"\n f\"Precipitation: /api/v1.0/precipitation<br/>\"\n f\"List of Stations: /api/v1.0/stations<br/>\"\n f\"Temperature for one year: /api/v1.0/tobs<br/>\"\n f\"Temperature stat from the start date(yyyy-mm-dd): /api/v1.0/min_max_avg/<start><br/>\"\n f\"Temperature stat from start to end dates(yyyy-mm-dd): /api/v1.0/min_max_avg/<start><br/>\"\n )", "def home():\r\n return (\r\n \"<h1><center>WELCOME TO SURF'S UP!</center></h1><br/>\"\r\n \"<h2><center>Please plug in the browser any of the available routes:</h2></center><br/>\"\r\n \"<h3><center>/api/v1.0/precipitation</h3></center><br/>\"\r\n \"<h3><center>/api/v1.0/stations</h3></center><br/>\"\r\n \"<h3><center>/api/v1.0/tobs</h3></center><br/>\"\r\n \"<h3><center>/api/v1.0/<start></h3></center>\"\r\n \"<center>Note: Type the start date in the form of %mm-%dd</center>\"\r\n \"<h3><center>/api/v1.0/<start>/<end></h3></center>\"\r\n \"<center>Note: API request takes two parameters: Start date / End date</center>\"\r\n \"<center>Type dates in the form of %yyyy-%mm-%dd</center>\"\r\n \"<br/>\"\r\n \"<br/>\"\r\n \"<br/>\"\r\n \"<center>MJV</center>\"\r\n )", "def get_info(hit):\n mention = Mention(hit)\n return dict(\n url = mention.info[\"url\"],\n title = mention.info[\"title\"],\n date = mention.info[\"datetime_date\"] or datetime.date(1970, 1, 1),\n type = 'news' if mention.in_the_news else 'print',\n author = '(need author)',\n media = mention.info[\"media\"],\n )", "def header_huffington(self):\n head = '\\n ^^Polls ^^fetched ^^from ^^[http://elections.huffingtonpost.com/](http://elections.huffingtonpost.com/).\\n\\n'\n head += '***{}***\\n\\n'.format(self.get_greeting())\n head += '.\\n\\n'\n head += '.\\n\\n'\n return head", "def top_headlines():\n source = \"google-news\" # TODO: Add option to choose source\n try:\n r = requests.get(\"https://newsapi.org/v2/top-headlines?sources=\" + source + \"&apiKey=\" + NEWS_API_TOKEN)\n data = r.json()\n # TODO: Find a way to include multiple articles instead of a random one\n article = data['articles'][randint(0, len(data['articles']) - 1)]\n imageurl = article['urlToImage'].replace('\\\\', '')\n embed = discord.Embed(\n title=article['title'],\n description=article['description'],\n url=article['url'],\n image_url=imageurl\n )\n 
embed.set_image(url=imageurl)\n embed.set_footer(text=\"Powered by NewsAPI! (newsapi.org)\")\n return embed\n except Exception as e:\n print(e)\n return discord.Embed(title=\"Something went wrong\")", "def hentry2atom(entry_mf):\n\n\t# generate fall backs or errors for the non-existing required properties ones.\n\n\tif 'properties' in entry_mf:\n\t\tprops = entry_mf['properties']\n\telse:\n\t\treturn None, 'properties of entry not found.'\n\n\tentry = {'title': '', 'subtitle': '', 'link': '', 'uid': '', 'published': '', 'updated': '', 'summary': '', 'content': '', 'categories': ''}\n\n\t## required properties first\n\n\t# construct id of entry\n\tuid = _get_id(entry_mf)\n\n\tif uid:\n\t\t# construct id of entry -- required\n\t\tentry['uid'] = templates.ID.substitute(uid = escape(uid))\n\telse:\n\t\treturn None, 'entry does not have a valid id'\n\n\t# construct title of entry -- required - add default\n\t# if no name or name is the content value, construct name from title or default from URL\n\tname = props.get('name')\n\tif name:\n\t\tname = name[0]\n\n\tcontent = props.get('content')\n\tif content:\n\t\tcontent = content[0]\n\t\tif isinstance(content, dict):\n\t\t\tcontent = content.get('value')\n\n\tif name:\n\t\t# if name is generated from content truncate\n\t\tif not mf2util.is_name_a_title(name, content):\n\t\t\tif len(name) > 50:\n\t\t\t\tname = name[:50] + '...'\n\telse:\n\t\tname = uid\n\n\tentry['title'] = templates.TITLE.substitute(title = escape(name), t_type='title')\n\n\t# construct updated/published date of entry\n\tupdated = _updated_or_published(entry_mf)\n\n\t# updated is -- required\n\tif updated:\n\t\tentry['updated'] = templates.DATE.substitute(date = escape(updated), dt_type = 'updated')\n\telse:\n\t\treturn None, 'entry does not have valid updated date'\n\n\t## optional properties\n\n\tentry['link'] = templates.LINK.substitute(url = escape(uid), rel='alternate')\n\n\t# construct published date of entry\n\tif 'published' in props:\n\t\tentry['published'] = templates.DATE.substitute(date = escape(props['published'][0]), dt_type = 'published')\n\n\t# construct subtitle for entry\n\tif 'additional-name' in props:\n\t\tfeed['subtitle'] = templates.TITLE.substitute(title = escape(props['additional-name'][0]), t_type='subtitle')\n\n\t# content processing\n\tif 'content' in props:\n\t\tif isinstance(props['content'][0], dict):\n\t\t\tcontent = props['content'][0]['html']\n\t\telse:\n\t\t\tcontent = props['content'][0]\n\telse:\n\t\tcontent = None\n\n\tif content:\n\t\tentry['content'] = templates.CONTENT.substitute(content = escape(content))\n\n\t# construct summary of entry\n\tif 'featured' in props:\n\t\tfeatured = templates.FEATURED.substitute(featured = escape(props['featured'][0]))\n\telse:\n\t\tfeatured = ''\n\n\tif 'summary' in props:\n\t\tsummary = templates.POST_SUMMARY.substitute(post_summary = escape(props['summary'][0]))\n\telse:\n\t\tsummary = ''\n\n\t# make morelink if content does not exist\n\tif not content:\n\t\tmorelink = templates.MORELINK.substitute(url = escape(uid), name = escape(name))\n\telse:\n\t\tmorelink = ''\n\n\tentry['summary'] = templates.SUMMARY.substitute(featured=featured, summary=summary, morelink=morelink)\n\n\t# construct category list of entry\n\tif 'category' in props:\n\t\tfor category in props['category']:\n\t\t\tif isinstance(category, dict):\n\t\t\t\tif 'value' in category:\n\t\t\t\t\tcategory = category['value']\n\t\t\t\telse:\n\t\t\t\t\tcontinue\n\n\t\t\tentry['categories'] += 
templates.CATEGORY.substitute(category=escape(category))\n\n\t# construct atom of entry\n\treturn templates.ENTRY.substitute(entry), 'up and Atom!'", "def extract_text(self, categories, entries):\n text = \" \".join(\n [\n self.podcast.title,\n self.podcast.description,\n self.podcast.keywords,\n self.podcast.authors,\n ]\n + [c.name for c in categories]\n + [e[\"title\"] for e in entries][:6]\n )\n return \" \".join([kw for kw in extract_keywords(self.podcast.language, text)])", "def get_headlines():\n country = request.args.get('country', type=str)\n if country is not None:\n data = te.getNews(country=country).dropna()\n return jsonify(data.to_dict(orient='records'))\n data = te.getNews()\n return jsonify(te.getNews().dropna().to_dict(orient='records'))", "def print_entry(entry):\n border = '-' * 50\n print(border)\n print(entry['name'])\n print(\"Date: {}\".format(entry['date']))\n print(\"Time Spent: {}\".format(entry['time_spent']))\n if entry['notes'] != '':\n print(\"Notes:\\n{}\\n{}\".format('----------', entry['notes']))\n print(border)", "def parse_episode_page_html(season, episode, html):\n\n data = []\n\n lines = html.split('\\n')\n\n start_parse_dialog = False\n\n for line in lines:\n\n if 'class=\"postbody\"' in line:\n start_parse_dialog = True\n\n if start_parse_dialog and '<p>' in line and ':' in line:\n datum = {}\n datum['season'] = season\n datum['episode'] = episode\n\n dialog_str = line.split(':')[1].split('</p>')[0]\n dialog_str = re.sub(r'\\([a-zA-Z ]*\\)', '', dialog_str)\n dialog_str = dialog_str.strip()\n datum['dialog'] = dialog_str\n datum['num_words'] = len(dialog_str.split())\n\n speakers_str = line.split('<p>')[1].split(':')[0]\n if ',' in speakers_str and 'and' in speakers_str:\n for speaker in speakers_str.split(','):\n if 'and' in speaker:\n for sub_speaker in speaker.split('and'):\n datum['speaker'] = clean_speaker_string(sub_speaker.strip())\n else:\n datum['speaker'] = clean_speaker_string(speaker.strip())\n elif 'and' in speakers_str:\n for sub_speaker in speakers_str.split('and'):\n datum['speaker'] = clean_speaker_string(sub_speaker.strip())\n else:\n datum['speaker'] = clean_speaker_string(speakers_str.strip())\n\n data.append(datum)\n\n return data", "def __str__(self):\n return f\"\"\"POST({self.key})\n title: {self.title}\n author: {self.author}\n url: {self.post_url}\n main_image_url: {self.main_image_url}\n description: {self.description}\n \"\"\"", "def __init__(self, headline, depth = 0, parent = None):\n \n SitemapTreeElement.__init__(self, depth, parent)\n\n # Check a type of 'headline' parametr\n if not isinstance(headline, basestring):\n raise TypeError('string type expected')\n self._headline = headline", "def gnews(self):\n\t\tfeed_url = self.get_feed()\n\t\tfeed_data = feedparser.parse(feed_url)\n\t\tprint(\"\")\n\t\ttype_tiny = pyshorteners.Shortener()\n\t\tfor data in feed_data[\"items\"]:\n\t\t\ttiny_url = type_tiny.tinyurl.short(data[\"link\"])\n\t\t\t#tiny_url = tinyurl.create_one(data[\"link\"])\n\t\t\tprint('\\033[33m' + data[\"title\"] + \" : \" + Style.RESET_ALL + tiny_url)\n\t\t\tprint(\"\")", "def __init__(self, title, storyline, poster_image_url, trailer_youtube_url):\n self.title = title\n self.storyline = storyline\n self.poster_image_url = poster_image_url\n self.trailer_youtube_url = trailer_youtube_url", "def parse_title(self, pre):\n # Extract datapoints\n title_text = str(pre)\n title = {}\n\n identity_data = self.identity_regex.search(title_text)\n title['linc'] = int(identity_data.group(1).strip().replace(' ', 
''))\n title['short_legal'] = identity_data.group(2).strip().replace(';', ' ')\n title['title_number'] = identity_data.group(3).strip()\n\n try:\n title['ats_reference'] = self.ats_regex.search(title_text).group(1).replace(';',' ')\n except AttributeError:\n title['ats_reference'] = ''\n\n title['municipality'] = self.municipality_regex.search(title_text).group(1).replace('\\r','')\n\n try:\n references = self.reference_regex.search(title_text).group(1).split(\"\\n\")\n references = [i.strip() for i in references]\n references = list(filter(None, references))\n title['reference_number'] = references\n except AttributeError:\n title['reference_number'] = ['']\n\n payday_raw = self.payday_regex.search(title_text).group(3).strip('</pre>').strip()\n title['registration'] = payday_raw[:11]\n title['date'] = reversed(payday_raw[15:25].split('/'))\n title['date'] = '-'.join(title['date'])\n title['document_type'] = payday_raw[27:46].strip()\n\n title['value'] = self._try_int(payday_raw[46:62].strip())\n title['consideration'] = self._try_int(payday_raw[62:80].strip())\n\n if \"CONDOMINIUM\" in title_text:\n title['condo'] = True\n else:\n title['condo'] = False\n\n title['title_text'] = title_text.strip('<pre>').strip('</pre>').strip()\n\n return title", "def getTitle(test:str) -> str:\n return test[5:].strip()", "def parse_headline(section):\n tokens = HEADLINE_RE.findall(section.replace(\"\\n\", \" \"))\n myfmt = \"%b %d %Y\" if len(tokens[0][2].split()[0]) == 3 else \"%B %d %Y\"\n cli_valid = datetime.datetime.strptime(tokens[0][2], myfmt).date()\n cli_station = (tokens[0][0]).strip().upper()\n return (cli_valid, cli_station)", "def info(entry: BibItem) -> str:\n return \"{title}{author}{date}\".format(\n title=(\n \"Title: {}\\n\".format(re.sub(r\"[}{]\", \"\", entry[\"title\"]))\n if \"title\" in entry\n else \"\"\n ),\n author=(\n \"Author{plural}: {author}\\n\".format(\n plural=\"s\" if len(entry[\"author\"]) > 1 else \"\",\n author=\"; \".join(entry[\"author\"]),\n )\n if \"author\" in entry\n else \"\"\n ),\n date=(\n \"Year: {}\\n\".format(entry[\"date\"].split(\n \"-\")[0]) if \"date\" in entry else \"\"\n ),\n )", "def make_head_line():\n with open(args.out_folder.strip() + \"/files/head_line.txt\", \"a\") as headLine:\n headLine.write(\"#Query ID\\t#Subject\\t#Subject accession\\t#Subject Taxonomy ID\\t#Identity percentage\\t#Coverage\\t#evalue\\t#bitscore\\n\")", "def get_kindle_strs(self):\n # Title/author\n title_line = '%s (%s)' % (self.title, self.author)\n\n # Metadata line\n highlight_type = '- Your %s' % (self.clip_type.capitalize())\n if (self.loc_range[0] == self.loc_range[1]):\n location_string = 'Location %d' % (self.loc_range[0])\n else:\n location_string = 'Location %d-%d' % (self.loc_range[0], self.loc_range[1])\n page_string = None\n if (self.page is not None):\n page_string = 'on Page %d' % self.page\n date_string = self.datetime.strftime('%A, %B %d, %Y %I:%M:%S %p')\n\n if (self.page is not None):\n metadata_line = '%s %s | %s | Added on %s' % (highlight_type, page_string, location_string, date_string)\n else:\n metadata_line = '%s %s | Added on %s' % (highlight_type, location_string, date_string)\n\n return (title_line, metadata_line, '\\n', self.clip_text)", "def __init__(self, title, year,story, poster_url, trailer_url):\n self.title = title\n self.year = year\n self.story = story\n self.poster_url = poster_url\n self.trailer_url = trailer_url", "def topheadlines():\n newsSource = click.prompt(\"Please enter your choice from listsources\")\n \n main_url = 
\"https://newsapi.org/v2/top-headlines?apiKey=f45fa2c71932483f832f0cc745af0325&sources=\"+newsSource\n\n\t# fetching data in json format \n open_headline = requests.get(main_url).json() \n\n\t# getting all headlines in a string articles \n headline = open_headline[\"articles\"] \n\n\t# empty list which will \n\t# contain all trending newssources \n output = [] \n\t\n for h in headline: \n click.echo('\\n')\n click.secho(click.style('TITLE: ' + h['title'], fg='red'))\n click.secho(click.wrap_text(h['description']))\n click.secho(click.style('DOMAIN: ' + h['url'], fg='blue'))\n \n \t\n for i in output[:11]:\n print(i)", "def output(self):\n entry = []\n entry.append('''<entry>\n <title mode=\"escaped\" type=\"text/html\">%(title)s</title>\n <link rel=\"alternate\" type=\"text/html\" href=\"%(url)s\" />\n <issued>%(issued)s</issued>\n <modified>%(modified)s</modified>\n ''' % self.__dict__)\n \n if self.feed:\n entry.append('''<link rel=\"service.feed\" type=\"application/atom+xml\" href=\"%s\" title=\"%s\" />''' % (self.feed, self.feed_title))\n if self.comments:\n entry.append('''<link rel=\"comments\" type=\"application/atom+xml\" href=\"%s\" />''' % self.comments)\n if self.author:\n entry.append('''<author>%s</author>''' % self.author.output())\n for person in self.contributors:\n entry.append('''<contributor>%s</contributor>''' % person.output())\n if self.id:\n entry.append('''<id>%s</id>''' % self.id)\n if self.created:\n entry.append('''<created>%s</created>''' % self.created)\n if self.summary:\n entry.append('''<summary type=\"application/xhtml+xml\" xml:base=\"%s\" xml:space=\"preserve\">\n <div xmlns=\"http://www.w3.org/1999/xhtml\">%s</div></summary>''' % (self.base_url, self.summary))\n if self.content:\n #entry.append('''<content type=\"application/xhtml+xml\" xml:base=\"%s\" xml:space=\"preserve\">\n # <div xmlns=\"http://www.w3.org/1999/xhtml\">%s</div></content>''' % (self.base_url, self.content))\n entry.append('''<content type=\"text/html\" mode=\"escaped\" xml:base=\"%s\" xml:space=\"preserve\">%s</content>''' % (self.base_url, self.content))\n \n entry.append('''</entry>''')\n return '\\n'.join(entry)", "def parse_description(self, track: dict):\n try:\n album = track['album']\n link = album['external_urls'].get('spotify')\n preview = track.get('preview_url')\n return (f\"<p>Song from album <a href='{link}'>{album.get('name')}</a><p>\" +\n f\"<audio controls><source src='{preview}' type='audio/mp3'></audio>\")\n except KeyError:\n return \"\"", "def home_page():\n return \"<h4>Welcome !</h4><br><a href='/fetch'>View Results</a>\"", "def welcome():\n return (\n f\"<b>Available Routes:</b><br/>\"\n f\"<br/>\"\n f\"<b>Stats:</b><br/>\"\n f\"Precipitation: /api/v1.0/precipitation<br/>\"\n f\"List of Stations: /api/v1.0/stations<br/>\"\n f\"Temperatures for last year: /api/v1.0/tobs<br/>\"\n f\"<br/>\"\n f\"<b>Stats for Dates:</b><br/>\"\n f\"Temperature stats a specific date(yyyy-mm-dd): /api/v1.0/yyyy-mm-dd<br/>\"\n f\"Temperature stats from start to end dates(yyyy-mm-dd): /api/v1.0/yyyy-mm-dd/yyyy-mm-dd<br/>\"\n f\"<br/>\"\n f\"<b>** Note: </b>First Record Date: 2010-01-01 , Last Record Date: 2017-08-23<br/>\" # from jupyter notebook\n )", "def __init__(self, **kwargs):\n self.category = kwargs.pop('category', '')\n self.name = kwargs.pop('name', '')\n self.a_id = kwargs.pop('id', '')\n self.since = kwargs.pop('since', '')\n self.photos = kwargs.pop('photos', list())\n self.description = kwargs.pop('description', list())\n self.url = kwargs.pop('url', '')\n self.place = 
kwargs.pop('place', '')", "def get_headline_data(website_url, source):\n\tpage = requests.get(website_url)\n\tpage.raise_for_status()\n\tall_headlines = []\n\tbs_obj = bs4.BeautifulSoup(page.text, 'html.parser')\n\titem_list = bs_obj.select('item')\n\tprintable = set(string.printable)\n\tfor curr_item in item_list:\n\t\titem_title = curr_item.title.string\n\t\tfollowup_link = curr_item.select('link')[0].string\n\t\tdatestamp = curr_item.select('pubdate')[0].string\n\t\titem_title = item_title.replace(\"&apos;\", \"'\")\n\t\tfollowup_link = followup_link.replace(u\"\\u2018\", \"'\").replace(u\"\\u2019\", \"'\")\n\t\titem_title = item_title.encode('utf-8', errors='ignore')\n\t\tnew_headline = data_structures.Headline(item_title, followup_link, source, datestamp)\n\t\tall_headlines.append(new_headline)\n\treturn all_headlines", "def get_headlines(outlet):\n if outlet == \"BBC\":\n parser = news_parser.BBC(\"https://www.bbc.co.uk\")\n elif outlet == \"DailyMail\":\n parser = news_parser.DailyMail(\"https://www.dailymail.co.uk\")\n elif outlet == \"Guardian\":\n parser = news_parser.Guardian(\"https://www.theguardian.com\")\n elif outlet == \"Metro\":\n parser = news_parser.Metro(\"https://www.metro.co.uk\")\n elif outlet == \"Mirror\":\n parser = news_parser.Mirror(\"https://www.mirror.co.uk/news/\")\n elif outlet == \"Reuters\":\n parser = news_parser.Reuters(\"https://uk.reuters.com\")\n elif outlet == \"Sun\":\n parser = news_parser.Sun(\"https://www.thesun.co.uk\")\n elif outlet == \"Independent\":\n parser = news_parser.Independent(\"https://www.independent.co.uk\")\n else:\n parser = news_parser.BBC(\"https://www.bbc.co.uk/news\")\n \n index = outlets.index(outlet)\n url_list = []\n while len(url_list) < 50:\n opts = {\n 'language': ['en'],\n 'source_id': [ids[index]],\n 'published_at_start':'NOW-1DAY',\n 'published_at_end':'NOW',\n 'sort_by': 'hotness',\n 'sort_direction': 'desc',\n 'cursor': '*',\n 'per_page': 100\n }\n\n try:\n api_response = api_instance.list_stories(**opts)\n for story in api_response.stories:\n url = story.links.permalink\n if url:\n url_list.append(url)\n except ApiException as e:\n print(\"Exception when calling DefaultApi->list_stories: %s\\n\" %e)\n \n opts['cursor'] = api_response.next_page_cursor\n \n url_list = url_list[:50]\n \n articles_list = []\n for url in url_list:\n raw_article = parser.get_article(url)\n if raw_article is not None:\n articles_list.append(raw_article)\n\n articles = []\n for article in articles_list:\n parsed_article = parser.parse(article)\n if parsed_article is not None:\n articles.append(parsed_article)\n \n if len(articles) > 30:\n articles = articles[:30]\n\n return articles", "def __str__(self):\n artist = self.artist if self.artist else ''\n album = self.album if self.album else ''\n title = self.title if self.title else ''\n return \"{} {} {}\".format(artist,album,title)", "def __init__(self):\n self.title = ''\n self.desc = '';\n self.course_number = ''\n self.duration = ''\n self.difficulty = ''\n self.instructors = ''\n self.url = ''", "def dc2fields(file):\r\n try:\r\n from bs4 import BeautifulSoup\r\n except ImportError:\r\n error = ('Missing dependency '\r\n '\"BeautifulSoup4\" and \"lxml\" required to import Dotclear files.')\r\n sys.exit(error)\r\n\r\n\r\n in_cat = False\r\n in_post = False\r\n category_list = {}\r\n posts = []\r\n\r\n with open(file, 'r', encoding='utf-8') as f:\r\n\r\n for line in f:\r\n # remove final \\n\r\n line = line[:-1]\r\n\r\n if line.startswith('[category'):\r\n in_cat = True\r\n elif 
line.startswith('[post'):\r\n in_post = True\r\n elif in_cat:\r\n fields = line.split('\",\"')\r\n if not line:\r\n in_cat = False\r\n else:\r\n # remove 1st and last \"\"\r\n fields[0] = fields[0][1:]\r\n # fields[-1] = fields[-1][:-1]\r\n category_list[fields[0]]=fields[2]\r\n elif in_post:\r\n if not line:\r\n in_post = False\r\n break\r\n else:\r\n posts.append(line)\r\n\r\n print(\"%i posts read.\" % len(posts))\r\n\r\n for post in posts:\r\n fields = post.split('\",\"')\r\n\r\n # post_id = fields[0][1:]\r\n # blog_id = fields[1]\r\n # user_id = fields[2]\r\n cat_id = fields[3]\r\n # post_dt = fields[4]\r\n # post_tz = fields[5]\r\n post_creadt = fields[6]\r\n # post_upddt = fields[7]\r\n # post_password = fields[8]\r\n # post_type = fields[9]\r\n post_format = fields[10]\r\n # post_url = fields[11]\r\n # post_lang = fields[12]\r\n post_title = fields[13]\r\n post_excerpt = fields[14]\r\n post_excerpt_xhtml = fields[15]\r\n post_content = fields[16]\r\n post_content_xhtml = fields[17]\r\n # post_notes = fields[18]\r\n # post_words = fields[19]\r\n # post_status = fields[20]\r\n # post_selected = fields[21]\r\n # post_position = fields[22]\r\n # post_open_comment = fields[23]\r\n # post_open_tb = fields[24]\r\n # nb_comment = fields[25]\r\n # nb_trackback = fields[26]\r\n post_meta = fields[27]\r\n # redirect_url = fields[28][:-1]\r\n\r\n # remove seconds\r\n post_creadt = ':'.join(post_creadt.split(':')[0:2])\r\n\r\n author = \"\"\r\n categories = []\r\n tags = []\r\n\r\n if cat_id:\r\n categories = [category_list[id].strip() for id in cat_id.split(',')]\r\n\r\n # Get tags related to a post\r\n tag = post_meta.replace('{', '').replace('}', '').replace('a:1:s:3:\\\\\"tag\\\\\";a:', '').replace('a:0:', '')\r\n if len(tag) > 1:\r\n if int(tag[:1]) == 1:\r\n newtag = tag.split('\"')[1]\r\n tags.append(\r\n BeautifulSoup(\r\n newtag\r\n , \"xml\"\r\n )\r\n # bs4 always outputs UTF-8\r\n .decode('utf-8')\r\n )\r\n else:\r\n i=1\r\n j=1\r\n while(i <= int(tag[:1])):\r\n newtag = tag.split('\"')[j].replace('\\\\','')\r\n tags.append(\r\n BeautifulSoup(\r\n newtag\r\n , \"xml\"\r\n )\r\n # bs4 always outputs UTF-8\r\n .decode('utf-8')\r\n )\r\n i=i+1\r\n if j < int(tag[:1])*2:\r\n j=j+2\r\n\r\n \"\"\"\r\n dotclear2 does not use markdown by default unless you use the markdown plugin\r\n Ref: http://plugins.dotaddict.org/dc2/details/formatting-markdown\r\n \"\"\"\r\n if post_format == \"markdown\":\r\n content = post_excerpt + post_content\r\n else:\r\n content = post_excerpt_xhtml + post_content_xhtml\r\n content = content.replace('\\\\n', '')\r\n post_format = \"html\"\r\n\r\n kind = 'article' # TODO: Recognise pages\r\n\r\n yield (post_title, content, slugify(post_title), post_creadt, author,\r\n categories, tags, kind, post_format)", "def get_headlines(newssource):\n \n \n newssource_dict = {}\n url = 'https://newsapi.org/v1/articles?source=' + newssource + '&sortBy=top&apiKey=' + api\n request = http.request('GET',url,timeout=4.0)\n\n headline = json.loads(request.data)\n \n if not headline['articles']:\n return \"NewsAPI can not receive information from\" + newsource + \"right now\"\n \n newssource_dict['url'] = headline['articles'][0]['url']\n newssource_dict['title']= headline['articles'][0]['title']\n newssource_dict['description'] = headline['articles'][0]['description']\n \n \n return newssource_dict", "def set_content(self, **kwargs):\n self.content = kwargs.get('content')\n self.title = kwargs.get('title')\n self.description = kwargs.get('description')\n self.url = 
kwargs.get('url')\n self.color = kwargs.get('color')\n self.timestamp = kwargs.get('timestamp')", "def process_info(info, site):\n # Urubu doesn't split the 'tags' into multiple strings\n if \"tags\" in info:\n if isinstance(info[\"tags\"], str):\n info[\"tags\"] = info[\"tags\"].split(\", \")\n # Identify to which folder the item belongs (paper, blog, etc)\n if \"type\" not in info:\n info[\"type\"] = \"/{}\".format(info[\"id\"].split(\"/\")[1])\n # Add the current date to the site metadata\n if \"now\" not in site:\n site[\"now\"] = datetime.utcnow()\n # Add the last git commit hash to the site metadata\n if \"commit\" not in site:\n completed = subprocess.run(\n [\"git\", \"rev-parse\", \"--short\", \"HEAD\"], capture_output=True, text=True\n )\n site[\"commit\"] = completed.stdout.strip()", "def _preprocess_feed(tweet: str):\n t = tweet.lower()\n t = re.sub(url_re, \" <URL> \", t)\n t = t.replace(\"\\n\", \"\")\n t = t.replace(\"#\", \" <HASHTAG> \")\n t = re.sub(mention_re, \" <USER> \", t)\n t = re.sub(smile_re, \" <EMOTICON> \", t)\n t = re.sub(emoji_re, \" <EMOJI> \", t)\n t = re.sub(time_re, \" <TIME> \", t)\n t = re.sub(numbers_re, \" <NUMBER> \", t)\n t = re.sub(not_ascii_re, \"\", t)\n t = re.sub(space_collapse_re, \" \", t)\n t = t.strip()\n return t", "def _preprocess_feed(tweet: str):\n t = tweet.lower()\n t = re.sub(url_re, \" <URL> \", t)\n t = t.replace(\"\\n\", \"\")\n t = t.replace(\"#\", \" <HASHTAG> \")\n t = re.sub(mention_re, \" <USER> \", t)\n t = re.sub(smile_re, \" <EMOTICON> \", t)\n t = re.sub(emoji_re, \" <EMOJI> \", t)\n t = re.sub(time_re, \" <TIME> \", t)\n t = re.sub(numbers_re, \" <NUMBER> \", t)\n t = re.sub(not_ascii_re, \"\", t)\n t = re.sub(space_collapse_re, \" \", t)\n t = t.strip()\n return t", "def news():\n mesosite = get_dbconn(\"mesosite\")\n mcursor = mesosite.cursor(cursor_factory=psycopg2.extras.DictCursor)\n # Last dailyb delivery\n lastts = datetime.datetime.now() + datetime.timedelta(days=-1)\n mcursor.execute(\n \"SELECT *, to_char(entered, 'DD Mon HH:MI AM') as nicedate \"\n \"from news WHERE entered > %s ORDER by entered DESC\",\n (lastts,),\n )\n\n textfmt = \"\"\"\n +----------------------------------------------\n | Title : %(title)s\n | Date : %(nicedate)s\n | Author: %(author)s\n | URL : %(url)s\n +----------------------------------------------\n\n%(body)s\n\n\"\"\"\n htmlfmt = (\n \"<hr />\\n\"\n \"<br /><strong>Title:</strong>\\n\"\n '<a href=\"https://mesonet.agron.iastate.edu/'\n 'onsite/news.phtml?id=%(id)s\">%(title)s</a>\\n'\n \"<br /><strong>Date:</strong> %(nicedate)s\\n\"\n \"<br /><strong>Author:</strong> %(author)s\\n\"\n '<br /><a href=\"%(url)s\">link</a>\\n\\n'\n \"<p>%(body)s\\n\"\n )\n txt = \"> News\\n\"\n html = \"<h3>News</h3>\"\n\n for row in mcursor:\n txt += textfmt % row\n html += htmlfmt % row\n if mcursor.rowcount == 0:\n txt += \"\\n No news is good news\\n\\n\"\n html += \"<strong>No news is good news</strong>\"\n\n return txt, html", "def meta(self):\n title = 'Месторасположение: {0}'.format(self.object.emplacement)\n return {\n 'title': title\n }", "def home():\n return \"<h1>Not Much Going On Here</h1>\"", "def parse_metadata(self, item):\n self.skip_ws\n item.metadata_name = name = self.name\n if name == \"\":\n self.backspace", "def _parse_title(self, response):\n title_str = re.sub(\n r\"\\s+\", \" \", \" \".join(response.css(\".soi-container h2 *::text\").extract())\n ).strip()\n return re.sub(\n r\"(Illinois Commerce Commission|(?=Committee )Committee Meeting$)\",\n \"\",\n title_str,\n 
).strip()", "def __init__(self: object) -> None:\n self.empty: bool = True\n self.episode_broadcast: str = \"\"\n self.episode_id: int = 0\n self.episode_inspectors: str = \"\"\n self.episode_name: str = \"\"\n self.episode_sequence: str = \"\"\n self.episode_url: str = \"\"\n self.episode_year: int = 0", "def __str__(self):\n return \"Author: \" + self.author +\"\\nTitle: \" + self.title + \"\\nHaiku: \" + str(self.haiku) + \\\n \"\\nText: \" + self.text + \"\\n\"", "def welcome():\n return (\n \"Hawaii Precipitation and Weather Data<br/><br/>\"\n \"Pick from the available routes below:<br/><br/>\"\n \"Precipiation from 2016-08-23 to 2017-08-23.<br/>\"\n \"/api/v1.0/precipitation<br/><br/>\"\n \"A list of all the weather stations in Hawaii.<br/>\"\n \"/api/v1.0/stations<br/><br/>\"\n \"The Temperature Observations (tobs) from 2016-08-23 to 2017-08-23.<br/>\"\n \"/api/v1.0/tobs<br/><br/>\"\n \"Type in a date (i.e., 2013-09-26) to see the min, max and avg temperature since that date.<br/>\"\n \"/api/v1.0/temp/<start><br/><br/>\"\n \"Type in a date range (anywhere between 2010-01-01/2017-08-23) to see the min, max and avg temperature for that range.<br/>\"\n \"/api/v1.0/temp/<start>/<end><br/>\"\n )", "def __init__(self, body, author):\n self.body = body.strip()\n self.author = author.strip()", "def home():\n return (\n f\"Available Routes:<br/>\"\n f\"/api/v1.0/precipitation<br/>\"\n f\"/api/v1.0/stations<br/>\"\n f\"/api/v1.0/tobs<br/>\"\n f\"/api/v1.0/2015-01-01<br/>\"\n f\"/api/v1.0/2015-01-01/2015-12-31\"\n )", "def parse_post_metadata(post_text):\n result = {}\n \n header_end = 0\n \n promed_date_match = re.search(\n r\"Published Date:\\s(?P<date>.*)\", post_text)\n result[\"promedDate\"] = parse_promed_pub_datetime(\n promed_date_match.group(\"date\"))\n \n archive_match = re.search(r\"Archive Number: (?P<num>.*)\", post_text)\n result[\"archiveNumber\"] = archive_match.group(\"num\")\n header_end = archive_match.end()\n \n subject = re.search(r\"Subject:\\s(?P<subject>.*)\", post_text).group(\"subject\")\n result[\"subject\"] = parse_subject_line(subject)\n result[\"subject\"][\"raw\"] = subject\n \n # This will not find all linked reports.\n # Some older posts refrence posts using different indexes I do not know\n # how to interpret.\n # Example: http://promedmail.org/direct.php?id=2194235\n result[\"linkedReports\"] = [\n report_id for report_id in re.findall(r\"\\d{8}\\.\\d+\", post_text)]\n \n # Most links will be article source urls or links to promed.\n result[\"links\"] = list(set(\n re.findall(r\"http\\S+[^(\\.\\])(\\.\\)>\\s]\", post_text)))\n result[\"links\"].sort()\n \n communicated_match = re.search(communicated_by_regex, post_text, re.M)\n if communicated_match:\n result[\"communicatedBy\"] = communicated_match.group(\"communicated_by\")\n return result, header_end", "def trim_title(string):\n title = string.replace(\" - The New York Times\", \"\").replace(\"IE=e \", \"\")\\\n .replace(\"Blog \", \"\").replace(\"NYT_ \", \"\").replace(\" - International Herald Tribune\", \"\")\\\n .replace(\"- Editorials & Commentary\", \"\").replace(\"Opinion | \", \"\")\\\n .replace(\"TECHNOLOGY; \", \"\").replace(\"ART;\\n\", \"\").replace(\"PERFORMANCE ART;\", \"\")\\\n .replace(\"ESSAY; \", \"\").replace(\"ALT / \", \"\").replace(\"Review/Film; \", \"\")\\\n .replace(\"NOTICED; \", \"\").replace(\"DOWNTIME; \", \"\").replace(\"PERFORMANCE ART;\", \"\")\\\n .replace(\"Technology: CONNECTIONS;\", \"\").replace(\"PERFORMANCE \\n\", \"\")\n return title", "def parse_articles(self, 
response):\n item = NasdaqcrawlerItem()\n item['date_published'] = response.xpath('//span[@itemprop=\"datePublished\"]/text()').extract()\n item['text'] = \"\".join(self.clean_text(response.xpath('//div[@id=\"articlebody\"]//p//text()').extract()))\n item['title'] = response.xpath('//h1/text()').extract()\n item['stock_ticker'] = response.meta['ticker']\n # captures any text between symbol/ and /\n # this should only return a single item\n \n yield item", "def news_speech():\n #Fetches data from API and creates global varibles.\n news_handle(news_fetch(config_fetcher('news_region'), config_fetcher('news_key')))\n #Creates a daily breifing using varibles\n news_daily_news = Markup((f\"The top headline for today is entitled: {title_1}, and was \\\nwritten by {author_1}. Here is a second headline, entitled: {title_2}, written by {author_2}.\"))\n return news_daily_news", "def get_title(portobjlist):\n #fetch_title(portobjlist)\n fetch_title(portobjlist)", "def get_title(line):\n\n assert line is not None\n # the format of line should be like this:\n # ' TITLE \"Some Title\"'\n # and we simply can just ignore the first 11 chars, but to be safe lets\n # make this assertion\n assert line[:11] == ' TITLE \"'\n # the last char should be a quote, this assertion helps validate this\n # assumption\n assert line[-1] == '\"'\n return line[11:-1].strip()", "def prepare_metadata(self, presentation):\r\n return {\"title\": presentation.title,\r\n \"artist\": presentation.speaker,\r\n \"performer\": presentation.speaker,\r\n \"album\": presentation.event,\r\n \"location\": presentation.room,\r\n \"date\": str(datetime.date.today()),\r\n \"comment\": presentation.description}", "def __init__(\r\n self, movie_title, movie_storyline, poster_image, \r\n trailer_youtube): \r\n self.title = movie_title\r\n self.storyline = movie_storyline\r\n self.poster_image_url = poster_image\r\n self.trailer_youtube_url = trailer_youtube", "def __init__(self, title, image, movie_tagline=\"\", trailer_url=\"\"):\n self.title = title\n self.poster_image_url = image\n self.trailer_youtube_url = trailer_url\n self.storyline = movie_tagline", "def __init__(self,movie_title,movie_storyline,poster_image,youtube_trailer,release_date):\n self.title = movie_title\n self.storyline = movie_storyline\n self.poster_image_url = poster_image\n self.trailer_youtube_url = youtube_trailer\n self.release_date = release_date", "def title(self) -> Optional[str]:\n return self.get(\"/Title\")", "def title(self):\n # Use the first line of the articles text as title, if not title\n # exists.\n title = self._text[:min(32, self._text.find(\"\\n\"))]\n return title", "def ALOHA():\r\n return (\r\n \r\n f\"<h1>ALOHA!!!</h1></br>\"\r\n f\"<h2>This API is for Climate Data in Hawaii</h2></br>\"\r\n f\"Available Routes:<br/>\"\r\n f\"/api/v1.0/precipitation<br/><br/>\"\r\n f\"/api/v1.0/stations<br/><br/>\"\r\n f\"/api/v1.0/tobs<br/><br/>\"\r\n f\"/api/v1.0/start_date</br>\"\r\n f\"/api/v1.0/start_date/end_date\"\r\n \r\n )", "def top_post(section: izi.types.one_of(('news', 'newest', 'show'))='news'):\n content = requests.get('https://news.ycombinator.com/{0}'.format(section)).content\n text = content.decode('utf-8')\n return text.split('<tr class=\\'athing\\'>')[1].split(\"<a href\")[1].split(\">\")[1].split(\"<\")[0]", "def parse_list(el):\n el = pq(el)\n name = strip_tags(el.children(\".title a\").html())\n phone = strip_tags(el.children(\".phone\").html())\n email = strip_tags(el.children(\".email a\").attr(\"href\"))\n if email:\n email = 
email.replace(\"mailto:\", \"\")\n source_url = el.children(\".title a\").attr(\"href\")\n\n data = {\n 'name': name,\n 'source_url': 'http://www.guidestockholm.com%s' % source_url,\n 'email': email,\n 'phone': phone,\n }\n scraperwiki.sqlite.save(unique_keys=['source_url'], data=data, table_name=\"guidestockholm\")", "def parse_list(el):\n el = pq(el)\n name = strip_tags(el.children(\".title a\").html())\n phone = strip_tags(el.children(\".phone\").html())\n email = strip_tags(el.children(\".email a\").attr(\"href\"))\n if email:\n email = email.replace(\"mailto:\", \"\")\n source_url = el.children(\".title a\").attr(\"href\")\n\n data = {\n 'name': name,\n 'source_url': 'http://www.guidestockholm.com%s' % source_url,\n 'email': email,\n 'phone': phone,\n }\n scraperwiki.sqlite.save(unique_keys=['source_url'], data=data, table_name=\"guidestockholm\")", "def _parse_title(self, item):\n title_str = \" \".join(item.css(\"td:first-child *::text\").extract()).strip()\n content_match = re.search(r\"(?<=\\().*(?=\\))\", title_str)\n if not content_match:\n return \"Advisory Board\"\n return content_match.group().title()", "def parse(self, src, line):\n r = line.split('\\t')\n p = {}\n if src == 'sf':\n p['businessID'] = r[0]\n p['name'] = r[1]\n p['address'] = r[2]\n p['city'] = r[3]\n p['state'] = r[4]\n p['zip'] = r[5]\n p['latitude'] = r[6]\n p['longitude'] = r[7]\n p['phone'] = r[8]\n elif src == 'nyc':\n p['businessID'] = r[0]\n p['name'] = r[1]\n # nyc separates the building number from the street name\n p['address'] = ' '.join([r[3].strip(), r[4].strip()])\n p['city'] = 'NYC'\n p['state'] = 'NY'\n p['zip'] = r[5]\n p['latitude'] = None\n p['longitude'] = None\n p['phone'] = r[6]\n return p", "def crawl_website(url):\n page = get_page(url)\n source, created = Source.objects.get_or_create(url=url)\n source.last_sync = datetime.now()\n source.save()\n properties = get_properties(page)\n title = properties[0][:49].replace(\"\\n\", \" \")\n content = properties[1].replace(\"\\n\", \" \").encode('unicode_escape')\n summary = properties[2]['text'][:500].replace(\"\\n\", \" \").encode('unicode_escape')\n content = Content(source=source,\n title=title,\n summary=summary,\n content=content)\n content.save()\n Media(type=\"image\", content=content, url=properties[3]).save()\n\n return properties", "def _parse_title(self, links):\n for link in links:\n if \"hearing\" in link[\"title\"].lower():\n return link[\"title\"].replace(\"Notice\", \"\").strip()\n if \"special\" in link[\"title\"].lower():\n return \"Special Meeting\"\n return \"Illinois Medical District Commission\"" ]
[ "0.5254778", "0.522776", "0.5127181", "0.50627685", "0.48750266", "0.4832583", "0.4781134", "0.47785923", "0.47693622", "0.47292462", "0.47047243", "0.46810403", "0.4663331", "0.46538934", "0.4653328", "0.46179616", "0.45948377", "0.4588498", "0.45725253", "0.45705068", "0.45669144", "0.45630112", "0.4558305", "0.4558165", "0.45564353", "0.45527303", "0.45453352", "0.45411843", "0.45311156", "0.45265108", "0.45251688", "0.4521028", "0.45199534", "0.45198178", "0.4511837", "0.45086247", "0.45017138", "0.45016095", "0.4481878", "0.44693276", "0.44473952", "0.44297755", "0.44174942", "0.44169745", "0.44052985", "0.440432", "0.4398647", "0.43949956", "0.4393014", "0.43879184", "0.4372084", "0.43568528", "0.4354859", "0.43547642", "0.43413898", "0.4340809", "0.4340795", "0.4340171", "0.43386358", "0.433624", "0.43329147", "0.43287006", "0.4320889", "0.43127835", "0.43020076", "0.429908", "0.42970702", "0.42964968", "0.42950004", "0.42950004", "0.42921323", "0.42877376", "0.42870754", "0.4284054", "0.428346", "0.42780405", "0.4276409", "0.42736593", "0.42713314", "0.42712003", "0.4270273", "0.42666355", "0.42662063", "0.4264992", "0.4264056", "0.42633468", "0.42542514", "0.4254196", "0.4252693", "0.42519534", "0.42478567", "0.42433575", "0.42428392", "0.42412522", "0.42401686", "0.42401686", "0.42395473", "0.4238933", "0.4237363", "0.42356306" ]
0.52180666
2
audiourl > url of the transcription's mp3 is stored here (NOT NULL)\n            PodcastName > The name of the show (references podcast(name))\n            Description > The provided summary of that day's podcast\n            Date > The date that podcast aired (parsed to mmddyyyy)\n            Title > The title of that specific podcast\n            Duration > the running time of that podcast (use strptime to parse, need mmddyyyy)\n            pending > right now will be false because we're not transcribing\n            (dateTranscribed) > date of transcription (updated later)\n
def insertClip(dbConnection, audiourl, podcastName, description, parsedDate, title):
    try:
        cursor = dbConnection.cursor()
        title = title.replace("'", "''")
        cursor.execute("INSERT INTO transcriptions(audiourl, realtimefactor, podcastname, transcription, description, date, title, pending, datetranscribed) VALUES('" + audiourl + "', NULL, '" + podcastName + "', NULL, '" + description + "', '" + parsedDate + "', '" + title + "', FALSE, NULL);")
        dbConnection.commit()
        cursor.close()
        return True
    except:
        return False
    return False
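A minimal sketch of the same insert written with DB-API parameter binding instead of string concatenation; it assumes a psycopg2-style connection whose cursor accepts %s placeholders, and the name insert_clip_parameterized is hypothetical rather than part of the original code:

def insert_clip_parameterized(dbConnection, audiourl, podcastName, description, parsedDate, title):
    # Same columns and constant values as insertClip above; the driver handles
    # quoting, so no manual replace("'", "''") escaping is needed.
    try:
        cursor = dbConnection.cursor()
        cursor.execute(
            "INSERT INTO transcriptions(audiourl, realtimefactor, podcastname, transcription, "
            "description, date, title, pending, datetranscribed) "
            "VALUES(%s, NULL, %s, NULL, %s, %s, %s, FALSE, NULL);",
            (audiourl, podcastName, description, parsedDate, title),
        )
        dbConnection.commit()
        cursor.close()
        return True
    except Exception:
        return False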
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def tokenize_podcast_transcript(args):\n DATA_DIR = os.path.join(os.getcwd(), 'data', args.project_id)\n story_file = os.path.join(DATA_DIR, 'podcast-transcription.txt')\n\n # Read all words and tokenize them\n with open(story_file, 'r') as fp:\n data = fp.readlines()\n\n data = [item.split(' ') for item in data]\n data = [\n item[:-2] + [' '.join(item[-2:])] if item[-1] == '\\n' else item\n for item in data\n ]\n data = [item for sublist in data for item in sublist]\n\n df = pd.DataFrame(data, columns=['word'])\n df['conversation_id'] = 1\n\n return df", "def podcast_download(self):\r\n warnings.filterwarnings(\"ignore\", category=UnicodeWarning)\r\n now = datetime.datetime.now()\r\n\r\n for podcast_file in self.podcast_list:\r\n published, name, link, title = podcast_file\r\n if self.podcast_list != []:\r\n line_file = (published + ';' + title + ';' + name + ';' + link).encode(\"utf-8\") \r\n if line_file in open(self.download_log).read():\r\n pass\r\n else:\r\n title = unicodedata.normalize('NFKD', title).encode('ascii', 'ignore')\r\n download_folder = os.path.join('downloads', title)\r\n if not os.path.exists(download_folder): \r\n os.makedirs(download_folder)\r\n try:\r\n published = str(parser.parse(published))[:10]\r\n except IOError as error:\r\n print 'Error' + (error) + ': File - ' + str(title)\r\n download_folder = os.path.join(download_folder, published)\r\n if not os.path.exists(download_folder): \r\n os.makedirs(download_folder)\r\n namefile_unicode = link[link.rfind('/')+1:]\r\n namefile_str = unicodedata.normalize('NFKD', \r\n namefile_unicode).encode('ascii', 'ignore')\r\n namefile_str = namefile_str.decode('utf-8', 'ignore').encode(\"utf-8\")\r\n if '.mp3' in namefile_str:\r\n len_name = namefile_str.index('.mp3')\r\n elif '.MP3' in namefile_str:\r\n len_name = namefile_str.index('.MP3')\r\n namefile_str = namefile_str[:len_name + 4]\r\n fileoutput = os.path.join(download_folder, namefile_str)\r\n name = unicodedata.normalize('NFKD', name).encode('ascii', 'ignore')\r\n print str(published) + '; ' + name\r\n ## downlink\r\n download_file(link, fileoutput) \r\n ## tagging\r\n mp3_tagging(fileoutput, podcast_file)\r\n ## write log\r\n write_file(self.download_log, line_file)\r\n end = datetime.datetime.now()\r\n print '\\r' + 'Download Time = ' + str(end-now) + '\\r'\r\n return None", "def construct_metadata(song):\n print(song) #temp", "def podcast_show(url, name):\n for recording in scraper.get_podcast_episodes(url):\n INTERFACE.add_item(recording['title'],\n 'play_podcast',\n recording['url'],\n extra_info=recording)", "def get_podcast_episodes(url):\n\n def parse_pubdate(date_string):\n \"\"\"\n Change pubdate string to datetime object. 
Tries a bunch of\n possible formats, but if none of them is a match, it will\n return a epoch = 0 datetime object\n\n :param date_string: A string representing a date\n :return: datetime object\n \"\"\"\n date_formats = (\n '%a, %d %b %Y %H:%M:%S +0000',\n '%a, %d %b %Y',\n '%a, %d %b %Y%H:%M:%S +0000',\n '%a, %d %b %Y %H:%M',\n '%a, %d %b %Y %H.%M'\n )\n df_generator = (format for format in date_formats)\n\n date = None\n while date is None:\n try:\n date = datetime.strptime(date_string, next(df_generator))\n except ValueError:\n pass\n except StopIteration:\n date = datetime.fromtimestamp(0)\n\n return date\n\n doc = get_document(url)\n\n return (\n {\n 'url': item.select('guid')[0].text,\n 'Premiered': parse_pubdate(\n item.select('pubdate')[0].text\n ).strftime(\"%d.%m.%Y\"),\n # 'Duration': duration_to_seconds(item.find('itunes:duration').text),\n 'title': item.title.text,\n 'Plot': item.description.text\n }\n for item in doc.find_all(\"item\")\n )", "def parse_description(self, track: dict):\n try:\n album = track['album']\n link = album['external_urls'].get('spotify')\n preview = track.get('preview_url')\n return (f\"<p>Song from album <a href='{link}'>{album.get('name')}</a><p>\" +\n f\"<audio controls><source src='{preview}' type='audio/mp3'></audio>\")\n except KeyError:\n return \"\"", "def build_transcript(speaker_label_transcript):\n with open('main_transcript.txt', 'a') as the_file:\n for t in speaker_label_transcript:\n the_file.write(f\"{t['speaker']}:\\n\")\n the_file.write(f\"{t['content']}\\n\\n\")", "def track_info(filename):\n tag = id3.Tag()\n tag.parse(filename)\n a = load(filename)\n print(\"# {}\".format('=' * 78))\n print(\"Track Name: {}\".format(tag.title))\n print(\"Track Artist: {}\".format(tag.artist))\n print(\"Track Album: {}\".format(tag.album))\n print(\"Track Duration: {}\".format(duration_from_seconds(a.info.time_secs)))\n print(\"Track Number: {}\".format(tag.track_num))\n print(\"Track BitRate: {}\".format(a.info.bit_rate))\n print(\"Track BitRate: {}\".format(a.info.bit_rate_str))\n print(\"Sample Rate: {}\".format(a.info.sample_freq))\n print(\"Mode: {}\".format(a.info.mode))\n print(\"# {}\".format('=' * 78))\n print(\"Album Artist: {}\".format(tag.album_artist))\n print(\"Album Year: {}\".format(tag.getBestDate()))\n print(\"Album Recording Date: {}\".format(tag.recording_date))\n print(\"Album Type: {}\".format(tag.album_type))\n print(\"Disc Num: {}\".format(tag.disc_num))\n print(\"Artist Origin: {}\".format(tag.artist_origin))\n print(\"# {}\".format('=' * 78))\n print(\"Artist URL: {}\".format(tag.artist_url))\n print(\"Audio File URL: {}\".format(tag.audio_file_url))\n print(\"Audio Source URL: {}\".format(tag.audio_source_url))\n print(\"Commercial URL: {}\".format(tag.commercial_url))\n print(\"Copyright URL: {}\".format(tag.copyright_url))\n print(\"Internet Radio URL: {}\".format(tag.internet_radio_url))\n print(\"Publisher URL: {}\".format(tag.publisher_url))\n print(\"Payment URL: {}\".format(tag.payment_url))\n print(\"# {}\".format('=' * 78))\n print(\"Publisher: {}\".format(tag.publisher))\n print(\"Original Release Date: {}\".format(tag.original_release_date))\n print(\"Play Count: {}\".format(tag.play_count))\n print(\"Tagging Date: {}\".format(tag.tagging_date))\n print(\"Release Date: {}\".format(tag.release_date))\n print(\"Terms Of Use: {}\".format(tag.terms_of_use))\n print(\"isV1: {}\".format(tag.isV1()))\n print(\"isV2: {}\".format(tag.isV2()))\n print(\"BPM: {}\".format(tag.bpm))\n print(\"Cd Id: {}\".format(tag.cd_id))\n 
print(\"Composer: {}\".format(tag.composer))\n print(\"Encoding date: {}\".format(tag.encoding_date))\n print(\"# {}\".format('=' * 78))\n print(\"Genre: {}\".format(tag.genre.name))\n print(\"Non Std Genre Name: {}\".format(tag.non_std_genre.name))\n print(\"Genre ID: {}\".format(tag.genre.id))\n print(\"Non Std Genre ID: {}\".format(tag.non_std_genre.id))\n print(\"LAME Tag: {}\".format(a.info.lame_tag))\n print(\"# {}\".format('=' * 78))\n print(\"Header Version: {}\".format(tag.header.version))\n print(\"Header Major Version: {}\".format(tag.header.major_version))\n print(\"Header Minor Version: {}\".format(tag.header.minor_version))\n print(\"Header Rev Version: {}\".format(tag.header.rev_version))\n print(\"Header Extended: {}\".format(tag.header.extended))\n print(\"Header Footer: {}\".format(tag.header.footer))\n print(\"Header Experimental: {}\".format(tag.header.experimental))\n print(\"Header SIZE: {}\".format(tag.header.SIZE))\n print(\"Header Tag Size: {}\".format(tag.header.tag_size))\n print(\"Extended Header Size: {}\".format(tag.extended_header.size))\n print(\"# {}\".format('=' * 78))\n print(\"File Name: {}\".format(tag.file_info.name))\n print(\"File Tag Size: {}\".format(tag.file_info.tag_size))\n print(\"File Tag Padding Size: {}\".format(tag.file_info.tag_padding_size))\n print(\"File Read Only: {}\".format(tag.read_only))\n print(\"File Size: {}\".format(a.info.size_bytes))\n print(\"Last Modified: {}\".format(time.strftime('%Y-%m-%d %H:%M:%S',\n time.localtime(tag.file_info.mtime))))\n print(\"Last Accessed: {}\".format(time.strftime('%Y-%m-%d %H:%M:%S',\n time.localtime(tag.file_info.atime))))\n print(\"# {}\".format('=' * 78))", "def get_track_info_mp3(filepath, tags, stream, cover):\n tag = lambda t: get_tag(tags, t)\n discogs = extract(list(filter(lambda x: x.desc == 'DISCOGS_RELEASE_ID', tags.getall('TXXX'))))\n musicbrainz = extract(list(filter(lambda x: x.desc == 'MusicBrainz Album Id', tags.getall('TXXX'))))\n if musicbrainz: musicbrainz = extract(musicbrainz.text)\n if not cover:\n coverinfo = tags.get('APIC:')\n if coverinfo:\n if coverinfo.mime == 'image/jpeg':\n cover = os.path.dirname(filepath) + '/cover.jpg'\n else:\n raise ValueError('Not supporting %s' % coverinfo.mime)\n if cover:\n f = open(cover, 'wb+')\n f.write(coverinfo.data)\n f.close()\n\n track = sanitize_track(extract(tag('TRCK')))\n\n date = tag('TDRC') or tag('TDAT') or tag('TYER')\n return {\n \"title\": extract(tag('TIT2')),\n \"track\": track,\n \"artists\": tag('TPE1'),\n \"albumartist\": extract(tag('TPE2')) or extract(tags.get('TPE1')),\n \"album\": extract(tag('TALB')),\n \"discogs_id\": bytes(discogs).decode('utf-8') if discogs else None,\n \"musicbrainz_id\": musicbrainz,\n \"disk\": sanitize_disk(extract(tag('TPOS'))),\n \"year\": sanitize_year(extract(date)),\n \"genres\": sanitize_genres(tag('TCON')),\n \"length\": stream.length,\n \"bitrate\": stream.bitrate,\n \"size\": os.path.getsize(filepath),\n \"cover\": cover,\n \"filepath\": filepath,\n }", "def transcribeAll(service, url, fileName):\n if(service == \"omny.fm\"):\n url = url.replace(\".mp3\",\"\") + \".mp3\"\n subprocess.Popen(\"wget -c -O ./podcasts/\" + fileName + \".mp3 \" + url + \" && sleep 40 && ffmpeg -i ./podcasts/\"\n + fileName + \".mp3 -acodec pcm_s16le -ac 1 -ar 8000 ./podcasts/\" + fileName + \".wav && sleep 10 && rm ./podcasts/\" \n + fileName + \".mp3 && nohup ./online2-wav-nnet3-latgen-faster --online=false --do-endpointing=false \"\n + \"--frame-subsampling-factor=3 --config=online.conf 
--max-mem=2000000000 --max-active=7000 --beam=15.0 --lattice-beam=6.0 \"\n + \"--acoustic-scale=1.0 --word-symbol-table=words.txt final.mdl HCLG.fst 'ark:echo utterance-id\" + fileName \n + \" utterance-id\" + fileName + \"|' 'scp:echo utterance-id\" + fileName + \" ./podcasts/\" + fileName + \".wav|' 'ark:/dev/null' &\", shell=True)", "def test_gathering_links_for_audio_track(\n lep_dl: LepDL,\n) -> None:\n json_test = \"\"\"\\\n [\n {\n \"episode\": 3,\n \"date\": \"2000-01-01T00:00:00+00:00\",\n \"url\": \"https://teacherluke.co.uk/2009/04/15/episode-3-musicthe-beatles/\",\n \"post_title\": \"3. Music/The Beatles\",\n \"post_type\": \"\",\n \"files\": {\n \"audios\": [],\n \"atrack\": [\n [\n \"https://someurl1.local\", \"https://someurl2.local\", \"https://someurl3.local\"\n ]\n ]\n },\n \"parsed_at\": \"2021-10-14T07:35:24.575575Z\",\n \"index\": 2009041501,\n \"admin_note\": \"Check audio track.\"\n }\n ]\n \"\"\" # noqa: E501,B950\n db_episodes = Lep.extract_only_valid_episodes(json_test)\n lep_dl.files = downloader.gather_all_files(db_episodes)\n assert len(lep_dl.files) == 2\n assert lep_dl.files[0].primary_url == \"https://someurl1.local\"\n assert lep_dl.files[0].secondary_url == \"https://someurl2.local\"\n assert lep_dl.files[0].tertiary_url == \"https://someurl3.local\"\n assert isinstance(lep_dl.files[0], ATrack)\n assert (\n lep_dl.files[0].filename == \"[2000-01-01] # 3. Music/The Beatles _aTrack_.mp3\"\n )", "def set_meta_mp3(file):\n\n list_str_prop_mp3 = ['album', 'artist', 'title']\n list_other_prop_mp3 = ['comment', 'genre', 'year']\n dict_file_mp3 = {}\n # For each string properties into the tag\n for prop in list_str_prop_mp3:\n # If the tag exist (i.e it's not empty for the music file)\n if file.tag.d.has_key(prop.upper()):\n # We delete spe char and we format it\n dict_file_mp3[prop] = delete_spe_char_and_format(file.tag[prop.upper()])\n else:\n # Or we define it's value as 'Unknow ' + prop\n # For instance 'Unknow Artist'\n dict_file_mp3[prop] = 'Unknow ' + prop.capitalize()\n # For each other properties\n for prop in list_other_prop_mp3:\n if file.tag.d.has_key(prop.upper()):\n # We just copy them\n dict_file_mp3[prop] = file.tag[prop.upper()]\n else:\n dict_file_mp3[prop] = ''\n # To try to find the tracknumber, we need 'title'\n if dict_file_mp3.has_key('title'): \n # But before, we delete the duplicate\n list_duplicate = [dict_file_mp3['artist'], dict_file_mp3['album']]\n # Now we delete the duplicates\n dict_file_mp3['title'] = delete_duplicate(dict_file_mp3['title'], list_duplicate)\n # So we are able to find the tracknumber\n number = ''\n # If ID3 already find it\n if file.tag.d.has_key(\"TRACKNUMBER\"):\n number = file.tag[\"TRACKNUMBER\"]\n # Else we try to find by ourself\n else:\n number = find_tracknumber(dict_file_mp3['title'])\n # If we found a tracknumber, we delete it from 'title'\n if number:\n dict_file_mp3['title'] = delete_duplicate(dict_file_mp3['title'], [number])\n dict_file_mp3['tracknumber'] = number\n # And we format the new title\n dict_file_mp3['title'] = build_track_name(dict_file_mp3['title'], number)\n dict_file_mp3['name'] = dict_file_mp3['title'] + '.mp3'\n dict_file_mp3['path'] = build_path([dict_file_mp3['artist'], dict_file_mp3['album']])\n return dict_file_mp3", "def get_audio_data(filename):\n\n audio_file = eyed3.load(filename)\n artist = audio_file.tag.artist\n title = audio_file.tag.title\n time = audio_file.info.time_secs\n album = audio_file.tag.album\n genre = re.sub('^\\(.*\\)', '', 
str(audio_file.tag._getGenre().name).lower().replace('|', ',').replace('/', ','))\n\n try:\n year = audio_file.tag.getBestDate().year\n except:\n year = None\n\n comments = []\n for i in audio_file.tag.comments:\n comment = correct_playlist_names(i.text.lower().strip())\n comments += comment.replace('|', ',').replace('/', ',').strip('|').split(',')\n\n return {\n 'artist' : artist,\n 'title' : title,\n 'album' : album,\n 'time' : time,\n 'comments' : filter(None, comments),\n 'genre' : genre.split(','),\n 'year' : year\n }", "def news_speech():\n #Fetches data from API and creates global varibles.\n news_handle(news_fetch(config_fetcher('news_region'), config_fetcher('news_key')))\n #Creates a daily breifing using varibles\n news_daily_news = Markup((f\"The top headline for today is entitled: {title_1}, and was \\\nwritten by {author_1}. Here is a second headline, entitled: {title_2}, written by {author_2}.\"))\n return news_daily_news", "def subject_item(url):\n soup = abcradionational.get_soup(url)\n \n playable_podcast = abcradionational.get_playable_podcast(soup)\n\n items = abcradionational.compile_playable_podcast(playable_podcast)\n\n\n return items", "def transcribe_audio_file(filename):\n url = 'https://api.nexiwave.com/SpeechIndexing/file/storage/' + USERNAME +'/recording/?authData.passwd=' + PASSWORD + '&auto-redirect=true&response=application/json'\n\n # To receive transcript in plain text, instead of html format, comment this line out (for SMS, for example)\n #url = url + '&transcriptFormat=html'\n\n\n # Ready to send:\n sys.stderr.write(\"Send audio for transcript with \" + url + \"\\n\")\n r = requests.post(url, files={'mediaFileData': open(filename,'rb')})\n data = r.json()\n transcript = data['text']\n foo = data['text']\n f = open('newf.txt', 'w')\n f.write(foo)\n f.close() \n # Perform your magic here:\n print \"Transcript for \"+filename+\"=\" + transcript", "def dummy_add_transcript():\n return {\n \"message\": \"AddTranscript\",\n \"format\": \"2.1\",\n \"metadata\": {\n \"start_time\": 0.0, \"end_time\": 2.0, \"transcript\": \"Foo\\nBar.\"},\n \"results\": [\n {\n \"type\": \"word\",\n \"start_time\": 0.0,\n \"end_time\": 1.0,\n \"alternatives\": [\n {\"content\": \"foo\", \"confidence\": 1.0, \"language\": \"en\"},\n ],\n },\n {\n \"type\": \"speaker_change\",\n \"start_time\": 1.0,\n \"end_time\": 1.0,\n \"score\": 0.8,\n },\n {\n \"type\": \"word\",\n \"start_time\": 1.0,\n \"end_time\": 2.0,\n \"alternatives\": [\n {\"content\": \"bar\", \"confidence\": 1.0, \"language\": \"en\"},\n ],\n },\n {\n \"type\": \"punctuation\",\n \"start_time\": 2.0,\n \"end_time\": 2.0,\n \"alternatives\": [{\"content\": \".\", \"confidence\": 1.0}],\n },\n ],\n }", "def gen_dl_text(ddata, song, p):\n hdr = []\n hdr.append(\" %s%s%s\" % (c.r, song.title, c.w))\n author = p.author\n hdr.append(c.r + \" Uploaded by \" + author + c.w)\n hdr.append(\" [\" + fmt_time(song.length) + \"]\")\n hdr.append(\"\")\n\n heading = tuple(\"Item Format Quality Media Size Notes\".split())\n fmt = \" {0}%-6s %-8s %-13s %-7s %-5s %-16s{1}\"\n heading = [fmt.format(c.w, c.w) % heading]\n heading.append(\"\")\n\n content = []\n\n for n, d in enumerate(ddata):\n row = (n + 1, d['ext'], d['quality'], d['mediatype'], d['size'],\n d['notes'])\n fmt = \" {0}%-6s %-8s %-13s %-7s %5s Mb %-16s{1}\"\n row = fmt.format(c.g, c.w) % row\n content.append(row)\n\n content.append(\"\")\n\n footer = \"Select [%s1-%s%s] to download or [%sEnter%s] to return\"\n footer = [footer % (c.y, len(content) - 1, c.w, c.y, c.w)]\n 
return(content, hdr, heading, footer)", "def add_simple_metadata(file_path, artist='', title='', album='', albumartist='', override=False):\r\n try:\r\n audio = EasyID3(file_path)\r\n except mutagen.id3.ID3NoHeaderError:\r\n audio = File(file_path)\r\n audio.add_tags()\r\n audio.save()\r\n audio = EasyID3(file_path)\r\n filename = pathlib.Path(file_path).name\r\n advanced_audio = File(file_path)\r\n try:\r\n if (not override and audio.get('title', '') and audio.get('artist', '')\r\n and audio.get('albumartist', '') and has_album_cover(file_path)) and 'TDRC' in advanced_audio: return False\r\n if not artist: artist = get_artist(filename)\r\n else:\r\n if artist.count(' , '): artist.split(' , ')\r\n elif artist.count(' ,'): artist = artist.split(' ,')\r\n elif artist.count(', '): artist = artist.split(', ')\r\n elif artist.count(','): artist = artist.split(',')\r\n if not title: title = filename.split(' - ')[-1][:-4]\r\n if override:\r\n audio['title'] = title\r\n audio['artist'] = artist\r\n if album: audio['album'] = album\r\n if albumartist: audio['albumartist'] = albumartist\r\n else:\r\n if 'album' not in audio:\r\n if album == '': audio['album'] = title\r\n else: audio['album'] = album\r\n if 'title' not in audio: audio['title'] = title\r\n if 'artist' not in audio: audio['artist'] = artist\r\n if 'albumartist' not in audio:\r\n if albumartist: audio['albumartist'] = albumartist\r\n else: audio['albumartist'] = artist\r\n audio.save()\r\n audio = MP3(file_path)\r\n # if artist and title and override or audio.get('TDRC', False):\r\n # auto_set_year(audio, artist, title)\r\n if not has_album_cover(file_path):\r\n if not set_album_cover(file_path):\r\n print(f'Album art not found for {file_path}')\r\n except MutagenError:\r\n print(f'{filename} in use')\r\n return False\r\n except ValueError as e:\r\n print(e)\r\n print('Error adding metadata to', filename)\r\n return False\r\n return True", "def test_extracting_audio_data(\n only_audio_episodes: LepEpisodeList,\n lep_dl: LepDL,\n) -> None:\n expected_audio = Audio(\n ep_id=2009101908, # many posts in that day\n name=\"15. Extra Podcast – 12 Phrasal Verbs\",\n short_date=\"2009-10-19\",\n filename=\"[2009-10-19] # 15. Extra Podcast – 12 Phrasal Verbs\",\n primary_url=\"http://traffic.libsyn.com/teacherluke/15-extra-podcast-12-phrasal-verbs.mp3\", # noqa: E501,B950\n )\n lep_dl.files = downloader.gather_all_files(only_audio_episodes)\n audio_files = lep_dl.files.filter_by_type(Audio)\n assert audio_files[1] == expected_audio", "def oc_metadata(row):\n t = _parse_date(row['startTime'])\n\n def _make_field(id_, value):\n return {'id': id_, 'value': value}\n\n return [\n {\n 'flavor': 'dublincore/episode',\n 'fields': [\n _make_field('title', row['title']),\n _make_field('description', row['courseDescription']),\n _make_field('startDate', t.strftime(\"%Y-%m-%d\")),\n _make_field('startTime', t.strftime(\"%H:%M:%SZ\")),\n ],\n }\n ]", "def checkPre(dbConnection):\n cursor = dbConnection.cursor()\n cursor.execute(\"SELECT audiourl, T.id, podcastName, source FROM transcriptions AS T JOIN podcasts as P ON P.name = T.podcastname WHERE COALESCE(T.transcription, '') = '' AND pending = FALSE LIMIT 1;\")\n entry = cursor.fetchone()\n cursor.close()\n return entry", "def test_gathering_multi_part_audio_track(\n lep_dl: LepDL,\n) -> None:\n json_test = \"\"\"\\\n [\n {\n \"episode\": 3,\n \"date\": \"2000-01-01T00:00:00+00:00\",\n \"url\": \"https://teacherluke.co.uk/2009/04/15/episode-3-musicthe-beatles/\",\n \"post_title\": \"3. 
Music/The Beatles\",\n \"post_type\": \"\",\n \"files\": {\n \"audios\": [],\n \"atrack\": [\n [\n \"https://someurl1.local\", \"https://someurl2.local\", \"https://someurl3.local\"\n ],\n [\n \"https://part2-someurl1.local\", \"https://part2-someurl2.local\"\n ]\n ]\n },\n \"parsed_at\": \"2021-10-14T07:35:24.575575Z\",\n \"index\": 2009041501,\n \"admin_note\": \"Check audio track.\"\n }\n ]\n \"\"\" # noqa: E501,B950\n db_episodes = Lep.extract_only_valid_episodes(json_test)\n lep_dl.files = downloader.gather_all_files(db_episodes)\n assert len(lep_dl.files) == 3\n assert lep_dl.files[0].secondary_url == \"https://someurl2.local\"\n assert lep_dl.files[0].tertiary_url == \"https://someurl3.local\"\n assert lep_dl.files[1].secondary_url == \"https://part2-someurl2.local\"\n assert isinstance(lep_dl.files[0], ATrack)\n assert isinstance(lep_dl.files[1], ATrack)\n assert (\n lep_dl.files[0].filename\n == \"[2000-01-01] # 3. Music/The Beatles [Part 01] _aTrack_.mp3\"\n )\n assert (\n lep_dl.files[1].filename\n == \"[2000-01-01] # 3. Music/The Beatles [Part 02] _aTrack_.mp3\"\n )", "def to_m3u_track(record: Dict[str, str]) -> str:\n\n location = normalize(unquote(record.get(\"Location\")))\n\n # m3u duration in seconds, not ms\n duration = int(record.get(\"Total Time\")) // 1000\n name = normalize(unquote(record.get(\"Name\")))\n artist = normalize(unquote(\n record.get(\"Artist\") or\n record.get(\"Album Artist\") or\n record.get(\"Composer\", \"\")\n ))\n # print(\"Location {}\".format(location))\n return M3U_TRACK_TEMPLATE.format(\n length=duration,\n artist=artist,\n title=name,\n path=location\n )", "def read_transcription_file(file_path, audio_file_path):\n with open(file_path) as in_file:\n last_timestamp = 0\n res = []\n transcription = \"\"\n for line in in_file:\n time_stamp_match = re.match(\"\\[([0-9\\]+\\.[0-9]+)\\]\", line)\n #if this regex matched then the line is a timestamp\n if time_stamp_match:\n timestamp = float(time_stamp_match.group(1))\n if transcription and transcription.strip() not in ['(())', \"<no-speech>\"]:\n single_instance = {\"start_time\": last_timestamp, \n \"end_time\": timestamp,\n \"transcription\": transcription,\n \"audio_file\" : audio_file_path}\n res.append(single_instance)\n last_timestamp = timestamp\n else:\n last_timestamp = timestamp # this handles silence at beginning\n else:\n transcription = line.strip()\n \n return res", "def process_transcript(transcript_label):\n transcript_key = f\"{transcript_label}.json\"\n\n # Load Transcribe output from S3.\n raw_transcript = get_transcribe_output(transcript_key)\n\n # Parse to assign speaker parts.\n speaker_parts = assign_speakers(raw_transcript)\n\n # Identify Karen and Georgia.\n assigned = karen_or_georgia(speaker_parts)\n\n # Update the full transcript.\n build_transcript(assigned)\n\n # Upload the latest transcript to S3.\n s3 = boto3.resource(\"s3\")\n s3.Bucket(os.getenv(\"S3_BUCKET\")).upload_file(\"main_transcript.txt\", \"main_transcript.txt\")", "def track_04():\n sonos.play_uri('http://stream.sunshine-live.de/live/mp3-192', title='Sunshine Live', force_radio=True)\n return \"Ok\"", "def uploadPodcast(dbConnection, homepage, name, description, category, source, imageurl, web, twitter, facebook, rss):\n try:\n cursor = dbConnection.cursor()\n name = name.replace(\"'\", \"''\")\n description = description.replace(\"'\", \"''\")\n cursor.execute(\"\"\"INSERT INTO podcasts(homepage, name, description, category, source, imageuri, web, twitter, Facebook, rss) VALUES(%s, %s, %s, %s, %s, %s, %s, 
%s, %s, %s);\"\"\", (homepage, name, description, category, source, imageurl, web, twitter, facebook, rss))\n dbConnection.commit()\n cursor.close()\n return True\n except Exception as e:\n\t\t Tools.writeException(\"insertHeader\", \"e\")\n return False", "def parse_song_data(data):\r\n song_title_regex = re.compile(r'<title>([\\S\\s]+)</title>')\r\n\r\n match = song_title_regex.search(data)\r\n\r\n song_title = match.groups(0)[0]\r\n\r\n # Replaces the HTML code for apostrophe with the symbol\r\n return re.sub(r'&#39;', \"\\'\", song_title)", "async def download_audio(event):\n url = event.pattern_match.group(1)\n rmsg = await event.get_reply_message()\n if not url and rmsg:\n myString = rmsg.text\n url = re.search(\"(?P<url>https?://[^\\s]+)\", myString).group(\"url\")\n if not url:\n return await edit_or_reply(event, \"`What I am Supposed to find? Give link`\")\n codevent = await edit_or_reply(event, \"`Preparing to download...`\")\n reply_to_id = await reply_id(event)\n ytdl_data = await ytdl_down(codevent, audio_opts, url)\n if ytdl_data is None:\n return\n await codevent.edit(\n f\"`Preparing to upload song:`\\\n \\n**{ytdl_data['title']}**\\\n \\nby *{ytdl_data['uploader']}*\"\n )\n f = pathlib.Path(f\"{ytdl_data['title']}.mp3\".replace(\"|\", \"_\"))\n codthumb = pathlib.Path(f\"{ytdl_data['title']}.mp3.jpg\".replace(\"|\", \"_\"))\n if not os.path.exists(codthumb):\n codthumb = pathlib.Path(f\"{ytdl_data['title']}.mp3.webp\".replace(\"|\", \"_\"))\n if not os.path.exists(codthumb):\n codthumb = None\n c_time = time.time()\n ul = io.open(f, \"rb\")\n uploaded = await event.client.fast_upload_file(\n file=ul,\n progress_callback=lambda d, t: asyncio.get_event_loop().create_task(\n progress(d, t, codevent, c_time, \"upload\", file_name=f)\n ),\n )\n ul.close()\n attributes, mime_type = await fix_attributes(f, ytdl_data, supports_streaming=True)\n media = types.InputMediaUploadedDocument(\n file=uploaded,\n mime_type=mime_type,\n attributes=attributes,\n thumb=await event.client.upload_file(codthumb) if codthumb else None,\n )\n await event.client.send_file(\n event.chat_id,\n file=media,\n reply_to=reply_to_id,\n caption=ytdl_data[\"title\"],\n supports_streaming=True,\n force_document=False,\n )\n os.remove(f)\n if codthumb:\n os.remove(codthumb)\n await codevent.delete()", "def get_transcription(url):\n\n # Checks the format of the URL\n if \"https://www.youtube.com/watch?v=\" in url:\n input_url_id = url.replace(\"https://www.youtube.com/watch?v=\", \"\")\n elif \"https://youtu.be/\" in url:\n input_url_id = url.replace(\"https://youtu.be/\", \"\")\n\n # Creates a blank list to iterate over\n text_parts = []\n\n # Gets a list of all available transcripts\n try:\n\n list_of_transcripts = YouTubeTranscriptApi.list_transcripts(input_url_id)\n print(\"Checking for Transcriptions...\")\n\n # Checks to see if a manual transcript is created if not, checks to see if a generated one is created\n if 'en-US' in list_of_transcripts._manually_created_transcripts:\n print(\"Manual Transcription Found.\")\n transcript = list_of_transcripts.find_manually_created_transcript(['en-US'])\n elif 'en' in list_of_transcripts._manually_created_transcripts:\n print(\"Manual Transcription Found.\")\n transcript = list_of_transcripts.find_manually_created_transcript(['en'])\n elif 'en' in list_of_transcripts._generated_transcripts:\n print(\"Auto-Generated Transcription Found.\")\n transcript = list_of_transcripts.find_generated_transcript(['en'])\n\n # Saves the transcript into a variable to iterate 
over\n raw_transcription = transcript.fetch()\n\n # Indexing of raw transcripts\n iteration_of_raw = 0\n\n # Iterates over each dictionary and extracts 'text' key then appends the blank text_parts list\n for i in raw_transcription:\n indexed_dictionary = raw_transcription[iteration_of_raw]\n text_from_dictionary = indexed_dictionary['text']\n text_parts.append(text_from_dictionary)\n iteration_of_raw += 1\n # Defines how we want each text element to be separated with\n separator_for_each_text = \" \"\n\n # Joins the separator with the text_parts\n clean_transcription = separator_for_each_text.join(text_parts)\n\n # Returns the cleaned transcripts\n return clean_transcription\n\n except:\n print(\"No Transcriptions Found\")\n clean_transcription = \"No Transcriptions Found\"\n return clean_transcription", "def get_line_data(line):\n line_parts = line.split(\",\")\n\n cast_player = line_parts[2].strip('\"').split(\"-\")[0]\n target = line_parts[6].strip('\"')\n if \"-\" in target:\n target = target.split(\"-\")[0]\n\n spell_id = line_parts[9]\n spell_name = line_parts[10]\n\n return (spell_id, spell_name, cast_player, target)", "def get_short(self):\n data = self.nowplaying()\n #print(str(data))\n\n if \"error\" in data:\n print(\"mpd error detected, skipping status\")\n return data[\"error\"]\n\n #truncate strings to 60 chars cause that's what we have on the MCU\n \n msg_list = []\n if len(data['title']) > 0:\n title_str = data['title']\n # special case for WFMU which puts way too much stuff in title\n if 'on WFMU' in title_str:\n title_list = title_str.split('on WFMU')\n for title in title_list:\n msg_list.append(title[:60])\n else:\n msg_list.append(title_str[:60])\n msg_list.append(data['name'][:60])\n if data['state'] == 'play':\n msg_list.append(\"-{}-\".format(data['artist'][:60]))\n if len(data['song']) > 1:\n msg_list.append(\"{}\".format(data['song'][:60]))\n else:\n msg_list.append(\"* {} *\".format(data['state']))\n\n #print(\"msg_list:\")\n #print(str(msg_list))\n sys.stdout.flush()\n self.log_status(msg_list)\n return msg_list", "def get_TAL_URL( epno, verify = True ):\n url_epelem = 'https://www.thisamericanlife.org/radio-archives/episode/%d' % epno\n response = requests.get( url_epelem, verify = verify )\n if not response.ok:\n logging.info( 'ERROR, %s not accessible' % url_epelem )\n return None\n html = BeautifulSoup( response.content, 'html.parser' )\n #\n ## now find podcast URL from the enclosing A element whose class has text Download\n def is_download_href( href_elem ):\n if 'href' not in href_elem.attrs: return False\n valid_label_elems = list(\n filter(lambda elem: 'class' in elem.attrs and elem['class'] == [ 'label' ], href_elem.find_all('span' ) ) )\n if len( valid_label_elems ) != 1: return False\n valid_label_elem = valid_label_elems[ 0 ]\n return valid_label_elem.text.strip( ) == 'Download'\n #\n podcast_URL_elems = list(filter(is_download_href, html.find_all('a')))\n if len( podcast_URL_elems ) != 1:\n logging.info( 'ERROR, could not find MP3 podcast URL for episode %d, with page %s.' 
% (\n epno, url_epelem ) )\n return None\n podcast_URL_elem = podcast_URL_elems[ 0 ]\n podcast_URL = podcast_URL_elem['href']\n return podcast_URL", "def add_to_playlist(file, list, data = None):\n\n if not list:\n return\n\n exists = os.path.isfile(list)\n playlist = open(list, 'a')\n if not exists:\n playlist.write(\"#EXTM3U\\n\")\n\n if data:\n metadata = u\"#EXTINF: {}, {} - {} \\n\".format(data['time'], data['artist'], data['title'])\n playlist.write(metadata.encode('utf8'))\n\n playlist.write(file + \"\\n\")\n playlist.close()\n try:\n print 'Added to {}'.format(os.path.basename(list))\n except:\n pass", "def _parse_track_line(self, inp):\n self.metadata = {}\n ltmp = shlex.split(inp.strip(\"\\n\"))\n for item in ltmp:\n k, v = item.split(\"=\")\n self.metadata[k] = v\n\n track_type = self.metadata.get(\"type\", None)\n if track_type is not None:\n if track_type in bed_x_formats:\n self.printer.write(\n \"Found track type '%s' in track definition line. Assuming extra columns follow UCSC definitions.\"\n % track_type\n )\n if self.extra_columns == 0:\n self.extra_columns = bed_x_formats[track_type]\n elif self.extra_columns != bed_x_formats[track_type]:\n my_columns = self._get_extra_column_names()\n track_format_columns = \",\".join([X[0] for X in bed_x_formats[track_type]])\n warn(\"Extra columns specified by %s track type declaration (%s) don't match those specified by user (%s). Using those specified by user.\" %\\\n (track_type,track_format_columns,my_columns),FileFormatWarning)\n self.metadata[\"type\"] = \"custom\"\n else:\n self.printer.write(\"Found track type '%s' in track definition line.\" % track_type)", "def track_01():\n sonos.play_uri('http://mp3stream1.apasf.apa.at:8000', title='FM4.ORF.AT', force_radio=True)\n return \"Ok\"", "def signal_metadata(self, url, artist=None, title=None):\n\n # default values to return\n path = None\n full_title = None\n lyrics = 'No lyrics'\n\n # if url is null, send an empty message with the default values\n # (happens when the player has just started and is not playing)\n # if not, extract lyrics\n if url != None:\n # decode path from url\n path = urllib.parse.urlparse(url).path\n path = urllib.parse.unquote(path)\n\n # extract the artist name and title\n # then create a window title from them\n full_title = artist + ' - ' + title\n\n try:\n # extract the lyrics from the file using mutagen\n tags = mutagen.id3.ID3(path)\n lyrics_tag = tags.getall('USLT')\n\n if len(lyrics_tag) > 0:\n lyrics = lyrics_tag[0].text\n except mutagen.id3.ID3NoHeaderError:\n # no lyrics in the file\n pass\n\n # do not return /home/username if we can replace it with '~'\n home = GLib.get_home_dir()\n if path.startswith(home):\n path = path.replace(home, '~', 1)\n\n self.callback_func(path, full_title, lyrics)", "def get_track_info_mp4(filepath, tags, stream, cover=None):\n discogs = extract(tags.get('----:com.apple.iTunes:DISCOGS_RELEASE_ID'))\n if not cover:\n coverinfo = extract(tags.get('covr'))\n if coverinfo:\n if coverinfo.imageformat == mutagen.mp4.AtomDataType.JPEG:\n cover = os.path.dirname(filepath) + '/cover.jpg'\n elif coverinfo.imageformat == mutagen.mp4.AtomDataType.PNG:\n cover = os.path.dirname(filepath) + '/cover.png'\n if cover:\n f = open(cover, 'wb+')\n f.write(bytes(coverinfo))\n f.close()\n\n return {\n \"title\": extract(tags.get('\\xa9nam')),\n \"track\": sanitize_track(extract(tags.get('trkn'))),\n \"artists\": tags.get('\\xa9ART'),\n \"albumartist\": extract(tags.get('aART')) or extract(tags.get('\\xa9ART')),\n \"album\": 
extract(tags.get('\\xa9alb')),\n \"discogs_id\": bytes(discogs).decode('utf-8') if discogs else None,\n \"musicbrainz_id\": \"\",\n \"disk\": sanitize_disk(extract(tags.get('disk'))),\n \"year\": sanitize_year(extract(tags.get('\\xa9day'))),\n \"genres\": sanitize_genres(tags.get('\\xa9gen')),\n \"length\": stream.length,\n \"bitrate\": stream.bitrate,\n \"size\": os.path.getsize(filepath),\n \"cover\": cover,\n \"filepath\": filepath,\n }", "def create_dp_playlist(msg):\n print ''\n print '------'\n print '***Dynamic Programming method***'\n print 'Original message: ', msg\n # Normalize and tokenize message and use it to query songs\n words = normalize(msg).split(' ')\n songs = ngram_search(words)\n # Form playlist and print\n playlist = dp_parse(normalize(msg), songs=songs)\n print 'Playlist: '\n print '# | SONG TITLE | ARTIST | ALBUM'\n for i, p in enumerate(playlist[2]):\n song_info = '{0} | {1} | {2}'.format(p.Title, ', '.join(p.Artists),\n p.Album)\n print '{0}. | '.format(i + 1) + song_info", "def update_podcast(_id, _name_of_the_podcast, _duration_in_number_of_seconds,\r\n _host, _participants):\r\n podcast_to_update = Podcast.query.filter_by(id=_id).first()\r\n podcast_to_update.name_of_the_podcast = _name_of_the_podcast\r\n podcast_to_update.duration_in_number_of_seconds = _duration_in_number_of_seconds\r\n podcast_to_update.host = _host\r\n podcast_to_update.participants = _participants\r\n db.session.commit()", "def assign_speakers(transcript):\n speaker_time_labels = {seg[\"start_time\"]: seg[\"speaker_label\"] for seg in\n transcript[\"results\"][\"speaker_labels\"][\"segments\"]}\n\n speaker_transcript = []\n speaker = None\n current_line = ''\n\n for i in transcript[\"results\"][\"items\"]:\n # What was said.\n content = i[\"alternatives\"][0][\"content\"]\n # Check if the current speaker has changed. 
Otherwise keep the same speaker.\n try:\n current_speaker = speaker_time_labels[i[\"start_time\"]]\n\n if speaker:\n if speaker != current_speaker:\n speaker_transcript.append({\"speaker\": speaker, \"content\": current_line})\n current_line = ''\n except KeyError:\n current_speaker = speaker\n\n # Check if punctuation.\n if i[\"type\"] == \"punctuation\":\n current_line += content\n elif current_line == '':\n current_line += content\n else:\n current_line += f' {content}'\n\n speaker = current_speaker\n\n return speaker_transcript", "def get_Metadata(metafile):\n\n mslist_file = open(metafile, 'r')\n LINES = mslist_file.readlines()\n mslist_file.close()\n\n nBlocks = 6 # these are the number of correlator cards (PILOT survey value)\n \n obs_date = 'Observed from'\n code = 'Code'\n duration = 'Total elapsed time'\n antenna = 'antennas'\n frame = 'Frame'\n \n for i in range(len(LINES)):\n line = LINES[i]\n if line.find(antenna) >=0:\n TOKS = line.split()\n n_ant = TOKS[5][-2:]\n if line.find(obs_date) >=0:\n TOKS = line.split()\n start_obs_date = TOKS[6]\n end_obs_date = TOKS[8]\n if line.find(duration) >=0:\n TOKS = line.split()\n tobs = float(TOKS[10]) # in second\n if line.find(code) >= 0:\n next_line = LINES[i+1]\n TOKS = next_line.split()\n field = TOKS[5]\n ra = TOKS[6][:-5]\n dec = TOKS[7][:-4]\n if line.find(frame) >= 0:\n next_line = LINES[i+1]\n TOKS = next_line.split()\n total_obs_bw = float(TOKS[10])*nBlocks/1000.0 # kHz to MHz \n \n return n_ant, start_obs_date, end_obs_date, tobs, field, ra, dec, total_obs_bw", "def add_podcast(_name_of_the_podcast, _duration_in_number_of_seconds,\r\n _host, _participants):\r\n # creating an instance of our Podcast constructor\r\n new_podcast = Podcast(name_of_the_podcast=_name_of_the_podcast,\r\n duration_in_number_of_seconds=_duration_in_number_of_seconds,\r\n host=_host, participants=_participants)\r\n db.session.add(new_podcast) # add new Podcast to database session\r\n db.session.commit() # commit changes to session\r", "def detect_netease_music_name(file_path, dist_path, KEEP_SOURCE=True):\n headers = {\n \"User-Agent\": \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0\"\n }\n url_base = \"http://music.163.com/api/song/detail/?id={}&ids=[{}]\"\n\n if not os.path.exists(dist_path):\n os.mkdir(dist_path)\n\n for file_name in os.listdir(file_path):\n if not file_name.endswith(\".mp3\"):\n continue\n if not len(file_name.split(\"-\")) == 3:\n print(\n \">>>> File %s not in format <song id>-<bite rate>-<random number>.mp3\"\n % (file_name)\n )\n continue\n\n try:\n song_id = file_name.split(\"-\")[0]\n url_target = url_base.format(song_id, song_id)\n resp = requests.get(url_target, headers=headers)\n rr = json.loads(resp.text)\n\n tt = eyed3.load(os.path.join(file_path, file_name))\n tt.tag.title = rr[\"songs\"][0][\"name\"].replace(\"\\xa0\", \" \")\n tt.tag.artist = rr[\"songs\"][0][\"artists\"][0][\"name\"]\n tt.tag.album = rr[\"songs\"][0][\"album\"][\"name\"]\n tt.tag.album_artist = rr[\"songs\"][0][\"album\"][\"artists\"][0][\"name\"]\n print(\n \"song_id = %s, tt.tag title = %s, artist = %s, album = %s, album_artist = %s\"\n % (\n song_id,\n tt.tag.title,\n tt.tag.artist,\n tt.tag.album,\n tt.tag.album_artist,\n )\n )\n tt.tag.save()\n except UnicodeEncodeError as e:\n print(\n \">>>> UnicodeEncodeError, try again later: file_name = %s, error = %s\"\n % (file_name, str(e))\n )\n continue\n except:\n print(\">>>> Some other error happens: file_name = %s\" % (file_name))\n continue\n\n dist_name = (\n 
os.path.join(\n dist_path,\n \"%s - %s\"\n % (tt.tag.artist.replace(\"/\", \" \"), tt.tag.title.replace(\"/\", \" \")),\n )\n + \".mp3\"\n )\n \n if KEEP_SOURCE == True:\n shutil.copyfile(os.path.join(file_path, file_name), dist_name)\n else:\n os.rename(os.path.join(file_path, file_name), dist_name)", "def parse_play(play):\n return None", "def podcast(user_uid, podcast_id):\n try:\n podcast = Podcast.load(user_uid, podcast_id)\n podcast.last_accessed = datetime.datetime.utcnow()\n podcast.save()\n except Exception:\n abort(404)\n return Response(podcast.feed.to_rss(), mimetype=\"text/xml\")", "def parseShowDetails(self, data):\n cast = []\n\n title = data[\"name\"]\n\n year = data[\"premiered\"]\n try:\n year = year[:4] # just get the year from premiere date\n except TypeError:\n year = \"N/A\" # premiere date unavailable\n\n imdbRating = data[\"rating\"][\"average\"]\n\n try:\n network = data[\"network\"][\"name\"]\n except TypeError:\n network = \"N/A\" # network unavailable\n\n try:\n streaming = data[\"webChannel\"][\"name\"]\n except TypeError:\n streaming = \"N/A\"\n\n try:\n poster = data[\"image\"][\"medium\"]\n except TypeError:\n poster = \"N/A\" # poster unavailable\n\n summary = re.sub(\"<.*?>\", \"\", data[\"summary\"]) # remove HTML tags\n\n count = 0\n for member in data[\"_embedded\"][\"cast\"]:\n if(count == 3): # only get first 3 cast members listed\n break\n\n try:\n castImage = member[\"person\"][\"image\"][\"medium\"]\n except TypeError:\n castImage = \"N/A\" # cast image unavailable\n\n tempCast = {\n \"name\": member[\"person\"][\"name\"],\n \"character\": member[\"character\"][\"name\"],\n \"image\": castImage\n }\n cast.append(tempCast)\n count += 1\n\n numSeasons = self.getNumSeasons()\n\n details = {\n \"id\": self.__showID,\n \"title\": title,\n \"year\": year,\n \"numSeasons\": numSeasons,\n \"imdbRating\": imdbRating,\n \"network\": network,\n \"streaming\": streaming,\n \"poster\": poster,\n \"summary\": summary,\n \"cast\": cast,\n }\n\n return details", "def create_artist_new_music_line(spotify_artist_music):\n body = ''\n for item in spotify_artist_music:\n if item['thumbnail']:\n artist_string = '<p><img src=\"{}\" width=\"{}\" height=\"{}\" /> {} released on {}--{}</p>\\n'\n body += artist_string.format(item['thumbnail'][0]['url'], item['thumbnail'][0]['width'],\n item['thumbnail'][0]['height'], item['name'], item['releaseDate'], item['url'])\n return body", "def download_data(self, format = 'srt'):\n \n def to_ascii(data):\n \"\"\"\n Remove non-ascii characters\n \"\"\"\n return ''.join([x for x in data if ord(x) < 128])\n \n url = self.config.subtitles['url'] % (self.media_resource_id)\n \n print url\n resp, content = httplib2.Http(\".cache\").request(url, \"GET\")\n \n subtitles = json.loads(content)['subtitles']\n \n final_data = ''\n if format == 'srt':\n final_data += self.to_srt(subtitles)\n else:\n final_data += subtitles\n \n return to_ascii(final_data)", "def get_podcast(self, object_id):\n return self.get_object(\"podcast\", object_id)", "def update_transcript_info(ensembl_info, word, value):\n if \"transcript\" in word:\n if \"id\" in word:\n ensembl_info[\"ensembl_transcript_id\"] = value\n elif \"start\" in word:\n ensembl_info[\"transcript_start\"] = int(value)\n elif \"end\" in word:\n ensembl_info[\"transcript_end\"] = int(value)\n return ensembl_info", "def get_american_life(epno, directory = '/mnt/media/thisamericanlife', extraStuff = None):\n\n try:\n title, year = get_americanlife_info(epno, extraStuff = extraStuff)\n except 
ValueError as e:\n print(e)\n print('Cannot find date and title for This American Life episode #%d.' % epno)\n return\n\n if not os.path.isdir(directory):\n raise ValueError(\"Error, %s is not a directory.\" % directory)\n outfile = os.path.join(directory, 'PRI.ThisAmericanLife.%03d.mp3' % epno) \n urlopn = 'http://www.podtrac.com/pts/redirect.mp3/podcast.thisamericanlife.org/podcast/%d.mp3' % epno\n\n resp = requests.get( urlopn, stream = True )\n if not resp.ok:\n urlopn = 'http://audio.thisamericanlife.org/jomamashouse/ismymamashouse/%d.mp3' % epno\n resp = requests.get( urlopn, stream = True )\n if not resp.ok:\n print(\"Error, could not download This American Life episode #%d. Exiting...\" % epno)\n return\n with open( outfile, 'wb') as openfile:\n for chunk in resp.iter_content(65536):\n openfile.write( chunk )\n \n mp3tags = ID3( )\n mp3tags['TDRC'] = TDRC(encoding = 0, text = [ u'%d' % year ])\n mp3tags['TALB'] = TALB(encoding = 0, text = [ u'This American Life' ])\n mp3tags['TRCK'] = TRCK(encoding = 0, text = [ u'%d' % epno ])\n mp3tags['TPE2'] = TPE2(encoding = 0, text = [u'Chicago Public Media'])\n mp3tags['TPE1'] = TPE1(encoding = 0, text = [u'Ira Glass'])\n mp3tags['TIT2'] = TIT2(encoding = 0, text = [u'#%03d: %s' % ( epno, title ) ])\n mp3tags['TCON'] = TCON(encoding = 0, text = [u'Podcast'])\n mp3tags.save( outfile )", "def out_line(song_info):\n datev = fix_date(song_info[2])\n retv = [song_info[0], str(song_info[3] + 1), datev, song_info[1]]\n return retv", "def collect_metadata(path, output_csv=None):\n audio_files = list_audio_files(path)\n\n if not audio_files:\n print_error('No audio files where found in {}'.format(path))\n sys.exit(1)\n\n event_name = os.path.basename(os.path.dirname(path))\n\n metadata_list = MetadataList()\n\n if not output_csv:\n output_csv = '{}.csv'.format(event_name)\n\n if os.path.isfile(output_csv):\n metadata_list.read_from_csv(output_csv)\n\n clear_and_title(\n 'Welcome to CAPS, a SALTY Conference Audio Processing System'\n )\n\n try:\n # Wrap user input block in try-except to catch Ctrl-C and Ctrl+D\n # input so data can be saved before exiting cleanly\n print_info('\\nFound {} audio files'.format(len(audio_files)))\n\n event_name = prompt(\n input_prompt=\"Event\",\n message='\\nEnter event name',\n condition=lambda x: True if x else False,\n error=\"You must enter an event name\",\n default=event_name,\n )\n\n if not confirm('\\nAre you ready to play audio? 
Raw audio could be very loud.', default='yes'):\n sys.exit(0)\n\n for file in audio_files:\n clear_and_title('\\nOpening ' + file)\n\n metadata = metadata_list.get_item('filepath', file)\n\n if metadata:\n metadata.print_pretty()\n\n with VLCPlayer(file) as vlc:\n if confirm('\\nSkip this file?', default='yes'):\n continue\n\n if not metadata:\n metadata = metadata_list.add_item({\n 'filepath': file,\n 'event_name': event_name,\n 'title': None,\n 'speakers': None,\n 'segments': None,\n })\n\n metadata['title'] = prompt(\n input_prompt='Title',\n message='\\nEnter the title for this audio',\n condition=lambda x: True if x else False,\n error='You must enter a title',\n default=metadata['title'],\n )\n\n metadata['speakers'] = multi_prompt(\n input_prompt='Speaker',\n message='\\nInput each speakers name',\n defaults=metadata['speakers'],\n )\n\n metadata['segments'] = multi_prompt(\n input_prompt='Segment',\n message='\\nInput start and end cut of each audio segment (hh:mm:ss-hh:mm:ss)',\n condition=is_valid_segment,\n error='You must input the correct format (hh:mm:ss-hh:mm:ss)'\n ' and start cut must precede end cut',\n defaults=metadata['segments'],\n )\n\n except (KeyboardInterrupt, EOFError):\n print_error('\\nAborted')\n else:\n return metadata_list\n finally:\n if metadata_list and output_csv:\n metadata_list.write_to_csv(output_csv)", "def test_load_mp3_file(self):\n track = Track.from_filename(self.track_path('silence.mp3'))\n self.assertEqual(track.artist, 'Artist')\n self.assertEqual(track.album, 'Album')\n self.assertEqual(track.title, 'Track')\n self.assertEqual(track.ensemble, 'Group')\n self.assertEqual(track.composer, 'Composer')\n self.assertEqual(track.conductor, 'Conductor')\n self.assertEqual(track.tracknum, 1)\n self.assertEqual(track.seconds, 2.0)", "def PrintMetadata(self):\n def PrintTrack(trackno, track):\n output = [f\"File {str(trackno + 1).zfill(2)}:\"]\n with IgnoreKeyError:\n output.append(f\"Disc {track['disc']}\")\n with IgnoreKeyError:\n output.append(f\"Side {track['side']}\")\n output.append(f\"Track {track['track'].ljust(2)}\")\n with IgnoreKeyError:\n output.append(f\"Phase {track['phase']}\")\n with IgnoreKeyError:\n output.append(f\"Subindex {track['subindex']}\")\n output.append(f\"Time {track['start_time']}\")\n output.append(f'\"{track[\"title\"]}\"')\n with IgnoreKeyError:\n output[-1] = f'{output[-1][:-1]}: {track[\"subtitle\"]}\"'\n print(' '.join(output))\n\n print(self)\n for trackno, track in enumerate(self.tracks):\n PrintTrack(trackno, track)\n filename = self.GetOutputFilename().replace(ext.WAV, ext.MKA)\n print(\"Filename:\", filename)", "def get_metadata(diagnostics_dir, verbose=False):\n metafile = find_metadata_file(diagnostics_dir, 'mslist-2*txt', verbose=False)\n\n with open(metafile, 'r') as mslist_file:\n lines = mslist_file.readlines()\n\n nBlocks = 6 # these are the number of correlator cards (PILOT survey value)\n \n obs_metadata = ObservationMetadata()\n\n obs_date = 'Observed from'\n fields = 'Fields'\n code = 'Code'\n duration = 'Total elapsed time'\n antenna = 'antennas'\n frame = 'Frame'\n \n field_list = []\n\n for i in range(len(lines)):\n line = lines[i]\n if line.find(antenna) >=0:\n toks = line.split()\n obs_metadata.n_ant = toks[5][-2:]\n if line.find(obs_date) >=0:\n toks = line.split()\n obs_metadata.start_obs_date = toks[6]\n obs_metadata.end_obs_date = toks[8]\n if line.find(duration) >=0:\n toks = line.split()\n obs_metadata.tobs = float(toks[10]) # in second\n\n # Field details\n if line.find(fields) >=0:\n toks 
= line.split()\n obs_metadata.num_fields = int(toks[-1])\n\n if line.find(code) >= 0:\n for j in range(obs_metadata.num_fields):\n field_metadata = FieldMetadata()\n field_line = lines[i+j+1]\n toks = field_line.split()\n field_metadata.name = toks[5]\n field_metadata.ra = toks[6][:-5]\n field_metadata.dec = toks[7][:-4]\n field_metadata.num_rows = int(toks[9])\n obs_metadata.fields.append(field_metadata)\n\n if line.find(frame) >= 0:\n next_line = lines[i+1]\n toks = next_line.split()\n obs_metadata.total_obs_bw = float(toks[10])*nBlocks/1000.0 # kHz to MHz \n \n return obs_metadata #n_ant, start_obs_date, end_obs_date, tobs, field, ra, dec, total_obs_bw", "def encodeMP3(self, wavf: str, dstf: str, cover: str, meta: TrackMeta) -> None:\n FNULL = open(os.devnull, 'w')\n subprocess.call(['lame', '-V2', wavf, dstf], stdout=FNULL, stderr=FNULL)\n FNULL.close()\n # tag MP3\n mm = TrackMeta(meta)\n mp3 = MP3(dstf, ID3=ID3)\n mp3[\"TIT2\"] = TIT2(encoding=3, text=mm.title())\n mp3[\"TPE1\"] = TPE1(encoding=3, text=mm.artist())\n mp3[\"TALB\"] = TALB(encoding=3, text=mm.album())\n mp3[\"TPE2\"] = TPE2(encoding=3, text=mm.albumartist())\n if mm.date():\n mp3[\"TDRC\"] = TDRC(encoding=3, text=mm.date())\n mp3[\"TRCK\"] = TRCK(encoding=3,\n text=mm.tracknumber() + \"/\" + mm.tracktotal())\n mp3[\"TPOS\"] = TPOS(encoding=3,\n text=mm.discnumber() + \"/\" + mm.disctotal())\n\n # composer\n if mm.composer():\n mp3[\"TCM\"] = TCM(encoding=3, text=mm.composer())\n\n # cover\n if cover:\n data = open(cover, 'rb').read()\n if cover.endswith('png'):\n mime = 'image/png'\n else:\n mime = 'image/jpeg'\n mp3.tags.add(APIC(encoding=3, mime=mime, type=3, desc=u'Cover', data=data))\n\n # save\n mp3.save()", "def extract_transcript(resp: str):\n if 'result' not in resp:\n raise ValueError({'Error non valid response from api: {}'.format(resp)})\n for line in resp.split(\"\\n\"):\n try:\n line_json = json.loads(line)\n out = line_json['result'][0]['alternative'][0]['transcript']\n return out\n except:\n continue", "def print_entry(item):\n print('Date: ', item[\"Date\"])\n print('Task: ', item[\"Task\"])\n print('Time Spent: ', item[\"Time\"])\n print('Notes: ', item[\"Notes\"], '\\n')", "def test_collecting_auxiliary_audio_links(\n lep_dl: LepDL,\n) -> None:\n json_test = \"\"\"\\\n [\n {\n \"episode\": 3,\n \"date\": \"2000-01-01T00:00:00+00:00\",\n \"url\": \"https://teacherluke.co.uk/2009/04/15/episode-3-musicthe-beatles/\",\n \"post_title\": \"3. 
Music/The Beatles\",\n \"post_type\": \"\",\n \"files\": {\n \"audios\": [\n [\n \"https://someurl1.local\", \"https://someurl2.local\", \"https://someurl3.local\"\n ],\n [\n \"https://part2-someurl1.local\", \"https://part2-someurl2.local\"\n ]\n ],\n \"page_pdf\": []\n },\n \"parsed_at\": \"2021-10-14T07:35:24.575575Z\",\n \"index\": 2009041501,\n \"admin_note\": \"Edge case - null in 'audios'\"\n }\n ]\n \"\"\" # noqa: E501,B950\n db_episodes = Lep.extract_only_valid_episodes(json_test)\n lep_dl.files = downloader.gather_all_files(db_episodes)\n assert len(lep_dl.files) == 3\n assert lep_dl.files[0].secondary_url == \"https://someurl2.local\"\n assert lep_dl.files[0].tertiary_url == \"https://someurl3.local\"\n assert lep_dl.files[1].secondary_url == \"https://part2-someurl2.local\"", "def get_cast_notes():\n \n #get all movies from db\n movies_df = movie_helper.get_movies_df() \n \n with tqdm(total=len(movies_df)) as pbar:\n for index, row in movies_df.iterrows(): \n \n #if imdbid exists use it to collect cast notes\n if (row['imdbId']):\n movie = ia.get_movie(str(row['imdbId']))\n cast_list = movie.get('cast')\n if (cast_list != None) :\n for cast_member in cast_list: \n imdb_id = cast_member.personID\n updates = { 'notes' : cast_member.notes }\n selects = {\"p_imdbId\" : imdb_id, \"m_imdbId\" : row['imdbId'] }\n database_helper.update_data(\"actors\", update_params = updates, select_params = selects)\n \n pbar.update(1)", "def tag_file(filename, artist, title, year=None, genre=None, artwork_url=None, album=None, track_number=None, url=None):\n\n try:\n audio = EasyMP3(filename)\n audio.tags = None\n audio[\"artist\"] = artist\n audio[\"title\"] = title\n if year:\n audio[\"date\"] = str(year)\n if album:\n audio[\"album\"] = album\n if track_number:\n audio[\"tracknumber\"] = track_number\n if genre:\n audio[\"genre\"] = genre\n if url: # saves the tag as WOAR\n audio[\"website\"] = url\n audio.save()\n\n if artwork_url:\n\n artwork_url = artwork_url.replace('https', 'http')\n\n mime = 'image/jpeg'\n if '.jpg' in artwork_url:\n mime = 'image/jpeg'\n if '.png' in artwork_url:\n mime = 'image/png'\n\n if '-large' in artwork_url:\n new_artwork_url = artwork_url.replace('-large', '-t500x500')\n try:\n image_data = requests.get(new_artwork_url).content\n except Exception as e:\n # No very large image available.\n image_data = requests.get(artwork_url).content\n else:\n image_data = requests.get(artwork_url).content\n\n audio = MP3(filename, ID3=OldID3)\n audio.tags.add(\n APIC(\n encoding=3, # 3 is for utf-8\n mime=mime,\n type=3, # 3 is for the cover image\n desc='Cover',\n data=image_data\n )\n )\n audio.save()\n\n # because there is software that doesn't seem to use WOAR we save url tag again as WXXX\n if url:\n audio = MP3(filename, ID3=OldID3)\n audio.tags.add(WXXX(encoding=3, url=url))\n audio.save()\n\n return True\n\n except Exception as e:\n puts(colored.red(\"Problem tagging file: \") + colored.white(\"Is this file a WAV?\"))\n return False", "def add_track(self, track, show_artist=False):\n\n url = self.connection.streamUrl(\n sid=track[\"id\"], maxBitRate=self.bitrate,\n tformat=self.transcode_format)\n\n # Create list item\n if show_artist:\n title = \"%s - %s\" % (\n track.get(\"artist\", \"<Unknown>\"),\n track.get(\"title\", \"<Unknown>\"))\n else:\n title = track.get(\"title\", \"<Unknown>\")\n\n # Create item\n li = xbmcgui.ListItem(title)\n\n # Handle cover art\n if \"coverArt\" in track:\n cover_art_url = self.connection.getCoverArtUrl(track[\"coverArt\"])\n\n 
li.setIconImage(cover_art_url)\n li.setThumbnailImage(cover_art_url)\n li.setProperty(\"fanart_image\", cover_art_url)\n\n # Handle metadata\n li.setProperty(\"IsPlayable\", \"true\")\n li.setMimeType(track.get(\"contentType\"))\n li.setInfo(type=\"Music\", infoLabels={\n \"Artist\": track.get(\"artist\"),\n \"Title\": track.get(\"title\"),\n \"Year\": track.get(\"year\"),\n \"Duration\": track.get(\"duration\"),\n \"Genre\": track.get(\"genre\"),\n \"TrackNumber\": track.get(\"track\")})\n\n xbmcplugin.addDirectoryItem(\n handle=self.addon_handle, url=url, listitem=li)", "def prepare_metadata(self, presentation):\r\n return {\"title\": presentation.title,\r\n \"artist\": presentation.speaker,\r\n \"performer\": presentation.speaker,\r\n \"album\": presentation.event,\r\n \"location\": presentation.room,\r\n \"date\": str(datetime.date.today()),\r\n \"comment\": presentation.description}", "def voice_three(request):\n call_sid = None\n recording_url = None\n if request.method == \"POST\":\n call_sid = request.POST.get('CallSid', None)\n recording_url = request.POST.get('RecordingUrl', None)\n if request.method == \"GET\":\n call_sid = request.GET.get('CallSid', None)\n recording_url = request.GET.get('RecordingUrl', None)\n\n if recording_url:\n call_detail = CallDetail.objects.get(call_sid=call_sid)\n call_detail.comment = recording_url\n call_detail.save()\n twiml = VoiceResponse()\n gather = Gather(num_digits=1, action='/VoiceFour')\n gather.play('http://roelofvandijk.com/mp33/IVR/PNGK-speakOrEndCall.mp3')\n twiml.append(gather)\n return HttpResponse(str(twiml))", "def add_signal(data):\n for row in data:\n row[\"synopsis\"] = row[\"synopsis\"] + f' The film grossed ${row[\"gross\"]}'", "def process_song_file(cur, filepath):\n \n # open song file\n \n df = pd.read_json(filepath,lines=True)\n \n # insert song record\n song_data = df[['song_id', 'title', 'artist_id','year',\n 'duration']].values[0].tolist()\n cur.execute(song_table_insert, song_data)\n \n # insert artist record\n artist_data = df[['artist_id','artist_name',\n 'artist_location', 'artist_latitude',\n 'artist_longitude']].values[0].tolist()\n cur.execute(artist_table_insert, artist_data)", "def displayMeta(ctx, data, vid):\n\t\n\ts = \"\"\n\ts += \"Title: %s \" % data.getElementsByTagName(\"title\")[0].firstChild.data\n\ts += \" • By: %s\" % data.getElementsByTagName(\"author\")[0].getElementsByTagName(\"name\")[0].firstChild.data\n\n\tshowRest = True\n\n\tr = data.getElementsByTagName(\"yt:state\")\n\tif len(r):\n\t\tr = r[0]\n\t\tif r.getAttribute(\"name\") == \"restricted\":\n\t\t\tshowRest = r.getAttribute(\"reasonCode\") == \"limitedSyndication\"\n\t\t\tif showRest:\n\t\t\t\ts += \" • Syndication Limited.\"\n\t\t\telse:\n\t\t\t\ts += \" • Video is unavailable: %s\" % r.firstChild.data\n\n\tif showRest:\n\t\ts += \" • Length: %s\" % prettyTime(data.getElementsByTagName(\"yt:duration\")[0].getAttribute(\"seconds\"))\n\t\ts += \" • View Count: %s\" % prettyNumber(data.getElementsByTagName(\"yt:statistics\")[0].getAttribute(\"viewCount\"))\n\n\t\tr = data.getElementsByTagName(\"gd:rating\")\n\t\tif len(r):\n\t\t\tr = r[0]\n\t\t\ts += \" • Average Rating: %1.2f/5 over %s people\" % (\n\t\t\t\tfloat(r.getAttribute(\"average\")),\n\t\t\t\tprettyNumber(r.getAttribute(\"numRaters\"))\n\t\t\t\t)\n\t\telse:\n\t\t\ts += \" • No ratings\"\n\t\n\ts += \" • https://youtu.be/%s\" % vid\n\tctx.reply(s, \"YouTube\")", "def text_to_speech(entry):\n text = entry.get_text()\n if text:\n subprocess.call([\"milena_say\", text])", "def 
subject_info(intent, extra_info=[]):\n\n text = intent['inputTranscript'].lower()\n utterances = AS.load_file('sample_utterances.txt')\n\n # add \"book\" and \"books\" to every utterance\n for line in list(utterances):\n utterances.insert(0, line + \" book\")\n utterances.insert(0, line + \" books\")\n\n # tells how many characters needs to be dropped before the subject starts\n to_drop = 0\n\n for line in utterances:\n if text.startswith(line):\n to_drop = len(line)\n break\n\n # drops the characters and makes a list from the strings that are left\n text = text[to_drop:].strip()\n text_list = text.split(' ', len(text))\n\n subject_list = []\n keywords = [\"books\", \"book\", \"by\", \"published\", \"written\"]\n keyword = \"\"\n\n # Find out when the book name ends\n for word in text_list:\n if word not in keywords:\n subject_list.append(word)\n else:\n break\n\n subject = \" \".join(subject_list)\n\n # Get all the keywords in the middle, so they can be\n # all be dropped at once, eg written by, books by\n text_list = text_list[len(subject_list):]\n if text_list:\n word = text_list[0]\n while word in keywords:\n keyword += word + \" \"\n text_list = text_list[1:]\n if text_list:\n word = text_list[0]\n else:\n break\n\n # search for an author from the rest of the characters\n author_text = text[len(keyword):].strip()\n author = AS.search(author_text, False)\n if author is \"\":\n author = None\n\n # There might be old info in the extra_info (author), so \n # we need to clear it\n extra_info.clear()\n\n # add the author to extra info so it can be used in the Finna API call\n if author:\n extra_info += [\"author:\\\"\" + author + \"\\\"\"]\n elif intent['sessionAttributes'].get('author'):\n extra_info += [\n \"author:\\\"\" + intent['sessionAttributes']['author'] + \"\\\"\"\n ]\n\n # The Finna API call\n request = lookfor(term=subject, filter=extra_info)['json']\n\n return parse_subject(request, subject, {'author': author})", "async def _create_embed(self, event, info):\n\n e = discord.Embed(url=info.get(\"url\"))\n e.title = \"%s %s!\" % (info.get(\"streamer\"), info.get(\"live_status\"))\n e.add_field(name=\"Stream title\", value=info.get(\"title\"), inline=False)\n e.add_field(name=\"Begin:\", value=event.begin.format(\"HH:mm:ss ZZZ\") + \" (\" + event.begin.humanize() + \")\", inline=False)\n e.add_field(name=\"Duration: \", value=str(event.duration), inline=False)\n #e.add_field(name=\"Link\", value=info.get(\"url\"), inline=False)\n e.set_image(url=info.get(\"thumbnail\") or e.Empty)\n return e", "def transcript_lines(transcript_text):\n lines = []\n for line in transcript_text.splitlines():\n if line.strip() and line.strip()[0] != '#':\n split = line.split(':')\n speaker = split[0][-1]\n utterance = ' '.join(split[1:]).strip()\n lines.append((speaker, utterance))\n return lines", "def show_release_details(release):\n def get(key, dictionary=release):\n try:\n return dictionary[key]\n except KeyError as e:\n return None\n\n date = get('date')\n date = date[:4] if date else \"\"\n print(\"{} / {} ({})\".format(get('artist-credit-phrase'), get('title'), date))\n print()\n\n # print track list\n track_lists = [get(\"track-list\", medium) for medium in release['medium-list']]\n track_list = [track for tracks in track_lists for track in tracks]\n track_width = max([len(track[\"recording\"][\"title\"]) for track in track_list])\n time_width = len(get_time(sum([int(track[\"length\"]) for track in track_list])))\n\n total_ms = 0\n for idx,track in enumerate(track_list):\n title = 
track[\"recording\"][\"title\"]\n ms = int(track[\"length\"])\n time = get_time(ms)\n total = get_time(total_ms)\n print(\"{:2d}. {:{track_width}} {:>{time_width}} ({})\".format( \\\n idx+1, title, total, time, track_width=track_width, time_width=time_width))\n total_ms += ms\n width = 5 + track_width + time_width\n print(\"{:>{width}}\".format(get_time(total_ms), width=width))", "def set_track_metadata(self, track = None, filename = None, url = None):\n if url == None or track == None:\n return None\n\n if filename == None:\n filename = get_track_filename(url)\n\n # id3 is only for mp3\n if not filename.endswith(\".mp3\"):\n if filename.endswith(\".wav\"):\n filename = self.convert_wav_to_mp3(filename)\n else:\n return None\n\n\n # Set title\n try:\n meta = ID3(filename)\n except ID3NoHeaderError:\n try:\n meta = File(filename, easy=True)\n meta.add_tags()\n meta.save()\n meta = ID3(filename)\n except:\n return\n except IOError:\n return\n\n try:\n meta.add(TIT2(encoding=3, text=track.title))\n meta.add(TCON(encoding=3, text=track.genre))\n meta.add(TCOM(encoding=3, text=track.user[\"username\"]))\n meta.save()\n\n artwork_filename = wget.download(track.artwork_url)\n\n audio = MP3(filename, ID3=ID3)\n\n # add ID3 tag if it doesn't exist\n try:\n audio.add_tags()\n except error:\n pass\n\n audio.tags.add(\n APIC(\n encoding=3, # 3 is for utf-8\n mime='image/jpeg', # image/jpeg or image/png\n type=3, # 3 is for the cover image\n desc=u'Cover',\n data=open(artwork_filename).read()\n )\n )\n audio.save()\n except:\n return", "def studio_transcript(self, request, dispatch):\r\n _ = self.runtime.service(self, \"i18n\").ugettext\r\n\r\n if dispatch.startswith('translation'):\r\n language = dispatch.replace('translation', '').strip('/')\r\n\r\n if not language:\r\n log.info(\"Invalid /translation request: no language.\")\r\n return Response(status=400)\r\n\r\n if request.method == 'POST':\r\n subtitles = request.POST['file']\r\n save_to_store(subtitles.file.read(), unicode(subtitles.filename), 'application/x-subrip', self.location)\r\n generate_sjson_for_all_speeds(self, unicode(subtitles.filename), {}, language)\r\n response = {'filename': unicode(subtitles.filename), 'status': 'Success'}\r\n return Response(json.dumps(response), status=201)\r\n\r\n elif request.method == 'GET':\r\n\r\n filename = request.GET.get('filename')\r\n if not filename:\r\n log.info(\"Invalid /translation request: no filename in request.GET\")\r\n return Response(status=400)\r\n\r\n content = Transcript.get_asset(self.location, filename).data\r\n response = Response(content, headerlist=[\r\n ('Content-Disposition', 'attachment; filename=\"{}\"'.format(filename.encode('utf8'))),\r\n ('Content-Language', language),\r\n ])\r\n response.content_type = Transcript.mime_types['srt']\r\n\r\n else: # unknown dispatch\r\n log.debug(\"Dispatch is not allowed\")\r\n response = Response(status=404)\r\n\r\n return response", "def SongTitle( path ):\n p = subprocess.Popen( ['ffprobe',path], stderr=subprocess.PIPE )\n\n output = p.communicate()[1].decode()\n if 'Invalid data found' in output:\n return None\n\n # find the first occurance of \"title : stuff\" with any number of spaces.\n res = re.search( r'title\\s+:\\s+([a-zA-Z0-9,\\(\\) ]+)', output )\n\n if res is None:\n return \"\"\n\n ret = res.group(1)\n\n return ret", "def transform_song(filepath):\n f = json.load(open(filepath))\n return '\\t'.join([str(v) if (v := f[k]) else ''\n for k in song_cols.keys()]) + '\\n'", "def getSubtitleTable(date) -> str:\n return \"\"\"| Start 
of the day | Weeks until NIMCET |\n| ---------------- | -----------------: |\n| {time} | {weeks} weeks |\"\"\".format(time=formattedTimeNow(), weeks=round((datetime(2021, 5, 21) - date).days/7, 1))", "def transcribe_file_with_word_time_offsets(speech_file):\n from google.cloud import speech\n from google.cloud.speech import enums\n from google.cloud.speech import types\n client = speech.SpeechClient()\n\n with io.open(speech_file, 'rb') as audio_file:\n content = audio_file.read()\n\n audio = types.RecognitionAudio(content=content)\n config = types.RecognitionConfig(\n encoding=enums.RecognitionConfig.AudioEncoding.LINEAR16,\n language_code='en-US',\n enable_word_time_offsets=True)\n\n response = client.recognize(config, audio)\n\n word_with_ts = []\n for result in response.results:\n #print result\n alternative = result.alternatives[0]\n print('Transcript: {}'.format(alternative.transcript))\n\n for word_info in alternative.words:\n word = word_info.word\n start_time = word_info.start_time\n end_time = word_info.end_time\n word_with_ts.append((word ,start_time.seconds + start_time.nanos * 1e-9, end_time.seconds + end_time.nanos * 1e-9))\n #print('Word: {}, start_time: {}, end_time: {}'.format(\n # word,\n # start_time.seconds + start_time.nanos * 1e-9,\n # end_time.seconds + end_time.nanos * 1e-9))\n return word_with_ts", "def playlist(self):\n def iconv(s):\n encoding = self.options[\"id3_encoding\"]\n try:\n if encoding:\n return s.encode('latin1').decode(encoding).encode('utf-8')\n else:\n return s.encode('latin1')\n except UnicodeEncodeError:\n return \"\"\n\n lst = []\n r = self.x.playlist_list_entries()\n r.wait()\n for id in r.get_list():\n r = self.x.medialib_get_info(id)\n r.wait()\n if r.iserror():\n print r.get_error()\n lst.append(' ')\n continue\n song = r.get_propdict()\n try:\n artist = iconv(song[('plugin/id3v2', 'artist')])\n except KeyError:\n try:\n artist = iconv(song[('plugin/mad', 'artist')])\n except KeyError:\n artist = ''\n try:\n title = iconv(song[('plugin/id3v2', 'title')])\n except KeyError:\n try:\n title = iconv(song[('plugin/mad', 'title')])\n except KeyError:\n title = ''\n if artist == \"\" and title == \"\":\n name = os.path.split(song[('server', 'url')])[1]\n name = os.path.splitext(name)[0]\n name = urllib.unquote(name.decode('utf-8').encode('latin1'))\n name = name.replace(\"+\", \" \")\n lst.append(' ' + name)\n else:\n lst.append(' %s - %s' % (artist.ljust(6), title))\n\n return lst", "def record_metadata(id, sleep_time=1):\n regex = re.compile('\\W')\n url = \"http://catalog.hathitrust.org/api/volumes/brief/recordnumber/{0}.json\"\n\n url = url.format(id)\n r = requests.get(url)\n data = r.json()\n\n # data = data['items'][id]\n items = []\n if data:\n for item in data['items']:\n enum = regex.sub('', str(item.get('enumcron', '')).lower())\n htid = item.get('htid', '')\n items.append((enum, htid))\n else:\n items = []\n\n sleep(sleep_time)\n return items", "def set_track_info(self, payload):\n self.raw_trackname = payload['currentTrack'].get('title', \"\")\n self.artist = payload['currentTrack'].get('artist', \"\")\n self.album = payload['currentTrack'].get('album', \"\")\n self.station = payload['currentTrack'].get('stationName', \"\")\n\n if sonos_settings.artist_and_album_newlook :\n if self.raw_trackname.startswith(\"x-sonosapi-\") :\n self.raw_trackname = self.station\n\n if self.artist == self.station and self.type == \"radio\" :\n if self.raw_trackname.count(\"~\") : c = \"~\"\n elif self.raw_trackname.count(\"˗\") : c = \"˗\"\n elif 
self.raw_trackname.count(\"*\") : c = \"*\"\n elif self.raw_trackname.count(\"|\") : c = \"|\"\n elif self.raw_trackname.count(\" - \") : c = \" - \"\n elif self.raw_trackname.count(\" / \") : c = \" / \"\n else : c = \"\"\n\n if c :\n oldstr=self.raw_trackname.casefold()\n splitstr = oldstr.split(c)\n self.artist = ' '.join(word[0].upper() + word[1:] for word in splitstr[0].split())\n self.raw_trackname = ' '.join(word[0].upper() + word[1:] for word in splitstr[1].split())\n if c == \"~\" :\n self.album = ' '.join(word[0].upper() + word[1:] for word in splitstr[2].split())\n else :\n self.album = \"\"\n# self.album = self.station\n\n # Abort update if all data is empty\n if not any([self.album, self.artist, self.duration, self.station, self.raw_trackname]):\n _LOGGER.debug(\"No data returned by the API, skipping update\")\n return None\n\n if self.type == \"radio\" and not self.station:\n # if not then try to look it up (usually because its played from Alexa)\n self.station = find_unknown_radio_station_name(self.raw_trackname)\n\n # Clear uninteresting tracknames\n if self.raw_trackname.startswith(\"x-sonosapi-\") or self.raw_trackname.endswith(\".m3u8\"):\n self.trackname = \"\"\n else:\n self.trackname = self.raw_trackname\n\n\n track_id = self.artist\n if self.trackname:\n track_id += f\" - {self.trackname}\"\n if self.album:\n track_id += f\" ({self.album})\"\n if self.duration:\n track_id += f\" - {timedelta(seconds=self.duration)}\"\n if self.station:\n track_id += f\" [{self.station}]\"\n\n return track_id", "def process_song_file(cur, filepath):\n # open song file\n df = pd.read_json(filepath,lines=True)\n\n # insert song record\n __insert_song_data(cur, df)\n \n # insert artist record\n __insert_artist_data(cur, df)", "def transform_log(filepath):\n cols = ['song', 'artist', 'userId', 'firstName', 'lastName',\n 'gender', 'level', 'sessionId', 'location', 'userAgent']\n\n result = ''\n with open(filepath, 'rt') as f:\n for line in f:\n jf = json.loads(line)\n if jf['userId'] and (jf['page'] == 'NextSong'):\n jf['userAgent'] = jf['userAgent'].strip('\"')\n \n temp1 = '\\t'.join([str(v) if (v := jf[k]) else '' for k in cols])\n\n t = round(jf['ts']/1000) # UNIX timestamp, ignore ms\n x = datetime.datetime.fromtimestamp(t)\n \n temp2 = '\\t'.join([x.strftime(\"%Y-%m-%d %H:%M:%S\"),\n str(x.hour),\n str(x.day),\n str(x.isocalendar()[1]),\n str(x.month),\n str(x.year),\n str(x.weekday() not in [5, 6])])\n\n result += temp1 + '\\t' + temp2 + '\\n'\n return result", "def get_dl_data(song, mediatype=\"any\"):\n def mbsize(x):\n \"\"\" Return size in MB. 
\"\"\"\n return str(int(x / (1024 ** 2)))\n\n p = get_pafy(song)\n dldata = []\n text = \" [Fetching stream info] >\"\n streamlist = [x for x in p.allstreams]\n\n if mediatype == \"audio\":\n streamlist = [x for x in p.audiostreams]\n\n l = len(streamlist)\n for n, stream in enumerate(streamlist):\n sys.stdout.write(text + \"-\" * n + \">\" + \" \" * (l - n - 1) + \"<\\r\")\n sys.stdout.flush()\n\n try:\n size = mbsize(stream.get_filesize())\n\n except TypeError:\n dbg(c.r + \"---Error getting stream size\" + c.w)\n size = 0\n\n item = {'mediatype': stream.mediatype,\n 'size': size,\n 'ext': stream.extension,\n 'quality': stream.quality,\n 'notes': stream.notes,\n 'url': stream.url}\n\n dldata.append(item)\n\n writestatus(\"\")\n return dldata, p", "def convert_one_song(audiofile,output,mbconnect=None,verbose=0,DESTROYAUDIO=False):\n # inputs + sanity checks\n if not os.path.exists(audiofile):\n print 'ERROR: song file does not exist:',songfile\n return 0\n if os.path.exists(output):\n print 'ERROR: hdf5 output file already exist:',output,', delete or choose new path'\n return 0\n # get EN track / song / artist for that song\n if verbose>0: print 'get analysis for file:',audiofile\n track = trackEN.track_from_filename(audiofile)\n song_id = track.song_id\n song = songEN.Song(song_id)\n if verbose>0: print 'found song:',song.title,'(',song_id,')'\n artist_id = song.artist_id\n artist = artistEN.Artist(artist_id)\n if verbose>0: print 'found artist:',artist.name,'(',artist_id,')'\n # hack to fill missing values\n try:\n track.foreign_id\n except AttributeError:\n track.__setattr__('foreign_id','')\n if verbose>0: print 'no track foreign_id found'\n try:\n track.foreign_release_id\n except AttributeError:\n track.__setattr__('foreign_release_id','')\n if verbose>0: print 'no track foreign_release_id found'\n # create HDF5 file\n if verbose>0: print 'create HDF5 file:',output\n HDF5.create_song_file(output,force=False)\n # fill hdf5 file from track\n if verbose>0:\n if mbconnect is None:\n print 'fill HDF5 file with info from track/song/artist'\n else:\n print 'fill HDF5 file with info from track/song/artist/musicbrainz'\n h5 = HDF5.open_h5_file_append(output)\n HDF5.fill_hdf5_from_artist(h5,artist)\n HDF5.fill_hdf5_from_song(h5,song)\n HDF5.fill_hdf5_from_track(h5,track)\n if not mbconnect is None:\n HDF5.fill_hdf5_from_musicbrainz(h5,mbconnect)\n h5.close()\n # done\n if DESTROYAUDIO:\n if verbose>0: print 'We remove audio file:',audiofile\n os.remove(audiofile)\n return 1", "def stt(self, audio, language, limit):\n\n return self.request({\n \"method\": \"POST\",\n \"headers\": {\"Content-Type\": \"audio/x-flac\"},\n \"query\": {\"lang\": language, \"limit\": limit},\n \"data\": audio\n })", "def _parse_entry(self,entry):\n item_meta={'title':entry.title,\n 'description':entry.description,\n 'category':entry.category,\n 'tags':entry.tags,\n 'page_url':entry.url,\n 'lq_url':None,\n 'hq_url':None,\n 'hd_url':None,\n 'search-id':self.search_id,\n 'source':'4',}\n self._logger.debug('Video Metadata: %s',item_meta)\n return item_meta", "def just_in():\n soup = abcradionational.get_soup(URL + \"/podcasts\")\n \n playable_podcast = abcradionational.get_playable_podcast(soup)\n \n items = abcradionational.compile_playable_podcast(playable_podcast)\n\n\n return items", "def scrape_song(url):\n soup = scrapekit.handle_url(url)\n\n contents = scrape_id_to_div(soup, \"Lyrics\")\n if not contents:\n return None\n\n filetext = ''.join(c.text for c in contents)\n\n # Check if there is a reprise\n REPRISE = 
'Reprise'\n\n reprise = soup.find(id=REPRISE)\n if reprise:\n filetext += '\\n\\n'\n filetext += REPRISE + ':\\n\\n'\n\n contents = scrape_id_to_div(soup, REPRISE)\n filetext += ''.join(c.text for c in contents)\n\n # Get song title, fix blank spaces for file name\n songtitle = soup.title.text.split('|')[0]\n\n song_text = ''\n song_text += 'Song: {}\\n'.format(songtitle)\n song_text += get_infobox_info(soup)\n song_text += '\\n\\n'\n song_text += filetext\n\n return song_text", "def process_song_file(cur, filepath):\n # open song file\n data_frame = pd.read_json(filepath, lines=True)\n\n # insert song record\n song_data = list(data_frame[['song_id', 'title', 'artist_id', 'year', 'duration']].values[0])\n cur.execute(song_table_insert, song_data)\n\n # insert artist record\n artist_data = list(\n data_frame[['artist_id', 'artist_name', 'artist_location',\n 'artist_latitude', 'artist_longitude']].values[0])\n cur.execute(artist_table_insert, artist_data)", "def scrape_donau_3_fm():\n\n url = 'http://www.donau3fm.de/' \\\n 'wp-content/themes/ex-studios-2015/playlist/getplaylist.php'\n\n element = get_tag(url=url,\n xpathExpression='//table//td/text()')\n artist = None\n title = None\n if len(element) >= 3:\n artist = element[2]\n title = element[1]\n if artist and title:\n return Song(artist, title)\n return None", "def media_to_chromecast_command(\n media=None,\n type=\"LOAD\", # pylint: disable=redefined-builtin\n requestId=1,\n offset=0,\n directPlay=True,\n directStream=True,\n subtitleSize=100,\n audioBoost=100,\n transcoderVideo=True,\n transcoderVideoRemuxOnly=False,\n transcoderAudio=True,\n isVerifiedHostname=True,\n contentType=\"video\",\n myPlexSubscription=True,\n contentId=None,\n streamType=STREAM_TYPE_BUFFERED,\n port=32400,\n protocol=\"http\",\n address=None,\n username=None,\n autoplay=True,\n currentTime=0,\n playQueue=None,\n playQueueID=None,\n startItem=None,\n version=\"1.10.1.4602\",\n **kwargs,\n): # pylint: disable=invalid-name, too-many-locals, protected-access\n\n if media is not None:\n # Lets set some params for the user if they use plexapi.\n server = media[0]._server if isinstance(media, list) else media._server\n server_url = urlparse(server._baseurl)\n protocol = server_url.scheme\n address = server_url.hostname\n port = server_url.port\n machineIdentifier = server.machineIdentifier\n token = server._token\n username = server.myPlexUsername\n myPlexSubscription = server.myPlexSubscription\n\n if getattr(media, \"TYPE\", None) == \"playqueue\":\n if startItem:\n media = media.items\n else:\n playQueue = media\n\n if playQueue is None:\n playQueue = server.createPlayQueue(media, startItem=startItem)\n\n playQueueID = playQueue.playQueueID\n contentId = playQueue.selectedItem.key\n contentType = playQueue.items[0].listType\n version = server.version\n\n # Chromecasts seem to start playback 5 seconds before the offset.\n if offset != 0:\n currentTime = offset\n\n msg = {\n \"type\": type,\n \"requestId\": requestId,\n \"media\": {\n \"contentId\": contentId,\n \"streamType\": streamType,\n \"contentType\": contentType,\n \"customData\": {\n \"offset\": offset,\n \"directPlay\": directPlay,\n \"directStream\": directStream,\n \"subtitleSize\": subtitleSize,\n \"audioBoost\": audioBoost,\n \"server\": {\n \"machineIdentifier\": machineIdentifier,\n \"transcoderVideo\": transcoderVideo,\n \"transcoderVideoRemuxOnly\": transcoderVideoRemuxOnly,\n \"transcoderAudio\": transcoderAudio,\n \"version\": version,\n \"myPlexSubscription\": myPlexSubscription,\n 
\"isVerifiedHostname\": isVerifiedHostname,\n \"protocol\": protocol,\n \"address\": address,\n \"port\": port,\n \"accessToken\": token,\n \"user\": {\"username\": username},\n },\n \"containerKey\": f\"/playQueues/{playQueueID}?own=1&window=200\",\n },\n \"autoplay\": autoplay,\n \"currentTime\": currentTime,\n \"activeTrackIds\": None,\n },\n }\n\n # Allow passing of kwargs to the dict.\n msg.update(kwargs)\n\n return msg", "def main():\n # transcribe_audio()\n summarize()", "def parse_line(log_line):\n\n logger = logging.getLogger(__name__)\n\n REGEX = [\n # universal-transcoder\n re.compile('.*GET\\s\\/music\\/:\\/transcode\\/universal\\/start\\.mp3.*metadata%2F(\\d+)\\&.*'),\n # stream based transcoder\n re.compile('.*\\sDEBUG\\s-\\sLibrary\\sitem\\s(\\d+)\\s\\'.*\\'\\sgot\\splayed\\sby\\saccount.*')\n ]\n\n for regex in REGEX:\n m = regex.match(log_line)\n\n if m:\n logger.info('Found played song and extracted library id \"{l_id}\" from plex log '.format(l_id=m.group(1)))\n return m.group(1)", "def __init__(self, first_line):\n self.title = nlp(first_line[0])\n self.durations = []\n self.rec_paths = [first_line[1]]\n self.text = [first_line[0]]\n self.full_text = ''\n\n # get the authors name from folder name, add space between first and last\n a = os.path.dirname(first_line[1][6:])\n last_name_index = re.search(r'^([^A-Z]*[A-Z]){2}', a).span()[1] - 1\n self.author = a[:last_name_index] + \" \" + a[last_name_index:]\n\n # for keeping track of the media\n self.media = []\n self.total_duration = None\n # self.instance = vlc.Instance()\n\n with contextlib.closing(wave.open(self.rec_paths[0],'r')) as f:\n frames = f.getnframes()\n rate = f.getframerate()\n self.durations.append(frames / float(rate))\n\n self.total_duration = self.calculateTotalDuration(self.durations)\n # self.media.append(self.instance.media_new(self.rec_paths[-1]))", "def to_srt(self, subtitles):\n \n srt_data = ''\n subtitle_num = self.start_index\n for subtitle in subtitles:\n subtitle_num += 1\n \n offset = self.start_time\n \n start_time = self._ms_to_time(subtitle['start_time'] + offset)\n end_time = self._ms_to_time(subtitle['end_time'] + offset)\n \n content = subtitle['content'].replace('<br>', ' ')\n \n srt_data += str(subtitle_num) + '\\r\\n'\n srt_data += '%s --> %s' % (start_time, end_time) + '\\r\\n'\n srt_data += content + '\\r\\n'\n srt_data += '\\r\\n'\n \n self.end_index = subtitle_num\n \n return srt_data", "def get_podcast(_id):\r\n return [Podcast.podcast_json(Podcast.query.filter_by(id=_id).first())]\r\n # Podcast.podcast_json() coverts our output to the json format defined earlier\r\n # the filter_by method filters the query by the id\r\n # since our id is unique we will only get one result\r\n # the .first() method will get that first value returned\r" ]
[ "0.5720898", "0.56445146", "0.5573998", "0.55513567", "0.55373365", "0.5462284", "0.5397166", "0.53909075", "0.5349029", "0.53120935", "0.51405776", "0.51242185", "0.51071036", "0.50734013", "0.5052101", "0.50449437", "0.50282156", "0.5022951", "0.49973148", "0.49737933", "0.49661297", "0.49635798", "0.49503383", "0.49322152", "0.49305058", "0.49285465", "0.49257424", "0.48962638", "0.4868232", "0.48550746", "0.484253", "0.4833983", "0.48308915", "0.48175955", "0.48039526", "0.47969493", "0.4788232", "0.47848025", "0.47846115", "0.4779599", "0.47779357", "0.47773826", "0.47611752", "0.4757011", "0.47554517", "0.4751788", "0.47434717", "0.4739298", "0.47345394", "0.47318318", "0.47290894", "0.47262555", "0.47185344", "0.46913847", "0.46745738", "0.46745065", "0.46695122", "0.46690843", "0.46605206", "0.46567813", "0.4646558", "0.46432394", "0.4639964", "0.4638318", "0.46356812", "0.46350685", "0.4633116", "0.4629461", "0.46227056", "0.4622614", "0.4622521", "0.4622261", "0.46211967", "0.4618203", "0.4616561", "0.46165448", "0.46120465", "0.46110186", "0.4609155", "0.4608012", "0.4605277", "0.46028838", "0.459868", "0.45899367", "0.45789927", "0.45742756", "0.4573498", "0.4572", "0.45641455", "0.4557094", "0.4556069", "0.45526695", "0.45506948", "0.45488316", "0.45486373", "0.45439717", "0.45418674", "0.4541344", "0.45360872", "0.45359832" ]
0.5656862
1
This basically uploads the arguments to the database, returning false and throwing an error if unsuccessful (or true otherwise)\n
def insertTranscription(dbConnection, realtimefactor, transcription, duration, dbID): try: cursor = dbConnection.cursor() cursor.execute("UPDATE transcriptions SET realtimefactor = '" + realtimefactor + "', transcription = '" + transcription + "', datetranscribed = now(), duration = '" + duration + "' WHERE id = '" + str(dbID) + "';") dbConnection.commit() cursor.close() return True except Exception as e: Tools.writeException("uploadTranscriptionData", e) return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_to_db( self, *args ):\n try:\n toSave = [ a for a in args ]\n # save them\n self.session.add_all( toSave )\n self.session.commit()\n self._fire_save_notification()\n return True\n except Exception as e:\n print( \"Error : %s\" % e )\n self._fire_error_saving_notification( e )\n return False", "def save(self):\n args = list(map(self._get_value_or_default, self.COLUMN_TO_FILED))\n columns = list(map(lambda k: k, self.COLUMN_TO_FILED))\n sql = 'INSERT INTO {} ({}) VALUES({});'.format(\n self.TABLE_NAME,\n ', '.join(columns),\n '%s,'.join(' '*len(columns)) + '%s'\n )\n cursor = yield self._pool.execute(sql, args)\n app_log.info('save arg %s', args)\n count = cursor.rowcount\n result = True if count == 1 else False\n return result", "def do_save(self, arg):\n \treturn False", "def upload(self, durations, skill_name, skill_args):\n for dur in durations:\n doc = {\"outcome\": 1, \"error\": \"\", \"name\": skill_name, \"duration\": dur}\n args_dict = dict()\n if skill_args != None:\n for arg in skill_args:\n if len(arg) == 1:\n args_dict[arg[0]] = \".*\"\n else:\n args_dict[arg[0]] = np.random.choice(arg[1:])\n doc[\"args\"] = args_dict\n print(doc)\n if not self.dry_run:\n self.lookup_col.insert_one(doc)", "def put(self):\n parser.parse(self.arg_schema_put, request, location='json_or_form')\n if not request.files.get('upload_data'):\n raise FileError(\"Missing upload file.\")\n # Figure out how to validate inputs\n mime_type = request.files.get('upload_data').mimetype\n if mime_type == 'text/csv':\n self.upload_csv_data(request.files.get('upload_data'))\n else:\n raise FileError(\"Bad upload file type received.\")\n return {'status': 200}", "def upload(self):\n if not self.prepare():\n Settings.err_print(\"unable to upload file - {}\".format(self.get_title()))\n return False\n self.backup()\n self.delete()\n return True", "def handle_upload(f, attrs):\n\n # chunked = False\n dest_folder = os.path.join(app.config['UPLOAD_DIRECTORY'], attrs['qquuid'])\n dest = os.path.join(dest_folder, attrs['qqfilename'])\n save_upload(f, dest)", "def fileUpload(fieldName):\n## we don't deal with OS specific \"\\n\"\n## because R does not have a problem (at least with Windows files)\n## no problem in R either with empty carriage returns at end of file\n \n if fs.has_key(fieldName):\n fileClient = fs[fieldName].file\n if not fileClient:\n shutil.rmtree(tmpDir)\n commonOutput()\n print \"<h1> ADaCGH ERROR </h1>\" \n print \"<p> The \", fieldName, \"file you entered is not a file </p>\"\n print \"<p> Please fill up the required fields and try again</p>\"\n print \"</body></html>\"\n sys.exit()\n else:\n shutil.rmtree(tmpDir)\n commonOutput()\n print \"<h1> ADaCGH ERROR </h1>\" \n print \"<p> \", fieldName, \"file required </p>\"\n print \"<p> Please fill up the required fields and try again</p>\"\n print \"</body></html>\"\n sys.exit()\n \n # transferring files to final destination;\n\n fileInServer = tmpDir + \"/\" + fieldName\n srvfile = open(fileInServer, mode = 'w')\n fileString = fs[fieldName].value\n srvfile.write(fileString)\n srvfile.close()\n\n os.chmod(fileInServer, 0666)\n \n if os.path.getsize(fileInServer) == 0:\n shutil.rmtree(tmpDir)\n commonOutput()\n print \"<h1> ADaCGH ERROR </h1>\"\n print \"<p>\", fieldName, \" file has size 0 </p>\"\n print \"<p> Please enter a file with something in it.</p>\"\n print \"<p> (Did you enter only a single file, but did not check 'One file'?\\\n If you are using only one file, the 'Two files' button should not be checked.)</p>\"\n print 
\"</body></html>\"\n sys.exit()", "def upload_remote(link,local_file,force_upload):\n\n \n global total_passed\n global total_failed\n global failed_files\n global success_files\n s3_uploaded_link = None\n\n\n #query local db to ask if this link was already uploaded or not\n #if returned 0 it mean it was not uploaded previously and\n #uploading fresh\n\n query_res_count = len(list(localsession.query(upload_table.t_serial).filter(upload_table.t_serial==link))) #pylint: disable=maybe-no-member\n\n #force uplod will ignore the local status\n #and uplod to s3 \n \n if (query_res_count == 0) or (force_upload == True ):\n correct_upload,s3_uploaded_link = upload_file(local_file,s3_folder,s3_bucket,supress_print=False)\n if correct_upload:\n total_passed += 1\n #if s3 upload is successful then only add the data to local table \n #and update remote table\n #but do not do this if force upload is set to true\n if force_upload == False:\n \n new_ua = upload_table(t_serial=link,updatedon=datetime.datetime.now())\n localsession.add(new_ua) #pylint: disable=maybe-no-member\n localsession.commit() #pylint: disable=maybe-no-member\n #update the remote db\n #update the remove anyways\n update_remote_stat = update_remote(link=link,awsurl=s3_uploaded_link)\n if update_remote_stat == True:\n success_files.append(local_file)\n return True\n else:\n failed_files.append(local_file)\n total_failed +=1\n return False\n else:\n total_failed +=1\n failed_files.append(local_file)\n return False\n else:\n return False", "async def test_valid_insert(database, valid_data):\n await database.setup_database(reset=True)\n for id ,user_id,embeddings,batch_id in valid_data:\n await database.insert_user(user_id=user_id)\n await database.insert(id=id,\n user_id=user_id,\n embeddings=embeddings,\n batch_id=batch_id)\n await database.close_pool()", "def test_unicode_param(self):\n uploadFile = os.path.join(testdatadir, \"upload.data.gz\")\n r = gracedb.writeFile(eventId, uploadFile)\n self.assertEqual(r.status, 201) # CREATED", "def handle_upload(f, attrs):\n\n # chunked = False\n print 'UPLOAD DIRECTORY:', UPLOAD_DIRECTORY\n dest_folder = os.path.join(UPLOAD_DIRECTORY, attrs['qquuid'])\n dest = os.path.join(dest_folder, attrs['qqfilename'])\n save_upload(f, dest)", "def save(self, *args):\n self.party_name, self.office_name, self.user_id, self.date_created, self.status = args\n format_str = f\"\"\"\n INSERT INTO public.applications (party_name,office_name,user_id,date_created,status)\n VALUES ('{args[0]}','{args[1]}','{args[2]}','{(datetime.now())}','pending');\n \"\"\"\n cursor.execute(format_str)", "def take_action(self, parsed_args):\n if parsed_args.file:\n for file in parsed_args.file:\n if not os.path.exists(file):\n self.logger.error('Specified file does not exist: {}'.format(file))\n continue\n self.logger.info('File uploading is started: {}'.format(file))\n file_id = self.app.metagen.upload_files(file)\n if not file_id:\n return False\n self.logger.info('File {} has been sent to analysis.'.format(file))\n self.logger.info('Use File ID to get Analysis Result: {}'.format(file_id))\n self.logger.info('Task Done')", "def execute(self, args):", "def test_upload(self):\n pkg = make_package(factory=SQLPackage)\n content = BytesIO(b\"test1234\")\n self.db.upload(pkg.filename, content, pkg.name, pkg.version)\n count = self.sql.query(SQLPackage).count()\n self.assertEqual(count, 1)\n saved_pkg = self.sql.query(SQLPackage).first()\n self.assertEqual(saved_pkg, pkg)\n # If calculate hashes is on, it'll read the data\n # and rewrap 
with BytesIO\n self.storage.upload.assert_called_with(pkg, ANY)", "def check_args(name, arg_str):\n if len(arg_str) < 1:\n raise gdb.GdbError(\"ERROR: '%s' requires an argument.\"\n % name)\n return False\n else:\n return True", "def save(self, *args):\n self.firstname, self.lastname, self.othername, self.email, self.phonenumber, self.passporturl, self.roles, self.nationalid, self.county, self.password, self.date_created, self.date_modified = args\n format_str = f\"\"\"\n INSERT INTO public.users (firstname,lastname,othername,email,phonenumber,passporturl,roles,nationalid,county,password,date_created,date_modified)\n VALUES ('{args[0]}','{args[1]}','{args[2]}','{args[3]}','{args[4]}','{args[5]}','{args[6]}','{args[\n 7]}','{args[8]}','{args[9]}','{(datetime.now())}','{(datetime.now())}');\n \"\"\"\n cursor.execute(format_str)", "async def upload_to_dataset(self, dataset_name: str, paths: List[Path]) -> bool:\n return await self.dataset_client.upload_to_dataset(dataset_name, paths)", "def test_upload(self):\n pkg = make_package(factory=DynamoPackage)\n self.db.upload(pkg.filename, BytesIO(b\"test1234\"), pkg.name, pkg.version)\n count = self.engine.scan(DynamoPackage).count()\n self.assertEqual(count, 1)\n saved_pkg = self.engine.scan(DynamoPackage).first()\n self.assertEqual(saved_pkg, pkg)\n self.storage.upload.assert_called_with(pkg, ANY)", "async def test_valid_insert_batch(database,valid_data):\n await database.setup_database(reset=True)\n data = []\n for _id,user_id,embeddings,batch_id in valid_data: #pylint: disable=unused-variable\n await database.insert_user(user_id)\n data.append((_id,user_id,embeddings,1))\n await database.insert_batch(data)\n await database.close_pool()", "def upload_game():\n if (\"game_output\" not in flask.request.values or\n \"users\" not in flask.request.values):\n raise util.APIError(\n 400, message=\"Please provide both the game output and users.\")\n\n game_output = json.loads(flask.request.values[\"game_output\"])\n users = json.loads(flask.request.values[\"users\"])\n challenge = json.loads(flask.request.values.get(\"challenge\", \"null\"))\n\n replay_name = os.path.basename(game_output[\"replay\"])\n if replay_name not in flask.request.files:\n raise util.APIError(\n 400, message=\"Replay file not found in uploaded files.\")\n\n stats = parse_replay(decode_replay(flask.request.files[replay_name]))\n if stats is None:\n raise util.APIError(\n 400, message=\"Replay file cannot be parsed.\")\n\n # Store the replay and any error logs\n replay_key, bucket_class = store_game_artifacts(replay_name, users)\n\n with model.engine.begin() as conn:\n total_users = conn.execute(model.total_ranked_users).first()[0]\n # Sort the users to prevent deadlock in the stored_bot for update lock\n for user in sorted(users, key=lambda x: x['user_id']):\n stored_user = conn.execute(\n sqlalchemy.sql.select([\n model.users.c.id.label(\"user_id\"),\n model.users.c.on_email_list,\n model.users.c.github_email.label(\"email\"),\n model.users.c.player_level,\n model.users.c.creation_time,\n model.users.c.username,\n model.organizations.c.organization_name,\n ]).select_from(model.users.join(\n model.organizations,\n model.organizations.c.id == model.users.c.organization_id,\n isouter=True\n )).where(model.users.c.id == user[\"user_id\"])\n ).first()\n\n stored_bot = conn.execute(\n sqlalchemy.sql.select([\n model.bots.c.version_number,\n model.bots.c.language,\n model.bots.c.mu,\n model.bots.c.sigma,\n ], for_update=True).where(\n (model.bots.c.id == user[\"bot_id\"]) &\n 
(model.bots.c.user_id == user[\"user_id\"])\n )\n ).first()\n\n stored_rank = conn.execute(\n sqlalchemy.sql.select([\n model.ranked_bots_users.c.rank,\n ]).where(\n (model.ranked_bots_users.c.bot_id == user[\"bot_id\"]) &\n (model.ranked_bots_users.c.user_id == user[\"user_id\"])\n )\n ).first()\n\n if not stored_user or not stored_bot:\n raise util.APIError(400, message=\"User or bot doesn't exist\")\n\n # If the user has submitted a new bot in the meanwhile,\n # ignore the game\n if stored_bot[\"version_number\"] != user[\"version_number\"]:\n return util.response_success({\n \"message\": \"User {} has uploaded a new bot, discarding \"\n \"match.\".format(user[\"user_id\"])\n })\n\n user.update(dict(stored_user))\n user.update(dict(stored_bot))\n if stored_rank:\n user[\"leaderboard_rank\"] = stored_rank[\"rank\"]\n user[\"tier\"] = util.tier(stored_rank[\"rank\"], total_users)\n else:\n user[\"leaderboard_rank\"] = total_users\n user[\"tier\"] = util.tier(total_users, total_users)\n\n # Store game results in database\n game_id = store_game_results(conn, game_output, stats,\n replay_key, bucket_class,\n users, challenge)\n # Store game stats in database\n store_game_stats(conn, game_output, stats, game_id, users)\n # Update rankings\n if not challenge:\n update_rankings(conn, users)\n\n return util.response_success()", "def test_upload_binary(self):\n uploadFile = os.path.join(testdatadir, \"upload.data.gz\")\n r = gracedb.writeFile(eventId, uploadFile)\n self.assertEqual(r.status, 201) # CREATED", "def _check_args(self, args):\n if len(args) == 0:\n print(\"No parameters provided.\")\n return False\n else:\n return True", "def save_massage(self, text: str, sender_username: str):\n self.sql_lock.acquire()\n query: str = \"INSERT INTO messages VALUES(?, ?, NULL)\"\n if len(text) > 0:\n self.cursor.execute(query,(text, sender_username))\n self.connection.commit()\n self.sql_lock.release()\n return {\"saved\": True, \"type\": \"uploaded successfully\"}\n else:\n self.sql_lock.release()\n return {\"saved\": False, \"type\": \"could not upload an empty message!!\"}", "def cmd_put(self, msg_dict):\r\n filename = msg_dict[\"filename\"]\r\n filename_path = msg_dict[\"current_directory\"] + '/' + filename\r\n print(\"102 line:\", filename_path)\r\n print(\"103 line:\", os.path.isfile(filename_path))\r\n # input(\"please wait me:\")\r\n if not os.path.isfile(filename_path):\r\n print(\"the server line 36:\")\r\n self.receive_data(msg_dict)\r\n else:\r\n filename = msg_dict['filename']\r\n file_size = msg_dict['file_size']\r\n received_data = 0\r\n size = os.stat(filename_path).st_size # 需要发给客户端,客户端从这个字节位置开始读取上传文件\r\n file_path = db_path + \"/data/\" + \"user.json\" # 用户信息存放的绝对路径\r\n with open(file_path, 'r') as f:\r\n user_information = json.load(f)\r\n print(\"line 118:\", user_information)\r\n username = msg_dict[\"username\"]\r\n remain_quota = float(user_information[username][\"remain_quota\"].split('M')[0]) * 1024 * 1024\r\n need_put_size = file_size - size\r\n if need_put_size < remain_quota:\r\n # 字典中表示上传文件在服务器上已经存在,客户端需要从文件什么位置 seek 内容\r\n put_file_stat_information = {\"file_status\": \"exists\", \"seek_size\": size}\r\n self.request.send(json.dumps(put_file_stat_information).encode())\r\n filename_abs_path = \"%s\" % msg_dict[\"current_directory\"] + \"/\" + filename # 上传文件的存放的绝对路径\r\n new_remain_quota = remain_quota - float(need_put_size) # 此次上传成功后,用户的剩余空间额度/单位为bytes\r\n new_remain_quota = new_remain_quota / 1024 / 1024 # 此次上传成功后,用户的剩余空间额度/单位为M\r\n # 把上传文件成功后,剩余的额度写入数据库文件存储\r\n with 
open(file_path, 'r') as f:\r\n data1 = json.load(f)\r\n data1[username][\"remain_quota\"] = str(new_remain_quota) + 'M'\r\n with open(file_path, 'w') as f:\r\n json.dump(data1, f)\r\n \r\n with open(filename_path, 'wb') as f:\r\n f.seek(size)\r\n while received_data < need_put_size:\r\n data = self.request.recv(1024)\r\n received_data += len(data)\r\n f.write(data)", "def test_upload_body(db_conn, cards_table):\n\n card, errors = UploadCard.insert(db_conn, {\n 'unit_id': 'RUF531',\n 'name': 'What is?',\n 'file_extensions': ['jpg'],\n 'rubric': True, # TODO\n })\n assert len(errors) == 1\n card, errors = card.update(db_conn, {'body': 'Testing 1234'})\n assert len(errors) == 0", "def upload(request):\n gi = GalaxyInstance(url=request.session.get('server'), email=request.session.get('galaxyemail'), password=request.session.get(\"galaxypass\"))\n selected = request.POST.get('selected')\n selectedmeta = request.POST.get('meta')\n filetype = request.POST.get('filetype')\n dbkey = request.POST.get('dbkey')\n workflowid = request.POST.get('workflowid')\n pid = request.POST.get('data_id')\n onlydata = request.POST.get('onlydata')\n makecol = request.POST.get('col')\n data_ids = []\n control = request.POST.get('samples')\n test = request.POST.get('samplesb')\n new_hist = request.POST.get('historyname')\n group = request.POST.get('group')\n investigation = request.POST.get('investigation')\n date = strftime(\"%d_%b_%Y_%H:%M:%S\", gmtime())\n select = selected.split(',')\n mselect = selectedmeta.split(',')\n gselect = group.split(',')\n iselect = investigation.split(',')\n files = get_selection(iselect, gselect, select, mselect)[0]\n mfiles = get_selection(iselect, gselect, select, mselect)[1]\n groups = get_selection(iselect, gselect, select, mselect)[2]\n investigations = get_selection(iselect, gselect, select, mselect)[3]\n history_id = create_new_hist(gi, request.session.get('galaxyemail'), request.session.get(\"galaxypass\"),\n request.session.get('server'), workflowid, files, new_hist)\n inputs = {}\n if len(filter(None, files)) <= 0:\n return HttpResponseRedirect(reverse(\"index\"))\n else:\n if onlydata == \"true\":\n make_data_files(gi, files, request.session.get('username'), request.session.get('password'), request.session.get('galaxyemail'),\n request.session.get('galaxypass'), control, test, history_id, filetype, dbkey)\n else:\n make_data_files(gi, files, request.session.get('username'), request.session.get('password'), request.session.get('galaxyemail'),\n request.session.get('galaxypass'), control, test, history_id, filetype, dbkey)\n make_meta_files(gi, mfiles, request.session.get('username'), request.session.get('password'), request.session.get('galaxyemail'),\n request.session.get('galaxypass'), control, test, history_id)\n if workflowid != \"0\":\n in_count = 0\n resultid = uuid.uuid1()\n datamap = dict()\n mydict = {}\n jsonwf = gi.workflows.export_workflow_json(workflowid)\n for i in range(len(jsonwf[\"steps\"])):\n if jsonwf[\"steps\"][str(i)][\"name\"] == \"Input dataset\":\n try:\n label = jsonwf[\"steps\"][str(i)][\"inputs\"][0][\"name\"]\n except IndexError:\n label = jsonwf[\"steps\"][str(i)][\"label\"]\n mydict[\"in%s\" % (str(i + 1))] = gi.workflows.get_workflow_inputs(workflowid, label=label)[0]\n for k, v in mydict.items():\n datamap[v] = {'src': \"hda\", 'id': get_input_data(request.session.get('galaxyemail'), request.session.get('galaxypass'),\n request.session.get('server'))[0][in_count]}\n data_ids.append(get_input_data(request.session.get('galaxyemail'), 
request.session.get('galaxypass'),\n request.session.get('server'))[0][in_count])\n in_count += 1\n if makecol == \"true\":\n gi.histories.create_dataset_collection(history_id, make_collection(data_ids))\n gi.workflows.invoke_workflow(workflowid, datamap, history_id=history_id)\n gi.workflows.export_workflow_to_local_path(workflowid, request.session.get('username'), True)\n datafiles = get_output(request.session.get('galaxyemail'), request.session.get('galaxypass'), request.session.get('server'))\n store_results(1, datafiles, request.session.get('server'), request.session.get('username'),\n request.session.get('password'), request.session.get('storage'),\n groups, resultid, investigations, date)\n store_results(3, datafiles, request.session.get('server'), request.session.get('username'),\n request.session.get('password'), request.session.get('storage'),\n groups, resultid, investigations, date)\n ga_store_results(request.session.get('username'), request.session.get('password'), workflowid,\n request.session.get('storage'), resultid, groups, investigations)\n call([\"rm\", request.session.get('username') + \"/input_test\"])\n return render_to_response('results.html', context={'workflowid': workflowid, 'inputs': inputs, 'pid': pid,\n 'server': request.session.get('server')})\n else:\n if makecol == \"true\":\n history_data = gi.histories.show_history(history_id, contents=True)\n for c in range(0, len(history_data)):\n data_ids.append(history_data[c]['id'])\n gi.histories.create_dataset_collection(history_id, make_collection(data_ids))\n ug_store_results(\n request.session.get('galaxyemail'), request.session.get('galaxypass'), request.session.get('server'), workflowid,\n request.session.get('username'), request.session.get('password'), request.session.get('storage'), groups, investigations, date)\n return HttpResponseRedirect(reverse(\"index\"))", "def _upload_file(self, file_name, full_path, quiet, request, resources):\r\n\r\n if not quiet:\r\n print('Starting upload for file ' + file_name)\r\n\r\n content_length = os.path.getsize(full_path)\r\n token = self.dataset_upload_file(full_path, quiet)\r\n if token is None:\r\n if not quiet:\r\n print('Upload unsuccessful: ' + file_name)\r\n return True\r\n if not quiet:\r\n print('Upload successful: ' + file_name + ' (' +\r\n File.get_size(content_length) + ')')\r\n upload_file = DatasetUploadFile()\r\n upload_file.token = token\r\n if resources:\r\n for item in resources:\r\n if file_name == item.get('path'):\r\n upload_file.description = item.get('description')\r\n if 'schema' in item:\r\n fields = self.get_or_default(item['schema'], 'fields',\r\n [])\r\n processed = []\r\n count = 0\r\n for field in fields:\r\n processed.append(self.process_column(field))\r\n processed[count].order = count\r\n count += 1\r\n upload_file.columns = processed\r\n request.files.append(upload_file)\r\n return False", "def run(args):\n\n drive_uid = str(args[\"drive_uid\"])\n file_uid = str(args[\"file_uid\"])\n chunk_idx = int(args[\"chunk_index\"])\n secret = str(args[\"secret\"])\n data = string_to_bytes(args[\"data\"])\n checksum = str(args[\"checksum\"])\n\n drive = DriveInfo(drive_uid=drive_uid)\n\n drive.upload_chunk(file_uid=file_uid, chunk_index=chunk_idx,\n secret=secret, chunk=data, checksum=checksum)\n\n return True", "def addArgos(row, tag_id, animal_id, timevalue, gt, bd):\r\n feature_id = 0\r\n feature_type = 'argos'\r\n try:\r\n dev = (tag_id, animal_id, timevalue, feature_type, gt) # instantiate Argos object\r\n argosObj = tables.Argos(*dev, **row) # 
returns 0 if duplicate\r\n feature_id = dbutil.dbTransact(conn,argosObj.sql_insert(),\r\n argosObj.param_dict())\r\n if feature_id:\r\n transmit_id = addTransmit(feature_id, row, bd)\r\n\r\n except Exception as e:\r\n print 'addArgos Error '+ e.message\r\n conn.rollback()\r\n finally:\r\n dev = None\r\n argosObj = None\r\n conn.commit()\r\n return feature_id", "def execute(self, inputs={}):\n self.validate()", "def cmd_stor(args):", "def do_insert_data(self, *args):\n print(\"Provide data to insert\")\n self.connection_obj.insert_into_table(**self.__class__.populate_data())\n print(\"Data Insertion Successful\")", "def upload_batch(entity_batch: Dict[URIRef, WDEntity]):\n print(\"#\" * 80)\n print(\"> BATCH UPLOAD\")\n print(f\">> Temporary TSV batch file: {temp_TSV_batch_output_file}\")\n print(\">> The temporary TSV batch file is going to be overwritten.\")\n print(\">>\")\n print(\">> Would you like to proceed anyway?\")\n input(\">>> Press [ENTER] to proceed or [CTRL+C] to stop the execution of the entire process: \")\n input(\">>> Please confirm (press ENTER again): \")\n with open(temp_TSV_batch_output_file, 'w', encoding='utf-8') as f:\n entity_list = list(entity_batch.values())\n for entity in entity_list:\n if not WDEntity.is_not_null(entity.qid):\n # QID is missing -> we need to create this entity\n statement_lines: str = entity.stringify() + '\\n'\n f.write(statement_lines)\n print(\">> The TSV file has been overwritten. Please manually upload it with the web interface\"\n \" of QuickStatements!\")\n input(\">>> Press [ENTER] when you're done to proceed or [CTRL+C] to stop the execution of the entire process: \")\n input(\">>> Please confirm (press ENTER again): \")", "def save(self):\n self.save_to_db()\n if hasattr(self, 'id'):\n self.status_code = 201\n return True\n else:\n self.errors['messages'].append(\"DataBase Error, Please Try again\")\n self.status_code = 500\n return False", "def valid_args(args):\n is_valid = True\n if not args.ts_url or not args.username or not args.password or not args.from_user or not args.to_user:\n eprint(\"Missing required parameters.\")\n is_valid = False\n\n return is_valid", "def execute(*args):", "def insertJointCtx(*args, exists: bool=True, image1: Union[AnyStr, bool]=\"\", image2:\n Union[AnyStr, bool]=\"\", image3: Union[AnyStr, bool]=\"\", q=True, query=True,\n e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def checkArguments(args, log):\n\n\t\n\n \t\n\tif not args.variant_caller or not args.genome_ref or not args.bam or not args.bed or not args.vcf:\n\t\tlog.error(\"necessary pre-requisite arguments\")\n\t\tsys.exit()\n\n\t\n\tif args.genome_ref:\n\t\tif not os.path.isfile(args.genome_ref): \n \t\t\n \t\t\tlog.error(\"it does not exist file corresponding to the reference genome\")\n \t\t\tsys.exit()\n\n \t\tif not os.access(args.genome_ref, os.R_OK):\n \t\t\tlog.error(\"permission to read the reference genome file is not accorded\")\n \t\t\tsys.exit()\n\n\t \n \tif args.bam:\n \t\tif not os.path.isfile(args.bam): \n \t\t\n \t\t\tlog.error(\"it does not exist file corresponding to the bam\")\n\n \t\t\tsys.exit()\n\n \t\tif not os.access(args.bam, os.R_OK):\n \t\t\tlog.error(\"permission to read the bam file is not accorded\")\n \t\t\tsys.exit()\n\n\n \tif args.bed:\n \t\tif not os.path.isfile(args.bed):\n \t\t\tlog.error(\"it does not exist file corresponding to the target regions\")\n \t\t\tsys.exit()\n\n \t\tif not os.access(args.bed, os.R_OK):\n \t\t\tlog.error(\"permission to read the target regions file is not 
accorded\")\n \t\t\tsys.exit()", "def save(self, *args):\n # need to do!!\n pass", "def insert_good_data():\n get_file_reply(files[0][0], files[0][1])\n get_file_reply(files[1][0], files[1][1])", "def do_update(self, args):\n args = shlex.split(args)\n if len(args) == 0:\n print(\"** class name missing **\")\n return False\n elif args[0] in classes:\n if len(args) > 1:\n k = args[0] + \".\" + args[1]\n if k in models.storage.all():\n if len(args) > 2:\n if len(args) > 3:\n try:\n if isinstance(args[2], datetime) is True:\n pass\n if args[0] in classes:\n if isinstance(args[2], ints) is True:\n args[3] = int(args[3])\n elif isinstance(args[2], floats) is True:\n args[3] = float(args[3])\n except:\n pass\n setattr(models.storage.all()[k], args[2], args[3])\n models.storage.all()[k].save()\n else:\n print(\"** value missing **\")\n else:\n print(\"** attribute name missing **\")\n else:\n print(\"** no instance found **\")\n else:\n print(\"** instance id missing **\")\n else:\n print(\"** class doesn't exist **\")", "def test_1_data_insertion_multiple_users(self):\n s = self.fitness.insert_in_database(self.fitness_dict, date_time=self.dt1)\n self.assertEqual(s, True)\n s_1 = self.fitness_1.insert_in_database(self.fitness_dict_1, date_time=self.dt1)\n self.assertEqual(s_1, True)", "def check_status(self) -> bool:\n ct_status = self.status()\n if ct_status >> 3:\n raise RuntimeError(\n \"Uploading of data to the command table failed \"\n \"due to a JSON parsing error.\"\n )\n return ct_status == 1", "def execute(self, *args, **kwargs):", "def execute(self, *args, **kwargs):", "def insert_data(self, row, table_fields_names, table_fields_types):\n\n\t\tquery = ''\n\n\t\ttry:\t\t\t\t\n\t\t\tquery = self.form_insert_query(TABLE_NAME, row, table_fields_names, table_fields_types)\n\t\t\t# print query\n\t\t\tself.execute_query(query)\t\t\t\n\t\texcept Exception, e:\t\t\t\t\n\t\t\tprint '[e] Exeption: %s' % (str(e))\n\t\t\tprint '\\t[q] Query that caused exception \\n %s' % (query)\n\t\t\treturn False\n\n\t\treturn True", "async def test_invalid_insert_no_user(database, valid_data):\n await database.setup_database(reset=True)\n for id,user_id,embeddings,batch_id in valid_data:\n try:\n await database.insert(id=id,\n user_id=user_id,\n embeddings=embeddings,\n batch_id=batch_id)\n assert False\n except(NotFoundError, DuplicateKeyError):\n assert True\n await database.close_pool()", "async def insert(self, args: Dict[str, Any]):\n keys = \", \".join(args.keys())\n values = \", \".join([f\"${i + 1}\" for i in range(len(args))])\n\n conn: Connection\n async with self.db_pool.acquire() as conn:\n await conn.execute(\n f\"INSERT INTO {self.table_name} \"\n f\"({keys}) VALUES ({values})\",\n *args.values(),\n )", "def save(self, name, args, user):\n # Make sure the given arguments are valid first.\n RollCommand.from_args(args)\n\n connection = self.connect()\n cursor = connection.cursor()\n cursor.execute(\n self.sql[\"save\"], {\"name\": name, \"args\": \" \".join(args), \"user\": user}\n )\n connection.commit()", "def execute(self, args):\r\n pass", "def uploadJobState(self,jobdata):\n\t\tsql = \"INSERT INTO jobresults(jobname,viewname,started,ended,result) VALUES (%s,%s,%s,%s,%s)\"\n\t\tdata = ( jobdata['name'], jobdata['view'], jobdata['start'], jobdata['end'],jobdata['result'] )\n\t\tcsr = self.db.cursor()\n\t\tres = csr.execute(sql,data)\n\t\tprint \"Uploaded a build for %(name)s to the DB\" % jobdata", "def set_data(db_dir, command, args = None):\n #print command\n with lite.connect((db_dir)) as 
conn:\n #try:\n cursor = conn.cursor()\n if args:\n cursor.execute(command,args)\n else:\n cursor.execute(command)\n conn.commit()\n #print '[sql management] set successfully the data according to:\\n--- %s ---'%(command )\n return True\n #except:\n return False\n return False", "def post(self):\n blob_key = self.request.get(\"blobkey\")\n\n database_creation.run(blob_key)", "def commit(self):", "def uploadPodcast(dbConnection, homepage, name, description, category, source, imageurl, web, twitter, facebook, rss):\n try:\n cursor = dbConnection.cursor()\n name = name.replace(\"'\", \"''\")\n description = description.replace(\"'\", \"''\")\n cursor.execute(\"\"\"INSERT INTO podcasts(homepage, name, description, category, source, imageuri, web, twitter, Facebook, rss) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);\"\"\", (homepage, name, description, category, source, imageurl, web, twitter, facebook, rss))\n dbConnection.commit()\n cursor.close()\n return True\n except Exception as e:\n\t\t Tools.writeException(\"insertHeader\", \"e\")\n return False", "def do_update(self, arg):\n obj = self.verify(arg, 1)\n if obj:\n args = arg.split(\" \")\n if len(args) < 3:\n print(\"** attribute name missing **\")\n return\n if len(args) < 4:\n print(\"** value missing **\")\n return\n setattr(obj, args[2], args[3])\n obj.save()", "def _check_args_genome(args):\n if args.install:\n # -n/--name must be specified\n if not args.name:\n _exit(1, \"motifscan genome --install: error: argument -n/--name \"\n \"is required\")\n # check conflict between local model and remote mode\n if args.remote and (args.fasta_files or args.gene_file):\n _exit(1, \"motifscan genome --install: error: argument -r/--remote \"\n \"is not allowed with argument -i or -a\")\n # -i/-a must be specified in local mode\n if not args.remote:\n if not args.fasta_files:\n _exit(1, \"motifscan genome --install: error: argument -i is \"\n \"required\")\n if not args.gene_file:\n _exit(1, \"motifscan genome --install: error: argument -a is \"\n \"required\")\n # check if the input files are existed\n input_files = list(args.fasta_files)\n input_files.append(args.gene_file)\n for path in input_files:\n if not os.path.isfile(path):\n _exit(1, f\"motifscan genome --install: error: file not \"\n f\"found: {path}\")", "async def test_invalid_insert_duplicate_key(database, duplicate_data):\n await database.setup_database(reset=True)\n await database.insert_user(\"1\")\n\n await database.insert(id=1,user_id=\"1\",embeddings=[1,2])\n for id,user_id,embeddings,batch_id in duplicate_data:\n try:\n await database.insert(id=id,\n user_id=user_id,\n embeddings=embeddings,\n batch_id=batch_id)\n assert False\n except:\n assert True\n await database.close_pool()", "def upload_analysis(list_of_contents, list_of_names, list_of_dates, session_id, job_id, clean_input_dir):\n\n clean_input_dir = len(clean_input_dir) != 0\n\n print('UPLOAD')\n\n if session_id is not None and list_of_contents is None:\n print(f'Running in session {session_id}')\n\n # make a subdirectory for this session if one doesn't exist\n input_dir = join(BASE_DIR, 'input', f'input_{session_id}')\n try:\n os.mkdir(input_dir)\n except FileExistsError:\n pass\n\n try:\n os.mkdir(join(input_dir, 'analysis'))\n except FileExistsError:\n pass\n\n # Create an output directory for this session if it doesn't exist\n output_dir = join(BASE_DIR, 'output', f'output_{session_id}')\n try:\n os.mkdir(output_dir)\n except FileExistsError:\n pass\n\n try:\n os.mkdir(join(output_dir, 'analysis'))\n except 
FileExistsError:\n pass\n\n try:\n os.mkdir(join(output_dir, 'analysis', 'images'))\n except FileExistsError:\n pass\n\n def _clean_input_dir():\n \"\"\"\n Clean the input directory by removing every existing file.\n \"\"\"\n for existing_file in os.listdir(join(input_dir, 'analysis')):\n if existing_file != '.hold':\n os.remove(join(input_dir, 'analysis', existing_file))\n\n try:\n\n # If the user isn't uplaoding anything and\n # hasn't uploaded anything, ask them to do so.\n # print(os.listdir(input_dir))\n if list_of_contents is None and len(os.listdir(join(input_dir, 'analysis'))) == 0:\n return 'Please upload some files.'\n\n # if the user is uploading something, first clean the input directory,\n # then write the uploaded files to BASE_DIR/input/input_{session_id}\n if list_of_contents:\n\n if clean_input_dir:\n _clean_input_dir()\n\n # Save successfully uploaded filenames here\n written = list()\n\n # Write uploaded files to BASE_DIR/input/input_{session_id}\n # If any of the files do not end in .txt,\n # or cannot be decoded properly, or cannot be parsed\n # into Voigt models, then clean the input directory and print\n # the error message. Otherwise, show a bullet list of files\n # uploaded to the input directory.\n\n if not clean_input_dir:\n old_peaks = pd.read_csv(join(input_dir, 'peaks.csv'))\n old_models = pd.read_csv(join(input_dir, 'models.csv'))\n else:\n old_peaks = pd.DataFrame()\n old_models = pd.DataFrame()\n \n new_peaks = pd.DataFrame()\n\n for i, c in enumerate(list_of_contents):\n\n if not list_of_names[i].endswith('.txt'):\n raise Exception(f'File {list_of_names[i]} must be .txt')\n\n s = c.split(',')[1]\n\n try:\n s = base64.b64decode(s).decode()\n except UnicodeDecodeError:\n raise Exception(f'Error uploading file {list_of_names[i]}.\\\n Please check file format and try again.')\n\n with open(join(input_dir, 'analysis', list_of_names[i]), 'w') as f:\n f.write(s)\n\n try:\n parsed_file = parse_file(join(input_dir, 'analysis', list_of_names[i]))\n new_peaks = pd.concat([new_peaks, parsed_file], sort=True)\n except Exception as e:\n import traceback\n traceback.print_exc()\n raise Exception(f'Cannot parse file {list_of_names[i]}: {e}')\n\n written.append(list_of_names[i])\n\n res = [html.Li(x) for x in written]\n res.insert(0, html.P(f'Success! 
{len(written)} \\\n .txt files were uploaded.'))\n\n # peaks = read_input(session_id)\n id_vars = pd.Series(new_peaks.columns)\n mask = ~(id_vars.str.contains('(p|n)m', regex=True) &\n id_vars.str.contains('center'))\n id_vars = id_vars.loc[mask]\n new_peaks = new_peaks.melt(id_vars=id_vars)\n new_peaks = new_peaks.loc[new_peaks.value.notnull()]\n\n def compute_models(DATA):\n res = pd.DataFrame([], columns=['filename', 'peak_name', 'peak_position', 'amplitude'])\n for idx, (_, model) in enumerate(DATA.iterrows()):\n\n row = pd.Series()\n row['filename'] = model.filename\n row['peak_name'] = model.variable\n row['peak_position'] = model.value\n \n amp_col = model.variable[:model.variable.index('_')] + '_amplitude'\n row['amplitude'] = model[amp_col]\n\n res.loc[idx] = row\n\n return res\n\n new_models = compute_models(new_peaks)\n\n models = pd.concat([old_models, new_models])\n peaks = pd.concat([old_peaks, new_peaks])\n\n models.to_csv(join(input_dir, 'models.csv'))\n\n peaks.to_csv(join(input_dir, 'peaks.csv'))\n\n return res\n\n except Exception as e:\n # If any of the files raise an error (wrong extension,\n # decoding error, error parsing into models),\n # then print the error message.\n _clean_input_dir()\n import traceback; traceback.print_exc()\n return f'An error occurred while uploading files: {e}'", "def execute():", "def upload():\n # verify user\n email = flask.request.args[\"email\"]\n username = flask.request.args[\"username\"]\n\n file = flask.request.files[\"file\"]\n print(file.filename)\n file_bytestr = file.read()\n\n # query ms api\n emotion = ms_emotion_api(file_bytestr)\n print(emotion)\n if emotion is None:\n return flask.jsonify(error=\"MS API error, possibly no human face\")\n\n # save to mongodb\n saved = mongo.db.images.insert_one({\n \"filename\": file.filename,\n \"content\": file_bytestr,\n \"emotion\": emotion,\n \"date\": datetime.datetime.utcnow(),\n \"user_username\": username,\n \"user_email\": email,\n })\n # print(saved.inserted_id)\n # create user if needed\n mongo.db.users.update_one(filter={\n \"email\": email,\n }, update={\n \"$set\": {\"username\": username},\n # image_ids: list of foreign ids to images\n \"$push\": {\"image_ids\": saved.inserted_id},\n }, upsert=True)\n\n # client resend image_id when reporting music\n emotion[\"image_id\"] = str(saved.inserted_id)\n return flask.jsonify(emotion)", "async def prepared(self, *args, **kwargs):\n pass", "async def post_multipart(self, part1, part_2, test):", "def check_arguments(self):\n ## only four test operation is permitted, if given anything apart from this, then it should print error message\n if (self.args.snap is False and self.args.snapcheck is False and self.args.check is False and self.args.diff is False and self.args.version is False):\n self.logger.error(colorama.Fore.RED +\n \"Arguments not given correctly, Please refer help message\", extra=self.log_detail)\n self.parser.print_help()\n sys.exit(1)\n\n if(((self.args.snap is True and (self.args.pre_snapfile is None or self.args.file is None)) or\n (self.args.snapcheck is True and self.args.file is None) or\n (self.args.check is True and self.args.file is None)) and \n (self.args.testfiles is None or self.args.hostname is None)\n ):\n self.logger.error(colorama.Fore.RED +\n \"Arguments not given correctly, Please refer help message\", extra=self.log_detail)\n self.parser.print_help()\n sys.exit(1)\n if self.args.diff is True:\n if (self.args.pre_snapfile is not None and os.path.isfile(self.args.pre_snapfile)) and (\n 
self.args.post_snapfile is not None and os.path.isfile(self.args.post_snapfile)):\n comp = Comparator()\n comp.compare_diff(\n self.args.pre_snapfile,\n self.args.post_snapfile,\n None)\n sys.exit(1)\n else:\n if (self.args.file is None) and (\n self.args.testfiles is None or self.args.hostname is None):\n self.parser.print_help()\n sys.exit(1)", "def _upload_chunk(self, final=False):\n out = self.fs.session.post(\n self.location,\n data=self.buffer.getvalue(),\n headers={\"content-type\": \"application/octet-stream\"},\n )\n out.raise_for_status()\n return True", "def edit_db(self, query, args=()):\n conn = self.get_db()\n try:\n cur = conn.execute(query, args)\n conn.commit()\n cur.close()\n except Exception as e:\n print(e)\n return False\n return True", "def log_to_db(data):\n from helpers.database import Database\n query = '''INSERT INTO Error Values(null, \"%s\", \"%s\", \"%s\", \"%s\", \"%s\", \"%s\");'''\n\n try:\n db = Database().get_db()\n conn = db.cursor()\n conn.execute(query % (data.type, data.content, data.timestamp,\n data.field, data.dataset, data.link))\n db.commit()\n return True\n\n except sqlite3.Error as er:\n print(er)\n return False\n\n finally:\n db.close()", "def upload_finish(self, cloud_file):", "def checkArgs( args ):\n # Set Intial Variables required\n getCvpAccess = False\n destList = []\n\n # React to the options provided \n\n # CVP Username for script to use\n if args.userName == None:\n getCvpAccess = True\n \n # CVP Password for script to use\n if args.password == None:\n getCvpAccess = True\n else:\n if (args.password[0] == args.password[-1]) and args.password.startswith((\"'\", '\"')):\n password = args.password[1:-1]\n\n if getCvpAccess:\n args.userName = raw_input(\"User Name to Access CVP: \")\n args.password = askPass( args.userName, \"CVP\" )\n \n # CVP appliances to get snapsots from\n if not args.target:\n applianceNumber = int(raw_input(\"Number of CVP Appliance to use: \"))\n loop = 0\n while loop < applianceNumber:\n args.target.append(raw_input(\"CVP Appliance %s: \" %(loop+1)))\n loop += 1\n\n # Target snapshot\n if args.snapshot == None:\n args.snapshot = raw_input(\"Name of Snapshot to retrieve: \")\n else:\n if (args.snapshot[0] == args.snapshot[-1]) and args.snapshot.startswith((\"'\", '\"')):\n args.snapshot = args.snapshot[1:-1]\n\n return args", "def update(self, **validated_data):\n updated = self._provision(validated_data)\n if updated:\n try:\n db.session.commit()\n return True\n except Exception as error:\n db.session.rollback()\n print(error.args)\n return False", "def store(request):\n if request.method == 'POST':\n username = request.session.get('username')\n password = request.session.get('password')\n storage = request.session.get('storage')\n inv = request.POST.get('inv')\n study = request.POST.get('study')\n metadata = request.POST.get('metadata')\n datafile = request.POST.get('datafile')\n disgenet = onto(request.POST.get('disgenet'), request.POST.get('edam'))[0]\n edam = onto(request.POST.get('disgenet'), request.POST.get('edam'))[1]\n if username == \"\" or username is None:\n login()\n else:\n pid = datafile\n metadata = metadata.split(',')\n if metadata is not None:\n for m in metadata:\n mfile = m.replace('[', '').replace(']', '').replace('\"', '').replace(' ', '')\n metafile = subprocess.Popen([\"curl -s -k -u\" + username + \":\" + password + \" \" + mfile[1:]],\n stdout=subprocess.PIPE, shell=True).communicate()[0]\n metaf = open(username + '/metafile.csv', 'w')\n metaf.write(metafile)\n metaf.close()\n filemeta = 
\"metafile.csv\"\n if \"This is the WebDAV interface. It can only be accessed by WebDAV clients such as the ownCloud desktop sync client.\" in metafile:\n createMetadata(request, datafile)\n filemeta = \"meta.txt\"\n call([\"curl -s -k -u \" + username + \":\" + password + \" -T \" + '\\'' + \"meta.txt\" + '\\'' +\n \" \" + storage + \"/\" + inv + \"/\" + study + \"/meta.txt\"], shell=True)\n with open(username + \"/\" + filemeta, 'rb') as csvfile:\n count = 0\n reader = csv.DictReader(csvfile)\n cnt = 0\n for row in reader:\n for p in pid.split(','):\n data = p.replace('[', '').replace(']', '').replace(\"'\", \"\").replace('\"', '').replace(' ', '')[1:]\n call([\"curl http://127.0.0.1:3030/ds/update -X POST --data 'update=INSERT DATA { GRAPH <http://127.0.0.1:3030/ds/data/\" +\n username.replace('@', '') + \"> { <http://127.0.0.1:3030/\" + study + \"_\" + str(cnt) + \"> <http://127.0.0.1:3030/ds/data?graph=\" +\n username.replace('@', '') + \"#pid> \\\"\" + data + \"\\\" } }' -H 'Accept: text/plain,*/*;q=0.9'\"], shell=True)\n\n call([\"curl http://127.0.0.1:3030/ds/update -X POST --data 'update=INSERT DATA { GRAPH <http://127.0.0.1:3030/ds/data/\" +\n username.replace('@', '') + \"> { <http://127.0.0.1:3030/\" + study + \"_\" + str(cnt) + \"> <http://127.0.0.1:3030/ds/data?graph=\" +\n username.replace('@', '') + \"#investigation_id> \\\"\" + inv + \"\\\" } }' -H 'Accept: text/plain,*/*;q=0.9'\"], shell=True)\n call([\"curl http://127.0.0.1:3030/ds/update -X POST --data 'update=INSERT DATA { GRAPH <http://127.0.0.1:3030/ds/data/\" +\n username.replace('@', '') + \"> { <http://127.0.0.1:3030/\" + study + \"_\" + str(cnt) + \"> <http://127.0.0.1:3030/ds/data?graph=\" +\n username.replace('@', '') + \"#group_id> \\\"\" + study + \"\\\" } }' -H 'Accept: text/plain,*/*;q=0.9'\"], shell=True)\n call([\"curl http://127.0.0.1:3030/ds/update -X POST --data 'update=INSERT DATA { GRAPH <http://127.0.0.1:3030/ds/data/\" +\n username.replace('@', '') + \"> { <http://127.0.0.1:3030/\" + study + \"_\" + str(cnt) + \"> <http://127.0.0.1:3030/ds/data?graph=\" +\n username.replace('@', '') + \"#disgenet_iri> \\\"\" + disgenet + \"\\\" } }' -H 'Accept: text/plain,*/*;q=0.9'\"], shell=True)\n call([\"curl http://127.0.0.1:3030/ds/update -X POST --data 'update=INSERT DATA { GRAPH <http://127.0.0.1:3030/ds/data/\" +\n username.replace('@', '') + \"> { <http://127.0.0.1:3030/\" + study + \"_\" + str(cnt) + \"> <http://127.0.0.1:3030/ds/data?graph=\" +\n username.replace('@', '') + \"#edam_iri> \\\"\" + edam + \"\\\" } }' -H 'Accept: text/plain,*/*;q=0.9'\"], shell=True)\n call([\"curl http://127.0.0.1:3030/ds/update -X POST --data 'update=INSERT DATA { GRAPH <http://127.0.0.1:3030/ds/data/\" +\n username.replace('@', '') + \"> { <http://127.0.0.1:3030/\" + study + \"_\" + str(cnt) + \"> <http://127.0.0.1:3030/ds/data?graph=\" +\n username.replace('@', '') + \"#disease> \\\"\" + request.POST.get('disgenet') + \"\\\" } }' -H 'Accept: text/plain,*/*;q=0.9'\"], shell=True)\n if filemeta == \"meta.txt\":\n call([\"curl http://127.0.0.1:3030/ds/update -X POST --data 'update=INSERT DATA { GRAPH <http://127.0.0.1:3030/ds/data/\" +\n username.replace('@', '') + \"> { <http://127.0.0.1:3030/\" + study + \"_\" + str(cnt) + \"> <http://127.0.0.1:3030/ds/data?graph=\" +\n username.replace('@', '') + \"#meta> \\\"\" + storage + \"/\" + inv + \"/\" + study +\n \"/meta.txt\" + \"\\\" } }' -H 'Accept: text/plain,*/*;q=0.9'\"], shell=True)\n else:\n for m in metadata:\n mfile = m.replace('[', '').replace(']', '').replace('\"', 
'').replace(\"'\", \"\").replace(' ', '')\n call([\"curl http://127.0.0.1:3030/ds/update -X POST --data 'update=INSERT DATA { GRAPH <http://127.0.0.1:3030/ds/data/\" +\n username.replace('@', '') + \"> { <http://127.0.0.1:3030/\" + study + \"_\" + str(cnt) +\n \"> <http://127.0.0.1:3030/ds/data?graph=\" + username.replace('@', '') + \"#meta> \\\"\" + mfile[1:] +\n \"\\\" } }' -H 'Accept: text/plain,*/*;q=0.9'\"], shell=True)\n headers = []\n for (k, v) in row.items():\n for h in range(0, len(k.split('\\t'))):\n if k.split('\\t')[h] != \"\":\n value = v.split('\\t')[h]\n header = k.split('\\t')[h]\n headers.append(header.replace('\"', ''))\n call([\"curl http://127.0.0.1:3030/ds/update -X POST --data 'update=INSERT DATA { GRAPH <http://127.0.0.1:3030/ds/data/\" +\n username.replace('@', '') + \"> { <http://127.0.0.1:3030/\" + study + \"_\" + str(cnt) +\n \"> <http://127.0.0.1:3030/ds/data?graph=\" + username.replace('@', '') + \"#\" + header.replace('\"', '') + \"> \\\"\" +\n value.replace('\"', '').replace('+', '%2B') + \"\\\" } }' -H 'Accept: text/plain,*/*;q=0.9'\"], shell=True)\n if \"sex\" not in headers:\n call([\"curl http://127.0.0.1:3030/ds/update -X POST --data 'update=INSERT DATA { GRAPH <http://127.0.0.1:3030/ds/data/\" +\n username.replace('@', '') + \"> { <http://127.0.0.1:3030/\" + study + \"_\" + str(cnt) + \"> <http://127.0.0.1:3030/ds/data?graph=\" +\n username.replace('@', '') + \"#sex> \\\"\" + 'Unknown' + \"\\\" } }' -H 'Accept: text/plain,*/*;q=0.9'\"], shell=True)\n count += 1\n cnt += 1\n call([\"rm\", username + \"/metafile.csv\"])\n call([\"rm\", username + \"/meta.txt\"])\n return HttpResponseRedirect(reverse('index'))", "def Persist(self) -> bool:", "def Persist(self) -> bool:", "def save_data_to_db(rq, ji, request):\n if rq['change_privacy_level']:\n privacy_id = request.POST.get('privacy-id', False)\n ji['instance'].privacy = Privacy(pk=privacy_id)\n ji['instance'].save()\n elif rq['delete_img_flag']:\n ji['instance'].save()\n elif rq['delete_target_instance_id']:\n ji['instance'].delete()\n elif ji['form'].is_valid():\n if rq['add_new_profile_data']:\n ji['instance'].save()\n print(\"success adding new data.\")\n else:\n ji['form'].save()\n print(\"success editing data.\")\n if rq['portfolio_images'] and ji['image_form'].is_valid():\n for idx, image in enumerate(rq['portfolio_images']):\n image_instance = Image(\n work_id=ji['instance'].id,\n image=image,\n )\n image_instance.save()\n if idx == 0:\n ji['instance_image'] = image_instance.image\n print(\"success save images.\")", "async def insert_requirements(conn, mapname):\n select_sql = \"\"\"insert into media_files(path, type, provided) select ?, ?, ? 
where not exists(select * from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (f\"pics/mapshots/{mapname}\", \"mapshot\", 0, f\"pics/mapshots/{mapname}\"))\n select_sql = \"\"\"insert into requirements(map_id, file_id) select (select map_id from maps where map_path=?), (select file_id from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (mapname, f\"pics/mapshots/{mapname}\"))\n (reqs, sky, texs, exts, linkeds) = await get_required_files(mapname)\n if reqs:\n for req in reqs:\n select_sql = \"\"\"insert into media_files(path, type, provided) select ?, ?, ?\n where not exists(select * from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (req, \"requiredfile\", 0, req))\n select_sql = \"\"\"insert into requirements(map_id, file_id) select (select map_id from maps where map_path=?), (select file_id from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (mapname, req))\n if sky:\n for suffix in [\"bk\", \"dn\", \"ft\", \"lf\", \"rt\", \"up\"]:\n select_sql = \"\"\"insert into media_files(path, type, provided) select ?, ?, ?\n where not exists(select * from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (sky + suffix, \"sky\", 0, sky + suffix))\n select_sql = \"\"\"insert into requirements(map_id, file_id) select (select map_id from maps where map_path=?), (select file_id from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (mapname, sky + suffix))\n\n if texs:\n for req in texs:\n select_sql = \"\"\"insert into media_files(path, type, provided) select ?, ?, ?\n where not exists(select * from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (req, \"texture\", 0, req))\n select_sql = \"\"\"insert into requirements(map_id, file_id) select (select map_id from maps where map_path=?), (select file_id from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (mapname, req))\n if exts:\n for req in exts:\n select_sql = \"\"\"insert into media_files(path, type, provided) select ?, ?, ?\n where not exists(select * from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (req, \"externalfile\", 0, req))\n select_sql = \"\"\"insert into requirements(map_id, file_id) select (select map_id from maps where map_path=?), (select file_id from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (mapname, req))\n if linkeds:\n for req in linkeds:\n select_sql = \"\"\"insert into media_files(path, type, provided) select ?, ?, ?\n where not exists(select * from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (req, \"linkedfile\", 0, req))\n select_sql = \"\"\"insert into requirements(map_id, file_id) select (select map_id from maps where map_path=?), (select file_id from media_files where path=?)\"\"\"\n _ = select(conn, select_sql, (mapname, req))", "def check_vargs(vargs):\n server_uri = vargs.get('server', '')\n parsed = urllib.parse.urlsplit(server_uri)\n if not all([parsed.scheme, parsed.netloc]):\n print(\n \"You must specify the full, absolute URI to your devpi server \"\n \"(including protocol).\")\n sys.exit(1)\n index = vargs.get('index')\n if not index:\n print(\"You must specify an index on your devpi server to upload to.\")\n sys.exit(1)\n username = vargs.get('username')\n if not username:\n print(\"You must specify a username to upload packages as.\")\n sys.exit(1)\n password = vargs.get('password')\n if password is None:\n print(\"You must specify a password.\")\n sys.exit(1)", "async def validate(\n # accept both pep_registry and pep_files, both 
should be optional\n pep_registry: Optional[str] = Form(None),\n pep_files: Optional[List[UploadFile]] = None,\n schema: Optional[str] = Form(None),\n schema_file: Optional[UploadFile] = None,\n schema_registry: Optional[str] = Form(None),\n agent: PEPDatabaseAgent = Depends(get_db),\n):\n\n # check they sent at least pep_registry or pep_files\n if pep_registry is None and pep_files is None:\n raise HTTPException(\n status_code=400,\n detail={\n \"error\": \"Must supply either a registry path or a list of files to validate.\"\n },\n )\n\n if pep_registry is not None:\n namespace, name, tag = registry_path_converter(pep_registry)\n tag = tag or DEFAULT_TAG\n p = agent.project.get(namespace, name, tag)\n else:\n init_file = parse_user_file_upload(pep_files)\n init_file, other_files = split_upload_files_on_init_file(pep_files, init_file)\n\n # create temp dir that gets deleted when we're done\n with tempfile.TemporaryDirectory() as dirpath:\n # save init file\n with open(f\"{dirpath}/{init_file.filename}\", \"wb\") as cfg_fh:\n shutil.copyfileobj(init_file.file, cfg_fh)\n\n # save any other files the user might have supplied\n if other_files is not None:\n for upload_file in other_files:\n # open new file inside the tmpdir\n with open(f\"{dirpath}/{upload_file.filename}\", \"wb\") as local_tmpf:\n shutil.copyfileobj(upload_file.file, local_tmpf)\n\n p = peppy.Project(f\"{dirpath}/{init_file.filename}\")\n\n if schema is None and schema_registry is None and schema_file is None:\n raise HTTPException(\n status_code=400,\n detail={\n \"error\": \"Must supply either a registry path or a list of files to validate.\"\n },\n )\n\n if schema_registry is not None:\n schema_url = f\"https://schema.databio.org/{schema_registry}.yaml\"\n\n try:\n response = requests.get(schema_url)\n response.raise_for_status() # Check if the request was successful\n yaml_string = response.text\n except requests.exceptions.RequestException as e:\n raise HTTPException(\n status_code=400,\n detail={\"error\": f\"Error fetching schema: {str(e)}\"},\n )\n\n # save schema string to temp file\n with tempfile.NamedTemporaryFile(mode=\"w\", delete=False) as yaml_file:\n yaml_file.write(yaml_string)\n schema_dict = yaml_file.name\n elif schema_file is not None:\n contents = schema_file.file.read()\n schema_dict = yaml.safe_load(contents)\n else:\n # save schema string to temp file, then read in with eido\n with tempfile.NamedTemporaryFile(mode=\"w\") as schema_file:\n schema_file.write(schema)\n schema_file.flush()\n try:\n schema_dict = eido.read_schema(schema_file.name)[0]\n except eido.exceptions.EidoSchemaInvalidError as e:\n raise HTTPException(\n status_code=200,\n detail={\"error\": f\"Schema is invalid: {str(e)}\"},\n )\n\n # validate project\n try:\n eido.validate_project(\n p,\n schema_dict,\n )\n\n # while we catch this, its still a 200 response since we want to\n # return the validation errors\n except eido.exceptions.EidoValidationError as e:\n error_type, property_names = await eido_error_string_converter(e)\n\n return {\"valid\": False, \"error_type\": error_type, \"errors\": property_names}\n\n except Exception as e:\n errors = [str(e)]\n return {\"valid\": False, \"error_type\": \"Schema\", \"errors\": errors}\n\n # everything passed, return valid\n else:\n # return project is valid\n return {\"valid\": True, \"errors\": None}", "async def exec_write(self, query, *args):", "async def funnypts_transaction(message, client, extra_args, operation):\n\n awarder = message.author.id\n\n # input screening\n if 
len(extra_args) < 2:\n await message.channel.send(f\"PLEASE USE THIS: `funnypts {operation} user_mention reason`\")\n return False\n\n if not (awardee := utils.from_mention(extra_args[0])):\n await message.channel.send(\"PLEASE MENTION SOMEONE. WHAT ARE THEY GONNA DO, CRY?\")\n return False\n\n reason_length = funny_controls[\"reason_length\"]\n if len(reason := \" \".join(extra_args[1:])) > reason_length:\n await message.channel.send(F\"APOLOGIES, I ONLY STORE DESCRIPTIONS OF UP TO {reason_length} CHARACTERS. WELCOME TO TWITTER\")\n return False\n\n if client.get_user(awarder).bot or client.get_user(awardee).bot:\n return False\n\n if awarder == awardee:\n await message.channel.send(\"WHAT ARE YOU, AN EGOMANIAC?\")\n return False\n\n # writing\n if operation == \"add\":\n transaction = 1\n elif operation == \"remove\":\n transaction = -1\n\n @database.query\n def write_entry(conn):\n conn.execute(\"INSERT INTO funnypts VALUES(?, ?, ?, ?, ?)\",\n (awarder, awardee, reason, transaction, datetime.now()))\n conn.commit()\n conn.close()\n\n write_entry()\n return True", "def save(self, arguments):\n name = arguments['<name>']\n\n instance_name = arguments['<instance>']\n instance_name = self.activate(instance_name)\n\n vmrun = VMrun(self.vmx, user=self.user, password=self.password)\n if vmrun.snapshot(name) is None:\n puts_err(colored.red(\"Cannot take snapshot\"))\n else:\n puts_err(colored.green(\"Snapshot {} taken\".format(name)))", "def execute(self, *args, **kwargs):\n pass", "def test_9_incorrect_database(self):\n fitness = Fitness(None, self.user_id_1)\n s = fitness.insert_in_database(self.fitness_dict, date_time=self.dt1)\n self.assertFalse(s)", "def _process_upload_job_async(self, job):\n logger.info('Uploading file to Sia: %s', job.local_path)\n try:\n return self._sia_client.upload_file_async(job.local_path,\n job.sia_path)\n except Exception as ex:\n logger.error('Upload failed: %s', ex.message)\n job.increment_failure_count()\n return False", "def do_update(self, args):\n args = shlex.split(args)\n dicti = storage.all()\n if not args:\n print(\"** class name missing **\")\n elif not args[0] in name_of_class:\n print(\"** class doesn't exist **\")\n elif len(args) == 1:\n print(\"** instance id missing **\")\n elif not \"{}.{}\".format(args[0], args[1]) in dicti:\n print(\"** no instance found **\")\n elif len(args) == 2:\n print(\"** attribute name missing **\")\n elif len(args) == 3:\n print(\"** value missing **\")\n else:\n key = dicti[\"{}.{}\".format(args[0], args[1])]\n setattr(key, args[2], args[3])\n key.save()", "def handle(self, *args, **options):\n self.verbosity = options['verbosity']\n\n self.insert_translations()\n self.insert_elections()\n self.flush_voting_history()\n self.insert_voting_histories()", "def _execute(self, db):\n raise NotImplementedError", "def _check_args(self, args_):\n\n pass", "def post(self):\n args = change_tag_or_sentiment_parser.parse_args() \n sentence = args[\"sentence\"]\n value = args[\"value\"]\n whether_allowed = args[\"whether_allowed\"]\n\n if not whether_allowed:\n return {\"success\": False,\n \"error\": True,\n \"messege\": \"Right now, Updating Tags or sentiments are not allowed\",\n }\n\n\n tag_list = [\"food\", \"service\", \"cost\", \"null\", \"ambience\", \"overall\"]\n sentiment_list = [\"positive\", \"super-positive\", \"neutral\", \"negative\", \"super-negative\", \"mixed\"]\n\n print value, sentence\n if not value in (tag_list+sentiment_list):\n return {\"success\": False,\n \"error\": True,\n \"messege\": \"Error 
occured\",\n }\n\n if value in [\"food\", \"service\", \"cost\", \"null\", \"ambience\", \"overall\"]:\n training_tag_collection.update({\"sentence\": sentence}, {\"$set\": {\n \"review_id\": \"misc\",\n \"tag\": value, }}, upsert=True)\n print \"tag updated\"\n\n if value in [\"positive\", \"super-positive\", \"neutral\", \"negative\", \"super-negative\"]:\n training_sentiment_collection.update({\"sentence\": sentence}, {\"$set\": {\n \"review_id\": \"misc\",\n \"sentiment\": value,\n }}, upsert=True)\n print \"sentiment updated\"\n return {\"success\": True,\n \"error\": False,\n \"messege\": \"Updated!!!\",\n }", "def Execute(self):\n return True", "def upload_file():\n \n #query the db and render the table used to display the leaderboard to users \n userBoard = query_db('''\n select submission_id, submit_date, public_score\n from submission sub\n where user_id = '%s'\n order by public_score %s''' % (session['user_id'], orderBy))\n \n userBoard = [dict(row) for row in userBoard]\n for row in userBoard:\n row['score'] = row['public_score']\n row['str_time'] = str(datetime.fromtimestamp(row['submit_date']))\n \n colNames = ['Submission Time', 'Public Score']\n \n if request.method == 'POST':\n try:\n #check if contest has ended\n if contestEndBool():\n flash(\"Error: contest has ended\")\n raise Exception(\"contest has ended\")\n \n print(\"here\")\n #ensure user hasn't exceeded daily submission limit\n dailyCnt = query_db('''select count(*) sub_cnt\n from submission sub\n where submit_date > %s\n and user_id = %s\n group by user_id''' % (time.time() - 60*60*24, session['user_id']))\n \n if len(dailyCnt) == 0:\n dailyCnt = 0\n else:\n dailyCnt = int(dict(dailyCnt[0])['sub_cnt'])\n \n if dailyCnt > dailyLimit:\n flash(\"Error: exceeded daily upload limit\")\n raise Exception('Upload limit exceeded')\n \n file = request.files['file']\n #throw error if extension is not allowed\n if not allowed_file(file.filename):\n raise Exception('Invalid file extension')\n \n if file and allowed_file(file.filename):\n filename = werkzeug.secure_filename(file.filename)\n #append userid and date to file to avoid duplicates\n filename = str(session['user_id']) + '_' + \\\n str(int(time.time())) + '_' + filename\n fullPath = os.path.join(app.config['UPLOAD_FOLDER'], filename)\n file.save(fullPath)\n model_score = loadAndScore(fullPath)\n \n #cache the filename and submission to database\n db = get_db()\n db.execute('''insert into submission (user_id, filename, submit_date, \n public_score, private_score, total_score) \n values (?, ?, ?, ?, ?, ?)''', \n (session['user_id'], filename, int(time.time()), *model_score))\n db.commit()\n \n #inform user upload was a success\n flash('Your submission was recorded.')\n return redirect(url_for('leaderboard'))\n except:\n #if exception is thrown in process then flash user\n flash('File did not upload or score! 
Make sure the submission format is correct.')\n return render_template('uploadsubmission.html', \n title=\"Upload Submission\", \n userBoard=userBoard,\n subNbr=subNbr)", "def check_arguments(self):\n # only four test operation is permitted, if given anything apart from this,\n # then it should print error message.\n if not (\n (self.args.file is None)\n and ((self.args.testfiles is None or self.args.hostname is None))\n ):\n action = None\n if self.set_action_cmd(action) is not None:\n # the operation is checked in above function\n return None\n\n self.logger.error(\n colorama.Fore.RED\n + \"Arguments not given correctly, Please refer help message\",\n extra=self.log_detail,\n )\n self.parser.print_help()\n sys.exit(1)", "def insertInTable(self, tablename, columns, arguments, chunksize=None, verbose=False):\n\n # Make sure columns is a list, and not a single string\n if not isinstance(columns, (list,)):\n columns = [columns]\n\n # To allow for column names that have spaces\n columns = list(map(lambda x: '`'+x+'`', columns))\n\n ncol = len(columns)\n\n if len(arguments[0]) == ncol:\n # Make sure the tablename is valid\n if tablename in self.getTableNames():\n # Make sure we have a list of tuples; necessary for mysql\n arguments = list(map(tuple, arguments))\n\n sqlcmd = ('INSERT INTO ' + tablename +\n '(' + ','.join(columns) + ') VALUES (')\n if self.connector == 'mysql':\n sqlcmd += '%s' + (ncol-1)*',%s' + ')'\n else: \n sqlcmd += '?' + (ncol-1)*',?' + ')'\n\n if chunksize:\n\n n_chunks = np.ceil(len(arguments)/chunksize)\n if verbose:\n print('\\n')\n bar = Bar('Inserting chunks of data in database', max=n_chunks)\n for chk in chunks(arguments, chunksize):\n if verbose:\n bar.next()\n self._c.executemany(sqlcmd, chk)\n self._conn.commit()\n\n if verbose:\n bar.finish()\n\n else:\n\n self._c.executemany(sqlcmd, arguments)\n # Commit changes\n self._conn.commit()\n else:\n print('Error inserting data in table: The table does not exist')\n else:\n print('Error inserting data in table: number of columns mismatch')\n\n return", "def post(self, args):\n try:\n db = get_db('intents')\n db.add_intent(args['intent'])\n if 'entities' in args.keys() and args['entities']:\n db.add_entities_to_intent(args['intent'], args['entities'])\n if 'stopwords' in args.keys() and args['stopwords']:\n db.add_stopwords_to_intent(args['intent'], args['stopwords'])\n intents = db.get_intents()\n resp = jsonify(intents=intents)\n resp.status_code = 200\n return resp\n except DatabaseError as error:\n resp = jsonify(error=error.value)\n resp.status_code = 500\n return resp\n except DatabaseInputError as error:\n resp = jsonify(error=error.value)\n resp.status_code = 400\n return resp", "def validate_upload(self, upload_id: ObjectId, valid: bool):\n action_doc = self.cc.action_log.find_one({'upload_ids': upload_id, 'action': 'upload'})\n if action_doc is None:\n print(\"upload doesn't exist in action_log\")\n return\n\n upload_doc = self.cc.metadata_coll.find_one({'_id': upload_id, self.action_id_name: {'$exists': True}})\n if upload_doc is None:\n print(\"upload doesn't exist in upload database or upload has no field called '\"+self.action_id_name+\"'\")\n return\n\n if not isinstance(upload_id, ObjectId):\n print(\"argument upload_id is not of type ObjectId.\")\n\n if not isinstance(valid, bool):\n print(\"argument valid is not of type bool.\")\n\n timespans = action_doc['timespans']\n output_formats = action_doc['output_formats']\n\n action = \"marked_valid\" if valid else \"marked_invalid\"\n\n action_id = 
self._action_id_creator()\n self.cc.metadata_coll.update_one({'_id': upload_id}, {'$set': {self.valid_name: valid}})\n self.cc.action_log.insert_one({ \"_id\": action_id, \"timespans\": timespans, \"upload_ids\": [upload_id],\n \"action\": action, \"output_formats\": output_formats })", "def save_in_db(self):\n self.sql_database.table_name = self.table_db\n self.sql_database.db_name = self.db\n if self.sql_database.insert_item(text_path=self.path, word_first=self.word_1.get(),\n word_second=self.word_2.get(),\n word_third=self.word_3.get(), word_fourth=self.word_4.get(),\n word_fifth=self.word_5.get()):\n msg.showinfo(message=\"Done\")", "def submit(self):\n data = self.getFSNDataDict()\n if data != []:\n MOSES.addToPiggyBank(data, self.user_id, self.password)", "def add_data(self, site: str, pid: int, formatted_pid: str, image_url: str) -> bool:\n try:\n self._cursor.execute(f\"INSERT INTO {site} VALUES ({pid}, '{formatted_pid}', '{image_url}', {datetime.now().timestamp()});\")\n self._connection.commit()\n return True\n \n except sqlite3.IntegrityError:\n print(f'Failed to add {pid} ({formatted_pid}) - PID already exists in table \"{site}\".')\n \n return False", "def upload_assignment(self, request, suffix=''):\n log.info(\"upload_assignment called\")\n upload = request.params['assignment']\n sha1 = _get_sha1(upload.file)\n log.info(type(upload.file))\n answer = {\n \"sha1\": sha1,\n \"filename\": upload.file.name,\n \"mimetype\": mimetypes.guess_type(upload.file.name)[0]\n }\n\n self.raw_answer = answer\n path = self._file_storage_path(sha1, upload.file.name)\n \n log.info(\"upload1-----------------------------------------------\")\n filepathexists=os.path.join(IMAGEDIFF_ROOT, path)\n file_exists=os.path.exists(filepathexists)\n if not file_exists:\n log.info(\"saving the file onto local store\")\n save_file(path, File(upload.file))\n file_exists=True\n try:\n storage.store_data(str(self.course_id), str(self.xmodule_runtime.anonymous_student_id), str(self.location.block_id), file(IMAGEDIFF_ROOT + path))\n log.info(\"upload through storage api successful\")\n except PersonValueError:\n log.info(\"storage api upload failed:\")\n log.info(\"peson argument cant be an empty string\")\n except DepartmentValueError:\n log.info(\"storage api upload failed:\")\n log.info(\"department argument cant be an empty string\")\n except QualifierValueError:\n log.info(\"storage api upload failed:\")\n log.info(\"qualifier argument cant be an empty string\")\n except BucketValueError:\n log.info(\"storage api upload failed:\")\n log.info(\"invalid bucket key argument\")\n except S3ValueError:\n log.info(\"storage api upload failed:\")\n log.info(\"invalid S3 credentials\")\n except SocketValueError:\n log.info(\"storage api upload failed:\")\n log.info(\"invalid host\")\n log.info(\"$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$\") \n log.info(self)\n self.grade_this_guy()\n if self.score != -1:\n self.attempts += 1\n os.remove(IMAGEDIFF_ROOT + path) \n return Response(json_body=self.student_state())", "def test_0_data_insertion(self):\n s = self.fitness.insert_in_database(self.fitness_dict, date_time=self.dt1)\n self.assertTrue(s)" ]
[ "0.6018636", "0.56575793", "0.5548777", "0.5302964", "0.52453786", "0.52212244", "0.5169866", "0.51535535", "0.5146376", "0.5134887", "0.5131358", "0.51230496", "0.51160276", "0.5075478", "0.506222", "0.5057487", "0.5046628", "0.50436354", "0.50288576", "0.50007904", "0.4996246", "0.49924737", "0.49914554", "0.49820867", "0.49658042", "0.49541822", "0.49480665", "0.49426568", "0.49397644", "0.4937888", "0.49354383", "0.4928141", "0.49266604", "0.4924016", "0.49195233", "0.49139297", "0.4908697", "0.49055558", "0.49022835", "0.48983866", "0.48977703", "0.48942447", "0.48725596", "0.4872512", "0.48609728", "0.48549902", "0.48549902", "0.485306", "0.4847469", "0.4843768", "0.48406422", "0.484053", "0.48362374", "0.4817714", "0.47906387", "0.4789292", "0.47880334", "0.47837308", "0.47831738", "0.478195", "0.47747356", "0.47705182", "0.47647345", "0.4759177", "0.47585303", "0.47582415", "0.47551596", "0.4753401", "0.4750021", "0.47435772", "0.47345337", "0.47320327", "0.47280422", "0.47278267", "0.47278267", "0.47234374", "0.47036618", "0.4702195", "0.4700368", "0.46990764", "0.46971354", "0.46930438", "0.4687226", "0.46856883", "0.46855754", "0.46827298", "0.46799317", "0.46797907", "0.46738225", "0.46702552", "0.4668753", "0.46583828", "0.465274", "0.46480808", "0.4647983", "0.46471158", "0.46453574", "0.46451944", "0.46451527", "0.46414027", "0.46385378" ]
0.0
-1
checks the database for empty transcription entries, returns a list with \n\n index 0 audiourl\n index 1 id\n index 2 podcast name\n index 3 service of podcast
def checkPre(dbConnection): cursor = dbConnection.cursor() cursor.execute("SELECT audiourl, T.id, podcastName, source FROM transcriptions AS T JOIN podcasts as P ON P.name = T.podcastname WHERE COALESCE(T.transcription, '') = '' AND pending = FALSE LIMIT 1;") entry = cursor.fetchone() cursor.close() return entry
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def listMissingItems():\n global doc\n texts = doc.getElementsByTagName(\"text\")\n for t in texts:\n xmlid = t.getAttribute(\"id\")\n for table in langtables:\n execute(\"SELECT * FROM %s WHERE xmlid=\\\"%s\\\"\" % (table, xmlid))\n rows = cursor.fetchall()\n if rows == None or len(rows) == 0:\n warn(t.toxml() + \" missing in %s\" % table)", "def test_filter_messages_empty_data(self):\n pass", "def test_170417_empty(self):\n spc = parser(get_file('PTSD48_empty.txt'))\n # spc.draw_outlooks()\n spc.sql(self.txn)\n jabber = spc.get_jabbers('')\n self.assertEquals(jabber[0][0],\n (\"The Storm Prediction Center issues Days 4-8 \"\n \"Convective Outlook at Dec 25, 9:41z \"\n \"http://www.spc.noaa.gov/products/exper/day4-8/\"\n \"archive/2008/day4-8_20081225.html\"))", "def nohupTranscriptionContent(filePath):\n try:\n continu = True\n fileContent = \"\"\n f = open(filePath, 'r')\n while (continu):\n temp = f.readline(900000)\n if(len(temp) == 0):\n continu = False\n else:\n fileContent += temp\n results = []\n realTimeFactor = re.findall(r'Timing stats: real-time factor for offline decoding was (.*?) = ', fileContent)\n results.append(realTimeFactor)\n transcription = re.findall(r'utterance-id(.*?) (.*?)\\n', fileContent)\n transcriptionList = []\n transcriptionIDList = []\n for item in transcription:\n if(len(item[1]) > 1000):\n transcriptionIDList.append(item[0])\n transcriptionList.append(item[1])\n results.append(transcriptionList)\n results.append(transcriptionIDList)\n transcriptionTime = re.findall(r'seconds / (.*?) seconds\\.', fileContent)\n results.append(transcriptionTime)\n return results\n except Exception as e:\n Tools.writeException(\"nohupTranscriptionContent\", e)\n return False", "def filter_empty(word_list):\n new_list = []\n for x in word_list:\n if(x):\n new_list.append(x)\n return new_list", "def __look_for_missing_pseudotext_info(self, force_update=False):\n logging.debug('Starting method that looks for a missing pseudo-text info')\n counter = 0\n max_vids_to_process = self.num_vids_to_use\n logging.info('Examining ' + str(max_vids_to_process) + ' records.')\n list_vids_no_pt_data = []\n percent_tracker = PercentTracker(max_vids_to_process, int_output_every_x_percent=10)\n for vid_id in self.transcripts_ds:\n execution_should_continue = self.var_mgr.var_retrieve(my_globals.str_execution_may_go_on)\n if (not execution_should_continue) or (counter >= max_vids_to_process):\n break\n\n need_to_append_to_list = False\n if force_update:\n need_to_append_to_list = True\n if not need_to_append_to_list:\n # if we already found that the video should be appended to the list,\n # then there is no need for further checks. 
But if NOT, then\n # the following should still be performed\n transcript = Transcript(vid_id)\n transcript.set_transcript_directory(self.str_path_to_transcripts_files)\n transcript.load_transcript_object_from_dictionary(self.transcripts_ds.fetch_data(vid_id))\n has_pseudotranscript_data = transcript.is_pseudotranscript_filename_populated()\n if not has_pseudotranscript_data:\n # we are here if the video has a transcript (it exists in the transcripts SimpleDS),\n # but the field for the filename of the TranscriptAnalysis file has never been populated.\n need_to_append_to_list = True\n\n if need_to_append_to_list:\n list_vids_no_pt_data.append(vid_id)\n counter += 1\n percent_tracker.update_progress(counter,\n str_description_to_include_in_logging='Finding missing pseudotext files.')\n return list_vids_no_pt_data", "def test_get_all_unassociated_no_tracks(self):\n self.assertEqual(self.get_track_count(), 0)\n tracks = Track.get_all_unassociated(self.app.curs)\n self.assertEqual(tracks, [])", "def __look__missing_termcount_info(self):\n logging.debug('Starting method that looks for missing Term Count data.')\n counter = 0\n max_vids_to_process = self.num_vids_to_use\n logging.info('Examining ' + str(max_vids_to_process) + ' records.')\n list_vids_no_tc_data = []\n percent_tracker = PercentTracker(max_vids_to_process, int_output_every_x_percent=10)\n for vid_id in self.transcripts_ds:\n execution_should_continue = self.var_mgr.var_retrieve(my_globals.str_execution_may_go_on)\n if (not execution_should_continue) or (counter >= max_vids_to_process):\n break\n transcript = Transcript(vid_id)\n transcript.set_transcript_directory(self.str_path_to_transcripts_files)\n transcript.load_transcript_object_from_dictionary(self.transcripts_ds.fetch_data(vid_id))\n has_tc_data = transcript.is_termcount_filename_populated()\n if not has_tc_data:\n # we are here if the video has a transcript (it exists in the transcripts SimpleDS),\n # but the field for the filename of the Term Count file has never been populated.\n list_vids_no_tc_data.append(vid_id)\n counter += 1\n percent_tracker.update_progress(counter,\n str_description_to_include_in_logging='Finding missing term-count files.')\n return list_vids_no_tc_data", "def _checkForBlankLines(self, datalines):\n empties = None\n count = 0\n rtlines = []\n for line in datalines:\n if line.strip() == \"\":\n empties = 1\n else:\n if empties == 1: # If data line found after empty line then raise\n raise Exception(\"Empty line found in data section at line: \" + str(count))\n else:\n rtlines.append(line)\n count = count + 1\n return rtlines", "def test_get_all_unassociated_single_track_without_album(self):\n track = Track(artist='Artist', title='Title')\n track.insert(self.app.db, self.app.curs,\n 'xmms', datetime.datetime.now())\n self.assertEqual(self.get_track_count(), 1)\n tracks = Track.get_all_unassociated(self.app.curs)\n self.assertEqual(len(tracks), 0)", "def not_empty(cur, conn, v=False):\n logs = []\n print(\"Checking that all tables have at least 10 rows...\")\n for t in TABLES:\n try:\n cur.execute(f\"SELECT COUNT(*) FROM {t}\")\n conn.commit()\n row = cur.fetchone()\n except:\n raise ValueError(f\"Something went wrong executing `not_empty` query on table {t}\")\n\n if row == None:\n text = f\"!!Data quality check on table {t} failed, no data fetched.\"\n elif row[0] <= 10:\n text = f\"!!Data quality check on table {t} failed with {row[0]} rows.\"\n else:\n text = f\"Data quality check on table {t} passed with {row[0]} rows.\"\n if v: print(text)\n 
logs.append(f\"{text}\\n\")\n\n return logs", "def test_query_with_no_matches_returns_nothing(test_store):\n items = list(test_store.get_by(name=\"Sugar\"))\n\n assert len(items) == 0", "def check_txt_ids(self):\n for awi in self:\n if not awi.txt_ids:\n raise exceptions.except_orm(\n _(\"Missing Values !\"),\n _(\"Missing VAT TXT Lines!!!\"))\n return True", "def missing_samples(self):\n missing = [s for s in self.subjects if len(s.samples) == 0]\n if len(missing) == 0:\n return None\n return missing", "def get_transcription(url):\n\n # Checks the format of the URL\n if \"https://www.youtube.com/watch?v=\" in url:\n input_url_id = url.replace(\"https://www.youtube.com/watch?v=\", \"\")\n elif \"https://youtu.be/\" in url:\n input_url_id = url.replace(\"https://youtu.be/\", \"\")\n\n # Creates a blank list to iterate over\n text_parts = []\n\n # Gets a list of all available transcripts\n try:\n\n list_of_transcripts = YouTubeTranscriptApi.list_transcripts(input_url_id)\n print(\"Checking for Transcriptions...\")\n\n # Checks to see if a manual transcript is created if not, checks to see if a generated one is created\n if 'en-US' in list_of_transcripts._manually_created_transcripts:\n print(\"Manual Transcription Found.\")\n transcript = list_of_transcripts.find_manually_created_transcript(['en-US'])\n elif 'en' in list_of_transcripts._manually_created_transcripts:\n print(\"Manual Transcription Found.\")\n transcript = list_of_transcripts.find_manually_created_transcript(['en'])\n elif 'en' in list_of_transcripts._generated_transcripts:\n print(\"Auto-Generated Transcription Found.\")\n transcript = list_of_transcripts.find_generated_transcript(['en'])\n\n # Saves the transcript into a variable to iterate over\n raw_transcription = transcript.fetch()\n\n # Indexing of raw transcripts\n iteration_of_raw = 0\n\n # Iterates over each dictionary and extracts 'text' key then appends the blank text_parts list\n for i in raw_transcription:\n indexed_dictionary = raw_transcription[iteration_of_raw]\n text_from_dictionary = indexed_dictionary['text']\n text_parts.append(text_from_dictionary)\n iteration_of_raw += 1\n # Defines how we want each text element to be separated with\n separator_for_each_text = \" \"\n\n # Joins the separator with the text_parts\n clean_transcription = separator_for_each_text.join(text_parts)\n\n # Returns the cleaned transcripts\n return clean_transcription\n\n except:\n print(\"No Transcriptions Found\")\n clean_transcription = \"No Transcriptions Found\"\n return clean_transcription", "def emptyDVDColection(self):\r\n if (len(self.DVDColectionlist))==0:\r\n print(\"DVD collection is empty!\")\r\n else:\r\n print(\"DVD collection is not empty!\")", "def nonempty_lines(text):\n return [line for line in text.split('\\n') if line]", "def test_empty_transformlist(self):\n tflist = TransformList()\n self.assertEqual(len(tflist), 0)", "def addMissingData():\n\n conn = sqlite3.connect(\"./transactions.db\")\n\n person = pd.read_sql(\n \"\"\"\n select * from person;\n \"\"\",\n conn,\n )\n\n record = pd.read_sql(\n \"\"\"\n select * from record;\n \"\"\",\n conn,\n )\n\n tracked = set([_id for _id in record[\"doc_id\"]])\n\n untracked = []\n\n for url, _id in zip(person[\"url\"], person[\"doc_id\"]):\n if not _id in tracked:\n untracked.append((url[-17:-13], _id))\n\n untracked = pd.DataFrame(untracked, columns=[\"date\", \"doc_id\"])\n\n res = transaction.extractData(untracked)\n\n res.to_sql(\"record\", conn, index=False, if_exists=\"append\")\n\n conn.close()", "def 
clean_tweets(data):\n count = 0\n f = open(os.path.dirname(__file__) + '/../tweet_output/ft1.txt','w')\n for item in data:\n if item.get('text'):\n string=item['text'].encode('ascii','ignore')+' (timestamp: '+item['created_at']+')\\n'\n f.write(string)\n if item['text'].encode('ascii','ignore')!=item['text']:\n count=count+1\n f.write('\\n')\n string=str(count)+' tweets contained unicode.'\n f.write(string)\n f.close()", "def test_nil_results(self):\n class Test(pyperry.Base):\n def _config(cls):\n cls.attributes('id')\n cls.configure('read', adapter=TestAdapter)\n TestAdapter.data = None\n TestAdapter.count = 3\n result = Test.fetch_records(Test.scoped())\n self.assertEqual(len(result), 0)", "def tokenize_podcast_transcript(args):\n DATA_DIR = os.path.join(os.getcwd(), 'data', args.project_id)\n story_file = os.path.join(DATA_DIR, 'podcast-transcription.txt')\n\n # Read all words and tokenize them\n with open(story_file, 'r') as fp:\n data = fp.readlines()\n\n data = [item.split(' ') for item in data]\n data = [\n item[:-2] + [' '.join(item[-2:])] if item[-1] == '\\n' else item\n for item in data\n ]\n data = [item for sublist in data for item in sublist]\n\n df = pd.DataFrame(data, columns=['word'])\n df['conversation_id'] = 1\n\n return df", "def remove_empty(data):\n out = []\n for item in data:\n if item == '':\n continue\n out.append(item)\n return out", "def test_get_all_need_transform_no_tracks(self):\n self.assertEqual(Track.get_all_need_transform(self.app.curs, 1), [])", "def test_returns_all_studies_with_no_query(self):\n url = self.get_url()\n response = self.client.get(url)\n pks = get_autocomplete_view_ids(response)\n self.assertEqual(sorted([study.pk for study in self.studies]), sorted(pks))", "def test_returns_all_studies_with_no_query(self):\n url = self.get_url()\n response = self.client.get(url)\n pks = get_autocomplete_view_ids(response)\n self.assertEqual(sorted([study.pk for study in self.studies]), sorted(pks))", "def get_valid_phrases():\n return [x[0] for x in all_topics if x[1] == \"1\"]", "def empty(self):\n return [cell for cell in self.compact if not cell.peg]", "def no_txs(self):\n return self._no_txs", "def find_add_audio_messages(self):\n return [\n msg for msg in self.messages_received if not isinstance(msg, dict)]", "def is_empty(sample_list=None):\n if sample_list is None or len(sample_list) == 0:\n sample_list = []\n return sample_list", "def existing_logs(self):\n temp = list()\n with sqlite3.connect(self.db_file) as conn:\n cur = conn.cursor()\n cur.execute(\"PRAGMA table_info('data')\")\n temp = cur.fetchall()\n # if len(temp) != 0:\n # empty sequence is false\n if temp:\n self._existing_logs = [item[1] for item in temp]\n return self._existing_logs", "def is_empty(self):\n return super(VideoCarouselTile, self).results() == []", "def blank_tbr(cc): # pragma: no cover\n cc.execute(\"\"\"SELECT DISTINCT DATE_FORMAT(git_commit.timestamp, '%Y-%m')\n FROM git_commit\"\"\")\n months = cc.fetchall()\n results = []\n for month in months:\n month = month[0]\n cc.execute(\"\"\"SELECT COUNT(*)\n FROM commit_people\n INNER JOIN git_commit\n ON commit_people.git_commit_hash = git_commit.hash\n WHERE commit_people.people_email_address = 'NOBODY'\n AND YEAR(git_commit.timestamp) = %s\n AND MONTH(git_commit.timestamp) = %s\"\"\" % (month[:4], month[5:]))\n result = cc.fetchone()\n results.append([month, int(result[0])])\n return results", "def get_non_empty(self, timeframe):\n new_symbol_names_list = []\n for symbol_name in self.symbol_names_list:\n if 
self[symbol_name][timeframe].timestamps.size == 0:\n continue\n new_symbol_names_list.append(symbol_name)\n symbol_names_list = new_symbol_names_list\n return symbol_names_list", "def transcript_lines(transcript_text):\n lines = []\n for line in transcript_text.splitlines():\n if line.strip() and line.strip()[0] != '#':\n split = line.split(':')\n speaker = split[0][-1]\n utterance = ' '.join(split[1:]).strip()\n lines.append((speaker, utterance))\n return lines", "def test_all_empty(self):\n s1 = ListTimeSeries([])\n s2 = ListTimeSeries([])\n c = CompositeTimeSeries(\"abc\", [s1, s2])\n rows = [r for r in c.rows()]\n assert_that(rows, is_(equal_to([])))", "def test_no_listings(self):\n response = self.client.get(reverse('index'))\n self.assertEqual(response.status_code, 200)\n self.assertQuerysetEqual(response.context[\"listings\"], [])", "def no_filter(blast_subject_entry):\r\n return True", "def test_returns_empty_list(self):\n result = meeting_planner([], [], 1)\n self.assertEqual(result, [])", "def test_attachment_list_empty(self):\n # FIXME: Additional tests should be written for the other 'attachment'\n # commands. This requires being able to control the current\n # time, which in turn would require centralizing the time\n # provider, for example in the environment object.\n test_name = sys._getframe().f_code.co_name\n rv, output = self._execute('attachment list wiki:WikiStart')\n self.assertEqual(0, rv)\n self.assertEqual(self.expected_results[test_name], output)", "def empty(self):\n return self._read_transaction(tx.is_empty)", "def test_get_all_need_transform_no_tracks_matched(self):\n track = Track(artist='Artist', album='Album', title='Title', last_transform=1)\n pk = track.insert(self.app.db,\n self.app.curs,\n 'xmms',\n datetime.datetime.now())\n self.assertEqual(self.get_track_count(), 1)\n\n tracks = Track.get_all_need_transform(self.app.curs, 1)\n self.assertEqual(len(tracks), 0)", "def clean_none_response(self):\n\n print(\"# Rows before non response are removed: {} \".format(len(self.data)))\n self.data = self.data[self.data['names'].map(lambda d: len(d) > 0)]\n print(\"# Rows after non response are removed: {} \".format(len(self.data)))", "def test_no_one_in_db(self):\n q = self.generate_query('view_manager_report', ())\n res = self.execute_query(q)\n expected = []\n assert len(res) == 0, f'There is suppose to be an empty summary {res}'\n assert res == expected, f'The result is suppose to be empty {res}'", "def test_parse_results_empty():\n assert [] == parse_results({})", "def phrase_list_filler():\n return (Parse.word('we').possibly() + first_word('put write have know see') + \n Parse.word('that').possibly()).nil()", "def just_in():\n soup = abcradionational.get_soup(URL + \"/podcasts\")\n \n playable_podcast = abcradionational.get_playable_podcast(soup)\n \n items = abcradionational.compile_playable_podcast(playable_podcast)\n\n\n return items", "def filterNull(self, result):\n\t\treturn [_ for _ in result if _]", "def test_empty(self):\n eq_([], list(collate()))", "def get_haikus_unposted(cls, db_session) -> list:\n q = (\n db_session.query(cls)\n .filter(cls.date_posted == None) # noqa: E711\n .filter(cls.date_deleted == None) # noqa: E711\n )\n return q.all()", "def list_todo_table(self):\n if self.is_todo_table_empty():\n print(\"nothing to do!\")\n return []\n else:\n return self.make_list_from_task()", "def get_transcription(self):\n q_tscript1 = prefixes + \"\"\"\n SELECT ?tfName ?targetName ?stmt ?tf ?target ?rel\n WHERE {\n ?stmt a belvoc:Statement .\n ?stmt 
belvoc:hasRelationship ?rel .\n ?stmt belvoc:hasSubject ?subject .\n ?stmt belvoc:hasObject ?target .\n ?subject a belvoc:AbundanceActivity .\n ?subject belvoc:hasActivityType belvoc:Transcription .\n ?subject belvoc:hasChild ?tf .\n ?tf a belvoc:ProteinAbundance .\n ?tf belvoc:hasConcept ?tfName .\n ?target a belvoc:RNAAbundance .\n ?target belvoc:hasConcept ?targetName .\n }\n \"\"\"\n q_tscript2 = prefixes + \"\"\"\n SELECT ?tfName ?targetName ?stmt ?tf ?target ?rel\n WHERE {\n ?stmt a belvoc:Statement .\n ?stmt belvoc:hasRelationship ?rel .\n ?stmt belvoc:hasSubject ?tf .\n ?stmt belvoc:hasObject ?target .\n ?tf a belvoc:ProteinAbundance .\n ?tf belvoc:hasConcept ?tfName .\n ?target a belvoc:RNAAbundance .\n ?target belvoc:hasConcept ?targetName .\n }\n \"\"\"\n q_tscript3 = prefixes + \"\"\"\n SELECT ?tfName ?targetName ?stmt ?tf ?target ?rel ?mod ?pos\n WHERE {\n ?stmt a belvoc:Statement .\n ?stmt belvoc:hasRelationship ?rel .\n ?stmt belvoc:hasSubject ?subject .\n ?stmt belvoc:hasObject ?target .\n ?subject a belvoc:ModifiedProteinAbundance .\n ?subject belvoc:hasModificationType ?mod .\n ?subject belvoc:hasChild ?tf .\n ?tf belvoc:hasConcept ?tfName .\n ?target a belvoc:RNAAbundance .\n ?target belvoc:hasConcept ?targetName .\n OPTIONAL { ?subject belvoc:hasModificationPosition ?pos . }\n }\n \"\"\"\n for q_tscript in (q_tscript1, q_tscript2, q_tscript3):\n res_tscript = self.g.query(q_tscript)\n for stmt in res_tscript:\n # Get modifications on the subject, if any\n if q_tscript == q_tscript1:\n tf = self._get_agent(stmt[0], stmt[3])\n tf.activity = ActivityCondition('transcription', True)\n elif q_tscript == q_tscript3:\n mod = term_from_uri(stmt[6])\n mod_pos = term_from_uri(stmt[7])\n mc = self._get_mod_condition(mod, mod_pos)\n if mc is None:\n continue\n tf = self._get_agent(stmt[0], stmt[3])\n tf.mods = mods=[mc]\n else:\n tf = self._get_agent(stmt[0], stmt[3])\n # Parse out the elements of the query\n evidence = self._get_evidence(stmt[2])\n target = self._get_agent(stmt[1], stmt[4])\n stmt_str = strip_statement(stmt[2])\n # Get the relationship (increases/decreases, etc.)\n rel = term_from_uri(stmt[5])\n if rel == 'DirectlyIncreases' or rel == 'DirectlyDecreases':\n is_direct = True\n else:\n is_direct = False\n # Build the INDRA statement\n stmt = None\n if rel == 'DirectlyIncreases' or rel == 'Increases':\n stmt = IncreaseAmount(tf, target, evidence)\n elif rel == 'DirectlyDecreases' or rel == 'Decreases':\n stmt = DecreaseAmount(tf, target, evidence)\n # If we've matched a pattern, mark this as a converted statement\n if stmt is not None:\n if is_direct:\n self.statements.append(stmt)\n self.converted_direct_stmts.append(stmt_str)\n else:\n self.indirect_stmts.append(stmt)\n self.converted_indirect_stmts.append(stmt_str)", "def test_empty_list(self):\n response = self.client.get(self.api_link)\n self.assertEqual(response.status_code, 200)\n\n response_json = response.json()\n self.assertEqual(response_json['count'], 0)", "def is_empty(self):\n return not list(self._filtered_items)", "def showTranslatedWithoutJoin(cls):\n print (\"ALL WORDS WITH TRANSLATIONS STORED IN DATABASE:\")\n for word1 in EnglishHelper.query(\"SELECT english_word FROM EnglishWords\", fetchAll=True):\n try:\n print word1[0],\" - \", (EnglishHelper.query(\"select polish_word from PolishWords where \"\n \" id_pl=(select id_pl from translations where \"\n \"id_eng = (select id_eng from EnglishWords \"\n \"where english_word = '%s'))\"%word1))[0].encode('utf-8')\n except:\n print \"There is no 
translation, sorry :(\"", "def get_all_data(file, list): \n list = []\n with open(file, \"r\") as list:\n list = [row for row in list if len(row.strip()) > 0]\n return list", "def test_no_key_words(self):\n for msg_test in MSG_TEST_NO_RESULT:\n result = self.parser.msg_analysis(msg_test)\n assert len(result) == 0", "def test_with_empty_list(self):\n self.assertEqual(humanize_list([]),\n '')", "def _clean_list(self, items):\n itemlist = list(filter(None, items))\n if len(itemlist) < 3:\n itemlist.append(\"\")\n return itemlist\n\n return itemlist", "def test_udp_no_records():\n assert dnsck_query(\"8.8.8.8\", \"test.google.com\", \"A\", 1) == 0", "def all():\n\tsub = db.session.query(Target).all()\n\tif sub :\n\t\tfor row in sub:\n\t\t\tlogger.log('INFO',f\"{row.created_date} {row.subdomain}\")\n\t\tlogger.log(\"STATS\", f'{len(sub)} bulks! $_$')\n\telse:\n\t\tlogger.log('WARNING',f'[-] Database is empty')", "def test_empty(self):\n\n tokens = list(Lexer(\"\").generate_tokens())\n self.assertEqual(tokens, [])", "def test_reading_empty_strings_for_different_types(self):\n self.prepare()\n self.session.execute(\"\"\"\n CREATE TABLE test_many_empty_strings (\n a text,\n b text,\n c text,\n d text,\n o uuid,\n i1 bigint,\n i2 bigint,\n t text,\n i3 bigint,\n PRIMARY KEY ((a, b, c, d), o)\n )\"\"\")\n\n tempfile = self.get_temp_file()\n with open(tempfile.name, 'w') as f:\n f.write(',,,a1,645e7d3c-aef7-4e3c-b834-24b792cf2e55,,,,r1\\n')\n\n def _test(prepared_statements):\n logger.debug('Importing from csv file: {name}'.format(name=tempfile.name))\n cmds = \"COPY ks.test_many_empty_strings FROM '{}' WITH NULL='-' AND PREPAREDSTATEMENTS = {}\"\\\n .format(tempfile.name, prepared_statements)\n self.run_cqlsh(cmds=cmds)\n\n out, err, _ = self.run_cqlsh(cmds=\"SELECT * FROM ks.test_many_empty_strings\")\n res = self.parse_cqlsh_query(out=out, num_cols=9)\n\n self.assertCsvResultEqual(tempfile.name, res, 'test_many_empty_strings')\n\n _test(True)\n _test(False)", "def test_drop_empty_tokens():\n assert TextCleaner().transform([[[\",;\", \"hi\"]]])[\"corpus\"][0] == [\"hi\"]", "def test_get_all_unassociated_single_track_already_associated(self):\n track = Track(artist='Artist', album='Album',\n title='Title', album_id=1)\n track.insert(self.app.db, self.app.curs,\n 'xmms', datetime.datetime.now())\n self.assertEqual(self.get_track_count(), 1)\n tracks = Track.get_all_unassociated(self.app.curs)\n self.assertEqual(len(tracks), 0)", "def filter_log(self, items):\n results = []\n for item in items:\n index = item.find(self.PREFIX)\n if index == -1:\n continue\n results.append(item[index + len(self.PREFIX): -1])\n results.append(u'') # to match the new line at the end of the data file\n return results", "def test_get_all_need_transform_no_albums(self):\n self.assertEqual(Album.get_all_need_transform(self.app.curs, 1), [])", "def scrub_db_list(self, db_list):\n clean_list = []\n for item in db_list:\n clean_list.append(item[0])\n return clean_list", "def read_transcription_file(file_path, audio_file_path):\n with open(file_path) as in_file:\n last_timestamp = 0\n res = []\n transcription = \"\"\n for line in in_file:\n time_stamp_match = re.match(\"\\[([0-9\\]+\\.[0-9]+)\\]\", line)\n #if this regex matched then the line is a timestamp\n if time_stamp_match:\n timestamp = float(time_stamp_match.group(1))\n if transcription and transcription.strip() not in ['(())', \"<no-speech>\"]:\n single_instance = {\"start_time\": last_timestamp, \n \"end_time\": timestamp,\n \"transcription\": transcription,\n 
\"audio_file\" : audio_file_path}\n res.append(single_instance)\n last_timestamp = timestamp\n else:\n last_timestamp = timestamp # this handles silence at beginning\n else:\n transcription = line.strip()\n \n return res", "def nooutput(results):\n for _row in results:\n pass", "def empty(self, name, condition=None):\n empty = []\n if not isinstance(name, list): name = [name]\n return_bool = len(name) == 1\n if condition:\n df = pd.DataFrame(self[self.take(condition), name])\n else:\n df = self._data\n for n in name:\n if df[n].count() == 0:\n empty.append(n)\n if return_bool:\n return bool(empty)\n else:\n return empty", "def getNoeuds(self) -> list:\n return self._noeuds", "def is_empty(self):\n if len(self.messages) < 1:\n return True\n else:\n return False", "def is_empty(self):\n if self.items:\n return 'not empty!'\n return 'empty!'", "def get_queryset(self):\n samples = AudioSample.objects.distinct()\n if samples:\n return samples.filter(\n pub_date__lte=timezone.now()\n ).order_by('-pub_date')\n else:\n return []", "def test_convert_dicts_to_teradata_rows_returns_empty_list():\n data = []\n output = row_handling.convert_dicts_to_teradata_rows(data)\n assert output == []", "def test_tabledata_none(self):\n self.check_response(\n '/attributes/tabledata.json',\n (r'{\"rows\": [], \"total\": 0}'))", "def clean(self, value):\n speakers = speaker_models.Speaker.objects.filter(pk__in=value)\n if len(speakers) != len(value):\n raise ValidationError(self.error_messages['invalid_choice'] % value)\n return speakers", "def _filter_empty(lst):\n return [cell for cell in lst if cell is not Sudoku.EMPTY_CELL]", "def test_get_list_empty(self):\r\n result = self.get_json(self.LIST_URI)\r\n self.assertEqual(result[\"count\"], 0)\r\n self.assertIsNone(result[\"next\"])\r\n self.assertIsNone(result[\"previous\"])\r\n self.assertEqual(result[\"results\"], [])", "def test_table_has_no_rows(self):\n models.Study.objects.all().delete()\n response = self.client.get(self.get_url())\n context = response.context\n table = context['study_table']\n self.assertEqual(len(table.rows), 0)", "def test_no_ngrams():\n tokenizer = Tokenizer(quadgram_freq=2)\n X = tokenizer.transform([[\"a b c d\"]])\n assert X[\"corpus\"][0] == [\"a\", \"b\", \"c\", \"d\"]\n assert tokenizer.quadgrams is None", "def response_texts_to_entries(texts: list):\n entries = []\n\n # Remove empty text entries.\n for i in range(len(texts)):\n if texts[i] == \"\":\n texts.pop(i)\n\n for text in texts:\n # Split string into a list.\n properties = text.split(',')\n identifier = properties[0]\n date = properties[1]\n url = properties[2]\n filename = properties[3]\n views = properties[4]\n unknown = properties[5]\n\n if identifier in DELETED_ENTRIES_IDS:\n log.warning(\"SKIPPING already deleted entry {} from unsanitary API response! 
\"\n \"The API is untrustworthy, this is sadly expected.\".format(identifier))\n\n continue\n\n entries.append(PuushEntry(identifier, date, url, filename, views, unknown))\n\n return entries", "def get_and_delete_messages (self):\n return []", "def get_and_delete_messages (self):\n return []", "def test_no_skill_aggregate(self):\n actions.login(ADMIN_EMAIL)\n\n get_url = '%s?%s' % (self.URL, urllib.urlencode({\n 'ids': [1]}, True))\n response = self.get(get_url)\n self.assertEqual(200, response.status_int)\n payload = transforms.loads(response.body)['payload']\n result = transforms.loads(payload)\n\n self.assertEqual(['Date'], result['column_headers'])\n self.assertEqual([], result['data'])", "def get_existing_taxonomy() -> List[List[Any]]:\n output = []\n with DBWith() as dbService:\n stmt = \"SELECT id, name, vocabulary, parent_id FROM taxonomy\"\n with closing(dbService.cursor(dictionary=True)) as c:\n c.execute(stmt)\n for item in c:\n sleep(0.000001) # To avoid Mysql.Connector error\n output.append([item[\"id\"], item[\"name\"], item[\"vocabulary\"], item[\"parent_id\"]])\n return output", "def get_all_podcasts():\r\n return [Podcast.podcast_json(podcast) for podcast in Podcast.query.all()]", "def dummy_add_transcript():\n return {\n \"message\": \"AddTranscript\",\n \"format\": \"2.1\",\n \"metadata\": {\n \"start_time\": 0.0, \"end_time\": 2.0, \"transcript\": \"Foo\\nBar.\"},\n \"results\": [\n {\n \"type\": \"word\",\n \"start_time\": 0.0,\n \"end_time\": 1.0,\n \"alternatives\": [\n {\"content\": \"foo\", \"confidence\": 1.0, \"language\": \"en\"},\n ],\n },\n {\n \"type\": \"speaker_change\",\n \"start_time\": 1.0,\n \"end_time\": 1.0,\n \"score\": 0.8,\n },\n {\n \"type\": \"word\",\n \"start_time\": 1.0,\n \"end_time\": 2.0,\n \"alternatives\": [\n {\"content\": \"bar\", \"confidence\": 1.0, \"language\": \"en\"},\n ],\n },\n {\n \"type\": \"punctuation\",\n \"start_time\": 2.0,\n \"end_time\": 2.0,\n \"alternatives\": [{\"content\": \".\", \"confidence\": 1.0}],\n },\n ],\n }", "def test_no_results(self):\n self.mocked_cursor.description = None\n\n db = database.Database()\n result = db.execute(sql=\"SELECT * from FOO WHERE bar LIKE 'baz'\")\n self.assertTrue(isinstance(result, list))", "def is_empty(self):\n return self.count == 0", "def is_empty(self):\n return self.count == 0", "def test_get_all_need_transform_no_albums_matched(self):\n orig_album = Album(artist='Artist', album='Album',\n totaltracks=1, totalseconds=120, last_transform=1)\n pk = orig_album.insert(self.app.db, self.app.curs)\n self.assertEqual(self.get_album_count(), 1)\n\n self.assertEqual(Album.get_all_need_transform(self.app.curs, 1), [])", "def test_transactions_list_no_args(self):\n\n transactions = self.client.transactions.list()\n\n self.assertGreaterEqual(len(transactions), 1, 'No transactions found')", "def test_table_has_no_rows(self):\n models.SourceTrait.objects.all().delete()\n response = self.client.get(self.get_url(self.study.pk))\n context = response.context\n table = context['source_trait_table']\n self.assertEqual(len(table.rows), 0)", "def db_select_unpublished(self):\n \n query = \"SELECT * FROM %s WHERE doi IS NULL\" % PUBLICATIONS_TABLE\n with self.connection:\n c = self.connection.cursor()\n c.execute(query)\n result = c.fetchall()\n \n paths = []\n for r in result:\n paths.append(str(r[\"path\"]))\n return paths", "def is_empty(self):\n return self.list.length == 0", "def is_empty(self):\n return self.list.length == 0", "def test_table_has_no_rows(self):\n 
models.SourceDataset.objects.all().delete()\n response = self.client.get(self.get_url(self.study.pk))\n context = response.context\n table = context['source_dataset_table']\n self.assertEqual(len(table.rows), 0)" ]
[ "0.5676291", "0.5263004", "0.5223297", "0.52066535", "0.5155342", "0.51544493", "0.51198375", "0.50936806", "0.50230044", "0.49982882", "0.49966055", "0.49849313", "0.4925892", "0.48985046", "0.48830864", "0.48502585", "0.48242763", "0.48231605", "0.48141956", "0.4793201", "0.47868186", "0.4778326", "0.47765735", "0.4775492", "0.4756699", "0.4756699", "0.4752159", "0.47504467", "0.4750284", "0.4746151", "0.4733577", "0.4733417", "0.472572", "0.4718979", "0.47141922", "0.4711387", "0.4710687", "0.47086775", "0.47080362", "0.47040835", "0.46933046", "0.4691405", "0.46880245", "0.46878135", "0.46874368", "0.4674938", "0.46651942", "0.46455795", "0.46446738", "0.46408242", "0.46366075", "0.4636389", "0.46339914", "0.4633184", "0.46290874", "0.4628598", "0.4627561", "0.46180326", "0.4614149", "0.4609722", "0.46005693", "0.45997775", "0.45940563", "0.4590338", "0.4586137", "0.45851323", "0.45847267", "0.45830736", "0.45807052", "0.45784366", "0.45768356", "0.45766598", "0.45723405", "0.45717886", "0.45690975", "0.45641202", "0.4563393", "0.45618272", "0.45602426", "0.45567566", "0.45566082", "0.45537785", "0.45524648", "0.45520794", "0.45501447", "0.45501447", "0.4549258", "0.45478675", "0.4540496", "0.45367947", "0.45290568", "0.45217225", "0.45217225", "0.45173454", "0.4516974", "0.450863", "0.4508469", "0.45077905", "0.45077905", "0.450536" ]
0.6268205
0
This is to be used when both the podcasts folder and transcripts folder are empty.\n For every entry in the database that has an empty transcript and a pending flag set to true, change the pending flag to false. Honestly this is used to deal with a weird bug and should be run every now and then
def refreshDatabase(dbConnection): try: cursor = dbConnection.cursor() cursor.execute("UPDATE transcriptions SET pending = FALSE WHERE COALESCE(transcription, '') = '';") dbConnection.commit() cursor.close() except Exception as e: Tools.writeException("refreshDatabase", e)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def checkPre(dbConnection):\n cursor = dbConnection.cursor()\n cursor.execute(\"SELECT audiourl, T.id, podcastName, source FROM transcriptions AS T JOIN podcasts as P ON P.name = T.podcastname WHERE COALESCE(T.transcription, '') = '' AND pending = FALSE LIMIT 1;\")\n entry = cursor.fetchone()\n cursor.close()\n return entry", "def mark_no_changes(self):", "def reset_continued(self): \n self._recent_goal_continued = False\n self._update_action = False\n self._update_action_without_pause = False", "def check_indicator_files(tasks):\n\n for task in tasks:\n if task[\"status\"]==\"unknown\":\n if os.path.exists(task[\"result\"]):\n task[\"status\"]=\"previously completed\"\n else:\n task[\"status\"]=\"to do\"\n return", "def update_bool(file_path):\n with open(\n file_path, 'r'\n ) as the_result_file_from_spark_for_read_and_abbr_not_allowed_by_pylint:\n content = the_result_file_from_spark_for_read_and_abbr_not_allowed_by_pylint.read(\n )\n update = content.replace('true', 'True').replace('false', 'False')\n with open(\n file_path,\n 'w') as the_result_file_from_spark_for_write_and_abbr_not_allowed:\n the_result_file_from_spark_for_write_and_abbr_not_allowed.write(update)", "def prepare_translation_list_update():\n\n with _conn.cursor() as cur:\n cur.execute(\"ALTER TABLE translation_stats DISABLE TRIGGER update_translation_stats_timestamp;\"\n \"UPDATE translation_stats SET available = FALSE;\")\n _conn.commit()", "def on_false(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]:", "async def clean_status(self):\n async with self._mongo.create_session() as session:\n await self._mongo.status.find_one_and_update(\n {\"_id\": \"hmm\"},\n {\"$set\": {\"installed\": None, \"task\": None, \"updates\": []}},\n session=session,\n )", "def _replace_boolean(data):\n output_data = deepcopy(data)\n for row in output_data:\n for i, element in enumerate(row):\n if element is True:\n element = 'P'\n elif element is False:\n element = 'N'\n row[i] = element\n return output_data", "def _pre_sync(self):", "def non_std_update():\n db = database.Database()\n conn = db.get_connection()\n select_statement = (\"SELECT id, postagged FROM songs WHERE language IS 'en' AND non_std_words IS NULL;\")\n update_statement = (\"UPDATE songs SET non_std_words = ? WHERE id = ?;\")\n cur = conn.cursor()\n cur.execute(select_statement)\n iterator = cur.fetchall()\n work = [(sqlid, ast.literal_eval(el)) for sqlid, el in iterator]\n statements = non_std_words(work)\n conn.executemany(update_statement, statements)\n conn.commit()\n conn.close()\n\n return", "def exempt_feed_retroactive(db, c, feed_uid, **kwargs):\n c.execute(\"\"\"update fm_items\n set item_rating=0, item_rule_uid=NULL\n where item_feed_uid=? 
and item_content!='' and exists (\n select rule_uid from fm_rules\n where rule_feed_uid is null and item_rule_uid=rule_uid\n )\"\"\", [feed_uid])", "def update_autorun_list(self):\n #pylint:disable=bad-builtin\n map(self.proglist.delete, self.proglist.get_children())\n for p in self.progs:\n self.proglist.insert('', 'end', text=p, values=(os.path.basename(os.path.dirname(p)),\n 'Yes' if p in self.lnp.autorun else 'No'))", "def dummy_update_subtask_status(entry_id, _current_task_id, new_subtask_status):\r\n bogus_task_id = \"this-is-bogus\"\r\n update_subtask_status(entry_id, bogus_task_id, new_subtask_status)", "def noncommand(bot, update):\n msg = \"I only answer to the command */nothingtodo [list;of;subreddits]*\"\n update.message.reply_text(msg, parse_mode='Markdown')", "def do_dry_run(self):\n self.loggit.info('DRY-RUN MODE. No changes will be made.')\n msg = f'DRY-RUN: Update cluster routing transient settings: {self.settings}'\n self.loggit.info(msg)", "def set_is_sent_1(database, vacancies_table, vacancy_id):\r\n in_tests.test_database_name(database)\r\n in_tests.test_table_name(vacancies_table)\r\n in_tests.test_var_type(vacancy_id, \"vacancy_id\", int)\r\n\r\n current_time = datetime.datetime.now().astimezone().replace(\r\n microsecond=0, tzinfo=None).isoformat()\r\n print (\r\n f\" [{current_time}] Set `is_sent`=1 in vacancy id={vacancy_id}...\")\r\n\r\n connection = sqlite3.connect(database)\r\n cursor = connection.cursor()\r\n counter = 1\r\n query = \\\r\nf\"UPDATE {vacancies_table} SET is_sent = 1 WHERE id = {vacancy_id}\"\r\n cursor.execute(query)\r\n connection.commit()\r\n database_changes = connection.total_changes\r\n cursor.close()\r\n connection.close()\r\n out_tests.test_write_to_database(database_changes, counter)\r\n return (database_changes)", "def recordNotOk(root, transcript):\n no = root.find('not_ok')\n addOne(no, 'transcripts')\n\n # flatten the annotations so that there are no duplicates\n flattenedTree = flattenAnnotations(transcript.annotations)\n flatTree = flattenedTree\n depthFirstAddOne(flatTree, no, isRoot=True)\n\n # walk the full annotation set as it appears\n for ta in transcript.annotations:\n addOne(no, 'transcript_annotations')\n prev = no\n for label in ta.labels:\n label = cleanLabel(label)\n e = prev.find(label)\n if e is None:\n raise RuntimeError('Unanticipated tag discovered %s:%s'\n % (prev.tag, label))\n addOne(e, 'transcript_annotations')\n prev = e", "def test_that_on_update_commands_dont_get_rerun(tmpdir):\n test_yml = {\n \"hash_dir\": str(tmpdir),\n \"update_on_change\": {\n \"monkey.txt\": \"echo -ne '.' >> onedot.txt\"\n }\n }\n with batman_dir(test_yml) as tmp_batman_dir:\n touch_file(os.path.join(tmp_batman_dir, 'monkey.txt'))\n os.system('batman {0}'.format(tmp_batman_dir))\n test_yml['update_on_change']['walrus.txt'] = 'touch bucket.txt'\n update_batman_yml(tmp_batman_dir, test_yml)\n os.system('batman {0}'.format(tmp_batman_dir))\n assert run('cat onedot.txt', in_dir=tmp_batman_dir).output == '.'", "def resetScript(dbConnection, maxConcurrent):\n while (Tools.numRunningProcesses() != 0): # wait for the transcriptions to end. 
Pings every 2 mins\n time.sleep(120)\n emptyPodcastFolder = Tools.cleanupFolder(\"podcasts\")\n DatabaseInteract.refreshDatabase(dbConnection)", "def cleanup_callback(self, job: Job):\n from projects.models.files import File\n\n File.objects.filter(project=self, current=True).update(current=False)", "def set_task_not_started(self):\n\n tasks = self._get_all_tasks()\n\n task_id = tasks[self.tasks_view.currentRow()].Id\n\n self.tasks_flow.set_status(task_id, 2)\n\n # Refresh the table\n self.write_tasks_table()", "def prepare_for_commit(self):", "def syncfolder():", "def bool_to_status(self):\n for movie in self.movies:\n if movie.is_watched:\n movie.is_watched = WATCHED\n else:\n movie.is_watched = UNWATCHED", "def can_dry_run(self):\r\n return False", "def __look_for_missing_pseudotext_info(self, force_update=False):\n logging.debug('Starting method that looks for a missing pseudo-text info')\n counter = 0\n max_vids_to_process = self.num_vids_to_use\n logging.info('Examining ' + str(max_vids_to_process) + ' records.')\n list_vids_no_pt_data = []\n percent_tracker = PercentTracker(max_vids_to_process, int_output_every_x_percent=10)\n for vid_id in self.transcripts_ds:\n execution_should_continue = self.var_mgr.var_retrieve(my_globals.str_execution_may_go_on)\n if (not execution_should_continue) or (counter >= max_vids_to_process):\n break\n\n need_to_append_to_list = False\n if force_update:\n need_to_append_to_list = True\n if not need_to_append_to_list:\n # if we already found that the video should be appended to the list,\n # then there is no need for further checks. But if NOT, then\n # the following should still be performed\n transcript = Transcript(vid_id)\n transcript.set_transcript_directory(self.str_path_to_transcripts_files)\n transcript.load_transcript_object_from_dictionary(self.transcripts_ds.fetch_data(vid_id))\n has_pseudotranscript_data = transcript.is_pseudotranscript_filename_populated()\n if not has_pseudotranscript_data:\n # we are here if the video has a transcript (it exists in the transcripts SimpleDS),\n # but the field for the filename of the TranscriptAnalysis file has never been populated.\n need_to_append_to_list = True\n\n if need_to_append_to_list:\n list_vids_no_pt_data.append(vid_id)\n counter += 1\n percent_tracker.update_progress(counter,\n str_description_to_include_in_logging='Finding missing pseudotext files.')\n return list_vids_no_pt_data", "def flag_all_commit(self):\n\t\tfor k in self.data.keys():\n\t\t\tindex = 0\n\t\t\tfor item in self[k]:\n\t\t\t\tself.data[k][index]['meta']['needs_commit'] = True\n\t\t\t\tindex += 1", "def can_mark_as_undone(self):\n if (not self.archived) and self.event_store.done:\n return True\n return False", "def resetFlags():\r\n for flag in flags:\r\n flags[flag] = False", "def IgnorePersistedDecision(self) -> bool:", "def _update():\n\tquery = myTaskSession.query(WorkToolkitDB.db.Task)\n\n\tIDStr = myOpt.id\n\tIDs = re.split('\\s*,\\s*', IDStr)\n\n\tif len(IDs) == 0:\n\t\tprint('ERR: no add task input')\n\t\treturn 1\n\n\t#set default finsih_status if not given\n\tif not myOpt.f:\n\t\tmyOpt.f = 1\n\n\tfor ID in IDs:\n\t\tquery.filter(WorkToolkitDB.db.Task.id == ID).update({WorkToolkitDB.db.Task.finish_status: myOpt.f})\n\n\t\tif myOpt.vt:\n\t\t\tquery.filter(WorkToolkitDB.db.Task.id == ID).update({WorkToolkitDB.db.Task.version_time: myOpt.vt})\n\n\t#commit\n\tmyTaskSession.commit()\n\n\t\"\"\"\n\t#ERR: not given itsm id for update \n\tif not myOpt.id:\n\t\tprint('Error: no itsm id given for update finish_status to 
1')\n\t\treturn 1\n\t#set default finsih_status if not given\n\tif not myOpt.f:\n\t\tmyOpt.f = 1\n\n\t\n\tquery.filter(WorkToolkitDB.db.Task.id == myOpt.id).update({'finish_status': myOpt.f})\n\tmyTaskSession.commit()\n\n\t\n\tdata = query.filter(WorkToolkitDB.db.Task.id == myOpt.id).all()\n\tfor record in data:\n\t\t\t#record_arr = record.to_array()\n\t\t\tpt.add_row(record.to_array())\n\n\tprint(pt)\n\t\"\"\"\n\n\treturn 0", "def update_global_file_directory():\n try:\n query_id = random.randint(0,sys.maxsize)\n now_time = \"\".join(str(datetime.datetime.now()).split(\" \"))\n waiting_query_ids.append([query_id,now_time])\n\n for i in range(len(STRONG_PEERS)):\n if i != STRONG_PEER_ID:\n passing_message(i, f\"TIME:{now_time} QUERY_ID:{query_id} FROM:{STRONG_PEER_ID} TO:{i} QUERY:file_list DATA:{json.dumps(local_peer_files)}\") \n except Error as e:\n print(e)", "def already_processed(self):\n # If the flag file has been created by a previous run\n # or if any of the rules have already been re-ordered\n # then we shouldn't make any more changes and instead\n # the system needs to be rebooted.\n return self.syspaths.flag_exists", "def test_python_bool(self):\n\n m = Mothur(**self.init_vars)\n self.set_current_dirs(m)\n m.pcr.seqs(fasta='test_fasta_1.fasta', start=20, keepdots=False)\n m.pcr.seqs(fasta='test_fasta_1.fasta', start=20, keepdots=True)\n\n return", "def ignore(self):\n self._ignore_transids = True", "def pull_up(self):\n return False", "def _abort_on_pending_changes(self) -> None:\n if set(self._dirty_paths_by_status) - {StatusCode.Untracked}:\n raise ActionFailure(\n \"Found pending changes in tracked files. Diff-aware runs require a clean git state.\"\n )", "def test_statusNotClean(self):\n reposDir = self.makeRepository(self.tmpDir)\n reposDir.child(\"some-file\").setContent(b\"something\")\n self.assertFalse(self.createCommand.isStatusClean(reposDir))", "def akf_db_updater(file,dbPath):\n file = file.replace(\"\\\\\", \"/\")\n #Condition\n #if \"0704\" not in file: return\n\n\n print(\"Start SQLTalk\")\n print(file)\n with open(file, 'r', encoding=\"utf-8\") as f:\n new_data = json.load(f, cls=NoneRemover)\n\n # Generate a compare object\n new_data['debug'] = False\n if new_data['debug']:\n new_data['compare'] = deepcopy(new_data)\n del_entry(new_data['compare'], [], ['_fulltext', 'debug'])\n else:\n new_data['compare'] = {\"debug\": False}\n\n # Generate unternehmenId\n new_data.update({'unternehmenId': file.split(\"/\")[-2].replace(\"-\",\".\") + \".\" + file.split(\"/\")[-1][:4]})\n\n # Generate Year\n new_data.update({'year': file.split(\"/\")[-2]})\n\n db_akf = dbPath\n engine = create_engine(db_akf)\n conn = engine.connect()\n\n # Create a MetaData instance\n metadata = MetaData(engine, reflect=True)\n\n # Check if entry already exists\n #s = select([metadata.tables['Unternehmen']]).where(\n # metadata.tables['Unternehmen'].c.unternehmenId == new_data['unternehmenId'])\n #result = conn.execute(s)\n #if len(result.fetchall()) > 0: print(\"Entry already exists!\");conn.close(); return 0;\n\n new_data[\"shareinfo\"] = stck_stimmrecht(new_data[\"_fulltext\"])\n #for shareinfo in new_data[\"shareinfo\"]:\n # print(shareinfo)\n\n for si in new_data[\"shareinfo\"]:\n if si[\"wkn\"]+si[\"isin\"] != \"\":\n for awe in new_data[\"all_wkn_entry\"]:\n if len(awe.keys())<4:\n for key in [\"type\",\"wkn\",\"isin\",\"nw\"]:\n if not awe.get(key,False):\n awe[key] = \"\"\n if si[\"wkn\"] == awe[\"wkn\"] and si[\"wkn\"] != \"\":\n break\n if si[\"isin\"] == awe[\"isin\"] and 
si[\"isin\"] != \"\":\n break\n else:\n new_data[\"all_wkn_entry\"].append(\n {\"type\":si.get(\"type\",\"\"),\n \"wkn\":si.get(\"wkn\",\"\"),\n \"isin\":si.get(\"isin\",\"\"),\n \"nw\":\"\"}\n )\n #return\n\n # Check if a universal ID already exists\n get_uid(new_data, metadata, conn)\n\n # Update all_wkn_entry\n #update_all_wkn(new_data)\n\n # Get shareinfo for later use\n #get_shareinfo(new_data)\n\n \"\"\"\n with open(\"stimmrecht.txt\",\"a\") as stfile:\n for entry in new_data[\"shareinfo\"]:\n stfile.write(entry[\"voice\"]+\"\\n\")\n with open(\"stuckelung.txt\",\"a\") as stfile:\n for entry in new_data[\"shareinfo\"]:\n stfile.write(entry[\"number\"]+\"\\n\")\n return\n \"\"\"\n # Start writing in the table\n print(\"TABLES\")\n options = {\n 'Aktienkurse': Aktienkursetable,\n 'Aktionaer': Aktionaertable,\n 'Anleihen': Anleihentable,\n 'Aufsichtsrat': Aufsichtsrattable,\n 'Beschaeftigte': Beschaeftigtetable,\n 'Beteiligungen': Beteiligungentable,\n 'BilanzAktiva': BilanzAktivatable,\n 'BilanzPassiva': BilanzPassivatable,\n 'BilanzSumme': BilanzSummetable,\n 'Boersennotiz': Boersennotiztable,\n 'Dependence': Dependencetable,\n 'Dividenden': Dividendentable,\n 'Geschaeftsjahr': Geschaeftsjahrtable,\n 'Geschaeftsleitung': Geschaeftsleitungtable,\n 'Grundkapital': Grundkapitaltable,\n 'GuV': GuVtable,\n 'Kapitalart': Kapitalarttable,\n 'Kapitalentwicklung': Kapitalentwicklungtable,\n 'Kennzahlen': Kennzahlentable,\n 'Main': Maintable,\n 'MainRelation': MainRelationtable,\n 'Organbezuege': Organbezuegetable,\n 'Stimmrecht': Stimmrechttable,\n 'Stueckelung': Stueckelungtable,\n 'Unternehmen': Unternehmentable,\n 'Volume': Volumetable,\n 'Vorstand': Vorstandtable,\n 'WKN': WKNtable,\n 'WeitereBemerkungen': WeitereBemerkungentable,\n }\n for name in metadata.tables:\n if name in ['Dependence','Volume']: continue;\n print(name)\n options[name](conn, new_data, metadata.tables[name])\n conn.close()\n engine.dispose()\n if new_data['debug']:\n TEMP = tempfile.gettempdir()\n create_dir(TEMP + \"/SQLDBTalk/\")\n with open(TEMP + \"/SQLDBTalk/\" + os.path.basename(file), 'w', encoding=\"utf-8\") as f:\n json.dump(new_data['compare'], f, indent=4)\n print(\"Wrote File: \\n\" + os.path.normcase(TEMP + \"/SQLDBTalk/\" + os.path.basename(file)))\n print(\"FINISHED!\")\n return 0", "def update_is_streamed_json(self, index):\n time_now = datetime.datetime.now()\n read_path = self.base_path + time_now.strftime('%Y-%m-%d') + '.json'\n try:\n json_file = open(read_path, 'r')\n data = json.load(json_file)\n json_file.close()\n\n data[index]['being_streamed'] = True\n\n json_file = open(read_path, 'w+')\n json_file.write(json.dumps(data))\n json_file.close()\n \n except IOError:\n print 'File not found'", "def test_migrate_empty_folder_to_document(self):\n output = migrateContents(self.portal, \"Folder\", \"Document\")\n self.assertEqual(output.get('counter', 0), 2)\n self.assertEqual(output.get('error', []), [])\n self.assertTrue(self.portal.portal_catalog(portal_type=\"Document\").actual_result_count == 12)\n self.assertTrue(self.portal.portal_catalog(portal_type=\"Folder\").actual_result_count == 0)", "def preprocess_db(new_subjects_only=False, preprocess_everything=False, sub_id=None):\n try:\n if sub_id is None:\n subjects_db = sql.get_subjects().fetchall()\n else:\n subjects_db = sql.get_subjects(sub_id).fetchall()\n if len(subjects_db) == 0:\n raise SQLError(\"No subject with sub_id \" + str(sub_id))\n subjects_preprocessed = []\n\n try:\n with open('version', 'r') as f:\n if f.readline() != 
str(version): # outdated version\n print(\"Serialization version has changed to \" + str(version) + \", preprocessing everything\",\n file=sys.stderr)\n preprocess_everything = True\n except FileNotFoundError:\n print(\"No existing version file found, preprocessing everything\", file=sys.stderr)\n preprocess_everything = True\n\n if not preprocess_everything:\n for root, dirs, files in os.walk('./preprocessed'):\n for file_name in files:\n subjects_preprocessed.append(file_name)\n\n for sub_id, sub_name in subjects_db:\n path = \"preprocessed/\" + str(sub_id)\n if preprocess_everything or str(sub_id) not in subjects_preprocessed: # if new subject not yet preprocessed\n QuestionSet.from_db(sub_id).serialize(path)\n print(\"New subject found with id \" + str(sub_id) + \", preprocessing subject\", file=sys.stderr)\n elif not new_subjects_only: # check for modified/new/deleted entries in db\n preprocessed = QuestionSet.deserialize(path)\n changes_made = False\n qs = set(preprocessed.questions)\n for question, answer in sql.get_questions(sub_id):\n for matched in qs:\n if matched.question == question.strip():\n if matched.answer != answer.strip():\n matched.answer = answer.strip()\n changes_made = True\n qs.remove(matched)\n break\n else: # no match, new question\n preprocessed.questions.append(Question(question, answer))\n changes_made = True\n for stored in qs: # any questions left are ones that have been removed from the database\n preprocessed.questions.remove(stored)\n changes_made = True\n if changes_made:\n preprocessed.serialize(path)\n print(\"Changes made to subject \" + str(sub_id) + \" (\" + sub_name + \"), reserializing\",\n file=sys.stderr)\n if preprocess_everything:\n with open('version', 'w') as f:\n f.write(str(version))\n except NameError:\n print(\"Database was never started successfully; cannot preprocess questions\", file=sys.stderr)\n raise SQLError(\"Database was never started successfully; cannot preprocess questions\")\n except OperationalError:\n print(\"Couldn't connect to database; cannot preprocess questions\", file=sys.stderr)\n raise SQLError(\"Couldn't connect to database; cannot preprocess questions\")", "def mark_as_undone(self):\n if self.can_mark_as_undone():\n return self.__set_completion_status(False)\n return False", "def test_partial_update_metadata(self):\n pass", "def runAutoCheck(dbConnection, maxConcurrent):\n # checks if any shows are pending.\n fileContent = DatabaseInteract.checkPre(dbConnection)\n if(len(fileContent) > 0 and Tools.numRunningProcesses() < maxConcurrent):\n cursor = dbConnection.cursor()\n cursor.execute(\"UPDATE transcriptions SET pending = TRUE WHERE id = '\" + str(fileContent[1]) + \"';\")\n dbConnection.commit()\n cursor.close()\n url = fileContent[0]\n indexID = str(fileContent[1]) # get the ID instead of the filename\n service = str(fileContent[3])\n # podcastName = fileContent[2]\n Tools.transcribeAll(service, url, indexID) # download the mp3 will print when done", "def clean_for_commit(self):", "def pending(self):\n self.update({self.STATE: self.STATE_PENDING})", "def uncomplete(self):\n ### TODO: needs test code for code coverage!\n ## (it has been tested through the calendar-cli test code)\n if not hasattr(self.vobject_instance.vtodo, \"status\"):\n self.vobject_instance.vtodo.add(\"status\")\n self.vobject_instance.vtodo.status.value = \"NEEDS-ACTION\"\n if hasattr(self.vobject_instance.vtodo, \"completed\"):\n self.vobject_instance.vtodo.remove(self.vobject_instance.vtodo.completed)\n self.save()", "def 
is_atomic(self):\n found = True\n if self.ant is not None:\n for p in self.ant:\n if p.conn != 'at':\n found = False\n if self.con is not None:\n for prop in self.con:\n if prop.conn != 'at':\n found= False\n return found", "def move_tobchecked_in_unrecordable(self, data):\r\n conf = self.func.config_info()\r\n folder_name = self.bid_folder_name() \r\n\r\n if \"ProofreaderStatus\" in list(data.keys()):\r\n if data[\"ProofreaderStatus\"] == \"UNRECORDABLE\":\r\n files = os.listdir(conf[\"path_to_batches_tobechecked\"])\r\n if folder_name in files:\r\n src = os.path.join(conf[\"path_to_batches_tobechecked\"], folder_name)\r\n dst = os.path.join(conf[\"path_to_batches_unrecordable\"], folder_name)\r\n self.func.move_folder(src, dst)\r\n\r\n if not self.func.folder_exists(dst):\r\n raise Exception(\"Folder {} not moved in '8 UNRECORDABLE'!\".format(folder_name))\r\n else:\r\n raise Exception(\"Folder {} not found in '4 TO BE CHECKED'!\".format(folder_name))", "def fix_notification_statuses_not_in_sync(self):\n MAX = 10000\n\n subq = \"SELECT id FROM notifications WHERE cast (status as text) != notification_status LIMIT {}\".format(MAX)\n update = \"UPDATE notifications SET notification_status = status WHERE id in ({})\".format(subq)\n result = db.session.execute(subq).fetchall()\n\n while len(result) > 0:\n db.session.execute(update)\n print('Committed {} updates at {}'.format(len(result), datetime.utcnow()))\n db.session.commit()\n result = db.session.execute(subq).fetchall()\n\n subq_hist = \"SELECT id FROM notification_history WHERE cast (status as text) != notification_status LIMIT {}\" \\\n .format(MAX)\n update = \"UPDATE notification_history SET notification_status = status WHERE id in ({})\".format(subq_hist)\n result = db.session.execute(subq_hist).fetchall()\n\n while len(result) > 0:\n db.session.execute(update)\n print('Committed {} updates at {}'.format(len(result), datetime.utcnow()))\n db.session.commit()\n result = db.session.execute(subq_hist).fetchall()", "def test_update_no_note(self):\n self.my_task.notes = None\n self.my_task.key = self.task_storage.add(self.my_task)\n\n self.my_task.title = 'foo'\n key = self.task_storage.update(self.my_task)\n new_task = self.task_storage.find(key)\n\n self.assertEqual(self.my_task, new_task)", "def _rec_only_updated(cls, rec):\n return rec.get('uplinked', None) \\\n and not rec.get('queued', None) \\\n and not rec.get('announced', None) \\\n and not rec.get('blocked', None) \\\n and not rec.get('finished', None) \\\n and not rec.get('aborted', None)", "def test_partial_update_metadata1(self):\n pass", "def test_command__dont_update_old_activity(self):\n assignment = WorkflowCollectionAssignment.objects.get(id=self.assignment_2.id)\n self.assertEqual(assignment.status, \"IN_PROGRESS\")\n\n out = StringIO()\n call_command(\"assignment_terminator\", days_old=\"30\", type=\"SURVEY\", stdout=out)\n\n assignment.refresh_from_db()\n self.assertEqual(assignment.status, \"IN_PROGRESS\")", "def no_filter(blast_subject_entry):\r\n return True", "def bool_fix(name_dates, candidate_json, yes_no_key):\n for candidate in name_dates:\n for key in yes_no_key:\n test = str(candidate_json[candidate][key].strip())\n if test == 'Yes' or test == 'yes' or test == 'Y' or test == 'y':\n candidate_json[candidate][key] = True\n elif test == 'No' or test == 'no' or test == 'N' or test == 'n':\n candidate_json[candidate][key] = False\n else:\n # returns incorrect data for error exploration\n print(test)\n return candidate_json", "def toggle_article_flag(self, 
article: Article) -> None:\n article.flag = not article.flag\n with self._sqlite_connection:\n self._sqlite_connection.execute('''UPDATE articles SET flag = ? WHERE identifier = ? and feed_id = ?''', [article.flag, article.identifier, article.feed_id])", "def add_false_positive(self):\n if not self.can_update():\n self._handle_error(910, [self.type])\n\n return self.tc_requests.add_false_positive(\n self.api_type, self.api_branch, self.unique_id, owner=self.owner\n )", "def resetUpdatedFlag( sesTarget, tblName ):\n\tsesTarget.execute( text( \"UPDATE %s SET updated_flag = :resetFlag\" % ( tblName ) ), { \"resetFlag\" : 0 } )", "def check_ans():\n os.chdir(FLAGS.ans_dir) # 进入到 annotations 文件夹\n\n ans_list = os.listdir()\n\n for i in ans_list:\n with open(i, 'rt') as file_read:\n line = file_read.read()\n\n to_list = line.split(' ')\n print(i, \": \", to_list)\n\n if to_list[-1] == '\\n':\n line = line[:-2]\n\n # with open(i, 'wt') as file_write:\n # file_write.write(line)\n # print('Rewrite: ', line)\n\n return", "def _clear_tasks(self):\n listOfTasks = self.model.find(xmlns + 'ListOfTasks') \n assert listOfTasks != None\n \n for task in listOfTasks:\n task.attrib['scheduled'] = 'false'", "def deny(self):\n self.quest_node['completed_by'] = ''\n self.completed_by = None\n self.active = True\n self.quest_node['active'] = True\n graph.push(self.quest_node)", "def _advance_to_pending(self):\n if all(signup.status != GameSignup.REGISTERED for signup in self.signups.all()):\n try:\n with transaction.atomic():\n self.status = self.PENDING\n self._create_characters()\n self.save()\n except DatabaseError:\n pass\n else:\n raise ValidationError('All user signups must be accepted, rejected, or withdrawn before continuing.')", "def test_no_delete_entry_on_rebuild_server_response(self):\n self.validate_no_deletes_entry_returned()", "def zap_entries(conn):\n with conn:\n c = conn.cursor()\n c.execute(\"SELECT project FROM projects\")\n rows = c.fetchall()\n rows = [x[0] for x in rows]\n for project in rows:\n if not os.path.isdir(project):\n print(\"Zapping from cache: %s\" % project)\n delete(conn, project)", "async def team_unignore(self, ctx: commands.Context):\n await self.config.user(ctx.author).do_not_message.set(False)\n await ctx.send('Okay, I\\'ll include you back in team-wide DMs.')", "def upload_only_when_stable(self):\n return os.getenv(\"CONAN_UPLOAD_ONLY_WHEN_STABLE\", \"True\").lower() in [\"true\", \"1\", \"yes\"]", "def checkNone(comment, entry, update=True):\n res = entry is None\n if update:\n if res:\n results[\"pass\"] += 1\n else:\n print(\"checking answer\",comment,'|','\"{}\" is not None!'.format(entry))\n results[\"fail\"] += 1", "def test_07_provenance_is_empty(self):\n outfiles = reporting.provenance_reports(\"2015-01-01T00:00:00Z\", \"2016-01-01T00:00:00Z\", TMP_DIR)\n assert outfiles is None, outfiles\n\n # Try as background job\n job = reporting.ReportingBackgroundTask.prepare(\"system\", outdir=TMP_DIR, from_date=DEFAULT_TIMESTAMP_VAL,\n to_date=dates.now_str())\n reporting.ReportingBackgroundTask.submit(job)\n time.sleep(1)\n job = models.BackgroundJob.pull(job.id)\n\n assert 'No provenance records found' in json.dumps(job.audit), job.audit", "def fileGone(self) -> None:\n myQuads = [(s, p, o, self.uri) for s, p, o in self.getSubgraph(self.uri)\n ]\n log.debug(\"dropping all statements from context %s\", self.uri)\n if myQuads:\n self.patch(Patch(delQuads=myQuads), dueToFileChange=True)", "def mark_obsolete_in_dataset( dataset_name, engine, table ):\n s = table.select( 
table.c.dataset_name==dataset_name ) \n result = conn.execute(s) # all rows of replica.files with the specified dataset_name\n\n sr = []\n srf = {}\n for row in result:\n # Note that you can loop through result this way only once.\n sr.append(row)\n fn = filename(row)\n if fn in srf:\n srf[fn].append(row)\n else:\n srf[fn] = [row]\n\n #sr.sort( key=filename )\n\n for fn,rows in srf.items():\n if len(rows)<=1: continue\n rows.sort( key=rowversion )\n print \"jfp will keep abs_path=\",rows[-1]['abs_path'],\"status=\",rows[-1]['status'],\\\n \"dataset_name=\",rows[-1]['dataset_name']\n for row in rows[0:-1]:\n abs_path = row['abs_path']\n dataset_name = \"old_\"+row['dataset_name']\n print \"jfp will do update for abs_path=\",abs_path,\"status from\",row['status'],\"to 50\"\n s = table.update().where( table.c.abs_path==abs_path ).\\\n values( status=50 )\n #if dataset_name.find('old_old_')!=0:\n # s = table.update().where( table.c.abs_path==abs_path ).\\\n # values( dataset_name=dataset_name )\n # ... doesn't work, you first have to create a row in replica.datasets with this name.\n result = conn.execute(s)", "def _reset_downstream(self, _, isdirty, *args):\n if isdirty:\n for name in self.outputs:\n task = self.get_output_task(name=name)\n if task:\n task.dirty = True", "def _does_not_exist_or_forced(self) -> bool:\n if os.path.exists(self.extracted_path) and self.force:\n logger.debug(f\"'-f/--force' flag set, deleting directory: '{self.extracted_path}'\")\n shutil.rmtree(self.extracted_path)\n logger.debug(f\"Deletion successful.\")\n elif os.path.exists(self.extracted_path) and not self.force:\n logger.warning(f\"{self.dataset_name} already exists at the destination directory '{self.extracted_path}'\")\n logger.warning(f\"If you wish to re-download the dataset, try 'sla-cli download -f/--force <DATASET>'\")\n logger.warning(f\"Skipping...\")\n return False\n\n return True", "def doNotTrack(self):\n # return False\n return 'lol'", "def run():\n\n # establish connection\n with sqlite3.connect(DB_PATH) as conn:\n db = conn.cursor()\n\n # run reset queries\n db.execute(\"\"\"\n WITH toReset AS (\n SELECT DISTINCT table_id\n FROM cea\n WHERE mapped IS NULL\n )\n\n UPDATE tables\n SET returned=0\n WHERE table_id IN toReset\n \"\"\")", "def test_skipped_update(self):\n dir0, dir1, dir2 = self.make_temp_dirs(3)\n self.write_file(dir0, \"foo\", \"bar\")\n self.sync_all()\n\n # Update dir0 and sync dir0/dir1 but not dir2\n self.write_file(dir0, \"foo\", \"baz\")\n self.sync_dirs(dir0, dir1)\n self.assertFile(dir0, \"foo\", \"baz\")\n self.assertFile(dir1, \"foo\", \"baz\")\n self.assertFile(dir2, \"foo\", \"bar\")\n\n # dir2 should pick up the change when all are sync'd\n self.sync_all()\n self.assertFile(dir0, \"foo\", \"baz\")\n self.assertFile(dir1, \"foo\", \"baz\")\n self.assertFile(dir2, \"foo\", \"baz\")", "def _remove_flag(self, mbox, msgset, flag):\n self.select_mailbox(mbox, False)\n self._cmd(\"STORE\", msgset, \"-FLAGS\", flag)", "def buildActivityData(self):\n logger.info(\"Running buildActivityData...\")\n ok = False\n try:\n ptsW = ProteinTargetSequenceWorkflow(self.__cfgOb, self.__cachePath)\n ok = ptsW.buildActivityData(referenceResourceName=\"pdbprent\", resourceNameList=[\"chembl\", \"pharos\"], backup=True, remotePrefix=self.__stashRemotePrefix)\n except Exception as e:\n logger.exception(\"Failing with %s\", str(e))\n return ok", "def dummy_update( self ):\r\n pass", "def check_unstaged_changes(self):\n pass", "def resetConflictList(self):\n return\n #self.conflict_list = 
PersistentMapping()", "def _post_sync(self):", "async def _antiadv(self, ctx):\r\n serverid = ctx.message.server.id\r\n if ctx.invoked_subcommand is None:\r\n await send_cmd_help(ctx)\r\n if serverid not in self.adkillr:\r\n self.adkillr[serverid] = {'toggle': False, 'message': '{0.mention} don\\'t send links!', 'filters': []}\r\n dataIO.save_json(\"data/adkillr/adkillr.json\", self.adkillr)", "def test_sync_status_to_cancel(initialized_db):\n\n disable_existing_mirrors()\n mirror, repo = create_mirror_repo_robot([\"updated\", \"created\"], repo_name=\"cancel\")\n\n mirror.sync_status = RepoMirrorStatus.SYNCING\n mirror.save()\n updated = update_sync_status_to_cancel(mirror)\n assert updated is not None\n assert updated.sync_status == RepoMirrorStatus.NEVER_RUN\n\n mirror.sync_status = RepoMirrorStatus.SYNC_NOW\n mirror.save()\n updated = update_sync_status_to_cancel(mirror)\n assert updated is not None\n assert updated.sync_status == RepoMirrorStatus.NEVER_RUN\n\n mirror.sync_status = RepoMirrorStatus.FAIL\n mirror.save()\n updated = update_sync_status_to_cancel(mirror)\n assert updated is None\n\n mirror.sync_status = RepoMirrorStatus.NEVER_RUN\n mirror.save()\n updated = update_sync_status_to_cancel(mirror)\n assert updated is None\n\n mirror.sync_status = RepoMirrorStatus.SUCCESS\n mirror.save()\n updated = update_sync_status_to_cancel(mirror)\n assert updated is None", "def test_single_aclhook_false(self):\n self._test_hook_approval_sequence([False], False)", "def test_pre_commit_has_no_configuration(tmp_path):\n ProjectMock(tmp_path).style(\"\").pre_commit(\"\").api_check_then_fix()", "def resurrectTask(task_id, ignoreStarted = False):\n \n [task] = Hydra_rendertask.fetch(\"where id = '%d'\" % task_id)\n if (\n task.status == 'K' or task.status == 'F' or \n (task.status == 'S' and ignoreStarted == True)\n ):\n task.status = 'R'\n task.host = None\n task.startTime = None\n task.endTime = None\n else:\n return True\n\n with transaction() as t:\n task.update(t)\n \n return False", "def _setAllWithoutUpdate(self, data):\n super(SummonerModel, self)._setAllWithoutUpdate(data)", "def maybe_commit(job):", "def valid_update_flags(self) -> bool:\n if CoronaCaseRaw.objects.all().count() < 2:\n return True\n return not CoronaCaseRaw.objects.filter(update_flag=(not self.latest_flag())).exists()", "def preexecute(self, trans):\n def getobj(elem):\n elem.mark_preprocessed(self)\n return elem.obj\n \n ret = False\n elements = [getobj(elem) for elem in self.targettask.polymorphic_tosave_elements if elem.obj is not None and not elem.is_preprocessed(self)]\n if len(elements):\n ret = True\n self.processor.preprocess_dependencies(self.targettask, elements, trans, delete=False)\n\n elements = [getobj(elem) for elem in self.targettask.polymorphic_todelete_elements if elem.obj is not None and not elem.is_preprocessed(self)]\n if len(elements):\n ret = True\n self.processor.preprocess_dependencies(self.targettask, elements, trans, delete=True)\n return ret", "def _revert(self):\n self.kwargs[\"collect\"].change_status(self.kwargs[\"collect\"].ENDED)", "def _update_if_summmary(self):\n if self._is_summary:\n self.update(True)", "def do_fill_db_with_fake_tracks(self, arg):\n print('*** Warning! All records will be deleted!')\n print('Do you really want to fill the Database with fake records? 
[y/N] ', end='')\n if not helpers.get_yes_no(default='n'):\n return\n self.db.fill()\n self.set_prompt()", "def prepare_push():\n print(\"Preparing to push\")\n cur = conn.cursor()\n try:\n for tname in TABLES:\n with open(f'{tname}.db', 'w') as f:\n print(f\"Copying {tname}\")\n cur.copy_to(f, f'\"{tname}\"')\n return True\n except IOError:\n print(\"IO ERROR\")\n return False\n finally:\n cur.close()", "def in_trash(self, in_trash):\n self._in_trash = in_trash", "def del_done():\n # This function works just like the deleting function\n c.execute(\"DELETE FROM activities WHERE status = 'done' AND Frequency != 'correct'\")\n conn.commit()", "def replace_none(ret_st, pattern):\n curr_none = [i for i in range(len(fk_array)) if ret_st[i] == 'unmapped-none']\n while curr_none:\n temp_curr_none = curr_none[:MGET_CHUNK]\n curr_none = curr_none[MGET_CHUNK:]\n vals_array = rdb.mget([pattern.format(str(fk_array[i]).upper(), taxid, hint)\n for i in temp_curr_none])\n for i, val in zip(temp_curr_none, vals_array):\n if val is None:\n continue\n ret_st[i] = val.decode()" ]
[ "0.56695527", "0.5449261", "0.5254157", "0.52069896", "0.519826", "0.5033168", "0.4992053", "0.4885781", "0.48527905", "0.48117176", "0.48087612", "0.4767459", "0.47640154", "0.47548455", "0.47459507", "0.47442943", "0.47228667", "0.47036612", "0.46930197", "0.468822", "0.46794957", "0.46776637", "0.46678892", "0.46677774", "0.46649274", "0.4664845", "0.46597415", "0.4654409", "0.46516994", "0.46490273", "0.46412688", "0.46402612", "0.46339566", "0.46306235", "0.46289533", "0.46288684", "0.46117762", "0.45963863", "0.45943007", "0.45905402", "0.45902205", "0.45792368", "0.45780632", "0.45766", "0.4574792", "0.45730284", "0.45674142", "0.45666507", "0.45485005", "0.4538222", "0.4537498", "0.4536", "0.45323458", "0.45309708", "0.45294574", "0.45293918", "0.45145085", "0.4513402", "0.45096615", "0.45081127", "0.44980186", "0.44968867", "0.44931248", "0.44910687", "0.44898483", "0.44720262", "0.44614807", "0.44596586", "0.44561338", "0.4450988", "0.44483164", "0.4447732", "0.44468397", "0.44453666", "0.4444875", "0.4442712", "0.44376898", "0.443747", "0.44360882", "0.44360846", "0.44342092", "0.44321594", "0.44285026", "0.44278595", "0.44255808", "0.44239885", "0.44196966", "0.4417414", "0.44074747", "0.4407294", "0.44065017", "0.43949518", "0.43906775", "0.4388932", "0.43887076", "0.43884638", "0.43820626", "0.43812108", "0.43804547", "0.43802613" ]
0.503461
5
Given a title, return True if the podcast is already in the database, and False if it does not exist in the database
def checkIfExists(dbconnection, title):
    cursor = dbconnection.cursor()
    output = ""
    title = title.replace("'", "''")
    try:
        cursor.execute("SELECT * FROM transcriptions WHERE title = '" + title + "';")
        dbconnection.commit()
        output = cursor.fetchone()
        cursor.close()
        if(output is None):
            return False
        else:
            return True
    except:
        dbconnection.rollback()
        cursor.execute("SELECT * FROM transcriptions WHERE title = '" + title + "';")
        dbconnection.commit()
        output = cursor.fetchone()
        cursor.close()
        if(output is None):
            return False
        else:
            return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_if_entry_exists(title: str) -> bool:\n conn = sqlite3.connect('rss.db')\n c = conn.cursor()\n try:\n c.execute(\n \"\"\"select * from entries where title = ?\"\"\",\n (title,)\n )\n records = c.fetchall()\n return len(records) > 0\n except sqlite3.OperationalError as e:\n print(f'Exception {e} caught. Recreating database.')\n c.execute('drop table if exists entries')\n conn.commit()\n conn.close()\n create()\n return False", "def exists(self):\n self.cursor.execute(f\"\"\"\n SELECT 1\n FROM {self.table_name}\n WHERE {self.lookup_type}='{self.word}'\n \"\"\")\n return True if self.cursor.fetchone() else False", "def entry_exists(title):\n try:\n f = default_storage.open(f\"entries/{title}.md\")\n return True\n\n except FileNotFoundError:\n return False", "def presentation_exists(self, presentation):\r\n result = QtSql.QSqlQuery('''SELECT * FROM presentations''')\r\n while result.next():\r\n if (unicode(presentation.title) == unicode(result.value(1).toString())\r\n and unicode(presentation.speaker) == unicode(result.value(2).toString())):\r\n return True\r\n return False", "def check_story_exists(self) -> bool:\n title_check = self._soup.find(\"title\").string\n if title_check == u'FicWad: fresh-picked original and fan fiction':\n return False\n return True", "def get_movie_if_exist(item):\n query = Session.query(Movie).filter(Movie.title == item.title)\n result = query.first()\n return result", "def check_repeat(db, record):\n models = [TechRepublicData, SecurityNewsData, PyjobData, RedditData]\n temp = db.query(*models)\n\n for model in models:\n if temp.filter(model.title == record.title).count():\n return True", "def book_exist(author, title, edition):\n book = Book.query.filter_by(\n author=author,\n book_title=title,\n edition=edition).first()\n if book:\n return True\n return False", "def title_exists(form, field):\n if Entry.select().where(Entry.title ** field.data).exists():\n raise ValidationError('That title is already in use.')", "def exists(self):\n return True", "def exists(self):\n return True", "def url_is_in_db(url):\n return bool(find_url(url).first())", "def if_already_present(video_id: str) -> bool:\n return Video.objects.filter(video_id=video_id).exists()", "def check_db_for_vid(self):\n with db.cursor() as cursor:\n if self.videoId in db.\n pass", "def check_repost_exists(type, id):\n \n try:\n soundcloud.get('/e1/me/{}_reposts/{}'.format(type, id))\n return True\n except HTTPError as e:\n if e.response.status_code == 404:\n db.mark_as_deleted(type, id)\n return False\n else:\n raise", "def player_exists_in_db(name: str):\n with open('db.json') as fo:\n data = loads(fo.read())\n return name in data", "def valid_title(self, title):\n if title in self.timers.keys() and isinstance(title, str) and self.timers[title]['count']>0:\n return True\n else:\n return False", "def insertEpisode(ep):\n if check(\"episodes\", ep):\n return \"episode exists\"\n else:\n engine.execute(f\"INSERT INTO episodes (episode) VALUES ('{ep}');\")", "def insertClip(dbConnection, audiourl, podcastName, description, parsedDate, title):\n try:\n cursor = dbConnection.cursor()\n title = title.replace(\"'\", \"''\")\n cursor.execute(\"INSERT INTO transcriptions(audiourl, realtimefactor, podcastname, transcription, description, date, title, pending, datetranscribed) VALUES('\" + audiourl + \"', NULL, '\" + podcastName + \"', NULL, '\" + description + \"', '\" + parsedDate + \"', '\" + title + \"', FALSE, NULL);\")\n dbConnection.commit()\n cursor.close()\n return True\n except:\n return 
False\n return False", "def _item_exists(self, item):\n cursor = self.conn.cursor()\n cursor.execute(\n 'SELECT * FROM Members where first_name = ?;',\n (item['first_name'])\n )\n return True if len(cursor.fetchall()) else False", "def exists(self, answer):\n return self.find(answer) is not None", "def try_create_uniqe_title(self,title,owner):\n if self.valid_title(title):\n for i in range (1,20):\n new_title=title+\"_\"+str(i)\n if self.unique_title(new_title,owner):\n return new_title\n return False\n else:\n return False", "def exist(self, product_item):\n cursor = self.database.cursor(named_tuple=True, buffered=True)\n sql = \"SELECT * FROM favoris WHERE produit_id = '{}' \".format(product_item.id)\n cursor.execute(sql)\n rows = cursor.fetchone()\n if not rows:\n return False\n return True", "def exist(self):", "def _exists (self):\n cursor = self._exec (self.select)\n return bool (cursor.fetchall ())", "def db_exists(self):\n \n with self.connection:\n c = self.connection.cursor()\n c.execute(\"SELECT EXISTS(SELECT 1 FROM sqlite_master WHERE name=?)\", [PUBLICATIONS_TABLE])\n exists = c.fetchone()\n if(exists[0] == 1):\n return True\n else:\n return False", "def existing_event(self, title, location, category, description):\n for event in self.event_list:\n # test to see if the user has the same event, in the same location in their list\n if event['title'] == title and event['location'] == location and event['category'] == category and event['description'] == description:\n return True\n else:\n return False", "def database_exists (name, parent=None):\n return get_database(name, parent) is not None", "def _check_row_exists(self, pk):\n session = self.session_factory()\n exists = session.query(PipelineRun).filter_by(id=pk).first()\n session.close()\n if exists:\n return True\n return False", "def existsInDatabase(self, url):\n connection = pymongo.MongoClient(\n settings['MONGODB_SERVER'],\n settings['MONGODB_PORT']\n )\n db = connection[settings['MONGODB_DB']]\n collection = db[settings['MONGODB_COLLECTION']]\n\n db_comic = collection.find_one({\n 'url': url\n })\n return True if db_comic else False", "def is_duplicate(self, url):\n dupl_check_sql = '''\n SELECT url FROM {} WHERE url=?\n '''.format(\n self.tablename\n )\n with self.conn:\n return self.conn.execute(dupl_check_sql, (url,)).fetchone()", "def db_exists(self, db):\n raise NotImplementedError()", "async def exists(self, payload: TPayload) -> bool:", "def exists (self, db):\n return hasattr(self, db) and isinstance(getattr(self, db), Database)", "def exists(self):\n return bool(self.get())", "def has_story_title(self, title, stories):\n\tfor story in stories:\n\t break\n\telse:\n\t self.fail(\"Story with title '%s' not found\" % title)", "def has_story_title(self, title, stories):\n\tfor story in stories:\n\t break\n\telse:\n\t self.fail(\"Story with title '%s' not found\" % title)", "def petExist(animal, pet_id):\n return Animal.objects.filter(pk = pet_id).exists()", "def try_create_uniqe_title(self,title,plan_id):\n if self.valid_title(title):\n for i in range (1,20):\n new_title=title+\"_\"+str(i)\n if self.unique_title(new_title,plan_id):\n return new_title\n return False\n else:\n return False", "def exists(self):\n return Criteria(op=\"$exists\", left=self, right=True)", "def check_if_exists(self, bookID):\n query = f\"\"\"SELECT * from {TABLE} WHERE bookID = '{bookID}';\"\"\"\n res = self.cursor.execute(query)\n\n if self.cursor.fetchall():\n return True\n else:\n return False", "def exists(self) -> bool:\n try:\n 
result = self.get()\n except KeyError:\n return False\n return True", "def exists_in_db(self) -> bool:\n query = \"\"\"SELECT * \n FROM Users \n WHERE Username=?;\"\"\"\n return len(self.db.fetchall(query, values=(self.username,))) > 0", "def isExist(data):\n return True/False", "def exists(self, table, cursor):\n cursor.execute(f\"SELECT name FROM sqlite_master WHERE type='table' AND name='{table}'\")\n res = cursor.fetchone()\n return True if res else False", "def contained_in_title(word, filename):\n title = get_title(filename)\n if word in title:\n return True\n else:\n return False", "def db_has_object(rep_cursor, sql, query_args):\n rep_cursor.execute(sql, query_args)\n if rep_cursor.rowcount == 0:\n return False\n return True", "def entry_exist_bool(dbfile, link):\n\n conn = sqlite3.connect(dbfile)\n c = conn.cursor()\n query = \"\"\"\n SELECT link FROM bringatrailer WHERE link='{}'\n \"\"\".format(link)\n c.execute(query)\n result = c.fetchall()\n if not result:\n return False\n else:\n return True", "def _is_title(self):\n ph = _child(self.__nvXxPr.nvPr, 'p:ph')\n if ph is None:\n return False\n # idx defaults to 0 when idx attr is absent\n ph_idx = ph.get('idx', '0')\n # title placeholder is identified by idx of 0\n return ph_idx == '0'", "def uploadPodcast(dbConnection, homepage, name, description, category, source, imageurl, web, twitter, facebook, rss):\n try:\n cursor = dbConnection.cursor()\n name = name.replace(\"'\", \"''\")\n description = description.replace(\"'\", \"''\")\n cursor.execute(\"\"\"INSERT INTO podcasts(homepage, name, description, category, source, imageuri, web, twitter, Facebook, rss) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);\"\"\", (homepage, name, description, category, source, imageurl, web, twitter, facebook, rss))\n dbConnection.commit()\n cursor.close()\n return True\n except Exception as e:\n\t\t Tools.writeException(\"insertHeader\", \"e\")\n return False", "def add_movies(movie_name: str, release_date: float) -> bool:\n with connection:\n movie_exists = connection.execute(CHECK_MOVIE, (movie_name, release_date)).fetchone()\n if movie_exists is None:\n connection.execute(INSERT_MOVIE, (movie_name, release_date))\n return True\n return False", "def check_video_pruning(self, artist, name, title):\n\n\t\tweeders = ['cover','live','vevo','remix']\t\t\t# words that we want to ignore in our video search\n\t\tname_contains_weed_word = any(weed_word in name.lower() for weed_word in weeders) \n\t\tartist_cointains_weed_word = any(weed_word in artist.lower() for weed_word in weeders)\n\t\tvideo_title_contains_weed_word = any(weed_word in title.lower() for weed_word in weeders)\n\n\t\t# ensure that the artist or track name does not actually include the weeders Ex. 
live house\n\t\tif video_title_contains_weed_word and (name_contains_weed_word is False and artist_cointains_weed_word is False):\n\t\t\tret_val = True\n\t\telse:\n\t\t\tret_val = False\n\n\n\n\t\t# check duration of song\n\n\t\treturn ret_val", "def checkTMBDAndInsertTitle(self, aTitleInLoc = None, theLanguage = None):\n\n # get the info from tmdb\n movieRec = self.tmdbClient.searchByTitle(aTitleInLoc['title'], theLanguage)\n self.logger.debug(\"Got %d titles for %s\" % (len(movieRec['results']), aTitleInLoc['title']))\n\n # check the translations returned\n if len(movieRec['results']) == 0:\n # unknown title just log it\n self.logger.warn(\"No results found at all for \\\"%s\\\" in %s!\" % (aTitleInLoc['title'], theLanguage))\n output = -1\n\n elif len(movieRec['results']) == 1:\n # match found\n self.insertTranslation(titleRec = aTitleInLoc, \\\n titleLanguage = theLanguage, \\\n tmdbId = movieRec['results'][0]['id'],\n originalTitle = movieRec['results'][0]['original_title'],\n originalLanguage = movieRec['results'][0]['original_language'])\n output = 1\n\n else:\n # several translations found: how many with the exact same title?\n exactMatch = [rec for rec in movieRec['results'] if rec['title'].lower() == aTitleInLoc['title'].lower()]\n\n if len(exactMatch) == 0:\n # ambiguous results: no exact one - log and move on\n self.logger.warn(\"Too many matches for \\\"%s\\\" and no exact match at all (found %d matches)\" % (aTitleInLoc['title'], len(movieRec['results'])))\n output = 0\n\n elif len(exactMatch) > 1:\n # several exact match: anyone in the same language?\n exactMatchWithLang = [rec for rec in exactMatch if rec['title'].lower() == aTitleInLoc['title'].lower() and rec['original_language'] == aTitleInLoc['language']]\n\n if len(exactMatchWithLang) == 1:\n # match found\n self.insertTranslation(titleRec = aTitleInLoc, \\\n titleLanguage = theLanguage, \\\n tmdbId = movieRec['results'][0]['id'],\n originalTitle = exactMatchWithLang[0]['original_title'],\n originalLanguage = exactMatchWithLang[0]['original_language'])\n output = 1\n else:\n # totally ambiguous match: too many exact matches - log and move on\n for r in exactMatch:\n if \"release_date\" not in r.keys():\n r['release_date'] = ''\n self.logger.warning(\"Too many exact matches: ambiguous title (%s) yielded:\\n%s\" % \\\n (aTitleInLoc['title'], pformat([(t['title'], t['original_language'], t['release_date']) for t in exactMatch])))\n output = 0\n\n else:\n # unique exact match: match found\n self.insertTranslation(titleRec = aTitleInLoc, \\\n titleLanguage = theLanguage, \\\n tmdbId = movieRec['results'][0]['id'],\n originalTitle = exactMatch[0]['original_title'],\n originalLanguage = exactMatch[0]['original_language'])\n output = 1\n\n return output", "def exists( identifier ):\n return note.exists(identifier)", "def exists(self, obj):\n return False", "def schedule_exist(self, schedule_name):\r\n schedule = self.find(\"schedules\", schedule_name, attribute=\"name\")\r\n if schedule is not None:\r\n return True\r\n else:\r\n return False", "def check_if_already_exists(list_name, title, description):\n\n for item in list_name:\n if item['title'] == title:\n return 'Sorry, This title has already been used in another question'\n if item['description'] == description:\n return 'Sorry, This description has already been used in another question'", "def check_if_duplicate(self, data):\n\n query = \"SELECT * FROM {} WHERE topic = '{}' AND location = '{}'\\\n \".format(self.table, data['topic'], data['location'])\n\n result = 
self.fetch_one(query)\n if result:\n return True, 'Meetup with same topic at the same venue\\\n already exists'\n\n query = \"SELECT * FROM {} WHERE happening_on = '{}' AND location = '{}'\\\n \".format(self.table, data['happening_on'], data['location'])\n\n result = self.fetch_one(query)\n if result:\n return True, 'Meetup happening the same date at the same venue \\\n already exists'\n\n query = \"SELECT * FROM {} WHERE topic = '{}' AND happening_on = '{}'\\\n \".format(self.table, data['topic'], data['happening_on'])\n\n result = self.fetch_one(query)\n if result:\n return True, 'Meetup happening the same date with same topic \\\n already exists'\n\n return False, None", "def exist_import_record(session, doc_id):\n try:\n team = session.query(DocumentSentenceText).filter_by(doc_id=doc_id).first()\n if team:\n return True\n else:\n return False\n except Exception:\n traceback.print_exc()\n return False", "def has(self, tag_name: str, category: ty.Optional[str] = None) -> bool:\n tags = self.__holder.db_tags.filter(lambda t: t.name == tag_name)\n if category is not None:\n tags = tags.filter(category=category)\n\n return len(tags) >= 1", "def is_product_exists(product_name) -> bool:\n with MY_CONNECTION as connection:\n cursor = connection.cursor()\n cursor.execute(\"SELECT exists(SELECT 1 FROM Products WHERE product_name=?)\", (product_name,))\n return cursor.fetchone()[0] == 1", "def is_duplicate(kml, collection):\n\t\n\tresults = [ i for i in collection.find({'date': kml['date']}) ]\n\tif results:\n\t\tprint('\\nDuplicate found! %s\\n' % item)\n\t\treturn True\n\telse:\n\t\treturn False", "def exists(self):\n\n if self:\n pass", "def has_item(self, item):\n if item in self._reverse_store:\n return True\n else:\n return False", "def add_to_movies(self):\n \n # Recuperation de la liste des films\n movies_list = self._get_movies()\n\n # Ajout du film\n if self.title not in movies_list :\n \"\"\"si le film n'est pas dans la liste\"\"\"\n movies_list.append(self.title)\n self._write_movies(movies_list)\n return True\n\n else :\n \"\"\"si le film est deja dans la liste\"\"\"\n logging.warning(f\"le film {self.title} est déjà dans la liste\")\n return False", "def exists(self):\r\n try:\r\n self.refresh()\r\n except:\r\n return False\r\n return True", "def business_exists(yelp_id, conn):\n return conn.execute(Business.select().where(Business.c.yelp_id == yelp_id))\\\n .first() is not None", "def exist_import_record(session, doc_id, sentence_index):\n try:\n team = session.query(DocumentSentenceText).filter_by(doc_id=doc_id,\n sentence_index=sentence_index).first()\n if team:\n return True\n else:\n return False\n except Exception:\n traceback.print_exc()\n return False", "def team_exists(team):\r\n exists = False\r\n with sqlite3.connect(database_file) as conn:\r\n cursor = conn.cursor()\r\n team_names = cursor.execute(\"\"\"SELECT lower(teamname) FROM scores\"\"\")\r\n for row in team_names.fetchall():\r\n if row[0] == team.lower():\r\n exists = True\r\n return exists", "def check_exists(cls, **kwargs):\n return bool(cls.query.filter_by(**kwargs).first())", "def exist_import_record(session, doc_id, sentence_index):\n\n try:\n team = session.query(DocumentSentenceText).filter_by(doc_id=doc_id,\n sentence_index=sentence_index).first()\n if team:\n return True\n else:\n return False\n except Exception:\n traceback.print_exc()\n return False", "def isTranslated(appid):\n try:\n testQuery = dbApps.query(\n \"SELECT ttitle from {store}_apps where appId = :appId \"\\\n \"and {colname} = 
:value\".format(store=store, colname=colname),\n appId=appid, value=source_language\n )\n testText = None \n for entry in testQuery:\n tt = entry['ttitle']\n print(tt)\n if tt is not None:\n testText = tt\n if testText:\n print(\"already translated, skipping\")\n return True\n return False\n except Exception as e:\n print(\"probably no translated column ttitle found >> \",\n e, file=sys.stderr)\n raise ValueError(e)", "def is_article_duplicate(cls, article):\n return cls.db.hkeys(\"article_map\").count(article.link) != 0", "def is_page_stored(self, page_url):\n cursor = self.db.cursor()\n cursor.execute(\"SELECT 1 FROM triples WHERE page_url = ? LIMIT 1\", (page_url,))\n return len(cursor.fetchall()) > 0", "def checkPre(dbConnection):\n cursor = dbConnection.cursor()\n cursor.execute(\"SELECT audiourl, T.id, podcastName, source FROM transcriptions AS T JOIN podcasts as P ON P.name = T.podcastname WHERE COALESCE(T.transcription, '') = '' AND pending = FALSE LIMIT 1;\")\n entry = cursor.fetchone()\n cursor.close()\n return entry", "def test_blogpost_title_presence(self):\r\n self.configure_fixtures()\r\n blogpost = Blogpost(title=None, body=\"body\", app=self.app)\r\n db.session.add(blogpost)\r\n\r\n assert_raises(IntegrityError, db.session.commit)", "def exists(self, arg):\n raise NotImplementedError", "def _exists(cursor, table, data):\n cursor.execute('SELECT 1 FROM {} WHERE ({}) = ({})'.format(table, *_query_fields(data)), data)\n return bool(cursor.fetchone())", "def _check_mongo(url, db_collection):\n\n if db_collection.find_one({\"url\": url}):\n found = True\n else:\n found = False\n\n return found", "def item_exist(self):\n element = self.browser.find_elements_by_class_name(\"Timeline-item\")[-1]\n iid = element.find_element_by_class_name(\"Tweet\").get_attribute('data-tweet-id')\n return dbfunctions.exist_tweet(iid)", "async def check_scheduled_events_exists(self) -> bool:\n\n mycursor, _ = await the_database()\n await mycursor.execute(\"SHOW TABLE STATUS LIKE 'ScheduledEvents'\")\n exists = await mycursor.fetchone()\n await mycursor.close()\n if exists:\n return True\n else:\n return False", "def query_exists(self, q, param=None):\r\n \"\"\" Queries database and return one row \"\"\"\r\n try:\r\n c = self.connection.cursor()\r\n c.execute(q, param)\r\n self.logger.log(logger.LogLevel.DEBUG, 'database.query_exists: %s | %s' % (q, param)) \r\n if c.fetchone() is None:\r\n return False\r\n else:\r\n return True\r\n except Exception as e:\r\n self.logger.log(logger.LogLevel.ERROR, 'database.query_exists: %s. 
%s | %s' % (e, q, param))\r\n return False", "def carExists(self, carmake):\n data = db.session.query(Car.id).filter_by(make = carmake).first()\n if data is None:\n return False\n else:\n return True", "def exists(self, name):\n raise NotImplementedError()", "def exists(self):\n return self.obj is not None", "def exists(self) -> bool:\n self.connection.describe_activity_type(self.domain.name, self.name, self.version)\n return True", "def Has(cls, word_list):\n entity = WordList.get_by_id(word_list)\n if entity:\n return True\n return False", "def exists(self, word):\n result = self.find(word)\n return False if result is None else result.is_word", "def is_existing(self):\n return self.backend.is_existing", "def exists(self, name):\n return self.backend.exists(name)", "def db_exists(self, db):\n # HDF5 is file based\n return os.path.isfile(db)", "def exists(self, conn, key):\n return conn.exists(key)", "def _object_exists(name):\n conn = sqlite3.connect('/dev/input')\n try:\n cur = conn.cursor()\n sql = 'SELECT ROWID FROM object WHERE name=? AND deleted=0'\n cur.execute(sql, (name, ))\n result = cur.fetchall()\n return len(result) > 0\n finally:\n conn.close()", "def exists(self, proxy):\n return not self.database.zscore(self.key, proxy) == None", "def category_exists(self, category: str) -> bool:\n return all(category in self.data[letter] for letter in self.data)", "def is_on(self):\n return self.coordinator.data.get(\"title\", \"\") == \"foo\"", "def existsTable(self, dbUrl:str, tableName:str) -> bool:\n return self.run(\"existsTable('%s','%s')\" % (dbUrl,tableName))", "def exists(username):\n if Users.query.filter_by(username=username).first():\n return True\n return False", "def is_macd_object_exists(id, items):\n macd = get_macd_by_id(id, items)\n return True if macd != None else False", "def exists_image_in_database(full_image_url):\r\n\r\n logging.debug('exists_image_in_database({})'.format(full_image_url))\r\n\r\n dir_path = os.path.join(os.environ['LOCALAPPDATA'],'WarietyWallpaperImages')\r\n db_file = os.path.join(dir_path,'wariety.db')\r\n conn = sqlite3.connect(db_file)\r\n c = conn.cursor()\r\n\r\n # Select a row\r\n c.execute(\"SELECT id FROM wallpapers WHERE iurl = ?\", (full_image_url,))\r\n\r\n if c.fetchone() is not None:\r\n conn.close()\r\n logging.debug('exists_image_in_database - True')\r\n return True\r\n else:\r\n conn.close()\r\n logging.debug('exists_image_in_database - False')\r\n return False" ]
[ "0.7373473", "0.63370997", "0.6241188", "0.62162536", "0.6169572", "0.6139767", "0.6122975", "0.5888012", "0.5878836", "0.58756447", "0.58756447", "0.58615744", "0.58349437", "0.583252", "0.5811973", "0.5811524", "0.58094037", "0.57614815", "0.57470703", "0.57385516", "0.57263124", "0.5658692", "0.56586796", "0.56561965", "0.5651934", "0.5638678", "0.5634481", "0.56085217", "0.560124", "0.5592616", "0.5587964", "0.55840135", "0.556572", "0.5542878", "0.55280024", "0.5513535", "0.5513535", "0.5505283", "0.55019563", "0.54874736", "0.54761267", "0.546681", "0.545882", "0.5455727", "0.5443069", "0.5442884", "0.54420686", "0.5428548", "0.53911567", "0.53892326", "0.53859484", "0.5385431", "0.5384836", "0.5383276", "0.53682196", "0.5365981", "0.53597414", "0.53588974", "0.5358666", "0.5355795", "0.5355247", "0.535054", "0.53410244", "0.53335345", "0.53296787", "0.5318019", "0.5316334", "0.5308829", "0.53048563", "0.53026795", "0.5297976", "0.52780557", "0.52761245", "0.527497", "0.5273958", "0.5273874", "0.526928", "0.5265862", "0.5257371", "0.52540493", "0.5253011", "0.5237279", "0.52276653", "0.52266634", "0.52206606", "0.52205217", "0.52180094", "0.5217545", "0.5216543", "0.5210851", "0.5209208", "0.52091455", "0.5199533", "0.5187081", "0.5184947", "0.5183534", "0.5177713", "0.51770204", "0.5176034", "0.51745296" ]
0.7353994
1
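A minimal sketch of the same existence check as the checkIfExists document above, written with a parameterized query instead of string concatenation. It assumes a DB-API connection in the psycopg2 style (where %s is the placeholder) and the same transcriptions.title column; the rollback-and-retry behavior is kept, but the manual quote escaping becomes unnecessary because the driver handles quoting.

def title_exists(dbconnection, title):
    # Return True when a transcription with this title is already stored.
    try:
        cursor = dbconnection.cursor()
        cursor.execute("SELECT 1 FROM transcriptions WHERE title = %s;", (title,))
        found = cursor.fetchone() is not None
        cursor.close()
        return found
    except Exception:
        # Mirror the original: roll back the failed transaction and retry once.
        dbconnection.rollback()
        cursor = dbconnection.cursor()
        cursor.execute("SELECT 1 FROM transcriptions WHERE title = %s;", (title,))
        found = cursor.fetchone() is not None
        cursor.close()
        return found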
Checks the RSS URLs in the database and returns an array of the important fields for each
def rssCheck(podcastName, source, url):
    try:
        headers = {'Accept': 'text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8',
                   'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/18.17763'}
        req = requests.get(url, headers=headers)
        root = etree.fromstring(req.text)
        rssArray = []
        for element in root[0].iter('item'):
            try:
                title = element.find("title").text.replace("''", "'")
                description = element.find("description").text.replace("<strong>", "").replace("</strong>", "").replace("&amp;", "and").replace("'", "''")
                date = element.find("pubDate").text
                date = date.split(" ")
                date = datetime.strptime(date[1] + date[2] + date[3], "%d%b%Y")
                dateString = str(date.month) + "-" + str(date.day) + "-" + str(date.year)
                url = ResolveRouter.urlRouter(podcastName, source, element)
            except:
                print("error in XMLDetailsDebug parsing issue")
            if(len(title) > 0 and len(description) > 0 and len(dateString) > 0 and len(url) > 0):
                rssArray.append([title, dateString, url, description])
            else:
                print("error in XMLDetailsDebug parsing issue")
        return rssArray
    except Exception as e:
        print(e)
        Tools.writeException("getXMLDetailsDebug", e)
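A small, standalone illustration of the pubDate handling inside rssCheck above: the RSS date string is split on spaces, fields 1-3 (day, month name, year) are recombined and parsed with strptime, and the result is re-emitted as a month-day-year string. The sample date below is only an assumed example for demonstration.

from datetime import datetime

def pubdate_to_string(pub_date):
    # "Tue, 05 Mar 2019 12:00:00 GMT" -> ["Tue,", "05", "Mar", "2019", ...]
    parts = pub_date.split(" ")
    parsed = datetime.strptime(parts[1] + parts[2] + parts[3], "%d%b%Y")
    return str(parsed.month) + "-" + str(parsed.day) + "-" + str(parsed.year)

print(pubdate_to_string("Tue, 05 Mar 2019 12:00:00 GMT"))  # prints 3-5-2019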
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_rss_infos():\n\n url_rss_lib = \"http://www.liberation.fr/rss\"\n soup = utils.recovery_flux_url_rss(url_rss_lib)\n\n rss_items = soup.find_all(\"li\")\n\n rss_list = []\n\n link_rss = []\n\n for ri in rss_items:\n if ri.get(\"class\") == ['rss-item']:\n rss_list.append(ri.a.get('href'))\n\n for rl in rss_list:\n soup = utils.recovery_flux_url_rss(rl)\n entre = soup.find_all('entry')\n for e in entre:\n link_rss.append(e.link.get('href'))\n\n return link_rss", "def getFeedFromXXX(RSSlink):\n summary =\"\"\n link =\"\"\n if \"packetstormsecurity\" in RSSlink:\n link =\"link\"\n summary=\"summary_detail\"\n elif \"jetlib\" in RSSlink:\n link=\"id\"\n summary=\"summary\"\n myFeed=\"\"\n try:\n myFeed = feedparser.parse(RSSlink)\n except:\n print(\"problem with the db website.try to change the source db in option !\")\n return None\n entries = [item for item in myFeed.items() if \"entries\" in item]\n tupleInsideEntries =entries[0]\n #print len(tupleInsideEntries[1])#show the number of result founded\n for dicItem in tupleInsideEntries[1]:\n if dicItem.get(\"title\")==\"No Results Found\":\n return False #break from this loop if theres no result\n print (\"Title : \"+dicItem.get(\"title\"))#title\n if summary ==\"summary_detail\": #packetstormsecurity\n print (\"Description : \"+str(dicItem.get(summary).get(\"value\")))#description\n else:\n print (\"Description : \"+str(dicItem.get(summary)))\n print (\"Date : \"+dicItem.get(\"published\"))#date\n print (\"Link : \"+dicItem.get(link)) #link\n print (\"#################################################################################\")\n return True", "def parse_rss(database, feed, depth=1):\n # Get the updates article count, and article urls and publish dates.\n rss_a = rss_feed(feed)\n \n # Get all (article urls, publish dates) pairs\n articles = []\n pairs = rss_a[1].items()\n for url, pubdate in pairs: \n articles += crawl_url(database, url, date=pubdate, depth=depth)\n \n return articles", "def getRSS(self):\n return [rss for rss in self.rssCol.find()]", "def rss_feed(rss_url):\n try:\n # Use feedparser to analyze given RSS feed, if it is valid RSS.\n d = feedparser.parse(rss_url)\n except:\n return \"Sorry, invalid RSS feed. Please check and try again later.\"\n \n total = len(d['entries'])\n updates = dict()\n for index, item in enumerate(d['entries']):\n # Convert publish time from ctime format to iso-time format.\n a_time = time_convert(item.published)\n # Set article url ad dictionary key, with publish date as value. 
\n updates[str(item.link)] = a_time \n return (total, updates)", "def fetch_entries(self):\n entries = []\n rss_list = self.__data_adapter.fetch_rss()\n for rss in rss_list:\n rss_href = rss.get('url', None)\n if rss_href is not None:\n feed = feedparser.parse(rss_href)\n [entries.append(FeedDocument(entry.get('title', ''), entry.get('summary', ''))) for entry in feed.get('entries', [])]\n return entries", "def getUrlsList(self):\n\t\ttry:\n\t\t\tf = ur.urlopen(self.sitemap_url)\n\t\t\tres = f.readlines()\n\t\t\tfor d in res:\n\t\t\t data = re.findall('<loc>(https?:\\/\\/.+?)<\\/loc>',d)\n\t\t\t for i in data:\n\t\t\t\tself.urls.append(i)\n\t\texcept Exception as e:\n\t\t\tself.app.printflush(str(e))\n\t\t\tself.app.printflush(traceback.format_exc())\n\t\tself.fetched_count = len(self.urls)", "def get_items():\n\n list_url = \"http://blog.sina.com.cn/s/articlelist_1216826604_0_1.html\"\n try:\n page = urllib.urlopen(list_url)\n html = page.read()\n except IOError:\n errno = sys.exc_info()[:1]\n curtime = time.strftime('%H:%M:%S', time.localtime(time.time()))\n log_error('\\n\\n-------------\\ngetTodayUrl error\\n' + curtime)\n if errno == socket.timeout:\n log_error('There was a timeout')\n else:\n log_error('Some other socket error')\n return '', ''\n lines = html.split('\\n')\n numlst = list()\n i = 0\n for line in lines:\n if '''<div class=\"articleCell SG_j_linedot1\">''' in line:\n numlst.append(i)\n if len(numlst) == GET_NUM:\n break\n i += 1\n return lines, numlst", "def get_feed(self):\n possible_endings = ('rss', 'rss/')\n if not self.url or not self.url.endswith(possible_endings):\n print('Please check URL(is RSS?) and Internet connection')\n sys.exit()\n try:\n data = feedparser.parse(self.url)\n except urllib.error.URLError:\n print('Please input correct URL')\n sys.exit()\n self.get_content(data)\n return self.items", "def get_links_all(self, number_links):\r\n podcast_data = []\r\n\r\n for entry in self.rss[0].entries: \r\n try:\r\n podcast_data = [entry.published, entry.title, \r\n entry.enclosures[0]['href'], \r\n self.rss[0].feed.title\r\n ]\r\n except IOError as err:\r\n print err\r\n except UnicodeDecodeError as err:\r\n print err\r\n else:\r\n self.podcast_list.append(podcast_data)\r\n if number_links != 0:\r\n if len(self.podcast_list) == number_links: \r\n return None\r\n return None", "def get_rss(self):\r\n rssfiles = []\r\n \r\n rssfiles.append(feedparser.parse(self.url))\r\n return rssfiles", "def process_all_rss(reprocess=False):\n sources = list()\n logger.debug(\"Collecting sources\")\n monitors = mongo.db[app.config['MONITORS_COLLECTION']]\n for item in monitors.find({'active': True}):\n sources.append(item['metadata'].get('rss_link'))\n\n contents = [feedparser.parse(x) for x in sources]\n logger.debug(\"Processing sources\")\n for source in contents:\n for idx, item in enumerate(source.get('entries')):\n response = get_article(item, source['href'], reprocess)\n if response['from_store'] or reprocess:\n continue\n clean_link = response['article']['feed_source']\n monitors.update({'metadata.rss_link': clean_link},\n {'$set': {'checked': now_time()}})\n correct_counts()", "def find_urls(url):\n try:\n #sock = urllib2.urlopen(url)\n result = urlfetch.fetch(url)\n sock = result.content\n parser = URLParser()\n #print sock.read()\n parser.feed(sock.read())\n sock.close()\n parser.close()\n return parser.urls\n except: # This is to take care of links that are not valid.\n return []", "def get_urls_from_database():\n return select(u for u in Url if u.date_scanned is 
None).order_by(desc(Url.priority_scan))[:8]", "def get_rss_links(url):\n\n import re\n import requests \n\n # Use the requests module to get page source\n page_source = requests.get(url)\n\n # Find all of the RSS links according to some pattern.\n # The pattern that is searched for can be changed, and will probably\n # be site specific. You may also want to look for multiple patterns.\n # We use set() here to eliminate duplicate links. \n\n return set(re.findall(r'http.*\\.xml', page_source.text))", "def get_urls():\n return (constants.UNREVIEWED.col_values(3) +\n constants.REVIEWED.col_values(3) +\n constants.LAST.col_values(3))", "def get_news(url):\r\n \r\n # parse RSS feed into list of dictionaries\r\n feed = feedparser.parse(url)\r\n\r\n # no RSS feed articles for url\r\n if len(feed['entries']) == 0:\r\n return []\r\n \r\n # get first ten articles from the RSS feed\r\n news = []\r\n i = 0\r\n while True:\r\n if i == len(feed['entries']) or i > 30:\r\n break\r\n \r\n try:\r\n # get link to article\r\n link = feed[\"entries\"][i][\"link\"]\r\n\r\n # get title of article\r\n title = feed[\"entries\"][i][\"title\"]\r\n \r\n try:\r\n # get raw summary of article\r\n summary_raw = feed[\"entries\"][i][\"summary\"]\r\n \r\n # format summary\r\n summary = \"\"\r\n for c in summary_raw:\r\n if c == \"<\":\r\n summary += \"...\"\r\n break\r\n summary += c\r\n except KeyError as e:\r\n logging.error(\"no summary for RSS feed article: {}\".format(link))\r\n summary = \"read more here...\"\r\n \r\n # get raw date \r\n date_raw = feed[\"entries\"][i][\"published_parsed\"]\r\n \r\n if date_raw is None:\r\n date = feed[\"entries\"][i][\"published\"]\r\n \r\n else:\r\n # format date\r\n year = str(date_raw.tm_year)\r\n months = [\"January\", \"February\", \"March\", \"April\", \"May\", \"June\", \"July\", \"August\", \"September\", \"October\", \"November\", \"December\"]\r\n month = months[date_raw.tm_mon - 1]\r\n day = str(date_raw.tm_mday)\r\n weekdays = [\"Monday\", \"Tuesday\", \"Wednesday\", \"Thursday\", \"Friday\", \"Saturday\", \"Sunday\"]\r\n wday = weekdays[date_raw.tm_wday]\r\n hour = str(date_raw.tm_hour)\r\n hour = \"{:2}\".format(hour).format(' ','0')\r\n min = str(date_raw.tm_min)\r\n min = \"{:2}\".format(min).replace(' ','0')\r\n date = hour + \":\" + min + \" - \" + wday + \" \" + month + \" \" + day + \", \" + year\r\n \r\n # compile entry and append to news list\r\n entry = {\"link\":link, \"title\":title, \"date\":date, \"summary\":summary}\r\n \r\n # sanitize entry\r\n for key in entry:\r\n # apostrophe\r\n entry[key] = entry[key].replace(\"&#39;\", \"'\")\r\n # right single quotation mark\r\n entry[key] = entry[key].replace(\"’\", \"&#8217;\")\r\n # left single quotation mark\r\n entry[key] = entry[key].replace('\"', \"&#8216;\")\r\n # right double quotation mark\r\n entry[key] = entry[key].replace(\"'\", \"&#8221;\")\r\n # left double quotation mark\r\n entry[key] = entry[key].replace(\"'\", \"&#8220;\")\r\n # Weird ampersand formatting\r\n entry[key] = entry[key].replace(\"&amp;\", \"&\")\r\n \r\n # prepare entry for sqlite queries\r\n entry[key] = surround(entry[key])\r\n \r\n # add entry to news list\r\n news.append(entry)\r\n \r\n # max 10 entries\r\n if len(news) == 10:\r\n break\r\n i += 1\r\n \r\n except Exception as e:\r\n logging.error(e)\r\n i += 1\r\n pass\r\n \r\n # success\r\n return news", "def get_urls(db):\n return db.meta.find_one({'name':\"urls\"})['urls']", "def check_feeds(self):\n lst = []\n for feed in self.feeds:\n feed.update()\n if 
feed.get_new_entries():\n lst.append(feed)\n return lst", "def get_urls(links):\n\n temp_list=[]\n url_list=[]\n temp_list2=[]\n #Open the file where the url's are saved and copy the tuple values into an empty list\n z=open('dbdocs.txt','r')\n for line in z:\n temp_list.append(line)\n #print temp_list\n for x in temp_list:\n index=x.find(',')\n if index==-1:\n y=x.split(\" \",1)\n key=int(y[0])\n val=str(x[1]).replace('\\n','')\n url_list.append((key,val))\n else:\n #find the tab seperator between the key and the url, and\n #split them, in order to put in a list\n key=x[0:index-1]\n #print key\n value=str(x[index+3:len(x)-1])\n #print value\n temp_list2.append((int(key),value))\n #Find the url's of the links where the word was found\n for k in links:\n for i,j in temp_list2:\n #print j\n if i==k:\n url_list.append((i,j))\n break\n #print len(url_list)\n #print len(links)\n z.close()\n return url_list", "def scanFeedList(self): \r\n data = self.feed_handler.listScanFeeds()\r\n data = data[:MAX_FEEDS_SCAN]\r\n for idx, feed in enumerate(data):\r\n print \"feeds ... / [%s/%s] (%s docs:%s passed)\" % (idx, len(data),self.feed_item_ctr, self.feed_passed)\r\n try:\r\n baseURL = feed.mainUrl\r\n self.processData(baseURL) \r\n self.createFeedItems()\r\n except Exception, ex:\r\n print(\"ERR: failed to process data and create feed item=%s\" % ex)\r\n print \"done\"", "def download_feed_return_objects(rss_url):\r\n try:\r\n feed_obj = rss_exists(rss_url)\r\n except:\r\n yield None\r\n return\r\n\r\n feed_obj_found = False\r\n feed_parser_results, success = get_rss(rss_url)\r\n\r\n if feed_parser_results is None:\r\n error_reporter.captureMessage(u'Feed Parser results is None', **dict(rss_url=rss_url))\r\n yield None\r\n return\r\n\r\n if feed_obj is None:\r\n feed_obj = create_new_feed(feed_parser_results, rss_url)\r\n else:\r\n feed_obj_found = True\r\n\r\n feed_id = feed_obj.id\r\n feed_obj.title = feed_parser_results.get(\"title\", \"\") or \"\"\r\n max_length_field(feed_obj, 'title', 100)\r\n\r\n feed_obj.status_code = feed_parser_results.get(\"status\", \"\") or 200\r\n feed_obj.status = find_feed_status_from_scode(feed_obj)\r\n\r\n feed_obj.etag = cut_clean_etag(feed_parser_results.get(\"etag\", \"\"))\r\n\r\n updated_date = feed_parser_results.get(\"updated_parsed\")\r\n feed_obj.updated = dt.fromtimestamp(mktime(updated_date)) if updated_date is not None else dt.utcnow()\r\n #\tfeed_obj.published = dt.fromtimestamp(mktime(published_date)) if published_date is not None else None\r\n feed_obj.last_check = dt.utcnow()\r\n\r\n # We could be creating a new feed, or updating the existing one.\r\n yield feed_obj\r\n rss_posts = []\r\n\r\n for feed_article in feed_parser_results.get(\"entries\", []):\r\n ptime = feed_article.get(\"published_parsed\", None)\r\n post_date = dt.fromtimestamp(mktime(ptime)) if ptime is not None else dt.utcnow()\r\n #\t\tprint \"%r\" % post\r\n p = Post(\r\n id=uuid.uuid1(),\r\n title=feed_article.get(\"title\", \"\"),\r\n author=feed_article.get(\"author\", \"\"),\r\n href=feed_article.get(\"href\", \"\"),\r\n post_id=feed_article.get(\"id\", \"\"),\r\n published_at=post_date,\r\n feed_id=feed_id\r\n )\r\n\r\n p.original_title = max_length_field(p, 'title', 200)\r\n p.original_author = max_length_field(p, 'author', 200)\r\n\r\n p.content_html = feed_article.get(\"content\", \"\") or \"\"\r\n\r\n if feed_article.has_key(\"media_content\"):\r\n media_contents = feed_article.get(\"media_content\", []) or []\r\n if media_contents is not None and (not 
isinstance(media_contents, basestring)) and isinstance(\r\n media_contents, collections.Iterable):\r\n p.media = [media.get(\"url\") for media in media_contents]\r\n\r\n hasHash = False\r\n\r\n if feed_article.has_key(\"feedburner_origlink\"):\r\n p.original_link = feed_article.get(\"feedburner_origlink\", \"\")\r\n if non_empty_str(p.original_link):\r\n p.link_hash = url_hash(safe_str(p.original_link))\r\n hasHash = True\r\n\r\n if feed_article.has_key(\"link\"):\r\n p.href = feed_article.get(\"link\", \"\")\r\n if not hasHash and non_empty_str(p.href):\r\n p.link_hash = url_hash(safe_str(p.href))\r\n hasHash = True\r\n\r\n if not hasHash:\r\n print \"Post don't have any hash\"\r\n\r\n p.title_hash = url_hash(safe_str(p.title)) if non_empty_str(p.title) else \"\"\r\n p.post_id_hash = url_hash(safe_str(p.post_id)) if non_empty_str(p.post_id) else \"\"\r\n\r\n if feed_article.has_key(\"tags\"):\r\n if isinstance(feed_article['tags'], collections.Iterable):\r\n p.tags = [pst.get(\"term\") for pst in feed_article['tags']]\r\n\r\n rss_posts.append(p)\r\n\r\n has_posts = len(rss_posts) > 0\r\n post_id_hashes = [p.post_id_hash for p in rss_posts]\r\n #\tpost_title_hashes = [p.title_hash for p in rss_posts]\r\n post_link_hashes = [p.link_hash for p in rss_posts]\r\n\r\n found_posts_id_hashes = []\r\n found_posts_link_hashes = []\r\n\r\n if feed_obj_found and has_posts:\r\n existing_posts = find_existing_posts(feed_id, post_id_hashes, post_link_hashes)\r\n\r\n for ex_post_id_hash, ex_link_hash in existing_posts:\r\n found_posts_id_hashes.append(ex_post_id_hash)\r\n found_posts_link_hashes.append(ex_link_hash)\r\n\r\n has_existing_posts = len(found_posts_id_hashes) > 0 or len(found_posts_link_hashes) > 0\r\n\r\n new_post_count = 0\r\n if has_posts:\r\n for rss_post in rss_posts:\r\n should_skip = False\r\n\r\n if has_existing_posts:\r\n if non_empty_str(rss_post.post_id_hash) and rss_post.post_id_hash in found_posts_id_hashes:\r\n should_skip = True\r\n elif rss_post.link_hash in found_posts_link_hashes:\r\n should_skip = True # \"Link Hash found in existing records\"\r\n\r\n if not should_skip:\r\n new_post_count += 1\r\n yield rss_post\r\n\r\n feed_history = FeedHistory(id=uuid.uuid1(),\r\n feed_id=feed_obj.id,\r\n timestamp=dt.utcnow(),\r\n status=feed_obj.status_code,\r\n post_count=new_post_count,\r\n etag=feed_obj.etag)\r\n yield feed_history", "def fetch_feeds(self):\n feed_list = []\n rss_list = self.__data_adapter.fetch_rss()\n for rss in rss_list:\n rss_href = rss.get('url', None)\n rss_title = rss.get('title', '-')\n if rss_href is not None:\n feed = feedparser.parse(rss_href)\n feed_list.append({\n 'title':rss_title,\n 'href':rss_href,\n 'status': feed.get('status', 400),\n 'updated': feed.get('updated', None),\n 'updated_parsed': feed.get('updated_parsed', None),\n 'encoding': feed.get('encoding', None),\n 'bozo': feed.get('bozo', None),\n 'headers': feed.get('headers', {}),\n 'etag': feed.get('etag', None),\n 'version': feed.get('version', None),\n 'entries': feed.get('entries', []),\n 'namespaces': feed.get('namespaces', None)\n })\n\n return feed_list", "def parse_sitemap():\n xml_list = parse_xmls(SITEMAP_URL)\n\n all_url_list = []\n for xml_url in xml_list:\n all_url_list += parse_xml_url(xml_url)\n\n # print(len(all_url_list))\n # print(all_url_list)\n return all_url_list,len(all_url_list)", "def get_links_filter(self, keyword, number_links):\r\n podcast_data = []\r\n\r\n for entry in self.rss[0].entries:\r\n if keyword in entry.title: \r\n try:\r\n podcast_data = 
[entry.published, entry.title, \r\n entry.enclosures[0]['href'], \r\n self.rss[0].feed.title\r\n ]\r\n except IOError as err:\r\n print err\r\n except UnicodeDecodeError as err:\r\n print err\r\n else:\r\n self.podcast_list.append(podcast_data)\r\n if number_links != 0:\r\n if len(self.podcast_list) == number_links: \r\n return None\r\n return None", "def rss_fetch():\n items = {}\n\n def add_item(pubDate, title, link):\n nonlocal items\n idx = float(parsedate_to_datetime(pubDate).timestamp())\n while idx in items:\n idx = idx + 0.1\n dbg(\"Adding item: %11.1f \\\"%s\\\" %s\" % (idx, title, link))\n items[idx] = {}\n items[idx]['title'] = title\n items[idx]['link'] = link\n\n state = \"\" # state parser is in (\"\", \"item\", \"title\", \"link\", \"pubDate\")\n title = \"\" # Currently parsing this title.\n link = \"\" # \" \" \" link\n pubDate = \"\" # \" \" \" pubDate (index)\n\n def start_element(name, attrs):\n nonlocal state\n nonlocal title\n nonlocal link\n nonlocal pubDate\n dbg(\"Start: %s %s %s\" %(name, str(attrs), str((state, title, link, pubDate))))\n if state == \"\":\n if name == \"item\":\n state = \"item\"\n elif state == \"item\":\n if name == \"title\":\n state = \"title\"\n if title:\n prn(\"Two titles?\")\n sys.exit(1)\n elif name == \"link\":\n state = \"link\"\n if link:\n prn(\"Two links?\")\n sys.exit(1)\n elif name == \"pubDate\":\n state = \"pubDate\"\n if pubDate:\n prn(\"Two pubDates?\")\n sys.exit(1)\n\n\n def end_element(name):\n nonlocal state\n nonlocal title\n nonlocal pubDate\n nonlocal link\n dbg(\"End: %s %s\" % (name, str((state, title, link, pubDate))))\n if state == \"item\":\n if name == \"item\":\n if title == \"\":\n prn(\"No title at end item.\")\n sys.exit(1)\n if link == \"\":\n prn(\"No link at end item.\")\n sys.exit(1)\n if pubDate == \"\":\n prn(\"No pubDate at end item.\")\n sys.exit(1)\n else:\n add_item(pubDate, title, link)\n state = \"\"\n title = \"\"\n link = \"\"\n pubDate = \"\"\n elif state == \"title\":\n if name == \"title\":\n state = \"item\"\n elif state == \"link\":\n if name == \"link\":\n state = \"item\"\n elif state == \"pubDate\":\n if name == \"pubDate\":\n state = \"item\"\n\n def char_data(data):\n nonlocal state\n nonlocal title\n nonlocal pubDate\n nonlocal link\n dbg(\"Data: %s %s)\" % (str(data), str((state, title, link, pubDate))))\n if state == \"title\":\n title = title + data\n elif state == \"link\":\n link = link + data\n elif state == \"pubDate\":\n pubDate = pubDate + data\n\n\n p = xml.parsers.expat.ParserCreate(\"UTF-8\")\n\n p.StartElementHandler = start_element\n p.EndElementHandler = end_element\n p.CharacterDataHandler = char_data\n\n with urllib.request.urlopen('https://news.ycombinator.com/rss') as f:\n xml_file = b\"\"\n while True:\n r = f.read(255)\n if r:\n xml_file = xml_file + r\n else:\n break\n\n try:\n p.Parse(xml_file.decode(\"UTF-8\"), True)\n except:\n dbg(\"Writing fetched RSS feed to file...\")\n err_f = open(parse_error_output_file, \"ab\")\n err_f.write(b\"GET URL: \")\n err_f.write(f.geturl().encode(\"UTF-8\"))\n err_f.write(b\"\\nReturn Code: \")\n err_f.write((\"%d\\n\" % (f.getcode(), )).encode(\"UTF-8\"))\n err_f.write(b\"Meta Info:\\n\")\n err_f.write(f.info().as_bytes(unixfrom=True))\n err_f.write(b\"XML output:\\n\")\n err_f.write(xml_file)\n err_f.close()\n dbg(\"Done.\")\n raise\n\n return items", "def getValidUrlsFromHtml(self, content):\n a_tags = content.find_all('a')\n urls = []\n for a_tag in a_tags:\n url = a_tag.get('href')\n if self.isUrlValid(url):\n 
urls.append(self.getFilteredUrl(url.lower()))\n return urls", "def get_posts(url):\r\n feed = feedparser.parse(url)\r\n return feed.entries", "def get_link_list_from_request(request):\n\tquery = request.POST.get('link_list','')\n #Clear all whitespaces from textarea\n whitespace = re.compile(r'\\s+')\n\turls = query.split('\\n')\n\tlinks = list()\n\tfor url in urls:\n\t\turl = whitespace.sub('', url)\n \tif url and url.lower().find('rapidshare') is not -1:\n \tlinks.append(url)\n\n\treturn links", "def getUrls(url):\n f = requests.get(url)\n p = MyParser()\n p.feed(f.text)\n list_of_urls = p.output_list\n #deal with possible strange None values\n list_of_urls = [url for url in list_of_urls if url is not None]\n for url in list_of_urls:\n if 'http' not in url: list_of_urls.remove(url)\n return list_of_urls", "def _parse_past_documents(self, item):\n doc_list = []\n for doc in item.css('a'):\n doc_list.append({\n 'url': 'http://{}{}'.format(self.allowed_domains[0], doc.attrib['href']),\n 'note': doc.css('*::text').extract_first(),\n })\n return doc_list", "def check_for_new_links(feed):\n #read the feed\n feed_url = feed[\"feed_url\"]\n feed_data = feedparser.parse(feed_url)\n\n #parse out entries in the feed for the information we want\n entries = []\n for entry in feed_data.entries:\n parsed_entry = {}\n parsed_entry[\"title\"] = entry[\"title\"]\n parsed_entry[\"link\"] = entry[\"link\"]\n parsed_entry[\"published\"] = entry[\"published\"]\n parsed_entry[\"feed_url\"] = feed_url\n entries.append(parsed_entry)\n\n #check for new entries since the last known entry\n #chop off all entries starting at the last_seen_link\n if \"last_seen_link\" in feed:\n last_link = feed[\"last_seen_link\"]\n idx = -1\n for cidx in range(len(entries)):\n if entries[cidx][\"link\"] == last_link:\n idx = cidx\n break\n #else is a new link\n entries = entries[:idx]\n\n return list(reversed(entries))", "def get_article_webpage_list(self, news_feed_webpage):\n url_list = list()\n # Use HTML parser to extract appropriates urls\n lemonde_parser = LeMondeHTMLParser()\n lemonde_parser.feed(news_feed_webpage)\n partial_url_list = lemonde_parser.links\n\n\n # add the base url of the website if not present in the article url\n for url in partial_url_list:\n if not 'http' in url:\n url_list.append(self.base_url + url)\n else:\n url_list.append(url)\n\n return url_list", "def urls(self):\n header = \"URL,Linked From,Discovery Date\"\n gcsv = self.read()\n if gcsv[0] != header:\n raise Exception(\"Unexpected CSV format\")\n urls = set()\n for line in gcsv[1:]:\n # Get everything before the first commar (just the URL)\n line = line[:line.find(\",\")]\n urls.add(line)\n return urls", "def _get_dates():\n remote = os.path.join(BASE_URL, RSS_FEED)\n local = os.path.join(TMP, RSS_FEED)\n u..(remote, local)\n\n with open(local) as f:\n return PUB_DATE.findall(f.read())", "def extract_urls(genome):\n itemid = genome.get('metadata').get('identifier')\n urls = set([url for url in genome['urls'] if 'archive.org' not in url])\n db_urls_found(itemid, urls)", "def get_task_urls(self):\n # get the count of task\n task_data = ()\n try:\n task_db_con = pymysql.connect(**self._taskdb_config)\n with task_db_con.cursor() as task_cursor:\n task_cursor.execute('SELECT DISTINCT url FROM mv')\n task_data = task_cursor.fetchall()\n finally:\n task_db_con.close()\n\n try:\n result_db_con = pymysql.connect(**self._resultdb_config)\n with result_db_con.cursor() as result_cursor:\n # get all result\n result_cursor.execute('SELECT DISTINCT result FROM 
dytt8')\n data = result_cursor.fetchall()\n\n # compare task count with result count\n if len(task_data) < len(data):\n return map(lambda row_item: json.loads(row_item[0]).get(\"mv_url\"), data)\n return iter(task_data)\n finally:\n result_db_con.close()", "def get_news(news_url):\n news_final = []\n try:\n news_handler = urllib.urlopen(news_url)\n news = news_handler.read()\n news = nl2br(news)\n news = string.split(news, '<br/>')\n\n news_array = {}\n value = {}\n for newsweb in news:\n value = string.split(newsweb, '|')\n if len(value[0]) > 1:\n news_array[value[0]] = value[1]\n\n info = {}\n for k in news_array:\n info = k[0:int(k.find(\"http://\") - 1)]\n info = string.split(k, ' - ')\n news_final.append((info[0], info[1], news_array[k]))\n\n news_handler.close()\n except IndexError:\n pass\n except IOError:\n pass\n\n return news_final", "def parse_rss(link, mode):\n\n one_feed = []\n news_counter = 0\n app.logger.info(f'Parsing feed: {link}')\n # Get file from internet, open it with xml-parser\n rss = feedparser.parse(link)\n\n for entry in rss.entries:\n\n if mode == 'latest':\n news_item_date = get_timestamp(entry.published)\n\n # Stop reading RSS if current news is already older than time\n # when user last got the news feed\n if news_item_date < last_time_user_got_news:\n return one_feed\n\n post = {'title': entry.title,\n 'published': get_timestamp(entry.published)}\n\n # Try to get link to image from one of a place where it can be\n try:\n pic = entry.enclosures[0].href\n except(IndexError, AttributeError):\n pic = get_img_source(entry.summary)\n\n post['image'] = pic if pic else url_for('static',\n filename=\"400x400.jpg\")\n\n link = entry.link\n post['link'] = link\n domain_name = re.search(r'://(.+?)/', link).group(1)\n post['domain_name'] = domain_name if domain_name else 'unknown'\n\n one_feed.append(post)\n\n if mode != 'latest':\n return one_feed\n else:\n print('There are no new news at all.')\n return []", "def extract(msg):\n\n rgx = url_regex()\n found = re.findall(rgx, msg)\n links_set = Set()\n for link in found:\n # dunno if I like this. 
don't need seperate groups to make this work.\n links_set.add(link[0].strip())\n return list(links_set)", "def retrieveFeed(self, rss_url):\n url = 'http://{}'.format(rss_url)\n result = feedparser.parse(url)\n if result.status != 200:\n sys.stdout.write('request failed for retrieve this RSS ({})\\n'.format(url))\n else:\n self.storeFeeds(url, result['items'])", "def test_rss_is_parseable(self):\r\n [make_bookmark() for i in range(10)]\r\n transaction.commit()\r\n\r\n res = self.app.get('/rss')\r\n\r\n self.assertEqual(\r\n res.status,\r\n \"200 OK\",\r\n msg='recent status is 200, ' + res.status)\r\n\r\n # http://packages.python.org/feedparser/\r\n # introduction.html#parsing-a-feed-from-a-string\r\n parsed = feedparser.parse(res.body)\r\n links = []\r\n for entry in parsed.entries:\r\n links.append({\r\n 'title': entry.title,\r\n 'category': entry.category,\r\n 'date': time.strftime('%d %b %Y', entry.updated_parsed),\r\n 'description': entry.description,\r\n 'link': entry.link,\r\n })\r\n\r\n self.assertTrue(links, 'The feed should have a list of links.')\r\n self.assertEqual(10, len(links), 'There are 10 links in the feed.')\r\n\r\n sample_item = links[0]\r\n self.assertTrue(sample_item['title'], 'Items have a title.')\r\n self.assertTrue(\r\n sample_item['link'],\r\n 'Items have a link to reach things.')\r\n self.assertTrue(\r\n 'description' in sample_item,\r\n 'Items have a description string.')", "def get_links_from_unbounce(html):\n\n import bs4\n\n soup = bs4.BeautifulSoup(html)\n\n return set([link.get('href') for link in soup.find_all(\"a\") if link.string == \"RSS\"])", "def _parse_sources(self, item):\n return [{'url': item['url']}]", "def urls(self) -> list[str]:\r\n ...", "def request_rss(self, url):\n return feedparser.parse(url)", "def _retrieveFeed(self):\n url = self.url\n if url!='':\n self._last_update_time_in_minutes = time.time()/60\n self._last_update_time = DateTime()\n d = feedparser.parse(url)\n if getattr(d, 'bozo', 0) == 1 and not isinstance(d.get('bozo_exception'),\n ACCEPTED_FEEDPARSER_EXCEPTIONS):\n self._loaded = True # we tried at least but have a failed load\n self._failed = True\n return False\n self._title = d.feed.title\n self._siteurl = d.feed.link\n self._items = []\n for item in d['items']:\n try:\n link = item.links[0]['href']\n itemdict = {\n 'title': item.title,\n 'url': link,\n 'summary': item.get('description', ''),\n }\n if hasattr(item, \"updated\"):\n try:\n itemdict['updated'] = DateTime(item.updated)\n except DateTimeError:\n # It's okay to drop it because in the\n # template, this is checked with\n # ``exists:``\n pass\n except AttributeError:\n continue\n self._items.append(itemdict)\n self._loaded = True\n self._failed = False\n return True\n self._loaded = True\n self._failed = True # no url set means failed\n return False # no url set, although that actually should not really happen", "def extract_news(parser):\r\n news_list = []\r\n\r\n tbl_list = parser.table.findAll('table')\r\n tr_list = tbl_list[1].findAll('tr')\r\n for i in range(0, 90, 3):\r\n new = dict()\r\n new['author'] = tr_list[i + 1].a.text\r\n new['points'] = tr_list[i + 1].span.text[:-6]\r\n comments = tr_list[i + 1].findAll('a')\r\n new['comments'] = comments[len(comments) - 1].text[:-9]\r\n if new['comments'] == '':\r\n new['comments'] = '0'\r\n new['title'] = tr_list[i].findAll('a')[1].text\r\n a_mas = List[str]\r\n a_mas = tr_list[i].findAll('a')\r\n new['url'] = a_mas[len(a_mas) - 1].text\r\n if new['url'] == new['title']:\r\n new['url'] = ''\r\n 
news_list.append(new)\r\n\r\n return news_list", "def _parse_links(self, item, start, links_list):\n result_list = []\n target_str_1 = start.strftime(\"%m-%d-%Y\").replace(\" 0\", \" \")\n target_str_2 = start.strftime(\"%m-%d-%y\").replace(\" 0\", \" \")\n for item in links_list:\n if item[\"date\"] in target_str_1 or item[\"date\"] in target_str_2:\n new_dict = {}\n new_dict[\"href\"] = item[\"href\"]\n new_dict[\"title\"] = item[\"title\"]\n result_list.append(new_dict)\n return result_list", "def _parse_links(self, item) -> list:\n # TODO This would be a \"nice to have\" but is not necessary right now.\n return [{\"href\": \"\", \"title\": \"\"}]", "def get_rss(address, website):\n #print address\n try:\n results = pattern.web.Newsfeed().search(address, count=100,\n cached=False, timeout=30)\n logger.debug('There are {} results from {}'.format(len(results),\n website))\n \n #print \"Results found\"\n except Exception as e:\n print 'There was an error. Check the log file for more information.'\n logger.warning('Problem fetching RSS feed for {}. {}'.format(address,\n e))\n results = None\n\n return results", "def get_listing():\n\n result_items = []\n\n rss_data = urllib.request.urlopen(ActivityURL)\n rss_xml = xml.dom.minidom.parse(rss_data)\n\n channel = rss_xml.getElementsByTagName('channel')[0]\n items = channel.getElementsByTagName('item')\n for item in items:\n # Most of these are hackish, but a result of using the RSS\n # feed instead of something nicer like a JSON API. This\n # listing method is specifically isolated so we can easily\n # swap out the implementation later.\n asset_id = item.getElementsByTagName('guid')[0].childNodes[0].data.split('/')[-1]\n img_url = item.getElementsByTagName('description')[0].childNodes[0].data\n # Get part after start of img src attribute\n split_href = img_url.split('src=\"', 1)[1]\n # Get part before closing quote\n img_url = split_href.split('\"', 1)[0]\n # FIXME\n zip_url = ''\n result_items.append( Asset(asset_id, img_url, zip_url) )\n\n return result_items", "def FindArticles(sesh):\n\n rssfeeds = FindRSSFeeds()\n\n found = ScraperUtils.FindArticlesFromRSS( rssfeeds, u'dailymail', ScrubFunc )\n return found", "def load_links(self) -> Tuple[List[str], List[str]]:\n\n with open(URL_FILE, 'r') as txt_file:\n lines = txt_file.read().split()\n\n urls = []\n for line in lines:\n urls.append(line.split(',')[0])\n \n return lines, urls", "def urls_in_url(url):\n global url_to_check_manually\n try:\n \"\"\"Return all URLs when given an url\"\"\"\n html = urlopen(url)\n bsObj = BeautifulSoup(html.read(), \"lxml\")\n list_url = []\n for link in bsObj.find_all('a'):\n sublink = link.get('href')\n try:\n list_url.append(str(sublink))\n except:\n pass\n return list_url\n except:\n print('Impossible to open URL :', url)\n url_to_check_manually.append(url)\n return []", "def update_feeds(self) -> tuple[set[str], set[str]]:\n\n updated_urls = set()\n error_urls = set()\n self.sync_reader()\n with make_reader(self.reader_db_file) as reader:\n for (url, value) in reader.update_feeds_iter():\n if isinstance(value, UpdatedFeed):\n logger.info(f'Got updated feed for {url} with {value.new} new entries '\n f'and {value.modified} updated entries.')\n if value.new:\n updated_urls.add(url)\n elif isinstance(value, ReaderError):\n logger.error(f'Got error when updating {url}')\n error_urls.add(url)\n return updated_urls, error_urls", "def get_urls():\r\n return []", "def article_extractor(rss_feed_link):\n user_agent = {\"user-agent\": \"Mozilla/5.0 (Windows NT 6.2; 
Win64;\\\n x64; rv:16.0.1) Gecko/20121011 Firefox/16.0.1\"}\n try:\n feed = requests.get(rss_feed_link, headers=user_agent)\n except requests.exceptions.ConnectionError:\n print(\"No internet connection\")\n exit()\n\n dirty_content = BeautifulSoup(feed.text, \"xml\")\n return dirty_content", "def _discover_url(result):\n # abuse feedparser result to get link tags from html page\n try:\n links = result.feed.links\n except AttributeError:\n links = []\n if not isinstance(links, list):\n links = []\n\n # find link urls that appear to be feeds\n discovered_feeds = [\n link.href for link in links if\n link.get('rel', None) == \"alternate\" and\n link.get('type', None) in FEED_MIME_TYPES and\n len(link.get('href', '')) > 0\n ]\n\n if len(discovered_feeds) == 0:\n _fail(None, \"Failed to download or parse feed\") # XXX\n\n return discovered_feeds[0]", "def add_rss(url):", "def init_urls(self):\n url = 'http://www.lagou.com/'\n for ip_info in self.col.find(no_cursor_timeout=True):\n ip, port = ip_info['ip'], ip_info['port']\n if ip and port:\n self.urls.append((url, ip, port)) # tuple", "def process_links():\n from pymongo import Connection\n conn = Connection()\n db = conn['mchs']\n# db.drop_collection('svodki')\n coll = db['svodki']\n coll.ensure_index(\"url\")\n f = open('alllinks.csv', 'r')\n for l in f:\n parts = l.strip().split('\\t')\n if len(parts) < 4: continue\n year, month, day, url = parts\n o = coll.find_one({'url' : url})\n if o is not None: \n print url, 'passed'\n continue\n u = urllib2.urlopen(url)\n data = u.read()\n u.close()\n data = data.decode('cp1251')\n record = {'year' : int(year), 'month' : int(month), 'day' : int(day), 'url' : url, 'text' : data.encode('utf8')}\n coll.save(record)\n # MCHS site is badly designed and it could block us if we will download pages too often\n time.sleep(5)\n print url, 'processed'", "def get_article_URLs(sourceURL):\n sourceURL = sourceURL.encode().decode()\n articleList = []\n soup = bs.BeautifulSoup(urllib.request.urlopen(sourceURL),'lxml')\n #remove any tables\n for table in soup.find_all(\"table\"):\n table.extract()\n #remove any special wiki pages\n for citation in soup.find_all(href=re.compile(\"^/wiki/Wikipedia:|^/wiki/User:|^/wiki/File:|^/wiki/MediaWiki:|^/wiki/Template:|^/wiki/Help:|^/wiki/Category:|^/wiki/Portal:|^/wiki/Draft:|^/wiki/TimedText:|^/wiki/Module:|^/wiki/Special:\")):\n citation.extract()\n a = soup.find('div', {'class':'mw-parser-output'}).find_all('a', href=re.compile(\"^/wiki/\"))\n for link in a:\n articleList.append(link['href'])\n return (articleList)", "def process(url):\n feed = feedparser.parse(url)\n entries = feed.entries\n ret = []\n for entry in entries:\n print entry\n guid = entry.guid\n title = translate_html(entry.title)\n link = entry.link\n summary = translate_html(entry.summary)\n try:\n subject = translate_html(entry.tags[0]['term'])\n except AttributeError:\n subject = \"\"\n newsStory = NewsStory(guid, title, subject, summary, link)\n ret.append(newsStory)\n return ret", "def process_textfile(inf):\n list_of_urls_to_check = [line.rstrip() for line in inf.readlines()]\n return list_of_urls_to_check", "def _get_items_for_parsing(self):\n count_posts = self.posts_number if 0 < self.posts_number < self.COUNT_POSTS_MAX else self.COUNT_POSTS_MAX\n pastes_page_content = self._get_pastes_page_content()\n tree = html.fromstring(pastes_page_content)\n items = tree.xpath('//table[@class=\"maintable\"]/tr/td[1]/a')\n return items[:count_posts] or []", "def get_urls(self, data):\n data = json.loads(data)\n 
urls = []\n for article in data['articles']:\n urls.append(article['url'])\n return urls", "def listingURLs(soup):\n\n #Get URLs\n itemListing = soup.find_all(class_=\"user-ad-row link link--base-color-inherit link--hover-color-none link--no-underline\")\n itemListing += soup.find_all(class_=\"user-ad-row user-ad-row--featured-or-premium link link--base-color-inherit link--hover-color-none link--no-underline\")\n itemListing += soup.find_all(class_=\"user-ad-row user-ad-row--premium user-ad-row--featured-or-premium link link--base-color-inherit link--hover-color-none link--no-underline\")\n #Create list\n urlList = [i['href'] for i in itemListing]\n return urlList", "def get_url(soup):\r\n \"\"\"criteria: any(s in a[\"title\"] for s in ('新增', '確診', '肺炎')\"\"\"\r\n url_list = []\r\n for a in soup.find_all('a', {\"href\": re.compile(\"typeid=9$\")}):\r\n if any(s in a[\"title\"] for s in ('新增', '確診', '肺炎')):\r\n url = \"https://www.cdc.gov.tw\" + a['href']\r\n url_list.append(url)\r\n return url_list", "def get_all_category_urls():\n\treturn execute_sql(\"SELECT category_url FROM categories\").fetchall()", "def data_collector(self, n, url, ret):\n try:\n html = urllib2.urlopen(url).read()\n soup = BeautifulSoup(html)\n ret[n] = [soup.title.string, url, html[0:100]]\n except:\n ret[n] = [\"Error\", url, \"Error\"]", "def _extract_links(self, publication, feed_self_url):\n self._logger.debug(\n \"Started extracting links from {0}\".format(encode(publication.links))\n )\n\n links = []\n\n for link in publication.links:\n link_metadata = self._extract_link(link, feed_self_url)\n links.append(link_metadata)\n\n description_link = self._extract_description_link(publication)\n if description_link:\n links.append(description_link)\n\n image_links = self._extract_image_links(publication, feed_self_url)\n if image_links:\n links.extend(image_links)\n\n self._logger.debug(\n \"Finished extracting links from {0}: {1}\".format(\n encode(publication.links), encode(links)\n )\n )\n\n return links", "def _parse_links(self, item):\n links = []\n for link in item.css(\"a\"):\n links.append(\n {\n \"href\": link.attrib[\"href\"],\n \"title\": \" \".join(link.css(\"::text\").getall()),\n }\n )\n return links", "def getLinks(link):\n source = requests.get(link).text\n soup = BeautifulSoup(source, 'lxml')\n rows = soup.find_all(class_ = 'column-1') #select which column \n list_of_links = []\n \n for row in rows[1:]: #rows[1:] is used in case first row is a title row (ie there is no useful data here)\n name = row.find('a')\n link = name.attrs['href'] #the data I'm trying to extract\n list_of_links.append(link)\n return list_of_links", "def check_exists_links(self):\n\n # get all non-own articles\n articles_from_external_resourse = self.articles_from_external_resourse()\n\n # return true if it not\n if not articles_from_external_resourse.count():\n return True\n\n # if found broken link\n # keep all the articles with broken links and return false,\n # otherwise return true\n article_with_broken_links = list()\n for article in articles_from_external_resourse:\n try:\n urllib.request.urlopen(article.source)\n except:\n article_with_broken_links.append(article)\n if article_with_broken_links:\n return (False, article_with_broken_links)\n return True", "def get_on_progress_domains(cursor):\r\n try:\r\n cursor.execute(\"select url from on_progress_domains where is_scrapped = 0\")\r\n return cursor.fetchall()\r\n except:\r\n raise RuntimeError(\"An Exception happened with the Database, make sure you are connected\")", 
"def extract_articles(self, parsed_xml):\n\n # Iterates over every item (article) in xml\n for item in parsed_xml.xpath(\"//item\"):\n\n article = {}\n\n\n article['title'] = self.get_text_or_attr(item, 'title')\n\n\n # The article's categories must be always a list, even if it has\n # only one element.\n categories = self.get_text_or_attr(item, 'category')\n\n if isinstance(categories, str):\n categories = [categories]\n\n article['categories'] = categories\n\n\n url = self.get_text_or_attr(item, 'feedburner:origLink')\n article['url'] = self.remove_query(url)\n\n self.article_url = article['url']\n\n\n # If article's URL is already stored, don't parse it again\n if Article.objects.filter(url=article['url']).count() > 0:\n continue\n\n\n # It is interesting to have the publication date as a `dateutil`\n # object, so we can do whatever manipulation we want.\n pub_date = self.get_text_or_attr(item, 'pubDate')\n article['date'] = self.parse_datetime_passing_errors(pub_date)\n\n\n # Get the author attribute and tries to fetch informations about\n # him/her. An article can have more than one author; on techcrunch's\n # feed, they are separated by a comma.\n author_names = self.get_text_or_attr(item, 'dc:creator').split(',')\n article['authors'] = []\n\n for i, name in enumerate(author_names):\n article['authors'] += [self.get_author(name, i)]\n\n\n # Tries to find the article's thumbnail url\n thumb = self.get_text_or_attr(item, 'media:thumbnail', 'url')\n if thumb and thumb[0]:\n article['thumb'] = self.remove_query(thumb[0])\n\n\n # Gets the article's description and strip all html tags from it\n content = self.clear_text(item.xpath('description'))\n content = content.strip(' Read More').strip('&nbsp;').strip()\n\n\n article['content'] = content\n\n\n yield article", "def check_page_links():\n\tprint(\"\\nChecking page's link\")\n\treturn [check_link_is_valid(link) for link in get_page_links()]", "def extract_URLs(self, input_file_name):\n file = open(input_file_name, 'r')\n lines = []\n for line in file.readlines():\n # Don't add empty lines.\n if len(line.strip()) > 0:\n lines.append(line.strip())\n return lines", "def get_urls(clean_text):\n for text in clean_text:\n urls = re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+',\n text)\n return urls", "def _parse_feed(self,feed): \n meta=[]\n for entry in feed:\n item_meta=self._parse_entry(entry)\n item_meta['video-id']='0'\n meta.append(item_meta)\n self._logger.info('%s videos were founded and parsed at Megavideo',len(meta)) \n return meta", "def get_site_feeds(url):\n url: str = remove_subdomains(url)\n\n site: SiteHost = db_client.query_site_feeds(url)\n\n if site:\n try:\n site_schema = ExternalSiteSchema()\n result = site_schema.dump(site)\n except ValidationError as err:\n app.logger.warning(\"Dump errors: %s\", err.messages)\n return abort(500)\n\n return jsonify(result)\n else:\n response = jsonify({\"message\": f\"No feed information saved for url {url}\"})\n response.status_code = 402\n return response", "def getArticleURLS(base_url, headers):\n \n url_links = []\n for url in base_url:\n try:\n #retrieve webpage from the url\n page = requests.get(url, headers=headers).text\n\n #use beautifulSoup to scrap the page\n soup = BeautifulSoup(page, 'lxml')\n\n links = []\n #loop through the page to collect anchor tags and retrieve the urls\n for a in soup.find_all(href=True):\n links.append(a['href'])\n # titles.append(a.text.encode('ascii',errors='replace').replace(b'?', b' 
').decode('utf8'))\n\n #clean collected urls\n final_links = [link for link in links if '/News/' in link]\n clean_links = [link for link in final_links if not 'News/688334-688334' in link]\n clean_urls = ['https://www.monitor.co.ug' + link for link in clean_links if not 'https://www.monitor.co.ug' in link]\n cleaned_links = list(OrderedDict.fromkeys(clean_urls))\n url_links += cleaned_links\n except requests.exceptions.ConnectionError as error:\n return error\n\n #patterns to filter base urls with headlines only\n patterns = ['/News/688324-','/News/National/688334-','/News/Education/688336-',\n '/News/Insight/688338-','/News/World/688340-','/News/photos/3286528-']\n result_list = [row for row in url_links if not any(p in row for p in patterns)]\n\n return json.dumps(result_list)", "def _get_current_rss_items(feed_path: str) -> List[str]:\n if os.path.isfile(feed_path):\n with open(feed_path) as xfd:\n feed_str = xfd.read()\n items = ['<item>{}'.format(ip) for ip in feed_str.split('<item>')[1:]]\n if len(items) > 0:\n items[-1] = items[-1].replace('</channel>', '').replace('</rss>', '')\n return items\n return []", "def get_url():\n urls = re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+',\n new_tweet)\n return urls", "def get_feed(self):\n\t\turl=\"http://news.google.com/news?ned=%s&topic=%s&output=rss\"\n\t\tlinks=[{\"ned\":\"us\", \"type\":\"h\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"w\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"n\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"n\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"n\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"n\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"nz\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"sa\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"n\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"n\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"b\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"t\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"m\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"s\"},\n\t\t\t {\"ned\":\"us\", \"type\":\"e\"},\n\t\t\t ]\n\t\tfeed = links[self.get_input()]\n\t\treturn url%(feed[\"ned\"],feed[\"type\"])", "def codeup_blog_urls():\n \n url1 = 'https://codeup.com/codeup-news/codeup-launches-first-podcast-hire-tech/' \n\n url2 ='https://codeup.com/tips-for-prospective-students/why-should-i-become-a-system-administrator/'\n \n url3 ='https://codeup.com/codeup-news/codeup-candidate-for-accreditation/'\n \n url4 ='https://codeup.com/codeup-news/codeup-takes-over-more-of-the-historic-vogue-building/'\n \n url5 ='https://codeup.com/codeup-news/inclusion-at-codeup-during-pride-month-and-always/'\n \n return [url1, url2, url3, url4, url5]", "def _parse_links(self, item, response):\n links = []\n for link in item.css(\"a\"):\n links.append(\n {\n \"title\": \" \".join(link.css(\"*::text\").extract()).strip(),\n \"href\": response.urljoin(link.attrib[\"href\"]),\n }\n )\n return links", "def _parse_links(self, item, response):\n links = []\n for link in item.css(\"a\"):\n links.append(\n {\n \"title\": \" \".join(link.css(\"*::text\").extract()).strip(),\n \"href\": response.urljoin(link.attrib[\"href\"]),\n }\n )\n return links", "async def get_article_links(self):\n urls = []\n for page in range(self._start, self._end+1):\n urls.append(self._searchURL + str(page))\n result_list = await self._connect(urls)\n\n self._urls = []\n hares_links = []\n for result in result_list:\n soup = result[1]\n search_links = soup.find_all(class_='search-title')\n article_links = re.findall(r'url=(.*?)\\\"', str(search_links))\n for l in article_links:\n l = 
unquote(l)\n if 'hare48.pixnet.net' in l:\n hares_links.append(l)\n else:\n self._urls.append(l)\n self._urls.extend(await self._transform_hares(hares_links))", "def fetch_article_list(self, url):\n print(url)\n\n r = requests.get(url, headers=headers, timeout=10)\n html = r.text\n time.sleep(1)\n\n if r.status_code is not 200:\n print('Server dinied. Status:[%s].'%r.status_code)\n return\n\n # local data test\n #with open('./dataset/sina-blog-list.html', 'r') as f:\n # html = f.read()\n\n #print(html)\n\n soup = BeautifulSoup(html, 'html5lib')\n tags = soup.select('div[class=articleList] > div[class~=articleCell] > p > span[class=atc_title] > a')\n\n for t in tags:\n print('Appened: '+t['href'])\n self.article_urls.append(t['href'])\n\n # Get the url of next blog-list page\n nxpage = soup.select('div[class=SG_page] > ul > li[class=SG_pgnext] > a')\n if len(nxpage) > 0:\n #print ('Next list page: '+nxpage[0]['href'])\n self.fetch_article_list(nxpage[0]['href'])\n else:\n print('Have reached to the botom of blog lists.')\n\n\n # backup lists to local file\n with open(self.path+'/blog-lists.txt', 'w') as f:\n f.write('\\n'.join(self.article_urls))", "def fetch(feed):\n # Fetch the feed data.\n data = feedparser.parse(feed.ext_url)\n new_articles = []\n\n # If the `bozo` value is anything\n # but 0, there was an error parsing (or connecting) to the feed.\n if data.bozo:\n # Some errors are ok.\n if not isinstance(data.bozo_exception, feedparser.CharacterEncodingOverride) and not isinstance(data.bozo_exception, feedparser.NonXMLContentType):\n raise data.bozo_exception\n\n for entry in data.entries:\n\n # URL for this entry.\n url = entry['links'][0]['href']\n\n # Check for an existing Article.\n # If one exists, skip.\n if Article.objects(ext_url=url).first():\n continue\n\n data = extractor.extract(url, existing_data=entry)\n\n if data is None:\n continue\n\n # Secondary check for an existing Article,\n # by checking the title and source.\n existing = Article.objects(title=data['title']).first()\n if existing and existing.feed.source == feed.source:\n continue\n\n data['feed'] = feed\n\n article = Article(**data)\n article.save()\n new_articles.append(article)\n\n return new_articles", "def getURLs():", "def _findSupplUrls(self, landPage):\n urlParts = ['/suppdata/']\n for urlPart in urlParts:\n suppUrls = findLinksWithUrlPart(landPage, urlPart)\n if len(suppUrls) > 0:\n return suppUrls\n\n return []", "def scrape_article_links(year: int) -> List[str]:\n # Take into considerations leap years and days when no articles are published\n pass", "def get_news(rss_feed):\r\n\r\n class _CurrentData(object):\r\n \"\"\"Class holding a set of current attributes.\"\"\"\r\n item = None\r\n text = None\r\n\r\n def _start_element_handler(name, attrs):\r\n \"\"\"Handle XML start-elements.\"\"\"\r\n if name == 'item':\r\n # Allocate a new item.\r\n current.item = NewsItem()\r\n\r\n def _end_element_handler(name):\r\n \"\"\"Handle XML end-elements.\"\"\"\r\n if name == 'item':\r\n news_items.append(current.item)\r\n elif name in ('title', 'description', 'link', 'category'):\r\n try:\r\n setattr(current.item, name, current.text)\r\n except AttributeError:\r\n # The parser has run into a non-news item.\r\n pass\r\n\r\n def _char_data_handler(data):\r\n \"\"\"Handle XML element character data.\"\"\"\r\n current.text = data\r\n\r\n news_items = list()\r\n current = _CurrentData()\r\n\r\n parser = expat.ParserCreate()\r\n parser.StartElementHandler = _start_element_handler\r\n parser.EndElementHandler = 
_end_element_handler\r\n parser.CharacterDataHandler = _char_data_handler\r\n\r\n news_handle = urllib2.urlopen(rss_feed)\r\n xml_data = news_handle.read()\r\n \r\n parser.Parse(xml_data)\r\n\r\n return news_items", "def extract_news(parser):\n news_list = []\n\n titles = parser.find_all(\"tr\", class_=\"athing\")\n subtext = parser.find_all(\"td\", class_=\"subtext\")\n\n for i in range(len(titles)):\n x = titles[i].find_all(\"td\", class_=\"title\")[1]\n title = x.a.text\n url = x.a[\"href\"]\n c = subtext[i].find_all(\"a\")[4]\n if c.text == \"discuss\":\n comments = 0\n else:\n comments = c.text\n author = subtext[i].find(\"a\", class_=\"hnuser\").get_text()\n point = subtext[i].find(\"span\", class_=\"score\").text\n points = point.split(' ')[0]\n\n news_list.append({\"author\": author, \"comments\": comments, \"points\": points, \"title\": title, \"url\": url})\n\n return news_list", "def get_urls(text):\n url_regex = r\"https?:\\/\\/(?:www\\.)?[-a-zA-Z0-9@:%._\\+~#=]{2,256}\\.[a-z]{2,6}\\b(?:[-a-zA-Z0-9@:%_\\+.~#?&//=]*)\"\n url_matches = re.findall(url_regex, text)\n\n if url_matches is None or len(url_matches) == 0:\n return []\n \n # Remove duplicate URLs/submissions. This can happen if the actual hyperlink is used as the comment body \n # TODO: This is a messy workaround. It would be better to use an HTML parser or something to grab\n # the actual URL from a link, and ignore the text itself.\n unique_urls = []\n seen_ids = []\n\n for url in url_matches:\n try:\n submission_id = praw.models.Submission.id_from_url(url)\n if submission_id not in seen_ids:\n # The URL is a submission that hasn't been encountered yet\n unique_urls.append(url)\n seen_ids.append(submission_id)\n\n except praw.exceptions.ClientException as e:\n # The URL isn't to a reddit submission, so just add it if it's unique\n if not url in unique_urls:\n unique_urls.append(url)\n\n return unique_urls", "def scan(link):\n try:\n r = requests.get(link)\n if r.status_code == 200:\n soup = BeautifulSoup(r.content, \"html.parser\")\n return soup.find_all(\"a\")\n except ConnectionError as e:\n print(\"Connection error occurred while trying to reach the page\")\n print(e)\n return []", "def listFeeds(key):\n # read and parse config, collect each url\n filepath = confighome+\"config\"\n if fileAccessible(filepath,'r'):\n with open(filepath,mode='r', encoding='utf-8') as f:\n jconfig = json.load(f)\n\n # for each url pull the last 5 most recent posts and print them\n str=\"\"\n for url in jconfig[1]['feeds']:\n f = feedparser.parse (url['url'])\n if 'title' not in f.feed:\n print (\"::title not found in url:\",url['url'])\n else:\n str += f.feed.title + \"\\n\" + url['url'] + \"\\n\"\n\n # gimi five\n count=1\n blockcount=1\n for post in f.entries:\n if count % 5 == 1:\n str += post.title +\" - \" + post.link +\"\\n\"\n\n count+=1\n\n str=str+\"\\n\"\n\n if key==0:\n print (str)\n if key==1:\n return str\n else:\n print(\"::unable to read\")\n sys.exit()" ]
[ "0.7014149", "0.65106124", "0.6413672", "0.63931924", "0.6380389", "0.6300671", "0.6237712", "0.62265265", "0.62018305", "0.6194729", "0.6165013", "0.61139023", "0.6107573", "0.6098303", "0.60845596", "0.6067579", "0.60539395", "0.6052752", "0.60521805", "0.58949435", "0.58804005", "0.58480835", "0.58381045", "0.58330977", "0.58272684", "0.58104765", "0.5806272", "0.579874", "0.5785892", "0.5775375", "0.57528096", "0.57494307", "0.57489395", "0.57454133", "0.5735558", "0.5728781", "0.57278407", "0.57188714", "0.57058454", "0.5679633", "0.565995", "0.5654212", "0.5653292", "0.56481564", "0.5635786", "0.563496", "0.56283027", "0.5618772", "0.56081694", "0.5605175", "0.560454", "0.5591229", "0.5579797", "0.556803", "0.5551803", "0.55418503", "0.55156016", "0.5511851", "0.55084014", "0.55059606", "0.5500567", "0.5496237", "0.5487512", "0.5486701", "0.54800546", "0.54722464", "0.54692364", "0.54686564", "0.5465345", "0.54311204", "0.541847", "0.5414665", "0.54124886", "0.5406237", "0.5384679", "0.5382357", "0.5373674", "0.53723073", "0.5369109", "0.53666013", "0.53652763", "0.53615785", "0.5359258", "0.5357849", "0.53455216", "0.5341492", "0.5334186", "0.5331674", "0.5331674", "0.5329632", "0.53219795", "0.5315739", "0.53145885", "0.53145075", "0.5314435", "0.5314164", "0.52935666", "0.5292424", "0.52899855", "0.5288806" ]
0.64719254
2
generate the CUSPARSE FFI definition
def generate_cffi_cdef(
        cuda_include_path=cuda_include_path, cusparse_header=cusparse_header,
        cffi_out_file=None):
    with open(cusparse_header, 'r') as f:
        cusparse_hdr = f.readlines()
    # in some version cusparse_v2.h just points to cusparse.h, so read it
    # instead
    for line in cusparse_hdr:
        # if v2 header includes cusparse.h, read that one instead
        if line.startswith('#include "cusparse.h"'):
            cusparse_header = os.path.join(cuda_include_path, 'cusparse.h')
            with open(cusparse_header, 'r') as f:
                cusparse_hdr = f.readlines()
    cusparse_hdr = [_remove_comment(l) for l in cusparse_hdr]

    # skip lines leading up to first typedef
    for idx, line in enumerate(cusparse_hdr):
        if line.startswith('typedef'):
            start_line = idx
            break

    # skip closing #if defined logic
    for idx, line in enumerate(cusparse_hdr[start_line:]):
        if line.startswith('#if defined(__cplusplus)') or \
           'Define the following symbols for the new API' in line:
            # second match is to avoid CFFI compilation error due to the final
            # define statements in v4.1 through v5.5
            end_line = start_line + idx
            break

    # define other data types needed by FFI
    # ... will be filled in from cuComplex.h by the C compiler
    cffi_cdef = """
    typedef struct CUstream_st *cudaStream_t;

    typedef struct float2 {
        ...;
    } float2;
    typedef float2 cuFloatComplex;
    typedef float2 cuComplex;

    typedef struct double2 {
        ...;
    } double2;
    typedef double2 cuDoubleComplex;

    typedef float cufftReal;
    typedef double cufftDoubleReal;

    typedef cuComplex cufftComplex;
    typedef cuDoubleComplex cufftDoubleComplex;

    typedef enum cudaDataType_t
    {
        CUDA_R_16F= 2,   // real as a half
        CUDA_C_16F= 6,   // complex as a pair of half numbers
        CUDA_R_32F= 0,   // real as a float
        CUDA_C_32F= 4,   // complex as a pair of float numbers
        CUDA_R_64F= 1,   // real as a double
        CUDA_C_64F= 5,   // complex as a pair of double numbers
        CUDA_R_8I= 3,    // real as a signed char
        CUDA_C_8I= 7,    // complex as a pair of signed char numbers
        CUDA_R_8U= 8,    // real as a unsigned char
        CUDA_C_8U= 9,    // complex as a pair of unsigned char numbers
        CUDA_R_32I= 10,  // real as a signed int
        CUDA_C_32I= 11,  // complex as a pair of signed int numbers
        CUDA_R_32U= 12,  // real as a unsigned int
        CUDA_C_32U= 13   // complex as a pair of unsigned int numbers
    } cudaDataType;

    typedef enum libraryPropertyType_t  //GRL: added this for cuda 8.0
    {
        MAJOR_VERSION,
        MINOR_VERSION,
        PATCH_LEVEL
    } libraryPropertyType;

    /* definitions from cusparse header below this point */
    """

    cffi_cdef += ''.join(cusparse_hdr[start_line:end_line])

    """
    don't use the _v2 versions of the function names defined in CUDA v4.1
    through v5.5
    """
    cffi_cdef = cffi_cdef.replace('_v2(', '(')

    if os.name == 'nt':  # Win
        cffi_cdef = cffi_cdef.replace('CUSPARSEAPI', '__stdcall')
    else:  # posix, etc
        cffi_cdef = cffi_cdef.replace('CUSPARSEAPI', '')

    if cffi_out_file is not None:
        # create specified output directory if it doesn't already exist
        out_dir = os.path.dirname(cffi_out_file)
        if out_dir and not os.path.exists(out_dir):
            os.makedirs(out_dir)
        with open(cffi_out_file, 'w') as f:
            f.write(cffi_cdef)
    return cffi_cdef
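For context on how a cdef string like the one returned above is typically consumed: because the struct declarations use cffi's "...;" placeholder syntax, the declarations are partial and must be verified against the real CUDA headers (cffi API mode) rather than loaded purely via dlopen. The sketch below is a minimal, illustrative pairing with cffi's verify(); the include/library paths, the output filename, and the assumption that the cusparseStatus_t enum ends up in the parsed header section are placeholders and inferences, not values taken from this record.

import cffi

# Build the FFI object from the generated declarations (illustrative only;
# the header/library locations below are assumptions, not part of the record).
ffi = cffi.FFI()
ffi.cdef(generate_cffi_cdef(cffi_out_file='cusparse_cffi_autogen.h'))

lib = ffi.verify(
    '#include <cusparse_v2.h>',
    include_dirs=['/usr/local/cuda/include'],
    libraries=['cusparse'],
    library_dirs=['/usr/local/cuda/lib64'],
)

# Exercise the wrapped API: create a handle and query the library version.
handle_ptr = ffi.new('cusparseHandle_t *')
status = lib.cusparseCreate(handle_ptr)
assert status == lib.CUSPARSE_STATUS_SUCCESS
version_ptr = ffi.new('int *')
lib.cusparseGetVersion(handle_ptr[0], version_ptr)
print('cuSPARSE version:', version_ptr[0])
lib.cusparseDestroy(handle_ptr[0])

Newer cffi releases favor set_source() plus compile() for out-of-line API mode; the same generated cdef string would be passed to ffi.cdef() unchanged in that workflow.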
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_c_header(self):\n res = \\\n\"\"\"PyThreadState* ___madz_LANG_python_thread_state; //Holds Thread State for this interpreter\nPyObject *___madz_LANG_python_wrapper_module; //Hold Pointer to the _madz.py file representing this plugin\ntypedef struct{{\n{function_pointers}\n}}___madz_LANG_python_TYPE_;\n___madz_LANG_python_TYPE_ ___madz_LANG_python_OUTPUT;\nvoid ___madz_init_imports();\n{fn_dec}\n\n\"\"\"\n c_gen = c_wrapgen.CGenerator([],\"\", self.description)\n #TODO function_pointers, all same except\n fragments ={\"fn_dec\" : \"\", \"function_pointers\" : \"\"}\n fn = \"\"\"{rettype}{fnname}({args});\\n\"\"\"\n pointer = \"\"\" {prettype} (*{nodename})({args});\\n\"\"\"\n for node in self.description.definitions():\n if isinstance(node.type.get_type(), pdl.TypeFunction):\n frg = {\n \"prettype\": c_gen.gen_type_string(\"\", node.type.return_type),\n \"rettype\": c_gen.gen_type_string(\"\", node.type.return_type),\n \"fnname\": \"___madz_LANG_python_FN_\" + node.name,\n \"nodename\": node.name,\n \"args\": \",\".join(map(\n lambda a: c_gen.gen_type_string(a.name, a.type),\n node.type.args)),\n\n }\n fragments[\"fn_dec\"] += fn.format(**frg)\n fragments[\"function_pointers\"] += pointer.format(**frg)\n if fragments[\"function_pointers\"] == \"\":\n fragments[\"function_pointers\"] = \"uint8_t _madz_empty;\"\n return res.format(**fragments)", "def _fc_function_definitions(self) -> str:\n result = 'extern \"C\" {\\n\\n'\n for namespace in self.namespaces:\n for member in namespace.members:\n result += member.fortran_c_wrapper()\n\n result += '}\\n\\n'\n return result", "def make_c_function_stubs(self):\n fn =\\\n\"\"\"{rettype} {fnname}({args}){{\n {rettype} ret;\n\n ret = {cast_and_deref}___madz_LANG_python_OUTPUT.{nodename}({argnames});\n\n return ret;\n}}\n\n\"\"\"\n fn_no_return =\\\n\"\"\"{rettype} {fnname}({args}){{\n ___madz_LANG_python_OUTPUT.{nodename}({argnames});\n return;\n}}\n\n\"\"\"\n res = \"\"\n c_gen = c_wrapgen.CGenerator([],\"\", self.description)\n for node in self.description.definitions():\n if isinstance(node.type.get_type(), pdl.TypeFunction):\n fragments = {\n \"maybe_parentheses\": \")\" if isinstance(node.type.return_type.get_type(),pdl.TypeStruct) else \"\",\n \"cast_and_deref\": self.make_c_cast_deref_string(c_gen, node.type.return_type),\n \"rettype\": c_gen.gen_type_string(\"\", node.type.return_type),\n \"fnname\": \"___madz_LANG_python_FN_\" + node.name,\n \"nodename\": node.name,\n \"args\": \",\".join(map(\n lambda a: c_gen.gen_type_string(a.name, a.type),\n node.type.args)),\n \"argnames\":\",\".join(map(\n lambda a: a.name,\n node.type.args))\n }\n res += (fn if not isinstance(node.type.return_type, pdl.TypeTypeNone) else fn_no_return).format(**fragments)\n return res", "def gen_capi(args):\n\n if not args.header:\n return \"\"\n\n cmd = [\"ctags\", \"-x\", \"--c-kinds=fpsgx\", args.header]\n\n process = Popen(cmd, stdout=PIPE, stderr=PIPE)\n out, err = process.communicate()\n\n if process.returncode:\n return \"\"\n\n titles = {\n \"nvm_geo\": \"Geometry\",\n \"nvm_buf\": \"Buffer Allocation\",\n \"nvm_dev\": \"Device Management\",\n \"nvm_addr\": \"Addressing\",\n \"nvm_cmd\": \"Raw Commands\",\n \"nvm_vblk\": \"Virtual Block\",\n \"nvm_bbt\": \"Bad-Block-Table\"\n }\n docs = {}\n\n lib = {}\n for line in out.split(\"\\n\"):\n parts = (\" \".join(line.split())).split(\" \")[:2]\n if len(parts) < 2:\n continue\n\n name, kind = parts\n ns = \"_\".join(name.split(\"_\")[:2])\n\n if ns not in lib:\n lib[ns] = {}\n\n if kind not in lib[ns]:\n 
lib[ns][kind] = []\n\n lib[ns][kind].append(name)\n\n for ns in lib:\n\n if \"prototype\" in lib[ns]:\n ordering = [\n \"bbt_get\", \"bbt_set\", \"bbt_mark\", \"bbt_flush\",\n \"addr_erase\", \"addr_read\", \"addr_write\", \"addr_check\",\n \"addr_.*2\",\n \"vblk_erase\", \"vblk_p?read\", \"vblk_p?write\", \"vblk_pad\",\n \"lba_p?read\", \"lba_p?write\",\n \"_alloc\", \"_fill\", \"_free\", \"_pr\",\n \"_get_\", \"_set_\"\n ]\n\n ordered = []\n for order in ordering:\n for func in lib[ns][\"prototype\"]:\n if re.search(order, func):\n if func not in ordered:\n ordered.append(func)\n\n lib[ns][\"prototype\"] = list(\n set(lib[ns][\"prototype\"]) -\n set(ordered)\n ) + ordered\n\n title = \"%s - %s\" % (ns, titles[ns]) if ns in titles else ns\n\n rst = \"\\n\".join([\n \".. _sec-capi-%s:\" % ns, \"\",\n title,\n \"=\" * len(title),\n \"\", \"\"\n ])\n\n if \"typedefs\" in lib[ns]:\n for typedef in lib[ns][\"typedefs\"]:\n rst += \"\\n\".join([\n typedef,\n \"-\" * len(typedef), \"\",\n \".. doxygentypedef:: %s\" % typedef,\n \"\", \"\"\n ])\n\n for mangler in [\"struct\", \"externvar\"]:\n if mangler in lib[ns]:\n for struct in lib[ns][mangler]:\n rst += \"\\n\".join([\n struct,\n \"-\" * len(struct), \"\",\n \".. doxygenstruct:: %s\" % struct,\n \" :members:\",\n \"\", \"\"\n ])\n\n if \"enum\" in lib[ns]:\n for enum in lib[ns][\"enum\"]:\n rst += \"\\n\".join([\n enum,\n \"-\" * len(enum), \"\",\n \".. doxygenenum:: %s\" % enum,\n \"\", \"\"\n ])\n\n if \"prototype\" in lib[ns]:\n for func in lib[ns][\"prototype\"]:\n rst += \"\\n\".join([\n func,\n \"-\" * len(func), \"\",\n \".. doxygenfunction:: %s\" % func,\n \"\", \"\"\n ])\n\n docs[ns] = rst\n\n return docs", "def fortran_c_wrapper(self) -> str:\n if self.fc_override is not None:\n return self.fc_override.replace('$CLASSNAME$', self.class_name).replace(\n \"$C_PREFIX$\", self.c_prefix).replace(\"$F_PREFIX$\", self.f_prefix)\n\n result = ''\n\n # declaration\n in_parameters = self._fc_in_parameters()\n return_type, out_parameters = self._fc_out_parameters()\n if self.may_throw:\n out_parameters.append('int * err_code')\n out_parameters.append('char ** err_msg')\n out_parameters.append('std::size_t * err_msg_len')\n\n func_name = '{}_{}_{}_'.format(\n self.c_prefix, self.class_name, self.name)\n\n par_str = ', '.join(in_parameters + out_parameters)\n result += '{} {}({}) {{\\n'.format(return_type, func_name, par_str)\n\n # convert input\n for par in self.params:\n result += '{}'.format(par.fc_convert_input())\n\n # call C++ function and return result\n if self.may_throw:\n result += ' try {\\n'\n result += ' *err_code = 0;\\n'\n result += indent(self._fc_cpp_call(), 4*' ')\n result += indent(self._fc_return(), 4*' ')\n result += ' }\\n'\n for exception, code in error_codes.items():\n if code != 0:\n catch = ''\n catch += 'catch (std::{} const & e) {{\\n'.format(exception)\n catch += ' *err_code = {};\\n'.format(code)\n catch += ' static std::string msg;\\n'\n catch += ' msg = e.what();\\n'\n catch += ' *err_msg = const_cast<char*>(msg.data());\\n'\n catch += ' *err_msg_len = msg.size();\\n'\n catch += '}\\n'\n result += indent(catch, 4*' ')\n result += self._fc_return_default()\n else:\n result += self._fc_cpp_call()\n result += self._fc_return()\n result += '}\\n\\n'\n return result", "def build_cffi():\r\n print_banner(\"Building CFFI Module\")\r\n ffi = cffi.FFI()\r\n\r\n this_dir = pathlib.Path().resolve()\r\n h_file_name = this_dir / \"cmult.h\"\r\n with open(h_file_name) as h_file:\r\n # cffi does not like our preprocessor 
directives, so we remove them\r\n lns = h_file.read().splitlines()\r\n flt = filter(lambda ln: not re.match(r\" *#\", ln), lns)\r\n flt = map(lambda ln: ln.replace(\"EXPORT_SYMBOL \", \"\"), flt)\r\n ffi.cdef(str(\"\\n\").join(flt))\r\n\r\n ffi.set_source(\r\n \"cffi_example\",\r\n # Since we are calling a fully built library directly no custom source\r\n # is necessary. We need to include the .h files, though, because behind\r\n # the scenes cffi generates a .c file which contains a Python-friendly\r\n # wrapper around each of the functions.\r\n '#include \"cmult.h\"',\r\n # The important thing is to include the pre-built lib in the list of\r\n # libraries we are linking against:\r\n libraries=[\"cmult\"],\r\n library_dirs=[this_dir.as_posix()],\r\n extra_link_args=[\"-Wl,-rpath,.\"],\r\n )\r\n\r\n ffi.compile()\r\n print(\"* Complete\")", "def fortran_c_wrapper(self) -> str:\n result = banner('//')\n result += self._fc_includes()\n result += self._fc_using_statements()\n result += self._fc_function_definitions()\n return result", "def gen_cheader(protocol):\n\ts = \"\"\"/* Junior Design Sp2018 Final Project\n * Robot Firmware - RPi <-> Microcontroller Communication\n * Nick Ames 2018\n * WARNING: This file is automatically generated by gen-files.py\n * Any changes you make will be erased.\n */\n#include <stdfix.h>\n#include <stdint.h>\n#include \"config.h\"\n\n\"\"\"\n\ts += \"struct comm_data_t {\\n\"\n\tfor r in protocol:\n\t\ts += \"\\t\" + r.size + \" \" + r.name + \"; /* \" + r.desc + \" */\\n\"\n\ts += \"};\\n\\n\"\n\tfor r in protocol:\n\t\ts += \"%s get_%s(void); /* %s */\\n\"%(r.size, r.name, r.desc)\n\t\ts += \"void set_%s(%s); /* %s */\\n\\n\"%(r.name, r.size, r.desc)\n\ts += \"\"\"extern volatile struct comm_data_t Data;\"\"\"\n\treturn s", "def gen_csource(protocol):\n\tdef format_default(reg):\n\t\t\"\"\"Given a reg, return its default value formatted as a string for inclusion in\n\t\t a C source file.\"\"\"\n\t\tif reg.size == \"accum\":\n\t\t\treturn str(float(reg.default)) + \"k\"\n\t\telse:\n\t\t\treturn str(int(reg.default)) + \"L\"\n\n\ts = \"\"\"/* Junior Design Sp2018 Final Project\n * Robot Firmware - RPi <-> Microcontroller Communication\n * Nick Ames 2018\n * WARNING: This file is automatically generated by gen-files.py\n * Any changes you make will be erased.\n */\n#include <avr/interrupt.h>\n#include <util/atomic.h>\n#include \"protocol.h\"\n#include \"spi.h\"\n\n\"\"\"\n\ts += \"volatile struct comm_data_t Data = {\\n\"\n\tfor r in protocol:\n\t\ts += \"\\t.\" + r.name + \" = \" + format_default(r) + \", /* \" + r.desc + \" */\\n\"\n\ts += \"};\\n\\n\"\n\ts += \"\\n\"\n\t\n\tfor r in protocol:\n\t\ts += \"%s get_%s(void){ /* %s */\\n\"%(r.size, r.name, r.desc)\n\t\ts += \"\"\"\\t%s v;\n\tATOMIC_BLOCK(ATOMIC_RESTORESTATE){\n\t\tv = Data.%s;\n\t}\n\treturn v;\n}\n\"\"\"%(r.size, r.name)\n\t\ts += \"void set_%s(%s v){ /* %s */\\n\"%(r.name, r.size, r.desc)\n\t\ts += \"\"\"\\tATOMIC_BLOCK(ATOMIC_RESTORESTATE){\n\t\tData.%s = v;\n\t}\n}\n\n\"\"\"%(r.name)\n\ts += \"\"\"ISR(SPI0_STC_vect){\n\tuint8_t reg_num = SPDR0;\n\tswitch(reg_num){\n\"\"\"\n\t\n\tfor r in protocol:\n\t\tif r.write:\n\t\t\ts += \"\\t\\tcase % 2d: /* Write %s (%s) */\\n\"%(r.number, r.name, r.desc)\n\t\t\ts += \"\\t\\t\\tspi_rx((uint8_t *) &Data.%s, sizeof(Data.%s));\\n\"%(r.name, r.name)\n\t\t\ts += \"\\t\\t\\tbreak;\\n\"\n\t\tif r.read:\n\t\t\ts += \"\\t\\tcase 0x80 + % 2d: /* Read %s (%s) */\\n\"%(r.number, r.name, r.desc)\n\t\t\ts += \"\\t\\t\\tspi_tx((uint8_t *) &Data.%s, 
sizeof(Data.%s));\\n\"%(r.name, r.name)\n\t\t\ts += \"\\t\\t\\tbreak;\\n\"\n\ts += \"\"\"\t}\n\n\t/* Clear SPIF flag */\n\treg_num = SPSR0;\n\treg_num = SPDR0;\n}\n\"\"\"\t\n\treturn s", "def build_func_body(func_name, arg_dict, return_type):\n body = \"\"\n arg_list = \"\"\n\n # the following are pointers to scalar outputs\n # Note: pBufferSize was renamed pBufferSizeInBytes in v6.5\n scalar_ptr_outputs = ['nnzTotalDevHostPtr',\n 'pBufferSize',\n 'pBufferSizeInBytes',\n 'resultDevHostPtr']\n\n is_creator = 'cusparseCreate' in func_name\n is_getter = 'cusparseGet' in func_name\n\n if return_type == 'cusparseStatus_t' and not (is_creator or is_getter):\n is_return = False\n else:\n is_return = True\n\n # else:\n return_str = ''\n for k, v in arg_dict.items():\n\n \"\"\"\n set some flags based on the name/type of the argument\n will use these flags to determine whether and how to call ffi.new or\n ffi.cast on each variable\n \"\"\"\n is_ptr = '*' in v\n is_cusparse_type = '_t' in v\n is_cusparse_ptr = is_ptr and is_cusparse_type\n is_output_scalar = k in scalar_ptr_outputs\n if k in ['alpha', 'beta']:\n is_scalar = True\n else:\n is_scalar = False\n if is_getter:\n is_gpu_array = False\n else:\n is_gpu_array = is_ptr and (not is_cusparse_ptr) and (not is_scalar)\n if 'Complex' in v:\n is_complex = True\n else:\n is_complex = False\n\n # convert variable to appropriate type for the FFI\n if is_output_scalar:\n # for scalar outputs make a new pointer\n body += \"%s = ffi.cast('%s', %s)\\n\" % (k, v, k)\n elif is_getter and is_ptr and (return_type == 'cusparseStatus_t'):\n # any pointers in cusparseGet* are new outputs to be created\n body += \"%s = ffi.new('%s')\\n\" % (k, v)\n elif is_gpu_array:\n # pass pointer to GPU array data (use either .ptr or .gpudata)\n body += \"%s = ffi.cast('%s', %s.ptr)\\n\" % (k, v, k)\n elif is_cusparse_ptr:\n if is_creator:\n # generate custom cusparse type\n body += \"%s = ffi.new('%s')\\n\" % (k, v)\n else:\n # cast to the custom cusparse type\n body += \"%s = ffi.cast('%s', %s)\\n\" % (k, v, k)\n elif is_ptr and is_scalar:\n # create new pointer, with value initialized to scalar\n if is_complex:\n # complex case is a bit tricky. 
requires ffi.buffer\n body += \"%sffi = ffi.new('%s')\\n\" % (k, v)\n if 'cusparseC' in func_name:\n body += \"ffi.buffer(%sffi)[:] = \\\n np.complex64(%s).tostring()\\n\" % (k, k)\n elif 'cusparseZ' in func_name:\n body += \"ffi.buffer(%sffi)[:] = \\\n np.complex128(%s).tostring()\\n\" % (k, k)\n else:\n body += \"%s = ffi.new('%s', %s)\\n\" % (k, v, k)\n elif is_ptr or v == 'cudaStream_t':\n # case non-scalar pointer to appropriate type\n body += \"%s = ffi.cast('%s', %s)\\n\" % (k, v, k)\n else:\n # don't need explicit cast for plain int, float, etc\n pass\n\n # build the list of arguments to pass to the API\n if is_ptr and is_scalar and is_complex:\n # take into account modified argument name for complex scalars\n arg_list += \"%sffi, \" % k\n else:\n arg_list += \"%s, \" % k\n\n # add the function call and optionally return the result\n last_key = k\n arg_list = arg_list[:-2] # remove trailing \", \"\n if is_getter and return_type != 'cusparseStatus_t':\n body += \"return ffi_lib.%s(%s)\\n\" % (func_name, arg_list)\n else:\n # check cusparseStatus_t state before returning\n call_str = \"status = ffi_lib.%s(%s)\\n\" % (func_name, arg_list)\n body += split_line(call_str, break_pattern=', ', nmax=76)\n body += \"cusparseCheckStatus(status)\\n\"\n if is_return:\n # len(arg_dict) == 2) is to avoid return for cusparseGetLevelInfo\n if is_creator or (is_getter and (len(arg_dict) == 2)):\n body += \"return %s[0]\\n\" % last_key\n else:\n body += \"#TODO: return the appropriate result\"\n body += '\\n\\n'\n return reindent(body, numSpaces=4, lstrip=False)", "def make_get_python_out_struct(self):\n res = \\\n\"\"\"DLLEXPORT ___madz_LANG_python_TYPE_* {}_get_out_struct(){{\n return &___madz_LANG_python_OUTPUT;\n}}\n\n\"\"\"\n return res.format(self.python_mangle)", "def arg_to_CFI(self, node, ordered_functions):\n options = node.options\n fmt_func = node.fmtdict\n\n if options.wrap_fortran is False:\n # The buffer function is intended to be called by Fortran.\n # No Fortran, no need for buffer function.\n return\n\n ast = node.ast\n declarator = ast.declarator\n result_typemap = ast.typemap\n # shadow classes have not been added yet.\n # Only care about string, vector here.\n result_is_ptr = declarator.is_indirect()\n if (\n result_typemap\n and result_typemap.base in [\"string\", \"vector\"]\n and result_typemap.name != \"char\"\n and not result_is_ptr\n ):\n node.wrap.c = False\n # node.wrap.fortran = False\n self.config.log.write(\n \"Skipping {}, unable to create C wrapper \"\n \"for function returning {} instance\"\n \" (must return a pointer or reference).\"\n \" Bufferify version will still be created.\\n\".format(\n result_typemap.cxx_type, declarator.user_name\n )\n )\n \n cfi_args = {}\n for arg in ast.declarator.params:\n declarator = arg.declarator\n name = declarator.user_name\n attrs = declarator.attrs\n meta = declarator.metaattrs\n cfi_args[name] = False\n arg_typemap = arg.typemap\n if meta[\"api\"]:\n # API explicitly set by user.\n continue\n elif meta[\"assumed-rank\"]:\n cfi_args[name] = True\n elif attrs[\"rank\"]:\n cfi_args[name] = True\n elif arg_typemap.sgroup == \"string\":\n cfi_args[name] = True\n elif arg_typemap.sgroup == \"char\":\n if declarator.is_indirect():\n cfi_args[name] = True\n elif meta[\"deref\"] in [\"allocatable\", \"pointer\"]:\n cfi_args[name] = True\n has_cfi_arg = any(cfi_args.values())\n\n # Function result.\n need_buf_result = None\n\n result_as_arg = \"\" # Only applies to string functions\n # when the result is added as an argument to the 
Fortran api.\n\n # Check if result needs to be an argument.\n declarator = ast.declarator\n attrs = declarator.attrs\n meta = declarator.metaattrs\n if meta[\"deref\"] == \"raw\":\n # No bufferify required for raw pointer result.\n pass\n elif result_typemap.sgroup == \"string\":\n need_buf_result = \"cfi\"\n result_as_arg = fmt_func.F_string_result_as_arg\n result_name = result_as_arg or fmt_func.C_string_result_as_arg\n elif result_typemap.sgroup == \"char\" and result_is_ptr:\n need_buf_result = \"cfi\"\n result_as_arg = fmt_func.F_string_result_as_arg\n result_name = result_as_arg or fmt_func.C_string_result_as_arg\n elif meta[\"deref\"] in [\"allocatable\", \"pointer\"]:\n need_buf_result = \"cfi\"\n\n if not (need_buf_result or\n has_cfi_arg):\n return False\n\n options.wrap_fortran = False\n\n # Create a new C function and change arguments\n # and add attributes.\n C_new = node.clone()\n ordered_functions.append(C_new)\n self.append_function_index(C_new)\n\n generated_suffix = \"cfi\"\n C_new._generated = \"arg_to_cfi\"\n C_new.splicer_group = \"cfi\"\n if need_buf_result:\n C_new.ast.declarator.metaattrs[\"api\"] = need_buf_result\n fmt_func = C_new.fmtdict\n fmt_func.function_suffix = fmt_func.function_suffix + fmt_func.C_cfi_suffix\n\n C_new.wrap.assign(c=True)#, fortran=True)\n C_new._PTR_C_CXX_index = node._function_index\n\n for arg in C_new.ast.declarator.params:\n name = arg.declarator.user_name\n if cfi_args[name]:\n arg.declarator.metaattrs[\"api\"] = generated_suffix\n\n ast = C_new.ast\n if True: # preserve to avoid changing indention for now.\n f_attrs = node.ast.declarator.attrs # Fortran function attributes\n f_meta = node.ast.declarator.metaattrs # Fortran function attributes\n if result_as_arg:\n # decl: const char * getCharPtr2() +len(30)\n # +len implies copying into users buffer.\n result_as_string = ast.result_as_arg(result_name)\n result_as_string.const = False # must be writeable\n attrs = result_as_string.declarator.attrs\n # Special case for wrapf.py to override \"allocatable\"\n f_meta[\"deref\"] = None\n result_as_string.declarator.metaattrs[\"api\"] = \"cfi\"\n result_as_string.declarator.metaattrs[\"deref\"] = \"result\"\n result_as_string.declarator.metaattrs[\"is_result\"] = True\n C_new.ast.declarator.metaattrs[\"api\"] = None\n C_new.ast.declarator.metaattrs[\"intent\"] = \"subroutine\"\n C_new.ast.declarator.metaattrs[\"deref\"] = None\n\n if result_as_arg:\n F_new = self.result_as_arg(node, C_new)\n ordered_functions.append(F_new)\n self.append_function_index(F_new)\n else:\n if node._generated in [\"result_to_arg\", \"fortran_generic\", \"getter/setter\"]:\n node.wrap.c = False\n # Fortran function may call C subroutine if string/vector result\n # Fortran function calls bufferify function.\n node._PTR_F_C_index = C_new._function_index\n return True", "def make_ffi(module_path, crate_path, cached_header_filename=None):\n if cached_header_filename is not None and \\\n os.path.isfile(cached_header_filename):\n with open(cached_header_filename, 'rb') as f:\n header = f.read()\n if not PY2:\n header = header.decode('utf-8')\n else:\n from .bindgen import generate_header\n header = generate_header(crate_path)\n header = _directive_re.sub('', header)\n\n if os.environ.get('SNAEK_DEBUG_HEADER') == '1':\n sys.stderr.write('/* generated header for \"%s\" */\\n' % module_path)\n sys.stderr.write(header)\n sys.stderr.write('\\n')\n sys.stderr.flush()\n\n ffi = cffi.FFI()\n ffi.cdef(header)\n ffi.set_source(module_path, None)\n return ffi", "def build(self, 
cres):\n _launch_threads()\n # Build wrapper for ufunc entry point\n ctx = cres.target_context\n library = cres.library\n signature = cres.signature\n llvm_func = library.get_function(cres.fndesc.llvm_func_name)\n wrapper, env = build_gufunc_wrapper(library, ctx, llvm_func,\n signature, self.sin, self.sout,\n fndesc=cres.fndesc,\n env=cres.environment)\n\n ptr = library.get_pointer_to_function(wrapper.name)\n\n # Get dtypes\n dtypenums = []\n for a in signature.args:\n if isinstance(a, types.Array):\n ty = a.dtype\n else:\n ty = a\n dtypenums.append(as_dtype(ty).num)\n\n return dtypenums, ptr, env", "def _gen_code(self):\r\n #TODO: maybe generate one C function only to save compile time? Also easier to take that as a basis and hand craft other covariances??\r\n\r\n #generate c functions from sympy objects \r\n argument_sequence = self._sp_x+self._sp_z+self._sp_theta\r\n code_list = [('k',self._sp_k)]\r\n # gradients with respect to covariance input\r\n code_list += [('dk_d%s'%x.name,dx) for x,dx in zip(self._sp_x,self._sp_dk_dx)]\r\n # gradient with respect to parameters\r\n code_list += [('dk_d%s'%theta.name,dtheta) for theta,dtheta in zip(self._sp_theta,self._sp_dk_dtheta)]\r\n # gradient with respect to multiple output parameters\r\n if self.output_dim > 1:\r\n argument_sequence += self._sp_theta_i + self._sp_theta_j\r\n code_list += [('dk_d%s'%theta.name,dtheta) for theta,dtheta in zip(self._sp_theta_i,self._sp_dk_dtheta_i)]\r\n (foo_c,self._function_code), (foo_h,self._function_header) = \\\r\n codegen(code_list, \"C\",'foobar',argument_sequence=argument_sequence)\r\n #put the header file where we can find it\r\n f = file(os.path.join(tempfile.gettempdir(),'foobar.h'),'w')\r\n f.write(self._function_header)\r\n f.close()\r\n\r\n # Substitute any known derivatives which sympy doesn't compute\r\n self._function_code = re.sub('DiracDelta\\(.+?,.+?\\)','0.0',self._function_code)\r\n\r\n\r\n ############################################################\r\n # This is the basic argument construction for the C code. 
#\r\n ############################################################\r\n \r\n arg_list = ([\"X2(i, %s)\"%x.name[2:] for x in self._sp_x]\r\n + [\"Z2(j, %s)\"%z.name[2:] for z in self._sp_z])\r\n\r\n # for multiple outputs need to also provide these arguments reversed.\r\n if self.output_dim>1:\r\n reverse_arg_list = list(arg_list)\r\n reverse_arg_list.reverse()\r\n\r\n # Add in any 'shared' parameters to the list.\r\n param_arg_list = [shared_params.name for shared_params in self._sp_theta]\r\n arg_list += param_arg_list\r\n\r\n precompute_list=[]\r\n if self.output_dim > 1:\r\n reverse_arg_list+=list(param_arg_list)\r\n split_param_arg_list = [\"%s1(%s)\"%(theta.name[:-2].upper(),index) for index in ['ii', 'jj'] for theta in self._sp_theta_i]\r\n split_param_reverse_arg_list = [\"%s1(%s)\"%(theta.name[:-2].upper(),index) for index in ['jj', 'ii'] for theta in self._sp_theta_i]\r\n arg_list += split_param_arg_list\r\n reverse_arg_list += split_param_reverse_arg_list\r\n # Extract the right output indices from the inputs.\r\n c_define_output_indices = [' '*16 + \"int %s=(int)%s(%s, %i);\"%(index, var, index2, self.input_dim-1) for index, var, index2 in zip(['ii', 'jj'], ['X2', 'Z2'], ['i', 'j'])]\r\n precompute_list += c_define_output_indices\r\n reverse_arg_string = \", \".join(reverse_arg_list)\r\n arg_string = \", \".join(arg_list)\r\n precompute_string = \"\\n\".join(precompute_list)\r\n\r\n # Code to compute argments string needed when only X is provided.\r\n X_arg_string = re.sub('Z','X',arg_string)\r\n # Code to compute argument string when only diagonal is required.\r\n diag_arg_string = re.sub('int jj','//int jj',X_arg_string)\r\n diag_arg_string = re.sub('j','i',diag_arg_string)\r\n if precompute_string == '':\r\n # if it's not multioutput, the precompute strings are set to zero\r\n diag_precompute_string = ''\r\n diag_precompute_replace = ''\r\n else:\r\n # for multioutput we need to extract the index of the output form the input.\r\n diag_precompute_string = precompute_list[0]\r\n diag_precompute_replace = precompute_list[1]\r\n \r\n\r\n # Here's the code to do the looping for K\r\n self._K_code =\\\r\n \"\"\"\r\n // _K_code\r\n // Code for computing the covariance function.\r\n int i;\r\n int j;\r\n int N = target_array->dimensions[0];\r\n int num_inducing = target_array->dimensions[1];\r\n int input_dim = X_array->dimensions[1];\r\n //#pragma omp parallel for private(j)\r\n for (i=0;i<N;i++){\r\n for (j=0;j<num_inducing;j++){\r\n%s\r\n //target[i*num_inducing+j] = \r\n TARGET2(i, j) += k(%s);\r\n }\r\n }\r\n %s\r\n \"\"\"%(precompute_string,arg_string,\"/*\"+str(self._sp_k)+\"*/\") #adding a string representation forces recompile when needed\r\n\r\n self._K_code_X = \"\"\"\r\n // _K_code_X\r\n // Code for computing the covariance function.\r\n int i;\r\n int j;\r\n int N = target_array->dimensions[0];\r\n int num_inducing = target_array->dimensions[1];\r\n int input_dim = X_array->dimensions[1];\r\n //#pragma omp parallel for private(j)\r\n for (i=0;i<N;i++){\r\n %s // int ii=(int)X2(i, 1);\r\n TARGET2(i, i) += k(%s);\r\n for (j=0;j<i;j++){\r\n %s //int jj=(int)X2(j, 1);\r\n double kval = k(%s); //double kval = k(X2(i, 0), shared_lengthscale, LENGTHSCALE1(ii), SCALE1(ii));\r\n TARGET2(i, j) += kval;\r\n TARGET2(j, i) += kval;\r\n }\r\n }\r\n /*%s*/\r\n \"\"\"%(diag_precompute_string, diag_arg_string, re.sub('Z2', 'X2', diag_precompute_replace), X_arg_string,str(self._sp_k)) #adding a string representation forces recompile when needed\r\n\r\n # Code to do the looping for 
Kdiag\r\n self._Kdiag_code =\\\r\n \"\"\"\r\n // _Kdiag_code\r\n // Code for computing diagonal of covariance function.\r\n int i;\r\n int N = target_array->dimensions[0];\r\n int input_dim = X_array->dimensions[1];\r\n //#pragma omp parallel for\r\n for (i=0;i<N;i++){\r\n %s\r\n //target[i] =\r\n TARGET1(i)=k(%s);\r\n }\r\n %s\r\n \"\"\"%(diag_precompute_string,diag_arg_string,\"/*\"+str(self._sp_k)+\"*/\") #adding a string representation forces recompile when needed\r\n\r\n # Code to compute gradients\r\n grad_func_list = []\r\n if self.output_dim>1:\r\n grad_func_list += c_define_output_indices\r\n grad_func_list += [' '*16 + 'TARGET1(%i+ii) += PARTIAL2(i, j)*dk_d%s(%s);'%(self.num_shared_params+i*self.output_dim, theta.name, arg_string) for i, theta in enumerate(self._sp_theta_i)]\r\n grad_func_list += [' '*16 + 'TARGET1(%i+jj) += PARTIAL2(i, j)*dk_d%s(%s);'%(self.num_shared_params+i*self.output_dim, theta.name, reverse_arg_string) for i, theta in enumerate(self._sp_theta_i)]\r\n grad_func_list += ([' '*16 + 'TARGET1(%i) += PARTIAL2(i, j)*dk_d%s(%s);'%(i,theta.name,arg_string) for i,theta in enumerate(self._sp_theta)])\r\n grad_func_string = '\\n'.join(grad_func_list) \r\n\r\n self._dK_dtheta_code =\\\r\n \"\"\"\r\n // _dK_dtheta_code\r\n // Code for computing gradient of covariance with respect to parameters.\r\n int i;\r\n int j;\r\n int N = partial_array->dimensions[0];\r\n int num_inducing = partial_array->dimensions[1];\r\n int input_dim = X_array->dimensions[1];\r\n //#pragma omp parallel for private(j)\r\n for (i=0;i<N;i++){\r\n for (j=0;j<num_inducing;j++){\r\n%s\r\n }\r\n }\r\n %s\r\n \"\"\"%(grad_func_string,\"/*\"+str(self._sp_k)+\"*/\") # adding a string representation forces recompile when needed\r\n\r\n\r\n # Code to compute gradients for Kdiag TODO: needs clean up\r\n diag_grad_func_string = re.sub('Z','X',grad_func_string,count=0)\r\n diag_grad_func_string = re.sub('int jj','//int jj',diag_grad_func_string)\r\n diag_grad_func_string = re.sub('j','i',diag_grad_func_string)\r\n diag_grad_func_string = re.sub('PARTIAL2\\(i, i\\)','PARTIAL1(i)',diag_grad_func_string)\r\n self._dKdiag_dtheta_code =\\\r\n \"\"\"\r\n // _dKdiag_dtheta_code\r\n // Code for computing gradient of diagonal with respect to parameters.\r\n int i;\r\n int N = partial_array->dimensions[0];\r\n int input_dim = X_array->dimensions[1];\r\n for (i=0;i<N;i++){\r\n %s\r\n }\r\n %s\r\n \"\"\"%(diag_grad_func_string,\"/*\"+str(self._sp_k)+\"*/\") #adding a string representation forces recompile when needed\r\n\r\n # Code for gradients wrt X, TODO: may need to deal with special case where one input is actually an output.\r\n gradX_func_list = []\r\n if self.output_dim>1:\r\n gradX_func_list += c_define_output_indices\r\n gradX_func_list += [\"TARGET2(i, %i) += PARTIAL2(i, j)*dk_dx_%i(%s);\"%(q,q,arg_string) for q in range(self._real_input_dim)]\r\n gradX_func_string = \"\\n\".join(gradX_func_list)\r\n\r\n self._dK_dX_code = \\\r\n \"\"\"\r\n // _dK_dX_code\r\n // Code for computing gradient of covariance with respect to inputs.\r\n int i;\r\n int j;\r\n int N = partial_array->dimensions[0];\r\n int num_inducing = partial_array->dimensions[1];\r\n int input_dim = X_array->dimensions[1];\r\n //#pragma omp parallel for private(j)\r\n for (i=0;i<N; i++){\r\n for (j=0; j<num_inducing; j++){\r\n %s\r\n }\r\n }\r\n %s\r\n \"\"\"%(gradX_func_string,\"/*\"+str(self._sp_k)+\"*/\") #adding a string representation forces recompile when needed\r\n \r\n\r\n diag_gradX_func_string = 
re.sub('Z','X',gradX_func_string,count=0)\r\n diag_gradX_func_string = re.sub('int jj','//int jj',diag_gradX_func_string)\r\n diag_gradX_func_string = re.sub('j','i',diag_gradX_func_string)\r\n diag_gradX_func_string = re.sub('PARTIAL2\\(i, i\\)','2*PARTIAL1(i)',diag_gradX_func_string)\r\n\r\n # Code for gradients of Kdiag wrt X\r\n self._dKdiag_dX_code= \\\r\n \"\"\"\r\n // _dKdiag_dX_code\r\n // Code for computing gradient of diagonal with respect to inputs.\r\n int N = partial_array->dimensions[0];\r\n int input_dim = X_array->dimensions[1];\r\n for (int i=0;i<N; i++){\r\n %s\r\n }\r\n %s\r\n \"\"\"%(diag_gradX_func_string,\"/*\"+str(self._sp_k)+\"*/\") #adding a\r\n # string representation forces recompile when needed Get rid\r\n # of Zs in argument for diagonal. TODO: Why wasn't\r\n # diag_func_string called here? Need to check that.\r\n #self._dKdiag_dX_code = self._dKdiag_dX_code.replace('Z[j', 'X[i')\r\n\r\n # Code to use when only X is provided. \r\n self._dK_dtheta_code_X = self._dK_dtheta_code.replace('Z[', 'X[')\r\n self._dK_dX_code_X = self._dK_dX_code.replace('Z[', 'X[').replace('+= PARTIAL2(', '+= 2*PARTIAL2(') \r\n self._dK_dtheta_code_X = self._dK_dtheta_code_X.replace('Z2(', 'X2(')\r\n self._dK_dX_code_X = self._dK_dX_code_X.replace('Z2(', 'X2(')\r\n\r\n\r\n #TODO: insert multiple functions here via string manipulation\r\n #TODO: similar functions for psi_stats\r", "def translate_to_c(Newast):\n ast = parse_file('exampleMin.c', use_cpp=True)\n\n ast.show()\n #print(\"newast: \", Newast.ext[0].decl.type.args.params[0].type.type==ast.ext[0].decl.type.args.params[0].type.type)\n #print(\"newast2: \", Newast.ext[0].decl.type.args.params[0].type.type.coord)\n #print(\"ast2: \", ast.ext[0].decl.type.args.params[0].type.type.coord)\n\n #Newast.show()\n \n # print(ast.ext[0].decl.bitsize)\n # print(Newast.ext[0].decl.bitsize)\n # print(\"----------------------------------\")\n # print(ast.ext[0].decl.type.args.coord)\n # print(Newast.ext[0].decl.type.args.coord)\n # print(\"----------------------------------\")\n # print(ast.ext[0].decl.type.args.params)\n # print(Newast.ext[0].decl.type.args.params)\n # print(\"----------------------------------\")\n # print(ast.ext[0].decl.type.args.params[0])\n # print(Newast.ext[0].decl.type.args.params[0])\n # print(\"----------------------------------\")\n # print(ast.ext[0].decl.type.args.params[0].type)\n # print(Newast.ext[0].decl.type.args.params[0].type)\n # print(\"----------------------------------\")\n # print(ast.ext[0].decl.type.args.params[0].type.type)\n # print(Newast.ext[0].decl.type.args.params[0].type.type)\n # print(\"----------------------------------\")\n # print(ast.ext[0].decl.type.args.params[0].type.type.names)\n # print(Newast.ext[0].decl.type.args.params[0].type.type.names)\n # print(\"----------------------------------\")\n\n generator = c_generator.CGenerator()\n #ast.show()\n\n # tracing the generator for debugging\n # import trace\n # tr = trace.Trace(countcallers=1)\n # tr.runfunc(generator.visit, Newast)\n # tr.results().write_results()\n\n print(generator.visit(Newast))", "def compile_cutils():\r\n\r\n types = ['npy_' + t for t in ['int8', 'int16', 'int32', 'int64', 'int128',\r\n 'int256', 'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256',\r\n 'float16', 'float32', 'float64', 'float80', 'float96', 'float128',\r\n 'float256']]\r\n\r\n complex_types = ['npy_' + t for t in ['complex32', 'complex64',\r\n 'complex128', 'complex160', 'complex192', 'complex512']]\r\n\r\n inplace_map_template = \"\"\"\r\n 
#if defined(%(typen)s)\r\n static void %(type)s_inplace_add(PyArrayMapIterObject *mit, PyArrayIterObject *it)\r\n {\r\n int index = mit->size;\r\n while (index--) {\r\n %(op)s\r\n\r\n PyArray_MapIterNext(mit);\r\n PyArray_ITER_NEXT(it);\r\n }\r\n }\r\n #endif\r\n \"\"\"\r\n\r\n floatadd = \"((%(type)s*)mit->dataptr)[0] = ((%(type)s*)mit->dataptr)[0] + ((%(type)s*)it->dataptr)[0];\"\r\n complexadd = \"\"\"\r\n ((%(type)s*)mit->dataptr)[0].real = ((%(type)s*)mit->dataptr)[0].real + ((%(type)s*)it->dataptr)[0].real;\r\n ((%(type)s*)mit->dataptr)[0].imag = ((%(type)s*)mit->dataptr)[0].imag + ((%(type)s*)it->dataptr)[0].imag;\r\n \"\"\"\r\n\r\n fns = ''.join([inplace_map_template % {'type': t, 'typen': t.upper(),\r\n 'op': floatadd % {'type': t}}\r\n for t in types] +\r\n [inplace_map_template % {'type': t, 'typen': t.upper(),\r\n 'op': complexadd % {'type': t}}\r\n for t in complex_types])\r\n\r\n fn_array = (\"static inplace_map_binop addition_funcs[] = {\" +\r\n ''.join([\"\"\"\r\n #if defined(%(typen)s)\r\n %(type)s_inplace_add,\r\n #endif\r\n \"\"\" % {'type': t, 'typen': t.upper()}\r\n for t in types + complex_types]) +\r\n \"\"\"NULL};\r\n \"\"\")\r\n\r\n type_number_array = (\"static int type_numbers[] = {\" +\r\n ''.join([\"\"\"\r\n #if defined(%(typen)s)\r\n %(typen)s,\r\n #endif\r\n \"\"\" % {'type': t, 'typen': t.upper()}\r\n for t in types + complex_types]) +\r\n \"-1000};\")\r\n\r\n code = (\"\"\"\r\n #include <Python.h>\r\n #include \"numpy/arrayobject.h\"\r\n\r\n extern \"C\"{\r\n static PyObject *\r\n run_cthunk(PyObject *self, PyObject *args)\r\n {\r\n PyObject *py_cthunk = NULL;\r\n if(!PyArg_ParseTuple(args,\"O\",&py_cthunk))\r\n return NULL;\r\n\r\n if (!PyCObject_Check(py_cthunk)) {\r\n PyErr_SetString(PyExc_ValueError,\r\n \"Argument to run_cthunk must be a PyCObject.\");\r\n return NULL;\r\n }\r\n void * ptr_addr = PyCObject_AsVoidPtr(py_cthunk);\r\n int (*fn)(void*) = (int (*)(void*))(ptr_addr);\r\n void* it = PyCObject_GetDesc(py_cthunk);\r\n int failure = fn(it);\r\n\r\n return Py_BuildValue(\"i\", failure);\r\n }\r\n\r\n #if NPY_API_VERSION >= 0x00000008\r\n typedef void (*inplace_map_binop)(PyArrayMapIterObject *, PyArrayIterObject *);\r\n \"\"\" + fns + fn_array + type_number_array +\r\n\r\n\"\"\"\r\nstatic int\r\nmap_increment(PyArrayMapIterObject *mit, PyObject *op, inplace_map_binop add_inplace)\r\n{\r\n PyArrayObject *arr = NULL;\r\n PyArrayIterObject *it;\r\n PyArray_Descr *descr;\r\n if (mit->ait == NULL) {\r\n return -1;\r\n }\r\n descr = PyArray_DESCR(mit->ait->ao);\r\n Py_INCREF(descr);\r\n arr = (PyArrayObject *)PyArray_FromAny(op, descr,\r\n 0, 0, NPY_ARRAY_FORCECAST, NULL);\r\n if (arr == NULL) {\r\n return -1;\r\n }\r\n if ((mit->subspace != NULL) && (mit->consec)) {\r\n PyArray_MapIterSwapAxes(mit, (PyArrayObject **)&arr, 0);\r\n if (arr == NULL) {\r\n return -1;\r\n }\r\n }\r\n it = (PyArrayIterObject*)\r\n PyArray_BroadcastToShape((PyObject*)arr, mit->dimensions, mit->nd);\r\n if (it == NULL) {\r\n Py_DECREF(arr);\r\n return -1;\r\n }\r\n\r\n (*add_inplace)(mit, it);\r\n\r\n Py_DECREF(arr);\r\n Py_DECREF(it);\r\n return 0;\r\n}\r\n\r\n\r\nstatic PyObject *\r\ninplace_increment(PyObject *dummy, PyObject *args)\r\n{\r\n PyObject *arg_a = NULL, *index=NULL, *inc=NULL;\r\n PyArrayObject *a;\r\n inplace_map_binop add_inplace = NULL;\r\n int type_number = -1;\r\n int i =0;\r\n PyArrayMapIterObject * mit;\r\n\r\n if (!PyArg_ParseTuple(args, \"OOO\", &arg_a, &index,\r\n &inc)) {\r\n return NULL;\r\n }\r\n if (!PyArray_Check(arg_a)) {\r\n 
PyErr_SetString(PyExc_ValueError, \"needs an ndarray as first argument\");\r\n return NULL;\r\n }\r\n\r\n a = (PyArrayObject *) arg_a;\r\n\r\n if (PyArray_FailUnlessWriteable(a, \"input/output array\") < 0) {\r\n return NULL;\r\n }\r\n\r\n if (PyArray_NDIM(a) == 0) {\r\n PyErr_SetString(PyExc_IndexError, \"0-d arrays can't be indexed.\");\r\n return NULL;\r\n }\r\n type_number = PyArray_TYPE(a);\r\n\r\n\r\n\r\n while (type_numbers[i] >= 0 && addition_funcs[i] != NULL){\r\n if (type_number == type_numbers[i]) {\r\n add_inplace = addition_funcs[i];\r\n break;\r\n }\r\n i++ ;\r\n }\r\n\r\n if (add_inplace == NULL) {\r\n PyErr_SetString(PyExc_TypeError, \"unsupported type for a\");\r\n return NULL;\r\n }\r\n mit = (PyArrayMapIterObject *) PyArray_MapIterArray(a, index);\r\n if (mit == NULL) {\r\n goto fail;\r\n }\r\n if (map_increment(mit, inc, add_inplace) != 0) {\r\n goto fail;\r\n }\r\n\r\n Py_DECREF(mit);\r\n\r\n Py_INCREF(Py_None);\r\n return Py_None;\r\n\r\nfail:\r\n Py_XDECREF(mit);\r\n\r\n return NULL;\r\n}\r\n #endif\r\n\r\n\r\n static PyMethodDef CutilsExtMethods[] = {\r\n {\"run_cthunk\", run_cthunk, METH_VARARGS|METH_KEYWORDS,\r\n \"Run a theano cthunk.\"},\r\n #if NPY_API_VERSION >= 0x00000008\r\n {\"inplace_increment\", inplace_increment,\r\n METH_VARARGS,\r\n \"increments a numpy array inplace at the passed indexes.\"},\r\n #endif\r\n {NULL, NULL, 0, NULL} /* Sentinel */\r\n };\"\"\")\r\n\r\n if PY3:\r\n # This is not the most efficient code, but it is written this way to\r\n # highlight the changes needed to make 2.x code compile under python 3.\r\n code = code.replace(\"<Python.h>\", '\"numpy/npy_3kcompat.h\"', 1)\r\n code = code.replace(\"PyCObject\", \"NpyCapsule\")\r\n code += \"\"\"\r\n static struct PyModuleDef moduledef = {\r\n PyModuleDef_HEAD_INIT,\r\n \"cutils_ext\",\r\n NULL,\r\n -1,\r\n CutilsExtMethods,\r\n };\r\n\r\n PyMODINIT_FUNC\r\n PyInit_cutils_ext(void) {\r\n import_array();\r\n return PyModule_Create(&moduledef);\r\n }\r\n }\r\n \"\"\"\r\n else:\r\n code += \"\"\"\r\n PyMODINIT_FUNC\r\n initcutils_ext(void)\r\n {\r\n import_array();\r\n (void) Py_InitModule(\"cutils_ext\", CutilsExtMethods);\r\n }\r\n } //extern C\r\n \"\"\"\r\n\r\n loc = os.path.join(config.compiledir, 'cutils_ext')\r\n if not os.path.exists(loc):\r\n os.mkdir(loc)\r\n\r\n args = cmodule.GCC_compiler.compile_args()\r\n cmodule.GCC_compiler.compile_str('cutils_ext', code, location=loc,\r\n preargs=args)", "def fortran_interface(self) -> str:\n result = ''\n if self.fc_override == '':\n return result\n\n func_name = '{}_{}_{}_'.format(\n self.c_prefix, self.class_name, self.name)\n\n # declaration\n in_parameters = self._fi_in_parameters()\n return_type, out_parameters = self._fi_out_parameters()\n if self.may_throw:\n out_parameters.append(('integer (c_int)', 'err_code'))\n out_parameters.append(('type (c_ptr)', 'err_msg'))\n out_parameters.append(('integer (c_size_t)', 'err_msg_len'))\n\n arg_list = [par_name for _, par_name in in_parameters + out_parameters]\n if len(arg_list) > 1:\n arg_vlist = ' &\\n' + indent(', &\\n'.join(arg_list), 8*' ')\n else:\n arg_vlist = ', '.join(arg_list)\n\n if return_type != '':\n result += '{} function {}({}) &\\n'.format(\n return_type, func_name, arg_vlist)\n else:\n result += 'subroutine {}({}) &\\n'.format(func_name, arg_vlist)\n result += ' bind(C, name=\"{}\")\\n'.format(func_name)\n result += '\\n'\n result += ' use iso_c_binding\\n'\n\n # parameter declarations\n for par_type, par_name in in_parameters:\n result += ' {}, intent(in) :: 
{}\\n'.format(\n par_type, par_name)\n for par_type, par_name in out_parameters:\n result += ' {}, intent(out) :: {}\\n'.format(par_type, par_name)\n\n # end\n if return_type != '':\n result += 'end function {}\\n\\n'.format(func_name)\n else:\n result += 'end subroutine {}\\n\\n'.format(func_name)\n return indent(result, 8*' ')", "def cblas_header_text():\r\n\r\n return \"\"\"\r\n //#include <stddef.h>\r\n\r\n #undef __BEGIN_DECLS\r\n #undef __END_DECLS\r\n #ifdef __cplusplus\r\n #define __BEGIN_DECLS extern \"C\" {\r\n #define __END_DECLS }\r\n #else\r\n #define __BEGIN_DECLS /* empty */\r\n #define __END_DECLS /* empty */\r\n #endif\r\n\r\n __BEGIN_DECLS\r\n\r\n #define MOD %\r\n\r\n /*\r\n * Enumerated and derived types\r\n */\r\n #define CBLAS_INDEX size_t /* this may vary between platforms */\r\n\r\n enum CBLAS_ORDER {CblasRowMajor=101, CblasColMajor=102};\r\n enum CBLAS_TRANSPOSE {CblasNoTrans=111, CblasTrans=112, CblasConjTrans=113};\r\n enum CBLAS_UPLO {CblasUpper=121, CblasLower=122};\r\n enum CBLAS_DIAG {CblasNonUnit=131, CblasUnit=132};\r\n enum CBLAS_SIDE {CblasLeft=141, CblasRight=142};\r\n\r\n float cblas_sdsdot(const int N, const float alpha, const float *X,\r\n const int incX, const float *Y, const int incY);\r\n double cblas_dsdot(const int N, const float *X, const int incX, const float *Y,\r\n const int incY);\r\n float cblas_sdot(const int N, const float *X, const int incX,\r\n const float *Y, const int incY);\r\n double cblas_ddot(const int N, const double *X, const int incX,\r\n const double *Y, const int incY);\r\n\r\n /*\r\n * Functions having prefixes Z and C only\r\n */\r\n void cblas_cdotu_sub(const int N, const void *X, const int incX,\r\n const void *Y, const int incY, void *dotu);\r\n void cblas_cdotc_sub(const int N, const void *X, const int incX,\r\n const void *Y, const int incY, void *dotc);\r\n\r\n void cblas_zdotu_sub(const int N, const void *X, const int incX,\r\n const void *Y, const int incY, void *dotu);\r\n void cblas_zdotc_sub(const int N, const void *X, const int incX,\r\n const void *Y, const int incY, void *dotc);\r\n\r\n\r\n /*\r\n * Functions having prefixes S D SC DZ\r\n */\r\n float cblas_snrm2(const int N, const float *X, const int incX);\r\n float cblas_sasum(const int N, const float *X, const int incX);\r\n\r\n double cblas_dnrm2(const int N, const double *X, const int incX);\r\n double cblas_dasum(const int N, const double *X, const int incX);\r\n\r\n float cblas_scnrm2(const int N, const void *X, const int incX);\r\n float cblas_scasum(const int N, const void *X, const int incX);\r\n\r\n double cblas_dznrm2(const int N, const void *X, const int incX);\r\n double cblas_dzasum(const int N, const void *X, const int incX);\r\n\r\n\r\n /*\r\n * Functions having standard 4 prefixes (S D C Z)\r\n */\r\n CBLAS_INDEX cblas_isamax(const int N, const float *X, const int incX);\r\n CBLAS_INDEX cblas_idamax(const int N, const double *X, const int incX);\r\n CBLAS_INDEX cblas_icamax(const int N, const void *X, const int incX);\r\n CBLAS_INDEX cblas_izamax(const int N, const void *X, const int incX);\r\n\r\n /*\r\n * ===========================================================================\r\n * Prototypes for level 1 BLAS routines\r\n * ===========================================================================\r\n */\r\n\r\n /* \r\n * Routines with standard 4 prefixes (s, d, c, z)\r\n */\r\n void cblas_sswap(const int N, float *X, const int incX, \r\n float *Y, const int incY);\r\n void cblas_scopy(const int N, const float *X, const int incX, \r\n 
float *Y, const int incY);\r\n void cblas_saxpy(const int N, const float alpha, const float *X,\r\n const int incX, float *Y, const int incY);\r\n\r\n void cblas_dswap(const int N, double *X, const int incX, \r\n double *Y, const int incY);\r\n void cblas_dcopy(const int N, const double *X, const int incX, \r\n double *Y, const int incY);\r\n void cblas_daxpy(const int N, const double alpha, const double *X,\r\n const int incX, double *Y, const int incY);\r\n\r\n void cblas_cswap(const int N, void *X, const int incX, \r\n void *Y, const int incY);\r\n void cblas_ccopy(const int N, const void *X, const int incX, \r\n void *Y, const int incY);\r\n void cblas_caxpy(const int N, const void *alpha, const void *X,\r\n const int incX, void *Y, const int incY);\r\n\r\n void cblas_zswap(const int N, void *X, const int incX, \r\n void *Y, const int incY);\r\n void cblas_zcopy(const int N, const void *X, const int incX, \r\n void *Y, const int incY);\r\n void cblas_zaxpy(const int N, const void *alpha, const void *X,\r\n const int incX, void *Y, const int incY);\r\n\r\n\r\n /* \r\n * Routines with S and D prefix only\r\n */\r\n void cblas_srotg(float *a, float *b, float *c, float *s);\r\n void cblas_srotmg(float *d1, float *d2, float *b1, const float b2, float *P);\r\n void cblas_srot(const int N, float *X, const int incX,\r\n float *Y, const int incY, const float c, const float s);\r\n void cblas_srotm(const int N, float *X, const int incX,\r\n float *Y, const int incY, const float *P);\r\n\r\n void cblas_drotg(double *a, double *b, double *c, double *s);\r\n void cblas_drotmg(double *d1, double *d2, double *b1, const double b2, double *P);\r\n void cblas_drot(const int N, double *X, const int incX,\r\n double *Y, const int incY, const double c, const double s);\r\n void cblas_drotm(const int N, double *X, const int incX,\r\n double *Y, const int incY, const double *P);\r\n\r\n\r\n /* \r\n * Routines with S D C Z CS and ZD prefixes\r\n */\r\n void cblas_sscal(const int N, const float alpha, float *X, const int incX);\r\n void cblas_dscal(const int N, const double alpha, double *X, const int incX);\r\n void cblas_cscal(const int N, const void *alpha, void *X, const int incX);\r\n void cblas_zscal(const int N, const void *alpha, void *X, const int incX);\r\n void cblas_csscal(const int N, const float alpha, void *X, const int incX);\r\n void cblas_zdscal(const int N, const double alpha, void *X, const int incX);\r\n\r\n /*\r\n * ===========================================================================\r\n * Prototypes for level 2 BLAS\r\n * ===========================================================================\r\n */\r\n\r\n /* \r\n * Routines with standard 4 prefixes (S, D, C, Z)\r\n */\r\n void cblas_sgemv(const enum CBLAS_ORDER order,\r\n const enum CBLAS_TRANSPOSE TransA, const int M, const int N,\r\n const float alpha, const float *A, const int lda,\r\n const float *X, const int incX, const float beta,\r\n float *Y, const int incY);\r\n void cblas_sgbmv(const enum CBLAS_ORDER order,\r\n const enum CBLAS_TRANSPOSE TransA, const int M, const int N,\r\n const int KL, const int KU, const float alpha,\r\n const float *A, const int lda, const float *X,\r\n const int incX, const float beta, float *Y, const int incY);\r\n void cblas_strmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const float *A, const int lda, \r\n float *X, const int incX);\r\n void cblas_stbmv(const enum CBLAS_ORDER order, const 
enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const int K, const float *A, const int lda, \r\n float *X, const int incX);\r\n void cblas_stpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const float *Ap, float *X, const int incX);\r\n void cblas_strsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const float *A, const int lda, float *X,\r\n const int incX);\r\n void cblas_stbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const int K, const float *A, const int lda,\r\n float *X, const int incX);\r\n void cblas_stpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const float *Ap, float *X, const int incX);\r\n\r\n void cblas_dgemv(const enum CBLAS_ORDER order,\r\n const enum CBLAS_TRANSPOSE TransA, const int M, const int N,\r\n const double alpha, const double *A, const int lda,\r\n const double *X, const int incX, const double beta,\r\n double *Y, const int incY);\r\n void cblas_dgbmv(const enum CBLAS_ORDER order,\r\n const enum CBLAS_TRANSPOSE TransA, const int M, const int N,\r\n const int KL, const int KU, const double alpha,\r\n const double *A, const int lda, const double *X,\r\n const int incX, const double beta, double *Y, const int incY);\r\n void cblas_dtrmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const double *A, const int lda, \r\n double *X, const int incX);\r\n void cblas_dtbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const int K, const double *A, const int lda, \r\n double *X, const int incX);\r\n void cblas_dtpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const double *Ap, double *X, const int incX);\r\n void cblas_dtrsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const double *A, const int lda, double *X,\r\n const int incX);\r\n void cblas_dtbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const int K, const double *A, const int lda,\r\n double *X, const int incX);\r\n void cblas_dtpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const double *Ap, double *X, const int incX);\r\n\r\n void cblas_cgemv(const enum CBLAS_ORDER order,\r\n const enum CBLAS_TRANSPOSE TransA, const int M, const int N,\r\n const void *alpha, const void *A, const int lda,\r\n const void *X, const int incX, const void *beta,\r\n void *Y, const int incY);\r\n void cblas_cgbmv(const enum CBLAS_ORDER order,\r\n const enum CBLAS_TRANSPOSE TransA, const int M, const int N,\r\n const int KL, const int KU, const void *alpha,\r\n const void *A, const int lda, const void *X,\r\n const int incX, const void *beta, void *Y, const int incY);\r\n void cblas_ctrmv(const enum CBLAS_ORDER order, 
const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const void *A, const int lda, \r\n void *X, const int incX);\r\n void cblas_ctbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const int K, const void *A, const int lda, \r\n void *X, const int incX);\r\n void cblas_ctpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const void *Ap, void *X, const int incX);\r\n void cblas_ctrsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const void *A, const int lda, void *X,\r\n const int incX);\r\n void cblas_ctbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const int K, const void *A, const int lda,\r\n void *X, const int incX);\r\n void cblas_ctpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const void *Ap, void *X, const int incX);\r\n\r\n void cblas_zgemv(const enum CBLAS_ORDER order,\r\n const enum CBLAS_TRANSPOSE TransA, const int M, const int N,\r\n const void *alpha, const void *A, const int lda,\r\n const void *X, const int incX, const void *beta,\r\n void *Y, const int incY);\r\n void cblas_zgbmv(const enum CBLAS_ORDER order,\r\n const enum CBLAS_TRANSPOSE TransA, const int M, const int N,\r\n const int KL, const int KU, const void *alpha,\r\n const void *A, const int lda, const void *X,\r\n const int incX, const void *beta, void *Y, const int incY);\r\n void cblas_ztrmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const void *A, const int lda, \r\n void *X, const int incX);\r\n void cblas_ztbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const int K, const void *A, const int lda, \r\n void *X, const int incX);\r\n void cblas_ztpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const void *Ap, void *X, const int incX);\r\n void cblas_ztrsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const void *A, const int lda, void *X,\r\n const int incX);\r\n void cblas_ztbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const int K, const void *A, const int lda,\r\n void *X, const int incX);\r\n void cblas_ztpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag,\r\n const int N, const void *Ap, void *X, const int incX);\r\n\r\n\r\n /* \r\n * Routines with S and D prefixes only\r\n */\r\n void cblas_ssymv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const float alpha, const float *A,\r\n const int lda, const float *X, const int incX,\r\n const float beta, float *Y, const int incY);\r\n void cblas_ssbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const int K, 
const float alpha, const float *A,\r\n const int lda, const float *X, const int incX,\r\n const float beta, float *Y, const int incY);\r\n void cblas_sspmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const float alpha, const float *Ap,\r\n const float *X, const int incX,\r\n const float beta, float *Y, const int incY);\r\n void cblas_sger(const enum CBLAS_ORDER order, const int M, const int N,\r\n const float alpha, const float *X, const int incX,\r\n const float *Y, const int incY, float *A, const int lda);\r\n void cblas_ssyr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const float alpha, const float *X,\r\n const int incX, float *A, const int lda);\r\n void cblas_sspr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const float alpha, const float *X,\r\n const int incX, float *Ap);\r\n void cblas_ssyr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const float alpha, const float *X,\r\n const int incX, const float *Y, const int incY, float *A,\r\n const int lda);\r\n void cblas_sspr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const float alpha, const float *X,\r\n const int incX, const float *Y, const int incY, float *A);\r\n\r\n void cblas_dsymv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const double alpha, const double *A,\r\n const int lda, const double *X, const int incX,\r\n const double beta, double *Y, const int incY);\r\n void cblas_dsbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const int K, const double alpha, const double *A,\r\n const int lda, const double *X, const int incX,\r\n const double beta, double *Y, const int incY);\r\n void cblas_dspmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const double alpha, const double *Ap,\r\n const double *X, const int incX,\r\n const double beta, double *Y, const int incY);\r\n void cblas_dger(const enum CBLAS_ORDER order, const int M, const int N,\r\n const double alpha, const double *X, const int incX,\r\n const double *Y, const int incY, double *A, const int lda);\r\n void cblas_dsyr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const double alpha, const double *X,\r\n const int incX, double *A, const int lda);\r\n void cblas_dspr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const double alpha, const double *X,\r\n const int incX, double *Ap);\r\n void cblas_dsyr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const double alpha, const double *X,\r\n const int incX, const double *Y, const int incY, double *A,\r\n const int lda);\r\n void cblas_dspr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const double alpha, const double *X,\r\n const int incX, const double *Y, const int incY, double *A);\r\n\r\n\r\n /* \r\n * Routines with C and Z prefixes only\r\n */\r\n void cblas_chemv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const void *alpha, const void *A,\r\n const int lda, const void *X, const int incX,\r\n const void *beta, void *Y, const int incY);\r\n void cblas_chbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const int K, const void *alpha, const void *A,\r\n const int lda, const void *X, const int incX,\r\n const void *beta, void *Y, const int incY);\r\n void cblas_chpmv(const enum CBLAS_ORDER order, const enum 
CBLAS_UPLO Uplo,\r\n const int N, const void *alpha, const void *Ap,\r\n const void *X, const int incX,\r\n const void *beta, void *Y, const int incY);\r\n void cblas_cgeru(const enum CBLAS_ORDER order, const int M, const int N,\r\n const void *alpha, const void *X, const int incX,\r\n const void *Y, const int incY, void *A, const int lda);\r\n void cblas_cgerc(const enum CBLAS_ORDER order, const int M, const int N,\r\n const void *alpha, const void *X, const int incX,\r\n const void *Y, const int incY, void *A, const int lda);\r\n void cblas_cher(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const float alpha, const void *X, const int incX,\r\n void *A, const int lda);\r\n void cblas_chpr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const float alpha, const void *X,\r\n const int incX, void *A);\r\n void cblas_cher2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N,\r\n const void *alpha, const void *X, const int incX,\r\n const void *Y, const int incY, void *A, const int lda);\r\n void cblas_chpr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N,\r\n const void *alpha, const void *X, const int incX,\r\n const void *Y, const int incY, void *Ap);\r\n\r\n void cblas_zhemv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const void *alpha, const void *A,\r\n const int lda, const void *X, const int incX,\r\n const void *beta, void *Y, const int incY);\r\n void cblas_zhbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const int K, const void *alpha, const void *A,\r\n const int lda, const void *X, const int incX,\r\n const void *beta, void *Y, const int incY);\r\n void cblas_zhpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const void *alpha, const void *Ap,\r\n const void *X, const int incX,\r\n const void *beta, void *Y, const int incY);\r\n void cblas_zgeru(const enum CBLAS_ORDER order, const int M, const int N,\r\n const void *alpha, const void *X, const int incX,\r\n const void *Y, const int incY, void *A, const int lda);\r\n void cblas_zgerc(const enum CBLAS_ORDER order, const int M, const int N,\r\n const void *alpha, const void *X, const int incX,\r\n const void *Y, const int incY, void *A, const int lda);\r\n void cblas_zher(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const double alpha, const void *X, const int incX,\r\n void *A, const int lda);\r\n void cblas_zhpr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo,\r\n const int N, const double alpha, const void *X,\r\n const int incX, void *A);\r\n void cblas_zher2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N,\r\n const void *alpha, const void *X, const int incX,\r\n const void *Y, const int incY, void *A, const int lda);\r\n void cblas_zhpr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N,\r\n const void *alpha, const void *X, const int incX,\r\n const void *Y, const int incY, void *Ap);\r\n\r\n /*\r\n * ===========================================================================\r\n * Prototypes for level 3 BLAS\r\n * ===========================================================================\r\n */\r\n\r\n /* \r\n * Routines with standard 4 prefixes (S, D, C, Z)\r\n */\r\n void cblas_sgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA,\r\n const enum CBLAS_TRANSPOSE TransB, const int M, const int N,\r\n const int K, const float alpha, const float 
*A,\r\n const int lda, const float *B, const int ldb,\r\n const float beta, float *C, const int ldc);\r\n void cblas_ssymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const int M, const int N,\r\n const float alpha, const float *A, const int lda,\r\n const float *B, const int ldb, const float beta,\r\n float *C, const int ldc);\r\n void cblas_ssyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE Trans, const int N, const int K,\r\n const float alpha, const float *A, const int lda,\r\n const float beta, float *C, const int ldc);\r\n void cblas_ssyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE Trans, const int N, const int K,\r\n const float alpha, const float *A, const int lda,\r\n const float *B, const int ldb, const float beta,\r\n float *C, const int ldc);\r\n void cblas_strmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,\r\n const enum CBLAS_DIAG Diag, const int M, const int N,\r\n const float alpha, const float *A, const int lda,\r\n float *B, const int ldb);\r\n void cblas_strsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,\r\n const enum CBLAS_DIAG Diag, const int M, const int N,\r\n const float alpha, const float *A, const int lda,\r\n float *B, const int ldb);\r\n\r\n void cblas_dgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA,\r\n const enum CBLAS_TRANSPOSE TransB, const int M, const int N,\r\n const int K, const double alpha, const double *A,\r\n const int lda, const double *B, const int ldb,\r\n const double beta, double *C, const int ldc);\r\n void cblas_dsymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const int M, const int N,\r\n const double alpha, const double *A, const int lda,\r\n const double *B, const int ldb, const double beta,\r\n double *C, const int ldc);\r\n void cblas_dsyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE Trans, const int N, const int K,\r\n const double alpha, const double *A, const int lda,\r\n const double beta, double *C, const int ldc);\r\n void cblas_dsyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE Trans, const int N, const int K,\r\n const double alpha, const double *A, const int lda,\r\n const double *B, const int ldb, const double beta,\r\n double *C, const int ldc);\r\n void cblas_dtrmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,\r\n const enum CBLAS_DIAG Diag, const int M, const int N,\r\n const double alpha, const double *A, const int lda,\r\n double *B, const int ldb);\r\n void cblas_dtrsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,\r\n const enum CBLAS_DIAG Diag, const int M, const int N,\r\n const double alpha, const double *A, const int lda,\r\n double *B, const int ldb);\r\n\r\n void cblas_cgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA,\r\n const enum CBLAS_TRANSPOSE TransB, const int M, const int N,\r\n const int K, const void *alpha, const void *A,\r\n const int lda, const void *B, const int ldb,\r\n const void *beta, void *C, const int ldc);\r\n void cblas_csymm(const enum CBLAS_ORDER Order, const enum 
CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const int M, const int N,\r\n const void *alpha, const void *A, const int lda,\r\n const void *B, const int ldb, const void *beta,\r\n void *C, const int ldc);\r\n void cblas_csyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE Trans, const int N, const int K,\r\n const void *alpha, const void *A, const int lda,\r\n const void *beta, void *C, const int ldc);\r\n void cblas_csyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE Trans, const int N, const int K,\r\n const void *alpha, const void *A, const int lda,\r\n const void *B, const int ldb, const void *beta,\r\n void *C, const int ldc);\r\n void cblas_ctrmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,\r\n const enum CBLAS_DIAG Diag, const int M, const int N,\r\n const void *alpha, const void *A, const int lda,\r\n void *B, const int ldb);\r\n void cblas_ctrsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,\r\n const enum CBLAS_DIAG Diag, const int M, const int N,\r\n const void *alpha, const void *A, const int lda,\r\n void *B, const int ldb);\r\n\r\n void cblas_zgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA,\r\n const enum CBLAS_TRANSPOSE TransB, const int M, const int N,\r\n const int K, const void *alpha, const void *A,\r\n const int lda, const void *B, const int ldb,\r\n const void *beta, void *C, const int ldc);\r\n void cblas_zsymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const int M, const int N,\r\n const void *alpha, const void *A, const int lda,\r\n const void *B, const int ldb, const void *beta,\r\n void *C, const int ldc);\r\n void cblas_zsyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE Trans, const int N, const int K,\r\n const void *alpha, const void *A, const int lda,\r\n const void *beta, void *C, const int ldc);\r\n void cblas_zsyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE Trans, const int N, const int K,\r\n const void *alpha, const void *A, const int lda,\r\n const void *B, const int ldb, const void *beta,\r\n void *C, const int ldc);\r\n void cblas_ztrmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,\r\n const enum CBLAS_DIAG Diag, const int M, const int N,\r\n const void *alpha, const void *A, const int lda,\r\n void *B, const int ldb);\r\n void cblas_ztrsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA,\r\n const enum CBLAS_DIAG Diag, const int M, const int N,\r\n const void *alpha, const void *A, const int lda,\r\n void *B, const int ldb);\r\n\r\n\r\n /* \r\n * Routines with prefixes C and Z only\r\n */\r\n void cblas_chemm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const int M, const int N,\r\n const void *alpha, const void *A, const int lda,\r\n const void *B, const int ldb, const void *beta,\r\n void *C, const int ldc);\r\n void cblas_cherk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE Trans, const int N, const int K,\r\n const float alpha, const void *A, const int lda,\r\n const float beta, void *C, const int ldc);\r\n 
void cblas_cher2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE Trans, const int N, const int K,\r\n const void *alpha, const void *A, const int lda,\r\n const void *B, const int ldb, const float beta,\r\n void *C, const int ldc);\r\n\r\n void cblas_zhemm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side,\r\n const enum CBLAS_UPLO Uplo, const int M, const int N,\r\n const void *alpha, const void *A, const int lda,\r\n const void *B, const int ldb, const void *beta,\r\n void *C, const int ldc);\r\n void cblas_zherk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE Trans, const int N, const int K,\r\n const double alpha, const void *A, const int lda,\r\n const double beta, void *C, const int ldc);\r\n void cblas_zher2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo,\r\n const enum CBLAS_TRANSPOSE Trans, const int N, const int K,\r\n const void *alpha, const void *A, const int lda,\r\n const void *B, const int ldb, const double beta,\r\n void *C, const int ldc);\r\n\r\n void cblas_xerbla(int p, const char *rout, const char *form, ...);\r\n\r\n __END_DECLS\r\n \"\"\"", "def make_get_out_struct(self):\n res = \\\n\"\"\"DLLEXPORT ___madz_TYPE_* ___madz_TYPE_get_out_struct(){{\n return &___madz_OUTPUT;\n}}\n\n\"\"\"\n return res", "def compile(self, args):\n if args not in self._compileinfos:\n cres = compile_with_dppl(self.py_func, None, args, debug=self.debug)\n func = cres.library.get_function(cres.fndesc.llvm_func_name)\n cres.target_context.mark_ocl_device(func)\n first_definition = not self._compileinfos\n self._compileinfos[args] = cres\n libs = [cres.library]\n\n if first_definition:\n # First definition\n cres.target_context.insert_user_function(self, cres.fndesc,\n libs)\n else:\n cres.target_context.add_user_function(self, cres.fndesc, libs)\n\n else:\n cres = self._compileinfos[args]\n\n return cres.signature", "def convert_fus(ast):\n\n parent_fn_name = ast.name_short\n prefix_list = {\"p\": \"p.\", \"r\": \"r.\", \"g\": \"c.\"}\n prefix = prefix_list[parent_fn_name]\n\n fus1_ns = ast.args[0].namespace\n fus1_val = ast.args[0].value\n\n arg_fus = ast.args[1]\n fus_args = [None, \"?\", \"?\"]\n for idx, arg in enumerate(arg_fus.args):\n fus_args[idx] = arg\n\n fus2_ns = fus_args[0].namespace\n fus2_val = fus_args[0].value\n\n if fus_args[1] == \"?\":\n fus1_range = fus_args[1]\n else:\n fus1_range = f'\"{prefix}1_{fus_args[1].value}\"'\n\n if fus_args[2] == \"?\":\n fus2_range = fus_args[2]\n else:\n fus2_range = f'\"{prefix}{fus_args[2].value}_?\"'\n\n fus = Function(\"fus\", version=version, parent=ast)\n fus.args = [\n NSArg(fus1_ns, fus1_val, fus),\n StrArg(fus1_range, fus),\n NSArg(fus2_ns, fus2_val, fus),\n StrArg(fus2_range, fus),\n ]\n\n # Remove BEL\n ast_args = ast.args\n ast_args.pop(0)\n ast_args.pop(0)\n\n if ast_args == [None]:\n ast_args = []\n\n ast.args = []\n ast.add_argument(fus)\n\n if len(ast_args) > 0:\n ast.args.extend(ast_args)\n\n return ast", "def _parse_cc_h(self, fname_cc):\n def _type_translate(p_type, default_v=None):\n \"\"\" Translates a type from C++ to GRC \"\"\"\n translate_dict = {'float': 'float',\n 'double': 'real',\n 'int': 'int',\n 'gr_complex': 'complex',\n 'char': 'byte',\n 'unsigned char': 'byte',\n 'std::string': 'string',\n 'std::vector<int>': 'int_vector',\n 'std::vector<float>': 'real_vector',\n 'std::vector<gr_complex>': 'complex_vector',\n }\n if p_type in ('int',) and default_v[:2].lower() == '0x':\n return 'hex'\n try:\n return 
translate_dict[p_type]\n except KeyError:\n return 'raw'\n def _get_blockdata(fname_cc):\n \"\"\" Return the block name and the header file name from the .cc file name \"\"\"\n blockname = os.path.splitext(os.path.basename(fname_cc.replace('_impl.', '.')))[0]\n fname_h = (blockname + '.h').replace('_impl.', '.')\n blockname = blockname.replace(self._info['modname']+'_', '', 1)\n return (blockname, fname_h)\n # Go, go, go\n print \"Making GRC bindings for %s...\" % fname_cc\n (blockname, fname_h) = _get_blockdata(fname_cc)\n try:\n parser = ParserCCBlock(fname_cc,\n os.path.join(self._info['includedir'], fname_h),\n blockname,\n self._info['version'],\n _type_translate\n )\n except IOError:\n print \"Can't open some of the files necessary to parse %s.\" % fname_cc\n sys.exit(1)\n return (parser.read_params(), parser.read_io_signature(), blockname)", "def buildCDeclaration(self):\n raise Exception(\"Unimplemented function in symbol: \"+self.name)", "def _compile_C_code(header, body, return_unloaded=False, verbose=False):\n import importlib\n import tempfile\n import uuid\n\n import cffi\n\n module_name = \"module_\" + uuid.uuid4().hex\n\n if \"__uint128\" in header:\n raise ValueError(\"_compile_C_code does not support bit-vector widths \"\n \"larger than 64 bits (cffi does not support __uint128)\")\n\n ffibuilder = cffi.FFI()\n ffibuilder.cdef(header)\n ffibuilder.set_source(module_name, body)\n\n tmpdir = tempfile.TemporaryDirectory()\n lib_path = ffibuilder.compile(tmpdir=tmpdir.name, verbose=verbose)\n\n if return_unloaded:\n return lib_path, module_name, tmpdir\n\n # dynamic import\n # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly\n spec = importlib.util.spec_from_file_location(module_name, lib_path)\n pymod_parent = importlib.util.module_from_spec(spec)\n # sys.modules[module_name] = module\n spec.loader.exec_module(pymod_parent)\n\n pymod = pymod_parent\n\n return pymod, tmpdir", "def make_func_declarations(self):\n\n\t\tfor name in self.func_dict:\n\t\t\tbody = Lexer(self.func_dict[name]).get_tokens()\n\t\t\ti = body.index('\\\\') + 1 #Start of parameters\n\t\t\tj = body.match_paren(i)\n\t\t\tparam_tokens = body[i + 1: j] #Stuff inside parentheses\n\t\t\t#\t\t\tprint \"param list:\", param_tokens\n\n\t\t\tparams = self.split_params(param_tokens)\n\t\t\tparams = map(lambda n: n.split(':'), params)\n\t\t\t#params is now [[<name>,<type>],...]\n\t\t\tc_types = map(lambda n: self.convert_type(*n), params)\n\t\t\t#\t\t\tprint c_types\n\n\t\t\treturn_type = ''\n\t\t\t# +2 to skip over \")\" and \":\"\n\t\t\tif body[j+2] == '(': #Function returns another function\n\t\t\t\t# +3 for [\")\",\"->\",\"<type>\"]\n\t\t\t\tfor x in xrange(j+2, body.match_paren(j+2)+3):\n\t\t\t\t\treturn_type += body[x]\n\t\t\telse: #Function returns a concrete type\n\t\t\t\treturn_type = body[j+2] #+2 to skip over \")\" and \":\"\n\n\t\t\tfunc_type = self.convert_type(name, return_type)\n\t\t\t#\t\t\tprint \"params\", params\n\t\t\t#\t\t\tprint \"c_types\", c_types\n\t\t\t#while True:exec raw_input() in globals(), locals()\n\t\t\tself.cpp_declarations[name] = func_type + '(' + ', '.join(c_types) + ')'\n\n\t\tself.cpp_declarations['main'] = 'int main()' #actually this isn't used", "def _load_hesaff_clib(rebuild=None):\n global REBUILD_ONCE\n # Get the root directory which should have the dynamic library in it\n #root_dir = realpath(dirname(__file__)) if '__file__' in vars() else realpath(os.getcwd())\n\n # os.path.dirname(sys.executable)\n #if getattr(sys, 'frozen', False):\n # # 
we are running in a |PyInstaller| bundle\n # root_dir = realpath(sys._MEIPASS)\n #else:\n # # we are running in a normal Python environment\n # root_dir = realpath(dirname(__file__))\n root_dir = realpath(dirname(__file__))\n if rebuild is not False and REBUILD_ONCE == 0 and __name__ != '__main__':\n REBUILD_ONCE += 1\n rebuild = ut.get_argflag('--rebuild-hesaff')\n if rebuild:\n print('REBUILDING HESAFF')\n repo_dir = realpath(dirname(root_dir))\n ut.std_build_command(repo_dir)\n\n libname = 'hesaff'\n (clib, def_cfunc, lib_fpath) = ctypes_interface.load_clib(libname, root_dir)\n # Expose extern C Functions to hesaff's clib\n #def_cfunc(C.c_char_p, 'cmake_build_type', [])\n #def_cfunc(None, 'free_char', [C.c_char_p])\n def_cfunc(int_t, 'get_cpp_version', [])\n def_cfunc(int_t, 'is_debug_mode', [])\n def_cfunc(int_t, 'detect', [obj_t])\n def_cfunc(int_t, 'get_kpts_dim', [])\n def_cfunc(int_t, 'get_desc_dim', [])\n def_cfunc(None, 'exportArrays', [obj_t, int_t, kpts_t, vecs_t])\n def_cfunc(None, 'extractDesc', [obj_t, int_t, kpts_t, vecs_t])\n def_cfunc(None, 'extractPatches', [obj_t, int_t, kpts_t, img32_t])\n def_cfunc(None, 'extractDescFromPatches', [int_t, int_t, int_t, img_t, vecs_t])\n def_cfunc(obj_t, 'new_hesaff_fpath', [str_t] + HESAFF_PARAM_TYPES)\n def_cfunc(obj_t, 'new_hesaff_image', [img_t, int_t, int_t, int_t] + HESAFF_PARAM_TYPES)\n def_cfunc(None, 'free_hesaff', [obj_t])\n def_cfunc(obj_t, 'detectFeaturesListStep1', [int_t, str_list_t] + HESAFF_PARAM_TYPES)\n def_cfunc(None, 'detectFeaturesListStep2', [int_t, obj_t, int_array_t])\n def_cfunc(None, 'detectFeaturesListStep3', [int_t, obj_t, int_array_t, int_array_t, kpts_t, vecs_t])\n return clib, lib_fpath", "def fortran_function(self) -> str:\n if self.f_override is not None:\n return indent(\n self.f_override.replace('$CLASSNAME$', self.class_name).replace(\n \"$C_PREFIX$\", self.c_prefix).replace(\n \"$F_PREFIX$\", self.f_prefix),\n 4*' ')\n\n result = ''\n\n # declaration\n func_name = '{}_{}_{}'.format(\n self.f_prefix, self.class_name, self.name)\n in_parameters = self._f_in_parameters()\n return_type, out_parameters = self._f_out_parameters()\n if self.may_throw:\n out_parameters.append(('integer, optional', 'err_code'))\n out_parameters.append(('character(:), allocatable, optional',\n 'err_msg'))\n\n all_parameters = in_parameters + out_parameters\n arg_list = ', &\\n'.join([par_name for _, par_name in all_parameters])\n arg_ilist = indent(arg_list, 8*' ')\n if return_type != '':\n result += 'function {}( &\\n{})\\n'.format(func_name, arg_ilist)\n else:\n result += 'subroutine {}( &\\n{})\\n'.format(func_name, arg_ilist)\n\n # parameter declarations\n result += ' implicit none\\n'\n for par_type, par_name in in_parameters:\n result += ' {}, intent(in) :: {}\\n'.format(\n par_type, par_name)\n for par_type, par_name in out_parameters:\n result += ' {}, intent(out) :: {}\\n'.format(par_type, par_name)\n if return_type != '':\n result += ' {} :: {}\\n'.format(return_type, func_name)\n result += '\\n'\n\n # variable declarations\n c_return_type, fi_out_parameters = self._fi_out_parameters()\n if c_return_type:\n result += ' {} :: ret_val\\n'.format(c_return_type)\n for par_type, par_name in fi_out_parameters:\n result += ' {} :: {}\\n'.format(par_type, par_name)\n for par_type, par_name in self.ret_type.f_aux_variables():\n result += ' {} :: {}\\n'.format(par_type, par_name)\n if self.may_throw:\n result += ' integer (c_int) :: err_code_v\\n'\n result += ' type (c_ptr) :: err_msg_v\\n'\n result += ' integer (c_size_t) :: 
err_msg_len_v\\n'\n result += ' character (c_char), dimension(:), pointer :: err_msg_f\\n'\n result += ' character(:), allocatable :: err_msg_p\\n'\n result += ' integer (c_size_t) :: err_msg_i\\n'\n if c_return_type or fi_out_parameters or self.may_throw:\n result += '\\n'\n\n # convert input\n args = [param.f_chain_arg() for param in self.params]\n args += [par_name for _, par_name in fi_out_parameters]\n if self.may_throw:\n args += ['err_code_v', 'err_msg_v', 'err_msg_len_v']\n arg_str = ', &\\n'.join([8*' ' + arg for arg in args])\n\n # call C function\n fc_func_name = '{}_{}_{}_'.format(\n self.c_prefix, self.class_name, self.name)\n chain_call = self.fc_chain_call(\n ns_prefix=self.c_prefix, class_name=self.class_name,\n fc_func_name=fc_func_name, fc_args=arg_str)\n result_name = ''\n if return_type != '':\n result_name = func_name\n elif out_parameters:\n result_name = out_parameters[0][1]\n result += self.ret_type.f_call_c('ret_val', chain_call)\n\n # handle errors if necessary\n if self.may_throw:\n # Note: I tried to factor this out into a function, but Fortran\n # makes that near-impossible. Since we're generating anyway, it's\n # not really duplication, so leave it as is.\n result += indent(dedent(f\"\"\"\\\n if (err_code_v .ne. 0) then\n if (present(err_code)) then\n err_code = err_code_v\n if (present(err_msg)) then\n call c_f_pointer(err_msg_v, err_msg_f, (/err_msg_len_v/))\n allocate (character(err_msg_len_v) :: err_msg)\n do err_msg_i = 1, err_msg_len_v\n err_msg(err_msg_i:err_msg_i) = err_msg_f(err_msg_i)\n end do\n end if\n {dedent(\n self.ret_type.f_return_dummy_result(result_name))}\n return\n else\n call c_f_pointer(err_msg_v, err_msg_f, (/err_msg_len_v/))\n allocate (character(err_msg_len_v) :: err_msg_p)\n do err_msg_i = 1, err_msg_len_v\n err_msg_p(err_msg_i:err_msg_i) = err_msg_f(err_msg_i)\n end do\n print *, err_msg_p\n stop\n end if\n else\n if (present(err_code)) then\n err_code = 0\n end if\n end if\n\n \"\"\"), 4*' ')\n\n # convert and return result\n result += self.ret_type.f_return_result(result_name, 'ret_val')\n\n # end\n if return_type != '':\n result += 'end function {}\\n\\n'.format(func_name)\n else:\n result += 'end subroutine {}\\n\\n'.format(func_name)\n return indent(result, 4*' ')", "def gen_code(self, filename, func_name):\n\n assert self.bits is not None\n\n vd_list = []\n n_vars = 0\n for tree in self.trees:\n vd_list.append(tree.gen_code(n_vars))\n n_vars += len(vd_list[-1])\n\n # checks the type by the suffix\n\n is_v = filename.split(\".\")[-1] == \"v\"\n\n assert self.inputs\n\n f = open(filename, \"w\")\n\n i_bits = np.sum(self.bits[:-1])\n o_bits = self.bits[-1]\n o_sign = self.is_neg[-1]\n\n if is_v:\n f.write(\"module {}(input [{}:0] i, output [{}:0] o);\\n\".format(\n func_name, i_bits-1, o_bits-1))\n else:\n f.write(\"#include<ac_int.h>\\n\\n\")\n f.write(\"void {}(ac_int<{},false> i, ac_int<{},{}> &o)\\n\".format(\n func_name, i_bits, o_bits, o_sign))\n f.write(\"{\\n\")\n\n\n # write function headline\n s_in_line = []\n\n i_bits = self.bits[0]\n i_sign = self.is_neg[0]\n\n if is_v:\n i_datatype = \" wire {}[{}:0] \".format(\n \"signed \" if i_sign else \"\", i_bits-1)\n else:\n i_datatype = \" ac_int<{},{}> \".format(i_bits, i_sign)\n\n len_s = len(i_datatype)\n\n for i in range(self.inputs):\n if is_v:\n s = (\n \"i_\" + str(i) + \" = \" + \"i[\" + str(i_bits*(i+1)-1) + \":\" +\n str(i_bits*i) + \"]\"\n )\n else:\n s = (\n \"i_\" + str(i) + \" = \" + \"i.slc<\" + str(i_bits) + \">(\" +\n str(i_bits*i) + \")\"\n )\n if (\n len_s 
+ len(s) + 2 > 70 or i_bits != self.bits[i] or\n i_sign != self.is_neg[i]\n ):\n f.write(i_datatype + \", \".join(s_in_line) + \";\\n\")\n\n s_in_line = []\n if is_v:\n i_datatype = \" wire {}[{}:0] \".format(\n \"signed \" if i_sign else \"\", i_bits-1)\n else:\n i_datatype = \" ac_int<{},{}> \".format(i_bits, i_sign)\n\n len_s = len(i_datatype)\n\n s_in_line.append(s)\n len_s += len(s) + 2\n\n if s_in_line:\n f.write(i_datatype + \", \".join(s_in_line) + \";\\n\")\n\n if is_v:\n o_datatype = \" wire {}[{}:0] \".format(\n \"signed \" if o_sign else \"\", o_bits)\n else:\n o_datatype = \" ac_int<{},{}> \".format(o_bits, o_sign)\n\n o_list = []\n for i in range(len(vd_list)):\n for v in vd_list[i]:\n if is_v:\n f.write(o_datatype + v + \" = \" + vd_list[i][v] + \";\\n\")\n else:\n f.write(o_datatype + v + \" = \" + vd_list[i][v] + \";\\n\")\n f.write(\"\\n\")\n o_list.append(v)\n\n assert len(o_list) <= 3\n\n if is_v:\n f.write(\" assign \")\n else:\n f.write(\" \")\n\n if len(o_list) == 1:\n f.write(\"o = \" + o_list[0] + \";\")\n elif len(o_list) == 2:\n cond = \"( \" + o_list[0] + \" == \" + o_list[1] + \" ) \"\n n1 = o_list[0]\n n0 = \"( ( \" + \" + \".join(o_list) + \" ) >> 1 )\"\n f.write(\"o = \" + cond + \"? \" + n1 + \": \" + n0)\n elif len(o_list) == 3:\n cond = (\n \"( \" +\n \"( \" + \" == \".join(o_list[0:2]) + \" )?\" + o_list[0] + \":\" +\n \"( \" + \" == \".join(o_list[1:]) + \" )?\" + o_list[1] + \":\" +\n \"( \" + \" == \".join([o_list[0], o_list[2]]) + \" )?\" + o_list[0] +\n \":\" + \"( \" + \" < \".join(o_list[0:2]) + \" ) ?\" +\n \"( ( \" + \" < \".join(o_list[1:]) + \" ) ?\" + o_list[1] + \":\" +\n o_list[2] + \" ) : \" +\n \"( ( \" + \" < \".join([o_list[0], o_list[2]]) + \" ) ?\" + o_list[0] +\n \":\" + o_list[2] + \" )\"\n )\n f.write(\"o = \" + cond + \";\\n\")\n if is_v:\n f.write(\"endmodule\")\n else:\n f.write(\"}\")\n\n f.close()", "def create_function():\n\n ffi = cffi.FFI()\n ffi.cdef(\"\"\"\nint overhead(int32_t* list, size_t num, char* utf8, int* error);\n\"\"\")\n c = ffi.dlopen(\"./liboverhead/liboverhead.so\")\n overhead = c.overhead\n\n def func(list_, length, text, error):\n return overhead(list_, length, text, error)\n\n return overhead", "def set_mangling(self):\n # This one generates a program equivalent to that in BLACS/INSTALL\n # that checks the mangling in FORTRAN function symbols\n print 'Setting Fortran mangling...',\n sys.stdout.flush()\n writefile('tmpf.f',\"\"\"\n program intface\n external c_intface\n integer i\n call c_intface(i)\n stop\n end\\n\"\"\")\n writefile('tmpc.c',\"\"\"\n #include <stdio.h>\n void c_intface_(int *i){fprintf(stdout, \\\"-DADD_\\\");fflush(stdout);}\n void c_intface(int *i){fprintf(stdout, \\\"-DNOCHANGE\\\");fflush(stdout);}\n void c_intface__(int *i){fprintf(stdout, \\\"-DfcIsF2C\\\");fflush(stdout);}\n void C_INTFACE(int *i){fprintf(stdout, \\\"-DUPCASE\\\");fflush(stdout);}\\n\"\"\")\n\n ccomm = self.config.cc+' '+self.config.ccflags+' -c tmpc.c -o tmpc.o'\n fcomm = self.config.fc+' '+self.config.fcflags+' '+self.config.ldflags_fc+' tmpf.f tmpc.o -o xintface'\n\n (output, error, retz) = runShellCommand(ccomm)\n if retz:\n print '\\n\\nCOMMON: in set_mangling: cannot compile'\n print 'error is:\\n','*'*40,'\\n',error,'\\n','*'*40\n sys.exit()\n\n (output, error, retz) = runShellCommand(fcomm)\n if retz:\n print '\\n\\nCOMMON: in set_mangling: cannot compile'\n print 'error is:\\n','*'*40,'\\n',error,'\\n','*'*40\n sys.exit()\n\n comm = os.path.join(os.getcwd(),'xintface')\n (output, error, retz) = 
runShellCommand(comm)\n if retz:\n print '\\n\\nCOMMON: in set_mangling: cannot run xintface'\n print 'error is:\\n','*'*40,'\\n',error,'\\n','*'*40\n sys.exit()\n\n self.mangling = output\n killfiles(['xintface', 'tmpf.f', 'tmpf.o', 'tmpc.c', 'tmpc.o'])\n\n print self.mangling\n return 1;", "def initialize_header(fopen, uid):\n fopen.seek(0, 0)\n fopen.write(b'\\x84\\x83\\x82\\x81')\n fopen.write(\n struct.pack('<32s', COMPATIBILITY_VERSION.ljust(32, b'\\x00'))\n )\n fopen.write(struct.pack('<16s', uid.bytes))\n fopen.seek(8, 1)", "def build(c):", "def generate_header():\n header_file = AUTOGEN_WARNING\n header_file += \"/// /file atomic_nuclear_data.h\\n\"\n header_file += \"/// /author Andrew Davis (andrew.davis@wisc.edu)\\n\"\n header_file += \"///\\n\"\n header_file += (\n \"/// /brief Implements all the fundamental atomic & nuclear data data\\n\"\n )\n header_file += \"#include <map>\\n\"\n header_file += \"\\n\"\n header_file += \"namespace pyne\\n\"\n header_file += \"{\\n\"\n header_file += (\n \" /// main function to be called when you wish to load the nuclide data \\n\"\n )\n header_file += \" /// into memory \\n\"\n header_file += \" void _load_atomic_mass_map_memory();\\n\"\n header_file += \" /// function to create mapping from nuclides in id form\\n\"\n header_file += \" /// to their atomic masses\\n\"\n header_file += \" \\n\"\n header_file += \" void _insert_atomic_mass_map();\\n\"\n header_file += \" \\n\"\n header_file += \" /// function to create mapping from nuclides in id form \\n\"\n header_file += \" /// to their natural abundances\\n\"\n header_file += \" void _insert_abund_map();\\n\"\n header_file += \" \\n\"\n header_file += (\n \" /// Mapping from nuclides in id form to their natural abundances\\n\"\n )\n header_file += \" extern std::map<int,double> natural_abund_map;\\n\"\n header_file += \" \\n\"\n header_file += \" /// Mapping from nuclides in id form to their atomic masses.\\n\"\n header_file += \" extern std::map<int,double> atomic_mass_map;\\n\"\n header_file += \" \\n\"\n header_file += (\n \" /// Mapping from nuclides in id form to the associated error in \\n\"\n )\n header_file += \" /// abdundance \\n\"\n header_file += \" extern std::map<int,double> atomic_mass_error_map;\\n\"\n header_file += \"} // namespace pyne\\n\"\n return header_file", "def boilerplate(attr: st.EncodingAttr):\n return f\"\"\"\nfunc.func @main(%a: tensor<8x8xf64>,\n %b: tensor<8x8xf64>,\n %c: tensor<8x8xf64>) -> tensor<8x8xf64> attributes {{ llvm.emit_c_interface }} {{\n %t = arith.constant sparse<[[0,0], [0,2], [4,1]], [1.0, 2.0, 3.0]> : tensor<8x8xf64>\n %s = sparse_tensor.convert %t : tensor<8x8xf64> to tensor<8x8xf64, {attr}>\n %0 = call @sddmm(%a, %b, %s, %c) : (tensor<8x8xf64>,\n tensor<8x8xf64>,\n tensor<8x8xf64, {attr}>,\n tensor<8x8xf64>) -> tensor<8x8xf64>\n return %0 : tensor<8x8xf64>\n}}\n\"\"\"", "def gen_header(cmd_list):\n\ts = \"/* Warning: This file is automatically generated. Do not modify. */\\n\"\n\ts += \"#ifndef COMMGEN_H\\n\"\n\ts += \"#define COMMGEN_H\\n\\n\"\n\ts += \"#ifdef __cplusplus\\n\"\n\ts += \"extern \\\"C\\\" {\\n\"\n\ts += \"#endif\\n\\n\"\n\ts += \"#include <stdint.h>\\n\\n\"\n\ts += gen_struct_def(cmd_list)\n\ts += \"/* To avoid the volatile qualifier being a pain in the ass, the main loop\\n\"\n\ts += \" * accesses the DataReal struct through this pointer. */\\n\"\n\ts += \"extern volatile struct comm_data_t *Data;\\n\\n\"\n\ts += \"/* Parse a packet, update the struct, and send a reply. 
*/\\n\"\n\ts += \"void parse_packet(uint8_t *buf, uint16_t count);\\n\\n\"\t\n\tfor c in cmd_list:\n\t\ts += gen_send_proto(c) + \"\\n\"\n\t\ts + gen_parse_proto(c) + \"\\n\"\n\ts += gen_packing_protos()\n\ts += gen_build_str_dec()\n\t#s += \"void send_packet(uint8_t *data, uint16_t count);\\n\\n\"\n\ts += \"#ifdef __cplusplus\\n\"\n\ts += \"}\\n\"\n\ts += \"#endif\\n\\n\"\t\n\ts += \"#endif\\n\"\n\treturn s", "def cpp_function(self):", "def _deviceVariableFunctionName(self, tree, permitted_prefixes, allow_lengths = True):\n cpp_func_name = \"\"\n py_func = tree.attr\n # extract function name start\n for prefix in permitted_prefixes:\n if py_func.startswith(prefix):\n cpp_func_name = prefix\n py_func = py_func[len(prefix):]\n break # dont allow the else\n else:\n return None\n # check type and lengths\n if allow_lengths:\n #split to get type and Array Length (This could **potentially** be looked up from the model description but current syntax is consistent with swig bindings) \n type_and_length = py_func.split(\"Array\")\n if type_and_length[0] not in self._fgpu_types:\n self.RaiseError(tree, f\"'{type_and_length[0]}' is not a valid FLAME GPU type\")\n t = self._fgpu_types[type_and_length[0]]\n # generate template args\n if (len(type_and_length) == 1):\n cpp_func_name += f\"<{t}>\"\n elif (len(type_and_length) == 2):\n cpp_func_name += f\"<{t}, {type_and_length[1]}>\"\n else:\n return None\n else:\n if py_func not in self._fgpu_types:\n self.RaiseError(tree, f\"'{py_func}' is not a valid FLAME GPU type\")\n t = self._fgpu_types[py_func]\n cpp_func_name += f\"<{t}>\"\n # return \n return cpp_func_name", "def GenerateFixedFunctions(self, out):\n out.Write(\"\"\"\n\nstatic PPB_GetInterface __real_PPBGetInterface;\nstatic PPP_GetInterface_Type __real_PPPGetInterface;\n\nvoid __set_real_%(wrapper_prefix)s_PPBGetInterface(PPB_GetInterface real) {\n __real_PPBGetInterface = real;\n}\n\nvoid __set_real_%(wrapper_prefix)s_PPPGetInterface(PPP_GetInterface_Type real) {\n __real_PPPGetInterface = real;\n}\n\n/* Map interface string -> wrapper metadata */\nstatic struct %(wrapper_struct)s *%(wrapper_prefix)sPPBShimIface(\n const char *name) {\n struct %(wrapper_struct)s **next = s_ppb_wrappers;\n while (*next != NULL) {\n if (mystrcmp(name, (*next)->iface_macro) == 0) return *next;\n ++next;\n }\n return NULL;\n}\n\n/* Map interface string -> wrapper metadata */\nstatic struct %(wrapper_struct)s *%(wrapper_prefix)sPPPShimIface(\n const char *name) {\n struct %(wrapper_struct)s **next = s_ppp_wrappers;\n while (*next != NULL) {\n if (mystrcmp(name, (*next)->iface_macro) == 0) return *next;\n ++next;\n }\n return NULL;\n}\n\nconst void *__%(wrapper_prefix)s_PPBGetInterface(const char *name) {\n struct %(wrapper_struct)s *wrapper = %(wrapper_prefix)sPPBShimIface(name);\n if (wrapper == NULL) {\n /* We did not generate a wrapper for this, so return the real interface. */\n return (*__real_PPBGetInterface)(name);\n }\n\n /* Initialize the real_iface if it hasn't been. The wrapper depends on it. */\n if (wrapper->real_iface == NULL) {\n const void *iface = (*__real_PPBGetInterface)(name);\n if (NULL == iface) return NULL;\n wrapper->real_iface = iface;\n }\n\n return wrapper->wrapped_iface;\n}\n\nconst void *__%(wrapper_prefix)s_PPPGetInterface(const char *name) {\n struct %(wrapper_struct)s *wrapper = %(wrapper_prefix)sPPPShimIface(name);\n if (wrapper == NULL) {\n /* We did not generate a wrapper for this, so return the real interface. 
*/\n return (*__real_PPPGetInterface)(name);\n }\n\n /* Initialize the real_iface if it hasn't been. The wrapper depends on it. */\n if (wrapper->real_iface == NULL) {\n const void *iface = (*__real_PPPGetInterface)(name);\n if (NULL == iface) return NULL;\n wrapper->real_iface = iface;\n }\n\n return wrapper->wrapped_iface;\n}\n\"\"\" % { 'wrapper_struct' : self.GetWrapperMetadataName(),\n 'wrapper_prefix' : self.wrapper_prefix,\n } )", "def parse_capi(lines):\n pattern = r'(\\w+)\\s+(\\**)\\s*(\\w+)\\((.*)\\)' # Float32 *sin(...)\n pexcept = r'except (\\??)(.*)'\n\n functions = []\n for line in lines:\n if line.strip():\n m = re.match(pattern, line)\n restype, stars, fname, argtypes = m.groups()\n rest = line[len(m.group(0)):].strip()\n if rest:\n maybe, badval = re.match(pexcept, rest).groups()\n else:\n maybe, badval = None, None\n\n restype = parse_type(\"%s %s\" % (restype, \" \".join(stars)))\n argtypes = map(parse_type, argtypes.split(','))\n signature = Function(restype, argtypes)\n functions.append(Py_Function(fname, signature, maybe, badval))\n\n return functions", "def generate_definition(self):\n apientry = \"\"\n if self.__name[:2] == \"gl\":\n apientry = \"DNLOAD_APIENTRY \"\n params = \"void\"\n if self.__parameters:\n params = \", \".join(self.__parameters)\n return \"%s (%s*%s)(%s)\" % (self.__returntype, apientry, self.__name, params)", "def init_c_code(self):\r\n subd = dict(\r\n zip(self.fgraph.inputs,\r\n [\"%%(i%i)s\" % i for i in xrange(len(self.fgraph.inputs))])\r\n + zip(self.fgraph.outputs,\r\n [\"%%(o%i)s\" % i for i in xrange(len(self.fgraph.outputs))]))\r\n\r\n for orphan in self.fgraph.variables: # fgraph.orphans:\r\n if orphan.owner is None and orphan not in self.fgraph.inputs:\r\n if isinstance(orphan, Constant):\r\n subd[orphan] = orphan.type.c_literal(orphan.data)\r\n else:\r\n raise ValueError(\r\n \"All orphans in the fgraph to Composite must\"\r\n \" be Constant instances.\")\r\n\r\n _c_code = \"{\\n\"\r\n self.nodenames = [\"%(nodename)s_\" + ('subnode%i' % j)\r\n for j, n in enumerate(self.fgraph.toposort())]\r\n\r\n i = 0\r\n for j, node in enumerate(self.fgraph.toposort()):\r\n for output in node.outputs:\r\n if output not in subd:\r\n i += 1\r\n name = \"V%%(id)s_tmp%i\" % i\r\n subd[output] = name\r\n _c_code += \"%s %s;\\n\" % (\r\n output.type.dtype_specs()[1], name)\r\n s = node.op.c_code(node,\r\n self.nodenames[j],\r\n [subd[input] for input in node.inputs],\r\n [subd[output] for output in node.outputs],\r\n dict(fail=\"%(fail)s\",\r\n id=\"%%(id)s_%i\" % j))\r\n _c_code += s\r\n _c_code += \"\\n\"\r\n _c_code += \"}\\n\"\r\n self._c_code = _c_code", "def generate_cpp():\n cpp_file = AUTOGEN_WARNING\n cpp_file += \"// Implements basic nuclear data functions.\\n\"\n cpp_file += \"#ifndef PYNE_IS_AMALGAMATED\\n\"\n cpp_file += '#include \"atomic_data.h\"\\n'\n cpp_file += '#include \"nucname.h\"\\n'\n cpp_file += \"#endif\\n\"\n cpp_file += \" \\n\"\n cpp_file += \"void pyne::_load_atomic_mass_map_memory() { \\n\"\n cpp_file += \" // header version of atomic weight table data \\n\"\n cpp_file += \" //see if the data table is already loaded\\n\"\n cpp_file += \" if(!atomic_mass_map.empty()) {\\n\"\n cpp_file += \" return;\\n\"\n cpp_file += \" } else { \\n\"\n cpp_file += \" _insert_atomic_mass_map();\\n\"\n cpp_file += \" }\\n\"\n cpp_file += \" //see if the data table is already loaded\\n\"\n cpp_file += \" if(!natural_abund_map.empty()) {\\n\"\n cpp_file += \" return;\\n\"\n cpp_file += \" } else { \\n\"\n cpp_file += \" 
_insert_abund_map();\\n\"\n cpp_file += \" }\\n\"\n cpp_file += \" // calculate the atomic_masses of the elements \\n\"\n cpp_file += \" std::map<int,double> :: iterator it;\\n\"\n cpp_file += \" \\n\"\n cpp_file += \" for (int z = 1; z <= 92 ; z++) {\\n\"\n cpp_file += \" // loop through the natural abundance map\\n\"\n cpp_file += \" double element_atomic_weight = 0.0;\\n\"\n cpp_file += \" for (it = natural_abund_map.begin(); it != natural_abund_map.end() ; ++it){\\n\"\n cpp_file += \" // if the atomic number of the abudance matches the\\n\"\n cpp_file += \" // that of index\\n\"\n cpp_file += \" if(pyne::nucname::znum(it->first) == z) {\\n\"\n cpp_file += \" // take atomic abundance and multiply by mass\\n\"\n cpp_file += (\n \" // to get the mass of that nuclide / 100 since abundance is in %\\n\"\n )\n cpp_file += \" element_atomic_weight += (it->second*atomic_mass_map[it->first]/100.0);\\n\"\n cpp_file += \" }\\n\"\n cpp_file += \" }\\n\"\n cpp_file += \" // insert the abundance of the element into the list\\n\"\n cpp_file += \" atomic_mass_map[z*10000000] = element_atomic_weight;\\n\"\n cpp_file += \" }\\n\"\n cpp_file += \"}\\n\"\n cpp_file += \"\\n\\n\"\n cpp_file += \"void pyne::_insert_atomic_mass_map() { \\n\"\n cpp_file += generate_atomic_mass()\n cpp_file += \"}\\n\"\n cpp_file += \"\\n\\n\"\n cpp_file += \"void pyne::_insert_abund_map() { \\n\"\n cpp_file += generate_abundances()\n cpp_file += \"}\\n\"\n return cpp_file", "def make_cpp_func_bodies(self):\n\t\tfor name, body in self.func_bodies.iteritems():\n\t\t\tt = Lexer(body).get_tokens()\t\t\t\n\t\t\tS = [] #Stack\n\t\t\tx = 0\n\t\t\twhile x < len(t):\n\t\t\t\tif t[x] == '(': #function call begins\n\t\t\t\t\tx += 1\n\t\t\t\t\tS.append(self.FUNCS_DICT.get(t[x], t[x]) + '(')\n\t\t\t\telif t[x] == ')': #function call ends\n\t\t\t\t\tacc = ''\n\t\t\t\t\twhile S[-1][-1] != '(':\n\t\t\t\t\t\t#pop off params until function call is reached\n\t\t\t\t\t\tacc = S.pop() + ',' + acc\n\t\t\t\t\t# [:-1] to strip off comma at the end\n\t\t\t\t\tS.append(S.pop() + acc[:-1] + ')') #S.pop() gives function\n\t\t\t\telse:\n\t\t\t\t\tS.append(self.convert_atom(t[x]))\n\t\t\t\tx += 1\n\t\t\tself.cpp_func_bodies[name] = S[0]", "def buildCDeclaration(self, indent=\"\", parent=\"\", **kwargs):\n dec = self.comment.buildCComment()\n name = self.name\n if len(parent) != 0:\n name = parent.upper()+\"_\"+self.name\n return dec+\"#define %-60s 0x%04X\" % (name, self.value)", "def fortran_c_wrapper(self) -> str:\n return ''", "def __init__(\n self,\n model,\n ipakcb=None,\n intercellt=0,\n laycon=3,\n trpy=1.0,\n hdry=-1e30,\n iwdflg=0,\n wetfct=0.1,\n iwetit=1,\n ihdwet=0,\n ikvflag=0,\n ikcflag=0,\n tran=1.0,\n hy=1.0,\n vcont=1.0,\n kv=1.0,\n anglex=0.0,\n ksat=1.0,\n sf1=1e-5,\n sf2=0.15,\n wetdry=-0.01,\n extension=\"bcf\",\n unitnumber=None,\n filenames=None,\n add_package=True,\n ):\n msg = (\n \"Model object must be of type flopy.mfusg.MfUsg\\n\"\n f\"but received type: {type(model)}.\"\n )\n assert isinstance(model, MfUsg), msg\n\n super().__init__(\n model,\n ipakcb=ipakcb,\n intercellt=intercellt,\n laycon=laycon,\n trpy=trpy,\n hdry=hdry,\n iwdflg=iwdflg,\n wetfct=wetfct,\n iwetit=iwetit,\n ihdwet=ihdwet,\n tran=tran,\n hy=hy,\n vcont=vcont,\n sf1=sf1,\n sf2=sf2,\n wetdry=wetdry,\n extension=extension,\n unitnumber=unitnumber,\n filenames=filenames,\n add_package=False,\n )\n\n dis = model.get_package(\"DIS\")\n if dis is None:\n dis = model.get_package(\"DISU\")\n structured = self.parent.structured\n\n nrow, ncol, nlay, _ = 
self.parent.nrow_ncol_nlay_nper\n\n self.ikvflag = ikvflag\n self.ikcflag = ikcflag\n self.kv = kv\n self.anglex = anglex\n self.ksat = ksat\n\n if not structured:\n njag = dis.njag\n self.anglex = Util2d(\n model,\n (njag,),\n np.float32,\n anglex,\n \"anglex\",\n locat=self.unit_number[0],\n )\n\n # item 1\n self.kv = Util3d(\n model,\n (nlay, nrow, ncol),\n np.float32,\n kv,\n \"Vertical Hydraulic Conductivity\",\n locat=self.unit_number[0],\n )\n if not structured:\n self.ksat = Util3d(\n model,\n (njag,),\n np.float32,\n ksat,\n \"ksat\",\n locat=self.unit_number[0],\n )\n\n if add_package:\n self.parent.add_package(self)", "def makecxx(self, gen, exe=0):\n services = []\n inits = []\n defs = []\n for serv in self.services:\n defs.append(serv.defs)\n service = cxxService.substitute(component=self.name, service=serv.name,\n parameters=gen.makeArgs(serv),\n body=serv.body, exe=exe)\n streams = []\n for name, typ, dep in serv.instream:\n streams.append(' create_calcium_port(this,(char *)\"%s\",(char *)\"%s\",(char *)\"IN\",(char *)\"%s\");'% (name, typ, dep))\n instream = \"\\n\".join(streams)\n streams = []\n for name, typ, dep in serv.outstream:\n streams.append(' create_calcium_port(this,(char *)\"%s\",(char *)\"%s\",(char *)\"OUT\",(char *)\"%s\");'% (name, typ, dep))\n outstream = \"\\n\".join(streams)\n\n init = initService.substitute(component=self.name, service=serv.name,\n instream=instream, outstream=outstream)\n services.append(service)\n inits.append(init)\n\n CalciumInterface=\"\"\n if self.calciumextendedinterface:\n CalciumInterface=\"#include <CalciumInterface.hxx>\"\n\n return cxxCompo.substitute(component=self.name, module=gen.module.name,\n exe=exe, exe_path=self.exe_path,\n servicesdef=\"\\n\".join(defs),\n servicesimpl=\"\\n\".join(services),\n initservice='\\n'.join(inits),\n CalciumInterface=CalciumInterface)", "def make_get_in_struct(self):\n res = \\\n\"\"\"DLLEXPORT {rettype}* {prefix}_get_{name}_struct(){{\n return ___madz_IN_{name};\n}}\n\"\"\"\n fragments ={\n \"rettype\":\"___madz_TYPE_\" + self._namespace_mangle(self.namespace),\n \"prefix\":self.python_mangle,\n \"name\":self._namespace_mangle(self.namespace)\n }\n return res.format(**fragments)", "def generate_code(spn_id, spn, meta_types, floating_data_type):\r\n\r\n # make sure we have ids\r\n assign_ids(spn)\r\n\r\n # fill method body according to SPN structure\r\n method_body = generate_method_body(spn, spn, floating_data_type, 0)\r\n\r\n # build parameters used in generated c++ function\r\n method_params = []\r\n passed_params = []\r\n for i, type in enumerate(meta_types):\r\n if type == MetaType.DISCRETE:\r\n method_params += [f'vector <int> possibleValues{i}', f'int nullValueIdx{i}']\r\n passed_params += [f'py::arg(\"possibleValues{i}\")', f'py::arg(\"nullValueIdx{i}\")']\r\n elif type == MetaType.REAL:\r\n method_params += [f'bool inverse{i}', f'bool leftMinusInf{i}', f'float leftCondition{i}',\r\n f'bool rightMinusInf{i}', f'float rightCondition{i}', f'bool leftIncluded{i}',\r\n f'bool rightIncluded{i}', f'float nullValue{i}']\r\n passed_params += [f'py::arg(\"inverse{i}\")', f'py::arg(\"leftMinusInf{i}\")', f'py::arg(\"leftCondition{i}\")',\r\n f'py::arg(\"rightMinusInf{i}\")', f'py::arg(\"rightCondition{i}\")',\r\n f'py::arg(\"leftIncluded{i}\")', f'py::arg(\"rightIncluded{i}\")', f'py::arg(\"nullValue{i}\")']\r\n\r\n value_dictionary = {\r\n 'spn_id': spn_id,\r\n 'method_body': method_body,\r\n 'method_params': ', '.join(method_params),\r\n 'node_count': get_number_of_nodes(spn),\r\n 
'passed_params': ', '.join(passed_params),\r\n 'floating_data_type': floating_data_type\r\n }\r\n generated_method = replace_template(TemplatePath.METHOD_MASTER, value_dictionary, 0)\r\n registrate_method = replace_template(TemplatePath.REGISTRATION_MASTER, value_dictionary, 0)\r\n\r\n return generated_method, registrate_method", "def buildCDeclaration(self, indent=\"\", parent=\"\", **kwargs):\n dec = self.comment.buildCComment(\"\")\n dec += \"typedef \"\n if self.define.getType() == \"value\":\n dec += self.define.buildCDeclaration(parent=\"\")\n else:\n if self.sym == None:\n dec += self.define.buildCDeclaration(parent=\"\")\n else:\n dec += self.sym.buildCDeclaration(parent=\"\")\n # this is another horrible work around, but replace the last character\n # (a semi-colon) with a space\n if self.define.getType() != \"value\":\n dec = dec[:-1]\n dec += \" \"+self.name+\";\"\n return dec", "def get_C_code(self, C_function_name):\n from cascada.bitvector.printing import BvCCodePrinter\n\n width2type = BvCCodePrinter._width2C_type\n\n # in C, * binds to the declarator, not the type specifier\n input_vars_c = ', '.join([\"{} {}\".format(width2type(v.width), v.name) for v in self.input_vars])\n output_vars_c = ', '.join([\"{} *{}\".format(width2type(v.width), v.name) for v in self.output_vars])\n if self.external_vars:\n external_vars_c = ', '.join([\"{} {}\".format(width2type(v.width), v.name) for v in self.external_vars])\n external_vars_c = external_vars_c + \", \"\n else:\n external_vars_c = \"\"\n\n aux = f\"void {C_function_name}({input_vars_c}, {external_vars_c}{output_vars_c})\"\n header = f\"{aux};\"\n body = f\"#include <stdint.h>\\n{aux}{{\" # stdint for uint_*\n\n outvar2outvar_c = {v: core.Variable(\"*\" + v.name, v.width, allowed_symbols=\"*\") for v in self.output_vars}\n\n def primary_assignment2C_code(my_var, my_expr):\n assert isinstance(my_expr, (core.Constant, core.Variable, operation.PrimaryOperation))\n if my_var in self.output_vars:\n return f\"*{my_var} = {my_expr.crepr()};\"\n else:\n return f\"{width2type(my_var.width)} {my_var} = {my_expr.crepr()};\"\n\n for var, expr in self.assignments.items():\n expr = expr.xreplace(outvar2outvar_c)\n if isinstance(expr, operation.SecondaryOperation):\n expr = expr.doit(eval_sec_ops=True)\n body += f\"\\n\\t{primary_assignment2C_code(var, expr)}\"\n body += \"\\n};\"\n\n return header, body", "def make_cache_key(udf, sig):\n codebytes = udf.__code__.co_code\n constants = udf.__code__.co_consts\n names = udf.__code__.co_names\n\n if udf.__closure__ is not None:\n cvars = tuple(x.cell_contents for x in udf.__closure__)\n cvarbytes = dumps(cvars)\n else:\n cvarbytes = b\"\"\n\n return names, constants, codebytes, cvarbytes, sig", "def fmtd_str(self,c=False,prefix_symbol=\"\"):\n psym = prefix_symbol\n ms1 = (\n f\"{self.filename_prefix_mono}{self.event_kind:9} \"\n f\"{self.separator * len(self.stack)} \"\n )\n lms1 = len(ms1)+len(\"(\")\n join_mstr = f\",\\n{' '*lms1}\"\n mavs = (\n f\"{self.argvars}\"\n )\n ms = f\"{psym}{ms1}{mavs}\"\n if c:\n aac = argvars_argname_color = \"MAGENTA\"\n ps1 = (\n f\"{self.filename_prefix_poly}{self.color.fore(f'{self.event_kind:9}','KIND')} \"\n f\"{self.separator * len(self.stack)} \"\n )\n lps1 = lms1\n join_pstr = f\",\\n{' '*lps1}\"\n pavs = (\n f\"{self.argvars}\"\n )\n ps = f\"{psym}{ps1}{pavs}\"\n return ps\n return ms", "def buildCDeclaration(self, indent=\"\", parent=\"\", **kwargs):\n dec = self.comment.buildCComment(\"\")\n name = self.name\n if len(parent) != 0:\n name = 
parent.upper()+\"_\"+self.name\n # XAP compiler doesn't like pre-processor commands not left justified so\n # ignore indent\n return dec+\"#define %-60s 0x%04X\" % (name, self.value)", "def load_c_functions(self):\n\n # Load shared object\n lib = ctypes.cdll.LoadLibrary(os.path.join(self.working_directory,\"models/doubly_constrained/flow_forward_models.so\"))\n lib2 = ctypes.cdll.LoadLibrary(os.path.join(self.working_directory,\"models/doubly_constrained/potential_function.so\"))\n\n # Load DSF procedure flow inference\n self.infer_flows_dsf_procedure = lib.infer_flows_dsf_procedure\n self.infer_flows_dsf_procedure.restype = ctypes.c_double\n self.infer_flows_dsf_procedure.argtypes = [ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ctypes.c_size_t,\n ctypes.c_size_t,\n ctypes.c_double,\n ctypes.c_size_t,\n ctypes.c_bool,\n ctypes.c_bool]\n\n\n # Load Newton Raphson procedure flow inference\n self.infer_flows_newton_raphson = lib.infer_flows_newton_raphson\n self.infer_flows_newton_raphson.restype = None #ctypes.c_double\n self.infer_flows_newton_raphson.argtypes = [ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ctypes.c_double,\n ctypes.c_size_t,\n ctypes.c_size_t,\n ctypes.c_size_t,\n ctypes.c_size_t]\n\n # Load Iterative proportional filtering procedure flow inference\n self.infer_flows_ipf_procedure = lib.infer_flows_ipf_procedure\n self.infer_flows_ipf_procedure.restype = ctypes.c_double\n self.infer_flows_ipf_procedure.argtypes = [ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ctypes.c_size_t,\n ctypes.c_size_t,\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ctypes.c_size_t,\n ctypes.c_double,\n ctypes.c_bool]\n\n # Load Iterative proportional filtering procedure flow inference\n self.infer_flows_ipf_procedure_singly = lib.infer_flows_ipf_procedure_singly\n self.infer_flows_ipf_procedure_singly.restype = ctypes.c_double\n self.infer_flows_ipf_procedure_singly.argtypes = [ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ctypes.c_size_t,\n ctypes.c_size_t,\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ctypes.c_size_t,\n ctypes.c_double,\n ctypes.c_bool]\n\n # Load potential function\n self.potential_stochastic = lib2.potential_stochastic\n self.potential_stochastic.restype = ctypes.c_double\n self.potential_stochastic.argtypes = [ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, 
flags=\"C_CONTIGUOUS\"),\n ndpointer(ctypes.c_double, flags=\"C_CONTIGUOUS\"),\n ctypes.c_size_t,\n ctypes.c_size_t]", "def generate_from(self, ast: ast_pb2.AST):\n for s in self._generate_headlines():\n yield s\n yield f'PYBIND11_MODULE({self._module_name}, m) {{'\n yield I+('m.doc() = \"CLIF generated pybind11-based module for '\n f'{ast.source}\";')\n for decl in ast.decls:\n if decl.decltype == ast_pb2.Decl.Type.FUNC:\n for s in function.generate_from(decl.func):\n yield s\n yield ''\n yield '}'", "def translate(code, hdrname, extra_cpp_args=[], whitelist=None):\n extra_incdir = os.path.dirname(hdrname)\n extra_cpp_args += ['-I', extra_incdir]\n p = AutoPxd(hdrname)\n p.visit(parse(code, extra_cpp_args=extra_cpp_args, whitelist=whitelist))\n pxd_string = ''\n if p.stdint_declarations:\n pxd_string += 'from libc.stdint cimport {:s}\\n\\n'.format(\n ', '.join(p.stdint_declarations))\n pxd_string += str(p)\n return pxd_string", "def get_function(instruction_structure, total_instructions, file_name):\n\n state = [\"LCL\", \"ARG\", \"THIS\", \"THAT\"]\n instruction = instruction_structure[0]\n\n if instruction == \"function\":\n function_name = instruction_structure[1]\n vars_count = int(instruction_structure[2])\n \n bytecode = []\n \n # Start a function block\n bytecode.extend([f\"({function_name})\"])\n\n for _ in range(vars_count):\n bytecode.extend(VirtualMachineLibrary.get_memory(\"push constant 0\", file_name)) \n\n elif instruction == \"call\": \n function_name = instruction_structure[1]\n args_count = instruction_structure[2]\n \n bytecode = []\n \n return_label = \":\".join([file_name, function_name, str(total_instructions), \"RETURN\"])\n\n # Push return address\n bytecode.extend([f\"@{return_label}\"])\n bytecode.extend([\"D=A\", \"@SP\", \"A=M\", \"M=D\"])\n bytecode.extend(VirtualMachineLibrary._get_primary(\"sp++\"))\n\n # Save state\n for address in state:\n bytecode.extend([f\"@{address}\", \"D=M\", \"@R13\", \"M=D\"])\n bytecode.extend(VirtualMachineLibrary._get_primary(\"*a=*b\", a=\"SP\", b=\"R13\", treat_b_as_pointer=False))\n bytecode.extend(VirtualMachineLibrary._get_primary(\"sp++\"))\n\n # Set ARG to point to new base address (sp - 5 - args_count)\n bytecode.extend([\"@SP\", \"D=M\", \"@5\", \"D=D-A\", f\"@{args_count}\", \"D=D-A\", \"@ARG\", \"M=D\"])\n \n # Set LCL to point to current SP\n bytecode.extend([\"@SP\", \"D=M\", \"@LCL\", \"M=D\"])\n \n # Jump to function_name\n bytecode.extend([f\"@{function_name}\", \"0;JMP\"])\n \n # Set return label\n bytecode.extend([f\"({return_label})\"])\n\n bytecode = bytecode\n\n else:\n bytecode = []\n\n # Set R13 to point to callee\"s LCL\n bytecode.extend([\"@LCL\", \"D=M\", \"@R13\", \"M=D\"])\n\n # Set R14 to return address\n bytecode.extend([\"@R13\", \"D=M\", \"@5\", \"D=D-A\", \"A=D\", \"D=M\", \"@R14\", \"M=D\"])\n\n # Set first callee\"s argument to be return value\n bytecode.extend(VirtualMachineLibrary._get_primary(\"sp--\"))\n bytecode.extend(VirtualMachineLibrary._get_primary(\"*a=*b\", a=\"ARG\", b=\"SP\"))\n\n # Reposition SP to be after first callee\"s argument\n bytecode.extend([\"@ARG\", \"D=M+1\", \"@SP\", \"M=D\"])\n \n # Restore registers\n for index, address in enumerate(reversed(state)):\n bytecode.extend([\"@R13\", \"D=M\", f\"@{int(index) + 1}\", \"D=D-A\", \"A=D\", \"D=M\", f\"@{address}\", \"M=D\"])\n \n # Return jump\n bytecode.extend([\"@R14\", \"A=M\", \"0;JMP\"])\n \n return bytecode", "def test_write_osl_file_bsdf_correct_formatting(self):\n m = Mock()\n calls = [call().write('#include 
\"stdosl.h\"\\n\\n'),\n call().write('shader node_node_name_bsdf('\n 'string dropdown1 = \"prop1\",'\n 'string dropdown2 = \"prop3\",'\n 'int int1 = 0,'\n 'int box1 = 0,'\n 'int box2 = 1,'\n 'float float1 = 0.0,'\n 'float Socket1 = 0.5,'\n 'output float Socket2 = 0.0){}\\n')]\n\n with patch('builtins.open', mock_open(m)) as mf:\n with patch('code_generation.code_generator_util.apply_clang_formatting', Mock()):\n code_gen = self._create_default_class(node_type='Bsdf')\n code_gen.write_osl_shader()\n\n self.assertTrue(all(c in mf.mock_calls for c in calls))", "def fortran_c_wrapper(self) -> str:\n result = ''\n for member in self.members:\n result += member.fortran_c_wrapper()\n return result", "def _set_signatures(self):\n self._lib.sc5520a_uhfsOpenDevice.argtypes = [c_int, c_char_p, c_uint8, POINTER(c_void_p)]\n self._lib.sc5520a_uhfsOpenDevice.restype = c_long\n\n self._lib.sc5520a_uhfsCloseDevice.argtypes = [c_void_p]\n self._lib.sc5520a_uhfsCloseDevice.restype = c_int\n\n self._lib.sc5520a_uhfsSetFrequency.argtypes = [c_void_p, c_longdouble]\n self._lib.sc5520a_uhfsSetFrequency.restype = c_uint\n\n self._lib.sc5520a_uhfsSetPowerLevel.argtypes = [c_void_p, c_float]\n self._lib.sc5520a_uhfsSetPowerLevel.restype = c_int\n\n self._lib.sc5520a_uhfsSetOutputEnable.argtypes = [c_void_p, c_ubyte]\n self._lib.sc5520a_uhfsSetOutputEnable.restype = c_int\n\n self._lib.sc5520a_uhfsFetchRfParameters.argtypes = [c_void_p, POINTER(RFParameters)]\n self._lib.sc5520a_uhfsFetchRfParameters.restype = c_int\n\n self._lib.sc5520a_uhfsFetchDeviceStatus.argtypes = [c_void_p, POINTER(DeviceStatus)]\n self._lib.sc5520a_uhfsFetchDeviceStatus.restype = c_int", "def _init_signature(func_name, restype, argtypes):\n global cfi\n f = getattr(cfi, func_name)\n f.restype = restype\n f.argtypes = argtypes", "def fl_library_full_version():\n _fl_library_full_version = library.cfuncproto(\n library.load_so_libforms(), \"fl_library_full_version\", \\\n cty.c_long, [cty.POINTER(cty.c_int), cty.POINTER(cty.c_int), \\\n cty.POINTER(cty.c_int), cty.POINTER(cty.c_char_p)], \\\n \"\"\"long fl_library_full_version(int * ver, int * rev, int * fix_level\n const char ** extra) \"\"\")\n i_ver, ptr_ver = library.make_intc_and_pointer()\n i_rev, ptr_rev = library.make_intc_and_pointer()\n i_fixlvl, ptr_fixlvl = library.make_intc_and_pointer()\n s_extrafixlvl, ptr_extrafixlvl = library.make_stringc_and_pointer()\n library.keep_elem_refs(i_ver, ptr_ver, i_rev, ptr_rev, i_fixlvl, \\\n ptr_fixlvl, s_extrafixlvl, ptr_extrafixlvl)\n retval = _fl_library_full_version(ptr_ver, ptr_rev, ptr_fixlvl, \\\n ptr_extrafixlvl)\n if isinstance(s_extrafixlvl.value, bytes):\n ns_extrafixlvl_val = s_extrafixlvl.value.decode('utf-8')\n else: # str\n ns_extrafixlvl_val = s_extrafixlvl.value\n return retval, i_ver.value, i_rev.value, i_fixlvl.value, \\\n ns_extrafixlvl_val", "def run(_):\n fscad = types.ModuleType(\"fscad.fscad\")\n fscad.__path__ = [os.path.dirname(os.path.realpath(__file__))]\n sys.modules['fscad.fscad'] = fscad\n\n for key in __all__:\n fscad.__setattr__(key, globals()[key])", "def save_grdecl(self , pyfile):\n cfile = CFILE( pyfile )\n self._fprintf_grdecl( cfile )", "def generate_skeleton(self, ci, qname):\n symbol_table = ci.epv.symbol_table\n cls = ci.co\n\n\n # Skeleton (in Chapel)\n self.pkg_chpl_skel.gen(ir.Import('.'.join(symbol_table.prefix)))\n\n self.pkg_chpl_skel.new_def('use sidl;')\n objname = '.'.join(ci.epv.symbol_table.prefix+[ci.epv.name]) + '_Impl'\n\n self.pkg_chpl_skel.new_def('extern record %s__object { var d_data: 
opaque; };'\n %qname)#,objname))\n self.pkg_chpl_skel.new_def('extern proc %s__createObject('%qname+\n 'd_data: int, '+\n 'out ex: sidl_BaseInterface__object)'+\n ': %s__object;'%qname)\n self.pkg_chpl_skel.new_def(ci.chpl_skel)\n\n\n # Skeleton (in C)\n cskel = ci.chpl_skel.cstub\n cskel._name = qname+'_Skel'\n cskel.gen(ir.Import('stdint'))\n cskel.gen(ir.Import('stdio'))\n cskel.gen(ir.Import(cskel._name))\n cskel.gen(ir.Import(qname+'_IOR'))\n cskel.gen(ir.Fn_defn([], ir.pt_void, qname+'__call_load', [],\n [ir.Comment(\"FIXME: [ir.Stmt(ir.Call('_load', []))\")], ''))\n\n # set_epv ... Setup the entry-point vectors (EPV)s\n #\n # there are 2*3 types of EPVs:\n # epv: regular methods\n # sepv: static methods\n # pre_(s)epv: pre-hooks\n # post_(s)epv: post-hooks\n epv_t = ci.epv.get_ir()\n sepv_t = ci.epv.get_sepv_ir()\n pre_epv_t = ci.epv.get_pre_epv_ir()\n pre_sepv_t = ci.epv.get_pre_sepv_ir()\n post_epv_t = ci.epv.get_post_epv_ir()\n post_sepv_t = ci.epv.get_post_sepv_ir()\n cskel.gen(ir.Fn_decl([], ir.pt_void, 'ctor', [], ''))\n cskel.gen(ir.Fn_decl([], ir.pt_void, 'dtor', [], ''))\n\n epv_init = []\n sepv_init = []\n for m in builtins+cls.get_methods():\n fname = m[2][1] + m[2][2]\n attrs = sidlir.method_method_attrs(m)\n static = member_chk(sidlir.static, attrs)\n def entry(stmts, epv_t, table, field, pointer):\n stmts.append(ir.Set_struct_item_stmt(epv_t, ir.Deref(table), field, pointer))\n\n if static: entry(sepv_init, sepv_t, 'sepv', 'f_'+fname, '%s_%s_skel'%(qname, fname))\n else: entry(epv_init, epv_t, 'epv', 'f_'+fname, '%s_%s_skel'%(qname, fname))\n\n builtin_names = ['_ctor', '_ctor2', '_dtor']\n with_hooks = member_chk(ir.hooks, attrs)\n if fname not in builtin_names and with_hooks:\n if static: entry(sepv_init, pre_sepv_t, 'pre_sepv', 'f_%s_pre'%fname, 'NULL')\n else: entry(epv_init, pre_epv_t, 'pre_epv', 'f_%s_pre'%fname, 'NULL')\n if static: entry(sepv_init, post_sepv_t, 'post_sepv', 'f_%s_post'%fname, 'NULL')\n else: entry(epv_init, post_epv_t, 'post_epv', 'f_%s_post'%fname, 'NULL')\n\n pkgname = '_'.join(ci.epv.symbol_table.prefix)\n\n dummyargv = '''\n char* argv[] = { \n babel_program_name,\n \"-nl\", /* number of locales */\n \"\",\n \"-v\", /* verbose chapel runtime */\n NULL\n };\n argv[2] = getenv(\"SLURM_NTASKS\");\n if (argv[2] == NULL) {\n fprintf(stdout, \"**ERROR: please set the SLURM_NTASKS environment variable\\\\n\"\n \" to the desired number of Chapel locales.\");\n argv[2] = \"0\";\n }\n int ignored = setenv(\"GASNET_BACKTRACE\", \"1\", 1);\n'''\n cskel.genh(ir.Import('stdlib'))\n cskel.pre_def('extern int chpl_init_library(int argc, char* argv[]);')\n cskel.pre_def('// You can set this to argv[0] in main() to get better debugging output')\n cskel.pre_def('char* __attribute__((weak)) babel_program_name = \"BRAID_LIBRARY\";')\n # These are now called by chpl_init_library -> chpl_gen_init\n #cskel.pre_def('extern void chpl__init_chpl__Program(int, const char*);')\n #cskel.pre_def('extern void chpl__init_%s_Impl(int, const char*);'%pkgname)\n init_code = [dummyargv,\n 'int locale_id = chpl_init_library(4, argv)',\n # 'chpl__init_chpl__Program(__LINE__, __FILE__)',\n # 'chpl__init_%s_Impl(__LINE__, __FILE__)'%pkgname\n ]\n init_code = map(lambda x: (ir.stmt, x), init_code)\n epv_init.extend(init_code)\n sepv_init.extend(init_code)\n\n cskel.gen(ir.Fn_defn(\n [], ir.pt_void, qname+'__set_epv',\n [ir.Arg([], ir.out, epv_t, 'epv'),\n ir.Arg([], ir.out, pre_epv_t, 'pre_epv'),\n ir.Arg([], ir.out, post_epv_t, 'post_epv')],\n epv_init, ''))\n\n if sepv_t:\n 
cskel.gen(ir.Fn_defn(\n [], ir.pt_void, qname+'__set_sepv',\n [ir.Arg([], ir.out, sepv_t, 'sepv'),\n ir.Arg([], ir.out, pre_sepv_t, 'pre_sepv'),\n ir.Arg([], ir.out, post_sepv_t, 'post_sepv')],\n sepv_init, ''))\n\n # C Skel\n for code in cskel.optional:\n cskel.new_global_def(code)\n cskel.write()", "def main() -> None:\n version_path = './src/fqe/_version.py'\n\n __version__ = version_number(version_path)\n\n if __version__ is None:\n raise ValueError('Version information not found in ' + version_path)\n\n long_description = ('OpenFermion-FQE\\n' +\n '===============\\n')\n stream = io.open('README.md', encoding='utf-8')\n stream.readline()\n long_description += stream.read()\n\n requirements_buffer = open('requirements.txt').readlines()\n requirements = [r.strip() for r in requirements_buffer]\n\n # C code extension\n config_vars = get_config_vars()\n config_vars[\"EXT_SUFFIX\"] = '.' + config_vars[\"EXT_SUFFIX\"].split('.')[-1]\n libdir = os.path.join(\"src\", \"fqe\", \"lib\")\n cfiles = [\n \"macros.c\",\n \"mylapack.c\",\n \"fci_graph.c\",\n \"fqe_data.c\",\n \"cirq_utils.c\",\n \"wick.c\",\n \"bitstring.c\",\n \"binom.c\",\n ]\n srcs = [os.path.join(libdir, cf) for cf in cfiles]\n libraries = []\n extensions = [\n Extension(\"fqe.lib.libfqe\",\n srcs,\n include_dirs=[libdir],\n library_dirs=[libdir],\n libraries=libraries,\n language='c')\n ]\n\n cythonfiles = [\"_fqe_data.pyx\"]\n srcs = [os.path.join(libdir, cf) for cf in cythonfiles]\n extensions.append(Extension(\"fqe.lib.fqe_data\", srcs, language='c'))\n\n setup(name='fqe',\n version=__version__,\n author='The OpenFermion FQE Developers',\n author_email='help@openfermion.org',\n url='http://www.openfermion.org',\n description='OpenFermion Fermionic Quantum Emulator',\n ext_modules=cythonize(extensions,\n compiler_directives={'language_level': \"3\"}),\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n install_requires=requirements,\n license='Apache 2',\n packages=find_packages(where='src'),\n package_dir={'': 'src'},\n cmdclass={'build_ext': CustomBuildOptions})", "def compile_function_to_c(self, function, signatures):\n\n code = []\n code += self.emit_prologue(function)\n labels = Labels()\n for instruction in function.instructions:\n (new_code, new_labels) = self.emit_instruction(instruction, labels, signatures)\n code += new_code\n labels = new_labels\n code += self.emit_epilogue(function)\n return '\\n'.join(code)", "def libSetup(path):\n lib = CDLL(path)\n lib.visitPoints.argtypes = [c_int, c_int, c_char_p]\n lib.visitPoints.restype = c_int\n return lib", "def generate_prototype(self):\n apientry = \"\"\n if self.__name[:2] == \"gl\":\n apientry = \"DNLOAD_APIENTRY \"\n params = \"void\"\n if self.__parameters:\n params = \", \".join(self.__parameters)\n return \"(%s (%s*)(%s))\" % (self.__returntype, apientry, params)", "def build_extension(self, ext):\n build_all()\n super(cffiBuilder, self).build_extension(ext)", "def buildCDeclaration(self, indent=\"\", parent=\"\", **kwargs):\n name = self.name\n if len(parent) != 0:\n name = parent.upper()+\"_\"+self.name\n dec = (\"/* %%-%ds */\\n\" % (DEFAULT_WIDTH-6)) % (name)\n dec += self.comment.buildCComment(indent+\" \")\n for entry in self.entries:\n dec += entry.buildCDeclaration(\" \",name)+\"\\n\"\n return dec", "def make_func_code(params):\n class FuncCode(object):\n __slots__ = ('co_varnames', 'co_argcount')\n fc = FuncCode()\n fc.co_varnames = params\n fc.co_argcount = len(params)\n return fc", "def __init__(self, func_name, 
spin):\n self.xc_func = None\n self._xc_func_init = False\n\n # Handle func_name\n if isinstance(func_name, str):\n func_id = util.xc_functional_get_number(func_name)\n if func_id == -1:\n raise KeyError(\"LibXCFunctional: name '%s' not found.\" % func_name)\n elif isinstance(func_name, (int, np.integer)):\n func_id = func_name\n if util.xc_functional_get_name(func_name) is None:\n raise KeyError(\"LibXCFunctional: ID '%d' not found.\" % func_name)\n else:\n raise TypeError(\"LibXCFunctional: func_name must either be a string or int. Got {}\".format(func_name))\n\n self._xc_func_name = util.xc_functional_get_name(func_id)\n\n # Handle spin\n if isinstance(spin, str):\n spin = spin.lower()\n if spin == \"polarized\":\n self._spin = 2\n elif spin == \"unpolarized\":\n self._spin = 1\n else:\n raise KeyError(\"LibXCFunctional: spin must either be 'polarized' or 'unpolarized' if represented by a string. Got {}\".format(spin))\n else:\n self._spin = spin\n\n if self._spin not in [1, 2]:\n raise KeyError(\"LibXCFunctional: spin must either be 1 or 2 if represented by a integer. Got {}\".format(self._spin))\n\n # Build the LibXC functional\n self.xc_func = core.xc_func_alloc()\n self.xc_func_size_names = [x for x in dir(self.xc_func.contents.dim) if not \"_\" in x]\n\n # Set all int attributes to zero (not all set to zero in libxc)\n for attr in self.xc_func_size_names:\n setattr(self.xc_func.contents, attr, 0)\n\n ret = core.xc_func_init(self.xc_func, func_id, self._spin)\n if ret != 0:\n raise ValueError(\"LibXC Functional construction did not complete. Error code %d\" % ret)\n self._xc_func_init = True\n\n # Pull out all sizes after init\n self.xc_func_sizes = {}\n for attr in self.xc_func_size_names:\n self.xc_func_sizes[attr] = getattr(self.xc_func.contents.dim, attr)\n\n # Unpack functional info\n self.xc_func_info = core.xc_func_get_info(self.xc_func)\n self._number = core.xc_func_info_get_number(self.xc_func_info)\n self._kind = core.xc_func_info_get_kind(self.xc_func_info)\n self._name = core.xc_func_info_get_name(self.xc_func_info).decode(\"UTF-8\")\n self._family = core.xc_func_info_get_family(self.xc_func_info)\n self._flags = core.xc_func_info_get_flags(self.xc_func_info)\n\n # Set needed flags\n self._needs_laplacian = self._flags & flags.XC_FLAGS_NEEDS_LAPLACIAN\n\n # Set derivatives\n self._have_exc = self._flags & flags.XC_FLAGS_HAVE_EXC\n self._have_vxc = self._flags & flags.XC_FLAGS_HAVE_VXC\n self._have_fxc = self._flags & flags.XC_FLAGS_HAVE_FXC\n self._have_kxc = self._flags & flags.XC_FLAGS_HAVE_KXC\n self._have_lxc = self._flags & flags.XC_FLAGS_HAVE_LXC\n\n # Set omega\n self._have_cam = self._flags & flags.XC_FLAGS_HYB_CAM\n self._have_cam |= self._flags & flags.XC_FLAGS_HYB_CAMY\n self._have_cam |= self._flags & flags.XC_FLAGS_HYB_LC\n self._have_cam |= self._flags & flags.XC_FLAGS_HYB_LCY\n self._cam_omega = self._cam_alpha = self._cam_beta = False\n if self._have_cam:\n self._cam_omega = self.xc_func.contents.cam_omega\n self._cam_alpha = self.xc_func.contents.cam_alpha\n self._cam_beta = self.xc_func.contents.cam_beta\n\n elif self._family in [flags.XC_FAMILY_HYB_LDA, flags.XC_FAMILY_HYB_GGA, flags.XC_FAMILY_HYB_MGGA]:\n self._cam_alpha = self.xc_func.contents.cam_alpha\n\n # VV10\n self._have_vv10 = self._flags & flags.XC_FLAGS_VV10\n self._nlc_b = self._nlc_C = False\n if self._have_vv10:\n self._nlc_b = self.xc_func.contents.nlc_b\n self._nlc_C = self.xc_func.contents.nlc_C\n\n # Stable\n self._stable = self._flags & flags.XC_FLAGS_STABLE\n self._dev = self._flags 
& flags.XC_FLAGS_DEVELOPMENT\n\n # Pull out references\n self._refs = []\n self._bibtexs = []\n self._dois = []\n\n for pos in range(flags.XC_MAX_REFERENCES):\n ref = core.xc_func_info_get_references(self.xc_func_info, pos)\n if not ref: break\n\n self._refs.append(ref.contents.ref.decode(\"UTF-8\"))\n self._bibtexs.append(ref.contents.bibtex.decode(\"UTF-8\"))\n self._dois.append(ref.contents.doi.decode(\"UTF-8\"))", "def generate_source():\n \"\"\"their dependencies\"\"\"\n global dictionary_names, dictionary_slices\n src = \"\"\n for s in dictionary_slices:\n src += deconstruct(s)\n src += \" '\" + pointer_to_name(s)\n src += \"' define\\n\"\n return src + \"\\n\"", "def make_libfile():\n # wfc3_obsmodes_uvis\n wfc3_uvis = [\n \"f218w\",\n \"f225w\",\n \"f275w\",\n \"f336w\",\n \"f390m\",\n \"f390w\",\n \"f410m\",\n \"f438w\",\n \"f467m\",\n \"f475w\",\n \"f547m\",\n \"f555w\",\n \"f606w\",\n \"f621m\",\n \"f625w\",\n \"f689m\",\n \"f763m\",\n \"f775w\",\n \"f814w\",\n \"f845m\",\n ]\n\n wfc3_ir = [\n \"f098m\",\n \"f105w\",\n \"f110w\",\n \"f125w\",\n \"f127m\",\n \"f139m\",\n \"f140w\",\n \"f153m\",\n \"f160w\",\n ]\n\n wfpc2 = [\n \"f122m\",\n \"f157w\",\n \"f336w\",\n \"f410m\",\n \"f467m\",\n \"f547m\",\n \"f439w\",\n \"f569w\",\n \"f675w\",\n \"f791w\",\n \"f170w\",\n \"f185w\",\n \"f218w\",\n \"f255w\",\n \"f300w\",\n \"f380w\",\n \"f555w\",\n \"f622w\",\n \"f450w\",\n \"f606w\",\n \"f702w\",\n \"f814w\",\n ]\n\n acs_wfc = [\n \"f435w\",\n \"f475w\",\n \"f550m\",\n \"f555w\",\n \"f606w\",\n \"f625w\",\n \"f775w\",\n \"f814w\",\n ]\n # galex\n galex = [\"fuv\", \"nuv\"]\n\n # Open hd5 file for writing\n hf = h5py.File(__ROOT__ + \"filters.hd5\", \"w\")\n\n # Create group for nice hierarchical structure\n f = hf.create_group(\"filters\")\n\n # Define arrays for \"contents\" / descriptive information\n tablenames = []\n observatories = []\n instruments = []\n names = []\n norms = []\n cwaves = []\n pwaves = []\n comments = []\n\n # Loop through WFC3_UVIS filters\n for filt in wfc3_uvis:\n\n # define uvis 1 and uvis2 modes\n mode_1 = \"wfc3, uvis1, \" + filt\n mode_2 = \"wfc3, uvis2, \" + filt\n\n # pull bandpasses from stsynphot for the two uvis modes\n bp_1 = stsyn.band(mode_1)\n bp_2 = stsyn.band(mode_2)\n\n # extract the wavelength array\n wave = bp_1.waveset\n\n # compute the average bandpass between uvis1 and uvis2\n bp_avg = np.average([bp_1(wave), bp_2(wave)], axis=0)\n\n # define the filter name\n filter_name = \"HST_WFC3_\" + filt.upper()\n\n # build array of wavelength and throughput\n arr = np.array(\n list(zip(wave.value.astype(np.float64), bp_avg.astype(np.float64))),\n dtype=[(\"WAVELENGTH\", \"float64\"), (\"THROUGHPUT\", \"float64\")],\n )\n\n # append dataset to the hdf5 filters group\n f.create_dataset(filter_name, data=arr)\n\n # generate filter instance to compute relevant info\n newfilt = phot.Filter(wave, bp_avg, name=filt.upper())\n\n # populate contents lists with relevant information\n tablenames.append(filter_name)\n observatories.append(\"HST\")\n instruments.append(\"WFC3\")\n names.append(newfilt.name)\n norms.append(newfilt.norm.value)\n cwaves.append(newfilt.cl.value)\n pwaves.append(newfilt.lpivot.value)\n comments.append(\"avg of uvis1 and uvis2\")\n\n # Loop through WFC3_IR filters\n for filt in wfc3_ir:\n\n # define ir mode\n mode = \"wfc3, ir, \" + filt\n\n # pull bandpasses from stsynphot for the two uvis modes\n bp = stsyn.band(mode)\n\n # extract the wavelength array\n wave = bp.waveset\n\n # define the filter name\n filter_name = \"HST_WFC3_\" 
+ filt.upper()\n\n # build array of wavelength and throughput\n arr = np.array(\n list(zip(wave.value.astype(np.float64), bp(wave).astype(np.float64))),\n dtype=[(\"WAVELENGTH\", \"float64\"), (\"THROUGHPUT\", \"float64\")],\n )\n\n # append dataset to the hdf5 filters group\n f.create_dataset(filter_name, data=arr)\n\n # generate filter instance to compute relevant info\n newfilt = phot.Filter(wave, bp(wave), name=filt.upper())\n\n # populate contents lists with relevant information\n tablenames.append(filter_name)\n observatories.append(\"HST\")\n instruments.append(\"WFC3\")\n names.append(newfilt.name)\n norms.append(newfilt.norm.value)\n cwaves.append(newfilt.cl.value)\n pwaves.append(newfilt.lpivot.value)\n comments.append(\"\")\n\n # Loop through WFPC2 filters\n for filt in wfpc2:\n\n # define chips 1, 2, 3, 4 modes\n mode_1 = \"wfpc2, 1, \" + filt\n mode_2 = \"wfpc2, 2, \" + filt\n mode_3 = \"wfpc2, 3, \" + filt\n mode_4 = \"wfpc2, 4, \" + filt\n\n # pull bandpasses from stsynphot for the two uvis modes\n bp_1 = stsyn.band(mode_1)\n bp_2 = stsyn.band(mode_2)\n bp_3 = stsyn.band(mode_3)\n bp_4 = stsyn.band(mode_4)\n\n # extract the wavelength array\n wave = bp_1.waveset\n\n # compute the average bandpass between uvis1 and uvis2\n bp_avg = np.average([bp_1(wave), bp_2(wave), bp_3(wave), bp_4(wave)], axis=0)\n\n # define the filter name\n filter_name = \"HST_WFPC2_\" + filt.upper()\n\n # build array of wavelength and throughput\n arr = np.array(\n list(zip(wave.value.astype(np.float64), bp_avg.astype(np.float64))),\n dtype=[(\"WAVELENGTH\", \"float64\"), (\"THROUGHPUT\", \"float64\")],\n )\n\n # append dataset to the hdf5 filters group\n f.create_dataset(filter_name, data=arr)\n\n # generate filter instance to compute relevant info\n newfilt = phot.Filter(wave, bp_avg, name=filt.upper())\n\n # populate contents lists with relevant information\n tablenames.append(filter_name)\n observatories.append(\"HST\")\n instruments.append(\"WFPC2\")\n names.append(newfilt.name)\n norms.append(newfilt.norm.value)\n cwaves.append(newfilt.cl.value)\n pwaves.append(newfilt.lpivot.value)\n comments.append(\"avg of 1, 2, 3, 4\")\n\n # Loop through ACS filters\n for filt in acs_wfc:\n\n # define wfc1, wfc2 modes\n mode_1 = \"acs, wfc1, \" + filt\n mode_2 = \"acs, wfc2, \" + filt\n\n # pull bandpasses from stsynphot for the two uvis modes\n bp_1 = stsyn.band(mode_1)\n bp_2 = stsyn.band(mode_2)\n\n # extract the wavelength array\n wave = bp_1.waveset\n\n # compute the average bandpass between uvis1 and uvis2\n bp_avg = np.average([bp_1(wave), bp_2(wave)], axis=0)\n\n # define the filter name\n filter_name = \"HST_ACS_WFC_\" + filt.upper()\n\n # build array of wavelength and throughput\n arr = np.array(\n list(zip(wave.value.astype(np.float64), bp_avg.astype(np.float64))),\n dtype=[(\"WAVELENGTH\", \"float64\"), (\"THROUGHPUT\", \"float64\")],\n )\n\n # append dataset to the hdf5 filters group\n f.create_dataset(filter_name, data=arr)\n\n # generate filter instance to compute relevant info\n newfilt = phot.Filter(wave, bp_avg, name=filt.upper())\n\n # populate contents lists with relevant information\n tablenames.append(filter_name)\n observatories.append(\"HST\")\n instruments.append(\"ACS_WFC\")\n names.append(newfilt.name)\n norms.append(newfilt.norm.value)\n cwaves.append(newfilt.cl.value)\n pwaves.append(newfilt.lpivot.value)\n comments.append(\"avg of wfc1 and wfc2\")\n\n # Loop through GALEX filters:\n for filt in galex:\n # define ir mode\n mode = \"galex,\" + filt\n\n # pull bandpasses from 
stsynphot for the two uvis modes\n bp = stsyn.band(mode)\n\n # extract the wavelength array\n wave = bp.waveset\n\n # define the filter name\n filter_name = \"GALEX_\" + filt.upper()\n\n # build array of wavelength and throughput\n arr = np.array(\n list(zip(wave.value.astype(np.float64), bp(wave).astype(np.float64))),\n dtype=[(\"WAVELENGTH\", \"float64\"), (\"THROUGHPUT\", \"float64\")],\n )\n\n # append dataset to the hdf5 filters group\n f.create_dataset(filter_name, data=arr)\n\n # generate filter instance to compute relevant info\n newfilt = phot.Filter(wave, bp(wave), name=filt.upper())\n\n # populate contents lists with relevant information\n tablenames.append(filter_name)\n observatories.append(\"GALEX\")\n instruments.append(\"GALEX\")\n names.append(newfilt.name)\n norms.append(newfilt.norm.value)\n cwaves.append(newfilt.cl.value)\n pwaves.append(newfilt.lpivot.value)\n comments.append(\"\")\n\n # smash the contents arrays together\n contents = np.array(\n list(\n zip(\n tablenames,\n observatories,\n instruments,\n names,\n norms,\n cwaves,\n pwaves,\n comments,\n )\n ),\n dtype=[\n (\"TABLENAME\", \"S40\"),\n (\"OBSERVATORY\", \"S30\"),\n (\"INSTRUMENT\", \"S30\"),\n (\"NAME\", \"S10\"),\n (\"NORM\", \"<f8\"),\n (\"CWAVE\", \"<f8\"),\n (\"PWAVE\", \"<f8\"),\n (\"COMMENT\", \"S100\"),\n ],\n )\n\n # add the contents array as an hd5 dataset\n hf.create_dataset(\"content\", data=contents)\n\n # close the file\n hf.close()", "def __write_cpp_func_name(self, cpp_file, return_type, object_suffix, in_header):\n if in_header:\n func_suffix = \";\"\n else:\n func_suffix = \" {\"\n func_name = \"Make\" + self.class_name + object_suffix + \"()\" + func_suffix\n if len(return_type + \" \" + func_name) > 80:\n print(return_type, file=cpp_file)\n print(func_name, file=cpp_file)\n else:\n print(return_type + \" \" + func_name, file=cpp_file)", "def cython_funcname(self, name, argkinds=None):\n if isinstance(name, basestring):\n return name\n if argkinds is None:\n argkinds = [(Arg.NONE, None)] * (len(name) - 1)\n fname = name[0]\n cfs = []\n for x, (argkind, argvalue) in zip(name[1:], argkinds):\n if argkind is Arg.TYPE:\n cf = self.cython_functionname(x)[1]\n elif argkind is Arg.LIT:\n cf = self.cython_literal(x)\n elif argkind is Arg.VAR:\n cf = x\n elif isinstance(x, Number):\n cf = self.cython_literal(x)\n else:\n try:\n cf = self.cython_functionname(x)[1] # guess type\n except TypeError:\n cf = x # guess variable\n cfs.append(cf)\n fname += '' if 0 == len(cfs) else \"_\" + \"_\".join(cfs)\n return fname", "def decode_cpp_function_names(self) -> None:\n with Popen(['c++filt'], stdin=PIPE, stdout=PIPE, universal_newlines=True) as proc:\n for func in self.source_functions:\n proc.stdin.write(func.name + '\\n')\n proc.stdin.flush()\n func.pretty_name = proc.stdout.readline().rstrip('\\n\\r')", "def genH(self,fp):\n id = 0\n for nm in GetOsekObjects('NM'):\n if(self == nm):\n break\n else:\n id += 1\n fp.write('\\n#define %s %s\\n'%(self.name,id))\n fp.write('#define %s_TYPE NM_%s\\n'%(self.name,self.getValue('TYPE')))\n fp.write('#define %s_tTyp %s\\n'%(self.name,self.getValue('TTYP')))\n fp.write('#define %s_tMax %s\\n'%(self.name,self.getValue('TMAX')))\n fp.write('#define %s_tError %s\\n'%(self.name,self.getValue('TERROR')))\n fp.write('#define %s_tTx %s\\n'%(self.name,self.getValue('TTX')))\n fp.write('#define %s_IDBASE %s\\n'%(self.name,self.getValue('IDBASE')))\n fp.write('#define %s_WINDOWMASK %s\\n'%(self.name,self.getValue('WINDOWMASK')))\n fp.write('#define %s_CONTROLLER 
%s\\n'%(self.name,self.getValue('CONTROLLER')))", "def generate_c_source(self):\n return template_elfling_source % (self.generate_c_data_block(), ELFLING_WORK, ELFLING_OUTPUT, ELFLING_UNCOMPRESSED, len(self.__contexts), ELFLING_WORK, self.get_input_offset(), ELFLING_OUTPUT, self.get_uncompressed_size(), ELFLING_UNCOMPRESSED)", "def csf_sample(self, tns_dir):\n lib = CDLL('./libsample.so') \n input_dir = tns_dir.encode()\n lib.getCsfFeatures.argtypes = [c_char_p]\n lib.getCsfFeatures.restype = c_float_p\n csfFeatures = lib.getCsfFeatures(input_dir)\n return csfFeatures", "def cppdef(src):\n with _stderr_capture() as err:\n errcode = gbl.gInterpreter.Declare(src)\n if not errcode:\n raise SyntaxError('Failed to parse the given C++ code%s' % err.err)\n return True", "def ResNet18_FPN():\n return FPN(BasicBlock, [2, 2, 2, 2])", "def __init__(self, total, function_name, param_sorts, return_sort):\r\n super(FunctionDecl, self).__init__()\r\n global functions\r\n self.total = total\r\n self.function_name = function_name\r\n self.param_sorts = param_sorts\r\n self.return_sort = return_sort\r\n self.basic = basic\r\n self.static = static\r\n\r\n function_info = []\r\n function_info.append(static)\r\n function_info.append(param_sorts)\r\n function_info.append(return_sort)\r\n functions[function_name] = function_info", "def COC(fp, csiz):\n lcoc = unpack('>H', fp.read(2))[0]\n print('Csiz', csiz)\n if csiz < 257:\n ccoc = unpack('B', fp.read(1))[0]\n else:\n ccoc = unpack('>H', fp.read(2))[0]\n scoc = unpack('B', fp.read(1))[0]\n\n _decomp_levels = unpack('B', fp.read(1))[0]\n _block_width = unpack('B', fp.read(1))[0]\n _block_height = unpack('B', fp.read(1))[0]\n _block_style = unpack('B', fp.read(1))[0]\n _transform = unpack('B', fp.read(1))[0]\n\n _precincts = []\n has_precincts = _get_bit(scoc, 7)\n if has_precincts == 1:\n for ii in range(_decomp_levels + 1):\n _precincts.append(unpack('B', fp.read(1))[0])\n\n info = {\n 'Lcoc' : lcoc,\n 'Ccoc' : ccoc,\n 'Scoc' : scoc,\n 'SPcoc' : {\n 'decomp_levels' : _decomp_levels,\n 'block_width' : _block_width,\n 'block_height' : _block_height,\n 'block_style' : _block_style,\n 'transform' : _transform\n }\n }\n\n if has_precincts:\n info['SPcoc']['precincts'] = _precincts\n\n return info", "def _get_c_flags(\n target,\n cc,\n cflags,\n debug,\n srcfiles=None,\n sharedobject=False,\n osname=None,\n verbose=False,\n):\n flags = []\n\n # define c flags\n if cc is not None:\n # remove .exe extension of necessary\n cc = _get_base_app_name(cc)\n\n # remove target .exe extension, if necessary\n target = _get_base_app_name(target)\n\n # get lower case OS string\n if osname is None:\n osname = _get_osname()\n\n # get - or / to prepend for compiler switches\n prepend = _get_prepend(cc, osname)\n\n # generate c flags\n if cc in [\"gcc\", \"g++\"]:\n if sharedobject:\n if osname != \"win32\":\n flags.append(\"fPIC\")\n else:\n if osname == \"win32\":\n flags.append(\"static\")\n if \"fPIC\" in flags:\n flags.remove(\"fPIC\")\n if debug:\n flags += [\"g\"]\n if _check_gnu_switch_available(\n \"-Wall\", compiler=\"gcc\", verbose=verbose\n ):\n flags.append(\"Wall\")\n else:\n pass\n elif cc in [\"clang\", \"clang++\"]:\n if sharedobject:\n msg = \"shared library not implement fo clang\"\n raise NotImplementedError(msg)\n if debug:\n flags += [\"g\"]\n if _check_gnu_switch_available(\n \"-Wall\", compiler=\"clang\", verbose=verbose\n ):\n flags.append(\"Wall\")\n else:\n pass\n elif cc in [\"icc\", \"icpc\", \"mpiicc\", \"mpiicpc\", \"icl\", \"cl\"]:\n if osname == 
\"win32\":\n if cc in [\"icl\", \"cl\"]:\n flags += [\"nologo\"]\n if debug:\n flags.append(\"/debug:full\")\n else:\n if sharedobject:\n flags.append(\"fpic\")\n else:\n if \"fpic\" in flags:\n flags.remove(\"fpic\")\n\n if debug:\n flags += [\"debug full\"]\n elif cc in [\"cl\"]:\n if osname == \"win32\":\n if debug:\n flags.append(\"Zi\")\n\n # Add -D-UF flag for C code if ISO_C_BINDING is not used in Fortran\n # code that is linked to C/C++ code. Only needed if there are\n # any fortran files. -D_UF defines UNIX naming conventions for\n # mixed language compilation.\n if srcfiles is not None:\n ffiles = _get_fortran_files(srcfiles)\n cfiles = _get_c_files(srcfiles)\n if ffiles is not None:\n iso_c_check = True\n if osname == \"win32\":\n if cc in [\"icl\", \"cl\"]:\n iso_c_check = False\n if iso_c_check:\n use_iso_c = _get_iso_c(ffiles)\n if not use_iso_c and cfiles is not None:\n flags.append(\"D_UF\")\n\n # process passed c flags - check for flags with a space between\n # the flag and a setting\n for idx, flag in enumerate(cflags[1:]):\n if flag[0] not in (\"/\", \"-\"):\n cflags[idx] += \" {}\".format(flag)\n cflags[idx + 1] = \"\"\n\n # add passed c flags - assume that flags have - or / as the\n # first character. c flags starting with O are excluded\n for flag in cflags:\n if len(flag) < 1:\n continue\n if flag[1] != \"O\":\n if flag[1:] not in flags:\n flags.append(flag[1:])\n\n # add target specific c/c++ switches\n tlist = _set_cflags(target, cc=cc, argv=False, osname=osname)\n if tlist is not None:\n for flag in tlist:\n if flag[1] != \"O\":\n if flag[1:] not in flags:\n flags.append(flag[1:])\n\n # add prepend to compiler flags\n for idx, flag in enumerate(flags):\n flags[idx] = prepend + flag\n\n return flags", "def compile_functions_to_c(self, functions, externs=[]):\n # Mangle main function: real main is provided by libv\n for function in functions:\n if function.signature.name == \"main\":\n function.signature.name = \"vizh_main\"\n\n signature_list = externs + [function.signature for function in functions]\n \n # We need size_t and libv functions\n code = ['#include <stddef.h>',\n '#include \"libv.h\"']\n\n # First output forward declarations for all functions and externs\n code += [f'{str(signature)};' for signature in signature_list]\n\n signature_list += libv_decls\n signatures = {signature.name: signature for signature in signature_list}\n\n errors = []\n for function in functions:\n try:\n code.append(self.compile_function_to_c(function, signatures))\n except CompilerError as err:\n errors.append((function.signature.name,err))\n\n if len(errors) > 0:\n messages = [f'Error while compiling {func_name}: {err}' for func_name, err in errors]\n raise CompilerError('\\n'.join(messages))\n \n return '\\n'.join(code)", "def test_callback_from_c(self):\n source = io.StringIO(\"\"\"\n int add(int x, int y);\n int x(int a) {\n return add(a + 1, 13);\n }\n \"\"\")\n arch = get_current_arch()\n obj = cc(source, arch, debug=True)\n def my_add(x: int, y: int) -> int:\n return x + y + 2\n imports = {\n 'add': my_add\n }\n m = load_obj(obj, imports=imports)\n y = m.x(101)\n self.assertEqual(117, y)", "def make_fix_plugin(self):\n\n res = \\\n\"\"\"{fname} = shared_object.{fname}\n {fname}.restype = POINTER({structname})\n {varname} = {fname}()\n\n\"\"\"\n fragments ={\n \"varname\": \"_plugin\",\n \"fname\": \"___madz_TYPE_get_out_struct\",\n \"structname\": self.python_madz_types + \"OUTSTRUCT\"\n }\n\n return res.format(**fragments)", "def helper_cccc(standardized_output: 
dict):\n\n for module in standardized_output[\"classes\"]:\n WMC = 0\n n_func = 0\n module_name = module[\"class name\"]\n for file in standardized_output[\"files\"]:\n for func in file[\"functions\"]:\n if \"class name\" in func and func[\"class name\"] == module_name:\n WMC += func[\"CC\"]\n n_func += 1\n module[\"WMC\"] = WMC\n module[\"no. functions\"] = n_func", "def make_globals(py_c_api):\n for fn in py_c_api:\n gv = ir.GlobalValue(fn.name, fn.signature, external=True)\n if gv.badval: gv.add_metadata(badval=ir.Const(gv.badval))\n if gv.maybe: gv.add_metadata(cpy_occurred=True)\n yield fn.name, gv", "def __genASGconstructor( self, file, funcName ):\r\n ASGbuffer = ''\r\n counter = 0\r\n \r\n # Go through all actively tracked ASG's (formalisms)\r\n for ASGname in self.__trackASG.keys():\r\n # This formalism has at least one entity, we NEED IT\r\n if( not self.__isASGbyNameEmpty( ASGname ) ):\r\n ASGbuffer += ASGname + 'RootNode=None, '\r\n counter += 1 \r\n if( counter >= 1 ):\r\n ASGbuffer = ASGbuffer[:-2]\r\n file.write('\\ndef '+funcName[0]+'(self, rootNode, ')\r\n file.write( ASGbuffer + '):\\n')\r\n else:\r\n file.write('\\ndef '+funcName[0]+'(self, rootNode):\\n')", "def begin_impl(self, qname):\n # new file for the toplevel package\n self.pkg_chpl_skel = ChapelFile(qname+'_Skel')\n #self.pkg_chpl_skel.main_area.new_def('proc __defeat_dce(){\\n')\n\n # new file for the user implementation\n self.pkg_impl = ChapelFile(qname+'_Impl')\n self.pkg_impl.new_def(extern_def_set_to_null)\n self.pkg_impl.new_def('// DO-NOT-DELETE splicer.begin(%s.Impl)'%qname)\n self.pkg_impl.new_def('// DO-NOT-DELETE splicer.end(%s.Impl)'%qname)\n self.pkg_impl.new_def('')", "def cython_c2py_conv_function_pointer(t_, ts):\n t = t_[1]\n argnames = []\n argdecls = []\n argbodys = []\n argrtns = []\n for n, argt in t[1][2]:\n argnames.append(n)\n decl, body, rtn = ts.cython_py2c(n, argt, proxy_name=\"c_\" + n)\n argdecls += decl.split('\\n') if isinstance(decl,basestring) else [decl]\n argbodys += body.split('\\n') if isinstance(body,basestring) else [body]\n argrtns += rtn.split('\\n') if isinstance(rtn,basestring) else [rtn]\n rtnname = 'rtn'\n rtnprox = 'c_' + rtnname\n rtncall = 'c_call_' + rtnname\n while rtnname in argnames or rtnprox in argnames:\n rtnname += '_'\n rtnprox += '_'\n argdecls = indent(argdecls)\n argbodys = indent(argbodys)\n rtndecl, rtnbody, rtnrtn, _ = ts.cython_c2py(rtncall, t[2][2],\n cached=False, proxy_name=rtnprox, existing_name=rtncall)\n if rtndecl is None and rtnbody is None:\n rtnprox = rtnname\n rtndecls = [rtndecl]\n returns_void = (t[2][2] == 'void')\n if not returns_void:\n rtndecls.append(\"cdef {0} {1}\".format(ts.cython_ctype(t[2][2]),\n rtncall))\n rtndecl = indent(rtndecls)\n rtnbody = indent(rtnbody)\n s = ('def {{proxy_name}}({arglist}):\\n'\n '{argdecls}\\n'\n '{rtndecl}\\n'\n ' if {{var}} == NULL:\\n'\n ' raise RuntimeError(\"{{var}} is NULL and may not be '\n 'safely called!\")\\n'\n '{argbodys}\\n')\n s += ' {{var}}({carglist})\\n' if returns_void else \\\n ' {rtncall} = {{var}}({carglist})\\n'\n s += '{rtnbody}\\n'\n s = s.format(arglist=\", \".join(argnames), argdecls=argdecls,\n cvartypeptr=ts.cython_ctype(t_).format(type_name='cvartype'),\n argbodys=argbodys, rtndecl=rtndecl, rtnprox=rtnprox,\n rtncall=rtncall, carglist=\", \".join(argrtns), rtnbody=rtnbody)\n caches = 'if {cache_name} is None:\\n' + indent(s)\n if not returns_void:\n caches += \"\\n return {rtnrtn}\".format(rtnrtn=rtnrtn)\n caches += '\\n {cache_name} = {proxy_name}\\n'\n return s, s, 
caches", "def fortran_c_wrapper(self) -> str:\n return ''.join([i.fortran_c_wrapper() for i in self.instances])", "def FbcExtension_init():\n return _libsbml.FbcExtension_init()", "def _initlib():\n global _libhfof\n\n if _libhfof is not None:\n return _libhfof\n\n suffix = sysconfig.get_config_var('EXT_SUFFIX')\n \n name = path.join(path.dirname(path.abspath(__file__)), '../build/libhfof'+suffix)\n if not path.exists(name):\n raise Exception('Library '+str(name)+' does not exist. Maybe you forgot to make it?')\n\n print('Loading libhfof - C functions for FoF calculations', name)\n _libhfof = ctypes.cdll.LoadLibrary(name)\n\n # morton indexing\n # void get_morton_idx(const double *pos, const int num_pos, const double inv_cell_width, int64_t *restrict out)\n func = _libhfof.get_morton_idx\n func.restype = None\n func.argtypes = [ndpointer(ctypes.c_double), ctypes.c_int, ctypes.c_double, ndpointer(int64)]\n \n # minimum and maximum per cell\n # void get_min_max(const double *pos, const uint32_t num_pos, double *restrict out)\n func = _libhfof.get_min_max\n func.restype = None\n func.argtypes = [ndpointer(ctypes.c_double), ctypes.c_uint32, ndpointer(ctypes.c_double)]\n # void get_min_max_2d(const double *pos, const uint32_t num_pos, double *restrict out)\n func = _libhfof.get_min_max_2d\n func.restype = None\n func.argtypes = [ndpointer(ctypes.c_double), ctypes.c_uint32, ndpointer(ctypes.c_double)]\n \n # Find the cell for each point\n # void find_lattice(const double *pos, const uint32_t num_pos, \n # const double inv_cell_width, const int N, const int M, int64_t *out)\n func = _libhfof.find_lattice\n func.restype = None\n func.argtypes = [ndpointer(ctypes.c_double), ctypes.c_uint32, ctypes.c_double, \n ctypes.c_int, ctypes.c_int64, ndpointer(int64)]\n\n # Find the block+cell for each point\n # void blocks_cells(const double min_x, const double min_y, const double min_z, \n #\t\t const double *pos, const uint32_t num_pos, \n #\t\t const double inv_cell_width, const int Py, const int64_t Px, \n #\t\t int64_t *out)\n func = _libhfof.blocks_cells\n func.restype = None\n func.argtypes = [ctypes.c_double, ctypes.c_double, ctypes.c_double,\n ndpointer(ctypes.c_double), ctypes.c_uint32, ctypes.c_double, \n ctypes.c_int, ctypes.c_int64, ndpointer(int64)]\n # void blocks_cells_2d(const double min_x, const double min_y, \n #\t\t const double *restrict pos, const uint32_t N, \n #\t\t const double inv_cell_width, const int64_t P, \n #\t\t int64_t *restrict out)\n func = _libhfof.blocks_cells_2d\n func.restype = None\n func.argtypes = [ctypes.c_double, ctypes.c_double, \n ndpointer(ctypes.c_double), ctypes.c_uint32, ctypes.c_double, \n ctypes.c_int64, ndpointer(int64)]\n\n # Friends of Friends linking periodic (on 4x4x4 cells)\n # see src/fof64.c\n func = _libhfof.fof64\n func.restype = ctypes.c_int\n func.argtypes = [ctypes.c_uint32,ctypes.c_int,ctypes.c_int64,ctypes.c_uint32,ctypes.c_double, \n ndpointer(float64), ndpointer(int64),ndpointer(int64), ndpointer(int64), \n ndpointer(int32), ctypes.c_double]\n\n # Friends-of-friends in 2d, using implementation with 8x8 (=64) cells\n # see src/fof64_2d\n func = _libhfof.fof64_2d\n func.restype = ctypes.c_int\n func.argtypes = [ctypes.c_uint32,ctypes.c_int,ctypes.c_uint32,ctypes.c_double, \n ndpointer(float64), ndpointer(int64),ndpointer(int64), ndpointer(int64), \n ndpointer(int32), ctypes.c_double]\n \n # Friends of Friends periodic linking\n # see src/fof.c\n func = _libhfof.fof_periodic\n func.restype = ctypes.c_int\n func.argtypes = 
[ctypes.c_uint32,ctypes.c_int,ctypes.c_int64,ctypes.c_uint32,ctypes.c_double,\n ndpointer(float64), ndpointer(int64),ndpointer(int64), ndpointer(int64), ndpointer(int32)]\n\n # Periodic image insertion\n # int pad_box(const double inv_boxsize, const double r_pad, const uint32_t num_pos, \n # const double *restrict pos, double *restrict periodic_pos)\n # \t int64_t *restrict pad_idx, const int max_images)\n\n func = _libhfof.pad_box\n func.restype = ctypes.c_int\n func.argtypes = [ctypes.c_double, ctypes.c_double,ctypes.c_uint32,\n ndpointer(float64), ndpointer(float64), ndpointer(int64), ctypes.c_int]\n # int pad_square(const double inv_width, const double r_pad, const uint32_t num_pos, \n #\t const double *restrict pos, double *restrict pad_pos, \n #\t int64_t *restrict pad_idx, const int max_images)\n func = _libhfof.pad_square\n func.restype = ctypes.c_int\n func.argtypes = [ctypes.c_double, ctypes.c_double,ctypes.c_uint32,\n ndpointer(float64), ndpointer(float64), ndpointer(int64), ctypes.c_int] \n \n return _libhfof", "def prepare_udf_source_info(element_type = None, element_list = None):\n udfs = lims.get_udfs(attach_to_name = element_type)\n objects={}\n for udf in udfs:\n db_name=udf.name.lower().replace(' ','_').replace('-','_')\n objects[db_name] = {'doc_source': { 'lims_field':udf.name,\n 'lims_element': element_type, \n 'source': 'Lims'},\n 'doc_type': udf.root.get('type')}\n return objects" ]
[ "0.6575036", "0.6416403", "0.62135506", "0.61577857", "0.6081804", "0.6024244", "0.58753806", "0.5823587", "0.573958", "0.5726746", "0.56846005", "0.56105137", "0.55705476", "0.5551445", "0.5547371", "0.55256623", "0.55013", "0.54184884", "0.539809", "0.53861326", "0.5378367", "0.53707635", "0.53621775", "0.53464997", "0.5334414", "0.5327685", "0.5298696", "0.5278123", "0.5239755", "0.52002364", "0.51520056", "0.51470363", "0.5138355", "0.5130353", "0.5122373", "0.5117571", "0.51139647", "0.51014346", "0.5097219", "0.5078887", "0.5065937", "0.50583494", "0.50526786", "0.50524765", "0.5045219", "0.5036137", "0.502471", "0.500661", "0.50028855", "0.49954233", "0.49630713", "0.49568933", "0.49492654", "0.49443746", "0.49227443", "0.49173304", "0.49110246", "0.4908863", "0.4884131", "0.48800737", "0.48771283", "0.4873213", "0.48707026", "0.48672277", "0.48650864", "0.48559538", "0.48532236", "0.48515087", "0.48459956", "0.4837309", "0.48313743", "0.48274118", "0.4826913", "0.4825897", "0.4824502", "0.48242757", "0.48163518", "0.48125237", "0.4802923", "0.4793175", "0.47912616", "0.47832265", "0.47832033", "0.47795302", "0.47749004", "0.47687593", "0.47653902", "0.47652495", "0.47599438", "0.47501197", "0.4749691", "0.47428718", "0.47359744", "0.47339693", "0.47338772", "0.47278315", "0.47247753", "0.47180218", "0.47150603", "0.47147155" ]
0.6747278
0
generate python_wrapper function body
def build_func_body(func_name, arg_dict, return_type): body = "" arg_list = "" # the following are pointers to scalar outputs # Note: pBufferSize was renamed pBufferSizeInBytes in v6.5 scalar_ptr_outputs = ['nnzTotalDevHostPtr', 'pBufferSize', 'pBufferSizeInBytes', 'resultDevHostPtr'] is_creator = 'cusparseCreate' in func_name is_getter = 'cusparseGet' in func_name if return_type == 'cusparseStatus_t' and not (is_creator or is_getter): is_return = False else: is_return = True # else: return_str = '' for k, v in arg_dict.items(): """ set some flags based on the name/type of the argument will use these flags to determine whether and how to call ffi.new or ffi.cast on each variable """ is_ptr = '*' in v is_cusparse_type = '_t' in v is_cusparse_ptr = is_ptr and is_cusparse_type is_output_scalar = k in scalar_ptr_outputs if k in ['alpha', 'beta']: is_scalar = True else: is_scalar = False if is_getter: is_gpu_array = False else: is_gpu_array = is_ptr and (not is_cusparse_ptr) and (not is_scalar) if 'Complex' in v: is_complex = True else: is_complex = False # convert variable to appropriate type for the FFI if is_output_scalar: # for scalar outputs make a new pointer body += "%s = ffi.cast('%s', %s)\n" % (k, v, k) elif is_getter and is_ptr and (return_type == 'cusparseStatus_t'): # any pointers in cusparseGet* are new outputs to be created body += "%s = ffi.new('%s')\n" % (k, v) elif is_gpu_array: # pass pointer to GPU array data (use either .ptr or .gpudata) body += "%s = ffi.cast('%s', %s.ptr)\n" % (k, v, k) elif is_cusparse_ptr: if is_creator: # generate custom cusparse type body += "%s = ffi.new('%s')\n" % (k, v) else: # cast to the custom cusparse type body += "%s = ffi.cast('%s', %s)\n" % (k, v, k) elif is_ptr and is_scalar: # create new pointer, with value initialized to scalar if is_complex: # complex case is a bit tricky. requires ffi.buffer body += "%sffi = ffi.new('%s')\n" % (k, v) if 'cusparseC' in func_name: body += "ffi.buffer(%sffi)[:] = \ np.complex64(%s).tostring()\n" % (k, k) elif 'cusparseZ' in func_name: body += "ffi.buffer(%sffi)[:] = \ np.complex128(%s).tostring()\n" % (k, k) else: body += "%s = ffi.new('%s', %s)\n" % (k, v, k) elif is_ptr or v == 'cudaStream_t': # case non-scalar pointer to appropriate type body += "%s = ffi.cast('%s', %s)\n" % (k, v, k) else: # don't need explicit cast for plain int, float, etc pass # build the list of arguments to pass to the API if is_ptr and is_scalar and is_complex: # take into account modified argument name for complex scalars arg_list += "%sffi, " % k else: arg_list += "%s, " % k # add the function call and optionally return the result last_key = k arg_list = arg_list[:-2] # remove trailing ", " if is_getter and return_type != 'cusparseStatus_t': body += "return ffi_lib.%s(%s)\n" % (func_name, arg_list) else: # check cusparseStatus_t state before returning call_str = "status = ffi_lib.%s(%s)\n" % (func_name, arg_list) body += split_line(call_str, break_pattern=', ', nmax=76) body += "cusparseCheckStatus(status)\n" if is_return: # len(arg_dict) == 2) is to avoid return for cusparseGetLevelInfo if is_creator or (is_getter and (len(arg_dict) == 2)): body += "return %s[0]\n" % last_key else: body += "#TODO: return the appropriate result" body += '\n\n' return reindent(body, numSpaces=4, lstrip=False)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wrapper(*args):", "def make_c_function_stubs(self):\n fn =\\\n\"\"\"{rettype} {fnname}({args}){{\n {rettype} ret;\n\n ret = {cast_and_deref}___madz_LANG_python_OUTPUT.{nodename}({argnames});\n\n return ret;\n}}\n\n\"\"\"\n fn_no_return =\\\n\"\"\"{rettype} {fnname}({args}){{\n ___madz_LANG_python_OUTPUT.{nodename}({argnames});\n return;\n}}\n\n\"\"\"\n res = \"\"\n c_gen = c_wrapgen.CGenerator([],\"\", self.description)\n for node in self.description.definitions():\n if isinstance(node.type.get_type(), pdl.TypeFunction):\n fragments = {\n \"maybe_parentheses\": \")\" if isinstance(node.type.return_type.get_type(),pdl.TypeStruct) else \"\",\n \"cast_and_deref\": self.make_c_cast_deref_string(c_gen, node.type.return_type),\n \"rettype\": c_gen.gen_type_string(\"\", node.type.return_type),\n \"fnname\": \"___madz_LANG_python_FN_\" + node.name,\n \"nodename\": node.name,\n \"args\": \",\".join(map(\n lambda a: c_gen.gen_type_string(a.name, a.type),\n node.type.args)),\n \"argnames\":\",\".join(map(\n lambda a: a.name,\n node.type.args))\n }\n res += (fn if not isinstance(node.type.return_type, pdl.TypeTypeNone) else fn_no_return).format(**fragments)\n return res", "def p_func_wrap(name, ret_type, arg_types, arg_mods, arg_names, body, assert_ = '', \n\t\ttemplate = 'template <class R>'\n\t):\t\n\t\n\tdecl_args = []\t\n\tfor arg_type, arg_mod, arg_name in zip(arg_types, arg_mods, arg_names):\n\t\tdecl_args.append(format('{} {} {}', arg_type, arg_mod, arg_name))\n\t\t\n\treturn [(\n\t\t'{template}\\n'\n\t\t'{ret_type} {func_name}({arg_lst}) {{\\n'\n\t\t'{assert_}\\n'\t\n\t\t'{body}\\n'\n\t\t'}}\\n'\n\t).format(\n\t\tassert_ = assert_,\n\t\tret_type = ret_type, \t\t\n\t\tfunc_name = name, \n\t\targ_lst = join(decl_args, ', '),\n\t\tbody = body,\n\t\ttemplate = template,\n\t)]", "def process_wrapper(self, code, declaration):\n return code", "def public(*args):\n def public_wrapper():\n pass\n return public_wrapper", "def make_c_header(self):\n res = \\\n\"\"\"PyThreadState* ___madz_LANG_python_thread_state; //Holds Thread State for this interpreter\nPyObject *___madz_LANG_python_wrapper_module; //Hold Pointer to the _madz.py file representing this plugin\ntypedef struct{{\n{function_pointers}\n}}___madz_LANG_python_TYPE_;\n___madz_LANG_python_TYPE_ ___madz_LANG_python_OUTPUT;\nvoid ___madz_init_imports();\n{fn_dec}\n\n\"\"\"\n c_gen = c_wrapgen.CGenerator([],\"\", self.description)\n #TODO function_pointers, all same except\n fragments ={\"fn_dec\" : \"\", \"function_pointers\" : \"\"}\n fn = \"\"\"{rettype}{fnname}({args});\\n\"\"\"\n pointer = \"\"\" {prettype} (*{nodename})({args});\\n\"\"\"\n for node in self.description.definitions():\n if isinstance(node.type.get_type(), pdl.TypeFunction):\n frg = {\n \"prettype\": c_gen.gen_type_string(\"\", node.type.return_type),\n \"rettype\": c_gen.gen_type_string(\"\", node.type.return_type),\n \"fnname\": \"___madz_LANG_python_FN_\" + node.name,\n \"nodename\": node.name,\n \"args\": \",\".join(map(\n lambda a: c_gen.gen_type_string(a.name, a.type),\n node.type.args)),\n\n }\n fragments[\"fn_dec\"] += fn.format(**frg)\n fragments[\"function_pointers\"] += pointer.format(**frg)\n if fragments[\"function_pointers\"] == \"\":\n fragments[\"function_pointers\"] = \"uint8_t _madz_empty;\"\n return res.format(**fragments)", "def _code_wrapper_generator(cls,\n user_kwargs: List,\n disdat_kwargs: dict,\n core_code: Callable,\n return_signature: str,\n input_artifact_list: List = None,\n output_artifact_list: List = None,\n indentation: str = ' ',\n 
generated_func_name: str = None,\n temp_dir: str = 'dsdt_generated_code') -> Callable:\n core_code_name = core_code.__name__\n core_code_src = inspect.getsource(core_code)\n if generated_func_name is None:\n generated_func_name = core_code_name + '_generated'\n core_code_split = core_code_src.split('\\n')\n # parameters passed in by value\n user_params = ['{}: \\'{}\\'={}'.format(param_spec.name, param_spec.type, param_spec.default)\n for param_spec in user_kwargs if param_spec._passing_style is None]\n # parameters passed in by file\n user_by_file_params = ['{}: components.InputPath(None)'.format(param_spec.name)\n for param_spec in user_kwargs if param_spec._passing_style is not None]\n user_params = user_by_file_params + user_params\n dsdt_params = ['{}: \\'{}\\' = None'.format(param, type_) for param, type_ in disdat_kwargs.items()]\n # signal to the compiler what artifacts should be passed by file using components.InputPath, not by value\n input_artifacts = [] if input_artifact_list is None else input_artifact_list\n input_artifacts = ['reserve_disdat_{}: components.InputPath(\\'{}\\') = None'.format(var.name, var.type)\n for var in input_artifacts]\n # signal to the kfp compiler what artifacts to output\n output_artifacts = [] if output_artifact_list is None else output_artifact_list\n output_artifacts = ['{}: components.OutputPath(\\'{}\\')'.format(var.name, var.type) for var in output_artifacts]\n # the wrapper code with dynamic input and output signatures\n code_string = '''\nfrom typing import *\nfrom kfp import components\ndef {}({}) -> {}:\n import inspect, json\n from typing import NamedTuple\n frame = inspect.currentframe()\n # get the list of input parameters \n args, _, _, _ = inspect.getargvalues(frame)\n dsdt_params = [{}]\n user_kwargs, dsdt_kwargs = [], []\n\n # grab all input parameters and pass them in as kwargs\n for key in args: \n temp = key\n if key in dsdt_params:\n key = key.replace('dsdt_', '') # prefix is droped as dsdt params are passed in as dict, no need to worry about collision\n dsdt_kwargs.append((key, eval(temp)))\n else:\n user_kwargs.append((key, eval(temp)))\n user_kwargs = dict(user_kwargs)\n dsdt_kwargs = dict(dsdt_kwargs) \n # inject core code here !\n{}\n # call core code here !\n result = {}(user_kwargs=user_kwargs, disdat_kwargs=dsdt_kwargs)\n return {}\n '''\n code_string = code_string.format(generated_func_name,\n ', '.join(output_artifacts + user_params + dsdt_params + input_artifacts),\n return_signature,\n ', '.join(['\\'{}\\''.format(key) for key in disdat_kwargs.keys()]),\n (indentation + '{}\\n') * len(core_code_split),\n core_code_name,\n 'None' if return_signature == 'None' else 'result')\n code_string = code_string.format(*core_code_split) # inject user code\n output_file = 'dynamic_{}_wrapper.py'.format(generated_func_name)\n try:\n os.mkdir(temp_dir)\n except FileExistsError:\n pass\n with open(os.path.join(temp_dir, '__init__.py'), 'w') as fp:\n fp.write('')\n with open(os.path.join(temp_dir, output_file), 'w') as fp:\n fp.write(code_string) # save generated file in a sub-folder\n module = importlib.import_module('dsdt_generated_code.' 
+ output_file.split('.')[0],\n os.path.join(temp_dir, output_file))\n return getattr(module, generated_func_name) # return the generate code", "def make_get_python_out_struct(self):\n res = \\\n\"\"\"DLLEXPORT ___madz_LANG_python_TYPE_* {}_get_out_struct(){{\n return &___madz_LANG_python_OUTPUT;\n}}\n\n\"\"\"\n return res.format(self.python_mangle)", "def _wrapper(func, args):\n return func(*args)", "def cpp_function(self):", "def wrapper(*args, **kwargs):\n print(f\"you are calling the {fn.__name__} function\")\n print(f\"Here's the documentation: {fn.__doc__}\")\n return fn(*args, **kwargs)", "def dummy_wrapper(func):\n return func", "def wrapper(*args, **kwargs):\n print(f\"you are about to call {fn.__name__}\")\n print(f\"Here's the documentation: {fn.__doc__}\")\n return fn(*args, **kwargs)", "def dummy_python_wrapper(*args, **_kwargs):\n def _inner_wrapper(func):\n return func\n\n if len(args) == 1 and callable(args[0]):\n return _inner_wrapper(args[0])\n\n return _inner_wrapper", "def gen_python_api(json_data, model_name,model_version,endpoint=\"http://127.0.0.1:8400\"):\n\n code_template = \"\"\"#!/usr/bin/env python\n\n import requests\n\n def main():\n #endpoint = \"http://127.0.0.1:8000\"\n endpoint = {{endpoint}}\n param={\"model_name\": \"{{ model_name }}\", \"model_version\": \"{{ model_version }}\"}\n json_data = {{json_data}}\n result = requests.post(endpoint, param=param,json=json_data)\n print(result.text)\n\n if __name__ == \"__main__\":\n main()\n \"\"\"\n\n generated_tensor_data_string = json.dumps(json_data)\n template = Template(code_template)\n generate_code = template.render(\n model_name=model_name, model_version=model_version,json_data=generated_tensor_data_string,endpoint=endpoint)\n logging.debug(\"Generate the code in Python:\\n{}\".format(generate_code))\n return generate_code", "def getPythonWrapper(self):\n\n isMC = self.cfg_comp.isMC \n\n classes = \"\"\n anclass = \"\"\n anclass += \"from PhysicsTools.HeppyCore.framework.analyzer import Analyzer\\n\"\n anclass += \"class EventAnalyzer(Analyzer):\\n\"\n anclass += \" def __init__(self, cfg_ana, cfg_comp, looperName):\\n\"\n anclass += \" super(EventAnalyzer, self).__init__(cfg_ana, cfg_comp, looperName)\\n\"\n\n anclass += \" def process(self, event):\\n\"\n\n for cname, coll in self.collections.items():\n classes += coll.get_py_wrapper_class(isMC)\n anclass += \" event.{0} = {0}.make_array(event)\\n\".format(coll.name)\n\n return classes + \"\\n\" + anclass", "def _generateArgWrapper( function, wrappedFunction ):\n\n sig = inspect.signature(function)\n sigNoDefault = sig.replace(parameters=[p.replace(default=Parameter.empty) for p in sig.parameters.values()])\n argspecString = str(sig)\n argspecStringNoDefaults = str(sigNoDefault)\n\n codeString = \"\\n\".join( [\n \"def argWrapperGenerator( wrappedFunction ):\",\n \" def argWrappedFunction%(argspecString)s:\",\n \" return wrappedFunction%(argspecStringNoDefaults)s\",\n \" return argWrappedFunction\",\n ] )\n\n codeString = codeString % {\n \"argspecString\" : argspecString,\n \"argspecStringNoDefaults\" : argspecStringNoDefaults,\n }\n\n fakeFileName = \"<Memoize-generated code for '%s'>\" % function.__name__\n\n codeObj = compile(\n codeString,\n fakeFileName,\n \"exec\"\n )\n\n localsDict = {}\n globalsDict = function.__globals__\n\n exec(codeObj, globalsDict, localsDict)\n\n argWrapperGenerator = localsDict[\"argWrapperGenerator\"]\n\n return argWrapperGenerator( wrappedFunction )", "def _make_source(name, init, body):\n code = \"\"\"\n #include 
<Python.h>\n\n %(body)s\n\n PyMODINIT_FUNC\n PyInit_%(name)s(void) {\n %(init)s\n }\n \"\"\" % dict(\n name=name, init=init, body=body,\n )\n return code", "def make_module_hook(self):\n res = \\\n\"\"\"{fname} = shared_object.{fname}\n {fname}.restype = POINTER({structname})\n {varname} = {fname}()\n\n\"\"\"\n fragments ={\n \"varname\": self._namespace_mangle(self.namespace) + \"_plugin\",\n \"fname\": \"___madz_LANG_python_get_out_struct\" if self.namespace == \"\" else \"___madz_LANG_python_get_\"+self._namespace_mangle(self.namespace) + \"_struct\",\n \"structname\": self.python_madz_types + (\"OUTSTRUCT\" if self.namespace == \"\" else self._namespace_mangle(self.namespace))\n }\n\n return res.format(**fragments)", "def setup_java_class(content_to_add):\n template = \"\"\"\npublic class Lambda {\n\n public static void main(String args[]) {\n %s\n }\n}\n \"\"\"\n return template % content_to_add", "def adaptPythonToPython(self, *args):\n return _SALOMERuntime.RuntimeSALOME_adaptPythonToPython(self, *args)", "def adaptPythonToCpp(self, *args):\n return _SALOMERuntime.RuntimeSALOME_adaptPythonToCpp(self, *args)", "def basic_wrapper( function ):\n\n #=========================================================================\n def callback( context ):\n \"\"\"\n The callback function with a single call-time context argument.\n \"\"\"\n return '<tag>{}</tag>'.format( function( context ) )\n return callback", "def get_py(self):\n pass", "def _boilerplate_to_python(indent):\n indent_str = \" \" * indent\n boilerplate = indent_str + \"import core.vba_library\\n\"\n boilerplate = indent_str + \"import core.vba_context\\n\"\n boilerplate += indent_str + \"from core.utils import safe_print\\n\"\n boilerplate += indent_str + \"from core.utils import safe_str_convert\\n\"\n boilerplate += indent_str + \"from core.utils import plus\\n\"\n boilerplate += indent_str + \"from core.utils import eq\\n\"\n boilerplate += indent_str + \"from core.utils import neq\\n\"\n boilerplate += indent_str + \"from core.utils import lt\\n\"\n boilerplate += indent_str + \"from core.utils import lte\\n\"\n boilerplate += indent_str + \"from core.utils import gt\\n\"\n boilerplate += indent_str + \"from core.utils import gte\\n\"\n boilerplate += indent_str + \"import core.utils\\n\"\n boilerplate += indent_str + \"from core.python_jit import update_array\\n\"\n boilerplate += indent_str + \"from core.vba_conversion import coerce_to_num\\n\"\n boilerplate += indent_str + \"from core.vba_conversion import coerce_to_int\\n\"\n boilerplate += indent_str + \"from core.vba_conversion import coerce_to_str\\n\"\n boilerplate += indent_str + \"from core.vba_conversion import coerce_to_int_list\\n\\n\"\n boilerplate += indent_str + \"try:\\n\"\n boilerplate += indent_str + \" \" * 4 + \"vm_context\\n\"\n boilerplate += indent_str + \"except (NameError, UnboundLocalError):\\n\"\n boilerplate += indent_str + \" \" * 4 + \"vm_context = context\\n\"\n return boilerplate", "def metadata(*args):\n def metadata_wrapper():\n pass\n return metadata_wrapper", "def wrapper_fun(*args):\n print(\"Hello Decorator\")\n return fun(*args)", "def exec_wrapper(func) :\n\n def wrapped(self, *args, **kwargs):\n \"\"\"actual wrapper code\"\"\"\n\n self.logger.debug(\"%s: entering with self=%s, args=%s, kwargs=%s\" \\\n % (func.__name__, self, args, kwargs))\n\n # kwarg _user is not passed to LLA\n if not kwargs.has_key(\"_user\") :\n _user = \"\"\n else :\n _user = kwargs[\"_user\"]\n kwargs.pop(\"_user\")\n\n # kwarg _timeout is not passed to LLA\n 
if not kwargs.has_key(\"_timeout\") :\n _timeout = afs.CONFIG.AsyncTimeout\n else :\n _timeout = int(kwargs[\"_timeout\"])\n kwargs.pop(\"_timeout\")\n \n \n if kwargs.has_key(\"async\") :\n async = kwargs[\"async\"]\n kwargs.pop(\"async\")\n else :\n async = False\n \n\n # kwarg _cfg must be passed to LLA\n if not kwargs.has_key(\"_cfg\") :\n self.logger.debug(\"injecting default config.\")\n kwargs[\"_cfg\"] = afs.CONFIG\n\n # here we should check the authorisation\n self.logger.debug(\\\n \"should check auth of user=%s for method %s in class %s\"\\\n % (_user, func.__name__, self.__class__.__name__))\n\n # get cmdlist and parsefunction from method\n # parse_fct is parsing the output of the executed function\n # ParseInfo are any info the parse_fct requires beside ret,\n # outout and outerr \n parse_parameterlist = {\"args\" : args, \"kwargs\" : kwargs } \n argspec = inspect.getargspec(func)\n \n self.logger.debug(\"argspec=%s\" % (argspec,))\n\n count = 0\n if argspec[3] != None : \n for key in argspec[0][-len(argspec[3]):] :\n self.logger.debug(\"checking argspec key=%s\" % key)\n value = argspec[3][count]\n self.logger.debug(\"value=%s\" % value)\n count += 1\n if not parse_parameterlist[\"kwargs\"].has_key(key) :\n parse_parameterlist[\"kwargs\"][key] = value\n\n self.logger.debug(\"args=%s\" % (args,))\n self.logger.debug(\"kwargs=%s\" % (kwargs,))\n self.logger.debug(\"parse_parameterlist=%s\" % (parse_parameterlist,))\n\n cmd_list, parse_fct = func(self, *args, **kwargs) \n \n # do really execute the call\n if async == False :\n ret, output, outerr = self.execute(cmd_list)\n else :\n sp_ident = self.execute_detached(cmd_list)\n self.logger.debug(\"executed detached subprocess with sp_ident %s\" % sp_ident)\n self.active_subprocesses[sp_ident] = { \"cmd_list\": cmd_list, \"parse_fct\" : parse_fct, \"parse_parameterlist\" : parse_parameterlist, \"start_time\" : time.mktime(time.localtime()), \"timeout\" : _timeout }\n return sp_ident\n\n # parse the result\n self.logger.debug( \"calling parse_fct %s with %s, %s, %s, %s\" \\\n % (parse_fct.__name__, ret, output[:10],\\\n outerr[:10], parse_parameterlist))\n result = parse_fct(ret, output, outerr, \\\n parse_parameterlist, self.logger)\n self.logger.debug(\"%s returning : %s\" % (func.__name__, result.__repr__()))\n return result\n\n return wrapped", "def wrapper(*args, **kwargs):\n return func(*args, **kwargs)", "def wrapper(*args, **kwargs):\n return func(*args, **kwargs)", "def write_fortran_wrappers(out, decl, return_val):\n delegate_name = decl.name + f_wrap_suffix\n out.write(decl.fortranPrototype(delegate_name, [\"static\"]))\n out.write(\" { \\n\")\n\n call = FortranDelegation(decl, return_val)\n\n if decl.name == \"MPI_Init\":\n # Use out.write() here so it comes at very beginning of wrapper function\n out.write(\" int argc = 0;\\n\");\n out.write(\" char ** argv = NULL;\\n\");\n call.addActual(\"&argc\");\n call.addActual(\"&argv\");\n call.write(out)\n out.write(\" *ierr = %s;\\n\" % return_val)\n out.write(\"}\\n\\n\")\n\n # Write out various bindings that delegate to the main fortran wrapper\n write_fortran_binding(out, decl, delegate_name, \"MPI_INIT\", [\"fortran_init = 1;\"])\n write_fortran_binding(out, decl, delegate_name, \"mpi_init\", [\"fortran_init = 2;\"])\n write_fortran_binding(out, decl, delegate_name, \"mpi_init_\", [\"fortran_init = 3;\"])\n write_fortran_binding(out, decl, delegate_name, \"mpi_init__\", [\"fortran_init = 4;\"])\n return\n\n # This look processes the rest of the call for all other 
routines.\n for arg in decl.args:\n if arg.name == \"...\": # skip ellipsis\n continue\n\n if not (arg.pointers or arg.array):\n if not arg.isHandle():\n # These are pass-by-value arguments, so just deref and pass thru\n dereferenced = \"*%s\" % arg.name\n call.addActual(dereferenced)\n else:\n # Non-ptr, non-arr handles need to be converted with MPI_Blah_f2c\n # No special case for MPI_Status here because MPI_Statuses are never passed by value.\n call.addActualMPI2(\"%s_f2c(*%s)\" % (conversion_prefix(arg.type), arg.name))\n call.addActualMPICH(\"(%s)(*%s)\" % (arg.type, arg.name))\n\n else:\n if not arg.isHandle():\n # Non-MPI handle pointer types can be passed w/o dereferencing, but need to\n # cast to correct pointer type first (from MPI_Fint*).\n call.addActual(\"(%s)%s\" % (arg.castType(), arg.name))\n else:\n # For MPI-1, assume ints, cross fingers, and pass things straight through.\n call.addActualMPICH(\"(%s*)%s\" % (arg.type, arg.name))\n conv = conversion_prefix(arg.type)\n temp = \"temp_%s\" % arg.name\n\n # For MPI-2, other pointer and array types need temporaries and special conversions.\n if not arg.isHandleArray():\n call.addTemp(arg.type, temp)\n call.addActualMPI2(\"&%s\" % temp)\n\n if arg.isStatus():\n call.addCopy(\"%s_f2c(%s, &%s);\" % (conv, arg.name, temp))\n call.addWriteback(\"%s_c2f(&%s, %s);\" % (conv, temp, arg.name))\n else:\n call.addCopy(\"%s = %s_f2c(*%s);\" % (temp, conv, arg.name))\n call.addWriteback(\"*%s = %s_c2f(%s);\" % (arg.name, conv, temp))\n else:\n # Make temporary variables for the array and the loop var\n temp_arr_type = \"%s*\" % arg.type\n call.addTemp(temp_arr_type, temp)\n call.addTemp(\"int\", \"i\")\n\n # generate a copy and a writeback statement for this type of handle\n if arg.isStatus():\n copy = \" %s_f2c(&%s[i], &%s[i])\" % (conv, arg.name, temp)\n writeback = \" %s_c2f(&%s[i], &%s[i])\" % (conv, temp, arg.name)\n else:\n copy = \" temp_%s[i] = %s_f2c(%s[i])\" % (arg.name, conv, arg.name)\n writeback = \" %s[i] = %s_c2f(temp_%s[i])\" % (arg.name, conv, arg.name)\n\n # Generate the call surrounded by temp array allocation, copies, writebacks, and temp free\n count = \"*%s\" % arg.countParam().name\n call.addCopy(\"%s = (%s)malloc(sizeof(%s) * %s);\" %\n (temp, temp_arr_type, arg.type, count))\n call.addCopy(\"for (i=0; i < %s; i++)\" % count)\n call.addCopy(\"%s;\" % copy)\n call.addActualMPI2(temp)\n call.addWriteback(\"for (i=0; i < %s; i++)\" % count)\n call.addWriteback(\"%s;\" % writeback)\n call.addWriteback(\"free(%s);\" % temp)\n\n call.write(out)\n if decl.returnsErrorCode():\n out.write(\" *ierr = %s;\\n\" % return_val)\n else:\n out.write(\" return %s;\\n\" % return_val)\n out.write(\"}\\n\\n\")\n\n # Write out various bindings that delegate to the main fortran wrapper\n write_fortran_binding(out, decl, delegate_name, decl.name.upper())\n write_fortran_binding(out, decl, delegate_name, decl.name.lower())\n write_fortran_binding(out, decl, delegate_name, decl.name.lower() + \"_\")\n write_fortran_binding(out, decl, delegate_name, decl.name.lower() + \"__\")", "def wrap(function, wrapper):\n\t# Copy the function signature.\n\twrapper.__module__ = function.__module__\n\twrapper.__name__ = function.__name__\n\twrapper.__doc__ = function.__doc__", "def __wrapper__(self, x):\r\n return wrap(x)", "def test_2_writepy(self):\n for modname in self.originals:\n msg.info(\"Writing python wrapper module {}.py\".format(modname))\n self.writers[modname].write_py()\n msg.okay(\"Finished writing {}.py\".format(modname))", "def 
_make_methods(functions, modname):\n methods_table = []\n codes = []\n for funcname, flags, code in functions:\n cfuncname = \"%s_%s\" % (modname, funcname)\n if 'METH_KEYWORDS' in flags:\n signature = '(PyObject *self, PyObject *args, PyObject *kwargs)'\n else:\n signature = '(PyObject *self, PyObject *args)'\n methods_table.append(\n \"{\\\"%s\\\", (PyCFunction)%s, %s},\" % (funcname, cfuncname, flags))\n func_code = \"\"\"\n static PyObject* {cfuncname}{signature}\n {{\n {code}\n }}\n \"\"\".format(cfuncname=cfuncname, signature=signature, code=code)\n codes.append(func_code)\n\n body = \"\\n\".join(codes) + \"\"\"\n static PyMethodDef methods[] = {\n %(methods)s\n { NULL }\n };\n static struct PyModuleDef moduledef = {\n PyModuleDef_HEAD_INIT,\n \"%(modname)s\", /* m_name */\n NULL, /* m_doc */\n -1, /* m_size */\n methods, /* m_methods */\n };\n \"\"\" % dict(methods='\\n'.join(methods_table), modname=modname)\n return body", "def visit_Python(self, node):\n py_code = compile(node.py_ast, self.filename, mode='exec')\n bp_code = Code.from_code(py_code)\n # Skip the SetLineo and ReturnValue codes\n self.code_ops.extend(bp_code.code[1:-2])", "def wrapper(*args, **kwargs):\n print('S: Function {} args: {} kwargs: {}'.format(\n function.__name__, str(args), str(kwargs)))\n return function(*args, **kwargs)", "def wrapper(*args, **kwargs):\n return fn(*args, **kwargs)", "def visit_Python(self, node):\n # This compiles the given Python ast into a Python code object\n # then disassembles it into a byteplay code object. This allows\n # us to interleave the instructions with those generated for\n # the rest of the module and then compile a single unified \n # code object.\n py_code = compile(node.py_ast, self.filename, mode='exec')\n bpc = Code.from_code(py_code)\n # Skip the SetLineo and ReturnValue codes\n self.code_ops.extend(bpc.code[1:-2])", "def python_code_markdown(func: Callable) -> str:\n return \"\"\"\n ```python\n \"\"\" + inspect.getsource(func) + \"\"\"\n ```\n \"\"\"", "def buildPythonDeclaration(self, indent=\"\", parent=\"\", **kwargs):\n return \"\"", "def _wrap_in_generator(func, source, namer, overload):\n\n nonlocals = []\n\n for var in six.get_function_code(func).co_freevars:\n # We must generate dummy vars so the generated function has the same closure\n # as the original function.\n free_template = 'var = None'\n nonlocal_node = templates.replace(free_template, var=var)\n nonlocals.extend(nonlocal_node)\n\n gen_fun_name = namer.new_symbol('gen_fun', set())\n template = \"\"\"\n def gen_fun(overload):\n nonlocals\n\n program\n\n return f_name\n \"\"\"\n\n ret = templates.replace(\n template,\n gen_fun=gen_fun_name,\n nonlocals=nonlocals,\n overload=overload.symbol_name,\n program=source,\n f_name=func.__name__)\n\n converted_module, _ = parsing.ast_to_object(ret)\n outer_func = getattr(converted_module, gen_fun_name)\n return outer_func(overload.module)", "def GenerateFixedFunctions(self, out):\n out.Write(\"\"\"\n\nstatic PPB_GetInterface __real_PPBGetInterface;\nstatic PPP_GetInterface_Type __real_PPPGetInterface;\n\nvoid __set_real_%(wrapper_prefix)s_PPBGetInterface(PPB_GetInterface real) {\n __real_PPBGetInterface = real;\n}\n\nvoid __set_real_%(wrapper_prefix)s_PPPGetInterface(PPP_GetInterface_Type real) {\n __real_PPPGetInterface = real;\n}\n\n/* Map interface string -> wrapper metadata */\nstatic struct %(wrapper_struct)s *%(wrapper_prefix)sPPBShimIface(\n const char *name) {\n struct %(wrapper_struct)s **next = s_ppb_wrappers;\n while (*next != NULL) {\n if 
(mystrcmp(name, (*next)->iface_macro) == 0) return *next;\n ++next;\n }\n return NULL;\n}\n\n/* Map interface string -> wrapper metadata */\nstatic struct %(wrapper_struct)s *%(wrapper_prefix)sPPPShimIface(\n const char *name) {\n struct %(wrapper_struct)s **next = s_ppp_wrappers;\n while (*next != NULL) {\n if (mystrcmp(name, (*next)->iface_macro) == 0) return *next;\n ++next;\n }\n return NULL;\n}\n\nconst void *__%(wrapper_prefix)s_PPBGetInterface(const char *name) {\n struct %(wrapper_struct)s *wrapper = %(wrapper_prefix)sPPBShimIface(name);\n if (wrapper == NULL) {\n /* We did not generate a wrapper for this, so return the real interface. */\n return (*__real_PPBGetInterface)(name);\n }\n\n /* Initialize the real_iface if it hasn't been. The wrapper depends on it. */\n if (wrapper->real_iface == NULL) {\n const void *iface = (*__real_PPBGetInterface)(name);\n if (NULL == iface) return NULL;\n wrapper->real_iface = iface;\n }\n\n return wrapper->wrapped_iface;\n}\n\nconst void *__%(wrapper_prefix)s_PPPGetInterface(const char *name) {\n struct %(wrapper_struct)s *wrapper = %(wrapper_prefix)sPPPShimIface(name);\n if (wrapper == NULL) {\n /* We did not generate a wrapper for this, so return the real interface. */\n return (*__real_PPPGetInterface)(name);\n }\n\n /* Initialize the real_iface if it hasn't been. The wrapper depends on it. */\n if (wrapper->real_iface == NULL) {\n const void *iface = (*__real_PPPGetInterface)(name);\n if (NULL == iface) return NULL;\n wrapper->real_iface = iface;\n }\n\n return wrapper->wrapped_iface;\n}\n\"\"\" % { 'wrapper_struct' : self.GetWrapperMetadataName(),\n 'wrapper_prefix' : self.wrapper_prefix,\n } )", "def eval_python_blocks(req, body):\n localsdict = {\"request\": req}\n globalsdict = {}\n\n old_stdout = sys.stdout\n old_stderr = sys.stderr\n\n try:\n start = 0\n while body.find(\"<%\", start) != -1:\n start = body.find(\"<%\")\n end = body.find(\"%>\", start) \n\n if start != -1 and end != -1:\n codeblock = body[start+2:end].lstrip()\n\n sys.stdout = StringIO.StringIO()\n sys.stderr = StringIO.StringIO()\n\n try:\n exec codeblock in localsdict, globalsdict\n\n except Exception, e:\n print \"ERROR in processing: %s\" % e\n\n output = sys.stdout.getvalue() + sys.stderr.getvalue()\n body = body[:start] + output + body[end+2:]\n\n finally:\n sys.stdout = old_stdout\n sys.stderr = old_stderr\n\n return body", "def gen_python_addr_module(module_name,root,creg_base,sreg_base):\n fo = open(module_name+\".py\",\"w\")\n fo.write(\"\"\n \"\\\"\\\"\\\"This file is automatically generated by the \"+sys.argv[0]+\" script\\n\"\n \"All modifications should be done in that file\\n\\\"\\\"\\\"\\n\"+\n root.name+\"_dict=\")\n (res,creg_base,sreg_base)=root.gen_python_addr(creg_base,sreg_base)\n fo.write(res+\"\\n\")\n fo.write(\"\"\n \"#Convert the dictionary to object, as described in https://stackoverflow.com/a/6993694/1735409\\n\"\n \"class Struct(object):\\n\"\n \" def __init__(self, data):\\n\"\n \" for name, value in data.items():\\n\"\n \" setattr(self, name, self._wrap(value))\\n\"\n \" def _wrap(self, value):\\n\"\n \" if isinstance(value, (tuple, list, set, frozenset)):\\n\"\n \" return type(value)([self._wrap(v) for v in value])\\n\"\n \" else:\\n\"\n \" return Struct(value) if isinstance(value, dict) else value\\n\"+\n root.name+\"=Struct(\"+root.name+\"_dict)\\n\")\n fo.close()", "def write_gotcha_c_wrapper(out, decl, return_val, write_body):\n # Write the pointer to the original function\n out.write(\"gotcha_wrappee_handle_t _wrap_py_%s_handle = 
0x0;\\n\" % decl.name)\n\n # Now write the wrapper function, which will call the original function through the pointer\n out.write(decl.gotcha_prototype(default_modifiers))\n out.write(\" { \\n\")\n out.write(\" %s %s = 0;\\n\" % (decl.retType(), return_val))\n\n write_body(out)\n\n out.write(\" return %s;\\n\" % return_val)\n out.write(\"}\\n\\n\")\n\n # Write the GOTCHA binding struct\n out.write(\"struct gotcha_binding_t wrap_%s_binding = { \\\"%s\\\", (void*) wrap_%s, &_wrap_py_%s_handle };\\n\\n\" % (decl.name, decl.name, decl.name, decl.name))", "def make_wrapper(fname, atypes, rtype, cres):\n fndesc = cres.fndesc\n module = cres.library.create_ir_module(fndesc.unique_name)\n context = cres.target_context\n ll_argtypes = [context.get_value_type(ty) for ty in atypes]\n ll_return_type = context.get_value_type(rtype)\n\n # TODO: design a API for custom wrapping\n if type(rtype).__name__ == 'ArrayPointer':\n wrapty = ir.FunctionType(ir.VoidType(),\n [ll_return_type] + ll_argtypes)\n wrapfn = module.add_function(wrapty, fname)\n builder = ir.IRBuilder(wrapfn.append_basic_block('entry'))\n fnty = context.call_conv.get_function_type(rtype, atypes)\n fn = builder.module.add_function(fnty, cres.fndesc.llvm_func_name)\n status, out = context.call_conv.call_function(\n builder, fn, rtype, atypes, wrapfn.args[1:])\n with cgutils.if_unlikely(builder, status.is_error):\n cgutils.printf(builder,\n f\"rbc: {fname} failed with status code %i\\n\",\n status.code)\n builder.ret_void()\n builder.store(builder.load(out), wrapfn.args[0])\n builder.ret_void()\n else:\n wrapty = ir.FunctionType(ll_return_type, ll_argtypes)\n wrapfn = module.add_function(wrapty, fname)\n builder = ir.IRBuilder(wrapfn.append_basic_block('entry'))\n fnty = context.call_conv.get_function_type(rtype, atypes)\n fn = builder.module.add_function(fnty, cres.fndesc.llvm_func_name)\n status, out = context.call_conv.call_function(\n builder, fn, rtype, atypes, wrapfn.args)\n with cgutils.if_unlikely(builder, status.is_error):\n cgutils.printf(builder,\n f\"rbc: {fname} failed with status code %i\\n\",\n status.code)\n builder.ret(out)\n\n cres.library.add_ir_module(module)", "def gen_python(protocol):\n\ts = \"\"\n\tfor r in protocol:\n\t\tif r.write:\n\t\t\ts += \"def set_%s(value): #%s\\n\"%(r.name, r.desc)\n\t\t\ts += \"\\twrite_reg_raw(%d, \\\"%s\\\", value)\\n\\n\"%(r.number, r.size)\n\t\tif r.read:\n\t\t\ts += \"def get_%s(): #%s\\n\"%(r.name, r.desc)\n\t\t\ts += \"\\treturn read_reg_raw(%d, \\\"%s\\\")\\n\\n\"%(r.number, r.size)\n\treturn s", "def generate(module_name, module_path, target_dir):\n if not (Path(module_path) / 'builtins.stub.py').exists():\n copy(Path(__file__).parent.parent / 'stubs/builtins.stub.py', module_path)\n build_swift_wrappers_module(module_name, module_path, target_dir)", "def _wrap_func(fn):\n\n @wraps(fn)\n def wrapper(*arg, **kwargs):\n results = fn(*arg, **kwargs)\n return _convert_python_data(results)\n\n return wrapper", "def test_wraps():\n print('func')", "def example_function():", "def func_doc():", "def buildPythonDeclaration(self, indent=\"\", parent=\"\", **kwargs):\n return \"\"", "def _to_be_wrapped(self) -> None:", "def generate(self, namespace: Optional[str], template: str, func: Callable, call_args: Dict) -> str:", "def fortran_c_wrapper(self) -> str:\n result = banner('//')\n result += self._fc_includes()\n result += self._fc_using_statements()\n result += self._fc_function_definitions()\n return result", "def generate_lbryd_wrapper(url=LBRY_API_RAW_JSON_URL,\n doc=None,\n 
read_file=LBRYD_BASE_FPATH,\n write_file=LBRYD_FPATH):\n print(80 * \"-\")\n\n if doc:\n sections = get_lbry_api_function_docs(doc=doc)\n inpt = doc\n else:\n sections = get_lbry_api_function_docs(url=url)\n inpt = url\n\n if not sections:\n print(\"Empty information; wrapper module not written.\")\n return True\n\n print(\"Input JSON:\", inpt)\n\n # Open the actual file for appending\n with open(write_file, 'w') as lbry_file:\n docstring = ['\"\"\"',\n 'LBRY daemon wrapper in Python. Import it and initialize the main class.',\n '',\n 'This file was generated at build time using the `generator` module.',\n 'You may edit it but do so with caution.',\n 'If this file contains syntax errors, check the input file',\n 'for badly formated fields.',\n f'Input JSON: {inpt}',\n '\"\"\"',\n '']\n\n docstring = \"\\n\".join(docstring)\n lbry_file.write(docstring)\n\n with open(read_file, 'r') as template:\n header = template.read()\n\n lbry_file.write(header)\n\n # Iterate through all the functions we retrieved\n # and write them to the file\n for section in sections:\n commands = sections[section][\"commands\"]\n\n for command in commands:\n method_definition = generate_method_definition(command)\n lbry_file.write(method_definition)\n\n print(\"Generated 'lbrynet' API wrapper:\", write_file)\n with open(write_file) as lbry_file:\n source = lbry_file.read()\n\n parsed = True\n try:\n result = ast.parse(source, filename=write_file)\n except SyntaxError as err:\n print(\"The resulting file has syntax errors. Look at the error line for clues.\")\n print(\"Error:\", err)\n print()\n print(\"The problem is usually in the input JSON file; it may contain badly formatted fields.\")\n print(\"Input:\", inpt)\n print()\n parsed = False\n\n if parsed:\n try:\n from yapf.yapflib.yapf_api import FormatFile\n FormatFile(write_file, in_place=True)\n except ImportError:\n print()\n print(\"[Warning]: 'yapf' could not be imported, so the generated code will not be formatted\")\n\n return None", "def GetUtilWrapper(self):\n return ''", "def GenPy(mod,fname):\n f = open(fname, 'w')\n title = \"\"\"#\n# This file is generated automatically\n# Author:IAN\n# http://www.iknot.org\n\"\"\"\n f.write(title)\n for i in mod.__dict__.keys():\n s = \"def \" + i + \"():\" + \"\\n\"\n f.write(s)\n s = \" return\"\n f.write(s + \"\\n\")\n f.close()\n kcs_ui.message_noconfirm('py file saved to:%s'%(fname))", "def dummy_fn(self, *args, **kwargs):", "def gen_function(self, function):\n if function.body:\n self.gen_function_def(function)", "def generate(self):\n py_gen = PythonGenerator([], \"\", self.plugin_stub.description)\n code_fragments = {\n \"autogenerated_module_path\": self.language.wrap_directory.path,\n \"autogenerated_module\": self.language.get_python_autogenerated_module().path,\n \"new_autogenerated_module\": self.language.get_python_new_autogenerated_module().path,\n \"fix_plugin\": py_gen.make_fix_plugin(),\n \"module_namespace\": \"_madz__{}\".format(str(self.plugin_stub.id.namespace).replace(\".\", \"__\")),\n \"init_path\": self.language.get_plugin_init().path,\n \"ctypes_wrapper_path\": self.language.get_python_ctypes_wrapper().path,\n \"module_hooks\": py_gen.make_module_hook(),\n \"type_accessors\" : py_gen.make_type_accessor(None),\n \"cleanup_code\": py_gen.make_cleanup_code(None),\n \"imported_functions\": \"\",\n \"in_structs\": \"\",\n \"dep_module_hooks\": \"\",\n \"dep_cleanup_code\": \"\",\n \"imp_module_hooks\": \"\",\n \"imp_cleanup_code\": \"\",\n \"typedefs\": \"\",\n \"functions\": 
py_gen.make_def_function_types(),\n \"out_structs\": py_gen.make_out_struct(),\n \"plugin_cname\": self.language.output_directory.file(\"{}.madz\".format(self.plugin_stub.id.namespace)).path,\n \"function_callbacks\": py_gen.make_function_callbacks(),\n \"function_stubs\": py_gen.make_function_stubs()\n }\n\n cstdlib = {\n \"windows\": \"'MSVCRT'\",\n \"unix\": \"'c'\",\n \"osx\": \"'c'\"\n }[config_target.get(OptionPlatformOperatingSystem)]\n\n self.prep()\n self._pre_header =\"#include \\\"Python.h\\\"\\n\"\n self._post_header = py_gen.make_c_header()\n\n c_wrapgen.WrapperGenerator.generate(self)\n\n c_source = py_gen.make_c_init(self.language.get_python_code_filename())\n c_source += py_gen.make_get_out_struct()\n c_source += py_gen.make_get_python_out_struct()\n c_source += py_gen.make_c_function_stubs()\n\n all_deps = self.plugin_stub.gen_recursive_loaded_depends()\n # depends plugins python\n for dep in all_deps:\n gen = PythonGenerator([], dep.id.namespace, dep.description)\n\n code_fragments[\"imported_functions\"] += gen.make_def_function_types()\n code_fragments[\"typedefs\"] += gen.make_typedefs()\n code_fragments[\"in_structs\"] += gen.make_out_struct()\n code_fragments[\"dep_module_hooks\"] += \" \" + gen.make_module_hook()\n code_fragments[\"dep_cleanup_code\"] += \"{}\\n{}\".format(gen.make_type_accessor(False), gen.make_cleanup_code(False))\n\n c_source += gen.make_get_in_struct()\n\n # imports plugins python\n for imp in self.plugin_stub.gen_required_loaded_imports():\n if not (imp in all_deps):\n gen = PythonGenerator([], imp.id.namespace, imp.description)\n\n code_fragments[\"imported_functions\"] += gen.make_def_function_types()\n code_fragments[\"typedefs\"] += gen.make_typedefs()\n code_fragments[\"in_structs\"] += gen.make_out_struct()\n code_fragments[\"imp_module_hooks\"] += \" \" + gen.make_module_hook()\n code_fragments[\"imp_cleanup_code\"] += \"{}\\n{}\".format(gen.make_type_accessor(True), gen.make_cleanup_code(True))\n\n c_source += gen.make_get_in_struct()\n\n # This plugins python\n code_fragments[\"typedefs\"] += py_gen.make_typedefs()\n\n module_string = self.autogenerated_module_template.format(cstdlib = cstdlib)\n with self.language.get_python_autogenerated_module().pyopen(\"w\") as f:\n f.write(module_string)\n\n with self.language.get_python_new_autogenerated_module().pyopen(\"w\") as f:\n f.write(module_string)\n\n with self.language.get_python_ctypes_wrapper().pyopen(\"w\") as f:\n f.write(self.ctypes_wrapper_template)\n\n with self.language.get_c_code_filename().pyopen(\"a\") as f:\n f.write(\"\\n{}\\n\".format(c_source))\n\n with self.language.get_python_code_filename().pyopen(\"w\") as f:\n f.write(self.py_template.format(**code_fragments))", "def wrapper(*args, **kwargs):\n print('Before function')\n value = decorated_function(*args, **kwargs)\n print('After function')\n return value", "def update_wrapper_extended(wrapper, wrapped):\n updated_wrapper = functools.update_wrapper(wrapper, wrapped)\n # Assign the signature of the `wrapped` function to the updated wrapper function.\n # Certain frameworks may disallow signature inspection, causing `inspect.signature()` to throw.\n # One such example is the `tensorflow.estimator.Estimator.export_savedmodel()` function\n try:\n updated_wrapper.__signature__ = inspect.signature(wrapped)\n except Exception:\n _logger.debug(\"Failed to restore original signature for wrapper around %s\", wrapped)\n return updated_wrapper", "def convert(self):\n\t\tself.make_func_dict() #sets 
self.func_dict\n\t\tself.make_main_function() #sets self.main\n\t\tself.remove_lambda_nesting()\n\t\tself.replace_self_with_func_names()\n\t\tself.make_func_declarations() #sets self.cpp_declarations\n\t\tself.make_func_bodies() #sets self.cpp_func_bodies\t\t\n\t\tself.make_cpp_func_bodies()\n\t\tlines = []\n\t\tlines.append('#include \"lithp.hpp\"')\n\t\tfor name, signature in self.cpp_declarations.iteritems():\n\t\t\tlines.append(signature + ';')\n\n\t\tfor name, signature in self.cpp_declarations.iteritems():\n\t\t\tif name == 'main': continue\n\t\t\tlines.append(signature + '{')\n\t\t\tlines.append(' return ' + self.cpp_func_bodies[name] + ';\\n}')\n\t\tlines.append(\n\"\"\"\nint main(){\n %s;\n return 0;\n}\n\"\"\" % self.cpp_func_bodies['main'])\n\t\tself.converted = '\\n'.join(lines)\t\t\n\t\treturn self.converted", "def code():", "def func():", "def test_py_closure(self):", "def dummy_code_block() -> CodeBlock:\n return make_dummy_code_block()", "def maybe_run_swig(wrapper_filename, module_name, base_directory,\n extra_deps=None):\n module_filename = 'python/bllipparser/%s.py' % module_name\n swig_filename = join(base_directory, 'swig', 'wrapper.i')\n extra_deps = extra_deps or []\n if exists(wrapper_filename) and exists(module_filename):\n newer = any(is_newer(f, module_filename)\n for f in [swig_filename] + extra_deps)\n if not newer:\n return\n\n print('Generating ' + module_name + ' SWIG wrapper files')\n run(['swig', '-python', '-c++', '-module',\n module_name, '-I' + base_directory,\n '-Wall', '-classic', '-outdir', 'python/bllipparser',\n '-o', wrapper_filename, swig_filename])", "def _eval_python(loop, context, params=None, add_boilerplate=False, namespace=None):\n params = params # pylint\n \n # Are we actually doing this?\n if (not context.do_jit):\n return False\n\n # Emulating full VB programs in Python is difficult, so for now skip loops\n # that Execute() dynamic VB.\n full_code_vba = safe_str_convert(loop).replace(\"\\n\", \"\\\\n\")\n code_vba = full_code_vba[:20]\n code_vba_lower = full_code_vba.lower()\n if (not context.throttle_logging):\n log.info(\"Starting JIT emulation of '\" + code_vba + \"...' ...\")\n if ((\"Execute(\".lower() in code_vba_lower) or\n (\"ExecuteGlobal(\".lower() in code_vba_lower) or\n (\"Eval(\".lower() in code_vba_lower)):\n log.warning(\"Loop Execute()s dynamic code. Not JIT emulating.\")\n return False\n if (\".Item(\".lower() in code_vba_lower):\n log.warning(\"Loop references forms with .Item(). 
Not JIT emulating.\")\n return False\n \n # Generate the Python code for the VB code and execute the generated Python code.\n # TODO: Remove dangerous functions from what can be exec'ed.\n code_python = \"\"\n try:\n\n # For JIT handling we modify the values of certain variables to\n # handle recursive python code generation, so make a copy of the\n # original context.\n tmp_context = Context(context=context, _locals=context.locals, copy_globals=True)\n \n # Get the Python code for the loop.\n if (not context.throttle_logging):\n log.info(\"Generating Python JIT code...\")\n code_python = to_python(loop, tmp_context)\n if add_boilerplate:\n var_inits, _ = _loop_vars_to_python(loop, tmp_context, 0)\n func_defns = _called_funcs_to_python(loop, tmp_context, 0)\n code_python = _boilerplate_to_python(0) + \"\\n\" + \\\n func_defns + \"\\n\" + \\\n var_inits + \"\\n\" + \\\n code_python + \"\\n\" + \\\n _check_for_iocs(loop, tmp_context, 0) + \"\\n\" + \\\n _updated_vars_to_python(loop, tmp_context, 0)\n if (log.getEffectiveLevel() == logging.DEBUG):\n safe_print(\"JIT CODE!!\")\n safe_print(code_python)\n #print \"REMOVE THIS!!!\"\n #sys.exit(0)\n if (not context.throttle_logging):\n log.info(\"Done generating Python JIT code.\")\n\n # Extended ASCII strings are handled differently in VBScript and VBA.\n # Punt if we are emulating VBA and we have what appears to be extended ASCII\n # strings. For performance we are not handling the MS VBA extended ASCII in the python\n # JIT code.\n if (not context.is_vbscript):\n \n # Look for non-ASCII strings.\n non_ascii_pat = r'\"[^\"]*[\\x7f-\\xff][^\"]*\"'\n non_ascii_pat1 = r'\"[^\"]*(?:\\\\x7f|\\\\x[89a-f][0-9a-f])[^\"]*\"'\n if ((re.search(non_ascii_pat1, code_python) is not None) or\n (re.search(non_ascii_pat, code_python) is not None)):\n log.warning(\"VBA code contains Microsoft specific extended ASCII strings. Not JIT emulating.\")\n return False\n\n # Check for dynamic code execution in called functions.\n if (('\"Execute\", ' in code_python) or\n ('\"ExecuteGlobal\", ' in code_python) or\n ('\"Eval\", ' in code_python)):\n log.warning(\"Functions called by loop Execute() dynamic code. Not JIT emulating.\")\n return False\n \n # Run the Python code.\n \n # Have we already run this exact loop?\n if (code_python in jit_cache):\n var_updates = jit_cache[code_python]\n if (not context.throttle_logging):\n log.info(\"Using cached JIT loop results.\")\n if (var_updates == \"ERROR\"):\n log.error(\"Previous run of Python JIT loop emulation failed. Using fallback emulation for loop.\")\n return False\n\n # No cached results. Run the loop.\n elif (namespace is None):\n\n # JIT code execution goes not involve emulating VB GOTOs.\n context.goto_executed = False\n \n # Magic. For some reason exec'ing in locals() makes the dynamically generated\n # code recognize functions defined in the dynamic code. I don't know why.\n if (not context.throttle_logging):\n log.info(\"Evaluating Python JIT code...\")\n exec code_python in locals()\n else:\n\n # JIT code execution goes not involve emulating VB GOTOs.\n context.goto_executed = False\n\n # Run the JIT code in the given namespace.\n exec(code_python, namespace)\n var_updates = namespace[\"var_updates\"]\n if (not context.throttle_logging):\n log.info(\"Done JIT emulation of '\" + code_vba + \"...' 
.\")\n\n # Cache the loop results.\n jit_cache[code_python] = var_updates\n \n # Update the context with the variable values from the JIT code execution.\n try:\n for updated_var in var_updates.keys():\n if (updated_var == \"__shell_code__\"):\n continue\n context.set(updated_var, var_updates[updated_var])\n except (NameError, UnboundLocalError):\n log.warning(\"No variables set by Python JIT code.\")\n\n # Update shellcode bytes from the JIT emulation.\n import vba_context\n vba_context.shellcode = var_updates[\"__shell_code__\"]\n\n except NotImplementedError as e:\n log.error(\"Python JIT emulation of loop failed. \" + safe_str_convert(e) + \". Using fallback emulation method for loop...\")\n #safe_print(\"REMOVE THIS!!\")\n #raise e\n return False\n\n except Exception as e:\n\n # Cache the error.\n jit_cache[code_python] = \"ERROR\"\n \n # If we bombed out due to a potential infinite loop we\n # are done.\n if (\"Infinite Loop\" in safe_str_convert(e)):\n log.warning(\"Detected infinite loop. Terminating loop.\")\n return True\n\n # We had some other error. Emulating the loop in Python failed.\n log.error(\"Python JIT emulation of loop failed. \" + safe_str_convert(e) + \". Using fallback emulation method for loop...\")\n if (log.getEffectiveLevel() == logging.DEBUG):\n traceback.print_exc(file=sys.stdout)\n safe_print(\"-*-*-*-*-\\n\" + code_python + \"\\n-*-*-*-*-\")\n return False\n\n # Done.\n return True", "def fortran_c_wrapper(self) -> str:\n return ''", "def wrap(cls, **kwargs):\n\n def create_block(wrapped_fn) -> cls:\n\n def block_builder(fn, *, name=None, **fn_kwargs):\n\n # Obtain the non-keyword arguments of fn - these will serve as input port names\n num_in_ports = fn.__code__.co_argcount\n in_port_names = fn.__code__.co_varnames[0:num_in_ports]\n\n # Get the Out argument if it is there\n Out = kwargs.pop('Out') if 'Out' in kwargs else None\n\n default_name = camel_to_snake(fn.__code__.co_name)\n params = dict(name=maybe_or(name, default_name), In=in_port_names)\n params.update(kwargs)\n\n # Initialize the block with given input ports and the wrapped function name\n block = cls(**params)\n block.auto_rename = True\n\n # Get the function outputs\n out = fn(*block.In, **fn_kwargs)\n\n from blox_old.block.base.port_base import Port\n if not isinstance(out, tuple) and not isinstance(out, list) and not isinstance(out, Port):\n raise BlockError(\"Wrapped function must return either a single port, a list or a tuple of ports\")\n\n # Make sure out is iterable\n if isinstance(out, Port):\n out = (out,)\n\n # Determine the output names\n if Out is None:\n Out = parse_ports(len(out), default_prefix='o') if not isinstance(out, Port) else 1\n else:\n Out = parse_ports(Out, default_prefix='o')\n if len(Out) != len(out):\n raise BlockError(\"The number of returned ports doesn't match the specified number of ports\")\n\n for name, port in zip(Out, out):\n block.Out.new(name)\n block.Out[name] = port\n\n return block\n\n # The decorators returns a function that creates an instance of the block, like a constructor\n return partial(block_builder, wrapped_fn)\n\n return create_block", "def main_docstring():", "def _called_funcs_to_python(loop, context, indent):\n \n # Get the definitions for all local functions called directly in the loop.\n local_funcs = _get_all_called_funcs(loop, context)\n local_func_hashes = set()\n for curr_func in local_funcs:\n curr_func_hash = hashlib.md5(safe_str_convert(curr_func).encode()).hexdigest()\n local_func_hashes.add(curr_func_hash)\n \n # Now get the 
definitions of all the local functions called by the local\n # functions.\n seen_funcs = set()\n funcs_to_handle = list(local_funcs)\n while (len(funcs_to_handle) > 0):\n\n # Get the current function definition to check for calls.\n curr_func = funcs_to_handle.pop()\n curr_func_hash = hashlib.md5(safe_str_convert(curr_func).encode()).hexdigest()\n \n # Already looked at this one?\n if (curr_func_hash in seen_funcs):\n continue\n seen_funcs.add(curr_func_hash)\n\n # Get the functions called in the current function.\n curr_local_funcs = _get_all_called_funcs(curr_func, context)\n\n # Save the new functions for processing.\n for new_func in curr_local_funcs:\n new_func_hash = hashlib.md5(safe_str_convert(new_func).encode()).hexdigest()\n if (new_func_hash not in local_func_hashes):\n local_func_hashes.add(new_func_hash)\n local_funcs.append(new_func)\n funcs_to_handle.append(new_func)\n \n # Convert each local function to Python.\n r = \"\"\n for local_func in local_funcs:\n r += to_python(local_func, context, indent=indent) + \"\\n\"\n\n # Done.\n indent_str = \" \" * indent\n r = indent_str + \"# VBA Local Function Definitions\\n\" + r\n return r", "def generate_lbrycrd_wrapper(read_file=LBRYCRD_BASE_FPATH,\n write_file=LBRYCRD_FPATH):\n print(80 * \"-\")\n print(\"Input JSON:\", None)\n\n with open(write_file, \"w\") as lbrycrd_file:\n docstring = ['\"\"\"',\n 'LBRYCRD daemon wrapper in Python. Import it an initialize the main class.',\n '',\n 'This file was generated at build time using the `generator` module.',\n '\"\"\"',\n '']\n\n docstring = \"\\n\".join(docstring)\n lbrycrd_file.write(docstring)\n\n with open(read_file, \"r\") as template:\n header = template.read()\n\n lbrycrd_file.write(header)\n\n print(\"Generated 'lbrycrd' API wrapper:\", write_file)", "def body(self):", "def go(*args, **kws): \n class fncWrap(cgmGeneral.cgmFuncCls):\t\t\n def __init__(self,*args, **kws):\n super(fncWrap, self).__init__(*args, **kws)\n self._str_funcName = 'TemplateFactory.go'\t\n self._b_reportTimes = 0 #..we always want this on so we're gonna set it on\n self._cgmClass = 'TemplateFactory.go'\n '''\n\t mModule = None,\n\t forceNew = True,\n\t loadTemplatePose = True,\n\t tryTemplateUpdate = False,\n\t geo = None,\n\t **kws\n\t '''\n self._l_ARGS_KWS_DEFAULTS = [{'kw':'mModule',\"default\":None,\"argType\":'cgmModule','help':\"This must be a cgm module\"},\n {'kw':'forceNew',\"default\":True,\"argType\":'bool','help':\"Whether to force a new one\"},\n {'kw':'loadTemplatePose',\"default\":True,\"argType\":'bool','help':\"Whether to attempt to load a tempate pose or now\"},\n {'kw':'tryTemplateUpdate',\"default\":True,\"argType\":'bool','help':\"Whether to attempt to update the template with saved settings after creation\"},\n {'kw':'geo',\"default\":None,\"argType\":'mGeo,str','help':\"Geo to use for processing\"}]\t \n self.__dataBind__(*args, **kws)\n\n self.l_funcSteps = [{'step':'Initial Validation','call':self._step_validate_},\n {'step':'Need Templating?','call':self._step_templateNeed_},\n {'step':'Templating Data Bind','call':self._step_templatingDataBind_},\t \n {'step':'Checking template toggles','call':self._step_verifyModuleTemplateToggles_},\n {'step':'Main process','call':self._step_templateProcess_},\n {'step':'Tag Children','call':self._step_tagChildren_},\t \t \n ]\n\n def _step_validate_(self):\n assert self.d_kws['mModule'].isModule(),\"Not a module\"\n self._mi_module = self.d_kws['mModule']# Link for shortness\n self._str_reportStart = 
\"{0}('{1}')\".format(self._str_reportStart,self._mi_module.p_nameShort)\n\n if self.d_kws['loadTemplatePose']:#trying this\n self.l_funcSteps.append({'step':'Load Template Pose','call':self._step_poseLoad_})\n\n try:#Geo -------------------------------------------------------------------------------------------\n if self.d_kws['geo'] is None:\n try:\n self.d_kws['geo'] = self._mi_module.modulePuppet.getUnifiedGeo()\n if not self.d_kws['geo']:\n raise ValueError, \"Module puppet missing geo\"\n except StandardError,error:log.warning(\"geo failed to find: %s\"%(error) + \"=\"*75) \n self.str_geo = cgmValid.objString(self.d_kws['geo'],mayaType=['mesh','nurbsSurface'])\n except StandardError,error:\n self.log_error(\" geo failed : {0}\".format(error)) \n\n def _step_templateNeed_(self):\n #Before something can be templated, we need to see if it has a puppet yet\n if not self._mi_module.getMessage('modulePuppet') and not self._mi_module.getMessage('moduleParent'):\n self.log_debug(\"No modulePuppet or moduleParent. Need to create\")\n if self._mi_module.getMessage(\"helper\"):\n self._mi_module.__buildSimplePuppet__()\n else:\n self.log_error(\"No modulePuppet or moduleParent and no helper\")\t\t\n return\n\n if self._mi_module.mClass in ['cgmEyelids','cgmEyeball']:#Some special objects don't need no stinkin templating!\n if self._mi_module.getMessage('helper'):\n log.info(\"Helper object found. No templating necessary\")\t \n return \n\n if self.d_kws['tryTemplateUpdate']:\n self.log_info(\"Trying template update...\")\n if self._mi_module.templateSettings_call('update'):\n self.log_info(\"Template update...\")\t\t \n if self.d_kws['loadTemplatePose']:\n self.log_info(\"Trying loadTemplatePose...\") \n try:self._mi_module.templateSettings_call('load')\n except Exception,err:\n self.log_error(\"Load pose fail: {0}\".format(err))\n return False\n return self._SuccessReturn_()\n\n if self._mi_module.isTemplated():\n if self.d_kws['forceNew']:\n self._mi_module.deleteTemplate()\n else:\n log.warning(\"'%s' has already been templated\"%mModule.getShortName())\n return self._SuccessReturn_()\n\n\n def _step_templatingDataBind_(self):\n\n self.mi_modulePuppet = self._mi_module.modulePuppet\n\n self.cls = \"TemplateFactory.go\"\n\n self.moduleNullData = attributes.returnUserAttrsToDict(self._mi_module.mNode)\n self._mi_templateNull = self._mi_module.templateNull#link\n\n self.rigNull = self._mi_module.getMessage('rigNull')[0] or False\n self.moduleParent = self.moduleNullData.get('moduleParent')\n self.moduleColors = self._mi_module.getModuleColors()\n self.l_coreNames = self._mi_module.coreNames.value\n self.d_coreNamesAttrs = self._mi_module.coreNames.d_indexToAttr\n self.corePosList = self._mi_templateNull.templateStarterData\n self.foundDirections = False #Placeholder to see if we have it\n\n assert len(self.l_coreNames) == len(self.corePosList),\"coreNames length and corePosList doesn't match\"\n\n #>>> part name \n self.partName = self._mi_module.getPartNameBase()\n self.partType = self._mi_module.moduleType or False\n self._partName = self._mi_module.getPartNameBase()\n self._strShortName = self._mi_module.getShortName() or False \n\n self.direction = None\n if self._mi_module.hasAttr('cgmDirection'):\n self.direction = self._mi_module.cgmDirection or None\n\n #Verify we have a puppet and that puppet has a masterControl which we need for or master scale plug\n if not self.mi_modulePuppet.getMessage('masterControl'):\n if not self.mi_modulePuppet._verifyMasterControl():\n raise 
StandardError,\"MasterControl failed to verify\"\n\n self._mi_masterControl = self._mi_module.modulePuppet.masterControl\n self._mi_masterSettings = self._mi_masterControl.controlSettings\n self._mi_deformGroup = self._mi_module.modulePuppet.masterNull.deformGroup \n\n #>>> template null \n self.templateNullData = attributes.returnUserAttrsToDict(self._mi_templateNull.mNode)\n\n #>>>Connect switches\n\n def _step_verifyModuleTemplateToggles_(self):\n verify_moduleTemplateToggles(self)\n def _step_templateProcess_(self):\n try:\n if self._mi_module.mClass == 'cgmLimb':\n log.debug(\"mode: cgmLimb Template\")\n\n build_limbTemplate(self)\t\n\n if 'ball' in self.l_coreNames and 'ankle' in self.l_coreNames:\n try:\n doCastPivots(self._mi_module)\n except Exception,error:raise Exception,\"Cast pivots fail | {0}\".format(error)\n\n elif self._mi_module.mClass == 'cgmEyeball':\n log.info(\"mode: cgmEyeball\")\n try:doMakeEyeballTemplate(self)\n except StandardError,error:log.warning(\">>> %s.go >> build failed: %s\"%(self._mi_module.p_nameShort,error)) \n\n else:\n raise NotImplementedError,\"haven't implemented '{0} templatizing yet\".format(self._mi_module.mClass)\n\n except Exception,error:\n raise Exception,\"build fail! |{0}\".format(error)\n\n def _step_tagChildren_(self):\n doTagChildren(self._mi_module)\n\n def _step_poseLoad_(self):\n #>>> store template settings\n self._mi_module.templateSettings_call('load')\n\n\n\n \"\"\"\n self._mi_templateNull.overrideEnabled = 1\t\t\n cgmMeta.cgmAttr(self._mi_masterSettings.mNode,'templateVis',lock=False).doConnectOut(\"%s.%s\"%(self._mi_templateNull.mNode,'overrideVisibility'))\n cgmMeta.cgmAttr(self._mi_masterSettings.mNode,'templateLock',lock=False).doConnectOut(\"%s.%s\"%(self._mi_templateNull.mNode,'overrideDisplayType')) \n \"\"\"\n return fncWrap(*args, **kws).go()", "def generate_wrapper(self):\n\n # If there is an input file, parse it\n if self.package_info_path is not None:\n info_parser = PackageInfoParser(self.package_info_path,\n self.source_root)\n info_parser.parse()\n self.package_info = info_parser.package_info\n else:\n pass\n\n # Generate a header collection\n self.collect_source_hpp_files()\n\n # Attempt to assign source paths to each class, assuming the containing \n # file name is the class name\n for eachModule in self.package_info.module_info:\n for eachClass in eachModule.class_info:\n for eachPath in self.package_info.source_hpp_files:\n base = ntpath.basename(eachPath)\n if eachClass.name == base.split('.')[0]:\n eachClass.source_file_full_path = eachPath\n if eachClass.source_file is None:\n eachClass.source_file = base\n\n # Attempt to automatically generate template args for each class\n for eachModule in self.package_info.module_info:\n info_generator = CppInfoHelper(eachModule)\n for eachClass in eachModule.class_info:\n info_generator.expand_templates(eachClass, \"class\")\n\n # Generate the header collection\n header_collection_path = self.generate_header_collection()\n\n # Parse the header collection\n self.parse_header_collection(header_collection_path)\n\n # Update the Class and Free Function Info from the parsed code\n self.update_class_info()\n self.update_free_function_info()\n self.update_enum_info()\n\n # Write the modules\n for eachModule in self.package_info.module_info:\n module_writer = CppModuleWrapperWriter(self.global_ns,\n self.source_ns,\n eachModule,\n self.get_wrapper_template(),\n self.wrapper_root)\n module_writer.write()", "def wrap(value):\n return value # TODO", "def wrapper(func, *args, 
**kwargs):\n def wrapped():\n return func(*args, **kwargs)\n return wrapped", "def adaptPythonToCorba(self, *args):\n return _SALOMERuntime.RuntimeSALOME_adaptPythonToCorba(self, *args)", "def function_wrapper(self, _t):\n return self.function(*_t)", "def generate_code(spn_id, spn, meta_types, floating_data_type):\r\n\r\n # make sure we have ids\r\n assign_ids(spn)\r\n\r\n # fill method body according to SPN structure\r\n method_body = generate_method_body(spn, spn, floating_data_type, 0)\r\n\r\n # build parameters used in generated c++ function\r\n method_params = []\r\n passed_params = []\r\n for i, type in enumerate(meta_types):\r\n if type == MetaType.DISCRETE:\r\n method_params += [f'vector <int> possibleValues{i}', f'int nullValueIdx{i}']\r\n passed_params += [f'py::arg(\"possibleValues{i}\")', f'py::arg(\"nullValueIdx{i}\")']\r\n elif type == MetaType.REAL:\r\n method_params += [f'bool inverse{i}', f'bool leftMinusInf{i}', f'float leftCondition{i}',\r\n f'bool rightMinusInf{i}', f'float rightCondition{i}', f'bool leftIncluded{i}',\r\n f'bool rightIncluded{i}', f'float nullValue{i}']\r\n passed_params += [f'py::arg(\"inverse{i}\")', f'py::arg(\"leftMinusInf{i}\")', f'py::arg(\"leftCondition{i}\")',\r\n f'py::arg(\"rightMinusInf{i}\")', f'py::arg(\"rightCondition{i}\")',\r\n f'py::arg(\"leftIncluded{i}\")', f'py::arg(\"rightIncluded{i}\")', f'py::arg(\"nullValue{i}\")']\r\n\r\n value_dictionary = {\r\n 'spn_id': spn_id,\r\n 'method_body': method_body,\r\n 'method_params': ', '.join(method_params),\r\n 'node_count': get_number_of_nodes(spn),\r\n 'passed_params': ', '.join(passed_params),\r\n 'floating_data_type': floating_data_type\r\n }\r\n generated_method = replace_template(TemplatePath.METHOD_MASTER, value_dictionary, 0)\r\n registrate_method = replace_template(TemplatePath.REGISTRATION_MASTER, value_dictionary, 0)\r\n\r\n return generated_method, registrate_method", "def gen_fun_rst(name, txt):\n (desc, params, example) = parse_fun_block(txt)\n directive = gen_rst_directive(name, params)\n example_rst = gen_example_rst(example)\n res = \"\"\"\n{directive}\n{desc}\n{example}\n\n\"\"\".format(name=name,\n directive=directive,\n desc=indent(desc, 2),\n example=example_rst)\n return res", "def test_function_statement():\n r = convert_code(\"{foo arg1=bar arg2=3}\")\n assert r == \"{{ {'arg1': bar, 'arg2': 3}|foo }}\"", "def _stub_generator(self, nargs, body_func, kwargs=None):\n def stub(tyctx):\n # body is supplied when the function is magic'd into life via glbls\n return body(tyctx) # noqa: F821\n if kwargs is None:\n kwargs = {}\n # create new code parts\n stub_code = stub.__code__\n co_args = [stub_code.co_argcount + nargs + len(kwargs)]\n\n new_varnames = [*stub_code.co_varnames]\n new_varnames.extend([f'tmp{x}' for x in range(nargs)])\n new_varnames.extend([x for x, _ in kwargs.items()])\n from numba.core import utils\n if utils.PYVERSION >= (3, 8):\n co_args.append(stub_code.co_posonlyargcount)\n co_args.append(stub_code.co_kwonlyargcount)\n co_args.extend([stub_code.co_nlocals + nargs + len(kwargs),\n stub_code.co_stacksize,\n stub_code.co_flags,\n stub_code.co_code,\n stub_code.co_consts,\n stub_code.co_names,\n tuple(new_varnames),\n stub_code.co_filename,\n stub_code.co_name,\n stub_code.co_firstlineno,\n stub_code.co_lnotab,\n stub_code.co_freevars,\n stub_code.co_cellvars\n ])\n\n new_code = pytypes.CodeType(*co_args)\n\n # get function\n new_func = pytypes.FunctionType(new_code, {'body': body_func})\n return new_func", "def fn(*args, **kwargs):\n pass", "def 
python_return(text='is cool'):\n if '_' in text:\n text = text.replace(\"_\", \" \")\n return 'Python {}'.format(escape(text))", "def __call__(self,thing):\n return self.compiled(thing)", "def write_c_wrapper(out, decl, return_val, write_body):\n if generate_gotcha:\n write_gotcha_c_wrapper(out, decl, return_val, write_body)\n else:\n write_pmpi_c_wrapper(out, decl, return_val, write_body)", "def get_wrapper_template(self, declaration):\n pass", "def make_cleanup_code(self, is_import):\n res = \\\n\"\"\" # {dict_lookup}\n autogenerated_module.{dict_lookup} = _struct_accessor({varname})\n new_autogenerated_module.{dict_lookup} = _new_struct_accessor({varname})\n\"\"\"\n\n fragments ={\n \"dict_lookup\": \"self\" if (is_import is None)\n else ('{reqires_type}[\"{name}\"]'.format(\n reqires_type = \"imports\" if is_import else \"depends\",\n name = self.namespace)),\n \"varname\": self.mangled_namespace + \"_plugin\",\n }\n\n return res.format(**fragments)", "def say_hello_py(x):\n print('Hello from %s' % x)\n eel.say_hello_js('Python {from within say_hello_py()}!')", "def __def_function__():\n pass", "def to_PyMethodDef_entry(items):\r\n\r\n entry_type = items[0]\r\n items = items[1:]\r\n if entry_type == 'method':\r\n return 'FORWARDER(%s, %s, \"%s\", %s)' % items\r\n elif entry_type == 'function':\r\n return 'FREE_FORWARDER(%s, %s, \"%s\", %s)' % items\r\n elif entry_type == 'method_template':\r\n return 'FORWARDER(%s<common_type>, %s, \"%s\", %s)' % items\r\n else:\r\n assert False", "def old_function_with_docstring(x, y):\n return x + y", "def hello_world():\n return \"Hello world!\"", "def compile_function(self, function, arguments):" ]
[ "0.6718689", "0.6527611", "0.64795184", "0.6186041", "0.6184181", "0.61080533", "0.60333836", "0.5982096", "0.59634656", "0.5940715", "0.5934572", "0.59340453", "0.59037447", "0.59012115", "0.58594495", "0.585627", "0.58455956", "0.5824993", "0.58232915", "0.5799322", "0.5793883", "0.5770539", "0.5739759", "0.5739601", "0.5737684", "0.57368255", "0.571394", "0.5694685", "0.5634757", "0.5634757", "0.56157327", "0.55890834", "0.5578717", "0.55602586", "0.5552676", "0.5534715", "0.55344415", "0.5530241", "0.5524125", "0.5524003", "0.5505924", "0.54883194", "0.5479455", "0.54670024", "0.54661363", "0.54646635", "0.5460133", "0.5449359", "0.54458404", "0.5436728", "0.5423517", "0.5409167", "0.5395822", "0.53952086", "0.5389258", "0.5373675", "0.5352838", "0.5351283", "0.53467447", "0.5326242", "0.5304013", "0.5298899", "0.5294027", "0.5291205", "0.52868193", "0.52762276", "0.52647316", "0.5247889", "0.52474594", "0.52469194", "0.5232638", "0.52216566", "0.5218844", "0.52066624", "0.5196653", "0.519476", "0.51919705", "0.51843077", "0.51835096", "0.5174444", "0.51728874", "0.51672316", "0.5163421", "0.5158177", "0.51577234", "0.5155252", "0.5152823", "0.51492757", "0.51400423", "0.5128242", "0.51235473", "0.5122023", "0.5107286", "0.5103104", "0.51021093", "0.51014364", "0.5094616", "0.508696", "0.50757474", "0.5074355" ]
0.54531014
47
Test that both transformer implementations produce the same outputs when applied to a properly-sized sequence.
def test_basic_equivalence(cell_cls, num_layers):
    with tf.Graph().as_default():
        with tf.Session() as sess:
            pos_enc = positional_encoding(4, 6, dtype=tf.float64)
            in_seq = tf.get_variable('in_seq',
                                     shape=(3, 4, 6),
                                     initializer=tf.truncated_normal_initializer(),
                                     dtype=tf.float64)
            cell = cell_cls(pos_enc, num_layers=num_layers, num_heads=2, hidden=24)
            actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)
            with tf.variable_scope('rnn', reuse=True):
                with tf.variable_scope('transformer', reuse=True):
                    expected = in_seq + pos_enc
                    for _ in range(num_layers):
                        expected = transformer_layer(expected, num_heads=2, hidden=24)
            sess.run(tf.global_variables_initializer())
            actual, expected = sess.run((actual, expected))
            assert not np.isnan(actual).any()
            assert not np.isnan(expected).any()
            assert actual.shape == expected.shape
            assert np.allclose(actual, expected)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_random_transform_generator():\n # Check shapes are correct Batch Size = 1 - Pass\n batch_size = 1\n transforms = layer_util.random_transform_generator(batch_size, 0)\n assert transforms.shape == (batch_size, 4, 3)\n\n # Check numerical outputs are correct for a given seed - Pass\n batch_size = 1\n scale = 0.1\n seed = 0\n expected = tf.constant(\n np.array(\n [\n [\n [9.4661278e-01, -3.8267835e-03, 3.6934228e-03],\n [5.5613145e-03, 9.8034811e-01, -1.8044969e-02],\n [1.9651605e-04, 1.4576728e-02, 9.6243286e-01],\n [-2.5107686e-03, 1.9579126e-02, -1.2195010e-02],\n ]\n ],\n dtype=np.float32,\n )\n ) # shape = (1, 4, 3)\n got = layer_util.random_transform_generator(\n batch_size=batch_size, scale=scale, seed=seed\n )\n assert check_equal(got, expected)", "def test_transform_output(argument_pair):\n ...", "def test_transform(self):\n data = pd.DataFrame({\n \"x\": np.array([0.1, 0.3, 0.5]),\n \"y\": np.array([\"yes\", \"yes\", \"no\"])\n })\n\n transformer = DataTransformer()\n transformer._column_transform_info_list = [\n ColumnTransformInfo(\n column_name=\"x\", column_type=\"continuous\", transform=None,\n transform_aux=None,\n output_info=[SpanInfo(1, 'tanh'), SpanInfo(3, 'softmax')],\n output_dimensions=1 + 3\n ),\n ColumnTransformInfo(\n column_name=\"y\", column_type=\"discrete\", transform=None,\n transform_aux=None,\n output_info=[SpanInfo(2, 'softmax')],\n output_dimensions=2\n )\n ]\n\n transformer._transform_continuous = Mock()\n selected_normalized_value = np.array([[0.1], [0.3], [0.5]])\n selected_component_onehot = np.array([\n [1, 0, 0],\n [1, 0, 0],\n [1, 0, 0],\n ])\n return_value = (selected_normalized_value, selected_component_onehot)\n transformer._transform_continuous.return_value = return_value\n\n transformer._transform_discrete = Mock()\n transformer._transform_discrete.return_value = [np.array([\n [0, 1],\n [0, 1],\n [1, 0],\n ])]\n\n result = transformer.transform(data)\n transformer._transform_continuous.assert_called_once()\n transformer._transform_discrete.assert_called_once()\n\n expected = np.array([\n [0.1, 1, 0, 0, 0, 1],\n [0.3, 1, 0, 0, 0, 1],\n [0.5, 1, 0, 0, 1, 0],\n ])\n\n assert result.shape == (3, 6)\n assert (result[:, 0] == expected[:, 0]).all(), \"continuous-cdf\"\n assert (result[:, 1:4] == expected[:, 1:4]).all(), \"continuous-softmax\"\n assert (result[:, 4:6] == expected[:, 4:6]).all(), \"discrete\"", "def test_transform(self):\n t = OneHotEncode(3)\n assert numpy.all(t.transform(0) == numpy.array((1.0, 0.0, 0.0)))\n assert numpy.all(t.transform(1) == numpy.array((0.0, 1.0, 0.0)))\n assert numpy.all(t.transform(2) == numpy.array((0.0, 0.0, 1.0)))\n with pytest.raises(AssertionError):\n t.transform(4)\n with pytest.raises(AssertionError):\n t.transform(-1)\n with pytest.raises(AssertionError):\n t.transform(2.2)\n assert numpy.all(\n t.transform([[2, 1], [0, 2]])\n == numpy.array(\n [[(0.0, 0.0, 1.0), (0.0, 1.0, 0.0)], [(1.0, 0.0, 0.0), (0.0, 0.0, 1.0)]]\n )\n )\n\n t = OneHotEncode(2)\n assert t.transform(0) == 0.0\n assert t.transform(1) == 1.0\n with pytest.raises(TypeError):\n t.transform(\"ipsi\")\n assert numpy.all(\n t.transform([[1, 1], [0, 1]]) == numpy.array([[1.0, 1.0], [0.0, 1.0]])\n )\n\n # for the crazy enough\n t = OneHotEncode(1)\n assert t.transform(0) == 0.0\n with pytest.raises(TypeError):\n t.transform(\"ipsi\")\n assert numpy.all(t.transform([[0, 0], [0, 0]]) == [[0.0, 0.0], [0.0, 0.0]])", "def test_multiple_rng_aliasing(self):\r\n rng1 = RandomStreams(1234)\r\n rng2 = RandomStreams(2392)\r\n assert 
rng1.state_updates is not rng2.state_updates\r\n assert rng1.gen_seedgen is not rng2.gen_seedgen", "def test_transform(self):\n t = Enumerate([2, \"asfa\", \"ipsi\"])\n assert t.transform(2) == 0\n assert t.transform(\"asfa\") == 1\n assert t.transform(\"ipsi\") == 2\n with pytest.raises(KeyError):\n t.transform(\"aafdasfa\")\n assert numpy.all(\n t.transform([[\"ipsi\", \"asfa\"], [2, \"ipsi\"]]) == [[2, 1], [0, 2]]\n )\n\n # for the crazy enough\n t = Enumerate([2])\n assert t.transform(2) == 0\n with pytest.raises(KeyError):\n t.transform(\"aafdasfa\")\n assert numpy.all(t.transform([[2, 2], [2, 2]]) == [[0, 0], [0, 0]])", "def test__transform_continuous(self):", "def _assert_same_length(\n list_series_1: Sequence[TimeSeries],\n list_series_2: Sequence[TimeSeries],\n):\n\n raise_if_not(\n len(list_series_1) == len(list_series_2),\n \"Sequences of series must be of the same length, found length:\"\n + f\" {len(list_series_1)} and {len(list_series_2)}.\",\n )", "def test_same_type():\n\n for seq, exp in [\n ((1, ), True),\n ((1, 1.0, 1+0j), False),\n ((1, 10, 100), True),\n ((True, False, 10), False),\n (['abc', 'def', 'ghi'], True),\n (np.linspace(0, 1, 100), True),\n ]:\n assert same_type(*seq) == exp\n\n assert same_type(\n True, False, True, False, target_type=bool,\n )\n\n assert not same_type(\n True, False, True, False, target_type=int,\n )\n\n with pytest.raises(ValueError):\n same_type()", "def test_identical(self):\n write this test!", "def test(args, encoder, decoder, x):\n\n T = args.maxseqlen\n hidden = None\n loss_mask_iou = 0\n loss_mask_x = 0\n loss_class = 0\n\n out_masks = []\n out_classes = []\n out_stops = []\n encoder.eval()\n decoder.eval()\n\n feats = encoder(x)\n # loop over sequence length and get predictions\n for t in range(0, T):\n out_mask, out_class, out_stop, hidden = decoder(feats, hidden)\n upsample_match = torch.nn.UpsamplingBilinear2d(size = (x.size()[-2],x.size()[-1]))\n out_mask = upsample_match(out_mask)\n # get predictions in list to concat later\n out_masks.append(out_mask)\n out_classes.append(out_class)\n out_stops.append(out_stop)\n # concat all outputs into single tensor to compute the loss\n out_masks = torch.cat(out_masks,1)\n out_classes = torch.cat(out_classes,1).view(out_class.size(0),len(out_classes),-1)\n out_stops = torch.cat(out_stops,1).view(out_stop.size(0),len(out_stops),-1)\n\n return torch.sigmoid(out_masks).data, out_classes.data, torch.sigmoid(out_stops).data", "def test_train_validation_exchange_data():\n x_train_input = np.random.normal(0, 2, (100, 2))\n y_train_input = np.random.randint(0, 2, (100,))\n train_data_input = (x_train_input, y_train_input)\n \n validation_data_input = (np.random.normal(0, 1, (20, 2)),\n np.random.randint(0, 2, (20)))\n exchange_data_input = (np.random.normal(0, 1, (20, 2)),\n np.random.randint(0, 2, (20)))\n\n # test that args must be ndarrays\n with pytest.raises(AssertionError):\n utils._train_validation_exchange_data(\n (x_train_input, list(y_train_input)), None, None)\n\n def assert_almost_equal(iter1, iter2):\n for ary1, ary2 in zip(iter1, iter2):\n np.testing.assert_almost_equal(ary1, ary2)\n\n # validation_data and exchange_data are passed explicitly\n train_data, validation_data, exchange_data = (\n utils._train_validation_exchange_data(\n train_data_input, validation_data_input, exchange_data_input))\n assert_almost_equal(train_data + validation_data + exchange_data,\n train_data_input + validation_data_input + exchange_data_input)\n\n # validation_data is None, exchange_data is not None\n 
train_data, validation_data, exchange_data = (\n utils._train_validation_exchange_data(\n train_data_input, None, exchange_data_input))\n assert_almost_equal(train_data + exchange_data, train_data_input + exchange_data_input)\n assert validation_data is None\n \n # validation data is not None, exchange data is None\n train_data, validation_data, exchange_data = (\n utils._train_validation_exchange_data(\n train_data_input, validation_data_input, None))\n assert_almost_equal(train_data + exchange_data + validation_data,\n train_data_input + validation_data_input + validation_data_input)\n \n # validation_data is None, but validation_split == 0.2\n random_state = 0\n validation_split = 0.2\n x, x_valid, y, y_valid = train_test_split(*train_data_input,\n random_state=random_state,\n test_size=validation_split)\n train_data, validation_data, exchange_data = (\n utils._train_validation_exchange_data(\n train_data_input, validation_split=validation_split))\n assert_almost_equal(train_data + exchange_data + validation_data,\n (x, y, x_valid, y_valid, x_valid, y_valid))\n \n # validation_data is None, validation_split == 0., exchange_split == 0.3\n random_state = 0\n exchange_split = 0.3\n x, x_exchange, y, y_exchange = train_test_split(*train_data_input,\n random_state=random_state,\n test_size=exchange_split)\n train_data, validation_data, exchange_data = (\n utils._train_validation_exchange_data(\n train_data_input, exchange_split=exchange_split))\n assert_almost_equal(train_data + exchange_data,\n (x, y, x_exchange, y_exchange))\n assert validation_data is None\n \n # validation_data is None, exchange_data is None,\n # validation_split == 0.1, exchange_split = 0.15\n random_state = 0\n exchange_split = 0.15\n validation_split = 0.1\n x, x_valid, y, y_valid = train_test_split(*train_data_input,\n random_state=random_state,\n test_size=validation_split)\n x, x_exchange, y, y_exchange = train_test_split(*(x, y),\n random_state=random_state,\n test_size=exchange_split)\n train_data, validation_data, exchange_data = (\n utils._train_validation_exchange_data(\n train_data_input, validation_split=validation_split,\n exchange_split=exchange_split))\n assert_almost_equal(train_data + validation_data + exchange_data,\n (x, y) + (x_valid, y_valid) + (x_exchange, y_exchange))", "def create_mock_sequence(\n sequence_length, number_of_nodes, edge_per_node, in_channels, number_of_classes\n):\n input_sequence = torch.zeros(sequence_length, number_of_nodes, in_channels)\n\n X, edge_index = create_mock_data(\n number_of_nodes=number_of_nodes,\n edge_per_node=edge_per_node,\n in_channels=in_channels,\n )\n edge_weight = create_mock_edge_weight(edge_index)\n targets = create_mock_target(number_of_nodes, number_of_classes)\n\n for t in range(sequence_length):\n input_sequence[t] = X + t\n\n return input_sequence, targets, edge_index, edge_weight", "def test_transform(self):\n X = self.generate_X()\n task = mmRDTR()\n task.fit(X)\n res = task.transform(X)\n # check if Instance\n self.assertIsInstance(res,Container)\n # check if names\n self.assertEqual(np.all(res.colnames()==[str(i) for i in xrange(len(res.colnames()))]),True)\n # check if values as within the range expected\n self.assertEqual(np.all(res().min()>=-1),True)\n self.assertEqual(np.all(res().max()<=1),True)\n for i in range(len(res.colnames())):\n self.assertEqual(round(res()[:,i].mean(),8),0)\n # check with new data\n Y = self.generate_X()\n res = task.transform(Y)\n self.assertEqual(np.all(res.colnames()==[str(i) for i in 
xrange(len(res.colnames()))]),True)\n self.assertEqual(np.all(res().min()>=-1),True)\n self.assertEqual(np.all(res().max()<=1),True)", "def test_random_state_transfer(self):\r\n class Graph:\r\n def __init__(self, seed=123):\r\n self.rng = RandomStreams(seed)\r\n self.y = self.rng.uniform(size=(1,))\r\n g1 = Graph(seed=123)\r\n f1 = function([], g1.y)\r\n g2 = Graph(seed=987)\r\n f2 = function([], g2.y)\r\n\r\n for (su1, su2) in zip(g1.rng.state_updates, g2.rng.state_updates):\r\n su2[0].set_value(su1[0].get_value())\r\n\r\n numpy.testing.assert_array_almost_equal(f1(), f2(), decimal=6)", "def test_multiple_rng_aliasing():\r\n rng1 = MRG_RandomStreams(1234)\r\n rng2 = MRG_RandomStreams(2392)\r\n assert rng1.state_updates is not rng2.state_updates", "def assert_predictions_equal(first, second, x):\n preds1 = first.predict(x, batch_size=batch_size)\n preds2 = second.predict(x, batch_size=batch_size)\n np.testing.assert_array_equal(preds1, preds2)", "def test_unknown_seq_length(self):\n network_1 = Conv1DNetwork()\n inputs_1 = tf.placeholder(tf.float32, [64, None, 300])\n outputs_1 = network_1(inputs_1)\n self.assertEqual(outputs_1.shape, [64, 128])\n\n hparams = {\n # Conv layers\n \"num_conv_layers\": 2,\n \"filters\": 128,\n \"kernel_size\": [[3, 4, 5], 4],\n # Pooling layers\n \"pooling\": \"AveragePooling\",\n \"pool_size\": [2, None],\n # Dense layers\n \"num_dense_layers\": 1,\n \"dense_size\": 10,\n }\n network = Conv1DNetwork(hparams)\n # nlayers = nconv-pool + nconv + npool + ndense + ndropout + flatten\n self.assertEqual(len(network.layers), 1 + 1 + 1 + 1 + 1 + 1)\n self.assertTrue(isinstance(network.layer_by_name('pool_2'),\n tx.core.AverageReducePooling1D))\n\n inputs = tf.placeholder(tf.float32, [64, None, 300])\n outputs = network(inputs)\n self.assertEqual(outputs.shape, [64, 10])\n\n hparams_2 = {\n # Conv layers\n \"num_conv_layers\": 1,\n \"filters\": 128,\n \"kernel_size\": 4,\n \"other_conv_kwargs\": {'data_format': 'channels_first'},\n # Pooling layers\n \"pooling\": \"MaxPooling\",\n \"other_pool_kwargs\": {'data_format': 'channels_first'},\n # Dense layers\n \"num_dense_layers\": 1,\n \"dense_size\": 10,\n }\n network_2 = Conv1DNetwork(hparams_2)\n inputs_2 = tf.placeholder(tf.float32, [64, 300, None])\n outputs_2 = network_2(inputs_2)\n self.assertEqual(outputs_2.shape, [64, 10])", "def assert_wrappers_equal(first, second):\n assert first.sk_params == second.sk_params\n assert first.history_ == second.history_\n if not first.model_ or not second.model_:\n assert first.model_ == second.model_\n else:\n assert_models_equal(first.model, second.model)", "def test_check_seqs_reverse_primers(self):\r\n\r\n # Initial test, should truncate all seqs\r\n in_seqs = self.in_seqs_reverse_primers\r\n bc_map = self.bc_map_fixed_len_bc1\r\n primer_seq_lens = self.primer_seq_lens_fixed_len_bc1\r\n all_primers = self.all_primers_fixed_len_bc1\r\n expected = self.expected_in_seqs_reverse_primers\r\n rev_primers_test = self.reverse_primers_fixed_len_bc1\r\n\r\n fd, out_fp = mkstemp(prefix=\"sample_seqs_\", suffix=\".fna.tmp\")\r\n close(fd)\r\n out_f = open(out_fp, \"w\")\r\n self._files_to_remove.append(out_f.name.replace('.tmp', ''))\r\n\r\n actual = check_seqs(\r\n fasta_out=out_f,\r\n fasta_files=[in_seqs],\r\n starting_ix=0,\r\n valid_map=bc_map,\r\n qual_mappings={},\r\n filters=[],\r\n barcode_len=12,\r\n keep_primer=False,\r\n keep_barcode=False,\r\n barcode_type=\"golay_12\",\r\n max_bc_errors=1.5,\r\n retain_unassigned_reads=False,\r\n attempt_bc_correction=True,\r\n 
primer_seqs_lens=primer_seq_lens,\r\n all_primers=all_primers,\r\n max_primer_mm=0,\r\n disable_primer_check=False,\r\n reverse_primers='truncate_only',\r\n rev_primers=rev_primers_test,\r\n qual_out=False)\r\n\r\n out_f = open(out_f.name.replace('.tmp', ''), \"U\")\r\n actual_results = '\\n'.join([line.strip() for line in out_f])\r\n\r\n self.assertEqual(actual_results, expected)\r\n\r\n # Second test with a mismatch in seq a, should not find reverse primer\r\n # and will write out entire sequence.\r\n\r\n in_seqs = self.in_seqs_reverse_primers_mismatch\r\n bc_map = self.bc_map_fixed_len_bc1\r\n primer_seq_lens = self.primer_seq_lens_fixed_len_bc1\r\n all_primers = self.all_primers_fixed_len_bc1\r\n expected = self.expected_in_seqs_reverse_primers_mismatch\r\n rev_primers_test = self.reverse_primers_fixed_len_bc1\r\n\r\n fd, out_fp = mkstemp(prefix=\"sample_seqs_\", suffix=\".fna.tmp\")\r\n close(fd)\r\n out_f = open(out_fp, \"w\")\r\n self._files_to_remove.append(out_f.name.replace('.tmp', ''))\r\n\r\n actual = check_seqs(\r\n fasta_out=out_f,\r\n fasta_files=[in_seqs],\r\n starting_ix=0,\r\n valid_map=bc_map,\r\n qual_mappings={},\r\n filters=[],\r\n barcode_len=12,\r\n keep_primer=False,\r\n keep_barcode=False,\r\n barcode_type=\"golay_12\",\r\n max_bc_errors=1.5,\r\n retain_unassigned_reads=False,\r\n attempt_bc_correction=True,\r\n primer_seqs_lens=primer_seq_lens,\r\n all_primers=all_primers,\r\n max_primer_mm=0,\r\n disable_primer_check=False,\r\n reverse_primers='truncate_only',\r\n rev_primers=rev_primers_test,\r\n qual_out=False)\r\n\r\n out_f = open(out_f.name.replace('.tmp', ''), \"U\")\r\n actual_results = '\\n'.join([line.strip() for line in out_f])\r\n\r\n self.assertEqual(actual_results, expected)\r\n\r\n # With reverse_primer_mismatches allowed set to 1,\r\n # should restore truncation.\r\n in_seqs = self.in_seqs_reverse_primers_mismatch\r\n bc_map = self.bc_map_fixed_len_bc1\r\n primer_seq_lens = self.primer_seq_lens_fixed_len_bc1\r\n all_primers = self.all_primers_fixed_len_bc1\r\n expected = self.expected_in_seqs_reverse_primers_mismatch_allowed\r\n rev_primers_test = self.reverse_primers_fixed_len_bc1\r\n\r\n fd, out_fp = mkstemp(prefix=\"sample_seqs_\", suffix=\".fna.tmp\")\r\n close(fd)\r\n out_f = open(out_fp, \"w\")\r\n self._files_to_remove.append(out_f.name.replace('.tmp', ''))\r\n\r\n actual = check_seqs(\r\n fasta_out=out_f,\r\n fasta_files=[in_seqs],\r\n starting_ix=0,\r\n valid_map=bc_map,\r\n qual_mappings={},\r\n filters=[],\r\n barcode_len=12,\r\n keep_primer=False,\r\n keep_barcode=False,\r\n barcode_type=\"golay_12\",\r\n max_bc_errors=1.5,\r\n retain_unassigned_reads=False,\r\n attempt_bc_correction=True,\r\n primer_seqs_lens=primer_seq_lens,\r\n all_primers=all_primers,\r\n max_primer_mm=0,\r\n disable_primer_check=False,\r\n reverse_primers='truncate_only',\r\n rev_primers=rev_primers_test,\r\n qual_out=False,\r\n reverse_primer_mismatches=1)\r\n\r\n out_f = open(out_f.name.replace('.tmp', ''), \"U\")\r\n actual_results = '\\n'.join([line.strip() for line in out_f])\r\n\r\n self.assertEqual(actual_results, expected)\r\n\r\n # Testing truncate_remove, which should not write sequences where\r\n # the reverse primer is not found\r\n in_seqs = self.in_seqs_reverse_primers\r\n bc_map = self.bc_map_fixed_len_bc1\r\n primer_seq_lens = self.primer_seq_lens_fixed_len_bc1\r\n all_primers = self.all_primers_fixed_len_bc1\r\n expected = self.expected_in_seqs_reverse_primers_full_remove\r\n rev_primers_test = self.reverse_primers_fixed_len_bc1\r\n\r\n fd, out_fp 
= mkstemp(prefix=\"sample_seqs_\", suffix=\".fna.tmp\")\r\n close(fd)\r\n out_f = open(out_fp, \"w\")\r\n self._files_to_remove.append(out_f.name.replace('.tmp', ''))\r\n\r\n actual = check_seqs(\r\n fasta_out=out_f,\r\n fasta_files=[in_seqs],\r\n starting_ix=0,\r\n valid_map=bc_map,\r\n qual_mappings={},\r\n filters=[],\r\n barcode_len=12,\r\n keep_primer=False,\r\n keep_barcode=False,\r\n barcode_type=\"golay_12\",\r\n max_bc_errors=1.5,\r\n retain_unassigned_reads=False,\r\n attempt_bc_correction=True,\r\n primer_seqs_lens=primer_seq_lens,\r\n all_primers=all_primers,\r\n max_primer_mm=0,\r\n disable_primer_check=False,\r\n reverse_primers='truncate_remove',\r\n rev_primers=rev_primers_test,\r\n qual_out=False)\r\n\r\n out_f = open(out_f.name.replace('.tmp', ''), \"U\")\r\n actual_results = '\\n'.join([line.strip() for line in out_f])\r\n\r\n self.assertEqual(actual_results, expected)\r\n\r\n # Testing truncate_remove, with reverse_primer_mismatches set to 1\r\n # should allow all 4 sequences to be written, truncated\r\n in_seqs = self.in_seqs_reverse_primers_mismatch\r\n bc_map = self.bc_map_fixed_len_bc1\r\n primer_seq_lens = self.primer_seq_lens_fixed_len_bc1\r\n all_primers = self.all_primers_fixed_len_bc1\r\n expected = self.expected_in_seqs_reverse_primers_mismatch_allowed\r\n rev_primers_test = self.reverse_primers_fixed_len_bc1\r\n\r\n fd, out_fp = mkstemp(prefix=\"sample_seqs_\", suffix=\".fna.tmp\")\r\n close(fd)\r\n out_f = open(out_fp, \"w\")\r\n self._files_to_remove.append(out_f.name.replace('.tmp', ''))\r\n\r\n actual = check_seqs(\r\n fasta_out=out_f,\r\n fasta_files=[in_seqs],\r\n starting_ix=0,\r\n valid_map=bc_map,\r\n qual_mappings={},\r\n filters=[],\r\n barcode_len=12,\r\n keep_primer=False,\r\n keep_barcode=False,\r\n barcode_type=\"golay_12\",\r\n max_bc_errors=1.5,\r\n retain_unassigned_reads=False,\r\n attempt_bc_correction=True,\r\n primer_seqs_lens=primer_seq_lens,\r\n all_primers=all_primers,\r\n max_primer_mm=1,\r\n disable_primer_check=False,\r\n reverse_primers='truncate_remove',\r\n rev_primers=rev_primers_test,\r\n qual_out=False,\r\n reverse_primer_mismatches=1)\r\n\r\n out_f = open(out_f.name.replace('.tmp', ''), \"U\")\r\n actual_results = '\\n'.join([line.strip() for line in out_f])\r\n\r\n self.assertEqual(actual_results, expected)", "def test_add_two_transforms(self):\n tflist = TransformList()\n tflist.add_transform(Transform(1))\n tflist.add_transform(Transform(2))\n self.assertEqual(len(tflist), 2)", "def testCheckpointMiddleOfSequence2(self):\n tp1 = TP(2048, 32, 0.21, 0.5, 11, 20, 0.1, 0.1, 1.0, 0.0, 14, False, 5, 2,\n False, 1960, 0, False, 3, 10, 5, 0, 32, 128, 32, 'normal')\n tp2 = TP(2048, 32, 0.21, 0.5, 11, 20, 0.1, 0.1, 1.0, 0.0, 14, False, 5, 2,\n False, 1960, 0, False, 3, 10, 5, 0, 32, 128, 32, 'normal')\n\n with open(resource_filename(__name__, 'data/tp_input.csv'), 'r') as fin:\n reader = csv.reader(fin)\n records = []\n for bottomUpInStr in fin:\n bottomUpIn = numpy.array(eval('[' + bottomUpInStr.strip() + ']'),\n dtype='int32')\n records.append(bottomUpIn)\n\n i = 1\n for r in records[:250]:\n print i\n i += 1\n output1 = tp1.compute(r, True, True)\n output2 = tp2.compute(r, True, True)\n self.assertTrue(numpy.array_equal(output1, output2))\n\n print 'Serializing and deserializing models.'\n\n savePath1 = os.path.join(self._tmpDir, 'tp1.bin')\n tp1.saveToFile(savePath1)\n tp3 = pickle.loads(pickle.dumps(tp1))\n tp3.loadFromFile(savePath1)\n\n savePath2 = os.path.join(self._tmpDir, 'tp2.bin')\n tp2.saveToFile(savePath2)\n tp4 = 
pickle.loads(pickle.dumps(tp2))\n tp4.loadFromFile(savePath2)\n\n self.assertTPsEqual(tp1, tp3)\n self.assertTPsEqual(tp2, tp4)\n\n for r in records[250:]:\n print i\n i += 1\n out1 = tp1.compute(r, True, True)\n out2 = tp2.compute(r, True, True)\n out3 = tp3.compute(r, True, True)\n out4 = tp4.compute(r, True, True)\n\n self.assertTrue(numpy.array_equal(out1, out2))\n self.assertTrue(numpy.array_equal(out1, out3))\n self.assertTrue(numpy.array_equal(out1, out4))\n\n self.assertTPsEqual(tp1, tp2)\n self.assertTPsEqual(tp1, tp3)\n self.assertTPsEqual(tp2, tp4)", "def test_sequence_to_moltype(self):\n s = Sequence(\"TTTTTTTTTTAAAA\", name=\"test1\")\n annot1 = s.add_annotation(Feature, \"exon\", \"fred\", [(0, 10)])\n annot2 = s.add_annotation(Feature, \"exon\", \"trev\", [(10, 14)])\n got = s.to_moltype(\"rna\")\n annot1_slice = str(annot1.get_slice())\n annot2_slice = str(annot2.get_slice())\n got1_slice = str(got.annotations[0].get_slice())\n got2_slice = str(got.annotations[1].get_slice())\n self.assertNotEqual(annot1_slice, got1_slice)\n self.assertEqual(annot2_slice, got2_slice)\n self.assertEqual(got.moltype.label, \"rna\")\n self.assertEqual(got.name, \"test1\")\n\n s = Sequence(\"AAGGGGAAAACCCCCAAAAAAAAAATTTTTTTTTTAAA\", name=\"test2\")\n xx_y = [[[2, 6], 2.4], [[10, 15], 5.1], [[25, 35], 1.3]]\n y_valued = s.add_annotation(Variable, \"SNP\", \"freq\", xx_y)\n got = s.to_moltype(\"rna\")\n y_valued_slice = str(y_valued.get_slice())\n got_slice = str(str(got.annotations[0].get_slice()))\n self.assertNotEqual(y_valued_slice, got_slice)\n self.assertEqual(got.moltype.label, \"rna\")\n self.assertEqual(got.name, \"test2\")\n\n s = Sequence(\"TTTTTTTTTTAAAAAAAAAA\", name=\"test3\")\n data = [i for i in range(20)]\n annot4 = s.add_annotation(SimpleVariable, \"SNP\", \"freq\", data)\n got = s.to_moltype(RNA)\n annot4_slice = str(annot4.get_slice())\n got_slice = str(str(got.annotations[0].get_slice()))\n self.assertNotEqual(annot4_slice[:10], got_slice[:10])\n self.assertEqual(annot4_slice[10:20], got_slice[10:20])\n self.assertEqual(got.moltype.label, \"rna\")\n self.assertEqual(got.name, \"test3\")\n\n # calling with a null object should raise an exception\n with self.assertRaises(ValueError):\n s.to_moltype(None)\n\n with self.assertRaises(ValueError):\n s.to_moltype(\"\")", "def assertSequenceEqual(self, seq1, seq2,\r\n msg=None, seq_type=None, max_diff=80*8):\r\n if seq_type is not None:\r\n seq_type_name = seq_type.__name__\r\n if not isinstance(seq1, seq_type):\r\n raise self.failureException('First sequence is not a %s: %s'\r\n % (seq_type_name, safe_repr(seq1)))\r\n if not isinstance(seq2, seq_type):\r\n raise self.failureException('Second sequence is not a %s: %s'\r\n % (seq_type_name, safe_repr(seq2)))\r\n else:\r\n seq_type_name = \"sequence\"\r\n\r\n differing = None\r\n try:\r\n len1 = len(seq1)\r\n except (TypeError, NotImplementedError):\r\n differing = 'First %s has no length. Non-sequence?' % (\r\n seq_type_name)\r\n\r\n if differing is None:\r\n try:\r\n len2 = len(seq2)\r\n except (TypeError, NotImplementedError):\r\n differing = 'Second %s has no length. Non-sequence?' 
% (\r\n seq_type_name)\r\n\r\n if differing is None:\r\n if seq1 == seq2:\r\n return\r\n\r\n seq1_repr = repr(seq1)\r\n seq2_repr = repr(seq2)\r\n if len(seq1_repr) > 30:\r\n seq1_repr = seq1_repr[:30] + '...'\r\n if len(seq2_repr) > 30:\r\n seq2_repr = seq2_repr[:30] + '...'\r\n elements = (seq_type_name.capitalize(), seq1_repr, seq2_repr)\r\n differing = '%ss differ: %s != %s\\n' % elements\r\n\r\n for i in xrange(min(len1, len2)):\r\n try:\r\n item1 = seq1[i]\r\n except (TypeError, IndexError, NotImplementedError):\r\n differing += ('\\nUnable to index element %d of first %s\\n' %\r\n (i, seq_type_name))\r\n break\r\n\r\n try:\r\n item2 = seq2[i]\r\n except (TypeError, IndexError, NotImplementedError):\r\n differing += ('\\nUnable to index element %d of second %s\\n' %\r\n (i, seq_type_name))\r\n break\r\n\r\n if item1 != item2:\r\n differing += ('\\nFirst differing element %d:\\n%s\\n%s\\n' %\r\n (i, item1, item2))\r\n break\r\n else:\r\n if (len1 == len2 and seq_type is None and\r\n type(seq1) != type(seq2)):\r\n # The sequences are the same, but have differing types.\r\n return\r\n\r\n if len1 > len2:\r\n differing += ('\\nFirst %s contains %d additional '\r\n 'elements.\\n' % (seq_type_name, len1 - len2))\r\n try:\r\n differing += ('First extra element %d:\\n%s\\n' %\r\n (len2, seq1[len2]))\r\n except (TypeError, IndexError, NotImplementedError):\r\n differing += ('Unable to index element %d '\r\n 'of first %s\\n' % (len2, seq_type_name))\r\n elif len1 < len2:\r\n differing += ('\\nSecond %s contains %d additional '\r\n 'elements.\\n' % (seq_type_name, len2 - len1))\r\n try:\r\n differing += ('First extra element %d:\\n%s\\n' %\r\n (len1, seq2[len1]))\r\n except (TypeError, IndexError, NotImplementedError):\r\n differing += ('Unable to index element %d '\r\n 'of second %s\\n' % (len1, seq_type_name))\r\n standardMsg = differing\r\n diffMsg = '\\n' + '\\n'.join(\r\n difflib.ndiff(pprint.pformat(seq1).splitlines(),\r\n pprint.pformat(seq2).splitlines()))\r\n\r\n standardMsg = self._truncateMessage(standardMsg, diffMsg)\r\n msg = self._formatMessage(msg, standardMsg)\r\n self.fail(msg)", "def test_combine_nsamples_different_shapes():\n test_sample_1 = np.ones((2, 13, 21))\n test_sample_2 = np.ones((3, 13, 21))\n pytest.raises(ValueError, utils.combine_nsamples, test_sample_1, test_sample_2)", "def test_transform(self):\n t = Compose([Enumerate([2, \"asfa\", \"ipsi\"]), OneHotEncode(3)], \"categorical\")\n assert numpy.all(t.transform(2) == numpy.array((1.0, 0.0, 0.0)))\n assert numpy.all(t.transform(\"asfa\") == numpy.array((0.0, 1.0, 0.0)))\n assert numpy.all(t.transform(\"ipsi\") == numpy.array((0.0, 0.0, 1.0)))\n\n with pytest.raises(KeyError):\n t.transform(\"aafdasfa\")\n\n assert numpy.all(\n t.transform([[\"ipsi\", \"asfa\"], [2, \"ipsi\"]])\n == numpy.array(\n [[(0.0, 0.0, 1.0), (0.0, 1.0, 0.0)], [(1.0, 0.0, 0.0), (0.0, 0.0, 1.0)]]\n )\n )\n\n t = Compose([Enumerate([2, \"asfa\"]), OneHotEncode(2)], \"categorical\")\n assert t.transform(2) == 0.0\n assert t.transform(\"asfa\") == 1.0\n with pytest.raises(KeyError):\n t.transform(\"ipsi\")\n assert numpy.all(\n t.transform([[\"asfa\", \"asfa\"], [2, \"asfa\"]])\n == numpy.array([[1.0, 1.0], [0.0, 1.0]])\n )\n\n # for the crazy enough\n t = Compose([Enumerate([2]), OneHotEncode(1)], \"categorical\")\n assert t.transform(2) == 0.0\n with pytest.raises(KeyError):\n t.transform(\"ipsi\")\n assert numpy.all(t.transform([[2, 2], [2, 2]]) == [[0, 0], [0, 0]])", "def test_transform(self):\n t = Identity()\n assert t.transform(\"yo\") == 
\"yo\"", "def test_transformer_forward_shape(self, input_vocab_size, output_vocab_size):\n self._test_transformer_forward_shape(input_vocab_size, output_vocab_size)", "def test_similarity_measure_size_compatibility():\n\n patch1 = torch.randn(size=(4, 6, 2))\n patch2 = torch.randn(size=(4, 6, 2))\n\n ssd_similarity_measure(patch1, patch2)\n sad_similarity_measure(patch1, patch2)\n assert True # just check if the ssd calculation was successfull\n\n patch1 = torch.randn(size=(4, 3))\n patch2 = torch.randn(size=(4, 3))\n\n ssd_similarity_measure(patch1, patch2)\n sad_similarity_measure(patch1, patch2)\n assert True # just check if the ssd calculation was successfull\n\n patch1 = torch.randn(size=(5,))\n patch2 = torch.randn(size=(5,))\n\n ssd_similarity_measure(patch1, patch2)\n sad_similarity_measure(patch1, patch2)\n assert True # just check if the ssd calculation was successfull\n\n patch1 = torch.randn(size=(3, 7, 2, 4))\n patch2 = torch.randn(size=(3, 7, 2, 4))\n\n ssd_similarity_measure(patch1, patch2)\n sad_similarity_measure(patch1, patch2)\n assert True # just check if the ssd calculation was successful", "def test_make_sampled_format(self):\n for num_inputs in [1, 3]:\n for num_outputs in [1, 2, 4]:\n for num_time_steps in [4, 10, 12]:\n # Generate data\n # P=2 format [0, 1, 2, 3, ...]\n sample_interval = 2\n dt_system = np.random.random()\n dt_sample = sample_interval * dt_system\n outputs = np.random.random(\n (num_time_steps, num_outputs, num_inputs))\n time_steps = make_time_steps(\n num_time_steps, sample_interval)\n time_values = time_steps * dt_system\n\n # Compute using modred\n my_ERA = era.ERA()\n time_steps_computed, outputs_computed =\\\n era.make_sampled_format(time_values, outputs)\n #self.assertEqual(dt_system_computed, dt_system)\n\n # Reference values\n num_time_steps_true = (num_time_steps - 1) * 2\n time_steps_true = make_time_steps(num_time_steps_true, 1)\n outputs_true = np.zeros(\n (num_time_steps_true, num_outputs, num_inputs))\n outputs_true[::2] = outputs[:-1]\n outputs_true[1::2] = outputs[1:]\n\n # Compare values\n np.testing.assert_equal(\n time_steps_computed, time_steps_true)\n np.testing.assert_equal(outputs_computed, outputs_true)\n\n # Test that if there is a wrong time value, get an error\n time_values[num_time_steps // 2] = -1\n self.assertRaises(\n ValueError, era.make_sampled_format, time_values,\n outputs)", "def test_yield_value(self):\n msg = 'Must be an iterable which yield sequences in order.'\n examples = (\n [\n 'Hello',\n 'World',\n 'Hello World',\n ],\n [\n 'Mario use Kimura Lock on Luigi, and Luigi tap out.',\n 'Mario use Superman Punch.',\n 'Luigi get TKO.',\n 'Toad and Toadette are fightting over mushroom (weed).',\n ],\n [''],\n [],\n )\n\n for batch_sequences in examples:\n dataset = LanguageModelDataset(batch_sequences=batch_sequences)\n self.assertIsInstance(dataset, Iterable, msg=msg)\n\n for ans_sequence, sequence in zip(batch_sequences, dataset):\n self.assertIsInstance(sequence, str, msg=msg)\n self.assertEqual(sequence, ans_sequence, msg=msg)", "def test_transform(self):\n t = Reverse(Quantize())\n assert t.transform(9) == 9.0\n assert t.transform(5) == 5.0\n assert numpy.all(t.transform([9, 5]) == numpy.array([9.0, 5.0], dtype=float))", "def test_return_self(self):\n\n df = d.create_df_2()\n\n x = ScalingTransformer(columns=[\"a\"], scaler=\"standard\")\n\n x_fitted = x.fit(df)\n\n assert (\n x_fitted is x\n ), \"return value from ScalingTransformer.fit not as expected (self).\"", "def assert_same_size(sequences):\n seq_size 
= len(sequences[0])\n for seq in sequences:\n if len(seq) != seq_size:\n raise SizeError", "def test_equal15():\n x = np.array([[[[[[True, False, True], [True, False, True], [True, False, True]]]]]])\n y = x\n res = np.equal(x, y)\n obj.run(res=res, x=x, y=y)", "def test_check_seqs_added_demultiplex(self):\r\n\r\n # Test added demultiplex for the run_prefix\r\n in_seqs = self.in_seqs_added_demultiplex\r\n bc_map = self.bc_map_added_demultiplex\r\n primer_seq_lens = self.primer_seq_lens_fixed_len_bc1\r\n all_primers = self.all_primers_fixed_len_bc1\r\n expected = self.expected_fasta_fixed_added_demultiplex\r\n\r\n fd, out_fp = mkstemp(prefix=\"sample_seqs_\", suffix=\".fna.tmp\")\r\n close(fd)\r\n out_f = open(out_fp, \"w\")\r\n self._files_to_remove.append(out_f.name.replace('.tmp', ''))\r\n\r\n actual = check_seqs(\r\n fasta_out=out_f,\r\n fasta_files=[in_seqs],\r\n starting_ix=0,\r\n valid_map=bc_map,\r\n qual_mappings={},\r\n filters=[],\r\n barcode_len=12,\r\n keep_primer=False,\r\n keep_barcode=False,\r\n barcode_type=\"golay_12\",\r\n max_bc_errors=1.5,\r\n retain_unassigned_reads=False,\r\n attempt_bc_correction=True,\r\n primer_seqs_lens=primer_seq_lens,\r\n all_primers=all_primers,\r\n max_primer_mm=0,\r\n disable_primer_check=False,\r\n reverse_primers='disable',\r\n rev_primers={},\r\n qual_out=False,\r\n added_demultiplex_field='run_prefix')\r\n\r\n out_f = open(out_f.name.replace('.tmp', ''), \"U\")\r\n actual_results = '\\n'.join([line.strip() for line in out_f])\r\n\r\n self.assertEqual(actual_results, expected)\r\n\r\n # Demultiplex by the 'group' in the fasta label\r\n in_seqs = self.in_seqs_added_demultiplex\r\n bc_map = self.bc_map_added_demultiplex_group\r\n primer_seq_lens = self.primer_seq_lens_fixed_len_bc1\r\n all_primers = self.all_primers_fixed_len_bc1\r\n expected = self.expected_fasta_added_demultiplex_group\r\n\r\n fd, out_fp = mkstemp(prefix=\"sample_seqs_\", suffix=\".fna.tmp\")\r\n close(fd)\r\n out_f = open(out_fp, \"w\")\r\n self._files_to_remove.append(out_f.name.replace('.tmp', ''))\r\n\r\n actual = check_seqs(\r\n fasta_out=out_f,\r\n fasta_files=[in_seqs],\r\n starting_ix=0,\r\n valid_map=bc_map,\r\n qual_mappings={},\r\n filters=[],\r\n barcode_len=12,\r\n keep_primer=False,\r\n keep_barcode=False,\r\n barcode_type=\"golay_12\",\r\n max_bc_errors=1.5,\r\n retain_unassigned_reads=False,\r\n attempt_bc_correction=True,\r\n primer_seqs_lens=primer_seq_lens,\r\n all_primers=all_primers,\r\n max_primer_mm=0,\r\n disable_primer_check=False,\r\n reverse_primers='disable',\r\n rev_primers={},\r\n qual_out=False,\r\n added_demultiplex_field='group')\r\n\r\n out_f = open(out_f.name.replace('.tmp', ''), \"U\")\r\n actual_results = '\\n'.join([line.strip() for line in out_f])\r\n\r\n self.assertEqual(actual_results, expected)", "def test_testGenerator():\n\n # check type\n assert isinstance(testset, list)\n\n # check the shape\n assert len(testset)==newObs.shape[0]", "def test_append_transform(self):\n\n # Default GroupTransform length should be 0 without append.\n self.assertEqual(self.group_tr.__len__(), 0)\n\n matrix_tr = OCIO.MatrixTransform()\n ff_tr = OCIO.FixedFunctionTransform()\n\n self.group_tr.appendTransform(matrix_tr)\n self.group_tr.appendTransform(ff_tr)\n\n self.assertEqual(self.group_tr.__len__(), 2)\n\n iterator = self.group_tr.__iter__()\n for i in [matrix_tr, ff_tr]:\n self.assertEqual(i, next(iterator))", "def test_forward_pass(self):\n ex = self._create_example()\n decoder_input_fn = FixedDecoderInputs(\n inputs=tf.convert_to_tensor(\n 
ex.target, dtype=tf.float32),\n sequence_length=tf.convert_to_tensor(\n ex.target_len, dtype=tf.int32))\n\n model = self.create_model()\n decoder_output = model.encode_decode(\n source=tf.convert_to_tensor(\n ex.source, dtype=tf.float32),\n source_len=tf.convert_to_tensor(\n ex.source_len, dtype=tf.int32),\n decoder_input_fn=decoder_input_fn,\n target_len=tf.convert_to_tensor(\n ex.target_len, dtype=tf.int32))\n\n with self.test_session() as sess:\n sess.run(tf.global_variables_initializer())\n decoder_output_ = sess.run(decoder_output)\n\n max_decode_length = model.params[\"target.max_seq_len\"]\n expected_decode_len = np.minimum(ex.target_len, max_decode_length)\n\n # Assert shapes are correct\n np.testing.assert_array_equal(decoder_output_.logits.shape, [\n self.batch_size, np.max(expected_decode_len),\n model.target_vocab_info.total_size\n ])\n np.testing.assert_array_equal(\n decoder_output_.predictions.shape,\n [self.batch_size, np.max(expected_decode_len)])", "def test_casting_without_iterable(test_fixture, test_input, expected):\n test_fixture.cast_prop = test_input\n assert test_input == test_fixture.cast_prop == expected\n assert type(test_fixture.cast_prop) == type(expected)", "def test_equal13():\n x = np.array([[[[[[True, False, True], [True, False, True], [True, False, True]]]]]])\n y = np.array([[True, False, True], [True, False, True], [True, False, True]])\n res = np.equal(x, y)\n obj.run(res=res, x=x, y=y)", "def test_super_transform_called(self, mocker):\n\n df = d.create_df_2()\n\n x = ScalingTransformer(columns=[\"a\"], scaler=\"standard\")\n\n x.fit(df)\n\n expected_call_args = {0: {\"args\": (d.create_df_2(),), \"kwargs\": {}}}\n\n with h.assert_function_call(\n mocker,\n tubular.base.BaseTransformer,\n \"transform\",\n expected_call_args,\n return_value=d.create_df_2(),\n ):\n\n x.transform(df)", "def test_random_transform():\n # given\n train = pd.read_csv('source/train.csv')\n train['labels'] = train['labels'].map(ast.literal_eval)\n image_path = os.path.join('source', train.iloc[0].path)\n all_labels = train.iloc[0]['labels']\n for label in all_labels:\n if label['class'] == 'whiteboard':\n break\n xn = [int(float(x)) for x in label['xn'].split(';')][:4]\n yn = [int(float(y)) for y in label['yn'].split(';')][:4]\n labels = np.zeros((4, 2))\n for i in range(4):\n labels[i, 0] = xn[i]\n labels[i, 1] = yn[i]\n img = cv2.imread(image_path)\n kw = dict(rotation_range=15,\n height_shift_range=0.2,\n width_shift_range=0.2,\n shear_range=0.3,\n channel_shift_range=0.2,\n horizontal_flip=True,\n vertical_flip=True,\n dim_ordering='tf',\n seed=1313)\n # when\n rimg, rlabels = image_generator.random_transform(img, labels, **kw)\n\n # then just assert transformation isn't changed much\n assert MultiPoint([[224.91875347, 58.05657097],\n [673.57648317, 189.27244333],\n [544.23308452, 381.12743459],\n [70.73339963, 312.7359806]]\n ).equals_exact(rlabels, 5)", "def test_equal6():\n x = randtool(\"float\", -10, 10, [3, 3, 3, 1])\n y = randtool(\"float\", -10, 10, [3, 3, 1])\n res = np.equal(x, y)\n obj.run(res=res, x=x, y=y)", "def test_copy(self):\n s = Sequence(\"TTTTTTTTTTAAAA\", name=\"test_copy\")\n annot1 = s.add_annotation(Feature, \"exon\", \"annot1\", [(0, 10)])\n annot2 = s.add_annotation(Feature, \"exon\", \"annot2\", [(10, 14)])\n got = s.copy()\n got_annot1 = got.get_annotations_matching(\n annotation_type=\"exon\", name=\"annot1\"\n )[0]\n got_annot2 = got.get_annotations_matching(\n annotation_type=\"exon\", name=\"annot2\"\n )[0]\n self.assertIsNot(got, s)\n 
self.assertIsNot(got_annot1, annot1)\n self.assertIsNot(got_annot2, annot2)\n self.assertEqual(got.name, s.name)\n self.assertEqual(got.info, s.info)\n self.assertEqual(got._seq, s._seq)\n self.assertEqual(got.moltype, s.moltype)\n annot1_slice = str(annot1.get_slice())\n annot2_slice = str(annot2.get_slice())\n got1_slice = str(got.annotations[0].get_slice())\n got2_slice = str(got.annotations[1].get_slice())\n self.assertEqual(annot1_slice, got1_slice)\n self.assertEqual(annot2_slice, got2_slice)", "def test_instrument_mixed_streaming_batch(self):\n # Create the pipeline that will be instrumented.\n from apache_beam.options.pipeline_options import StandardOptions\n options = StandardOptions(streaming=True)\n p_original = beam.Pipeline(interactive_runner.InteractiveRunner(), options)\n streaming_cache_manager = StreamingCache(cache_dir=None)\n ie.current_env().set_cache_manager(streaming_cache_manager, p_original)\n source_1 = p_original | 'source1' >> beam.io.ReadFromPubSub(\n subscription='projects/fake-project/subscriptions/fake_sub')\n source_2 = p_original | 'source2' >> beam.Create([1, 2, 3, 4, 5])\n\n # pylint: disable=possibly-unused-variable\n pcoll_1 = ((source_1, source_2)\n | beam.Flatten()\n | 'square1' >> beam.Map(lambda x: x * x))\n\n # Watch but do not cache the PCollections.\n ib.watch(locals())\n # This should be noop.\n utils.watch_sources(p_original)\n self._mock_write_cache(\n p_original, [], self.cache_key_of('source_2', source_2))\n ie.current_env().mark_pcollection_computed([source_2])\n\n # Instrument the original pipeline to create the pipeline the user will see.\n p_copy = beam.Pipeline.from_runner_api(\n p_original.to_runner_api(),\n runner=interactive_runner.InteractiveRunner(),\n options=options)\n ie.current_env().add_derived_pipeline(p_original, p_copy)\n instrumenter = instr.build_pipeline_instrument(p_copy)\n actual_pipeline = beam.Pipeline.from_runner_api(\n proto=instrumenter.instrumented_pipeline_proto(),\n runner=interactive_runner.InteractiveRunner(),\n options=options)\n\n # Now, build the expected pipeline which replaces the unbounded source with\n # a TestStream.\n source_1_cache_key = self.cache_key_of('source_1', source_1)\n source_2_cache_key = self.cache_key_of('source_2', source_2)\n p_expected = beam.Pipeline()\n ie.current_env().set_cache_manager(streaming_cache_manager, p_expected)\n test_stream = (\n p_expected\n | TestStream(output_tags=[source_1_cache_key, source_2_cache_key]))\n # pylint: disable=expression-not-assigned\n ((\n test_stream[self.cache_key_of('source_1', source_1)],\n test_stream[self.cache_key_of('source_2', source_2)])\n | beam.Flatten()\n | 'square1' >> beam.Map(lambda x: x * x)\n | 'reify' >> beam.Map(lambda _: _)\n | cache.WriteCache(\n ie.current_env().get_cache_manager(p_expected), 'unused'))\n\n # Test that the TestStream is outputting to the correct PCollection.\n class TestStreamVisitor(PipelineVisitor):\n def __init__(self):\n self.output_tags = set()\n\n def enter_composite_transform(self, transform_node):\n self.visit_transform(transform_node)\n\n def visit_transform(self, transform_node):\n transform = transform_node.transform\n if isinstance(transform, TestStream):\n self.output_tags = transform.output_tags\n\n v = TestStreamVisitor()\n actual_pipeline.visit(v)\n expected_output_tags = set([source_1_cache_key, source_2_cache_key])\n actual_output_tags = v.output_tags\n self.assertSetEqual(expected_output_tags, actual_output_tags)\n\n # Test that the pipeline is as expected.\n assert_pipeline_proto_equal(\n 
self,\n p_expected.to_runner_api(),\n instrumenter.instrumented_pipeline_proto())", "def test_multiple_rng(self):\r\n rng1 = RandomStreams(1234)\r\n rng2 = RandomStreams(2392)\r\n assert rng1.random_state_variables is not rng2.random_state_variables", "def test_inverse_transform(self):", "def testMulti(self):\n affineClass = xyTransformRegistry[\"affine\"]\n wrapper0 = OneXYTransformConfig()\n wrapper0.transform.retarget(affineClass)\n affineConfig0 = wrapper0.transform\n affineConfig0.translation = (-2.1, 3.4)\n rotAng = 0.832 # radians\n xScale = 3.7\n yScale = 45.3\n affineConfig0.linear = (\n math.cos(rotAng) * xScale, math.sin(rotAng) * yScale,\n -math.sin(rotAng) * xScale, math.cos(rotAng) * yScale,\n )\n\n wrapper1 = OneXYTransformConfig()\n wrapper1.transform.retarget(affineClass)\n affineConfig1 = wrapper1.transform\n affineConfig1.translation = (26.5, -35.1)\n rotAng = -0.25 # radians\n xScale = 1.45\n yScale = 0.9\n affineConfig1.linear = (\n math.cos(rotAng) * xScale, math.sin(rotAng) * yScale,\n -math.sin(rotAng) * xScale, math.cos(rotAng) * yScale,\n )\n\n multiClass = xyTransformRegistry[\"multi\"]\n multiConfig = multiClass.ConfigClass()\n multiConfig.transformDict = {\n 0: wrapper0,\n 1: wrapper1,\n }\n with lsst.utils.tests.getTempFilePath(\".py\") as filePath:\n self.checkConfig(multiClass, multiConfig, filePath)\n multiXYTransform = multiClass(multiConfig)\n\n affine0 = affineClass(affineConfig0)\n affine1 = affineClass(affineConfig1)\n transformList = (affine0, affine1)\n refMultiXYTransform = RefMultiXYTransform(transformList)\n\n self.checkBasics(refMultiXYTransform)\n\n for fromPoint in self.fromIter():\n toPoint = multiXYTransform.forwardTransform(fromPoint)\n predToPoint = refMultiXYTransform.forwardTransform(fromPoint)\n for i in range(2):\n self.assertAlmostEqual(toPoint[i], predToPoint[i])", "def test0(self):\r\n a = T.constant(2.5)\r\n b = T.constant(numpy.asarray([[[0.5]]]))\r\n b2 = b.dimshuffle()\r\n assert b2.ndim == 0\r\n d_a = T.DimShuffle([], [])(a)\r\n d_b = T.DimShuffle([True, True, True], [0, 2, 1])(b)\r\n d_a2 = T.DimShuffle([], ['x', 'x', 'x'])(a)\r\n\r\n self.assertTrue(_as_scalar(a) == a)\r\n self.assertTrue(_as_scalar(b) != b)\r\n self.assertTrue(_as_scalar(d_a) != d_a)\r\n self.assertTrue(_as_scalar(d_b) != d_b)\r\n self.assertTrue(_as_scalar(d_a2) != d_a2)", "def test_random_state_transfer():\r\n class Graph:\r\n def __init__(self, seed=123):\r\n self.rng = MRG_RandomStreams(seed)\r\n self.y = self.rng.uniform(size=(1,))\r\n g1 = Graph(seed=123)\r\n f1 = theano.function([], g1.y)\r\n g2 = Graph(seed=987)\r\n f2 = theano.function([], g2.y)\r\n\r\n g2.rng.rstate = g1.rng.rstate\r\n for (su1, su2) in zip(g1.rng.state_updates, g2.rng.state_updates):\r\n su2[0].set_value(su1[0].get_value())\r\n\r\n numpy.testing.assert_array_almost_equal(f1(), f2(), decimal=6)", "def test_sequence(self, output, input_):\n input_ = \"\\n\".join(input_)\n g = Genes(input_)\n s = Sequence(genes=g, ages=g.size)\n s.run()\n self.assertEquals(s.population.get_survivor(Sequence.IMPOSSIBLE),\n output)", "def test_equal14():\n x = np.array([[[[[[True, False, True], [True, False, True], [True, False, True]]]]]])\n y = np.array([[True, False, True], [True, False, True], [True, False, True]])\n res = np.equal(x, y)\n obj.run(res=res, x=x, y=y)", "def sequence_equals(sequence1, sequence2):\n assert len(sequence1) == len(sequence2), (len(sequence1), len(sequence2))\n for item_from_s1, item_from_s2 in zip(sequence1, sequence2):\n assert item_from_s1 == item_from_s2, 
(item_from_s1, item_from_s2)\n\n return True", "def test_output_activation_return_return_final_seq_only_off():\n RANDOM_ITERATIONS = 20\n input_dim = 100\n for _ in range(RANDOM_ITERATIONS):\n data = torch.randn((25, 10, 100))\n RNN_instance = RNN(layers_info=[[\"lstm\", 20], [\"gru\", 5], [\"linear\", 10], [\"linear\", 3]],\n hidden_activations=\"relu\", input_dim=input_dim, return_final_seq_only=False,\n output_activation=\"relu\", initialiser=\"xavier\", batch_norm=True)\n out = RNN_instance.forward(data)\n assert all(out.reshape(1, -1).squeeze() >= 0)\n\n RNN_instance = RNN(layers_info=[[\"lstm\", 20], [\"gru\", 5]],\n hidden_activations=\"relu\", input_dim=input_dim, return_final_seq_only=False,\n output_activation=\"relu\", initialiser=\"xavier\")\n out = RNN_instance.forward(data)\n assert all(out.reshape(1, -1).squeeze() >= 0)\n\n RNN_instance = RNN(layers_info=[[\"lstm\", 20], [\"gru\", 5], [\"linear\", 10], [\"linear\", 3]],\n hidden_activations=\"relu\", input_dim=input_dim, return_final_seq_only=False,\n output_activation=\"relu\", initialiser=\"xavier\")\n out = RNN_instance.forward(data)\n assert all(out.reshape(1, -1).squeeze() >= 0)\n\n RNN_instance = RNN(layers_info=[[\"lstm\", 20], [\"gru\", 5], [\"linear\", 10], [\"linear\", 3]],\n hidden_activations=\"relu\", input_dim=input_dim, return_final_seq_only=False,\n output_activation=\"sigmoid\", initialiser=\"xavier\")\n out = RNN_instance.forward(data)\n assert all(out.reshape(1, -1).squeeze() >= 0)\n assert all(out.reshape(1, -1).squeeze() <= 1)\n summed_result = torch.sum(out, dim=2)\n assert all(summed_result.reshape(1, -1).squeeze() != 1.0)\n\n\n RNN_instance = RNN(layers_info=[[\"lstm\", 20], [\"gru\", 5], [\"linear\", 10], [\"linear\", 3]],\n hidden_activations=\"relu\", input_dim=input_dim, return_final_seq_only=False,\n output_activation=\"softmax\", initialiser=\"xavier\")\n out = RNN_instance.forward(data)\n assert all(out.reshape(1, -1).squeeze() >= 0)\n assert all(out.reshape(1, -1).squeeze() <= 1)\n summed_result = torch.sum(out, dim=2)\n summed_result = summed_result.reshape(1, -1).squeeze()\n summed_result = torch.round( (summed_result * 10 ** 5) / (10 ** 5))\n assert all( summed_result == 1.0)\n\n RNN_instance = RNN(layers_info=[[\"lstm\", 20], [\"gru\", 5], [\"lstm\", 25]],\n hidden_activations=\"relu\", input_dim=input_dim, return_final_seq_only=False,\n output_activation=\"softmax\", initialiser=\"xavier\")\n out = RNN_instance.forward(data)\n assert all(out.reshape(1, -1).squeeze() >= 0)\n assert all(out.reshape(1, -1).squeeze() <= 1)\n summed_result = torch.sum(out, dim=2)\n summed_result = summed_result.reshape(1, -1).squeeze()\n summed_result = torch.round( (summed_result * 10 ** 5) / (10 ** 5))\n\n\n\n assert all( summed_result == 1.0)\n\n RNN_instance = RNN(layers_info=[[\"lstm\", 20], [\"gru\", 5], [\"lstm\", 25]],\n hidden_activations=\"relu\", input_dim=input_dim, return_final_seq_only=False,\n initialiser=\"xavier\")\n out = RNN_instance.forward(data)\n assert not all(out.reshape(1, -1).squeeze() >= 0)\n\n assert not all(out.reshape(1, -1).squeeze() <= 0)\n summed_result = torch.sum(out, dim=2)\n summed_result = summed_result.reshape(1, -1).squeeze()\n summed_result = torch.round( (summed_result * 10 ** 5) / (10 ** 5))\n assert not all( summed_result == 1.0)\n\n RNN_instance = RNN(layers_info=[[\"lstm\", 20], [\"gru\", 5], [\"lstm\", 25], [\"linear\", 8]],\n hidden_activations=\"relu\", input_dim=input_dim, return_final_seq_only=False,\n initialiser=\"xavier\")\n out = 
RNN_instance.forward(data)\n assert not all(out.reshape(1, -1).squeeze() >= 0)\n assert not all(out.reshape(1, -1).squeeze() <= 0)\n summed_result = torch.sum(out, dim=2)\n summed_result = summed_result.reshape(1, -1).squeeze()\n summed_result = torch.round( (summed_result * 10 ** 5) / (10 ** 5))\n assert not all( summed_result == 1.0)", "def test2(self):\r\n a = T.dscalar()\r\n d_a = T.DimShuffle([], [])(a)\r\n d_a2 = T.DimShuffle([], ['x', 'x'])(a)\r\n\r\n self.assertTrue(_as_scalar(a) is a)\r\n self.assertTrue(_as_scalar(d_a) is a)\r\n self.assertTrue(_as_scalar(d_a2) is a)", "def test_pipeline_transform_with_sample(testbed: SparkETLTests):\n # Given - getting the input dataframes\n inc_df: DataFrame = testbed.dataframes['page_views']\n prev_df: DataFrame = testbed.dataframes['soyel_db.user_pageviews']\n # getting the expected dataframe\n expected_data: DataFrame = testbed.dataframes['expected_output_user_pageviews']\n # When - actual data\n transformed_data: DataFrame = pipeline.transform(inc_df=inc_df,\n prev_df=prev_df,\n config=testbed.config,\n logger=testbed.logger)\n # Then - comparing the actual and expected data\n testbed.comapare_dataframes(df1=transformed_data, df2=expected_data)", "def test_transform_default(self):\n result = transform((1, 2))\n self.assertEqual(result, (2 * PIXEL, 1 * PIXEL))", "def test_equal4():\n x = np.array([[1, 2, 3]])\n y = np.array([[[[1, 2, 3], [1, 2, 3], [1, 2, 3]]]])\n res = np.equal(x, y)\n obj.run(res=res, x=x, y=y)", "def test_mixed():\n # assert the distribution of the samples is close to the distribution of the data\n # using a kstest for continuous + a cstest for categorical.", "def test_equal7():\n x = randtool(\"float\", -10, 10, [3, 3, 1])\n y = randtool(\"float\", -10, 10, [3, 3, 3, 1])\n res = np.equal(x, y)\n obj.run(res=res, x=x, y=y)", "def test_case_1(self):\n print(\"-------------------shuffle-----------------------------------\")\n for _ in range(10):\n deck_size = np.random.randint(low=1, high=100000)\n deck = np.arange(deck_size)\n shuffle_deck = shuffle(deck)\n self.assertEqual(sum(shuffle_deck), deck_size * (deck_size - 1) // 2)\n self.assertEqual(len(deck), len(shuffle_deck))\n self.assertSetEqual(set(shuffle_deck), set(deck))\n\n print(\"input sequence preserve ok: PASS\")\n print(\"shuffle contain unique value ok: PASS\")\n print(\"shuffle contain same set of value as deck ok: PASS\")", "def _assert_same_size(outputs: TensorStruct, output_size: OutputSize):\n flat_output_size = nest.flatten(output_size)\n flat_output = nest.flatten(outputs)\n for output, size in zip(flat_output, flat_output_size):\n if isinstance(size, torch.Size):\n if output[0].size() != size:\n raise ValueError('The output size does not matchthe required output_size')\n elif output[0].size()[-1] != size:\n raise ValueError('The output size does not match the required output_size')", "def test_transform_album_with_two_transforms_with_gap_in_numbering(self):\n album = Album(artist='Artist', album='Album')\n tflist = TransformList()\n tflist.add_transform(Transform(1,\n cond_artist=True, pattern_artist='Artist',\n change_artist=True, to_artist='Artist 2',\n ))\n tflist.add_transform(Transform(3,\n cond_album=True, pattern_album='Album',\n change_album=True, to_album='Album 2',\n ))\n\n self.assertEqual(album.last_transform, 0)\n self.assertEqual(album.artist, 'Artist')\n self.assertEqual(album.album, 'Album')\n self.assertEqual(album.transformed, False)\n\n tflist.apply_album(album)\n\n self.assertEqual(album.last_transform, 3)\n 
self.assertEqual(album.artist, 'Artist 2')\n self.assertEqual(album.album, 'Album 2')\n self.assertEqual(album.transformed, True)", "def test_transform_track_with_two_transforms_with_gap_in_numbering(self):\n track = Track(artist='Artist', title='Title')\n tflist = TransformList()\n tflist.add_transform(Transform(1,\n cond_artist=True, pattern_artist='Artist',\n change_artist=True, to_artist='Artist 2',\n ))\n tflist.add_transform(Transform(3,\n cond_title=True, pattern_title='Title',\n change_title=True, to_title='Title 2',\n ))\n\n self.assertEqual(track.last_transform, 0)\n self.assertEqual(track.artist, 'Artist')\n self.assertEqual(track.title, 'Title')\n self.assertEqual(track.transformed, False)\n\n tflist.apply_track(track)\n\n self.assertEqual(track.last_transform, 3)\n self.assertEqual(track.artist, 'Artist 2')\n self.assertEqual(track.title, 'Title 2')\n self.assertEqual(track.transformed, True)", "def test_fit(self):\n data = pd.DataFrame({\n \"x\": np.random.random(size=100),\n \"y\": np.random.choice([\"yes\", \"no\"], size=100)\n })\n\n transformer = DataTransformer()\n transformer._fit_continuous = Mock()\n transformer._fit_continuous.return_value = ColumnTransformInfo(\n column_name=\"x\", column_type=\"continuous\", transform=None,\n transform_aux=None,\n output_info=[SpanInfo(1, 'tanh'), SpanInfo(3, 'softmax')],\n output_dimensions=1 + 3\n )\n\n transformer._fit_discrete = Mock()\n transformer._fit_discrete.return_value = ColumnTransformInfo(\n column_name=\"y\", column_type=\"discrete\", transform=None,\n transform_aux=None,\n output_info=[SpanInfo(2, 'softmax')],\n output_dimensions=2\n )\n\n transformer.fit(data, discrete_columns=[\"y\"])\n\n transformer._fit_discrete.assert_called_once()\n transformer._fit_continuous.assert_called_once()\n assert transformer.output_dimensions == 6", "def test_upcast_dims(self):\n self.init()\n # Test sum\n assert sum_mat(self.i64_1) == np.sum(self.i64_1)\n assert sum_cube(self.f64_1) == np.sum(self.f64_1)\n assert sum_cube(self.fi64_2) == np.sum(self.fi64_2)\n # Test transpose\n assert self.i64_1.shape == (3,)\n assert transpose_mat(self.i64_1).shape == (1,3)\n assert np.all(transpose_mat(self.i64_1) == self.i64_1.T)\n # Test that downcasting is not possible\n with pytest.raises(TypeError):\n transpose_mat(self.i64_3)\n with pytest.raises(TypeError):\n sum_vec(self.ff64_2)", "def test_equal10():\n x = np.array([[True, False, True], [True, False, True], [True, False, True]])\n y = np.array([[True, False, True]])\n res = np.equal(x, y)\n obj.run(res=res, x=x, y=y)", "def test_roundtrip(setup_teardown_file):\n f = setup_teardown_file[3]\n dt = np.dtype('(3,)f8')\n dset = f.create_dataset('x', (10,), dtype=dt)\n\n out = dset[...]\n dset[...] = out\n\n assert np.all(dset[...] 
== out)", "def test_merge_list_same(short_ll, small_ll):\n assert ml(short_ll, small_ll) == 8\n assert len(small_ll) == 8", "def test_transform(self):\n result = transform((1, 2) ,2, 2)\n self.assertEqual(result, (4 * PIXEL, 3 * PIXEL))", "def test_inheritance(self):\n\n x = ScalingTransformer(columns=[\"a\"], scaler=\"standard\")\n\n h.assert_inheritance(x, tubular.base.BaseTransformer)", "def test_seqprep_assembly(self):\n self.writeTmpFastq(self.test_fn1, self.test_fn2)\n\n ### Suggested default settings ###\n params = {}\n params['-f'] = self.test_fn1\n params['-r'] = self.test_fn2\n params['-s'] = self.temp_dir_string + 'assembled.gz'\n params['-1'] = self.temp_dir_string + 'unassembled.reads1out.gz'\n params['-2'] = self.temp_dir_string + 'unassembled.reads2out.gz'\n params['-o'] = 15\n params['-m'] = 0.02\n params['-n'] = 0.9\n params['-y'] = 'J'\n\n sp_app = SeqPrep(params = params,\n WorkingDir=self.temp_dir_string)\n\n sp_res = sp_app()\n\n # since output is gzipped by default we need to convert to\n # raw text before testing our results. \n assembly_result = GzipFile(fileobj=sp_res['Assembled']).read()\n self.assertEqual(assembly_result, default_expected_assembly_workaround) \n\n unass_reads1_result = GzipFile(fileobj=\n sp_res['UnassembledReads1']).read()\n self.assertEqual(unass_reads1_result, \n expected_default_unassembled_reads1) \n\n unass_reads2_result = GzipFile(fileobj=\n sp_res['UnassembledReads2']).read()\n self.assertEqual(unass_reads2_result, \n expected_default_unassembled_reads2) \n sp_res.cleanUp() \n \n\n ### Alt settings ###\n params_alt = {}\n params_alt['-f'] = self.test_fn1\n params_alt['-r'] = self.test_fn2\n params_alt['-s'] = self.temp_dir_string + 'assembled.gz'\n params_alt['-1'] = self.temp_dir_string + 'unassembled.reads1out.gz'\n params_alt['-2'] = self.temp_dir_string + 'unassembled.reads2out.gz'\n params_alt['-o'] = 30\n params_alt['-m'] = 0.01\n params_alt['-n'] = 0.95\n params_alt['-y'] = 'J'\n \n sp_app2 = SeqPrep(params = params_alt,\n WorkingDir=self.temp_dir_string)\n sp_res2 = sp_app2()\n\n assembly_result = GzipFile(fileobj=sp_res2['Assembled']).read()\n self.assertEqual(assembly_result, expected_assembly_altered_params) \n\n unassembled_reads1_result2 = GzipFile(fileobj=\n sp_res2['UnassembledReads1']).read()\n self.assertEqual(unassembled_reads1_result2, \n expected_unassembled_reads1_altered_params) \n\n unassembled_reads2_result2 = GzipFile(fileobj=\n sp_res2['UnassembledReads2']).read()\n self.assertEqual(unassembled_reads2_result2, \n expected_unassembled_reads2_altered_params) \n\n sp_res2.cleanUp() \n shutil.rmtree(self.temp_dir_string)", "def test_demultiplex_sequences_alternate_settings(self):\r\n\r\n file_data = {}\r\n file_data['mapping_file'] = self.valid_mapping_data_golay_upper\r\n file_data['fasta_files'] = [self.valid_fasta_file_with_bc_errors]\r\n file_data['qual_files'] = [self.valid_qual_file_no_errors]\r\n file_data['demultiplexed_seqs_f'] = FakeOutFile()\r\n file_data['demultiplexed_qual_f'] = FakeOutFile()\r\n file_data['unassigned_seqs_f'] = FakeOutFile()\r\n file_data['unassigned_qual_f'] = FakeOutFile()\r\n\r\n keep_barcode = True,\r\n barcode_type = 12\r\n max_bc_errors = 1\r\n start_index = 500\r\n write_unassigned_reads = True\r\n disable_bc_correction = False\r\n added_demultiplex_field = 'Added_Demultiplex'\r\n\r\n log_data, bc_freqs, seq_counts, corrected_bc_count =\\\r\n demultiplex_sequences(file_data, keep_barcode, barcode_type,\r\n max_bc_errors, start_index, write_unassigned_reads,\r\n 
disable_bc_correction, added_demultiplex_field)\r\n\r\n expected_demultiplexed_fasta_seq = '>s1_500 ABCD0001 orig_bc=TACTCGTCGATG new_bc=AACTCGTCGATG bc_diffs=1\\nTACTCGTCGATGCAGGACGAGACGAGGTT\\n'\r\n expected_demultiplexed_qual_seq = '>s1_500 ABCD0001 orig_bc=TACTCGTCGATG new_bc=AACTCGTCGATG bc_diffs=1\\n29 13 24 14 10 14 16 13 30 10 13 11 30 26 11 11 29 20 19 16 24 17 29 28 11 27 14 24 24\\n'\r\n self.assertEqual(file_data['demultiplexed_seqs_f'].data,\r\n expected_demultiplexed_fasta_seq)\r\n self.assertEqual(file_data['demultiplexed_qual_f'].data,\r\n expected_demultiplexed_qual_seq)\r\n\r\n expected_log_data = {'AGCAGCACTTGT,1,s2': 0, 'ACCGCAGAGTCA,1,s3': 0,\r\n 'AACTCGTCGATG,1,s1': 1}\r\n expected_bc_freqs = {'TACTCGTCGATG': 1, 'GCCGCAGAGTCA': 1,\r\n 'AGCAGCACTTGT': 1}\r\n expected_seq_counts = 3\r\n expected_corrected_bc_count = [1, 0]\r\n\r\n self.assertEqual(log_data, expected_log_data)\r\n self.assertEqual(bc_freqs, expected_bc_freqs)\r\n self.assertEqual(seq_counts, expected_seq_counts)\r\n self.assertEqual(corrected_bc_count, expected_corrected_bc_count)", "def test_copy_non_bool_error(self):\n\n with pytest.raises(ValueError):\n\n BaseTransformer(copy=1)", "def test_transform_array_anonymize(self, mock_maps):\n # Setup\n data = np.array(['bar', 'foo', 'foo', 'tar'])\n\n # Run\n transformer = Mock()\n transformer.anonymize = 'email'\n transformer.intervals = [1, 2, 3]\n\n mock_maps[id(transformer)] = {\n 'bar': 'bar_x',\n 'foo': 'foo_x',\n 'tar': 'tar_x'\n }\n\n result = CategoricalTransformer.transform(transformer, data)\n\n # Asserts\n expect_result_len = 4\n\n self.assertEqual(\n len(result),\n expect_result_len,\n \"Unexpected length of transformed data\"\n )", "def test_samples_to_sequence(sample_store):\n # GIVEN a store with sample in a mix of states\n assert len(sample_store._get_query(table=Sample).all()) > 1\n assert (\n len(\n [\n sample\n for sample in sample_store._get_query(table=Sample).all()\n if sample.sequenced_at\n ]\n )\n >= 1\n )\n\n # WHEN finding which samples are in queue to be sequenced\n sequence_samples: List[Sample] = sample_store.get_samples_to_sequence()\n\n # THEN samples should be a list of samples\n assert isinstance(sequence_samples, list)\n assert isinstance(sequence_samples[0], Sample)\n\n # THEN it should list the received and partly sequenced samples\n assert len(sequence_samples) == 2\n assert {sample.name for sample in sequence_samples} == set(\n [\"sequenced-partly\", \"received-prepared\"]\n )\n for sample in sequence_samples:\n assert sample.sequenced_at is None\n if sample.name == \"sequenced-partly\":\n assert sample.reads > 0", "def test_parallel_align_seqs_pynast(self):\r\n\r\n params = {\r\n 'min_percent_id': 0.75,\r\n 'min_length': 15,\r\n 'template_fp': self.template_fp,\r\n 'pairwise_alignment_method': 'uclust',\r\n 'blast_db': None\r\n }\r\n\r\n app = ParallelAlignSeqsPyNast()\r\n r = app(self.inseqs1_fp,\r\n self.test_out,\r\n params,\r\n job_prefix='PTEST',\r\n poll_directly=True,\r\n suppress_submit_jobs=False)\r\n # confirm that the total number of output sequences equals the total\r\n # number of input sequences\r\n num_input_seqs = count_seqs_in_filepaths([self.inseqs1_fp])[1]\r\n num_template_seqs = count_seqs_in_filepaths([self.template_fp])[1]\r\n num_output_seqs = \\\r\n count_seqs_in_filepaths(glob(join(self.test_out, '*fasta')))[1] \\\r\n - num_input_seqs - num_template_seqs\r\n self.assertEqual(num_input_seqs, num_output_seqs)", "def test_align_two_alignments(self):\n res = align_two_alignments(self.align1_fp, 
self.align2_fp, RNA)\n self.assertEqual(res.toFasta(), self.align_two_align)", "def test_equal11():\n x = np.array([[True, False, True]])\n y = np.array([[[[[True, False, True], [True, False, True], [True, False, True]]]]])\n res = np.equal(x, y)\n obj.run(res=res, x=x, y=y)", "def test_equal9():\n x = np.array([[True, False, True], [True, False, True], [True, False, True]])\n y = np.array([True, False, True])\n res = np.equal(x, y)\n obj.run(res=res, x=x, y=y)", "def test_generate_batch_from_several_1d_arrays_with_dividable_batch_size(\n arrays,\n batch_size,\n expected):\n gen = BatchGenerator(*arrays, batch_size=batch_size)\n\n first, second = next(gen.flow())\n\n assert first == expected[0]\n assert second == expected[1]", "def test_equal12():\n x = np.array([[True, False, True], [True, False, True], [True, False, True]])\n y = np.array([[True, False, True], [False, False, False], [True, True, False]])\n res = np.equal(x, y)\n obj.run(res=res, x=x, y=y)", "def test_prepend_transform(self):\n\n # Default GroupTransform length should be 0 without prepend.\n self.assertEqual(self.group_tr.__len__(), 0)\n\n matrix_tr = OCIO.MatrixTransform()\n ff_tr = OCIO.FixedFunctionTransform()\n\n self.group_tr.prependTransform(matrix_tr)\n self.group_tr.prependTransform(ff_tr)\n\n self.assertEqual(self.group_tr.__len__(), 2)\n\n # FixedFunctionTransform goes in front due to prepend.\n iterator = self.group_tr.__iter__()\n for i in [ff_tr, matrix_tr]:\n self.assertEqual(i, next(iterator))", "def __test_similarity(self):\n\n _, test_loader, _ = create_loaders()\n\n false_counter = 0\n for (image, labels) in test_loader:\n\n output_pytorch = self._model(image).detach().numpy()\n\n im = image.numpy().flatten()\n output_manual = self.run_through_model(im)\n\n if np.allclose(output_pytorch, output_manual, rtol=1e-4, atol=1e-4) is not True:\n false_counter += 1\n\n print(f\"Number of mistakes: {false_counter}\")", "def test_roundtrip(self):\n dt = np.dtype('(3,)f8')\n dset = self.f.create_dataset('x', (10,), dtype=dt)\n\n out = dset[...]\n dset[...] = out\n\n self.assertTrue(np.all(dset[...] 
== out))", "def test_transform(self):\n t = Quantize()\n assert t.transform(8.6) == 9\n assert t.transform(8.4) == 8\n assert t.transform(5.3) == 5\n assert numpy.all(t.transform([8.6, 5.3]) == numpy.array([9, 5], dtype=int))", "def test_load_full_transform(self):\n self.add_transform(cond_artist=True, cond_album=True, cond_title=True,\n cond_ensemble=True, cond_composer=True, cond_conductor=True,\n change_artist=True, change_album=True, change_title=True,\n change_ensemble=True, change_composer=True, change_conductor=True,\n pattern_artist='Artist', pattern_album='Album', pattern_title='Title',\n pattern_ensemble='Ensemble', pattern_composer='Composer', pattern_conductor='Conductor',\n to_artist='Artist 2', to_album='Album 2', to_title='Title 2',\n to_ensemble='Ensemble 2', to_composer='Composer 2', to_conductor='Conductor 2')\n self.app.load_data()\n self.assertEqual(len(self.app.transforms), 1)\n transform = self.app.transforms.transforms[1]\n self.assertEqual(transform.cond_artist, True)\n self.assertEqual(transform.cond_album, True)\n self.assertEqual(transform.cond_title, True)\n self.assertEqual(transform.cond_ensemble, True)\n self.assertEqual(transform.cond_composer, True)\n self.assertEqual(transform.cond_conductor, True)\n self.assertEqual(transform.change_artist, True)\n self.assertEqual(transform.change_album, True)\n self.assertEqual(transform.change_title, True)\n self.assertEqual(transform.change_ensemble, True)\n self.assertEqual(transform.change_composer, True)\n self.assertEqual(transform.change_conductor, True)\n self.assertEqual(transform.pattern_artist, 'Artist')\n self.assertEqual(transform.pattern_album, 'Album')\n self.assertEqual(transform.pattern_title, 'Title')\n self.assertEqual(transform.pattern_ensemble, 'Ensemble')\n self.assertEqual(transform.pattern_composer, 'Composer')\n self.assertEqual(transform.pattern_conductor, 'Conductor')\n self.assertEqual(transform.to_artist, 'Artist 2')\n self.assertEqual(transform.to_album, 'Album 2')\n self.assertEqual(transform.to_title, 'Title 2')\n self.assertEqual(transform.to_ensemble, 'Ensemble 2')\n self.assertEqual(transform.to_composer, 'Composer 2')\n self.assertEqual(transform.to_conductor, 'Conductor 2')", "def test_iterable_len(self):\n for iterable_len, expected_size in [(5, 5), (150, 100), (None, 100)]:\n with self.subTest(iterable_len=iterable_len):\n iterable_of_args, iterable_len_, chunk_size, n_splits = apply_numpy_chunking(\n self.test_data_numpy, iterable_len=iterable_len, n_splits=1\n )\n\n # Materialize generator and test contents\n iterable_of_args = list(iterable_of_args)\n self.assertEqual(len(iterable_of_args), 1)\n self.assertIsInstance(iterable_of_args[0][0], np.ndarray)\n np.testing.assert_array_equal(iterable_of_args[0][0], self.test_data_numpy[:expected_size])\n\n # Test other output\n self.assertEqual(iterable_len_, 1)\n self.assertEqual(chunk_size, 1)\n self.assertIsNone(n_splits)", "def test_differencer_produces_expected_results(na_handling):\n transformer = Differencer(na_handling=na_handling)\n y_transformed = transformer.fit_transform(y_simple)\n y_expected = y_simple_expected_diff[na_handling]\n\n _assert_array_almost_equal(y_transformed, y_expected)", "def test_multiple_inference_runs_yield_same_result(count, mock_model_runtime):\n runtime = mock_model_runtime[0]\n net_id = mock_model_runtime[1]\n input_tensors = mock_model_runtime[2]\n output_tensors = mock_model_runtime[3]\n\n expected_results = np.array([[4, 85, 108, 29, 8, 16, 0, 2, 5, 0]])\n\n for _ in range(count):\n 
runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)\n\n output_vectors = ann.workload_tensors_to_ndarray(output_tensors)\n\n for i in range(len(expected_results)):\n assert output_vectors[i].all() == expected_results[i].all()", "def check_input_matches_expected_output(in_, out):\n ...", "def _assertStructuredAlmostEqual(\n first, second, abstol, reltol, exact, item_callback, exception\n):\n\n args = (first, second)\n f, s = args\n if all(isinstance(_, Mapping) for _ in args):\n if exact and len(first) != len(second):\n raise exception(\n \"mappings are different sizes (%s != %s)\" % (len(first), len(second))\n )\n for key in first:\n if key not in second:\n raise exception(\n \"key (%s) from first not found in second\"\n % (_unittest.case.safe_repr(key),)\n )\n try:\n _assertStructuredAlmostEqual(\n first[key],\n second[key],\n abstol,\n reltol,\n exact,\n item_callback,\n exception,\n )\n except exception as e:\n raise exception(\n \"%s\\n Found when comparing key %s\"\n % (str(e), _unittest.case.safe_repr(key))\n )\n return # PASS!\n\n elif any(isinstance(_, str) for _ in args):\n if first == second:\n return # PASS!\n\n elif all(isinstance(_, Sequence) for _ in args):\n # Note that Sequence includes strings\n if exact and len(first) != len(second):\n raise exception(\n \"sequences are different sizes (%s != %s)\" % (len(first), len(second))\n )\n for i, (f, s) in enumerate(zip(first, second)):\n try:\n _assertStructuredAlmostEqual(\n f, s, abstol, reltol, exact, item_callback, exception\n )\n except exception as e:\n raise exception(\"%s\\n Found at position %s\" % (str(e), i))\n return # PASS!\n\n else:\n # Catch things like None, which may cause problems for the\n # item_callback [like float(None)])\n #\n # Test `is` and `==`, but this is not necessarily fatal: we will\n # continue and allow the item_callback to potentially convert\n # the values to be comparable.\n try:\n if first is second or first == second:\n return # PASS!\n except:\n pass\n try:\n f = item_callback(first)\n s = item_callback(second)\n if f == s:\n return\n diff = abs(f - s)\n if abstol is not None and diff <= abstol:\n return # PASS!\n if reltol is not None and diff / max(abs(f), abs(s)) <= reltol:\n return # PASS!\n if math.isnan(f) and math.isnan(s):\n return # PASS! 
(we will treat NaN as equal)\n except:\n pass\n\n msg = \"%s !~= %s\" % (\n _unittest.case.safe_repr(first),\n _unittest.case.safe_repr(second),\n )\n if f is not first or s is not second:\n msg = \"%s !~= %s (%s)\" % (\n _unittest.case.safe_repr(f),\n _unittest.case.safe_repr(s),\n msg,\n )\n raise exception(msg)", "def test_transform_album_with_two_transforms_with_gap_in_numbering_and_one_already_applied(self):\n album = Album(artist='Artist', album='Album', last_transform=2)\n tflist = TransformList()\n tflist.add_transform(Transform(1,\n cond_artist=True, pattern_artist='Artist',\n change_artist=True, to_artist='Artist 2',\n ))\n tflist.add_transform(Transform(3,\n cond_album=True, pattern_album='Album',\n change_album=True, to_album='Album 2',\n ))\n\n self.assertEqual(album.last_transform, 2)\n self.assertEqual(album.artist, 'Artist')\n self.assertEqual(album.album, 'Album')\n self.assertEqual(album.transformed, False)\n\n tflist.apply_album(album)\n\n self.assertEqual(album.last_transform, 3)\n self.assertEqual(album.artist, 'Artist')\n self.assertEqual(album.album, 'Album 2')\n self.assertEqual(album.transformed, True)", "def test_get_chimeras_from_Nast_aligned(self):\r\n\r\n # empty input gives empty output\r\n seqs = \"\"\r\n fd, test_seqs_fp = mkstemp(prefix=\"test_chimera_slayer\")\r\n close(fd)\r\n self.files_to_remove.append(test_seqs_fp)\r\n fh = open(test_seqs_fp, \"w\")\r\n fh.write(seqs)\r\n fh.close()\r\n\r\n observed = get_chimeras_from_Nast_aligned(test_seqs_fp)\r\n self.assertEqual(observed, [])\r\n\r\n # no chimeras give empty output\r\n seqs = \"\"\">test1\r\nGTGGGGAATATTGCACAATGGGCGGAAGCCTGATGCAGCGACGCCGCGTGAGGGATGACGGCCTTCGGGTTGTAAACCTCTTTCAGCAGGGACGAAGCGTAAGTGACGGTACCTGCAGAAGAAGCGCCGGCCAACTACGTGCCAGCAGCCGCGGTAAGAC\r\n\"\"\"\r\n fd, test_seqs_fp2 = mkstemp(prefix=\"test_chimera_slayer\")\r\n close(fd)\r\n self.files_to_remove.append(test_seqs_fp2)\r\n fh = open(test_seqs_fp2, \"w\")\r\n fh.write(seqs)\r\n fh.close()\r\n observed = get_chimeras_from_Nast_aligned(test_seqs_fp2)\r\n self.assertEqual(observed, [])\r\n\r\n # Real chimeras are identified as such\r\n fd, test_seqs_fp3 = mkstemp(prefix=\"test_chimera_slayer\")\r\n close(fd)\r\n self.files_to_remove.append(test_seqs_fp3)\r\n fh = open(test_seqs_fp3, \"w\")\r\n fh.write(chimeras)\r\n fh.close()\r\n\r\n observed = get_chimeras_from_Nast_aligned(test_seqs_fp3)\r\n self.assertEqual(observed, [(chimera_id, parent_ids)])", "def test_mc_t_two_sample(self):\r\n # Verified against R's t.test() and Deducer::perm.t.test().\r\n\r\n # With numpy array as input.\r\n exp = (-0.11858541225631833, 0.90756579317867436)\r\n I = array([7.2, 7.1, 9.1, 7.2, 7.3, 7.2, 7.5])\r\n II = array([8.8, 7.5, 7.7, 7.6, 7.4, 6.7, 7.2])\r\n obs = mc_t_two_sample(I, II)\r\n self.assertFloatEqual(obs[:2], exp)\r\n self.assertEqual(len(obs[2]), 999)\r\n self.assertCorrectPValue(0.8, 0.9, mc_t_two_sample, [I, II],\r\n p_val_idx=3)\r\n\r\n # With python list as input.\r\n exp = (-0.11858541225631833, 0.90756579317867436)\r\n I = [7.2, 7.1, 9.1, 7.2, 7.3, 7.2, 7.5]\r\n II = [8.8, 7.5, 7.7, 7.6, 7.4, 6.7, 7.2]\r\n obs = mc_t_two_sample(I, II)\r\n self.assertFloatEqual(obs[:2], exp)\r\n self.assertEqual(len(obs[2]), 999)\r\n self.assertCorrectPValue(0.8, 0.9, mc_t_two_sample, [I, II],\r\n p_val_idx=3)\r\n\r\n exp = (-0.11858541225631833, 0.45378289658933718)\r\n obs = mc_t_two_sample(I, II, tails='low')\r\n self.assertFloatEqual(obs[:2], exp)\r\n self.assertEqual(len(obs[2]), 999)\r\n self.assertCorrectPValue(0.4, 0.47, mc_t_two_sample, [I, 
II],\r\n {'tails': 'low'}, p_val_idx=3)\r\n\r\n exp = (-0.11858541225631833, 0.54621710341066287)\r\n obs = mc_t_two_sample(I, II, tails='high', permutations=99)\r\n self.assertFloatEqual(obs[:2], exp)\r\n self.assertEqual(len(obs[2]), 99)\r\n self.assertCorrectPValue(0.4, 0.62, mc_t_two_sample, [I, II],\r\n {'tails': 'high', 'permutations': 99}, p_val_idx=3)\r\n\r\n exp = (-2.8855783649036986, 0.99315596652421401)\r\n obs = mc_t_two_sample(I, II, tails='high', permutations=99, exp_diff=1)\r\n self.assertFloatEqual(obs[:2], exp)\r\n self.assertEqual(len(obs[2]), 99)\r\n self.assertCorrectPValue(0.55, 0.99, mc_t_two_sample, [I, II],\r\n {'tails': 'high', 'permutations': 99, 'exp_diff': 1}, p_val_idx=3)", "def testOutputShapeConsistency(self, use_bias):\n\n # When padding is SAME, then the actual number of padding pixels can be\n # computed as: pad = kernel_shape - strides + (-input_shape % strides)\n # = 5 - 1 + (- 32 % 1) = 4\n\n # The formula for the minimal size is:\n # oH = strides[1] * (in_height - 1) - padding + kernel_shape_h\n # oH = 1 * ( 32 - 1) - 4 + 5 = 32\n\n # The formula for the maximum size (due to extra pixels) is:\n # oH_max = oH + strides[1] - 1\n # so, for strides = 1 and padding = SAME, input size == output size.\n inputs = tf.placeholder(tf.float32, shape=self.in_shape)\n\n conv1 = snt.Conv2DTranspose(name=\"conv2d_1\",\n output_channels=self.out_channels,\n output_shape=self.out_shape,\n kernel_shape=self.kernel_shape,\n padding=self.padding,\n stride=1,\n use_bias=use_bias)\n\n outputs = conv1(inputs)\n\n self.assertTrue(outputs.get_shape().is_compatible_with((\n self.batch_size,) + self.out_shape + (self.out_channels,)))\n\n self.assertTrue(conv1.w.get_shape().is_compatible_with(self.kernel_shape2))\n if use_bias:\n self.assertTrue(conv1.b.get_shape().is_compatible_with(\n [self.out_channels]))", "def test_transform_track_with_two_transforms_with_gap_in_numbering_and_one_already_applied(self):\n track = Track(artist='Artist', title='Title')\n track.last_transform = 2\n tflist = TransformList()\n tflist.add_transform(Transform(1,\n cond_artist=True, pattern_artist='Artist',\n change_artist=True, to_artist='Artist 2',\n ))\n tflist.add_transform(Transform(3,\n cond_title=True, pattern_title='Title',\n change_title=True, to_title='Title 2',\n ))\n\n self.assertEqual(track.last_transform, 2)\n self.assertEqual(track.artist, 'Artist')\n self.assertEqual(track.title, 'Title')\n self.assertEqual(track.transformed, False)\n\n tflist.apply_track(track)\n\n self.assertEqual(track.last_transform, 3)\n self.assertEqual(track.artist, 'Artist')\n self.assertEqual(track.title, 'Title 2')\n self.assertEqual(track.transformed, True)", "def test_sequence_dist_all_metrics(metric):\n unique_seqs = np.array([\"AAA\", \"ARA\", \"AFFFFFA\", \"FAFAFA\", \"FFF\"])\n seqs2 = np.array([\"RRR\", \"FAFA\", \"WWWWWWW\"])\n dist_mat = ir.ir_dist.sequence_dist(unique_seqs, metric=metric, cutoff=8, n_jobs=2)\n assert dist_mat.shape == (5, 5)\n\n dist_mat = ir.ir_dist.sequence_dist(\n unique_seqs, seqs2, metric=metric, cutoff=8, n_jobs=2\n )\n assert dist_mat.shape == (5, 3)", "def test_assign_seqs_two_fastas(self):\r\n\r\n # Handles two fasta files alone\r\n file_data = {}\r\n file_data['fasta_files'] = [self.valid_fasta_file_no_errors,\r\n self.valid_fasta_file_no_errors]\r\n file_data['qual_files'] = []\r\n #file_data['mapping_file'] = self.valid_mapping_data_golay_upper\r\n file_data['demultiplexed_seqs_f'] = FakeOutFile()\r\n\r\n ids_bcs_added_field = {('AACTCGTCGATG', ''): 's1',\r\n 
('AGCAGCACTTGT', ''): 's2', ('ACCGCAGAGTCA', ''): 's3'}\r\n bc_lens = [12]\r\n all_bcs = ['AACTCGTCGATG', 'AGCAGCACTTGT', 'ACCGCAGAGTCA']\r\n keep_barcode = False\r\n barcode_type = \"golay_12\"\r\n max_bc_errors = 1.5\r\n start_index = 1\r\n write_unassigned_reads = False\r\n disable_bc_correction = False\r\n added_demultiplex_field = None\r\n\r\n log_data, bc_freqs, seq_counts, corrected_bc_count =\\\r\n assign_seqs(file_data, ids_bcs_added_field, bc_lens, all_bcs,\r\n keep_barcode, barcode_type, max_bc_errors, start_index,\r\n write_unassigned_reads, disable_bc_correction,\r\n added_demultiplex_field)\r\n\r\n expected_demultiplexed_fasta_seq = '>s1_1 ABCD0001 orig_bc=AACTCGTCGATG new_bc=AACTCGTCGATG bc_diffs=0\\nCAGGACGAGACGAGGTT\\n>s3_2 EFGH0002 orig_bc=ACCGCAGAGTCA new_bc=ACCGCAGAGTCA bc_diffs=0\\nCCAGATTACGAGATTA\\n>s2_3 IJKL0003 orig_bc=AGCAGCACTTGT new_bc=AGCAGCACTTGT bc_diffs=0\\nGACCGATTACGATAACG\\n>s1_4 ABCD0001 orig_bc=AACTCGTCGATG new_bc=AACTCGTCGATG bc_diffs=0\\nCAGGACGAGACGAGGTT\\n>s3_5 EFGH0002 orig_bc=ACCGCAGAGTCA new_bc=ACCGCAGAGTCA bc_diffs=0\\nCCAGATTACGAGATTA\\n>s2_6 IJKL0003 orig_bc=AGCAGCACTTGT new_bc=AGCAGCACTTGT bc_diffs=0\\nGACCGATTACGATAACG\\n'\r\n self.assertEqual(file_data['demultiplexed_seqs_f'].data,\r\n expected_demultiplexed_fasta_seq)\r\n\r\n expected_log_data = {'ACCGCAGAGTCA,s3': 2, 'AACTCGTCGATG,s1': 2,\r\n 'AGCAGCACTTGT,s2': 2}\r\n expected_bc_freqs = {'AACTCGTCGATG': 2, 'AGCAGCACTTGT': 2,\r\n 'ACCGCAGAGTCA': 2}\r\n expected_seq_counts = 6\r\n expected_corrected_bc_count = [0, 0]\r\n\r\n self.assertEqual(log_data, expected_log_data)\r\n self.assertEqual(bc_freqs, expected_bc_freqs)\r\n self.assertEqual(seq_counts, expected_seq_counts)\r\n self.assertEqual(corrected_bc_count, expected_corrected_bc_count)" ]
[ "0.6028225", "0.5944386", "0.58593905", "0.5800506", "0.57926905", "0.57714605", "0.5770468", "0.57545334", "0.57405823", "0.57333404", "0.5712127", "0.5697243", "0.56768686", "0.56742346", "0.56673914", "0.56589234", "0.5644305", "0.56440634", "0.5639191", "0.56258535", "0.56170297", "0.56063867", "0.5598403", "0.5579288", "0.5575113", "0.5569507", "0.55689967", "0.55661476", "0.555037", "0.5548092", "0.5538684", "0.5530538", "0.55230075", "0.551618", "0.5513973", "0.5503551", "0.5502054", "0.55004334", "0.549585", "0.5494905", "0.5490442", "0.5486001", "0.5474738", "0.5464692", "0.5457247", "0.5456352", "0.54527247", "0.54506207", "0.5447707", "0.54420877", "0.5439949", "0.5434105", "0.54311925", "0.5428818", "0.54278255", "0.5421687", "0.5417973", "0.5416521", "0.539767", "0.5395739", "0.53878367", "0.5367935", "0.5365286", "0.5364861", "0.53593975", "0.5341806", "0.5340899", "0.53312755", "0.53310716", "0.53307295", "0.53266686", "0.5325988", "0.53219837", "0.53204256", "0.53201914", "0.5320127", "0.5298521", "0.5280453", "0.5279334", "0.52764827", "0.5274737", "0.5263659", "0.52632904", "0.5254753", "0.5242812", "0.52381957", "0.523819", "0.5234049", "0.5233235", "0.52279884", "0.52268744", "0.5225757", "0.52253497", "0.5224916", "0.52249", "0.5213845", "0.5212851", "0.5210949", "0.5210097", "0.52016854" ]
0.5383693
61
Test the cell when the input sequence is longer than the time horizon.
def test_past_horizon(cell_cls): with tf.Graph().as_default(): with tf.Session() as sess: pos_enc = positional_encoding(4, 6, dtype=tf.float64) in_seq = tf.get_variable('in_seq', shape=(3, 5, 6), initializer=tf.truncated_normal_initializer(), dtype=tf.float64) cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24) actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64) def apply_regular(sequence): with tf.variable_scope('rnn', reuse=True): with tf.variable_scope('transformer', reuse=True): expected = sequence + pos_enc for _ in range(3): expected = transformer_layer(expected, num_heads=2, hidden=24) return expected expected = tf.concat([apply_regular(in_seq[:, :-1]), apply_regular(in_seq[:, 1:])[:, -1:]], axis=1) sess.run(tf.global_variables_initializer()) actual, expected = sess.run((actual, expected)) assert not np.isnan(actual).any() assert not np.isnan(expected).any() assert actual.shape == expected.shape assert np.allclose(actual, expected)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exceeded(self):\r\n return int(time.time()) - self.start_time >= self.length", "def is_time(self) -> bool:\n return self.times > 1", "def check_time():\n times = get_times()\n time_difference = abs((times['local'] - times['target']).total_seconds())\n return time_difference < post_time_tol_seconds", "def _matchTime(self, time: float):\n return self._comparator['Time'] < time", "def time_is_out(self):\n return self.get_simulation_time() > self.config.max_time", "def check_time_borders(self, sam_ev, ):\n mask = np.logical_and(sam_ev['timeMJD'] > self.DataStart,\n sam_ev['timeMJD'] < self.DataEnd)\n return sam_ev[mask]", "def test_lengthen_closed_interval(self):\n self.t(\"track 2016-01-01T00:00:00 - 2016-01-01T01:00:00 foo\")\n code, out, err = self.t(\"lengthen @1 10mins\")\n self.assertIn('Lengthened @1 by 0:10:00', out)", "def valid(t):\n return float(t) > time.time()", "def validity_by_time(self):\n conn = psycopg2.connect(self.conn)\n permissable_maximum_age_secs = 600 # 600s = 10mins\n query = \"SELECT time FROM steve_sense_sensor_logs ORDER BY time DESC LIMIT 1\"\n cur = conn.cursor()\n cur.execute(query)\n queryResult = cur.fetchall()\n age_seconds = (datetime.datetime.now(\n timezone.utc) - queryResult[0][0]).seconds\n cur.close()\n conn.close()\n if age_seconds > permissable_maximum_age_secs:\n print(\"Check Sensor, last sample is \"+str(age_seconds)+\" old\")\n return False\n else:\n return True", "def is_over(self, time):\n over = (not self.enable_loop()) and (time >= self.get_duration())\n return over", "def T_elapsed(T_amount: BlockHeight) -> bool:\n T_now = getCurrentBlockHeight()\n return T_now - T_init >= T_amount", "def is_timing_pattern(line, threshold=5):\n while line[0] != 0:\n line = line[1:]\n if not len(line):\n return False\n while line[-1] != 0:\n line = line[:-1]\n if not len(line):\n return False\n c = []\n count = 1\n l = line[0]\n for p in line[1:]:\n if p == l:\n count += 1\n else:\n c.append(count)\n count = 1\n l = p\n c.append(count)\n if len(c) < 5:\n return False\n return np.var(c) < threshold", "def overtime(self):\n if self._overtime != '':\n return True\n return False", "def haveTime(self):\n if self.timeout is None:\n return True\n return time.time() <= self._stop", "def test_timestamp_spacing_too_frequent(times):\n assert_series_equal(\n time.spacing(times, '30min'),\n pd.Series([True] + [False] * (len(times) - 1), index=times)\n )", "def test_t(self):\n assert np.isclose(self.stepper.t, self.final_t)", "def test_time_supp_length_matches_no_timesteps(self):\n for no_timesteps in [5, 578, 993, 300072]:\n for dt in [0.1, 0.5, 3.0]:\n test_rec = rt.Recording(np.empty([6, no_timesteps, 1]), dt=dt)\n self.assertEqual(\n len(test_rec.time_supp),\n no_timesteps,\n 'Expected length of time_supp {} to match no_timesteps of '\n 'input {}.'.format(len(test_rec.time_supp), no_timesteps),\n )", "def is_timeout(self) -> bool:\n return self.runtime.timeout <= 0.0", "def _check_large_tilt(self):\n large_tilt = []\n xy, xz, yz = self.tilt_factors\n x,y,_ = self.cell_lengths\n\n large_tilt.append(-x/2<xy<x/2)\n large_tilt.append(-x/2<xz<y/2)\n large_tilt.append(-x/2<yz<y/2)\n return not all(large_tilt)", "def test_within_length(self):\r\n\r\n flow1 = Flowgram(\"0 1.2 2.1 3.4 0.02 0.01 1.02 0.08\") # len 7\r\n flow2 = Flowgram('0.5 1.0 4.1 0.0 0.0 1.23 0.0 3.1') # len 10\r\n\r\n self.assertTrue(within_length(flow1, 0, 10))\r\n self.assertFalse(within_length(flow1, 10, 20))\r\n self.assertFalse(within_length(flow2, 0, 5))\r\n self.assertTrue(within_length(flow2, 
5, 20))\r\n self.assertTrue(within_length(flow2, 5, 11))", "def test_time(self):\n M = simulation.StateMonitor(self.G, 'v')\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n self.assertTrue(np.allclose(M.t, np.arange(0, self.t_max, self.dt)))", "def is_equidistant(self) -> bool:\n if len(self.time) < 3:\n return True\n return len(self.time.to_series().diff().dropna().unique()) == 1", "def test_custom_time(self):\n interval = 0.5\n M = simulation.StateMonitor(self.G, 'v', interval=interval)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n self.assertTrue(np.allclose(M.t, np.arange(0, self.t_max, interval)))", "def test_bad_interval(self):\n # Intentionally set a small interval (3 min) to fail.\n interval = np.timedelta64(3, 'm')\n self.assertFalse(utils.check_timestamps(self.times, interval))", "def is_lmtp_over(self):\n\t\tif self._c >= self._C:\n\t\t\treturn True\n\t\treturn False", "def check_time(y, x):\n check = []\n # y stores earliest time for Y in each row, x stores latest time for X in each row. These two must not overlap\n for ty, tx in product(y, x):\n check.append(np.all(ty > tx))\n\n return np.all(check)", "def after(self, time2):\r\n return self.to_seconds() > time2.to_seconds()", "def __checkTimer(self):\n if self.__endTime is None:\n raise AssertionError('The end time had not been set.')\n if time.time() > self.__endTime:\n self._logError('Maximum Run Time Reached !!')\n raise _MaxRunTimeReachedError('')", "def check_trial_length(data, **_):\n # NaN values are usually ignored so replace them with Inf so they fail the threshold\n metric = np.nan_to_num(data[\"feedback_times\"] - data[\"goCue_times\"], nan=np.inf)\n passed = (metric < 60.1) & (metric > 0)\n assert data[\"intervals\"].shape[0] == len(metric) == len(passed)\n return metric, passed", "def check_time(self,data,data_orginal):\n if data['start_time'] > data['end_time']:\n raise ValidationError('event end time should be greater than start time.')", "def isTimeRemaining(self):\n if self.run_type.startswith('timed'):\n time_since_start = (time.time() - self.start_times['run'])\n remaining_time = self.max_time * 60 - time_since_start\n if remaining_time < 0:\n return False\n else:\n return True", "def test_lengthen_open_interval(self):\n self.t(\"start 30mins ago foo\")\n code, out, err = self.t.runError(\"lengthen @1 10mins\")\n self.assertIn('Cannot lengthen open interval @1', err)", "def check_time(self, m, s):\r\n if m*60 + s > 5400:\r\n self.unit.s = 0\r\n self.unit.m = 90\r\n return\r\n if s < 0:\r\n s = 0\r\n if m < 0:\r\n m = 0\r\n self.unit.s = s\r\n self.unit.m = m", "def marker_validity(table, train, thresh=1.0):\n shift = len(table) - len(train)\n\n if shift >= 0:\n start_d = train[0] - table.time.values[0]\n end_d = train[-1] - table.time.values[len(table)-shift-1]\n if np.abs(start_d - end_d) <= thresh:\n return shift # valid\n else:\n raise ValueError(\"marker value not match!\")\n else:\n raise ValueError(\"electrode markers exceed stimulus markers!\")\n\n return False", "def check_last_cycle_duration(self):\n min_pm_time = timedelta(seconds=self.args.min_pm_time)\n max_pm_time = timedelta(seconds=self.args.max_pm_time)\n if self.args.pm_timestamp:\n pm_timestamp = datetime.fromtimestamp(self.args.pm_timestamp)\n now = datetime.now()\n pm_time = now - pm_timestamp\n if pm_time < min_pm_time:\n raise TestFailed(\n \"{0} time less than expected: {1} < {2}\".format(\n self.args.pm_operation.capitalize(), pm_time, min_pm_time\n )\n )\n if pm_time > 
max_pm_time:\n raise TestFailed(\n \"{0} time greater than expected: {1} > {2}\".format(\n self.args.pm_operation.capitalize(), pm_time, max_pm_time\n )\n )\n\n logging.info(\n \"{0} time: {1}\".format(self.args.pm_operation.capitalize(), pm_time)\n )", "def test_general_subset_invalid_time():\n pass", "def verify_event_timing(self, event, item):\n return True", "def test_load_points_times_length():\n df = leiap.get_points_times(warn='disable')\n assert df.shape[0] > 0", "def test_lengthen_synthetic_interval(self):\n now = datetime.now()\n now_utc = now.utcnow()\n\n three_hours_before = now - timedelta(hours=3)\n four_hours_before = now - timedelta(hours=4)\n five_hours_before = now - timedelta(hours=5)\n\n three_hours_before_utc = now_utc - timedelta(hours=3)\n four_hours_before_utc = now_utc - timedelta(hours=4)\n five_hours_before_utc = now_utc - timedelta(hours=5)\n\n self.t.configure_exclusions((four_hours_before.time(), three_hours_before.time()))\n\n self.t(\"start {:%Y-%m-%dT%H:%M:%S}\".format(five_hours_before))\n\n self.t(\"lengthen @2 5min\")\n\n j = self.t.export()\n\n self.assertEqual(len(j), 2)\n self.assertClosedInterval(j[0],\n expectedStart=\"{:%Y%m%dT%H%M%S}Z\".format(five_hours_before_utc),\n expectedEnd=\"{:%Y%m%dT%H%M%S}Z\".format(four_hours_before_utc + timedelta(minutes=5)),\n expectedTags=[],\n description=\"lengthened interval\")\n self.assertOpenInterval(j[1],\n expectedStart=\"{:%Y%m%dT%H%M%S}Z\".format(three_hours_before_utc),\n expectedTags=[],\n description=\"unmodified interval\")", "def important_event(time: int) -> bool:\n last_event = get_events(True)[0]\n try:\n time_event = int(last_event.split('\\n')[0].strip(\"'\"))\n except ValueError:\n time_event = int(last_event.split('\\n')[-1].strip(\"'\"))\n if time - time_event < 60:\n return 'gol' in last_event or 'cartão' in last_event\n return False", "def after(self, time2):\n return self.to_seconds() > time2.to_seconds()", "def test_bad_time_repeat(self):\n repeated = np.concatenate([np.repeat(self.times[0], 3),\n self.times[3:]])\n self.assertFalse(utils.check_timestamps(repeated))", "def check_timer(self, wanted_time):\n if time.time() - self.start_time >= wanted_time:\n return True\n return False", "def is_dropped(upd_time, time_before):\n if (upd_time - time_before) / float(Config.BOUNDARY) >= 1.5:\n return True\n return False", "def triggered(self, time: float) -> bool:\n for start, end in self.boundaries():\n if start < time <= end:\n return True\n return False", "def test_invalid_time_too_late(event_member):\n _, member, event_id = event_member\n current = date.today() + timedelta(days=1)\n start = (datetime.combine(current, time(16, 30)) +\n timedelta(days=(MAX_DAYS - 2)))\n end = start + timedelta(days=5)\n expect_error(edit, InputError, member.username, event_id,\n True, start, end)", "def _badness(self, time):\n return (time - self.expected_time)**2", "def assert_timeout(self) -> None:", "def is_exceeded(self):\n\n if self.stopwatch.check_time() > self.duration:\n self.stopwatch.start()\n self.num_processed_this_interval = 0\n return False\n\n return self.num_processed_this_interval >= self.max_per_interval", "def toc(self,timestamp):\n return self._timestamp > timestamp", "def __check_total_duration(self, duration: int) -> bool:\n available_duration = N_EIGHTHS_PER_MEASURE * (self.n_measures - 1)\n return self.current_time_in_eighths + duration <= available_duration", "def find_alert_time(self) -> None:\n \n # Also not clear from the paper how to doe this,\n # use the first 10 data points in 
the light curve to determine the magnitude\n # baseline\n\n mean_mag = np.mean(self.mags[:10])\n std_mag = np.std(self.mags[:10])\n\n num_above = 0 \n i = 9\n\n while num_above < 3 and i < len(self.times)-1:\n \n i += 1 \n\n if self.mags[i] < mean_mag - std_mag:\n num_above += 1\n else:\n num_above = 0.0\n\n if len(self.times) - 1 == i:\n print(\"Give me more training data, not alerted yet, this is probably going to fail\")\n \n return self.times[i-1]", "def isolate_self_reporting_cases(self, time: int):", "def trimtimes(time, elmbeg, elmend, preft = 0.0, suft = 0.0):\n valididx = np.zeros(len(time),dtype='bool')\n \n elmbeg = elmbeg - preft\n elmend = elmend + suft\n for i in range(len(time)):\n t = time[i]\n boolbeg = t>=elmbeg\n boolend = t<=elmend\n boolelm = boolbeg & boolend\n valididx[i] = np.sum(boolelm)\n \n #To use only data outside of ELMs\n valididx = np.invert(valididx)\n return time[valididx], valididx", "def test_convergence(self, time_step):\n \n ##compare the average episode length between two loop\n if self.past_episode == time_step:\n self.convergence = True\n else:\n self.convergence = False", "def test_convergence(self, time_step):\n \n ##compare the average episode length between two loop\n if self.past_episode == time_step:\n self.convergence = True\n else:\n self.convergence = False", "def test_seq_exceeds_homopolymers(self):\r\n self.assertEqual(seq_exceeds_homopolymers('AAACGA', 3), False)\r\n self.assertEqual(seq_exceeds_homopolymers('AAACGA', 2), True)\r\n self.assertEqual(seq_exceeds_homopolymers('AAACGA', 1), True)\r\n self.assertEqual(seq_exceeds_homopolymers('AAACGATTTT', 3), True)", "def early_stop(val_history, t=3, required_progress=0.0001):\n \n if (len(val_history) > t+1):\n differences = []\n for x in range(1, t+1):\n differences.append(val_history[-x]-val_history[-(x+1)])\n differences = [y < required_progress for y in differences]\n if sum(differences) == t: \n return True\n else:\n return False\n else:\n return False", "def test_lengthen_an_interval_to_enclose_a_month_border(self):\n self.t(\"track 20180831T220000 - 20180831T230000 foo\")\n self.t(\"lengthen @1 4h\")\n\n j = self.t.export()\n\n self.assertEqual(len(j), 1)\n self.assertClosedInterval(j[0])", "def __le__(self, seq):\n if any(self._arr[i] > seq[i] for i in range(min(self._length, len(seq)))):\n return False\n return self._length <= len(seq)", "def _IsTimeReplot( self ):\n return True", "def is_chunk_timeout(self, chunk_timeout): \n return time() - self._chunk_timeout_time > chunk_timeout", "def within_length(flowgram, minlength=0, maxlength=400):\r\n seq = flowgram.toSeq()\r\n l = len(seq)\r\n return (l >= minlength and l <= maxlength)", "def check_goCue_delays(data, **_):\n metric = np.nan_to_num(data[\"goCue_times\"] - data[\"goCueTrigger_times\"], nan=np.inf)\n passed = (metric <= 0.0015) & (metric > 0)\n assert data[\"intervals\"].shape[0] == len(metric) == len(passed)\n return metric, passed", "def is_longer(dna1, dna2):\n return get_length(dna1) > get_length(dna2)", "def test_accurate(self):\n M = simulation.EventMonitor(self.G)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n\n times = self.G.pattern.nonzero()[1]*self.dt\n self.assertTrue(np.allclose(sorted(times), M.t))\n for (i, t) in zip(M.i, M.t):\n self.assertTrue(self.G.pattern[i, int_r(t/self.dt)])", "def check_timelimit_slot__(self):\n timerange = self.valkkafs_manager.getTimeRange()\n \n if len(timerange) < 1: # empty tuple implies no frames\n print(\"PlaybackController: check_timelimit_slot__ 
: WARNING! no timerange from ValkkaFS\")\n # fabricate a dummy time : this exact moment\n current_time = int(time.time() * 1000)\n timerange = (\n current_time,\n current_time + 1\n )\n print(\"check_timelimits_slot__ : timerange =\", timerange)\n print(\"check_timelimits_slot__ : %s -> %s\" % ( formatMstimestamp(timerange[0]), formatMstimestamp(timerange[1]) ) )\n self.signals.set_fs_time_limits.emit(timerange)", "def _validate_time_index(self):\n if len(self.hybrid_time_index) < 8760:\n msg = (\"The length of the merged time index ({}) is less than \"\n \"8760. Please ensure that the input profiles have a \"\n \"time index that overlaps >= 8760 times.\")\n e = msg.format(len(self.hybrid_time_index))\n logger.error(e)\n raise FileInputError(e)", "def check_diff(self,game,wanted_diff,wanted_starting_time=''):\n return True", "def _validate_time(self, col):\r\n error_rows = [] # initialize list of rows with errors\r\n # Loop through data and validate time values\r\n for i, row in enumerate(self.rows):\r\n csv_row = i + 1\r\n time_of_survey = row[col]\r\n time24hr_pattern = \"^(2[0-3]|[01]?[0-9]):([0-5]?[0-9]):([0-5]?[0-9])$\"\r\n time12hr_pattern = \"^(1[0-2]|0?[1-9]):([0-5]?[0-9]):([0-5]?[0-9])( ?[AP]M)?$\"\r\n\r\n if \"M\" in time_of_survey:\r\n if not re.search(time12hr_pattern, time_of_survey):\r\n error_rows.append(csv_row)\r\n else:\r\n if not re.search(time24hr_pattern, time_of_survey):\r\n error_rows.append(csv_row)\r\n return error_rows", "def is_tachycardic(self, hr):\n lower_bound = self.tachycardic_range()\n return hr > lower_bound", "def test_timeout_elapsed_no_exception(self):\n deadline = Deadline(-MS)\n timeout = deadline.timeout(raise_if_elapsed=False)\n self.assertGreater(timeout, -2 * MS)\n self.assertLess(timeout, -MS)", "def testMuchTiling(self):\n launcher.TextFrame._ResetTiling()\n area = wx.Display().GetClientArea()\n lc = launcher.TextFrame('super dooper tytle 4 roolerz and doodz')\n # Needs to be real big in case you have a large monitor. 
~1000\n # iterations needed for a (1440,874) laptop before a full reset\n # happens.\n for i in range(3000):\n lc._ShiftTilePosition()\n self.assertTrue(launcher.TextFrame._tile_position[0] > area[0])\n self.assertTrue(launcher.TextFrame._tile_position[1] > area[1])\n self.assertTrue(launcher.TextFrame._tile_position[0] < area[2])\n self.assertTrue(launcher.TextFrame._tile_position[1] < area[3])", "def checkTime(shape):\n if not cmds.isConnected(\"time1.outTime\", \"%s.time\" % shape):\n message = 'Connecting : time1.outTime to %s.time' % shape\n cmds.connectAttr(\"time1.outTime\", \"%s.time\" % shape)\n MGlobal.displayInfo(message)", "def test_is_time_druid_time_col(self):\n col = TableColumn(column_name=\"__time\", type=\"INTEGER\")\n self.assertEquals(col.is_dttm, None)\n DruidEngineSpec.alter_new_orm_column(col)\n self.assertEquals(col.is_dttm, True)\n\n col = TableColumn(column_name=\"__not_time\", type=\"INTEGER\")\n self.assertEquals(col.is_time, False)", "def time_in_range(start, end, time):\n if start <= end:\n return start <= time <= end\n else:\n return start <= time or time <= end", "def test_outside_bottom_range(self):\n input_ = [\n self.indicator_record(date=datetime.date(2000, 2, 1), value=0.13),\n self.indicator_record(date=datetime.date(2000, 3, 1), value=0.22),\n ]\n output = self.expander._ipca_from_15_expander(input_)\n expected = self.indicator_record(date=datetime.date(2000, 4, 1), value=0.22)\n actual = output[-1]\n\n self.assertEqual(expected, actual)", "def check_candidates(alt_data_sequence, sequence, min_gap, max_gap, window_size):\n\n t = i = 0\n times = dict()\n len_sequence = len(sequence)\n while 0 <= i < len_sequence:\n found_element = _find_element(alt_data_sequence, sequence[i], window_size, t)\n if found_element is not None:\n times[i] = found_element\n if i == 0 or abs(times[i][1]-times[i-1][0]) <= max_gap:\n # forward phase, + 1 to prevent infinite loop\n t = times[i][1] + min_gap + 1\n i += 1\n else:\n # backward phase\n t = times[i][1] - max_gap\n i -= 1\n else:\n return False\n\n return True", "def withinInterval(self, t):\n if (self.initTime == self.endTime):\n return True\n if (self.initTime <= t and t <= self.endTime):\n return True\n else:\n return False", "def test_time_field():", "def testTooLong(self, dry_run=False):\n\n\t\twg = waveform.Generator(frequency=Quantity(1, 'GHz'), dry_run=dry_run)\n\n\t\twg.delay(Quantity(1, 'ns'))\n\t\twg.delay(Quantity(1, 'us'))\n\t\twg.delay(Quantity(1, 'ms'))\n\t\tassert_raises(ValueError, wg.delay, Quantity(0.01, 's'))", "def test_expand_data_1500_correct_len():\n # TODO: should it round up to allow last snippet of time?\n exp = expand_data(log, 1500)\n assert len(exp) == (log['end'].iloc[-1] / 1500)", "def test_analyze_time(self):\n self.ph5validate.analyze_time()\n self.assertEqual(self.ph5validate.das_time.keys(), [('12183', 1, 500)])\n Dtime = self.ph5validate.das_time[('12183', 1, 500)]\n\n # 3 different deploy time\n self.assertEqual(len(Dtime['time_windows']), 5)\n\n # station 9001\n self.assertEqual(Dtime['time_windows'][0],\n (1550849950, 1550850034, '9001'))\n self.assertEqual(Dtime['time_windows'][1],\n (1550849950, 1550850034, '9001'))\n self.assertEqual(Dtime['time_windows'][2],\n (1550849950, 1550850034, '9001'))\n # station 9002\n self.assertEqual(Dtime['time_windows'][3],\n (1550850043, 1550850093, '9002'))\n # station 9003\n self.assertEqual(Dtime['time_windows'][4],\n (1550850125, 1550850187, '9003'))\n\n self.assertEqual(Dtime['min_deploy_time'],\n [1550849950,\n 'Data exists before deploy 
time: 7 seconds.'])", "def past_limit(row, col, matrix):\n return row >= len(matrix) or col >= len(matrix) or matrix[row][col] > 0", "def _lead_cheak(self,pulse_width_list):\n return (abs(pulse_width_list[0] - _Const.NEC_HDR_MARK) <\n _Const.NEC_HDR_MARK * _Const.TOLERANCE) and (\n abs(pulse_width_list[1] - _Const.NEC_HDR_SPACE) <\n _Const.NEC_HDR_SPACE * _Const.TOLERANCE)", "def isTimeForTask(self, task_times):\n if self.run_type.startswith('timed'):\n time_since_start = (time.time() - self.start_times['run'])\n remaining_time = self.max_time * 60 - time_since_start\n mean_task_time = np.mean(task_times)\n self.tee(\" projected task time: %s, remaining time: %s\"%(\\\n HMStime(mean_task_time), HMStime(remaining_time)), process=process)\n if mean_task_time > remaining_time:\n return False\n else:\n return True", "def test_time(self):\r\n pass", "def check(ht, mt, st, pid):\n\n ns_ticks = 0\n shift = 0\n\n diff = (mt - ht + TOTAL_TICKS) % TOTAL_TICKS\n for rep in range(12):\n tmp = diff + rep * TOTAL_TICKS\n if tmp % 11 == 0:\n ns_ticks = tmp / 11\n shift = (ht - ns_ticks + TOTAL_TICKS) % TOTAL_TICKS\n\n if (ns_ticks + shift) % TOTAL_TICKS != ht:\n continue\n\n if (12*ns_ticks + shift) % TOTAL_TICKS != mt:\n continue\n\n if (720*ns_ticks + shift) % TOTAL_TICKS != st:\n continue\n\n # calc_st = (720*ns_ticks + shift) % TOTAL_TICKS\n # if calc_st == st:\n ns = ns_ticks % 1e9\n ns_ticks /= 1e9\n\n secs = ns_ticks % 60\n ns_ticks /= 60\n\n mins = ns_ticks % 60\n ns_ticks /= 60\n\n hrs = ns_ticks\n\n if hrs < 12:\n print(f\"Case #{pid}: {int(hrs)} {int(mins)} {int(secs)} {int(ns)}\")\n return True\n\n return False", "def test_W_end(self):\t\t\n self.assertAlmostEqual(attempt.W[-1], 9.494852380803035)", "def is_longer(dna1, dna2):\n return len(dna1)> len(dna2)", "def parrot_trouble(talking, hour):\r\n if(talking and (hour < 7 or hour > 20)):\r\n return True\r\n return False", "def test_plt_mag_time():\n\n ta = WATA()\n wata_data = define_testdata()\n ta.source = ColumnDataSource(data=wata_data)\n ta.add_time_column()\n ta.setup_date_range()\n\n # create the arrays per filter and readout pattern\n nrsrapid_f140x, nrsrapid_f110w, nrsrapid_clear = [], [], []\n nrsrapidd6_f140x, nrsrapidd6_f110w, nrsrapidd6_clear = [], [], []\n filter_used, readout = ta.source.data['tafilter'], ta.source.data['readout']\n max_val_box, time_arr = ta.source.data['max_val_box'], ta.source.data['time_arr']\n for i, val in enumerate(max_val_box):\n if '140' in filter_used[i]:\n if readout[i].lower() == 'nrsrapid':\n nrsrapid_f140x.append(val)\n nrsrapid_f110w.append(np.NaN)\n nrsrapid_clear.append(np.NaN)\n nrsrapidd6_f140x.append(np.NaN)\n nrsrapidd6_f110w.append(np.NaN)\n nrsrapidd6_clear.append(np.NaN)\n elif readout[i].lower() == 'nrsrapidd6':\n nrsrapid_f140x.append(np.NaN)\n nrsrapid_f110w.append(np.NaN)\n nrsrapid_clear.append(np.NaN)\n nrsrapidd6_f140x.append(val)\n nrsrapidd6_f110w.append(np.NaN)\n nrsrapidd6_clear.append(np.NaN)\n elif '110' in filter_used[i]:\n if readout[i].lower() == 'nrsrapid':\n nrsrapid_f140x.append(np.NaN)\n nrsrapid_f110w.append(val)\n nrsrapid_clear.append(np.NaN)\n nrsrapidd6_f140x.append(np.NaN)\n nrsrapidd6_f110w.append(np.NaN)\n nrsrapidd6_clear.append(np.NaN)\n elif readout[i].lower() == 'nrsrapidd6':\n nrsrapid_f140x.append(np.NaN)\n nrsrapid_f110w.append(np.NaN)\n nrsrapid_clear.append(np.NaN)\n nrsrapidd6_f140x.append(np.NaN)\n nrsrapidd6_f110w.append(val)\n nrsrapidd6_clear.append(np.NaN)\n else:\n if readout[i].lower() == 'nrsrapid':\n nrsrapid_f140x.append(np.NaN)\n 
nrsrapid_f110w.append(np.NaN)\n nrsrapid_clear.append(val)\n nrsrapidd6_f140x.append(np.NaN)\n nrsrapidd6_f110w.append(np.NaN)\n nrsrapidd6_clear.append(np.NaN)\n elif readout[i].lower() == 'nrsrapidd6':\n nrsrapid_f140x.append(np.NaN)\n nrsrapid_f110w.append(np.NaN)\n nrsrapid_clear.append(np.NaN)\n nrsrapidd6_f140x.append(np.NaN)\n nrsrapidd6_f110w.append(np.NaN)\n nrsrapidd6_clear.append(val)\n # add to the bokeh data structure\n ta.source.data[\"nrsrapid_f140x\"] = nrsrapid_f140x\n ta.source.data[\"nrsrapid_f110w\"] = nrsrapid_f110w\n ta.source.data[\"nrsrapid_clear\"] = nrsrapid_clear\n ta.source.data[\"nrsrapidd6_f140x\"] = nrsrapidd6_f140x\n ta.source.data[\"nrsrapidd6_f110w\"] = nrsrapidd6_f110w\n ta.source.data[\"nrsrapidd6_clear\"] = nrsrapidd6_clear\n result = ta.plt_mag_time()\n\n assert bokeh_plot_type == type(result)", "def isendofheated(self,lag):\n kmax = self.n\n v1 = self.v1\n v2 = self.v2\n for k in range(kmax-1):\n if lag[k+1]>=(v2+v1)/(v2-v1) * lag[k]:\n return False\n return True", "def _check_following_time_interval_threshold(data, index, time_window, threshold, min_count):\n\n\t# define the start slice\n\tstart_slice = index + 1\n\t# define the end slice, it will be the start slice plus or minus (depending on the operator) the time windows\n\tend_slice = start_slice + time_window\n\n\t# return True or False if the window contains more than the min_count\n\treturn ((data[start_slice:end_slice] > threshold).sum()) >= min_count", "def end_time(self) -> float:\r\n ...", "def timedout(self):\n\n return self.duration() > self.check.timeout", "def __gt__(self, seq):\n return not self.__le__(seq)", "def io_operation(self, time):\n self._io_time -= time\n if self._io_time <= 0:\n self._io = False\n return True\n return False", "def running(self):\r\n return self.__maxlen__ > 0", "def checkValid(self) -> None:\n if len(self.times) != len(self.milestones):\n raise ValueError(\"Times and milestones are not the same length\")\n if len(self.times)==0:\n raise ValueError(\"Trajectory is empty\")\n for (tprev,t) in zip(self.times[:-1],self.times[1:]):\n if tprev > t:\n raise ValueError(\"Timing is not sorted\")\n n = len(self.milestones[0])\n for q in self.milestones:\n if len(q) != n:\n raise ValueError(\"Invalid milestone size\")\n return", "def test_date_obj_within_t_delta(self):\n max_hour_count = (None, None, None)\n self.deque.append((self.datetime_obj, self.timestamp))\n result = feature_5(self.deque,\n self.heap,\n self.expected_dict,\n self.top_n,\n max_hour_count,\n self.time_rollover_queue)\n self.assertEqual(len(self.deque), 2)\n self.assertEqual(self.deque[-1], (self.datetime_obj, self.timestamp))\n self.assertEqual(result, max_hour_count)" ]
[ "0.6468879", "0.6448804", "0.5934962", "0.5849655", "0.57931024", "0.57145846", "0.5672567", "0.56684816", "0.5625143", "0.56138664", "0.5613715", "0.55911756", "0.55447626", "0.55374813", "0.5525327", "0.5507069", "0.54900634", "0.54690355", "0.54398084", "0.5436942", "0.5431212", "0.542524", "0.5418955", "0.5405217", "0.5402532", "0.53981674", "0.5389747", "0.5365855", "0.5354101", "0.53269887", "0.5325161", "0.53218794", "0.5307647", "0.5305859", "0.5279163", "0.52782696", "0.5272713", "0.52722996", "0.5268426", "0.52428013", "0.5220225", "0.5219497", "0.52189755", "0.52115333", "0.52112156", "0.5209926", "0.52026045", "0.51995826", "0.5198306", "0.5193817", "0.51936966", "0.5187395", "0.5186752", "0.51851696", "0.5170898", "0.5170898", "0.51680785", "0.5165749", "0.5160656", "0.5147195", "0.51396596", "0.5138453", "0.51338744", "0.51300377", "0.5116067", "0.5112243", "0.5111005", "0.5099668", "0.5097749", "0.50966674", "0.5094689", "0.5090898", "0.5087617", "0.5081663", "0.5078688", "0.507791", "0.50743663", "0.5066182", "0.5064232", "0.5053082", "0.50511146", "0.50409245", "0.50409114", "0.50332737", "0.50329417", "0.50278467", "0.5025255", "0.5024248", "0.5021289", "0.50148606", "0.50110763", "0.5004039", "0.50032336", "0.5003072", "0.5000043", "0.4998431", "0.499591", "0.49951592", "0.49950135", "0.49866873", "0.4979591" ]
0.0
-1
Test the cell when the states are split up and recombined from different timesteps.
def test_mismatched_starts(cell_cls):\n    with tf.Graph().as_default():\n        with tf.Session() as sess:\n            pos_enc = positional_encoding(5, 6, dtype=tf.float64)\n            in_seq = tf.get_variable('in_seq', shape=(3, 5, 6), initializer=tf.truncated_normal_initializer(), dtype=tf.float64)\n            cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24)\n            _, states_1 = tf.nn.dynamic_rnn(cell, in_seq[:, :1], dtype=tf.float64)\n            _, states_2 = tf.nn.dynamic_rnn(cell, in_seq[:, :2], dtype=tf.float64)\n            _, states_3 = tf.nn.dynamic_rnn(cell, in_seq[:, :3], dtype=tf.float64)\n            new_states = tuple(tf.stack([s2[0], s3[1], s1[2]], axis=0) for s1, s2, s3 in zip(states_1, states_2, states_3))\n            full_seq, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n            expected = tf.stack([full_seq[0, 2:4], full_seq[1, 3:5], full_seq[2, 1:3]], axis=0)\n            inputs = tf.stack([in_seq[0, 2:4], in_seq[1, 3:5], in_seq[2, 1:3]], axis=0)\n            actual, _ = tf.nn.dynamic_rnn(cell, inputs, initial_state=new_states)\n            sess.run(tf.global_variables_initializer())\n            actual, expected = sess.run((actual, expected))\n            assert not np.isnan(actual).any()\n            assert not np.isnan(expected).any()\n            assert actual.shape == expected.shape\n            assert np.allclose(actual, expected)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_transition_function_empty_grid(self):\r\n map_file_path = os.path.abspath(os.path.join(__file__, MAPS_DIR, 'empty-8-8/empty-8-8.map'))\r\n grid = MapfGrid(parse_map_file(map_file_path))\r\n\r\n # agents are starting a\r\n agent_starts = ((0, 0), (7, 7))\r\n agents_goals = ((0, 2), (5, 7))\r\n\r\n env = MapfEnv(grid, 2, agent_starts, agents_goals,\r\n FAIL_PROB, REWARD_OF_CLASH, REWARD_OF_GOAL, REWARD_OF_LIVING, OptimizationCriteria.Makespan)\r\n\r\n first_step_transitions = [((round(prob, 2), collision), next_state, reward, done)\r\n for ((prob, collision), next_state, reward, done) in\r\n env.P[env.s][vector_action_to_integer((RIGHT, UP))]]\r\n\r\n self.assertEqual(set(first_step_transitions), {\r\n ((0.64, False), env.locations_to_state(((0, 1), (6, 7))), REWARD_OF_LIVING, False), # (RIGHT, UP)\r\n ((0.08, False), env.locations_to_state(((1, 0), (6, 7))), REWARD_OF_LIVING, False), # (DOWN, UP)\r\n ((0.08, False), env.locations_to_state(((0, 0), (6, 7))), REWARD_OF_LIVING, False), # (UP, UP)\r\n ((0.08, False), env.locations_to_state(((0, 1), (7, 7))), REWARD_OF_LIVING, False), # (RIGHT, RIGHT)\r\n ((0.08, False), env.locations_to_state(((0, 1), (7, 6))), REWARD_OF_LIVING, False), # (RIGHT, LEFT)\r\n ((0.01, False), env.locations_to_state(((1, 0), (7, 7))), REWARD_OF_LIVING, False), # (DOWN, RIGHT)\r\n ((0.01, False), env.locations_to_state(((1, 0), (7, 6))), REWARD_OF_LIVING, False), # (DOWN, LEFT)\r\n ((0.01, False), env.locations_to_state(((0, 0), (7, 7))), REWARD_OF_LIVING, False), # (UP, RIGHT)\r\n ((0.01, False), env.locations_to_state(((0, 0), (7, 6))), REWARD_OF_LIVING, False) # (UP, LEFT)\r\n })\r\n\r\n wish_state = env.locations_to_state(((0, 1), (6, 7)))\r\n second_step_transitions = [((round(prob, 2), collision), next_state, reward, done)\r\n for ((prob, collision), next_state, reward, done) in\r\n env.P[wish_state][vector_action_to_integer((RIGHT, UP))]]\r\n\r\n # [(0,0), (7,7)]\r\n self.assertEqual(set(second_step_transitions), {\r\n ((0.64, False), env.locations_to_state(((0, 2), (5, 7))), REWARD_OF_LIVING + REWARD_OF_GOAL, True),\r\n # (RIGHT, UP)\r\n ((0.08, False), env.locations_to_state(((1, 1), (5, 7))), REWARD_OF_LIVING, False), # (DOWN, UP)\r\n ((0.08, False), env.locations_to_state(((0, 1), (5, 7))), REWARD_OF_LIVING, False), # (UP, UP)\r\n ((0.08, False), env.locations_to_state(((0, 2), (6, 7))), REWARD_OF_LIVING, False), # (RIGHT, RIGHT)\r\n ((0.08, False), env.locations_to_state(((0, 2), (6, 6))), REWARD_OF_LIVING, False), # (RIGHT, LEFT)\r\n ((0.01, False), env.locations_to_state(((1, 1), (6, 7))), REWARD_OF_LIVING, False), # (DOWN, RIGHT)\r\n ((0.01, False), env.locations_to_state(((1, 1), (6, 6))), REWARD_OF_LIVING, False), # (DOWN, LEFT)\r\n ((0.01, False), env.locations_to_state(((0, 1), (6, 7))), REWARD_OF_LIVING, False), # (UP, RIGHT)\r\n ((0.01, False), env.locations_to_state(((0, 1), (6, 6))), REWARD_OF_LIVING, False) # (UP, LEFT)\r\n })", "def test_case_generate(self):\n\n # initialization\n state = np.random.choice(self.init_states)\n model = rm.randint(0, self.model_num - 1)\n duration = np.random.choice(self.step_values)\n temp = rm.randint(self.min_temp, self.max_temp)\n\n self.states = [[model, duration, temp]]\n self.time = duration\n\n while self.time < self.max_time:\n if state == \"inc_tmp\":\n change = np.random.choice(\n self.transitionName[0], p=self.transitionMatrix[0]\n ) # choose the next state\n if change == \"S1S1\": # stay in the same state\n temp = self.get_temp_inc(temp)\n model = rm.randint(0, self.model_num - 1)\n diff 
= (\n self.max_time - self.time\n ) # this is for ensuring the maximum duration is not exceeded\n if (diff) < self.max_step and (diff) > self.min_step:\n duration = diff\n self.states.append([model, duration, temp])\n return self.states_to_dict()\n elif diff < self.min_step:\n self.states[len(self.states) - 1][1] += diff\n return self.states_to_dict()\n else:\n duration = np.random.choice(self.step_values)\n self.time += duration\n self.states.append([model, duration, temp])\n\n elif change == \"S1S2\": # change from increase to decrease\n temp = self.get_temp_dec(temp)\n model = rm.randint(0, self.model_num - 1)\n state = \"dec_tmp\"\n\n diff = self.max_time - self.time\n if (diff) < self.max_step and (diff) > self.min_step:\n duration = diff\n self.states.append([model, duration, temp])\n return self.states_to_dict()\n elif diff < self.min_step:\n self.states[len(self.states) - 1][1] += diff\n return self.states_to_dict()\n else:\n duration = np.random.choice(self.step_values)\n self.time += duration\n self.states.append([model, duration, temp])\n else:\n print(\"Error\")\n\n elif state == \"dec_tmp\":\n change = np.random.choice(\n self.transitionName[1], p=self.transitionMatrix[1]\n )\n if change == \"S2S1\":\n temp = self.get_temp_inc(temp)\n model = rm.randint(0, self.model_num - 1)\n state = \"inc_tmp\"\n diff = self.max_time - self.time\n if (diff) < self.max_step and (diff) > self.min_step:\n duration = diff\n self.states.append([model, duration, temp])\n return self.states_to_dict()\n elif diff < self.min_step:\n self.states[len(self.states) - 1][1] += diff\n return self.states_to_dict()\n else:\n duration = np.random.choice(self.step_values)\n\n self.time += duration\n self.states.append([model, duration, temp])\n\n elif change == \"S2S2\":\n temp = self.get_temp_dec(temp)\n model = rm.randint(0, self.model_num - 1)\n\n diff = self.max_time - self.time\n if (diff) < self.max_step and (diff) > self.min_step:\n duration = diff\n self.states.append([model, duration, temp])\n return self.states_to_dict()\n elif diff < self.min_step:\n self.states[len(self.states) - 1][1] += diff\n return self.states_to_dict()\n else:\n duration = np.random.choice(self.step_values)\n self.time += duration\n self.states.append([model, duration, temp])\n\n else:\n print(\"Error\")\n pass\n else:\n print(\"Error\")\n\n return self.states_to_dict()", "def step(self, game: Game):\n\n print(\"Tick #{}\".format(game.time_left))\n\n splitValue = getSplitValue(game)\n print (getSplitValue(game))\n\n for cell in game.me.cells:\n\n if game.time_left < 6:\n cell.trade(99999)\n\n\n\n\n if cell.mass >= splitValue:\n if len(game.me.cells) < 10:\n cell.split()\n #else:\n #cell.trade(cell.mass - 100)\n else:\n distance = cell.position.distance_to(cell.target)\n possibleVictims = findVictims(cell, game.enemies)\n\n if (cell.mass <= 100):\n target = closestRessource(game, cell, possibleVictims + game.resources.allResources, len(possibleVictims))\n else:\n #cell.burst()\n target = closestRessource(game, cell, possibleVictims, len(possibleVictims))\n\n\n for e in game.enemies:\n for c in e.cells:\n if enemyComingthrough(cell, c):\n target = cell.position + (c.target - c.position)\n #cell.burst()\n pass\n\n if (target != None):\n cell.move(target)\n else:\n print (' KES TU FAIS, VA PAS LÀ ')", "def test_update_splits():\n assert new_log.iloc[0]['start'] == 0", "def test_update_file_state(self):\n # blocks [0 4012], based on unit_362-2013-202-2-0.mdd\n test_file1 = os.path.join(INPUT_HYPM_PATH, 'first.mdd')\n\n # parse the 
first .mdd files into the node and instrument group files\n mdd.procall([test_file1])\n\n file_state = self.get_file_state('node60p1.dat')\n expected_file_state_1 = {StateKey.UNPROCESSED_DATA: [],\n StateKey.FILE_SIZE: 4012,\n StateKey.OUTPUT_INDEX: 1}\n\n if file_state != expected_file_state_1:\n print \"file state try 1: '%s'\" % file_state\n self.fail(\"Expected file state 1 does not match\")\n\n test_file2 = os.path.join(INPUT_HYPM_PATH, 'unit_362-2013-202-2-0.mdd')\n\n # parse the first .mdd files into the node and instrument group files\n mdd.procall([test_file2])\n\n file_state = self.get_file_state('node60p1.dat')\n expected_file_state_2 = {StateKey.UNPROCESSED_DATA: [[4736, 8192]],\n StateKey.FILE_SIZE: 8192,\n StateKey.OUTPUT_INDEX: 2}\n\n if file_state != expected_file_state_2:\n print \"file state try 2: '%s'\" % file_state\n self.fail(\"Expected file state 2 does not match\")\n\n # start second test, switch to node58\n # blocks [0 3583] [3840 4058]\n test_file1 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-2-0.mdd')\n mdd.procall([test_file1])\n\n file_state = self.get_file_state('node58p1.dat')\n expected_file_state = {StateKey.UNPROCESSED_DATA: [[3189, 3945]],\n StateKey.FILE_SIZE: 4059,\n StateKey.OUTPUT_INDEX: 1}\n\n if file_state != expected_file_state:\n print file_state\n self.fail(\"Expected file state 3 does not match\")\n\n # blocks [0 1279] [1536 1791] [2048 2303] [2560 2815] [3072 4059]\n test_file2 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-3-0.mdd')\n\n # parse the two .mdd files into the node and instrument group files\n mdd.procall([test_file2])\n\n file_state = self.get_file_state('node58p1.dat')\n # there is an unprocessed '/n' in between records\n expected_file_state = {StateKey.UNPROCESSED_DATA: [[4059, 4060]],\n StateKey.FILE_SIZE: 4060,\n StateKey.OUTPUT_INDEX: 2}\n\n if file_state != expected_file_state:\n print file_state\n self.fail(\"Expected file state 4 does not match\")", "def test_update_task_states(self):\r\n changed = self.combinedoe.update_task_states()\r\n self.assertFalse(changed)\r\n\r\n current_task = self.combinedoe.current_task\r\n current_task.change_state(CombinedOpenEndedV1Module.DONE)\r\n changed = self.combinedoe.update_task_states()\r\n\r\n self.assertTrue(changed)", "def test_state(self):\n # blocks [0 3583] [3840 4058]\n test_file1 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-2-0.mdd')\n # blocks [0 1279] [1536 1791] [2048 2303] [2560 2815] [3072 4059]\n test_file2 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-3-0.mdd')\n\n # parse the two .mdd files into the node and instrument group files\n mdd.procall([test_file1, test_file2])\n\n file_state = self.get_file_state('node58p1.dat')\n # there is an unprocessed '/n' in between records\n expected_file_state = {StateKey.UNPROCESSED_DATA: [[4059, 4060]],\n StateKey.FILE_SIZE: 4060,\n StateKey.OUTPUT_INDEX: 1}\n\n if file_state != expected_file_state:\n print file_state\n self.fail(\"Expected file state 1 does not match\")\n\n # blocks [0 2047] [2304 4095] [4096 7451]\n test_file3 = os.path.join(INPUT_HYPM_PATH, 'unit_364-2013-206-6-0.mdd')\n\n # parse another .mdd file adding on to the node file, and making\n # another sequence of instrument group files\n mdd.procall([test_file3])\n\n file_state = self.get_file_state('node58p1.dat')\n expected_file_state = {StateKey.UNPROCESSED_DATA: [[4059, 4060]],\n StateKey.FILE_SIZE: 7452,\n StateKey.OUTPUT_INDEX: 2}\n\n if file_state != expected_file_state:\n print \"file state: '%s'\" % file_state\n self.fail(\"Expected 
file state 2 does not match\")\n\n data_orig = self.read_full_file('node58p1.dat')\n\n # read the data from all generated files into one data string\n data_out = self.read_full_file('node58p1_0.status_1236801.dat')\n data_out += self.read_full_file('node58p1_0.wa_wfp_1236820.dat')\n data_out += self.read_full_file('node58p1_0.wc_wfp_1236820.dat')\n data_out += self.read_full_file('node58p1_0.we_wfp_1236820.dat')\n data_out += self.read_full_file('node58p1_1.status_1236801.dat')\n data_out += self.read_full_file('node58p1_1.wa_wfp_1236822.dat')\n data_out += self.read_full_file('node58p1_1.wc_wfp_1236822.dat')\n data_out += self.read_full_file('node58p1_1.we_wfp_1236822.dat')\n\n # confirm data in the node file matches those output in the instrument groups\n if not TestSioUnpack.compare_sio_matches(data_orig, data_out):\n self.fail(\"Failed sio block compare\")", "def _assert_like_multi_cell_state(x, layer_sizes, cell_type):\n if cell_type == 'Basic' or cell_type == 'GRU': # Basic and GRU states have same shape\n try:\n shapes = [_get_shape(layer) for layer in x]\n except:\n raise ValueError('State did not have expected form for Basic or GRU rnn state. Got:\\n{}'.format(x))\n batch_size = shapes[0][0]\n for (i, s) in enumerate(shapes):\n if s[0] != batch_size:\n raise ValueError('Inconsistent batch sizes. Expected {} based on 0th layer, but found {} in layer {}.'\n .format(batch_size, s[0], i))\n if s[1] != layer_sizes[i]:\n raise ValueError('State size at layer {} was {}, but layer size is {}.'.format(i, s[1], layer_sizes[i]))\n return\n elif cell_type == 'LSTM':\n try:\n shapes = [[_get_shape(xx) for xx in layer] for layer in x]\n except:\n raise ValueError('State did not have expected form for LSTM state. Got:\\n{}'.format(x))\n batch_size = shapes[0][0][0]\n for (i, s) in enumerate(shapes):\n if s[0][0] != batch_size:\n raise ValueError('Inconsistent batch sizes. Expected {} based on 0th layer, but found {} in c in layer {}.'\n .format(batch_size, s[0][0], i))\n if s[1][0] != batch_size:\n raise ValueError('Inconsistent batch sizes. Expected {} based on 0th layer, but found {} in h in layer {}.'\n .format(batch_size, s[1][0], i))\n if s[0][1] != layer_sizes[i]:\n raise ValueError('State size in c at layer {} was {}, but layer size is {}.'.format(i, s[0][1], layer_sizes[i]))\n if s[1][1] != layer_sizes[i]:\n raise ValueError('State size in h at layer {} was {}, but layer size is {}.'.format(i, s[1][1], layer_sizes[i]))\n return\n else:\n raise ValueError('Allowed cell types are \"Basic\", \"LSTM\" and \"GRU\". 
Got {}.'.format(x))", "def test_getting_state_parallel(self):\n no_replicates = 25\n replicate(experiment2, no_replicates, parallel=True, no_processes=2)\n for i in range(no_replicates):\n self.assertNotIn(SUBSTATE_KEY_PATTERN % i + '.result', state)", "def test_location_to_state():\n for num_rows in [12, 10]:\n for num_cols in [15, 9]:\n env = Four_Rooms_Environment(grid_width=num_cols, grid_height=num_rows)\n observed_states = set()\n for row in range(num_rows):\n for col in range(num_cols):\n state = env.location_to_state((row, col))\n assert state not in observed_states\n observed_states.add(state)", "def branch_competetive(state, time, d):\n\n th0 = state[0] \n th1 = state[1:(d[\"alpha1\"]+d[\"alpha1_p\"])]\n th2 = state[(d[\"alpha1\"]+d[\"alpha1_p\"]):]\n \n #print(len(state), len(th1))\n ### get all cytokine secreting cells \n th1_all = np.sum(th1[-d[\"alpha1_p\"]:])\n th2_all = np.sum(th2[-d[\"alpha2_p\"]:])\n ### calculate cytokine concentrations\n cyto_1 = d[\"beta_cyto_1\"]*th1_all + d[\"ifn_ext\"]\n cyto_2 = d[\"beta_cyto_2\"]*th2_all + d[\"il21_ext\"]\n ### calculate cytokine effect on rate\n fb1 = d[\"fb_rate1\"]*cyto_1**3/(cyto_1**3+d[\"K_1\"]**3)\n fb2 = d[\"fb_rate2\"]*cyto_2**3/(cyto_2**3+d[\"K_2\"]**3)\n ### update differantiation rate\n beta1 = d[\"beta1\"]*(1+fb1)\n beta2 = d[\"beta2\"]*(1+fb2)\n \n ### differentiate effectors th1 \n alpha = d[\"alpha1\"]\n p = 1.\n dt_th1 = diff_effector2(th1, th0, alpha, beta1, d[\"beta1_p\"], p, d)\n ### differentiate effectors th2\n alpha = d[\"alpha2\"]\n p = 1.\n dt_th2 = diff_effector2(th2, th0, alpha, beta2, d[\"beta2_p\"], p, d)\n \n ### combine all cells\n dt_th0 = -(beta1+beta2)*th0\n dt_state = np.concatenate(([dt_th0], dt_th1, dt_th2))\n\n return dt_state", "def operator_splitting(data, func_transport, func_collision):\n # executing time step\n func_transport(data)\n func_collision(data)\n assert np.all(data.state >= 0)\n data.t += 1\n return", "def test_location_to_state_and_state_to_location_match():\n env = Four_Rooms_Environment(stochastic_actions_probability=0.0)\n env.reset()\n for row in range(env.grid_height):\n for col in range(env.grid_width):\n assert env.location_to_state((row, col)) == env.location_to_state(env.state_to_location(env.location_to_state((row, col))))", "def test_check_user_location_and_goal_location_match_state_and_next_state():\n for _ in range(50):\n env = Four_Rooms_Environment()\n env.reset()\n for _ in range(50):\n move = randint(0, 3)\n env.step(move)\n assert env.state == [env.location_to_state(env.current_user_location), env.location_to_state(env.current_goal_location)]\n assert env.next_state == [env.location_to_state(env.current_user_location), env.location_to_state(env.current_goal_location)]", "def step(self, state):", "def test_previous_state(self):\n self.report_start(self.whoami())\n\n Device.objects.get(hostname='panda01').state_transition_to(Device.OFFLINE)\n\n # set a series of previous transitions for panda02\n Device.objects.get(hostname='panda02').state_transition_to(Device.OFFLINE)\n Device.objects.get(hostname='panda02').state_transition_to(Device.IDLE)\n Device.objects.get(hostname='panda02').state_transition_to(Device.RESERVED)\n Device.objects.get(hostname='panda02').state_transition_to(Device.RUNNING)\n Device.objects.get(hostname='panda02').state_transition_to(Device.IDLE)\n Device.objects.get(hostname='panda02').state_transition_to(Device.OFFLINE)\n Device.objects.get(hostname='panda02').state_transition_to(Device.IDLE)\n\n 
Device.objects.get(hostname='panda03').state_transition_to(Device.RUNNING)\n Device.objects.get(hostname='panda04').state_transition_to(Device.RESERVED)\n Device.objects.get(hostname='panda05').state_transition_to(Device.RETIRED)\n Device.objects.get(hostname='panda06').state_transition_to(Device.OFFLINING)\n\n self.panda_type.health_check_job = self.factory.make_job_json(health_check='true')\n self.panda_type.save()\n\n jobs = self.scheduler_tick()\n\n self.assertEqual(len(jobs), 1)\n job = TestJob.objects.get(id=jobs[0].id)\n job_id = job.id\n self.assertEqual(job.status, TestJob.RUNNING)\n\n for job in jobs:\n job_obj = TestJob.objects.get(pk=job.id) # reload\n job_obj.status = TestJob.COMPLETE\n self.job_finished(job_obj)\n\n self.assertEqual(len(jobs), 1)\n job = TestJob.objects.get(id=job_id)\n self.assertEqual(job.status, TestJob.COMPLETE)\n\n self.assertEqual(\n Device.objects.get(hostname='panda02').status,\n Device.IDLE\n )\n\n self.assertEqual(\n Device.objects.get(hostname='panda02').health_status,\n Device.HEALTH_PASS\n )\n\n self.assertEqual(\n Device.objects.get(hostname='panda01').health_status,\n Device.HEALTH_UNKNOWN\n )\n\n panda01 = Device.objects.get(hostname='panda01')\n panda01.status = Device.IDLE\n panda01.save()\n\n jobs = self.scheduler_tick()\n\n self.assertEqual(\n Device.objects.get(hostname='panda01').health_status,\n Device.HEALTH_UNKNOWN\n )\n\n self.assertEqual(Device.objects.get(hostname='panda01').status, Device.RUNNING)\n\n for job in jobs:\n job_obj = TestJob.objects.get(pk=job.id) # reload\n job_obj.status = TestJob.COMPLETE\n self.job_finished(job_obj)\n\n self.assertEqual(Device.objects.get(hostname='panda01').status, Device.IDLE)\n self.assertIsNone(Device.objects.get(hostname='panda01').current_job)\n self.assertEqual(\n Device.objects.get(hostname='panda01').health_status,\n Device.HEALTH_PASS\n )\n self.scheduler_tick()\n self.assertEqual(Device.objects.get(hostname='panda01').status, Device.IDLE)\n self.assertIsNone(Device.objects.get(hostname='panda01').current_job)\n self.assertEqual(\n Device.objects.get(hostname='panda01').health_status,\n Device.HEALTH_PASS\n )\n\n self.cleanup(self.whoami())", "def discrete_trajectory_to_wait_times(data, t_col='t', state_col='state'):\n\n states = data[state_col].values\n times = data[t_col].values\n num_measurements = len(data)\n\n # now iterate through valid part of trajectory to establish wait times\n start_times = []\n end_times = []\n earliest_st = [] # bounds on start time\n latest_st = []\n earliest_et = [] # bounds on end time\n latest_et = []\n wait_state = []\n wait_type = []\n k0 = 0 # index at which current state began\n state = states[k0]\n state_has_changed = False\n for k in range(num_measurements):\n # if no state change, continue\n if states[k] == state:\n continue\n # otherwise, store change\n start_times.append(times[k0])\n end_times.append(times[k])\n wait_state.append(state)\n # bounds on true wait time value\n if k0 == 0: # left exterior times have exactly determined \"start\"\n earliest_st.append(times[k0])\n else:\n earliest_st.append(times[k0-1])\n latest_st.append(times[k0])\n earliest_et.append(times[k-1])\n latest_et.append(times[k])\n # if this is the first state change, we store it separately\n if not state_has_changed:\n wait_type.append('left exterior')\n state_has_changed = True\n # otherwise, a normal state change\n else:\n wait_type.append('interior')\n # either way, state has changed\n state = states[k]\n k0 = k\n # also store the time spent in final state\n 
start_times.append(times[k0])\n end_times.append(times[k])\n if k0 == 0: # full exterior times also have exactly determined \"start\"\n earliest_st.append(times[k0])\n else:\n earliest_st.append(times[k0-1])\n latest_st.append(times[k0])\n # right/full exterior times have exactly determined \"end\"\n earliest_et.append(times[k])\n latest_et.append(times[k])\n # state type stored specially for final state\n wait_state.append(state)\n if not state_has_changed:\n wait_type.append('full exterior')\n else:\n wait_type.append('right exterior')\n start_times = np.array(start_times)\n end_times = np.array(end_times)\n wait_times = end_times - start_times\n min_waits = np.array(earliest_et) - np.array(latest_st)\n max_waits = np.array(latest_et) - np.array(earliest_st)\n df = pd.DataFrame({'start_time': start_times, 'end_time': end_times,\n 'wait_time': wait_times, 'state': wait_state,\n 'min_waits': min_waits, 'max_waits': max_waits,\n 'wait_type': wait_type})\n df.index.name = 'rank_order'\n df['window_size'] = times[-1] - times[0]\n return df", "def test_update_state1(self):\n pass", "def test_setting_state_parallel(self):\n no_replicates = 25\n\n replicate(experiment, no_replicates, parallel=True, no_processes=2)\n for i in range(no_replicates):\n self.assertIn('result', state[SUBSTATE_KEY_PATTERN % i])\n self.assertEqual(state[SUBSTATE_KEY_PATTERN % i]['result'], \"bla\")", "def test_split_cell_sets_new_tier_level(mock_amg):\n\n mock_amg.cells[0].split()\n\n assert mock_amg.cells[-4].tier == 1\n assert mock_amg.cells[-3].tier == 1\n assert mock_amg.cells[-2].tier == 1\n assert mock_amg.cells[-1].tier == 1\n\n mock_amg.cells[-1].split()\n assert mock_amg.cells[-4].tier == 2\n assert mock_amg.cells[-3].tier == 2\n assert mock_amg.cells[-2].tier == 2\n assert mock_amg.cells[-1].tier == 2", "def test_solo_cell():\n cell = c6.Cell(loc=[1, 1])\n for i in range(10):\n cell.step()", "def test_update_state2(self):\n pass", "def test_ThinData(self):\n for split in self.splits:\n y_data, x_data = data_process.thinData(y_test, xdim_test, split)\n oldStep = float(xdim_test[1]) - float(xdim_test[0])\n newStep = float(x_data[1]) - float(x_data[0])\n self.assertAlmostEqual(newStep/oldStep,split)\n self.assertTrue(len(y_data) == len(x_data))", "def th_cell_diff(th_state, time, d):\n assert d[\"alpha_int\"] < d[\"alpha\"]\n \n # divide array into cell states\n tnaive = th_state[:d[\"alpha_int\"]]\n tint = th_state[d[\"alpha_int\"]:d[\"alpha\"]]\n teff = th_state[d[\"alpha\"]:]\n \n assert len(tnaive)+len(tint)+len(teff) == len(th_state)\n tnaive = np.sum(tnaive)\n tint = np.sum(tint)\n teff = np.sum(teff)\n \n # IL2 production\n il2_producers = tnaive+tint\n il2_consumers = teff+tint \n conc_il2 = d[\"rate_il2\"]*il2_producers/(d[\"K_il2\"]+il2_consumers)\n \n # IL7 production\n il7_consumers = il2_consumers\n conc_il7 = d[\"rate_il7\"] / (d[\"K_il2\"]+il7_consumers)\n \n # apply feedback on rate beta\n fb_ifn = 0\n if d[\"fb_ifn\"] != 0:\n conc_ifn = d[\"rate_ifn\"]*(il2_producers)\n fb_ifn = (d[\"fb_ifn\"]*conc_ifn**3)/(conc_ifn**3+d[\"K_ifn\"]**3)\n \n beta = (fb_ifn+1)*d[\"beta\"] \n beta_p = d[\"beta_p\"] \n rate_death = d[\"d_eff\"]\n\n # check homeostasis criteria\n if d[\"crit\"] == False:\n update_t0(d, time, conc_il2, conc_il7)\n elif d[\"death_mode\"] == False:\n beta_p = beta_p*np.exp(-d[\"decay_p\"]*(time-d[\"t0\"]))\n else:\n rate_death = rate_death*np.exp(0.1*(time-d[\"t0\"]))\n \n # differentiation \n dt_state = diff_effector_new(th_state, teff, d, beta, rate_death, beta_p)\n \n \n return 
dt_state", "def check_sim(self):\n if self.index % 5 == 0:\n print (\"Timesteps Completed: {} out of {}\".format(self.index, self.timesteps))\n if self.index == self.timesteps:\n print (\"Simulation Completed\")\n self.end_simulation()\n self.index += 1", "def single_high_a_state(\n shape=(50, 20),\n time_step=0.01,\n num_changed_states=1,\n number_timesteps=3000,\n snap_shot_rate=100,\n initial_value=0.14\n):\n time_array = np.arange(0, number_timesteps * time_step, time_step)\n\n deviation_a = np.zeros(shape)\n deviation_b = np.zeros(shape)\n\n for _ in range(num_changed_states):\n index = np.random.randint(shape[0]), np.random.randint(shape[1])\n while deviation_a[index] > 0:\n index = np.random.randint(shape[0]), np.random.randint(1)\n deviation_a[index] = initial_value\n\n for num, _ in enumerate(time_array):\n deviation_a_update, _ = react_diff(\n deviation_a,\n deviation_b,\n diffusion_coef=1.,\n dt=time_step,\n is_1d=False\n )\n deviation_b_update, _ = react_diff(\n deviation_a,\n deviation_b,\n diffusion_coef=3.,\n is_a_substance=False,\n dt=time_step,\n is_1d=False\n )\n deviation_a = deviation_a_update\n deviation_b = deviation_b_update\n\n plt.figure(1)\n plt.xlabel('X direction')\n plt.ylabel('Y direction')\n plt.title('Two dimensional reaction diffusion system w/ \\n'\n 'single high value for species A. Plot species A')\n\n plt.figure(2)\n plt.xlabel('X direction')\n plt.ylabel('Y direction')\n plt.title('Two dimensional reaction diffusion system w/ \\n'\n 'single high value for species A. Plot species B')\n if num % snap_shot_rate == 0:\n plt.figure(1)\n plt.imshow(deviation_a)\n plt.pause(0.1)\n plt.draw()\n\n plt.figure(2)\n plt.imshow(deviation_b)\n plt.pause(0.1)\n plt.draw()\n\n plt.show()", "def random_state(\n shape=(50, 50),\n time_step=0.01,\n number_timesteps=3000,\n rand_upper_bound=0.1,\n snap_shot_rate=100\n):\n time_array = np.arange(0, number_timesteps * time_step, time_step)\n\n deviation_a = np.random.rand(shape[0], shape[1]) * rand_upper_bound\n deviation_b = np.random.rand(shape[0], shape[1]) * rand_upper_bound\n\n for num, _ in enumerate(time_array):\n deviation_a_update, _ = react_diff(\n deviation_a,\n deviation_b,\n diffusion_coef=1.,\n dt=time_step,\n is_1d=False\n )\n deviation_b_update, _ = react_diff(\n deviation_a,\n deviation_b,\n diffusion_coef=3.,\n is_a_substance=False,\n dt=time_step,\n is_1d=False\n )\n deviation_a = deviation_a_update\n deviation_b = deviation_b_update\n\n plt.xlabel('X direction')\n plt.ylabel('Y direction')\n plt.title('Two dimensional reaction diffusion system w/ random start state')\n if num % snap_shot_rate == 0:\n plt.imshow(deviation_a)\n plt.pause(0.1)\n plt.draw()\n\n plt.show()", "def consecutive_cells(self) -> bool:\n if self._env == \"lab\":\n _save_notebook()\n elif self._env != \"test\":\n logger.info(\"Save the notebook before checking for consecutiveness.\")\n nb = read_notebook(self._nb_path)\n consecutiveness = check_consecutiveness(\n nb, calling_statement=\".live.consecutive_cells\"\n )\n return consecutiveness", "def test_dataset_scenario_generation_full_outside1(self):\n params = ParameterServer()\n\n map_filename = os.path.join(os.path.dirname(__file__), \"data/DR_DEU_Merging_MT_v01_shifted.xodr\")\n track_filename = os.path.join(os.path.dirname(__file__), \"data/interaction_dataset_DEU_Merging_dummy_track_outside.csv\")\n\n params[\"Scenario\"][\"Generation\"][\"InteractionDatasetScenarioGenerationFull\"][\"MapFilename\"] = map_filename\n 
params[\"Scenario\"][\"Generation\"][\"InteractionDatasetScenarioGenerationFull\"][\"TrackFilenameList\"] = [track_filename]\n params[\"Scenario\"][\"Generation\"][\"InteractionDatasetScenarioGenerationFull\"][\"StartingOffsetMs\"] = 0\n\n scenario_generation = InteractionDatasetScenarioGenerationFull(\n params=params, num_scenarios=1)\n\n scenario = scenario_generation.get_scenario(0)\n self.assertAlmostEqual(scenario.eval_agent_ids, [1])\n\n world_state = scenario.GetWorldState()\n agent11 = world_state.GetAgent(1)\n agent12 = world_state.GetAgent(2)\n agent13 = world_state.GetAgent(3)\n\n self.assertAlmostEqual(agent11.first_valid_timestamp, 0.0)\n self.assertAlmostEqual(agent12.first_valid_timestamp, 0.0)\n self.assertNotEqual(agent13.first_valid_timestamp, 0.0)\n \n # agent13 should not be valid at the beginning, as he is outside of map\n world_state.time = 0\n self.assertEqual(isinstance(agent11, Agent), True)\n self.assertEqual(agent11.IsValidAtTime(world_state.time), True)\n self.assertEqual(agent11.InsideRoadCorridor(), True)\n \n self.assertEqual(isinstance(agent12, Agent), True)\n self.assertEqual(agent12.IsValidAtTime(world_state.time), True)\n self.assertEqual(agent12.InsideRoadCorridor(), True)\n\n self.assertEqual(isinstance(agent13, Agent), True)\n self.assertEqual(agent13.IsValidAtTime(world_state.time), False)\n # as we use only state once it's in map, this will be true, although the time step is not valid yet\n self.assertEqual(agent13.InsideRoadCorridor(), True)\n\n # agent13 should not be valid at the beginning, as he is outside of map\n world_state.Step(0.05)\n\n self.assertEqual(isinstance(agent11, Agent), True)\n self.assertEqual(agent11.IsValidAtTime(world_state.time), True)\n self.assertEqual(agent11.InsideRoadCorridor(), True)\n \n self.assertEqual(isinstance(agent12, Agent), True)\n self.assertEqual(agent12.IsValidAtTime(world_state.time), True)\n self.assertEqual(agent12.InsideRoadCorridor(), True)\n\n self.assertEqual(isinstance(agent13, Agent), True)\n self.assertEqual(agent13.IsValidAtTime(world_state.time), False)\n # as we use only state once it's in map, this will be true, although the time step is not valid yet\n self.assertEqual(agent13.InsideRoadCorridor(), True)\n\n self.assertEqual(list(world_state.agents_valid.keys()), [1,2])\n\n # agent13 should be valid at some point\n world_state.Step(agent13.first_valid_timestamp)\n world_state.Step(0.01) # agent13.IsValidAtTime() uses previous time stamp, therefore we increment it one more step\n\n self.assertEqual(isinstance(agent11, Agent), True)\n self.assertEqual(agent11.IsValidAtTime(world_state.time), True)\n self.assertEqual(agent11.InsideRoadCorridor(), True)\n \n self.assertEqual(isinstance(agent12, Agent), True)\n self.assertEqual(agent12.IsValidAtTime(world_state.time), True)\n self.assertEqual(agent12.InsideRoadCorridor(), True)\n\n self.assertEqual(isinstance(agent13, Agent), True)\n self.assertEqual(agent13.IsValidAtTime(world_state.time), True)\n self.assertEqual(agent13.InsideRoadCorridor(), True)\n\n self.assertEqual(list(world_state.agents_valid.keys()), [1,2,3])", "def test_shape_interval(self):\n interval = 0.5\n M = simulation.StateMonitor(self.G, ['a', 'v', 'b'], interval=interval)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n\n nsteps = int_r(self.t_max/interval)\n self.assertEqual(M.v.shape, (self.N, nsteps))\n self.assertEqual(M.a.shape, (2, nsteps))\n self.assertEqual(M.b.shape, (self.N, nsteps))", "def test_update_state4(self):\n pass", "def 
epidemic_finish(states, iteration):\n return np.sum(states) == 0 and iteration > 10", "def test_split_east_shares_CorrWindow(mock_amg):\n\n # split the central cell\n mock_amg.cells[4].split()\n # split the easterly cell\n mock_amg.cells[5].split()\n\n cw_old = mock_amg.cells[4].children['br'].tr_win\n cw_new = mock_amg.cells[5].children['bl'].tl_win\n\n assert cw_old is cw_new", "def check_status(self, base):\n change = False\n # Trigger intensification\n if self.curr_i == self.I:\n self.curr_i = 0\n base = self.search_intensification()\n change = True\n # Trigger diversification\n elif self.curr_d == self.D:\n self.curr_d = 0\n base = self.search_diversification()\n change = True\n # Trigger step reduction\n elif self.curr_r == self.R:\n self.curr_r = 0\n # Start from best point found so far\n base = self.MTM[[-1], :-1].T\n self.update_STM(base)\n self.update_LTM(base)\n self.step = self.step_red * self.step\n\n if change:\n curr_obj = self.obj_wrap(base)\n self.update_MTM(base, curr_obj)\n self.update_STM(base)\n self.update_LTM(base)\n self.bases = np.block([[self.bases],[base.T, curr_obj]])\n\n return base", "def test_transfer_to_final_timeperiod_state_in_progress(self):\n when(self.time_table_mocked).can_finalize_job_record(any(str), any(Job)).thenReturn(True)\n uow_dao_mock = mock(UnitOfWorkDao)\n when(uow_dao_mock).get_one(any()).thenReturn(\n create_unit_of_work(PROCESS_UNIT_TEST, 1, 1, None, unit_of_work.STATE_PROCESSED))\n self.pipeline_real.uow_dao = uow_dao_mock\n\n self.pipeline_real.insert_uow = then_return_uow\n pipeline = spy(self.pipeline_real)\n\n job_record = get_job_record(job.STATE_IN_PROGRESS,\n TEST_PRESET_TIMEPERIOD,\n PROCESS_SITE_HOURLY)\n\n pipeline.manage_pipeline_for_process(job_record.process_name, job_record)\n verify(self.time_table_mocked, times=1). 
\\\n update_job_record(any(str), any(Job), any(UnitOfWork), any(str))\n # verify(pipeline, times=1).\\\n # _compute_and_transfer_to_final_run(any(str), any(str), any(str), any(Job))\n # verify(pipeline, times=1).\\\n # _process_state_final_run(any(str), any(Job))", "def updateState(self):\n\n if ('cutting' in self.step_ops) and (self.cut_state.user_cutting):\n self.step_ops['cutting'] = True\n \n if ('cooking' in self.step_ops) and (self.cut_state.user_cooking):\n self.step_ops['cooking'] = True\n\n # TODO: add the rest of the operations\n\n advance = True\n\n # Check if ALL operations are complete\n for op in self.step_ops:\n if self.step_ops[op] == False:\n advance = False\n break\n\n if advance:\n self.nextStep()", "def test_update_state3(self):\n pass", "def goal_test(state): \n size = len(state)\n for i in range (size):\n for j in range (size):\n if state[i][j] != i*size + j:\n return False \n return True", "def test_t(self):\n assert np.isclose(self.stepper.t, self.final_t)", "def test_time(self):\n M = simulation.StateMonitor(self.G, 'v')\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n self.assertTrue(np.allclose(M.t, np.arange(0, self.t_max, self.dt)))", "def goal_test(self, state, goal_state):\r\n for i in range(3):\r\n for j in range(3):\r\n if state[i][j] != goal_state[i][j]:\r\n return False\r\n return True", "def test_get_field_state_comparisons(self):\r\n comparison_groupings = get_field_state_comparisons(\r\n self.dist_matrix_header, self.dist_matrix, self.mapping_header,\r\n self.mapping, self.field, ['Control'])\r\n expected = {'Fast': {'Control': [0.72899999999999998,\r\n 0.80000000000000004, 0.72099999999999997, 0.76500000000000001,\r\n 0.77600000000000002, 0.74399999999999999, 0.749,\r\n 0.67700000000000005, 0.73399999999999999, 0.77700000000000002,\r\n 0.73299999999999998, 0.72399999999999998, 0.69599999999999995,\r\n 0.67500000000000004, 0.65400000000000003, 0.69599999999999995,\r\n 0.73099999999999998, 0.75800000000000001, 0.73799999999999999,\r\n 0.73699999999999999]}}\r\n self.assertDictEqual(comparison_groupings, expected)\r\n\r\n comparison_groupings = get_field_state_comparisons(\r\n self.dist_matrix_header, self.dist_matrix, self.mapping_header,\r\n self.mapping, self.field, ['Fast'])\r\n expected = {'Control': {'Fast': [0.72899999999999998,\r\n 0.80000000000000004, 0.72099999999999997, 0.76500000000000001,\r\n 0.77600000000000002, 0.74399999999999999, 0.749,\r\n 0.67700000000000005, 0.73399999999999999, 0.77700000000000002,\r\n 0.73299999999999998, 0.72399999999999998, 0.69599999999999995,\r\n 0.67500000000000004, 0.65400000000000003, 0.69599999999999995,\r\n 0.73099999999999998, 0.75800000000000001, 0.73799999999999999,\r\n 0.73699999999999999]}}\r\n self.assertDictEqual(comparison_groupings, expected)", "def event_m20_11_4000020():\n \"\"\"State 0,2: [Lib] [Preset] Navigation mesh switching by flag judgment_SubState\"\"\"\n assert event_m20_11_x57(z68=6000020, z69=0, z70=2, z71=211000030, z72=0)\n \"\"\"State 1: Finish\"\"\"\n EndMachine()", "def __init__(self):\n \"\"\" action_ space : pick up location , Drop location\n state_space : location , time (hours) , day\n state_init : random pick from the state_space \"\"\"\n self.action_space = [(i,j) for i in range(m) for j in range(m) if i!=j or i==0]\n # Total states (Xi Tj Dk)\n self.state_space = [[x, y, z] for x in range(m) for y in range(t) for z in range(d)]\n # random Initialize of state (location, hours, day)\n self.state_init = random.choice(self.state_space)\n # Start the 
first round\n self.reset()", "def timestep(self):\n self.survive()\n self.move()\n self.set_survival_probability(self.vital_var)\n self.reproduce()\n # self.save_frame()\n if self.time % self.save_state_frequency == 0:\n self.save_state(self.state_file.format(self.time))\n if self.time % self.save_stats_frequency == 0:\n self.write_stats()\n # self.connected_component()\n self.time += 1\n\n if self.pop_size > 0:\n return True\n else:\n return False", "def is_fixed_state( previous_live, live_cells ):\n fixed = False\n if previous_live[0].size == live_cells[0].size:\n if previous_live[1].size == live_cells[1].size:\n if (previous_live[0]==live_cells[0]).all():\n if (previous_live[1]==live_cells[1]).all():\n fixed = True\n return fixed", "def test_retry_state_in_progress(self):\n when(self.time_table_mocked).can_finalize_job_record(any(str), any(Job)).thenReturn(True)\n uow_dao_mock = mock(UnitOfWorkDao)\n when(uow_dao_mock).get_one(any()).thenReturn(\n create_unit_of_work(PROCESS_UNIT_TEST, 1, 1, None, unit_of_work.STATE_PROCESSED))\n self.pipeline_real.uow_dao = uow_dao_mock\n\n self.pipeline_real.insert_uow = then_raise\n self.pipeline_real.recover_from_duplicatekeyerror = override_recover_function\n pipeline = spy(self.pipeline_real)\n\n job_record = get_job_record(job.STATE_IN_PROGRESS,\n TEST_PRESET_TIMEPERIOD,\n PROCESS_SITE_HOURLY)\n\n pipeline.manage_pipeline_for_process(job_record.process_name, job_record)\n verify(self.time_table_mocked, times=1). \\\n update_job_record(any(str), any(Job), any(UnitOfWork), any(str))\n # verify(pipeline, times=1).\\\n # _compute_and_transfer_to_final_run(any(str), any(str), any(str), any(Job))\n # verify(pipeline, times=1).\\\n # _process_state_final_run(any(str), any(Job))", "def _step(self, board, elapsedTime):\n\t\tpass", "def test_2(self):\n for _ in range(33):\n\n # Create grid of admissible state space values.\n num_edu_start = np.random.choice(range(1, 3))\n num_periods = np.random.randint(1, 15)\n num_types = np.random.randint(1, 3)\n\n edu_spec = {}\n edu_spec[\"start\"] = np.random.choice(\n range(1, 10), size=num_edu_start, replace=False\n ).tolist()\n edu_spec[\"max\"] = max(edu_spec[\"start\"]) + np.random.randint(1, 5)\n min_idx = edu_spec[\"max\"] + 1\n\n # FORTRAN\n base_args = (num_periods, num_types)\n\n state_space = StateSpace(*base_args, edu_spec[\"start\"], edu_spec[\"max\"])\n\n py_a, py_c, _, _ = state_space._get_fortran_counterparts()\n py_b = state_space.states_per_period\n py_d = py_b.max()\n\n fort_a, fort_b, fort_c, fort_d = fort_debug.wrapper_create_state_space(\n *base_args, edu_spec[\"start\"], edu_spec[\"max\"], min_idx\n )\n\n # Ensure equivalence\n rslts = [[fort_a, py_a], [fort_b, py_b], [fort_c, py_c], [fort_d, py_d]]\n for obj in rslts:\n # Slice Fortran output to shape of Python output.\n if isinstance(obj[0], np.ndarray):\n obj[0] = obj[0][tuple(map(slice, obj[1].shape))]\n\n assert_allclose(obj[0], obj[1])\n\n for _ in range(100):\n\n # Draw random request for testing purposes\n num_covars = np.random.randint(2, 10)\n num_agents = np.random.randint(100, 1000)\n tiny = np.random.normal(size=num_agents)\n beta = np.random.normal(size=num_covars)\n\n # Generate sample\n exog = np.random.sample((num_agents, num_covars))\n exog[:, 0] = 1\n endog = np.dot(exog, beta) + tiny\n\n # Run OLS\n beta_result = ols(y=endog, x=exog)\n\n # Check parameters\n py = beta_result\n f90 = fort_debug.wrapper_get_coefficients(\n endog, exog, num_covars, num_agents\n )\n assert_almost_equal(py, f90)\n\n # Check prediction\n py = 
exog.dot(beta_result)\n f90 = fort_debug.wrapper_point_predictions(exog, f90, num_agents)\n assert_almost_equal(py, f90)", "def test_SetMultipleMovingLoadsConfigurationCombined(self):\n\n # create nodes\n second_coord = [1, 0, 0.0]\n self.mp.CreateNewNode(1,0.0,0.0,0.0)\n self.mp.CreateNewNode(2,second_coord[0],second_coord[1],0.0)\n\n # create condition\n self.mp.CreateNewCondition(\"MovingLoadCondition2D2N\", 1, [1,2], self.mp.GetProperties()[1])\n\n parameters = self.base_parameters\n parameters.AddVector(\"configuration\", [-0.25, 0, 0.25])\n\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.TIME, 0)\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.DELTA_TIME, 0.25)\n\n process = GMA.SetMultipleMovingLoadsProcess(self.mp, parameters)\n conditions = []\n conditions.append(self.cmp.GetCondition(2))\n conditions.append(self.cmp.GetCondition(3))\n conditions.append(self.cmp.GetCondition(4))\n\n # initialise and set load\n process.ExecuteInitialize()\n process.ExecuteInitializeSolutionStep()\n\n\n # set load on node\n all_rhs = []\n for cond in conditions:\n # initialise matrices\n lhs = KratosMultiphysics.Matrix(0, 0)\n rhs = KratosMultiphysics.Vector(0)\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n all_rhs.append(list(rhs))\n\n\n self.checkRHS(all_rhs[0], [0.0, 0.0, 0.0, 0.0])\n self.checkRHS(all_rhs[1], [0.0, -2.0, 0.0, 0.0])\n self.checkRHS(all_rhs[2], [0.0, -1.5, 0.0, -0.5])\n\n # move load within first element\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n # check if interpolation is done correctly\n all_rhs = []\n for cond in conditions:\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n all_rhs.append(list(rhs))\n\n self.checkRHS(all_rhs[0], [0.0, -2.0, 0.0, 0.0])\n self.checkRHS(all_rhs[1], [0.0, -1.5, 0.0, -0.5])\n self.checkRHS(all_rhs[2], [0.0, -1.0, 0.0, -1.0])", "def test_preset_timeperiod_state_in_progress(self):\n when(self.time_table_mocked).can_finalize_job_record(any(str), any(Job)).thenReturn(True)\n uow_dao_mock = mock(UnitOfWorkDao)\n when(uow_dao_mock).get_one(any()).thenReturn(create_unit_of_work(PROCESS_UNIT_TEST, 0, 1, None))\n self.pipeline_real.uow_dao = uow_dao_mock\n\n self.pipeline_real.insert_uow = then_return_uow\n pipeline = spy(self.pipeline_real)\n\n job_record = get_job_record(job.STATE_IN_PROGRESS,\n TEST_PRESET_TIMEPERIOD,\n PROCESS_SITE_HOURLY)\n\n pipeline.manage_pipeline_for_process(job_record.process_name, job_record)\n verify(self.time_table_mocked, times=0). 
\\\n update_job_record(any(str), any(Job), any(UnitOfWork), any(str))\n # verify(pipeline, times=1).\\\n # _compute_and_transfer_to_final_run(any(str), any(str), any(str), any(Job))\n # verify(pipeline, times=0).\\\n # _process_state_final_run(any(str), any(Job))", "def test_update_state(self):\n pass", "def update_cells(self, state):\n width = WIDTH / CELL_SIZE\n height = HEIGHT / CELL_SIZE\n\n for index in range(0, width * height):\n if state[index] != self.get_state(index):\n self.toggle_color(index)", "def loop_fn(time, cell_output, cell_state, loop_state, emit_ta):\n \n if cell_output is None: # time == 0\n next_cell_state = initial_state\n emit_output= tf.ones(tf.shape(initial_state[1])[:1], dtype=tf.int32) * tf.constant(-1) #(batch_size)\n next_input = tf.squeeze(self.sos, [1])\n elements_finished = tf.logical_and(tf.cast(emit_output, dtype=tf.bool), False)\n \n else:\n \n next_cell_state = cell_state\n decoder_outputs = tf.expand_dims(cell_output, 1) #(batch_size, 1, hidden_size)\n encoder_outputs_reshape = tf.reshape(encoder_outputs, shape=(-1, self.config.hidden_size)) #(batch_size*time_steps, hidden_size)\n decoder_outputs_reshape = tf.reshape(decoder_outputs, shape=(-1, self.config.hidden_size)) #(batch_size*1, hidden_size)\n encoder_outputs_linear_reshape = tf.nn.rnn_cell._linear(encoder_outputs_reshape, output_size=self.config.hidden_size, \n bias=False, scope='Ptr_W1') #(b_sz*tstps_en, h_sz)\n decoder_outputs_linear_reshape = tf.nn.rnn_cell._linear(decoder_outputs_reshape, output_size=self.config.hidden_size, \n bias=False, scope='Ptr_W2') #(b_sz*1, h_sz)\n encoder_outputs_linear = tf.reshape(encoder_outputs_linear_reshape, tf.shape(encoder_outputs))\n decoder_outputs_linear = tf.reshape(decoder_outputs_linear_reshape, tf.shape(decoder_outputs))\n \n encoder_outputs_linear_expand = tf.expand_dims(encoder_outputs_linear, 1) #(b_sz, 1, tstp_en, h_sz)\n decoder_outputs_linear_expand = tf.expand_dims(decoder_outputs_linear, 2) #(b_sz, 1, 1, h_sz)\n \n after_add = tf.tanh(encoder_outputs_linear_expand + decoder_outputs_linear_expand) #(b_sz, 1, tstp_en, h_sz)\n \n after_add_reshape = tf.reshape(after_add, shape=(-1, self.config.hidden_size))\n \n after_add_linear_reshape = tf.nn.rnn_cell._linear(after_add_reshape, output_size=1, #(b_sz*1*tstp_en, 1)\n bias=False, scope='Ptr_v')\n after_add_linear = tf.reshape(after_add_linear_reshape, shape=(-1, tstps_en)) #(b_sz, tstp_en)\n en_length_mask = tf.sequence_mask(self.encoder_tstps, #(b_sz, tstp_en)\n maxlen=tstps_en, dtype=tf.bool)\n\n \"\"\"mask out already hitted ids\"\"\" \n hit_ids = tf.cond(emit_ta.size() > 0, lambda: emit_ta.pack(), lambda: tf.ones(shape=[1, batch_size], dtype=tf.int32)*-1) #(to_cur_tstp, b_sz)\n masks = tf.one_hot(hit_ids, depth=tstps_en, on_value=True, off_value=False) #(to_cur_tstp, b_sz, tstp_en)\n masks = tf.reduce_any(masks, reduction_indices=[0]) #(b_sz, tstp_en)\n hit_masks = tf.logical_not(masks)\n\n mask = tf.logical_and(en_length_mask, hit_masks)\n logits = tf.select(mask, after_add_linear,\n tf.ones_like(after_add_linear) * (-np.Inf)) # shape(b_sz, tstp_en)\n\n emit_output = tf.arg_max(logits, dimension=1) #(batch_size)\n emit_output = tf.cast(emit_output, dtype=tf.int32)\n \n bool_mask = tf.one_hot(emit_output, depth=tstps_en, on_value=True, off_value=False) #(b_sz, tstps_en)\n bool_mask = tf.reshape(bool_mask, shape=(batch_size, tstps_en))\n next_input = tf.boolean_mask(encoder_inputs, bool_mask) #(b_sz, emb_sz)\n \n elements_finished = tf.equal(emit_output, 0) #(batch_size)\n elements_finished = 
tf.reshape(elements_finished, (-1,))\n \n elements_finished = tf.logical_or(elements_finished, (time >= self.config.num_steps))\n next_loop_state = loop_state\n return (elements_finished, next_input, next_cell_state,\n emit_output, next_loop_state)", "def test_state_after_failure(self):\n pass", "def test_steps(self, model):\r\n model.fs.unit.initialize()\r\n\r\n # Add disturbances\r\n for t in model.fs.time:\r\n if 300 <= t < 600:\r\n model.fs.unit.shell_inlet.temperature[t].fix(288.15 - 10)\r\n elif 600 <= t < 900:\r\n model.fs.unit.shell_inlet.temperature[t].fix(288.15)\r\n elif 900 <= t < 1200:\r\n model.fs.unit.shell_inlet.temperature[t].fix(288.15 + 10)\r\n elif t >= 1200:\r\n model.fs.unit.shell_inlet.temperature[t].fix(288.15)\r\n\r\n # Transient solution\r\n solver.solve(model)\r\n\r\n times = [0, 300, 600, 900, 1200, 1500]\r\n sco2_exp = [305.2, 304.9, 305.1, 306.5, 305.7, 305.2]\r\n air_exp = [370.4, 373.1, 370.3, 365.9, 370.7, 370.4]\r\n wall_exp = [339.4, 338.7, 339.1, 340.7, 339.9, 339.4]\r\n\r\n self.check_temperatures(model, times, sco2_exp, air_exp, wall_exp)", "def test_split_south_shares_CorrWindow(mock_amg):\n\n # split the central cell\n mock_amg.cells[4].split()\n # split the southerly cell\n mock_amg.cells[1].split()\n\n cw_old = mock_amg.cells[4].children['br'].bl_win\n cw_new = mock_amg.cells[1].children['tr'].tl_win\n\n assert cw_old is cw_new", "def test_send_state_event_nonoverwriting(self) -> None:\n\n self._perform_background_initial_update()\n\n u1 = self.register_user(\"u1\", \"pass\")\n u1token = self.login(\"u1\", \"pass\")\n r1 = self.helper.create_room_as(u1, tok=u1token)\n\n self.helper.send_state(\n r1, \"cat.hissing\", {\"value\": True}, tok=u1token, state_key=\"tabby\"\n )\n\n r1stats_ante = self._get_current_stats(\"room\", r1)\n assert r1stats_ante is not None\n\n self.helper.send_state(\n r1, \"cat.hissing\", {\"value\": False}, tok=u1token, state_key=\"moggy\"\n )\n\n r1stats_post = self._get_current_stats(\"room\", r1)\n assert r1stats_post is not None\n\n self.assertEqual(\n r1stats_post[\"current_state_events\"] - r1stats_ante[\"current_state_events\"],\n 1,\n )", "def validate_steps(self, arr_accepted_steps, update_arr_new_steps=True):\n self.agent.df_population['coord_x'] = self.agent.df_population['coord_x'] + \\\n arr_accepted_steps * (self.pos_x - self.agent.df_population['coord_x'])\n self.agent.df_population['coord_y'] = self.agent.df_population['coord_y'] + \\\n arr_accepted_steps * (self.pos_y - self.agent.df_population['coord_y'])\n self.agent.df_population['coord_z'] = self.agent.df_population['coord_z'] + \\\n arr_accepted_steps * (self.pos_z - self.agent.df_population['coord_z'])\n self.agent.df_population['direction_x'] = self.agent.df_population['direction_x'] + \\\n arr_accepted_steps * (self.dir_x - self.agent.df_population['direction_x'])\n self.agent.df_population['direction_y'] = self.agent.df_population['direction_y'] + \\\n arr_accepted_steps * (self.dir_y - self.agent.df_population['direction_y'])\n self.agent.df_population['direction_z'] = self.agent.df_population['direction_z'] + \\\n arr_accepted_steps * (self.dir_z - self.agent.df_population['direction_z'])\n if update_arr_new_steps:\n self.arr_new_steps = self.arr_new_steps & ~arr_accepted_steps", "def test_split_cell_adds_new_windows_correctly(mock_amg):\n\n # check the last 4 cells after splitting the bottom left and compare the\n # coordinate locations\n mock_amg.cells[0].split()\n\n new_bl = mock_amg.cells[-4]\n assert new_bl.multigrid is mock_amg\n assert 
new_bl.cw_list is mock_amg.windows\n assert new_bl.coordinates == [(0, 0), (32, 0), (32, 32), (0, 32)]\n\n new_br = mock_amg.cells[-3]\n assert new_br.multigrid is mock_amg\n assert new_br.cw_list is mock_amg.windows\n assert new_br.coordinates == [(32, 0), (64, 0), (64, 32), (32, 32)]\n\n new_tl = mock_amg.cells[-2]\n assert new_tl.multigrid is mock_amg\n assert new_tl.cw_list is mock_amg.windows\n assert new_tl.coordinates == [(0, 32), (32, 32), (32, 64), (0, 64)]\n\n new_tr = mock_amg.cells[-1]\n assert new_tr.multigrid is mock_amg\n assert new_tr.cw_list is mock_amg.windows\n assert new_tr.coordinates == [(32, 32), (64, 32), (64, 64), (32, 64)]", "def test_recomb(self):\n sol1, sol2 = [0,0,0,0],[1,1,1,1]\n hot_regions = [0,0,0,1] ##NOTE: sum(hot_regions) shouls always be 1\n rec_events = 1\n sol = list(d.recombine(sol1,sol2,rec_events,hot_regions))\n print(f\"recomb sol: {sol}\")\n self.assertTrue( (sol == [0,0,0,1]) or (sol == [1,1,1,0]) )", "def next_state_func(self, state, action, Time_matrix):\n next_state = []\n \n # Initialize various times\n total_time = 0\n pickup_time = 0 # time from current location to pickup location\n waiting_time = 0 # time if driver to refuse all requests\n drop_time = 0 # time from Pick-up point to drop point\n \n # getting the current location, time, day and request locations\n curr_loc = state[0]\n curr_time = state[1]\n curr_day = state[2]\n pickup_loc = action[0]\n drop_loc = action[1]\n \n # 1. driver refuse to requests\n # so wait time is 1 unit, next location is current location\n if ((pickup_loc== 0) and (drop_loc == 0)):\n waiting_time = 1\n next_loc = curr_loc\n \n # 2. cab is already at pick up point\n #if current cab position is same as pick up position\n elif (curr_loc == pickup_loc):\n drop_time = Time_matrix[curr_loc][drop_loc][curr_time][curr_day]\n \n # next location is the drop location\n next_loc = drop_loc\n # 3. 
cab is not at the pickup point\n else:\n # Driver is away to pickup point, he has to travel to pickup point first\n # time take to reach pickup point\n pickup_time = Time_matrix[curr_loc][pickup_loc][curr_time][curr_day]\n new_time, new_day = self.new_time_day(curr_time, curr_day, pickup_time)\n \n # we calculated pickup Time, now time taken to drop\n drop_time = Time_matrix[pickup_loc][drop_loc][new_time][new_day]\n next_loc = drop_loc\n\n # Calculate total time as sum of all durations\n total_time = (waiting_time + pickup_time + drop_time)\n next_time, next_day = self.new_time_day(curr_time, curr_day, total_time)\n \n # Construct next_state using the next_loc and the new time states.\n next_state = [next_loc, next_time, next_day]\n \n return next_state, waiting_time, pickup_time, drop_time", "def test_SetMultipleMovingLoadsMultipleConditions(self):\n\n #create nodes\n second_coord = [1.0, 0.0, 0.0]\n third_coord = [2.0, 0.0, 0.0]\n self.mp.CreateNewNode(1, 0.0, 0.0, 0.0)\n self.mp.CreateNewNode(2, second_coord[0],second_coord[1],second_coord[2])\n self.mp.CreateNewNode(3, third_coord[0], third_coord[1], third_coord[2])\n\n # create condition\n self.mp.CreateNewCondition(\"MovingLoadCondition2D2N\", 1, [1, 2], self.mp.GetProperties()[1])\n self.mp.CreateNewCondition(\"MovingLoadCondition2D2N\", 2, [2, 3], self.mp.GetProperties()[1])\n\n parameters = self.base_parameters\n\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.TIME,\n 0)\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.DELTA_TIME,\n 0.5)\n process = GMA.SetMultipleMovingLoadsProcess(self.mp,parameters)\n\n # get conditions\n conditions = []\n conditions.append(self.cmp.GetCondition(3))\n conditions.append(self.cmp.GetCondition(4))\n\n # initialize and set load\n process.ExecuteInitialize()\n process.ExecuteInitializeSolutionStep()\n\n # initialise matrices\n lhs = KratosMultiphysics.Matrix(0,0)\n rhs = KratosMultiphysics.Vector(0)\n\n # set load on node\n all_rhs = []\n for cond in conditions:\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n all_rhs.append(list(rhs))\n\n self.checkRHS(all_rhs[0], [0.0, -2.0, 0.0, 0.0])\n self.checkRHS(all_rhs[1], [0.0, 0.0, 0.0, 0.0])\n\n # move load within first element\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n # check if interpolation is done correctly\n all_rhs = []\n for cond in conditions:\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n all_rhs.append(list(rhs))\n\n self.checkRHS(all_rhs[0], [0.0, -1.0, 0.0, -1.0])\n self.checkRHS(all_rhs[1], [0.0, 0.0, 0.0, 0.0])\n\n # move load to element connection element\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n all_rhs = []\n for cond in conditions:\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n all_rhs.append(list(rhs))\n\n self.checkRHS(all_rhs[0], [0.0, 0.0, 0.0, -2.0])\n self.checkRHS(all_rhs[1], [0.0, 0.0, 0.0, 0.0])\n\n # move load to next element\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n all_rhs = []\n for cond in conditions:\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n all_rhs.append(list(rhs))\n\n self.checkRHS(all_rhs[0], [0.0, 0.0, 0.0, 0.0])\n self.checkRHS(all_rhs[1], [0.0, -1.0, 0.0, -1.0])", "def test_run_is_next_run(curent_time, state):\n date = datetime(2020,5,5,12,0)\n duration_in_minutes = 65\n run = Run(date, duration_in_minutes/60)\n\n assert run.is_next_run(curent_time) == state", "def _imputation_step(self, current_times, state):\r\n # Does 
not do anything special if we're jumping across a gap. More advanced\r\n # models, especially probabilistic ones, would want a special case that\r\n # depends on the gap size.\r\n return state", "def is_changed(self, new_grid):\n for row in range(self._grid_height):\n for col in range(self._grid_width):\n if self.get_tile(row,col) != new_grid[row][col]:\n return True\n return False", "def test_toggle_cell(self):\n self.cell.toggle_living()\n self.assertEqual(self.cell.is_living(), True)\n self.cell.toggle_living()\n self.assertEqual(self.cell.is_living(), False)", "def test_split_cell_splits_neighbours(mock_amg):\n\n # split the centre cell in the mock grid\n # this will create 4 more cells at tier 1\n mock_amg.cells[4].split()\n\n # now split the bottom right of these cells\n # this should force the east and south cells to also be split\n mock_amg.cells[4].children['br'].split()\n\n assert mock_amg.cells[5].has_children\n assert mock_amg.cells[1].has_children", "def life_step(state):\n\t# For every cell each live cell in any of the 8 neighbouring cells contributes 1 to the sum\n\t# Rolling matricies is periodic so this implements periodic boundary conditions\n\tnumberOfNeigbours = sum(np.roll(np.roll(state, i, axis=0), j, axis=1)\n\t\t\t\t\t\t for i in (-1,0,1) for j in (-1,0,1) if (i != 0 or j != 0))\n\n\t# Any live cell with fewer than two live neighbours dies, as if caused by under-population\n\tstate = np.where(numberOfNeigbours < 2, 0, state)\n\t# Any live cell with more than three live neighbours dies, as if by over-population\n\tstate = np.where(numberOfNeigbours > 3, 0, state)\n\t# Any dead cell with exactly three live neighbours becomes a live cell, as if by reproduction.\n\tstate = np.where(numberOfNeigbours == 3, 1, state)\n\n\treturn state", "def test_dataset_scenario_generation_full_late_behavior_overwritten(self):\n params = ParameterServer()\n\n map_filename = os.path.join(os.path.dirname(__file__), \"data/DR_DEU_Merging_MT_v01_shifted.xodr\")\n track_filename = os.path.join(os.path.dirname(__file__), \"data/interaction_dataset_DEU_Merging_dummy_track_late.csv\")\n\n params[\"Scenario\"][\"Generation\"][\"InteractionDatasetScenarioGenerationFull\"][\"MapFilename\"] = map_filename\n params[\"Scenario\"][\"Generation\"][\"InteractionDatasetScenarioGenerationFull\"][\"TrackFilenameList\"] = [track_filename]\n params[\"Scenario\"][\"Generation\"][\"InteractionDatasetScenarioGenerationFull\"][\"StartingOffsetMs\"] = 0\n\n scenario_generation = InteractionDatasetScenarioGenerationFull(\n params=params, num_scenarios=1)\n\n scenario = scenario_generation.get_scenario(0)\n world_state = scenario.GetWorldState()\n agent1 = world_state.GetAgent(1)\n agent2 = world_state.GetAgent(2)\n\n self.assertAlmostEqual(agent1.first_valid_timestamp, 0.0)\n self.assertAlmostEqual(agent2.first_valid_timestamp, 0.3)\n \n self.assertEqual(isinstance(agent1, Agent), True)\n self.assertEqual(agent1.IsValidAtTime(world_state.time), True)\n \n self.assertEqual(isinstance(agent2, Agent), True)\n self.assertEqual(agent2.IsValidAtTime(world_state.time), False)", "def is_winning(self, curr_state):\n rows = [[0,1,2], [3,4,5], [6,7,8]]\n columns = [[0,3,6], [1,4,7], [2,5,8]]\n diagonal = [[0,4,8], [2,4,6]]\n total_checks = rows + columns + diagonal\n for row in total_checks:\n sum = 0\n count = 0\n for pos in row:\n if np.isnan(curr_state[pos]):\n break\n else:\n sum = sum + curr_state[pos]\n count = count + 1\n if sum == 15 and count == 3:\n return True\n return False", "def perform_action(self, action):\r\n 
t_list = self.get_action_outcomes(self.current_state, action)\r\n new_state = t_list[np.argmax(np.random.multinomial(1, [t[0] for t in t_list]))][1]\r\n # print(len(self.trajectory), ':', self.current_state, '--', action ,'-->', new_state)\r\n self.current_state = new_state\r\n self.trajectory.append(new_state)\r\n return tuple(self.current_state) == tuple(self.end_state)", "def test_g1_perform_tick(self):\n config.NR_ROWS = 5\n config.NR_COLS = 5\n blinker = [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 1, 1, 1, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]\n new_gamefield = logic.perform_tick(blinker)\n\n self.assertEqual(new_gamefield, [\n [0, 0, 0, 0, 0],\n [0, 0, 1, 0, 0],\n [0, 0, 1, 0, 0],\n [0, 0, 1, 0, 0],\n [0, 0, 0, 0, 0],\n ])", "def test_dataset_scenario_generation_full_outside3(self):\n params = ParameterServer()\n\n map_filename = os.path.join(os.path.dirname(__file__), \"data/DR_DEU_Merging_MT_v01_shifted.xodr\")\n track_filename = os.path.join(os.path.dirname(__file__), \"data/interaction_dataset_DEU_Merging_dummy_track_outside.csv\")\n\n params[\"Scenario\"][\"Generation\"][\"InteractionDatasetScenarioGenerationFull\"][\"MapFilename\"] = map_filename\n params[\"Scenario\"][\"Generation\"][\"InteractionDatasetScenarioGenerationFull\"][\"TrackFilenameList\"] = [track_filename]\n params[\"Scenario\"][\"Generation\"][\"InteractionDatasetScenarioGenerationFull\"][\"StartingOffsetMs\"] = 0\n\n scenario_generation = InteractionDatasetScenarioGenerationFull(\n params=params, num_scenarios=3)\n\n scenario = scenario_generation.get_scenario(2)\n self.assertAlmostEqual(scenario.eval_agent_ids, [3])\n world_state = scenario.GetWorldState()\n agent31 = world_state.GetAgent(1)\n agent32 = world_state.GetAgent(2)\n agent33 = world_state.GetAgent(3)\n\n # they all should be valid at the beginning\n world_state.time = 0\n self.assertEqual(isinstance(agent31, Agent), True)\n self.assertEqual(agent31.IsValidAtTime(world_state.time), True)\n self.assertEqual(agent31.InsideRoadCorridor(), True)\n \n self.assertEqual(isinstance(agent32, Agent), True)\n self.assertEqual(agent32.IsValidAtTime(world_state.time), True)\n self.assertEqual(agent32.InsideRoadCorridor(), True)\n\n self.assertEqual(isinstance(agent33, Agent), True)\n self.assertEqual(agent33.IsValidAtTime(world_state.time), True)\n self.assertEqual(agent33.InsideRoadCorridor(), True)\n\n world_state.Step(0.05)\n self.assertEqual(len(world_state.agents_valid), 3)", "def test_state(\n size: Union[int, tuple],\n num_berries: int,\n number_steps: int,\n state_sizes: List[int] = [3, 5],\n) -> None:\n for state_size in state_sizes:\n game = Game(\n size,\n [0, 0],\n -1,\n 5,\n -5,\n 10,\n num_berries,\n berry_movement_probabilities=[0.5] * num_berries,\n state_size=state_size,\n )\n done = False\n i = 1\n print(f\"Beginning full board\\n{game.get_state(full=True)}\")\n print(f\"And the state\\n{game.get_state(state_size)}\")\n while not done and i < number_steps:\n action = random.choice(MOVEMENTS)\n print(f\"Action taken {action}\")\n state, reward, done = game.step(action)\n print(f\"Full board\\n{game.get_state(full=True)}\")\n print(f\"The state\\n{game.get_state(state_size)}\")\n i += 1", "def test_same_unique_crash_type_with_different_state(self):\n self.testcases[0].security_flag = False\n self.testcases[0].crash_type = 'Timeout'\n self.testcases[0].crash_state = 'abcdef'\n self.testcases[1].security_flag = False\n self.testcases[1].crash_type = 'Timeout'\n self.testcases[1].crash_state = 'abcde'\n\n for t in self.testcases:\n t.put()\n\n 
grouper.group_testcases()\n\n for index, t in enumerate(self.testcases):\n self.testcases[index] = data_handler.get_testcase_by_id(t.key.id())\n self.assertEqual(self.testcases[index].group_id, 0)\n self.assertTrue(self.testcases[index].is_leader)", "def test_goal(puzzle_state):\n \n x = puzzle_state.dimension\n final_state = []\n \n for i in range(x*x):\n final_state += [i]\n \n final_state_tuple = tuple(final_state)\n \n if puzzle_state.config == final_state_tuple:\n return True\n else:\n return False", "def Check(self):\n cleared = False\n while not cleared:\n for i in list(combinations([cell.Check() for cell in self.cells], 2)):\n # for i in list(combinations(zip(self.locations.x,self.locations.y,self.locations.length,self.locations.index),2)):\n x1 = i[0][0]\n y1 = i[0][1]\n r1 = i[0][2] / 2\n idx1 = i[0][3]\n x2 = i[1][0]\n y2 = i[1][1]\n r2 = i[1][2] / 2\n idx1 = i[0][3]\n idx2 = i[1][3]\n distance = (x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2)\n radii = (r1 + r2) * (r1 + r2)\n if distance == radii:\n cleared = True\n elif distance > radii:\n cleared = True\n else:\n if x1 > x2 and y1 > y2:\n if (\n x1 + r1 > 0\n and x1 + r1 < self.boundaries[0]\n and y1 + r1 > 0\n and y1 + r1 < self.boundaries[1]\n ):\n self.cells[idx1].x = x1 + r1 / 2\n self.cells[idx1].y = y1 + r1 / 2\n elif x1 > x2 and y1 < y2:\n if (\n x1 + r1 > 0\n and x1 + r1 < self.boundaries[0]\n and y1 - r1 > 0\n and y1 - r1 < self.boundaries[1]\n ):\n self.cells[idx1].x = x1 + r1 / 2\n self.cells[idx1].y = y1 - r1 / 2\n elif x1 < x2 and y1 > y2:\n if (\n x1 - r1 > 0\n and x1 - r1 < self.boundaries[0]\n and y1 + r1 > 0\n and y1 + r1 < self.boundaries[1]\n ):\n self.cells[idx1].x = x1 - r1 / 2\n self.cells[idx1].y = y1 + r1 / 2\n else:\n if (\n x1 - r1 > 0\n and x1 - r1 < self.boundaries[0]\n and y1 - r1 > 0\n and y1 - r1 < self.boundaries[1]\n ):\n self.cells[idx1].x = x1 - r1 / 2\n self.cells[idx1].y = y1 - r1 / 2\n _logger.debug(\n f\"Bumped from {x1 :.2e}, {y1 :.2e} to {self.cells[idx1].x :.2e}, {self.cells[idx1].y :.2e}\"\n )\n cleared = False\n return", "def _update_state(self, currentPhase, phasetime, time):\n # compute new state without registered action\n tToNearGreenPhase = self._get_toNearGreenPhase(currentPhase, phasetime, self.extended)\n\n if self.numbus > 0:\n # last available checkout for this intersection\n if self.numbus > 1:\n # bunch, use current time as last checkout\n last_available_checkout_time = time\n elif self.last_checkout_bus is None:\n # no checked out bus, assume perfect headway\n last_available_checkout_time = time - self.CONFIG['target_headway']\n else:\n last_available_checkout_time = self.last_checkout_bus.check_out_time\n # check in time of the last bus checked in\n last_check_in_time = self.bus_list[-1].check_in_time\n check_in_hdy = self.bus_list[-1].check_in_headway\n new_state = [last_available_checkout_time, last_check_in_time, check_in_hdy, self.numbus, self.allnumvel,\n tToNearGreenPhase]\n else:\n if self.last_checkout_bus:\n last_available_checkout_time = self.last_checkout_bus.check_out_time\n check_in_hdy = self.last_checkout_bus.check_in_headway\n last_check_in_time = self.last_checkout_bus.check_in_time\n new_state = [last_available_checkout_time, last_check_in_time, check_in_hdy, 0, self.allnumvel, tToNearGreenPhase]\n else:\n new_state = [0, 0, 0, 0, self.allnumvel, tToNearGreenPhase]\n\n self.state = new_state\n return", "def test_different_crash_type_with_similar_state(self):\n self.testcases[0].security_flag = False\n self.testcases[0].crash_type = 'Timeout'\n 
self.testcases[0].crash_state = 'abcdef'\n self.testcases[1].security_flag = False\n self.testcases[1].crash_type = 'TimeoutX'\n self.testcases[1].crash_state = 'abcde'\n\n for t in self.testcases:\n t.put()\n\n grouper.group_testcases()\n\n for index, t in enumerate(self.testcases):\n self.testcases[index] = data_handler.get_testcase_by_id(t.key.id())\n self.assertEqual(self.testcases[index].group_id, 0)\n self.assertTrue(self.testcases[index].is_leader)", "def test_SetMultipleMovingLoadsMultipleConditionsReversed(self):\n\n # create nodes\n second_coord = [1.0, 0.0, 0.0]\n third_coord = [2.0, 0.0, 0.0]\n self.mp.CreateNewNode(1, 0.0, 0.0, 0.0)\n self.mp.CreateNewNode(2, second_coord[0],second_coord[1],second_coord[2])\n self.mp.CreateNewNode(3, third_coord[0], third_coord[1], third_coord[2])\n\n # create condition\n self.mp.CreateNewCondition(\"MovingLoadCondition2D2N\", 1, [3, 2], self.mp.GetProperties()[1])\n self.mp.CreateNewCondition(\"MovingLoadCondition2D2N\", 2, [2, 1], self.mp.GetProperties()[1])\n\n parameters = self.base_parameters\n\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.TIME, 0)\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.DELTA_TIME, 0.5)\n process = GMA.SetMultipleMovingLoadsProcess(self.mp,parameters)\n\n # get conditions\n conditions = []\n conditions.append(self.cmp.GetCondition(3))\n conditions.append(self.cmp.GetCondition(4))\n\n # initialize and set load\n process.ExecuteInitialize()\n process.ExecuteInitializeSolutionStep()\n\n # initialise matrices\n lhs = KratosMultiphysics.Matrix(0,0)\n rhs = KratosMultiphysics.Vector(0)\n\n # calculate load\n all_rhs = []\n for cond in conditions:\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n all_rhs.append(list(rhs))\n\n self.checkRHS(all_rhs[0], [0.0, 0.0, 0.0, 0.0])\n self.checkRHS(all_rhs[1], [0.0, 0.0, 0.0, -2.0])\n\n # move load within first element\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n # check if interpolation is done correctly\n all_rhs = []\n for cond in conditions:\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n all_rhs.append(list(rhs))\n\n self.checkRHS(all_rhs[0], [0.0, 0.0, 0.0, 0.0])\n self.checkRHS(all_rhs[1], [0.0, -1.0, 0.0, -1.0])\n\n # move load to element connection element\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n # calculate load\n all_rhs = []\n for cond in conditions:\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n all_rhs.append(list(rhs))\n\n self.checkRHS(all_rhs[0], [0.0, 0.0, 0.0, -2.0])\n self.checkRHS(all_rhs[1], [0.0, 0.0, 0.0, 0.0])\n\n # move load to next element, also increase time step\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.DELTA_TIME, 0.75)\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n # calculate load\n all_rhs = []\n for cond in conditions:\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n all_rhs.append(list(rhs))\n\n self.checkRHS(all_rhs[0], [0.0, -1.5, 0.0, -0.5])\n self.checkRHS(all_rhs[1], [0.0, 0.0, 0.0, 0.0])", "def isSameState(self, oth: 'StateNode') -> bool:\n def sameElements(a: list, b: list):\n return len([1 for i, j in zip(a, b) if i == j]) == len(a) and len(a) == len(b)\n if self.table != oth.table:\n return False\n a_lin, a_col = self.state\n b_lin, b_col = oth.state\n return sameElements(a_lin, b_lin) and sameElements(a_col, b_col)", "def test_split_cell_south_sets_neighbours(mock_amg):\n\n mock_amg.cells[4].split() # middle cell\n mock_amg.cells[1].split() # south 
cell\n\n south = mock_amg.cells[1]\n north = mock_amg.cells[4]\n\n assert south.children['tl'].north == north.children['bl']\n assert south.children['tr'].north == north.children['br']\n assert north.children['bl'].south == south.children['tl']\n assert north.children['br'].south == south.children['tr']", "def _check_cells(self):\n for row_number in range(self.number_cells_y):\n for col_number in range(self.number_cells_x):\n alive_neighbours = self._get_neighbours(row_number,col_number)\n \n self.to_be_updated[row_number][col_number] = False\n if self.cells[row_number][col_number].get_status():\n if alive_neighbours < 2:\n self.to_be_updated[row_number][col_number] = True\n elif alive_neighbours > 3:\n self.to_be_updated[row_number][col_number] = True\n else:\n if alive_neighbours == 3:\n self.to_be_updated[row_number][col_number] = True", "def branch_precursor(state, time, d):\n assert d[\"alpha_IL2\"] < d[\"alpha1\"] and d[\"alpha_IL2\"] < d[\"alpha2\"]\n \n th0 = state[0]\n \n th1 = state[1:(d[\"alpha1\"]+d[\"alpha1_p\"]+1)]\n th2 = state[(d[\"alpha1\"]+d[\"alpha1_p\"]+1):]\n #print(len(state), len(th1))\n ### get all cytokine secreting cells \n th1_all = np.sum(th1[-d[\"alpha1_p\"]:])\n th2_all = np.sum(th2[-d[\"alpha2_p\"]:])\n \n t_eff = th1_all+th2_all\n t_il2 = np.sum(th1[:d[\"alpha_IL2\"]]) + np.sum(th2[:d[\"alpha_IL2\"]])\n\n ### calculate cytokine concentrations\n cyto_1 = d[\"beta_cyto_1\"]*th1_all + d[\"ifn_ext\"]\n cyto_2 = d[\"beta_cyto_2\"]*th2_all + d[\"il21_ext\"]\n \n conc_il2 = d[\"rate_il2\"]*t_il2/(d[\"K_il2\"]+t_eff)\n\n # compute feedbacks\n fb1 = d[\"fb_rate1\"]*cyto_1**3/(cyto_1**3+d[\"K_1\"]**3)\n fb2 = d[\"fb_rate2\"]*cyto_2**3/(cyto_2**3+d[\"K_2\"]**3)\n ### update differantiation rate\n beta1 = d[\"beta1\"]*(1+fb1)\n beta2 = d[\"beta2\"]*(1+fb2) \n \n ### calculate probability, note that these are adjusted to beta1 beta2 so that\n # they are not necessarily \\in (0,1)\n p1, p2 = get_prob(d, beta1, beta2, cyto_1, cyto_2)\n \n #print(beta1*p1_adj/(beta1*p1_adj+beta2))\n beta1_p = d[\"beta1_p\"]\n beta2_p = d[\"beta2_p\"]\n rate_death = d[\"d_eff\"] \n \n # check for homeostasis regulation\n if d[\"crit\"] == False:\n update_t0(d, time, conc_il2, t_eff)\n elif d[\"death_mode\"] == False:\n assert d[\"crit\"] == True \n beta1_p = beta1_p*np.exp(-d[\"decay_p\"]*(time-d[\"t0\"]))\n beta2_p = beta2_p*np.exp(-d[\"decay_p\"]*(time-d[\"t0\"]))\n\n else:\n rate_death = rate_death*np.exp(time-d[\"t0\"])\n\n # this is the actual differentiation where odes are computed \n dt_th1 = diff_precursor(th1, th0, d[\"alpha1\"], beta1, beta1_p, p1, rate_death, d)\n dt_th2 = diff_precursor(th2, th0, d[\"alpha2\"], beta2, beta2_p, p2, rate_death, d)\n dt_th0 = -(beta1*p1+beta2)*th0 \n dt_state = np.concatenate(([dt_th0], dt_th1, dt_th2))\n\n return dt_state", "def event_m20_11_6000():\n \"\"\"State 0,2: [Preset] Door of the living person_SubState\"\"\"\n assert event_m20_11_x77(z42=20110480, z43=600000)\n \"\"\"State 1: Finish\"\"\"\n EndMachine()", "def test_is_finished(self):\n experiment = Experiment(TasksMock())\n self.assertEquals(False, experiment.is_finished())\n for _ in range(0, 17):\n experiment.press_b_down(time.time())\n self.assertEquals(False, experiment.is_finished())\n experiment.press_b_up(time.time())\n self.assertEquals(False, experiment.is_finished())\n experiment.press_b_down(time.time())\n self.assertEquals(False, experiment.is_finished())\n experiment.press_b_up(time.time())\n self.assertEquals(True, experiment.is_finished())", "def calculateState (self):\r\n 
newState = 0\r\n # print (\"Inside state function the states DNs are: \\n\")\r\n # print (\"Before starting \\n\")\r\n self.stateDanglingNodes()\r\n #for i in range(len(self.metaSpikes)):\r\n # if self.metaSpikes[i].typeSpike == 1:\r\n # print (\"Meta atom number is: \" + str(self.atomNumber) + \"\\n\")\r\n \r\n insideMetState = []\r\n # To calculate the state we need to update every atom the metaatom consistrs off then see\r\n # the states of every dangling node in the metaspikes\r\n for i in range(len(self.metaSpikes)):\r\n if self.metaSpikes[i].typeSpike == 1:\r\n #print (\"Inside type 1 \\n\")\r\n #print (\"Number of type 1 nodes: \" + str(len(self.metaSpikes[i].danglingNodeList)) + \"\\n\")\r\n for j in range(len(self.metaSpikes[i].danglingNodeList)):\r\n insideMetState.append(self.metaSpikes[i].danglingNodeList[j].state)\r\n if self.metaSpikes[i].danglingNodeList[j].state == 1:\r\n # print (\"Adding one \\n\" )\r\n newState += 1\r\n else:\r\n # print (\"Subracting one \\n\")\r\n newState -= 1\r\n else:\r\n \r\n # print (\"Inside type 2 \\n\")\r\n # print (\"Number od type 1 tales: \" + str(len(self.metaSpikes[i].danglingTailList)) + \"\\n\")\r\n for j in range(len(self.metaSpikes[i].danglingTailList)):\r\n #print (\"Size of tail: \" + str(len(self.metaSpikes[i].danglingTailList[j].nodeList)) + \"\\n\")\r\n for k in range(len(self.metaSpikes[i].danglingTailList[j].nodeList)):\r\n insideMetState.append(self.metaSpikes[i].danglingTailList[j].nodeList[k].state)\r\n if self.metaSpikes[i].danglingTailList[j].nodeList[k].state == 1:\r\n newState += 1\r\n else:\r\n newState -= 1 \r\n \r\n # print (\"The state of analysed nodes: \\n\" + str(insideMetState) + \"\\n\")\r\n # print (\"The length of analysed nodes: \\n\" + str(len(insideMetState)) + \"\\n\")\r\n # print (\"The new state is: \" + str(newState) + \"\\n\") \r\n self.state = newState", "def testSuccessiveHalving(self):\n stats = self.default_statistics()\n sched, mock_runner = self.schedulerSetup(stats[\"max_trials\"])\n big_bracket = sched._state[\"bracket\"]\n cur_units = stats[str(stats[\"s_max\"])][\"r\"]\n # The last bracket will downscale 4 times\n for x in range(stats[\"brack_count\"] - 1):\n trials = big_bracket.current_trials()\n current_length = len(trials)\n for trl in trials:\n mock_runner._launch_trial(trl)\n\n # Provides results from 0 to 8 in order, keeping last one running\n for i, trl in enumerate(trials):\n action = sched.on_trial_result(\n mock_runner, trl, result(cur_units, i))\n if i < current_length - 1:\n self.assertEqual(action, TrialScheduler.PAUSE)\n mock_runner.process_action(trl, action)\n\n self.assertEqual(action, TrialScheduler.CONTINUE)\n new_length = len(big_bracket.current_trials())\n self.assertEqual(new_length, self.downscale(current_length, sched))\n cur_units += int(cur_units * sched._eta)\n self.assertEqual(len(big_bracket.current_trials()), 1)", "def test_custom_time(self):\n interval = 0.5\n M = simulation.StateMonitor(self.G, 'v', interval=interval)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n self.assertTrue(np.allclose(M.t, np.arange(0, self.t_max, interval)))", "def test_transition_state_statmech(self):\n job = job_list[-1]\n self.assertTrue(isinstance(job, StatMechJob))\n job.level_of_theory = self.level_of_theory\n job.frequencyScaleFactor = self.frequencyScaleFactor\n job.includeHinderedRotors = self.useHinderedRotors\n job.applyBondEnergyCorrections = self.useBondCorrections\n job.load()", "def test_update_team_state(self):\n pass", "def 
test_SetMultipleMovingLoads(self):\n\n # create nodes\n second_coord = [1, 0, 0.0]\n self.mp.CreateNewNode(1,0.0,0.0,0.0)\n self.mp.CreateNewNode(2,second_coord[0],second_coord[1],0.0)\n\n # create condition\n self.mp.CreateNewCondition(\"MovingLoadCondition2D2N\", 1, [1,2], self.mp.GetProperties()[1])\n\n parameters = self.base_parameters\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.TIME, 0)\n self.mp.ProcessInfo.SetValue(KratosMultiphysics.DELTA_TIME, 0.25)\n\n process = GMA.SetMultipleMovingLoadsProcess(self.mp, parameters)\n cond = self.cmp.GetCondition(2)\n\n # initialise and set load\n process.ExecuteInitialize()\n process.ExecuteInitializeSolutionStep()\n\n # initialise matrices\n lhs = KratosMultiphysics.Matrix(0, 0)\n rhs = KratosMultiphysics.Vector(0)\n\n # set load on node\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n\n self.checkRHS(rhs, [0.0, -2.0, 0.0, 0.0])\n\n # move load\n process.ExecuteFinalizeSolutionStep()\n process.ExecuteInitializeSolutionStep()\n\n # check if interpolation is done correctly\n cond.CalculateLocalSystem(lhs, rhs, self.mp.ProcessInfo)\n\n self.checkRHS(rhs, [0.0, -1.5, 0.0, -0.5])", "def test_accurate(self):\n M = simulation.StateMonitor(self.G, ['v', 'a', 'b'])\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n\n v_expected = np.array([i*(M.t+self.dt) for i in xrange(self.N)])\n a_expected = np.array([(M.t+self.dt) for i in xrange(2)])\n b_expected = [((i + np.round(M.t/sim.dt)).astype(int) % 3) == 0\n for i in xrange(self.N)]\n\n self.assertTrue(np.allclose(M.v, v_expected))\n self.assertTrue(np.allclose(M.a, a_expected))\n self.assertTrue(np.allclose(M.b, b_expected))", "def test_cmd_roomstate(self, mock_gametime):\n\n mock_gametime.return_value = _get_timestamp(\"autumn\", \"afternoon\")\n\n # show existing room states (season/time doesn't count)\n\n self.assertEqual(self.room1.room_states, [])\n\n self.call(\n extended_room.CmdExtendedRoomState(),\n \"\",\n f\"Room states (not counting automatic time/season) on Room(#{self.room1.id}):\\n None\",\n )\n\n # add room states\n self.call(\n extended_room.CmdExtendedRoomState(),\n \"burning\",\n \"Added room state 'burning' to this room.\",\n )\n self.call(\n extended_room.CmdExtendedRoomState(),\n \"windy\",\n \"Added room state 'windy' to this room.\",\n )\n self.call(\n extended_room.CmdExtendedRoomState(),\n \"\",\n f\"Room states (not counting automatic time/season) on Room(#{self.room1.id}):\\n \"\n \"'burning' and 'windy'\",\n )\n # toggle windy\n self.call(\n extended_room.CmdExtendedRoomState(),\n \"windy\",\n \"Cleared room state 'windy' from this room.\",\n )\n self.call(\n extended_room.CmdExtendedRoomState(),\n \"\",\n f\"Room states (not counting automatic time/season) on Room(#{self.room1.id}):\\n \"\n \"'burning'\",\n )\n # add a autumn state and make sure we override it\n self.room1.add_desc(\"Autumn description.\", room_state=\"autumn\")\n self.room1.add_desc(\"Spring description.\", room_state=\"spring\")\n\n self.assertEqual(self.room1.get_stateful_desc(), \"Autumn description.\")\n self.call(\n extended_room.CmdExtendedRoomState(),\n \"spring\",\n \"Added room state 'spring' to this room.\",\n )\n self.assertEqual(self.room1.get_stateful_desc(), \"Spring description.\")", "def test_cvm_online_state_online(n_feat, tmp_path, seed):\n window_sizes = [10]\n\n with fixed_seed(seed):\n x_ref = np.random.normal(0, 1, (n, n_feat)).squeeze()\n x = np.random.normal(0.1, 1, (n, n_feat))\n dd = CVMDriftOnline(x_ref, window_sizes=window_sizes, 
ert=20)\n # Store state for comparison\n state_dict_t0 = {}\n for key in dd.online_state_keys:\n state_dict_t0[key] = getattr(dd, key)\n\n # Run for 10 time steps\n test_stats_1 = []\n for t, x_t in enumerate(x):\n if t == 5:\n dd.save_state(tmp_path)\n # Store state for comparison\n state_dict_t5 = {}\n for key in dd.online_state_keys:\n state_dict_t5[key] = getattr(dd, key)\n preds = dd.predict(x_t)\n test_stats_1.append(preds['data']['test_stat'])\n\n # Reset and check state cleared\n dd.reset_state()\n for key, orig_val in state_dict_t0.items():\n np.testing.assert_array_equal(orig_val, getattr(dd, key)) # use np.testing here as it handles torch.Tensor etc\n\n # Repeat, check that same test_stats both times\n test_stats_2 = []\n for t, x_t in enumerate(x):\n preds = dd.predict(x_t)\n test_stats_2.append(preds['data']['test_stat'])\n np.testing.assert_array_equal(test_stats_1, test_stats_2)\n\n # Load state from t=5 timestep\n dd.load_state(tmp_path)\n\n # Compare stateful attributes to original at t=5\n for key, orig_val in state_dict_t5.items():\n np.testing.assert_array_equal(orig_val, getattr(dd, key)) # use np.testing here as it handles torch.Tensor etc\n\n # Compare predictions to original at t=5\n new_pred = dd.predict(x[5])\n np.testing.assert_array_equal(new_pred['data']['test_stat'], test_stats_1[5])", "def test_tract_split_housing(self):\n # Validate first new tract from the split tract\n # Tract 35.01\n tract1 = self.geographies.find({ 'geoid': '15003003501' })\n self.assertEqual(tract1.count(), 1)\n tract1 = tract1[0]\n \n split_tract_house_2000 = 3370 \n tract1_house_pct = 0.383 \n tract1_house_2000 = int(tract1_house_pct * split_tract_house_2000)\n tract1_house_2010 = 1353 \n tract1_house_delta = tract1_house_2010 - tract1_house_2000\n tract1_house_pct_change = float(tract1_house_delta) / tract1_house_2000\n\n self.assertAlmostEqual(tract1['xwalk']['15003003500']['HUPCT00'], tract1_house_pct, places=4)\n self.assertAlmostEqual(tract1['data']['2000']['H1']['H001001'], tract1_house_2000)\n self.assertAlmostEqual(float(tract1['data']['2010']['H1']['H001001']), tract1_house_2010)\n self.assertAlmostEqual(float(tract1['data']['delta']['H1']['H001001']), tract1_house_delta)\n self.assertAlmostEqual(float(tract1['data']['pct_change']['H1']['H001001']), tract1_house_pct_change)\n\n # Validate second new part from the split tract\n # Tract 35.02\n tract2 = self.geographies.find({ 'geoid': '15003003502' })\n self.assertEqual(tract2.count(), 1)\n tract2 = tract2[0]\n\n tract2_house_pct = 0.617\n tract2_house_2000 = int(tract2_house_pct * split_tract_house_2000)\n tract2_house_2010 = 2180 \n tract2_house_delta = tract2_house_2010 - tract2_house_2000\n tract2_house_pct_change = float(tract2_house_delta) / tract2_house_2000 \n \n self.assertAlmostEqual(tract2['xwalk']['15003003500']['HUPCT00'], tract2_house_pct, places=4)\n self.assertAlmostEqual(tract2['data']['2000']['H1']['H001001'], tract2_house_2000)\n self.assertAlmostEqual(float(tract2['data']['2010']['H1']['H001001']), tract2_house_2010)\n self.assertAlmostEqual(float(tract2['data']['delta']['H1']['H001001']), tract2_house_delta)\n self.assertAlmostEqual(float(tract2['data']['pct_change']['H1']['H001001']), tract2_house_pct_change)", "def test_get_field_state_comparisons_tiny(self):\r\n comparison_groupings = get_field_state_comparisons(\r\n self.tiny_dist_matrix_header, self.tiny_dist_matrix,\r\n self.tiny_mapping_header, self.tiny_mapping, self.tiny_field,\r\n ['SampleFieldState1'])\r\n self.assertEqual(comparison_groupings, 
{})", "def event_m10_29_3000():\r\n \"\"\"State 0,2: [Lib] [Preset] Navigation mesh switching by flag judgment_SubState\"\"\"\r\n assert event_m10_29_x13(z55=300000, z56=0, z57=2, flag9=0, flag10=102640)\r\n \"\"\"State 1: Finish\"\"\"\r\n EndMachine()\r\n Quit()", "def test_state_skipped(self):\n pipeline = spy(self.pipeline_real)\n job_record = get_job_record(job.STATE_SKIPPED,\n TEST_PRESET_TIMEPERIOD,\n PROCESS_SITE_HOURLY)\n\n pipeline.manage_pipeline_for_process(job_record.process_name, job_record)\n verify(self.time_table_mocked, times=0). \\\n update_job_record(any(str), any(Job), any(UnitOfWork), any(str))\n verify(self.time_table_mocked, times=0).get_tree(any(str))" ]
[ "0.62299633", "0.6125553", "0.59904784", "0.5965185", "0.5964256", "0.59597164", "0.5947608", "0.5937805", "0.58110744", "0.5780508", "0.5778231", "0.5771809", "0.57424587", "0.5713898", "0.57090956", "0.5698011", "0.567906", "0.56286466", "0.5621524", "0.56178564", "0.56158185", "0.560919", "0.55860907", "0.5556497", "0.5532133", "0.5531007", "0.55301285", "0.55273956", "0.55190164", "0.55168945", "0.550473", "0.5504515", "0.5502345", "0.54807466", "0.5480652", "0.5477578", "0.54651433", "0.5462359", "0.5460311", "0.5458362", "0.5428043", "0.5423648", "0.5415396", "0.5405416", "0.5403177", "0.5399153", "0.53937334", "0.53882104", "0.5363286", "0.5361099", "0.5360594", "0.5355957", "0.5354216", "0.53526443", "0.5350338", "0.53491783", "0.5342557", "0.5340569", "0.53403693", "0.5340248", "0.53398997", "0.533269", "0.53258", "0.5322434", "0.53173006", "0.5316993", "0.53119385", "0.53069144", "0.5299688", "0.5298732", "0.52982605", "0.52947515", "0.528581", "0.5285542", "0.5285307", "0.5277559", "0.5276565", "0.52695507", "0.52695394", "0.52671343", "0.52598816", "0.52558357", "0.5251508", "0.52478004", "0.52471447", "0.523808", "0.5233437", "0.52324355", "0.52318347", "0.5230307", "0.5229335", "0.52287126", "0.5223608", "0.52225906", "0.5221176", "0.52203", "0.52110106", "0.52089095", "0.5202155", "0.51991534" ]
0.5819505
8
Set up the Opple light platform.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: name = config[CONF_NAME] host = config[CONF_HOST] entity = OppleLight(name, host) add_entities([entity]) _LOGGER.debug("Init light %s %s", host, entity.unique_id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def configure(self):\n\n self.platform.configure()", "def platform_start(self):\n self.platform.start()", "def setUp(self):\n self.hass = get_test_home_assistant()\n controller_mock = mock.MagicMock()\n dev_dict = {\"address\": \"a1\", \"name\": \"fake_light\", \"brightness_levels\": 256}\n self.light = mochad.MochadLight(self.hass, controller_mock, dev_dict)", "def setUp(self):\n self.hass = get_test_home_assistant()\n controller_mock = mock.MagicMock()\n dev_dict = {\"address\": \"a1\", \"name\": \"fake_light\", \"brightness_levels\": 64}\n self.light = mochad.MochadLight(self.hass, controller_mock, dev_dict)", "def setUp(self):\n self.hass = get_test_home_assistant()\n controller_mock = mock.MagicMock()\n dev_dict = {\"address\": \"a1\", \"name\": \"fake_light\", \"brightness_levels\": 32}\n self.light = mochad.MochadLight(self.hass, controller_mock, dev_dict)", "def setup_platform(\n hass: HomeAssistant,\n config: ConfigType,\n add_entities: AddEntitiesCallback,\n discovery_info: DiscoveryInfoType | None = None\n) -> None:\n # Assign configuration variables.\n # The configuration check takes care they are present.\n host = config[CONF_HOST]\n username = config[CONF_USERNAME]\n password = config.get(CONF_PASSWORD)\n\n # Setup connection with devices/cloud\n hub = awesomelights.Hub(host, username, password)\n\n # Verify that passed in configuration works\n if not hub.is_valid_login():\n _LOGGER.error(\"Could not connect to AwesomeLight hub\")\n return\n\n # Add devices\n add_entities(AwesomeLight(light) for light in hub.lights())", "def setup_platform(\n hass: HomeAssistant,\n config: ConfigType,\n add_entities: AddEntitiesCallback,\n discovery_info: DiscoveryInfoType | None = None,\n) -> None:\n lights = []\n for channel, device_config in config[CONF_DEVICES].items():\n device = {}\n device[\"name\"] = device_config[CONF_NAME]\n device[\"dimmable\"] = device_config[\"dimmable\"]\n device[\"channel\"] = channel\n device[\"driver\"] = config[CONF_DRIVER]\n device[\"host\"] = config[CONF_HOST]\n device[\"port\"] = config[CONF_PORT]\n lights.append(FutureNowLight(device))\n\n add_entities(lights, True)", "def setup_platform(hass, config, add_devices, discovery_info=None):\n # import awesomelights\n\n # Assign configuration variables. 
The configuration check takes care they are\n # present.\n host = config.get(CONF_HOST)\n username = config.get(CONF_USERNAME)\n password = config.get(CONF_PASSWORD)\n\n # Setup connection with devices/cloud\n # hub = awesomelights.Hub(host, username, password)\n _LOGGER.info(\"hub = awesomelights.Hub(host, username, password)\")\n\n # Verify that passed in configuration works\n # if not hub.is_valid_login():\n # _LOGGER.error(\"Could not connect to AwesomeLight hub\")\n # return\n\n # Add devices\n # add_devices(AwesomeLight(light) for light in hub.lights())\n add_devices([AwesomeLight(Light)])", "def setup_platform(\n hass: HomeAssistant,\n config: ConfigType,\n add_entities: AddEntitiesCallback,\n discovery_info: DiscoveryInfoType | None = None,\n) -> None:\n name = config.get(CONF_NAME)\n mac = config.get(CONF_MAC)\n _LOGGER.debug(\"Setting up\")\n\n mon = Monitor(hass, mac, name)\n add_entities([SkybeaconTemp(name, mon)])\n add_entities([SkybeaconHumid(name, mon)])\n\n def monitor_stop(_service_or_event):\n \"\"\"Stop the monitor thread.\"\"\"\n _LOGGER.info(\"Stopping monitor for %s\", name)\n mon.terminate()\n\n hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, monitor_stop)\n mon.start()", "def setup(self):\n # Create your sprites and sprite lists here\n self.game: Game = Game(SCREEN_WIDTH, SCREEN_HEIGHT, TILE_SIZE, 1, grid_layers = 4)\n self.game.game_message = \"Lead the Rabbit home\"\n\n # show the menu so that we see the instructions\n self.game.menu.button_list[0].text = \"Start\"\n self.game.menu.is_visible = True", "def initialize(self):\n self.ha_url = self.args.get(\"ha_url\", None)\n self.use_current_brightness = self.args.get(\"use_current_brightness\", False)\n self.condition = self.args.get(\"condition\")\n self.lights = self.args[\"lights\"]\n self.listen_state(self.change_lights_color, self.args[\"media_player\"], attribute = self.args.get(\"photo_attribute\", \"entity_picture\"))", "def setupLights(self) :\n\t\tself.ambientLight = render.attachNewNode(AmbientLight( \\\n\t\t\t\t\t\"ambientLight\"))\n\t\tself.ambientLight.node().setColor(Vec4(.8,.8,.8,1))\n\t\trender.setLight(self.ambientLight)\n\n\t\tdLight1 = DirectionalLight(\"dLight1\")\n\t\tdLight1.setColor(Vec4(6,5,7,1))\n\t\tdLight1.setDirection(Vec3(1,1,1))\n\t\tdlnp1 = render.attachNewNode(dLight1)\n\t\tdlnp1.setHpr(30,-160,0)\n\t\trender.setLight(dlnp1)\n\n\t\tdLight2 = DirectionalLight(\"dLight2\")\n\t\tdLight2.setColor(Vec4(.6,.7,1,1))\n\t\tdLight2.setDirection(Vec3(-1,-1,-1))\n\t\tself.dlnp2 = render.attachNewNode(dLight2)\n\t\tself.dlnp2.node().setScene(render)\n\t\tself.dlnp2.setHpr(-70,-60,0)\n\t\trender.setLight(self.dlnp2)", "def setup_pi():\n global pi\n pi = modOrangePi.OrangePiOne()", "def main():\r\n LEDStrip = createNeoPixelObject()\r\n setup(LEDStrip)\r\n clock(LEDStrip)", "def initialize(self):\n self.log.info(\"Initialize raspPinball hardware.\")\n\n self.config = self.machine.config['rasppinball']\n self.machine.config_validator.validate_config(\"rasppinball\", self.config)\n print(\"***************************\")\n print(self.config)\n #self.machine_type = (\n # self.machine.config['hardware']['driverboards'].lower())\n\n self._connect_to_hardware()\n\n\n # keypad\n self._kp = Keypad()\n self.old_key = \"\"\n self.key = \"\"\n # leds\n self.init_strips()", "def initialize_robot():\n\n proxy_motion = naoqi.ALProxy(\"ALMotion\", IP_ROBOT, PORT_ROBOT)\n proxy_motion.wakeUp()\n\n proxy_autonomous_life = naoqi.ALProxy(\"ALAutonomousLife\", IP_ROBOT, PORT_ROBOT)\n 
proxy_autonomous_life.setState(\"disabled\")\n\n proxy_motion = naoqi.ALProxy(\"ALMotion\", IP_ROBOT, PORT_ROBOT)\n proxy_motion.wakeUp()", "def setup(self): \n # Navigate to POS screen\n pos.connect()", "def setup(hass, base_config):\n from pyhusmow import API as HUSMOW_API\n\n config = base_config.get(DOMAIN)\n\n if hass.data.get(DOMAIN) is None:\n hass.data[DOMAIN] = { 'devices': [] }\n\n api = HUSMOW_API()\n api.login(config.get(CONF_USERNAME), config.get(CONF_PASSWORD))\n\n robots = api.list_robots()\n\n if not robots:\n return False\n\n for robot in robots:\n hass.data[DOMAIN]['devices'].append(AutomowerDevice(robot, api))\n\n for component in AUTOMOWER_COMPONENTS:\n discovery.load_platform(hass, component, DOMAIN, {}, base_config)\n\n return True", "def setUp(self):\n self.platform = wirelesstagpy.WirelessTags(username=USERNAME, password=PASSWORD)\n self.tag_outdoor = wirelesstagpy.SensorTag(MOCK.OUTDOOR_PROBE, self.platform)\n self.platform._tags[\"fake-1\"] = self.tag_outdoor # pylint: disable=protected-access", "def __init__(self, parent, endpoint):\n Wemo_Endpoint.__init__(self, parent, endpoint)\n self.device_type = self._Parent._DeviceTypes.get('wemo_light')\n self.FEATURES.update({\n FEATURE_BRIGHTNESS: True,\n FEATURE_PERCENT: True,\n FEATURE_NUMBER_OF_STEPS: 100\n })", "def setup(self):\n build_world.start_level(self)", "def setup(self):\n self.pi.set_pull_up_down(self.gpio, pigpio.PUD_OFF)\n self.pi.set_watchdog(self.gpio, 0)\n self.register_callbacks()", "def __init__(self):\n GPIO.setmode(GPIO.BOARD)\n for light in self.all:\n GPIO.setup(light, GPIO.OUT)", "def main():\n # Parse arguments for configuration and light type\n parser = argparse.ArgumentParser()\n parser.add_argument(\"light_type\", help=\"lifx or hue\", choices=['lifx', 'hue'], type = str.lower)\n parser.add_argument(\"-c\", \"--config_mode\", action='store_true', help=\"runs the client in config mode which prints out the light data\")\n \n args = parser.parse_args()\n \n config_mode = args.config_mode\n light_type = args.light_type\n \n # Get light information \n # *Note*\n # Only LIFX is supported at this point in time\n light_service = None\n if light_type == 'lifx':\n light_service = lightservice.LIFXLightService(\"https://api.lifx.com/v1/\")\n \n data = light_service.refresh_light_data(config_mode)\n \n button_handler = None\n if config_mode:\n button_handler = buttonhandler.ConfigButtonHandler()\n button_handler.start()\n else:\n button_handler = buttonhandler.ButtonHandler(data)\n button_handler.start(light_service)", "def test_setup_adds_proper_devices(self, mock_light):\n good_config = {\n \"mochad\": {},\n \"light\": {\n \"platform\": \"mochad\",\n \"devices\": [{\"name\": \"Light1\", \"address\": \"a1\"}],\n },\n }\n assert setup_component(self.hass, light.DOMAIN, good_config)", "def setup(self):\n\t\tself.interface = self.getDriver('light_interface')\n\n\t\tself.pin = self.config['interface_position']\n\t\tself.blink_rate = self.config['blink_rate'] / 2 or 0.5\n\t\tself.is_on = False\n\n\t\tself.intensity = 255\n\t\tself.blink = False\n\t\tself.count = None\n\t\tself.current_count = False\n\t\tself.current_count = None\n\n\t\tself.saved_intensity = None\n\t\tself.saved_blink = False\n\t\tself.saved_count = None\n\n\t\treturn True", "def setup_platform(hass, config, add_devices, discovery_info=None):\n from pybotvac import Account\n\n try:\n auth = Account(config[CONF_USERNAME], config[CONF_PASSWORD])\n except HTTPError:\n _LOGGER.error(\"Unable to connect to Neato API\")\n return False\n\n dev = 
[]\n for robot in auth.robots:\n for type_name in SWITCH_TYPES:\n dev.append(NeatoConnectedSwitch(robot, type_name))\n add_devices(dev)", "def setup_application(self):\n pass", "def pibooth_startup(cfg, app):", "def __init__(self, name, host):\n\n self._device = OppleLightDevice(host)\n\n self._name = name\n self._is_on = None\n self._brightness = None\n self._color_temp = None", "def test_setup_platform(self, store_mock):\n config = {\n ip.DOMAIN: {\n \"platform\": \"microsoft_face_identify\",\n \"source\": {\"entity_id\": \"camera.demo_camera\"},\n \"group\": \"Test Group1\",\n },\n \"camera\": {\"platform\": \"demo\"},\n mf.DOMAIN: {\"api_key\": \"12345678abcdef6\"},\n }\n\n with assert_setup_component(1, ip.DOMAIN):\n setup_component(self.hass, ip.DOMAIN, config)\n self.hass.block_till_done()\n\n assert self.hass.states.get(\"image_processing.microsoftface_demo_camera\")", "def setup_platform(opp, config, add_entities, discovery_info=None):\n\n for scene in pywink.get_scenes():\n _id = scene.object_id() + scene.name()\n if _id not in opp.data[DOMAIN][\"unique_ids\"]:\n add_entities([WinkScene(scene, opp)])", "def setup_platform(hass, config, add_devices, discovery_info=None):\n \n devices = []\n resource = requests.get('{host}/rest/items?type=json'.format(\n host=config.get('host')))\n\n for item in resource.json()['item']:\n if item['type'] in ['DimmerItem', ]:\n devices.append(OpenhabLight(\n hass,\n item,\n config.get('brightness', DEFAULT_BRIGHTNESS)))\n\n add_devices(devices)", "def appInit(self):\n glutInitDisplayMode( GLUT_SINGLE | GLUT_RGB | GLUT_DEPTH )\n glClearColor(0.0, 0.0, 0.0, 0.0)\n glClearDepth(1.0 )\n glEnable( GL_DEPTH_TEST )\n glShadeModel( GL_SMOOTH )\n glEnable( GL_NORMALIZE )\n glEnable( GL_COLOR_MATERIAL )\n\n glEnable( GL_LIGHTING )\n glEnable( GL_LIGHT0 )\n\n self.set_lighting()\n\n self.make_simple_scenes()\n self.make_multi_object_scene()", "def setup(bot: Bot) -> None:\n bot.add_cog(Armory(bot))", "def pibooth_setup_camera(cfg):", "def setup_platform(hass, config, add_devices_callback, discovery_info=None):\n host = config.get(CONF_HOST)\n name = config.get(CONF_NAME)\n token = config.get('token')\n\n add_devices_callback([MiroboSwitch(name, host, token)])", "def setup(self):\n # if not system.restore_snapshot():\n # self.log.debug(\"No snapshot to restore, if this is not expected please contact automation team\")\n crindsim.set_mode(\"manual\")\n pos.connect()\n pos.sign_on()", "def initialize(self):\n\t\tpcd8544.LCD.initialize(self)\n\t\tRPIO.setup(self._backlight_pin, RPIO.OUT, initial=RPIO.LOW)", "def __init__(self, machine):\n super().__init__(machine)\n\n # Set default platform features. Each platform interface can change\n # these to notify the framework of the specific features it supports.\n self.features['has_drivers'] = True\n self.features['max_pulse'] = 255", "def __init__(self, machine):\n super().__init__(machine)\n\n # Set default platform features. 
Each platform interface can change\n # these to notify the framework of the specific features it supports.\n self.features['has_drivers'] = True\n self.features['max_pulse'] = 255", "def setUpEnv(self):\n \n robot = Robot('atrv')\n\n pose = Sensor('pose')\n robot.append(pose)\n pose.configure_mw('yarp')\n\n motion = Actuator('v_omega')\n robot.append(motion)\n motion.configure_mw('yarp')\n \n env = Environment('indoors-1/indoor-1')\n env.configure_service('socket')", "def setup_pymol():\n pymol.finish_launching() # Prevent threading errors\n # Configure global settings\n cmd.set('scene_buttons', 1)\n cmd.set('matrix_mode', 1)\n cmd.set('movie_panel', 1)\n # Configure quality settings\n cmd.mset(\"1 x500\")\n cmd.set('ray_trace_frames', 1)\n cmd.viewport(800, 800)", "def setup(self):\n\n self._enable_torque(self._reg.TORQUE_ENABLE)\n self.change_operating_mode(self._reg.MODE_EXT_POSI)\n # set to max velocity\n self.change_veloity(self._default_velocity)", "def setup(bot: Red):\n bot.add_cog(Welcome(bot))", "def setup(self):\n \n # Board refers to the P1 header of the Raspberry Pi board\n GPIO.setmode(GPIO.BOARD)\n\n # Set up pin as an input with a pull up resistor to 3.3V\n GPIO.setup(self.__pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)", "def __init__(self):\n super().__init__()\n\n # Robot state\n self.ask_mode = False\n\n # Connect two large motors on output ports B and C\n self.sound = Sound()\n self.leds = Leds()\n self.p1 = TouchSensor(INPUT_1)\n self.p2 = TouchSensor(INPUT_2)\n self.p3 = TouchSensor(INPUT_3)\n self.p4 = TouchSensor(INPUT_4)", "def initialize_electronics(self):\n\n self.electronics = ArduinoModel(**self.config['electronics']['arduino'])\n self.logger.info('Initializing electronics arduino')\n self.electronics.initialize()", "def __init__(self, name='demo'):\n init()\n joystick.init()\n for i in range(joystick.get_count()):\n joystick.Joystick(i).init()\n\n State.game = util.load_cfg(name)\n State.clock = Clock(10, State.game['frame_rate'])\n State.window = display.set_mode(State.game['screen_size'])\n\n self._last_joystick_action = None\n self.create_screens()", "def set_up_all_ao(self):\n self.set_as_active()\n \n # sets up ambient occlusion lighting\n self.set_up_world_ao()\n self.comp_add_ao()", "def setup(self):\n GPIO.setwarnings(False)\n GPIO.setmode(GPIO.BCM)\n GPIO.setup(self.Motor_A_EN, GPIO.OUT)\n GPIO.setup(self.Motor_B_EN, GPIO.OUT)\n GPIO.setup(self.Motor_A_Pin1, GPIO.OUT)\n GPIO.setup(self.Motor_A_Pin2, GPIO.OUT)\n GPIO.setup(self.Motor_B_Pin1, GPIO.OUT)\n GPIO.setup(self.Motor_B_Pin2, GPIO.OUT)\n self.motorStop() # Avoids automatic motor rotation after initialization\n try: # Try is used here to avoid errors due to repeated setting of PWM\n self.pwm_A = GPIO.PWM(self.Motor_A_EN, 1000)\n self.pwm_B = GPIO.PWM(self.Motor_B_EN, 1000)\n except:\n pass", "def InitLightBasic(self):\r\n\t\t\r\n\t\taLight = AmbientLight(\"AmbientLight\")\r\n\t\taLight.setColor(Vec4(0.3, 0.3, 0.3, 1))\r\n\t\trender.setLight(render.attachNewNode(aLight))\r\n\t\r\n\t\tdLight1 = DirectionalLight(\"DirectionalLight1\")\r\n\t\tdLight1.setColor(Vec4(0.65, 0.6, 0.6, 1))\t\t\r\n\t\tdLight1NP = render.attachNewNode(dLight1)\r\n\t\tdLight1NP.setHpr(100, -40, 0)\r\n\t\trender.setLight(dLight1NP)\r\n\t\r\n\t\tdLight2 = DirectionalLight(\"DirectionalLight2\")\r\n\t\tdLight2.setColor(Vec4(0.35, 0.35, 0.3, 1))\r\n\t\tdLight2NP = render.attachNewNode(dLight2)\r\n\t\tdLight2NP.setHpr(150, -60, 0)\r\n\t\trender.setLight(dLight2NP)", "def init():\n\n global registry, fsk_router, ook_router\n\n 
radio.init()\n OpenThings.init(Devices.CRYPT_PID)\n\n fsk_router = Registry.Router(\"fsk\")\n\n #OOK receive not yet written\n #It will be used to be able to learn codes from Energenie legacy hand remotes\n ##ook_router = Registry.Router(\"ook\")\n\n registry = Registry.DeviceRegistry()\n registry.set_fsk_router(fsk_router)\n ##registry.set_ook_router(ook_router\n\n path = os.path.join(sys.path[0], registry.DEFAULT_FILENAME)\n if os.path.isfile(path):\n registry.load_from(path)\n print(\"loaded registry from file\")\n registry.list()\n fsk_router.list()\n\n # Default discovery mode, unless changed by app\n ##discovery_none()\n ##discovery_auto()\n ##discovery_ask(ask)\n discovery_autojoin()\n ##discovery_askjoin(ask)", "def setup_method(self):\n self.hass = get_test_home_assistant()\n\n self.config = {\n ip.DOMAIN: {\n \"platform\": \"microsoft_face_identify\",\n \"source\": {\"entity_id\": \"camera.demo_camera\", \"name\": \"test local\"},\n \"group\": \"Test Group1\",\n },\n \"camera\": {\"platform\": \"demo\"},\n mf.DOMAIN: {\"api_key\": \"12345678abcdef6\"},\n }\n\n self.endpoint_url = f\"https://westus.{mf.FACE_API_URL}\"", "def _init_hardware(self):\n return", "def InitEnvironment(self):\r\n\t\t\r\n\t\t# Turn antialiasing on\r\n\t\trender.setAntialias(AntialiasAttrib.MMultisample,1)\r\n\t\t\r\n\t\t# load the falcon model\r\n\t\tfalcon = loader.loadModel(\"Content/falcon/falcon.bam\")\r\n\t\tfalcon.setScale(30)\r\n\t\tfalcon.setPos(0, 0, 28.5)\r\n\t\tfalcon.reparentTo(render)", "def setupHw():\n\n pin.setupHw()\n pin.setupOutPins(traffic_lights)\n pin.setDebug(False)", "def __init__(self, host):\n self._io = RemoteIO(host)\n self._host = host\n\n self._left_wheel = Wheel(id='b', side='left', remote_io=self._io)\n self._right_wheel = Wheel(id='a', side='right', remote_io=self._io, inverse=True)\n\n self._cam = Camera(host)\n\n self._left_led = LED(side='left', remote_io=self._io)\n self._front_led = LED(side='center', remote_io=self._io)\n self._right_led = LED(side='right', remote_io=self._io)", "def setup_platform(hass, config, add_devices, discovery_info=None):\n from evohomeclient import EvohomeClient\n\n username = config.get(CONF_USERNAME)\n password = config.get(CONF_PASSWORD)\n\n if username is None or password is None:\n _LOGGER.error(\"Missing required configuration items %s or %s\",\n CONF_USERNAME, CONF_PASSWORD)\n return False\n\n evo_api = EvohomeClient(username, password)\n try:\n add_devices([RoundThermostat(evo_api)])\n except socket.error:\n _LOGGER.error(\n \"Connection error logging into the honeywell evohome web service\"\n )\n return False", "def _initialize_hardware(self):\n # Import\n try:\n from gpiozero import MCP3008\n except Exception as ex:\n logging.error('\\n *** ERROR importing gpiozero: {}'.format(ex))\n\n # Things failed, must be running locally, not on a widget, so don't\n # bother initializing the MCP3008\n return\n\n # Initialize the MCP3008\n try:\n self._sensor = MCP3008(channel=0)\n except Exception as ex:\n logging.error('\\n *** ERROR initializing MCP3008: {}'.format(ex))\n return\n\n # Start force loop thread\n threading.Thread(target=self._force_loop, daemon=True).start()", "def setUp(self):\n commandutils.CommandTestCaseMixin.setUp(self)\n self.lighting = objects.LocationLighting.createFor(\n self.location, candelas=0)", "def __init__(self, root, io):\n parts.hand.Hand.__init__(self, root=root, io=io)\n\n dxl_motors = OrderedDict({\n name: dict(conf)\n for name, conf in self.dxl_motors.items()\n })\n\n self.attach_dxl_motors(dxl_motors)\n\n 
\"\"\"\n self._load_sensor = self.io.find_module('force_gripper')\n self._load_sensor.offset = 4\n self._load_sensor.scale = 10000\n \"\"\"", "def setup_platform(hass, config, add_devices, discovery_info=None):\n hub.update()\n\n for vacbot in hub.vacbots:\n add_devices([DeebotMopAttachedBinarySensor(vacbot, \"mop_attached\")], True)", "def setup_platform(hass, config, add_devices, discovery_info=None):\n\n lights = []\n\n from nmap import PortScanner, PortScannerError\n scanner = PortScanner()\n\n _LOGGER.error(scanner)\n\n ip = ''\n\n for i in range(255):\n try:\n result = scanner.scan(hosts='192.168.1.'+str(i), arguments=\" --privileged -sP \") # hosts=config[CONF_HOSTS]\n except PortScannerError:\n return _LOGGER.error(\"PortScannerError\")\n\n _LOGGER.error(result)\n\n for ipv4, info in result['scan'].items():\n mac = info['addresses'].get('mac') or _arp(ipv4)\n _LOGGER.error(mac)\n _LOGGER.error(ipv4)\n if mac == '5C:CF:7F:E2:40:49': #put in config file\n ip = ipv4\n _LOGGER.error(ip)\n break\n if ip is not '':\n break\n\n\n lights.append(DaliLight('http://'+ip))\n add_devices(lights)", "def __init__(self):\n\n GPIO.setup(PIN_BTN, GPIO.IN, GPIO.PUD_UP)\n GPIO.setup(PIN_RED_LED_0, GPIO.OUT, GPIO.LOW)\n GPIO.setup(PIN_BLUE_LED, GPIO.OUT, GPIO.LOW)", "def setup(self):\n if not system.restore_snapshot():\n self.log.warning(\"Not able to restore snapshot\")\n\n pos.connect()\n\n pos.sign_on()", "def initialize_home_hub(argv):\n parse_cmd_line_opts(argv)\n init_logging()\n init_error_reporting()\n \n # Verify we have a valid home id\n if HOME_ID is None:\n print('Home ID is invalid or missing. Please provide an integer following the -i flag')\n exit()\n\n # Begin Home Hub Specific Setup\n logger.info('Starting the Home Hub main program for Home: %s', HOME_ID)\n\n # Get the email and password for this HH's user from the env vars\n powernet_user_email = os.getenv('POWERNET_USER_EMAIL', None)\n powernet_user_password = os.getenv('POWERNET_USER_PASSWORD', None)\n \n if powernet_user_email is None:\n logger.info('Missing the required login email address')\n logger.info('Please set the POWERNET_USER_EMAIL environment variable and try again')\n exit()\n \n if powernet_user_password is None:\n logger.info('Missing the required login password')\n logger.info('Please set the POWERNET_USER_PASSWORD environment variable and try again')\n exit()\n \n # attempt to authenticate against our API\n form_payload = {'email': powernet_user_email, 'password': powernet_user_password}\n response = requests.post('https://pwrnet-158117.appspot.com/api/v1/powernet_user/auth/', data=form_payload)\n auth_token = response.json()['token']\n\n # Initializing variables for queue and threads\n rpi = HardwareInterface(house_id=HOME_ID, gpio_map=None, auth_token=auth_token)\n buffer_size = 8\n q_ai = Queue(buffer_size)\n\n # Initialize threads\n producer_ai_thread = Thread(name='Producer', target=rpi.producer_ai, args=(q_ai,))\n producer_ai_thread.start()\n\n consumer_ai_thread = Thread(name='Consumer', target=rpi.consumer_ai, args=(q_ai,))\n consumer_ai_thread.start()\n\n devices_thread = Thread(name='Device', target=rpi.devices_th)\n devices_thread.start()\n\n load_control_thread = Thread(name=\"LoadControl\", target=rpi.local_controller_th)\n load_control_thread.start()", "def startup( self ):\n # ---- Setup UPNPC ----\n if self.config.neuron.use_upnpc:\n bittensor.logging.success(prefix = 'Set upnpc', sufix = '<green>ON</green>')\n try:\n self.external_port = net.upnpc_create_port_map( port = self.axon.port )\n except 
net.UPNPCException as upnpc_exception:\n logger.critical('Failed to hole-punch with upnpc')\n raise RuntimeError('Failed to hole-punch with upnpc')\n else:\n bittensor.logging.success(prefix = 'Set upnpc', sufix = '<red>OFF</red>')\n self.external_port = self.config.axon.port\n\n # ---- Get external ip ----\n try:\n self.external_ip = net.get_external_ip()\n bittensor.logging.success(prefix = 'External IP', sufix = '<blue>{}</blue>'.format(self.external_ip))\n except net.ExternalIPNotFound as external_port_exception:\n raise RuntimeError('Unable to attain your external ip. Check your internet connection. error:{}', external_port_exception)\n\n # ---- Setup tensorboard ----\n if self.config.neuron.use_tensorboard == True:\n self._tensorboard_program = program.TensorBoard()\n self._tensorboard_program.configure(argv=[None, '--logdir', self.config.neuron.full_path, '--load_fast=true'])\n self._tensorbaord_url = self._tensorboard_program.launch()\n bittensor.logging.success(prefix = 'Set tensorboard', sufix = '<blue>http://localhost:6006/</blue>')\n else: bittensor.logging.success(prefix = 'Set tensorboard', sufix = '<red>OFF</red>')\n\n # ---- Setup Wallet. ----\n if not self.wallet.has_coldkeypub:\n self.wallet.create_new_coldkey( n_words = 12, use_password = True )\n if not self.wallet.has_coldkeypub:\n raise RuntimeError('Miner must have access to a decrypted coldkeypub')\n if not self.wallet.has_hotkey:\n self.wallet.create_new_hotkey( n_words = 12, use_password = False )\n if not self.wallet.has_hotkey:\n raise RuntimeError('Miner must have access to a decrypted hotkey')\n\n # ---- Subscribe to chain ----\n subscribe_success = self.subtensor.subscribe(\n wallet = self.wallet,\n ip = self.external_ip,\n port = self.external_port,\n modality = bittensor.proto.Modality.TEXT,\n wait_for_finalization = True,\n timeout = 4 * bittensor.__blocktime__,\n )\n if not subscribe_success:\n raise RuntimeError('Failed to subscribe neuron.')\n\n # ---- Starting axon ----\n self.axon.start()", "def initialize(self):\n remote_event = self.args.get(\"remote_event\", \"deconz_event\")\n remote_event_filter = self.args.get(\"remote_event_filter\", {})\n motion_sensors = self.args.get(\"motion_sensors\", [])\n self._light_group = self.args.get(\"light_group\", \"light.cocina\")\n self._main_constrain = self.args.get(\"toggle_automation\")\n self._delay_re_enable_motion_control = int(\n self.args.get(\"delay_re_enable_motion_control\", 120)\n )\n self._max_delay_motion_off = int(\n self.args.get(\"max_delay_motion_off\", 900)\n )\n\n self._scene_rotation = {\n scene_key: i\n for i, scene_key in enumerate(self.args.get(\"rotate_scene_order\"))\n }\n _schedule_config = self.args.get(\"scene_schedule\")\n self.log(\n f\"[DEBUG] APP INIT with schedule_config {_schedule_config}\",\n level=\"WARNING\",\n log=LOGGER,\n )\n\n self._default_scene = self.args.get(\"default_scene\")\n self._scenes = {}\n self._time_windows = {}\n for scene_key, scene_data in self.args.get(\"scene_schedule\").items():\n self._time_windows[scene_key] = (\n scene_data.get(\"from\", \"00:00:00\"),\n scene_data.get(\"to\", \"00:00:00\"),\n )\n self._scenes[scene_key] = (\n scene_data[\"turn_on_service_call\"],\n scene_data[\"wait_to_turn_off\"],\n )\n\n light_st = self.get_state(self._light_group)\n self._light_on = light_st == \"on\"\n self._last_switch_press = 0.0\n self._last_scene = self._default_scene\n\n self._motion_states = {}\n for sensor in motion_sensors:\n self._motion_states[sensor] = self.get_state(sensor) == \"on\"\n 
self.listen_state(\n self._motion_detected,\n sensor,\n constrain_input_boolean=self._main_constrain,\n )\n self._motion_on = any(self._motion_states.values())\n self._last_light_on = 0.0 if not self._motion_on else monotonic()\n self._motion_light_enabled = True\n\n self.listen_state(self._light_changed, self._light_group)\n self.listen_event(\n self._switch_event, remote_event, **remote_event_filter\n )\n # Add listener to check light off after a long time\n self.listen_state(\n self._no_motion_for_long_time,\n motion_sensors[0],\n new=\"off\",\n duration=self._max_delay_motion_off,\n # constrain_input_boolean=self._main_constrain,\n )\n self.log(\n f\"APP INIT with light {light_st}, motion: {self._motion_states}\",\n level=\"WARNING\",\n log=LOGGER,\n )", "def setUp(self):\n self.hass = get_test_home_assistant()\n self.assertTrue(setup_component(self.hass, remote.DOMAIN, {'remote': {\n 'platform': 'demo',\n }}))", "def setup_game(self):", "def platform_init(self):\n if isinstance(self.imu, MockImuController) or isinstance(self.pwm_controller, MockPWMController):\n print(\"Mock components detected, creating mock antenna controller\")\n platform = MockPlatformController(self.azimuth_servo, self.elevation_servo, self.imu)\n else:\n print(\"Initializing PIDAntennaController class\")\n platform = PIDPlatformController(\n self.azimuth_servo,\n self.elevation_servo,\n self.imu,\n pid_output_limits=self.pid_config.get(\"output_limits\"),\n pid_frequency=self.pid_config.get(\"period\"),\n p=self.pid_config.get(\"p\"),\n i=self.pid_config.get(\"i\"),\n d=self.pid_config.get(\"d\")\n )\n \n self.platform = platform\n\n if not isinstance(self.gps, MockGPSController):\n self.gps_update_loop = GPSLocationController(self.gps)\n self.gps_update_loop.start()\n else:\n self.gps_update_loop = None\n \n return platform", "def main():\n\n # Install crypt32 (not required for Proton 3.16-3 and up)\n util.protontricks('crypt32')\n\n # Install directmusic, set overrides\n util.protontricks('directmusic')\n util.winedll_override('dmime', 'n')\n util.winedll_override('dmsynth', 'n')\n util.winedll_override('dmusic', 'n')\n util.winedll_override('dsound', 'n')\n util.winedll_override('dswave ', 'n')\n util.winedll_override('l3codecx', 'n')\n\n # Set sound to alsa\n util.protontricks('sound=alsa')\n\n # Disable launcher\n util.replace_command('patriots.exe', 'riseofnations.exe')", "def _initialize_hardware(self):\n # Import\n try:\n import board\n import busio\n import adafruit_vl6180x\n except Exception as ex:\n logging.error(\n '\\n *** ERROR importing Adafruit libraries: {}'.format(\n ex,\n ),\n )\n\n # Things failed, so we must be running locally, not on a widget;\n # don't bother hooking up the VL6180X\n return\n\n # Initialize I2C and VL6180X\n try:\n i2c = busio.I2C(board.SCL, board.SDA)\n self._sensor = adafruit_vl6180x.VL6180X(i2c)\n except Exception as ex:\n logging.error(\n '\\n *** ERROR initializing I2C/LSM303: {}'.format(ex),\n )\n\n self._initialize_id_led()", "def setPlatform(self):\n\t\treturn None", "def setup_platform(hass, config, add_entities, discovery_info=None):\n litejet_ = hass.data[\"litejet_system\"]\n\n devices = []\n for i in litejet_.loads():\n name = litejet_.get_load_name(i)\n if not litejet.is_ignored(hass, name):\n devices.append(LiteJetLight(hass, litejet_, i, name))\n add_entities(devices, True)", "def setup_platform(hass, config, add_devices, discovery_info=None) -> None:\n friendly_name = config.get(CONF_FRIENDLY_NAME)\n mac_addr = config.get(CONF_MAC)\n 
add_devices([Switchmate(mac_addr, friendly_name)], True)", "def main():\r\n app = application.Application()\r\n app.initializeLayer(menu.Menu())\r\n app.run()", "def setup_platform(\n hass: HomeAssistant,\n config: ConfigType,\n add_entities: AddEntitiesCallback,\n discovery_info: DiscoveryInfoType | None = None,\n) -> None:\n if discovery_info is None:\n return\n\n data = hass.data[LUPUSEC_DOMAIN]\n\n device_types = [CONST.TYPE_OPENING]\n\n devices = []\n for device in data.lupusec.get_devices(generic_type=device_types):\n devices.append(LupusecBinarySensor(data, device))\n\n add_entities(devices)", "def setup_platform(hass, config, add_devices, discovery_info=None):\n # Only act if loaded via mysensors by discovery event.\n # Otherwise gateway is not setup.\n if discovery_info is None:\n return\n\n for gateway in mysensors.GATEWAYS.values():\n # Define the S_TYPES and V_TYPES that the platform should handle as\n # states. Map them in a dict of lists.\n pres = gateway.const.Presentation\n set_req = gateway.const.SetReq\n map_sv_types = {\n pres.S_TEMP: [set_req.V_TEMP],\n pres.S_HUM: [set_req.V_HUM],\n pres.S_BARO: [set_req.V_PRESSURE, set_req.V_FORECAST],\n pres.S_WIND: [set_req.V_WIND, set_req.V_GUST],\n pres.S_RAIN: [set_req.V_RAIN, set_req.V_RAINRATE],\n pres.S_UV: [set_req.V_UV],\n pres.S_WEIGHT: [set_req.V_WEIGHT, set_req.V_IMPEDANCE],\n pres.S_POWER: [set_req.V_WATT, set_req.V_KWH],\n pres.S_DISTANCE: [set_req.V_DISTANCE],\n pres.S_LIGHT_LEVEL: [set_req.V_LIGHT_LEVEL],\n pres.S_IR: [set_req.V_IR_RECEIVE],\n pres.S_WATER: [set_req.V_FLOW, set_req.V_VOLUME],\n pres.S_CUSTOM: [set_req.V_VAR1,\n set_req.V_VAR2,\n set_req.V_VAR3,\n set_req.V_VAR4,\n set_req.V_VAR5],\n pres.S_SCENE_CONTROLLER: [set_req.V_SCENE_ON,\n set_req.V_SCENE_OFF],\n }\n if float(gateway.protocol_version) < 1.5:\n map_sv_types.update({\n pres.S_AIR_QUALITY: [set_req.V_DUST_LEVEL],\n pres.S_DUST: [set_req.V_DUST_LEVEL],\n })\n if float(gateway.protocol_version) >= 1.5:\n map_sv_types.update({\n pres.S_COLOR_SENSOR: [set_req.V_RGB],\n pres.S_MULTIMETER: [set_req.V_VOLTAGE,\n set_req.V_CURRENT,\n set_req.V_IMPEDANCE],\n pres.S_SOUND: [set_req.V_LEVEL],\n pres.S_VIBRATION: [set_req.V_LEVEL],\n pres.S_MOISTURE: [set_req.V_LEVEL],\n pres.S_AIR_QUALITY: [set_req.V_LEVEL],\n pres.S_DUST: [set_req.V_LEVEL],\n })\n map_sv_types[pres.S_LIGHT_LEVEL].append(set_req.V_LEVEL)\n\n if float(gateway.protocol_version) >= 2.0:\n map_sv_types.update({\n pres.S_INFO: [set_req.V_TEXT],\n pres.S_GAS: [set_req.V_FLOW, set_req.V_VOLUME],\n pres.S_GPS: [set_req.V_POSITION],\n pres.S_WATER_QUALITY: [set_req.V_TEMP, set_req.V_PH,\n set_req.V_ORP, set_req.V_EC]\n })\n map_sv_types[pres.S_CUSTOM].append(set_req.V_CUSTOM)\n map_sv_types[pres.S_POWER].extend(\n [set_req.V_VAR, set_req.V_VA, set_req.V_POWER_FACTOR])\n\n devices = {}\n gateway.platform_callbacks.append(mysensors.pf_callback_factory(\n map_sv_types, devices, add_devices, MySensorsSensor))", "def __init__(self, robot):\n\n #initialise the stick and the smart dashboard (in case we need stuff for auton):\n self.stick = wpilib.Joystick(0)\n self.smart_dashboard = NetworkTable.getTable(\"SmartDashboard\")\n\n #Main stick buttons.\n #-----------------------------------------------------------------------\n trigger = JoystickButton(self.stick, 1)\n thumb = JoystickButton(self.stick, 2)\n three = JoystickButton(self.stick, 3)\n four = JoystickButton(self.stick, 4)\n five = JoystickButton(self.stick, 5)\n six = JoystickButton(self.stick, 6)\n seven = JoystickButton(self.stick, 7)\n eight = 
JoystickButton(self.stick, 8)\n nine = JoystickButton(self.stick, 9)\n ten = JoystickButton(self.stick, 10)\n eleven = JoystickButton(self.stick, 11)\n twelve = JoystickButton(self.stick, 12)\n\n #Hat switch POV stuff.\n #-----------------------------------------------------------------------\n pov_north = POVButton(self.stick, 0)\n pov_northeast = POVButton(self.stick, 45)\n pov_east = POVButton(self.stick, 90)\n pov_southeast = POVButton(self.stick, 135)\n pov_south = POVButton(self.stick, 180)\n pov_southwest = POVButton(self.stick, 225)\n pov_west = POVButton(self.stick, 270)\n pov_northwest = POVButton(self.stick, 315)\n\n pov_south.whenPressed(SuperStrafeEntertainmentSystem(robot, SuperStrafeEntertainmentSystem.kBack))\n pov_north.whenPressed(SuperStrafeEntertainmentSystem(robot, SuperStrafeEntertainmentSystem.kForward))\n pov_east.whenPressed(SuperStrafeEntertainmentSystem(robot, SuperStrafeEntertainmentSystem.kRight))\n pov_west.whenPressed(SuperStrafeEntertainmentSystem(robot, SuperStrafeEntertainmentSystem.kLeft))", "def setup_platform(hass, config, add_devices, discovery_info=None):\n dev_id = config.get(CONF_ID, None)\n devname = config.get(CONF_NAME, \"EnOcean binary sensor\")\n add_devices([EnOceanBinarySensor(dev_id, devname)])", "def setup_platform(\n hass: HomeAssistant,\n config: ConfigType,\n add_entities: AddEntitiesCallback,\n discovery_info: DiscoveryInfoType | None = None,\n) -> None:\n\n host = config[CONF_HOST]\n port = config[CONF_PORT]\n token = config.get(CONF_ACCESS_TOKEN)\n\n client = ClementineRemote(host, port, token, reconnect=True)\n\n add_entities([ClementineDevice(client, config[CONF_NAME])])", "def initialize(self):\n self.logger.debug('Initializing Basler Camera')\n tl_factory = pylon.TlFactory.GetInstance()\n devices = tl_factory.EnumerateDevices()\n if len(devices) == 0:\n #print('No camera found')\n self.logger.warning('No camera found')\n\n self._driver = None\n for device in devices:\n if self.cam_num in device.GetFriendlyName():\n self._driver = pylon.InstantCamera()\n self._driver.Attach(tl_factory.CreateDevice(device))\n self._driver.Open()\n self.friendly_name = device.GetFriendlyName()\n print(device.GetFriendlyName())\n\n if not self._driver:\n msg = f'Basler {self.cam_num} not found. Please check if the camera is connected'\n self.logger.error(msg)\n return\n\n # self.logger.info(f'Loaded camera {self._driver.GetDeviceInfo().GetModelName()}')\n\n # self._driver.RegisterConfiguration(pylon.SoftwareTriggerConfiguration(), pylon.RegistrationMode_ReplaceAll,\n # pylon.Cleanup_Delete)\n\n #self.config.fetch_all()", "def __init__(self):\n GPIO.setwarnings(False)\n GPIO.cleanup() # Reset the high and low levels of the GPIO port\n #The following code defines the GPIO used to control the L298N chip. 
This definition is different for different Raspberry Pi driver boards.\n self.Motor_A_EN = 17\n self.Motor_B_EN = 4\n self.Motor_A_Pin1 = 27\n self.Motor_A_Pin2 = 18\n self.Motor_B_Pin1 = 21\n self.Motor_B_Pin2 = 26\n self.setup()", "def __init__(self):\n\n self.left_motor = ev3.LargeMotor(ev3.OUTPUT_B)\n self.right_motor = ev3.LargeMotor(ev3.OUTPUT_C)\n self.arm_motor = ev3.MediumMotor(ev3.OUTPUT_A)\n self.touch_sensor = ev3.TouchSensor()\n self.color_sensor = ev3.ColorSensor()\n self.ir_sensor = ev3.InfraredSensor()\n self.pixy = ev3.Sensor(driver_name=\"pixy-lego\")\n\n assert self.left_motor.connected\n assert self.right_motor.connected\n assert self.arm_motor.connected\n assert self.touch_sensor\n assert self.color_sensor\n assert self.ir_sensor\n assert self.pixy", "def startup(self):\n pass", "def setup_platform(hass, config, add_devices_callback, discovery_info=None):\n add_devices_callback([\n HE853Switch('OviSwitch', STATE_ON),\n HE853Switch('AC', STATE_OFF)\n ])", "def __initialize(self):\n self.__object = None\n \n self.__mainAct = None\n self.__mainMenu = None\n \n self.__e5project = e5App().getObject(\"Project\")\n \n self.__supportedVariants = []", "def __init__(self):\n\n ShowBase.__init__(self)\n controls.setup_mouse()\n self.tpp_camera = TPPCamera()\n\n try:\n self.world = World()\n except OSError:\n raise\n\n self.physics = Physics(self.world.player)\n base.taskMgr.add(self.__main_loop, \"__main_loop\")", "def main():\n\n # connect to the hue bridge\n bridge = phue.Bridge()\n bridge.connect() # throw an exception if connection was not established\n\n tracker = beat_tracker.BeatTracker()\n tracker.start()\n try:\n\n # obtain a list of lights to control\n lights = get_lights(bridge)\n\n x = 0\n ids = [l.light_id for l in lights]\n\n while True:\n\n time_between_beats = (60.0 / tracker.tempo)\n\n combos = [\n [1, 0],\n [1, 254],\n [1, 0],\n [500, 254],\n ]\n x = (x + 1) % 4\n\n temp, _brightness = combos[x]\n\n adjust = int(_brightness * (int(tracker.volume / 1500.0) * 2))\n\n if tracker.volume < 1000:\n adjust = 0\n\n brightness = int(min(adjust, 254))\n on = bool(tracker.volume > 800)\n command = {\"ct\": temp, \"bri\": brightness, \"transitiontime\": 1, \"on\": on}\n bridge.set_light(ids, command)\n\n if time_between_beats > 1:\n time.sleep(1)\n else:\n time.sleep(time_between_beats)\n\n finally:\n tracker.stop()", "async def async_setup_platform(hass: HomeAssistantType,\n config: ConfigType,\n async_add_entities,\n discovery_info=None) -> None:\n _LOGGER.info(\"Startup Youjia platform configuration.\")\n\n if (discovery_info is not None and config is None) or len(config) == 0:\n config = discovery_info\n\n if config is None:\n return\n\n if discovery_info is None:\n return\n\n if config['names'] is None:\n return\n\n for index, name in config['names'].items():\n entry_id = \"{0}{1:0>2}\".format(config['entity_id'], index)\n _LOGGER.info(\"Adding brightness light {} of {} into HA.\".format(name, entry_id))\n async_add_entities([YoujiaX160(name,\n entry_id,\n config['entity_id'],\n index,\n config['total_solts'],\n config['host_name']\n )], True)\n if config['auto'] is True:\n thread = threading.Thread(target=auto_checking_switch_state,\n args=(get_host(config['host_name']), config['entity_id']))\n thread.daemon = True\n SWITCH_STATUS_CHECKING_THREAD[config['name']] = thread\n thread.start()", "def init():\n\n global leftDriverStick\n global rightDriverStick\n global goGamePad\n\n try:\n leftDriverStick = T16000M(0)\n except:\n print('OI: Error - Could not instantiate 
Left Driver Stick on USB port 0!!!')\n\n try:\n rightDriverStick = T16000M(1)\n except:\n print('OI: Error - Could not instantiate Right Driver Stick on USB port 0!!!')\n\n try:\n goGamePad = Joystick(2)\n except:\n print('OI: Error - Could not instantiate Right Driver Stick on USB port 2!!!')\n\n\n # ----------------------------------------------------------\n # Driver Controls\n # ----------------------------------------------------------\n #global resetYawBtn\n #resetYawBtn = JoystickButton(rightDriverStick, config.btnResetYawAngleIndex)\n #resetYawBtn.whenPressed(NavxResetYawAngle())\n\n global btnDriveSlow\n btnDriveSlow = JoystickButton(leftDriverStick, config.btnDriveSlow)\n \n global btnEnableLightSensor\n btnEnableLightSensor = JoystickButton(leftDriverStick, config.btnEnableLightSensorIndex)\n\n global btnExtendAll\n btnExtendAll = JoystickButton(rightDriverStick, config.btnExtendAllIndex)\n btnExtendAll.whenPressed(ExtendAll())\n\n global btnRetract\n btnRetract = JoystickButton(rightDriverStick, config.btnRetractAllIndex)\n btnRetract.whenPressed(RetractAll())\n\n global btnExtendFront\n btnExtendFront = JoystickButton(rightDriverStick, config.btnExtendFrontIndex)\n btnExtendFront.whenPressed(ExtendFront())\n\n global btnExtendBack\n btnExtendBack = JoystickButton(rightDriverStick, config.btnExtendBackIndex)\n btnExtendBack.whenPressed(ExtendBack())\n\n global btnRetractFront\n btnRetractFront = JoystickButton(rightDriverStick, config.btnRetractFrontIndex)\n btnRetractFront.whenPressed(RetractFront())\n\n global btnCargoGrabTog\n btnCargoGrabTog = JoystickButton(goGamePad, config.btnHatchGrabTogIndex)\n btnCargoGrabTog.whenPressed(ExtendBack())\n \n \"\"\"\n global btnResetEncoders\n btnResetEncoders = JoystickButton(leftDriverStick, config.btnResetEncodersIndex)\n btnResetEncoders.whenPressed(TankDriveResetEncoders())\n \"\"\"\n\n \"\"\"\n global axisElevator\n axisElevator = JoystickAxis(goGamePad, config.axisElevatorIndex)\n axisElevator. #??? 
idk how to configure joystick axis\n \"\"\"\n\n \"\"\"\n global btnRampTog\n btnRampTog = JoystickButton(goGamePad, config.btnRampTogIndex)\n btnRampTog.whenPressed(ExtendFront())\n \"\"\"\n #global btnResetEncoders\n #btnResetEncoders = JoystickButton(leftDriverStick, config.btnResetEncodersIndex)\n #btnResetEncoders.whenPressed(TankDriveResetEncoders())\n\n # These variable names are inconsistent, need to be fixed!!!!\n #global btnRampExtendTog\n #btnRampExtendTog = JoystickButton(goGamePad, config.btnRampExtendTogIndex)\n #btnRampExtendTog.whenPressed(RampExtend())\n\n #global btnRampRetractTog\n #btnRampRetractTog = JoystickButton(goGamePad, config.btnRampRetractTogIndex)\n #btnRampRetractTog.whenPressed(RampRetract())", "def setup_platform(hass, config, add_entities, discovery_info=None):\n pass", "def main():\n\n # Fixes the startup process.\n util.replace_command('Launcher.exe', 'Borderlands2.exe')\n util.append_argument('-NoSplash')\n\n # Disables esync prevents crashes.\n util.disable_esync()\n\n # Enables NVIDIA PhysX in Borderlands 2.\n util.protontricks('physx')", "def __init__(self):\n self.server_name = 'Binary Light Device'\n self.device = None", "def setup_platform(hass, config, add_devices, discovery_info=None):\n cl = hass.data.get(DATA_CIRCADIAN_LIGHTING)\n if cl:\n cs = CircadianSwitch(\n hass,\n cl,\n name=config.get(CONF_NAME),\n lights_ct=config.get(CONF_LIGHTS_CT, []),\n lights_rgb=config.get(CONF_LIGHTS_RGB, []),\n lights_xy=config.get(CONF_LIGHTS_XY, []),\n lights_brightness=config.get(CONF_LIGHTS_BRIGHT, []),\n disable_brightness_adjust=config.get(CONF_DISABLE_BRIGHTNESS_ADJUST),\n min_brightness=config.get(CONF_MIN_BRIGHT),\n max_brightness=config.get(CONF_MAX_BRIGHT),\n sleep_entity=config.get(CONF_SLEEP_ENTITY),\n sleep_state=config.get(CONF_SLEEP_STATE),\n sleep_colortemp=config.get(CONF_SLEEP_CT),\n sleep_brightness=config.get(CONF_SLEEP_BRIGHT),\n disable_entity=config.get(CONF_DISABLE_ENTITY),\n disable_state=config.get(CONF_DISABLE_STATE),\n initial_transition=config.get(CONF_INITIAL_TRANSITION),\n )\n add_devices([cs])\n\n def update(call=None):\n \"\"\"Update lights.\"\"\"\n cs.update_switch()\n\n return True\n else:\n return False", "def setUp(self):\n self._plugin = spotlight_volume.SpotlightVolumePlugin()\n self._parser = plist.PlistParser()", "def elinos_init():\n elinos_env = get_elinos_environment()\n\n solib_dirs = []\n\n # System libraries\n if None in (elinos_env[key] for key in (\"cdk\", \"target\")):\n warn(\"ELinOS system libraries will not be loaded\")\n else:\n solib_prefix = \"%s/%s\" % (elinos_env[\"cdk\"], elinos_env[\"target\"])\n solib_dirs += [\"%s/%s\" % (solib_prefix, \"lib\")]\n gdb.execute(\"set solib-absolute-prefix %s\" % solib_prefix)\n\n # Xenomai libraries. Those are optional, so have a lighter warning\n # if they cannot be located.\n if elinos_env[\"project\"] is None:\n warn(\"Xenomai libraries may not be loaded\")\n else:\n for dir in elinos_env['xenomai']:\n solib_dirs += [\"%s/%s\"\n % (dir, \"xenomai-build/usr/realtime/lib\")]\n\n if len(solib_dirs) != 0:\n gdb.execute(\"set solib-search-path %s\" % \":\".join(solib_dirs))", "def __init__(self, **kwargs) -> None:\n super(Light, self).__init__(**kwargs)\n\n get = kwargs.get\n if get('light') is None:\n raise Exception('Light is required')\n if get('button') is None:\n raise Exception('Button is required')\n\n self._light: LED = LED(get('light'))\n self._button: Button = Button(get('button'), pull_up=False)\n self._state: bool = get('state', False)" ]
[ "0.670556", "0.669467", "0.64281476", "0.6402788", "0.6392791", "0.6329436", "0.6317174", "0.6247289", "0.61651385", "0.6147113", "0.6105949", "0.60822815", "0.6056726", "0.60057616", "0.5982987", "0.5977405", "0.59728897", "0.5957383", "0.59512126", "0.59488773", "0.59368527", "0.5917735", "0.5917156", "0.59153193", "0.591321", "0.5909372", "0.59063745", "0.5903958", "0.59006363", "0.5900078", "0.5898548", "0.5886142", "0.58784187", "0.5877129", "0.58683145", "0.5858067", "0.58519256", "0.58511335", "0.5843388", "0.5834903", "0.5834903", "0.5823042", "0.5817788", "0.578972", "0.5788241", "0.57785577", "0.5775282", "0.5774394", "0.5763769", "0.5756435", "0.5751476", "0.57261163", "0.57197356", "0.5717984", "0.57172936", "0.5712271", "0.5703317", "0.57029974", "0.5694296", "0.56855386", "0.56842375", "0.5682097", "0.56784886", "0.56782174", "0.56611335", "0.565201", "0.56393975", "0.5637135", "0.56367576", "0.5636088", "0.5631931", "0.5622486", "0.56179154", "0.5617783", "0.56128687", "0.5611254", "0.5602034", "0.557001", "0.5566049", "0.55617654", "0.5559074", "0.55579007", "0.5544716", "0.55435723", "0.55428994", "0.55415285", "0.5536564", "0.5535264", "0.5530821", "0.5529509", "0.5528059", "0.55206895", "0.55190367", "0.5517039", "0.55141515", "0.55133396", "0.54994065", "0.5497537", "0.54965186", "0.54962337" ]
0.73331904
0
Initialize an Opple light.
def __init__(self, name, host):
        self._device = OppleLightDevice(host)
        self._name = name
        self._is_on = None
        self._brightness = None
        self._color_temp = None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, **kwargs) -> None:\n super(Light, self).__init__(**kwargs)\n\n get = kwargs.get\n if get('light') is None:\n raise Exception('Light is required')\n if get('button') is None:\n raise Exception('Button is required')\n\n self._light: LED = LED(get('light'))\n self._button: Button = Button(get('button'), pull_up=False)\n self._state: bool = get('state', False)", "def _create_example_light():\n return Light({\"warning\": False, \"off\": True})", "def InitLightBasic(self):\r\n\t\t\r\n\t\taLight = AmbientLight(\"AmbientLight\")\r\n\t\taLight.setColor(Vec4(0.3, 0.3, 0.3, 1))\r\n\t\trender.setLight(render.attachNewNode(aLight))\r\n\t\r\n\t\tdLight1 = DirectionalLight(\"DirectionalLight1\")\r\n\t\tdLight1.setColor(Vec4(0.65, 0.6, 0.6, 1))\t\t\r\n\t\tdLight1NP = render.attachNewNode(dLight1)\r\n\t\tdLight1NP.setHpr(100, -40, 0)\r\n\t\trender.setLight(dLight1NP)\r\n\t\r\n\t\tdLight2 = DirectionalLight(\"DirectionalLight2\")\r\n\t\tdLight2.setColor(Vec4(0.35, 0.35, 0.3, 1))\r\n\t\tdLight2NP = render.attachNewNode(dLight2)\r\n\t\tdLight2NP.setHpr(150, -60, 0)\r\n\t\trender.setLight(dLight2NP)", "def __init__(self, LightFun):\n self.setParameters()\n self.Light = LightFun", "def __init__(self, light: pykulersky.Light):\n self._light = light\n self._hs_color = None\n self._brightness = None\n self._white_value = None\n self._available = True", "def __init__(self, light, lights, settings):\n\n if 'name' in lights[light]:\n self.name = lights[light]['name']\n else:\n self.name = light\n if 'gpio' in lights[light]:\n self.gpio = lights[light]['gpio']\n else:\n self.gpio = 18 # GPIO pin 18 is the default for testing\n if 'on' in lights[light]:\n self.on = lights[light]['on']\n else:\n self.on = 'continuous'\n\n GPIO.setup(self.gpio, GPIO.OUT)\n if self.on == 'continuous':\n self.turn_on()\n else: # set up light on/off cyclying other than continuous\n pass # for example, during certain hours", "def __init__(self):\n GPIO.setmode(GPIO.BOARD)\n for light in self.all:\n GPIO.setup(light, GPIO.OUT)", "def __init__(self,\r\n lightpos=(10, -10, 20),\r\n lightcol=(1.0, 1.0, 1.0),\r\n lightamb=(0.1, 0.1, 0.2)):\r\n super(Light, self).__init__()\r\n self.lightpos = lightpos\r\n self.lightcol = lightcol\r\n self.lightamb = lightamb", "def __init__(self, device: SensemeDevice) -> None:\n super().__init__(device, f\"{device.name} Light\")\n self._attr_supported_color_modes = {ColorMode.COLOR_TEMP}\n self._attr_color_mode = ColorMode.COLOR_TEMP\n self._attr_min_mireds = color_temperature_kelvin_to_mired(\n device.light_color_temp_max\n )\n self._attr_max_mireds = color_temperature_kelvin_to_mired(\n device.light_color_temp_min\n )", "def initialize(self):\n self.ha_url = self.args.get(\"ha_url\", None)\n self.use_current_brightness = self.args.get(\"use_current_brightness\", False)\n self.condition = self.args.get(\"condition\")\n self.lights = self.args[\"lights\"]\n self.listen_state(self.change_lights_color, self.args[\"media_player\"], attribute = self.args.get(\"photo_attribute\", \"entity_picture\"))", "def __init__(self, parent, endpoint):\n Wemo_Endpoint.__init__(self, parent, endpoint)\n self.device_type = self._Parent._DeviceTypes.get('wemo_light')\n self.FEATURES.update({\n FEATURE_BRIGHTNESS: True,\n FEATURE_PERCENT: True,\n FEATURE_NUMBER_OF_STEPS: 100\n })", "def initialize_lights(self):\n\t\tfor light in OUTPUT.LIGHTS:\n\t\t\tif light != -1:\n\t\t\t\tio.set_bit(light, 0)\n\t\tfor order in self.orderQueue.yield_orders(exclude=(None,)):\n\t\t\tself.set_button_light(order.floor, 
OUTPUT.IN_LIGHTS, 1)", "def __init__(self):\n\n GPIO.setup(PIN_BTN, GPIO.IN, GPIO.PUD_UP)\n GPIO.setup(PIN_RED_LED_0, GPIO.OUT, GPIO.LOW)\n GPIO.setup(PIN_BLUE_LED, GPIO.OUT, GPIO.LOW)", "def setupLights(self) :\n\t\tself.ambientLight = render.attachNewNode(AmbientLight( \\\n\t\t\t\t\t\"ambientLight\"))\n\t\tself.ambientLight.node().setColor(Vec4(.8,.8,.8,1))\n\t\trender.setLight(self.ambientLight)\n\n\t\tdLight1 = DirectionalLight(\"dLight1\")\n\t\tdLight1.setColor(Vec4(6,5,7,1))\n\t\tdLight1.setDirection(Vec3(1,1,1))\n\t\tdlnp1 = render.attachNewNode(dLight1)\n\t\tdlnp1.setHpr(30,-160,0)\n\t\trender.setLight(dlnp1)\n\n\t\tdLight2 = DirectionalLight(\"dLight2\")\n\t\tdLight2.setColor(Vec4(.6,.7,1,1))\n\t\tdLight2.setDirection(Vec3(-1,-1,-1))\n\t\tself.dlnp2 = render.attachNewNode(dLight2)\n\t\tself.dlnp2.node().setScene(render)\n\t\tself.dlnp2.setHpr(-70,-60,0)\n\t\trender.setLight(self.dlnp2)", "def initialize(self):\n\t\tpcd8544.LCD.initialize(self)\n\t\tRPIO.setup(self._backlight_pin, RPIO.OUT, initial=RPIO.LOW)", "def setUp(self):\n self.hass = get_test_home_assistant()\n controller_mock = mock.MagicMock()\n dev_dict = {\"address\": \"a1\", \"name\": \"fake_light\", \"brightness_levels\": 256}\n self.light = mochad.MochadLight(self.hass, controller_mock, dev_dict)", "def setUp(self):\n self.hass = get_test_home_assistant()\n controller_mock = mock.MagicMock()\n dev_dict = {\"address\": \"a1\", \"name\": \"fake_light\", \"brightness_levels\": 64}\n self.light = mochad.MochadLight(self.hass, controller_mock, dev_dict)", "def setUp(self):\n self.hass = get_test_home_assistant()\n controller_mock = mock.MagicMock()\n dev_dict = {\"address\": \"a1\", \"name\": \"fake_light\", \"brightness_levels\": 32}\n self.light = mochad.MochadLight(self.hass, controller_mock, dev_dict)", "def __init__(self, envirophat, use_leds):\n self.envirophat = envirophat\n self.use_leds = use_leds\n # sensors readings\n self.light = None\n self.light_red = None\n self.light_green = None\n self.light_blue = None\n self.accelerometer_x = None\n self.accelerometer_y = None\n self.accelerometer_z = None\n self.magnetometer_x = None\n self.magnetometer_y = None\n self.magnetometer_z = None\n self.temperature = None\n self.pressure = None\n self.voltage_0 = None\n self.voltage_1 = None\n self.voltage_2 = None\n self.voltage_3 = None", "def __init__(self, red_pin, green_pin, blue_pin):\n #self.red = gpiozero.PWMLED(red_pin, frequency=80, initial_value=1)\n #self.green = gpiozero.PWMLED(green_pin, frequency=80, initial_value=1)\n #self.blue = gpiozero.PWMLED(blue_pin, frequency=80, initial_value=1)\n self.red = gpiozero.LED(red_pin)\n self.green = gpiozero.LED(green_pin)\n self.blue = gpiozero.LED(blue_pin)\n self.leds = [self.red, self.green, self.blue]\n self.off()", "def init_led():\r\n global led\r\n led = LED(LED_GPIO_PIN)\r\n led.off()", "def set_light_on(self):\r\n self._light = \"ON\"", "def __init__(self):\n self._ll = LowLevelLibs()\n self._lib = self._ll.ratchet", "def __init__(self):\n self._ll = LowLevelLibs()\n self._lib = self._ll.ratchet", "def __init__(self, parent):\n super(Demo4, self).__init__(parent)\n self.scenes = []\n self.draw_axes = True\n self.lighting = True\n self.current_scene = 0\n self.objects = []\n self.diffuse_light = [0.8, 0.8, 0.8, 1]", "def set_light_on(self):\n self._light = \"ON\"", "def __init__(self, hass, lj, i, name):\n self._hass = hass\n self._lj = lj\n self._index = i\n self._brightness = 0\n self._name = name\n\n lj.on_load_activated(i, self._on_load_changed)\n 
lj.on_load_deactivated(i, self._on_load_changed)", "def __init__(self, device: SensemeDevice) -> None:\n super().__init__(device, device.name)\n self._attr_supported_color_modes = {ColorMode.BRIGHTNESS}\n self._attr_color_mode = ColorMode.BRIGHTNESS", "def __init__(self):\n super().__init__()\n\n # Robot state\n self.ask_mode = False\n\n # Connect two large motors on output ports B and C\n self.sound = Sound()\n self.leds = Leds()\n self.p1 = TouchSensor(INPUT_1)\n self.p2 = TouchSensor(INPUT_2)\n self.p3 = TouchSensor(INPUT_3)\n self.p4 = TouchSensor(INPUT_4)", "def test_light_interface(light_name='head_green_light'):\n l = Lights()\n rospy.loginfo(\"All available lights on this robot:\\n{0}\\n\".format(\n ', '.join(l.list_all_lights())))\n rospy.loginfo(\"Blinking Light: {0}\".format(light_name))\n on_off = lambda x: 'ON' if l.get_light_state(x) else 'OFF'\n rospy.loginfo(\"Initial state: {0}\".format(on_off(light_name)))\n # turn on light\n l.set_light_state(light_name, True)\n rospy.sleep(1)\n rospy.loginfo(\"New state: {0}\".format(on_off(light_name)))\n # turn off light\n l.set_light_state(light_name, False)\n rospy.sleep(1)\n rospy.loginfo(\"New state: {0}\".format(on_off(light_name)))\n # turn on light\n l.set_light_state(light_name, True)\n rospy.sleep(1)\n rospy.loginfo(\"New state: {0}\".format(on_off(light_name)))\n # reset output\n l.set_light_state(light_name, False)\n rospy.sleep(1)\n rospy.loginfo(\"Final state: {0}\".format(on_off(light_name)))", "def __init__(self):\n self._ll = LowLevelLibs()\n self._lib = self._ll.pythia", "def initialize(self):\n self.ros.enable()\n self.phone_link.enable()", "def appInit(self):\n glutInitDisplayMode( GLUT_SINGLE | GLUT_RGB | GLUT_DEPTH )\n glClearColor(0.0, 0.0, 0.0, 0.0)\n glClearDepth(1.0 )\n glEnable( GL_DEPTH_TEST )\n glShadeModel( GL_SMOOTH )\n glEnable( GL_NORMALIZE )\n glEnable( GL_COLOR_MATERIAL )\n\n glEnable( GL_LIGHTING )\n glEnable( GL_LIGHT0 )\n\n self.set_lighting()\n\n self.make_simple_scenes()\n self.make_multi_object_scene()", "def build_light(self, item):\n\n # Validete NMS object.\n if \"ObjectID\" not in item:\n return\n\n # Get object id from item.\n object_id = item[\"ObjectID\"]\n # Find light data\n if object_id not in self.lights_dictionary:\n return\n\n # Build Lights\n light_information = self.lights_dictionary[object_id]\n for idx, light_values in enumerate(light_information.values()):\n # Get Light Properties.\n light_type = light_values[\"type\"]\n light_location = light_values[\"location\"]\n\n # Create light.\n light = bpy.ops.object.light_add(\n type=light_type.upper(),\n location=light_location\n )\n light = bpy.context.object\n light[\"NMS_LIGHT\"] = True\n light.name = \"{0}_light{1}\".format(item.name, idx)\n data_copy = deepcopy(light_values)\n\n # Remove invalid blender properties.\n data_copy.pop(\"type\")\n data_copy.pop(\"location\")\n\n # Apply all other properties to blender object.\n for key, value in data_copy.items():\n if isinstance(value, list):\n value = mathutils.Vector(tuple(value))\n setattr(light.data, key, value)\n\n # Parent to object.\n utils.parent(light, item)\n\n # Disable Selection.\n light.hide_viewport = True\n light.hide_select = True", "def __init__(\n self,\n hass,\n cl,\n name,\n lights_ct,\n lights_rgb,\n lights_xy,\n lights_brightness,\n disable_brightness_adjust,\n min_brightness,\n max_brightness,\n sleep_entity,\n sleep_state,\n sleep_colortemp,\n sleep_brightness,\n disable_entity,\n disable_state,\n initial_transition,\n ):\n self.hass = hass\n self._cl = cl\n 
self._name = name\n self._entity_id = \"switch.\" + slugify(f\"circadian_lighting {name}\")\n self._state = None\n self._icon = ICON\n self._hs_color = None\n self._lights_ct = lights_ct\n self._lights_rgb = lights_rgb\n self._lights_xy = lights_xy\n self._lights_brightness = lights_brightness\n self._disable_brightness_adjust = disable_brightness_adjust\n self._min_brightness = min_brightness\n self._max_brightness = max_brightness\n self._sleep_entity = sleep_entity\n self._sleep_state = sleep_state\n self._sleep_colortemp = sleep_colortemp\n self._sleep_brightness = sleep_brightness\n self._disable_entity = disable_entity\n self._disable_state = disable_state\n self._initial_transition = initial_transition\n self._attributes = {\"hs_color\": self._hs_color, \"brightness\": None}\n\n self._lights = lights_ct + lights_rgb + lights_xy + lights_brightness\n\n # Register callbacks\n dispatcher_connect(hass, CIRCADIAN_LIGHTING_UPDATE_TOPIC, self.update_switch)\n track_state_change(hass, self._lights, self.light_state_changed)\n if self._sleep_entity is not None:\n track_state_change(hass, self._sleep_entity, self.sleep_state_changed)\n if self._disable_entity is not None:\n track_state_change(hass, self._disable_entity, self.disable_state_changed)", "def test_04_Light(self):\n l_xml = self.m_xml.light\n l_device = self.m_device_obj\n l_light = deviceXML.read_base_device_object_xml(l_device, l_xml)\n # print(PrettyFormatAny.form(l_light, 'C4-04-A - Light'))\n self.assertEqual(l_light.Name, TESTING_LIGHT_NAME_0)\n self.assertEqual(l_device.RoomName, TESTING_LIGHT_ROOM_NAME_0)", "def turn_on(self, **kwargs: Any) -> None:\n self._light.brightness = kwargs.get(ATTR_BRIGHTNESS, 255)\n self._light.turn_on()", "def test_02_Light(self):\n l_xml = self.m_xml.light_sect[1]\n print(PrettyFormatAny.form(l_xml, 'C3-02-A - XML'))\n l_device = self.m_device_obj\n l_light = deviceXML.read_base_device_object_xml(l_device, l_xml)\n print(PrettyFormatAny.form(l_light, 'C3-02-B - Light'))\n self.assertEqual(l_light.Name, TESTING_LIGHT_NAME_1)\n self.assertEqual(l_device.RoomName, TESTING_LIGHT_ROOM_NAME_1)\n self.assertEqual(l_light.UPBAddress, convert.dotted_hex2int(TESTING_INSTEON_ADDRESS_0))", "def __init__(self):\n self._ll = LowLevelLibs()\n self._lib = self._ll.phe", "def __init__(self, *args, **kwargs):\n _gdi_.Colour_swiginit(self,_gdi_.new_Colour(*args, **kwargs))", "def light_action():\n if light_btn.isChecked():\n self.variables.default_values_dict[\"settings\"][\"external_lights\"] = True\n else:\n self.variables.default_values_dict[\"settings\"][\n \"external_lights\"\n ] = False", "def __init__(self):\n # Hardware initialization\n gpio.init()\n # Logging\n self._logger = logging.getLogger(' '.join([__name__, __version__]))\n self._logger.debug(\n 'Instance of %s created: %s',\n self.__class__.__name__,\n str(self)\n )", "def __init__(self, client, num_lights):\n self.client = client\n self.num_lights = num_lights\n self.led = LEDStrip(self.num_lights)", "def __init__(self, host):\n self._io = RemoteIO(host)\n self._host = host\n\n self._left_wheel = Wheel(id='b', side='left', remote_io=self._io)\n self._right_wheel = Wheel(id='a', side='right', remote_io=self._io, inverse=True)\n\n self._cam = Camera(host)\n\n self._left_led = LED(side='left', remote_io=self._io)\n self._front_led = LED(side='center', remote_io=self._io)\n self._right_led = LED(side='right', remote_io=self._io)", "def __init__(self, name, light=True, extra=None):\n\n self.name = name\n self.light = light\n\n if self.light:\n self.d 
= common_light.copy()\n else:\n self.d = common_dark.copy()\n\n self.d.update(extra)", "def __init__(self, parent, endpoint):\n Wemo_Endpoint.__init__(self, parent, endpoint)\n self.device_type = self._Parent._DeviceTypes.get('wemo_switch')\n self.FEATURES.update({\n FEATURE_BRIGHTNESS: False,\n FEATURE_PERCENT: False,\n FEATURE_NUMBER_OF_STEPS: False\n })", "def setup_platform(\n hass: HomeAssistant,\n config: ConfigType,\n add_entities: AddEntitiesCallback,\n discovery_info: DiscoveryInfoType | None = None,\n) -> None:\n name = config[CONF_NAME]\n host = config[CONF_HOST]\n entity = OppleLight(name, host)\n\n add_entities([entity])\n\n _LOGGER.debug(\"Init light %s %s\", host, entity.unique_id)", "def flicker_lights(self):\n print 'Lights Set'", "def __init__(self):\n self.server_name = 'Binary Light Device'\n self.device = None", "def __init__(self):\n super().__init__()\n\n # Gadget state\n \n self.isDoorOpen = False\n self.verified = True\n\n # Ev3dev initialization\n self.leds = Leds()\n self.sound = Sound()\n self.drive = MoveTank(OUTPUT_B, OUTPUT_C)\n \n self.ir_sensor = InfraredSensor()\n self.ir_sensor.mode = self.ir_sensor.MODE_IR_REMOTE\n self.color_sensor = ColorSensor()\n self.color_sensor.mode = 'COL-COLOR' # WHITE\n\n # Start threads\n threading.Thread(target=self._patrol_thread, daemon=True).start()", "def lighton(update: Update, context: CallbackContext) -> None:\n if __sauna.control.getPortValue(\"Light Sensor\") == 0:\n # TODO Mit Stromstossrelais ist dieser Code richtig\n # __sauna.control.togglePortValue(\"Light Switch\")\n update.message.reply_text(\"Light is on\")\n else:\n update.message.reply_text(\"Light was already on\")\n\n __sauna.control.setPortValue(\"Light Switch\")\n val = __sauna.control.getPortValue(\"Light Switch\")\n update.message.reply_text(\"Light Switch := \" + str(val))", "def turn_on(self, **kwargs):\n self._brightness = 100\n self._state = 'on'\n #self._light.brightness = kwargs.get(ATTR_BRIGHTNESS, 255)\n #self._light.turn_on()\n _LOGGER.info(\"turn_on() is called\")", "def __init__(self):\n\n super().__init__()\n\n self.active = True\n self.driver = Driver.instance()\n self.sensor_manager = SensorManager.instance()\n\n self.pwm = Adafruit_PCA9685.PCA9685(address=0x40, busnum=1) # create PCA9685-object at I2C-port\n self.pwm.set_pwm_freq(50)\n\n GPIO.setwarnings(False)\n GPIO.setmode(GPIO.BCM)\n GPIO.setup(20, GPIO.OUT)\n GPIO.setup(21, GPIO.OUT)\n GPIO.setup(26, GPIO.OUT)\n self.driven_distance = 0", "def __init__( self, dev, port ):\n super( Grove_Light_Sensor, self ).__init__( dev, port )", "def __init__(self):\n self._ll = LowLevelLibs()\n self._lib = self._ll.foundation", "def __init__(self, device):\n self._unique_id = device\n self._device = AehW4a1(device)\n self._fan_modes = FAN_MODES\n self._swing_modes = SWING_MODES\n self._preset_modes = PRESET_MODES\n self._attr_available = False\n self._on = None\n self._current_temperature = None\n self._target_temperature = None\n self._attr_hvac_mode = None\n self._fan_mode = None\n self._swing_mode = None\n self._preset_mode = None\n self._previous_state = None", "def __init__(self, background_color=np.array([1.0, 1.0, 1.0]),\n camera=None):\n self._objects = {}\n self._lights = {}\n self._ambient_light = AmbientLight(np.array([0.,0.,0.]), 0.0)\n self._background_color = background_color\n\n self._renderer = None\n self.camera = camera", "def init_gl(self):\n\n # default background color is white-ish\n background = [.99, .99, .99, 1.0]\n # if user passed a background color use it\n if 
'background' in self.kwargs:\n try:\n # convert to (4,) uint8 RGBA\n background = to_rgba(self.kwargs['background'])\n # convert to 0.0 - 1.0 float\n background = background.astype(np.float64) / 255.0\n except BaseException:\n log.error('background color wrong!',\n exc_info=True)\n # apply the background color\n gl.glClearColor(*background)\n\n max_depth = (np.abs(self.scene.bounds).max(axis=1) ** 2).sum() ** .5\n max_depth = np.clip(max_depth, 500.00, np.inf)\n gl.glDepthRange(0.0, max_depth)\n\n gl.glClearDepth(1.0)\n gl.glEnable(gl.GL_DEPTH_TEST)\n gl.glDepthFunc(gl.GL_LEQUAL)\n\n gl.glEnable(gl.GL_DEPTH_TEST)\n gl.glEnable(gl.GL_CULL_FACE)\n gl.glEnable(gl.GL_LIGHTING)\n gl.glEnable(gl.GL_LIGHT0)\n gl.glEnable(gl.GL_LIGHT1)\n\n # put the light at one corner of the scenes AABB\n gl.glLightfv(gl.GL_LIGHT0,\n gl.GL_POSITION,\n rendering.vector_to_gl(np.append(self.scene.bounds[1], 0)))\n gl.glLightfv(gl.GL_LIGHT0, gl.GL_SPECULAR,\n rendering.vector_to_gl(.5, .5, 1, 1))\n gl.glLightfv(gl.GL_LIGHT0, gl.GL_DIFFUSE,\n rendering.vector_to_gl(1, 1, 1, .75))\n gl.glLightfv(gl.GL_LIGHT0, gl.GL_AMBIENT,\n rendering.vector_to_gl(.1, .1, .1, .2))\n\n gl.glColorMaterial(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE)\n gl.glEnable(gl.GL_COLOR_MATERIAL)\n gl.glShadeModel(gl.GL_SMOOTH)\n\n gl.glMaterialfv(gl.GL_FRONT,\n gl.GL_AMBIENT,\n rendering.vector_to_gl(0.192250, 0.192250, 0.192250))\n gl.glMaterialfv(gl.GL_FRONT,\n gl.GL_DIFFUSE,\n rendering.vector_to_gl(0.507540, 0.507540, 0.507540))\n gl.glMaterialfv(gl.GL_FRONT,\n gl.GL_SPECULAR,\n rendering.vector_to_gl(.5082730, .5082730, .5082730))\n\n gl.glMaterialf(gl.GL_FRONT,\n gl.GL_SHININESS,\n .4 * 128.0)\n\n gl.glEnable(gl.GL_BLEND)\n gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA)\n\n gl.glEnable(gl.GL_LINE_SMOOTH)\n gl.glHint(gl.GL_LINE_SMOOTH_HINT, gl.GL_NICEST)\n\n gl.glLineWidth(1.5)\n gl.glPointSize(4)", "def init(self):\n\n # Configuration interface support comes with plasma\n self.setHasConfigurationInterface(False)\n\n # Aspect ratio defined in Plasma\n self.setAspectRatioMode(Plasma.IgnoreAspectRatio)\n\n # Theme is a const variable holds Applet Theme\n self.theme = Plasma.Svg(self)\n\n # It gets default plasma theme's background\n self.theme.setImagePath(\"widgets/background\")\n\n # Resize current theme as applet size\n self.theme.resize(self.size())\n\n self.mainWidget = None\n self.layout = None\n\n self.initPlasmoid()", "async def lights(self, context):\n\n await random_image(context, 'lights')", "def __init__(self, tellcore_device, signal_repetitions):\n super().__init__(tellcore_device, signal_repetitions)\n\n self._brightness = 255", "def __init__(self, root, io):\n parts.hand.Hand.__init__(self, root=root, io=io)\n\n dxl_motors = OrderedDict({\n name: dict(conf)\n for name, conf in self.dxl_motors.items()\n })\n\n self.attach_dxl_motors(dxl_motors)\n\n \"\"\"\n self._load_sensor = self.io.find_module('force_gripper')\n self._load_sensor.offset = 4\n self._load_sensor.scale = 10000\n \"\"\"", "def __init__(self, *args):\n super().__init__(*args, category=CATEGORY_GARAGE_DOOR_OPENER)\n self._flag_state = False\n\n serv_garage_door = self.add_preload_service(SERV_GARAGE_DOOR_OPENER)\n self.char_current_state = serv_garage_door.configure_char(\n CHAR_CURRENT_DOOR_STATE, value=0\n )\n self.char_target_state = serv_garage_door.configure_char(\n CHAR_TARGET_DOOR_STATE, value=0, setter_callback=self.set_state\n )", "def setup(self):\n\t\tself.interface = self.getDriver('light_interface')\n\n\t\tself.pin = 
self.config['interface_position']\n\t\tself.blink_rate = self.config['blink_rate'] / 2 or 0.5\n\t\tself.is_on = False\n\n\t\tself.intensity = 255\n\t\tself.blink = False\n\t\tself.count = None\n\t\tself.current_count = False\n\t\tself.current_count = None\n\n\t\tself.saved_intensity = None\n\t\tself.saved_blink = False\n\t\tself.saved_count = None\n\n\t\treturn True", "def __init__(self):\n self._read_calibration_data()\n self.set_oversamplings_and_mode(\n HumidityOversampling.x08,\n TemperatureOversampling.x08,\n PressureOversampling.x16,\n SensorMode.Normal)\n self.set_config(\n InactiveDuration.ms1000,\n FilterCoefficient.fc04)", "def __init__(self, color, brightness=None):\n if type(color) is HSBK:\n self.hue = color.hue\n self.saturation = color.saturation\n self.brightness = color.brightness\n self.kelvin = color.kelvin\n return\n elif type(color) is str:\n response = requests.get(f\"https://api.lifx.com/v1/color?string={color}\", headers=headers)\n color = json.loads(response.content)\n\n self.hue = color.get('hue')\n self.saturation = color.get('saturation')\n if brightness is not None:\n self.brightness = color['brightness']\n else:\n self.brightness = color.get('brightness', 1.0)\n self.kelvin = color.get('kelvin')", "def __init__(self, BridgeObj, speed=0.005):\n self.speed = speed\n self.BridgeObj = BridgeObj\n print(\"[RainbowAll] Mode Initialized. Speed : \" + str(speed))", "def __init__(self, color, distribution):\n self._spots = [spot.Spot() for _ in xrange(0, constants.ROAD_LENGTH)]\n self._spots[constants.CROSSING_LOCATION - 1].add_light(color, 0)\n self._spots[constants.CROSSING_LOCATION + constants.NUM_LANES].add_light(color, 1)\n self._steps = 0\n self._num_queued = 0\n self._car_distro = distributions.Probability()\n self._distribution = distribution", "def __init__(self):\n GPIO.setwarnings(False)\n GPIO.cleanup() # Reset the high and low levels of the GPIO port\n #The following code defines the GPIO used to control the L298N chip. 
This definition is different for different Raspberry Pi driver boards.\n self.Motor_A_EN = 17\n self.Motor_B_EN = 4\n self.Motor_A_Pin1 = 27\n self.Motor_A_Pin2 = 18\n self.Motor_B_Pin1 = 21\n self.Motor_B_Pin2 = 26\n self.setup()", "def Create(options: Options) -> HolLight:\n return HolLight(options)", "def __init__(self) -> None:\n\n super().__init__(255, 255, 255, 255)", "def set_lighting(self):\n lightPosition = [-1, 1, 1, 0]\n glLightfv(GL_LIGHT0, GL_POSITION, lightPosition)\n\n ambientLight = [1, 1, 0.4, 0.5]\n\n if self.lighting:\n glEnable(GL_LIGHTING)\n glEnable(GL_LIGHT0)\n glLightfv(GL_LIGHT0, GL_AMBIENT, ambientLight)\n else:\n glDisable(GL_LIGHTING)\n glDisable(GL_LIGHT0)", "def testLightImport(self):\n\n archive = IArchive(\"light1.abc\")\n emptyLightObj = ILight(archive.getTop(), \"emptyLight\" )\n lightObj = ILight(archive.getTop(), \"myLight\" )\n\n self.assertFalse(emptyLightObj.getSchema().getArbGeomParams().valid())\n self.assertFalse(emptyLightObj.getSchema().getUserProperties().valid())\n self.assertEqual(lightObj.getSchema().getArbGeomParams().getNumProperties(), 1)\n self.assertEqual(lightObj.getSchema().getUserProperties().getNumProperties(), 1)\n\n samp = lightObj.getSchema().getCameraSchema().getValue( 0 )\n window = samp.getScreenWindow();\n self.assertAlmostEqual( window['top'], 0.666666666666667 )\n self.assertAlmostEqual( window['bottom'], -0.666666666666667 )\n self.assertAlmostEqual( window['left'], -1.0 )\n self.assertAlmostEqual( window['right'], 1.0 )\n\n samp = lightObj.getSchema().getCameraSchema().getValue( 1 )\n window = samp.getScreenWindow();\n self.assertAlmostEqual( window['top'], -0.35 )\n self.assertAlmostEqual( window['bottom'], 0.75 )\n self.assertAlmostEqual( window['left'], 0.1 )\n self.assertAlmostEqual( window['right'], 0.5 )\n\n self.assertFalse(lightObj.getSchema().getCameraSchema().getChildBoundsProperty().valid())", "def __init__(self):\n self._read_calibration_data()\n self.configure_sensor(\n TemperatureOversamplings.x08,\n PressureOversamplings.x16,\n HumidityOversamplings.x08,\n IIRFilterCoefficients.FC_003,\n 250,\n 250)", "def initialize_robot():\n\n proxy_motion = naoqi.ALProxy(\"ALMotion\", IP_ROBOT, PORT_ROBOT)\n proxy_motion.wakeUp()\n\n proxy_autonomous_life = naoqi.ALProxy(\"ALAutonomousLife\", IP_ROBOT, PORT_ROBOT)\n proxy_autonomous_life.setState(\"disabled\")\n\n proxy_motion = naoqi.ALProxy(\"ALMotion\", IP_ROBOT, PORT_ROBOT)\n proxy_motion.wakeUp()", "def setup( self ):\n glClearColor(*self.background)\n glClearDepth(1.0)\n glDepthFunc(GL_LEQUAL)\n glEnable(GL_LIGHTING)\n glEnable(GL_LIGHT0)\n '''\n ambientLight = [0.2, 0.2, 0.2, 1.0]\n diffuseLight = [0.8, 0.8, 0.8, 1.0]\n specularLight = [0.5, 0.5, 0.5, 1.0]\n lightPos = [0.0, 0.0, -30.0, 1.0]\n glLightfv(GL_LIGHT0, GL_AMBIENT, ambientLight)\n glLightfv(GL_LIGHT0, GL_DIFFUSE, diffuseLight)\n glLightfv(GL_LIGHT0, GL_SPECULAR, specularLight)\n glLightfv(GL_LIGHT0, GL_POSITION, lightPos)\n glEnable(GL_LIGHTING)\n glEnable(GL_LIGHT0)\n \n mat = [1.0, 0.0, 0.1, 1.0]\n glMaterialfv(GL_FRONT, GL_AMBIENT, mat)\n mat[0] = 1.0; mat[1] = 0.0; mat[2] = 0.0\n glMaterialfv(GL_FRONT, GL_DIFFUSE, mat)\n mat[0] = 1.0; mat[1] = 1.0; mat[2] = 1.0\n glMaterialfv(GL_FRONT, GL_SPECULAR, mat)\n glMaterialf(GL_FRONT, GL_SHININESS, 0.6*128.0)\n glEnable(GL_FOG)\n fogColor = [1.0, 0.0, 1.0, 1.0]\n \n global fogMode\n fogMode = GL_EXP2\n glFogi (GL_FOG_MODE, fogMode)\n glFogfv (GL_FOG_COLOR, fogColor)\n glFogf (GL_FOG_DENSITY, 0.0001)\n glHint (GL_FOG_HINT, GL_NICEST)\n glFogf (GL_FOG_START, 
10.0)\n glFogf (GL_FOG_END, -1000)\n glClearColor(0.0, 0.0, 0.1, 1.0)\n '''\n glEnable(GL_DEPTH_TEST) # Enables Depth Testing\n glShadeModel(GL_SMOOTH) # Enables smooth color shading\n glMatrixMode(GL_PROJECTION)\n glLoadIdentity() \n # Set up perspective view\n gluPerspective(50.0, float(self.size[0])/float(self.size[1]), 0.1, 5000.0)\n # Set up an orthographic view\n #glOrtho(-float(width)/2, float(width)/2, -float(height)/2, float(height)/2, -1.0, 1.0)\n glMatrixMode(GL_MODELVIEW)\n glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)\n display.flip() # For interactiveness sake\n return", "def __init__(self):\n self.left_motor = ev3.LargeMotor(ev3.OUTPUT_B)\n self.right_motor = ev3.LargeMotor(ev3.OUTPUT_C)\n self.arm_motor = ev3.MediumMotor(ev3.OUTPUT_A)\n self.touch_sensor = ev3.TouchSensor()\n self.color_sensor = ev3.ColorSensor()\n self.ir_sensor = ev3.InfraredSensor()\n self.MAX_SPEED = 900\n self.pixy = ev3.Sensor(driver_name=\"pixy-lego\")\n assert self.left_motor.connected\n assert self.right_motor.connected\n assert self.arm_motor.connected\n assert self.touch_sensor\n assert self.color_sensor\n assert self.ir_sensor\n assert self.pixy", "def __init__(self, boolee):\n\n super(BinaryColor, self).__init__(1)\n self.boolee = bool(boolee)", "def init(self):\n self.connect_to_switches()\n self.reset_states()", "def __init__(self):\n # Global attributes\n self.ON = {\"RED\":[0], \"GREEN\":[2], \"YELLOW\":[4], \"BLINK\":[6], \"NORMAL\":[2], \"WARNING\":[2,6], \"CRITICAL\":[4], \"ERROR\":[0]}\n self.OFF = {\"RED\":[1], \"GREEN\":[3], \"YELLOW\":[5], \"BLINK\":[5], \"NORMAL\":[3], \"WARNING\":[3,5], \"CRITICAL\":[5], \"ERROR\":[1]}\n\n # Indicator topic\n topic = rospy.get_param(rospy.get_name() + \"/indicator_topic\", \"/tower_lights_cmd\")\n # Namespace fixing\n if (topic[0] != '/'): topic = rospy.get_name() + \"/\" + topic\n\n # Starting publisher\n self.indicator_publisher = rospy.Publisher(topic, Int32, queue_size=100)\n rospy.sleep(0.8) # Publisher initialization tiom\n\n # Turn off all indications\n for state in self.OFF:\n for cmd in self.OFF[state]:\n self.publish_cmd(cmd)\n \n # Start indicator thread\n self.event = threading.Condition()\n thread = threading.Thread(target=self.indicator_thread)\n thread.start()\n\n # Initialize default indication\n self.current_indication = \"NORMAL\"\n self.indication = \"NORMAL\"\n for i in self.ON[self.current_indication]:\n self.publish_cmd(i)", "def _Init_lightcurve(self, ndiv, read=True, oldchi=False):\n logger.log(9, \"start\")\n self.star = Core.Star_temperature(ndiv)\n logger.log(9, \"end\")\n return", "def initColor(self, color=None, brightness=100):\n r = self.controller.send(self.light.on(self.group))\n logger.debug('Turned on lights (group: %s): %s' % (self.group, r))\n r = self.controller.send(self.light.brightness(brightness, self.group))\n logger.debug('Set brightness to %s (group: %s): %s' % (brightness, self.group, r))\n if color:\n self.setColor(color)", "def setUp(self):\n commandutils.CommandTestCaseMixin.setUp(self)\n self.lighting = objects.LocationLighting.createFor(\n self.location, candelas=0)", "def set_lighting(self):\n lightPosition = [-1, 1, 1, 0]\n glLightfv(GL_LIGHT0, GL_POSITION, lightPosition)\n\n ambientLight = [0.1, 0.1, 0.1, 1]\n\n if self.lighting:\n glEnable(GL_LIGHTING)\n glEnable(GL_LIGHT0)\n glLightfv(GL_LIGHT0, GL_AMBIENT, ambientLight)\n glLightfv(GL_LIGHT0, GL_DIFFUSE, self.diffuse_light)\n else:\n glDisable(GL_LIGHTING)\n glDisable(GL_LIGHT0)", "async def light_fixture(\n hass: HomeAssistant, 
mock_entry: MockEntityFixture, mock_light: Light\n):\n\n # disable pydantic validation so mocking can happen\n Light.__config__.validate_assignment = False\n\n light_obj = mock_light.copy(deep=True)\n light_obj._api = mock_entry.api\n light_obj.name = \"Test Light\"\n light_obj.is_light_on = False\n\n mock_entry.api.bootstrap.lights = {\n light_obj.id: light_obj,\n }\n\n await hass.config_entries.async_setup(mock_entry.entry.entry_id)\n await hass.async_block_till_done()\n\n assert_entity_counts(hass, Platform.LIGHT, 1, 1)\n\n yield (light_obj, \"light.test_light\")\n\n Light.__config__.validate_assignment = True", "def __init__(self):\n\n self.left_motor = ev3.LargeMotor(ev3.OUTPUT_B)\n self.right_motor = ev3.LargeMotor(ev3.OUTPUT_C)\n self.arm_motor = ev3.MediumMotor(ev3.OUTPUT_A)\n self.touch_sensor = ev3.TouchSensor()\n self.color_sensor = ev3.ColorSensor()\n self.ir_sensor = ev3.InfraredSensor()\n self.pixy = ev3.Sensor(driver_name=\"pixy-lego\")\n\n assert self.left_motor.connected\n assert self.right_motor.connected\n assert self.arm_motor.connected\n assert self.touch_sensor\n assert self.color_sensor\n assert self.ir_sensor\n assert self.pixy", "def init_lens(self):\n\n response = self.send_lens_cmd(['00'], fast_mode=False)\n response = self.send_lens_cmd(['0A', '00'], fast_mode=False)\n\n if response['MISO'][1] != 'AA':\n print(response['return_str'])\n raise RuntimeError('Lens initialisation failed')\n\n response = self.send_lens_cmd(['0A', '00'], fast_mode=True)\n\n cmd = ['80', '0A']\n for n in range(10):\n cmd.append('00')\n\n response = self.send_lens_cmd(cmd, fast_mode=True)\n\n self._min_FL = int('0x' + response['MISO'][4], 16)\n self._max_FL = int('0x' + response['MISO'][6], 16)\n\n if self.min_FL == self.max_FL:\n self.lens_desc = '{} mm prime lens'.format(self.min_FL)\n else:\n self.lens_desc = '{}-{} mm tele lens'.format(self.min_FL, self.max_FL)\n\n print('initialised {}'.format(self.lens_desc))", "def __init__(self, hass):\n self.hass = hass\n self._volume = 0\n self._state = STATE_OFF", "def __init__(self, hass):\n self.hass = hass\n self._volume = 0\n self._state = STATE_OFF", "def __init__(self, arlo, device, sensor_type):\n\n sensor_details = SENSOR_TYPES[sensor_type]\n\n if device is None:\n self._name = sensor_details[0]\n self._unique_id = sensor_type\n self._device = arlo\n else:\n self._name = \"{0} {1}\".format(sensor_details[0], device.name)\n self._unique_id = (\n \"{0}_{1}\".format(sensor_details[0], device.entity_id)\n .lower()\n .replace(\" \", \"_\")\n )\n self._device = device\n\n self._sensor_type = sensor_type\n self._icon = \"mdi:{}\".format(sensor_details[2])\n self._state = None\n self._attr = sensor_details[3]\n _LOGGER.info(\"ArloSensor: %s created\", self._name)", "def __init__(self, **kws):\n super(ShadowOE, self).__init__(**kws)\n self_repair_oe(self)\n # self.set_screens()\n # self.set_empty()\n self.init_empty()\n self.set_unit()\n self.set_output_files()\n self.set_parameters()\n self.set_infinite()", "async def Turn_On_Light(\n light_id: int = Path(..., title=\"Numeric light identifier\", ge=0),\n) -> Dict[str, Any]:\n busylightapi.manager.light_on(light_id)\n return {\n \"action\": \"on\",\n \"light_id\": light_id,\n \"color\": \"green\",\n }", "def __init__(self, parent, endpoint):\n Wemo_Endpoint.__init__(self, parent, endpoint)\n self.device_type = self._Parent._DeviceTypes.get('wemo_binary_sensor')\n self.FEATURES.update({\n FEATURE_BRIGHTNESS: False,\n FEATURE_PERCENT: False,\n FEATURE_NUMBER_OF_STEPS: False\n })", "def 
initialize_light_pins(pi, pins):\n for pin in pins:\n pi.set_mode(pin, pigpio.OUTPUT)\n pi.set_pull_up_down(pin, pigpio.PUD_DOWN)", "def __init__(self):\n #screen Settings\n self.screen_width = 1024\n self.screen_height = 768\n self.bg_color = (32, 32, 32)\n\n #rocket settings\n self.rocket_speed = 1\n\n #laser Settings\n self.laser_speed = 1.0\n self.laser_width = 3\n self.laser_height = 15\n self.laser_color = (0, 255, 255)\n self.lasers_allowed = 3", "def __init__(self, pin, minimum=0, maximum=0):\n DimmableLight.__init__(self)\n if pin is None:\n raise ArgumentNullException(\"'pin' param cannot be None.\")\n\n self.__min = minimum\n self.__max = maximum\n self.__pin = pin\n self.__pin.provision()", "def __init__(self):\n self.wnd = WindSensor()", "def turn_on(self, **kwargs: Any) -> None:\n _LOGGER.debug(\"Turn on light %s %s\", self._device.ip, kwargs)\n if not self.is_on:\n self._device.power_on = True\n\n if ATTR_BRIGHTNESS in kwargs and self.brightness != kwargs[ATTR_BRIGHTNESS]:\n self._device.brightness = kwargs[ATTR_BRIGHTNESS]\n\n if ATTR_COLOR_TEMP in kwargs and self.color_temp != kwargs[ATTR_COLOR_TEMP]:\n color_temp = mired_to_kelvin(kwargs[ATTR_COLOR_TEMP])\n self._device.color_temperature = color_temp", "def createLight(type, pos, centroid):\n light = cmds.shadingNode('areaLight', asLight=True)\n lookThruAndFrame(light)\n cmds.xform(light, ws=True, piv=centroid)\n if pos == 'key':\n cmds.setAttr(light+'.rotateY', -45)\n cmds.setAttr(light+'.rotateZ', -45)\n elif pos == 'fill':\n cmds.setAttr(light+'.rotateY', 45)\n cmds.setAttr(light+'.rotateZ', 20)\n cmds.setAttr(light+'.intensity', 0.5)\n elif pos == 'rim':\n cmds.setAttr(light+'.rotateY', -135)\n cmds.setAttr(light+'.rotateZ', -45)\n cmds.setAttr(light+'.intensity', 0.7)\n cmds.xform(light, ws=True, cp=True)\n return light", "def test_light_sensor(self):\n with patch.dict(TYPES, {'LightSensor': self.mock_type}):\n state = State('sensor.light', '900',\n {ATTR_DEVICE_CLASS: 'illuminance'})\n get_accessory(None, state, 2, {})" ]
[ "0.72170395", "0.717331", "0.69877267", "0.69179374", "0.67970073", "0.6705011", "0.6625122", "0.662029", "0.6540736", "0.6490158", "0.64509183", "0.6437352", "0.6358101", "0.63348967", "0.6321011", "0.6236198", "0.62147874", "0.6201365", "0.6063074", "0.6034815", "0.6011192", "0.6008737", "0.60071194", "0.60071194", "0.5977005", "0.5909796", "0.58923304", "0.58603793", "0.58313906", "0.581328", "0.57971203", "0.57866347", "0.5760451", "0.5741673", "0.57407665", "0.5738015", "0.573331", "0.57213527", "0.571847", "0.57169336", "0.5708891", "0.56860507", "0.56843793", "0.56826067", "0.56770396", "0.56696457", "0.56686085", "0.5662302", "0.5656149", "0.5654432", "0.56541955", "0.5651831", "0.5637916", "0.5619542", "0.55985266", "0.5598361", "0.55916786", "0.558311", "0.5580833", "0.55713284", "0.5556024", "0.5552093", "0.5551317", "0.5550846", "0.5521023", "0.55206525", "0.55123484", "0.54984957", "0.54976994", "0.5494225", "0.5493473", "0.5475411", "0.5475107", "0.5471529", "0.54675204", "0.54669046", "0.5451629", "0.54512817", "0.5446173", "0.544569", "0.54421127", "0.5441984", "0.5429918", "0.54127955", "0.5409201", "0.5404868", "0.5402104", "0.53992677", "0.53992677", "0.5396091", "0.53912896", "0.5390912", "0.53899884", "0.53833896", "0.5383292", "0.5380897", "0.53736657", "0.53726965", "0.5369932", "0.5369719" ]
0.69397825
3
Return True if light is available.
def available(self) -> bool: return self._device.is_online
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def light_is_on(self):\r\n return self._light == \"ON\"", "def light_is_on(self):\n return self._light == \"ON\"", "def have_light(self, light):\n if light > 1:\n return False\n return bool(self.light_array[light])", "def is_light(self) -> bool:\n return ATTRIBUTE.Light.value in self.type_data.attributes", "def is_light(self) -> bool:\n return ATTRIBUTE.Light.value in self.type_data.attributes", "def is_on(self):\n return self._light_on", "def check_engine_light(self):\n return self._check_engine_light", "def check_light(light: pykulersky.Light):\n light.connect()\n light.get_color()", "async def check_light(self, ip: str) -> bool:\n miner = self.miners[ip]\n return miner.lit", "def available(self):\n return self._power is not None", "def lights_are_on(image_path):\n _brightness = get_image_brightness(image_path)\n if _brightness > 10:\n return True\n return False", "def available(self):\n return True if self._device.status == \"AVAILABLE\" else False", "def available(self) -> bool:\n return self._device.is_connected", "def is_on(self):\n return self._brightness != 0", "def available(self) -> bool:\n return self.thermostat[\"runtime\"][\"connected\"]", "def available(self) -> bool:\n\n if CORE_STATUS_STATE in self.tahoma_device.active_states:\n return bool(\n self.tahoma_device.active_states.get(CORE_STATUS_STATE) == \"available\"\n )\n\n if CORE_SENSOR_DEFECT_STATE in self.tahoma_device.active_states:\n return (\n self.tahoma_device.active_states.get(CORE_SENSOR_DEFECT_STATE) != \"dead\"\n )\n\n # A RTS power socket doesn't have a feedback channel,\n # so we must assume the socket is available.\n return True", "def available(self) -> bool:\n return self._product and self._product.online", "def is_nightlight_supported(self) -> bool:\n return self._nightlight_brightness is not None", "def available(self) -> bool:\n return self._product.online", "def available(self) -> bool:\n return self._product.online", "def available(self) -> bool:\n return self._api.available", "def is_on(self):\n return self._client.get_power()", "def available(self) -> bool:\n return self._device.available", "def is_on(self):\n return self._brightness_pct != 0", "def is_on(self):\n return not self.ready", "def available(self) -> bool:\n return True", "def available(self) -> bool:\n return True", "def available(self) -> bool:\n return self._ctrl.connected()", "def is_available():", "def is_on(self):\n return self.wink.state()", "def available(self):\n return True", "def available(self):\n return True", "def check_lighting_state_room1():\n if timer_lights_on_off_room1() == room1_lux():\n pass\n else:\n light_room1(timer_lights_on_off_room1())", "def ready(self):\n return self.shader is not None and self.texturesReady()", "def needs_updating(self, light_data):\n if light_data.get('brightness') != self.brightness \\\n or light_data.get('power_state') != self.power_state:\n return True\n return False", "def is_available() -> bool:\n return HAVE_RLE", "def check_any_light_on(bridge):\n for i,group in bridge.get_group().items():\n if group['state']['any_on']:\n return True\n return False", "def available(self) -> bool:\n return self._thermostat.online", "def is_on(self):\n return self.car.data[DATA_PLUGGED_IN]", "def available(self):\n\t\t\treturn True", "def available(self):\n\t\t\treturn True", "def available(self):\n\t\t\treturn True", "def available(self) -> bool:\n return self._tm_client.api.available", "def available(self) -> bool:\n return bool(self._connected)", "def is_on(self):\n return 
self.coordinator.data[self.info_type] == \"red\"", "def is_available(self) -> bool:\n return self.on_hand > self.warn_limit", "def is_on(self):\n return (\n self._device.batterylevel != SHCBatteryDevice.BatteryLevelService.State.OK\n )", "def is_on(self) -> bool:\n return self._device.is_on", "def available(self) -> bool:\n is_avail = True\n if self.entity_description.available_fn is not None:\n is_avail = self.entity_description.available_fn(self._wrap_device)\n return self._api.available and is_avail", "def available(self) -> bool:\n return self._is_available", "def update(self) -> None:\n prev_available = self.available\n self._device.update()\n\n if (\n prev_available == self.available\n and self._is_on == self._device.power_on\n and self._brightness == self._device.brightness\n and self._color_temp == self._device.color_temperature\n ):\n return\n\n if not self.available:\n _LOGGER.debug(\"Light %s is offline\", self._device.ip)\n return\n\n self._is_on = self._device.power_on\n self._brightness = self._device.brightness\n self._color_temp = self._device.color_temperature\n\n if not self.is_on:\n _LOGGER.debug(\"Update light %s success: power off\", self._device.ip)\n else:\n _LOGGER.debug(\n \"Update light %s success: power on brightness %s color temperature %s\",\n self._device.ip,\n self._brightness,\n self._color_temp,\n )", "def is_on(self) -> bool:\n return self._client.get_circ_pump()", "def is_on(self):\n return self._brightness > 0 or self._white_value > 0", "def is_on(self):\n return self._program.get(\"enabled\") is True", "def get_available(self) -> bool:\n return self._available", "def available(self):\n return self._state is not None", "def available(self):\n return self._state is not None", "def available(self) -> bool:\n return (\n self._wrap_device.device.is_duct_zone_enabled(self._zone)\n and self._wrap_device.is_power_on\n )", "def feature_available(self, module_id: str) -> bool:\n return self.feature_loaded(module_id) or ZeroBot.module.module_available(module_id, \"feature\")", "def is_on(self) -> bool:\n return self._device.fan_on", "def available(self):\n\t\t\treturn False", "def available(self):\n\t\t\treturn False", "def available(self):\n\t\t\treturn False", "def available(self):\n return (\n hub.get_first(\n \"$.doorLockStatusList[?(@.deviceLabel=='%s')]\", self._device_label\n )\n is not None\n )", "def is_on(self):\n return self.car.data[DATA_CHARGING]", "def is_on(self):\n return self._device.is_on", "def is_on(self):\n return self._device.is_on", "def is_on(self):\n return self._device.is_on", "def available(self) -> bool:\n return pulumi.get(self, \"available\")", "def available(self, wl, *args):\n return True", "def is_available(self) -> bool:\n raise NotImplementedError", "def available(self) -> bool:\n if self._avm_wrapper.devices[self._mac].wan_access is None:\n return False\n return super().available", "def available(self) -> bool:\n raise NotImplementedError", "def target_connected(self):\n return self.connected() and bool(self._dll.JLINKARM_IsConnected())", "def is_on(self):\n return bool(getattr(self.resource, self.variable))", "def is_on(self):\n if self._power_state == HYSEN_POWERON :\n return True\n else:\n return False", "async def async_update(self):\n self._state = await self._gate.is_light_on(self._light_id)", "def available(self) -> bool:\n return bool(self._api.surveillance_station)", "def is_on(self):\n request = requests.get(self._resource+\"/state\", timeout=10)\n \n try:\n if int(float(request.text)) > 0:\n self._state = True\n 
else:\n self._state = False\n \n except:\n self._state = None\n \n return self._state", "def is_on(self):\n return self._sensor_state()", "def get_power(self) -> bool:\r\n if not self.backlight:\r\n return None\r\n\r\n return self.backlight.power", "def is_on(self):\n return self._device.state == SHCShutterContact.ShutterContactService.State.OPEN", "def is_on(self):\n return self._device.car_state.get(self._key)", "def is_on(self):\n pass", "def is_on(self) -> bool:\n return self.entity_description.get_ufp_value(self.device) is True", "def is_on(self) -> bool:\n return self.entity_description.get_ufp_value(self.device) is True", "def is_on(self) -> bool:\n raise NotImplementedError(\"Device subclass needs to implement this.\")", "def isSpectrumReady(self):\n\n while True:\n try:\n status = self.getStatus()\n if status.acquisitionStatus & 4 != 0:\n break\n except:\n return False\n\n return True", "def lighton(update: Update, context: CallbackContext) -> None:\n if __sauna.control.getPortValue(\"Light Sensor\") == 0:\n # TODO Mit Stromstossrelais ist dieser Code richtig\n # __sauna.control.togglePortValue(\"Light Switch\")\n update.message.reply_text(\"Light is on\")\n else:\n update.message.reply_text(\"Light was already on\")\n\n __sauna.control.setPortValue(\"Light Switch\")\n val = __sauna.control.getPortValue(\"Light Switch\")\n update.message.reply_text(\"Light Switch := \" + str(val))", "def probe(self):\n return False", "def is_available() -> bool:\n # This function never throws and returns 0 if driver is missing or can't\n # be initialized\n return device_count() > 0", "def available(self) -> bool:\n if self._coordinator and not self._coordinator.last_update_success:\n return False\n return self.rest.data is not None", "def is_on(self):\n return self._device.state", "def connected(self):\n return self.opened() and bool(self._dll.JLINKARM_EMU_IsConnected())", "def is_light(game_object: GameObject) -> bool:\n return CommonObjectTagUtils.has_game_tags(game_object, (\n CommonGameTag.BUY_CAT_LD_WALL_LIGHT,\n CommonGameTag.BUY_CAT_LD_OUTDOOR_LIGHT,\n CommonGameTag.BUY_CAT_LD_CEILING_LIGHT,\n CommonGameTag.BUY_CAT_LD_NIGHT_LIGHT,\n CommonGameTag.BUY_CAT_LD_MISC_LIGHT,\n CommonGameTag.FUNC_LIGHT_NON_ELECTRIC,\n CommonGameTag.FUNC_POOL_LIGHT,\n CommonGameTag.FUNC_BUSINESS_LIGHT,\n CommonGameTag.FUNC_LASER_LIGHT,\n CommonGameTag.FUNC_RETAIL_NEON_LIGHT,\n CommonGameTag.STYLE_FESTIVAL_LIGHT,\n CommonGameTag.FUNC_HOLIDAY_FESTIVE_LIGHTING\n ))", "def available(self) -> bool:\n return self._available", "def available(self) -> bool:\n return self._available", "def available(self) -> bool:\n return self._available", "def available(self) -> bool:\n return self._available", "def available(self) -> bool:\n return self._available" ]
[ "0.7484702", "0.73775405", "0.7287511", "0.72777367", "0.72777367", "0.7260376", "0.72128695", "0.6995078", "0.692183", "0.6848789", "0.68302953", "0.6803893", "0.6738999", "0.6737111", "0.67068386", "0.6706666", "0.669279", "0.66660124", "0.6653982", "0.6653982", "0.66485125", "0.66474086", "0.6592996", "0.6584517", "0.65817493", "0.6581631", "0.6581631", "0.6574667", "0.6567602", "0.6539162", "0.6516012", "0.6516012", "0.6497393", "0.6496152", "0.64911354", "0.64697784", "0.64573765", "0.64522666", "0.64381576", "0.642995", "0.642995", "0.642995", "0.6418104", "0.6418085", "0.6415818", "0.64132565", "0.63673794", "0.6364809", "0.63614935", "0.63527465", "0.63476574", "0.63422686", "0.6339687", "0.63209575", "0.6318399", "0.630992", "0.630992", "0.63052225", "0.6300457", "0.6294688", "0.62889224", "0.62889224", "0.62889224", "0.62862337", "0.6286203", "0.627455", "0.627455", "0.627455", "0.6268938", "0.6264849", "0.62584335", "0.6257715", "0.6255436", "0.6249413", "0.6247738", "0.6217677", "0.6214446", "0.6211385", "0.6208608", "0.6198094", "0.61970305", "0.61927134", "0.6189906", "0.61869675", "0.61761093", "0.61761093", "0.6163188", "0.6156832", "0.615126", "0.6141483", "0.6140147", "0.6132567", "0.6128381", "0.61196136", "0.61170685", "0.6111426", "0.6111426", "0.6111426", "0.6111426", "0.6111426" ]
0.6739951
12
Return unique ID for light.
def unique_id(self): return self._device.mac
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unique_id(self):\n return self._light.address", "def unique_id(self):\n return self.heater.id + \"_switch\"", "def unique_id(self) -> str:\n return 'remo_device_' + self._remo_device.id + '_' + self._sensor_class", "def unique_id(self) -> str:\n return get_frigate_entity_unique_id(\n self._config_entry.entry_id, \"sensor_status\", \"frigate\"\n )", "def getID():", "def unique_id(self):\n return self.device_id + '_' + self._sensor_type", "def id(self):\n return self.raw_resource.uuid", "def unique_id(self) -> str:\n return get_frigate_entity_unique_id(\n self._config_entry.entry_id, \"sensor_detector_speed\", self._detector_name\n )", "def unique_id(self) -> str:\n return get_frigate_entity_unique_id(\n self._config_entry.entry_id,\n \"sensor_object_count\",\n f\"{self._cam_name}_{self._obj_name}\",\n )", "def unique_id(self) -> str:\n return self.tahoma_device.url", "def unique_id(self) -> str:\n return get_frigate_entity_unique_id(\n self._config_entry.entry_id, \"sensor_temp\", self._name\n )", "def unique_id(self) -> str:\n return get_frigate_entity_unique_id(\n self._config_entry.entry_id, \"sensor_fps\", \"detection\"\n )", "def unique_id(self):\n return f\"{DOMAIN}_{self._name}_camera\"", "def GetID(self):\n return hex(id(self()))", "def unique_id(self):\n id = \"{}{}{}\".format(\n DOMAIN, self._account, self.sensorName.lower().replace(\" \", \"\")\n )\n return id", "def unique_id(self):\n return f\"{self._device.uuid}-FAN\"", "def unique_id(self):\n return f\"{self.config_entry.entry_id}_{self.hub_name}_{self.sensor_name}\"", "def unique_id(self):\n return f\"sma-{self._sensor.key}-{self._sensor.name}\"", "def getIdent (self) :\n return self.id", "def hook_id(self) -> int:\n return pulumi.get(self, \"hook_id\")", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def link_id(self):\n return uuid.uuid4().hex", "def unique_id(self) -> str:\n return f\"{self.entry_id}_{self.module_id}_{self.data_id}\"", "def unique_id(self):\n return self.device_id", "def unique_id(self) -> str:\n return get_frigate_entity_unique_id(\n self._config_entry.entry_id,\n \"sensor_fps\",\n f\"{self._cam_name}_{self._fps_type}\",\n )", "def unique_id(self):\n return f\"{self.wrapper.mac}-{self.block.index}\"", "def unique_id(self) -> str:\n return f\"{self._inst.lower()}-{self._sid_data['sid']}_switch-{self._data[self._sid_data['sid_ref']]}\"", "def get_id(self) -> str:\n return self._register_id", "def unique_id(self) -> str:\n return f\"{self._device.unique_id}_battery\"", "def get_id(self):\n \"\"\"Requires use of Python 3\"\"\"\n return str(self.id)", "def unique_id(self):\n return '{}-{}-{}'.format(self.airly.latitude, self.airly.longitude,\n self.type)", "def unique_id(self) -> str | None:\n return f\"{self._station_id}_{self._fuel_type}\"", "def get_id(self): # real signature unknown; restored from __doc__\n return \"\"", "def id(self):\n # Might also be a first 12-characters shortcut.\n return self._id", "def _generateUID(self):\n self._GlobalShadowIndex += 1\n return self._GlobalShadowIndex", "def name(self):\n return f\"{get_device_name(self._data, self._device.id)} Light\"", "def unique_id(self):\n return f\"{DOMAIN}_{self._cam_name}_{self._obj_name}_snapshot\"", "def unique_id(self):\n return self._uuid", "def unique_id(self):\n return self._uuid", "def light_id(value):\n value = int(value)\n if value < 1:\n raise ValueError('Light IDS are greater or equal to 1')\n return value", "def unique_id(self):\n return self._device_id", 
"def id(self):\n return str(self.get_data(\"id\"))", "def id(self):\n return id(self._component)", "def id(self):\n return self.raw_resource[\"id\"]", "def getUniqueID(self):\n return self.unique_id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def unique_id(self):\n return self._device.serial", "def unique_id(self):\n return self._id", "def unique_id(self):\n return self._id", "def get_id():\n global UNIQUE_GAME_ID\n with threadLock:\n UNIQUE_GAME_ID += 1\n return str(UNIQUE_GAME_ID)", "def getId(self):\n return self.identifier", "def getID(self):\n return self.__id", "def _get_id(self):\n return self.id", "def identifier(self):\n return self.__id", "def getid(self):\n return self.__id", "def uniqueid(self):\n return self.raw.get(\"uniqueid\")", "def get_id(self):\n return self.__id", "def get_id(self):\n return self.__id", "def unique_id(self) -> str:\n return '_'.join(['wavin', self._controller_id, str(self._name), 'battery'])", "def unique_id(self) -> str:\n return f\"{self._mac}_tracker\"", "def unique_id(self):\n return self.config_entry.entry_id + \"lsa\"", "def getID(self) -> int:\n ...", "def unique_id(self):\n return f\"{self.device.id}-{self.key}\"" ]
[ "0.7778015", "0.72051173", "0.7008974", "0.69517404", "0.69132495", "0.6900635", "0.6889376", "0.6859422", "0.6858554", "0.68494785", "0.6847968", "0.68355125", "0.68354523", "0.68086004", "0.6788117", "0.67518526", "0.6732352", "0.6727304", "0.66908014", "0.6683743", "0.66485083", "0.66485083", "0.66485083", "0.66485083", "0.6635892", "0.6626312", "0.66170806", "0.6609472", "0.66063476", "0.6605146", "0.65888476", "0.6586551", "0.65820795", "0.65798455", "0.6575911", "0.65703046", "0.6563651", "0.6559168", "0.6556925", "0.65439665", "0.6538232", "0.6538232", "0.65296596", "0.6518653", "0.6488168", "0.6464636", "0.64620787", "0.6460928", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6458686", "0.6446396", "0.64457315", "0.64457315", "0.644339", "0.6429816", "0.64281094", "0.64253867", "0.64175314", "0.64170545", "0.6410155", "0.6406813", "0.6406813", "0.6406772", "0.6404177", "0.6401793", "0.6400686", "0.6400542" ]
0.0
-1
Return the display name of this light.
def name(self): return self._name
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def name(self):\n return self._light.name", "def get_display_name(self):\n return DisplayText(self._display_name)", "def name(self):\n return f\"{get_device_name(self._data, self._device.id)} Light\"", "def get_display_name(self):\n return self.display_name", "def get_display_name(self):\n\n return self._display_name", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return self._display_name", "def get_real_name(self):\n return self.get_display_name()", "def display_name(self):\n return self.__display_name", "def display_name(self):\n answer = self._call('display_name')\n return answer.display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def displayName(self):\n return self.tr(self.name())", "def displayName(self):\n return self.tr(self.name())", "def displayName(self):\n return self.tr(self.name())", "def display_name(self):\n\n return self._display_name", "def display_name(self):\n try:\n return self.plug_dict['PlugName'].value\n except KeyError:\n return self.name", "def display(self):\n return self.name", "def display_name(self):\n return self.settings['displayName']", "def display_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"display_name\")", "def show_name(self):\n return self.name", "def display_name(self):\n return self.properties.get(\"displayName\", None)", "def display_name(self) -> 
Optional[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(cls):\n return cls.name.replace('_', ' ').title()", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def get_display_name(self):\n return self.get_property(dav.DisplayName())", "def display_name(self):\n if \"displayName\" in self._prop_dict:\n return self._prop_dict[\"displayName\"]\n else:\n return None", "def display_name(self):\n if \"displayName\" in self._prop_dict:\n return self._prop_dict[\"displayName\"]\n else:\n return None", "def display_name(self):\n if \"displayName\" in self._prop_dict:\n return self._prop_dict[\"displayName\"]\n else:\n return None", "def display_name(self):\n if \"displayName\" in self._prop_dict:\n return self._prop_dict[\"displayName\"]\n else:\n return None", "def display_name(self):\n if \"displayName\" in self._prop_dict:\n return self._prop_dict[\"displayName\"]\n else:\n return None", "def displayName(self):\n\t\treturn self.tr(\"Get Drainage Basins\")", "def displayName( self ):\n if ( not self._displayName ):\n return projex.text.pretty(self.objectName())\n return self._displayName", "def name(self):\n return self._shade_name", "def name(self) -> str:\n return self.get_full_name()", "def short_displayname(self):\n return self.get_short_displayname()", "def display_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"display_name\")", "def get_fulll_name(self):\n return self.name", "def display_name(self):", "def display_name(self) -> Optional[str]: # noqa: D401\n return self._display_name", "def get_display_name(self):\n\n return to_unicode(self.uid)", "def display_name(self) -> Optional[Mapping[str, str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[Mapping[str, str]]]:\n return pulumi.get(self, \"display_name\")", "def name(self):\n friendly_camera_name = self._cam_name.replace('_', ' ')\n return f\"{friendly_camera_name} {self._obj_name}\".title()", "def name(self):\n return self.heater.name", "def get_ereader_name(self):\n\t\tlong_name = self.make + ' - ' + self.model + ' - ' + self.backlight + ' - ' + self.battery + ' - ' + self.screen_type\n\t\treturn long_name.title()", "def name(self) -> str:\n return self.dev.label", 
"def full_name(self) -> str:\n return self._name", "def long_displayname(self):\n return self.get_long_displayname()", "def get_full_name(self):\n return self.name", "def get_full_name(self):\n return self.name", "def get_full_name(self):\n return self.name", "def get_full_name(self):\n return self.name", "def get_full_name(self):\n return self.name", "def displayName(self):\r\n return self.tr('Lockdown Liveability Tool')", "def name(self) -> str:\n return self._friendly_name", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")" ]
[ "0.8419186", "0.79949266", "0.7932655", "0.7864694", "0.77409434", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7716824", "0.7705757", "0.76947725", "0.7663317", "0.76585907", "0.76585907", "0.76585907", "0.76585907", "0.76585907", "0.76585907", "0.76585907", "0.76585907", "0.76585907", "0.75676584", "0.75676584", "0.75676584", "0.75672585", "0.7537208", "0.75344324", "0.7518013", "0.74949753", "0.74949753", "0.74949753", "0.74949753", "0.74949753", "0.74949753", "0.74949753", "0.74949753", "0.74949753", "0.74949753", "0.7479069", "0.7446164", "0.73113257", "0.73113257", "0.73113257", "0.72975653", "0.7269889", "0.7269889", "0.7269889", "0.7269889", "0.7269889", "0.7269889", "0.7269889", "0.7262067", "0.72474766", "0.72474766", "0.72474766", "0.72474766", "0.72474766", "0.7203401", "0.71803194", "0.71139336", "0.70923877", "0.7082115", "0.7072894", "0.7072894", "0.7072894", "0.7072894", "0.7072894", "0.7072894", "0.70714235", "0.7059322", "0.7055976", "0.7007582", "0.6983785", "0.694852", "0.69239116", "0.6910551", "0.69031507", "0.6901672", "0.68893206", "0.68881154", "0.6886189", "0.6886189", "0.6886189", "0.6886189", "0.6886189", "0.68821084", "0.68706644", "0.6866881", "0.6866881", "0.6866881", "0.6866881", "0.6866881" ]
0.0
-1
Return true if light is on.
def is_on(self): return self._is_on
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def light_is_on(self):\r\n return self._light == \"ON\"", "def light_is_on(self):\n return self._light == \"ON\"", "def is_on(self):\n return self._light_on", "def is_light(self) -> bool:\n return ATTRIBUTE.Light.value in self.type_data.attributes", "def is_light(self) -> bool:\n return ATTRIBUTE.Light.value in self.type_data.attributes", "def is_on(self):\n return self._brightness != 0", "def have_light(self, light):\n if light > 1:\n return False\n return bool(self.light_array[light])", "def is_on(self):\n return self._brightness_pct != 0", "def is_on(self):\n return self._brightness > 0 or self._white_value > 0", "def lighton(update: Update, context: CallbackContext) -> None:\n if __sauna.control.getPortValue(\"Light Sensor\") == 0:\n # TODO Mit Stromstossrelais ist dieser Code richtig\n # __sauna.control.togglePortValue(\"Light Switch\")\n update.message.reply_text(\"Light is on\")\n else:\n update.message.reply_text(\"Light was already on\")\n\n __sauna.control.setPortValue(\"Light Switch\")\n val = __sauna.control.getPortValue(\"Light Switch\")\n update.message.reply_text(\"Light Switch := \" + str(val))", "def set_light_on(self):\r\n self._light = \"ON\"", "def lights_are_on(image_path):\n _brightness = get_image_brightness(image_path)\n if _brightness > 10:\n return True\n return False", "def is_on(self):\n return self.wink.state()", "def is_on(self) -> bool:\n return self._device.is_on", "def is_on(self):\n if self._power_state == HYSEN_POWERON :\n return True\n else:\n return False", "def set_light_on(self):\n self._light = \"ON\"", "def is_on(self):\n data = self.sensor_data\n if data and data[\"model\"] == \"SML\" and data[\"changed\"]:\n return data[\"state\"] == STATE_ON\n return False", "def check_engine_light(self):\n return self._check_engine_light", "def is_on(self):\n return self._device.is_on", "def is_on(self):\n return self._device.is_on", "def is_on(self):\n return self._device.is_on", "def is_on(self):\n return self._sensor_state()", "def check_light(light: pykulersky.Light):\n light.connect()\n light.get_color()", "def is_on(self) -> bool:\n return self._device.fan_on", "def is_on(self):\n return self.coordinator.data[self.info_type] == \"red\"", "def is_on(self) -> bool:\n return self.entity_description.get_ufp_value(self.device) is True", "def is_on(self) -> bool:\n return self.entity_description.get_ufp_value(self.device) is True", "def turn_on(self):\n GPIO.output(self.gpio, True) # turn on light", "def is_on(self):\n return self.heater.is_on", "def is_on(self) -> bool:\n return self._state == STATE_ON", "def is_on(self) -> bool:\n return self._is_on", "def is_on(self):\n return self._device.state == SHCShutterContact.ShutterContactService.State.OPEN", "def is_on(self):\n return self._device.state", "def is_on(self) -> bool:\n return self._state", "def is_light(game_object: GameObject) -> bool:\n return CommonObjectTagUtils.has_game_tags(game_object, (\n CommonGameTag.BUY_CAT_LD_WALL_LIGHT,\n CommonGameTag.BUY_CAT_LD_OUTDOOR_LIGHT,\n CommonGameTag.BUY_CAT_LD_CEILING_LIGHT,\n CommonGameTag.BUY_CAT_LD_NIGHT_LIGHT,\n CommonGameTag.BUY_CAT_LD_MISC_LIGHT,\n CommonGameTag.FUNC_LIGHT_NON_ELECTRIC,\n CommonGameTag.FUNC_POOL_LIGHT,\n CommonGameTag.FUNC_BUSINESS_LIGHT,\n CommonGameTag.FUNC_LASER_LIGHT,\n CommonGameTag.FUNC_RETAIL_NEON_LIGHT,\n CommonGameTag.STYLE_FESTIVAL_LIGHT,\n CommonGameTag.FUNC_HOLIDAY_FESTIVE_LIGHTING\n ))", "def is_on(self):\n return self._device.car_state.get(self._key)", "def is_on(self):\n pass", "def is_on(self):\n return 
self._program.get(\"enabled\") is True", "def is_on(self):\n camera = self.coordinator.data[self._camera_id]\n if self._switch_type == \"record_motion\":\n enabled = True if camera[\"recording_mode\"] == TYPE_RECORD_MOTION else False\n elif self._switch_type == \"record_always\":\n enabled = True if camera[\"recording_mode\"] == TYPE_RECORD_ALLWAYS else False\n else:\n enabled = True if camera[\"ir_mode\"] == self._ir_on_cmd else False\n return enabled", "def is_on(self) -> bool:\n return bool(self._state)", "def is_on(self):\n return self._on", "def light_action():\n if light_btn.isChecked():\n self.variables.default_values_dict[\"settings\"][\"external_lights\"] = True\n else:\n self.variables.default_values_dict[\"settings\"][\n \"external_lights\"\n ] = False", "def check_lighting_state_room1():\n if timer_lights_on_off_room1() == room1_lux():\n pass\n else:\n light_room1(timer_lights_on_off_room1())", "def is_on(self):\n if self._sensor_type != DEVICE_TYPE_DOORBELL:\n return self._camera_data[\"event_on\"]\n return self._camera_data[\"event_ring_on\"]", "def is_on(self):\n return self._state == STATE_ON", "def is_on(self):\n return self._state == STATE_ON", "def is_on(self):\n return self._state == STATE_ON", "def is_on(self):\n return self._state == STATE_ON", "def is_on(self):\n return self._state == STATE_ON", "def is_on(self):\n return getattr(self.coordinator.data[0], self._sensor) is True", "def is_on(self):\n if self._switch_type == \"record_motion\":\n return self._camera_data[\"recording_mode\"] == TYPE_RECORD_MOTION\n elif self._switch_type == \"record_always\":\n return self._camera_data[\"recording_mode\"] == TYPE_RECORD_ALWAYS\n elif self._switch_type == \"record_smart\":\n return self._camera_data[\"recording_mode\"] == TYPE_RECORD_SMARTDETECT\n elif self._switch_type == \"ir_mode\":\n return self._camera_data[\"ir_mode\"] == self._ir_on_cmd\n elif self._switch_type == \"hdr_mode\":\n return self._camera_data[\"hdr_mode\"] is True\n elif self._switch_type == \"high_fps\":\n return self._camera_data[\"video_mode\"] == TYPE_HIGH_FPS_ON\n else:\n return self._camera_data[\"status_light\"] == \"True\"", "def is_on(self):\n return False", "def is_nightlight_supported(self) -> bool:\n return self._nightlight_brightness is not None", "def is_on(self):\n return self._client.get_power()", "def is_on(self):\n return self.car.data[DATA_PLUGGED_IN]", "def get_light_state(self, light):\n\treturn light.state \n\n\t#if(not self.has_image):\n # self.prev_light_loc = None\n # return False", "def get_light_status(self):\n return self._light_status", "def is_on(self) -> bool:\n raise NotImplementedError(\"Device subclass needs to implement this.\")", "def lightning_turnon(self):\n self.turnOn()", "def check_any_light_on(bridge):\n for i,group in bridge.get_group().items():\n if group['state']['any_on']:\n return True\n return False", "def is_on(self):\n ret_val = self._get_switch_state()\n if ret_val is None:\n return False\n if isinstance(ret_val, bool):\n return ret_val\n if ret_val == STATE_ON:\n return True\n state = STATE_LOOKUP.get(ret_val, STATE_OFF)\n return state == STATE_ON", "def is_on(self):\n return bool(self._state)", "def is_on(self):\n return bool(self._state)", "def is_on(self) -> bool | None:\n return self._state", "def turnLightingSystemOn():\n dislin.light('ON')", "def is_on(self) -> bool:\n return self.tuya_device.status.get(DPCODE_SWITCH, False)", "def is_on(self):\n return bool(self.arest.data.get('state'))", "def is_on(self):\n return (\n self._device.batterylevel 
!= SHCBatteryDevice.BatteryLevelService.State.OK\n )", "def is_on(self) -> bool:\n if self._state == STATE_UNKNOWN:\n return False\n return bool(self._state)", "async def check_light(self, ip: str) -> bool:\n miner = self.miners[ip]\n return miner.lit", "def is_on(self) -> bool:\n return self._current_speed != SPEED_OFF", "def is_on(self):\n return getattr(self._node, STICK_API[USB_MOTION_ID][ATTR_STATE])", "def is_on(self) -> bool:\n return self.coordinator.data.get_metric(METRIC_KEY_MODE) == MODE_ON", "def is_on(self) -> bool:\n return self._client.get_circ_pump()", "def lights_on(self) -> list:\n return [\n entity for entity in self.all_lights if self.hass.get_state(entity) == \"on\"\n ]", "async def async_turn_on(self, **kwargs: Any) -> None:\n if (brightness := kwargs.get(ATTR_BRIGHTNESS)) is not None:\n # set the brightness, which will also turn on/off light\n if brightness == 255:\n brightness = 256 # this will end up as 16 which is max\n self._device.light_brightness = int(brightness / 16)\n else:\n self._device.light_on = True", "def is_on(self):\n return bool(self.enabled)", "def is_on(self):\n return self.car.data[DATA_CHARGING]", "async def async_update(self):\n self._state = await self._gate.is_light_on(self._light_id)", "def is_on(self) -> bool:\n return self.entity_description.state_fn(self._valve)", "def is_on(self):\n return self._state", "def is_on(self):\n return self._state", "def is_on(self):\n return self._state", "def is_on(self):\n return self._state", "def is_on(self):\n return self._state", "def is_on(self):\n return self._state", "def is_on(self):\n return self._state", "def is_on(self):\n return self._state", "def is_on(self):\n return self._state", "def is_on(self):\n return self._state" ]
[ "0.9035416", "0.89275813", "0.86664206", "0.8008696", "0.8008696", "0.78880787", "0.7866302", "0.7610281", "0.7565963", "0.7415077", "0.7345493", "0.7335999", "0.73116577", "0.7303292", "0.72991306", "0.7243393", "0.71792775", "0.71722543", "0.7157757", "0.7157757", "0.7157757", "0.70897263", "0.7086699", "0.70853215", "0.70711905", "0.70237964", "0.70237964", "0.70188177", "0.7016563", "0.7014014", "0.69964457", "0.6982847", "0.6981409", "0.69680965", "0.69553804", "0.6944279", "0.6932273", "0.69256914", "0.6925266", "0.692203", "0.6921335", "0.6916988", "0.69142187", "0.6900505", "0.690016", "0.690016", "0.690016", "0.690016", "0.690016", "0.6862116", "0.68534386", "0.684857", "0.6843141", "0.68277967", "0.68221456", "0.68043554", "0.67920357", "0.6788725", "0.6772637", "0.6768367", "0.6764079", "0.67327076", "0.67327076", "0.67286813", "0.6716502", "0.6703632", "0.66983616", "0.6696828", "0.66967547", "0.66932815", "0.66826254", "0.66725147", "0.66494465", "0.6647969", "0.6644221", "0.66237044", "0.6620733", "0.6620028", "0.6615723", "0.66006655", "0.6595362", "0.6595362", "0.6595362", "0.6595362", "0.6595362", "0.6595362", "0.6595362", "0.6595362", "0.6595362", "0.6595362" ]
0.67288214
71
Return the brightness of the light.
def brightness(self): return self._brightness
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def brightness(self):\n return brightness_from_percentage(self._brightness_pct)", "def brightness(self):\n return self.get_value('bri')", "def brightness(self):\n return round((self._device.current_percentage / 100) * 255)", "def get_brightness(self):\n _lib.caca_get_dither_brightness.argtypes = [_Dither]\n _lib.caca_get_dither_brightness.restype = ctypes.c_float\n\n return _lib.caca_get_dither_brightness(self)", "def get_brightness(self) -> int:\r\n if not self.backlight:\r\n return -1\r\n\r\n return self.backlight.brightness", "def _get_brightness(self):\n result = self._client_cmd('backlight_tool --get_brightness')\n return int(result.stdout.rstrip())", "def brightness(self):\n _LOGGER.debug(\"polled state brightness: %s\", self._brightness)\n return self._brightness", "def get_brightness(self):\n response = self.parent.backlight.get_brightness()\n if response is not None:\n response = response[0]\n return response", "def brightness(self) -> float:\n # http://alienryderflex.com/hsp.html\n r, g, b = self.r, self.g, self.b\n return sqrt(0.299*r**2 + 0.587*g**2 + 0.114*b**2)/255", "def getSurfaceBrightness(self):\n return self._sbrightn", "def brightness(self) -> int | None:\n old_range = self._tuya_brightness_range()\n brightness = self.tuya_device.status.get(self.dp_code_bright, 0)\n\n if self._work_mode().startswith(WORK_MODE_COLOUR):\n colour_json = self.tuya_device.status.get(self.dp_code_colour)\n if not colour_json:\n return None\n colour_data = json.loads(colour_json)\n v_range = self._tuya_hsv_v_range()\n hsv_v = colour_data.get(\"v\", 0)\n return int(self.remap(hsv_v, v_range[0], v_range[1], 0, 255))\n\n return int(self.remap(brightness, old_range[0], old_range[1], 0, 255))", "def brightness(self):\n return to_hass_level(self._position)", "def brightness(self):\n return to_hass_level(self._position)", "def get_brightness(self, channel=None):\n return float(self.getSensorData(\"ILLUMINATION\", channel))", "def lightness(self):\n min_component = min(self.red, self.green, self.blue)\n max_component = max(self.red, self.green, self.blue)\n avg = (max_component + min_component) / 2\n light = avg / 255\n return light", "def estimate_brightness(self):\n\n intensity = self._get_intensity()\n self.avg_standard_lum = np.sum(intensity) / (self.img_height * self.img_width)\n return self.avg_standard_lum", "def calculate_brightness(self, center, width, height):\n x1, y1 = int(round(center.x - width / 2)), int(round(center.y - height / 2))\n x2, y2 = int(round(x1 + width)), int(round(y1 + height))\n\n brightness = np.sum(self.img[y1:y2, x1:x2]) / (width * height)\n return brightness", "def brightness(rgb):\n # return (min(rgb) + max(rgb)) / 2\n return rgb_to_hls(rgb)[1] * 255", "def brightnessunit(self):\n return _image.image_brightnessunit(self)", "def _get_color_brightness(self, color):\n d0, _, _ = self._get_color_dominance_indices(color)\n return color[d0]/MAX", "def get_brightness(self, channel=None):\n return int(self.getSensorData(\"BRIGHTNESS\", channel))", "def brightness(self):\n _LOGGER.error(\"inside brightness\")\n url = self.urlx + '/dimstate'\n headers = {'x-ha-access': 'raspberry',\n 'content-type': 'application/json'}\n\n response = get(url, headers=headers)\n _LOGGER.error(response.text)\n\n json_data = json.loads(response.text)\n _LOGGER.error(json_data)\n\n state = int(int(json_data['dimState'])*1.5)\n\n # if int(self._dimmer) < 170:\n self._dimmer = state\n\n return self._dimmer", "def min_brightness(self):\n return .0", "def _calcBrightness(self, brightness):\n if 0 <= 
int(brightness) <= 100:\n return int(float(brightness) / 100 * 0xFF)\n raise Exception('Brightness must be an integer betwenn 0 and 100')", "def get_brightness():\n file = open (\"/home/fblaise/.i3/scripts/backlight_p.out\",'r')\n return \"{}%\".format(file.readline().strip())", "def brightness(r, g, b):\n return sqrt(pow(r, 2) * .241 + pow(g, 2) * .691 + pow(b, 2) * .068) / 255", "def brightness(pixel):\n red = pixel[0]\n green = pixel[1]\n blue = pixel[2]\n return (21*red + 72*green + 7*blue) // 100", "def get_max_brightness(self) -> float:\n return max(self._color)", "def getLight(self):\n return self.light", "def auto_brightness(self):\n return self._auto_brightness", "def get_image_brightness(image_path):\n # Convert the Image to greyscale\n img = Image.open(image_path).convert('L')\n # Get the average pixel brightness\n stat = ImageStat.Stat(img)\n return stat.rms[0]", "def getLightSensor() -> int:\n pass", "def max_brightness(self):\n status_filename = os.path.join(self.path, 'max_brightness')\n with open(status_filename) as status_fp:\n result = status_fp.read()\n status_text = result.strip()\n try:\n status = int(status_text)\n except ValueError:\n return status_text\n return status", "def get_light_status(self):\n return self._light_status", "def average_brightness(im):\n imcopy = im.copy().convert('L')\n stat = ImageStat.Stat(imcopy)\n return stat.mean[0]", "def brightness(self, factor):\n\n channels = [\"r\", \"g\", \"b\"]\n total_lumes = clamp(self.get_luminance() + (255.0 * factor) - 255.0, 0.0, 255.0)\n\n if total_lumes == 255.0:\n # white\n self.r, self.g, self.b = 0xFF, 0xFF, 0xFF\n elif total_lumes == 0.0:\n # black\n self.r, self.g, self.b = 0x00, 0x00, 0x00\n else:\n # Adjust Brightness\n pts = (total_lumes - 0.299 * self.r - 0.587 * self.g - 0.114 * self.b)\n slots = set(channels)\n components = [float(self.r) + pts, float(self.g) + pts, float(self.b) + pts]\n count = 0\n for c in channels:\n overage, components[count] = self._get_overage(components[count])\n if overage:\n slots.remove(c)\n components = list(self._distribute_overage(components, overage, slots))\n count += 1\n\n self.r = clamp(round_int(components[0]), 0, 255) & 0xFF\n self.g = clamp(round_int(components[1]), 0, 255) & 0xFF\n self.b = clamp(round_int(components[2]), 0, 255) & 0xFF", "def pixel_brightness(distance_from_down, accel_magnitude):\n half_lighting_arc_length = LIGHTING_ARC_LENGTH / 2\n\n if accel_magnitude < 0.1 or distance_from_down > half_lighting_arc_length:\n return None\n\n normalized_nearness = 1 - distance_from_down / half_lighting_arc_length\n scale_factor = (255 - MIN_BRIGHTNESS) * accel_magnitude\n color_part = MIN_BRIGHTNESS + round(normalized_nearness * scale_factor)\n return color_part", "def brightness(colors):\n return np.sum(colors * const_bright, -1)", "def get_brightness(img,mask):\r\n\r\n bright = cv2.meanStdDev(img, mask=mask)\r\n return {\"bright_avg\":bright[0][0,0],\"bright_sd\":bright[1][0,0]}", "def brightness_from_percentage(percent):\n return int((percent * 255.0) / 100.0)", "def __lightness(self, color):\n hsv = color.toHsv()\n return hsv.valueF()", "def _get_brightness_component(self, color):\n brightness = self._get_color_brightness(color)\n if brightness == 0:\n return tuple(map(lambda x: -x, color))\n result = [0] * len(color)\n for i in range(len(color)):\n result[i] = color[i] - int(round(color[i]/brightness))\n return tuple(result)", "def normalize_hue_brightness(brightness):\n if brightness is not None:\n # Hue uses a range of [0, 100] to control 
brightness.\n brightness = float((brightness / 255) * 100)\n\n return brightness", "def update(self):\n self._brightness = self._lj.get_load_level(self._index) / 99 * 255", "def image_brightness(new_bright=0):\n # Scale brightness value\n bright = int(map_range(new_bright, 0, 15, 0x00, 0xFF))\n # Recombine and return a composite RGB888 value\n return (bright << 16) + (bright << 8) + bright", "def get_brightness(arr):\n\tR,G,B = arr[:,:,0], arr[:,:,1], arr[:,:,2]\n\tY = 0.299*R + 0.587*G + 0.144*B\n\treturn Y.mean()", "def brightness(value):\n value = int(value)\n if value < 1 or value > 254:\n raise ValueError('Minimum brightness is 1, to the maximum 254')\n return value", "def brightness(image, magnitude, name=None):\n _check_image_dtype(image)\n\n with tf.name_scope(name or \"brightness\"):\n dark = tf.zeros_like(image)\n bright_image = blend(dark, image, magnitude)\n return bright_image", "def _get_brightness_modifier(self, base_color, brightness, white_level):\n # make sure base_color is valid\n if not self._is_base_color(base_color):\n raise ValueError(f\"Invalid base color: {base_color}\")\n # get the white level modifier\n white_level_modifier = self._get_white_level_modifier(base_color, white_level)\n # add in the white level component modifiers\n color = tuple(map(lambda c, m: c + m, base_color, white_level_modifier))\n # full brightness\n if brightness >= 1:\n return (MIN, MIN, MIN)\n # general case\n result = [0] * 3\n for d in self._get_color_dominance_indices(color):\n result[d] = int(round((brightness - 1) * color[d]))\n return tuple(result)", "def lightspeed(self):\n return self._lightspeed", "def bright(self,l):\n if 1 <= l <= 4:\n self.send(\"\\x1f\\x58%c\" % l)\n else:\n raise ValueError('brightness values have to be between 1 and 4')", "def status(self):\n status_filename = os.path.join(self.path, 'brightness')\n with open(status_filename) as status_fp:\n result = status_fp.read()\n status_text = result.strip()\n try:\n status = int(status_text)\n except ValueError:\n return status_text\n return status", "def light_color(self):\n return self._spots[constants.CROSSING_LOCATION - 1].light_color()", "def change_brightness(image, value):\n\n return change_light(image, value, \"v\")", "def brightness(\n id: int = typer.Argument(1),\n ip: str = typer.Option(..., \"--ip\", \"-i\", envvar=\"HUE_BRIDGE_IP\"),\n user: str = typer.Option(..., \"--user\", \"-u\", envvar=\"HUE_BRIDGE_USER\"),\n brightness: int = typer.Option(..., \"--brightness\", \"-b\", min=1, max=255),\n):\n light = Light(id, ip=ip, user=user)\n resp = asyncio.run(light.set_brightness(brightness))\n console.print(f\"[{ip}] Light {id} Brightness:\\n{json.dumps(resp, indent=2)}\")", "def lightness(color):\n\n strongest = max(color.red, color.green, color.blue)\n weakest = min(color.red, color.green, color.blue)\n return 0.5 * (strongest + weakest) / 255", "def get_rgb_light():\n return list(light.rgb())", "def lens_surface_brightness(self, kwargs_lens_light, unconvolved=False, k=None):\n _, _, kwargs_lens_light_i, kwargs_ps_i, _ = self.select_kwargs(\n kwargs_lens=None,\n kwargs_source=None,\n kwargs_lens_light=kwargs_lens_light,\n kwargs_ps=None,\n kwargs_extinction=None)\n return self._lens_surface_brightness(kwargs_lens_light_i, unconvolved=unconvolved, k=k)", "def luminance(self):\n \n return (self.r + self.g + self.b) // 3", "def white_value(self):\n if ((self._type == CONF_LIGHT_TYPE_RGBW) or\n (self._type == CONF_LIGHT_TYPE_RGBWD) or\n (self._type == CONF_LIGHT_TYPE_DRGBW)):\n return self._white_value\n 
else:\n return None", "def ambient_light(self):\n return self._ambient_light", "def diffuse_light(self):\n return self._diffuse_light", "def specular_light(self):\n return self._specular_light", "def set_brightness(distance):\n if math.floor(distance / 100) - 1 >= 0 and math.floor(distance / 100) - 1 <= 9:\n return 9 - (math.floor(distance / 100) - 1)\n elif math.floor(distance / 100) - 1 >= 0:\n return 1\n else:\n return 9", "def lightSpeed():\n return const.c.value", "def _update_brightness(self):\n while self.current_brightness != self.brightness:\n next_color = RGB(r=int(self.color.r * (self.current_brightness/100.0)),\n g=int(self.color.g * (self.current_brightness/100.0)),\n b=int(self.color.b * (self.current_brightness/100.0)))\n self._update_color(next_color)\n diff = self.brightness - self.current_brightness\n # adjust current brightness to +/- 1\n self.current_brightness = self.current_brightness + \\\n (diff) / abs(diff)\n time.sleep(.05)\n # Final update to exact brightness and default if no change in brightness setting\n final_color = RGB(r=int(self.color.r * (self.brightness/100.0)),\n g=int(self.color.g * (self.brightness/100.0)),\n b=int(self.color.b * (self.brightness/100.0)))\n self._update_color(final_color)", "def get_step_brightness(self, step_id: int) -> List[Union[str, int]]:\n step = self.Sequence[step_id]\n if isinstance(step, Step):\n return step.Brightness\n return list()", "def convertBrightness(self, in_level):\n maxp = 31\n maxv = 6.9305\n scale = maxv / maxp\n\n level = math.exp(scale * in_level)\n return int(level + .5)", "def test_set_and_get_led_brightness_level(self):", "def get_ambient_light(self, ldr_voltage: Optional[int] = None) -> float:\n if ldr_voltage is None:\n ldr_voltage = self.raw_ldr_voltage\n\n # TODO: this conversion algorithm is straight from the manual but it seems odd.\n # It goes \"to infinity\" as ldr_voltage nears 1023 (hence the clamp, I guess)\n # Clarify.\n if ldr_voltage > 1022:\n ldr_voltage = 1022\n if ldr_voltage < 1:\n ldr_voltage = 1\n\n return self.ldr_pull_up_resistance / ((1023 / ldr_voltage) - 1)", "def print_brightness(image):\n target = image.copy()\n for y in range(len(image)):\n for x in range(len(image[y])):\n rgb = image[y, x]\n target[y, x] = brightness(rgb)\n\n return target", "def light(brightness, filter):\n brightness = clamp(MIN_BRIGHTNESS, round(brightness), MAX_BRIGHTNESS)\n for col in range(DISPLAY_WIDTH):\n for row in range(DISPLAY_HEIGHT):\n if filter(col, row):\n microbit.display.set_pixel(col, row, brightness)", "def brightness_to_percentage(brightness):\n return int((brightness * 100.0) / 255.0)", "def GetLuminance(self):\n return _itkRGBAPixelPython.itkRGBAPixelUS_GetLuminance(self)", "def get_lux(self):\n\n svc = \"urn:micasaverde-com:serviceId:LightSensor1\"\n if not svc in self.services:\n raise RuntimeError, \"Device doesn't support the service\"\n\n return self.get_variable(svc, \"CurrentLevel\")", "def get_relative_ambient_light(self, ldr_voltage: Optional[int] = None) -> float:\n if ldr_voltage is None:\n ldr_voltage = self.raw_ldr_voltage\n\n return round(1 - ldr_voltage / 1023, 4)", "def get_light_state(self, light):\n if self.use_classifier: \n if(not self.has_image):\n return None\n\n cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, \"bgr8\")\n\n #Get classification\n return self.light_classifier.get_classification(cv_image) \n \n else:\n # For testing, just return the ground-truth light state, directly coming from simulator\n return light.state", "def get_light_state(self, light):\n 
\tif (not self.has_image):\n return light.state, 1.0, None if light else TrafficLight.UNKNOWN, 0, None\n cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, \"rgb8\")\n #Get classification\n \treturn self.light_classifier.get_classification(cv_image)", "def update_brightness(intent):\n card_title = \"Brightness\"\n\n brightness = intent.get('slots',{}).get('Brightness',{}).get('value')\n\n if brightness:\n brightness = int(brightness)\n if brightness > 0 and brightness <= 100:\n speech_output = \"Setting brightness to {}.\".format(brightness)\n new_value_dict = {\"brightness\":brightness}\n shadow_connection.update_shadow(new_value_dict)\n elif brightness == 0:\n speech_output = \"Turning off.\"\n new_value_dict = {\"power_state\":\"OFF\"}\n shadow_connection.update_shadow(new_value_dict)\n else:\n speech_output = \"I'm sorry that value is not in the proper range. \"\\\n \"Please give me a number between 0 and 100.\"\n else:\n speech_output = \"I did not understand that. Please repeat your request.\"\n\n response = response_builders.build_response(session_attributes,\n response_builders.build_speechlet_response(card_title,\n speech_output, reprompt_text, should_end_session))\n return response", "def config_brightness(self):\n orig_brightness, prev_brightness = self.brightness, self.brightness\n self.make_ui_group(False, 'Brightness:', self.brightness)\n\n while True:\n action_left, action_right = (self.button_left.action(),\n self.button_right.action())\n if action_left is RichButton.HOLD:\n return self.brightness is not orig_brightness, False # Config\n if action_right is RichButton.HOLD:\n return self.brightness is not orig_brightness, True # Paint\n if action_left is RichButton.TAP:\n self.brightness = max(0.0, self.brightness - 0.1)\n elif action_right is RichButton.TAP:\n self.brightness = min(1.0, self.brightness + 0.1)\n\n if self.brightness is not prev_brightness:\n self.rect.x = int(board.DISPLAY.width * (self.brightness - 1.0))\n prev_brightness = self.brightness", "def GetLuminance(self):\n return _itkRGBAPixelPython.itkRGBAPixelUC_GetLuminance(self)", "def get_luminosity(self):\n\n h, l, s = colorsys.rgb_to_hls(self.r, self.g, self.b)\n return l", "def get_light_state(self, light):\n if(not self.has_image):\n self.prev_light_loc = None\n return TrafficLight.RED\n\n # fixing convoluted camera encoding...\n if hasattr(self.camera_image, 'encoding'):\n self.attribute = self.camera_image.encoding\n if self.camera_image.encoding == '8UC3':\n self.camera_image.encoding = \"rgb8\"\n else:\n self.camera_image.encoding = 'rgb8'\n cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, \"rgb8\")\n\n #Get classification\n if self.light_classifier is not None:\n classification = self.light_classifier.get_classification(cv_image)\n else:\n classification = TrafficLight.UNKNOWN\n print \"traffic light: \", label[classification]\n return classification", "def get_light_state(self, light):\n\treturn light.state \n\n\t#if(not self.has_image):\n # self.prev_light_loc = None\n # return False", "def get_luminosity(self):\n\n if self.no_dist is False and self.no_flux is False:\n\n dist = self.distance\n snu = self.snu_at_1GHz\n lum = lumin(dist, snu)\n\n self.lum = lum\n else:\n self.lum = -1 # use -1 to indicate unknown luminosity\n return self.lum", "def get_light_state(self, light):\n\n # If there is no image to process...\n if self.camera_image == None:\n\n # Don't know the color\n return TrafficLight.UNKNOWN\n\n else:\n\n # Convert image format\n cv_image = 
self.bridge.imgmsg_to_cv2(self.camera_image, \"bgr8\")\n\n # Classify the image\n return self.light_classifier.get_classification(cv_image)", "def change_brightness(image, max_delta):\n return tf.image.adjust_brightness(image, max_delta)", "def update(self) -> None:\n self._light.update()\n self._state = self._light.is_on()\n self._brightness = self._light.brightness", "def set_brightness(self, brightness):\n if isinstance(brightness, int):\n brightness = float(brightness)\n\n _lib.caca_set_dither_brightness.argtypes = [_Dither, ctypes.c_float]\n _lib.caca_set_dither_brightness.restype = ctypes.c_int\n\n return _lib.caca_set_dither_brightness(self, brightness)" ]
[ "0.8602627", "0.84503806", "0.8426228", "0.8375639", "0.8349791", "0.8339884", "0.83344954", "0.8283124", "0.81917197", "0.79680145", "0.7894601", "0.78586304", "0.78586304", "0.77162117", "0.76703614", "0.7597096", "0.75513947", "0.7543812", "0.7524217", "0.7521598", "0.7497429", "0.73815596", "0.72830206", "0.7244168", "0.7207582", "0.7144548", "0.7101553", "0.7074816", "0.7040813", "0.70391256", "0.6915878", "0.6855632", "0.67488647", "0.6733471", "0.6708466", "0.66842103", "0.6671265", "0.6633193", "0.66046566", "0.6586856", "0.65676177", "0.65629405", "0.65524536", "0.65443903", "0.65416116", "0.65105844", "0.65049595", "0.6457946", "0.64353096", "0.6432432", "0.63812506", "0.63806796", "0.63728386", "0.63545436", "0.63146967", "0.6304875", "0.6293291", "0.62797797", "0.6254985", "0.6253061", "0.6246028", "0.6226528", "0.62218", "0.62114304", "0.6196993", "0.61906344", "0.6186092", "0.6175604", "0.6107358", "0.60929054", "0.60757303", "0.6071021", "0.60170275", "0.5997619", "0.59870607", "0.59852976", "0.5979988", "0.59795666", "0.5966082", "0.59652525", "0.5905576", "0.59027714", "0.5898936", "0.58908", "0.5889117", "0.5863182", "0.58511364", "0.58510685", "0.5827553" ]
0.8533625
12
Return the color temperature of this light.
def color_temp(self): return kelvin_to_mired(self._color_temp)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def color_temp(self):\n return self._color_temp", "def getTemperature(self):\n return self.temperature", "def temperature(self):\n return self._temperature", "def temperature(self):\n return self._temperature", "def temperature(self):\n return float(self._current_observation['temp_c'])", "def temperature(self):\n temp = ct.c_float()\n self.lib.GetTemperatureF(ct.pointer(temp))\n return temp.value", "def temperature(self):\n return self.read_short(65) / 340.0 + 36.53", "def get_temperature(self):\n pass", "def temperature(self):\n return _cantera.reactor_temperature(self.__reactor_id)", "def color_temp(self) -> int:\n new_range = self._tuya_temp_range()\n tuya_color_temp = self.tuya_device.status.get(self.dp_code_temp, 0)\n return (\n self.max_mireds\n - self.remap(\n tuya_color_temp,\n new_range[0],\n new_range[1],\n self.min_mireds,\n self.max_mireds,\n )\n + self.min_mireds\n )", "def get_temperature(self):\n self.temperature = self.temperature_sensors.get_temperature(\n self.channel)\n return self.temperature", "def temperature(self):\r\n self._read_temperature()\r\n return self._t_fine / 5120.0", "def getTemperature(self):\n return self.json_state.get(\"temperature\")", "def temperature(self) -> float:\n # Start a measurement then poll the measurement finished bit.\n self.temp_start = 1\n while self.temp_running > 0:\n pass\n # Grab the temperature value and convert it to Celsius.\n # This uses the same observed value formula from the Radiohead library.\n temp = self._read_u8(_REG_TEMP2)\n return 166.0 - temp", "def sky_temperature(self) -> float:\n\n return 0.0552 * (self.ambient_temperature**1.5)", "def get_temperature_color_preview(lamp_props):\n temperature = lamp_props.color_temperature\n\n mm = 1000.0 / temperature\n mm2 = mm ** 2\n mm3 = mm2 * mm\n x, y = 0, 0\n\n if temperature < 4000:\n x = -0.2661239 * mm3 - 0.2343580 * mm2 + 0.8776956 * mm + 0.179910\n else:\n x = -3.0258469 * mm3 + 2.1070379 * mm2 + 0.2226347 * mm + 0.240390\n\n x2 = x**2\n x3 = x2 * x\n if temperature < 2222:\n y = -1.1063814 * x3 - 1.34811020 * x2 + 2.18555832 * x - 0.20219683\n elif temperature < 4000:\n y = -0.9549476 * x3 - 1.37418593 * x2 + 2.09137015 * x - 0.16748867\n else:\n y = 3.0817580 * x3 - 5.87338670 * x2 + 3.75112997 * x - 0.37001483\n\n # xyY to XYZ, assuming Y=1.\n xyz = mathutils.Vector((x / y, 1, (1 - x - y) / y))\n return xyz_to_rgb * xyz", "def getTemperature(self):\n with self.lock:\n temp = self.temp\n return temp", "def current_temperature(self) -> float:\n return self._thermostat.current_temperatue", "def temp(self):\n if self.temp_sensor is None:\n return None\n else:\n if self.temp_scale.lower() in ['f', 'fahrenheit']:\n return self.temp_sensor.temp_f\n elif self.temp_scale.lower() in ['c', 'celsius']:\n return self.temp_sensor.temp_c", "def temperature(self):\n names = ['anc_air_temperature']\n return self.sensor.get_with_fallback('temperature', names)", "def target_temperature(self):\n if self._client.mode == self._client.MODE_HEAT:\n return self._client.heattemp\n if self._client.mode == self._client.MODE_COOL:\n return self._client.cooltemp\n return None", "def get_color(self):\n return self.color", "def current_temperature(self):\n return self.atag.dhw_temperature", "def temperature(self) -> SmartSsdTemperature:\n return self._temperature", "def get_temperature(self):\n return self.ipcon.send_request(self, BrickletBarometerV2.FUNCTION_GET_TEMPERATURE, (), '', 'i')", "def get_color(self):\n\n return self.color", "def internal_temp_c(self) -> int:\n return 
int(self._device_info[\"Temperature\"])", "def current_temperature(self):\n return self._cur_temp", "def ambient_temperature(self) -> int:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"ambient_temperature\"))\r\n # TODO: Force this to return an int.\r\n if self.temperature_scale == \"F\":\r\n return self.ambient_temperature_f\r\n elif self.temperature_scale == \"C\":\r\n return self.ambient_temperature_c\r\n else:\r\n return self._ambient_temperature", "def get_color(self):\r\n return self.__color", "def temperature() -> float:", "def get_chip_temperature(self):\n self.check_validity()\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_GET_CHIP_TEMPERATURE, (), '', 10, 'h')", "def get_color(self):\r\n return self._color", "def getColor(self):\n return self.color", "def current_temperature(self):\n return self._current_temperature", "def current_temperature(self):\n return self._current_temperature", "def current_temperature(self):\n return self._current_temperature", "def current_temperature(self):\n return self._current_temperature", "def current_temperature(self):\n return self._current_temperature", "def current_temperature(self):\n return self._current_temperature", "def current_temperature(self):\n return self._current_temperature", "def get_color(self):\n return self._color", "def get_color(self):\n return self._color", "def get_temperature_state(self):\n return self.__sensor_states[0]", "def temperature() -> FlowFieldVal:\n return [\n self._t_s - self._delta_t * tf.math.tanh(z / self._height) for z in zz\n ]", "def getColor(self):\r\n return self.color", "def color(self):\n return self.__color", "def calculateTemperature(self):\n \n # CIE XYZ space\n self.X = (1/0.17697)*((0.49)*self.R + (0.31)*self.G + (0.2)*self.B)\n self.Y = (1/0.17697)*((0.17697)*self.R + (0.81240)*self.G + (0.01063)*self.B)\n self.Z = (1/0.17697)*((0)*self.R + (0.010)*self.G + (0.99)*self.B)\n\n # CIE Chromaticities xy\n self.x = self.X/(self.X + self.Y + self.Z)\n self.y = self.Y/(self.X + self.Y + self.Z)\n \n # CIE Chromaticities uv\n #self.u = (0.4661*self.x + 0.1593*self.y)/(self.y - 0.15735*self.x + 0.2424)\n #self.v = (0.6581*self.y)/(self.y - 0.15735*self.x + 0.2424)\n \n # constant for McCamy's/Hernandez-Andrés formula\n n = (self.x - self.x_e)/(self.y - self.y_e)\n \n # Correlated color temperature according to Hernández-Andrés (1999)\n self.color_temp = ( self.A_0 + \n self.A_1*np.exp(-n/self.t_1) + \n self.A_2*np.exp(-n/self.t_2) + \n self.A_3*np.exp(-n/self.t_3) )\n \n # Delete too high values\n self.color_temp[self.color_temp > 30000] = 0\n \n # Affichage de la CCT\n self.mean_temp = int(round(self.color_temp.mean()))\n self.mean_temp_label.setText(\"Temperature moyenne = \"+str(self.mean_temp))\n self.mean_temp_label.adjustSize()\n \t\n # Affichage de l'illuminance (Y)\n self.mean_illu = int(round((self.Y.mean())))\n self.illuminance_label.setText(\"Illuminance moyenne = \"+str(self.mean_illu))\n self.illuminance_label.adjustSize()", "def ambient_temperature_c(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"ambient_temperature_c\"))\r\n return kelvin_to_celsius(self._ambient_temperature)", "def current_temperature(self) -> float | None:\n return self.vera_device.get_current_temperature()", "def get_temperature(self, unit=DEGREES_C):\n if self.type in self.TYPES_12BIT_STANDARD:\n value = self.raw_sensor_count\n # the int part is 8 bit wide, 4 bit are left on 12 bit\n # so divide with 2^4 = 16 to 
get the celsius fractions\n value /= 16.0\n\n # check if the sensor value is the reset value\n if value == 85.0:\n raise ResetValueError(self)\n\n factor = self._get_unit_factor(unit)\n return factor(value)\n\n # Fallback to precalculated value for other sensor types\n factor = self._get_unit_factor(unit)\n return factor(self.raw_sensor_temp * 0.001)", "def getColor(self):\n return self.__color", "def getColor(self):\n return self.__color", "def getColor(self):\n return self.__color", "def current_temperature(self) -> float:\n return self._current_temperature", "def target_temperature(self) -> int:\r\n # TODO: Find a better way to do this. This is ugly.\r\n if self._hvac_mode == \"cool\":\r\n return self.target_temperature_low\r\n elif self._hvac_mode == \"heat\":\r\n return self.target_temperature_high\r\n elif self._hvac_mode == \"heat-cool\":\r\n # TODO: Fix this so that heat or cool is chosen.\r\n if self._ambient_temperature >= self._target_temperature:\r\n return self.target_temperature_low\r\n elif self._ambient_temperature <= self._target_temperature:\r\n return self.target_temperature_high\r\n elif self._hvac_mode == \"eco\":\r\n if self._ambient_temperature >= self._target_temperature:\r\n return self.eco_temperature_low\r\n elif self._ambient_temperature <= self._target_temperature:\r\n return self.eco_temperature_high\r\n elif self._hvac_mode == \"off\":\r\n return self.ambient_temperature\r\n\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"ambient_temperature\"))", "def get_chiller_temperature(self) -> float:\n\n return self.send(self.cmd.GET_COOLING_ACT)", "def target_temperature(self):\n return self._target_temp", "def get_temperature(self):\n summary = \" \".join(self.get_summary().split())\n pattern = '\\$.... .. .*? .*? (.*?) .*? .*? . .*? .*? . . . 
.*?'\n temperature = float(re.findall(pattern,summary).pop())\n return temperature", "def color(self):\n return self._color", "def color(self):\n return self._color", "def temperature(self, alt):\n T = self.altitude_profile(alt)[1]\n return T", "def current_temperature(self) -> float:\n return self._device.scaled_temperature", "def color(self):\n return self['color']", "def get_cold_junction_temperature(self):\n return self._mcp9600.get('COLD_JUNCTION').temperature", "def current_temperature(self):\n try:\n return self._boiler.temperature\n except AttributeError:\n return None", "def get_color(self):\n\n return self._color", "def current_temperature(self):\n return self._client.get_indoor_temp()", "def read_temperature(self):\n tRaw = self._read_multiple_bytes_as_array(self.BME280_TEMP_MSB, 3)\n\n return float(self._compensate_temperature((tRaw[0] << 12) + (tRaw[1] << 4) + (tRaw[2] >> 4)))", "def target_temperature(self) -> float:\n return self._thermostat.setpoint_temperature", "def get_temperature(self):\n \n # Get temp readings from both sensors\n humidity_temp = self._sense_hat.get_temperature_from_humidity()\n pressure_temp = self._sense_hat.get_temperature_from_pressure()\n \n # avg_temp becomes the average of the temperatures from both sensors\n # We need to check for pressure_temp value is not 0, to not ruin avg_temp calculation\n avg_temp = (humidity_temp + pressure_temp) / 2 if pressure_temp else humidity_temp\n \n # Get the CPU temperature\n cpu_temp = self._get_cpu_temp()\n \n # Calculate temperature compensating for CPU heating\n adj_temp = avg_temp - (cpu_temp - avg_temp) / 1.5\n \n # Average out value across the last three readings\n return self._get_smooth(adj_temp)", "def current_temperature(self):\n if self._device.temp is not None and self._device.temp > -460:\n return self._device.temp\n return None", "def get_temperature(self):\n rand_number = randint(18, 30)\n return rand_number", "def current_temperature(self) -> float | None:\n return self._state.current_temperature", "def get_temperature(self):\n\n svc = \"urn:upnp-org:serviceId:TemperatureSensor1\"\n if not svc in self.services:\n raise RuntimeError, \"Device doesn't support the service\"\n\n return self.get_variable(svc, \"CurrentTemperature\")", "def get_temp(self):\n\t\traw_temp = self.read_i2c_word(self.TEMP_OUT0)\n\n\t\t# Get the actual temperature using the formule given in the\n\t\t# MPU-6050 Register Map and Descriptions revision 4.2, page 30\n\t\tactual_temp = (raw_temp / 340.0) + 36.53\n\n\t\treturn actual_temp", "def temperature(self) -> Optional[float]:\n return self.data.get(\"temp\")", "def target_temperature_c(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"target_temperature_c\"))\r\n return kelvin_to_celsius(self._target_temperature)", "def state(self):\n return self.device.device_data[self.device_id]['temperature']", "def target_temperature(self):\n return self._target_temperature", "def target_temperature(self):\n return self._target_temperature", "def target_temperature(self):\n return self._target_temperature", "def target_temperature(self):\n return self._target_temperature", "def target_temperature(self):\n return self._target_temperature", "def target_temperature(self):\n return self._target_temperature", "def read_temperature(self):\n data = self.ag.read_bytes(Register.OUT_TEMP_L, 2)\n return lsm9ds1.to_int16(data)", "def current_temperature(self) -> float | None:\n return convert_isy_value_to_hass(\n self._node.status, self._uom, self._node.prec, 
1\n )", "def target_temperature_f(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"target_temperature_f\"))\r\n return celsius_to_fahrenheit(self.target_temperature_c)", "def state(self):\n return self._device.temperature", "def __getRawTemperature(self):\n t1 = self.read_byte_data(self.address, 0x03)\n t2 = self.read_byte_data(self.address, 0x04)\n t3 = self.read_byte_data(self.address, 0x05)\n t = (t1 << 16) | (t2 << 8) | t3\n t = getTwosComplement(t, 24)\n return t", "def get_temperature(self):\n # Fake a random temperature change\n temperature = random.randint(20, 25)\n self.set_temperature(temperature)", "def color(self):\n return self._zoom.color", "def get_temperature(self): # This function implements the equations needed to convert the digital data to degrees celsius\n C_1, C_2, C_3, C_4, C_5, C_6=self.calibration_constants()\n self.digital_temp_data() \n dT = self.tempadc-(C_5*(2**8))\n temperature=(2000+(dT*(C_6/(2**23))))/100\n return temperature, dT", "def current_temperature(self):\n temperature = float('NaN')\n while math.isnan(temperature) or temperature < MINIMUM_BELIEVABLE_TEMPERATURE:\n temperature = float(self._sensor.readTempC())\n return temperature", "def ambient_temperature_f(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"ambient_temperature_f\"))\r\n return celsius_to_fahrenheit(self.ambient_temperature_c)", "def read_temperature(self):\n self._force_read(False)\n\n tempADC = (self._read_register_1ubyte(self.BME680_TEMP_MSB) << 12) | (self._read_register_1ubyte(self.BME680_TEMP_LSB) << 4) | (self._read_register_1ubyte(self.BME680_TEMP_XLSB) >> 4)\n\n return float(self._compensate_temperature(tempADC))", "def get_color(self) -> str:\n return self.color", "def native_value(self):\n return self.temperature", "def getTEMP(self):\r\n\t\ttemp_H = self.read(0x41)\r\n\t\ttemp_L = self.read(0x42)\r\n\t\ttemp = self.twos_comp(val = (temp_H*256 + temp_L),bits=16)\r\n\t\ttempC = (temp/340.0)+36.53\r\n\t\ttempF = tempC*(9.0/5) + 32\r\n\t\treturn tempC,tempF ,temp", "def color(self):\n return self.container['color']" ]
[ "0.8550451", "0.75972027", "0.75929296", "0.75929296", "0.7584848", "0.7554157", "0.74379694", "0.7430193", "0.73825926", "0.7336957", "0.73212904", "0.72820795", "0.72075", "0.72032225", "0.71964854", "0.71878976", "0.7180456", "0.7172465", "0.7114398", "0.7094845", "0.70741916", "0.70650184", "0.70343816", "0.7015165", "0.70147234", "0.7010132", "0.7008548", "0.7004446", "0.69951445", "0.6989003", "0.698297", "0.6976447", "0.69698274", "0.69626546", "0.6959003", "0.6959003", "0.6959003", "0.6959003", "0.6959003", "0.6959003", "0.6959003", "0.6948725", "0.6948725", "0.69352746", "0.692816", "0.69180906", "0.6917342", "0.69125444", "0.6902531", "0.6898551", "0.68944997", "0.6883716", "0.6883716", "0.6883716", "0.6882296", "0.68749136", "0.68735343", "0.6873373", "0.6869895", "0.6867228", "0.6867228", "0.68661624", "0.68642485", "0.6853825", "0.6838723", "0.68381035", "0.6830608", "0.6819465", "0.6818337", "0.6809301", "0.6802917", "0.677832", "0.6777751", "0.6777426", "0.67732364", "0.6770708", "0.6770329", "0.67659247", "0.67643327", "0.67566174", "0.67566174", "0.67566174", "0.67566174", "0.67566174", "0.67566174", "0.6754115", "0.6751518", "0.6739687", "0.6739243", "0.6737653", "0.6729734", "0.67061985", "0.6697299", "0.6688618", "0.66872853", "0.66718996", "0.66575956", "0.66553384", "0.66358876", "0.66348594" ]
0.8436134
1
Return minimum supported color temperature.
def min_mireds(self): return 175
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def min_temperature(self):\n mini, maxi = ct.c_int(), ct.c_int()\n self.lib.GetTemperatureRange(ct.pointer(mini), ct.pointer(maxi))\n return mini.value", "def min_temp(self):\n if self.temperature_unit == UnitOfTemperature.CELSIUS:\n return MIN_TEMP_C\n return MIN_TEMP_F", "def min_temp(self):\n # return convert_temperature(\n # self._device.min_temp, TEMP_CELSIUS, self.hass.config.units.temperature_unit\n # )\n return self._device.min_temp", "def min_temp(self):\n return 16", "def min_temp(self):\n return GH_MIN_TEMP", "def minimum_temperature(self):\n return self._minimum_temperature", "def min_temp(self) -> float | None:\n try:\n return self._device.config[\"min_temp\"]\n except TypeError: # 'NoneType' object is not subscriptable\n return", "def min_temp(self):\n return 1", "def min_temp(self):\n return self.atag.dhw_min_temp", "def color_temp(self) -> int:\n new_range = self._tuya_temp_range()\n tuya_color_temp = self.tuya_device.status.get(self.dp_code_temp, 0)\n return (\n self.max_mireds\n - self.remap(\n tuya_color_temp,\n new_range[0],\n new_range[1],\n self.min_mireds,\n self.max_mireds,\n )\n + self.min_mireds\n )", "def native_min_value(self) -> float:\n return TEMP_MINIMUM", "def locked_temp_min(self) -> int:\r\n # TODO: Force this to return an int.\r\n if self.temperature_scale == \"C\":\r\n return self.locked_temp_min_c\r\n elif self.temperature_scale == \"F\":\r\n return self.locked_temp_min_f\r\n else:\r\n return self._locked_temp_min\r\n\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"locked_temp_min\"))", "def color_temp(self):\n return kelvin_to_mired(self._color_temp)", "def current_temperature(self):\n temperature = float('NaN')\n while math.isnan(temperature) or temperature < MINIMUM_BELIEVABLE_TEMPERATURE:\n temperature = float(self._sensor.readTempC())\n return temperature", "def locked_temp_min_c(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"locked_temp_min_c\"))\r\n return kelvin_to_celsius(self._locked_temp_min)", "def color_temp(self):\n return self._color_temp", "def low_temperature(self):\r\n try:\r\n return str(self.connect()['main']['temp_min'])\r\n except:\r\n return '@weather_low_temperature'", "def _minimum(self) -> float:\n if self._type == \"power\":\n return 1.0\n elif self._type == \"setpoint\":\n return self._product.get_data_config_json()[\"_value_setpoint_min\"]\n elif self._type == \"fan1\":\n fan = 1\n return self._product.get_data_config_json()[\"_value_fan_limits\"][\n ((fan - 1) * 2)\n ]\n elif self._type == \"fan2\":\n fan = 2\n return self._product.get_data_config_json()[\"_value_fan_limits\"][\n ((fan - 1) * 2)\n ]\n elif self._type == \"fan3\":\n fan = 3\n return self._product.get_data_config_json()[\"_value_fan_limits\"][\n ((fan - 1) * 2)\n ]", "def temperature() -> float:", "def min_temp(self):\n return self._min_temp", "def target_temperature_low(self) -> int:\r\n # TODO: Force this to return an int.\r\n if self.temperature_scale == \"C\":\r\n return self.target_temperature_low_c\r\n elif self.temperature_scale == \"F\":\r\n return self.target_temperature_low_f\r\n else:\r\n return self._target_temperature_low\r\n\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"target_temperature_low\"))", "def target_temperature_low(self):\n if self._client.mode == self._client.MODE_AUTO:\n return self._client.heattemp\n return None", "def get_temperature_color_preview(lamp_props):\n temperature = lamp_props.color_temperature\n\n mm = 1000.0 / 
temperature\n mm2 = mm ** 2\n mm3 = mm2 * mm\n x, y = 0, 0\n\n if temperature < 4000:\n x = -0.2661239 * mm3 - 0.2343580 * mm2 + 0.8776956 * mm + 0.179910\n else:\n x = -3.0258469 * mm3 + 2.1070379 * mm2 + 0.2226347 * mm + 0.240390\n\n x2 = x**2\n x3 = x2 * x\n if temperature < 2222:\n y = -1.1063814 * x3 - 1.34811020 * x2 + 2.18555832 * x - 0.20219683\n elif temperature < 4000:\n y = -0.9549476 * x3 - 1.37418593 * x2 + 2.09137015 * x - 0.16748867\n else:\n y = 3.0817580 * x3 - 5.87338670 * x2 + 3.75112997 * x - 0.37001483\n\n # xyY to XYZ, assuming Y=1.\n xyz = mathutils.Vector((x / y, 1, (1 - x - y) / y))\n return xyz_to_rgb * xyz", "def temp(self):\n if self.temp_sensor is None:\n return None\n else:\n if self.temp_scale.lower() in ['f', 'fahrenheit']:\n return self.temp_sensor.temp_f\n elif self.temp_scale.lower() in ['c', 'celsius']:\n return self.temp_sensor.temp_c", "def get_compensated_temperature() -> float:\n comp_factor = 2.25\n cpu_temp = get_cpu_temperature()\n raw_temp = bme280.get_temperature()\n comp_temp = raw_temp - ((cpu_temp - raw_temp) / comp_factor)\n # print(\"\"\"\n # Compensated_Temperature: {:05.2f} *C\n # Pressure: {:05.2f} hPa\n # Relative humidity: {:05.2f} %\n # \"\"\".format(temperature, pressure, humidity))\n return comp_temp", "def locked_temp_min_f(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"locked_temp_min_f\"))\r\n return celsius_to_fahrenheit(self.locked_temp_min_c)", "def eco_temperature_low(self) -> int:\r\n # TODO: Force this to return an int or float rounded to 0.5.\r\n if self.temperature_scale == \"C\":\r\n return self.eco_temperature_low_c\r\n elif self.temperature_scale == \"F\":\r\n return self.eco_temperature_low_f\r\n else:\r\n return self._eco_temperature_low\r\n\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"eco_temperature_low\"))", "def temperature():\n\tsensors = commands.getstatusoutput('sensors -u | grep -E temp[0-9]_input')\n\n\tif sensors[0] == 1:\n\t\traise Exception('lm-sensors is not setup. 
Run sensors-detect')\n\n\tif sensors[0] == 127:\n\t\traise Exception('lm-sensors is not installed')\n\n\ttemps = re.findall(r\"(\\d{2}.\\d+)\",sensors[1],re.M)\n\n\tif not temps:\n\t\traise Exception('No temperature sensors found')\n\n\tfor i,temp in enumerate(temps):\n\t\ttemps[i] = float(temp)\n\t\ttemps[i] = int(temps[i])\n\n\treturn max(temps)", "def max_temp(self):\n if self.temperature_unit == UnitOfTemperature.CELSIUS:\n return MAX_TEMP_C\n return MAX_TEMP_F", "def get_initial_color():\n if os.path.isfile(DATA_FILE):\n with open(DATA_FILE, 'r') as f:\n data = f.readline()\n print data\n return int(data, base=16)\n else:\n return INITIAL_COLOR", "def target_temperature_low_c(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"target_temperature_low_c\"))\r\n return kelvin_to_celsius(self._target_temperature_low)", "def color_for_temp(temp):\n color = None\n for temp_ceil in sorted(color_map.iterkeys()):\n color = color_map[temp_ceil]\n if(temp < temp_ceil):\n break\n return adjust_color(color)", "def target_temperature_low(self):\n return self._device.setpoint_heat", "def _minimum(self) -> float:\n return self._config[CONF_MIN]", "def target_humidity_min(self):\n if not (hum_range := self._get_humidity_range()):\n return None\n return hum_range[0]", "def get_cpu_temperature():\n try:\n return psutil.sensors_temperatures()['cpu_thermal'][0].current\n except:\n return None", "def min_temp(self):\n if self._min_temp:\n return self._min_temp\n\n # get default temp from super class\n return super().min_temp", "def get_specific_heat() -> float:\n return 1006.0", "def temperature(self) -> Optional[float]:\n return self.data.get(\"temp\")", "def min_humidity(self):\n return 0", "def target_temperature_low_f(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"target_temperature_low_f\"))\r\n return celsius_to_fahrenheit(self.target_temperature_low_c)", "def temperature():\n from .imperial import deg_F as F\n from .imperial import deg_R as R\n\n K = si.K\n C = si.deg_C\n\n return Equivalency(\n [\n (K, C, lambda x: x - 273.15, lambda x: x + 273.15),\n (C, F, lambda x: x * 1.8 + 32.0, lambda x: (x - 32.0) / 1.8),\n (K, F, lambda x: x * 1.8 - 459.67, lambda x: (x + 459.67) / 1.8),\n (R, F, lambda x: x - 459.67, lambda x: x + 459.67),\n (R, C, lambda x: (x - 491.67) * (5 / 9), lambda x: x * 1.8 + 491.67),\n (R, K, lambda x: x * (5 / 9), lambda x: x * 1.8),\n ],\n \"temperature\",\n )", "def temperature(self):\n temp = ct.c_float()\n self.lib.GetTemperatureF(ct.pointer(temp))\n return temp.value", "def target_temperature(self):\n if self.current_operation == 'Heat & Cool':\n return None\n if self.current_operation == 'Heat only':\n return int(self._api._heatto)\n elif self.current_operation == 'Cool only':\n return int(self._api._coolto)\n return None", "def low_temperature(self):\r\n raise NotImplementedError", "def target_temperature(self) -> float | None:\n if self._device.mode == ThermostatMode.COOL and self._device.cooling_setpoint:\n return self._device.scaled_cooling_setpoint\n\n if self._device.heating_setpoint:\n return self._device.scaled_heating_setpoint\n\n return None", "def getMinAbundanceOfClrSample(self):\n #try: minimum = min(self.clr_sample['abundance'])-0.001\n try: minimum = min(self.clr_sample)-0.01\n except: minimum = 0\n return minimum", "def target_temperature_low(self) -> float | None:\n return self._target_temperature_low", "def cmin(self):\n return self[\"cmin\"]", "def max_temperature(self):\n 
mini, maxi = ct.c_int(), ct.c_int()\n self.lib.GetTemperatureRange(ct.pointer(mini), ct.pointer(maxi))\n return maxi.value", "def target_temperature(self) -> float | None:\n\n zones = [z for z in self._device.zones if z.setpoint is not None]\n temps = [z.setpoint for z in zones if z.heat_demand is not None]\n return max(z.setpoint for z in zones) if temps else None\n\n # temps = [z.setpoint for z in self._device.zones]\n # return round(sum(temps) / len(temps), 1) if temps else None", "def internal_temp_c(self) -> int:\n return int(self._device_info[\"Temperature\"])", "def get_temperature_latched(self):\n return self.__latched_states[0]", "def cmin(self):\n return self['cmin']", "def min_brightness(self):\n return .0", "def target_temperature_low(self):\n return self._target_temperature_low", "def temperature(self):\n return self.read_short(65) / 340.0 + 36.53", "def temperature(self):\n return float(self._current_observation['temp_c'])", "def target_temperature(self):\n if self._client.mode == self._client.MODE_HEAT:\n return self._client.heattemp\n if self._client.mode == self._client.MODE_COOL:\n return self._client.cooltemp\n return None", "def current_temperature(self):\n if self._device.temp is not None and self._device.temp > -460:\n return self._device.temp\n return None", "def temperature_status(self):\n temp = ct.c_float()\n ans = self.lib.GetTemperatureF(ct.pointer(temp))\n return _ERRORS[ans]", "def GetTempCPU():\n tPath = \"/sys/class/thermal/thermal_zone0/temp\"\n tFile = open(tPath)\n temp = tFile.read()\n tFile.close()\n return (float(temp)*0.0018 + 32)", "def getInitialColor(self):\n return self.__initial_color__", "def target_temperature_low(self) -> float | None:\n return self._state.target_temperature_low", "def target_temperature(self) -> int:\r\n # TODO: Find a better way to do this. 
This is ugly.\r\n if self._hvac_mode == \"cool\":\r\n return self.target_temperature_low\r\n elif self._hvac_mode == \"heat\":\r\n return self.target_temperature_high\r\n elif self._hvac_mode == \"heat-cool\":\r\n # TODO: Fix this so that heat or cool is chosen.\r\n if self._ambient_temperature >= self._target_temperature:\r\n return self.target_temperature_low\r\n elif self._ambient_temperature <= self._target_temperature:\r\n return self.target_temperature_high\r\n elif self._hvac_mode == \"eco\":\r\n if self._ambient_temperature >= self._target_temperature:\r\n return self.eco_temperature_low\r\n elif self._ambient_temperature <= self._target_temperature:\r\n return self.eco_temperature_high\r\n elif self._hvac_mode == \"off\":\r\n return self.ambient_temperature\r\n\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"ambient_temperature\"))", "def eco_temperature_low_c(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"eco_temperature_low_c\"))\r\n return kelvin_to_celsius(self._eco_temperature_low)", "def low_temperature(self):\r\n return self._yesterdays_weather.get_low_temperature()", "def target_temperature_low(self) -> float | None:\n target = self._node.aux_properties.get(PROP_SETPOINT_HEAT)\n if not target:\n return None\n return convert_isy_value_to_hass(target.value, target.uom, target.prec, 1)", "def _find_min_gradient_id(self) -> int:\n\n mask = self.alpha > self.a\n mask_ids = np.where(mask)[0]\n j = mask_ids[np.argmin(self.gradient[mask])]\n\n return j", "def max_temp(self):\n return GH_MAX_TEMP", "def target_temperature_high(self):\n if self._client.mode == self._client.MODE_AUTO:\n return self._client.cooltemp\n return None", "def min_background_concentration(self) -> _VectorisedFloat:\n return self.CO2_atmosphere_concentration", "def get_temp(val):\n if val in ['', 32767]:\n return None\n return temperature(val / 100., 'C').value('F')", "def calculate_color(thisCountry, maxCountry, maxColor, minColor):\n countryFactor = float(thisCountry)/maxCountry\n colorRange = maxColor - minColor\n return int(colorRange*countryFactor+minColor)", "def max_temp(self):\n # return convert_temperature(\n # self._device.max_temp, TEMP_CELSIUS, self.hass.config.units.temperature_unit\n # )\n return self._device.max_temp", "def min_background_concentration(self) -> _VectorisedFloat:\n return 0.", "def native_min_value(self) -> float:\n return -9", "def temperature(self) -> float:\n # Start a measurement then poll the measurement finished bit.\n self.temp_start = 1\n while self.temp_running > 0:\n pass\n # Grab the temperature value and convert it to Celsius.\n # This uses the same observed value formula from the Radiohead library.\n temp = self._read_u8(_REG_TEMP2)\n return 166.0 - temp", "def calculate_color_temperature(r: int, g: int, b: int) -> float:\n\n # 1. Map RGB values to their XYZ counterparts.\n # Based on 6500K fluorescent, 3000K fluorescent\n # and 60W incandescent values for a wide range.\n # Note: Y = Illuminance or lux\n x = (-0.14282 * r) + (1.54924 * g) + (-0.95641 * b)\n y = (-0.32466 * r) + (1.57837 * g) + (-0.73191 * b)\n z = (-0.68202 * r) + (0.77073 * g) + (0.56332 * b)\n\n # 2. Calculate the chromaticity co-ordinates\n xchrome = x / (x + y + z)\n ychrome = y / (x + y + z)\n\n # 3. Use to determine the CCT\n n = (xchrome - 0.3320) / (0.1858 - ychrome)\n\n # 4. 
Calculate the final CCT\n cct = (449.0 * pow(n, 3)) + (3525.0 * pow(n, 2)) + (6823.3 * n) + 5520.33\n\n # Return the results in degrees Kelvin\n return cct", "def current_temperature(self) -> float | None:\n return convert_isy_value_to_hass(\n self._node.status, self._uom, self._node.prec, 1\n )", "def recognize_color(color, palette):\n min_distance = np.inf\n most_similar_color = None\n for cname, cvalue in palette.items():\n distance = np.sum(np.abs(np.array(color) - np.array(cvalue)))\n if distance < min_distance:\n min_distance = distance\n most_similar_color = cname\n return most_similar_color", "def eco_temperature_low_f(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"eco_temperature_low_f\"))\r\n return celsius_to_fahrenheit(self.eco_temperature_low_c)", "def potential_color(self):\n\n return (1., 1., 0.)", "def temperature(self):\n names = ['anc_air_temperature']\n return self.sensor.get_with_fallback('temperature', names)", "def find_min_node(self):\n min_energy = 10 ** 10\n min_id = -1\n for node in self.node:\n if node.energy < min_energy:\n min_energy = node.energy\n min_id = node.id\n return min_id", "def identify_color(animal):\n min_dist = None\n identified_color = None\n for color in animal.colors:\n color_rgb = color_rgbs[color]\n dist = ((animal.get_red() - color_rgb[0]) ** 2) + (\n (animal.get_green() - color_rgb[1]) ** 2) + ((animal.get_blue() - color_rgb[2]) ** 2)\n if (min_dist is None) or (dist < min_dist):\n min_dist = dist\n identified_color = color\n return identified_color", "def calculateTemperature(self):\n \n # CIE XYZ space\n self.X = (1/0.17697)*((0.49)*self.R + (0.31)*self.G + (0.2)*self.B)\n self.Y = (1/0.17697)*((0.17697)*self.R + (0.81240)*self.G + (0.01063)*self.B)\n self.Z = (1/0.17697)*((0)*self.R + (0.010)*self.G + (0.99)*self.B)\n\n # CIE Chromaticities xy\n self.x = self.X/(self.X + self.Y + self.Z)\n self.y = self.Y/(self.X + self.Y + self.Z)\n \n # CIE Chromaticities uv\n #self.u = (0.4661*self.x + 0.1593*self.y)/(self.y - 0.15735*self.x + 0.2424)\n #self.v = (0.6581*self.y)/(self.y - 0.15735*self.x + 0.2424)\n \n # constant for McCamy's/Hernandez-Andrés formula\n n = (self.x - self.x_e)/(self.y - self.y_e)\n \n # Correlated color temperature according to Hernández-Andrés (1999)\n self.color_temp = ( self.A_0 + \n self.A_1*np.exp(-n/self.t_1) + \n self.A_2*np.exp(-n/self.t_2) + \n self.A_3*np.exp(-n/self.t_3) )\n \n # Delete too high values\n self.color_temp[self.color_temp > 30000] = 0\n \n # Affichage de la CCT\n self.mean_temp = int(round(self.color_temp.mean()))\n self.mean_temp_label.setText(\"Temperature moyenne = \"+str(self.mean_temp))\n self.mean_temp_label.adjustSize()\n \t\n # Affichage de l'illuminance (Y)\n self.mean_illu = int(round((self.Y.mean())))\n self.illuminance_label.setText(\"Illuminance moyenne = \"+str(self.mean_illu))\n self.illuminance_label.adjustSize()", "def get_temperature(self):\n pass", "def get_cpu_temperature():\n\n output = \"\"\n\n try:\n process = Popen(['vcgencmd', 'measure_temp'], stdout=PIPE)\n output, _error = process.communicate()\n output = str(output)\n except Exception:\n logger.error(\"Exception while reading cpu temp\")\n\n float_temp = 0\n\n try:\n float_temp = float(output[output.index('=') + 1:output.rindex(\"'\")])\n if debug==True:\n logger.info(\"Temp: \" + str(float_temp))\n except ValueError:\n logger.error(\"Temp value is not float\")\n\n return float_temp", "def current_temperature(self) -> float | None:\n temps = [z.temperature for z in 
self._device.zones if z.temperature is not None]\n temps = [t for t in temps if t is not None] # above is buggy, why?\n try:\n return round(sum(temps) / len(temps), 1) if temps else None\n except TypeError:\n _LOGGER.error(f\"temp ({temps}) contains None\")", "def get_min_cell_voltage(self):\n summary = \" \".join(self.get_summary().split())\n pattern = '\\$.... .. .*? .*? .*? (.*?) .*? . .*? .*? . . . .*?'\n minv = float(re.findall(pattern,summary).pop())\n return minv", "def get_chip_temperature(self):\n self.check_validity()\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_GET_CHIP_TEMPERATURE, (), '', 10, 'h')", "def get_color(self):\n return self._io.last_state['color']['front-center']", "def target_temperature_high(self):\n return self._device.setpoint_cool", "def min_value(self) -> float:\n return DEFAULT_MIN_VALUE", "def required_gpu_minimum(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"required_gpu_minimum\")", "def get_min_mag_edge(self):\r\n\t\treturn self.min_mag", "def get_cpu_temperature():\n process = Popen(['vcgencmd', 'measure_temp'], stdout=PIPE)\n output, _error = process.communicate()\n return float(output[output.index('=') + 1:output.rindex(\"'\")])", "def _get_minimum(self):\n return self._minimum", "def temperature_unit(self):\n return TEMP_FAHRENHEIT", "def target_temperature_c(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"target_temperature_c\"))\r\n return kelvin_to_celsius(self._target_temperature)" ]
[ "0.75069237", "0.7210723", "0.7091037", "0.70822674", "0.70354617", "0.6973797", "0.682857", "0.6697634", "0.66812134", "0.66597176", "0.65733534", "0.6456474", "0.6444758", "0.6424821", "0.6410424", "0.63718545", "0.6371298", "0.63557136", "0.6349745", "0.6336295", "0.6310369", "0.62802786", "0.62620455", "0.6253269", "0.6250468", "0.6236167", "0.62112594", "0.6179339", "0.615223", "0.6134367", "0.6095043", "0.6094932", "0.6084683", "0.60426706", "0.6028579", "0.6028439", "0.60213673", "0.6019485", "0.6003627", "0.5986541", "0.59847486", "0.59560424", "0.5952708", "0.5946855", "0.5939021", "0.5901129", "0.5900273", "0.5882344", "0.58805597", "0.5862217", "0.5860411", "0.58596647", "0.58595693", "0.58578277", "0.58526206", "0.5850634", "0.5850205", "0.58409864", "0.5836143", "0.58349895", "0.5829849", "0.5814869", "0.5814008", "0.5812041", "0.5800946", "0.5795478", "0.5781845", "0.5781699", "0.5773026", "0.57726264", "0.576691", "0.5766596", "0.5753772", "0.5747323", "0.5744592", "0.574398", "0.5741536", "0.5740021", "0.57379216", "0.5722356", "0.5717981", "0.57165563", "0.5712581", "0.57079923", "0.5699946", "0.56867456", "0.5679136", "0.567892", "0.5677794", "0.5669821", "0.56661457", "0.56490016", "0.5647746", "0.5647701", "0.56466055", "0.56454885", "0.5645191", "0.56365204", "0.5630991", "0.562738", "0.5626858" ]
0.0
-1
Return maximum supported color temperature.
def max_mireds(self): return 333
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def max_temperature(self):\n mini, maxi = ct.c_int(), ct.c_int()\n self.lib.GetTemperatureRange(ct.pointer(mini), ct.pointer(maxi))\n return maxi.value", "def max_temp(self):\n if self.temperature_unit == UnitOfTemperature.CELSIUS:\n return MAX_TEMP_C\n return MAX_TEMP_F", "def max_temp(self):\n # return convert_temperature(\n # self._device.max_temp, TEMP_CELSIUS, self.hass.config.units.temperature_unit\n # )\n return self._device.max_temp", "def max_temp(self):\n return GH_MAX_TEMP", "def maximum_temperature(self):\n return self._maximum_temperature", "def color_temp(self) -> int:\n new_range = self._tuya_temp_range()\n tuya_color_temp = self.tuya_device.status.get(self.dp_code_temp, 0)\n return (\n self.max_mireds\n - self.remap(\n tuya_color_temp,\n new_range[0],\n new_range[1],\n self.min_mireds,\n self.max_mireds,\n )\n + self.min_mireds\n )", "def max_temp(self):\n return self.atag.dhw_max_temp", "def get_color_max(image, color):\n boundaries = find_color_boundaries(image, color)\n if boundaries:\n return (0, image[boundaries[0] : boundaries[1] + 1, boundaries[2] : boundaries[3] + 1])\n else:\n return 1, None", "def get_max_brightness(self) -> float:\n return max(self._color)", "def max_temp(self) -> float | None:\n try:\n return self._device.config[\"max_temp\"]\n except TypeError: # 'NoneType' object is not subscriptable\n return", "def max_temp(self):\n return 99", "def maximal_color(graph, node):\n return max(get_node_colors(graph, node))", "def max_temp(self):\n return 30", "def native_max_value(self) -> float:\n return TEMP_MAXIMUM", "def get_max_temp(self):\n self.max_temp = self.domain[1] * 2", "def max_temp(self):\n return self._max_temp", "def locked_temp_max_c(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"locked_temp_max_c\"))\r\n return kelvin_to_celsius(self._locked_temp_max)", "def locked_temp_max(self) -> int:\r\n # TODO: Force this to return an int.\r\n if self.temperature_scale == \"C\":\r\n return self.locked_temp_max_c\r\n elif self.temperature_scale == \"F\":\r\n return self.locked_temp_max_f\r\n else:\r\n return self._locked_temp_max\r\n\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"locked_temp_max\"))", "def _maximum(self) -> float:\n if self._type == \"power\":\n return 5.0\n elif self._type == \"setpoint\":\n return self._product.get_data_config_json()[\"_value_setpoint_max\"]\n elif self._type == \"fan1\":\n fan = 1\n return self._product.get_data_config_json()[\"_value_fan_limits\"][\n (((fan - 1) * 2) + 1)\n ]\n elif self._type == \"fan2\":\n fan = 2\n return self._product.get_data_config_json()[\"_value_fan_limits\"][\n (((fan - 1) * 2) + 1)\n ]\n elif self._type == \"fan3\":\n fan = 3\n return self._product.get_data_config_json()[\"_value_fan_limits\"][\n (((fan - 1) * 2) + 1)\n ]", "def cmax(self):\n return self[\"cmax\"]", "def cmax(self):\n return self['cmax']", "def locked_temp_max_f(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"locked_temp_max_f\"))\r\n return celsius_to_fahrenheit(self.locked_temp_max_c)", "def color_temp(self):\n return kelvin_to_mired(self._color_temp)", "def get_max_depth_val():\n data = SUNRGBDTrainDataset(True)\n return max([data[0][i][-1].flatten().item() for i in range(len(data))])", "def color_temp(self):\n return self._color_temp", "def max_temp(self):\n if self._max_temp:\n return self._max_temp\n\n # Get default temp from super class\n return super().max_temp", "def native_max_value(self) -> float:\n 
return 9", "def target_humidity_max(self):\n if not (hum_range := self._get_humidity_range()):\n return None\n return hum_range[1]", "def target_temperature_high(self) -> int:\r\n # TODO: Force this to return an int.\r\n if self.temperature_scale == \"C\":\r\n return self.target_temperature_high_c\r\n elif self.temperature_scale == \"F\":\r\n return self.target_temperature_high_f\r\n else:\r\n return self._target_temperature_high\r\n\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"target_temperature_high\"))", "def high_temperature(self):\r\n try:\r\n return str(self.connect()['main']['temp_max'])\r\n except:\r\n return '@weather_high_temperature'", "def rgb_maximum(colors_tuple):\n r_sorted_tuple = sorted(colors_tuple, key=lambda x:x[1][0])\n g_sorted_tuple = sorted(colors_tuple, key=lambda x:x[1][1])\n b_sorted_tuple = sorted(colors_tuple, key=lambda x:x[1][2])\n\n r_min = r_sorted_tuple[0][1][0]\n g_min = g_sorted_tuple[0][1][1]\n b_min = b_sorted_tuple[0][1][2]\n\n r_max = r_sorted_tuple[len(colors_tuple)-1][1][0]\n g_max = g_sorted_tuple[len(colors_tuple)-1][1][1]\n b_max = b_sorted_tuple[len(colors_tuple)-1][1][2]\n\n return {\n \"r_max\":r_max,\n \"r_min\":r_min,\n \"g_max\":g_max,\n \"g_min\":g_min,\n \"b_max\":b_max,\n \"b_min\":b_min,\n \"r_dvalue\":(r_max-r_min)/3,\n \"g_dvalue\":(g_max-g_min)/3,\n \"b_dvalue\":(b_max-b_min)/3\n }", "def temperature_limit(self):\n return self._read(MX_TEMPERATURE_LIMIT)", "def temperature():\n\tsensors = commands.getstatusoutput('sensors -u | grep -E temp[0-9]_input')\n\n\tif sensors[0] == 1:\n\t\traise Exception('lm-sensors is not setup. Run sensors-detect')\n\n\tif sensors[0] == 127:\n\t\traise Exception('lm-sensors is not installed')\n\n\ttemps = re.findall(r\"(\\d{2}.\\d+)\",sensors[1],re.M)\n\n\tif not temps:\n\t\traise Exception('No temperature sensors found')\n\n\tfor i,temp in enumerate(temps):\n\t\ttemps[i] = float(temp)\n\t\ttemps[i] = int(temps[i])\n\n\treturn max(temps)", "def _maximum(self) -> float:\n return self._config[CONF_MAX]", "def color_for_temp(temp):\n color = None\n for temp_ceil in sorted(color_map.iterkeys()):\n color = color_map[temp_ceil]\n if(temp < temp_ceil):\n break\n return adjust_color(color)", "def target_temperature_high(self):\n if self._client.mode == self._client.MODE_AUTO:\n return self._client.cooltemp\n return None", "def max_humidity(self):\n return 60", "def _get_color(self, c, x, max_num):\n\n ratio = 5*(float(x)/max_num)\n i = int(math.floor(ratio))\n j = int(math.ceil(ratio))\n ratio -= i\n r = (1 - ratio) * self._colors[i][c] + ratio*self._colors[j][c]\n return int(255*r)", "def eco_temperature_high(self) -> int:\r\n # TODO: Force this to return an int.\r\n if self.temperature_scale == \"C\":\r\n return self.eco_temperature_high_c\r\n elif self.temperature_scale == \"F\":\r\n return self.eco_temperature_high_f\r\n else:\r\n return self._eco_temperature_high\r\n\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"eco_temperature_high\"))", "def calculate_color(thisCountry, maxCountry, maxColor, minColor):\n countryFactor = float(thisCountry)/maxCountry\n colorRange = maxColor - minColor\n return int(colorRange*countryFactor+minColor)", "def getMaxAbundanceOfClrSample(self):\n #try: maximum = max(self.clr_sample['abundance'])+0.001\n try: maximum = max(self.clr_sample)+0.01\n except: maximum = 0\n return maximum", "def internal_temp_c(self) -> int:\n return int(self._device_info[\"Temperature\"])", "def target_temperature_high_c(self) -> float:\r\n 
self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"target_temperature_high_c\"))\r\n return kelvin_to_celsius(self._target_temperature_high)", "def min_temp(self):\n return 16", "def target_temperature_high_f(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"target_temperatue_high_f\"))\r\n return celsius_to_fahrenheit(self.target_temperature_high_c)", "def z_max(self):\n return self.get_max_value(self.Z_INDEX)", "def energy_max(self) -> Union[int, float]:\n return self.proto.energy_max", "def energy_max(self) -> Union[int, float]:\n return self.proto.energy_max", "def current_temperature(self):\n temperature = float('NaN')\n while math.isnan(temperature) or temperature < MINIMUM_BELIEVABLE_TEMPERATURE:\n temperature = float(self._sensor.readTempC())\n return temperature", "def _find_max_gradient_id(self) -> int:\n\n mask = self.alpha < self.b\n mask_ids = np.where(mask)[0]\n i = mask_ids[np.argmax(self.gradient[mask])]\n\n return i", "def get_temperature_color_preview(lamp_props):\n temperature = lamp_props.color_temperature\n\n mm = 1000.0 / temperature\n mm2 = mm ** 2\n mm3 = mm2 * mm\n x, y = 0, 0\n\n if temperature < 4000:\n x = -0.2661239 * mm3 - 0.2343580 * mm2 + 0.8776956 * mm + 0.179910\n else:\n x = -3.0258469 * mm3 + 2.1070379 * mm2 + 0.2226347 * mm + 0.240390\n\n x2 = x**2\n x3 = x2 * x\n if temperature < 2222:\n y = -1.1063814 * x3 - 1.34811020 * x2 + 2.18555832 * x - 0.20219683\n elif temperature < 4000:\n y = -0.9549476 * x3 - 1.37418593 * x2 + 2.09137015 * x - 0.16748867\n else:\n y = 3.0817580 * x3 - 5.87338670 * x2 + 3.75112997 * x - 0.37001483\n\n # xyY to XYZ, assuming Y=1.\n xyz = mathutils.Vector((x / y, 1, (1 - x - y) / y))\n return xyz_to_rgb * xyz", "def get_specific_heat() -> float:\n return 1006.0", "def target_temperature(self) -> int:\r\n # TODO: Find a better way to do this. This is ugly.\r\n if self._hvac_mode == \"cool\":\r\n return self.target_temperature_low\r\n elif self._hvac_mode == \"heat\":\r\n return self.target_temperature_high\r\n elif self._hvac_mode == \"heat-cool\":\r\n # TODO: Fix this so that heat or cool is chosen.\r\n if self._ambient_temperature >= self._target_temperature:\r\n return self.target_temperature_low\r\n elif self._ambient_temperature <= self._target_temperature:\r\n return self.target_temperature_high\r\n elif self._hvac_mode == \"eco\":\r\n if self._ambient_temperature >= self._target_temperature:\r\n return self.eco_temperature_low\r\n elif self._ambient_temperature <= self._target_temperature:\r\n return self.eco_temperature_high\r\n elif self._hvac_mode == \"off\":\r\n return self.ambient_temperature\r\n\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"ambient_temperature\"))", "def ultrascale_temperature_limit_degC(self):\n return self._ultrascale_temperature_limit_degC", "def hemt_gate_max_voltage(self):\n return self._hemt_gate_max_voltage", "def get_max_cell_voltage(self): \n summary = \" \".join(self.get_summary().split())\n pattern = '\\$.... .. .*? .*? .*? .*? (.*?) . .*? .*? . . . 
.*?'\n maxv = float(re.findall(pattern,summary).pop())\n return maxv", "def get_color(in_val, min_val=0, max_val=100):\n width = max_val - min_val\n unit = width / len(continuum)\n return continuum[min(int(in_val / unit), 19)]", "def target_temperature_high(self):\n return self._device.setpoint_cool", "def getmaxnumcone(self):\n maxnumcone_ = ctypes.c_int32()\n res = __library__.MSK_XX_getmaxnumcone(self.__nativep,ctypes.byref(maxnumcone_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n maxnumcone_ = maxnumcone_.value\n _maxnumcone_return_value = maxnumcone_\n return (_maxnumcone_return_value)", "def u_max(self):\n if self._u_max is None:\n return self.uv_max\n else:\n return self._u_max", "def temperature() -> float:", "def temperature(k, kmax):\n return 1.0 / 500 * (1.0 / k - 1.0 / kmax)", "def _get_max_t(self):\n\n return max([\n self.s_of_t[-1][0],\n self.i_of_t[-1][0],\n self.r_of_t[-1][0],\n ])", "def calculate_color_temperature(r: int, g: int, b: int) -> float:\n\n # 1. Map RGB values to their XYZ counterparts.\n # Based on 6500K fluorescent, 3000K fluorescent\n # and 60W incandescent values for a wide range.\n # Note: Y = Illuminance or lux\n x = (-0.14282 * r) + (1.54924 * g) + (-0.95641 * b)\n y = (-0.32466 * r) + (1.57837 * g) + (-0.73191 * b)\n z = (-0.68202 * r) + (0.77073 * g) + (0.56332 * b)\n\n # 2. Calculate the chromaticity co-ordinates\n xchrome = x / (x + y + z)\n ychrome = y / (x + y + z)\n\n # 3. Use to determine the CCT\n n = (xchrome - 0.3320) / (0.1858 - ychrome)\n\n # 4. Calculate the final CCT\n cct = (449.0 * pow(n, 3)) + (3525.0 * pow(n, 2)) + (6823.3 * n) + 5520.33\n\n # Return the results in degrees Kelvin\n return cct", "def get_max_density(self):\n max_density = str(self.density.index(min(self.density)) + 1)\n print(max_density)\n return max_density", "def findMaxFactor(self):\n factorMax = 0\n factorMaxInd = ''\n for ue in list(self.ues.keys()):\n if len(self.ues[ue].bearers[0].buffer.pckts)>0 and self.ues[ue].pfFactor>factorMax:\n factorMax = self.ues[ue].pfFactor\n factorMaxInd = ue\n if factorMaxInd=='':\n ue = list(self.ues.keys())[self.ind_u]\n q = 0\n while len(self.ues[ue].bearers[0].buffer.pckts)==0 and q<len(self.ues):\n self.updIndUE()\n ue = list(self.ues.keys())[self.ind_u]\n q = q + 1\n factorMaxInd = ue\n\n return factorMaxInd", "def default(maximum_luminosity=30):\n return [maximum_luminosity, 5 * 10 ** -3, 20, -2., 0.0]", "def max_exposure(self):\n exp = ct.c_float()\n self.lib.GetMaximumExposure(ct.pointer(exp))\n return exp.value", "def max_gpu(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"max_gpu\")", "def min_temperature(self):\n mini, maxi = ct.c_int(), ct.c_int()\n self.lib.GetTemperatureRange(ct.pointer(mini), ct.pointer(maxi))\n return mini.value", "def getImageMax(self):\n fname = '%s::%s'%(self.__class__.__name__, self.getImageMax.__name__)\n if (not self.lhaveImage):\n print(\"%s: DSM image not yet computed\"%fname)\n return None, None\n maxIndex = c_int(1)\n maxValue = c_float(1)\n ierr = c_int(1)\n self.lib.xcloc_getImageMax(maxIndex, maxValue, ierr)\n if (ierr.value != 0):\n print(\"%s: Failed to get max value and index of DSM image\"%fname)\n return None, None\n imax = maxIndex.value - 1 # Fortran to C\n vmax = maxValue.value\n return imax, vmax", "def target_temperature(self):\n if self.current_operation == 'Heat & Cool':\n return None\n if self.current_operation == 'Heat only':\n return int(self._api._heatto)\n elif self.current_operation == 'Cool only':\n return 
int(self._api._coolto)\n return None", "def max_intensity(self, time):\n ti = np.where(time == self.times)[0][0]\n return self.timesteps[ti].max()", "def get_rmax(self):\n return self.rmax", "def getZMax(self):\n return self.zmax", "def required_gpu_maximum(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"required_gpu_maximum\")", "def _get_maximum(self):\n return self._maximum", "def get_compensated_temperature() -> float:\n comp_factor = 2.25\n cpu_temp = get_cpu_temperature()\n raw_temp = bme280.get_temperature()\n comp_temp = raw_temp - ((cpu_temp - raw_temp) / comp_factor)\n # print(\"\"\"\n # Compensated_Temperature: {:05.2f} *C\n # Pressure: {:05.2f} hPa\n # Relative humidity: {:05.2f} %\n # \"\"\".format(temperature, pressure, humidity))\n return comp_temp", "def max_value(self) -> float:\n return DEFAULT_MAX_VALUE", "def max_well(self):\n maxVal = np.max(self.get_well_depth_image())\n return maxVal", "def state_max(self) -> float:\n raise NotImplementedError", "def get_max(self):\n return self.serie.max()", "def f_max(cls):\n return cls.params[\"f_max\"]", "def getmaxnumcon(self):\n maxnumcon_ = ctypes.c_int32()\n res = __library__.MSK_XX_getmaxnumcon(self.__nativep,ctypes.byref(maxnumcon_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n maxnumcon_ = maxnumcon_.value\n _maxnumcon_return_value = maxnumcon_\n return (_maxnumcon_return_value)", "def maximum(self) -> Union[int, float]:\n return self.range[1]", "def getmaxnumbarvar(self):\n maxnumbarvar_ = ctypes.c_int32()\n res = __library__.MSK_XX_getmaxnumbarvar(self.__nativep,ctypes.byref(maxnumbarvar_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n maxnumbarvar_ = maxnumbarvar_.value\n _maxnumbarvar_return_value = maxnumbarvar_\n return (_maxnumbarvar_return_value)", "def target_temperature(self) -> float | None:\n\n zones = [z for z in self._device.zones if z.setpoint is not None]\n temps = [z.setpoint for z in zones if z.heat_demand is not None]\n return max(z.setpoint for z in zones) if temps else None\n\n # temps = [z.setpoint for z in self._device.zones]\n # return round(sum(temps) / len(temps), 1) if temps else None", "def get_Ec_max(self):\n return self.Ec_max", "def getcolors(self, maxcolors=256):\r\n\r\n if self._mode in (\"1\", \"L\", \"P\"):\r\n h = self._instance.histogram()\r\n out = []\r\n for i in range(256):\r\n if h[i]:\r\n out.append((h[i], i))\r\n if len(out) > maxcolors:\r\n return None\r\n return out\r\n uni, counts = self._getcolors()\r\n if c>maxcolors: return None\r\n colors = []\r\n for l in range(len(counts)):\r\n colors.append((counts[l], l))\r\n return colors", "def target_temperature_high(self):\n return self._target_temperature_high", "def high_temperature(self):\r\n raise NotImplementedError", "def test_value_max(self):\n self.assertEqual(DPTValue1Ucount().to_knx(255), (0xFF,))\n self.assertEqual(DPTValue1Ucount().from_knx((0xFF,)), 255)", "def target_temperature(self):\n if self._client.mode == self._client.MODE_HEAT:\n return self._client.heattemp\n if self._client.mode == self._client.MODE_COOL:\n return self._client.cooltemp\n return None", "def max_temp(sample):\n step_temps = list(\n filter(None, list(map(lambda step: step.furnace_temperature,\n sample.annealing_steps)) + \\\n list(map(lambda step: step.furnace_temperature,\n sample.growing_steps)) + \\\n list(map(lambda step: step.furnace_temperature,\n sample.cooling_steps))))\n\n if(len(step_temps)):\n return max(step_temps)\n else:\n return 0", "def 
eco_temperature_high_c(self) -> float:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"eco_temperature_high_c\"))\r\n return kelvin_to_celsius(self._eco_temperature_high)", "def recognize_color(color, palette):\n min_distance = np.inf\n most_similar_color = None\n for cname, cvalue in palette.items():\n distance = np.sum(np.abs(np.array(color) - np.array(cvalue)))\n if distance < min_distance:\n min_distance = distance\n most_similar_color = cname\n return most_similar_color", "def temp(self):\n if self.temp_sensor is None:\n return None\n else:\n if self.temp_scale.lower() in ['f', 'fahrenheit']:\n return self.temp_sensor.temp_f\n elif self.temp_scale.lower() in ['c', 'celsius']:\n return self.temp_sensor.temp_c", "def get_color(activePerMillion):\n activePer100k = activePerMillion / 10.0\n if activePer100k < 100:\n return \"#aaf0d1\"\n elif activePer100k < 500:\n return \"#a3f7bf\"\n elif activePer100k < 1000:\n return \"#90EE90\"\n elif activePer100k < 1500:\n return \"#00ff7f\"\n elif activePer100k < 2000:\n return \"#77dd77\"\n elif activePer100k < 2500:\n return \"#32cd32\"\n elif activePer100k < 3000:\n return \"#4cbb17\"\n elif activePer100k < 3500:\n return \"#228b22\"\n elif activePer100k < 4000:\n return \"#355e3b \"\n else:\n return \"#006400\"", "def calculateTemperature(self):\n \n # CIE XYZ space\n self.X = (1/0.17697)*((0.49)*self.R + (0.31)*self.G + (0.2)*self.B)\n self.Y = (1/0.17697)*((0.17697)*self.R + (0.81240)*self.G + (0.01063)*self.B)\n self.Z = (1/0.17697)*((0)*self.R + (0.010)*self.G + (0.99)*self.B)\n\n # CIE Chromaticities xy\n self.x = self.X/(self.X + self.Y + self.Z)\n self.y = self.Y/(self.X + self.Y + self.Z)\n \n # CIE Chromaticities uv\n #self.u = (0.4661*self.x + 0.1593*self.y)/(self.y - 0.15735*self.x + 0.2424)\n #self.v = (0.6581*self.y)/(self.y - 0.15735*self.x + 0.2424)\n \n # constant for McCamy's/Hernandez-Andrés formula\n n = (self.x - self.x_e)/(self.y - self.y_e)\n \n # Correlated color temperature according to Hernández-Andrés (1999)\n self.color_temp = ( self.A_0 + \n self.A_1*np.exp(-n/self.t_1) + \n self.A_2*np.exp(-n/self.t_2) + \n self.A_3*np.exp(-n/self.t_3) )\n \n # Delete too high values\n self.color_temp[self.color_temp > 30000] = 0\n \n # Affichage de la CCT\n self.mean_temp = int(round(self.color_temp.mean()))\n self.mean_temp_label.setText(\"Temperature moyenne = \"+str(self.mean_temp))\n self.mean_temp_label.adjustSize()\n \t\n # Affichage de l'illuminance (Y)\n self.mean_illu = int(round((self.Y.mean())))\n self.illuminance_label.setText(\"Illuminance moyenne = \"+str(self.mean_illu))\n self.illuminance_label.adjustSize()", "def last_color(self):\n idx = self._color_indexes.get(self._plotid)\n if idx is not None:\n return COLOR_CYCLE[(idx-1) % len(COLOR_CYCLE)]\n return COLOR_CYCLE[0]", "def temperature_scale(self) -> str:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"temperature_scale\"))\r\n return self._temperature_scale" ]
[ "0.75112236", "0.72960573", "0.7186571", "0.7073784", "0.6881457", "0.683345", "0.6819592", "0.6797262", "0.6717002", "0.6712087", "0.6707936", "0.6685103", "0.66778046", "0.66512823", "0.66321784", "0.65648353", "0.6525134", "0.64380866", "0.63266563", "0.6323054", "0.63075083", "0.62968665", "0.6268585", "0.62510455", "0.6226008", "0.6212373", "0.6197438", "0.6196351", "0.6167231", "0.6161099", "0.61067784", "0.6103885", "0.60868216", "0.6083847", "0.6049706", "0.6030463", "0.6021632", "0.59792507", "0.59776497", "0.5971575", "0.59695566", "0.5969402", "0.59569496", "0.5949019", "0.5921193", "0.59184474", "0.59114665", "0.59114665", "0.5903878", "0.58949184", "0.5891799", "0.5869714", "0.5865995", "0.58611643", "0.58544135", "0.585135", "0.5833785", "0.5831657", "0.5830375", "0.58199954", "0.58126146", "0.5794893", "0.5788109", "0.5782661", "0.5773339", "0.5760143", "0.57584697", "0.57578295", "0.5757703", "0.57510006", "0.5749636", "0.5738435", "0.5738338", "0.5734689", "0.5731924", "0.573077", "0.57215285", "0.5721522", "0.5719854", "0.5717592", "0.5713106", "0.5706481", "0.5705921", "0.57041854", "0.56917524", "0.5691226", "0.5684131", "0.56837326", "0.5677832", "0.5674178", "0.56718445", "0.564802", "0.56420857", "0.56387526", "0.5635597", "0.5632112", "0.56157625", "0.5615069", "0.5603202", "0.5599029", "0.5594454" ]
0.0
-1
Instruct the light to turn on.
def turn_on(self, **kwargs: Any) -> None:
        _LOGGER.debug("Turn on light %s %s", self._device.ip, kwargs)
        if not self.is_on:
            self._device.power_on = True

        if ATTR_BRIGHTNESS in kwargs and self.brightness != kwargs[ATTR_BRIGHTNESS]:
            self._device.brightness = kwargs[ATTR_BRIGHTNESS]

        if ATTR_COLOR_TEMP in kwargs and self.color_temp != kwargs[ATTR_COLOR_TEMP]:
            color_temp = mired_to_kelvin(kwargs[ATTR_COLOR_TEMP])
            self._device.color_temperature = color_temp
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_light_on(self):\r\n self._light = \"ON\"", "def set_light_on(self):\n self._light = \"ON\"", "def turn_on(self):\n GPIO.output(self.gpio, True) # turn on light", "def turn_on(self, **kwargs: Any) -> None:\n self._set_light(ON_STATE)", "def lightning_turnon(self):\n self.turnOn()", "def turn_on(self, **kwargs):\n self._brightness = 100\n self._state = 'on'\n #self._light.brightness = kwargs.get(ATTR_BRIGHTNESS, 255)\n #self._light.turn_on()\n _LOGGER.info(\"turn_on() is called\")", "def turn_on(self, **kwargs):\n self._is_on = True", "def turn_on(self, **kwargs: Any) -> None:\n self._light.brightness = kwargs.get(ATTR_BRIGHTNESS, 255)\n self._light.turn_on()", "def light_on(self, pin='D13'):\n self.light_set(pin, '1')", "def change_light(self):\n self._light_status = not self._light_status", "def turnLightingSystemOn():\n dislin.light('ON')", "def turn_on(self):\n self._interrupt_flash()\n if not self.on:\n GPIO.output(self.pin, GPIO.HIGH)\n self.on = True", "def turn_on(self, **kwargs):\n set_sonoff_state(self._host, \"on\")\n self._state = True", "def turn_on(self):\n self._remote.power(1)", "async def async_turn_on(self, **kwargs: Any) -> None:\n if (brightness := kwargs.get(ATTR_BRIGHTNESS)) is not None:\n # set the brightness, which will also turn on/off light\n if brightness == 255:\n brightness = 256 # this will end up as 16 which is max\n self._device.light_brightness = int(brightness / 16)\n else:\n self._device.light_on = True", "def turn_on(self):\n self._lms.query(self._id, 'power', '1')\n self.update_ha_state()", "def setLightSwitch(self, _state=False):\n if _state == True:\n render.setLight(self.lightNP)\n elif _state == False:\n render.clearLight(self.lightNP)", "def turn_on(self, **kwargs):\n self._send_command(\"turn_on\")", "def turn_on(self, **kwargs):\n self._state = True\n\n # Make initial update\n self.update_switch(self._initial_transition)\n\n self.schedule_update_ha_state()", "async def async_turn_on(self, **kwargs) -> None:\n self._state = await self._gate.turn_on_light(self._light_id)", "def turn_on(self, **kwargs: Any) -> None:\n with self._wemo_call_wrapper(\"turn on\"):\n self.wemo.on()", "def lighton(update: Update, context: CallbackContext) -> None:\n if __sauna.control.getPortValue(\"Light Sensor\") == 0:\n # TODO Mit Stromstossrelais ist dieser Code richtig\n # __sauna.control.togglePortValue(\"Light Switch\")\n update.message.reply_text(\"Light is on\")\n else:\n update.message.reply_text(\"Light was already on\")\n\n __sauna.control.setPortValue(\"Light Switch\")\n val = __sauna.control.getPortValue(\"Light Switch\")\n update.message.reply_text(\"Light Switch := \" + str(val))", "def turnLightOn(ID):\n dislin.litmod(ID, 'ON')", "def turnOn(self):\n self.off = False\n self.turnOnAnimation()", "def light_action():\n if light_btn.isChecked():\n self.variables.default_values_dict[\"settings\"][\"external_lights\"] = True\n else:\n self.variables.default_values_dict[\"settings\"][\n \"external_lights\"\n ] = False", "def turn_on(self, **kwargs):\n self.smartplug.turn_on()", "def _turn_on(self):\n self._turn_display('ON')", "def turn_on(self) -> None:\n self._state = self._player.turn_on()", "def turn_on(self, **kwargs) -> None:\n self.heater.turn_on()", "def on(self):\n self._set_state(on=True)", "def turn_on(self, **kwargs):\n brightness_pct = 100\n if kwargs.get(ATTR_BRIGHTNESS):\n brightness_pct = \\\n brightness_to_percentage(int(kwargs.get(ATTR_BRIGHTNESS)))\n elif self._is_dimmable:\n brightness_pct = 101 # Sets the light to last known 
brightness.\n self._client.set_brightness(self._id, brightness_pct)", "def _set_light(self, new_state):\n try:\n self._device.lights = new_state\n except requests.Timeout:\n _LOGGER.error(\"Time out setting %s light to %s\", self.entity_id, new_state)\n return\n\n self._light_on = new_state == ON_STATE\n self._no_updates_until = dt_util.utcnow() + SKIP_UPDATES_DELAY\n self.async_write_ha_state()", "def turn_on(self):\n self._state = True\n self.write_state(bytes([9]))\n self.schedule_update_ha_state()", "def turn_on(self, **kwargs: Any) -> None:\n if self._dimmable:\n level = kwargs.get(ATTR_BRIGHTNESS, self._last_brightness)\n else:\n level = 255\n self._light.turn_on(to_futurenow_level(level))", "def flicker_lights(self):\n print 'Lights Set'", "def turn_on(self, **kwargs):\n self._is_on = True\n self.schedule_update_ha_state()\n self.hass.data[ZIGATE_DOMAIN].action_onoff(self._device.addr,\n self._endpoint,\n 1)", "def set_light_off(self):\r\n self._light = \"OFF\"", "def turn_on(self, **kwargs):\n self._state = True\n if(self._device['type'] == '_DT-PLUG' or self._device['type'] == '_THIMR'):\n self._send_cmd(self._device,'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"op\":1 }', 5)\n if(self._device['type'] == '_REALY2' or self._device['type'] == '_REALY4'):\n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"'+ self._data_key +'\":1 }', 5)", "def turn_on(self, **kwargs) -> None:\n self._device.writeCharacteristic(self._handle, b'\\x00', True)\n self._state = True\n self.schedule_update_ha_state()", "def turn_on(self, **kwargs):\n onValue = str((kwargs.get(ATTR_BRIGHTNESS, int(self._brightness))/255)*100)\n request = requests.post(self._resource,\n data=onValue,\n timeout=10)\n if (request.status_code == 200) or (request.status_code == 201):\n self._state = True\n else:\n _LOGGER.info(\"HTTP Status Code: %s\", request.status_code)\n _LOGGER.error(\"Can't turn on %s. 
Is resource/endpoint offline?\", self._resource)\n\n self.schedule_update_ha_state()", "async def light(self) -> None:\n self.lit = True\n await self.run_command(\"miner fault_light on\")\n print(\"light \" + self.ip)", "def turn_on(self) -> None:\n self._monoprice.set_power(self._zone_id, True)", "def turn_on(self):\n self._data.homestatus.setroomThermpoint(\n self._data.home_id, self._room_id, STATE_NETATMO_HOME\n )\n self.update_without_throttle = True\n self.schedule_update_ha_state()", "def turn_on(self, **kwargs):\n _LOGGER.error(\"DALI TURN ON\")\n\n self._state = True\n\n if ATTR_BRIGHTNESS in kwargs:\n _LOGGER.error(kwargs[ATTR_BRIGHTNESS])\n\n bri = kwargs[ATTR_BRIGHTNESS]\n\n if bri == 0:\n self._state = False\n else:\n bri = int(bri / 1.5)\n _LOGGER.error(bri)\n\n\n url = self.urlx + '/dimset?bri=' + str(bri)\n headers = {'x-ha-access': 'raspberry',\n 'content-type': 'application/json'}\n\n response = get(url, headers=headers)\n _LOGGER.error(response.text)\n\n json_data = json.loads(response.text)\n _LOGGER.error(json_data)\n\n self._dimmer = kwargs[ATTR_BRIGHTNESS]\n\n else:\n url = self.urlx + '/toggle'\n headers = {'x-ha-access': 'raspberry',\n 'content-type': 'application/json'}\n\n response = get(url, headers=headers)\n _LOGGER.error(response.text)\n\n json_data = json.loads(response.text)\n _LOGGER.error(json_data)\n\n state = json_data['state']\n self._dimmer = 255\n self._state = state == 'on'", "def turn_on(self, **kwargs):\n self.enabled = self.fritz_box.set_call_forwarding(self.uid, 1)", "def set_light_off(self):\n self._light = \"OFF\"", "async def async_set_light(self, light, state_mode):\n if light not in ON_OFF_LIST:\n return\n self._light = light.lower()\n self._state_mode = state_mode\n await self.async_send_cmd()", "def turn_on(self, r=None, g=None, b=None, brightness=None):\n print(\"Got request to turn on the lights on with values: (r=%s, g=%s, b=%s, brightness=%s)\" % (r, g, b, brightness))\n if r is not None:\n self.r = r\n if g is not None:\n self.g = g\n if b is not None:\n self.b = b\n if brightness is not None:\n self.brightness = brightness\n print(\"Turning on lights on with values: (r=%s, g=%s, b=%s, brightness=%s)\" % (self.r, self.g, self.b, self.brightness))\n self.led.fill(Color(self.r,self.g,self.b, self.brightness))\n\n self.led.update()\n self.client.publish(STATE_TOPIC, ON) #publish", "def ON(self):\n GPIO.setmode(GPIO.BOARD)\n GPIO.setup(self.PIN, GPIO.OUT)\n GPIO.output(self.PIN, True)\n self.STATUS = \"ON\"", "def force_switch_on(self):\n self.turn_on_modem()", "def set_led(self, on=True):\n if on:\n GPIO.output(self.LED, GPIO.HIGH)\n else:\n GPIO.output(self.LED, GPIO.LOW)", "def turn_on(self, **kwargs):\n setattr(self.resource, self.variable, True)", "def toggle_lights(bridge):\n if check_any_light_on(bridge):\n turn_off_lights(bridge)\n else:\n turn_on_lights(bridge)", "def turn_on_lights(bridge):\n for light in bridge.lights:\n bridge.set_light(light.light_id, {'ct': 350, 'bri': 254, 'on': True})", "def turnOn(self):\n self.write('E;O1;E;')\n return self.output()", "def switch_on(self,name):\n self.circles[name].switch_on()\n self.cursor.execute(\"\"\"UPDATE sensors_powersensor SET state=1 WHERE target=%s\"\"\", (name,))", "def switch_on(self):\n if threading.current_thread() != self._blinking_thread:\n self._blinking_thread.unregister(self)\n GPIO.output(self.pin, GPIO.HIGH)", "def turn_on(self, **kwargs):\n default_hs = (0, 0) if self._hs_color is None else self._hs_color\n hue_sat = kwargs.get(ATTR_HS_COLOR, default_hs)\n\n 
default_brightness = 0 if self._brightness is None else self._brightness\n brightness = kwargs.get(ATTR_BRIGHTNESS, default_brightness)\n\n default_white_value = 255 if self._white_value is None else self._white_value\n white_value = kwargs.get(ATTR_WHITE_VALUE, default_white_value)\n\n if brightness == 0 and white_value == 0 and not kwargs:\n # If the light would be off, and no additional parameters were\n # passed, just turn the light on full brightness.\n brightness = 255\n white_value = 255\n\n rgb = color_util.color_hsv_to_RGB(*hue_sat, brightness / 255 * 100)\n\n self._light.set_color(*rgb, white_value)", "async def async_turn_on(self, **kwargs: Any) -> None:\n self._is_on = True\n await self.enable_rain_delay()", "def turn_on(self, **kwargs: Any) -> None:\n if self.type == \"on_off\":\n _LOGGING.debug(\"Starting all torrents\")\n self._tm_client.api.start_torrents()\n elif self.type == \"turtle_mode\":\n _LOGGING.debug(\"Turning Turtle Mode of Transmission on\")\n self._tm_client.api.set_alt_speed_enabled(True)\n self._tm_client.api.update()", "def on(self):\n print(f\"RF {self.name} on\")\n self.status(True)", "def turn_on(self, **kwargs):\n self.set_graceful_lock(True)\n self.robot.start_cleaning()", "async def Turn_On_Light(\n light_id: int = Path(..., title=\"Numeric light identifier\", ge=0),\n) -> Dict[str, Any]:\n busylightapi.manager.light_on(light_id)\n return {\n \"action\": \"on\",\n \"light_id\": light_id,\n \"color\": \"green\",\n }", "def turn_on(\n self,\n speed: str = None,\n percentage: int = None,\n preset_mode: str = None,\n **kwargs,\n ) -> None:\n self.wink.set_state(True, speed)", "def turn_on(self, **kwargs):\n if (CommandSwitch._switch(self._command_on) and\n not self._command_state):\n self._state = True\n self.schedule_update_ha_state()\n if ATTR_BRIGHTNESS in kwargs:\n self._brightness = kwargs[ATTR_BRIGHTNESS]\n self.schedule_update_ha_state()\n if ATTR_RGB_COLOR in kwargs:\n self._color = kwargs[ATTR_RGB_COLOR]\n self.schedule_update_ha_state()\n # White is a special case.\n if min(self._color) > 256 - RGB_BOUNDARY:\n self._color = WHITE\n self.schedule_update_ha_state()\n if ATTR_EFFECT in kwargs:\n if kwargs[ATTR_EFFECT] == EFFECT_COLORLOOP:\n self.repeating = True\n pipeline.append(COLORLOOP)\n if kwargs[ATTR_EFFECT] == EFFECT_WHITE:\n pipeline.white()\n self._color = WHITE", "def on(self):\n if not self._is_on:\n self._pwms.enable(self._pin_index, self._frequency)\n self._is_on = True", "async def async_turn_on(self, **kwargs):\n try:\n if await self._api.set_relay_state(self._dev_id, \"on\"):\n self._is_on = True\n self.async_write_ha_state()\n except Smile.PlugwiseError:\n _LOGGER.error(\"Error while communicating to device\")", "def turn_on(self):\n self.write(\"OUT1\\n\")", "async def async_turn_on(self, **kwargs: Any) -> None:\n self.entity_description.on_off_fn(self._valve, True)\n await self._device.push_state()\n self.async_write_ha_state()", "def _turn_off_light(self, light):\n self.bridge.set_light(light, 'on', False)\n return True", "async def async_turn_on(self, **kwargs: Any) -> None:\n await self._set_state(True)", "def light_is_on(self):\r\n return self._light == \"ON\"", "async def async_turn_on(self, **kwargs: Any) -> None:\n await self._switch.async_on()\n self._attr_is_on = True\n self.async_write_ha_state()", "def enable(self):\n self.switch.enable()\n self._enabled = True", "def turn_on(self, **kwargs):\n if ATTR_BRIGHTNESS in kwargs:\n brightness = int(kwargs[ATTR_BRIGHTNESS] / 255 * 99)\n self._lj.activate_load_at(self._index, 
brightness, 0)\n else:\n self._lj.activate_load(self._index)", "def toggle(self):\n self._interrupt_flash()\n GPIO.output(self.pin, GPIO.LOW if self.on else GPIO.HIGH)\n self.on = not self.on", "def set_light_mode(self, is_lid):\n raise NotImplementedError()", "async def async_turn_on(self) -> None:\n self._zone.power = True", "async def async_turn_on(self, **kwargs: Any) -> None:\n await self.call_state_change(\"open\")", "async def async_turn_on(self, **kwargs: Any) -> None:\n if (color_temp := kwargs.get(ATTR_COLOR_TEMP)) is not None:\n self._device.light_color_temp = color_temperature_mired_to_kelvin(\n color_temp\n )\n await super().async_turn_on(**kwargs)", "def affection_status_switch_on(self):\n self._affection_status_switch = True", "async def async_turn_off(self, **kwargs: Any) -> None:\n self._device.light_on = False", "async def async_turn_on(self, **kwargs):\n if self._switch_type == \"record_motion\":\n _LOGGER.debug(\"Turning on Motion Detection\")\n await self.upv.set_camera_recording(self._camera_id, TYPE_RECORD_MOTION)\n elif self._switch_type == \"record_always\":\n _LOGGER.debug(\"Turning on Constant Recording\")\n await self.upv.set_camera_recording(self._camera_id, TYPE_RECORD_ALLWAYS)\n else:\n _LOGGER.debug(\"Turning on IR\")\n await self.upv.set_camera_ir(self._camera_id, self._ir_on_cmd)\n await self.coordinator.async_request_refresh()", "def turn_off(self, **kwargs: Any) -> None:\n self._device.power_on = False\n _LOGGER.debug(\"Turn off light %s\", self._device.ip)", "async def async_turn_on(self, **kwargs: Any) -> None:\n\n self._attr_is_on = await self.relay.set_active(True)\n self.async_write_ha_state()", "async def async_turn_on(self, **kwargs):\n if self._switch_type == \"record_motion\":\n _LOGGER.debug(f\"Turning on Motion Detection for {self._name}\")\n await self.upv.set_camera_recording(self._camera_id, TYPE_RECORD_MOTION)\n elif self._switch_type == \"record_always\":\n _LOGGER.debug(\"Turning on Constant Recording\")\n await self.upv.set_camera_recording(self._camera_id, TYPE_RECORD_ALWAYS)\n elif self._switch_type == \"record_smart\":\n _LOGGER.debug(\"Turning on SmartDetect Recording\")\n await self.upv.set_camera_recording(\n self._camera_id, TYPE_RECORD_SMARTDETECT\n )\n elif self._switch_type == \"ir_mode\":\n _LOGGER.debug(\"Turning on IR\")\n await self.upv.set_camera_ir(self._camera_id, self._ir_on_cmd)\n elif self._switch_type == \"hdr_mode\":\n _LOGGER.debug(\"Turning on HDR mode\")\n await self.upv.set_camera_hdr_mode(self._camera_id, True)\n elif self._switch_type == \"high_fps\":\n _LOGGER.debug(\"Turning on High FPS mode\")\n await self.upv.set_camera_video_mode_highfps(self._camera_id, True)\n else:\n _LOGGER.debug(\"Changing Status Light to On\")\n await self.upv.set_camera_status_light(self._camera_id, True)\n await self.protect_data.async_refresh(force_camera_update=True)", "def light_standby():\n for led in leds:\n led.on()\n\n rgb_driver.pulse(on_color=(scale[\"R\"], scale[\"G\"], scale[\"B\"]), off_color=(0,0,0))", "def turn_on(self, **kwargs):\n if not self.is_on:\n _LOGGER.debug(\"Sending START command to: %s\", self._name)\n self._api.control('START')\n self._mower_status = STATUS_EXECUTING_START\n self.schedule_update_ha_state()", "def turn_on(self, **kwargs):\n _LOGGER.debug(\"Turning on Motion Detection \")\n self.data.set_camera_recording(self._camera_id, \"motion\")", "def flicker_lights(self):\n if self.lighting:\n self.lighting = False\n else:\n self.lighting = True\n self.redraw()", "def flicker_lights(self):\n if 
self.lighting:\n self.lighting = False\n else:\n self.lighting = True\n self.redraw()", "def set_lighting(self):\n lightPosition = [-1, 1, 1, 0]\n glLightfv(GL_LIGHT0, GL_POSITION, lightPosition)\n\n ambientLight = [1, 1, 0.4, 0.5]\n\n if self.lighting:\n glEnable(GL_LIGHTING)\n glEnable(GL_LIGHT0)\n glLightfv(GL_LIGHT0, GL_AMBIENT, ambientLight)\n else:\n glDisable(GL_LIGHTING)\n glDisable(GL_LIGHT0)", "async def async_turn_on(self, **kwargs):\n await self.data.set_appliance_state(self.appliance_id, True)\n return True", "async def Turn_On_Lights() -> Dict[str, Any]:\n busylightapi.manager.light_on(ALL_LIGHTS)\n return {\n \"action\": \"on\",\n \"light_id\": \"all\",\n \"color\": \"green\",\n }", "def action_on_light_by_id(bridge, light_id, action):\n if action == 'on':\n bridge.set_light(light_id, 'on', True)\n elif action == 'off':\n bridge.set_light(light_id, 'on', False)\n elif action == 'toggle':\n current_state = bridge.get_light(light_id, 'on')\n bridge.set_light(light_id, 'on', not current_state)\n click.secho(\n 'Turning %s light %s!' % (bridge.get_light(light_id, 'name'),\n get_state(not current_state)),\n fg='green')\n\n return", "async def async_turn_on(self, **kwargs: Any) -> None:\n _LOGGER.debug(\"Tried to switch on %s\", self.name)\n try:\n await self.hass.async_add_executor_job(\n self.device.appliance.set_setting, BSH_POWER_STATE, BSH_POWER_ON\n )\n except HomeConnectError as err:\n _LOGGER.error(\"Error while trying to turn on device: %s\", err)\n self._state = False\n self.async_entity_update()", "async def async_turn_on(self, **kwargs: Any) -> None:\n\n self._previous_mic_level = self.device.mic_volume\n self._previous_record_mode = self.device.recording_settings.mode\n await self.device.set_privacy(True, 0, RecordingMode.NEVER)", "def set_light_status(self, new_light_status):\n if type(new_light_status) != bool:\n self._logger.write(\"Error! new_light_status should be of type bool\")\n try:\n self._light_status = new_light_status\n except Exception as e:\n self._logger.write(\"Error! could not set light status\")", "async def async_turn_on(self, **kwargs):\n try:\n state_on = await self._api.set_relay_state(\n self._dev_id, self._members, \"on\"\n )\n if state_on:\n self._is_on = True\n self.async_write_ha_state()\n except PlugwiseException:\n _LOGGER.error(\"Error while communicating to device\")", "def light_is_on(self):\n return self._light == \"ON\"" ]
[ "0.8915807", "0.8871167", "0.88496405", "0.88148886", "0.86926144", "0.8564589", "0.82041657", "0.819282", "0.8009568", "0.7997265", "0.798326", "0.7978372", "0.7975517", "0.7933295", "0.79127806", "0.7911189", "0.7742512", "0.7726695", "0.76945895", "0.76829916", "0.7646578", "0.7643466", "0.7640809", "0.7627225", "0.7620087", "0.76199174", "0.76092124", "0.7608499", "0.76000535", "0.75837976", "0.7562227", "0.7524245", "0.751268", "0.75114685", "0.7467902", "0.7458313", "0.7421022", "0.74007356", "0.7396485", "0.7360607", "0.7349505", "0.7339886", "0.7339616", "0.7313728", "0.73012143", "0.7260652", "0.7259565", "0.72472787", "0.7221646", "0.7220681", "0.721153", "0.7198675", "0.71978813", "0.7194892", "0.7173827", "0.7172893", "0.71572083", "0.71189517", "0.7118656", "0.71128887", "0.7112357", "0.7107832", "0.708906", "0.70810884", "0.70795757", "0.7060113", "0.7032292", "0.7007245", "0.7000513", "0.6985608", "0.695305", "0.6952757", "0.6947298", "0.6936467", "0.6928918", "0.6928403", "0.69236827", "0.69184935", "0.69106096", "0.6907698", "0.6906771", "0.6901914", "0.6901417", "0.6893052", "0.6889541", "0.6887449", "0.68864244", "0.6882976", "0.6876904", "0.68759584", "0.68759584", "0.6875171", "0.6872026", "0.68451", "0.6833417", "0.68309754", "0.6828297", "0.6817739", "0.6792112", "0.67909056" ]
0.7831892
16
Instruct the light to turn off.
def turn_off(self, **kwargs: Any) -> None:
        self._device.power_on = False
        _LOGGER.debug("Turn off light %s", self._device.ip)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def turn_off(self):\n GPIO.output(self.gpio, False) # turn off light", "def set_light_off(self):\n self._light = \"OFF\"", "def set_light_off(self):\r\n self._light = \"OFF\"", "def lightning_turnoff(self):\n self.turnOff()", "def turn_off(self, **kwargs: Any) -> None:\n self._set_light(OFF_STATE)", "def turn_off(self, **kwargs: Any) -> None:\n self._light.turn_off()", "def turn_off(self, **kwargs):\n #self._light.turn_off()\n self._brightness = 0\n self._state = 'off'\n _LOGGER.info(\"turn_off() is called\")", "def turn_off(self, **kwargs):\n self._light.set_color(0, 0, 0, 0)", "def _turn_off_light(self, light):\n self.bridge.set_light(light, 'on', False)\n return True", "def turn_off(self):\n print(\"Turning the lights off\")\n self.led.all_off()\n self.client.publish(STATE_TOPIC, OFF) #publish", "def turn_off(self, **kwargs) -> None:\n self.wink.set_state(False)", "async def async_turn_off(self, **kwargs: Any) -> None:\n self._device.light_on = False", "def turn_off(self, **kwargs: Any) -> None:\n self._light.turn_off()\n if self._brightness:\n self._last_brightness = self._brightness", "def turn_off(self, **kwargs):\n self._is_on = False", "def turn_off(self, **kwargs):\n self._client.set_brightness(self._id, 0)", "def turn_off(self, **kwargs):\n set_sonoff_state(self._host, \"off\")\n self._state = False", "def turn_off(self):\n self.handleCommand(1)\n self._state = STATE_OFF", "def turn_off(self):\n self._interrupt_flash()\n if self.on:\n GPIO.output(self.pin, GPIO.LOW)\n self.on = False", "def turn_off(self, **kwargs):\n self._state = False\n self.schedule_update_ha_state()\n self._hs_color = None\n self._attributes[\"hs_color\"] = self._hs_color\n self._attributes[\"brightness\"] = None", "def turnLightingSystemOff():\n dislin.light('OFF')", "def light_off(self, pin='D13'):\n self.light_set(pin, '0')", "def off(self):\n for light in self.all:\n GPIO.output(light, 0)", "def _turn_off(self):\n self._turn_display('OFF')", "def turnOff(self):\n self.off = True\n self.turnOffAnimation()", "def turn_off(self, **kwargs):\n self._send_command(\"turn_off\")", "def turn_off(self, **kwargs):\n self.smartplug.turn_off()", "def turn_off(self, **kwargs: Any) -> None:\n with self._wemo_call_wrapper(\"turn off\"):\n self.wemo.off()", "def off(self):\n self._set_state(on=False)", "def turnOff(self):\n self.write(\"E;O0;E;\")\n return self.output()", "def turn_off(self, **kwargs):\n _LOGGER.error(\"DALI TURN OFF\")\n self._state = False\n\n url = self.urlx + '/toggle'\n headers = {'x-ha-access': 'raspberry',\n 'content-type': 'application/json'}\n\n response = get(url, headers=headers)\n _LOGGER.error(response.text)\n\n json_data = json.loads(response.text)\n _LOGGER.error(json_data)\n\n state = json_data['state']\n\n self._dimmer = 0\n\n self._state = state == 'on'", "def turn_off(self, **kwargs: Any) -> None:\n if (\n DPCODE_LIGHT in self.tuya_device.status\n and DPCODE_SWITCH not in self.tuya_device.status\n ):\n commands = [{\"code\": DPCODE_LIGHT, \"value\": False}]\n else:\n commands = [{\"code\": DPCODE_SWITCH, \"value\": False}]\n self._send_command(commands)", "def turnLightOff(ID):\n dislin.litmod(ID, 'OFF')", "def turn_off(self) -> None:\n self._monoprice.set_power(self._zone_id, False)", "async def unlight(self) -> None:\n self.lit = False\n await self.run_command(\"miner fault_light off\")\n print(\"unlight\" + self.ip)", "async def async_turn_off(self, **kwargs) -> None:\n self._state = await self._gate.turn_off_light(self._light_id)", "def turn_off(self, **kwargs):\n 
self.heater.turn_off()", "def switch_off(self):\n if threading.current_thread() != self._blinking_thread:\n self._blinking_thread.unregister(self)\n GPIO.output(self.pin, GPIO.LOW)", "def turn_off(self):\n self._state = False\n self.write_state(bytes([1]))\n self.schedule_update_ha_state()", "def turn_off(self, **kwargs):\n setattr(self.resource, self.variable, False)", "def turn_off(self):\n self._state = False\n if(self._device['type'] == '_DT-PLUG' or self._device['type'] == '_THIMR'):\n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"op\":0 }', 5)\n if(self._device['type'] == '_REALY2' or self._device['type'] == '_REALY4'): \n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"'+ self._data_key +'\":0 }', 5)", "def turn_off(self):\n self.set_pin(0, -1)\n self.set_pin(1, -1)\n self.set_pin(2, -1)", "def turn_off(self):\n self.robot.stop_simulation()", "def off_switch(self):\n self._switch_callback = None", "def turn_off(self) -> None:\n self._media_title = None\n self._state = self._player.turn_off()", "def off(self):\n print(f\"RF {self.name} off\")\n self.status(False)", "def turn_off(self, **kwargs: Any) -> None:\n if self.type == \"on_off\":\n _LOGGING.debug(\"Stopping all torrents\")\n self._tm_client.api.stop_torrents()\n if self.type == \"turtle_mode\":\n _LOGGING.debug(\"Turning Turtle Mode of Transmission off\")\n self._tm_client.api.set_alt_speed_enabled(False)\n self._tm_client.api.update()", "def turn_off(self):\n if self._module_type == NA_VALVE:\n self._data.homestatus.setroomThermpoint(\n self._data.home_id,\n self._room_id,\n STATE_NETATMO_MANUAL,\n DEFAULT_MIN_TEMP,\n )\n elif self.hvac_mode != HVAC_MODE_OFF:\n self._data.homestatus.setroomThermpoint(\n self._data.home_id, self._room_id, STATE_NETATMO_OFF\n )\n self.update_without_throttle = True\n self.schedule_update_ha_state()", "def turn_off(self, **kwargs):\n if (CommandSwitch._switch(self._command_off) and\n not self._command_state):\n self._state = False\n self.schedule_update_ha_state()", "def disable(self):\n self.direction = None # remove direction\n self.state['enabled'] = False # reset states\n self.state['return'] = False\n self.return_path = None # remove path\n if self.state['blue']:\n self.stop_blue_state(resume_audio=False)\n self.image, _ = self.norm_images.get_image() # reset image\n self.sound_manager.stop()", "def turn_off(self, **kwargs):\n if self.is_on:\n _LOGGER.debug(\"Sending STOP command to: %s\", self._name)\n self._api.control('STOP')\n self._mower_status = STATUS_EXECUTING_STOP\n self.schedule_update_ha_state()", "def turn_off(self, **kwargs):\n self.vacuum.stop()\n self.vacuum.home()", "def turn_off(self, **kwargs):\n _LOGGER.debug(\"Turning off Motion Detection \")\n self.data.set_camera_recording(self._camera_id, \"never\")", "def turn_off(self, **kwargs):\n self.enabled = self.fritz_box.set_call_forwarding(self.uid, 0)", "def turn_off(self, **kwargs):\n self._is_on = False\n self.schedule_update_ha_state()\n self.hass.data[ZIGATE_DOMAIN].action_onoff(self._device.addr,\n self._endpoint,\n 0)", "def turn_off(self, **kwargs):\n self.robot.pause_cleaning()\n time.sleep(1)\n self.robot.send_to_base()", "def lightoff(update: Update, context: CallbackContext) -> None:\n if __sauna.control.getPortValue(\"Light Sensor\") == 1:\n # TODO Mit Stromstossrelais ist dieser Code richtig\n # __sauna.control.togglePortValue(\"Light Switch\")\n update.message.reply_text(\"Light is off\")\n else:\n 
update.message.reply_text(\"Light was already off\")\n\n __sauna.control.resetPortValue(\"Light Switch\")\n val = __sauna.control.getPortValue(\"Light Switch\")\n update.message.reply_text(\"Light Switch := \" + str(val))", "def off(self):\n if self._is_on:\n self._pwms.disable(self._pin_index)\n self._is_on = False", "async def Turn_Off_Lights() -> Dict[str, Any]:\n busylightapi.manager.light_off(ALL_LIGHTS)\n return {\n \"action\": \"off\",\n \"light_id\": \"all\",\n }", "def turn_off(self, **kwargs):\n self._lj.deactivate_load(self._index)", "def switch_off(self,name):\n self.circles[name].switch_off()\n self.cursor.execute(\"\"\"UPDATE sensors_powersensor SET state=0 WHERE target=%s\"\"\", (name,))", "def turnOffEnvirLights(self):\n c_offEnvirLights = self.layer.createCollection(\"c_OffEnvirLights\")\n c_offEnvirLights.getSelector().setFilterType(4)\n c_offEnvirLights.getSelector().setPattern(\"*LIGHTS_ENVIR*\")\n o_offEnvirLgtVisibility = c_offEnvirLights.createOverride(\"offEnvirLgtVisibility\", override.AbsOverride.kTypeId)\n \n lgtEnvGrp = str(cmds.ls(\"*LIGHTS_ENVIR*\")[0])\n \n o_offEnvirLgtVisibility.finalize(lgtEnvGrp+\".visibility\")\n o_offEnvirLgtVisibility.setAttrValue(0)", "def turn_off(self, **kwargs):\n self._attributes['current_speed'] = SPEED_OFF\n self._bond.turnOff(self._deviceId)", "async def async_turn_off(self, **kwargs: Any) -> None:\n self._is_on = False\n await self.disable_rain_delay()", "async def async_turn_off(self, **kwargs: Any) -> None:\n self._device.fan_on = False", "async def Turn_Off_Light(\n light_id: int = Path(..., title=\"Numeric light identifier\", ge=0)\n) -> Dict[str, Any]:\n busylightapi.manager.light_off(light_id)\n return {\n \"action\": \"off\",\n \"light_id\": light_id,\n }", "def off_all(self):\n self._set_status(\"off\", \"11111111\")", "def turn_off(self) -> None:\n self._get_chromecast().quit_app()", "async def async_turn_off(self, **kwargs: Any) -> None:\n await self._set_state(False)", "async def async_turn_off(self, **kwargs):\n await self.data.set_appliance_state(self.appliance_id, False)\n return True", "async def async_turn_off(self) -> None:\n self._zone.power = False", "async def async_turn_off(self, **kwargs):\n await self._endpoint.on_off.off()\n self._state = False", "def turn_off(self, **kwargs) -> None:\n self._device.writeCharacteristic(self._handle, b'\\x01', True)\n self._state = False\n self.schedule_update_ha_state()", "def set_Off(self):\n if not(self._locked):\n self.__dict__['statusOn']=False\n self._undo_action()\n else:\n self._log.info('The JobProperty %s is blocked', self.__name__)", "def switch_off(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "async def async_turn_off(self, **kwargs: Any) -> None:\n await self._switch.async_off()\n self._attr_is_on = False\n self.async_write_ha_state()", "def switch_off_traffic_lights(self):\n for actor in self.world.get_actors():\n if actor.type_id == 'traffic.traffic_light':\n actor.freeze(True)\n # We set the traffic light to 'green' because 'off' state sets the traffic light to\n # 'red'.\n actor.set_state(carla.TrafficLightState.Green)", "async def async_turn_off(self, **kwargs: Any) -> None:\n self._attr_is_on = await self.relay.set_active(False)\n self.async_write_ha_state()", "def turn_off(self, **kwargs):\n request = requests.post(self._resource, data=\"0\", timeout=10)\n if (request.status_code == 200) or (request.status_code == 201):\n self._state = False\n else:\n _LOGGER.error(\"Can't turn off %s. 
Is resource/endpoint offline?\",\n self._resource)\n\n self.schedule_update_ha_state()", "def turn_off(self):\n self.write(\"OUT0\\n\")", "def change_light(self):\n self._light_status = not self._light_status", "def stop(self, **kwargs):\n self.turn_off()", "async def async_turn_off(self, **kwargs):\n if self._switch_type == \"ir_mode\":\n _LOGGER.debug(\"Turning off IR\")\n await self.upv.set_camera_ir(self._camera_id, self._ir_off_cmd)\n else:\n _LOGGER.debug(\"Turning off Recording\")\n await self.upv.set_camera_recording(self._camera_id, TYPE_RECORD_NEVER)\n await self.coordinator.async_request_refresh()", "def all_off():\n Leds.red_left.brightness = 0\n Leds.red_right.brightness = 0\n Leds.green_left.brightness = 0\n Leds.green_right.brightness = 0\n Leds.blue_left.brightness = 0\n Leds.blue_right.brightness = 0", "def off(self):\n if self._state or (settings.log_state_of_switched_off_managers and self._state is None):\n if self._hidden:\n self.log_state_change('H')\n else:\n self.log_state_change('-')\n self._state = False", "def stop(self):\n self.turnOffMotors()", "async def async_turn_off(self, **kwargs: Any) -> None:\n\n await self.entity_description.ufp_set(self.device, False)", "async def async_turn_off(self, **kwargs: Any) -> None:\n\n await self.entity_description.ufp_set(self.device, False)", "async def async_turn_off(self, **kwargs):\n try:\n if await self._api.set_relay_state(self._dev_id, \"off\"):\n self._is_on = False\n self.async_write_ha_state()\n except Smile.PlugwiseError:\n _LOGGER.error(\"Error while communicating to device\")", "async def async_turn_off(self, **kwargs: Any) -> None:\n await self.stop_watering()", "def disable(self):\n self.enabled = False", "async def async_turn_off(self) -> None:\n await self._device.enter_standby()", "def setOff(self, command):\r\n self.setDriver('ST', 0)", "def ToggleOff(self):\n for s in self.sensors:\n self.gSetpt[s.GetID()].Disable()\n\n self.top_sizer.Layout()\n print(\"Toggle off\")", "async def async_turn_off(self, **kwargs: Any) -> None:\n await self.async_turn_on_off(False)", "async def async_turn_off(self):\n await self.async_mute_volume(True)", "def turn_away_mode_off(self):\n self._away = 0\n self._api._awaymode = self.away\n self._api.set()\n self.schedule_update_ha_state()", "def turn_off(self, **kwargs) -> None:\n _LOGGER.debug(\n \"SynoDSMSurveillanceHomeModeToggle.turn_off(%s)\",\n self._api.information.serial,\n )\n self._api.surveillance_station.set_home_mode(False)", "def stop(self) -> None:\n turnOffMotors()", "def disable(self):\n self._enabled = False", "def off(config: dict):\n switch_device(config, config[\"inching\"], \"off\")" ]
[ "0.9141917", "0.89695406", "0.89557284", "0.89015293", "0.8897358", "0.8873913", "0.8805522", "0.87862444", "0.86670524", "0.86476165", "0.847877", "0.8473624", "0.84669805", "0.8463841", "0.8435901", "0.8370482", "0.83494097", "0.8275936", "0.82717854", "0.82705396", "0.82532924", "0.82187337", "0.8217621", "0.8184601", "0.8083128", "0.80586106", "0.80510616", "0.80149883", "0.79797316", "0.7962505", "0.7956067", "0.79069495", "0.7893334", "0.7857711", "0.78368104", "0.78194183", "0.7805929", "0.7770012", "0.7758722", "0.77266425", "0.7717671", "0.768302", "0.76624376", "0.76486105", "0.76409245", "0.7620356", "0.75884205", "0.7588156", "0.7568105", "0.7547861", "0.75426954", "0.7539952", "0.75355875", "0.75304496", "0.7529129", "0.75176084", "0.75140625", "0.74943256", "0.7490942", "0.7485253", "0.74819356", "0.74682766", "0.74621415", "0.74033755", "0.73895", "0.7388273", "0.73769015", "0.73580277", "0.7337677", "0.73128504", "0.72962236", "0.72939336", "0.7287377", "0.72776794", "0.7272516", "0.72687864", "0.7260809", "0.72483015", "0.7237548", "0.7227871", "0.7196511", "0.71915615", "0.7186259", "0.71804136", "0.7155737", "0.71522707", "0.71522707", "0.715101", "0.71012276", "0.7100655", "0.7098918", "0.708679", "0.70831513", "0.70658827", "0.7060946", "0.70559573", "0.7055395", "0.7052322", "0.7045605", "0.7045487" ]
0.87764317
8
Synchronize state with light.
def update(self) -> None:
        prev_available = self.available
        self._device.update()
        if (
            prev_available == self.available
            and self._is_on == self._device.power_on
            and self._brightness == self._device.brightness
            and self._color_temp == self._device.color_temperature
        ):
            return

        if not self.available:
            _LOGGER.debug("Light %s is offline", self._device.ip)
            return

        self._is_on = self._device.power_on
        self._brightness = self._device.brightness
        self._color_temp = self._device.color_temperature

        if not self.is_on:
            _LOGGER.debug("Update light %s success: power off", self._device.ip)
        else:
            _LOGGER.debug(
                "Update light %s success: power on brightness %s color temperature %s",
                self._device.ip,
                self._brightness,
                self._color_temp,
            )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def light_sync(self):", "async def async_update(self):\n self._state = await self._gate.is_light_on(self._light_id)", "def update(self) -> None:\n self._light.update()\n self._state = self._light.is_on()\n self._brightness = self._light.brightness", "def lock (self):\n self.locked = True\n self._changed = False", "async def async_set_light(self, light, state_mode):\n if light not in ON_OFF_LIST:\n return\n self._light = light.lower()\n self._state_mode = state_mode\n await self.async_send_cmd()", "def change_light(self):\n self._light_status = not self._light_status", "def sync() -> None:", "def update(self) -> None:\n state = int(self._light.is_on())\n self._state = bool(state)\n self._brightness = to_hass_level(state)", "def _set_light(self, new_state):\n try:\n self._device.lights = new_state\n except requests.Timeout:\n _LOGGER.error(\"Time out setting %s light to %s\", self.entity_id, new_state)\n return\n\n self._light_on = new_state == ON_STATE\n self._no_updates_until = dt_util.utcnow() + SKIP_UPDATES_DELAY\n self.async_write_ha_state()", "def update(self):\n #self._light.update()\n #self._state = 'on' #self._light.is_on()\n #self._brightness = 80 #self._light.brightness\n _LOGGER.info(\"update() is called\")", "async def _async_force_refresh_state(self):\n await self._shade.refresh()\n self._async_update_current_cover_position()\n self.async_write_ha_state()", "async def async_turn_on(self, **kwargs) -> None:\n self._state = await self._gate.turn_on_light(self._light_id)", "def sync(self) -> None: #\n self.__target.load_state_dict(self.__policy.state_dict())", "async def light(self) -> None:\n self.lit = True\n await self.run_command(\"miner fault_light on\")\n print(\"light \" + self.ip)", "def sync(self):\r\n\r\n # Ensure to rerun only once to avoid infinite loops\r\n # caused by a constantly changing state value at each run.\r\n #\r\n # Example: state.value += 1\r\n if self._state[\"is_rerun\"]:\r\n self._state[\"is_rerun\"] = False\r\n \r\n elif self._state[\"hash\"] is not None:\r\n if self._state[\"hash\"] != self._state[\"hasher\"].to_bytes(self._state[\"data\"], None):\r\n self._state[\"is_rerun\"] = True\r\n self._state[\"session\"].request_rerun()\r\n\r\n self._state[\"hash\"] = self._state[\"hasher\"].to_bytes(self._state[\"data\"], None)", "def lock(self):\n\n self.wait = True", "def lightning_turnon(self):\n self.turnOn()", "def sync(self):\r\n\r\n # Ensure to rerun only once to avoid infinite loops\r\n # caused by a constantly changing state value at each run.\r\n #\r\n # Example: state.value += 1\r\n if self._state[\"is_rerun\"]:\r\n self._state[\"is_rerun\"] = False\r\n\r\n elif self._state[\"hash\"] is not None:\r\n if self._state[\"hash\"] != self._state[\"hasher\"].to_bytes(self._state[\"data\"], None):\r\n self._state[\"is_rerun\"] = True\r\n self._state[\"session\"].request_rerun()\r\n\r\n self._state[\"hash\"] = self._state[\"hasher\"].to_bytes(self._state[\"data\"], None)", "def update(self):\n try:\n if not self._light.connected:\n self._light.connect()\n # pylint: disable=invalid-name\n r, g, b, w = self._light.get_color()\n except pykulersky.PykulerskyException as exc:\n if self._available:\n _LOGGER.warning(\"Unable to connect to %s: %s\", self._light.address, exc)\n self._available = False\n return\n if not self._available:\n _LOGGER.info(\"Reconnected to %s\", self.entity_id)\n self._available = True\n\n hsv = color_util.color_RGB_to_hsv(r, g, b)\n self._hs_color = hsv[:2]\n self._brightness = int(round((hsv[2] / 100) * 255))\n self._white_value = 
w", "def sync(self):\n\n # Ensure to rerun only once to avoid infinite loops caused by a constantly changing state value at each run.\n # Example: state.value += 1\n\n if self._state[\"is_rerun\"]:\n self._state[\"is_rerun\"] = False\n\n elif self._state[\"hash\"] is not None:\n if self._state[\"hash\"] != self._state[\"hasher\"].to_bytes(\n self._state[\"data\"], None\n ):\n self._state[\"is_rerun\"] = True\n self._state[\"session\"].request_rerun()\n\n self._state[\"hash\"] = self._state[\"hasher\"].to_bytes(self._state[\"data\"], None)", "def state_locked_changed(self, state):\n self.door_interlock_state = state\n self.get_state()", "def sync(self):\n\n # Ensure to rerun only once to avoid infinite loops\n # caused by a constantly changing state value at each run.\n #\n # Example: state.value += 1\n if self._state[\"is_rerun\"]:\n self._state[\"is_rerun\"] = False\n\n elif self._state[\"hash\"] is not None:\n if self._state[\"hash\"] != self._state[\"hasher\"].to_bytes(\n self._state[\"data\"], None\n ):\n self._state[\"is_rerun\"] = True\n self._state[\"session\"].request_rerun()\n\n self._state[\"hash\"] = self._state[\"hasher\"].to_bytes(self._state[\"data\"], None)", "def sync(self):\n\n # Ensure to rerun only once to avoid infinite loops\n # caused by a constantly changing state value at each run.\n #\n # Example: state.value += 1\n if self._state[\"is_rerun\"]:\n self._state[\"is_rerun\"] = False\n\n elif self._state[\"hash\"] is not None:\n if self._state[\"hash\"] != self._state[\"hasher\"].to_bytes(self._state[\"data\"], None):\n self._state[\"is_rerun\"] = True\n self._state[\"session\"].request_rerun()\n\n self._state[\"hash\"] = self._state[\"hasher\"].to_bytes(\n self._state[\"data\"], None)", "def sync(self):\n pass", "def sync(self):\n pass", "def sync_state_changed(self, state):\n return", "def lock(self):\n self.mainloop().lock()", "def setLightSwitch(self, _state=False):\n if _state == True:\n render.setLight(self.lightNP)\n elif _state == False:\n render.clearLight(self.lightNP)", "def set_light(self, idx, light):\n\n # Don't set a light that doesn't need its thing set\n if self.application.settings[\"lights_state\"][idx] == light:\n return\n\n # synchronize our internal representation of the lights\n self.application.settings[\"lights_state\"][idx] = light\n\n packed_cmd = srsly.pack_light_data(idx, light)\n srsly.write_light_cmd(\n self.application.settings['serial_connection'],\n packed_cmd,\n sleep=self.application.settings[\"refresh_rate\"])", "def arduinoSync(self):\n self.arduinoSyncStarted.emit()\n #Calculation of the time LED must be on\n exp = (self.mmc.getExposure()) # in ms\n #list containing each illumTime for each LED\n illumTime=[round(exp*self.expRatio[0],3),\n round(exp*self.expRatio[1],3),\n round(exp*self.expRatio[2],3)]\n\n if self.seqMode == \"rgbMode\":\n ledDriverNb=[0,1,2] #[Red, Green, Blue]\n for driverNb in ledDriverNb:\n driver = Arduino(driverNb)\n driver.synchronization(illumTime,\n rgbLedRatio = self.rgbLedRatio)\n elif self.seqMode == 'rbMode':\n ledDriverNb=[0,1,2] #[Red, Green, Blue]\n for driverNb in ledDriverNb:\n driver = Arduino(driverNb)\n driver.synchronization(illumTime,\n greenFrameInterval = self.greenFrameInterval,\n colorMode = self.colorMode)\n\n self.arduinoSyncFinished.emit()", "def synchronize_state(self):\n theta = self.unicycle_state[:,2]\n v = self.unicycle_state[:,3]\n self.position[:, :2] = self.unicycle_state[:,:2]\n self.orientation[:,2] = theta\n vx = v * np.cos(theta)\n vy = v * np.sin(theta)\n\n 
self.velocity[:, 0] = vx\n self.velocity[:, 1] = vy\n\n self.angular_velocity[:, 2] = self.unicycle_state[:, 4]", "def lock_gate(self):\n self.fsm_gate.clear()", "async def change_light(self, light, newstate):\n if not self.connected:\n return\n\n # we don't have 3 lights!\n if light > 1:\n return\n\n # we don't have THIS light\n if not self.light_array[light]:\n return\n\n # this is a toggle switch, not on/off\n if self.light_status[light] == newstate:\n return\n\n # Setup the basic things we know\n data = bytearray(9)\n data[0] = M_START\n data[1] = 7\n data[2] = mtypes[BMTS_CONTROL_REQ][0]\n data[3] = mtypes[BMTS_CONTROL_REQ][1]\n data[4] = mtypes[BMTS_CONTROL_REQ][2]\n data[5] = C_LIGHT1 if light == 0 else C_LIGHT2\n data[6] = 0x00 # who knows?\n data[7] = messages.Message.crc(data[1:])\n data[8] = M_END\n\n self.writer.write(data)\n await self.writer.drain()", "def f_lock(self):\n self._locked = True", "def update_states(self) -> None:\n self.set_states()\n self.async_write_ha_state()", "def light_action():\n if light_btn.isChecked():\n self.variables.default_values_dict[\"settings\"][\"external_lights\"] = True\n else:\n self.variables.default_values_dict[\"settings\"][\n \"external_lights\"\n ] = False", "def set_light_on(self):\r\n self._light = \"ON\"", "def lighton(update: Update, context: CallbackContext) -> None:\n if __sauna.control.getPortValue(\"Light Sensor\") == 0:\n # TODO Mit Stromstossrelais ist dieser Code richtig\n # __sauna.control.togglePortValue(\"Light Switch\")\n update.message.reply_text(\"Light is on\")\n else:\n update.message.reply_text(\"Light was already on\")\n\n __sauna.control.setPortValue(\"Light Switch\")\n val = __sauna.control.getPortValue(\"Light Switch\")\n update.message.reply_text(\"Light Switch := \" + str(val))", "def updateState(self):\n self.state = self.microgridPolicy.computeState();", "def update(self):\r\n self._state = self._dev.state", "def flicker_lights(self):\n print 'Lights Set'", "def update_lights(self, light_data):\n self.current_brightness = self.brightness\n self.brightness = light_data.get('brightness')\n self.power_state = light_data.get('power_state')\n self._update_board()", "def sync(type, all):\n print(\"Syncing\")", "async def async_update(self) -> None:\n if (\n self.device.appliance.status.get(BSH_POWER_STATE, {}).get(ATTR_VALUE)\n == BSH_POWER_ON\n ):\n self._state = True\n elif (\n self.device.appliance.status.get(BSH_POWER_STATE, {}).get(ATTR_VALUE)\n == self.device.power_off_state\n ):\n self._state = False\n elif self.device.appliance.status.get(BSH_OPERATION_STATE, {}).get(\n ATTR_VALUE, None\n ) in [\n \"BSH.Common.EnumType.OperationState.Ready\",\n \"BSH.Common.EnumType.OperationState.DelayedStart\",\n \"BSH.Common.EnumType.OperationState.Run\",\n \"BSH.Common.EnumType.OperationState.Pause\",\n \"BSH.Common.EnumType.OperationState.ActionRequired\",\n \"BSH.Common.EnumType.OperationState.Aborting\",\n \"BSH.Common.EnumType.OperationState.Finished\",\n ]:\n self._state = True\n elif (\n self.device.appliance.status.get(BSH_OPERATION_STATE, {}).get(ATTR_VALUE)\n == \"BSH.Common.EnumType.OperationState.Inactive\"\n ):\n self._state = False\n else:\n self._state = None\n _LOGGER.debug(\"Updated, new state: %s\", self._state)", "async def async_turn_on(self, **kwargs: Any) -> None:\n self.entity_description.on_off_fn(self._valve, True)\n await self._device.push_state()\n self.async_write_ha_state()", "def async_turn_on(self, **kwargs):\n self._state = STATE_ON\n transition = kwargs.get(ATTR_TRANSITION, 
self._fade_time)\n\n # Update state from service call\n if ATTR_BRIGHTNESS in kwargs:\n self._brightness = kwargs[ATTR_BRIGHTNESS]\n\n if ATTR_HS_COLOR in kwargs:\n self._rgb = color_util.color_hs_to_RGB(*kwargs[ATTR_HS_COLOR])\n # self._white_value = color_rgb_to_rgbw(*self._rgb)[3]\n\n if ATTR_WHITE_VALUE in kwargs:\n self._white_value = kwargs[ATTR_WHITE_VALUE]\n\n logging.debug(\"Setting light '%s' to %s with transition time %i\",\n self._name, repr(self.dmx_values), transition)\n asyncio.ensure_future(\n self._controller.set_channels_async(\n self._channels, self.dmx_values, transition=transition))\n self.async_schedule_update_ha_state()", "def sync(self):\n return", "def sync_start(self):", "def do_sync(self):\n raise NotImplementedError() # pragma: no cover", "def manualState(self, tfid, state):\n self.trafficLights.get(int(tfid)).setState(state)\n self.trafficLights.get(int(tfid)).updateState()", "def action_lock(self):\n self.state = 'locked'", "def run(self):\n global moving_line\n global change_requested\n global thick_1_a, thick_1_b, thick_1_c, thick_2_a, thick_2_b, thick_2_c\n global lap_to_go\n global lights\n line = moving_line\n if line == 1:\n thick_1_a = 1\n lights[2].write(1)\n thick_1_b = -1\n lights[1].write(0)\n time.sleep(self.interval)\n thick_1_b = 1\n lights[1].write(1)\n thick_1_c = -1\n lights[0].write(0)\n thick_2_a = -1\n lights[5].write(0)\n thick_2_b = 1\n lights[4].write(1)\n thick_2_c = 1\n lights[3].write(1)\n line = 2\n else:\n thick_2_a = 1\n lights[5].write(1)\n thick_2_b = -1\n lights[4].write(0)\n time.sleep(self.interval)\n thick_2_b = 1\n lights[4].write(1)\n thick_2_c = -1\n lights[3].write(0)\n thick_1_a = -1\n lights[2].write(0)\n thick_1_b = 1\n lights[1].write(1)\n thick_1_c = 1\n lights[0].write(1)\n line = 1\n\n moving_line = line\n change_requested = 0\n lap_to_go = lap_period_sec", "def flicker_lights(self):\n if self.lighting:\n self.lighting = False\n else:\n self.lighting = True\n self.redraw()", "def flicker_lights(self):\n if self.lighting:\n self.lighting = False\n else:\n self.lighting = True\n self.redraw()", "async def unlight(self) -> None:\n self.lit = False\n await self.run_command(\"miner fault_light off\")\n print(\"unlight\" + self.ip)", "def light_standby():\n for led in leds:\n led.on()\n\n rgb_driver.pulse(on_color=(scale[\"R\"], scale[\"G\"], scale[\"B\"]), off_color=(0,0,0))", "def _update(self, force_update=True):\n with self._wemo_exception_handler(\"update status\"):\n self._state = self.wemo.get_state(force_update)", "def set_state( self ):", "def set_light_on(self):\n self._light = \"ON\"", "def sync(self, sync):\n self._sync = sync", "def ControlLights(state):\n for led in (RED,YELLOW,GREEN):\n GPIO.output(LED[led],state[led])\n time.sleep(FLASH_TIME)", "def react(self):\n if self.flag == 0:\n self.state.set(\"SIMULATING\")\n self.flag = 1\n self.simulate()\n else:\n self.state.set(\"AT REST\")\n self.flag = 0", "def __setstate__(self, state):\n state['_lock'] = Lock()\n self.__dict__.update(state)", "def update(self):\n self._state = self._state", "def doSync (self) :\r\n \r\n self.factory.getSyncFor(self)", "def _light_changed(self, _entity, _attribute, old, new, _kwargs):\n is_on = new == \"on\"\n is_off = new == \"off\"\n if is_on and old == \"off\":\n if not self._light_on:\n self.log(\n \"UNSYNCED ON LIGHT (stored as off)\",\n level=\"WARNING\",\n log=LOGGER,\n )\n self._last_light_on = monotonic()\n self._light_on = True\n elif is_off and old == \"on\":\n if self._light_on:\n self.log(\n \"UNSYNCED OFF 
LIGHT (stored as on)\",\n level=\"ERROR\",\n log=LOGGER,\n )\n self._light_on = False\n elif not is_on and not is_off:\n self.log(\n f\"Unavailable LIGHT? {new} (stored as {old})\", log=LOGGER,\n )\n else:\n self._light_on = is_on", "async def refresh_entity_state(self):", "def sync():\n sync_ssda()", "def syncContents(self):\n self._contents.setState_TRY(self.temperature(),\n self.density(),\n self.massFractions())", "def turnLightingSystemOn():\n dislin.light('ON')", "async def change_light_state(self, old, attribute, direction, steps):\n sign = self.sign_mapping[direction]\n if attribute == \"xy_color\":\n self.index_color += sign\n self.index_color = self.index_color % len(self.colors)\n new_state_attribute = self.colors[self.index_color]\n attributes = {\n attribute: new_state_attribute,\n \"transition\": self.delay / 1000,\n }\n await self.on(**attributes)\n # In case of xy_color mode it never finishes the loop, the hold loop\n # will only stop if the hold action is called when releasing the button.\n # I haven't experimented any problems with it, but a future implementation\n # would be to force the loop to stop after 4 or 5 loops as a safety measure.\n return False\n self.log(f\"Attribute: {attribute}; Current value: {old}\", level=\"DEBUG\")\n max_ = self.attribute_minmax[attribute][\"max\"]\n min_ = self.attribute_minmax[attribute][\"min\"]\n step = (max_ - min_) // steps\n new_state_attribute = old + sign * step\n if self.check_smooth_power_on(\n attribute, direction, await self.get_state(self.light[\"name\"])\n ):\n new_state_attribute = min_\n # The light needs to be turned on since the current state is off\n # and if the light is turned on with the brightness attribute,\n # the brightness state won't remain when turned of and on again.\n await self.on()\n attributes = {attribute: new_state_attribute, \"transition\": self.delay / 1000}\n if min_ <= new_state_attribute <= max_:\n await self.on(**attributes)\n self.value_attribute = new_state_attribute\n return False\n else:\n new_state_attribute = max(min_, min(new_state_attribute, max_))\n attributes[attribute] = new_state_attribute\n await self.on(**attributes)\n self.value_attribute = new_state_attribute\n return True", "def synchronize_traffic_light(self, landmark_id, state):\n if not landmark_id in self._tls:\n logging.warning('Landmark %s not found in carla', landmark_id)\n return False\n\n traffic_light = self._tls[landmark_id]\n traffic_light.set_state(state)\n return True", "def update(self):\n self._state = 23", "def i_am_locking(self):\r\n pass", "async def async_turn_on(self, **kwargs: Any) -> None:\n await self._switch.async_on()\n self._attr_is_on = True\n self.async_write_ha_state()", "def lockMeshes():\n setLockOnMeshes(2)", "def hold_sync(self):\n if self._holding_sync is True:\n yield\n else:\n try:\n self._holding_sync = True\n yield\n finally:\n self._holding_sync = False\n self.send_state(self._states_to_send)\n self._states_to_send.clear()", "async def async_update(self):\n await self.hass.async_add_job(self.ccs811_client.update)\n if self.type == SENSOR_ECO2:\n eco2 = self.ccs811_client.eco2\n self._state = eco2\n elif self.type == SENSOR_TVOC:\n self._state = self.ccs811_client.tvoc", "def turn_on(self):\n GPIO.output(self.gpio, True) # turn on light", "def test_light_interface(light_name='head_green_light'):\n l = Lights()\n rospy.loginfo(\"All available lights on this robot:\\n{0}\\n\".format(\n ', '.join(l.list_all_lights())))\n rospy.loginfo(\"Blinking Light: {0}\".format(light_name))\n on_off = 
lambda x: 'ON' if l.get_light_state(x) else 'OFF'\n rospy.loginfo(\"Initial state: {0}\".format(on_off(light_name)))\n # turn on light\n l.set_light_state(light_name, True)\n rospy.sleep(1)\n rospy.loginfo(\"New state: {0}\".format(on_off(light_name)))\n # turn off light\n l.set_light_state(light_name, False)\n rospy.sleep(1)\n rospy.loginfo(\"New state: {0}\".format(on_off(light_name)))\n # turn on light\n l.set_light_state(light_name, True)\n rospy.sleep(1)\n rospy.loginfo(\"New state: {0}\".format(on_off(light_name)))\n # reset output\n l.set_light_state(light_name, False)\n rospy.sleep(1)\n rospy.loginfo(\"Final state: {0}\".format(on_off(light_name)))", "async def test_switch_change_lock_state(hass: HomeAssistant, utcnow) -> None:\n helper = await setup_test_component(hass, create_lock_service)\n\n await hass.services.async_call(\n \"lock\", \"lock\", {\"entity_id\": \"lock.testdevice\"}, blocking=True\n )\n helper.async_assert_service_values(\n ServicesTypes.LOCK_MECHANISM,\n {\n CharacteristicsTypes.LOCK_MECHANISM_TARGET_STATE: 1,\n },\n )\n\n await hass.services.async_call(\n \"lock\", \"unlock\", {\"entity_id\": \"lock.testdevice\"}, blocking=True\n )\n helper.async_assert_service_values(\n ServicesTypes.LOCK_MECHANISM,\n {\n CharacteristicsTypes.LOCK_MECHANISM_TARGET_STATE: 0,\n },\n )", "def lock(self):\n raise NotImplementedError", "def update(self):\n print(\"sensorState Update\")", "def update(self):\n try:\n if self._remote.power() == 1:\n self._state = STATE_ON\n else:\n self._state = STATE_OFF\n\n # Set TV to be able to remotely power on\n # self._remote.power_on_command_settings(2)\n if self._remote.mute() == 2:\n self._muted = False\n else:\n self._muted = True\n self._volume = self._remote.volume() / 60\n except OSError:\n self._state = STATE_OFF", "def magma_device_sync():\n\n _libmagma.magma_device_sync()", "async def async_turn_aux_heat_on(self) -> None:\n self._aux = True\n self.async_write_ha_state()", "async def async_update(self):\n await self.hass.async_add_job(self.hdc1000_client.update)\n if self.type == SENSOR_TEMP:\n temperature = round(self.hdc1000_client.temperature, 1)\n if self.temp_unit == TEMP_FAHRENHEIT:\n temperature = round(celsius_to_fahrenheit(temperature), 1)\n self._state = temperature\n elif self.type == SENSOR_HUMID:\n self._state = round(self.hdc1000_client.humidity, 1)", "def lightRotate( task, sc = sc ):\r\n global lightAngle\r\n lightAngle += math.pi / 180 * globalClock.getDt() * 10\r\n r = 50\r\n sc.setLightPos( Vec3( r * math.cos(lightAngle), r * math.sin(lightAngle), 100 ) )\r\n return Task.cont", "def test_update_with_target_state(self):\n self.switch._target_state = True\n self.port.data = {}\n self.port.data[\"output\"] = \"stale\"\n self.switch.update()\n assert 1.0 == self.port.data[\"output\"]\n assert self.switch._target_state is None\n self.port.data[\"output\"] = \"untouched\"\n self.switch.update()\n assert \"untouched\" == self.port.data[\"output\"]", "def turn_on(self):\n self._data.homestatus.setroomThermpoint(\n self._data.home_id, self._room_id, STATE_NETATMO_HOME\n )\n self.update_without_throttle = True\n self.schedule_update_ha_state()", "def _sync(self, message=None):\n if message[0] == self._multi_level_sensor_property.element_uid:\n self._state = self._device_instance.multi_level_sensor_property[\n message[0]\n ].value\n elif message[0].startswith(\"hdm\"):\n self._available = self._device_instance.is_online()\n else:\n _LOGGER.debug(\"No valid message received: %s\", message)\n self.schedule_update_ha_state()", 
"def sync(self, sync):\n\n self._sync = sync", "def local(self):\n logging.info(__name__ + ' : Set control to local & locked')\n self.set_remote_status(0)", "def loop(self):\r\n\r\n self._next_state = self._current_state", "def _pre_sync(self):", "def turn_on(self, **kwargs):\n self._brightness = 100\n self._state = 'on'\n #self._light.brightness = kwargs.get(ATTR_BRIGHTNESS, 255)\n #self._light.turn_on()\n _LOGGER.info(\"turn_on() is called\")", "def SynchronizeFlags(self):\n pass", "def update(self):\r\n if self._block.info_values is not None:\r\n self._state = self._block.info_values.get(self._sensor_name, None)", "async def async_reset_brightness(self):\n await self.local_meural.send_als_calibrate_off()", "def ToggleLock(self, event):\n pass" ]
[ "0.83932275", "0.730228", "0.6771389", "0.6599177", "0.6598202", "0.65861017", "0.65388566", "0.6511652", "0.65106404", "0.65080667", "0.646516", "0.64101493", "0.6382941", "0.63396305", "0.63342994", "0.6315549", "0.6311331", "0.630048", "0.62958086", "0.6278275", "0.6262956", "0.6221932", "0.6219884", "0.62183535", "0.62183535", "0.62163985", "0.6206121", "0.61812603", "0.61713946", "0.6156673", "0.61259884", "0.6054895", "0.5995861", "0.59925365", "0.5991874", "0.5977013", "0.59575695", "0.59567493", "0.5952764", "0.5946643", "0.5936217", "0.5925061", "0.59102", "0.5908267", "0.58997303", "0.5888819", "0.58883935", "0.5881094", "0.58672374", "0.5862866", "0.58623147", "0.58579713", "0.5852074", "0.5852074", "0.58496416", "0.58486825", "0.58483416", "0.58449686", "0.58408666", "0.5829397", "0.5828932", "0.58251804", "0.582311", "0.58219635", "0.58015716", "0.57972044", "0.57946086", "0.57771343", "0.5767987", "0.5766001", "0.576058", "0.5753456", "0.5749587", "0.57345986", "0.57305497", "0.57299465", "0.57278484", "0.5726045", "0.5712647", "0.569842", "0.56974477", "0.569576", "0.5681973", "0.5675535", "0.5666285", "0.5663045", "0.5660302", "0.56585", "0.5649668", "0.56463194", "0.56395304", "0.5632333", "0.5609172", "0.56080437", "0.56026405", "0.5587125", "0.55841684", "0.5580051", "0.55632675", "0.5557886" ]
document_score: 0.6158366
document_rank: 29
This function returns the number of elements in the numbers list that are divisible by divide.
def listDivide(numbers, divide = 2):
    divisible_count = 0
    for i in numbers:
        if i % divide == 0:
            divisible_count += 1
    return divisible_count
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def listDivide(numbers, divide=2): \n counter = 0\n for num in numbers:\n if num % divide == 0:\n counter+=1\n return counter", "def listDivide(numbers, divide=2):\n newList = []\n for i in numbers:\n if i % divide == 0:\n newList.append(i)\n return len(newList)", "def divide(numbers):\n counter = 0\n for num in numbers:\n counter /= num\n return counter", "def answer(l):\n num_divisors = [0] * len(l)\n triple_count = 0\n for large in range(1, len(l)):\n for small in range (0, large):\n if l[large] % l[small] == 0:\n num_divisors[large] += 1\n triple_count += num_divisors[small]\n return triple_count", "def count_multiples( start, stop, divisor ):\n count = 0\n num = start\n while num < stop + 1:\n if num % divisor == 0:\n count += 1\n num += 1\n return count", "def num_divisors_iii(n):\n set_pf = set(n)\n n_div = 1\n for pf in set_pf:\n x = n.count(pf)\n n_div *= (1 + x)\n return n_div", "def count_divisor(num):\n ans = 2 # considering 1 and number itself by default\n for i in range(2, int(math.sqrt(num)) + 1):\n if num % i == 0:\n # for equal divisor (incase of perfect square)\n if (num / i) == i:\n ans += 1\n else:\n ans += 2\n return ans", "def get_divisores(num):\n divisores = [] #uso una lista para guardar los divisores\n for i in range(1, num):\n if num%i == 0:\n divisores.append(i)\n return divisores", "def count_divisions(num, n):\n count = 0\n while pe_005.is_divisible(num, n):\n num = num // n\n count += 1\n return count, num", "def question_23(list_num: float) -> float:\n return sum(list_num) / len(list_num)", "def find_count_divisor(this_list):\n max_found = this_list[0][1]\n count = 0\n\n while max_found/50 > 0:\n max_found -= 50\n count += 1\n\n return count", "def num_divisors_ii(n):\n set_pf = set(n)\n n_og = 2**(len(set_pf))\n n_div = n_og\n for pf in set_pf:\n x = n.count(pf)\n n_div += n_div//2 * (x - 1)\n return n_div", "def div_numbers(a: int, b: int) -> int:\n return a / b", "def divide(numbers):\n \n result = numbers[0]\n for n in numbers[1:]:\n result = result / n\n return result", "def div_by(n, list_of_num):\n for num in list_of_num:\n if not n % num:\n return True\n return False", "def sumDivisor(inputList):\n result = 0\n for i in inputList:\n result += i\n return result", "def verifica_element_divide_lista(numar, lista_divizori):\n for i in lista_divizori:\n if i == 0:\n return False\n if numar % i != 0:\n return False\n return True", "def findDivisor(num):\n divisors = [1]\n for i in range(2, int(sqrt(num)) + 1):\n if num % i == 0:\n divisors.append(i)\n temp = num / i\n if temp != i:\n divisors.append(temp)\n return divisors", "def div_sum(data: list) -> int:\n\n def even_quotient(nums: list) -> int:\n \"\"\"Finds the quotient of the only two numbers in the list that evennly divide.\"\"\"\n for i in range(len(nums[:-1])):\n for j in range(i + 1, len(nums)):\n if nums[i] % nums[j] == 0:\n return nums[i] // nums[j]\n elif nums[j] % nums[i] == 0:\n return nums[j] // nums[i]\n\n total = 0\n for row in data:\n total += even_quotient(row)\n return total", "def test_count_divisible_digits():\n print('Testing count_divisible_digits')\n\n # Cases given to test this problem\n assert_equals(4, hw1.count_divisible_digits(650899, 3))\n assert_equals(1, hw1.count_divisible_digits(-204, 5))\n assert_equals(0, hw1.count_divisible_digits(24, 5))\n assert_equals(0, hw1.count_divisible_digits(1, 0))\n\n # Additional cases to check the 0 check\n assert_equals(0, hw1.count_divisible_digits(0, 0))\n assert_equals(2, hw1.count_divisible_digits(-579300, 2))", "def 
divisions(self,domain,divisions):\n size = domain.height/divisions\n counter = []\n for i in range(divisions):\n count = ((self.z >= i*size) & (self.z < (i+1)*size)).sum()\n counter.append(count)\n return counter", "def get_divisors_sum(number):\n if number == 0:\n return 0\n\n divisors_list = []\n for i in range(number+1):\n j = i + 1\n if number % j == 0:\n divisors_list.append(j)\n\n return sum(divisors_list)", "def testListDivide():\n assert listDivide([1,2,3,4,5]) == 2\n assert listDivide([2,4,6,8,10]) == 5\n assert listDivide([30, 54, 63,98, 100], divide = 10) == 2\n assert listDivide([]) == 0\n assert listDivide([1,2,3,4,5], 1) == 5", "def divisible_by(array, divisor):\n return_list = list()\n for i in array:\n if i % divisor == 0:\n return_list.append(i)\n return return_list", "def sum_of_proper_divisors(number: int):\n divisors = []\n\n for n in range(1, number):\n if number % n == 0:\n divisors.append(n)\n\n return sum(divisors)", "def n_photon_counting_div(self):\n inti = ct.c_ulong()\n self.lib.GetNumberPhotonCountingDivisions(ct.pointer(inti))\n return inti.value", "def getDivisors(n):", "def divisor_counter(num):\n if num <= 0:\n raise ValueError('num must be a positive, non-zero number')\n\n divisors = 0\n num_sq_rt = num ** .5\n for possible_divisor in range(1, int(num_sq_rt)):\n if num % possible_divisor == 0:\n divisors += 1\n\n divisors *= 2\n # If num is a perfect square, we have to subtract one so we only count\n # the square root once. i.e. if num is 16, we only want to count 4 once\n if num_sq_rt.is_integer():\n divisors -= 1\n return divisors*2", "def _find_dividers(num: int) -> List[int]:\r\n\r\n dividers: List[int] = list()\r\n while num != 1:\r\n primes = PrimeHandler.find_all_primes(num)\r\n for prime in reversed(primes):\r\n if num % prime == 0:\r\n dividers.append(prime)\r\n num = num // prime\r\n break\r\n return list(reversed(dividers))", "def main():\n numbers = int(input())\n count = 0\n for num in range(1, numbers+1):\n if num > 1:\n for i in range(2, num):\n if (num % i) == 0:\n break\n else:\n count += 1\n print(count)", "def get_count_of_divisors_by_number(self, number):\n if int(number) < 1:\n print \"this method needs number >= 1\"\n return 0\n if int(number) == 1:\n return 1\n # n = (a ** p) * (b ** q) * (c ** r) のとき、\n # n の約数は (p + 1) * (q + 1) * (r + 1) で求められる\n factors = self.get_prime_factors_by_number(number)\n patterns = factors.values()\n patterns_considered_power_of_zero = map(lambda x: x + 1, patterns)\n ret = reduce(lambda x, y: x * y, patterns_considered_power_of_zero)\n return ret", "def amountdiv(number, minnum, maxnum):\n\n # Set the amount to 0\n amount = 0\n\n # For each item in range of minimum and maximum\n for i in range(minnum, maxnum + 1):\n # If the remainder of the divided number is 0\n if number % i == 0:\n # Add 1 to the total amount\n amount += 1\n\n # Return the result\n return amount", "def divide(numbers):\n quot = numbers[0]\n for i in numbers[1:]:\n quot = quot / i\n return quot", "def test_list_int(self):\n result = div(2, 4)\n self.assertEqual(result, 0.5)", "def getNumDivisors(n):\n\n n = abs(int(n))\n\n r = 1\n i = 2\n while i <= n:\n a = 0\n while n % i == 0:\n n = n / i\n a = a + 1\n r = r * (a + 1)\n i = i + 1\n\n return r", "def num_divisors(n):\n divisors = []\n for i in range(1, int(n**0.5) + 1):\n if n % i == 0:\n divisors += {i, n //i}\n return divisors", "def n50_counter(input_list):\n input_list.sort()\n half_tot = sum(input_list) / 2\n\n current_count = 0\n for num in input_list:\n current_count += 
num\n if current_count >= half_tot:\n return num", "def even_quotient(nums: list) -> int:\n for i in range(len(nums[:-1])):\n for j in range(i + 1, len(nums)):\n if nums[i] % nums[j] == 0:\n return nums[i] // nums[j]\n elif nums[j] % nums[i] == 0:\n return nums[j] // nums[i]", "def testListDivide():\n listDivide([1, 2, 3, 4, 5])\n listDivide([2, 4, 6, 8, 10])\n listDivide([30, 54, 63, 98, 100], divide=10)\n listDivide([])\n listDivide([1, 2, 3, 4, 5], 1)", "def div(self):\n a = self.nums()\n x = LibraryFunctions.per(a, 0.9) - LibraryFunctions.per(a, 0.1)\n return x / 2.58", "def find_divisors_1(number):\n divisors = []\n # Test all numbers from 1 to number-1.\n # Actually, we can be more efficient with range(1, (number//2)+1)\n for n in range(1, number): \n if number % n == 0:\n divisors.append(n)\n return divisors", "def calc_average(numbers):\n return sum(numbers) // len(numbers) # integer division //", "def get_max_divisible_subset_length(self, nums):\n if not nums:\n return 0\n \n max_lengths = [1]\n max_length = 1\n\n for i in range(1, len(nums)):\n max_length_here = 1\n for j in range(i - 1, -1, -1):\n if nums[i] % nums[j] == 0:\n max_length_here = max(max_length_here, 1 + max_lengths[j])\n max_lengths.append(max_length_here)\n max_length = max(max_length, max_length_here)\n \n return max_length", "def count_factors_with_primes(x, primelist):\n # Number of Divisors.\n nod = 1\n # Reassigned after every divisor to become the remainder.\n remain = x\n # Let's pick a prime number.\n for p in primelist:\n # If this prime squared, is greater than x, we double the number of\n # divisors currently found, and return that\n if p*p > x:\n return nod * 2\n # Exponent\n exp = 1\n # if the prime divides the current remainder, increase the exponent,\n # divide it in, and check again. - This means a prime that divides in\n # once will double our number of divisors - seems legit.\n while remain % p == 0:\n exp += 1\n remain = remain // p\n nod *= exp\n # We're done dividing. Call it quits.\n if remain == 1:\n return nod\n # We've run out of primes. Must be done. 
Call it quits.\n return nod", "def get_divisors(num):\n assert num != 0, \"Num is 0\"\n divisors = []\n sq_root = int(num**0.5)\n for i in range(1, sq_root + 1):\n if num % i == 0:\n divisors.extend([i, num // i])\n # if num has a perfect sq, that number will be added twice, then:\n if sq_root ** 2 == num:\n divisors.remove(sq_root)\n return divisors", "def exercise_b2_24():\r\n number = input(\"Insert the number: \")\r\n flag = 0\r\n count = 0\r\n divisors_list =[]\r\n while flag <= int(number):\r\n flag +=1\r\n if (int(number) % flag) == 0:\r\n count += 1\r\n divisors_list.append(flag)\r\n print(\"\"\"\\nThe amount of divisors are: %s\"\"\"\r\n \"\"\"\\nThe numbers are: %s\\n\"\"\" % (count, divisors_list))\r\n return", "def find_divisors_2(number):\n divisors = [n for n in range(1, number) if number % n == 0]\n return divisors", "def d(n):\n divisors = []\n for i in range(1, n):\n if n % i == 0:\n divisors.append(i)\n return sum(divisors)", "def selfDividingNumbers(left, right):\n ret = []\n bounds = list(range(left, right + 1))\n \n for num in bounds:\n div = True\n if '0' in str(num):\n pass\n elif num < 10:\n ret.append(num)\n else:\n for n in str(num): \n if num % int(n) !=0:\n div = False\n if div is True:\n ret.append(num) \n return ret", "def divisor_num(x):\n factor_pow = map(lambda y: y + 1, factorint(x).values())\n div_num = reduce(mul, factor_pow)\n return div_num", "def count_divisors(n):\r\n if n == 1:\r\n return 0\r\n m = int(sqrt(n))\r\n c = 1\r\n if m * m == n:\r\n c += 1\r\n m -= 1\r\n for i in xrange(2, m+1):\r\n if n % i == 0:\r\n c += 2\r\n return c", "def finddiv(x):\r\n \r\n div = (1, x)\r\n for i in range(2, x//2+1):\r\n if x%i==0:\r\n div+=(i,)\r\n return div", "def get_dividers(num: int) -> List[int]:\r\n if num <= 0:\r\n raise ValueError\r\n\r\n if num == 1:\r\n return [1]\r\n\r\n if PrimeHandler.is_prime(num):\r\n return [num]\r\n\r\n return _find_dividers(num)", "def nr_pare(lista):\n nr_elemente_pare = 0\n for i in lista:\n if i % 2 == 0:\n nr_elemente_pare += 1\n return nr_elemente_pare", "def num_divisors(n):\n\tif n < 2:\n\t\treturn 1 \t# not really correct\n\t\n\tdivisors = 1\n\ti = 2\n\n\twhile n > 1:\n\t\tp = 0 \t# p will be the maximum x such that i^x evenly divides n\n\n\t\t# repeatedly divide n by i, and store the number of times into p\n\t\twhile (n % i == 0):\n\t\t\tn = n / i\n\t\t\tp += 1\n\n\t\tdivisors = divisors * (p + 1)\n\t\ti += 1\n\n\treturn divisors", "def diviseur(n):\n s = 0\n for i in range (1, n):\n if n%i == 0:\n s += 1\n print(i)\n return \"Le nombre de diviseurs est\", s", "def divide_ints_check_divisible(numerator, denominator):\n if not isinstance(numerator, int):\n raise TypeError('`numerator` is not an instance of `int`.')\n if not isinstance(denominator, int):\n raise TypeError('`denominator` is not an instance of `int`.')\n if numerator % denominator != 0:\n raise ValueError('`numerator` is not divisible by `denominator`.')\n return numerator//denominator", "def div(a, x):\n return [a[i]/x for i in range(2)]", "def div(a, b):\n\n c = 0\n d = b\n while True:\n if d > a:\n break\n else:\n c = c + 1\n d = d + b\n\n return c", "def simple_get_divisors(num: int) -> list:\n all_divisors = []\n for possible_divisor in range(1, math.floor(num / 2) + 1):\n if num % possible_divisor == 0:\n all_divisors.append(possible_divisor)\n return all_divisors", "def check_divisions(self, terms):\n start = 0\n while 1:\n try:\n index = terms.index('/', start)\n if terms[index - 1] % terms[index + 1]:\n raise DecimalDivisionError\n start = index + 
1\n except ValueError:\n break", "def proper_divisors(n):\r\n numbers = []\r\n for i in xrange(1, n):\r\n if n % i == 0:\r\n numbers.append(i)\r\n \r\n return numbers", "def divisors(x):\n x = abs(x)\n result = []\n upper_bound = int(math.sqrt(x))\n for i in range(1, upper_bound + 1):\n if x % i == 0:\n if x / i == i:\n result.append(i)\n else:\n result.append(i)\n result.append(x//i)\n return sorted(distinct(result))", "def between_two_sets(factors, elements):\n results = []\n for i in xrange(max(factors), min(elements) + 1):\n tally = True\n for factor in factors:\n tally = tally and (i % factor == 0)\n for element in elements:\n tally = tally and (element % i == 0)\n if tally:\n results.append(i)\n return len(results)", "def divisors(num: int) -> Iterable[int]:\n assert num > 0\n if num == 1:\n yield 1\n return\n\n for divisor in range(1, int(math.sqrt(num)) + 1):\n if num % divisor == 0:\n yield divisor\n divisor_2 = num // divisor\n if divisor_2 != divisor:\n yield divisor_2\n else:\n return", "def div(seq):\n for (i, n) in enumerate(seq):\n # try dividing this number with all others\n # (in fact, we can only consider the subsequent numbers,\n # and check the ratio both ways)\n for j in range(i+1, len(seq)):\n ratio1 = seq[j] / seq[i]\n ratio2 = seq[i] / seq[j]\n for result in [ratio1, ratio2]:\n # is the result an integer? if so, done\n if is_int(result):\n return int(result)", "def countElements(self, nums):\n import sys\n max_n = -sys.maxint\n min_n = sys.maxint\n\n for n in nums:\n max_n = max(n, max_n)\n min_n = min(n, min_n)\n\n count = 0\n for n in nums:\n if min_n < n < max_n:\n count += 1\n return count", "def number_list_average(numbers):\n return sum(numbers)/len(numbers)", "def n_percentage_part(percentage_level, counted_od):\n total = 0.0\n od_num = 0\n percentage_od = count_to_percentage(counted_od)\n if percentage_level == 1.0:\n od_num = len(counted_od)\n else:\n for i in percentage_od:\n if total < percentage_level:\n total += i\n else:\n od_num = percentage_od.index(i)\n break\n return od_num", "def count_proper_divisors(n):\r\n if n == 1:\r\n return 0\r\n m = int(sqrt(n))\r\n c = 1\r\n if m * m == n:\r\n c += 1\r\n m -= 1\r\n for i in xrange(2, m+1):\r\n if n % i == 0:\r\n c += 2\r\n return c", "def divisors(n):\r\n numbers = []\r\n for i in xrange(1, n+1):\r\n if n % i == 0:\r\n numbers.append(i)\r\n return numbers", "def divisors(N):\n # Initialize the list of divisors\n divisor_list = [1]\n # Check division by d for d <= N/2\n for d in range(2,N // 2 + 1):\n if N % d == 0:\n divisor_list.append(d)\n divisor_list.append(N)\n return divisor_list", "def divisior(n: int) -> list:\n j = [n]\n for d in range(n+1): #loop bis n\n d > 0", "def count_divisible_digits(n, m):\n result = 0\n if m == 0:\n return 0\n n = abs(n)\n while n != 0:\n temp = n % 10\n if temp % m == 0:\n result += 1\n n = n // 10\n return result", "def gatherDivisors(number): # prvni string ve funkci je comment; \"\"\" znamenam ze je na vic radek\n\tdivisors = []\n\tfor div in range(1, number + 1): # range vyhodi vse od jedne az do number\n\t\tif number % div == 0:\n\t\t\tdivisors.append(div)\n\treturn divisors", "def test_div():\n l = [1, 2, 3, 4]\n assert s7.div(*l) == 1 / 2 / 3 / 4\n assert s7.div(100, 20) == 5\n assert s7.div(100.0, 20) == 5.0\n assert s7.div(100, 20.0) == 5.0", "def sum_divisibles(limit):\n res = [x for x in range(limit) if x % 3 == 0 or x % 5 == 0]\n return sum(res)", "def count_factors(n):\n i, total= 1, 0\n while i <= n:\n if n % i == 0:\n total += 1\n i += 1\n return total", 
"def findDivisors(num1, num2):\n divisors = (1,)\n for i in range(2, (min(num1, num2) + 1)):\n if num1 % i == 0 and num2 % i == 0:\n divisors += (i,)\n return divisors", "def testListDivide():\n #a\n numbers = [1,2,3,4,5]\n expected = 2\n \n try:\n assert listDivide(numbers) == expected\n except AssertionError:\n raise ListDivideException(\"Test Failed\")\n \n \n #b\n numbers = [2,4,6,8,10]\n expected = 5\n \n try:\n assert listDivide(numbers) == expected\n except AssertionError:\n raise ListDivideException(\"Test Failed\") \n \n #c\n numbers = [30, 54, 63, 98, 100]\n divide = 10\n expected = 2\n \n try:\n assert listDivide(numbers, divide) == expected\n except AssertionError:\n raise ListDivideException(\"Test Failed\") \n \n #d\n numbers = []\n expected = 0\n \n try:\n assert listDivide(numbers) == expected\n except AssertionError:\n raise ListDivideException(\"Test Failed\") \n \n #e\n numbers = [1, 2, 3, 4, 5]\n divide = 1\n expected = 5\n \n try:\n assert listDivide(numbers, divide) == expected\n except AssertionError:\n raise ListDivideException(\"Test Failed\")", "def is_divisible(inpt:int, i:int) -> bool:\n return inpt%i == 0", "def is_divisible(inpt:int, i:int) -> bool:\n return inpt%i == 0", "def is_divisible(num, n):\n return num % n == 0", "def num_divs(self):\n return len(self.q(css='div.test').results)", "def divisori(n):\n div=set()\n for i in range(1,int(n**0.5+1)):\n if n%i==0:\n div.add(int(n/i))\n div.add(i)\n return sorted(div)", "def divisor_lister(num):\n if num <= 0:\n raise ValueError('num must be a positive, non-zero number')\n\n divisors = []\n for possible_divisor in range(2, num-1):\n if num % possible_divisor == 0:\n divisors.append(possible_divisor)\n\n # 1 and num itself are divisors so throw them in there\n divisors.append(1)\n divisors.append(num)\n divisors.sort()\n return divisors", "def divisible(a, b):\n return not a % b", "def divisors(number: int) -> Set[int]:\n\n if number == 0:\n return {0}\n divisor = 2\n while divisor * divisor <= number:\n if number % divisor == 0:\n smaller_result = divisors(number // divisor)\n multiplied_result = {d * divisor for d in smaller_result}\n\n return smaller_result | multiplied_result\n divisor = divisor + 1\n\n return {1, number}", "def divisors(n: int) -> list:\n # iterate through every number <= n/2 and check whether the number is a divisor\n # append to list if not in list\n # in the end, append the number\n divs = [n]\n for i in range(1, n//2 + 1):\n if n % i == 0:\n divs.append(i)\n return divs", "def count_evens(L):\n result = 0\n for x in L:\n if x%2 == 0:\n result = result + 1\n return result", "def average(numbers):\n numbers_sum = 0\n numbers_count = 0\n for number in numbers:\n numbers_sum += number\n numbers_count += 1\n\n return numbers_sum / numbers_count", "def perfect_num(number):\n new_list=[]\n for indx in range(1,number):\n if number % indx==0:\n new_list.append(indx)\n total=sum(new_list)\n if total==number:\n return True\n else:\n return False", "def count_evens(l):\n evens = []\n c = 0\n for number in l:\n if number % 2 == 0:\n c += 1\n return c", "def factors(n):\n nfactors = 0 # the number of factors of n\n for divisor in range(1, n+1): # divisors: {1,2,3,4...,n}\n if n%divisor == 0: # divides with no remainder\n nfactors += 1 # i.e. 
one new factor found\n return nfactors", "def find_factors(num):\n factors = set()\n i = 1\n while i*i < num:\n if num % i == 0:\n factors.add(i)\n factors.add(int(num/i))\n i+=1\n factors = list(factors)\n factors.sort()\n return factors", "def division_algorithm(n):\n assert n < 1000\n decimals = []\n dividend = 1\n divisor = n\n counter = 0\n repeating, repeating_length = False, 0\n while dividend != 0 and not repeating:\n dividend = dividend * 10\n decimals.append(dividend // divisor)\n dividend = dividend % divisor\n counter += 1\n repeating, repeating_length = is_repeating(decimals)\n if repeating:\n counter = repeating_length\n return repeating, counter", "def get_divisors_with_parity_check(num: int) -> list:\n all_divisors = []\n # if number is odd, increment by 2 because don't have to check evens\n increment = 2 if num % 2 == 1 else 1\n\n for possible_divisor in range(1, math.floor(num / 2) + 1, increment):\n if num % possible_divisor == 0:\n all_divisors.append(possible_divisor)\n return all_divisors", "def div(num1, num2):\n return num1 / num2", "def div(num1, num2):\n return num1 / num2", "def test_cases(self):\n case_one = math_helpers.num_divisors(1)\n self.assertEqual(case_one, 1)\n\n case_two = math_helpers.num_divisors(10)\n self.assertEqual(case_two, 4)\n\n case_three = math_helpers.num_divisors(6930)\n self.assertEqual(case_three, 48)" ]
[ "0.82095575", "0.8028148", "0.7676638", "0.724413", "0.7203808", "0.6955737", "0.6939475", "0.6889885", "0.6795731", "0.6774087", "0.6745991", "0.6704762", "0.66760755", "0.66736186", "0.6646036", "0.65987384", "0.6588523", "0.6577694", "0.6576061", "0.6554679", "0.64939314", "0.6487798", "0.648749", "0.64867496", "0.6440039", "0.64290667", "0.6417297", "0.6391014", "0.63629925", "0.6293372", "0.62753785", "0.62738156", "0.62730074", "0.6251308", "0.62457114", "0.62347996", "0.6193436", "0.61628693", "0.6154936", "0.61194354", "0.60913664", "0.6089427", "0.6072354", "0.60513204", "0.6047879", "0.60361385", "0.6035536", "0.6018641", "0.60143554", "0.6010721", "0.5996156", "0.59792453", "0.5961359", "0.5957594", "0.5947149", "0.5944434", "0.5937499", "0.5932532", "0.59298253", "0.59219176", "0.59163725", "0.5915535", "0.5910087", "0.59087974", "0.59084624", "0.5897865", "0.5872371", "0.587192", "0.5869713", "0.5864348", "0.58581734", "0.5855075", "0.58537", "0.5848702", "0.5844007", "0.5843559", "0.58419335", "0.5836459", "0.5806218", "0.57953936", "0.578582", "0.578582", "0.5774065", "0.57667553", "0.57566303", "0.5750457", "0.5740777", "0.5740359", "0.57310015", "0.57219416", "0.57136303", "0.570716", "0.5690586", "0.56766176", "0.5675487", "0.56747377", "0.5671815", "0.56709325", "0.56709325", "0.56693983" ]
document_score: 0.8605654
document_rank: 0
This function tests the listDivide function.
def testListDivide():
    assert listDivide([1,2,3,4,5]) == 2
    assert listDivide([2,4,6,8,10]) == 5
    assert listDivide([30, 54, 63,98, 100], divide = 10) == 2
    assert listDivide([]) == 0
    assert listDivide([1,2,3,4,5], 1) == 5
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testListDivide():\n listDivide([1, 2, 3, 4, 5])\n listDivide([2, 4, 6, 8, 10])\n listDivide([30, 54, 63, 98, 100], divide=10)\n listDivide([])\n listDivide([1, 2, 3, 4, 5], 1)", "def testListDivide():\n #a\n numbers = [1,2,3,4,5]\n expected = 2\n \n try:\n assert listDivide(numbers) == expected\n except AssertionError:\n raise ListDivideException(\"Test Failed\")\n \n \n #b\n numbers = [2,4,6,8,10]\n expected = 5\n \n try:\n assert listDivide(numbers) == expected\n except AssertionError:\n raise ListDivideException(\"Test Failed\") \n \n #c\n numbers = [30, 54, 63, 98, 100]\n divide = 10\n expected = 2\n \n try:\n assert listDivide(numbers, divide) == expected\n except AssertionError:\n raise ListDivideException(\"Test Failed\") \n \n #d\n numbers = []\n expected = 0\n \n try:\n assert listDivide(numbers) == expected\n except AssertionError:\n raise ListDivideException(\"Test Failed\") \n \n #e\n numbers = [1, 2, 3, 4, 5]\n divide = 1\n expected = 5\n \n try:\n assert listDivide(numbers, divide) == expected\n except AssertionError:\n raise ListDivideException(\"Test Failed\")", "def test_list_int(self):\n result = div(2, 4)\n self.assertEqual(result, 0.5)", "def test_divide(self):\n self.assertEqual(2, divide(6, 3))\n self.assertEqual(2.5, divide(5, 2))", "def listDivide(numbers, divide=2):\n newList = []\n for i in numbers:\n if i % divide == 0:\n newList.append(i)\n return len(newList)", "def divide_list(ld, division):\n buckets = []\n current = []\n for obj in ld:\n if len(current) < division:\n current.append(obj)\n else:\n buckets.append(current)\n current = [obj]\n if len(current) > 0:\n buckets.append(current)\n return buckets", "def test_div():\n l = [1, 2, 3, 4]\n assert s7.div(*l) == 1 / 2 / 3 / 4\n assert s7.div(100, 20) == 5\n assert s7.div(100.0, 20) == 5.0\n assert s7.div(100, 20.0) == 5.0", "def test_divide(self):\n self.assertEqual(2, foo.divide(6, 3))\n self.assertEqual(2.5, foo.divide(5, 2))", "def test_divide(self):\n print \"divide\"\n self.assertEqual(2, divide(6, 3))\n self.assertEqual(2.5, divide(5, 2))", "def divideList(L):\n for x in range(len(L)):\n L[x] = L[x]/100.0\n return L", "def test_dividing(self):\n divider = Divider()\n\n for i in range(-10, 10):\n for j in range(-10, 10):\n if j != 0:\n self.assertEqual(i/j, divider.calc(j, i))", "def seperate_list(list, division_part):\n avg = len(list) / float(division_part)\n out = []\n last = 0.0\n\n while last < len(list):\n out.append(list[int(last):int(last + avg)])\n last += avg\n return out", "def listDivide(numbers, divide=2): \n counter = 0\n for num in numbers:\n if num % divide == 0:\n counter+=1\n return counter", "def test_splitlist():\n lst = [4, 2, 3, 1, 6, 7]\n lt, pi, gt = splitlist(lst)\n if lt == [2, 3, 1] and pi == 4 and gt == [6, 7]:\n print(\"test splitlist OK!\")\n else:\n print(\"test splitlist Failed!\")", "def test_divide_success(self):\n with self.assertNoLogs():\n divide_by(10, 2)", "def listDivide(numbers, divide = 2):\n divisible_count = 0\n\n for i in numbers:\n if i % divide == 0:\n divisible_count += 1\n return divisible_count", "def test_split(range_size, partition_size):\n dump = Mock()\n\n iterable = list(range(range_size))\n\n list(_split(partition_size=partition_size, dump=dump, iterable=iterable))\n expected_call_count = (range_size // partition_size) + int(bool(range_size % partition_size))\n\n assert dump.call_count == expected_call_count", "def test_list_int(self):\n\n result = get_avg([0,0,0,0])\n self.assertEqual(result, ZeroDivisionError)", "def 
list_division(my_list_1, my_list_2, list_length):\n\n res_1 = []\n res = 0\n for n in range(0, list_length):\n try:\n res = my_list_1[n] / my_list_2[n]\n res_1.append(res)\n except ZeroDivisionError:\n print(\"division by 0\")\n res_1.append(0)\n except TypeError:\n print(\"wrong type\")\n res_1.append(0)\n except IndexError:\n print(\"out of range\")\n res_1.append(0)\n finally:\n pass\n return res_1", "def test_divide(self):\n\n result = random.randint(2, 10)\n\n b = random.randint(100, 1000)\n a = result * b\n\n path = \"/divide/{}/{}\".format(a, b)\n\n response = self.get_response(path)\n self.assertEqual(200, response.getcode())\n\n self.assertIn(str(result).encode(), response.read())", "def divide_list(input_list, n):\n\n avg = len(input_list) / float(n)\n last = 0.0\n divided = []\n\n while last < len(input_list):\n divided.append(input_list[int(last):int(last + avg)])\n last += avg\n\n return divided", "def divide(numbers):\n \n result = numbers[0]\n for n in numbers[1:]:\n result = result / n\n return result", "def verifica_element_divide_lista(numar, lista_divizori):\n for i in lista_divizori:\n if i == 0:\n return False\n if numar % i != 0:\n return False\n return True", "def div(a, x):\n return [a[i]/x for i in range(2)]", "def test_four_divided_by_two():\n assert divide(4, 2) == 2", "def test_list_group(self):\n pass", "def test_scalar_division(self):\n\n a1 = tuples.Tuple([\"a\", \"b\", \"c\", \"d\"], 1, -2, 3, -4)\n\n a2 = a1 / 2\n\n self.assertEqual(a2,\n tuples.Tuple([\"a\", \"b\", \"c\", \"d\"], 0.5, -1, 1.5, -2))", "def divide(a, b):\n return a / b", "def _divide_pred(pred):\n if type(pred) == list:\n fake = []\n real = []\n for p in pred:\n fake.append([tensor[:tensor.size(0) // 2] for tensor in p])\n real.append([tensor[tensor.size(0) // 2:] for tensor in p])\n else:\n fake = pred[:pred.size(0) // 2]\n real = pred[pred.size(0) // 2:]\n\n return fake, real", "def test_list(self):\n pass", "def test_list(self):\n pass", "def divide(numbers):\n quot = numbers[0]\n for i in numbers[1:]:\n quot = quot / i\n return quot", "def test_get_list(self):\n pass", "def test_div(self):\n newvalues= Fraction(7,10)/Fraction(4,5)\n fraction1 = Fraction(newvalues[0],newvalues[1])\n self.assertEqual(str(fraction1),\"35/40\")", "def test_three_divided_by_nothing():\n assert divide(3) == 1", "def divide(self, a, b):\n return a / b", "def check_divisions(self, terms):\n start = 0\n while 1:\n try:\n index = terms.index('/', start)\n if terms[index - 1] % terms[index + 1]:\n raise DecimalDivisionError\n start = index + 1\n except ValueError:\n break", "def test_div():\n assert_equal(Vector(4.0, 1.0) / 2.0, Vector(2.0, 0.5))", "def div_value(self, lv, rv):", "def test_div():\n\n assert div(9, 2) == pytest.approx(4.5)", "def test_div():\n\n assert div(9, 2) == pytest.approx(4.5)", "def test_calculator_divide():\n assert Calculator.divide_numbers(4, 2) == 2", "def divideSeries(requestContext, dividendSeriesList, divisorSeries):\n if len(divisorSeries) != 1:\n raise ValueError(\"divideSeries second argument must reference exactly 1 series\")\n\n divisorSeries = divisorSeries[0]\n results = []\n\n for dividendSeries in dividendSeriesList:\n name = \"divideSeries(%s,%s)\" % (dividendSeries.name, divisorSeries.name)\n bothSeries = (dividendSeries, divisorSeries)\n step = reduce(lcm,[s.step for s in bothSeries])\n\n for s in bothSeries:\n s.consolidate( step / s.step )\n\n start = min([s.start for s in bothSeries])\n end = max([s.end for s in bothSeries])\n end -= (end - start) % step\n\n values = ( 
safeDiv(v1,v2) for v1,v2 in izip(*bothSeries) )\n\n quotientSeries = TimeSeries(name, start, end, step, values)\n quotientSeries.pathExpression = name\n results.append(quotientSeries)\n\n return results", "def test_list_int(self):\n result = get_avg([])\n self.assertEqual(result, 0)", "def divide(numbers):\n counter = 0\n for num in numbers:\n counter /= num\n return counter", "def test_get_list8(self):\n pass", "def split(data):\n data = sorted(data, key=lambda x: x[0])\n half = len(data)//2\n return (data[half][0]+data[half + 1][0])/2\n print(data)", "def test_split(self):\n array = np.arange(1000)\n df = DataFlow.from_numpy(array)\n\n # first, test throw errors on invalid arguments\n def assert_invalid_arg(**kwargs):\n with self.assertRaises(ValueError):\n df.split(**kwargs)\n assert_invalid_arg(partitions=[])\n assert_invalid_arg(partitions=[1000, 1])\n assert_invalid_arg(partitions=[1000, -1])\n assert_invalid_arg(partitions=[1, 2])\n assert_invalid_arg(portions=[])\n assert_invalid_arg(portions=[1.0, 0.1])\n assert_invalid_arg(portions=[1.0, -1])\n assert_invalid_arg(portions=[0.1, 0.2])\n\n # next, test split without shuffling\n df1, df2, df3 = df.split(partitions=[700, 200, 100])\n np.testing.assert_array_equal(df1.all()[0], array[:700])\n np.testing.assert_array_equal(df2.all()[0], array[700:900])\n np.testing.assert_array_equal(df3.all()[0], array[900:1000])\n df1, df2, df3 = df.split(portions=[-1, 0.2, 0.1])\n np.testing.assert_array_equal(df1.all()[0], array[:700])\n np.testing.assert_array_equal(df2.all()[0], array[700:900])\n np.testing.assert_array_equal(df3.all()[0], array[900:1000])\n\n # finally, test split with shuffling\n df1, df2 = df.split(portions=[0.5, -1], shuffle=True)\n self.assertEquals(len(df1), 500)\n self.assertEquals(len(df2), 500)\n df_array = np.concatenate([df1.all()[0], df2.all()[0]], axis=0)\n self.assertFalse(np.all(df_array == array))\n np.testing.assert_array_equal(np.sort(df_array), array)", "def test_01_pass(self):\n \n print(arr / x)", "def divide(self,*datas):\n\t\tdatas = list(datas)\n\t\tresult = datas.pop(0)\n\t\tfor data in datas :\n\t\t\tresult /= data\n\n\t\treturn result", "def test_div():\n with pytest.raises(ValueError) as __:\n value = 42\n num_a = param.Integer(value=value)\n assert num_a.value == value\n\n num_a.value /= 2", "def test_partition(self):\n # one swap at the end\n list = [5, 6, 7, 8, 9, 2]\n partition(list, 0, 5)\n # assert list == [2, 6, 7, 8, 9, 5] # should be improved in future", "def test_chunked():\n examples = list(range(10))\n assert list(chunked(iter(examples), 0)) == examples\n assert list(chunked(iter(examples), 1)) == [[i] for i in examples]\n assert list(chunked(iter(examples), 2)) == [[0,1], [2,3], [4,5], [6,7], [8,9]]\n assert list(chunked(iter(examples), 3)) == [[0,1,2], [3,4,5], [6,7,8], [9]]\n assert list(chunked(iter(examples), 4)) == [[0,1,2,3], [4,5,6,7], [8,9]]\n assert list(chunked(iter(examples), 5)) == [[0,1,2,3,4], [5,6,7,8,9]]\n assert list(chunked(iter(examples), 6)) == [[0,1,2,3,4,5], [6,7,8,9]]\n assert list(chunked(iter(examples), 7)) == [[0,1,2,3,4,5,6], [7,8,9]]\n assert list(chunked(iter(examples), 8)) == [[0,1,2,3,4,5,6,7], [8,9]]\n assert list(chunked(iter(examples), 9)) == [[0,1,2,3,4,5,6,7,8], [9]]\n assert list(chunked(iter(examples), 10)) == [examples]\n assert list(chunked(iter(examples), 11)) == [examples]", "def test_evenly_divisable_row_1(self, day2part2_data):\n result = day2.get_and_divide_evenly_divisable(day2part2_data[0])\n assert result == 4", "def 
test_split_string(self):\n self.assertEqual(('1-4', 14), split_string('1-4/14'))", "def split_list(l, ratio=0.75):\n i = int(ratio * len(l))\n return l[:i], l[i:]", "def test_divide3(self):\n n = Rational(5, 2)\n d = Rational(5, 2)\n result = n.__div__(d)\n self.assertEqual(result.n, 1)\n self.assertEqual(result.d, 1)", "def divide(x, y):\n\n return x / y", "def test_list_root(self):\n expected = [\"search1\", \"search2\"]\n result = [r[0] for r in self.path_translator.list_directory(\n \"/{0}\".format(self.search.instance))]\n self.assertEqual(result, expected)", "def divide(x, y):\n return x / y", "def divide(x, y):\n return x / y", "def divide(x, y):\n return x / y", "def test_divide(vec3_fixture):\n scalar = vec3_fixture / 10.0\n np.testing.assert_allclose(scalar.x1, vec3_fixture.x1 / 10.0)\n np.testing.assert_allclose(scalar.x2, vec3_fixture.x2 / 10.0)\n np.testing.assert_allclose(scalar.x3, vec3_fixture.x3 / 10.0)\n\n vector = vec3_fixture / Vec3([10, 100, 1000])\n np.testing.assert_allclose(vector.x1, vec3_fixture.x1 / 10.0)\n np.testing.assert_allclose(vector.x2, vec3_fixture.x2 / 100.0)\n np.testing.assert_allclose(vector.x3, vec3_fixture.x3 / 1000.0)", "def test_pre_order_list(self):\n _expected_list = [23, 5, 13, 57, 103]\n\n _output_list = []\n\n # Call pre_order_list to test\n pre_order_list(self.root, _output_list)\n\n # We just want to test the values\n # so make a list from the list of objects\n _pre_order_output = [x.get_value() for x in _output_list]\n\n assert len(_expected_list) == len(_output_list)\n assert _expected_list == _pre_order_output", "def divisible_by(array, divisor):\n return_list = list()\n for i in array:\n if i % divisor == 0:\n return_list.append(i)\n return return_list", "def _items_divide(self, numerator_data, denominator_data):\n items = {}\n if numerator_data['items'] is None:\n items = None\n else:\n for n in numerator_data['items']:\n # TODO what should we do when a matching item isn't found?\n matching_d = next((item for item in denominator_data['items'] if\n item['group'] == n['group']),\n {'group': '_unknown', 'value': None})\n if matching_d['value'] is None or n['value'] is None:\n divided = None\n else:\n divided = n['value'] / matching_d['value']\n\n # item = dict({'group': n['group'],\n # 'value': divided})\n items[n['group']] = divided\n\n return {'items': items, 'grouping': numerator_data['grouping'],\n 'data_id': numerator_data['data_id']}", "def divide(num1, num2):\n return num1 / num2", "def divide(numerator, denominator):\n ensure_divisibility(numerator, denominator)\n return numerator // denominator", "def test_division():\n assert calculator.divide(10, 2) == 5\n assert calculator.divide(10.0, 2.0) == 5.0\n assert calculator.divide(10, -2) == -5\n assert calculator.divide(10.0, -2.0) == -5.0\n\n with pytest.raises(ZeroDivisionError) as err:\n calculator.divide(1, 0)\n assert str(err.value) == \"division by zero\"\n\n with pytest.raises(ZeroDivisionError) as err:\n calculator.divide(1.0, 0.0)\n assert str(err.value) == \"float division by zero\"", "def div(a,b):\r\n return a/b", "def test_split():\n nmrs = NIFTI_MRS(test_data_split)\n\n # Error testing\n # Wrong dim tag\n with pytest.raises(ValueError) as exc_info:\n nmrs_tools.split(nmrs, 'DIM_EDIT', 1)\n\n assert exc_info.type is ValueError\n assert exc_info.value.args[0] == \"DIM_EDIT not found as dimension tag.\"\\\n \" This data contains ['DIM_COIL', 'DIM_DYN', None].\"\n\n # Wrong dim index (no dim in this data)\n with pytest.raises(ValueError) as exc_info:\n 
nmrs_tools.split(nmrs, 6, 1)\n\n assert exc_info.type is ValueError\n assert exc_info.value.args[0] == \"Dimension must be one of 4, 5, or 6 (or DIM_TAG string).\"\\\n \" This data has 6 dimensions,\"\\\n \" i.e. a maximum dimension value of 5.\"\n\n # Wrong dim index (too low)\n with pytest.raises(ValueError) as exc_info:\n nmrs_tools.split(nmrs, 3, 1)\n\n assert exc_info.type is ValueError\n assert exc_info.value.args[0] == \"Dimension must be one of 4, 5, or 6 (or DIM_TAG string).\"\\\n \" This data has 6 dimensions,\"\\\n \" i.e. a maximum dimension value of 5.\"\n\n # Wrong dim index type\n with pytest.raises(TypeError) as exc_info:\n nmrs_tools.split(nmrs, [3, ], 1)\n\n assert exc_info.type is TypeError\n assert exc_info.value.args[0] == \"Dimension must be an int (4, 5, or 6) or string (DIM_TAG string).\"\n\n # Single index - out of range low\n with pytest.raises(ValueError) as exc_info:\n nmrs_tools.split(nmrs, 'DIM_DYN', -1)\n\n assert exc_info.type is ValueError\n assert exc_info.value.args[0] == \"index_or_indicies must be between 0 and N-1,\"\\\n \" where N is the size of the specified dimension (16).\"\n\n # Single index - out of range high\n with pytest.raises(ValueError) as exc_info:\n nmrs_tools.split(nmrs, 'DIM_DYN', 64)\n\n assert exc_info.type is ValueError\n assert exc_info.value.args[0] == \"index_or_indicies must be between 0 and N-1,\"\\\n \" where N is the size of the specified dimension (16).\"\n\n # List of indicies - out of range low\n with pytest.raises(ValueError) as exc_info:\n nmrs_tools.split(nmrs, 'DIM_DYN', [-1, 0, 1])\n\n assert exc_info.type is ValueError\n assert exc_info.value.args[0] == \"index_or_indicies must have elements between 0 and N,\"\\\n \" where N is the size of the specified dimension (16).\"\n\n # List of indicies - out of range high\n with pytest.raises(ValueError) as exc_info:\n nmrs_tools.split(nmrs, 'DIM_DYN', [0, 65])\n\n assert exc_info.type is ValueError\n assert exc_info.value.args[0] == \"index_or_indicies must have elements between 0 and N,\"\\\n \" where N is the size of the specified dimension (16).\"\n\n # List of indicies - wrong type\n with pytest.raises(TypeError) as exc_info:\n nmrs_tools.split(nmrs, 'DIM_DYN', '1')\n\n assert exc_info.type is TypeError\n assert exc_info.value.args[0] == \"index_or_indicies must be single index or list of indicies\"\n\n # Functionality testing\n\n out_1, out_2 = nmrs_tools.split(nmrs, 'DIM_DYN', 7)\n assert out_1[:].shape == (1, 1, 1, 4096, 4, 8)\n assert out_2[:].shape == (1, 1, 1, 4096, 4, 8)\n assert np.allclose(out_1[:], nmrs[:, :, :, :, :, 0:8])\n assert np.allclose(out_2[:], nmrs[:, :, :, :, :, 8:])\n assert out_1.hdr_ext == nmrs.hdr_ext\n assert out_1.hdr_ext == nmrs.hdr_ext\n assert np.allclose(out_1.getAffine('voxel', 'world'), nmrs.getAffine('voxel', 'world'))\n assert np.allclose(out_2.getAffine('voxel', 'world'), nmrs.getAffine('voxel', 'world'))\n\n out_1, out_2 = nmrs_tools.split(nmrs, 'DIM_DYN', [0, 4, 15])\n assert out_1[:].shape == (1, 1, 1, 4096, 4, 13)\n assert out_2[:].shape == (1, 1, 1, 4096, 4, 3)\n test_list = np.arange(0, 16)\n test_list = np.delete(test_list, [0, 4, 15])\n assert np.allclose(out_1[:], nmrs[:][:, :, :, :, :, test_list])\n assert np.allclose(out_2[:], nmrs[:][:, :, :, :, :, [0, 4, 15]])\n\n # Split some synthetic data with header information\n nhdr_1 = gen_nifti_mrs(\n np.ones((1, 1, 1, 10, 4), dtype=complex),\n 1 / 1000,\n 100.0,\n '1H',\n dim_tags=['DIM_DYN', None, None])\n\n nhdr_1.set_dim_tag(\n 'DIM_DYN',\n 'DIM_DYN',\n 
header={'RepetitionTime': [1, 2, 3, 4]})\n\n out_1, out_2 = nmrs_tools.split(nhdr_1, 'DIM_DYN', 1)\n assert out_1.shape == (1, 1, 1, 10, 2)\n assert out_1.hdr_ext['dim_5'] == 'DIM_DYN'\n assert out_1.hdr_ext['dim_5_header'] == {'RepetitionTime': [1, 2]}\n assert out_2.hdr_ext['dim_5_header'] == {'RepetitionTime': [3, 4]}", "def GetDivisions(self):\n ...", "def test_get_parts(self):\n pass", "def mod_lista_oglindit(lista_1, lista_divizori):\n lista_finala = []\n for element in lista_1:\n if verifica_element_divide_lista(element, lista_divizori):\n oglindit = get_oglindit(element)\n lista_finala.append(oglindit)\n else:\n lista_finala.append(element)\n return lista_finala", "def split_data(self):\r\n print('split data')\r\n np.random.shuffle(self.dataList)\r\n l = len(self.dataList)/self.fold\r\n self.dataList = [self.dataList[i*l: (i+1)*l] for i in range(self.fold-1)] + [self.dataList[(self.fold-1)*l:]] # each element in the list is splitted data list\r", "def test_list_field():", "def divide_chunks(a_list, n):\n return [a_list[i:i + n] for i in range(0, len(a_list), n)]", "def divide(n1, n2):\n return n1 / n2", "def test_half_case(self):\n steps = save_divide(np.ones(2), 2 * np.ones(2))\n np.testing.assert_equal(steps, 0.5 * np.ones(2))", "def test_small_cases(self):\n case_one = list(math_helpers.divisors(1))\n self.assertEqual(case_one, [1])\n\n case_two = math_helpers.divisors(2)\n self.assertCountEqual(case_two, [1, 2])\n\n case_three = math_helpers.divisors(10)\n self.assertCountEqual(case_three, [1, 2, 5, 10])\n\n case_four = math_helpers.divisors(21)\n self.assertCountEqual(case_four, [1, 21, 3, 7])\n\n case_five = math_helpers.divisors(37)\n self.assertCountEqual(case_five, [1, 37])\n\n case_six = math_helpers.divisors(128)\n self.assertCountEqual(case_six, [1, 2, 4, 8, 16, 32, 64, 128])\n\n case_seven = math_helpers.divisors(11029)\n self.assertCountEqual(case_seven, [1, 41, 269, 11029])\n\n case_eight = math_helpers.divisors(6930)\n self.assertCountEqual(case_eight, [1, 2, 3, 5, 6, 7, 9, 10, 11, 14, 15, 18, 21, 22,\n 30, 33, 35, 42, 45, 55, 63, 66, 70, 77, 90, 99,\n 105, 110, 126, 154, 165, 198, 210, 231, 315, 330,\n 385, 462, 495, 630, 693, 770, 990, 1155, 1386,\n 2310, 3465, 6930])", "def test_division():\n calc = Calculator()\n # Calls the division function from main.py and inputs static number.\n result = calc.division(6, 3)\n # Assert that the results are correct\n assert result == 2", "def test_list_partition(self, cell_state_cls_mock):\n cell_state_cls_mock.return_value = self.cell_state\n\n state_api = state.API()\n\n self.assertEqual(\n state_api.list('foo.bar#000000000[1234567]', True, 'part1'),\n [\n {'host': 'baz1', 'state': 'running',\n 'name': 'foo.bar#0000000001'},\n {'host': 'baz1', 'name': 'foo.bar#0000000002', 'oom': False,\n 'when': '123456789.2', 'state': 'finished', 'exitcode': 0},\n {'host': 'baz1', 'name': 'foo.bar#0000000003', 'oom': False,\n 'when': '123456789.3', 'state': 'finished', 'exitcode': 255},\n {'host': 'baz1', 'name': 'foo.bar#0000000004', 'oom': False,\n 'signal': 11, 'when': '1234567890.4', 'state': 'finished'}\n ]\n )", "def test_divide1(self):\n n = Rational(1, 1)\n d = Rational(0, 1)\n with self.assertRaises(ZeroDivisionError):\n \"\"\"Divide by zero\"\"\"\n n.__div__(d)", "def test_evenly_divisable_row_3(self, day2part2_data):\n result = day2.get_and_divide_evenly_divisable(day2part2_data[2])\n assert result == 2", "def div_sum(data: list) -> int:\n\n def even_quotient(nums: list) -> int:\n \"\"\"Finds the quotient of the only two numbers 
in the list that evennly divide.\"\"\"\n for i in range(len(nums[:-1])):\n for j in range(i + 1, len(nums)):\n if nums[i] % nums[j] == 0:\n return nums[i] // nums[j]\n elif nums[j] % nums[i] == 0:\n return nums[j] // nums[i]\n\n total = 0\n for row in data:\n total += even_quotient(row)\n return total", "def test_listfield(self):\n self.assertEqual(self.scraped.urls, ['http://google.com', 'http://apple.com'])\n self.assertEqual(self.scraped.in_divs, ['Nested'])", "def cell_division(waitlist, celllist, AgEpitope, tnow, mut_list, RNs):\n for cell in celllist:\n # get list of 0 to 2 daughters\n dlist, mut_list = divide(cell, AgEpitope, tnow, mut_list, RNs)\n # add daughters to waitlist\n waitlist = waitlist + dlist\n return waitlist, mut_list", "def test_evenly_divisable_row_2(self, day2part2_data):\n result = day2.get_and_divide_evenly_divisable(day2part2_data[1])\n assert result == 3", "def ghetto_split(list_, chunk_size=100):\n logging.debug(f\"Splitting list of {len(list_)} length, chunk size = {chunk_size}\")\n split_lists = []\n for i in range(0,len(list_),chunk_size):\n split_lists.append(list_[i:i+chunk_size])\n logging.debug(f\"List has been split into {len(split_lists)} lists. Total num of elements in split lists is {sum([len(i) for i in split_lists])}\")\n return split_lists", "def test_floordiv(self):\n a = Vector(3, 5)\n c = a // (1, 2)\n assert c.x == 3\n assert c.y == 2", "def test_list_directory(self):\n import os\n stat_f = lambda x: FakeStat(33188, 16398844, 65024L, 1, 1049, 1049, 0,\n 1409046988, 1409046988, 1409046988)\n os.stat = stat_f\n os.lstat = stat_f\n expected = [\"subdir1\", \"subdir2\"]\n result = [r[0] for r in self.path_translator.list_directory(\n \"/{0}/search1/tmp/study\".format(self.search.instance))]\n self.assertEqual(result, expected)", "def test_old_div(self):\n self.assertEqual(old_div(1, 2), 0)\n self.assertEqual(old_div(2, 2), 1)\n self.assertTrue(isinstance(old_div(2, 2), int))\n\n self.assertEqual(old_div(3, 2), 1)\n self.assertTrue(isinstance(old_div(3, 2), int))\n\n self.assertEqual(old_div(3., 2), 1.5)\n self.assertTrue(not isinstance(old_div(3., 2), int))\n\n self.assertEqual(old_div(-1, 2.), -0.5)\n self.assertTrue(not isinstance(old_div(-1, 2.), int))\n\n with self.assertRaises(ZeroDivisionError):\n old_div(0, 0)\n with self.assertRaises(ZeroDivisionError):\n old_div(1, 0)", "def test_error_case(self):\n with self.assertRaises(ValueError):\n list(math_helpers.divisors(0))\n\n with self.assertRaises(ValueError):\n list(math_helpers.divisors(-2))\n\n with self.assertRaises(ValueError):\n list(math_helpers.divisors(3.5))", "def arithmeticDivision2(numbers,str_result,target):\n success = False\n str_temp=\"\"\n for x in numbers:\n if x%target==0:\n target2=x//target\n numbers.remove(x)\n numbers,str_temp,success=arithmeticBasic(numbers,str_temp,target2)\n if success:\n str_result+=str(x)+'/('+str_temp+')'\n numbers.clear()\n numbers.append(target)\n break\n else:\n numbers.insert(0,x)\n return numbers,str_result,success", "def split_list(list_in,number_of_pieces):\n output_length = len(list_in) / number_of_pieces\n output = []\n piece = []\n counter = 0\n for list_item in list_in:\n counter += 1\n piece.append(list_item)\n if counter >= output_length:\n output.append(piece)\n counter = 0\n piece = []\n # Make sure nothing is missed\n if len(piece) > 0:\n output.append(piece)\n return output", "def train_dev_test_split(object_list, split_ratio=(0.8, 0.1, 0.1), seed=2018):\n train_size, dev_size, test_size = split_ratio\n # TODO: add stratify 
option for split on pair level, investigate same option for document/sentence level?\n train, tmp = train_test_split(object_list, random_state=seed, train_size=train_size)\n dev, test = train_test_split(tmp, random_state=seed, test_size=test_size / (test_size + dev_size))\n\n if isinstance(train, pd.DataFrame) and isinstance(dev, pd.DataFrame) and isinstance(test, pd.DataFrame):\n return train, dev, test\n else:\n # TODO: add checks that train, dev, test are lists of documents or sentences\n train_examples = pd.concat([elem.get_examples() for elem in train]).reset_index(drop=True)\n dev_examples = pd.concat([elem.get_examples() for elem in dev]).reset_index(drop=True)\n test_examples = pd.concat([elem.get_examples() for elem in test]).reset_index(drop=True)\n\n return train_examples, dev_examples, test_examples", "def test_evaluate_div_expression(self):\n value = self.evaluate_common(\"4M div 2M\")\n self.assertTrue(\n value.type_code == edm.SimpleType.Decimal, \"Expected Decimal\")\n self.assertTrue(value.value == 2, \"Expected 2\")\n value = self.evaluate_common(\"4D div 2M\")\n self.assertTrue(\n value.type_code == edm.SimpleType.Double, \"Expected Double\")\n self.assertTrue(value.value == 2.0, \"Expected 2.0\")\n try:\n value = self.evaluate_common(\"4D div 0\")\n self.fail(\"Division by zero\")\n except odata.EvaluationError:\n pass\n value = self.evaluate_common(\"4F div 2D\")\n self.assertTrue(\n value.type_code == edm.SimpleType.Double, \"Expected Double\")\n self.assertTrue(value.value == 2.0, \"Expected 2.0\")\n value = self.evaluate_common(\"5 div 2L\")\n self.assertTrue(\n value.type_code == edm.SimpleType.Int64, \"Expected Int64\")\n self.assertTrue(value.value == 2, \"Expected 2L\")\n value = self.evaluate_common(\"-5 div 2L\")\n self.assertTrue(\n value.type_code == edm.SimpleType.Int64, \"Expected Int64\")\n self.assertTrue(value.value == -2, \"Expected -2L\")\n try:\n value = self.evaluate_common(\"4 div '2'\")\n self.fail(\"String promotion to int\")\n except odata.EvaluationError:\n pass\n value = self.evaluate_common(\"4 div null\")\n self.assertTrue(\n value.type_code == edm.SimpleType.Int32, \"Expected Int32\")\n self.assertTrue(value.value is None, \"Expected None\")", "def divide(num1, num2):\n divided = num1/num2\n return divided", "def split(list):\r\n \r\n mid = len(list)//2\r\n left = list[:mid]\r\n right = list[mid:]\r\n \r\n return left, right", "def test_divide_by_zero(self):\n with self.assertRaises(ZeroDivisionError):\n with self.assertLogs() as assert_logs:\n divide_by(10, 0)\n self.assertEqual(assert_logs.output[0], 'ERROR:root:Divide by zero!')\n self.assertEqual(assert_logs.output[1], 'ERROR:root:It is really not a good idea.')" ]
[ "0.8978412", "0.8315685", "0.72600734", "0.7163162", "0.6961623", "0.69122976", "0.6898121", "0.68613684", "0.68511623", "0.6756895", "0.6750342", "0.66283035", "0.66096985", "0.65842646", "0.6559477", "0.64508224", "0.6335196", "0.6329726", "0.6322206", "0.6280527", "0.6270623", "0.6246634", "0.62314254", "0.61926675", "0.6124619", "0.6065383", "0.60648453", "0.60194075", "0.60072446", "0.5985423", "0.5985423", "0.598421", "0.5912032", "0.58952904", "0.5892963", "0.5845393", "0.5838011", "0.57953566", "0.5793579", "0.5791643", "0.5791643", "0.579055", "0.57636946", "0.5737099", "0.5731379", "0.572351", "0.57218266", "0.5701944", "0.570183", "0.5700941", "0.5692843", "0.5678719", "0.5666724", "0.56615794", "0.56475246", "0.56338626", "0.5622429", "0.5608283", "0.5606643", "0.5585235", "0.5585235", "0.5585235", "0.55758107", "0.55739266", "0.55638874", "0.55614275", "0.5560954", "0.5551875", "0.5547113", "0.5545656", "0.5536743", "0.5536285", "0.55261153", "0.55258876", "0.5510263", "0.5507779", "0.55061823", "0.5505774", "0.55000466", "0.5491729", "0.54854304", "0.54807246", "0.5476895", "0.5474945", "0.54696053", "0.5458143", "0.54571813", "0.5452228", "0.545141", "0.5435998", "0.54326236", "0.54306835", "0.5425015", "0.5424257", "0.5420331", "0.5420326", "0.5419444", "0.5417058", "0.5408522", "0.54054797" ]
document_score: 0.88921094
document_rank: 1
saves the source content of the webpage to a file
def url_to_file(url):
    try:
        r = get(url)
        print(r.status_code)
        if r.status_code == 200:
            try:
                with open(f'print-{date}.html', 'w') as f:
                    f.write(r.text)
            except UnicodeEncodeError as e:
                print("Unicode error :using encodeing utf-8")
                with open(f'print-{date}.html', 'w', encoding="utf-8") as f:
                    f.write(r.text)
        else:
            print("passing headers")
            headers = {"user-agent":"Edg/87.0.664.66"}
            r = get(url, headers=headers)
            print(r.status_code)
            if r.status_code == 200:
                try:
                    with open(f'print-{date}.html', 'w') as f:
                        f.write(r.text)
                except UnicodeEncodeError as e:
                    print("Unicode error: using encodeing utf-8")
                    with open(f'print-{date}.html', 'w', encoding="utf-8") as f:
                        f.write(r.text)
            else:
                print(f"Unable to send requests {r.status_code}")
        return r
    except Exception as e:
        print("Error occured",e)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_page_as(browser, file_name):\n\n with open(file_name, \"w\") as fout:\n fout.write(browser.find_element_by_tag_name(\"pre\").text)", "def save(self, filename):\n outfile = open(filename, \"w\")\n outfile.write(self.html.encode('utf8'))\n outfile.close()", "def saveToFile(html):\n #print(\"Saving to file.\")\n html += \"\\n\"\n #open necessary files to save\n logFile = open(\"postLog_{0}_{1}.txt\".format(os.path.splitext(path)[0], dateTimeNow), \"a\")\n logFile.write(html)\n logFile.close()\n #print(\"Check Point.\")", "def make_backup(filename, html):\n\n with open(filename, 'wb') as f:\n f.write(html)", "def write(self,out):\n with open( out, \"wb\") as fi:\n fi.write(html.tostring(self.book))", "def save(self):\n html_file = '{}/{}.html'.format(self.web_dir, self.title)\n f = open(html_file, 'wt')\n f.write(self.doc.render())\n f.close()", "def saveHtml(path: str, filename: str, html: str) -> None:\n filepath = os.path.join(path, filename)\n with open(filepath, \"w\") as fileHandle:\n fileHandle.write(html)\n return filepath", "def scrape_to(str, dest):\n dest = path_validate(dest)\n site = urlopen(str)\n site_content = site.read()\n full_path = dest+str.replace('/', '.')+'.txt'\n site_file = open(full_path, 'w')\n site_file.write(site_content)\n site_file.close()\n return", "def write_file(self, contents):\n fd = open(os.path.join(os.path.dirname(__file__),\n 'data', 'test.html'), 'w')\n fd.write(contents)\n fd.close()", "def test_save_specific_webpage(self):\n self.mini_spider_thread.grab_url('http://example.com/savewebpage/saved.txt')\n self.mini_spider_thread.grab_url_success = True\n self.mini_spider_thread.save_specific_webpage('http://example.com/savewebpage/saved.txt',\n self.mini_spider_thread.output_directory)\n saved_path = os.path.join(self.mini_spider_thread.output_directory,\n 'http%3A%2F%2Fexample.com%2Fsavewebpage%2Fsaved.txt')\n self.assertTrue(os.path.exists(saved_path))\n with open(saved_path, 'r') as saved_file:\n self.assertEqual(saved_file.read(), 'Saved webpage content.')", "def save(self, url, output):\n\n shutil.copy2(self.get(url), output)", "def save_current_contents(url,update_file):\n r=requests.get(url)\n save_file=update_file+'.original'\n json.dump(r.json(), open(save_file,'w'))\n\n print \"\\nSaved contents of: \\n\\n\\t%s \\n\\nto \\n\\n\\t%s\\n\" % (url,save_file)", "def strToFile(text, web_dir, web_name):\n output = open(web_dir + web_name, \"w\")\n output.write(text)\n output.close()", "def download(self, url):\n try:\n webFile = urllib.urlopen(url)\n localFile = open(self.workdir + \"/\" + url.split('/')[-1], 'w')\n localFile.write(webFile.read())\n webFile.close()\n localFile.close()\n except IOError:\n print(\"could not get url \" + url)", "def save(self):\n f=open(\"{}/{}.html\".format(self.path,self.name),\"w\")\n f.write(\"<html>\\n <head>\\n\")\n for c in self.css:\n f.write(\" <link rel=\\\"Stylesheet\\\" href=\\\"{}\\\" />\\n\".format(c))\n f.write(\" </head>\\n</body>\\n\")\n for line in self.template.split(\"\\n\"):\n f.write(\" {}\\n\".format(line))\n f.write(\" </body>\\n</html>\")\n f.close()", "def write_to_html_file(self, data: str):\n try:\n os.mkdir(\"../\" + self.uri)\n except FileExistsError:\n pass\n\n f = open(\"../\" + self.uri + self.file_name, \"w\")\n f.write(data)\n print(\"[WRITE] written to .html file\")\n f.close()", "def save_content_to_file(content, url):\n try:\n save_content = eval(os.environ[\"URL_SAVE_CONTENT\"])\n save_dest = os.environ[\"URL_SAVE_DEST\"]\n debug(\"SAVING CONTENT TO DISK AT: 
{}\".format(save_dest))\n except Exception as e:\n save_content = False\n\n if not save_content: return\n\n target_dir = save_dest \n debug(\"saving content to {}/{}\".format(target_dir, url))\n try:\n with open(\"{}/{}\".format(target_dir, url), 'w') as url_content:\n url_content.write(content)\n except Exception as e:\n debug(\"Exception saving file to disk: {}\".format(e.message))", "def save2File(self, contents, filename):\n self.setup()\n fullpath = os.path.join(self.output_path, filename)\n f = open(fullpath, 'w')\n f.write(contents) # python will convert \\n to os.linesep\n f.close() # you can omit in most cases as the destructor will call it\n url = \"file://\" + fullpath\n return url", "def htmlSaveFile(self, filename):\n ret = libxml2mod.htmlSaveFile(filename, self._o)\n return ret", "def save_html(self, file_name=None, raw_html=True):\n if raw_html:\n with open(file_name or self.url_obj.file_path, 'wb') as fh:\n fh.write(self.raw_html)\n else:\n self.lxml.getroottree().write(file_name or self.url_obj.file_path, method=\"html\")", "def save_file(self, filename):\r\n \r\n f = open(filename,'w')\r\n f.write(self.body)\r\n f.close", "def dump_html(self):\n l_html = self.m_driver.find_element_by_xpath('//html').get_attribute('outerHTML')\n with open(datetime.datetime.now().strftime('%Y%m%d_%H%M%S.html'), 'w') as f:\n f.write(l_html)", "def download (url):\n path, url = url\n r = requests.get (url, stream = True)\n content = r.text\n #print (content)\n with open (path + '.txt', 'w') as f:\n f.write (content)", "def save_into_html_file(path_html_file: str, response):\n html_file = open(path_html_file, 'w')\n html_file.writelines(response)\n html_file.close()\n\n with zipfile.ZipFile(path_html_file.replace('.html', '.zip'), 'w') as zf:\n zf.write(path_html_file, compress_type=zipfile.ZIP_DEFLATED)\n zf.close()\n os.remove(path_html_file)", "def process_webpage(self, target, output_file, url, embed, selenium):\n\t\t# Build the output file's name\n\t\tself._build_output_file(output_file)\n\t\t# Open the output file and clone the webpage\n\t\twith open(self.output_file_name, \"w\") as output:\n\t\t\tself.collect_source(target, output, url, embed, selenium)", "def print_contents(browser, dest='~/.browser.html'):\n import os\n open(os.path.expanduser(dest), 'w').write(browser.contents)", "def exportHtmlFile(self):\n\n fileName = QtGui.QFileDialog.getSaveFileName(None,\"Save html file\", os.getenv('HOME'))\n if fileName:\n fileName += \".html\"\n #print ((\"Exporting: to \" + fileName))\n filedata = \"<html>\\n<head>\\n<title>\" + self.settings['projectName'] + \"</title>\\n</head>\\n<body>\\n\"\n #filedata += str(self.htmlResults.encode('utf-8'))\n modData = \"\"\n for c in self.htmlResults:\n if ord(c) < 128:\n modData += c\n else:\n modData += \"&#\" + str(ord(c)) + \";\"\n filedata += modData\n filedata += \"</body>\\n</html>\"\n f = open(fileName, 'w')\n f.write(filedata)\n f.close()\n self.log += \"Search Results exported to \" + fileName + \"\\n\"\n QtGui.QMessageBox.information(None, \"Html file Export\", str(fileName) + \" exported\")", "def save(self, path):\n f = open(path, 'w')\n f.write(self.content().encode('utf-8'))\n f.close()", "def save(self, url, crawl_timeout, save_dir):\n try:\n if not save_dir.endswith('/'):\n save_dir = save_dir + '/'\n # replace '/' with '_'\n file_name = save_dir + url.replace('/', '_')\n print file_name\n file = open(file_name, \"w\")\n response = urllib2.urlopen(url, timeout=crawl_timeout)\n file.write(response.read())\n file.close()\n 
self.logger.info(\"get http request url %s succeed\" % url)\n except Exception as e:\n self.logger.info(\n \"get http request url %s failed,reason %s\" %\n (url, e))", "def make_file():\n get_content = input(\"Paste the content for your html file, include your doctype, html tags and header, body etc.\\n\")\n get_name = input(\"what do you want to call your file?\\n\")\n \n new_html_file = open(str(get_name) + '.html', 'w')\n page_content = \"\" + str(get_content) + \"\"\n \n new_html_file.write(page_content)\n new_html_file.close()", "def write_page(soup, fileName):\r\n soup.prettify(formatter='html')\r\n\r\n with open(fileName, 'wb') as f:\r\n f.write(str(soup).encode('utf-8'))", "def download(url, save_as):\n\topen(save_as, 'w').write(urllib2.urlopen(url).read())", "def download_to_file(url, filename):\n with browser_spoof_open(url) as download_conn:\n with open(filename, \"wb\") as out_file:\n shutil.copyfileobj(download_conn, out_file, 1024 * 8)", "def savecontents(contents, pathname):\n _maybe_mkdir(pathname)\n with open(pathname, 'w') as htmlfile:\n htmlfile.write(contents)", "def to_file(self, filename):\n resp = urlopen(self.url)\n self.file_size = self._get_content_length(resp.headers)\n block_size = 8192\n self.bytes_read = 0\n with open(filename, 'wb') as f:\n while True:\n buf = resp.read(block_size)\n if not buf:\n break\n self.bytes_read += len(buf)\n f.write(buf)\n self._dl_progress_bar()\n if self.show_progress:\n print(' ✓')", "def main(url, save_path, header):\n html = get_doctor_html(url)\n doctor_info = get_doctor_info(url,html) \n save_data(doctor_info, header, save_path)", "def to_file(self, html_file: str = None) -> None:\n if not html_file:\n html_file = f\"{self.id}.html\"\n\n with open(html_file, \"w\") as f:\n f.write(self.soup.html)", "def download(self, url):\n try:\n logging.info(self.log_format((\"downloading \" + url)))\n webFile = urllib.urlopen(url)\n localFile = open(self.paths['workspace'] + \"/\" + url.split('/')[-1], 'w')\n localFile.write(webFile.read())\n webFile.close()\n localFile.close()\n except IOError:\n logging.error(self.log_format((\"could not get url \" + url)))", "def record_to_file(url, filename):\n\tcontents = []\n\tif os.path.exists(filename):\n\t\tcontents = open(filename, \"r\").readlines()\n\ts = set(contents)\n\ts.add(\"%s\\n\" % url)\n\topen(filename, \"w\").writelines(s)", "async def save_website(session, url, name):\n\n local_start_time = get_time_ms()\n\n html = await fetch(session, url)\n with open(name, \"w\") as f:\n f.write(str(html))\n\n print(f\"Time to get {name} was {get_time_ms() - local_start_time}\")", "def retrieve_html(self, input_url, domain_folder_name, data_type, file_name):\n print \"retrieve_html: RETRIEVING HTML CODE FOR PAGE:\", input_url\n try:\n from_path = \"%s%s%s%s\" % (self.main_path, domain_folder_name, data_type, file_name)\n print \"retrieve_html: HTML CODE RETRIEVED LOCALY\\npath:%s\" % from_path\n with io.open(from_path, \"r\", encoding='utf-8') as f:\n content = f.read()\n bs_object = BS(content, 'html.parser')\n return bs_object\n \n except IOError:\n print \"retrieve_html: RETRIEVING HTML CODE ONLINE\"\n\n # time_to_sleep = 2\n # print \"SLEEPING FOR %d s.................\" % time_to_sleep\n # time.sleep(time_to_sleep)\n try:\n response = urllib2.urlopen(input_url)\n content = response.read()\n except:\n print \"retrieve_html: FAILED TO RETRIEVE HTML ONLINE, INCREASING failed_retrieving_html_counter\"\n content = \"<HTML><Retrieval_Error>\"\n self.failed_retrieving_html_counter += 1\n \n\n # for 
always proper utf-8 encoding\n bs_object = BS(content, 'html.parser')\n bs_content = bs_object.prettify('utf-8')\n u_content = unicode(bs_content, 'utf-8')\n #/\n\n to_path = \"%s%s%s%s\" % (self.main_path, domain_folder_name, data_type, file_name)\n print \"retrieve_html: WRITING RETRIEVED HTML_CODE TO FILE\\npath:%s\" % to_path\n with io.open(to_path, \"w\", encoding='utf-8') as f:\n f.write(u_content)\n\n # print \"html WRITTEN to %s.txt\" % file_name\n return bs_object", "def save_soup_to_file(self, filename='soup.html', prettify=True):\n with open(filename, 'w', encoding='utf-8') as fd_div:\n if prettify:\n fd_div.write(self.soup.prettify())\n fd_div.write('\\n')\n else:\n # for item in self.soup:\n # #fd_div.write(item)\n fd_div.write(str(self.soup))\n fd_div.write('\\n')", "def outputHtml(s):\n htmlFile.write(s + \"\\n\")", "def __download_web(self):\n page = requests.get(self.url)\n\n if page.status_code == 200:\n return BeautifulSoup(page.content, \"html.parser\")", "def _save_raw_dom_to_local(self, url):\n # When we first load the page, don't run any javascript.\n response = requests.get(url, verify=False)\n if response.status_code != 200:\n return False\n response.encoding = response.apparent_encoding\n\n # Many pages have broken code that is fixed by the browsers. We use\n # BeautifulSoup to fix the code for us\n soup = BeautifulSoup(response.text, \"html5lib\")\n\n raw_dom = str(soup)\n # Overwrite the url to be the state's url.\n # This will help with relative links.\n # The --disable-web-security Chrome flag needs to be set to allow this.\n history_script = \"history.replaceState(null, '', '{URL}');\".replace('{URL}', url)\n # This creates a javascript warning on external pages, which we should be able to\n # get rid of by running in the page context.\n # Change all relative sources to be absolute.\n\n # Get absolute url as it may have been redirected\n # e.g., https://wisconsindot.gov/Pages/home.aspx -> https://wisconsindot.gov\n # Resources will use src|href=\"/path/to\" which needs absolute path\n split_url = url.split('/') # Gives ['http:', '', 'domain.com', ...]\n abs_url = f\"{split_url[0]}//{split_url[2]}\"\n\n # Cut off any trailing slash.\n if url[-1] == \"/\":\n url = url[:-1]\n\n modified_dom = raw_dom\n modified_dom = self.RE_BROKEN_OPS.sub(\"\", modified_dom)\n modified_dom = self.RE_ABS_LINK.sub(rf\"\"\"\\1=\\2{abs_url}/\\3\\2\"\"\", modified_dom)\n modified_dom = self.RE_REL_LINK.sub(rf\"\"\"\\1=\\2{url}/\\3\\2\"\"\", modified_dom)\n modified_dom = manage_event_listeners(modified_dom)\n js_demodocus_flag_start = js_start + 'demodocus_done=false;' + history_script + js_end\n modified_dom = insert_after(modified_dom, RE_HTML, js_demodocus_flag_start)\n modified_dom = insert_before(modified_dom, RE_HTML_CLOSE, js_start + 'demodocus_done=true;' + js_end)\n output_path = get_output_path(self._config)\n # Save to disk.\n # The pure raw dom isn't used anywhere; it's just for debugging.\n with open(output_path / \"raw.html\", 'w', encoding='utf-8') as fp:\n fp.write(raw_dom)\n # The modified raw dom is loaded whenever we go to a new state.\n with open(output_path / \"raw_modified.html\", 'w', encoding='utf-8') as fp:\n fp.write(modified_dom)\n\n return True", "def straight_dump_to_file(name, the_url, the_file):\n the_response = urllib.urlopen(the_url)\n the_data = json.loads(the_response.read())\n if name: the_file.write(name + \"\\n\")\n the_file.write(the_url)\n the_file.write(\"\\n\")\n the_file.write(str(the_data))\n the_file.write(\"\\n\\n\")\n return the_data", 
"def save_complete(self):\n self.save_assets(reset_html=False)\n # new_file(self.url_obj.file_path, content=tostring(self.lxml, encoding=self.encoding))\n self.lxml.getroottree().write(self.url_obj.file_path, method=\"html\")\n\n self._lxml = None # reset the tree", "def save(self, content_dir):\n print_verbose(\n \"INFO : Writing random HTML documents to files...\",\n self.args.verbose,\n )\n for i in range(self.n):\n dir_path = content_dir + \"/\" + \"staticpage\" + str(i)\n if not os.path.exists(dir_path):\n os.makedirs(dir_path)\n index_file = os.path.join(dir_path, \"index.html\") \n with open(index_file, \"w\") as file:\n file.write(self.doc_strings[i].decode(\"utf-8\"))", "def download_file(self, url, path):\n print('\\tDownloading: ', path)\n with open(path, 'w') as outfile:\n try:\n response = self._http_client.get(url)\n outfile.write(response.text)\n finally:\n response.close()\n outfile.close()\n gc.collect()", "def download_and_save(url, file_name,file_extension):\n #make a request for the file\n response = requests.get(url, allow_redirects =True)\n\n #compose the file + extension\n file_to_be_saved = f\"{file_name}.{file_extension}\"\n \n #Create a new file with \"file_to_be_saved\" in the current directory\n # And save this file and print the directory with the OS module\n with open(file_to_be_saved, 'wb') as file:\n print(\"saving file.... \\n\")\n file.write(response.content)\n print('done....\\n')\n print('file saved as: ', file_to_be_saved )\n print('in: ', os.getcwd() )", "def extract_src(session, file_name, submission_num):\n # Gets the HTML page for the submission page\n response = session.get(\"https://dmoj.ca/src/\" + submission_num + \"/raw\")\n with open(file_name, \"w\") as f:\n f.write(response.text)", "def save_trail_html(title, url):\n r = requests.get(url).text\n raw_insert = {'trail': title,\n \"raw_html\": r}\n trail_page_raw_html.insert_one(raw_insert)\n return None", "def write_to_file(fname, html_body):\n dir_path = os.path.dirname(fname)\n ensure_dir_exists(dir_path)\n\n with open(fname, 'w') as html_file:\n html_file.write(html_body)", "def save(self, filepath: Union[str, pathlib.Path]) -> None:\n if isinstance(filepath, str):\n filepath = pathlib.Path(filepath)\n with filepath.open(mode='wb') as file:\n file.write(self.response.content)", "def save(self):\n with self.open(self.filename, 'wt') as fd:\n for node in self.elements:\n fd.write(node.text)", "def write_html_file(out_table, outpath):\r\n page_out = PAGE_HTML % (outpath, out_table)\r\n out = open(outpath, \"w+\")\r\n out.write(page_out)\r\n out.close()", "def fileWrite(content):\n file = open('./result.txt', 'w')\n file.write(content)\n file.close()", "def saveFile(self,newfile=None):\n if newfile == None:\n shutil.move(self.filename,self.filename+'~')\n self.handler = open(self.filename,'w')\n else:\n self.handler = open(newfile,'w')\n self.handler.writelines(self.content)\n self.handler.close()", "def save_to_file(urls):\n try:\n with open('url.txt', 'w') as file:\n for url in urls:\n file.write(url + \"\\n\")\n except:\n print(\"ERROR SAVING FILE\")", "def save_data(self, soup, url):\n # get the web page title\n title = soup.find('title').string\n # get the h1 tag of the page\n h1 = soup.find('h1')\n # checks if there is a h1 tag in the page\n # because is possible that a product url redirects to\n # another page.\n # In this way, only a valid product will be save.\n if h1:\n product_name = h1.contents[0].string\n page_values = PageValues(product_name, title, url, self.__csv_file_name)\n 
page_values.save_csv()\n else:\n # Shows the web page that have some problem.\n print('It was not possible to open {}'.format(url))", "def save_url_to_file(url, filepath):\n with open(filepath, 'ab') as f:\n f.write(url+'\\r\\n')", "def __save_article_to_file(self, content):\n with open(\"article.txt\", 'w') as out:\n out.write(content)", "def save_script(title):\n script = title_html(title)\n script = script.replace('</b>','')\n script = script.replace('<b>','\\n')\n\n cwd = os.getcwd()\n filepath = os.path.join(cwd,'scripts','%s.txt' % title)\n file = open(filepath, 'w')\n file.write(script)\n file.close()", "def download_html(url: str):\n response = urllib.request.urlopen(url)\n return response.read()", "def save_file(self, file_name, text):\n\n with open(file_name, 'w') as content_file:\n content = content_file.write(text)", "def test_write_to_file():\n from scraper import write_to_file\n encoding = 'utf-8'\n write_to_file(TEST_FILE, TEST_CONTENT, encoding)\n assert True", "def create_html_page(htmldata, filename):\n begin = \"<html>\\n\\n<body>\\n\\n<p>\\n\"\n end = \"\\n</p>\\n\\n</body>\\n\\n</html>\"\n full_text = begin + htmldata + end\n f = open(filename, \"w\")\n f.write(full_text)\n f.close()", "def getHTML():\n for url in urls: #Because there might be multipe URLs to scrape, iterate through the list \n r = requests.get(url)\n r.raise_for_status()\n webpage_html = str(bs4.BeautifulSoup(r.text, \"html.parser\"))\n filenumber = str(urls.index(url)) #Create a variable called filenumber using the index of the url in the list of urls\n filename = \"output_\" + filenumber + \".html\" #This and above line avoid the loop rewriting the file name of the previous file.\n with open(filename, 'w') as file_object: #open a new (or existing) file to be written (or overwritten)\n file_object.write(webpage_html) #write the scraped HTML into the file\n file_object.close #close the file", "def getHtml(url):\n log.finer(\" Opening URL: %s\" % url)\n handle = MozURLopener().open(url)\n html = handle.read()\n handle.close()\n return html", "def publish_html(self, readyhtml):\n with open(self.outfile,'w') as f_out:\n f_out.writelines(readyhtml)", "def save_file(s, filepath, download_url, cookie):\n\n with open(filepath, \"wb\") as f:\n response = s.get(download_url, cookies=cookie, stream=True)\n total = response.headers.get(\"content-length\")\n if total is None:\n f.write(response.content)\n else:\n downloaded = 0\n total = int(total)\n for data in response.iter_content(chunk_size=max(int(total / 1000),\n 1024 * 1024)\n ):\n downloaded += len(data)\n f.write(data)\n done = int(50 * downloaded / total)\n sys.stdout.write(\n \"\\r[{}{}]\".format(\"█\" * done, \".\" * (50 - done))\n )\n sys.stdout.flush()\n sys.stdout.write(\"\\n\")", "def download_simple(url): # url(str)\n html = urlopen(url).read().decode()\n return html", "def write_html_file (folder, filename, contents):\n\n result = False\n\n try:\n f = codecs.open(os.path.join(folder, filename), 'w', ENCODING)\n f.write(contents)\n f.close()\n result = True\n except (OSError, IOError):\n print \"Sorry, could not save contents in\", os.path.join(folder, filename)\n\n return result", "def httpretrieve_save_file(url, filename, querydata=None, postdata=None, \\\r\n httpheaders=None, proxy=None, timeout=None):\r\n\r\n # Open the output file object and http file-like object.\r\n outfileobj = open(filename, 'w')\r\n httpobj = httpretrieve_open(url, querydata=querydata, postdata=postdata, \\\r\n httpheaders=httpheaders, proxy=proxy, 
timeout=timeout)\r\n\r\n # Repeatedly read from the file-like HTTP object into our file, until the\r\n # response is finished.\r\n responsechunkstr = None\r\n while responsechunkstr != '':\r\n responsechunkstr = httpobj.read(4096)\r\n outfileobj.write(responsechunkstr)\r\n\r\n outfileobj.close()\r\n httpobj.close()", "def dump_file(str_content, filename):\n\n with open(\"results/\" + filename, \"w\") as f:\n f.write(str_content)", "def save_data_to_file(self, html_detail_tab, name_category):\n name_saved_data = name_category + \".html\"\n with open(os.path.join(self.current_path, name_saved_data), 'a+') as f:\n try:\n f.write(html_detail_tab)\n except IOError as io:\n logger.error(\n \"caught an io exception while writing to the file\",\n io)", "def store_cache(base_url, data, path=\"logs/\"):\n\n # Convert URL to filename and write html content into that file\n url_filename = url_to_filename(base_url)\n filename = f\"{path}CACHE-{url_filename}.html\"\n f = open(filename, \"w+\")\n f.write(data)\n f.close()", "def save(self, filename: str):\n r = requests.get(self.raw_url, stream=True)\n if r.status_code == 404:\n raise Exception(f\"Document {self.key} does not exist\")\n r.raise_for_status()\n \n with open(filename, 'wb') as file:\n for chunk in r.iter_content(10 * 1024):\n file.write(chunk)", "def scrape_to_file(cls, website, number):\n try:\n scraper = cls.scraper(website)\n scraper.scrape(number)\n DataHandler.data_frame_to_json(scraper.df, path.join(cls.path, f'{website}.json'))\n except PermissionError:\n print(f\"Access denied. Failed to write {website}.json.\")\n exit(3)", "def download_page(url, cookie_jar):\n browser_dir = os.path.join(server_path, 'static/browser')\n delete_directory_files(browser_dir)\n filename = '{}.html'.format(uuid.uuid4())\n filepath = os.path.join(browser_dir, filename)\n try:\n response = cookie_request(url, cookie_jar)\n except requests.RequestException as e:\n return e, None\n doc = html.document_fromstring(response.text)\n with open(filepath, 'wb') as f:\n f.write(html.tostring(doc))\n return None, filename", "def write_to_file(self, content):\n try:\n with open(self.full_path_to_file, \"wb\") as fp:\n fp.write(content)\n except PermissionError:\n logging.error(\n \"Conversion cannot be performed. 
Permission denied for this directory\"\n )\n sys.exit()\n self.logger.info(\"News has been successfully converted\")", "def read_and_save(res):\n fname = os.path.split(urlsplit(res.url).path)[-1]\n fpath = os.path.join(cfg.OUTPUT_DIR, fname)\n with open(fpath, 'wb') as f:\n for chunk in res.iter_content(cfg.CHUNK):\n f.write(chunk)", "def save_file(url, *, out_dir='sha_tmp/', out_name=None):\n exten_types = {'image/fits': '.fits',\n 'text/plain; charset=UTF-8': '.tbl',\n 'application/zip': '.zip',\n }\n # Make request\n response = requests.get(url, stream=True)\n response.raise_for_status()\n # Name file using ID at end\n if out_name is None:\n out_name = 'shaID_' + id_parse.findall(url)[0]\n # Determine extension\n exten = exten_types[response.headers['Content-Type']]\n # Check if path exists\n if not os.path.exists(out_dir):\n os.makedirs(out_dir)\n # Write file\n with open(out_dir + out_name + exten, 'wb') as f:\n for block in response.iter_content(1024):\n f.write(block)", "def export(self):\n memento = self.create_memento()\n try:\n f = open(\"story.txt\", \"w\")\n try:\n f.write(memento.__str__())\n finally:\n f.close()\n except IOError:\n print 'IOError while exporting story!'", "def downloadData(url):\n response = urllib2.urlopen(url)\n html = response.read()\n localfile = open('hitdata.csv', 'wb')\n localfile.write(html)\n localfile.close()", "def save_scraped_data(data_filename, scraped_data):\n with open(data_filename, 'w') as file:\n file.write(scraped_data)", "def write_to_file(file_name, url):\n\n with open(file_name, 'a') as myfile:\n myfile.write('{}\\n'.format(url))", "def write_contents(self):\n dfile = open(os.path.join(self.directory, self.file_name), 'w')\n dfile.write(self.contents.strip())", "def save(self, file_path):\n with open(file_path, 'w') as file:\n file.write(self.text)\n file.close()", "def wonder():\n copy()\n get_soup()\n get_text()\n change_write_text()\n Check_status_time_stamp()", "def make_file_soup(self):\n soup = BeautifulSoup(self.html, 'html.parser')\n return soup", "def load_page(url):\n parameters = {'User-Agent': \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \\\n Chrome/69.0.3497.100 Safari/537.36\"}\n response = requests.get(url, params=parameters)\n\n # Abort if server is responding with error\n if not response.status_code == 200:\n print(\"Server stopped responding. Execution aborted.\")\n sys.exit(1)\n\n content = response.content.decode(response.encoding)\n\n # Save page to a file for debugging\n # with open(self.lastpage_path, 'w') as output_file:\n # output_file.write(content)\n\n return content", "def finish(self):\r\n\r\n self.text += \"</html>\\n\"\r\n\r\n if self.filename != None:\r\n with open(self.filename, \"w\") as f:\r\n f.write(self.text)\r\n\r\n return self.text", "def downloadvideo(filename):\n url = \"http://openings.moe/video/\" + filename\n f = getfile(url)\n safeprint(Colors.PURPLE + url + Colors.END + \":\\nSaving to --> \" + Colors.YELLOW + filename + Colors.END)\n with open(os.path.basename(url), \"wb\") as local_file:\n try:\n local_file.write(f.read())\n except IOError as e:\n safeprint(\"An error occurred while saving the file, try again. 
\" + str(e))", "def collect_source(self, target, file_descriptor, url, embed, selenium):\n\t\t# Collect the raw page source from the target -- like wget or curl\n\t\tprint(\"[+] Collecting HTML source from:\\n{}\".format(target))\n\t\ttry:\n\t\t\theaders = { 'User-Agent' : self.user_agent }\n\t\t\tif selenium:\n\t\t\t\tprint(\"[+] Using Selenium mode to load webage.\")\n\t\t\t\t# Setup the Selenium web browser for Chrome\n\t\t\t\tselenium_browser = webdriver.Chrome(executable_path=self.path_to_chromedriver)\n\t\t\t\t# Launch the Selenium browser and browse to the target webpage\n\t\t\t\tselenium_browser.get(target)\n\t\t\t\t# Grab a snapshot of the original webpage for comparison/reference later\n\t\t\t\tsource = selenium_browser.page_source\n\t\t\t\tscreenshot_file_name = target.split(\".\")[1]\n\t\t\t\tprint(\"[+] Taking a snapshot of the original page and saving it as {}.png\".format(screenshot_file_name))\n\t\t\t\tselenium_browser.save_screenshot(\"{}.png\".format(screenshot_file_name))\n\t\t\telse:\n\t\t\t\tr = requests.get(target, headers=headers)\n\t\t\t\tsource = r.text\n\t\t\t# Parse the source with BeautifulSoup\n\t\t\tsoup = BeautifulSoup(source, \"html.parser\")\n\t\t\tprint(\"[+] Succesfully collected source from the target.\")\n\t\texcept Exception as err:\n\t\t\t# If scraping fails, all is lost and we can only exit\n\t\t\tprint(\"[!] Failed to connect to target -- This must be valid and a fully qualified URL, e.g. http://www.foo.bar.\")\n\t\t\tprint(\"L.. Details: {!s}\\n\".format(err))\n\t\t\tsys.exit()\n\n\t\t# Find and replace the source code's URLs\n\t\ttry:\n\t\t\tif self.landing_page_url_replacement != \"\":\n\t\t\t\tprint(\"[+] Replacing the URLs in the HTML source with: {}\".format(self.landing_page_url_replacement))\n\t\t\t\tfor link in soup.findAll('a', href=True):\n\t\t\t\t\tlink['href'] = self.landing_page_url_replacement\n\t\t\telse:\n\t\t\t\tprint(\"[-] Warning: No URL provided for landing_page_url_replacement in config file, so the webpage's links will be preserved.\")\n\n\t\t\tif url is not None:\n\t\t\t\t# Check the URL because if it's invalid it will not work here\n\t\t\t\ttry:\n\t\t\t\t\tr = requests.get(url)\n\t\t\t\t\tprint(\"[+] Updating the link and script tag src attrbitues with: {}\".format(url))\n\t\t\t\t\t# Find all links and replace URLs with our new text/URLs\n\t\t\t\t\tfor link in soup.findAll('link', href=True):\n\t\t\t\t\t\tlink['href'] = urllib.parse.urljoin(url, link['href'])\n\t\t\t\t\tfor link in soup.findAll('script', src=True):\n\t\t\t\t\t\tlink['src'] = urllib.parse.urljoin(url, link['src'])\n\t\t\t\t\tprint(\"[+] URL parsing successful! All URLs have been replaced.\")\n\t\t\t\t\tif embed:\n\t\t\t\t\t\tsoup = self.fix_images_encode(url, soup, file_descriptor)\n\t\t\t\t\telse:\n\t\t\t\t\t\tsoup = self.fix_images_url(url, soup, file_descriptor)\n\t\t\t\texcept Exception as err:\n\t\t\t\t\tprint(\"[!] The provided base URL, {}, did not work for repairing links and images. This must be valid and a fully qualified URL, e.g. http://www.foo.bar.\".format(url))\n\t\t\t\t\tprint(\"L.. Details: {}\".format(err))\n\t\t\telse:\n\t\t\t\tprint(\"[-] Warning: No URL provided with --url for updating links, so skipping updating img, link, and script tags.\")\n\t\texcept Exception as err:\n\t\t\tprint(\"[!] URL parsing failed!\")\n\t\t\tprint(\"L.. 
Details: {}\".format(err))\n\n\t\t# Find and replace the source code's form actions\n\t\tprint(\"[+] Proceeding with updating form actions...\")\n\t\ttry:\n\t\t\t# Find all <form> tags with an action attribute and modify that attribute\n\t\t\tfor form in soup.findAll('form'):\n\t\t\t\tform['action'] = self.landing_page_form_action\n\t\t\t\t# form['method'] = \"post\"\n\t\t\t\t# form['onsubmit'] = \"return checkForm(this);\"\n\t\t\tprint(\"[+] Form parsing was successful!\")\n\t\texcept Exception as err:\n\t\t\tprint(\"[!] Form parsing failed!\")\n\t\t\tprint(\"L.. Details: {}\".format(err))\n\n\t\ttry:\n\t\t\t# Prettify update source from a blob of HTML to human readable source\n\t\t\tsource = soup.prettify()\n\t\t\t# Fix/unescape characters translated to ;lt ;gt ;amp\n\t\t\tsource = xml.sax.saxutils.unescape(source)\n\t\t\t# Write the updated source while removing the added [' and ']\n\t\t\tfile_descriptor.write(source.replace('[','').replace(']',''))\n\t\t\tprint(\"[+] All operations are complete and the output written to {}\".format(self.output_file_name))\n\t\texcept Exception as err:\n\t\t\tprint(\"[!] Could not write to the output file!\")\n\t\t\tprint(\"L.. Details: {}\".format(err))", "def get_text(URL_string, name):\n\n tale = URL(URL_string).download()\n save_file = open(name + '.pickle', 'w')\n pickle.dump(tale, save_file)\n save_file.close()", "def fetch(self, url):\r\n fname = os.path.join(self._cachedir, self._formatter(url))\r\n if not os.path.exists(fname):\r\n time.sleep(self._sleep)\r\n html = urllib.urlopen(url).read()\r\n with codecs.open(fname, 'w', 'utf-8') as f:\r\n soup = BeautifulSoup(html)\r\n f.write(unicode(soup))\r\n return fname", "def capture_web(pdb_file, output_file):\n # Register the streaming http handlers with urllib2\n register_openers()\n # use relpath to hide local path\n with open( os.path.relpath( pdb_file ), \"r\" ) as fp:\n # headers contains the necessary Content-Type and Content-Length\n # datagen is a generator object that yields the encoded parameters\n datagen, headers = multipart_encode({\n \"upfile\": fp,\n \"GO\": \"GO\",\n \"note\": \"note\"\n })\n # Create the Request object\n request = urllib2.Request(\n CAPTURE_URL + \"capture_ul.cgi\", datagen, headers\n )\n # Actually do the request, get and read the response\n response = urllib2.urlopen(request).read()\n with open( output_file, 'w' ) as fp:\n fp.write( response )", "def save_html(self, report_summary, file_name, folder):\n myfile = open(file_name, \"w\")\n myfile.write(t('! 
DOCTYPE html') + nl())\n myfile.write(t('html') + nl())\n myfile.write(t('head') + nl())\n myfile.write(t('link type=\"text/css\" rel=\"stylesheet\" ') + nl())\n\n myfile.write(html_space(4) + t('style'))\n myfile.write('table{width= 100%; border-collapse:collapse; border:1px solid black collapse}')\n myfile.write('th,td {padding:3px}' + nl())\n myfile.write(html_space(8) + 'td.detail{background-color:#D5DF93; font-size:20; '\n 'font-family:Helvetica, Arial, Sans Serif; font-weight:bold}' + nl())\n myfile.write(html_space(8) + 'td.detail1{font-size:20; '\n 'font-family:Helvetica, Arial, Sans Serif; font-weight:bold}' + nl())\n myfile.write(html_space(8) + 'td.detail2{font-size:20;'\n ' font-family:Helvetica, Arial, Sans Serif}' + nl())\n myfile.write(html_space(8) + 'td.header0{background-color:#8fac3a; font-size:20;'\n ' font-family:Helvetica, Arial, Sans Serif; font-weight:bold}' + nl())\n myfile.write(html_space(8) + 'td.header1{background-color:#E6E6E6; font-size:20;'\n ' font-family:Helvetica, Arial, Sans Serif; font-weight:bold}' + nl())\n myfile.write(html_space(8) + 'td.header2{font-size:20; width:50%}' + nl())\n myfile.write(html_space(4) + t('/style') + nl())\n\n myfile.write(t('/head') + nl())\n myfile.write(t('body') + nl())\n\n # Project summary\n self.company_name = str(report_summary[\"ProfileSummary\"]['CompanyName'])\n self.company_logo = str(report_summary[\"ProfileSummary\"]['CompanyLogo'])\n\n self.group_team_name = str(report_summary[\"ProfileSummary\"]['Group/TeamName'])\n self.designer = str(report_summary[\"ProfileSummary\"]['Designer'])\n self.project_title = str(report_summary['ProjectTitle'])\n self.sub_title = str(report_summary['Subtitle'])\n self.job_number = str(report_summary['JobNumber'])\n self.client = str(report_summary['Client'])\n additional_comments = str(report_summary['AdditionalComments'])\n\n # Seated angle design parameters\n connectivity = str(self.connectivity)\n shear_force = str(self.shear_force)\n column_sec = str(self.column_section)\n column_fu = str(self.column_fu)\n beam_sec = str(self.beam_section)\n seated_angle_section = str(self.angle_sec)\n top_angle_section = str(self.top_angle)\n angle_fu = str(self.angle_fu)\n\n bolt_type = str(self.bolt_type)\n is_hsfg = self.is_hsfg\n bolt_grade = str(self.bolt_grade)\n bolt_diameter = str(self.bolt_diameter)\n bolt_fu = str(self.bolt_fu)\n is_environ_corrosive = self.is_environ_corrosive\n\n # Design Preferences\n detail_gap = str(self.detail_gap)\n bolt_hole_clearance = str(self.bolt_hole_clearance)\n bolt_hole_type = str(self.bolt_hole_type)\n bolt_material_grade = self.bolt_fu_overwrite\n slip_factor_mu_f = self.mu_f\n min_edge_multiplier = self.min_edge_multiplier\n type_of_edge = self.type_of_edge\n design_method = self.design_method\n\n # Calculation outputs\n bolts_provided = str(self.bolts_provided)\n bolts_required = str(self.bolts_required)\n\n number_of_rows = str(self.num_rows)\n number_of_cols = str(self.num_cols)\n edge = str(self.edge_dist)\n gauge = str(self.gauge)\n pitch = str(self.pitch)\n end = str(self.end_dist)\n\n kb = str(self.k_b)\n beam_w_t = str(self.beam_w_t)\n beam_fu = str(self.beam_fu)\n dia_hole = str(self.bolt_hole_diameter)\n shear_capacity = str(self.bolt_shear_capacity)\n bearing_capacity = str(self.bolt_bearing_capacity)\n\n check_pass = \"<p align=left style=color:green><b>Pass</b></p>\"\n check_fail = \"<p align=left style=color:red><b>Fail</b></p>\"\n\n if self.safe == True:\n remark = check_pass\n elif self.safe == False:\n remark = check_fail\n\n # 
-----------------------------------------------------------------------------------\n rstr = self.design_report_header()\n # -----------------------------------------------------------------------------------\n\n # ---------------------------------- Design conclusion ------------------------------\n rstr += t('table border-collapse= \"collapse\" border=\"1px solid black\" width= 100% ') + nl()\n\n rstr += design_summary_row(0, \"Design Conclusion\", \"header0\", col_span=\"2\")\n\n row = [1, \"Seated Angle\", remark]\n rstr += t('tr')\n rstr += html_space(1) + t('td class=\"detail1 \"') + space(row[0]) + row[1] + t('/td')\n rstr += t('td class=\"detail1\"') + row[2] + t('/td') + nl()\n # rstr += t('td class=\"header1 safe\"') + row[3] + t('/td')\n rstr += t('/tr')\n\n rstr += design_summary_row(0, \"Seated Angle\", \"header0\", col_span=\"2\")\n rstr += design_summary_row(0, \"Connection Properties\", \"detail\", col_span=\"2\")\n rstr += design_summary_row(0, \"Connection \", \"detail1\", col_span=\"2\")\n rstr += design_summary_row(1, \"Connection Title\", \"detail2\", text_two=\" Seated Angle\")\n rstr += design_summary_row(1, \"Connection Type\", \"detail2\", text_two=\" Shear Connection\")\n rstr += design_summary_row(0, \"Connection Category\", \"detail1\")\n rstr += design_summary_row(1, \"Connectivity\", \"detail2\", text_two=str(connectivity))\n rstr += design_summary_row(1, \"Beam Connection\", \"detail2\", text_two=\"Bolted\")\n rstr += design_summary_row(1, \"Column Connection\", \"detail2\", text_two=\"Bolted\")\n rstr += design_summary_row(0, \"Loading (Factored Load)\", \"detail1\")\n rstr += design_summary_row(1, \"Shear Force (kN)\", \"detail2\", text_two=str(shear_force))\n rstr += design_summary_row(0, \"Components \", \"detail1\", col_span=\"2\")\n rstr += design_summary_row(1, \"Column Section\", \"detail1\", text_two=str(column_sec), text_two_css=\"detail2\")\n rstr += design_summary_row(2, \"Material\", \"detail2\", text_two=\"Fe \" + str(column_fu))\n rstr += design_summary_row(2, \"Hole\", \"detail2\", text_two=str(bolt_hole_type))\n rstr += design_summary_row(1, \"Beam Section\", \"detail1\", text_two=str(beam_sec), text_two_css=\"detail2\")\n rstr += design_summary_row(2, \"Material\", \"detail2\", text_two=\"Fe \" + str(beam_fu))\n rstr += design_summary_row(2, \"Hole\", \"detail2\", text_two=str(bolt_hole_type))\n rstr += design_summary_row(1, \"Seated Angle Section\", \"detail1\", text_two=str(seated_angle_section),\n text_two_css=\"detail2\")\n rstr += design_summary_row(2, \"Material\", \"detail2\", text_two=\"Fe \" + str(angle_fu))\n rstr += design_summary_row(2, \"Hole\", \"detail2\", text_two=str(bolt_hole_type))\n rstr += design_summary_row(1, \"Top Angle Section\", \"detail1\", text_two=str(top_angle_section),\n text_two_css=\"detail2\")\n rstr += design_summary_row(2, \"Material\", \"detail2\", text_two=\"Fe \" + str(angle_fu))\n rstr += design_summary_row(2, \"Hole\", \"detail2\", text_two=bolt_hole_type)\n rstr += design_summary_row(1, \"Bolts\", \"detail1\", col_span=\"2\")\n rstr += design_summary_row(2, \"Type\", \"detail2\", text_two=bolt_type)\n rstr += design_summary_row(2, \"Grade\", \"detail2\", text_two=bolt_grade)\n rstr += design_summary_row(2, \"Diameter (mm)\", \"detail2\", text_two=bolt_diameter)\n rstr += design_summary_row(2, \"Bolts - Required\", \"detail2\", text_two=bolts_required)\n rstr += design_summary_row(2, \"Bolts - Provided\", \"detail2\", text_two=bolts_provided)\n rstr += design_summary_row(2, \"Rows\", \"detail2\", 
text_two=number_of_rows)\n rstr += design_summary_row(2, \"Columns\", \"detail2\", text_two=number_of_cols)\n rstr += design_summary_row(2, \"Gauge (mm)\", \"detail2\", text_two=gauge)\n rstr += design_summary_row(2, \"Pitch (mm)\", \"detail2\", text_two=pitch)\n rstr += design_summary_row(2, \"End Distance (mm)\", \"detail2\", text_two=end)\n rstr += design_summary_row(2, \"Edge Distance (mm)\", \"detail2\", text_two=edge)\n rstr += design_summary_row(0, \"Assembly\", \"detail1\", col_span=\"2\")\n rstr += design_summary_row(1, \"Column-Beam Clearance (mm)\", \"detail2\", text_two=detail_gap,\n text_two_css=\"detail2\")\n\n rstr += \" \" + nl() + t('/table')\n rstr += t('h1 style=\"page-break-before:always\"') # page break\n rstr += t('/h1')\n\n # -----------------------------------------------------------------------------------\n rstr += self.design_report_header()\n # -----------------------------------------------------------------------------------\n\n # --------------------------------- Design Preferences ------------------------------\n # Write your code here\n\n\n # -----------------------------------------------------------------------------------\n rstr += self.design_report_header()\n # -----------------------------------------------------------------------------------\n\n # ------------------------------------ DESIGN CHECKS ---------------------------------\n rstr += t('table width = 100% border-collapse= \"collapse\" border=\"1px solid black\" table-layout:fixed')\n rstr += t('tr')\n rstr += t('td style=\"width:200px;\"')\n rstr += t('td width=\"50%\"')\n rstr += t('td width=\"50%\"')\n rstr += t('td style=\"width:50px;\"')\n rstr += t('/tr')\n rstr += design_check_row(\"Design Check\", \"\", \"\", \"\", col_span=\"4\", text_one_css=\"detail\")\n\n rstr += design_check_row(\"Check\", \"Required\", \"Provided\", \"Remark\", text_one_css=\"header1\",\n text_two_css=\"header1\", text_three_css=\"header1\", text_four_css=\"header1\")\n\n # Bolt\n rstr += design_check_row(\"Bolt Checks\", \"\", \"\", \"\", col_span=\"4\", text_one_css=\"detail\")\n\n # Bolt shear capacity (kN)\n const = str(round(math.pi / 4 * 0.78, 4))\n if is_hsfg == False:\n req_field = \"<i>V</i><sub>dsb</sub> = bolt_fu*(pi*0.78/4)*bolt_diameter^2/(&#8730;3)/\" \\\n \"<i>gamma<sub>mb</sub></i><br> [cl. 10.3.3]\"\n prov_field = \"<i>V</i><sub>dsb</sub> = \" + bolt_fu + \"*(\" + const + \")*\" + bolt_diameter + \"^2/\" \\\n + \"(&#8730;3)/1.25/1000 <br> \" + space(2) + \"= \" + shear_capacity\n elif is_hsfg == True:\n if bolt_hole_type == \"Standard\":\n K_h = str(1.0)\n elif bolt_hole_type == \"Oversized\":\n K_h = str(0.85)\n req_field = \"HSFG bolt shear capacity:\"\n # req_field += \"<br> <i>V</i><sub>dsf</sub> = mu_f*n_e*K_h*A_nb*f_0/<i>gamma<sub>mb</sub></i>\"\n req_field += \"<br> [cl. 10.3.3]\"\n prov_field = \"<i>V</i><sub>dsf</sub> = (\"\n prov_field += str(\n slip_factor_mu_f) + \")*(1)*(\" + K_h + \")*(\" + const + \"*\" + bolt_diameter + \"^2)<br>\" + space(2) + \\\n \"*(0.70*\" + bolt_fu + \")\" + \"/1.25/1000 <br> \" + space(2) + \"= \" + shear_capacity\n rstr += design_check_row(\"Bolt shear capacity (kN)\", req_field, prov_field, \" \")\n\n # Bolt bearing capacity (kN)\n # req_field = \"<i>V<sub>dpb</sub></i> = 2.5*k<sub>b</sub>*bolt_diameter*critical_thickness\" \\\n # +\"<br> *<i>f</i><sub>u</sub>/<i>gamma<sub>mb</sub></i><br> [Cl. 10.3.4]\"\n req_field = \"<i>V<sub>dpb</sub></i>:<br> [Cl. 
10.3.4]\"\n if is_hsfg == False:\n prov_field = \"<i>V</i><sub>dpb</sub> = 2.5*\" + kb + \"*\" + bolt_diameter + \"*\" + beam_w_t + \"*\" \\\n + beam_fu + \"/1.25/1000) <br>\" + space(2) + \" = \" + bearing_capacity + \" kN\"\n elif is_hsfg == True:\n prov_field = 'N/A'\n rstr += design_check_row(\"Bolt bearing capacity (kN)\", req_field, prov_field, \"\")\n\n # Bolt capacity (kN)\n req_field = \"min (bolt_shear_capacity, bolt_bearing_capacity)\"\n prov_field = \"min (\" + str(self.bolt_shear_capacity) + \", \" + str(self.bolt_bearing_capacity) + \") = \" \\\n + str(self.bolt_value)\n rstr += design_check_row(\"Bolt capacity (kN)\", req_field, prov_field, \"\")\n\n # No. of bolts\n # bolts = str(round(float(shear_force) / float(str(self.bolt_value)), 1))\n bolts_req_based_on_force = (math.ceil(float(shear_force) / self.bolt_value))\n if bolts_req_based_on_force > self.bolts_provided:\n remark = check_fail\n else:\n remark = check_pass\n # req_field = \"shear_force/ bolt_value = \" + str(shear_force) + \"/\" + str(self.bolt_value) + \" = \" \\\n req_field = str(shear_force) + \"/\" + str(self.bolt_value) + \" = \" \\\n + str(bolts_req_based_on_force)\n rstr += design_check_row(\"No. of bolts\", req_field, bolts_provided, remark)\n\n rstr += design_check_row(\"No. of columns\", \" \", number_of_cols, \" \")\n rstr += design_check_row(\"No. of row(s)\", \" &#8804; 2\", number_of_rows, \" \")\n\n # Bolt pitch (mm)\n if self.pitch >= self.min_pitch and self.pitch <= self.max_spacing:\n remark = check_pass\n # req_field = \" &#8805; 2.5*bolt_diameter ,<br> &#8804; min(32*thickness_governing_min, 300) \"\n req_field = \"<br> &#8805; 2.5* \" + bolt_diameter + \" = \" + str(self.min_pitch) + \",<br> &#8804; min(32*\" + \\\n str(self.thickness_governing_min) + \", 300) = \" + str(self.max_spacing) + \"<br> [cl. 10.2.2] <br>\"\n prov_field = pitch\n elif self.pitch < self.min_pitch or self.pitch > self.max_spacing:\n if self.num_rows == 1:\n remark = \" \"\n req_field = \"N/A\"\n prov_field = \"N/A\"\n else:\n remark = check_fail\n # req_field = \" &#8805; 2.5*bolt_diameter ,<br> &#8804; min(32*thickness_governing_min, 300)\"\n req_field = \"<br> &#8805; 2.5* \" + bolt_diameter + \" = \" + str(\n self.min_pitch) + \",<br> &#8804; min(32*\" + \\\n str(self.thickness_governing_min) + \", 300) = \" + str(self.max_spacing) + \"<br> [cl. 10.2.2] <br>\"\n prov_field = pitch\n rstr += design_check_row(\"Bolt pitch (mm)\", req_field, prov_field, remark)\n\n # Bolt gauge (mm)\n if self.gauge >= self.min_gauge and self.gauge <= self.max_spacing:\n remark = check_pass\n elif self.gauge < self.min_gauge or self.gauge > self.max_spacing:\n remark = check_fail\n # req_field = \" &#8805; 2.5*bolt_diameter ,<br> &#8804; min(32*thickness_governing_min, 300)\"\n req_field = \"<br> &#8805; 2.5*\" + bolt_diameter + \" = \" + str(self.min_gauge) + \",<br> &#8804; min(32*\" + \\\n str(self.thickness_governing_min) + \", 300) = \" + str(self.max_spacing) + \"<br> [cl. 10.2.2] <br>\"\n rstr += design_check_row(\"Bolt gauge (mm)\", req_field, gauge, remark)\n\n # End distance (mm)\n if self.end_dist >= self.min_end_dist:\n remark = check_pass\n elif self.end_dist < self.min_end_dist:\n remark = check_fail\n # req_field = \" &#8805;\" + str(self.min_edge_multiplier) + \"*bolt_hole_diameter\" + \" [cl. 
10.2.4.2]\"\n req_field = \"<br> &#8805;\" + str(self.min_edge_multiplier) + \"*\" + dia_hole + \" = \" + str(self.min_end_dist)\n rstr += design_check_row(\"End distance (mm)\", req_field, end, remark)\n\n # Edge distance (mm)\n if self.edge_dist >= self.min_edge_dist and self.edge_dist <= self.max_edge_dist:\n remark = check_pass\n elif self.edge_dist < self.min_edge_dist or self.edge_dist > self.max_edge_dist:\n remark = check_fail\n # req_field = \" &#8805;\" + str(self.min_edge_multiplier) + \"*bolt_hole_diameter,\"\n req_field = \" &#8805;\" + str(self.min_edge_multiplier) + \"*\" + dia_hole + \" = \" + str(self.min_edge_dist) + \" [cl. 10.2.4.2]<br>\"\n # Cl 10.2.4.3 if members are exposed to corrosive influences\n if is_environ_corrosive == \"Yes\":\n req_field += \"<br><br> As the members are exposed to corrosive influences: \"\n # req_field += \"<br> &#8804; min(12*thickness_governing_min*sqrt(250/f_y),<br>\" + space(\n # 2) + \" 40+4*thickness_governing_min)\"\n req_field += \"<br> [Cl 10.2.4.3]\"\n req_field += \"<br> &#8804; min(12*\" + str(self.thickness_governing_min) + \"*sqrt(250/\" \\\n + str(self.angle_fy) + \"), 40 + 4*\" + str(self.thickness_governing_min)\\\n + \") = \" + str(self.max_edge_dist)\n elif is_environ_corrosive == \"No\":\n # req_field += \"<br><br> &#8804; 12*thickness_governing_min*sqrt(250/f_y)\"\n req_field += \"<br> &#8804; 12*\" + str(self.thickness_governing_min) + \"sqrt(250/\" \\\n + str(self.angle_fy) + \") = \" + str(self.max_edge_dist) + \"[Cl 10.2.4.3]\"\n rstr += design_check_row(\"Edge distance (mm)\", req_field, edge, remark)\n\n # Seated angle\n rstr += design_check_row(\"Seated Angle \" + str(self.angle_sec), \"\", \"\", \"\", col_span=\"4\",\n text_one_css=\"detail\")\n\n # Seated angle length\n if connectivity == \"Column flange-Beam flange\":\n # req_field = \"= min(supported_beam_width,<br>\"+space(2)+\"supporting_column_width)\"\n req_field = \" <br> = min(\" + str(self.beam_w_f) + \", \" + str(self.column_w_f) + \")\"\n prov_field = str(self.angle_l)\n elif connectivity == \"Column web-Beam flange\":\n # limiting_angle_length = self.column_d - 2 * self.column_f_t - 2 * self.column_R1 - self.root_clearance_col\n # self.angle_l = int(math.ceil(min(self.beam_w_f, limiting_angle_length)))\n # req_field = \"= min(width of supported beam, <br>\" + space(2) + \\\n # \"column_depth - 2*column_flange_thickness<br>\" + space(2) +\\\n # \" - 2*column_R1 - root_clearance_col)\"\n req_field = \"<br> = min(\" + str(self.beam_w_f) \\\n + \", \" + str(self.column_d) + \" - 2*\" + str(self.column_f_t) \\\n + \" - 2*\" + str(self.column_R1) + \" - \" + str(self.root_clearance_col) + \")\"\n prov_field = str(self.angle_l)\n # As the seated angle length is a determined/calculated parameter, there is no design 'check' remark\n rstr += design_check_row(\"Length (mm)\", req_field, prov_field, \" \")\n\n # Length of outstanding leg\n if self.outstanding_leg_length_required < self.angle_B:\n remark = check_pass\n elif self.outstanding_leg_length_required > self.angle_B:\n remark = check_fail\n # req_field = \"b = (R*\" + sub(\"gamma\", \"m0\") + \"/(\" + sub(\"f\", \"yw\") +\\\n # \"*beam_web_thickness))<br>\" + space(2) + \"+ beam_column_clear_gap\"\n req_field = \"<br>[Cl. 
8.7.4]\"\n req_field += \"<br> = (\" + str(self.shear_force) + \"*1000*\" + str(self.gamma_m0) + \"/(\" + str(self.beam_fy) \\\n + \"*\" + str(self.beam_w_t) + \")) + \" + str(self.detail_gap)\n prov_field = str(self.angle_B)\n rstr += design_check_row(\"Outstanding leg length (mm)\", req_field, prov_field, remark)\n\n # For angle thickness\n # Shear capacity of outstanding leg\n if self.outstanding_leg_shear_capacity > self.shear_force:\n remark = check_pass\n elif self.outstanding_leg_shear_capacity < self.shear_force:\n remark = check_fail\n req_field = sub(\"V\", \"dp\") + \" &#8805 V <br>\"\n req_field += sub(\"V\", \"dp\") + \" &#8805 \" + str(self.shear_force) + \"kN <br> [Cl. 8.4.1]\"\n # prov_field = sub(\"V\", \"dp\") + \"=\" + sub(\"A\", \"v\") + sub(\"f\", \"yw\") + \"/ (&#8730 3 *\" + sub(\"gamma\", \"m0\") + \")\"\n prov_field = \"<br>\" + space(1) + \"= (\" + str(self.angle_l) + \"*\" + str(self.angle_t)\\\n + \")*\" + str(self.angle_fy) + \"/ (&#8730 3 *\" + str(self.gamma_m0)\\\n + \")<br>\" + space(1) + \"= \" + str(self.outstanding_leg_shear_capacity)\n rstr += design_check_row(\"Shear capacity of outstanding leg (kN)\", req_field, prov_field,\n remark)\n\n # Moment capacity of outstanding leg\n if self.is_shear_high == False:\n req_field = \"As V &#8804 0.6 \" + sub(\"V\", \"d\")\n req_field += \",<br>[Cl 8.2.1.2] is applicable <br>\"\n req_field += sub(\"M\", \"d\") + \" &#8805 Moment at root of angle\"\n req_field += \"<br>\" + sub(\"M\", \"d\") + \" &#8805 \" + str(self.moment_at_root_angle)\n prov_field = sub(\"M\", \"d\") + \" = min(\" + sub(\"beta\", \"b\") + sub(\"Z\", \"e\") + sub(\"f\", \"y\")\n prov_field += \"/\" + sub(\"gamma\", \"m0\") + \", <br>\" + space(1) +\\\n \" 1.5\" + sub(\"Z\", \"e\") + sub(\"f\",\"y\") + \"/\" + sub(\"gamma\", \"m0\") + \")\"\n prov_field += \"<br>\" + space(1) + \" = min(1.0* \" + str(self.angle_l) + \"*(\" + str(self.angle_t) + \"^2/6)*\"\n prov_field += str(self.angle_fy) + \"/\" + str(self.gamma_m0) + \",<br>\" + space(2) \\\n + \" 1.5*\" + str(self.angle_l) + \"*(\" + str(self.angle_t) + \"^2/6)*\"\n prov_field += str(self.angle_fy) + \"/\" + str(self.gamma_m0) + \")\"\n prov_field += \"<br>\" + space(1) + \"= \" + str(self.moment_capacity_angle)\n\n elif self.is_shear_high == True:\n req_field = \"As V &#8805 0.6 \" + sub(\"V\", \"d\")\n req_field += \",<br>[Cl 8.2.1.3] is applicable\"\n req_field += \"<br>\" + sub(\"M\", \"dv\") + \" &#8805 Moment at root of angle\"\n req_field += \"<br>\" + sub(\"M\", \"dv\") + \" &#8805 \" + str(self.moment_at_root_angle) + \"<br>\"\n prov_field = sub(\"M\", \"dv\") + \"= min((1 - beta)\" + sub(\"M\", \"d\") + \" , \"\n prov_field += \"1.2 \" + sub(\"Z\", \"e\") + sub(\"f\", \"y\") + \"/\" + sub(\"gamma\", \"m0\") + \") <br>\"\n prov_field += space(1) + \"where, <br>\" + space(2) + \"beta = ((2V/\" + sub(\"V\", \"d\")\\\n + \")-1)^2 = \" + str(round(self.moment_high_shear_beta, 4)) + \"<br>\"\n prov_field += \"<br>\" + sub(\"M\", \"dv\") + \" = \" + \"min((1 - \" + str(round(self.moment_high_shear_beta, 4))\\\n + \")<br>\" + space(1) + \"*1.0*(\" + str(self.angle_l) + \"*\" + str(self.angle_t) + \"^2/6)*\"\n prov_field += str(self.angle_fy) + \"/\" + str(self.gamma_m0) + \" , \"\n prov_field += \"<br>\" + space(1) + \"1.2*(\" + str(self.angle_l) + \"*\" + str(self.angle_t) + \"^2/6)*\"\n prov_field += str(self.angle_fy) + \"/\" + str(self.gamma_m0) + \")\"\n prov_field += \"<br>\" + space(1) + \" = \" + str(self.moment_capacity_angle)\n\n if self.moment_capacity_angle > 
self.moment_at_root_angle:\n remark = check_pass\n elif self.moment_capacity_angle < self.moment_at_root_angle:\n remark = check_fail\n rstr += design_check_row(\"Moment capacity of outstanding leg (kN-mm)\", req_field,\n prov_field, remark)\n\n # Top angle\n rstr += design_check_row(\"Top Angle\", \"\", \"\", \"\", col_span=\"4\", text_one_css=\"detail\")\n req_field = \"Recommended size (based on stability only): \" + str(self.top_angle_recommended)\n prov_field = \"User selected size: \" + str(self.top_angle)\n rstr += design_check_row(\"Section \", req_field, prov_field, \" \")\n\n # End distance (mm)\n if self.top_angle_end_dist_beam <= self.min_end_dist or \\\n self.top_angle_end_dist_column <= self.min_end_dist:\n remark = check_fail\n else:\n remark = check_pass\n req_field = \" &#8805;\" + str(self.min_edge_multiplier) + \"*bolt_hole_diameter\" + \" [cl. 10.2.4.2]\"\n req_field += \"<br> &#8805;\" + str(self.min_edge_multiplier) + \"*\" + dia_hole + \" = \" + str(self.min_end_dist)\n prov_field = \" on leg connected to Beam: \" + str(self.top_angle_end_dist_beam)\n prov_field += \"<br> on leg connected to Column: \" + str(self.top_angle_end_dist_column)\n rstr += design_check_row(\"End distance (mm)\", req_field, prov_field, remark)\n\n\n rstr += t('/table')\n rstr += t('h1 style=\"page-break-before:always\"')\n rstr += t('/h1')\n\n # -----------------------------------------------------------------------------------\n rstr += self.design_report_header()\n # -----------------------------------------------------------------------------------\n\n # Connection images (views)\n rstr += t('table width = 100% border-collapse= \"collapse\" border=\"1px solid black\"')\n\n # row = [0, \"Views\", \" \"]\n # rstr += t('tr')\n # rstr += t('td colspan=\"2\" class=\" detail\"') + space(row[0]) + row[1] + t('/td')\n # rstr += t('/tr')\n rstr += design_summary_row(0, \"Views\", \"detail\", col_span=\"2\")\n\n if self.safe is True:\n png = folder + \"/images_html/3D_Model.png\"\n datapng = '<object type=\"image/PNG\" data= %s width =\"450\"></object\">' % png\n\n side = folder + \"/images_html/seatSide.png\"\n dataside = '<object type=\"image/PNG\" data= %s width =\"400\"></object>' % side\n\n top = folder + \"/images_html/seatTop.png\"\n datatop = '<object type=\"image/PNG\" data= %s width =\"400\"></object>' % top\n\n front = folder + \"/images_html/seatFront.png\"\n datafront = '<object type=\"image/PNG\" data= %s width =\"450\"></object>' % front\n\n row = [0, datapng, datatop]\n rstr += t('tr') + nl()\n rstr += html_space(4) + t('td align=\"center\" class=\" header2\"') + space(row[0]) + row[1] + t('/td') + nl()\n rstr += html_space(4) + t('td align=\"center\" class=\" header2\"') + row[2] + t('/td') + nl()\n rstr += t('/tr' + nl())\n\n row = [0, dataside, datafront]\n rstr += t('tr') + nl()\n rstr += html_space(4) + t('td align=\"center\" class=\" header2\"') + space(row[0]) + row[1] + t('/td') + nl()\n rstr += html_space(4) + t('td align=\"center\" class=\" header2 \"') + row[2] + t('/td') + nl()\n rstr += t('/tr') + nl()\n\n else:\n pass\n\n rstr += t('/table') + nl() + \" \" + nl()\n rstr += t('h1 style=\"page-break-before:always\"')\n rstr += t('/h1')\n\n # -----------------------------------------------------------------------------------\n rstr += self.design_report_header()\n # -----------------------------------------------------------------------------------\n\n rstr += t('hr')\n rstr += t('/hr') + nl() + \" \" + nl()\n\n rstr += t('table width = 100% border-collapse= \"collapse\" 
border=\"1px solid black\"') + nl()\n rstr += html_space(1) + t('''col width=30%''')\n rstr += html_space(1) + t('''col width=70%''') + nl()\n\n rstr += html_space(1) + t('tr') + nl()\n row = [0, \"Additional Comments\", additional_comments]\n rstr += html_space(2) + t('td class= \"detail1\"') + space(row[0]) + row[1] + t('/td') + nl()\n rstr += html_space(2) + t('td class= \"detail2\" align=\"justified\"') + row[2] + t('/td') + nl()\n rstr += html_space(1) + t('/tr') + nl()\n\n rstr += t('/table') + nl()\n\n myfile.write(rstr)\n myfile.write(t('/body'))\n myfile.write(t('/html'))\n myfile.close()", "def write_to_file(filename, content):\n with open(filename, 'w') as f:\n f.write(content)" ]
[ "0.7480921", "0.70431954", "0.6845408", "0.67692447", "0.6768163", "0.67273104", "0.66346675", "0.65480995", "0.64376026", "0.6430084", "0.64230853", "0.6403235", "0.6334453", "0.63271886", "0.6322628", "0.63213557", "0.62946224", "0.62572914", "0.62547946", "0.62540007", "0.62473965", "0.6234435", "0.6226364", "0.6190184", "0.61852014", "0.61759675", "0.6168344", "0.6146722", "0.61224496", "0.6107341", "0.61002266", "0.6064603", "0.6023513", "0.59865236", "0.59796846", "0.59741133", "0.59736687", "0.5920274", "0.59189785", "0.59102327", "0.5892055", "0.58856344", "0.5882894", "0.58700794", "0.58664656", "0.5816678", "0.58140993", "0.57807577", "0.57646966", "0.5762738", "0.5759294", "0.57517046", "0.5743602", "0.5742829", "0.5720453", "0.5694961", "0.5674933", "0.56749326", "0.56598026", "0.5658858", "0.5655614", "0.56306505", "0.5616396", "0.56162065", "0.5616121", "0.5599896", "0.55851346", "0.55795014", "0.557618", "0.5568649", "0.5559763", "0.5557997", "0.5541426", "0.5535628", "0.55337393", "0.55309373", "0.5525708", "0.5524649", "0.55246377", "0.5517801", "0.55142915", "0.5512815", "0.5511734", "0.55111516", "0.54942685", "0.54907894", "0.5489925", "0.54864264", "0.5477409", "0.5465038", "0.545698", "0.5456764", "0.5449184", "0.5448683", "0.54486454", "0.5448199", "0.5442242", "0.54411894", "0.54395604", "0.54387844" ]
0.5828391
45
Drops an Operation, identified by its Operation Id, and its children recursively. Drop deletes the Operations from Database
def drop_operation(cls,operation_id):
    db = cls._core.get_db()

    stmnt = "SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS IN (0, 2) ;"
    cur = db.query(cls._core,stmnt,(operation_id,))
    for row in cur.fetchallmap():
        cls.drop_operation(row["OPE_ID"])

    stmnt = "DELETE FROM OPERATIONS WHERE OPE_ID = ? AND OPE_STATUS IN (0, 2) ;"
    db.query(cls._core,stmnt,(operation_id,),commit=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cancel_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 0 ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.cancel_operation(row[\"OPE_ID\"])\n\n stmnt = \"DELETE FROM OPERATIONS WHERE OPE_ID = ? AND OPE_STATUS = 0 ;\"\n db.query(cls._core,stmnt,(operation_id,),commit=True)", "def process_children(cls, operation):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID, OPE_TYPE FROM OPERATIONS WHERE OPE_OPE_PARENT = ? ORDER BY OPE_INVOKED ;\"\n stmnt_lock = \"UPDATE OPERATIONS SET OPE_STATUS = 1 WHERE OPE_ID = ? ;\"\n cur = db.query(cls._core,stmnt,(operation.get_id(),))\n for row in cur.fetchallmap():\n child_operation = cls.restore_operation(row)\n db.query(cls._core,stmnt_lock,(child_operation.get_id(),),commit=True)\n try:\n cls.process_children(child_operation)\n child_operation.do_workload()\n except Exception,e:\n stmnt_err = \"UPDATE OPERATIONS SET OPE_STATUS = 2 WHERE OPE_ID = ? ;\"\n db.query(cls._core,stmnt_err,(int(row[\"OPE_ID\"]),),commit=True)\n #TODO GENERATE ERROR IN LOG\n raise e\n stmnt_delete = \"DELETE FROM OPERATIONS WHERE OPE_ID = ?;\"\n db.query(cls._core,stmnt_delete,(child_operation.get_id(),),commit=True)", "def remove_operation(self, name):\n\n del self.operations[name]", "def _simple_deletion(self, operation, labels):\r\n label_strings = []\r\n for label in labels:\r\n if inspect.isclass(label) and issubclass(label, Edge):\r\n label_string = label.get_label()\r\n elif isinstance(label, Edge):\r\n label_string = label.get_label()\r\n label_strings.append(label_string)\r\n\r\n return self._delete_related(operation, label_strings)", "def drop(self):\n j.sal.fs.removeDirTree(self._root, True)\n j.sal.fs.createDir(self._root)", "def _simple_deletion(self, operation, labels):\n from mogwai.models.edge import Edge\n\n label_strings = []\n for label in labels:\n if inspect.isclass(label) and issubclass(label, Edge):\n label_string = label.get_label()\n elif isinstance(label, Edge):\n label_string = label.get_label()\n elif isinstance(label, string_types):\n label_string = label\n else:\n raise MogwaiException('traversal labels must be edge classes, instances, or strings')\n label_strings.append(label_string)\n\n future = connection.future_class()\n future_result = self._delete_related(operation, label_strings)\n\n def on_read(f2):\n try:\n result = f2.result()\n except Exception as e:\n future.set_exception(e)\n else:\n future.set_result(result)\n\n def on_save(f):\n try:\n stream = f.result()\n except Exception as e:\n future.set_exception(e)\n else:\n future_read = stream.read()\n future_read.add_done_callback(on_read)\n\n future_result.add_done_callback(on_save)\n\n return future", "def delete_branch_from_db(element_id):\n from core_parser_app.components.data_structure.models import (\n DataStructureElement,\n )\n\n element = DataStructureElement.get_by_id(element_id)\n\n for child in element.children.all():\n delete_branch_from_db(str(child.pk))\n\n element.delete()", "def test_delete_complex_tree_06(comp):\n comp.delete(9)\n assert tuple(comp.in_order()) == (4, 6, 7, 8, 10, 11, 12, 13, 14, 15)\n assert tuple(comp.breadth_first()) == (11, 8, 13, 6, 10, 12, 14, 4, 7, 15)", "def test_delete_complex_tree_03(comp):\n comp.delete(15)\n assert tuple(comp.in_order()) == (4, 6, 7, 8, 9, 10, 11, 12, 13, 14)\n assert tuple(comp.breadth_first()) == (11, 8, 13, 6, 10, 12, 14, 4, 7, 9)", "def drop(self):\n self.id = None", "def 
test_delete_complex_tree_02(comp):\n comp.delete(4)\n assert tuple(comp.in_order()) == (6, 7, 8, 9, 10, 11, 12, 13, 14, 15)\n assert tuple(comp.breadth_first()) == (11, 8, 13, 6, 10, 12, 14, 7, 9, 15)", "def test_delete_complex_tree_04(comp):\n comp.delete(13)\n assert tuple(comp.in_order()) == (4, 6, 7, 8, 9, 10, 11, 12, 14, 15)\n assert tuple(comp.breadth_first()) == (11, 8, 14, 6, 10, 12, 15, 4, 7, 9)", "def test_delete_complex_tree_07(comp):\n comp.delete(12)\n assert tuple(comp.in_order()) == (4, 6, 7, 8, 9, 10, 11, 13, 14, 15)\n assert tuple(comp.breadth_first()) == (11, 8, 14, 6, 10, 13, 15, 4, 7, 9)", "def test_delete_complex_tree_05(comp):\n comp.delete(8)\n assert tuple(comp.in_order()) == (4, 6, 7, 9, 10, 11, 12, 13, 14, 15)\n assert tuple(comp.breadth_first()) == (11, 9, 13, 6, 10, 12, 14, 4, 7, 15)", "def drop(self):\n\t\tdrop_model(self.name, self.cursor, print_info = False)", "def test_delete_complex_tree_08(comp):\n comp.delete(11)\n assert tuple(comp.in_order()) == (4, 6, 7, 8, 9, 10, 12, 13, 14, 15)\n assert tuple(comp.breadth_first()) == (12, 8, 14, 6, 10, 13, 15, 4, 7, 9)", "def op_delete(self, args):\n stack_level = 0\n if args != None:\n stack_level = int(args[0])\n self.require_stack(stack_level+1)\n if stack_level == None:\n self.stack.pop()\n else:\n self.stack.pop(-stack_level-1)", "def test_handle_root_deletion(right_left_most_has_right_child):\n right_left_most_has_right_child.delete(1)\n assert tuple(right_left_most_has_right_child.in_order()) == (\n 3, 5, 6, 7, 8, 10, 20\n )", "def delete_operation(\n self,\n ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"delete_operation\" not in self._stubs:\n self._stubs[\"delete_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/DeleteOperation\",\n request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,\n response_deserializer=None,\n )\n return self._stubs[\"delete_operation\"]", "def delete_operation(\n self,\n ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"delete_operation\" not in self._stubs:\n self._stubs[\"delete_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/DeleteOperation\",\n request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,\n response_deserializer=None,\n )\n return self._stubs[\"delete_operation\"]", "def delete_operation(\n self,\n ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"delete_operation\" not in self._stubs:\n self._stubs[\"delete_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/DeleteOperation\",\n request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,\n response_deserializer=None,\n )\n return self._stubs[\"delete_operation\"]", "def DeleteOperation(\n self,\n request: google.longrunning.operations_pb2.DeleteOperationRequest,\n context: grpc.ServicerContext,\n ) -> google.protobuf.empty_pb2.Empty:", "def 
delete_objects(data,\n path = None,\n submode_adjustor = None):\n if debug.description(): # description debugging\n print 'delete_objects', data, path\n\n if not path:\n raise error.CommandDescriptionError(\"Need path to delete an object\")\n\n data = dict(data)\n bigdb = bigsh.bigdb\n bigdb.canonicalize_values_of_path(path, data)\n\n # if the node type under configuration is a LIST\n # (or LEAF_LIST), this likely wants to add a new\n # item to the list.\n (schema, items_matched) = bigdb.schema_of_path(path, {} )\n if schema == None:\n print 'Missing Schema for', path\n return\n node_type = schema['nodeType']\n if debug.description(): # description debugging\n print 'delete_objects:', path, node_type\n\n if node_type == 'LIST':\n list_nodes = schema['listElementSchemaNode']\n selection = {}\n for key in list_nodes.get('keyNodeNames', []):\n if key in data:\n full_path = '%s/%s' % (path, key)\n selection[full_path] = data[key]\n del data[key]\n # populate for fields which are key's\n for key in list_nodes.get('keyNodeNames', []):\n if not key in selection:\n for row in command.bigsh.mode_stack:\n if 'name' in row and row['name'] == key:\n if 'obj' in row:\n selection[key] = row['obj']\n bigdb.add_mode_stack_paths(selection)\n if submode_adjustor:\n command.submode_adjustor_invoke(submode_adjustor,\n path,\n selection,\n data,\n 'delete')\n\n oper = bigdb.canonicalize_values_for_delete(path,\n data,\n selection,\n list_nodes['childNodes'])\n if oper == 'POST':\n bigdb.post(path, data, selection)\n else:\n # bigdb.delete(path, data, selection) perhaps date <- {}\n bigdb.delete(path, data, selection)\n return\n if node_type == 'LEAF_LIST':\n if debug.description(): # description debugging\n print 'delete_object: leaf-list needs implementation:LEAF_LISTN'\n selection = {}\n bigdb.add_mode_stack_paths(selection)\n leaf_node = schema['leafSchemaNode']\n type_node = leaf_node['typeSchemaNode']\n split_path = path.split('/')\n item_name = split_path[-1]\n item = None\n if item_name in data:\n item = data[item_name]\n elif type_node['name'] in data:\n item = data[type_node['name']]\n del data[type_node['name']]\n if debug.description(): # description debugging\n print 'DATUM', data, 'SELECTUM', selection, 'ITEM', item\n # Currently, 'add/delete' for specific elements isn't\n # directly support in the BigDB REST API's. 
\n split_path = path.split('/')\n base_path = '/'.join(split_path[:-1])\n (schema, result) = bigdb.schema_and_result(base_path, selection)\n collection = result.expect_single_result(failed_result = [])\n item_name = split_path[-1]\n if item_name in collection:\n collection = collection[item_name]\n if debug.description(): # description debugging\n print 'COLLECTION', collection, ' REMOVE ', item\n if item in collection:\n collection = [x for x in collection if x != item]\n bigdb.put(path, collection, selection, 'query')\n return\n raise error.CommandSemanticError('%s \"%s\" '\n 'not currently configured' %\n (item_name, item))\n return\n if node_type == 'CONTAINER':\n container_nodes = schema.get('childNodes')\n\n selection = {}\n bigdb.add_mode_stack_paths(selection)\n\n for (n,v) in data.items():\n oper = bigdb.canonicalize_values_for_delete(path,\n data,\n selection,\n container_nodes)\n if oper == 'PATCH':\n bigdb.patch(path, data, selection)\n else:\n item_path = '%s/%s' % (path, n)\n bigdb.delete(item_path, {}, selection)\n return\n\n bigsh.bigdb.add_mode_stack_paths(data)\n bigsh.bigdb.delete(path, data)", "def delete(self, tree_path):\n\t\traise NotImplementedError", "def test_deletion(basic_tree):\n tree = red_black_tree.RBTree()\n\n # 23, 4, 30, 11, 7, 34, 20, 24, 22, 15, 1\n for key, data in basic_tree:\n tree.insert(key=key, data=data)\n\n # No child\n tree.delete(15)\n assert [item for item in tree.inorder_traverse()] == [\n (1, \"1\"),\n (4, \"4\"),\n (7, \"7\"),\n (11, \"11\"),\n (20, \"20\"),\n (22, \"22\"),\n (23, \"23\"),\n (24, \"24\"),\n (30, \"30\"),\n (34, \"34\"),\n ]\n\n # One right child\n tree.delete(7)\n assert [item for item in tree.inorder_traverse()] == [\n (1, \"1\"),\n (4, \"4\"),\n (11, \"11\"),\n (20, \"20\"),\n (22, \"22\"),\n (23, \"23\"),\n (24, \"24\"),\n (30, \"30\"),\n (34, \"34\"),\n ]\n\n # One left child\n tree.insert(key=9, data=\"9\")\n tree.delete(11)\n assert [item for item in tree.inorder_traverse()] == [\n (1, \"1\"),\n (4, \"4\"),\n (9, \"9\"),\n (20, \"20\"),\n (22, \"22\"),\n (23, \"23\"),\n (24, \"24\"),\n (30, \"30\"),\n (34, \"34\"),\n ]\n\n # Two children\n tree.delete(23)\n assert [item for item in tree.inorder_traverse()] == [\n (1, \"1\"),\n (4, \"4\"),\n (9, \"9\"),\n (20, \"20\"),\n (22, \"22\"),\n (24, \"24\"),\n (30, \"30\"),\n (34, \"34\"),\n ]", "def _operation_tree(self):\n\n # initial state\n i = 0\n level = 0\n stack = []\n current = None\n\n def _create_operation(args):\n profile_stats = None\n name = args[0].strip()\n args.pop(0)\n if len(args) > 0 and \"Records produced\" in args[-1]:\n records_produced = int(\n re.search(\"Records produced: (\\\\d+)\", args[-1]).group(1)\n )\n execution_time = float(\n re.search(\"Execution time: (\\\\d+.\\\\d+) ms\", args[-1]).group(1)\n )\n profile_stats = ProfileStats(records_produced, execution_time)\n args.pop(-1)\n return Operation(\n name, None if len(args) == 0 else args[0].strip(), profile_stats\n )\n\n # iterate plan operations\n while i < len(self.plan):\n current_op = self.plan[i]\n op_level = current_op.count(\" \")\n if op_level == level:\n # if the operation level equal to the current level\n # set the current operation and move next\n child = _create_operation(current_op.split(\"|\"))\n if current:\n current = stack.pop()\n current.append_child(child)\n current = child\n i += 1\n elif op_level == level + 1:\n # if the operation is child of the current operation\n # add it as child and set as current operation\n child = _create_operation(current_op.split(\"|\"))\n 
current.append_child(child)\n stack.append(current)\n current = child\n level += 1\n i += 1\n elif op_level < level:\n # if the operation is not child of current operation\n # go back to it's parent operation\n levels_back = level - op_level + 1\n for _ in range(levels_back):\n current = stack.pop()\n level -= levels_back\n else:\n raise Exception(\"corrupted plan\")\n return stack[0]", "def delete(self):\n if not hasattr(self, 'id'):\n raise BadReference('No matching issue on disk')\n shutil.rmtree(self.paths['root'])", "def drop(self):\n for step in self.steps:\n step[1].drop()", "def drop(self):\n pass", "def drop(self):\n pass", "def test_do_delete(test_dao):\r\n DUT = dtmFunction(test_dao, test=True)\r\n DUT.do_select_all(revision_id=1)\r\n DUT.do_insert(revision_id=1, parent_id=1)\r\n\r\n _error_code, _msg = DUT.do_delete(DUT.last_id)\r\n\r\n assert _error_code == 0\r\n assert _msg == (\"RAMSTK SUCCESS: Deleting an item from the RAMSTK Program \"\r\n \"database.\")", "def catalog_drop(self, args):\n try:\n catalog = self.server.connect_ermrest(args.id)\n catalog.delete_ermrest_catalog(really=True)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog not found', e)\n else:\n raise e", "def drop(self):\n cursor = self.connect.create_cursor()\n queries = (\n (\"USE dbPurBeurre\"),\n (\"SET foreign_key_checks = 0\"),\n (\"DROP TABLE IF EXISTS Asso_Prod_Cat\"),\n (\"DROP TABLE IF EXISTS Categories\"),\n (\"DROP TABLE IF EXISTS Produits\")\n )\n\n for query in queries:\n cursor.execute(query)", "def delete(self, id=None, **kwargs):\r\n rm = ResourceManager()\r\n pt = self.db.auth_permission\r\n if id and not isinstance(id, (list, tuple, set)):\r\n id = [id]\r\n\r\n # removing private args\r\n if self.private_args:\r\n private_args = self.private_args.table\r\n self.private_args.delete(id)\r\n else:\r\n private_args = None\r\n\r\n # # removing many to many references\r\n # m2ms = set()\r\n # for reference in (tuple(x.split('/')) for x in imap(itemgetter('indexName'),self.many_to_many)):\r\n # resource = rm.m2m(reference)\r\n # if resource:\r\n # m2ms.add(resource.table)\r\n # resource.delete(self,collection = id)\r\n\r\n # getting table names and field names to delete\r\n cascading_deletion = tuple((field.table, field) for field in self.table._referenced_by if\r\n field.ondelete == 'CASCADE' and field.table != private_args) # and field.table not in m2ms)\r\n # deleting all related objects\r\n for table, field in cascading_deletion:\r\n res = rm.resource(table)\r\n if res:\r\n # fetch all id of related rows\r\n ids = set(chain(*self.sql(field.belongs(id), table._id, as_dict=False)))\r\n if ids:\r\n # if related entitiy is a many to many relation delete reference with other objects, but not related objects\r\n if isinstance(res, ManyToManyRelation):\r\n # making deletion simpy by forign related attribute\r\n res.delete(self, resource_id=ids)\r\n else:\r\n res.delete(id=ids, _check_permissions=False)\r\n\r\n self.db(self.table.id.belongs(id)).delete()\r\n # deleting all directly related permissions\r\n self.db((pt.table_name == self.table._tablename) & pt.record_id.belongs(id)).delete()\r\n # if realtime_enabled and self.minimal_permissions:\r\n # sync_permissions(self.table._tablename, id, self.minimal_permissions)\r\n # perms = sql(pt.record_id.belongs(id) & (pt.table_name == self.table._tablename))\r\n # if perms:\r\n # rt_sync_permissions(self.table, id, perms)\r", "def tearDown(self):\n\n db.session.rollback()\n 
db.session.remove()\n db.drop_all()", "def remove_op(self, op):\n self._operations.remove(op)", "def delete_branch(self):\n for p in self.get_branch():\n if p.kind == 'image':\n self.get(p.uid).delete_image()\n else: \n # delete related tags\n for t in self.Tag.list(page=p.uid):\n t.delete()\n # delete page \n p.delete()", "def delete_tree(self, idx: int = None, id: int = None):\n\n if id is not None:\n idx = self.tree_ids.index(id)\n\n self.nodes.pop(idx)\n self.edges.pop(idx)\n self.names.pop(idx)\n self.colors.pop(idx)\n self.tree_ids.pop(idx)\n self.group_ids.pop(idx)", "def drop(self):\n self.__init__()\n cursor = self.connection.cursor()\n cursor.execute(drop_tables)\n queries = cursor.fetchall()\n for i in queries:\n cursor.execute(i[0])\n\n self.commit()\n self.__init__()", "def test_deletion_no_child(basic_tree):\n tree = red_black_tree.RBTree()\n\n test_tree = [(23, \"23\"), (4, \"4\"), (30, \"30\"), (11, \"11\")]\n\n for key, data in test_tree:\n tree.insert(key=key, data=data)\n\n tree.delete(4)\n assert [item for item in tree.inorder_traverse()] == [\n (11, \"11\"),\n (23, \"23\"),\n (30, \"30\"),\n ]", "def drop(self, cascade=False):\n if self.db.table_exists(self.name):\n self.drop_foreign_keys()\n self.execute(self.commands.drop_table(self.name, cascade))\n self.commit()", "def delete(self, session):\n # ForeignKey constrains CASCADE on delete\n session.delete(self)\n session.commit()\n # Clean up any orphan CatalogStars\n session.query(CatalogStar)\\\n .filter(CatalogStar.catalog_id.is_(None))\\\n .delete(synchronize_session=False)\n session.commit()\n # Clean up any orphan Observations\n session.query(Observation)\\\n .filter(Observation.catalog_star_id.is_(None))\\\n .delete(synchronize_session=False)\n session.commit()", "def teardown_db():\n engine = config['tg.app_globals'].sa_engine\n connection = engine.connect()\n\n # INFO - D.A. - 2014-12-04\n # Recipe taken from bitbucket:\n # https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/DropEverything\n\n inspector = reflection.Inspector.from_engine(engine)\n metadata = MetaData()\n\n tbs = []\n all_fks = []\n views = []\n\n # INFO - D.A. 
- 2014-12-04\n # Sequences are hard defined here because SQLA does not allow to reflect them from existing schema\n seqs = [\n Sequence('seq__groups__group_id'),\n Sequence('seq__contents__content_id'),\n Sequence('seq__content_revisions__revision_id'),\n Sequence('seq__permissions__permission_id'),\n Sequence('seq__users__user_id'),\n Sequence('seq__workspaces__workspace_id')\n ]\n\n for view_name in inspector.get_view_names():\n v = Table(view_name,metadata)\n views.append(v)\n\n for table_name in inspector.get_table_names():\n\n fks = []\n for fk in inspector.get_foreign_keys(table_name):\n if not fk['name']:\n continue\n fks.append(\n ForeignKeyConstraint((),(),name=fk['name'])\n )\n t = Table(table_name,metadata,*fks)\n tbs.append(t)\n all_fks.extend(fks)\n\n if not config['sqlalchemy.url'].startswith('sqlite'):\n for fkc in all_fks:\n connection.execute(DropConstraint(fkc))\n\n for view in views:\n drop_statement = 'DROP VIEW {}'.format(view.name)\n # engine.execute(drop_statement)\n connection.execute(drop_statement)\n\n for table in tbs:\n connection.execute(DropTable(table))\n\n\n for sequence in seqs:\n try:\n connection.execute(DropSequence(sequence))\n except Exception as e:\n logger.debug(teardown_db, 'Exception while trying to remove sequence {}'.format(sequence.name))\n\n transaction.commit()\n connection.close()\n engine.dispose()", "def delete():\n\n from slicr.extensions import db\n\n click.echo('deleting database...')\n\n db.drop_all()", "def delete_order():", "def test_deletion_one_child(basic_tree):\n tree = red_black_tree.RBTree()\n\n # 23, 4, 30, 11, 7, 34, 9\n test_tree = [\n (23, \"23\"),\n (4, \"4\"),\n (30, \"30\"),\n (11, \"11\"),\n (7, \"7\"),\n (34, \"34\"),\n (9, \"9\"),\n ]\n\n for key, data in test_tree:\n tree.insert(key=key, data=data)\n\n tree.delete(11)\n assert [item for item in tree.inorder_traverse()] == [\n (4, \"4\"),\n (7, \"7\"),\n (9, \"9\"),\n (23, \"23\"),\n (30, \"30\"),\n (34, \"34\"),\n ]", "def remove():\n\n db_remove()", "def drop_path_op(self, op, x, drop_path_prob):\n if not isinstance(op, Identity):\n return drop_path(op(x), drop_path_prob)\n return op(x)", "def delete(self, flow):\n for parent in self.parents:\n parent.children.remove(self)\n for child in self.children:\n child.parents.remove(self)\n\n flow.blocks.remove(self)", "def test_deletion_two_children(basic_tree):\n tree = red_black_tree.RBTree()\n\n test_tree = [\n (23, \"23\"),\n (4, \"4\"),\n (30, \"30\"),\n (11, \"11\"),\n (7, \"7\"),\n (34, \"34\"),\n (9, \"9\"),\n (27, \"27\"),\n ]\n\n for key, data in test_tree:\n tree.insert(key=key, data=data)\n\n tree.delete(23)\n assert [item for item in tree.inorder_traverse()] == [\n (4, \"4\"),\n (7, \"7\"),\n (9, \"9\"),\n (11, \"11\"),\n (27, \"27\"),\n (30, \"30\"),\n (34, \"34\"),\n ]", "def delete(self, db: Session) -> Optional[FidesopsBase]:\n _ = [target.delete(db=db) for target in self.targets]\n return super().delete(db=db)", "def test_team_template_folders_id_children_fk_delete(self):\n pass", "def main(self):\n self.delete_details()\n self.delete_cleaned()\n self.vacuum()", "def drop_data():\n DATABASE['product'].drop()\n DATABASE['customer'].drop()\n DATABASE['rental'].drop()", "def delete(self):\n try:\n self.s.delete(self)\n self.s.commit()\n except SQLAlchemyError:\n self.s.rollback()\n raise", "def delete_food(self): # TODO test\n try:\n meal = self.food.meal\n meal.foods.remove(self.food)\n self.parent_node.parent_node.update_text(True)\n print(\"delete food\")\n except AttributeError:\n recipe = 
self.food.recipe\n recipe.ingredients.remove(self.food)\n self.parent_node.parent_node.update_text(True)\n print(\"delete ingredient\")\n tree = self.parent\n item_label = self.parent_node\n tree.remove_node(self)\n tree.remove_node(item_label)", "def delete(self) -> None:\n try:\n result_subpath = get_result_subpath(self.db_root, self.cache_path)\n del result_subpath[self.cache_path[-1]]\n except Exception as e:\n raise e\n finally:\n self._close_transaction()", "def delete(self):\n for lv in self.logical_volumes:\n self.delete_lv(lv_name=lv)\n\n super().delete()", "def removeDeletion(self, *args):\n return _libsbml.Submodel_removeDeletion(self, *args)", "def drop_table(self, name):\n if not self._open:\n raise ValueError(\"Operation on closed file\")\n\n grp = self.grp[name]\n if isinstance(grp, self.GROUP_TYPE) and all(\n isinstance(k, self.ARRAY_TYPE) for k in grp.values()\n ):\n table_name = grp.name\n if table_name == \"/\":\n for colname in self.grp.keys():\n self.delcol(grp, colname)\n else:\n parent = grp.parent\n del parent[name]", "def _is_valid_delete_operation(session, row):\n # Check for any pending or processing create or update\n # ops on the row itself\n if db.check_for_pending_or_processing_ops(\n session, row.object_uuid, operation=[odl_const.ODL_UPDATE,\n odl_const.ODL_CREATE]):\n return False\n\n # Check for dependent operations\n dependent_resource_types = _DELETE_DEPENDENCIES.get(row.object_type)\n if dependent_resource_types is not None:\n for resource_type in dependent_resource_types:\n if db.check_for_pending_delete_ops_with_parent(\n session, resource_type, row.object_uuid):\n return False\n return True", "def drop_package ( self, name ):\n p = self._subdirs [name]\n del self._subdirs [name]\n p.fs_destroy()", "def drop_db():\n database.db.reflect()\n database.db.drop_all()\n print('Dropped the database')", "def drop_statement(self, objecttype, objectname):\n statement = Engine.drop_statement(self, objecttype, objectname)\n statement += \" CASCADE;\"\n return statement.replace(\" DATABASE \", \" SCHEMA \")", "def test_request_do_delete(test_dao, test_configuration):\r\n DUT = dtcFunction(test_dao, test_configuration, test=True)\r\n DUT.request_do_select_all(revision_id=1)\r\n DUT.request_do_insert(revision_id=1, parent_id=0)\r\n\r\n assert not DUT.request_do_delete(DUT.request_last_id())", "def tearDown(self):\r\n\r\n db.session.rollback()\r\n db.drop_all()", "def traverse_postorder(operation):\n\n nodes_postorder = []\n def recurse(node):\n if isinstance(node, Operation):\n for input_node in node.input_nodes:\n recurse(input_node)\n nodes_postorder.append(node)\n\n recurse(operation)\n return nodes_postorder", "def test_remove_childless_on_delete_tree(delete_tree):\n tree_size = delete_tree.size\n delete_tree.remove(\"teabaggers\")\n assert delete_tree.size == tree_size - 1", "def traverse_postorder(operation):\n\n nodes_postorder = []\n def recurse(node):\n if isinstance(node, Operation):\n for input_node in node.input_nodes:\n recurse(input_node)\n nodes_postorder.append(node)\n\n recurse(operation)\n return nodes_postorder", "def delete(self):\n if self.left is None or self.right is None:\n if self is self.parent.left:\n self.parent.left = self.left or self.right\n if self.parent.left is not None:\n self.parent.left.parent = self.parent\n else:\n self.parent.right = self.left or self.right\n if self.parent.right is not None:\n self.parent.right.parent = self.parent\n else:\n s = self.successor()\n self.key, s.key = s.key, self.key\n return s.delete()", "def 
tearDown(self):\n with app.app_context():\n db = app.db.get_db()\n cur = db.cursor()\n with app.open_resource('sql/drop_tests.sql', mode='r') as f:\n cur.execute(f.read())\n db.commit()\n cur.close()\n db.close()", "def drop(name):\n\t\treturn \"DROP DATABASE {0};\".format(name)", "def delete(self, sql):\n try:\n # Execute the SQL command\n self.cursor.execute(sql)\n # Commit your changes in the database\n self.db.commit()\n except:\n # Rollback in case there is any error\n self.db.rollback()", "def test_deletionDisownsParent(self):\n port = self.port(store=self.store, portNumber=self.lowPortNumber, factory=self.factory)\n port.setServiceParent(self.store)\n port.deleteFromStore()\n service = IServiceCollection(self.store)\n self.failIfIn(port, list(service))", "def delete_recipe(self): # TODO test\n meal = self.recipe.meal\n meal.recipes.remove(self.recipe)\n self.parent_node.update_text(True)\n\n tree = self.parent\n # item_label = self.parent_node\n tree.remove_node(self)\n # tree.remove_node(item_label)\n print(\"delete recipe\")", "def delete_depend_by_workflow_id(self, workflow_id):\n try:\n # use of a sub-query instead of join for delete is required\n # for sqlite\n sub_query = self._session.query(StepEntity.id).\\\n filter(StepEntity.workflow_id == workflow_id)\n self._session.query(StepDependencyEntity).\\\n filter(StepDependencyEntity.child_id.in_(sub_query)).\\\n delete(synchronize_session=False)\n except SQLAlchemyError as err:\n Log.an().error('sql exception [%s]', str(err))\n return False\n\n return True", "def delete(self):\n pdbox._args.get(\"dryrun\") or shutil.rmtree(self.path)\n pdbox.info(\"Deleted %s/\" % self.path)", "def Delete(root, node):\n target = root.FindLeaf(node)\n if target == None:\n # print 'no result'\n print(\"no result\")\n return root\n target.leaves.remove(node)\n target.CondenseTree()\n root = root.CondenseRoot()\n return root", "def pg_drop(ctx):\n ctx.obj = ConfigDBConnector()\n ctx.obj.connect()", "def drop(drop_all=False):\n\n engine = current_app.extensions['meowth_dbutils'].db.engine\n if current_app.extensions['meowth_dbutils'].metadata.bind is None:\n current_app.extensions['meowth_dbutils'].metadata.bind = engine\n with perform(\n name='dbutils drop',\n before='Dropping all project tables',\n fail='Error occured while droping project tables',\n ):\n current_app.extensions['meowth_dbutils'].metadata.drop_all()\n with perform(\n name='dbutils drop',\n before='Dropping alembic versioning table',\n fail='Error occured while dropping alembic table',\n ):\n engine.execute('drop table if exists alembic_version')\n if drop_all:\n with perform(\n name='dbutils drop',\n before='Dropping all other tables in database',\n fail='Error occured while dropping other tables',\n ):\n current_app.extensions['meowth_dbutils'].db.reflect()\n current_app.extensions['meowth_dbutils'].db.drop_all()", "def delete(self):\n\n lod_history = self.repo._get_lod_history(self.lod)\n assert lod_history.exists()\n lod_history.update(self.repo._youngest, None)\n self._mark_deleted()", "def drop(self):\n c = self.cursor()\n for table in ['experiment','fact']:\n c.execute(\"drop table if exists {}\".format(table))\n self.commit()", "def retry_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 2 ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.retry_operation(row[\"OPE_ID\"])\n\n stmnt = \"UPDATE OPERATIONS SET OPE_STATUS = 0 WHERE OPE_ID = ? 
AND OPE_STATUS = 2 ;\"\n db.query(cls._core,stmnt,(operation_id,),commit=True)", "def delete(self):\n if self.parent:\n assert isinstance(self.parent, Collection) # only know how to delete from Collection parents\n self.parent.delete_child(self)\n else:\n self._mark_deleted()", "def execute(self: \"DeleteBranchOperator\", context: Dict[str, Any]) -> Any:\n hook = NessieHook(conn_id=self.conn_id)\n\n hook.delete_reference(self.branch)", "def delete(self):\r\n if self.__abstract__:\r\n raise ThunderdomeException('cant delete abstract elements')\r\n if self.eid is None:\r\n return self\r\n query = \"\"\"\r\n e = g.e(eid)\r\n if (e != null) {\r\n g.removeEdge(e)\r\n g.stopTransaction(SUCCESS)\r\n }\r\n \"\"\" \r\n results = execute_query(query, {'eid':self.eid})", "def suspendOperations(options):\n runProgram([\"pkill\", \"repmgrd\"])\n program = options[\"bindir\"] + \"/pg_ctl\"\n cmd = [program, \"stop\", \"-D\", options[\"dbdir\"]]\n runProgram(cmd)\n audit(\"suspendOperations\")", "def delete_leaves_2(self, root):\n if not (root.left or root.right):\n print(\"delete\")\n print(root.data)\n print(\"---------\")\n root = None\n return\n\n if root.left:\n self.delete_leaves_2(root.left)\n\n if root.right:\n self.delete_leaves_2(root.right)", "def pre_database_node_delete(self, resource_id):\n pass", "def test_delete_retains_depth(comp):\n assert comp.depth() == 4\n comp.delete(7)\n assert tuple(comp.in_order()) == (4, 6, 8, 9, 10, 11, 12, 13, 14, 15)\n comp.delete(9)\n assert comp.depth() == 4\n comp.delete(4)\n comp.delete(15)\n assert comp.depth() == 3\n comp.delete(6)\n comp.delete(10)\n assert comp.depth() == 3\n comp.delete(12)\n comp.delete(14)\n assert comp.depth() == 2\n comp.delete(13)\n assert comp.depth() == 2\n assert tuple(comp.in_order()) == (8, 11)\n comp.delete(11)\n assert next(comp.in_order()) == 8\n assert comp.depth() == 1\n comp.delete(8)\n assert comp.depth() == 0\n comp.delete(12)\n assert comp.depth() == 0", "def __do_package_delete(item):\n\n file_path = DTF_PACKAGES_DIR + item.install_name\n\n if utils.delete_tree(file_path) != 0:\n log.e(TAG, \"Error removing tree! 
Continuing.\")\n\n conn = sqlite3.connect(DTF_DB)\n cur = conn.cursor()\n\n # Remove the line first.\n sql = ('DELETE FROM packages '\n \"WHERE name='%s'\" % item.name)\n\n cur.execute(sql)\n conn.commit()\n\n return 0", "def downgrade():\n pass\n # op.execute(\"\"\"\n # DELETE context_implications\n # FROM context_implications ci\n # INNER JOIN audits a ON a.context_id = ci.source_context_id\n # INNER JOIN programs p ON p.context_id = ci.context_id\n # WHERE a.program_id != p.id\n # \"\"\")\n\n # op.execute(\"\"\"\n # DELETE context_implications\n # FROM context_implications ci\n # INNER JOIN programs sp ON sp.context_id = ci.source_context_id\n # INNER JOIN programs p ON p.context_id = ci.context_id\n # \"\"\")", "def processDeleteCommand(self, objId):\n editor = self._parent\n obj = editor.findWithUUID(objId)\n if obj:\n print(\"DELETE FOR\",objId)\n # delete from object cache\n if objId in editor._total['objects']:\n del editor._total['objects'][objId]\n # clear uuid\n obj.opensim.uuid = \"\"\n scene = editor.get_current_scene()\n # unlink\n scene.objects.unlink(obj)\n editor.queueRedraw()", "def drop_everything():\n from sqlalchemy.engine.reflection import Inspector\n from sqlalchemy.schema import DropConstraint, DropTable, MetaData, Table\n\n con = db.engine.connect()\n trans = con.begin()\n inspector = Inspector.from_engine(db.engine)\n\n # We need to re-create a minimal metadata with only the required things to\n # successfully emit drop constraints and tables commands for postgres (based\n # on the actual schema of the running instance)\n meta = MetaData()\n tables = []\n all_fkeys = []\n\n for table_name in inspector.get_table_names():\n fkeys = []\n\n for fkey in inspector.get_foreign_keys(table_name):\n if not fkey[\"name\"]:\n continue\n\n fkeys.append(db.ForeignKeyConstraint((), (), name=fkey[\"name\"]))\n\n tables.append(Table(table_name, meta, *fkeys))\n all_fkeys.extend(fkeys)\n\n for fkey in all_fkeys:\n con.execute(DropConstraint(fkey))\n\n for table in tables:\n con.execute(DropTable(table))\n\n trans.commit()", "def delete_container(self, container: Container):", "def test_delete(self):\n data_columns = ['id', 'column_string', 'column_float']\n data_values = [[1, 'string1', 456.956], [2, 'string2', 38.905]]\n data = pd.DataFrame(data_values, columns=data_columns)\n data.name = 'test_delete'\n my_conn = MySQL(*self.conn_params)\n my_conn.insert(data)\n table = my_conn.get_table(data.name)\n expected = 2\n current = my_conn.engine.scalar(\n select([func.count('*')]).select_from(table)\n )\n self.assertEqual(current, expected)\n\n # delete from operation\n # the None argument in delete DML is included to avoid pylint E1120\n table.delete(None).where(table.c.id == 2).execute()\n\n expected = 1\n current = my_conn.engine.scalar(\n select([func.count('*')]).select_from(table)\n )\n self.assertEqual(current, expected)\n my_conn.drop(data.name)", "def do_command(self, args):\n testops = dbops.Tests()\n testops.delete(args)", "def delete(self):\n self.rpc.call(MsfRpcMethod.DbDelWorkspace, [{'workspace': self.name}])", "def visit_Delete(self, node):\n self.generic_visit(node)\n target = get_single_target(node)\n if isinstance(target, ast.Subscript):\n fun = to_attribute(self.operator, 'delitem')\n args = [ target.value, self.index_to_expr(target.slice) ]\n return ast.Expr(to_call(fun, args))\n return node", "def delete(self):\n\n # TODO find a way to remove this when sub-classing in HCRoot\n self.parent.del_child(self)" ]
[ "0.68534726", "0.59218234", "0.5495888", "0.5355895", "0.5291793", "0.5289697", "0.52653944", "0.52528286", "0.5250576", "0.52433366", "0.5235218", "0.5228821", "0.5226976", "0.522246", "0.5191076", "0.5159619", "0.5126623", "0.50397605", "0.50218856", "0.50218856", "0.50218856", "0.5013367", "0.5005075", "0.4987348", "0.49844423", "0.49713787", "0.49689227", "0.4968176", "0.49651787", "0.49651787", "0.49615496", "0.4938503", "0.49310765", "0.49119267", "0.49087885", "0.48826593", "0.48695815", "0.48568046", "0.48429465", "0.48320422", "0.48237476", "0.48153847", "0.48149046", "0.480681", "0.48065913", "0.47973275", "0.47945607", "0.47912702", "0.47777393", "0.4771215", "0.47597152", "0.475865", "0.4758547", "0.47387773", "0.47377208", "0.47374293", "0.4736282", "0.47255427", "0.47105178", "0.46981567", "0.46895945", "0.46881893", "0.46833056", "0.46815693", "0.46807772", "0.46777922", "0.46587753", "0.46568817", "0.46515256", "0.46478933", "0.46475947", "0.46435452", "0.46416634", "0.46388707", "0.46373907", "0.46256682", "0.46194994", "0.46150762", "0.46074557", "0.46043447", "0.46025577", "0.46007785", "0.45996848", "0.45980197", "0.45899272", "0.45898145", "0.45788133", "0.45778653", "0.45767343", "0.45756748", "0.45642406", "0.45630372", "0.4562901", "0.45613557", "0.4557722", "0.4543579", "0.45434293", "0.454323", "0.4540545", "0.45398095" ]
0.79740757
0
Resets the state of an operation and its children recursively to 0 (PENDING). The operation is identified by a given operationId
def retry_operation(cls,operation_id): db = cls._core.get_db() stmnt = "SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 2 ;" cur = db.query(cls._core,stmnt,(operation_id,)) for row in cur.fetchallmap(): cls.retry_operation(row["OPE_ID"]) stmnt = "UPDATE OPERATIONS SET OPE_STATUS = 0 WHERE OPE_ID = ? AND OPE_STATUS = 2 ;" db.query(cls._core,stmnt,(operation_id,),commit=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cancel_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 0 ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.cancel_operation(row[\"OPE_ID\"])\n\n stmnt = \"DELETE FROM OPERATIONS WHERE OPE_ID = ? AND OPE_STATUS = 0 ;\"\n db.query(cls._core,stmnt,(operation_id,),commit=True)", "def resetOperationCount():\n global _operationCount\n _countLock.acquire()\n try:\n _operationCount = 0\n finally:\n _countLock.release()", "def drop_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS IN (0, 2) ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.drop_operation(row[\"OPE_ID\"])\n\n stmnt = \"DELETE FROM OPERATIONS WHERE OPE_ID = ? AND OPE_STATUS IN (0, 2) ;\"\n db.query(cls._core,stmnt,(operation_id,),commit=True)", "def reset_tree() -> None:\n global task_tree\n task_tree = TaskTreeNode(NoOperation())\n task_tree.start_time = datetime.datetime.now()\n task_tree.status = TaskStatus.RUNNING", "def reset_tree(self):\n self.root = None\n self.action = None\n self.dist_probability = None", "def process_children(cls, operation):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID, OPE_TYPE FROM OPERATIONS WHERE OPE_OPE_PARENT = ? ORDER BY OPE_INVOKED ;\"\n stmnt_lock = \"UPDATE OPERATIONS SET OPE_STATUS = 1 WHERE OPE_ID = ? ;\"\n cur = db.query(cls._core,stmnt,(operation.get_id(),))\n for row in cur.fetchallmap():\n child_operation = cls.restore_operation(row)\n db.query(cls._core,stmnt_lock,(child_operation.get_id(),),commit=True)\n try:\n cls.process_children(child_operation)\n child_operation.do_workload()\n except Exception,e:\n stmnt_err = \"UPDATE OPERATIONS SET OPE_STATUS = 2 WHERE OPE_ID = ? 
;\"\n db.query(cls._core,stmnt_err,(int(row[\"OPE_ID\"]),),commit=True)\n #TODO GENERATE ERROR IN LOG\n raise e\n stmnt_delete = \"DELETE FROM OPERATIONS WHERE OPE_ID = ?;\"\n db.query(cls._core,stmnt_delete,(child_operation.get_id(),),commit=True)", "def setOperationId(self, opid) :\n self.operation_id = opid", "def _reset_traversal_state(self):\n for n in self.nodes.values():\n n.reset_traversal_state()", "def reset(self):\n # The apply(f) method recursively calls f on itself and all children\n self.apply(self._reset_module)", "def reset(self) -> None:\r\n self.tree.delete(*self.tree.get_children())", "def operation_state(self, operation_state):\n\n self._operation_state = operation_state", "def reset(self):\r\n self.key = None\r\n self.value = None\r\n self.parent = None\r\n self.left_child = None\r\n self.right_child = None\r\n self.color = BLACK\r\n self.size_tree = 0", "def reset(self):\n self.state = EvaluationState.ready\n\n for child in self.children:\n if hasattr(child, \"reset\"):\n child.reset()", "def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r", "def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r", "def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r", "def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r", "def reset(self):\n self.undo_stack = Stack(self.undo_stack_size)\n self.redo_stack[:] = []\n self.not_undoable_action = False\n self.undo_in_progress = False", "def _reset_tree_ids(self, start_id: int):\n\n add_id = start_id - self.min_tree_id()\n self.tree_ids = [tree_id + add_id for tree_id in self.tree_ids]", "def clear(self):\n self.beginResetModel()\n self.root_item = RootItem()\n self.requests_items = {}\n self.endResetModel()", "def update_root(self, action: Action) -> \"MonteCarloSearchTree\":\n if action in self._root.children:\n new_root = self._root.children[action]\n else:\n new_root = self._root.add_child(action)\n self._root.remove_child(new_root)\n self._root = new_root\n return self", "def reset(self, state: nx.Graph = None):\n if state is None:\n self._state = self.init_mol\n else:\n self._state = state\n\n self.action_space.update_actions(self._state, self.observation_space)\n if self.record_path:\n self._path = [self._state]\n self._counter = 0", "def reset_state(self):\n self.intersection_per_class.assign(\n tf.zeros_like(self.intersection_per_class)\n )\n self.union_per_class.assign(tf.zeros_like(self.union_per_class))", "def on_ResetNode_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def clear(self):\n self.root = None", "def reset() -> None:\n Invocation.active = {}\n Invocation.current = None # type: ignore\n Invocation.top = Invocation(None, None)\n Invocation.top._become_current() # pylint: disable=protected-access\n Invocation.up_to_date = {}\n Invocation.phony = set()\n Invocation.poisoned = set()\n Invocation.actions_count = 0\n Invocation.skipped_count = 0", "def reset(self):\n self._open_activity_count = 0\n self._decisions = []\n self._tasks = TaskRegistry()", "def reset_workflow(self, new_id):\n with self._driver.session() as session:\n session.write_transaction(tx.reset_tasks_metadata)\n session.write_transaction(tx.reset_workflow_id, new_id=new_id)", "def clear_state(self):\n super().clear_state()\n 
self.pid = 0", "def reset(self):\n for parent in self.GetParents():\n parent.reset()", "def reset(self):\n self.state.fill(EMPTY)", "def refresh_tree(self):\n self.process_tree = ProcessNode.objects.get(id=self.process_tree_id)", "def _ClearRequest(self, request_id, state):\n os.remove(self._GetRequestPathname(request_id, state))", "def reset_b(self, sess):\r\n sess.run(self._init_ops)", "def _operation_tree(self):\n\n # initial state\n i = 0\n level = 0\n stack = []\n current = None\n\n def _create_operation(args):\n profile_stats = None\n name = args[0].strip()\n args.pop(0)\n if len(args) > 0 and \"Records produced\" in args[-1]:\n records_produced = int(\n re.search(\"Records produced: (\\\\d+)\", args[-1]).group(1)\n )\n execution_time = float(\n re.search(\"Execution time: (\\\\d+.\\\\d+) ms\", args[-1]).group(1)\n )\n profile_stats = ProfileStats(records_produced, execution_time)\n args.pop(-1)\n return Operation(\n name, None if len(args) == 0 else args[0].strip(), profile_stats\n )\n\n # iterate plan operations\n while i < len(self.plan):\n current_op = self.plan[i]\n op_level = current_op.count(\" \")\n if op_level == level:\n # if the operation level equal to the current level\n # set the current operation and move next\n child = _create_operation(current_op.split(\"|\"))\n if current:\n current = stack.pop()\n current.append_child(child)\n current = child\n i += 1\n elif op_level == level + 1:\n # if the operation is child of the current operation\n # add it as child and set as current operation\n child = _create_operation(current_op.split(\"|\"))\n current.append_child(child)\n stack.append(current)\n current = child\n level += 1\n i += 1\n elif op_level < level:\n # if the operation is not child of current operation\n # go back to it's parent operation\n levels_back = level - op_level + 1\n for _ in range(levels_back):\n current = stack.pop()\n level -= levels_back\n else:\n raise Exception(\"corrupted plan\")\n return stack[0]", "def _reset(self, _recursive=False):\n if self._isDefault:\n self.options = self.__class__.options.copy()\n if _recursive:\n for child in self._children:\n child.reset()\n else:\n self.options = self._defaultInstance.options.copy()", "def revert_state(self):\n if self.previous_states > 0: # checks for empty\n self.update_status(self.previous_states.pop())", "def set_nooperation_substatus_json(self, operation, activity_id, start_time, seq_no, status=Constants.Status.Transitioning, code=Constants.ExitCode.Okay):\n # Wrap patches into nooperation summary\n self.__nooperation_summary_json = self.new_nooperation_summary_json(activity_id, start_time)\n\n # Wrap nooperation summary into nooperation substatus\n self.__nooperation_substatus_json = self.new_substatus_json_for_operation(Constants.PATCH_NOOPERATION_SUMMARY, status, code, json.dumps(self.__nooperation_summary_json))\n\n # Update status on disk\n self.write_status_file(operation, seq_no, status=status)", "def reset(self):\r\n _debug('simq03b_api.reset')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r", "def clear(self):\n self.tree = Tree()", "def _clear_node(self):\n self._element = None\n self._parent = None\n self._leftchild = None\n self._rightchild = None\n self._height = None", "def reset_state(self):\n for row in range(len(self.state)):\n for column in range(len(self.state[row])):\n self.state[row][column] = None", "def reset(self):\n self.set_state(self._initial_state)", "def resetTree(self):\n for fila in self.verDatos.get_children():\n 
self.verDatos.delete(fila)", "def _update_operation_state(self, operation, start_time, end_time):\n self.logger.info(\"updating '%s' operation in profile state\" %\n operation)\n # get current profile state ...\n with open(self.state_file, \"r\") as json_current:\n state = json.load(json_current)\n json_current.close()\n # ..., create \"succeded_operations\" object if not exists...\n try:\n _ = state[\"succeded_operations\"]\n except KeyError:\n state[\"succeded_operations\"] = {}\n # ... and also create the operation if not exists\n try:\n _ = state[\"succeded_operations\"][operation]\n except KeyError:\n state[\"succeded_operations\"][operation] = {}\n\n # update state\n start_time_iso8601 = start_time.strftime(\"%Y-%m-%dT%H:%M:%S.%f%z\")\n end_time_iso8601 = end_time.strftime(\"%Y-%m-%dT%H:%M:%S.%f%z\")\n op_status = {}\n op_status[\"start_time\"] = start_time_iso8601\n op_status[\"end_time\"] = end_time_iso8601\n op_status[\"duration\"] = str(end_time-start_time)\n op_status[\"file\"] = self.active_queue[0][\"objects_filename\"]\n state[\"succeded_operations\"][operation] = op_status\n # write to file\n with open(self.state_file, 'w') as out_file:\n json.dump(state, out_file, indent=4)\n out_file.close()", "def choose_new_root(\n self, root_node, action_choice, state_h, state_c, sampled_oh_action, state_ops\n ):\n new_root_node = root_node[\"childs\"][action_choice]\n new_root_node[\"parent\"] = None\n data = self.controller.forward_once(sampled_oh_action, state_h, state_c)\n _, _, state_h, state_c, sampled_oh_action, _ = data\n return new_root_node, sampled_oh_action, state_h, state_c", "def reset(self):\n self.children.clear()", "async def resume_operations(self):\n await asyncio.sleep(10)\n for op in await self.get_service('data_svc').locate('operations', match=dict(finish=None)):\n self.loop.create_task(self.run_operation(op))", "def Refresh(self,id_range):\n self.DeleteChildren(self.root)\n self.GenerateHierarchies(id_range)", "def unlock_root(self, root_id: np.uint64, operation_id: np.uint64) -> bool:\n operation_id_b = serialize_uint64(operation_id)\n\n lock_key = serialize_key(\"lock\")\n\n # Build a column filter which tests if a lock was set (== lock column\n # exists) and if it is still valid (timestamp younger than\n # LOCK_EXPIRED_TIME_DELTA) and if the given operation_id is still\n # the active lock holder\n\n time_cutoff = datetime.datetime.utcnow() - LOCK_EXPIRED_TIME_DELTA\n\n # Comply to resolution of BigTables TimeRange\n time_cutoff -= datetime.timedelta(\n microseconds=time_cutoff.microsecond % 1000)\n\n time_filter = TimestampRangeFilter(TimestampRange(start=time_cutoff))\n\n # column_key_filter = ColumnQualifierRegexFilter(lock_key)\n # value_filter = ColumnQualifierRegexFilter(operation_id_b)\n\n column_key_filter = ColumnRangeFilter(\n column_family_id=self.family_id,\n start_column=lock_key,\n end_column=lock_key,\n inclusive_start=True,\n inclusive_end=True)\n\n value_filter = ValueRangeFilter(\n start_value=operation_id_b,\n end_value=operation_id_b,\n inclusive_start=True,\n inclusive_end=True)\n\n # Chain these filters together\n chained_filter = RowFilterChain([time_filter, column_key_filter,\n value_filter])\n\n # Get conditional row using the chained filter\n root_row = self.table.row(serialize_uint64(root_id),\n filter_=chained_filter)\n\n # Delete row if conditions are met (state == True)\n root_row.delete_cell(self.family_id, lock_key, state=True)\n\n return root_row.commit()", "def reset(self):\n self.previous = None\n self.state = None\n self.args 
= None\n self.context = None", "def _re_init(self):\n self._child_index = 0", "def set_wait_for_undo_resources(self, sub_array_id: int) -> MessageBoardBuilder:\n brd = get_message_board_builder()\n brd.set_waiting_on(self._tel.sdp.subarray(sub_array_id)).for_attribute(\n \"obsState\"\n ).to_become_equal_to(\"EMPTY\")\n brd.set_waiting_on(self._tel.csp.subarray(sub_array_id)).for_attribute(\n \"obsState\"\n ).to_become_equal_to(\"EMPTY\")\n\n brd.set_waiting_on(self._tel.tm.subarray(sub_array_id)).for_attribute(\n \"obsState\"\n ).to_become_equal_to(\"EMPTY\")\n\n return brd", "def reset(self):\n\t\tself.graph = OrderedDict()\n\t\tself.bottoms = OrderedDict()\n\t\tself.output_shape = OrderedDict()\n\t\tself.cur_tensor = None\n\t\tself.cur_id = None\n\t\tself.tmp_list = []\n\t\tself.log_init()", "def reset(self, context):\n self.context = context\n self.min_order_size = 1e-4\n self.max_order_size = 0\n self.max_position_held = 0\n return self", "def clear(self):\n\t\tself._root = None\n\t\tself._size = 0\n\t\tself._depth = 0\n\t\tself._max_chars = 1\n\t\treturn", "def _doReset(self):\n self._cmdReset()", "def clear(self):\n self._ll_tree.clear()", "def reset(self):\n for c in self.children:\n c.reset()\n self.marked = False", "def clone_as_root(self) :\n clone = deepcopy(self)\n clone.parent = None\n clone.path_length = 0\n clone.previous_action = None\n return clone", "def _correct_tree(self, current_element: Node):\r\n while True:\r\n if current_element == None or current_element.parent() == None:\r\n return None\r\n current_element = current_element.parent()\r\n b1 = current_element.balance()\r\n\r\n try:\r\n b2 = current_element.right_son().balance()\r\n except AttributeError:\r\n b2 = 0\r\n try:\r\n b3 = current_element.right_son().left_son().balance()\r\n except AttributeError:\r\n b3 = 0\r\n\r\n if b1 in (-1, 0, 1):\r\n if current_element.parent() == None:\r\n break\r\n else:\r\n continue\r\n elif ((b1 == -2 and b2 == 1 and b3 == -1) or\r\n (b1 == -2 and b2 == 1 and b3 == 0 ) or\r\n (b1 == -2 and b2 == 1 and b3 == 1)):\r\n current_element.reset(*self._right_left(current_element))\r\n elif b1 == -2:\r\n current_element.reset(*self._right_right(current_element))\r\n break\r\n\r\n try:\r\n b2 = current_element.left_son().balance()\r\n except AttributeError:\r\n b2 = 0\r\n try:\r\n b3 = current_element.left_son().right_son().balance()\r\n except AttributeError:\r\n b3 = 0\r\n\r\n if ((b1 == 2 and b2 == 2 and b3 == 2) or\r\n (b1 == -1 and b2 == -1 and b3 == -1) or\r\n (b1 == -1 and b2 == 0 and b3 == 1) or\r\n (b1 == 2 and b2 == -1 and b3 == 0)):\r\n current_element.reset(*self._left_right(current_element))\r\n elif b1 == 2:\r\n current_element.reset(*self._left_left(current_element))\r\n break\r\n \r\n if current_element.parent() == None:\r\n break", "def change_ops_state(self, state):\n for op_button in self.operators.values():\n op_button['state'] = state", "def reset_task_state(self) -> None:\n self.set_task_state(task_state=self.sample_task_state())", "def clean(self):\n self.sess.run(self.init_op)\n print(\"Clean the running state of graph!\")", "def clear(self):\n # Record final status before clearing execution state.\n self._status = self.status\n\n self.expr = None\n self.args = None\n self.kwargs = None\n self.eval_args = None\n self.result_promise = None\n self.result = None\n self.job_tags.clear()\n self.value_tags.clear()\n\n for child_job in self.child_jobs:\n child_job.parent_job = None\n self.child_jobs.clear()", "def clear(self):\n self.state = [[None, None, None],\n [None, 
None, None],\n [None, None, None]]", "def reset_task(self, task_index: Optional[int] = None):\n if task_index is None:\n task_index = np.random.permutation(self.graph.num_graph)[0]\n else:\n task_index = task_index % self.graph.num_graph\n\n self.task_index = task_index\n self.graph.set_graph_index(task_index)\n self.num_subtasks = len(self.graph.subtask_id_list)\n self.subtask_reward = self.graph.subtask_reward\n self.subtask_id_list = self.graph.subtask_id_list\n self.game_length = self._default_game_len\n #self.game_length = int(np.random.uniform(0.8, 1.2) * self._default_game_len)\n\n # Reset map (96% of time)\n self.map.reset(subtask_id_list=self.subtask_id_list, reset_map=True)\n return self.task", "def clear(self):\n self.currentTotal = None\n self.previousOperator = None\n self.previousOperand = None", "def clear(self):\n del self.__tree\n self.__tree = AVLTree()\n print(\"Set is empty now\")", "def reset_progress(self):\n self.state = \"\"", "def reset(self,pcaId=None):\n if pcaId:\n self.transition(pcaId,GlobalSystemTransitions.reset)\n else:\n for partition in self.PCAs:\n self.transition(partition,GlobalSystemTransitions.reset)", "def reset_solver(self):\n self.total_iterations = 0\n self.active_constraints_index = 0\n self.active_constraints_set = False\n return", "def reset_state(self, noised_results, global_state):\n del noised_results\n new_tree_state = self._tree_aggregator.reset_state(global_state.tree_state)\n return attr.evolve(\n global_state,\n previous_tree_noise=self._zero_initial_noise(),\n tree_state=new_tree_state)", "def reset(self):\n for i in range(0, len(self.current_state)):\n self.current_state[i] = 0\n\n for i in range(0, len(self.weights)):\n self.weights[i] = 0", "def update_operations_count(self):\n project = get_current_project()\n if project is not None:\n fns, sta, err, canceled = self.flow_service.get_operation_numbers(project.id)\n project.operations_finished = fns\n project.operations_started = sta\n project.operations_error = err\n project.operations_canceled = canceled\n add2session(KEY_PROJECT, project)", "def reset_state(self, noised_results, global_state):\n new_tree_state = self._tree_aggregator.reset_state(global_state.tree_state)\n return attr.evolve(\n global_state,\n samples_cumulative_sum=noised_results,\n tree_state=new_tree_state)", "def reset(self):\n self.memory.clear()\n self.relative_base = 0\n self.input_queue.clear()\n self.instr_idx = 0", "def pre_config_root_delete(self, resource_id):\n pass", "def reset(self):\n self._value_estimates[:] = self.prior\n self.action_attempts[:] = 0\n self.last_action = None\n self.t = 0", "def reset_states(self) -> None:\n self._metric.reset_states()\n # for each child log\n for child in self.children_real_fake:\n child[0].reset_states()\n child[1].reset_states()", "def clear(self):\n self.need_rebuild = False\n\n # txid -> int\n self.validity = {}\n\n # [address] -> set of \"prevouthash:n\" for that address\n self.txo_byaddr = {}\n\n # [token_id_hex] -> dict of [\"prevouthash:n\"] -> qty (-1 for qty indicates\n # minting baton)\n self.token_quantities = {}\n\n # [\"prevouthash:n\"] -> \"token_id_hex\"\n self.txo_token_id = {}", "def _reset_state(self):\n self.state = self.start_state.copy()", "def check_and_renew_root_locks(self, root_ids: Iterable[np.uint64],\n operation_id: np.uint64) -> bool:\n\n for root_id in root_ids:\n if not self.check_and_renew_root_lock_single(root_id, operation_id):\n print(\"check_and_renew_root_locks failed - %d\" % root_id)\n return False\n\n return True", "def 
updateTree(self):\n self.reset()\n self.resetTree() \n self.read()", "def __call__(self, node, operations, last_operation):\n if last_operation == NO_OPERATION:\n return 0\n return 1", "def clear(self):\n \n self.node_set.clear()\n self.prefix.clear()\n self.suffix.clear()\n self.num_node = 0\n self.edges = 0", "def _reset(self):\n self._value = self._default", "def clear(self) -> None:\n self.selected = {}\n self.orderings = []\n self.predicate = None\n self.limit_index = None\n self.offset_index = None\n self.callbacks.clear()", "def reset(self):\r\n self._root_dir = None", "def reset(self) -> None:\n self[-1].reset()", "def async_update_group_state(self) -> None:\n self._state = None\n self._async_update_group_state()", "def resetDeviceStates(self):", "def empty_solution(self):\n self._datacontroller.commit_change('pathsteps', [])\n self._datacontroller.commit_change('path', {})", "def reset_bag(self):", "def reset(self, *args):\n self.resource = self.resource.reset(list(args))\n return self", "def unflatten(self): \n self.assign(self.get_unflattened_circuit())\n self._expr_map = None", "def test_delete_empty_bst(bst_empty):\n assert bst_empty.delete() is None\n assert bst_empty._root is None", "def reset_workflow(self, workflow_id):\n self._workflow_id = workflow_id\n self._gdb_interface.reset_workflow(self._workflow_id)\n self._gdb_interface.set_workflow_state('SUBMITTED')", "def lock_single_root(self, root_id: np.uint64, operation_id: np.uint64\n ) -> bool:\n\n operation_id_b = serialize_uint64(operation_id)\n\n lock_key = serialize_key(\"lock\")\n new_parents_key = serialize_key(\"new_parents\")\n\n # Build a column filter which tests if a lock was set (== lock column\n # exists) and if it is still valid (timestamp younger than\n # LOCK_EXPIRED_TIME_DELTA) and if there is no new parent (== new_parents\n # exists)\n\n time_cutoff = datetime.datetime.utcnow() - LOCK_EXPIRED_TIME_DELTA\n\n # Comply to resolution of BigTables TimeRange\n time_cutoff -= datetime.timedelta(\n microseconds=time_cutoff.microsecond % 1000)\n\n time_filter = TimestampRangeFilter(TimestampRange(start=time_cutoff))\n\n # lock_key_filter = ColumnQualifierRegexFilter(lock_key)\n # new_parents_key_filter = ColumnQualifierRegexFilter(new_parents_key)\n\n lock_key_filter = ColumnRangeFilter(\n column_family_id=self.family_id,\n start_column=lock_key,\n end_column=lock_key,\n inclusive_start=True,\n inclusive_end=True)\n\n new_parents_key_filter = ColumnRangeFilter(\n column_family_id=self.family_id,\n start_column=new_parents_key,\n end_column=new_parents_key,\n inclusive_start=True,\n inclusive_end=True)\n\n # Combine filters together\n chained_filter = RowFilterChain([time_filter, lock_key_filter])\n combined_filter = ConditionalRowFilter(\n base_filter=chained_filter,\n true_filter=PassAllFilter(True),\n false_filter=new_parents_key_filter)\n\n # Get conditional row using the chained filter\n root_row = self.table.row(serialize_uint64(root_id),\n filter_=combined_filter)\n\n # Set row lock if condition returns no results (state == False)\n time_stamp = datetime.datetime.utcnow()\n root_row.set_cell(self.family_id, lock_key, operation_id_b, state=False,\n timestamp=time_stamp)\n\n # The lock was acquired when set_cell returns False (state)\n lock_acquired = not root_row.commit()\n\n if not lock_acquired:\n r = self.table.read_row(serialize_uint64(root_id))\n\n l_operation_ids = []\n for cell in r.cells[self.family_id][lock_key]:\n l_operation_id = deserialize_uint64(cell.value)\n 
l_operation_ids.append(l_operation_id)\n print(\"Locked operation ids:\", l_operation_ids)\n\n return lock_acquired", "def cancel(self) -> None:\n if self.current is not None and self.current.size:\n # reset path\n self.labels.flat[self.current] = False\n self.costs.flat[self.current] = np.finfo('d').max\n # reset path end\n self.labels.flat[self.destiny] = False\n self.costs.flat[self.destiny] = np.finfo('d').max" ]
[ "0.6283896", "0.55792785", "0.5552252", "0.55377895", "0.55284655", "0.53993994", "0.5203521", "0.5042159", "0.50319934", "0.50242394", "0.5023247", "0.5015806", "0.50039274", "0.4977529", "0.4977529", "0.4977529", "0.4977529", "0.48844925", "0.4862212", "0.48437867", "0.48415264", "0.48373774", "0.4833321", "0.4814799", "0.48086548", "0.47904965", "0.47648314", "0.47600612", "0.47410592", "0.47050318", "0.4699454", "0.46984056", "0.46887764", "0.46613675", "0.46602407", "0.4642065", "0.46336707", "0.4633083", "0.46295288", "0.46284914", "0.4624053", "0.46134806", "0.46038935", "0.4587389", "0.45873636", "0.45825753", "0.45781323", "0.4558457", "0.45576814", "0.45518914", "0.45343402", "0.45234933", "0.45180985", "0.45160747", "0.45154205", "0.45101777", "0.45026955", "0.4502291", "0.4499408", "0.44973263", "0.4488844", "0.44872168", "0.448677", "0.44671178", "0.44612896", "0.44515455", "0.444586", "0.44455233", "0.444019", "0.44327378", "0.44248232", "0.44235057", "0.44219878", "0.44133857", "0.44125545", "0.44095817", "0.4405919", "0.44001254", "0.43990132", "0.43969938", "0.43960568", "0.4395556", "0.438661", "0.4380434", "0.43696567", "0.4368766", "0.43657577", "0.43630487", "0.43586093", "0.43536952", "0.43522125", "0.43499798", "0.43496752", "0.4335641", "0.4333812", "0.432916", "0.43250632", "0.43224594", "0.4321237", "0.4314338" ]
0.6371056
0
Cancels an Operation, identified by its Operation Id, and its children recursively. Cancel deletes the Operation from the Database
def cancel_operation(cls,operation_id): db = cls._core.get_db() stmnt = "SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 0 ;" cur = db.query(cls._core,stmnt,(operation_id,)) for row in cur.fetchallmap(): cls.cancel_operation(row["OPE_ID"]) stmnt = "DELETE FROM OPERATIONS WHERE OPE_ID = ? AND OPE_STATUS = 0 ;" db.query(cls._core,stmnt,(operation_id,),commit=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def drop_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS IN (0, 2) ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.drop_operation(row[\"OPE_ID\"])\n\n stmnt = \"DELETE FROM OPERATIONS WHERE OPE_ID = ? AND OPE_STATUS IN (0, 2) ;\"\n db.query(cls._core,stmnt,(operation_id,),commit=True)", "def cancel_operation(\n self,\n ) -> Callable[[operations_pb2.CancelOperationRequest], None]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"cancel_operation\" not in self._stubs:\n self._stubs[\"cancel_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/CancelOperation\",\n request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,\n response_deserializer=None,\n )\n return self._stubs[\"cancel_operation\"]", "def cancel_operation(\n self,\n ) -> Callable[[operations_pb2.CancelOperationRequest], None]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"cancel_operation\" not in self._stubs:\n self._stubs[\"cancel_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/CancelOperation\",\n request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,\n response_deserializer=None,\n )\n return self._stubs[\"cancel_operation\"]", "def cancel_operation(\n self,\n ) -> Callable[[operations_pb2.CancelOperationRequest], None]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"cancel_operation\" not in self._stubs:\n self._stubs[\"cancel_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/CancelOperation\",\n request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,\n response_deserializer=None,\n )\n return self._stubs[\"cancel_operation\"]", "def CancelOperation(\n self,\n request: google.longrunning.operations_pb2.CancelOperationRequest,\n context: grpc.ServicerContext,\n ) -> google.protobuf.empty_pb2.Empty:", "def do_cancel(order):\r\n self.gox.cancel(order.oid)", "def _do_cancel(self):\r\n\r\n def do_cancel(order):\r\n \"\"\"cancel a single order\"\"\"\r\n self.gox.cancel(order.oid)\r\n\r\n if not len(self.items):\r\n return\r\n if not len(self.selected):\r\n order = self.items[self.item_sel]\r\n do_cancel(order)\r\n else:\r\n for order in self.selected:\r\n do_cancel(order)", "def cancel(self):\n self.session.rollback()", "def retry_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 2 ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.retry_operation(row[\"OPE_ID\"])\n\n stmnt = \"UPDATE OPERATIONS SET OPE_STATUS = 0 WHERE OPE_ID = ? 
AND OPE_STATUS = 2 ;\"\n db.query(cls._core,stmnt,(operation_id,),commit=True)", "def cancelarOperacion(self):\n\n ok=QtGui.QMessageBox.warning(self,\"Aviso\",\"¿Desea cancelar la operación?\",\\\n QtGui.QMessageBox.Cancel | QtGui.QMessageBox.Ok)\n if ok == QtGui.QMessageBox.Ok:\n if self.factura != None:\n self.factura.anular()\n for detalle in self.lotesVentas:\n for loteVenta in self.lotesVentas[detalle]:\n loteVenta[0].aumentarCantidad(loteVenta[1])\n loteVenta[0].modificar(self.sesion)\n detalle.eliminarLotesAsociados(self.sesion)\n detalle.borrar(self.sesion)\n self.objectModified.emit()\n self.limpiarVentana()", "def DeleteOperation(\n self,\n request: google.longrunning.operations_pb2.DeleteOperationRequest,\n context: grpc.ServicerContext,\n ) -> google.protobuf.empty_pb2.Empty:", "def test_transaction_explitic_canceling(self):\n\n proxy = self.node.get_proxy('/')\n\n # look under the hood to verify that branches are added\n # recursively\n _latest_root_rev = self.node._branches[None].latest\n adapter_node = _latest_root_rev._children['adapters'][2].node\n self.assertEqual(len(self.node._branches.keys()), 1)\n self.assertEqual(len(adapter_node._branches.keys()), 1)\n\n tx = proxy.open_transaction()\n self.assertEqual(len(self.node._branches.keys()), 2)\n self.assertEqual(len(adapter_node._branches.keys()), 1)\n\n self.make_change(tx, '/adapters/2', 'config.log_level', 4)\n\n self.assertEqual(len(self.node._branches.keys()), 2)\n self.assertEqual(len(adapter_node._branches.keys()), 2)\n\n tx.cancel()\n\n self.assertEqual(len(self.node._branches.keys()), 1)\n self.assertEqual(len(adapter_node._branches.keys()), 1)", "def CancelOperation(self, operation_ref):\n request = self.messages.NetappProjectsLocationsOperationsCancelRequest(\n name=operation_ref.RelativeName())\n return self.client.projects_locations_operations.Cancel(request)", "def cancel(self, comment=None):\n payload = {\n \"Comment\": comment\n }\n qry = ServiceOperationQuery(self, \"cancel\", None, payload)\n self.context.add_query(qry)\n return self", "def cancel_operation(self):\n # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>\n self.proceed = False\n self.entry_view.destroy()", "def cancel(bot, update):\n bot.sendMessage(chat_id=update.message.chat_id, text=\"As you wish, the operation has been cancelled! 
😊\")\n return ConversationHandler.END", "def delete_operation(\n self,\n ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"delete_operation\" not in self._stubs:\n self._stubs[\"delete_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/DeleteOperation\",\n request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,\n response_deserializer=None,\n )\n return self._stubs[\"delete_operation\"]", "def delete_operation(\n self,\n ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"delete_operation\" not in self._stubs:\n self._stubs[\"delete_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/DeleteOperation\",\n request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,\n response_deserializer=None,\n )\n return self._stubs[\"delete_operation\"]", "def delete_operation(\n self,\n ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"delete_operation\" not in self._stubs:\n self._stubs[\"delete_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/DeleteOperation\",\n request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,\n response_deserializer=None,\n )\n return self._stubs[\"delete_operation\"]", "def process_children(cls, operation):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID, OPE_TYPE FROM OPERATIONS WHERE OPE_OPE_PARENT = ? ORDER BY OPE_INVOKED ;\"\n stmnt_lock = \"UPDATE OPERATIONS SET OPE_STATUS = 1 WHERE OPE_ID = ? ;\"\n cur = db.query(cls._core,stmnt,(operation.get_id(),))\n for row in cur.fetchallmap():\n child_operation = cls.restore_operation(row)\n db.query(cls._core,stmnt_lock,(child_operation.get_id(),),commit=True)\n try:\n cls.process_children(child_operation)\n child_operation.do_workload()\n except Exception,e:\n stmnt_err = \"UPDATE OPERATIONS SET OPE_STATUS = 2 WHERE OPE_ID = ? ;\"\n db.query(cls._core,stmnt_err,(int(row[\"OPE_ID\"]),),commit=True)\n #TODO GENERATE ERROR IN LOG\n raise e\n stmnt_delete = \"DELETE FROM OPERATIONS WHERE OPE_ID = ?;\"\n db.query(cls._core,stmnt_delete,(child_operation.get_id(),),commit=True)", "def cancel(self):\n if not self.parent_node.is_job:\n return\n\n # First perform clean operation\n self.clean()\n\n self.winstance.send_event('Cancelling job..')\n result = self.winstance.execute_operation('hpc.interfaces.'\n 'lifecycle.cancel',\n kwargs={\"name\": self.name})\n self.winstance.send_event('.. job canceled')\n result.task.wait_for_terminated()\n\n self._status = 'CANCELLED'", "def delete(self):\n parser = reqparse.RequestParser()\n parser.add_argument('orderId', type=int, required=True,\n help='Order ID to cancel')\n args = parser.parse_args()\n return sync.cancel_order(args['orderId'])", "def _execute_cancel(self) -> None:\n # validation\n if self.position.is_open:\n raise Exception('cannot cancel orders when position is still open. 
there must be a bug somewhere.')\n\n logger.info('cancel all remaining orders to prepare for a fresh start...')\n\n self.broker.cancel_all_orders()\n\n self._reset()\n\n self._broadcast('route-canceled')\n\n self.on_cancel()\n\n if not jh.is_unit_testing() and not jh.is_live():\n store.orders.storage[f'{self.exchange}-{self.symbol}'].clear()", "def cancel(self):\n if not self.is_cancelled:\n self.will_change_value_for('is_cancelled')\n self.cancelled = True\n # remove our dependencies so that we're ready, properly behaved operations\n # will honor the cancel flag\n self.dependencies.clear()\n self.did_change_value_for('is_cancelled')\n \n if not self.is_executing and not self.is_finished:\n with self.changing('is_finished'):\n self.finished = True", "def _order_cancel(self, bo):\n log.info(\"bo_blotter: order_cancel bracket order bo#%s\" % bo.ticket) \n cancelled = bo.cancel()\n return(cancelled)", "def cancel_order(self, walletId, orderId):\n return", "def _simple_deletion(self, operation, labels):\n from mogwai.models.edge import Edge\n\n label_strings = []\n for label in labels:\n if inspect.isclass(label) and issubclass(label, Edge):\n label_string = label.get_label()\n elif isinstance(label, Edge):\n label_string = label.get_label()\n elif isinstance(label, string_types):\n label_string = label\n else:\n raise MogwaiException('traversal labels must be edge classes, instances, or strings')\n label_strings.append(label_string)\n\n future = connection.future_class()\n future_result = self._delete_related(operation, label_strings)\n\n def on_read(f2):\n try:\n result = f2.result()\n except Exception as e:\n future.set_exception(e)\n else:\n future.set_result(result)\n\n def on_save(f):\n try:\n stream = f.result()\n except Exception as e:\n future.set_exception(e)\n else:\n future_read = stream.read()\n future_read.add_done_callback(on_read)\n\n future_result.add_done_callback(on_save)\n\n return future", "def cancel(self):\n import googleapiclient\n\n # projects.locations.operations/cancel\n operations = self._api.projects().locations().operations()\n\n for job in self.active_jobs:\n request = operations.cancel(name=job.jobname)\n logger.debug(\"Cancelling operation {}\".format(job.jobid))\n try:\n self._retry_request(request)\n except (Exception, BaseException, googleapiclient.errors.HttpError):\n continue\n\n self.shutdown()", "def cancel(self, cr, uid, ids, notes='', context=None):\n return self.write(cr, uid, ids, {'state':'cancel'}, context=context)", "def cancel(self, id):\n self.__init_client()\n order = self.get_open_order(id)\n\n if order is None:\n return False\n\n try:\n retry(lambda: self.client.futures_cancel_order(symbol=self.pair, origClientOrderId=order['clientOrderId']))\n except HTTPNotFound:\n return False\n logger.info(f\"Cancel Order : (clientOrderId, type, side, quantity, price, stop) = \"\n f\"({order['clientOrderId']}, {order['type']}, {order['side']}, {order['origQty']}, \"\n f\"{order['price']}, {order['stopPrice']})\")\n return True", "def cancel(self):\n # type: () -> None\n if self.query_id is None or self.is_finished():\n return\n\n self._cancelled = True\n url = self._request.get_url(\"/v1/query/{}\".format(self.query_id))\n logger.debug(\"cancelling query: %s\", self.query_id)\n response = self._request.delete(url)\n logger.info(response)\n if response.status_code == requests.codes.no_content:\n logger.debug(\"query cancelled: %s\", self.query_id)\n return\n self._request.raise_response_error(response)", "def scancel(self, arg):\n\n if isinstance(arg, 
(list, tuple)):\n for job_id in arg:\n self.scancel(job_id)\n\n elif str(arg).lower() == 'all':\n self._queue = None\n for job_id in self.queue_job_ids:\n self.scancel(job_id)\n\n elif isinstance(arg, (int, str)):\n cmd = ('scancel {}'.format(arg))\n cmd = shlex.split(cmd)\n subprocess.call(cmd)\n\n else:\n e = ('Could not cancel: {} with type {}'\n .format(arg, type(arg)))\n logger.error(e)\n raise ExecutionError(e)", "def cancelJob(_id):\n job = mongo.db.jobs.find_one({'_id': _id})\n tasks = mongo.db.tasks.find({'job': _id})\n for each in tasks:\n _t = ca.AsyncResult(each.get('ctid'))\n _t.revoke()\n job['status'] = 'cancelled'\n \"\"\"Set status of job to cancelled\"\"\"\n mongo.db.jobs.update({'_id': _id}, job)\n \"\"\"Bulk update tasks\"\"\"\n bulk = mongo.db.tasks.initialize_unordered_bulk_op()\n bulk.find({'job': _id, 'status': {'$ne': 'completed'}}).update({\n '$set': {\n 'status': \"cancelled\",\n 'cancelled_on': now(),\n 'slave': None,\n }})\n bulk.execute()\n\n return {'info': 'success'}", "def cancel_operation(\n self,\n request: Optional[operations_pb2.CancelOperationRequest] = None,\n *,\n retry: OptionalRetry = gapic_v1.method.DEFAULT,\n timeout: Union[float, object] = gapic_v1.method.DEFAULT,\n metadata: Sequence[Tuple[str, str]] = (),\n ) -> None:\n # Create or coerce a protobuf request object.\n # The request isn't a proto-plus wrapped type,\n # so it must be constructed via keyword expansion.\n if isinstance(request, dict):\n request = operations_pb2.CancelOperationRequest(**request)\n\n # Wrap the RPC method; this adds retry and timeout information,\n # and friendly error handling.\n rpc = gapic_v1.method.wrap_method(\n self._transport.cancel_operation,\n default_timeout=None,\n client_info=DEFAULT_CLIENT_INFO,\n )\n\n # Certain fields should be provided within the metadata header;\n # add these here.\n metadata = tuple(metadata) + (\n gapic_v1.routing_header.to_grpc_metadata(((\"name\", request.name),)),\n )\n\n # Send the request.\n rpc(\n request,\n retry=retry,\n timeout=timeout,\n metadata=metadata,\n )", "def action_cancel(self):\n ids = isinstance(self.ids, (int)) and [self.ids] or self.ids\n context = self._context or {}\n self.cancel_move()\n self.clear_wh_lines()\n return True", "def cancel(self, order_id):\n del self.__pending_orders[order_id]", "def cancel_job(job_id: str, connection: Optional['Redis'] = None, serializer=None, enqueue_dependents: bool = False):\n Job.fetch(job_id, connection=connection, serializer=serializer).cancel(enqueue_dependents=enqueue_dependents)", "def abort_transaction(self,xid):\n modlogger.debug( \"abort:%s\"%xid)\n opid = self.new_opid()\n xaction = AbortTxOperation(opid,xid)\n self._add_operation(xid,xaction)\n try:\n self.tx.rollback()\n finally:\n self.tx = None", "def cancelarOperacion(self):\n\n ok = QtGui.QMessageBox.information(self,\"Confirmacion\",\"¿Desea cancelar la operacion?\",\\\n QtGui.QMessageBox.Cancel, QtGui.QMessageBox.Accepted)\n if (ok==1):\n self.limpiarVentana()", "async def cancel_order(self, id: str, symbol: Optional[str] = None, params={}):\n await self.load_markets()\n # https://github.com/ccxt/ccxt/issues/6507\n clientOrderId = self.safe_value_2(params, 'clOrdID', 'clientOrderId')\n request = {}\n if clientOrderId is None:\n request['orderID'] = id\n else:\n request['clOrdID'] = clientOrderId\n params = self.omit(params, ['clOrdID', 'clientOrderId'])\n response = await self.privateDeleteOrder(self.extend(request, params))\n order = self.safe_value(response, 0, {})\n error = self.safe_string(order, 
'error')\n if error is not None:\n if error.find('Unable to cancel order due to existing state') >= 0:\n raise OrderNotFound(self.id + ' cancelOrder() failed: ' + error)\n return self.parse_order(order)", "def orderCancel(self, id, refund=\"\"):\n return self.__insertOrderHistory(id, \"cancelled\", {\"stripeRefundID\": refund})", "def cancel(self):\r\n self.require_item()\r\n\r\n url = '{0}/cancel'.format(self.get_url())\r\n request = http.Request('PUT', url)\r\n request.use_xml = False\r\n\r\n return request, parsers.parse_empty", "def cancel(self) -> None:\n c = self.pgconn.get_cancel()\n c.cancel()", "async def cancel_operation(\n self,\n request: Optional[operations_pb2.CancelOperationRequest] = None,\n *,\n retry: OptionalRetry = gapic_v1.method.DEFAULT,\n timeout: Union[float, object] = gapic_v1.method.DEFAULT,\n metadata: Sequence[Tuple[str, str]] = (),\n ) -> None:\n # Create or coerce a protobuf request object.\n # The request isn't a proto-plus wrapped type,\n # so it must be constructed via keyword expansion.\n if isinstance(request, dict):\n request = operations_pb2.CancelOperationRequest(**request)\n\n # Wrap the RPC method; this adds retry and timeout information,\n # and friendly error handling.\n rpc = gapic_v1.method.wrap_method(\n self._client._transport.cancel_operation,\n default_timeout=None,\n client_info=DEFAULT_CLIENT_INFO,\n )\n\n # Certain fields should be provided within the metadata header;\n # add these here.\n metadata = tuple(metadata) + (\n gapic_v1.routing_header.to_grpc_metadata(((\"name\", request.name),)),\n )\n\n # Send the request.\n await rpc(\n request,\n retry=retry,\n timeout=timeout,\n metadata=metadata,\n )", "async def cancel_operation(\n self,\n request: Optional[operations_pb2.CancelOperationRequest] = None,\n *,\n retry: OptionalRetry = gapic_v1.method.DEFAULT,\n timeout: Union[float, object] = gapic_v1.method.DEFAULT,\n metadata: Sequence[Tuple[str, str]] = (),\n ) -> None:\n # Create or coerce a protobuf request object.\n # The request isn't a proto-plus wrapped type,\n # so it must be constructed via keyword expansion.\n if isinstance(request, dict):\n request = operations_pb2.CancelOperationRequest(**request)\n\n # Wrap the RPC method; this adds retry and timeout information,\n # and friendly error handling.\n rpc = gapic_v1.method.wrap_method(\n self._client._transport.cancel_operation,\n default_timeout=None,\n client_info=DEFAULT_CLIENT_INFO,\n )\n\n # Certain fields should be provided within the metadata header;\n # add these here.\n metadata = tuple(metadata) + (\n gapic_v1.routing_header.to_grpc_metadata(((\"name\", request.name),)),\n )\n\n # Send the request.\n await rpc(\n request,\n retry=retry,\n timeout=timeout,\n metadata=metadata,\n )", "def cancel_order(self, id: str, symbol: Optional[str] = None, params={}):\n request = {\n 'id': id,\n }\n return self.privatePostOrderCancelOrder(self.extend(request, params))", "def cancel(self):\n\n query = f\"scancel {self.jobid}\"\n if self.cluster:\n query = f\"scancel {self.jobid} --clusters={self.cluster}\"\n\n cmd = BuildTestCommand(query)\n cmd.execute()\n logger.debug(f\"Cancelling Job: {self.jobid} by running: {query}\")\n\n self.poll()\n self._state = \"CANCELLED\"", "def test_cancel_order(self):\n # create a order to cancel\n test_order = OrderFactory()\n resp = self.app.post('/orders',\n json=test_order.serialize(),\n content_type='application/json')\n self.assertEqual(resp.status_code, status.HTTP_201_CREATED)\n\n # cancel the order\n new_order = resp.get_json()\n 
new_order['status'] = 'Cancelled'\n resp = self.app.put('/orders/{}/cancel'.format(new_order['id']),\n json=new_order,\n content_type='application/json')\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n cancelled_order = resp.get_json()\n self.assertEqual(cancelled_order['status'], 'Cancelled')", "def __call__(\n self,\n request: operations_pb2.CancelOperationRequest,\n *,\n retry: OptionalRetry = gapic_v1.method.DEFAULT,\n timeout: Optional[float] = None,\n metadata: Sequence[Tuple[str, str]] = (),\n ) -> None:\n\n http_options: List[Dict[str, str]] = [\n {\n \"method\": \"post\",\n \"uri\": \"/v3/{name=projects/*/operations/*}:cancel\",\n },\n {\n \"method\": \"post\",\n \"uri\": \"/v3/{name=projects/*/locations/*/operations/*}:cancel\",\n },\n ]\n\n request, metadata = self._interceptor.pre_cancel_operation(\n request, metadata\n )\n request_kwargs = json_format.MessageToDict(request)\n transcoded_request = path_template.transcode(http_options, **request_kwargs)\n\n uri = transcoded_request[\"uri\"]\n method = transcoded_request[\"method\"]\n\n # Jsonify the query params\n query_params = json.loads(json.dumps(transcoded_request[\"query_params\"]))\n\n # Send the request\n headers = dict(metadata)\n headers[\"Content-Type\"] = \"application/json\"\n\n response = getattr(self._session, method)(\n \"{host}{uri}\".format(host=self._host, uri=uri),\n timeout=timeout,\n headers=headers,\n params=rest_helpers.flatten_query_params(query_params),\n )\n\n # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception\n # subclass.\n if response.status_code >= 400:\n raise core_exceptions.from_http_response(response)\n\n return self._interceptor.post_cancel_operation(None)", "def post_cancel_operation(self, response: None) -> None:\n return response", "def post_cancel_operation(self, response: None) -> None:\n return response", "def __gitCancelMerge(self):\n self.vcs.gitCancelMerge(self.project.getProjectPath())", "async def cancel_and_delete_task(task_id: TaskId):", "def __call__(\n self,\n request: operations_pb2.CancelOperationRequest,\n *,\n retry: OptionalRetry = gapic_v1.method.DEFAULT,\n timeout: Optional[float] = None,\n metadata: Sequence[Tuple[str, str]] = (),\n ) -> None:\n\n http_options: List[Dict[str, str]] = [\n {\n \"method\": \"post\",\n \"uri\": \"/v3beta1/{name=projects/*/operations/*}:cancel\",\n },\n {\n \"method\": \"post\",\n \"uri\": \"/v3beta1/{name=projects/*/locations/*/operations/*}:cancel\",\n },\n ]\n\n request, metadata = self._interceptor.pre_cancel_operation(\n request, metadata\n )\n request_kwargs = json_format.MessageToDict(request)\n transcoded_request = path_template.transcode(http_options, **request_kwargs)\n\n uri = transcoded_request[\"uri\"]\n method = transcoded_request[\"method\"]\n\n # Jsonify the query params\n query_params = json.loads(json.dumps(transcoded_request[\"query_params\"]))\n\n # Send the request\n headers = dict(metadata)\n headers[\"Content-Type\"] = \"application/json\"\n\n response = getattr(self._session, method)(\n \"{host}{uri}\".format(host=self._host, uri=uri),\n timeout=timeout,\n headers=headers,\n params=rest_helpers.flatten_query_params(query_params),\n )\n\n # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception\n # subclass.\n if response.status_code >= 400:\n raise core_exceptions.from_http_response(response)\n\n return self._interceptor.post_cancel_operation(None)", "def cancel_order(self, custom_id=None, **params):\n self.conn.send('cancelOrder', 
custom_id=custom_id, **params)", "def _simple_deletion(self, operation, labels):\r\n label_strings = []\r\n for label in labels:\r\n if inspect.isclass(label) and issubclass(label, Edge):\r\n label_string = label.get_label()\r\n elif isinstance(label, Edge):\r\n label_string = label.get_label()\r\n label_strings.append(label_string)\r\n\r\n return self._delete_related(operation, label_strings)", "def cancel(self, uuid):\n\n result = self.api_query('CancelOrder', {'order_id': uuid})\n return result", "async def cancel_order():\n symbol = App.config[\"symbol\"]\n\n # Get currently active order and id (if any)\n order = App.order\n order_id = order.get(\"orderId\", 0) if order else 0\n if order_id == 0:\n # TODO: Maybe retrieve all existing (sell, limit) orders\n return None\n\n # -----\n try:\n log.info(f\"Cancelling order id {order_id}\")\n new_order = App.client.cancel_order(symbol=symbol, orderId=order_id)\n except Exception as e:\n log.error(f\"Binance exception in 'cancel_order' {e}\")\n return None\n\n # TODO: There is small probability that the order will be filled just before we want to kill it\n # We need to somehow catch and process this case\n # If we get an error (say, order does not exist and cannot be killed), then after error returned, we could do trade state reset\n\n # Impose and overwrite the new order information\n if new_order:\n order.update(new_order)\n else:\n return None\n\n # Now order[\"status\"] contains the latest status of the order\n return order[\"status\"]", "def cancelRequest(self, json):\n uID = json.get('uID')\n print(RequestsDAO().getRequestByuID(uID))\n if not RequestsDAO().getRequestByuID(uID):\n return jsonify(Error=\"No request found\"), 404\n else:\n\n if uID:\n RequestsDAO().deleteRequest(uID)\n return jsonify(User=\"User deleted\"), 200\n else:\n return jsonify(Error=\"Unexpected attributes in update request\"), 400", "def test_transaction_canceling(self):\n\n proxy = self.node.get_proxy('/')\n\n # look under the hood to verify that branches are added\n # recursively\n _latest_root_rev = self.node._branches[None].latest\n adapter_node = _latest_root_rev._children['adapters'][2].node\n self.assertEqual(len(self.node._branches.keys()), 1)\n self.assertEqual(len(adapter_node._branches.keys()), 1)\n\n tx = proxy.open_transaction()\n self.assertEqual(len(self.node._branches.keys()), 2)\n self.assertEqual(len(adapter_node._branches.keys()), 1)\n\n self.make_change(tx, '/adapters/2', 'config.log_level', 4)\n\n self.assertEqual(len(self.node._branches.keys()), 2)\n self.assertEqual(len(adapter_node._branches.keys()), 2)\n\n del tx\n\n self.assertEqual(len(self.node._branches.keys()), 1)\n self.assertEqual(len(adapter_node._branches.keys()), 1)", "async def cancel_order(cls, session, account, order_id):\n if not session.logged_in:\n raise Exception('Tastyworks session not logged in.')\n\n url = '{}/accounts/{}/orders/{}'.format(\n session.API_url,\n account.account_number,\n order_id\n )\n\n async with aiohttp.request('DELETE', url, headers=session.get_request_headers()) as resp:\n if resp.status != 200:\n raise Exception('Could not delete the order')\n data = (await resp.json())['data']\n order = cls.from_dict(data)\n return order.details.status", "def cancel_order(self, exchange, order_id):\n return self.ccxt.cancel_order(exchange, order_id)", "def cancel_order(self, symbol, orderId):\n payload = {\n 'symbol': symbol,\n 'orderId': orderId\n }\n\n return self.signed_request('DELETE', '/api/v3/order', **payload)", "def cancel_action(self, ctxt, action_id):\n 
action = action_mod.Action.load(self.db_session, action_id,\n project_safe=False)\n action.signal(action.SIG_CANCEL)", "def cancel(self, cr, uid, ids, context=None):\n if context:\n if 'owner' in context and 'model_id' in context:\n owner = context['owner']\n owner = int(owner)\n model_id = context['model_id']\n if str(model_id) == 'fleet.vehicle.log.contract':\n model_obj = self.pool.get(model_id)\n model = model_obj.browse(cr, uid, owner, context=context)\n model.write({'state':'cancel','note':'إلغاء لأسباب إداربة'})\n return self.write(cr, uid, ids, {'state':'cancel'})", "def remove_operation(self, name):\n\n del self.operations[name]", "def cancel(self, order_id):\n Library.functions.cancel(self._book, order_id)", "def cancel(self, uuid):\n return self.__call__('market', 'tradecancel',\n {'orderId': uuid})", "def cancel(self):", "def cancel(self):", "def cancel(self):", "def test_cancel_order_with_order_id():\n\n client = Client(key, secret)\n response = client.cancel_oco_order(**params)\n response.should.equal(mock_item)", "def cancel(self):\n if self.is_market:\n log.info(\"bo#%s: can't cancel order (market)\" % self.ticket)\n return(False)\n else:\n log.info(\"bo#%s: cancel master order, limit and stop order\" % self.ticket)\n if self.is_cancellable:\n cancel_order(self.order_master)\n cancel_order(self.order_limit)\n cancel_order(self.order_stop)\n self.cancelled.emit(bo=self)\n self.bo_blotter._move_cancelled_order(self)\n return(True)\n else:\n log.info(\"bo#%s: can't cancel order (not cancellable)\" % self.ticket)\n return(False)", "def hard_cancel(self, exec_info: ExecutionInfo) -> None:\n for task in exec_info.tasks.values():\n if not task.done():\n task.cancel()", "def cancel_stripe(self):\n TransactionLog = Pool().get('payment_gateway.transaction.log')\n\n if self.state != 'authorized':\n self.raise_user_error('cancel_only_authorized')\n\n stripe.api_key = self.gateway.stripe_api_key\n\n try:\n charge = stripe.Charge.retrieve(\n self.provider_reference\n ).refund(idempotency_key=('refund_%s' % self.uuid))\n except (\n stripe.error.InvalidRequestError,\n stripe.error.AuthenticationError, stripe.error.APIConnectionError,\n stripe.error.StripeError\n ), exc:\n TransactionLog.serialize_and_create(self, exc.json_body)\n else:\n self.state = 'cancel'\n self.save()\n TransactionLog.create([{\n 'transaction': self,\n 'log': unicode(charge),\n }])", "def cancel(self):\n self._log.debug(\"About to cancel job {0}\".format(self.id))\n resp = self._api.cancel(self.id)\n\n if resp.success:\n self.update()\n return True\n\n if resp.result.type is None:\n # Call was successful but job was unable to be cancelled.\n return False\n\n else:\n raise resp.result", "def comando_cancela(self):\r\n if args.xml:\r\n # Cancela utilizando XML\r\n self.cancela_nfce_por_xml()\r\n return\r\n\r\n if args.ultimo:\r\n # Cancela ultimo NFCe\r\n self.cancela_ultimo_nfce()\r\n\t return\r\n\r\n if not args.nnf or not args.serie or not args.protocolo or not args.chave:\r\n parser.error(\"informe as opcoes -n, -s, -p e -c para cancelamento.\")\r\n\r\n self.cancela_nfce(args.nnf, args.serie, args.chave, args.protocolo)", "def cancelOrder(self, order_number):\n pass", "def test_cancel_order_failure(self):\n # create a order to cancel\n test_order = OrderFactory()\n resp = self.app.post('/orders',\n json=test_order.serialize(),\n content_type='application/json')\n self.assertEqual(resp.status_code, status.HTTP_201_CREATED)\n\n # cancel the order\n new_order = resp.get_json()\n new_order['status'] = 'Cancelled'\n resp 
= self.app.put('/orders/{}/cancel'.format(23),\n json=new_order,\n content_type='application/json')\n self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)", "def cancel(self, order_id):\n url = 'https://coincheck.com/api/exchange/orders/' + order_id\n headers = make_header(url, access_key=self.access_key, secret_key=self.secret_key)\n r = requests.delete(url, headers=headers, timeout=self.timeout)\n return json.loads(r.text)", "async def handle_cancel_order_response(self, response: RequesterResponse\n ) -> HitbtcOrderModel:", "def onCancelOrder(self, item):\n self.frame.mode.cancelMarketOrder(self.lstOrders.getMultiSelectedItems(), self.mySystemDict['id'])", "def unlink(self, cr, uid, ids, context=None):\n payenrich = self.read(cr, uid, ids, ['state'], context=context)\n for s in payenrich:\n if s['state'] not in ['draft', 'cancel']:\n raise osv.except_osv(_('Invalid Action Error'), _('In Order To Delete A Service Request Order(s), It Must Be Cancelled First!'))\n return super(payment_enrich, self).unlink(cr, uid, ids, context=context)", "def cancel_order(user_data):\n can_res = requests.delete(url=\"http://127.0.0.1:5000/cancel_order\", json=user_data)\n return can_res.text", "def market_cancel(self, orderid):\n return self.delete(f'orders/{orderid}', auth=True)", "def cancel_without_rollback(self, stack_id):\n body = {'cancel_without_rollback': None}\n self.client.post('/stacks/%s/actions' % stack_id, data=body)", "def cancel(self):\n self.succeeded = False\n self.reject()", "def cancel(self):\n self.succeeded = False\n self.reject()", "def cancel(self):\n self.succeeded = False\n self.reject()", "def cancel(self):\n self.succeeded = False\n self.reject()", "def cancelarOperacion(self):\n\n signal = QtGui.QMessageBox.warning(self,\"Advertencia\",QtCore.QString.fromUtf8(\"¿Desea cancelar la operación?\"),\\\n QtGui.QMessageBox.Close | QtGui.QMessageBox.Ok)\n if signal == QtGui.QMessageBox.Ok:\n self.data = {}\n self.facturaSeleccionada = None\n self.productosSeleccionados = 0\n self.detallesDevueltos = {}\n self.limpiarVentana()", "def cancel(self):\n pass", "def cancel_order(self, **kwargs):\n return self.client.execute(\"order/refund\", \"POST\", kwargs)", "def cancel(self, orderid, account=None):\n if not account:\n if \"default_account\" in config:\n account = config[\"default_account\"]\n if not account:\n raise ValueError(\"You need to provide an account\")\n\n op = transactions.Limit_order_cancel(**{\n \"owner\": account,\n \"orderid\": orderid,\n })\n return self.dpay.finalizeOp(op, account, \"active\")", "def unlink(self):\n context = self._context or {}\n for src_brw in self.browse():\n if src_brw.state != 'cancel':\n raise exceptions.except_orm(\n _(\"Invalid Procedure!!\"),\n _(\"The withholding document needs to be in cancel state to\"\n \" be deleted.\"))\n else:\n super(AccountWhSrc, self).unlink(\n )\n return True", "def DeleteWorkflow(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def cancel_order(self, order_id: str):\n return self._call_txtrader_api('cancel_order', {'id': order_id})", "def cancel_order(self, order):\r\n method = self.private_endpoints['cancel_order']['method']\r\n url = self.base_url + self.private_endpoints['cancel_order']['url'].format(orderId=order)\r\n req = requests.request(method, url, headers=self.get_auth_headers(nonce=True))\r\n res = req.json()\r\n\r\n if res['success'] == True:\r\n return True\r\n 
else:\r\n return res", "def cancel_all(executions):\n for _, exec_node in executions:\n exec_node.cancel_all_instances()\n raise api.ExecutionCancelled()", "def cancel(self, connection):\n if hasattr(connection, \"cancel\"):\n connection.cancel()\n else:\n # A default cancel for databases for which no specific cancel is implemented\n # This will force an exit of the connection context manager\n raise QueryCancelled(\"Query was cancelled\")" ]
[ "0.66223377", "0.63557035", "0.63557035", "0.63557035", "0.62062633", "0.5849474", "0.5820822", "0.57708067", "0.5647814", "0.56469494", "0.55416864", "0.55382013", "0.54923594", "0.54598767", "0.5451226", "0.5448454", "0.54425424", "0.54425424", "0.54425424", "0.5434237", "0.5426428", "0.5382236", "0.5362991", "0.5299589", "0.5277859", "0.5275019", "0.5255635", "0.5233946", "0.522269", "0.5209626", "0.5203719", "0.5203615", "0.5189648", "0.5187281", "0.51693267", "0.5165638", "0.51650286", "0.5154334", "0.51533985", "0.5149053", "0.5144348", "0.51406485", "0.51333535", "0.51219034", "0.51219034", "0.51094514", "0.51071405", "0.50899464", "0.50898457", "0.50722384", "0.50722384", "0.5071832", "0.5036703", "0.5024621", "0.5018286", "0.50069124", "0.4995011", "0.49728987", "0.49703303", "0.4956957", "0.49531734", "0.49508253", "0.49405706", "0.49400198", "0.49265936", "0.49154264", "0.49096042", "0.4903525", "0.4900625", "0.4900625", "0.4900625", "0.4890705", "0.48890382", "0.48864177", "0.4867857", "0.48601916", "0.48540372", "0.48506114", "0.4845731", "0.48364073", "0.48243484", "0.48235518", "0.48192593", "0.48166612", "0.48166522", "0.48134908", "0.48103008", "0.48103008", "0.48103008", "0.48103008", "0.4806309", "0.4804697", "0.47983077", "0.47865775", "0.4784215", "0.47785655", "0.47703937", "0.4769644", "0.4768134", "0.47672054" ]
0.8243501
0
Restore an Operation object stored in the database by a Dataset consisting of
def restore_operation(cls, operation_record): classname = operation_record["OPE_TYPE"] module = "" #TODO Implement modulename from database if Operation belongs to Module is_operation_of_module = False exec """ try: type(%(class)s) except NameError,e: is_operation_of_module = True"""%{'class':classname} if is_operation_of_module: exec """ from %(module)s import %(class)s operation = %(class)s(cls._core)"""%{'class':classname,'module':module} else: exec """ operation = %(class)s(cls._core)"""%{'class':classname} operation.set_id(operation_record['OPE_ID']) db = cls._core.get_db() stmnt = "SELECT OPD_KEY, OPD_VALUE, OPD_TYPE FROM OPERATIONDATA WHERE OPD_OPE_ID = ? ;" cur = db.query(cls._core,stmnt,(operation_record["OPE_ID"],)) for row in cur.fetchallmap(): val = row["OPD_VALUE"] exec """val = %s(val)"""%row["OPD_TYPE"] operation.set_value(row["OPD_KEY"], val) return operation
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def restore(self, oid, serial, data, version, prev_txn, transaction):\n assert not version\n self._check_trans(transaction, 'restore')\n self._async('restorea', oid, serial, data, prev_txn, id(transaction))", "def restore(self, checkpoint):\n raise NotImplementedError", "def mos_object(self):\n return self._restore_fn(*self._restore_args)", "def restore(self, obj):\n return obj", "def _restore(self, graph):\n raise NotImplementedError()", "def restore(self):\n self.abstract_obj.restore()", "def restore_data(self):\n self.R = self._Ro\n del self._Ro", "def restore(self, restore):\n self._restore = restore", "def restore(self):\n raise NotImplementedError", "def restore(self, ids):\n with self._db_connection(transaction=True) as transaction:\n for id_ in ids:\n transaction.restore_dataset(id_)", "def load(cls,data, recovery_mode = False):\n opid = _read_delimited_field(data)\n operation_type = _read_delimited_field(data)\n modlogger.debug( \"loading: %s,%s\"%(opid,operation_type))\n return _operation_type_map[operation_type].load(opid,data, recovery_mode = recovery_mode)", "def persist(cls, dataset):\n return dataset", "def restore(self):\n\n self.brain.restore_checkpoint()", "def _restore(self, a_path):\n super(RDPAnalyzer, self)._restore(a_path)\n self._model._restore()", "def test_restore(self):\n s = Source([[10, 10], [10, 20]], values=[1.0, 2.0])\n assert(array_equal(s.center, [10, 15]))\n\n assert(\"center\" in s.__dict__.keys())\n s.restore()\n assert(\"center\" not in s.__dict__.keys())\n\n assert(array_equal(s.center, [10, 15]))\n assert(\"center\" in s.__dict__.keys())\n s.restore(skip=\"center\")\n assert(\"center\" in s.__dict__.keys())", "def __restoreBackup(self):\n pass #FIXME!!!", "def restore(self, session, **attrs):\n body = {\"instance\": {\"restorePoint\": {\"backupRef\": self.id}}}\n body.update(attrs)\n resp = session.post('instances', service=self.service, json=body).body\n return resp['instance']", "def test_backup_restore_with_ops(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n initial_gen = copy.deepcopy(gen)\n initial_keys = []\n for x in initial_gen:\n initial_keys.append(x[0])\n self.log.info(\"Start to load items to all buckets\")\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.ops_type = self.input.param(\"ops-type\", \"update\")\n self.log.info(\"Create backup repo \")\n self.backup_create()\n for i in range(1, self.backupset.number_of_backups + 1):\n self._backup_restore_with_ops()\n start = randrange(1, self.backupset.number_of_backups + 1)\n if start == self.backupset.number_of_backups:\n end = start\n else:\n end = randrange(start, self.backupset.number_of_backups + 1)\n\n if self.compact_backup and self.ops_type == \"delete\":\n self.log.info(\"Start to compact backup \")\n self.backup_compact_validate()\n self.log.info(\"Validate deleted keys\")\n self.backup_compact_deleted_keys_validation(initial_keys)\n\n self.log.info(\"start restore cluster \")\n restored = {\"{0}/{1}\".format(start, end): \"\"}\n for i in range(1, self.backupset.number_of_backups + 1):\n self.backupset.start = start\n self.backupset.end = end\n self._backup_restore_with_ops(backup=False, compare_function=\">=\")\n if self.backupset.number_of_backups == 1:\n continue\n while \"{0}/{1}\".format(start, end) in restored:\n start = randrange(1, self.backupset.number_of_backups + 1)\n if start == self.backupset.number_of_backups:\n end = start\n else:\n end = randrange(start, self.backupset.number_of_backups + 1)\n 
restored[\"{0}/{1}\".format(start, end)] = \"\"", "def restore_to_database(\r\n self,\r\n objects_to_restore=None,\r\n destination_client=None,\r\n sf_options=None):\r\n file_restore_option = {}\r\n\r\n if sf_options is None:\r\n sf_options = {}\r\n\r\n # check if client name is correct\r\n if destination_client is None:\r\n destination_client = self._backupset_object._instance_object.proxy_client\r\n\r\n if isinstance(destination_client, Client):\r\n dest_client = destination_client\r\n elif isinstance(destination_client, basestring):\r\n dest_client = Client(self._commcell_object, destination_client)\r\n else:\r\n raise SDKException('Subclient', '105')\r\n\r\n if not ('db_host_name' in sf_options and\r\n 'db_instance' in sf_options and\r\n 'db_name' in sf_options and\r\n 'db_user_name' in sf_options and\r\n 'db_user_password' in sf_options):\r\n raise SDKException('Salesforce', '101')\r\n\r\n # set the destination client\r\n file_restore_option[\"client_name\"] = dest_client.client_name\r\n file_restore_option[\"destination_path\"] = sf_options.get(\r\n \"destination_path\", self._backupset_object.download_cache_path\r\n )\r\n\r\n self._restore_destination_json(file_restore_option)\r\n\r\n # process the objects to restore\r\n if isinstance(objects_to_restore, list):\r\n objects_to_restore_list = objects_to_restore\r\n\r\n else:\r\n objects_to_restore_list = [objects_to_restore]\r\n\r\n file_restore_option[\"paths\"] = []\r\n browse_files, _ = self.browse(\r\n path='/Objects',\r\n from_time=sf_options.get(\"from_time\", 0),\r\n to_time=sf_options.get(\"to_time\", 0)\r\n )\r\n\r\n for each_object in objects_to_restore_list:\r\n if each_object.find('/Files') < 0:\r\n file_restore_option[\"paths\"].append(\r\n self.check_object_in_browse(\"%s\" % each_object, browse_files)\r\n )\r\n\r\n # set the salesforce options\r\n file_restore_option[\"staging_path\"] = sf_options.get(\r\n \"destination_path\", self._backupset_object.download_cache_path\r\n )\r\n file_restore_option[\"dependent_level\"] = sf_options.get(\"dependent_level\", 0)\r\n file_restore_option[\"streams\"] = sf_options.get(\"streams\", 2)\r\n file_restore_option[\"to_fs\"] = False\r\n file_restore_option[\"db_enabled\"] = True\r\n file_restore_option[\"db_type\"] = sf_options.get(\"db_type\", 'SQLSERVER')\r\n file_restore_option[\"db_host_name\"] = sf_options.get(\"db_host_name\", \"\")\r\n file_restore_option[\"db_instance\"] = sf_options.get(\"db_instance\", \"\")\r\n file_restore_option[\"db_name\"] = sf_options.get(\"db_name\", \"autorestoredb\")\r\n file_restore_option[\"db_port\"] = sf_options.get(\"db_port\", '1433')\r\n file_restore_option[\"db_user_name\"] = sf_options.get(\"db_user_name\", 'sa')\r\n db_base64_password = b64encode(sf_options['db_user_password'].encode()).decode()\r\n file_restore_option[\"db_user_password\"] = db_base64_password\r\n file_restore_option[\"override_table\"] = sf_options.get(\"override_table\", True)\r\n\r\n # set the browse option\r\n file_restore_option[\"copy_precedence_applicable\"] = True\r\n file_restore_option[\"copy_precedence\"] = sf_options.get(\"copy_precedence\", 0)\r\n file_restore_option[\"from_time\"] = sf_options.get(\"from_time\", 0)\r\n file_restore_option[\"to_time\"] = sf_options.get(\"to_time\", 0)\r\n\r\n # prepare and execute the Json\r\n request_json = self._prepare_salesforce_restore_json(file_restore_option)\r\n\r\n return self._process_restore_response(request_json)", "def revert(self):\n if not 'savepoint' in self._cache:\n w = \"No saved session DataSet file 
found!\"\n warnings.warn(w)\n return None\n self._meta, self._data = self._cache['savepoint']\n print('Reverted to last savepoint of {}'.format(self.name))\n return None", "def restore(self):\n return self._restore", "def _load_restored(self, dataset_path):\n for group in ['knowledge', 'source', 'target']:\n if getattr(self, group + '_format') != 'none':\n text_data = load_restored(dataset_path, group + '.', ignore_file='vocab')[0]\n setattr(self, group + '_text_data', text_data)\n idx2token, token2idx = load_restored(dataset_path, ignore_file='data')\n setattr(self, 'idx2token', idx2token)\n setattr(self, 'token2idx', token2idx)\n self.max_vocab_size = len(self.idx2token)\n self.logger.info(\"Restore finished!\")", "def restore_object(self):\n self.co_worker_list = self.original_co_worker_list", "def _restore_sub_obj(\n self,\n attr_name: pathlib.Path\n ):\n return pickle.load(attr_name.open(mode=\"rb\"))", "def _Restore(self) -> None:\n self._SetNodes(self._nodes)", "def _restore(self, restore_folder):\n tf.reset_default_graph()\n self.init_session()\n ckpt = tf.train.get_checkpoint_state(restore_folder)\n self.saver = tf.train.import_meta_graph('{}.meta'.format(ckpt.model_checkpoint_path))\n self.saver.restore(self.sess, ckpt.model_checkpoint_path)\n print(\"Model restored from {}\".format(restore_folder))", "def backup_dataset(outfile=None):\n return backup_es(Dataset, outfile=outfile)", "def restore(self, snapshot):\n self.unit_name = snapshot[\"unit_name\"]", "def archive_mds_ops_data(self, lmtdb):\n\n # mapping between OPERATION_INFO.OPERATION_NAME to HDF5 dataset names\n opname_to_dataset_name = {\n 'open': 'mdtargets/opens',\n 'close': 'mdtargets/closes',\n 'mknod': 'mdtargets/mknods',\n 'link': 'mdtargets/links',\n 'unlink': 'mdtargets/unlinks',\n 'mkdir': 'mdtargets/mkdirs',\n 'rmdir': 'mdtargets/rmdirs',\n 'rename': 'mdtargets/renames',\n 'getxattr': 'mdtargets/getxattrs',\n 'statfs': 'mdtargets/statfss',\n 'setattr': 'mdtargets/setattrs',\n 'getattr': 'mdtargets/getattrs',\n }\n dataset_names = list(opname_to_dataset_name.values())\n\n self.init_datasets(dataset_names, lmtdb.mds_names)\n\n results, columns = lmtdb.get_mds_ops_data(self.query_start, self.query_end_plusplus)\n\n # Index the columns to speed up insertion of data\n col_map = {}\n try:\n for db_col in ['TIMESTAMP', 'MDS_ID', 'OPERATION_ID', 'SAMPLES']:\n col_map[db_col] = columns.index(db_col)\n except ValueError:\n raise ValueError(\"LMT database schema does not match expectation\")\n\n # Loop through all the results of the timeseries query\n for row in results:\n if isstr(row[col_map['TIMESTAMP']]):\n # SQLite stores timestamps as a unicode string\n timestamp = datetime.datetime.strptime(row[col_map['TIMESTAMP']],\n \"%Y-%m-%d %H:%M:%S\")\n else:\n # MySQL timestamps are automatically converted to datetime.datetime\n timestamp = row[col_map['TIMESTAMP']]\n\n # figure out the dataset this row's data will go into (this\n # implicitly filters out operations that aren't defined in\n # opname_to_dataset_name)\n op_name = lmtdb.mds_op_id_map[row[col_map['OPERATION_ID']]]\n dataset_name = opname_to_dataset_name.get(op_name)\n if dataset_name is None:\n continue\n\n # figure out which column (MDS name) this row's data will go into\n mds_name = lmtdb.mds_id_map.get(row[col_map['MDS_ID']])\n if not mds_name:\n errmsg = \"unknown MDS_ID %s\" % row[col_map['MDS_ID']]\n warnings.warn(errmsg)\n continue\n\n self[dataset_name].insert_element(\n timestamp,\n mds_name,\n row[col_map['SAMPLES']])", "def restore(self):\n\t\treturn 
Job(SDK.PrlVm_Restore(self.handle)[0])", "def orchestration_restore(self, context, cancellation_context, saved_artifact_info, custom_params):\n '''\n # The saved_details JSON will be defined according to the JSON Schema and is the same object returned via the\n # orchestration save function.\n # Example input:\n # {\n # \"saved_artifact\": {\n # \"artifact_type\": \"REPLACE_WITH_ARTIFACT_TYPE\",\n # \"identifier\": \"16_08_09 11_21_35_657000\"\n # },\n # \"resource_name\": \"some_resource\",\n # \"restore_rules\": {\n # \"requires_same_resource\": true\n # },\n # \"created_date\": \"2016-08-09T11:21:35.657000\"\n # }\n\n # The example code below just parses and prints the saved artifact identifier\n saved_details_object = json.loads(saved_details)\n return saved_details_object[u'saved_artifact'][u'identifier']\n '''\n pass", "def restore_database(app):\n app.database().restore()\n app.refresh()", "def restore(self, checkpoint_path: str):\r\n raise NotImplementedError", "def restore(self):\n self.igate.restore()\n self.fgate.restore()\n self.ogate.restore()\n super(LSTM, self).restore()", "def test_data_object_untrash(self):\n pass", "def restore(self, PATH):\n self._saver.restore(self._sess, PATH)", "def restore(self,):\n self.pos, self.dataptr, = self.stack.pop()", "def GetDataAsObject(self):", "def orchestration_restore(self, context, saved_artifact_info, custom_params):\n with LoggingSessionContext(context) as logger, LogCommand(\n logger, \"orchestration_restore\"\n ):\n api = CloudShellSessionContext(context).get_api()\n\n resource_config = FirewallResourceConfig.from_context(\n self.SHELL_NAME, context, api, self.SUPPORTED_OS\n )\n\n cli_configurator = CheckpointCliConfigurator(\n self._cli, resource_config, logger\n )\n\n configuration_flow = CheckpointConfigurationFlow(\n logger, resource_config, cli_configurator\n )\n return configuration_flow.orchestration_restore(\n saved_artifact_info, custom_params\n )", "def restore(self):\n if self._restored_model:\n return\n with self.eval_graph.graph.as_default():\n last_checkpoint = self._find_last_checkpoint()\n # TODO(rbharath): Is setting train=False right here?\n saver = tf.train.Saver()\n saver.restore(self._get_shared_session(train=False), last_checkpoint)\n self._restored_model = True", "def restore(self, job, revision):\n\t\t\n\t\tjob_dict = self._get_job_dict(job)\n\t\tjob_dict['name'] = job\n\t\t\n\t\t# start restore process\n\t\tbackup = Backup(job_dict, self.db)\n\t\tbackup.restore(revision)", "def restore(self, ds_crossval):\n t = datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\n print(\"[INFO] {} - Restoring from latest checkpoint ...\".format(t))\n self.ckpt_handle.restore(\n tf.train.latest_checkpoint(self.ckpt_dir)).expect_partial()\n self._eval_step(ds_crossval.data.values,\n ds_crossval.labels_onehot.values)\n self.best_accuracy = float(self.eval_accuracy.result())\n template = \"{} model restored after {} epoch(s) - \" \\\n \"Cross-vall accuracy = {:.9f}\"\n print(template.format(self.name, int(self.ckpt_handle.step),\n self.best_accuracy))", "def test_restore_backup():", "def test_restore_single(self):\n graph = self._restore_graph([self._remote_task()])\n assert len(graph.tasks) == 1\n assert isinstance(graph.tasks[0], tasks.RemoteWorkflowTask)", "def __call__(self, data: Dict[str, Any]) -> Any:\n loader_meths: Final[Dict[str, Callable]] = {\n DockerfileImages.__name__: DockerfileImages.load,\n TagConf.__name__: TagConf.load,\n ImageName.__name__: self._restore_image_name,\n }\n if \"__type__\" not in data:\n # 
__type__ is an identifier to indicate a dict object represents an\n # object that should be recovered. If no type is included, just\n # treat it as a normal dict and return.\n return data\n obj_type = data.pop(\"__type__\")\n loader_meth = loader_meths.get(obj_type)\n if loader_meth is None:\n raise ValueError(\n f\"Unknown object type {obj_type} to restore an object from data {data!r}.\"\n )\n return loader_meth(data)", "def restore(self, event):\n\n self.undo_add()\n\n key_list = list(self.patch.engine.misc_data.keys())\n key = key_list[self.selected_index]\n self.patch.misc[key] = copy.copy(self.patch.engine.misc[key])\n\n self.misclist_update_row(self.selected_index)\n self.update_properties()", "def reconstruct(self, data):\n self.recons = self.trf.reconstruct(data)", "def copy(self, other_object):\n #-------------------------------------------------------------------------\n # Debug parameters\n #-------------------------------------------------------------------------\n self._path_to_model = other_object.path_to_model\n \n #-------------------------------------------------------------------------\n # Data-model parameters\n #-------------------------------------------------------------------------\n self.is_verbose = other_object.is_verbose\n\n self._df_invoice_line = other_object._df_invoice_line.copy()\n self._total_outliers = other_object._total_outliers\n self._df_invoice_ref = other_object._df_invoice_ref.copy()\n self._list_quant_feature = other_object._list_quant_feature.copy()\n self._list_feature_to_drop = other_object._list_feature_to_drop.copy()\n self._df_invoice_original = other_object._df_invoice_original.copy()\n if other_object._arr_sample_customerID is not None:\n self._arr_sample_customerID = other_object._arr_sample_customerID.copy()\n else :\n self._arr_sample_customerID = None\n \n self._df_invoice_line_out_sample \\\n = other_object._df_invoice_line_out_sample.copy()\n \n #-------------------------------------------------------------------------\n # RFM features\n #-------------------------------------------------------------------------\n self._is_rfm_encode = other_object._is_rfm_encode\n self._encoder_rfm = other_object._encoder_rfm\n self._df_customers_rfm = other_object._df_customers_rfm.copy()\n self.df_customers_rfm_fileName = other_object.df_customers_rfm_fileName\n self.df_RFM_quantiles = other_object.df_RFM_quantiles\n self._day_now = other_object._day_now\n self._is_transform_rfm = other_object._is_transform_rfm\n\n #-------------------------------------------------------------------------\n # Time features\n #-------------------------------------------------------------------------\n self._list_new_feature = other_object._list_new_feature\n self._pca_timeFeature = other_object._pca_timeFeature\n self._std_scaler_timeFeature = other_object._std_scaler_timeFeature\n \n self._df_customers_timeFeature_fileName \\\n = other_object._df_customers_timeFeature_fileName\n \n if other_object._dict_timeFeature_encoder is not None:\n self._dict_timeFeature_encoder \\\n = other_object._dict_timeFeature_encoder.copy()\n else:\n self._dict_timeFeature_encoder = other_object._dict_timeFeature_encoder\n \n if other_object._df_customers_timeFeature is not None:\n self._df_customers_timeFeature \\\n = other_object._df_customers_timeFeature.copy()\n else:\n self._df_customers_timeFeature = other_object._df_customers_timeFeature\n \n self._is_transform_timeFeature = other_object._is_transform_timeFeature\n \n 
#-------------------------------------------------------------------------\n # NLP features\n #-------------------------------------------------------------------------\n self._vectorizer_nlp = other_object._vectorizer_nlp \n self._matrix_weights_nlp = other_object._matrix_weights_nlp\n self._df_customers_nlp_fileName = other_object._df_customers_nlp_fileName\n self._pca_nlp = other_object._pca_nlp\n self._df_customers_pca_nlp = other_object._df_customers_pca_nlp.copy()\n self._nlp_pca_ndim = other_object._nlp_pca_ndim\n self._is_transform_nlp = other_object._is_transform_nlp\n \n #-------------------------------------------------------------------------\n # All features\n #-------------------------------------------------------------------------\n self._df_customers_fileName = other_object._df_customers_fileName\n self._df_customers = other_object._df_customers.copy()\n\n #-------------------------------------------------------------------------\n # Classifier\n #-------------------------------------------------------------------------\n if other_object._y_clusters is not None:\n self._y_clusters = other_object._y_clusters.copy()\n else:\n self._y_clusters = other_object._y_clusters\n\n self._dict_classifier_param = other_object._dict_classifier_param.copy()\n self._classifier_name = other_object._classifier_name\n self._classifier_model = other_object._classifier_model", "def _restore_data_inputs(self):\n super()._restore_data_inputs()\n self.training_data = (\n self._data.training_data.data if self._data.training_data and self._data.training_data.data else None\n )\n self.validation_data = (\n self._data.validation_data.data if self._data.validation_data and self._data.validation_data.data else None\n )", "def save_redo_operation(self, fn, *args):\r\n inverse_op, args = self.get_inverse_operation_and_args(fn, *args)\r\n self.__redo_repository.record_inverse_operations(inverse_op, *args)", "def onDocumentRestored(self, obj):\n\n ArchComponent.Component.onDocumentRestored(self, obj)\n self.setProperties(obj)", "def _restore_taskset(self, taskset_id):\n session = Session()\n try:\n for taskset in session.query(TaskSet).filter(TaskSet.task_id == task_id):\n return taskset.to_dict()\n finally:\n session.close()", "def restore_original_ehr(self, ehr_record):\n self._check_unecessary_restore(ehr_record)\n return self.version_manager.restore_original(ehr_record.record_id)", "def step_back(\n self):\n if self.backup != None:\n self.data = self.backup", "def change_restored(self, event):\n pass", "def to_dataset(self):\n if not self.type:\n raise aspecd.exceptions.MissingDatasetError\n dataset = aspecd.utils.object_from_class_name(self.type)\n dataset.id = self.id\n for history_record in self.history:\n history_record.replay(dataset)\n return dataset", "def restore_backup_shape(self):\n\n self.shape = self.shape_backup", "def restore(self):\n documentUrl = self.metaData.graveyard[0].selfLink + \"/restore\"\n response = self._adapter.putRequest(documentUrl, self._baseHeader, \"{}\")\n self.metaData.graveyard.pop()\n\n return Document(self._client, response['Headers']['location'])", "def mongoRestore( self, db, infile ):\n\t\tsys_command = \"mongorestore --db \" + db + \" --host \" + self.host + \" --port \" + str( self.port ) + \" \" + infile \n\t\tos.system(sys_command)", "def restore(self, checkpoint_data):\n\n self.iteration = checkpoint_data['iteration']\n self.action_requests = checkpoint_data['action_requests']\n\n self.experience = deque()\n for episode in checkpoint_data['experience']:\n 
ep = []\n for transition in episode:\n ep.append(tuple(transition))\n\n self.experience.append(ep)", "def reloadData(self):\n self.dto.readFromData()\n print(\"Record reloaded.\")", "def post_revert(self):", "def RestoreSnapshot(\r\n self,\r\n snapshot_data: Any,\r\n ):\r\n\r\n (\r\n self_id,\r\n iter,\r\n num_results,\r\n ignore_whitespace_ctr,\r\n ) = snapshot_data\r\n\r\n assert self_id == id(self)\r\n assert iter.Offset <= self.normalized_iter.Offset\r\n assert num_results <= len(self.results)\r\n assert ignore_whitespace_ctr <= self._ignore_whitespace_ctr\r\n\r\n self.normalized_iter = iter\r\n self._ignore_whitespace_ctr = ignore_whitespace_ctr\r\n\r\n if len(self.results) != num_results:\r\n del self.results[num_results - len(self.results):]", "def restore_from_snapshot(SnapshotId=None):\n pass", "def stopEvaluationMode(self):\n self.data_ref = self.saved_dat_ref", "def save(self):\n if self._data is None and self._meta is None:\n w = \"No data/meta components found in the DataSet.\"\n warnings.warn(w)\n return None\n ds_clone = self.clone()\n self._cache['savepoint'] = ds_clone.split()\n return None", "def stopEvaluationMode(self):\r\n self.dataRef = self.storeDataRef", "def test_restore_from_compacted_backup(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self.backup_cluster_validate()\n self.backup_compact()\n self.backup_restore_validate()", "def restore(self, checkpoint_frame=None):\n\n if checkpoint_frame:\n self.saver.restore(self.sess, self.path + '/tensorflow-model-%d' % checkpoint_frame)\n else:\n self.saver.restore(self.sess, self.saver.latest_checkpoint())", "def restore_object(Bucket=None, Key=None, VersionId=None, RestoreRequest=None, RequestPayer=None):\n pass", "def restore_to_salesforce_from_database(\r\n self,\r\n objects_to_restore=None,\r\n destination_client=None,\r\n destination_instance=None,\r\n destination_backupset=None,\r\n sf_options=None):\r\n file_restore_option = {}\r\n\r\n if sf_options is None:\r\n sf_options = {}\r\n\r\n # check if client name is correct\r\n if destination_client is None:\r\n destination_client = self._backupset_object._agent_object._client_object\r\n\r\n if isinstance(destination_client, Client):\r\n dest_client = destination_client\r\n elif isinstance(destination_client, basestring):\r\n dest_client = Client(self._commcell_object, destination_client)\r\n else:\r\n raise SDKException('Subclient', '105')\r\n\r\n dest_agent = Agent(dest_client, 'Cloud Apps', '134')\r\n\r\n # check if instance name is correct\r\n if destination_instance is None:\r\n destination_instance = self._backupset_object._instance_object\r\n\r\n if isinstance(destination_instance, Instance):\r\n dest_instance = destination_instance\r\n elif isinstance(destination_instance, basestring):\r\n dest_instance = dest_agent.instances.get(destination_instance)\r\n else:\r\n raise SDKException('Subclient', '113')\r\n\r\n # check if backupset name is correct\r\n if destination_backupset is None:\r\n destination_backupset = self._backupset_object\r\n\r\n if isinstance(destination_backupset, SalesforceBackupset):\r\n dest_backupset = destination_backupset\r\n elif isinstance(destination_backupset, basestring):\r\n dest_backupset = SalesforceBackupset(dest_instance, destination_backupset)\r\n else:\r\n raise SDKException('Subclient', '114')\r\n\r\n if not self._backupset_object.is_sync_db_enabled:\r\n if not (\r\n 'db_host_name' in sf_options 
and 'db_instance' in sf_options and\r\n 'db_name' in sf_options and 'db_user_name' in sf_options and\r\n 'db_user_password' in sf_options):\r\n raise SDKException('Salesforce', '101')\r\n\r\n # set salesforce destination client\r\n file_restore_option[\"dest_client_name\"] = dest_client.client_name\r\n file_restore_option[\"dest_instance_name\"] = dest_instance.instance_name\r\n file_restore_option[\"dest_backupset_name\"] = dest_backupset.backupset_name\r\n\r\n self._restore_salesforce_destination_json(file_restore_option)\r\n\r\n # process the objects to restore\r\n if isinstance(objects_to_restore, list):\r\n objects_to_restore_list = objects_to_restore\r\n\r\n else:\r\n objects_to_restore_list = [objects_to_restore]\r\n\r\n file_restore_option[\"paths\"] = []\r\n browse_files, _ = self.browse(\r\n path='/Objects', from_time=sf_options.get(\"from_time\", 0),\r\n to_time=sf_options.get(\"to_time\", 0))\r\n\r\n for each_object in objects_to_restore_list:\r\n if each_object.find('/Files') < 0:\r\n file_restore_option[\"paths\"].append(\r\n self.check_object_in_browse(\r\n \"%s\" %\r\n each_object,\r\n browse_files))\r\n\r\n # set the salesforce options\r\n file_restore_option[\"staging_path\"] = sf_options.get(\r\n \"destination_path\",\r\n dest_backupset.download_cache_path)\r\n file_restore_option[\"dependent_level\"] = sf_options.get(\"dependent_level\", 0)\r\n file_restore_option[\"streams\"] = sf_options.get(\"streams\", 2)\r\n file_restore_option[\"to_fs\"] = False\r\n file_restore_option[\"to_cloud\"] = True\r\n file_restore_option[\"from_database\"] = True\r\n file_restore_option[\"db_enabled\"] = True\r\n if self._backupset_object.is_sync_db_enabled or ('db_host_name' in sf_options):\r\n if self._backupset_object.sync_db_type is None:\r\n dbtype = 'SQLSERVER'\r\n else:\r\n dbtype = self._backupset_object.sync_db_type\r\n file_restore_option[\"db_type\"] = sf_options.get(\"db_type\", dbtype)\r\n file_restore_option[\"db_host_name\"] = sf_options.get(\r\n \"db_host_name\", self._backupset_object.sync_db_host\r\n )\r\n file_restore_option[\"db_instance\"] = sf_options.get(\r\n \"db_instance\", self._backupset_object.sync_db_instance\r\n )\r\n file_restore_option[\"db_name\"] = sf_options.get(\r\n \"db_name\", self._backupset_object.sync_db_name\r\n )\r\n file_restore_option[\"db_port\"] = sf_options.get(\r\n \"db_port\", self._backupset_object.sync_db_port\r\n )\r\n file_restore_option[\"db_user_name\"] = sf_options.get(\r\n \"db_user_name\", self._backupset_object.sync_db_user_name\r\n )\r\n\r\n if 'db_user_password' in sf_options:\r\n sf_options['_db_base64_password'] = b64encode(\r\n sf_options['db_user_password'].encode()).decode()\r\n\r\n file_restore_option[\"db_user_password\"] = sf_options.get(\r\n \"_db_base64_password\",\r\n self._backupset_object._sync_db_user_password)\r\n else:\r\n raise SDKException('Salesforce', '101')\r\n\r\n file_restore_option[\"override_table\"] = sf_options.get(\"override_table\", True)\r\n\r\n # set the browse option\r\n file_restore_option[\"client_name\"] = self._backupset_object._agent_object._client_object.client_name\r\n file_restore_option[\"copy_precedence_applicable\"] = True\r\n file_restore_option[\"copy_precedence\"] = sf_options.get(\"copy_precedence\", 0)\r\n file_restore_option[\"from_time\"] = sf_options.get(\"from_time\", 0)\r\n file_restore_option[\"to_time\"] = sf_options.get(\"to_time\", 0)\r\n\r\n # prepare and execute the Json\r\n request_json = self._prepare_salesforce_restore_json(file_restore_option)\r\n\r\n return 
self._process_restore_response(request_json)", "def from_dict(cls, dikt) -> 'Operations':\n return util.deserialize_model(dikt, cls)", "def restore_state(self, state):\n state_ref = self.ale.decodeState(state)\n self.ale.restoreState(state_ref)\n self.ale.deleteState(state_ref)", "def save_to_db(self, data, db_operations):\n self.from_dict(data)\n self._id = str(db_operations.insert_one(self.to_dict()).inserted_id)", "def save_undo_operation(self, fn, *args):\r\n inverse_op, args = self.get_inverse_operation_and_args(fn, *args)\r\n self.__undo_repository.record_inverse_operations(inverse_op, *args)", "def recall_objects(self):\n statement = self.ask(\"neo\", \"sql_statement\")\n conn = sqlite3.connect('neo_test.db')\n\n cursor = conn.cursor()\n\n cursor.execute(statement)\n\n self.short_term_memory = cursor.fetchall()\n conn.close()", "def revert(self, checkpoint):\n self._validate_checkpoint(checkpoint)\n\n for key, value in self.journal.pop_checkpoint(checkpoint).items():\n if value is None:\n self.wrapped_db.delete(key)\n else:\n self.wrapped_db.set(key, value)", "def undo(self):\n LOG.debug(\"In the undo method, will attempt to restore\")\n\n # validate detected nothing to do for this, nothing was done\n # for execute, so simply return\n if self.no_op:\n return\n\n if not self.source_dev or not self.target_dev:\n return\n LOG.debug(\"The source dictionary is: %s\", self.source_dict_restore)\n LOG.debug(\"The target dictionary is: %s\", self.target_dict_restore)\n\n # In scenario where no source IP Address...\n if self.source_dict_restore:\n self.commandex.send_ifcfg(self.source_dev,\n self.source_dict_restore)\n\n # May have failed because the ifcfg didn't even exist, nothing\n # to roll back then\n if self.target_dict_restore:\n self.commandex.send_ifcfg(self.target_dev,\n self.target_dict_restore)", "def Restore(binary_file, format='default'):\n from dragon.config import logger\n assert os.path.exists(binary_file), \\\n 'Binary file({}) does not exist.'.format(binary_file)\n\n if format == 'default':\n try:\n state_dict = cPickle.load(open(binary_file, 'rb'))\n except UnicodeDecodeError:\n state_dict = cPickle.load(open(binary_file, 'rb'), encoding='iso-8859-1')\n logger.info('Restore From Model@: ' + binary_file)\n logger.info('Model Format: cPickle')\n for k, v in state_dict.items():\n if not HasTensor(k):\n logger.info('[Warning]: Tensor({}) does not exist in any Graphs, skip.'.format(k))\n else:\n FeedTensor(k, v)\n logger.info('[Info]: Tensor({}) is restored.'.format(k))\n\n elif format == 'caffe':\n # Caffe models can't save the tensor name\n # We simply use \"layer_name/param:X\"\n RestoreCC(binary_file, 1)\n\n else:\n raise TypeError('Unknown binary format: {}'.format(format))", "def saveData(self):\n pass", "def _load(self) -> None:\n self.record = self._saved_record\n self.counter = self._saved_counter\n self.current_objects = self._saved_objects", "def Run(self, args):\n sql = self.context['sql']\n instance_id = util.GetInstanceIdWithoutProject(args.instance)\n project_id = util.GetProjectId(args.instance)\n # TODO(user): as we deprecate P:I args, simplify the call to .Parse().\n instance_ref = resources.Parse(\n instance_id, collection='sql.instances',\n params={'project': project_id})\n due_time = args.due_time\n instance = self.command.ParentGroup().ParentGroup().instances.get(\n instance=instance_ref.instance)\n # At this point we support only one backup-config. 
So, we just use that id.\n backup_config = instance['settings']['backupConfiguration'][0]['id']\n request = sql.instances().restoreBackup(\n project=instance_ref.project, instance=instance_ref.instance,\n backupConfiguration=backup_config, dueTime=due_time)\n try:\n result = request.execute()\n operations = self.command.ParentGroup().ParentGroup().operations()\n operation = operations.get(instance=str(instance_ref),\n operation=result['operation'])\n return operation\n except errors.HttpError as error:\n raise exceptions.HttpException(util.GetError(error))\n except errors.Error as error:\n raise exceptions.ToolException(error)", "def restore(self, commit=True):\n self.trashed_time = None\n if commit:\n self.save()", "def reload(self):\n self.restore()", "def test_copy_astore(self):\n job = mr.copy_analytic_store(self.MODEL_NAME)\n\n assert job.state == \"pending\"", "def _before_stockpyle_deserialize(self, obj):\n \n # only merge SA objects\n if _is_sqlalchemy_object(obj):\n self.__session.merge(obj, load=False)", "def rollback(obj, commit):\n copy = deepcopy(obj)\n if commit.uses_slots is False:\n copy.__dict__ = commit.state\n else:\n for k, v in commit.state.items():\n setattr(copy, k, v)\n\n return copy", "def save_object(self, data):\n return Order(**data)", "def Restore(self):\n\n return self._persistentHandler.Restore()", "def restore(cls):\n step_path = TaskOps().step_path\n _file = os.path.join(step_path, \".reports\")\n if os.path.exists(_file):\n with open(_file, \"rb\") as f:\n data = pickle.load(f)\n cls._hist_records = data[0]\n cls.__instances__ = data[1]", "def deserialize(self):\n with open(self.path+self.name, \"rb\") as pfile:\n dataSet = pickle.load(pfile)\n return dataSet", "def restore(self, memento):\n self.state = memento.state", "def get_operation_old(operation_name):\n op = operations_api.get_operation(operation_name)\n return op", "def _loadBackTxn(self, oid, back, fail=True):\n return self._loadBack_impl(oid, back, fail)[:2]", "def _restore_default(self):\n self._data = self._default", "def exportDB(self):\n sourcesession=svc.connect(self.__source,accessMode=coral.access_Update)\n destsession=svc.connect(self.__dest,accessMode = coral.access_Update)\n try:\n dbcp=DBCopy(sourcesession,destsession,1024)\n if self.__all:\n dbcp.copyDB()\n elif self.__inv:\n dbcp.copyInventory()\n elif len(self.__tree) != 0:\n dbcp.copyTrees([self.__tree])\n del sourcesession\n del destsession\n except Exception, e:\n print str(e)\n del sourcesession\n del destsession", "def testUndoRedo(self):\n cmds.file(new=True, force=True)\n cmds.group(name=\"group1\", empty=True)\n adaptor = mayaUsdLib.Adaptor(\"group1\")\n self.assertEqual(adaptor.GetAppliedSchemas(), [])\n\n # Do a single operation, then undo, then redo.\n adaptor.ApplySchema(UsdGeom.ModelAPI)\n self.assertEqual(adaptor.GetAppliedSchemas(), [\"GeomModelAPI\"])\n cmds.undo()\n self.assertEqual(adaptor.GetAppliedSchemas(), [])\n cmds.redo()\n self.assertEqual(adaptor.GetAppliedSchemas(), [\"GeomModelAPI\"])\n\n # Do a compound operation, then undo, then redo.\n cmds.undoInfo(openChunk=True)\n adaptor.ApplySchema(UsdGeom.MotionAPI).CreateAttribute(\n UsdGeom.Tokens.motionVelocityScale).Set(0.42)\n self.assertEqual(adaptor.GetAppliedSchemas(),\n [\"GeomModelAPI\", \"MotionAPI\"])\n self.assertAlmostEqual(adaptor.GetSchema(UsdGeom.MotionAPI).GetAttribute(\n UsdGeom.Tokens.motionVelocityScale).Get(), 0.42)\n cmds.undoInfo(closeChunk=True)\n cmds.undo()\n self.assertEqual(adaptor.GetAppliedSchemas(), [\"GeomModelAPI\"])\n 
self.assertFalse(adaptor.GetSchema(UsdGeom.MotionAPI).GetAttribute(\n UsdGeom.Tokens.motionVelocityScale))\n self.assertIsNone(adaptor.GetSchema(UsdGeom.MotionAPI).GetAttribute(\n UsdGeom.Tokens.motionVelocityScale).Get())\n cmds.redo()\n self.assertEqual(adaptor.GetAppliedSchemas(),\n [\"GeomModelAPI\", \"MotionAPI\"])\n self.assertAlmostEqual(adaptor.GetSchema(UsdGeom.MotionAPI).GetAttribute(\n UsdGeom.Tokens.motionVelocityScale).Get(), 0.42)", "def undo(self) :\n \n raise NotImplementedError()", "def __store(self):\n # connection strings are accessed directly by dbo\n dbo = dbo.connect()\n dbo.save(self.__to_dict())\n # not supre important to call but a nice idea\n dbo.destroy()", "def restoreData(filename='laue.dat'):\r\n import cPickle\r\n with open(filename, 'rb') as fp:\r\n return cPickle.load(fp)" ]
[ "0.6201652", "0.6147818", "0.5943375", "0.5855127", "0.5704961", "0.56648844", "0.56335723", "0.55953205", "0.55918145", "0.55536777", "0.55432135", "0.5536391", "0.5470216", "0.54262424", "0.539617", "0.5361314", "0.5206639", "0.51915765", "0.5184326", "0.5164573", "0.51495296", "0.51482344", "0.5126082", "0.51207787", "0.51192796", "0.5117878", "0.51160187", "0.51050395", "0.51037204", "0.5085244", "0.5077151", "0.50434864", "0.50425166", "0.5020266", "0.5003357", "0.4995503", "0.49869448", "0.49433544", "0.49392402", "0.4933335", "0.493092", "0.4925828", "0.49174577", "0.49069992", "0.4903645", "0.48987007", "0.48665598", "0.48501232", "0.48258916", "0.48181954", "0.481452", "0.4807965", "0.4803863", "0.4801005", "0.4780686", "0.47658312", "0.4758344", "0.47545978", "0.47535", "0.47493184", "0.47453946", "0.47416434", "0.47364032", "0.47352797", "0.47325063", "0.47252876", "0.4717362", "0.47166777", "0.4713121", "0.4704778", "0.4701062", "0.4694755", "0.46852654", "0.46841982", "0.4683384", "0.4683252", "0.46741626", "0.46732897", "0.46714675", "0.46643427", "0.46567142", "0.46560028", "0.46457374", "0.46407", "0.46405017", "0.4639864", "0.46366996", "0.46355742", "0.4634763", "0.4627422", "0.4617476", "0.46171555", "0.46141174", "0.46139574", "0.46123645", "0.4612236", "0.46108854", "0.46101874", "0.4609966", "0.4609742" ]
0.6955749
0
Recursively executes the workloads of an Operation's child Operations. It catches exceptions in the workloads, sets the OPE_STATUS to 2 (FAILED) if a catch occurs, then passes the exception on to the higher layer. If an Operation succeeds, its entry in the DB gets deleted
def process_children(cls, operation): db = cls._core.get_db() stmnt = "SELECT OPE_ID, OPE_TYPE FROM OPERATIONS WHERE OPE_OPE_PARENT = ? ORDER BY OPE_INVOKED ;" stmnt_lock = "UPDATE OPERATIONS SET OPE_STATUS = 1 WHERE OPE_ID = ? ;" cur = db.query(cls._core,stmnt,(operation.get_id(),)) for row in cur.fetchallmap(): child_operation = cls.restore_operation(row) db.query(cls._core,stmnt_lock,(child_operation.get_id(),),commit=True) try: cls.process_children(child_operation) child_operation.do_workload() except Exception,e: stmnt_err = "UPDATE OPERATIONS SET OPE_STATUS = 2 WHERE OPE_ID = ? ;" db.query(cls._core,stmnt_err,(int(row["OPE_ID"]),),commit=True) #TODO GENERATE ERROR IN LOG raise e stmnt_delete = "DELETE FROM OPERATIONS WHERE OPE_ID = ?;" db.query(cls._core,stmnt_delete,(child_operation.get_id(),),commit=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def retry_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 2 ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.retry_operation(row[\"OPE_ID\"])\n\n stmnt = \"UPDATE OPERATIONS SET OPE_STATUS = 0 WHERE OPE_ID = ? AND OPE_STATUS = 2 ;\"\n db.query(cls._core,stmnt,(operation_id,),commit=True)", "def cancel_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 0 ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.cancel_operation(row[\"OPE_ID\"])\n\n stmnt = \"DELETE FROM OPERATIONS WHERE OPE_ID = ? AND OPE_STATUS = 0 ;\"\n db.query(cls._core,stmnt,(operation_id,),commit=True)", "def process_next(cls):\n db = cls._core.get_db()\n configuration = cls._core.get_configuration()\n if os.path.exists(configuration.get_entry(\"core.webpath\")+\"/scv_operating.lck\"):\n return False\n lockfile = open(configuration.get_entry(\"core.webpath\")+\"/scv_operating.lck\",\"w\")\n lockfile.close()\n stmnt_lock = \"UPDATE OPERATIONS SET OPE_STATUS = 1 \\\n WHERE OPE_ID IN ( \\\n SELECT OPE_ID FROM OPERATIONS \\\n WHERE OPE_OPE_PARENT IS NULL AND OPE_STATUS = 0 \\\n AND OPE_INVOKED = ( \\\n SELECT MIN(OPE_INVOKED) FROM OPERATIONS \\\n WHERE OPE_OPE_PARENT IS NULL AND OPE_STATUS = 0) \\\n ) ;\"\n stmnt = \"SELECT OPE_ID, OPE_TYPE FROM OPERATIONS WHERE OPE_OPE_PARENT IS NULL AND OPE_STATUS = 1 ;\"\n db.query(cls._core,stmnt_lock,commit=True)\n cur = db.query(cls._core,stmnt)\n res = cur.fetchallmap()\n if len(res) > 0:\n operation = cls.restore_operation(res[0])\n try:\n cls.process_children(operation)\n operation.do_workload()\n except Exception, e:\n stmnt_err = \"UPDATE OPERATIONS SET OPE_STATUS = 2 WHERE OPE_ID = ? ;\"\n db.query(cls._core,stmnt_err,(operation.get_id(),),commit=True)\n error = StringIO()\n print_exc(None,error)\n cls._core.log(error.getvalue())\n ret = True\n else:\n ret = False\n stmnt_delete = \"DELETE FROM OPERATIONS WHERE OPE_STATUS = 1 ;\"\n db.query(cls._core,stmnt_delete,commit=True)\n db.commit()\n try:\n os.unlink(configuration.get_entry(\"core.webpath\")+\"/scv_operating.lck\")\n except OSError,e :\n raise OperationException(OperationException.get_msg(0))\n return ret", "def drop_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS IN (0, 2) ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.drop_operation(row[\"OPE_ID\"])\n\n stmnt = \"DELETE FROM OPERATIONS WHERE OPE_ID = ? 
AND OPE_STATUS IN (0, 2) ;\"\n db.query(cls._core,stmnt,(operation_id,),commit=True)", "def execute(self,data):\n\n try:\n\n start = time.time()\n\n self.cursor.executemany(self.operation, data)\n\n end = time.time()\n\n logger.info(\"Operation [{}] took {:.3f} seconds; {} operations processed\".format(self.operation, end-start, len(data)))\n\n except Exception, exc:\n\n # Not so typical: handle integrity constraints (generate warnings)\n if exc.__class__.__name__ != \"IntegrityError\":\n raise\n\n self.conn.rollback()\n\n for record in data:\n\n try:\n self.cursor.execute(self.operation, record)\n self.conn.commit()\n\n except Exception, exc:\n\n # This record is the culprit\n if exc.__class__.__name__ != \"IntegrityError\":\n logger.error(\"Exception [{}] occurred inserting record {}\".format(exc.message, record))\n logger.error(\"Operation was: {}\".format(self.operation))\n raise\n\n error_msg = str(exc.message).rstrip()\n logger.warn( \"Integrity error (\\\"{}\\\"); data={}\".format(error_msg, record) )\n\n else:\n # If all goes well, we just need a single commit\n self.conn.commit()", "def _recover(self,):\n modlogger.debug( \"starting recovery\")\n with self.id_lock: #Prevent new ops being created.\n logs = [ LogFile(x,readonly=True) for x in self._findlogs() ]\n logiter = [ iter(x) for x in logs ]\n ops = [ _getop(x) for x in logiter ]\n opids = [ _getid(x) for x in ops ]\n #order the log files by operation Id.\n data = sorted(zip(logs,logiter,ops,opids),key =lambda x:x[3])\n modlogger.debug( \"SR:%s\"%data)\n #And now got through all log files in Id order\n state = 'init'\n unrecoverable = []\n for log,it,op,opid in data:\n for cur_op in chain([op],it):\n #cur_op None indicated end of that logfile.\n if cur_op is None: break\n\n #We ignore any ops until we see a 'startTxn' marker, but we\n # keep a record of there ids to ensure we see a later checkpoint.\n # if we don't we can't replay partial Txn.\n modlogger.debug( \"R:%s,%s\",cur_op,state)\n if state=='init':\n #Record all operations we see before we see the first\n #start tx marker.\n if cur_op.optype == b'start_txn':\n state='txcomplete'\n elif cur_op.optype == b'abort_txn':\n #If the partial transaction we found was aborted\n # we don't need to worry about its operations. \n unrcoverable = [ ]\n elif cur_op.optype == b'Checkpoint':\n unrecoverable = _remove_commited(unrecoverable,cur_op.opid)\n else:\n unrecoverable += [ op.opid]\n \n\n #We are looking for a starttxn, marker to mark the operation\n #as valid. The only other meaningful transaction in the\n #journal in the state is a checkpoint making which ops have been\n #detected as committed to the main store by the FS.\n if state=='txcomplete':\n if cur_op.optype == b'start_txn':\n tx = cur_op.txn_id\n txops = [ ]\n state = 'txstarted'\n continue\n elif cur_op.optype == b'Checkpoint':\n unrecoverable = _remove_commited(unrecoverable,cur_op.opid)\n else: raise RecoveryError(\"Operation outside tx\")\n\n #In this state all operations are meaningful.\n # we store all operations (except checkpoint) until we see\n # a EndTxn op. At the end TxnOp we synchronously complete\n # all operations.\n if state =='txstarted':\n if cur_op.optype == b'end_txn': \n #The test below finds 'overlapped' tx, (or ones missing a commit record\n #for some reason. 
This forces us not to accept this log file.\n if cur_op.txn_id != tx: raise RecoveryError(\"Non matching Tx commit found\")\n else:\n for top in txops:\n top.do(sync = True)\n state = 'txcomplete'\n elif cur_op.optype == b'abort_txn':\n state = 'txcomplete'\n elif cur_op.optype == b'Checkpoint':\n unrecoverable = _remove_commited(unrecoverable,cur_op.opid)\n else:\n txops += [ cur_op ] \n #Log file has been processed successfully - remove it from the Fs.\n #we could call close() here and reused the allocated space on the\n #FS - but the logfile is readonly - and close() adds a terminator\n #to mark the file as empty.\n try:\n log.unlink()\n except OSError: pass\n\n #If there are any partial txn's left we have failed to recover.\n if unrecoverable: raise RecoveryError(\"Partial uncommitted txn found\")", "def _simple_deletion(self, operation, labels):\n from mogwai.models.edge import Edge\n\n label_strings = []\n for label in labels:\n if inspect.isclass(label) and issubclass(label, Edge):\n label_string = label.get_label()\n elif isinstance(label, Edge):\n label_string = label.get_label()\n elif isinstance(label, string_types):\n label_string = label\n else:\n raise MogwaiException('traversal labels must be edge classes, instances, or strings')\n label_strings.append(label_string)\n\n future = connection.future_class()\n future_result = self._delete_related(operation, label_strings)\n\n def on_read(f2):\n try:\n result = f2.result()\n except Exception as e:\n future.set_exception(e)\n else:\n future.set_result(result)\n\n def on_save(f):\n try:\n stream = f.result()\n except Exception as e:\n future.set_exception(e)\n else:\n future_read = stream.read()\n future_read.add_done_callback(on_read)\n\n future_result.add_done_callback(on_save)\n\n return future", "def execute(self, ops, exceptions=[], delay=5, maxretries=3):\n retry_errors = [NFS4ERR_DELAY, NFS4ERR_GRACE]\n state_errors = [NFS4ERR_STALE_CLIENTID, NFS4ERR_BADSESSION,\n NFS4ERR_BADSLOT, NFS4ERR_DEADSESSION]\n while True:\n res = self.sess.compound(ops)\n if res.status == NFS4_OK or res.status in exceptions:\n return res\n elif res.status in retry_errors:\n if maxretries > 0:\n maxretries -= 1\n time.sleep(delay)\n else:\n log.error(\"Too many retries with DS %s\" % self.server)\n raise Exception(\"Dataserver communication retry error\")\n elif res.status in state_errors:\n self.disconnect()\n self.connect()\n else:\n log.error(\"Unhandled status %s from DS %s\" %\n (nfsstat4[res.status], self.server))\n raise Exception(\"Dataserver communication error\")", "def _executeOperation(self, request:CSERequest, reqRi:str) -> Result:\n\t\t# Execute the actual operation\n\t\trequest.args.operation == Operation.RETRIEVE and (operationResult := CSE.dispatcher.processRetrieveRequest(request, request.headers.originator)) is not None\n\t\trequest.args.operation == Operation.CREATE and (operationResult := CSE.dispatcher.processCreateRequest(request, request.headers.originator)) is not None\n\t\trequest.args.operation == Operation.UPDATE and (operationResult := CSE.dispatcher.processUpdateRequest(request, request.headers.originator)) is not None\n\t\trequest.args.operation == Operation.DELETE and (operationResult := CSE.dispatcher.processDeleteRequest(request, request.headers.originator)) is not None\n\n\t\t# Retrieve the <request> resource\n\t\tif (res := CSE.dispatcher.retrieveResource(reqRi)).resource is None:\t\n\t\t\treturn Result(status=False) \t\t\t\t\t\t\t\t\t\t\t\t\t\t# No idea what we should do if this fails\n\t\treqres = 
res.resource\n\n\t\t# Fill the <request>\n\t\treqres['ors'] = {\t# operationResult\n\t\t\t'rsc'\t: operationResult.rsc,\n\t\t\t'rqi'\t: reqres.rid,\n\t\t\t'to'\t: request.id,\n\t\t\t'fr'\t: reqres.org,\n\t\t\t'ot'\t: reqres['mi/ot'],\n\t\t\t'rset'\t: reqres.et\n\t\t}\n\t\tif operationResult.rsc in [ RC.OK, RC.created, RC.updated, RC.deleted ] :\t\t\t# OK, created, updated, deleted -> resource\n\t\t\treqres['rs'] = RequestStatus.COMPLETED\n\t\t\tif operationResult.resource is not None:\n\t\t\t\treqres['ors/pc'] = operationResult.resource.asDict()\n\t\telse:\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Error\n\t\t\treqres['rs'] = RequestStatus.FAILED\n\t\t\tif operationResult.dbg is not None:\n\t\t\t\treqres['ors/pc'] = { 'm2m:dbg' : operationResult.dbg }\n\n\t\t# Update in DB\n\t\treqres.dbUpdate()\n\n\t\treturn Result(resource=reqres, status=True)", "def rollback(self, stage, enodes, exception):", "def main():\n session = create_session()\n delete_orphans(session)\n check_children(session)\n session.commit()", "def _run_operations(self):\n # get job informations\n job = self.active_queue[0]\n job_id = job[\"id\"]\n job_logdir = self._create_logdir(job_id)\n\n for operation in self.config[\"operations\"]:\n self._acquire_lock(job_id + \",\" + operation)\n self._run_operation(operation, job_logdir)\n self._release_lock(job_id + \",\" + operation)\n\n files_to_archives = [job[\"objects_filename\"], job[\"config_filename\"]]\n self._archive_logs(job_logdir, files_to_archives)\n self._update_state(job_id)", "def __call__(self, node, operations, last_operation):\n if last_operation == NO_OPERATION:\n return 0\n return 1", "def _operation_tree(self):\n\n # initial state\n i = 0\n level = 0\n stack = []\n current = None\n\n def _create_operation(args):\n profile_stats = None\n name = args[0].strip()\n args.pop(0)\n if len(args) > 0 and \"Records produced\" in args[-1]:\n records_produced = int(\n re.search(\"Records produced: (\\\\d+)\", args[-1]).group(1)\n )\n execution_time = float(\n re.search(\"Execution time: (\\\\d+.\\\\d+) ms\", args[-1]).group(1)\n )\n profile_stats = ProfileStats(records_produced, execution_time)\n args.pop(-1)\n return Operation(\n name, None if len(args) == 0 else args[0].strip(), profile_stats\n )\n\n # iterate plan operations\n while i < len(self.plan):\n current_op = self.plan[i]\n op_level = current_op.count(\" \")\n if op_level == level:\n # if the operation level equal to the current level\n # set the current operation and move next\n child = _create_operation(current_op.split(\"|\"))\n if current:\n current = stack.pop()\n current.append_child(child)\n current = child\n i += 1\n elif op_level == level + 1:\n # if the operation is child of the current operation\n # add it as child and set as current operation\n child = _create_operation(current_op.split(\"|\"))\n current.append_child(child)\n stack.append(current)\n current = child\n level += 1\n i += 1\n elif op_level < level:\n # if the operation is not child of current operation\n # go back to it's parent operation\n levels_back = level - op_level + 1\n for _ in range(levels_back):\n current = stack.pop()\n level -= levels_back\n else:\n raise Exception(\"corrupted plan\")\n return stack[0]", "def test_sub_doc_with_process_crash(self):\n if self.num_replicas < 2:\n self.assertTrue(False, msg=\"Required: num_replicas > 1\")\n\n # Override num_of_nodes affected to 1\n self.num_nodes_affected = 1\n\n error_sim = dict()\n shell_conn = dict()\n cbstat_obj = dict()\n failover_info = dict()\n vb_info_info = dict()\n 
active_vbs_in_target_nodes = list()\n failover_info[\"init\"] = dict()\n failover_info[\"afterCrud\"] = dict()\n vb_info_info[\"init\"] = dict()\n vb_info_info[\"afterCrud\"] = dict()\n def_bucket = self.cluster.buckets[0]\n\n self.load_data_for_sub_doc_ops()\n\n self.log.info(\"Selecting nodes to simulate error condition\")\n target_nodes = DurabilityHelper.getTargetNodes(self.cluster,\n self.nodes_init,\n self.num_nodes_affected)\n\n self.log.info(\"Will simulate error condition on %s\" % target_nodes)\n for node in target_nodes:\n # Create shell_connections\n shell_conn[node.ip] = RemoteMachineShellConnection(node)\n cbstat_obj[node.ip] = Cbstats(node)\n active_vbs = cbstat_obj[node.ip] .vbucket_list(def_bucket.name,\n \"active\")\n active_vbs_in_target_nodes += active_vbs\n vb_info_info[\"init\"][node.ip] = cbstat_obj[node.ip].vbucket_seqno(\n def_bucket.name)\n failover_info[\"init\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(def_bucket.name)\n\n # Remove active vbuckets from doc_loading to avoid errors\n\n load_spec = dict()\n # load_spec[\"target_vbuckets\"] = list(set(target_vbuckets)\n # ^ set(active_vbs_in_target_nodes))\n load_spec[\"doc_crud\"] = dict()\n load_spec[\"subdoc_crud\"] = dict()\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.READ_PERCENTAGE_PER_COLLECTION] = 10\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.INSERT_PER_COLLECTION] = 50\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.UPSERT_PER_COLLECTION] = 25\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.REMOVE_PER_COLLECTION] = 25\n\n self.log.info(\"Perform 'create', 'update', 'delete' mutations\")\n\n doc_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n load_spec,\n mutation_num=1,\n async_load=True)\n\n self.sleep(5, \"Wait for doc loaders to start loading data\")\n\n for node in target_nodes:\n # Perform specified action\n error_sim[node.ip] = CouchbaseError(self.log,\n shell_conn[node.ip],\n node=node)\n error_sim[node.ip].create(self.simulate_error,\n bucket_name=def_bucket.name)\n\n # Perform new scope/collection creation during doc ops in parallel\n self.__perform_collection_crud(mutation_num=2)\n\n # Wait for document_loader tasks to complete\n self.task_manager.get_task_result(doc_loading_task)\n self.bucket_util.validate_doc_loading_results(doc_loading_task)\n if doc_loading_task.result is False:\n self.log_failure(\"Sub_doc CRUDs failed with process crash\")\n\n # Revert the induced error condition\n for node in target_nodes:\n error_sim[node.ip].revert(self.simulate_error,\n bucket_name=def_bucket.name)\n\n # Fetch latest failover stats and validate the values are updated\n self.log.info(\"Validating failover and seqno cbstats\")\n for node in target_nodes:\n vb_info_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].vbucket_seqno(def_bucket.name)\n failover_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(def_bucket.name)\n\n # Failover validation\n val = \\\n failover_info[\"init\"][node.ip] \\\n == failover_info[\"afterCrud\"][node.ip]\n error_msg = \"Failover stats not updated after error condition\"\n self.assertTrue(val, msg=error_msg)\n\n # Seq_no validation (High level)\n val = \\\n vb_info_info[\"init\"][node.ip] \\\n != vb_info_info[\"afterCrud\"][node.ip]\n self.assertTrue(val, msg=\"vbucket seq_no not updated after CRUDs\")\n\n # Disconnect the shell connection\n for node in target_nodes:\n shell_conn[node.ip].disconnect()\n\n 
self.validate_test_failure()\n # Doc count validation\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)", "def process_operation(self, resources, resource, api, operation, context):\n pass", "def __exit__(self, exc_type, exc_instance, exc_traceback):\n # if there were no errors detected\n if exc_type is None:\n # commit the transaction to the datastore\n self.execute(*self.sql.commit())\n # otherwise\n else:\n # roll back\n self.execute(*self.sql.rollback())\n\n # indicate that we want to re-raise any exceptions that occurred while executing the\n # body of the {with} statement\n return False", "async def run(\n self,\n ctx: BaseInputSetContext,\n octx: BaseOrchestratorContext,\n operation: Operation,\n inputs: Dict[str, Any],\n ) -> Union[bool, Dict[str, Any]]:\n if not operation.retry:\n return await self.run_no_retry(ctx, octx, operation, inputs)\n for retry in range(0, operation.retry):\n try:\n return await self.run_no_retry(ctx, octx, operation, inputs)\n except Exception:\n # Raise if no more tries left\n if (retry + 1) == operation.retry:\n raise\n # Otherwise if there was an exception log it\n self.logger.error(\n \"%r: try %d: %s\",\n operation.instance_name,\n retry + 1,\n traceback.format_exc().rstrip(),\n )", "def fail_local_operation(operation, node, environment):\n run_operation(operation, node, environment, succeed=False)", "def run_transaction(self, op, max_retries=3):\n\n if Database.conn is None:\n raise TypeError(\"Connection should not be None. Did you run connect_to_db()?\")\n\n # leaving this block the transaction will commit or rollback\n # (if leaving with an exception)\n with Database.conn:\n for retry in range(1, max_retries + 1):\n try:\n result = op(Database.conn)\n\n # If we reach this point, we were able to commit, so we break\n # from the retry loop.\n return result\n\n except SerializationFailure as e:\n # This is a retry error, so we roll back the current\n # transaction and sleep for a bit before retrying. 
The\n # sleep time increases for each failed transaction.\n logging.debug(\"got error: %s\", e)\n Database.conn.rollback()\n logging.debug(\"EXECUTE SERIALIZATION_FAILURE BRANCH\")\n sleep_ms = (2 ** retry) * 0.1 * (random.random() + 0.5)\n logging.debug(\"Sleeping %s seconds\", sleep_ms)\n time.sleep(sleep_ms)\n\n except psycopg2.Error as e:\n logging.debug(\"got error: %s\", e)\n logging.debug(\"EXECUTE NON-SERIALIZATION_FAILURE BRANCH\")\n raise e\n\n raise ValueError(f\"Transaction did not succeed after {max_retries} retries\")", "async def resume_operations(self):\n await asyncio.sleep(10)\n for op in await self.get_service('data_svc').locate('operations', match=dict(finish=None)):\n self.loop.create_task(self.run_operation(op))", "def _operation_traverse(self, op, op_f, aggregate_f, combine_f): # noqa\n # apply op_f for each operation\n op_res = op_f(op)\n if len(op.children) == 0:\n return op_res # no children return\n else:\n # apply _operation_traverse recursively\n children = [\n self._operation_traverse(child, op_f, aggregate_f, combine_f)\n for child in op.children\n ]\n # combine the operation result with the children aggregated result\n return combine_f(op_res, aggregate_f(children))", "def run_all(operations=ops):\n for operation in operations:\n run(operation)", "def do_operation(self):\n operation = self.inputs['operation']\n res = self.entity.do_operation(self.context, **self.inputs)\n if res:\n return self.RES_OK, \"Node operation '%s' succeeded.\" % operation\n else:\n return self.RES_ERROR, \"Node operation '%s' failed.\" % operation", "async def run(self) -> Optional[BaseException]: # pylint: disable=too-many-branches,too-many-statements\n active = Invocation.active.get(self.name)\n if active is not None:\n return await self.done(self.wait_for(active))\n\n self._become_current()\n Logger.trace(\"Call\")\n\n global rebuild_changed_actions # pylint: disable=invalid-name\n if rebuild_changed_actions.value:\n self.new_persistent_actions.append(PersistentAction())\n self.read_old_persistent_actions()\n\n assert self.name not in Invocation.active\n Invocation.active[self.name] = self\n self.collect_initial_outputs()\n\n try:\n assert self.step is not None\n try:\n await self.done(self.step.function(**self.kwargs))\n except RestartException:\n self._restart()\n await self.done(self.step.function(**self.kwargs))\n await self.done(self.sync())\n await self.done(self.collect_final_outputs())\n\n except StepException as exception: # pylint: disable=broad-except\n self.exception = exception\n\n finally:\n self._become_current()\n\n if self.exception is None:\n assert not self.async_actions\n if self.new_persistent_actions:\n if len(self.new_persistent_actions) > 1 and self.new_persistent_actions[-1].is_empty():\n self.new_persistent_actions.pop()\n\n if not self.did_skip_actions:\n self.write_new_persistent_actions()\n elif len(self.new_persistent_actions) < len(self.old_persistent_actions):\n Logger.warning(\"Skipped some action(s) \" \"even though changed to remove some final action(s)\")\n\n if self.did_run_actions:\n Logger.trace(\"Done\")\n elif self.did_skip_actions:\n Logger.trace(\"Skipped\")\n else:\n Logger.trace(\"Complete\")\n\n else:\n while self.async_actions:\n try:\n await self.done(self.async_actions.pop())\n except StepException:\n pass\n if self.did_run_actions:\n self.poison_all_outputs()\n self.remove_old_persistent_data()\n if not isinstance(self.exception, DryRunException):\n Logger.trace(\"Fail\")\n\n del Invocation.active[self.name]\n if self.condition is 
not None:\n await self.done(self.condition.acquire())\n self.condition.notify_all()\n self.condition.release()\n\n global failure_aborts_build # pylint: disable=invalid-name\n if self.exception is not None and failure_aborts_build.value:\n no_additional_complaints()\n raise self.exception\n\n return self.exception", "def execute(self, trans):\n \n # a \"circular\" task is a circularly-sorted collection of UOWTask/UOWTaskElements\n # derived from the components of this UOWTask, which accounts for inter-row dependencies. \n # if one was created for this UOWTask, it replaces the execution for this UOWTask.\n if self.circular is not None:\n self.circular.execute(trans)\n return\n\n # TODO: add a visitation system to the UOW classes and have this execution called\n # from a separate executor object ? (would also handle dumping)\n \n self._save_objects(trans)\n self._execute_cyclical_dependencies(trans, False)\n self._execute_per_element_childtasks(trans, False)\n self._execute_dependencies(trans)\n self._execute_cyclical_dependencies(trans, True)\n self._execute_childtasks(trans)\n self._execute_per_element_childtasks(trans, True)\n self._delete_objects(trans)", "def transaction_failed_before_processing(self):", "def clean_up():\n for action in reversed(undo_actions):\n try:\n action()\n except Exception, exc:\n sys.stderr.write(\"BAD CLEANUP: Call to %s failed\\n\"\n % action.func_name)\n sys.stderr.write(\" %s\\n\" % exc)", "def test_flush_wrapper_operational_error(self):\n\n _session = self.sessionmaker()\n\n with _session.begin():\n foo = self.Foo(counter=1)\n _session.add(foo)\n\n _session.begin()\n self.addCleanup(_session.rollback)\n foo = self.Foo(counter=sqla.func.imfake(123))\n _session.add(foo)\n matched = self.assertRaises(sqla.exc.OperationalError, _session.flush)\n self.assertIn(\"no such function\", str(matched))", "def workflow_complete():\n\n if request.method == \"POST\":\n \"\"\"\n request looks like:\n {\n \"workflow_name\": \"test-workflow\",\n \"dataset_id\": \"HRI107\",\n \"operation\": \"std-dev\",\n \"PID\": 1\n \"other_cardinals\": [(2, \"23.45.67.89\"), (3, \"34.56.78.90\")],\n \"jiff_server\": \"45.67.89.01\"\n }\n \"\"\"\n\n req = request.get_json(force=True)\n\n pods = get_pod_by_workflow_and_pid(req[\"workflow_name\"], req[\"PID\"])\n if pods is not None:\n for pod in pods:\n delete_entry(pod)\n\n jiff_server = get_jiff_server_by_workflow(req[\"workflow_name\"])\n if jiff_server is not None:\n delete_entry(jiff_server)\n\n orch = Orchestrator(req, app, len(get_running_workflows()))\n\n orch.stop_workflow()\n\n app.logger.info(f\"Workflow {req['workflow_name']} complete, removed from running jobs.\")\n\n event_timestamps = get_pod_event_timestamp_by_workflow_and_pid(req['workflow_name'],req['PID'])\n if event_timestamps is not None:\n delete_entry(event_timestamps)\n\n event_timestamps_dict = {x.name: str(getattr(event_timestamps, x.name)) for x in event_timestamps.__table__.columns}\n\n pod_resource_usage = get_pod_resource_consumption_by_workflow_and_pid(req['workflow_name'],req['PID'])\n usage = {'cpu': {'avg': None, 'max': None}, 'memory': {'avg': None, 'max': None}}\n if pod_resource_usage is not None:\n cpu_consumptions = [obj.cpu_usage for obj in pod_resource_usage]\n memory_consumptions = [obj.memory_usage for obj in pod_resource_usage]\n\n if len(cpu_consumptions) > 0:\n usage['cpu'] = {\n 'avg': sum(cpu_consumptions) / len(cpu_consumptions),\n 'max': max(cpu_consumptions)\n }\n\n if len(memory_consumptions) > 0:\n usage['memory'] = {\n 'avg': 
sum(memory_consumptions) / len(memory_consumptions),\n 'max': max(memory_consumptions)\n }\n\n for obj in pod_resource_usage:\n delete_entry(obj)\n\n app.logger.info(\"ABOUT TO send pod stats\")\n orch.send_pod_stats(usage, event_timestamps_dict)\n response = {\n \"MSG\": \"OK\",\n \"timestamps\": event_timestamps_dict,\n \"resource_consumption\": usage\n }\n else:\n\n app.logger.error(\n f\"Received request indicating the workflow {req['workflow_name']} \"\n f\"completed, but this workflow is not present in running jobs\"\n f\"record. Nothing to do.\")\n response = {\n \"MSG\": f\"ERR: {req['workflow_name']} not in running jobs record.\"\n }\n\n return jsonify(response)", "def tearDown(self):\n if not self.io_validation_complete:\n g.log.info(\"Wait for IO to complete as IO validation did not \"\n \"succeed in test method\")\n ret = wait_for_io_to_complete(self.all_mounts_procs, self.mounts)\n if not ret:\n raise ExecutionError(\"IO failed on some of the clients\")\n g.log.info(\"IO is successful on all mounts\")\n\n # Cleanup and umount volume\n g.log.info(\"Starting to Unmount Volume and Cleanup Volume\")\n ret = self.unmount_volume_and_cleanup_volume(mounts=self.mounts)\n if not ret:\n raise ExecutionError(\"Failed to umount the vol & cleanup Volume\")\n g.log.info(\"Successful in umounting the volume and Cleanup\")\n\n # Calling GlusterBaseClass teardown\n GlusterBaseClass.tearDown.im_func(self)", "def process_exception(self, request, exception):\r\n if transaction.is_dirty():\r\n transaction.rollback()\r\n transaction.leave_transaction_management()", "def execute_handler_action(self):\n try:\n # fetch seq_no\n self.seq_no = self.ext_config_settings_handler.get_seq_no(is_enable_request=True)\n if self.seq_no is None:\n self.logger.log_error(\"Sequence number for current operation not found\")\n exit(Constants.ExitCode.ConfigurationError)\n\n # read status file, to load any preserve existing context\n self.ext_output_status_handler.read_file(self.seq_no)\n\n config_settings = self.ext_config_settings_handler.read_file(self.seq_no)\n\n # set activity_id in telemetry\n if self.telemetry_writer is not None:\n self.telemetry_writer.set_operation_id(config_settings.__getattribute__(self.config_public_settings.activity_id))\n\n operation = config_settings.__getattribute__(self.config_public_settings.operation)\n\n # Allow only certain operations\n if operation not in [Constants.NOOPERATION, Constants.ASSESSMENT, Constants.INSTALLATION, Constants.CONFIGURE_PATCHING]:\n self.logger.log_error(\"Requested operation is not supported by the extension\")\n self.ext_output_status_handler.write_status_file(operation, self.seq_no, status=Constants.Status.Error.lower(), message=\"Requested operation {0} is not supported by the extension\".format(str(operation)), code=Constants.ExitCode.OperationNotSupported)\n exit(Constants.ExitCode.OperationNotSupported)\n\n prev_patch_max_end_time = self.cmd_exec_start_time + datetime.timedelta(hours=0, minutes=Constants.ENABLE_MAX_RUNTIME)\n self.ext_state_handler.create_file(self.seq_no, operation, prev_patch_max_end_time)\n core_state_content = self.core_state_handler.read_file()\n\n # log tmp folder size\n self.ext_env_handler.log_temp_folder_details()\n\n # if NoOperation is requested, terminate all running processes from previous operation and update status file\n if operation == Constants.NOOPERATION:\n self.process_nooperation(config_settings, core_state_content)\n else:\n # if any of the other operations are requested, verify if request is a new request or 
a re-enable, by comparing sequence number from the prev request and current one\n if core_state_content is None or core_state_content.__getattribute__(self.core_state_fields.number) is None:\n # first patch request for the VM\n self.logger.log(\"No state information was found for any previous patch operation. Launching a new patch operation.\")\n self.launch_new_process(config_settings, create_status_output_file=True)\n else:\n if int(core_state_content.__getattribute__(self.core_state_fields.number)) != int(self.seq_no):\n # new request\n self.process_enable_request(config_settings, prev_patch_max_end_time, core_state_content)\n else:\n # re-enable request\n self.process_reenable_request(config_settings, core_state_content)\n\n except Exception as error:\n self.logger.log_error(\"Failed to execute enable. [Exception={0}]\".format(repr(error)))\n raise", "def _execute(self, operation):\n # Use handle to query\n return self.session.execute_statement(self._statement_cache.get_statement(), operation)", "def _is_valid_delete_operation(session, row):\n # Check for any pending or processing create or update\n # ops on the row itself\n if db.check_for_pending_or_processing_ops(\n session, row.object_uuid, operation=[odl_const.ODL_UPDATE,\n odl_const.ODL_CREATE]):\n return False\n\n # Check for dependent operations\n dependent_resource_types = _DELETE_DEPENDENCIES.get(row.object_type)\n if dependent_resource_types is not None:\n for resource_type in dependent_resource_types:\n if db.check_for_pending_delete_ops_with_parent(\n session, resource_type, row.object_uuid):\n return False\n return True", "def test_with_process_crash(self):\n if self.num_replicas < 2:\n self.assertTrue(False, msg=\"Required: num_replicas > 1\")\n\n # Override num_of_nodes affected to 1 (Positive case)\n self.num_nodes_affected = 1\n\n error_sim = dict()\n shell_conn = dict()\n cbstat_obj = dict()\n failover_info = dict()\n vb_info_info = dict()\n active_vbs_in_target_nodes = list()\n failover_info[\"init\"] = dict()\n failover_info[\"afterCrud\"] = dict()\n vb_info_info[\"init\"] = dict()\n vb_info_info[\"afterCrud\"] = dict()\n\n self.log.info(\"Selecting nodes to simulate error condition\")\n target_nodes = DurabilityHelper.getTargetNodes(self.cluster,\n self.nodes_init,\n self.num_nodes_affected)\n\n self.log.info(\"Will simulate error condition on %s\" % target_nodes)\n for node in target_nodes:\n cbstat_obj[node.ip] = Cbstats(node)\n active_vbs_in_target_nodes += cbstat_obj[node.ip].vbucket_list(\n self.bucket.name,\n \"active\")\n vb_info_info[\"init\"][node.ip] = cbstat_obj[node.ip].vbucket_seqno(\n self.bucket.name)\n failover_info[\"init\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(self.bucket.name)\n\n # Remove active vbuckets from doc_loading to avoid errors\n load_spec = dict()\n load_spec[\"doc_crud\"] = dict()\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 100\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] = 25\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] = 25\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.COMMON_DOC_KEY] = \"test_collections\"\n load_spec[\"target_vbuckets\"] = list(set(range(0, 1024))\n ^ set(active_vbs_in_target_nodes))\n\n self.log.info(\"Perform 'create', 'update', 'delete' mutations\")\n doc_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n load_spec,\n mutation_num=1,\n 
async_load=True)\n\n self.sleep(5, \"Wait for doc loaders to start loading data\")\n\n for node in target_nodes:\n # Create shell_connections\n shell_conn[node.ip] = RemoteMachineShellConnection(node)\n\n # Perform specified action\n error_sim[node.ip] = CouchbaseError(self.log,\n shell_conn[node.ip],\n node=node)\n error_sim[node.ip].create(self.simulate_error,\n bucket_name=self.bucket.name)\n\n # Perform new scope/collection creation during doc ops in parallel\n self.__perform_collection_crud()\n\n # Wait for document_loader tasks to complete\n self.task_manager.get_task_result(doc_loading_task)\n self.bucket_util.validate_doc_loading_results(doc_loading_task)\n if doc_loading_task.result is False:\n self.log_failure(\"Doc CRUDs failed with process crash\")\n\n if self.simulate_error \\\n not in [DiskError.DISK_FULL, DiskError.DISK_FAILURE]:\n # Revert the induced error condition\n for node in target_nodes:\n error_sim[node.ip].revert(self.simulate_error,\n bucket_name=self.bucket.name)\n\n # Disconnect the shell connection\n shell_conn[node.ip].disconnect()\n self.sleep(10, \"Wait for node recovery to complete\")\n\n # In case of error with Ephemeral bucket, need to rebalance\n # to make sure data is redistributed properly\n if self.bucket_type == Bucket.Type.EPHEMERAL:\n retry_num = 0\n result = None\n while retry_num != 2:\n result = self.task.rebalance(\n self.servers[0:self.nodes_init],\n [], [])\n if result:\n break\n retry_num += 1\n self.sleep(10, \"Wait before retrying rebalance\")\n\n self.assertTrue(result, \"Rebalance failed\")\n\n # Fetch latest failover stats and validate the values are updated\n self.log.info(\"Validating failover and seqno cbstats\")\n for node in target_nodes:\n vb_info_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].vbucket_seqno(self.bucket.name)\n failover_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(self.bucket.name)\n\n # Failover stat validation\n if self.simulate_error == CouchbaseError.KILL_MEMCACHED:\n val = failover_info[\"init\"][node.ip] \\\n != failover_info[\"afterCrud\"][node.ip]\n else:\n if self.simulate_error != CouchbaseError.STOP_MEMCACHED \\\n and self.bucket_type == Bucket.Type.EPHEMERAL:\n val = failover_info[\"init\"][node.ip] \\\n != failover_info[\"afterCrud\"][node.ip]\n else:\n val = failover_info[\"init\"][node.ip] \\\n == failover_info[\"afterCrud\"][node.ip]\n error_msg = \"Failover stats mismatch after error condition:\" \\\n \" %s != %s\" \\\n % (failover_info[\"init\"][node.ip],\n failover_info[\"afterCrud\"][node.ip])\n self.assertTrue(val, msg=error_msg)\n\n # Seq_no validation (High level)\n val = \\\n vb_info_info[\"init\"][node.ip] \\\n != vb_info_info[\"afterCrud\"][node.ip]\n self.assertTrue(val, msg=\"vbucket seq_no not updated after CRUDs\")\n\n # Doc count validation\n self.validate_test_failure()\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)", "def __exit__(self, exc_type, exc_value, traceback):\n\t\tself.delete_extracted()\n\t\tself.delete()", "def _rollback_context(self, persister):\n try:\n # Rollback the job transactional context.\n persister.rollback()\n\n except _errors.DatabaseError as error:\n _LOGGER.error(\n \"Error in %s rolling back job's context.\",\n self.__action.__name__, exc_info=error\n )\n\n # Update the job status.\n self.__result = False\n message = \"Tried to execute action ({0}).\".format(\n self.__action.__name__)\n self._add_status(Job.ERROR, Job.COMPLETE, message, True)\n\n # Finish context which means mark the job as 
finished\n # and update procedure's information.\n self._finish_context(False)", "def _process_operations(self, operations: List[MemoryOperation], slices: SlicesType = None) -> None:\n for op in operations:\n if op.inst == MemoryOperation.NOOP:\n pass # do nothing\n elif op.inst == MemoryOperation.LOAD:\n if MemoryOperation.LOAD_SUBARRAY in op.flag:\n # build slices mapping first\n self._array.set_slices_mapping(op.dst, slices)\n\n # check flag to see if dst is current device\n dst_is_current_device = op.flag != MemoryOperation.SWITCH_DEVICE_FLAG\n\n # copy data\n if num_gpu > 0:\n cupy.cuda.stream.get_current_stream().synchronize()\n \n self._array.copy_data_between_device(\n op.dst, op.src, dst_is_current_device)\n\n # sync stream before set it as ready, so asyc call is ensured to be done\n if num_gpu > 0:\n cupy.cuda.stream.get_current_stream().synchronize()\n elif op.inst == MemoryOperation.EVICT:\n # decrement the reference counter, relying on GC to free the memor\n self._array.clear(op.src)\n elif op.inst == MemoryOperation.ERROR:\n raise RuntimeError(\"PArray gets an error from coherence protocol\")\n else:\n raise RuntimeError(f\"PArray gets invalid memory operation from coherence protocol, \"\n f\"detail: opcode {op.inst}, dst {op.dst}, src {op.src}\")", "def test_cancel_operation(self):\n con = sqlite.connect(\":memory:\")\n def progress():\n return 1\n con.set_progress_handler(progress, 1)\n curs = con.cursor()\n self.assertRaises(\n sqlite.OperationalError,\n curs.execute,\n \"create table bar (a, b)\")", "def execute(self):\n\n with self._lock_c:\n self.count = 0\n self.numtasks = 0\n self.taskset = []\n self.results = {}\n self.totaltime = time.time()\n # Start all tasks\n for task in self.taskseq:\n self.taskset.append(task)\n self.numtasks += 1\n task.init_and_start(self)\n\n num_tasks = self.getNumTasks()\n # Wait on each task to clean up results\n while num_tasks > 0:\n\n self.check_state()\n\n for i in range(num_tasks):\n try:\n try:\n task = self.getTask(i)\n except IndexError:\n # A task got deleted from the set. 
Jump back out\n # to outer loop and repoll the number of tasks\n break\n\n #self.logger.debug(\"waiting on %s\" % task)\n res = task.wait(timeout=self.idletime)\n\n #self.logger.debug(\"finished: %s\" % task)\n self.child_done(res, task)\n\n except TaskTimeout:\n continue\n\n except Exception as e:\n #self.logger.warning(\"Subtask propagated exception: %s\" % str(e))\n self.child_done(e, task)\n continue\n\n # wait a bit and try again\n #self.ev_quit.wait(self.idletime)\n\n # re-get number of tasks, in case some were added or deleted\n num_tasks = self.getNumTasks()\n\n # Scan results for errors (exceptions) and raise the first one we find\n for key in self.results.keys():\n value = self.results[key]\n if isinstance(value, Exception):\n (count, task) = key\n self.logger.error(\"Child task %s terminated with exception: %s\" % (\n task.tag, str(value)))\n raise value\n\n # Return value of last child to complete\n return value", "def batch_write(self, operations: list, allowed_errors: List[str] = None) -> List[dict]:\n allowed_errors = allowed_errors or []\n responses = [] # contains succesful responses\n while True:\n try:\n for i in range(0, len(operations), self._batch_write_max):\n ops = operations[i:i + self._batch_write_max]\n responses.extend(\n cd_client.batch_write(\n DirectoryArn=self._dir_arn,\n Operations=ops)['Responses'])\n break\n except cd_client.exceptions.BatchWriteException as ex:\n parsed_msg = ex.response['Error']['Message'].split(\" \")\n failed_op_index, error = (int(parsed_msg[1]), parsed_msg[2])\n logger.warning({\n \"message\": ex,\n \"response\": ex.response,\n \"operations\": {\n \"failed\": operations.pop(i + failed_op_index),\n \"skipped\": len(operations[i:]),\n \"sucessful\": len(operations[:i + failed_op_index])\n }\n })\n if error[:-1] in allowed_errors:\n operations = operations[i:]\n else:\n raise ex\n\n return responses", "def _run_micro_op_list(self, mo_list):\n current_dom = copy.deepcopy(self.current_dom)\n warning_list = []\n return_dict = {}\n ifcfgs = ''\n ovsvsctl_show = {}\n\n # always run validation, even if force flag is\n # set to True to make sure there are no errors\n # encountered.\n ops_ran_list = []\n for micro_op in mo_list:\n try:\n ops_ran_list.append(micro_op.__class__.__name__)\n LOG.debug(\"running micro op %s with DOM %s\" %\n (micro_op.__class__,\n current_dom))\n current_dom, curr_warning_list = \\\n micro_op.validate(current_dom)\n warning_list.extend(curr_warning_list)\n for warning in curr_warning_list:\n LOG.warn(_('Warning \"%(warn_name)s\" occurred during '\n 'validation of operation %(oper)s: %(warn)s') %\n {'warn_name': warning.name,\n 'oper': micro_op.__class__.__name__,\n 'warn': warning})\n except Exception as exc:\n LOG.exception(exc)\n LOG.error(_(\"List of operations run: %s\" % ops_ran_list))\n return_dict[agent.ERRORS_KEY] = [{'message': '%s' % exc}]\n break\n\n # if the force flag is set we can ignore warnings but\n # we cannot avoid errors, so check to be sure there\n # were no errors\n ops_ran_list = []\n\n if((self.force_flag or len(warning_list) == 0)\n and agent.ERRORS_KEY not in return_dict):\n\n # in case of error, last_index is used to determine\n # where to start undo from\n last_index = -1\n\n # execute micro op list\n for i in range(0, len(mo_list)):\n try:\n ops_ran_list.append(mo_list[i].__class__.__name__)\n mo_list[i].execute()\n except Exception as exc:\n LOG.exception(exc)\n LOG.error(_(\"List of operations run: %s\" % ops_ran_list))\n return_dict[agent.ERRORS_KEY] = [{'message': \"%s\" % exc}]\n 
last_index = i\n break\n\n # do we need to undo because of error?\n if last_index != -1:\n LOG.error(_(\"Error during operation execution, undoing \"\n \"operations...\"))\n\n # Get the current state of the ifcfg files and ovs to log later\n try:\n ifcfgs = commandlet.CommandExecutor.\\\n get_all_ifcfg_files_for_logging()\n ovsvsctl_show = \\\n commandlet.CommandExecutor.send_vsctl_command()\n except Exception as e:\n LOG.exception(e)\n\n undo_list = []\n # yes, undo needed; undo in reverse order\n reversed_list = self._reorder_ops_for_undo(mo_list,\n last_index)\n for op in reversed_list:\n try:\n op.undo()\n undo_list.append(op.__class__.__name__)\n except Exception as exc:\n # if we hit an error during undo, we will\n # add the error to the error list and continue\n # to attempt to undo the remaining micro ops\n LOG.exception(exc)\n return_dict[agent.ERRORS_KEY].append(\n {'message': '%s' % exc})\n\n LOG.error(_(\"Undone operations: %s\" % undo_list))\n\n # we are not doing an execution, so return the warnings\n else:\n # add warnings to return list\n if len(warning_list) > 0:\n return_dict[agent.WARNINGS_KEY] = []\n for warning in warning_list:\n return_dict[agent.WARNINGS_KEY].append(\n {'message': '%s' % warning})\n\n # Errors/warnings occurred. Log initial dom, request dom, the dom as\n # it was when the error occurred, operations run, ifcfg files, and\n # ovs-vsctl show output\n if return_dict is not {}:\n debug_info_list = []\n debug_info_list.append(_('Initial Object Model is:'))\n debug_info_list.append(json.dumps(self.current_dom.to_dict(),\n sort_keys=True, indent=4))\n debug_info_list.append(_('Requested Object Model is:'))\n debug_info_list.append(json.dumps(self.desired_dom.to_dict(),\n sort_keys=True, indent=4))\n debug_info_list.append(_('Current Object Model is:'))\n debug_info_list.append(json.dumps(current_dom.to_dict(),\n sort_keys=True, indent=4))\n debug_info_list.append(_(\"List of operations returned by builder: \"\n \"%s\" % [mo_list[i].__class__.__name__\n for i in range(0, len(mo_list))]))\n if ifcfgs:\n # These are only logged on errors, not on warnings\n debug_info_list.append(_(\"Contents of ifcfg files: %s\" %\n ifcfgs))\n debug_info_list.append(_(\"ovs-vsctl show: \"))\n debug_info_list.append(json.dumps(ovsvsctl_show,\n sort_keys=True,\n indent=4))\n\n if agent.ERRORS_KEY in return_dict:\n for message in debug_info_list:\n LOG.error(message)\n else:\n for message in debug_info_list:\n LOG.warn(message)\n\n return return_dict", "def execute(self, op_state: OperationState = None):\n if op_state is None:\n op_state = OperationState()\n\n for op in self._operations:\n op_state = op.execute(op_state)\n\n return op_state", "def Exec(self, feedback_fn):\n self.feedback_fn = feedback_fn\n # Process here the warnings from CheckPrereq, as we don't have a\n # feedback_fn there.\n # TODO: Replace with self.LogWarning\n for warn in self.warn:\n feedback_fn(\"WARNING: %s\" % warn)\n\n assert ((self.op.disk_template is None) ^\n bool(self.owned_locks(locking.LEVEL_NODE_RES))), \\\n \"Not owning any node resource locks\"\n\n result = []\n\n # New primary node\n if self.op.pnode_uuid:\n self.instance.primary_node = self.op.pnode_uuid\n\n # runtime memory\n if self.op.runtime_mem:\n rpcres = self.rpc.call_instance_balloon_memory(self.instance.primary_node,\n self.instance,\n self.op.runtime_mem)\n rpcres.Raise(\"Cannot modify instance runtime memory\")\n result.append((\"runtime_memory\", self.op.runtime_mem))\n\n # Apply disk changes\n inst_disks = 
self.cfg.GetInstanceDisks(self.instance.uuid)\n ApplyContainerMods(\"disk\", inst_disks, result, self.diskmod,\n self._CreateNewDisk, self._AttachDisk, self._ModifyDisk,\n self._RemoveDisk, self._DetachDisk,\n post_add_fn=self._PostAddDisk)\n\n if self.op.disk_template:\n if __debug__:\n check_nodes = set(self.cfg.GetInstanceNodes(self.instance.uuid))\n if self.op.remote_node_uuid:\n check_nodes.add(self.op.remote_node_uuid)\n for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]:\n owned = self.owned_locks(level)\n assert not (check_nodes - owned), \\\n (\"Not owning the correct locks, owning %r, expected at least %r\" %\n (owned, check_nodes))\n\n r_shut = ShutdownInstanceDisks(self, self.instance)\n if not r_shut:\n raise errors.OpExecError(\"Cannot shutdown instance disks, unable to\"\n \" proceed with disk template conversion\")\n #TODO make heterogeneous conversions work\n mode = (self.cfg.GetInstanceDiskTemplate(self.instance.uuid),\n self.op.disk_template)\n try:\n if mode in self._DISK_CONVERSIONS:\n self._DISK_CONVERSIONS[mode](self, feedback_fn)\n else:\n self._ConvertInstanceDisks(feedback_fn)\n except:\n for disk in inst_disks:\n self.cfg.ReleaseDRBDMinors(disk.uuid)\n raise\n result.append((\"disk_template\", self.op.disk_template))\n\n disk_info = self.cfg.GetInstanceDisks(self.instance.uuid)\n assert utils.AllDiskOfType(disk_info, [self.op.disk_template]), \\\n (\"Expected disk template '%s', found '%s'\" %\n (self.op.disk_template,\n self.cfg.GetInstanceDiskTemplate(self.instance.uuid)))\n\n # Release node and resource locks if there are any (they might already have\n # been released during disk conversion)\n ReleaseLocks(self, locking.LEVEL_NODE)\n ReleaseLocks(self, locking.LEVEL_NODE_RES)\n\n # Apply NIC changes\n if self._new_nics is not None:\n self.instance.nics = self._new_nics\n result.extend(self._nic_chgdesc)\n\n # hvparams changes\n if self.op.hvparams:\n self.instance.hvparams = self.hv_inst\n for key, val in self.op.hvparams.items():\n result.append((\"hv/%s\" % key, val))\n\n # beparams changes\n if self.op.beparams:\n self.instance.beparams = self.be_inst\n for key, val in self.op.beparams.items():\n result.append((\"be/%s\" % key, val))\n\n # OS change\n if self.op.os_name:\n self.instance.os = self.op.os_name\n\n # osparams changes\n if self.op.osparams:\n self.instance.osparams = self.os_inst\n for key, val in self.op.osparams.items():\n result.append((\"os/%s\" % key, val))\n\n if self.op.osparams_private:\n self.instance.osparams_private = self.os_inst_private\n for key, val in self.op.osparams_private.items():\n # Show the Private(...) 
blurb.\n result.append((\"os_private/%s\" % key, repr(val)))\n\n self.cfg.Update(self.instance, feedback_fn, self.proc.GetECId())\n\n if self.op.offline is None:\n # Ignore\n pass\n elif self.op.offline:\n # Mark instance as offline\n self.instance = self.cfg.MarkInstanceOffline(self.instance.uuid)\n result.append((\"admin_state\", constants.ADMINST_OFFLINE))\n else:\n # Mark instance as online, but stopped\n self.instance = self.cfg.MarkInstanceDown(self.instance.uuid)\n result.append((\"admin_state\", constants.ADMINST_DOWN))\n\n UpdateMetadata(feedback_fn, self.rpc, self.instance)\n\n assert not (self.owned_locks(locking.LEVEL_NODE_RES) or\n self.owned_locks(locking.LEVEL_NODE)), \\\n \"All node locks should have been released by now\"\n\n return result", "def test_noWorkDoneWhenConcurrentlyDeleted(self):\n # Provide access to a method called \"concurrently\" everything using\n original = self.store.newTransaction\n\n def decorate(*a, **k):\n result = original(*a, **k)\n result.concurrently = self.store.newTransaction\n return result\n\n self.store.newTransaction = decorate\n\n def operation(txn):\n return txn.enqueue(\n DummyWorkItem, a=30, b=40, workID=5678,\n deleteOnLoad=1,\n notBefore=datetime.datetime.utcnow()\n )\n\n proposal = yield inTransaction(self.store.newTransaction, operation)\n yield proposal.whenExecuted()\n\n # Sanity check on the concurrent deletion.\n def op2(txn):\n return Select(\n [schema.DUMMY_WORK_ITEM.WORK_ID],\n From=schema.DUMMY_WORK_ITEM\n ).on(txn)\n\n rows = yield inTransaction(self.store.newTransaction, op2)\n self.assertEquals(rows, [])\n\n def op3(txn):\n return Select(\n [\n schema.DUMMY_WORK_DONE.WORK_ID,\n schema.DUMMY_WORK_DONE.A_PLUS_B,\n ],\n From=schema.DUMMY_WORK_DONE\n ).on(txn)\n\n rows = yield inTransaction(self.store.newTransaction, op3)\n self.assertEquals(rows, [])", "def rollback(self, exc):\n USER.info('%s: Rolling Back Failed Build', self.recipe.name)\n cascade = False\n if isinstance(exc, AssertionError):\n logging.error('Error during verify() of %s', self.recipe.name)\n cascade = True\n if cascade or isinstance(exc, PakitLinkError):\n if not cascade:\n logging.error('Error during linking of %s', self.recipe.name)\n walk_and_unlink(self.recipe.install_dir, self.recipe.link_dir)\n cascade = True\n if cascade or (not isinstance(exc, PakitLinkError) and\n not isinstance(exc, AssertionError)):\n if not cascade:\n logging.error('Error during build() of %s', self.recipe.name)\n try:\n Command('rm -rf ' + self.recipe.install_dir).wait()\n except PakitCmdError: # pragma: no cover\n pass", "def fix(self):\n exceptionError = ''\n for each in self.errorNodes:\n try:\n pm.delete(each)\n except exceptionError:\n print exceptionError", "def after():\n to_call = []\n exceptions = []\n yield to_call\n for o in to_call:\n try:\n o()\n except Exception as e:\n exceptions.append(e)\n if exceptions:\n\n raise Exception('Exception(s) raised when cleaning up' +\n ('\\n'.join(str(e) for e in exceptions)))", "def _transition_from_CLEANING_UP(self, run_state):\n\n def remove_path_no_fail(path):\n try:\n remove_path(path)\n except Exception:\n logger.error(traceback.format_exc())\n\n if run_state.container_id is not None:\n while self.bundle_runtime.container_exists(run_state.container_id):\n try:\n finished, _, _ = self.bundle_runtime.check_finished(run_state.container_id)\n if finished:\n self.bundle_runtime.remove(run_state.container_id)\n run_state = run_state._replace(container=None, container_id=None)\n break\n else:\n try:\n 
self.bundle_runtime.kill(run_state.container_id)\n except RuntimeAPIError:\n logger.error(traceback.format_exc())\n time.sleep(1)\n except RuntimeAPIError:\n logger.error(traceback.format_exc())\n time.sleep(1)\n\n try:\n # Fetching dependencies from the Dependency Manager can fail.\n # Finish cleaning up on the next iteration of this transition function.\n for dep in run_state.bundle.dependencies:\n if not self.shared_file_system: # No dependencies if shared fs worker\n dep_key = DependencyKey(dep.parent_uuid, dep.parent_path)\n self.dependency_manager.release(run_state.bundle.uuid, dep_key)\n except (ValueError, EnvironmentError):\n # Do nothing if an error is thrown while reading from the state file\n logging.exception(\n f\"Error reading from dependencies state file while releasing a dependency from {run_state.bundle.uuid}\"\n )\n return run_state\n\n # Clean up dependencies paths\n for path in run_state.paths_to_remove or []:\n remove_path_no_fail(path)\n run_state = run_state._replace(paths_to_remove=[])\n\n if run_state.is_restaged:\n log_bundle_transition(\n bundle_uuid=run_state.bundle.uuid,\n previous_stage=run_state.stage,\n next_stage=RunStage.RESTAGED,\n reason=self.RESTAGED_REASON,\n )\n return run_state._replace(stage=RunStage.RESTAGED)\n\n if not self.shared_file_system and run_state.has_contents:\n log_bundle_transition(\n bundle_uuid=run_state.bundle.uuid,\n previous_stage=run_state.stage,\n next_stage=RunStage.UPLOADING_RESULTS,\n )\n return run_state._replace(\n stage=RunStage.UPLOADING_RESULTS, run_status='Uploading results.', container=None\n )\n else:\n # No need to upload results since results are directly written to bundle store\n # Delete any files that match the exclude_patterns .\n for exclude_pattern in run_state.bundle.metadata[\"exclude_patterns\"]:\n full_pattern = os.path.join(run_state.bundle_path, exclude_pattern)\n for file_path in glob.glob(full_pattern, recursive=True):\n # Only remove files that are subpaths of run_state.bundle_path, in case\n # that exclude_pattern is something like \"../../../\".\n if path_is_parent(parent_path=run_state.bundle_path, child_path=file_path):\n remove_path(file_path)\n return self.finalize_run(run_state)", "def test_00_cascade(self):\n cat = self.cat\n\n # get the id's of all objects that should be deleted.\n uid = cat.uaccess.id\n orid = self.scratching.id\n arid = self.scratching.raccess.id\n ogid = self.felines.id\n agid = self.felines.gaccess.id\n gpid = UserGroupPrivilege.objects.get(user=cat).id\n rpid = UserResourcePrivilege.objects.get(user=cat).id\n mpid = GroupMembershipRequest.objects.get(request_from=cat).id\n\n # all objects exist before the delete\n self.assertEqual(UserAccess.objects.filter(id=uid).count(), 1)\n self.assertEqual(UserGroupPrivilege.objects.filter(id=gpid).count(), 1)\n self.assertEqual(\n UserResourcePrivilege.objects.filter(\n id=rpid).count(), 1)\n self.assertEqual(\n GroupMembershipRequest.objects.filter(\n id=mpid).count(), 1)\n self.assertEqual(ResourceAccess.objects.filter(id=arid).count(), 1)\n self.assertEqual(GroupAccess.objects.filter(id=agid).count(), 1)\n self.assertEqual(BaseResource.objects.filter(id=orid).count(), 1)\n self.assertEqual(Group.objects.filter(id=ogid).count(), 1)\n\n cat.delete()\n\n # objects tied to the user are deleted, other objects continue to exist\n self.assertEqual(UserAccess.objects.filter(id=uid).count(), 0)\n self.assertEqual(UserGroupPrivilege.objects.filter(id=gpid).count(), 0)\n self.assertEqual(\n UserResourcePrivilege.objects.filter(\n 
id=rpid).count(), 0)\n self.assertEqual(\n GroupMembershipRequest.objects.filter(\n id=mpid).count(), 0)\n # deleting a user should not remove the groups that user owns\n self.assertEqual(GroupAccess.objects.filter(id=agid).count(), 1)\n self.assertEqual(Group.objects.filter(id=ogid).count(), 1)\n\n # the following tests will fail, because the resource field\n # \"creator\" is a foreign key to User with on_delete=models.CASCADE\n # and null=False. Thus removing the creator of a resource will\n # remove the resource record (and orphan many files in the process).\n\n # print('resource access count is ', ResourceAccess.objects.filter(id=arid).count())\n # print('resource count is ', BaseResource.objects.filter(id=orid).count())\n # self.assertEqual(ResourceAccess.objects.filter(id=arid).count(), 1)\n # self.assertEqual(BaseResource.objects.filter(id=orid).count(), 1)", "def test_rollback(self):\n os.system('rm config.txt; touch config.txt')\n test_oplog, primary_conn, mongos, solr = self.get_new_oplog()\n\n if not start_cluster():\n self.fail('Cluster could not be started successfully!')\n\n solr = DocManager()\n test_oplog.doc_manager = solr\n solr._delete() # equivalent to solr.delete(q='*: *')\n\n mongos['test']['test'].remove({})\n mongos['test']['test'].insert( \n {'_id': ObjectId('4ff74db3f646462b38000001'),\n 'name': 'paulie'},\n safe=True\n )\n while (mongos['test']['test'].find().count() != 1):\n time.sleep(1)\n cutoff_ts = test_oplog.get_last_oplog_timestamp()\n\n first_doc = {'name': 'paulie', '_ts': bson_ts_to_long(cutoff_ts),\n 'ns': 'test.test',\n '_id': ObjectId('4ff74db3f646462b38000001')}\n\n #try kill one, try restarting\n kill_mongo_proc(primary_conn.host, PORTS_ONE['PRIMARY'])\n\n new_primary_conn = Connection(HOSTNAME, int(PORTS_ONE['SECONDARY']))\n admin = new_primary_conn['admin']\n while admin.command(\"isMaster\")['ismaster'] is False:\n time.sleep(1)\n time.sleep(5)\n count = 0\n while True:\n try:\n mongos['test']['test'].insert({\n '_id': ObjectId('4ff74db3f646462b38000002'),\n 'name': 'paul'}, \n safe=True)\n break\n except OperationFailure:\n count += 1\n if count > 60:\n self.fail('Call to insert doc failed too many times')\n time.sleep(1)\n continue\n while (mongos['test']['test'].find().count() != 2):\n time.sleep(1)\n kill_mongo_proc(primary_conn.host, PORTS_ONE['SECONDARY'])\n start_mongo_proc(PORTS_ONE['PRIMARY'], \"demo-repl\", \"/replset1a\",\n \"/replset1a.log\", None)\n\n #wait for master to be established\n while primary_conn['admin'].command(\"isMaster\")['ismaster'] is False:\n time.sleep(1)\n\n start_mongo_proc(PORTS_ONE['SECONDARY'], \"demo-repl\", \"/replset1b\",\n \"/replset1b.log\", None)\n\n #wait for secondary to be established\n admin = new_primary_conn['admin']\n while admin.command(\"replSetGetStatus\")['myState'] != 2:\n time.sleep(1)\n while retry_until_ok(mongos['test']['test'].find().count) != 1:\n time.sleep(1)\n\n self.assertEqual(str(new_primary_conn.port), PORTS_ONE['SECONDARY'])\n self.assertEqual(str(primary_conn.port), PORTS_ONE['PRIMARY'])\n\n last_ts = test_oplog.get_last_oplog_timestamp()\n second_doc = {'name': 'paul', '_ts': bson_ts_to_long(last_ts),\n 'ns': 'test.test', \n '_id': ObjectId('4ff74db3f646462b38000002')}\n\n test_oplog.doc_manager.upsert(first_doc)\n test_oplog.doc_manager.upsert(second_doc)\n\n test_oplog.rollback()\n test_oplog.doc_manager.commit()\n results = solr._search()\n\n assert(len(results) == 1)\n\n self.assertEqual(results[0]['name'], 'paulie')\n self.assertTrue(results[0]['_ts'] <= 
bson_ts_to_long(cutoff_ts))\n\n #test_oplog.join()", "def cleanup_funsies(db: Redis[bytes]) -> None:\n queues = rq.Queue.all(connection=db)\n for queue in queues:\n queue.delete(delete_jobs=True)\n\n # Reset operation status\n ops = join(OPERATIONS, hash_t(\"*\"), \"owner\")\n keys = db.keys(ops)\n if len(keys):\n logger.info(f\"clearing {len(keys)} unfinished ops\")\n for k in keys:\n db.delete(k)", "def _simple_deletion(self, operation, labels):\r\n label_strings = []\r\n for label in labels:\r\n if inspect.isclass(label) and issubclass(label, Edge):\r\n label_string = label.get_label()\r\n elif isinstance(label, Edge):\r\n label_string = label.get_label()\r\n label_strings.append(label_string)\r\n\r\n return self._delete_related(operation, label_strings)", "def restore_operation(cls, operation_record):\n classname = operation_record[\"OPE_TYPE\"]\n module = \"\" #TODO Implement modulename from database if Operation belongs to Module\n is_operation_of_module = False\n exec \"\"\"\ntry:\n type(%(class)s)\nexcept NameError,e:\n is_operation_of_module = True\"\"\"%{'class':classname}\n\n if is_operation_of_module:\n exec \"\"\"\nfrom %(module)s import %(class)s\noperation = %(class)s(cls._core)\"\"\"%{'class':classname,'module':module}\n else:\n exec \"\"\"\noperation = %(class)s(cls._core)\"\"\"%{'class':classname}\n\n operation.set_id(operation_record['OPE_ID'])\n db = cls._core.get_db()\n stmnt = \"SELECT OPD_KEY, OPD_VALUE, OPD_TYPE FROM OPERATIONDATA WHERE OPD_OPE_ID = ? ;\"\n cur = db.query(cls._core,stmnt,(operation_record[\"OPE_ID\"],))\n for row in cur.fetchallmap():\n val = row[\"OPD_VALUE\"]\n exec \"\"\"val = %s(val)\"\"\"%row[\"OPD_TYPE\"]\n operation.set_value(row[\"OPD_KEY\"], val)\n return operation", "def test_sub_doc_with_persistence_issues(self):\n\n if self.durability_level.upper() in [\n Bucket.DurabilityLevel.MAJORITY_AND_PERSIST_TO_ACTIVE,\n Bucket.DurabilityLevel.PERSIST_TO_MAJORITY]:\n self.log.critical(\"Test not valid for persistence durability\")\n return\n\n error_sim = dict()\n shell_conn = dict()\n cbstat_obj = dict()\n failover_info = dict()\n vb_info_info = dict()\n active_vbs_in_target_nodes = list()\n failover_info[\"init\"] = dict()\n failover_info[\"afterCrud\"] = dict()\n vb_info_info[\"init\"] = dict()\n vb_info_info[\"afterCrud\"] = dict()\n def_bucket = self.cluster.buckets[0]\n\n load_spec = dict()\n load_spec[\"doc_crud\"] = dict()\n load_spec[\"subdoc_crud\"] = dict()\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.COMMON_DOC_KEY] = \"test_collections\"\n load_spec[\"doc_crud\"][\n MetaCrudParams.DocCrud.READ_PERCENTAGE_PER_COLLECTION] = 50\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.INSERT_PER_COLLECTION] = 20\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.UPSERT_PER_COLLECTION] = 10\n load_spec[\"subdoc_crud\"][\n MetaCrudParams.SubDocCrud.REMOVE_PER_COLLECTION] = 10\n\n self.log.info(\"Selecting nodes to simulate error condition\")\n target_nodes = DurabilityHelper.getTargetNodes(self.cluster,\n self.nodes_init,\n self.num_nodes_affected)\n\n # Create new docs for sub-doc operations to run\n self.load_data_for_sub_doc_ops()\n\n self.log.info(\"Will simulate error condition on %s\" % target_nodes)\n for node in target_nodes:\n # Create shell_connections\n shell_conn[node.ip] = RemoteMachineShellConnection(node)\n cbstat_obj[node.ip] = Cbstats(node)\n active_vbs = cbstat_obj[node.ip] .vbucket_list(def_bucket.name,\n \"active\")\n active_vbs_in_target_nodes += active_vbs\n vb_info_info[\"init\"][node.ip] = 
cbstat_obj[node.ip].vbucket_seqno(\n def_bucket.name)\n failover_info[\"init\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(def_bucket.name)\n\n for node in target_nodes:\n # Perform specified action\n error_sim[node.ip] = CouchbaseError(self.log,\n shell_conn[node.ip],\n node=node)\n error_sim[node.ip].create(self.simulate_error,\n bucket_name=def_bucket.name)\n\n # Perform CRUDs with induced error scenario is active\n self.log.info(\"Perform 'insert', 'upsert', 'remove' mutations\")\n doc_loading_task = \\\n self.bucket_util.run_scenario_from_spec(\n self.task,\n self.cluster,\n self.cluster.buckets,\n load_spec,\n mutation_num=0,\n async_load=True)\n\n # Perform new scope/collection creation during doc ops in parallel\n self.__perform_collection_crud(mutation_num=1)\n\n # Wait for doc_loading to complete and validate the doc ops\n self.task_manager.get_task_result(doc_loading_task)\n if doc_loading_task.result is False:\n self.log_failure(\"Doc CRUDs failed with persistence issue\")\n\n # Revert the induced error condition\n for node in target_nodes:\n error_sim[node.ip].revert(self.simulate_error,\n bucket_name=def_bucket.name)\n\n # Fetch latest failover stats and validate the values are updated\n self.log.info(\"Validating failover and seqno cbstats\")\n for node in target_nodes:\n vb_info_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].vbucket_seqno(def_bucket.name)\n failover_info[\"afterCrud\"][node.ip] = \\\n cbstat_obj[node.ip].failover_stats(def_bucket.name)\n\n # Failover validation\n val = \\\n failover_info[\"init\"][node.ip] \\\n == failover_info[\"afterCrud\"][node.ip]\n self.assertTrue(val, msg=\"Failover stats not updated\")\n\n # Seq_no validation (High level)\n val = \\\n vb_info_info[\"init\"][node.ip] \\\n != vb_info_info[\"afterCrud\"][node.ip]\n self.assertTrue(val, msg=\"vbucket seq_no not updated after CRUDs\")\n\n # Disconnect the shell connection\n for node in target_nodes:\n shell_conn[node.ip].disconnect()\n\n self.validate_test_failure()\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)", "def perform_rollbacks(self, args: Tuple, kwargs: dict) -> None:\n for value in iter_nested_value((args, kwargs)):\n if isinstance(value, Handle):\n self.backend.rollback_handle(value)", "def execute(self, **kwargs):\n # Since node.cluster_id could be reset to '' during action execution,\n # we record it here for policy check and cluster lock release.\n forced = (self.action in [consts.NODE_DELETE, consts.NODE_OPERATION])\n saved_cluster_id = self.entity.cluster_id\n if saved_cluster_id:\n if self.cause == consts.CAUSE_RPC:\n res = senlin_lock.cluster_lock_acquire(\n self.context, self.entity.cluster_id, self.id, self.owner,\n senlin_lock.NODE_SCOPE, False)\n\n if not res:\n return self.RES_RETRY, 'Failed in locking cluster'\n\n try:\n self.policy_check(self.entity.cluster_id, 'BEFORE')\n finally:\n if self.data['status'] != pb.CHECK_OK:\n # Don't emit message since policy_check should have\n # done it\n senlin_lock.cluster_lock_release(\n saved_cluster_id, self.id, senlin_lock.NODE_SCOPE)\n return self.RES_ERROR, ('Policy check: ' +\n self.data['reason'])\n elif self.cause == consts.CAUSE_DERIVED_LCH:\n self.policy_check(saved_cluster_id, 'BEFORE')\n\n try:\n res = senlin_lock.node_lock_acquire(self.context, self.entity.id,\n self.id, self.owner, forced)\n if not res:\n res = self.RES_RETRY\n reason = 'Failed in locking node'\n else:\n res, reason 
= self._execute()\n if saved_cluster_id and self.cause == consts.CAUSE_RPC:\n self.policy_check(saved_cluster_id, 'AFTER')\n if self.data['status'] != pb.CHECK_OK:\n res = self.RES_ERROR\n reason = 'Policy check: ' + self.data['reason']\n finally:\n senlin_lock.node_lock_release(self.entity.id, self.id)\n if saved_cluster_id and self.cause == consts.CAUSE_RPC:\n senlin_lock.cluster_lock_release(saved_cluster_id, self.id,\n senlin_lock.NODE_SCOPE)\n return res, reason", "def cleanup_service_tree():\n r = None\n try:\n r = requests.delete(EtcdController.ETCD_CLEANUP_URL)\n if r and not r.ok:\n assert False,\\\n \"request to cleanup the etcd contents was not successfull: status code {0}\".format(r.status_code)\n except requests.exceptions.RequestException as e:\n assert False, \"exception when cleaning up etcd contents: {0}\".format(e)", "def delete(self) -> None:\n try:\n result_subpath = get_result_subpath(self.db_root, self.cache_path)\n del result_subpath[self.cache_path[-1]]\n except Exception as e:\n raise e\n finally:\n self._close_transaction()", "def libvirt_retry(self, op):\n end_time = time.time() + 30.0\n ignore = [\n # libvirt connection closed for some reason, just retry\n \"Unable to read from monitor: Connection reset by peer\",\n # lxc container starting often fails as they're started\n # simultaneously with the same device names, use a unique\n # name to work around it.\n # http://www.redhat.com/archives/libvir-list/2013-August/msg01475.html\n \"RTNETLINK answers: File exists\",\n ]\n while True:\n try:\n return op()\n except libvirt.libvirtError as error:\n if not any(ignorable in str(error) for ignorable in ignore):\n # some other error, raise immediately\n raise\n\n time_left = max(end_time - time.time(), 0)\n if not time_left:\n # timeout\n raise\n\n self.log.warning(\"got possibly transient error '%s' from libvirt, retrying for %.1fs...\",\n error, time_left)\n time.sleep(1.0)", "def visit_papi_operations(json_metadata: JsonObject,\n call_fn: OperationMappingCallFunction,\n initial_accumulator: Accumulator) -> Accumulator:\n\n accumulator = initial_accumulator\n\n def examine_calls(calls: JsonObject, path_so_far: List[AnyStr]) -> None:\n for call_name in calls:\n attempts = calls[call_name]\n for attempt in attempts:\n operation_id = attempt.get('jobId')\n sub_workflow_metadata = attempt.get('subWorkflowMetadata')\n path = build_call_path(call_name, path_so_far, attempt)\n if operation_id:\n call_fn(accumulator, operation_id, path, attempt)\n if sub_workflow_metadata:\n examine_calls(sub_workflow_metadata.get('calls', {}), path)\n\n def build_call_path(call_name: str, path_so_far: List[AnyStr], attempt: dict) -> List[AnyStr]:\n call_path = path_so_far.copy()\n\n # Remove confusing duplication in subworkflow call names.\n # A parent workflow would name a subworkflow call \"parent_wf.sub_wf\".\n # The subworkflow would name its calls \"sub_wf.sub_call\".\n # If those call components were simply joined the result would be\n # \"parent_wf.sub_wf.sub_wf.sub_call\". This logic removes the duplication of \"sub_wf\",\n # resulting in \"parent_wf.sub_wf.sub_call\".\n deduplicated_call_name = call_name\n if len(path_so_far) > 0:\n this_call_components = call_name.split('.')\n if len(this_call_components) > 1 and path_so_far[-1].endswith('.' 
+ this_call_components[0]):\n deduplicated_call_name = '.'.join(this_call_components[1:])\n\n call_path.append(deduplicated_call_name)\n shard_index = attempt.get('shardIndex', -1)\n if shard_index != -1:\n call_path.append(f\"shard_{shard_index:04d}\")\n\n return call_path\n\n examine_calls(calls=json_metadata.get('calls', {}), path_so_far=[])\n\n return accumulator", "def execute(self, parent_ud = None):\n # Clear the ready event\n self._ready_event.clear()\n \n # Reset child outcomes\n self._child_outcomes = {}\n\n # Copy input keys\n self._copy_input_keys(parent_ud, self.userdata)\n\n # Spew some info\n smach.loginfo(\"Concurrence starting with userdata: \\n\\t%s\" %\n (str(list(self.userdata.keys()))))\n\n # Call start callbacks\n self.call_start_cbs()\n\n # Create all the threads\n for (label, state) in ((k,self._states[k]) for k in self._states):\n # Initialize child outcomes\n self._child_outcomes[label] = None\n self._threads[label] = threading.Thread(\n name='concurrent_split:'+label,\n target=self._state_runner,\n args=(label,))\n\n # Launch threads\n for thread in self._threads.values():\n thread.start()\n \n # Wait for done notification\n self._done_cond.acquire()\n \n # Notify all threads ready to go\n self._ready_event.set()\n \n # Wait for a done notification from a thread\n self._done_cond.wait()\n self._done_cond.release()\n\n # Preempt any running states\n smach.logdebug(\"SMACH Concurrence preempting running states.\")\n for label in self._states:\n if self._child_outcomes[label] == None:\n self._states[label].request_preempt()\n\n # Wait for all states to terminate\n while not smach.is_shutdown():\n if all([not t.is_alive() for t in self._threads.values()]):\n break\n self._done_cond.acquire()\n self._done_cond.wait(0.1)\n self._done_cond.release()\n\n # Check for user code exception\n if self._user_code_exception:\n self._user_code_exception = False\n raise smach.InvalidStateError(\"A concurrent state raised an exception during execution.\")\n\n # Check for preempt\n if self.preempt_requested():\n # initialized serviced flag\n children_preempts_serviced = True\n\n # Service this preempt if \n for (label,state) in ((k,self._states[k]) for k in self._states):\n if state.preempt_requested():\n # Reset the flag\n children_preempts_serviced = False\n # Complain\n smach.logwarn(\"State '%s' in concurrence did not service preempt.\" % label) \n # Recall the preempt if it hasn't been serviced\n state.recall_preempt()\n if children_preempts_serviced:\n smach.loginfo(\"Concurrence serviced preempt.\")\n self.service_preempt()\n\n # Spew some debyg info\n smach.loginfo(\"Concurrent Outcomes: \"+str(self._child_outcomes))\n\n # Initialize the outcome\n outcome = self._default_outcome\n\n # Determine the outcome from the outcome map\n smach.logdebug(\"SMACH Concurrence determining contained state outcomes.\")\n for (container_outcome, outcomes) in ((k,self._outcome_map[k]) for k in self._outcome_map):\n if all([self._child_outcomes[label] == outcomes[label] for label in outcomes]):\n smach.logdebug(\"Terminating concurrent split with mapped outcome.\")\n outcome = container_outcome\n\n # Check outcome callback\n if self._outcome_cb:\n try:\n cb_outcome = self._outcome_cb(copy.copy(self._child_outcomes))\n if cb_outcome:\n if cb_outcome == str(cb_outcome):\n outcome = cb_outcome\n else:\n smach.logerr(\"Outcome callback returned a non-string '%s', using default outcome '%s'\" % (str(cb_outcome), self._default_outcome))\n else:\n smach.logwarn(\"Outcome callback returned None, using 
outcome '%s'\" % outcome)\n except:\n raise smach.InvalidUserCodeError((\"Could not execute outcome callback '%s': \" % self._outcome_cb)+traceback.format_exc())\n\n # Cleanup\n self._threads = {}\n self._child_outcomes = {}\n\n # Call termination callbacks\n self.call_termination_cbs(list(self._states.keys()), outcome)\n\n # Copy output keys\n self._copy_output_keys(self.userdata, parent_ud)\n\n return outcome", "def ks_execute_operation(self):\n if self.ks_sync_orders or self.ks_sync_customers or self.ks_sync_coupons or self.ks_sync_products or \\\n self.ks_sync_attributes or self.ks_sync_product_tags or self.ks_sync_product_category or \\\n self.ks_sync_payment_gateways or self.ks_publish_products or self.ks_unpublish_products or \\\n self.ks_update_customers or self.ks_update_products or self.ks_update_coupons or \\\n self.ks_update_attributes or self.ks_update_category or self.ks_update_tags or \\\n self.ks_update_order_status or self.ks_update_stock or self.ks_import_stock:\n for each_instance in self.ks_woo_instances:\n if each_instance.ks_instance_state == 'active':\n try:\n wcapi = each_instance.ks_api_authentication()\n if wcapi.get(\"\").status_code in [200, 201]:\n if self.ks_sync_attributes:\n _logger.info('Attribute Syncing start For WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n self.env['ks.woo.queue.jobs'].ks_sync_product_attribute_woocommerce_to_queue(\n wcapi=wcapi, instance_id=each_instance)\n if self.ks_sync_product_tags:\n _logger.info('Tag Syncing start For WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n self.env['ks.woo.queue.jobs'].ks_sync_product_tag_to_queue(\n wcapi=wcapi, instance_id=each_instance)\n if self.ks_sync_product_category:\n _logger.info('Category Syncing start For WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n self.env['ks.woo.queue.jobs'].ks_sync_product_category_to_queue(\n wcapi=wcapi, instance_id=each_instance)\n if self.ks_import_stock:\n _logger.info('Stock importing start For WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n self.env['ks.woo.queue.jobs'].ks_import_stock_woocommerce_in_queue(\n wcapi=wcapi, instance_id=each_instance)\n if self.ks_sync_products:\n _logger.info('Product Syncing start For WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n self.env['ks.woo.queue.jobs'].ks_sync_product_woocommerce_in_queue(\n wcapi=wcapi, instance_id=each_instance)\n if self.ks_sync_customers:\n _logger.info('Customer Syncing start For WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n self.env['ks.woo.queue.jobs'].ks_sync_customer_woocommerce_in_queue(\n wcapi=wcapi, instance_id=each_instance)\n if self.ks_sync_coupons:\n _logger.info('Coupon Syncing start For WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n self.env['ks.woo.queue.jobs'].ks_sync_coupon_from_woo_to_queue(\n wcapi=wcapi, instance_id=each_instance)\n if self.ks_sync_payment_gateways:\n _logger.info('Payment Gateway Syncing start For WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n self.env['ks.woo.queue.jobs'].ks_sync_payment_gateway_in_queue(\n wcapi=wcapi, instance_id=each_instance)\n if self.ks_sync_orders:\n _logger.info('Orders Syncing start For WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n self.env['ks.woo.queue.jobs'].ks_sync_sale_order_to_queue(\n wcapi=wcapi, instance_id=each_instance)\n\n # Below 
operations are from odoo to woo\n if self.ks_unpublish_products or self.ks_publish_products:\n if self.ks_publish_products:\n _logger.info('Publishing the products For WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n elif self.ks_unpublish_products:\n _logger.info('UnPublishing the products For WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n product_records = self.env['product.template'].search(\n [('ks_woo_instance_id', '=', each_instance.id),\n ('ks_woo_id', '!=', False)])\n product_records_data = self.env['product.template'].ks_publish_unpublish_data(\n product_records,\n op_type='unpublish' if\n self.ks_unpublish_products else\n 'publish')\n if wcapi.get(\"\").status_code in [200, 201]:\n if len(product_records_data['update']) >= 100:\n no_of_batches=len(product_records_data['update'])//100\n for each_rec in range(0,no_of_batches):\n batch_product_records = product_records_data['update'][0+(100*each_rec):100+ (100*each_rec)]\n batch_product_records_data = {'update': batch_product_records}\n woo_response = wcapi.post(\"products/batch\", batch_product_records_data)\n\n self.ks_batch_update_response(woo_response, each_instance,\n self.ks_unpublish_products)\n\n\n if woo_response.status_code in [200, 201]:\n product_records.write(\n {'ks_woo_status': False if self.ks_unpublish_products else True})\n else:\n self.env['ks.woo.sync.log'].create_log_param(ks_woo_id=False,\n ks_status='success' if wcapi.get(\n \"\").status_code in [200,\n 201] else 'failed',\n ks_type='system_status',\n ks_woo_instance_id=each_instance,\n ks_operation='odoo_to_woo',\n ks_operation_type='connection',\n response='Connection successful' if wcapi.get(\n \"\").status_code in [200,\n 201] else wcapi.get(\n \"\").text)\n\n else:\n woo_response = wcapi.post(\"products/batch\", product_records_data)\n\n self.ks_batch_update_response(woo_response, each_instance,\n self.ks_unpublish_products)\n\n if woo_response.status_code in [200, 201]:\n product_records.write(\n {'ks_woo_status': False if self.ks_unpublish_products else True})\n else:\n self.env['ks.woo.sync.log'].create_log_param(ks_woo_id=False,\n ks_status='success' if wcapi.get(\n \"\").status_code in [200,\n 201] else 'failed',\n ks_type='system_status',\n ks_woo_instance_id=each_instance,\n ks_operation='odoo_to_woo',\n ks_operation_type='connection',\n response='Connection successful' if wcapi.get(\n \"\").status_code in [200,\n 201] else wcapi.get(\n \"\").text)\n\n if self.ks_update_stock:\n _logger.info('Updating Stock of the products on WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n self.env['product.template'].ks_update_product_stock(each_instance, wcapi)\n if self.ks_update_products:\n _logger.info('Updating the products on WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n products_records = self.env['product.template'].search(\n [('ks_woo_instance_id', '=', each_instance.id), ('ks_to_be_export', '!=', False)])\n self.env['ks.woo.queue.jobs'].ks_update_product_to_queue(products_records, each_instance)\n if self.ks_update_attributes:\n _logger.info('Updating the attributes on WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n attributes_records = self.env['product.attribute'].search(\n [('ks_woo_instance_id', '=', each_instance.id), ('ks_woo_id', '!=', -1)])\n self.env['ks.woo.queue.jobs'].ks_update_product_attribute_to_queue(attributes_records, each_instance)\n if self.ks_update_tags:\n 
_logger.info('Updating the tags on WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n tags_records = self.env['ks.woo.product.tag'].search(\n [('ks_woo_instance_id', '=', each_instance.id)])\n self.env['ks.woo.queue.jobs'].ks_update_product_tag_to_queue(tags_records, each_instance)\n if self.ks_update_category:\n _logger.info('Updating the categories on WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n category_records = self.env['product.category'].search(\n [('ks_woo_instance_id', '=', each_instance.id)])\n self.env['ks.woo.queue.jobs'].ks_update_product_category_to_queue(category_records, each_instance)\n if self.ks_update_coupons:\n _logger.info('Updating the coupons on WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n coupons_records = self.env['ks.woo.coupon'].search(\n [('ks_woo_instance_id', '=', each_instance.id)])\n self.env['ks.woo.queue.jobs'].ks_update_coupon_to_queue(coupons_records, each_instance)\n if self.ks_update_customers:\n _logger.info('Updating the customers on WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n customer_records = self.env['res.partner'].search(\n [('ks_woo_instance_id', '=', each_instance.id)])\n self.env['ks.woo.queue.jobs'].ks_update_customer_woocommerce_in_queue(customer_records, each_instance)\n if self.ks_update_order_status:\n _logger.info('Updating the Saler Order status for WooCommerce Instance [%s -(%s)]'\n , each_instance.ks_name, each_instance.id)\n self.env['sale.order'].ks_update_order_status(each_instance, wcapi)\n cron_record = self.env.ref('ks_woocommerce.ks_ir_cron_job_process')\n if cron_record:\n next_exc_time = datetime.now()\n cron_record.sudo().write({'nextcall': next_exc_time, 'active': True})\n else:\n self.env['ks.woo.sync.log'].create_log_param(\n ks_woo_id=False,\n ks_status='success' if wcapi.get(\"\").status_code in [200, 201] else 'failed',\n ks_type='system_status',\n ks_woo_instance_id=each_instance,\n ks_operation='odoo_to_woo',\n ks_operation_type='connection',\n response='Connection successful' if wcapi.get(\"\").status_code in [200,\n 201] else wcapi.get(\n \"\").text\n )\n except ConnectionError:\n self.env['ks.woo.sync.log'].ks_connection_error_log(each_instance, type='system_status',\n operation=False)\n except Exception as e:\n self.env['ks.woo.sync.log'].ks_exception_log(record=False, type=\"system_status\",\n operation_type=False, instance_id=each_instance,\n operation=False, exception=e)\n else:\n return self.env['ks.message.wizard'].ks_pop_up_message(names='Error',\n message=\"WooCommerce instance must be in \"\n \"active state to perform operations.\")\n else:\n raise ValidationError('Please select an operation to Execute..!')\n\n return self.env['ks.message.wizard'].ks_pop_up_message(names='Success', message=\"WooCommerce Operations has \"\n \"been performed, Please refer \"\n \"logs for further details.\")", "def test_stop_process(self):\n error_to_simulate = self.input.param(\"simulate_error\", None)\n target_node = self.getTargetNode()\n remote = RemoteMachineShellConnection(target_node)\n error_sim = CouchbaseError(self.log, remote)\n target_vbuckets = Cbstats(target_node).vbucket_list(\n self.bucket.name, target_node)\n\n bucket_dict = BucketUtils.get_random_collections(\n self.cluster.buckets,\n req_num=1,\n consider_scopes=\"all\",\n consider_buckets=\"all\")\n\n bucket = BucketUtils.get_bucket_obj(self.cluster.buckets,\n bucket_dict.keys()[0])\n scope_name = 
bucket_dict[bucket.name][\"scopes\"].keys()[0]\n collection_name = bucket_dict[bucket.name][\n \"scopes\"][scope_name][\"collections\"].keys()[0]\n scope = BucketUtils.get_scope_obj(\n bucket, scope_name)\n collection = BucketUtils.get_collection_obj(scope, collection_name)\n\n if len(target_vbuckets) == 0:\n self.log.error(\"No target vbucket list generated to load data\")\n remote.disconnect()\n return\n\n self.start_doc_loading_tasks(target_vbuckets, scope_name, collection)\n\n # Induce the error condition\n error_sim.create(error_to_simulate)\n\n if self.allowed_hosts:\n self.set_allowed_hosts()\n\n self.sleep(20, \"Wait before reverting the error condition\")\n # Revert the simulated error condition and close the ssh session\n error_sim.revert(error_to_simulate)\n remote.disconnect()\n\n # Wait for doc loading task to complete\n self.task.jython_task_manager.get_task_result(self.doc_loading_task)\n if self.atomicity:\n self.task.jython_task_manager.get_task_result(\n self.transaction_load_task)\n elif self.N1qltxn:\n self.task.jython_task_manager.get_task_result(\n self.N1ql_load_task)\n\n if len(self.doc_loading_task.fail.keys()) != 0:\n if self.target_node == \"active\" or self.num_replicas in [2, 3]:\n self.log_failure(\"Unwanted failures for keys: %s\"\n % self.doc_loading_task.fail.keys())\n\n validate_passed = \\\n self.durability_helper.validate_durability_exception(\n self.doc_loading_task.fail,\n SDKException.DurabilityAmbiguousException)\n if not validate_passed:\n self.log_failure(\"Unwanted exception seen during validation\")\n\n # Get SDK client for CRUD retries\n sdk_client = self.sdk_client_pool.get_client_for_bucket(self.bucket)\n for doc_key, crud_result in self.doc_loading_task.fail.items():\n result = sdk_client.crud(DocLoading.Bucket.DocOps.CREATE,\n doc_key,\n crud_result[\"value\"],\n replicate_to=self.replicate_to,\n persist_to=self.persist_to,\n durability=self.durability_level,\n timeout=self.sdk_timeout)\n if result[\"status\"] is False:\n self.log_failure(\"Retry of doc_key %s failed: %s\"\n % (doc_key, result[\"error\"]))\n # Close the SDK connection\n self.sdk_client_pool.release_client(sdk_client)\n\n self.validate_test_failure()\n\n self.bucket_util._wait_for_stats_all_buckets(self.cluster,\n self.cluster.buckets)\n # Update self.num_items and validate docs per collection\n if not self.N1qltxn and self.atomicity is False:\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)", "def transaction(fn):\n @wraps(fn)\n def transaction_inner(*args, **kwargs): #1\n start = time()\n stmp_id = id_gen()\n session = operation.session\n sessionid = id(session)\n \n # set distributed transaction id to 0 for single transaction\n try:\n operation.id\n except: \n operation.id = str(uuid4())\n \n try:\n # get runtime info\n cp = current_process()\n ct = current_thread() \n \n # format request params\n params = []\n for item in args:\n params.append(unicode(item))\n for k,v in kwargs.iteritems():\n params.append(u\"'%s':'%s'\" % (k, v))\n \n # call internal function\n res = fn(*args, **kwargs)\n \n session.commit()\n elapsed = round(time() - start, 4)\n logger.debug(u'%s.%s - %s - transaction - %s - %s - OK - %s' % (\n operation.id, stmp_id, sessionid, fn.__name__, \n params, elapsed))\n \n return res\n except ModelError as ex:\n elapsed = round(time() - start, 4)\n logger.error(u'%s.%s - %s - transaction - %s - %s - KO - %s' % (\n operation.id, stmp_id, sessionid, fn.__name__, \n params, elapsed))\n if ex.code not in [409]:\n #logger.error(ex.desc, 
exc_info=1)\n logger.error(ex.desc)\n \n session.rollback()\n raise TransactionError(ex.desc, code=ex.code)\n except IntegrityError as ex:\n elapsed = round(time() - start, 4)\n logger.error(u'%s.%s - %s - transaction - %s - %s - KO - %s' % (\n operation.id, stmp_id, sessionid, fn.__name__, \n params, elapsed))\n logger.error(ex.orig, exc_info=1)\n logger.error(ex.orig)\n\n session.rollback()\n raise TransactionError(ex.orig)\n except DBAPIError as ex:\n elapsed = round(time() - start, 4)\n logger.error(u'%s.%s - %s - transaction - %s - %s - KO - %s' % (\n operation.id, stmp_id, sessionid, fn.__name__, \n params, elapsed))\n #logger.error(ex.orig, exc_info=1)\n logger.error(ex.orig)\n \n session.rollback()\n raise TransactionError(ex.orig)\n \n except Exception as ex:\n elapsed = round(time() - start, 4)\n logger.error(u'%s.%s - %s - transaction - %s - %s - KO - %s' % (\n operation.id, stmp_id, sessionid, fn.__name__, \n params, elapsed))\n #logger.error(ex, exc_info=1)\n logger.error(ex)\n \n session.rollback()\n raise TransactionError(ex)\n\n return transaction_inner", "def __exit__(self, exception_type, exception_val, trace):\n if not exception_type:\n self.commit()\n else:\n self.rollback()\n self.close()", "def main(self):\n self.delete_details()\n self.delete_cleaned()\n self.vacuum()", "def process(introspection_data):\n unprocessed_data = copy.deepcopy(introspection_data)\n failures = []\n _run_pre_hooks(introspection_data, failures)\n node_info = _find_node_info(introspection_data, failures)\n if node_info:\n # Locking is already done in find_node() but may be not done in a\n # node_not_found hook\n node_info.acquire_lock()\n if failures or node_info is None:\n msg = _('The following failures happened during running '\n 'pre-processing hooks:\\n%s') % '\\n'.join(failures)\n if node_info is not None:\n node_info.finished(istate.Events.error, error='\\n'.join(failures))\n _store_logs(introspection_data, node_info)\n raise utils.Error(msg, node_info=node_info, data=introspection_data)\n LOG.info('Matching node is %s', node_info.uuid,\n node_info=node_info, data=introspection_data)\n\n if node_info.finished_at is not None:\n # race condition or introspection canceled\n raise utils.Error(_('Node processing already finished with '\n 'error: %s') % node_info.error,\n node_info=node_info, code=400)\n # NOTE(TheJulia): this was previously called as a background\n # process, but we can't do that with sqlite.\n _store_unprocessed_data(node_info.uuid, unprocessed_data)\n\n try:\n node = node_info.node()\n except ir_utils.NotFound as exc:\n with excutils.save_and_reraise_exception():\n node_info.finished(istate.Events.error, error=str(exc))\n _store_logs(introspection_data, node_info)\n\n try:\n result = _process_node(node_info, node, introspection_data)\n except utils.Error as exc:\n node_info.finished(istate.Events.error, error=str(exc))\n with excutils.save_and_reraise_exception():\n _store_logs(introspection_data, node_info)\n except Exception as exc:\n LOG.exception('Unexpected exception during processing')\n msg = _('Unexpected exception %(exc_class)s during processing: '\n '%(error)s') % {'exc_class': exc.__class__.__name__,\n 'error': exc}\n node_info.finished(istate.Events.error, error=msg)\n _store_logs(introspection_data, node_info)\n raise utils.Error(msg, node_info=node_info, data=introspection_data,\n code=500)\n\n if CONF.processing.always_store_ramdisk_logs:\n _store_logs(introspection_data, node_info)\n return result", "def test_transaction_explitic_canceling(self):\n\n proxy 
= self.node.get_proxy('/')\n\n # look under the hood to verify that branches are added\n # recursively\n _latest_root_rev = self.node._branches[None].latest\n adapter_node = _latest_root_rev._children['adapters'][2].node\n self.assertEqual(len(self.node._branches.keys()), 1)\n self.assertEqual(len(adapter_node._branches.keys()), 1)\n\n tx = proxy.open_transaction()\n self.assertEqual(len(self.node._branches.keys()), 2)\n self.assertEqual(len(adapter_node._branches.keys()), 1)\n\n self.make_change(tx, '/adapters/2', 'config.log_level', 4)\n\n self.assertEqual(len(self.node._branches.keys()), 2)\n self.assertEqual(len(adapter_node._branches.keys()), 2)\n\n tx.cancel()\n\n self.assertEqual(len(self.node._branches.keys()), 1)\n self.assertEqual(len(adapter_node._branches.keys()), 1)", "def select_execute2(self):\n for _ in range(self.times_b4_data_lost):\n try:\n cursor = self.conn.cursor()\n # execute(text, 700sec)\n cursor.execute(\n \"SELECT containerID FROM `StaticModule-Files`.ContainersTasks WHERE timestamp < NOW() - INTERVAL %s SECOND;\",\n (cf.config_get_param(\"Script\", \"container_timeout_to_kill_sec\"),))\n\n # extract all remaining strings from response\n res = cursor.fetchall()\n if res:\n commands = [\"docker\", \"rm\", \"-f\"]\n for container in res:\n # commands == docker rm -f cowrie ntp dns\n commands.append(container[0])\n try:\n # run command and return output (res; CalledProcessError if exitcode != 0) -> then strip output\n output = subprocess.check_output(commands, universal_newlines=True,\n stderr=subprocess.STDOUT).strip()\n except subprocess.CalledProcessError as e:\n logging.error(e)\n output = e.output.strip()\n logging.error(\"Stucked containers was deleted: \" + output)\n # output = \", \".join([\"'{}'\".format(i) for i in output.splitlines()])\n # re.findall(r\"Error: No such container: (.*)\")\n # execute(text, 700sec * 3)\n cursor.execute(\n \"DELETE FROM `StaticModule-Files`.ContainersTasks WHERE timestamp < NOW() - INTERVAL %s SECOND;\",\n (cf.config_get_param(\"Script\", \"container_timeout_to_kill_sec\") * 3,))\n cursor.close()\n except Exception as e:\n logging.exception(e)\n self.create_connection()\n else:\n break\n finally:\n time.sleep(5)\n # queue.put(\"chkcontainers\")", "async def run_no_retry(\n self,\n ctx: BaseInputSetContext,\n octx: BaseOrchestratorContext,\n operation: Operation,\n inputs: Dict[str, Any],\n ) -> Union[bool, Dict[str, Any]]:\n # Check that our network contains the operation\n await self.ensure_contains(operation)\n # Create an opimp context and run the operation\n async with self.operations[operation.instance_name](\n ctx, octx\n ) as opctx:\n self.logger.debug(\"---\")\n self.logger.debug(\n \"Stage: %s: %s\",\n operation.stage.value.upper(),\n operation.instance_name,\n )\n str_inputs = str(inputs)\n self.logger.debug(\n \"Inputs: %s\",\n str_inputs\n if len(str_inputs) < 512\n else (str_inputs[:512] + \"...\"),\n )\n self.logger.debug(\n \"Conditions: %s\",\n dict(\n zip(\n map(\n lambda condition: condition.name,\n operation.conditions,\n ),\n ([True] * len(operation.conditions)),\n )\n ),\n )\n outputs = await opctx.run(inputs)\n str_outputs = str(outputs)\n self.logger.debug(\n \"Outputs: %s\",\n str_outputs\n if len(str_outputs) < 512\n else (str_outputs[:512] + \"...\"),\n )\n self.logger.debug(\"---\")\n return outputs", "def test_crash_process(self):\n def_bucket = self.cluster.buckets[0]\n target_node = self.getTargetNode()\n remote = RemoteMachineShellConnection(target_node)\n target_vbuckets = range(0, 
self.cluster.vbuckets)\n retry_exceptions = list()\n self.transaction_load_task = None\n self.doc_loading_task = None\n self.N1ql_load_task = None\n\n # If Memcached is killed, we should not perform KV ops on\n # particular node. If not we can target all nodes for KV operation.\n if self.process_name == \"memcached\":\n target_vbuckets = Cbstats(target_node).vbucket_list(\n def_bucket.name, self.target_node)\n if self.target_node == \"active\":\n retry_exceptions = [SDKException.TimeoutException]\n if len(target_vbuckets) == 0:\n self.log.error(\"No target vbucket list generated to load data\")\n remote.disconnect()\n return\n\n bucket_dict = BucketUtils.get_random_collections(\n self.cluster.buckets,\n req_num=1,\n consider_scopes=\"all\",\n consider_buckets=\"all\")\n\n bucket = BucketUtils.get_bucket_obj(self.cluster.buckets,\n bucket_dict.keys()[0])\n scope_name = bucket_dict[bucket.name][\"scopes\"].keys()[0]\n collection_name = bucket_dict[bucket.name][\n \"scopes\"][scope_name][\"collections\"].keys()[0]\n scope = BucketUtils.get_scope_obj(\n bucket, scope_name)\n collection = BucketUtils.get_collection_obj(\n scope, collection_name)\n\n self.start_doc_loading_tasks(target_vbuckets, scope_name, collection)\n\n task_info = dict()\n task_info[self.doc_loading_task] = \\\n self.bucket_util.get_doc_op_info_dict(\n def_bucket, DocLoading.Bucket.DocOps.CREATE, 0,\n replicate_to=self.replicate_to, persist_to=self.persist_to,\n durability=self.durability_level,\n timeout=self.sdk_timeout, time_unit=\"seconds\",\n retry_exceptions=retry_exceptions)\n\n self.sleep(10, \"Wait for doc_ops to start\")\n self.log.info(\"Killing {0}:{1} on node {2}\"\n .format(self.process_name, self.service_name,\n target_node.ip))\n remote.kill_process(self.process_name, self.service_name,\n signum=signum[self.sig_type])\n remote.disconnect()\n # Wait for tasks completion and validate failures\n if self.transaction_load_task:\n self.task.jython_task_manager.get_task_result(\n self.transaction_load_task)\n if self.N1qltxn:\n self.task.jython_task_manager.get_task_result(\n self.N1ql_load_task)\n self.task_manager.get_task_result(self.doc_loading_task)\n self.bucket_util.verify_doc_op_task_exceptions(task_info,\n self.cluster)\n self.bucket_util.log_doc_ops_task_failures(task_info)\n\n # Verification stats\n verification_dict = dict()\n verification_dict[\"ops_create\"] = 2*self.num_items\n verification_dict[\"sync_write_aborted_count\"] = 0\n verification_dict[\"rollback_item_count\"] = 0\n verification_dict[\"pending_writes\"] = 0\n if self.__is_sync_write_enabled:\n verification_dict[\"sync_write_committed_count\"] = 2*self.num_items\n\n if self.bucket_type == Bucket.Type.EPHEMERAL \\\n and self.process_name == \"memcached\":\n result = self.task.rebalance(self.cluster, [], [])\n self.assertTrue(result, \"Rebalance failed\")\n\n # Validate doc count\n if self.process_name != \"memcached\":\n stats_failed = \\\n self.durability_helper.verify_vbucket_details_stats(\n def_bucket, self.cluster_util.get_kv_nodes(self.cluster),\n vbuckets=self.cluster.vbuckets,\n expected_val=verification_dict)\n if stats_failed:\n self.fail(\"Cbstats verification failed\")\n\n # Doc count validation per collection\n if not self.N1qltxn and self.atomicity is False:\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)", "def roll_back_demo():\n # return harvey rupp to belmont hill team\n bh = Team.query.get(161)\n print(f'retrieved {bh}')\n hr = Runner.query.get(1700)\n print(f'retrieved {hr}')\n if bh not in 
hr.teams:\n bh.runners.append(hr)\n db.session.commit()\n\n # set primary_key values below which will be untouched\n first_deleted_race = 19\n first_deleted_runner = 3712\n first_deleted_result = 4750\n first_deleted_school = 68\n first_deleted_team = 315\n first_deleted_location = 8\n first_deleted_course = 9\n first_deleted_league = 4\n\n # do not allow unless user is administrator\n if not current_user.is_administrator():\n return redirect(url_for('races.results', race_id=race.id))\n\n # delete races and associated results for races in delete range\n races = Race.query.all()\n for race in races:\n if race.id >= first_deleted_race:\n delete_race_by_id(race.id)\n\n # disassociate runners from teams and delete\n teams = Team.query.all()\n for team in teams:\n if team.id >= first_deleted_team:\n team.runners.clear()\n db.session.commit()\n\n runners = Runner.query.all()\n for runner in runners:\n if runner.id >= first_deleted_runner:\n db.session.delete(runner)\n db.session.commit()\n\n # delete teams\n for team in teams:\n if team.id >= first_deleted_team:\n db.session.delete(team)\n db.session.commit()\n\n # delete courses\n courses = Course.query.all()\n for course in courses:\n if course.id >= first_deleted_course:\n db.session.delete(course)\n db.session.commit()\n\n # disassociate locaions from schools and delete\n schools = School.query.all()\n for school in schools:\n if school.id >= first_deleted_school:\n school.locations.clear()\n db.session.commit()\n\n locations = Location.query.all()\n for location in locations:\n if location.id >= first_deleted_location:\n db.session.delete(location)\n db.session.commit()\n\n # disassociate schools from leagues and delete\n leagues = League.query.all()\n for league in leagues:\n if league.id >= first_deleted_league:\n league.schools.clear()\n db.session.commit()\n\n for school in schools:\n if school.id >= first_deleted_school:\n db.session.delete(school)\n db.session.commit()\n\n # delete leagues\n for league in leagues:\n if league.id >= first_deleted_league:\n db.session.delete(league)\n db.session.commit()\n\n # recalculate all runners seed times\n async_update_all_seed_times.delay()\n\n # update league standings via background task\n for league_id in [1, 2]:\n async_update_league_standings.delay(league_id=league_id)\n return redirect(url_for('core.index'))", "def _run_operation(self, operation, logdir):\n op_start_time = datetime.datetime.now()\n drush_cmd = subprocess.Popen([self.drupal.drush_bin,\n \"--root=\" + self.drupal.root,\n \"--uri=\" + self.drupal.uri,\n \"maps-import\",\n str(self.id),\n \"--op=\" + operation],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n (drush_out, drush_err) = drush_cmd.communicate()\n op_end_time = datetime.datetime.now()\n\n self._log_operation(operation, logdir,\n drush_out, drush_err)\n self._update_operation_state(operation, op_start_time, op_end_time)", "def test_dependencies(self):\n process_parent = Process.objects.filter(slug=\"test-dependency-parent\").latest()\n process_child = Process.objects.filter(slug=\"test-dependency-child\").latest()\n data_parent = Data.objects.create(\n name=\"Test parent\", contributor=self.contributor, process=process_parent\n )\n data_child1 = Data.objects.create(\n name=\"Test child\",\n contributor=self.contributor,\n process=process_child,\n input={},\n )\n data_child2 = Data.objects.create(\n name=\"Test child\",\n contributor=self.contributor,\n process=process_child,\n input={\"parent\": data_parent.pk},\n )\n data_child3 = Data.objects.create(\n 
name=\"Test child\",\n contributor=self.contributor,\n process=process_child,\n input={\"parent\": None},\n )\n\n data_parent.refresh_from_db()\n data_child1.refresh_from_db()\n data_child2.refresh_from_db()\n data_child3.refresh_from_db()\n\n # Check locks are created in manager.\n self.assertFalse(data_parent.access_logs.exists())\n self.assertFalse(data_child1.access_logs.exists())\n self.assertTrue(data_child2.access_logs.exists())\n self.assertFalse(data_child3.access_logs.exists())\n\n # Check that the data_parent location was locked.\n access_log = data_child2.access_logs.get()\n self.assertEqual(\n access_log.storage_location.file_storage.data.get().id, data_parent.id\n )\n # Check that the log is released.\n self.assertIsNotNone(access_log.started)\n self.assertIsNotNone(access_log.finished)\n\n # Check status.\n self.assertEqual(data_parent.status, Data.STATUS_DONE)\n self.assertEqual(data_child1.status, Data.STATUS_DONE)\n self.assertEqual(data_child2.status, Data.STATUS_DONE)\n self.assertEqual(data_child3.status, Data.STATUS_DONE)", "def test_operational_error_asis(self):\n\n matched = self._run_test(\n \"mysql\", \"select some_operational_error\",\n self.OperationalError(\"some op error\"),\n sqla.exc.OperationalError\n )\n self.assertSQLAException(\n matched,\n \"OperationalError\", \"some op error\"\n )", "def _execute(self,\n native,\n command,\n data=None,\n returning=True,\n mapper=dict):\n if data is None:\n data = {}\n\n with native.cursor() as cursor:\n log.debug('***********************')\n log.debug(command % data)\n log.debug('***********************')\n\n try:\n rowcount = 0\n for cmd in command.split(';'):\n cmd = cmd.strip()\n if cmd:\n cursor.execute(cmd.strip(';') + ';', data)\n rowcount += cursor.rowcount\n\n # look for a disconnection error\n except pymysql.InterfaceError:\n raise orb.errors.ConnectionLost()\n\n # look for integrity errors\n except (pymysql.IntegrityError, pymysql.OperationalError) as err:\n native.rollback()\n\n # look for a duplicate error\n if err[0] == 1062:\n raise orb.errors.DuplicateEntryFound(err[1])\n\n # look for a reference error\n reference_error = re.search('Key .* is still referenced from table \".*\"', nstr(err))\n if reference_error:\n msg = 'Cannot remove this record, it is still being referenced.'\n raise orb.errors.CannotDelete(msg)\n\n # unknown error\n log.debug(traceback.print_exc())\n raise orb.errors.QueryFailed(command, data, nstr(err))\n\n # connection has closed underneath the hood\n except pymysql.Error as err:\n native.rollback()\n log.error(traceback.print_exc())\n raise orb.errors.QueryFailed(command, data, nstr(err))\n\n try:\n raw = cursor.fetchall()\n results = [mapper(record) for record in raw]\n except pymysql.ProgrammingError:\n results = []\n\n return results, rowcount", "def rollback(self):\n self.remove_repo_config()\n self.remove_repos()\n\n self.supervisor.start_all_services()", "def operation_on_events(path_to_data_dir, list_of_test_id, operation, num_of_proc=1):\n\ttest_id = [\"test%s\"%i for i in list_of_test_id]\n\tpool = Pool(processes = num_of_proc)\n\tpath_to_final_selected_events = path_to_data_dir + \"final_selected_events.json\"\n\tif os.path.exists(path_to_final_selected_events):\n\t\tfinal_selected_events = json.load(open(path_to_final_selected_events,\"r\"))\n\t\tfinal_interested_events = []\n\t\tfor event in final_selected_events:\n\t\t\tif event[0] in test_id:\n\t\t\t\tfinal_interested_events.append(event)\n\telse:\n\t\tfinal_interested_events = []\n\t\tfor test in 
list_of_test_id:\n\t\t\tpath_to_curr_test = data_dir_to_test_dir(path_to_data_dir, test)\n\t\t\tpath_to_test_result = path_to_curr_test +\"/results\"\n\t\t\tpath_to_event_list = path_to_test_result + \"/selected_events.json\"\n\t\t\tif os.path.exists(path_to_event_list):\n\t\t\t\tevent_list = json.load(open(path_to_event_list,\"r\"))\n\t\t\t\tfor value in event_list.values():\n\t\t\t\t\tevent = [\"test%s\"%test,[value[0],value[1],value[2]]]\n\t\t\t\t\tfinal_interested_events.append(event)\n\t\t\telse:\n\t\t\t\tprint \"skip current test:\", \"test%s\"%test, \"there is no selected events\"\n\t\n\t# if function operation has no return value, it will return a list of Nones\n\tresult_list = pool.map(operation,final_interested_events)\n\treturn result_list", "async def execute(self, child_order):\r\n print(f\"Emir is executing, {child_order.parent_order_no}-{child_order.sliced_no}\")\r\n try:\r\n POV_CALCULATOR = self.POV_CALCULATOR(symbol=child_order.security_id,\r\n var_storages=self.VAR_STORAGES,\r\n last_n_minutes=child_order.parent_slice_interval)\r\n await POV_CALCULATOR.calculate_pov()\r\n\r\n child_order.trade(twap_=TWAP_CALCULATOR.twap_val)\r\n\r\n if child_order.order_no is not None and child_order.status == 1:\r\n with await self.algo_parents_lock:\r\n related_parent = self.ORDER_STORAGES.algo_parents[child_order.parent_order_no]\r\n related_parent.child_orders_islem.update({child_order.sliced_no: child_order})\r\n\r\n except Exception as e:\r\n print(str(e))", "def test_delete_complex_tree_06(comp):\n comp.delete(9)\n assert tuple(comp.in_order()) == (4, 6, 7, 8, 10, 11, 12, 13, 14, 15)\n assert tuple(comp.breadth_first()) == (11, 8, 13, 6, 10, 12, 14, 4, 7, 15)", "def apply_decisions(base, decisions):\n\n merged = copy.deepcopy(base)\n prev_path = None\n parent = None\n last_key = None\n resolved = None\n diffs = None\n # clear_parent actions should override other decisions on same obj, so\n # we need to track it\n clear_parent_flag = False\n for md in decisions:\n path, line = split_string_path(merged, md.common_path)\n # We patch all decisions with the same path in one op\n if path == prev_path:\n # Same path as previous, collect entry\n if clear_parent_flag:\n # Another entry will clear the parent, all other decisions\n # should be dropped\n pass\n else:\n if md.action == \"clear_parent\":\n clear_parent_flag = True\n # Clear any exisiting decsions!\n diffs = []\n ad = resolve_action(resolved, md)\n if line:\n ad = push_path(line, ad)\n diffs.extend(ad)\n\n else:\n # Different path, start a new collection\n if prev_path is not None:\n # First, apply previous diffs\n if parent is None:\n # Operations on root create new merged object\n merged = patch(resolved, diffs)\n else:\n # If not, overwrite entry in parent (which is an entry in\n # merged). 
This is ok, as no paths should point to\n # subobjects of the patched object\n parent[last_key] = patch(resolved, diffs)\n\n prev_path = path\n # Resolve path in base and output\n resolved = merged\n parent = None\n last_key = None\n for key in path:\n parent = resolved\n resolved = resolved[key] # Should raise if key missing\n last_key = key\n diffs = resolve_action(resolved, md)\n if line:\n diffs = push_path(line, diffs)\n clear_parent_flag = md.action == \"clear_parent\"\n # Apply the last collection of diffs, if present (same as above)\n if prev_path is not None:\n if parent is None:\n merged = patch(resolved, diffs)\n else:\n parent[last_key] = patch(resolved, diffs)\n\n merged = nbformat.from_dict(merged)\n return merged", "def _on_task_fail(self, exc):\n LOG.info(\"We could cleanup some resources or log the error\")\n raise exc", "def applyBatch(self, authority, operations):\n pass", "def _baseline_context(self) -> Iterator[None]:\n repo = get_git_repo()\n\n if not repo:\n yield\n return\n\n self._abort_on_pending_changes()\n self._abort_on_conflicting_untracked_paths()\n\n current_tree = git(\"write-tree\").stdout.decode().strip()\n try:\n for a in self._status.added:\n a.unlink()\n git.checkout(self._base_commit, \"--\", \".\")\n yield\n finally:\n # git checkout will fail if the checked-out index deletes all files in the repo\n # In this case, we still want to continue without error.\n # Note that we have no good way of detecting this issue without inspecting the checkout output\n # message, which means we are fragile with respect to git version here.\n try:\n git.checkout(current_tree.strip(), \"--\", \".\")\n except sh.ErrorReturnCode as error:\n output = error.stderr.decode()\n if (\n output\n and len(output) >= 2\n and \"pathspec '.' did not match any file(s) known to git\"\n in output.strip()\n ):\n debug_echo(\n \"Restoring git index failed due to total repository deletion; skipping checkout\"\n )\n else:\n raise ActionFailure(\n f\"Fatal error restoring Git state; please restore your repository state manually:\\n{output}\"\n )\n\n if self._status.removed:\n # Need to check if file exists since it is possible file was deleted\n # in both the base and head. Only call if there are files to delete\n to_remove = [r for r in self._status.removed if r.exists()]\n if to_remove:\n git.rm(\"-f\", *(str(r) for r in to_remove))", "def process(self):\n try:\n return self.process_throw()\n except PreflightException as pe:\n self.context.record_push_rejected_p4key(pe)\n # Unnecessary, but nice-to-have a quieter log: don't cause a later\n # push/pull to try to roll back this push when we KNOW we told Git\n # it was unacceptable and won't need a rollback.\n PRLFile(self.context.config.repo_name).delete()\n raise", "def test_delete_complex_tree_02(comp):\n comp.delete(4)\n assert tuple(comp.in_order()) == (6, 7, 8, 9, 10, 11, 12, 13, 14, 15)\n assert tuple(comp.breadth_first()) == (11, 8, 13, 6, 10, 12, 14, 7, 9, 15)", "def RmTreeOnError(function, path, excinfo):\n\n exception_type = excinfo[0]\n exception_value = excinfo[1]\n # If shutil.rmtree encounters a symbolic link on Windows, os.listdir will\n # fail with a WindowsError exception with an ENOENT errno (i.e., file not\n # found). We'll ignore that error. 
Note that WindowsError is not defined\n # for non-Windows platforms, so we use OSError (of which it is a subclass)\n # to avoid lint complaints about an undefined global on non-Windows\n # platforms.\n if (function is os.listdir) and issubclass(exception_type, OSError):\n if exception_value.errno == errno.ENOENT:\n # File does not exist, and we're trying to delete, so we can ignore the\n # failure.\n print 'WARNING: Failed to list %s during rmtree. Ignoring.\\n' % path\n else:\n raise\n else:\n raise", "def RmTreeOnError(function, path, excinfo):\n\n exception_type = excinfo[0]\n exception_value = excinfo[1]\n # If shutil.rmtree encounters a symbolic link on Windows, os.listdir will\n # fail with a WindowsError exception with an ENOENT errno (i.e., file not\n # found). We'll ignore that error. Note that WindowsError is not defined\n # for non-Windows platforms, so we use OSError (of which it is a subclass)\n # to avoid lint complaints about an undefined global on non-Windows\n # platforms.\n if (function is os.listdir) and issubclass(exception_type, OSError):\n if exception_value.errno == errno.ENOENT:\n # File does not exist, and we're trying to delete, so we can ignore the\n # failure.\n print('WARNING: Failed to list %s during rmtree. Ignoring.\\n' % path)\n else:\n raise\n else:\n raise", "def processEscrows(self):\n\n try:\n self.processOutOfOrders()\n self.processPartials()\n self.processDuplicitous()\n self.processUnverifieds()\n self.processTransUnverifieds()\n\n except Exception as ex: # log diagnostics errors etc\n if logger.isEnabledFor(logging.DEBUG):\n logger.exception(\"Kevery escrow process error: %s\\n\", ex.args[0])\n else:\n logger.error(\"Kevery escrow process error: %s\\n\", ex.args[0])", "def test_callbacks_tied_to_execute(self):\r\n\r\n call_history = []\r\n def my_callback(*args, **kwargs):\r\n call_history.append(args)\r\n\r\n with BatchQuery() as batch:\r\n batch.add_callback(my_callback)\r\n pass\r\n\r\n assert len(call_history) == 1\r\n\r\n class SomeError(Exception):\r\n pass\r\n\r\n with self.assertRaises(SomeError):\r\n with BatchQuery() as batch:\r\n batch.add_callback(my_callback)\r\n # this error bubbling up through context manager\r\n # should prevent callback runs (along with b.execute())\r\n raise SomeError\r\n\r\n # still same call history. 
Nothing added\r\n assert len(call_history) == 1\r\n\r\n # but if execute ran, even with an error bubbling through\r\n # the callbacks also would have fired\r\n with self.assertRaises(SomeError):\r\n with BatchQuery(execute_on_exception=True) as batch:\r\n batch.add_callback(my_callback)\r\n # this error bubbling up through context manager\r\n # should prevent callback runs (along with b.execute())\r\n raise SomeError\r\n\r\n # still same call history\r\n assert len(call_history) == 2", "def run(operation):\n result_file = RES_FMT.format(operation)\n rm_file(result_file)\n cmd = CMD_FMT.format(operation, file=result_file)\n print(\"Running:\", cmd)\n call(cmd, shell=True)", "def test_flush_wrapper_plain_integrity_error(self):\n\n _session = self.sessionmaker()\n\n with _session.begin():\n foo = self.Foo(counter=1)\n _session.add(foo)\n\n _session.begin()\n self.addCleanup(_session.rollback)\n foo = self.Foo(counter=None)\n _session.add(foo)\n self.assertRaises(exception.DBError, _session.flush)", "def test_delete_complex_tree_05(comp):\n comp.delete(8)\n assert tuple(comp.in_order()) == (4, 6, 7, 9, 10, 11, 12, 13, 14, 15)\n assert tuple(comp.breadth_first()) == (11, 9, 13, 6, 10, 12, 14, 4, 7, 15)", "def executemany(self, operation, seq_of_parameters):\r\n if self._closed:\r\n raise Error('The cursor has been closed.')\r\n if self.connection._closed:\r\n raise Error('The connection to the database has been closed.')\r\n for parameters in seq_of_parameters:\r\n self.execute(operation, parameters)", "def __exit__(self, exc_type, exc_val, exc_tb):\n session = self.session\n # cancel pending changes\n session.rollback()\n # if self._acquire_lock:\n # # remove the lock\n # session.delete_lock()\n session.close()", "def bulk_del_from_table(cursor, operation, cnx, \n mute=True, autocommit=True): \n op_list=re.split(';\\s*',operation)\n count=0 \n for op in op_list:\n if mute:\n try:\n cursor.execute(op)\n if autocommit:\n if not cnx.autocommit:\n cnx.commit()\n result = cursor\n count += result.rowcount\n print(\"Warnings: \" + str(result.fetchwarnings()) + \".\")\n except mysql.connector.Error as err:\n print(err.msg + \".\")\n else:\n try:\n cursor.execute(op)\n if autocommit:\n if not cnx.autocommit:\n cnx.commit()\n result = cursor\n print(f\"Number of rows affected by statement '\"\n f\"{result.statement}': {result.rowcount}.\")\n print(\"Warnings: \" + str(result.fetchwarnings()) + \".\")\n except mysql.connector.Error as err:\n print(err.msg + \".\")\n if mute:\n print(f\"Number of rows affected by statement: {count}.\")\n print(\"Done Deleting.\")", "def deleteAllRecords(self):\n\n with self.getConnection() as conn:\n try:\n cur = conn.cursor()\n cur.execute(\"delete from process_run\")\n\n except sqlite3.Error as error:\n cur.close()\n raise workflowException('Database {0}: {1}'.format(self.wfname, repr(error)))\n return", "def run(conf: CephCIConfig) -> None:\n # Always check if the operation is cleanup first before proceeding with workflow\n if conf.get(\"cleanup\"):\n delete_vms(conf[\"cleanup\"])\n delete_volumes(conf[\"cleanup\"])\n return\n\n delete_vms()\n\n LOG.info(\"Successfully completed the execution\")" ]
[ "0.6481785", "0.6258558", "0.6212218", "0.5927201", "0.5507661", "0.5277229", "0.5242107", "0.5207474", "0.51513904", "0.5112994", "0.5106702", "0.5086462", "0.50150937", "0.4995987", "0.49878561", "0.497371", "0.49726665", "0.4905259", "0.49032328", "0.48811585", "0.48808104", "0.48575678", "0.484289", "0.48146537", "0.48051938", "0.48050416", "0.4778383", "0.47707385", "0.4760732", "0.47510603", "0.47347632", "0.47325256", "0.47256956", "0.47255197", "0.47197932", "0.4719221", "0.47091547", "0.47060147", "0.47027314", "0.46845126", "0.46769968", "0.46668708", "0.46625668", "0.4661613", "0.46534523", "0.46496204", "0.46478564", "0.46446416", "0.46393034", "0.46346518", "0.46315435", "0.4628266", "0.46276546", "0.46132836", "0.46127632", "0.4600412", "0.45978954", "0.45971456", "0.45891342", "0.4586372", "0.45849743", "0.45845285", "0.45788026", "0.45716408", "0.45704317", "0.4568658", "0.45621192", "0.45617002", "0.45582712", "0.4555496", "0.45438775", "0.4542692", "0.45337412", "0.45324418", "0.45222872", "0.45217106", "0.45182535", "0.45181647", "0.45131224", "0.45030937", "0.45016328", "0.4499054", "0.4495411", "0.44949973", "0.4493625", "0.4492254", "0.44892412", "0.44856206", "0.44821602", "0.4480267", "0.44773108", "0.44743904", "0.44742173", "0.4466779", "0.44634488", "0.44616857", "0.44605157", "0.4460242", "0.44590414", "0.44583768" ]
0.766613
0
Sets the status of the next toplevel operation to 1 (ACTIVE). Fetches the next toplevel operation from the database and applies a FILESYSTEM LOCK, which is /tmp/scv_operating.lck
def process_next(cls): db = cls._core.get_db() configuration = cls._core.get_configuration() if os.path.exists(configuration.get_entry("core.webpath")+"/scv_operating.lck"): return False lockfile = open(configuration.get_entry("core.webpath")+"/scv_operating.lck","w") lockfile.close() stmnt_lock = "UPDATE OPERATIONS SET OPE_STATUS = 1 \ WHERE OPE_ID IN ( \ SELECT OPE_ID FROM OPERATIONS \ WHERE OPE_OPE_PARENT IS NULL AND OPE_STATUS = 0 \ AND OPE_INVOKED = ( \ SELECT MIN(OPE_INVOKED) FROM OPERATIONS \ WHERE OPE_OPE_PARENT IS NULL AND OPE_STATUS = 0) \ ) ;" stmnt = "SELECT OPE_ID, OPE_TYPE FROM OPERATIONS WHERE OPE_OPE_PARENT IS NULL AND OPE_STATUS = 1 ;" db.query(cls._core,stmnt_lock,commit=True) cur = db.query(cls._core,stmnt) res = cur.fetchallmap() if len(res) > 0: operation = cls.restore_operation(res[0]) try: cls.process_children(operation) operation.do_workload() except Exception, e: stmnt_err = "UPDATE OPERATIONS SET OPE_STATUS = 2 WHERE OPE_ID = ? ;" db.query(cls._core,stmnt_err,(operation.get_id(),),commit=True) error = StringIO() print_exc(None,error) cls._core.log(error.getvalue()) ret = True else: ret = False stmnt_delete = "DELETE FROM OPERATIONS WHERE OPE_STATUS = 1 ;" db.query(cls._core,stmnt_delete,commit=True) db.commit() try: os.unlink(configuration.get_entry("core.webpath")+"/scv_operating.lck") except OSError,e : raise OperationException(OperationException.get_msg(0)) return ret
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def arm_oplock_future(self):\n self.oplock_future = self.tree.session.client.oplock_break_future(self.file_id)", "def processLock(self):\r\n self.controller.executionLock()", "def active(value):\r\n self.context.active = threading.BoundedSemaphore(value=value)", "def state_wait_enter(cfg, app, win):", "def attempt_to_acquire_leader(self, permanent=False):", "def Operational(self):\r\n self.LogDebug(\"Port Operational - {}\".format(datetime.now().isoformat(\" \")))\r\n\r\n if \"SFTPConf\" in self.ConfigDict:\r\n self.SFTPConf = self.ConfigDict[\"SFTPConf\"]\r\n self.LogDebug(str(self.SFTPConf))\r\n\r\n # initialise the status of the binaries to be monitored\r\n for index in self.SFTPConf[\"IndexList\"]:\r\n self.binary[index] = {'status': False, 'start_time': None}\r\n\r\n try:\r\n with open('list_backup', 'r') as f:\r\n file_data = json.loads(f.read())\r\n self.file_list = [datetime.strptime(x, \"%Y-%m-%dT%H:%M:%S\") for x in file_data]\r\n self.LogDebug(\"Port Operational - File list loaded from backup file: {}\".format(file_data))\r\n except:\r\n self.LogDebug(\"Port Operational - no backup list to load\")\r\n\r\n # start the file retrieval timer\r\n timer_duration_ms = self.TimerDuration()\r\n odc.SetTimer(self.guid, 1, timer_duration_ms) \r\n next_timer = datetime.now()+timedelta(seconds=timer_duration_ms/1000)\r\n self.LogDebug(\"Next file retrieval: {}\".format(next_timer.isoformat(\" \")))\r\n\r\n return", "def retry_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 2 ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.retry_operation(row[\"OPE_ID\"])\n\n stmnt = \"UPDATE OPERATIONS SET OPE_STATUS = 0 WHERE OPE_ID = ? AND OPE_STATUS = 2 ;\"\n db.query(cls._core,stmnt,(operation_id,),commit=True)", "def state_processing_enter(cfg, app, win):", "def lock_control(self):\n raise NotImplementedError('PlatformService: Implementation incomplete')", "def gate(self):\n locked = self.is_locked()\n if locked:\n self.PAUSED() # pause at locked gate\n self.fsm_gate.wait() # wait for gate to unlock\n self.CONTINUE() # continue through open gate", "def _lock(self):\n import os\n from time import sleep\n # Waits until another process completes it's process\n while os.path.isfile(self.db_path+\".lock\"):\n print(\"Another process is using\",\n self.db_path, \". 
Waiting for release.\")\n sleep(1)\n with open(self.db_path+\".lock\", 'w') as fp:\n pass", "def open(self, wait=True):\n if self.SE == 6:\n self.evr.polarity.put('VAL', 1, use_complete=True)\n else:\n self.RESET_PG = 0\n if self._follower_mode:\n self.follower_mode()\n self.evr.polarity.put('VAL', 1, use_complete=True)\n else:\n self.records.S_OPEN.put('VAL', 1, use_complete=True, wait=wait)", "def wm(self):\n return self.get_par(\"readback\")", "def _doLid1State(self, state = True):\n if state:\n self._executeServerTask(self._cmdOpenLid1)\n else:\n self._executeServerTask(self._cmdCloseLid1)", "def status(self) -> NoReturn:\n\n curr_status= self.percent_done()\n while(curr_status < 100):\n\n update_status(name=self.name, status=curr_status)\n time.sleep(0.5)\n\n curr_status = self.percent_done()\n\n update_status(name=self.name, status=curr_status)", "def set_lock_status(use_lock):\r\n get_lock.lock_is_enabled = use_lock", "def check_current_lock(con, host, warning, critical,perf_data):\n warning = warning or 10\n critical = critical or 30\n data=get_server_status(con)\n\n lockTime=float(data['globalLock']['lockTime']) \n totalTime=float(data['globalLock']['totalTime']) \n\n err,delta=maintain_delta([totalTime,lockTime],host,\"locktime\") \n if err==0: \n lock_percentage = delta[2]/delta[1]*100 #lockTime/totalTime*100\n message = \"Current Lock Percentage: %.2f%%\" % lock_percentage\n message+=performance_data(perf_data,[(\"%.2f\" % lock_percentage,\"current_lock_percentage\",warning,critical)])\n return check_levels(lock_percentage,warning,critical,message)\n else :\n return exit_with_general_warning(\"problem reading data from temp file\")", "def local(self):\n logging.info(__name__ + ' : Set control to local & locked')\n self.set_remote_status(0)", "def reqNodeStatus(self):\n while self.status != Modem.Status.IDLE :\n sleep(0.1)\n if self.status != Modem.Status.IDLE:\n raise ValueError(\"Modem getNodeStatus unexpected status: \\\n \" + str(self.status))\n self.status = Modem.Status.BUSY2REQ\n self.send(self.interpreter.buildGetStatus())\n while self.status != Modem.Status.IDLE and self.status != Modem.Status.KILL:\n sleep(self.m_to)\n #self.recvCommand()\n if self.status == Modem.Status.KILL:\n return self.close()\n return self.errorCheck()", "def os_start_db( self, ):\r\n pass", "def i_am_locking(self):\r\n pass", "def next_status(self):\n if self.status == self.DRAFT:\n self._advance_to_registration()\n elif self.status == self.REGISTRATION:\n self._advance_to_pending()\n elif self.status == self.PENDING:\n self._advance_to_running()", "def lock_table(self):\n\n self.status = 'Locked'", "def _handler_acquiring_status_enter(self):\n self._async_raise_fsm_event(ProtocolEvent.ACQUIRE_STATUS_ASYNC)", "async def resume_operations(self):\n await asyncio.sleep(10)\n for op in await self.get_service('data_svc').locate('operations', match=dict(finish=None)):\n self.loop.create_task(self.run_operation(op))", "def exec_dopq_lock_state(self, is_enqueue_update):\n if is_enqueue_update:\n self.dopq_wrp_obj.lock_update(True)\n else:\n self.dopq_wrp_obj.lock_update(False)\n\n print(\"(Data Platform)--> lock state: \", self.dopq_wrp_obj.lock_state)", "def main(self):\n debug(\"Using %s\" % (self.PROC_DISKSTATS))\n\n initial = self.get_status()\n time.sleep(self.interval)\n final = self.get_status()\n\n # Get bytes/sec\n for d in self.partitions:\n r_diff = ((final[d].r_sectors - initial[d].r_sectors) * self.sector_size) / self.interval\n w_diff = ((final[d].w_sectors - initial[d].w_sectors) * 
self.sector_size) / self.interval\n final[d].r_rate = r_diff\n final[d].w_rate = w_diff\n \n # Status string\n msg = \" \".join([ \"%s (r: %d KB/s, w: %d KB/s)\" % (i.dev, i.r_rate / 1024, i.w_rate / 1024) for i in sorted(final.values(), key=lambda x:x.dev) ])\n performance = \" \".join([ \"'%s read'=%d '%s write'=%d\" % (i.dev, i.r_rate, i.dev, i.w_rate) for i in sorted(final.values(), key=lambda x:x.dev) ])\n\n return (EX_OK, msg, performance)", "def acquire(self, access_mode=None):", "def test_locked_file_03(self):\n \n f = open(\"tests/locked.db3\", \"a+\")\n fcntl.lockf(f.fileno(), fcntl.LOCK_EX) \n \n x = subprocess.Popen([\"sqlbak\", \"tests\", \"--ms-towait=4000\"],\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT)\n\n time.sleep(3)\n fcntl.lockf(f.fileno(), fcntl.LOCK_UN)\n f.close()\n\n result = x.communicate()[0]\n\n self.assertTrue(\"cannot obtain lock\" not in result)", "def update_sys_resource():\n\n cpu_cores = get_cpu_cores()\n logger.debug(\"starting top module\")\n cpu_usage = get_cpu_usage()\n mem_usage = get_mem_usage()\n df_usage = get_df_usage()\n logger.debug(\"round instrument data ready, next is top 5data\")\n fields = [\n 'check_time', 'cpu_usage', 'cpu_all', 'cpu_using', 'mem_usage',\n 'mem_all', 'mem_using', 'disk_usage', 'disk_all', 'disk_using',\n 'cpu_topN', 'mem_topN', 'disk_topN', 'net_in_topN', 'net_out_topN'\n ]\n # result = {}\n # result.fromkeys(field, None)\n result = {i: None for i in fields}\n result['check_time'] = int(time.time())\n result['cpu_all'] = cpu_cores\n result['cpu_usage'] = cpu_usage\n result['mem_all'], result['mem_using'] = mem_usage\n result['disk_all'], result['disk_using'] = df_usage\n try:\n result['mem_usage'] = result['mem_using'] / result['mem_all']\n except ZeroDivisionError:\n result['mem_usage'] = 0.0\n try:\n result['disk_usage'] = result['disk_using'] / result['disk_all']\n except ZeroDivisionError:\n result['disk_usage'] = 0.0\n result['cpu_topN'] = get_topN_cpu()\n net_topn_data = get_topN_netIO()\n mnd_topn_data = get_topN_mnd()\n result[\"mem_topN\"] = mnd_topn_data[\"mem.bytes.memavailable\"]\n result[\"disk_topN\"] = mnd_topn_data[\"df.bytes.used\"]\n result[\"net_in_topN\"] = net_topn_data[\"cluster.net.dev.receive\"]\n result[\"net_out_topN\"] = net_topn_data[\"cluster.net.dev.transmit\"]\n # print(result)\n send_to_db('argus-statistics', 'sys_resource', result)\n logger.debug(\"update is already success\")", "def rest_api_status(self):\n with self.resource_lock:\n pass", "def open(self):\n \n return self.set_level('up')", "def idle():", "def __init__(self, width, height):\n # Initialize the tk root window\n self._controller = Controller(self)\n self.root = tk.Tk()\n self.init_dx = width\n self.init_dy = height\n self.root.geometry(\"%dx%d+50+30\"%(self.init_dx, self.init_dy))\n self.root.title(\"RDM user interface version: 1.0\")\n self.root.maxsize(1600, 900)\n self.root.lift()\n self.root.update_idletasks()\n # Assigning fields\n self.universe = tk.IntVar(self.root)\n self.universe.set(1)\n self.universe_list = [1, 2, 3, 4, 5]\n self.cur_uid = None\n self.id_state = tk.IntVar(self.root)\n self.auto_disc = tk.BooleanVar(self.root)\n self.id_state.set(0)\n# self.state = 0\n self._uid_dict = {}\n \n # Call initialing functions\n self._pid_store = PidStore.GetStore()\n self.ola_thread = olathread.OLAThread(self._pid_store)\n self.ola_thread.start()\n self.build_frames()\n self.build_cntrl()\n self._notebook = notebook.RDMNotebook(self.root, self._controller)\n self.discover()\n self.auto_disc.set(False)\n\n 
print \"currently in thread: %d\"%threading.currentThread().ident\n time.sleep(1)\n print \"back from sleep\"", "def resetOperationCount():\n global _operationCount\n _countLock.acquire()\n try:\n _operationCount = 0\n finally:\n _countLock.release()", "def getNextOrderNum(cur,vID):\n orderNum = execute_query(cur,\"\"\"SELECT Count(*) FROM OpenTasks where vID = ?\"\"\", [vID])[0][0]\n orderNum = int(orderNum) + 1\n return orderNum", "def state_wait_do(cfg, app, win, events):", "def wait_for_status(self, status):\n code = self.instance.state['Code']\n while code != status:\n time.sleep(3)\n self.instance.reload()\n code = self.instance.state['Code']", "def set_valve_open(status):\n # Initialize variables.\n global tank_valve_open\n global valve_open_time\n global valve_open_seconds\n\n tank_valve_open = status\n if tank_valve_open:\n valve_open_time = time.time()\n else:\n valve_open_seconds = time.time() - valve_open_time\n\n # Turn on/off the LED.\n led_pin.value(tank_valve_open)", "def vcac_getvm_sr_status(self, serv_req):\n self.reqdata=serv_req\n #Keep requesting the status of the deployment and break when the process is no longer \"IN_PROGRESS\"\n flag=1\n mailer=0\n s_once=1\t\t\n while flag:\n mailer += 1\n start = time.time()\t\t\n #sleep(10)\n try:\n jfile=self.data['rundir'] + '/' + self.reqdata + '.json'\n vrapath=BASE_DIR + '/' + 'tools/vracc/bin/'\n cmd = \"cd %s && ./cloudclient.sh vra request detail --id %s \" \\\n \"--format JSON --export %s\" % \\\n ( vrapath, self.reqdata, jfile )\n logging.info(\"- vcac cloudclient monitor \" \\\n \"request id \" + self.reqdata + \" status\")\n request = execute_action(cmd)\n except APIError, e:\n print \"Found error## vcac_getvm_sr_status: %s\" % str(e)\n sys.exit(1)\n\t\t\t\t\n\t\t\t# check file exist and not empty\n if os.path.exists(jfile) and os.stat(jfile).st_size > 0:\n with open(jfile) as data_file:\n\t\t\t\t requestData = json.load(data_file)\n if requestData['state'] == \"SUCCESSFUL\":\n flag=0\n self.gtintval=mailer\n tdate=str(datetime.timedelta(seconds=self.gtintval))\n print \"\\n\"\n print \"SR Reached: %s (HH:MM:SS)\\n\" % tdate\n print \"SR [ %s ] done, status changed from \" \\\n \"IN_PROGRESS to %s\\n\" % \\\n ( requestData['requestNumber'], requestData['state'])\n print \"\\n\"\n break\n\n #Work out of the task failed and if not set \n #the state variable\n if requestData['state'] == \"PROVIDER_FAILED\" or \\\n requestData['state'] == \"FAILED\":\n state = requestData['state']\n reason = requestData['requestCompletion']['completionDetails']\n print \"- vcac cloudclient ERROR: %s\" % state\n ops=\"\"\n self.update_helpdesk(requestData)\n # Need to add some valuable failed data and do not exit.\n #sys.exit(\" - CLOUDCLIENT ERROR: \" + state)\n return requestData\n\n end = time.time()\n g=str(datetime.timedelta(seconds=(end - start)))\n parts=g.split(\":\")\n seconds = int(parts[0])*(60*60) + \\\n int(parts[1])*60 + \\\n float(parts[2])\n time.sleep(60.0)\n mailer = mailer + seconds\n mailer = mailer + 60\n logging.debug('mailer count %s' % mailer)\n if int(mailer) >= 7200 and s_once:\n print \"\\n\"\n print \"%s\\n\" % msgtext\n try:\n print \"Sending notification to IT for \", \\\n \"service request: %s\\n\" % requestData['requestNumber']\n print \"\\n\"\n self.ops='gen'\n self.notify_user(requestData, self.ops)\n logging.info('Notification send ......')\n except:\n pass\n s_once=0\n continue\n else:\n logging.info('No need to send notification ......')\n\n logging.info(\"- vcac cloudclient request \" \\\n \"status : 
%s\" % ( requestData['state'] ))\n \n return requestData", "def refresh_status() -> None:\n ...", "def reactivate(self):\n self.write({'active': True, 'state': 'running'})\n STAGE = self.env['anytracker.stage']\n for ticket in self:\n starts = STAGE.search([('method_id', '=', ticket.method_id.id),\n ('progress', '=', 0)])\n if len(starts) != 1:\n raise except_orm(\n _('Configuration error !'),\n _('One and only one stage should have a 0% progress'))\n # write stage in a separate line to recompute progress & risk\n ticket.write({'stage_id': starts[0].id})\n self.recompute_parents()", "def on_oplock_break(self, cb):\n\n def simple_handle_break(op, smb_res, cb_ctx):\n \"\"\"\n note that op is not used in this callback,\n since it already closes over self\n \"\"\"\n notify = smb_res[0]\n if self.oplock_level != smb2.SMB2_OPLOCK_LEVEL_II:\n ack = self.channel.oplock_break_acknowledgement(self, smb_res)\n ack.oplock_level = cb(notify.oplock_level)\n ack_res = self.channel.connection.transceive(ack.parent.parent)[0][0]\n if ack.oplock_level != smb2.SMB2_OPLOCK_LEVEL_NONE:\n self.arm_oplock_future()\n self.on_oplock_break(cb)\n self.oplock_level = ack_res.oplock_level\n else:\n self.oplock_level = notify.oplock_level\n\n self.on_oplock_break_request(simple_handle_break)", "def action_lock(self):\n self.state = 'locked'", "def acquire_lock (self):\n\n self._exec (self.select)\n self.locked = True", "def current_operation(self):\n return self.state", "def f_lock(self):\n self._locked = True", "def run(self):\n operation_manager = self._core.get_operation_manager()\n while True:\n while operation_manager.process_next():\n pass\n sleep(2)", "def open(self):\n self.solenoid.set(self.OPEN)", "def _StatusUpdateThreadMain(self):\n while self._status_update_active:\n self._UpdateStatus()\n time.sleep(self._status_update_interval)", "def main():\r\n while 1:\r\n response = s.displaymainmenu()\r\n if response!=QUIT:\r\n if response == FIRSTTIME:\r\n print 'firsttime'\r\n s.dresumed = FALSE # this is not a resumed session. Need new user data (full name, etc).\r\n else: s.dresumed = TRUE\r\n response = s.getuserdata() #get user data. If s.dresumed, this also loads saved data, session position, session label.\r\n\r\n if not s.dresumed: #need to set up initial sessionlists (resumed sessions should have stored values)\r\n if s.sessiontype == 'var':\r\n print s.sessiontype\r\n s.sessionlist = VARSESSIONLIST\r\n s.sortlist = TRUE\r\n else:\r\n s.sessionlist = SESSIONLIST\r\n \r\n if response != MYQUIT:\r\n if s.dresumed:\r\n print 'MAIN: attempting to run %s, with sessiontype %s' %(s.dlabel, s.sessiontype)\r\n #s.dtrialblocksize = 2 #settings are retained for subsequent sessions.\r\n #s.dintermissiondelay = 10\r\n \r\n setPerSessionValues()\r\n \r\n retval = s.run() #run the session (er, go through the directory of images corresponding to a particular label)\r\n #...need to do other sessions as necessary\r\n if s.dcompleted and (retval != MYQUIT): \r\n startpos = s.sessionlist.index(s.dlabel) #find how far they've progressed through SESSIONLIST\r\n if not ((startpos + 1) == len(s.sessionlist)): #if not last item in list\r\n continuex(s.sessionlist[startpos + 1:])\r\n else: #we're done.\r\n pass\r\n \r\n else: #start fresh with practice. 
(full sessionlist)\r\n continuex(s.sessionlist)\r\n else: #exit program\r\n return", "def wait_for_update(self):\n while \"updating_db\" in self.status():\n time.sleep(1)", "def test_001_start(self):\n HEADING()\n self.db.start()\n up = self.db.isup()\n result = up\n assert result", "def doCurrentTask(self,):\n \n #\n # Imports\n #\n import sys\n \n #\n # Set the database path\n #\n self.database = Database(self.analysisPath+'/dataSettingsAndResults.db')\n\n #\n # call current command\n #\n try:\n self.availableCommands[self.command].__call__()\n #except AttributeError:\n # print 'ERROR: the command \"'+self.command+'\" is not implemented yet try again in the future.'\n # sys.exit(1)\n except KeyError:\n print 'ERROR: command is not valid.\\nAvialable commands are: '+', '.join(self.availableCommands.keys()[:-1])+' and '+self.availableCommands.keys()[-1]+'.\\nUse: \"SEAseq2 help\" to get help\\n'\n return 1", "def _get_lsp_frr_operational_status_active(self):\n return self.__lsp_frr_operational_status_active", "def test_locked_file_01(self):\n \n f = open(\"tests/locked.db3\", \"a+\")\n fcntl.lockf(f.fileno(), fcntl.LOCK_EX) \n \n x = subprocess.Popen([\"sqlbak\", \"tests\"],\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT)\n\n time.sleep(1)\n fcntl.lockf(f.fileno(), fcntl.LOCK_UN)\n f.close()\n\n result = x.communicate()[0]\n\n self.assertTrue(\"cannot obtain lock\" not in result)", "def check_active(self):\n sql = '''select to_char(case when inst_cnt > 0 then 1 else 0 end, \n 'FM99999999999999990') retvalue from (select count(*) inst_cnt \n from v$instance where status = 'OPEN' and logins = 'ALLOWED' \n and database_status = 'ACTIVE')'''\n self.cur.execute(sql)\n res = self.cur.fetchall()\n for i in res:\n print(i[0])", "def test_locked_file_02(self):\n \n f = open(\"tests/locked.db3\", \"a+\")\n fcntl.lockf(f.fileno(), fcntl.LOCK_EX) \n \n x = subprocess.Popen([\"sqlbak\", \"tests\"],\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT)\n\n time.sleep(3)\n fcntl.lockf(f.fileno(), fcntl.LOCK_UN)\n f.close()\n\n result = x.communicate()[0]\n\n self.assertTrue(\"cannot obtain lock\" in result)\n self.assertTrue(\"lock held by pid %d\" % os.getpid() in result)", "def _doLid2State(self, state = True):\n if state:\n self._executeServerTask(self._cmdOpenLid2)\n else:\n self._executeServerTask(self._cmdCloseLid2)", "def check_lock_server(self):\n file_locked = True\n while file_locked:\n response = requests.get(LOCK_SERVER_ADDR+\"getLockStatus\", {'file_path': self.filename, 'user_id': self.user_id})\n if response.json()['file_locked']:\n file_locked = True\n time.sleep(5)\n else:\n file_locked = False\n return", "def open(self):\n self.close() \n self.readDataFromFile() \n self.resetWriteCount()\n \n taskMgr.remove('%s-syncTask'%(self.className,))\n t = taskMgr.add(self.syncTask,'%s-syncTask'%(self.className,))\n t.timeElapsed = 0.0", "def half_open(self) -> None:\n with self._lock:\n self.state = self._state_storage.state = STATE_HALF_OPEN # type: ignore[assignment]", "def execute_handler_action(self):\n try:\n # fetch seq_no\n self.seq_no = self.ext_config_settings_handler.get_seq_no(is_enable_request=True)\n if self.seq_no is None:\n self.logger.log_error(\"Sequence number for current operation not found\")\n exit(Constants.ExitCode.ConfigurationError)\n\n # read status file, to load any preserve existing context\n self.ext_output_status_handler.read_file(self.seq_no)\n\n config_settings = self.ext_config_settings_handler.read_file(self.seq_no)\n\n # set activity_id in telemetry\n if 
self.telemetry_writer is not None:\n self.telemetry_writer.set_operation_id(config_settings.__getattribute__(self.config_public_settings.activity_id))\n\n operation = config_settings.__getattribute__(self.config_public_settings.operation)\n\n # Allow only certain operations\n if operation not in [Constants.NOOPERATION, Constants.ASSESSMENT, Constants.INSTALLATION, Constants.CONFIGURE_PATCHING]:\n self.logger.log_error(\"Requested operation is not supported by the extension\")\n self.ext_output_status_handler.write_status_file(operation, self.seq_no, status=Constants.Status.Error.lower(), message=\"Requested operation {0} is not supported by the extension\".format(str(operation)), code=Constants.ExitCode.OperationNotSupported)\n exit(Constants.ExitCode.OperationNotSupported)\n\n prev_patch_max_end_time = self.cmd_exec_start_time + datetime.timedelta(hours=0, minutes=Constants.ENABLE_MAX_RUNTIME)\n self.ext_state_handler.create_file(self.seq_no, operation, prev_patch_max_end_time)\n core_state_content = self.core_state_handler.read_file()\n\n # log tmp folder size\n self.ext_env_handler.log_temp_folder_details()\n\n # if NoOperation is requested, terminate all running processes from previous operation and update status file\n if operation == Constants.NOOPERATION:\n self.process_nooperation(config_settings, core_state_content)\n else:\n # if any of the other operations are requested, verify if request is a new request or a re-enable, by comparing sequence number from the prev request and current one\n if core_state_content is None or core_state_content.__getattribute__(self.core_state_fields.number) is None:\n # first patch request for the VM\n self.logger.log(\"No state information was found for any previous patch operation. Launching a new patch operation.\")\n self.launch_new_process(config_settings, create_status_output_file=True)\n else:\n if int(core_state_content.__getattribute__(self.core_state_fields.number)) != int(self.seq_no):\n # new request\n self.process_enable_request(config_settings, prev_patch_max_end_time, core_state_content)\n else:\n # re-enable request\n self.process_reenable_request(config_settings, core_state_content)\n\n except Exception as error:\n self.logger.log_error(\"Failed to execute enable. 
[Exception={0}]\".format(repr(error)))\n raise", "def run(self):\n\n # Run bom compare if selected\n if self.root.compare_select.get():\n print('Starting BOM Compare')\n self.ccl.set_bom_compare(self.root.bom_compare_old, self.root.bom_compare_new)\n self.ccl.save_compare(self.root.bom_compare_save)\n progressbar.add_current(1)\n print('BOM Compare finished')\n # Run CCL Update\n # Note that ccl update is ran again even if already run once, could be room for improvement\n if self.root.update_select.get():\n print('Starting to update the CCL')\n self.ccl.ccl_docx = self.root.ccl_update_loc\n self.ccl.set_bom_compare(self.root.ccl_update_old, self.root.ccl_update_new)\n self.ccl.update_ccl(self.root.ccl_update_save_loc)\n print('CCL Has been updated and saved')\n progressbar.add_current(1)\n # Collect documents\n if self.root.docs_select.get():\n print('Collecting Documents')\n self.ccl.ccl_docx = self.root.docs_ccl\n self.ccl.path_checks = self.root.docs_paths\n self.ccl.path_ccl_data = self.root.docs_savedir\n self.ccl.username = self.root.docs_user\n self.ccl.password = self.root.docs_pass\n self.ccl.collect_documents(headless=self.root.headless.get())\n print('Documents have been successfully collected')\n # Progressbar progress will be updated in the filehandler module\n # Collect documents\n if self.root.ills_select.get():\n print('Starting to Collect Illustrations')\n self.ccl.ccl_docx = self.root.ill_ccl\n self.ccl.path_ccl_data = self.root.ill_scan\n self.ccl.path_illustration = self.root.ill_save\n self.ccl.collect_illustrations()\n self.ccl.insert_illustration_data(self.root.ill_cclsave)\n print('Illustrations have been collected and CCL has been updated')\n # Progressbar progress will be updated in the CCL module\n # Progress bar final update after all process has finished\n self.progressbar['value'] = progressbar.total\n self.progress_label.config(text='Done')\n print('FINISHED!')", "def getSystemAwake(self):\n print 'start of getSystemAwak() system_awake = {0}'.format(self.system_awake) # TESTING ++++++++++++++++\n try:\n self.db = shelve.open(os.path.join(self.xlocal, 'Launch Manager Utils\\\\launch.data'))\n if self.db['system_awake'] == False:\n print 'start of if true - getSystemAwak() system_awake = {0}'.format(self.system_awake) # TESTING ++++++++++++++++\n self.system_awake = self.db['system_awake']\n self.db.close()\n else:\n self.system_awake = True\n self.db['system_awake'] = self.system_awake\n self.db.close()\n \n print 'End of getSystemAwak() system_awake = {0}'.format(self.system_awake) # TESTING ++++++++++++++++\n \n except Exception, e:\n self.log_file.logEntry('{0}\\nUnable to load previous system_awake value, setting value to True'.format(e))\n self.system_awake = True", "def thread_status(self,status): # general function to get datas/infos from all threads back to the main\n if status[0]==\"Update_Status\":\n if len(status)>2:\n self.update_status(status[1],wait_time=self.wait_time,log_type=status[2])\n else:\n self.update_status(status[1],wait_time=self.wait_time)\n\n elif status[0]==\"Update_scan_index\":\n #status[1] = [ind_scan,ind_average]\n self.ind_scan=status[1][0]\n self.ui.indice_scan_sb.setValue(status[1][0])\n self.ind_average = status[1][1]\n self.ui.indice_average_sb.setValue(status[1][1])\n\n elif status[0]==\"Scan_done\":\n self.ui.scan_done_LED.set_as_true()\n self.save_scan()\n if not self.overshoot:\n self.set_ini_positions()\n self.ui.set_scan_pb.setEnabled(True)\n self.ui.set_ini_positions_pb.setEnabled(True)\n 
self.ui.start_scan_pb.setEnabled(True)\n elif status[0]==\"Timeout\":\n self.ui.log_message.setText('Timeout occurred')", "def change_status(self):\n if self.status == 'in progress':\n self.status = 'done'\n return self.status\n elif self.status == 'done':\n self.status = 'in progress'\n self.eisenhower_priority()\n return self.status", "def standby() -> None:", "def setstatus(self, status):\n with self.lock:\n self.status = status", "def your_process(seconds):\r\n global STATUS\r\n sleep(seconds)\r\n STATUS = True", "def hardwareConcurrency(self):\n return 1", "def _run_next_state(self):\n if self.state != \"STOP\":\n self.state = self.get_state_info(\"next\")\n self._run_state()", "def next_state(self):\n\n # Increases current path index\n self.current_state_index += 1\n\n # Retrieves the current state in the path and updates it\n self.status = self.path_states[self.current_state_index]", "def get_acquire_status(self, update=0):\r\n if (update != 0): self.pvs.read.putw(1)\r\n acquiring = self.pvs.acquiring.getw()\r\n return acquiring", "def open(self):\n\n self.st_time = time.strftime('%H:%M %A %d %B')\n self.is_active = True", "def lock(self):\n\n self.wait = True", "def startworking():\r\n #In the future have the manager program or from the website implement this arguments to a route\r\n #the program will download the file from the website\r\n global exe_name\r\n global Task_Conditional\r\n task_data = None\r\n while task_data is None:\r\n task_data = recieve_data_from_server(\"get_task\")\r\n if task_data is None:\r\n time.sleep(5)\r\n else:\r\n exe_name = task_data[\"exe_name\"]\r\n print('Working on the task \"{}\"'.format(exe_name))\r\n get_file(exe_name)\r\n Task_Conditional = task_data[\"Task_conditional\"]\r\n print(\"loading\")\r\n t1 = time.time()\r\n task_divider(task_data[\"first_num\"], task_data[\"last_num\"])\r\n t2 = time.time()\r\n print(\"ready {}\".format(t2-t1))", "def get_block(blk):\n global active_block_queries\n #This one is for processing the results from get_block\n def process_block(event, client):\n \"\"\"Process the result from block getting request.\"\"\"\n global active_block_queries\n global nextblock\n global sync_block\n active_block_queries = active_block_queries - 1\n if event != None:\n if sync_block != None and blk >= sync_block:\n sync_block = None\n #Itterate over all operations in the block.\n for t in event[\"transactions\"]:\n for o in t[\"operations\"]:\n #We are only interested in downvotes\n if o[0] == \"vote\" and o[1][\"weight\"] < 0:\n #Call process_vote for each downvote\n process_vote(o[1],client)\n #fetching network clients alive.\n get_block(nextblock)\n nextblock = nextblock + 1\n if active_block_queries < 8:\n treshold = active_block_queries * 20\n behind = (dt.utcnow() - dateutil.parser.parse(event[\"timestamp\"])).seconds\n if behind >= treshold:\n print(\"Behind\",behind,\"seconds while\",active_block_queries,\"queries active. 
Treshold =\",treshold)\n print(\"Spinning up an extra parallel query loop.\")\n get_block(nextblock)\n nextblock = nextblock + 1\n else:\n if sync_block == None or blk <= sync_block:\n sync_block = blk\n get_block(blk)\n else:\n print(\"Overshot sync_block\")\n if active_block_queries == 0:\n print(\"Keeping one loop alive\")\n get_block(blk)\n else:\n print(\"Scaling down paralel HTTPS queries\",active_block_queries)\n #Create a new JSON-RPC entry on the queue to fetch a block.\n opp = rpcclient.condenser_api.get_block(blk)\n active_block_queries = active_block_queries + 1\n #Bind the above closure to the result of get_block\n opp.on_result(process_block)", "def __nextTask(self):\n self.activeWindow().nextTask()", "def refresh_status(self):\n\n pass", "def lock(self):\r\n out = self._authsvn('lock').strip()\r\n if not out:\r\n # warning or error, raise exception\r\n raise Exception(out[4:])", "def top():\n print (\"\")\n double_line()\n print (\"Starting sp_controller...\")", "def monitor(state: int):\n while True:\n if get_state() < state:\n logger.awaiting(\n \"awaiting reconciliation system state: %d with desired state: %d\" % (get_state(), state)\n )\n elif get_state() == state:\n logger.stable(\"number of processes: %d\" % (state))\n\n pids = \", \".join([str(obj.pid) for obj in active_children()])\n logger.info(\n \"current PIDs: %s\" % (pids)\n )\n time.sleep(1)", "def enable_lock(self, lock_on=True):\n if lock_on:\n self.write('ScanM_Mode=2') #Search\n time.sleep(10)\n self.write('ScanM_Mode=3') #Lock, its unclear from manual if\n #this is redundant. i.e. autolocks\n #at end of search\n if not self.query_lock_status():\n raise ac_excepts.CouplingkError('Not meeting threshold power',\n self.enable_lock)\n if not lock_on:\n self.write('ScanM_Mode=0') #Off", "def sync_start(self):", "def lock_status(self) -> Dict[str, str]:\n self.__logger.debug('Eva.lock_status called')\n return self.__http_client.lock_status()", "def idle(self):\n return", "def event_m20_11_x34(z102=20111500):\n \"\"\"State 0,1: Waiting for insect key activation\"\"\"\n CompareObjState(0, z102, 20, 0)\n assert ConditionGroup(0)\n \"\"\"State 2: End state\"\"\"\n return 0", "def _try_acquire_listgen_lock(self):\n with self._conn as conn, conn.cursor() as cursor:\n cursor.execute('SELECT pg_try_advisory_lock(%s::BIGINT)', [self._lock_key])\n return cursor.fetchone()[0]", "def manage_nr_locks(nr, ora_cursor,action,con):\r\n\r\n eid = _get_event_id(ora_cursor)\r\n\r\n current_app.logger.debug('event ID for NR Details edit:{}'.format(eid))\r\n _create_nro_transaction(ora_cursor, nr, eid, transaction_type='ADMIN')\r\n con.commit()\r\n\r\n if action == 'LOCK':\r\n current_app.logger.debug('got to checkout_nr() for NR:{}'.format(nr.nrNum))\r\n _update_nro_request_state_to_hold(ora_cursor, nr, eid)\r\n else:\r\n current_app.logger.debug('got to checkin_nr() for NR:{}'.format(nr.nrNum))\r\n _update_nro_request_state_to_draft(ora_cursor, nr, eid)\r\n\r\n con.commit()\r\n\r\n current_app.logger.debug('got to the end of checkinout_nr()')", "def busy(self):\n pass", "def test_allocation_scope_open_personal(self):\n self.computer_partition_amount = 2\n sequence_list = SequenceList()\n sequence_string = self.stabilise_accounting + \"\"\"\n LoginTestVifibCustomer\n CustomerRegisterNewComputer\n Tic\n Logout\n\n LoginDefaultUser\n SetComputerCoordinatesFromComputerTitle\n Logout\n\n SetSequenceSlaXmlCurrentComputer\n\n SlapLoginCurrentComputer\n FormatComputer\n Tic\n SlapLogout\n\n LoginDefaultUser\n 
CheckComputerAllocationScopeOpenPersonal\n CheckComputerTradeConditionDestinationSectionTestVifibCustomer\n Logout\n \"\"\" + self.prepare_published_software_release + \\\n self.request_and_install_software + \"\"\"\n # request as owner\n LoginTestVifibCustomer\n PersonRequestSoftwareInstance\n Tic\n Logout\n\n # instantiate for owner\n LoginDefaultUser\n CallConfirmOrderedSaleOrderAlarm\n Tic\n SetSelectedComputerPartition\n SelectCurrentlyUsedSalePackingListUid\n Logout\n LoginDefaultUser\n CheckComputerPartitionInstanceSetupSalePackingListDelivered\n Logout\n\n # request as someone else\n LoginTestVifibAdmin\n PersonRequestSoftwareInstance\n Tic\n Logout\n\n # fail to instantiate for someone else\n LoginDefaultUser\n CallConfirmOrderedSaleOrderAlarm\n Tic\n CheckNoRelatedSalePackingListLineForSoftwareInstance\n Logout\n\n # request as owner\n LoginTestVifibCustomer\n PersonRequestSoftwareInstance\n Tic\n Logout\n\n # instantiate for owner\n LoginDefaultUser\n CallConfirmOrderedSaleOrderAlarm\n Tic\n SetSelectedComputerPartition\n SelectCurrentlyUsedSalePackingListUid\n Logout\n LoginDefaultUser\n CheckComputerPartitionInstanceSetupSalePackingListDelivered\n Logout\n\n LoginERP5TypeTestCase\n CheckSiteConsistency\n Logout\n \"\"\"\n sequence_list.addSequenceString(sequence_string)\n sequence_list.play(self)", "def idle(self) -> None:\n # Like RadioHead library, turn off high power boost if enabled.\n self.set_boost(_TEST_PA1_NORMAL)\n self.operation_mode = STANDBY_MODE", "def get_next_status(self, peer, button_name):\n data_base = DataSource(r'src/controllers')\n self[peer][0] = data_base.sql_select(\n 'Buttons',\n ['next_stat'],\n {'current_stat': self[peer][0], 'button_name': button_name}\n )", "def pre_step(self,status):\n self.t0 = time.time()\n pass", "def status(self):\n self.scion_sh('status')", "def _acquire_pr_lock(self):\n\n ep = self.extended_properties\n is_processing_results = ep.get(\"processing_results\", False)\n\n if not is_processing_results:\n ep[\"processing_results\"] = True\n self.extended_properties = ep\n self.save()\n return True\n else:\n log.warning(\n \"Unable to aquire lock. Processing results already occurring. Skipping...\"\n )\n return False", "def progMode(state):\n\t# Envoie la commande setTorque a tous les servos\n\taxDriver.setTorque(axDriver.BROADCASTID, state)", "def _set_status(self):\n result = self._get_status()\n if result and result[0]['state'] == 'aborted':\n raise Exception(\"Aborted because the status flag is set to 'aborted' in dynamodb\")\n\n # record the status\n self.status['timestamp'] = time.strftime(\"%Y-%m-%dT%H:%M:%SZ\")\n self.db_handler.update_item({'api_version': TsV2CatalogHandler.api_version}, self.status)", "def assess_status(self):\n if not self.configuration_complete():\n hookenv.status_set('blocked',\n 'Kerberos configuration incomplete')\n elif os_utils.is_unit_upgrading_set():\n hookenv.status_set('blocked',\n 'Ready for do-release-upgrade and reboot. '\n 'Set complete when finished.')\n else:\n hookenv.status_set('active',\n 'Unit is ready')" ]
[ "0.54746443", "0.5449821", "0.54388434", "0.5305737", "0.52546555", "0.52230906", "0.5174595", "0.5144526", "0.5143462", "0.5060711", "0.5041392", "0.50204164", "0.5015011", "0.5006248", "0.50048566", "0.5001075", "0.49896038", "0.4973004", "0.49551147", "0.49547556", "0.49520612", "0.49466154", "0.49461028", "0.49428773", "0.49293905", "0.49125865", "0.49088362", "0.48780382", "0.48766387", "0.48683947", "0.48641616", "0.48565912", "0.48481476", "0.48390478", "0.48170632", "0.48067635", "0.48054275", "0.48024184", "0.48011845", "0.4797814", "0.47937936", "0.47905234", "0.4779813", "0.47707108", "0.47599852", "0.4748947", "0.47404432", "0.47306153", "0.47273046", "0.47256204", "0.47225648", "0.47101542", "0.4706748", "0.4704088", "0.47000462", "0.46988663", "0.4694528", "0.46874362", "0.4682345", "0.4681455", "0.46744373", "0.46725538", "0.4665283", "0.46601477", "0.46449742", "0.46439794", "0.46430904", "0.4642033", "0.46393466", "0.46382028", "0.46372032", "0.46354365", "0.46341315", "0.46328807", "0.46318096", "0.4631163", "0.46269825", "0.46238434", "0.46201336", "0.46193463", "0.46172106", "0.46166527", "0.461065", "0.46101588", "0.46088898", "0.45966056", "0.45956546", "0.45952332", "0.45940232", "0.45885894", "0.45870537", "0.4584501", "0.45844764", "0.4580005", "0.45799288", "0.45661557", "0.45643204", "0.4563045", "0.45596552", "0.45591587" ]
0.7192307
0
Returns all Operations in an associative array. The array's indices are the operation IDs. The objects contain all information about the operations, including the data
def get_current_operations_for_gui(cls, operation_types=None): db = cls._core.get_db() #TODO CHECK HOW LISTS ARE HANDLED IN FDB if operation_types is not None and type(operation_types) == list: stmnt = "SELECT OPE_ID, OPE_OPE_PARENT, OPE_INVOKED, OPE_TYPE, OPE_STATUS FROM OPERATIONS WHERE OPE_TYPE IN (?) ORDER BY OPE_INVOKED ;" cur = db.query(cls._core,stmnt,(operation_types)) else: stmnt = "SELECT OPE_ID, OPE_OPE_PARENT, OPE_INVOKED, OPE_TYPE, OPE_STATUS FROM OPERATIONS ORDER BY OPE_INVOKED ;" cur = db.query(cls._core,stmnt) ret = {} for row in cur.fetchallmap(): operation = cls.restore_operation(row) custom_values = operation.get_values() ret[row["OPE_ID"]] = {"id":row["OPE_ID"], "parent":row["OPE_OPE_PARENT"], "invoked":str(row["OPE_INVOKED"]), "type":row["OPE_TYPE"], "status":row["OPE_STATUS"], "data":custom_values} return ret
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def get_operations(self):\n return self.operations[:] # Returns a copy instead of actual attribute", "def operation_list(self):\n return OPERATION_LIST", "def list_operations():", "def operations(self):\n return set(self._operation_map.keys())", "def operations(self):\n return set(self._operation_map.keys())", "def getAxesOperations(self):\n \n axisOpsDict = {}\n for axis in self.axisWidgets:\n op = str(axis.getAxisOperationsButton().text()).strip()\n axisOpsDict[axis.getID()] = op\n \n return axisOpsDict", "def operations_map(self):\n # type: () -> Dict[Union[str, None], str]\n document_ast = self.document_ast\n operations = {} # type: Dict[Union[str, None], str]\n for definition in document_ast.definitions:\n if isinstance(definition, ast.OperationDefinition):\n if definition.name:\n operations[definition.name.value] = definition.operation\n else:\n operations[None] = definition.operation\n\n return operations", "def all_operations():\n return OperationHandler().get_all_classes()", "def operartors(self) -> List[Operator]:\n return list(self.__ops.keys())", "def ExtractOperations(toolF):\n return [o[\"uri\"] for o in toolF[\"operation\"]]", "def get_all(self):\n return self._name_to_operator.values()", "def read_ops(self):\n return self._read_ops", "def list_operations(\n self,\n ) -> Callable[\n [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse\n ]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"list_operations\" not in self._stubs:\n self._stubs[\"list_operations\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/ListOperations\",\n request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,\n response_deserializer=operations_pb2.ListOperationsResponse.FromString,\n )\n return self._stubs[\"list_operations\"]", "def list_operations(\n self,\n ) -> Callable[\n [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse\n ]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"list_operations\" not in self._stubs:\n self._stubs[\"list_operations\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/ListOperations\",\n request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,\n response_deserializer=operations_pb2.ListOperationsResponse.FromString,\n )\n return self._stubs[\"list_operations\"]", "def list_operations(\n self,\n ) -> Callable[\n [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse\n ]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"list_operations\" not in self._stubs:\n self._stubs[\"list_operations\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/ListOperations\",\n request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,\n response_deserializer=operations_pb2.ListOperationsResponse.FromString,\n )\n return 
self._stubs[\"list_operations\"]", "def operation_list(self):\n return [\n STATE_IDLE,\n STATE_HEAT,\n STATE_COOL,\n STATE_AUTO,\n STATE_FAN_ONLY,\n ]", "def create_ops(self):\n return self._create_ops", "def ListOperations(\n self,\n request: google.longrunning.operations_pb2.ListOperationsRequest,\n context: grpc.ServicerContext,\n ) -> google.longrunning.operations_pb2.ListOperationsResponse:", "def getAllOperation(cls):\n # pylint: disable=W0108\n return list(map(lambda x: cls.getConfigOperation(x),\n cls.getConfigStages()))", "def get_operation_data(self):\n op_data = {}\n try:\n op_data = self.factory.odoo_con.get_op_data(self.user_id, self.task_id)\n\n\n # if op_data:\n # self.state = \"scan_op\"\n except Exception, e:\n expt_str = e.message\n self._snd(expt_str)\n return op_data", "def get_ops_list(model_data):\n model = schema_fb.Model.GetRootAsModel(model_data, 0)\n op_set = set()\n\n for subgraph_idx in range(model.SubgraphsLength()):\n subgraph = model.Subgraphs(subgraph_idx)\n for op_idx in range(subgraph.OperatorsLength()):\n op = subgraph.Operators(op_idx)\n opcode = model.OperatorCodes(op.OpcodeIndex())\n builtin_code = schema_util.get_builtin_code_from_operator_code(opcode)\n if builtin_code == schema_fb.BuiltinOperator.CUSTOM:\n opname = opcode.CustomCode().decode(\"utf-8\")\n op_set.add(opname)\n else:\n op_set.add(visualize.BuiltinCodeToName(builtin_code))\n return op_set", "def get_ops (self, names):\n return operator.attrgetter(names)(self.core) if isinstance(names,str) else [\n operator.attrgetter(n)(self.core) for n in names ]", "def operations(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"operations\")", "def operations(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"operations\")", "def operations(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"operations\")", "def operations(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"operations\")", "def list_operators():\n for operator_symbol in operations:\n print(operator_symbol)", "def operations_per_joinpoint(self):\n ops = []\n current_ops = set()\n\n allocs = self.allocations\n # assumption: the shape of allocs is rectangular (i.e. 
each client contains the same number of elements)\n for idx in range(0, len(allocs[0])):\n for client in range(0, self.clients):\n task = allocs[client][idx]\n if isinstance(task, track.Task):\n current_ops.add(task.operation)\n elif isinstance(task, JoinPoint) and len(current_ops) > 0:\n ops.append(current_ops)\n current_ops = set()\n\n return ops", "def application_operations(self):\n\n return self._get_list_field(\"applicationsOperations\", lambda x: ApplicationOperation(x))", "def getOperations(tasks):\n uniqueOps = []\n for t in tasks:\n if t.operation not in uniqueOps:\n uniqueOps.append(t.operation)\n return uniqueOps", "def get_ops(self, mode='train'):\r\n if not self._ready_to_write:\r\n self._prepare_for_write()\r\n if mode == 'test' or mode == 'full_test': # Always return all ops for test case\r\n return self._expensive_ops[mode]\r\n elif mode == 'train': # Select ops to evaluate based on defined frequency\r\n check_func = self._model.time.has_been_n_seconds_since_last\r\n if check_func('expensive_summaries_train', self._expensive_ops_every_n_secs):\r\n return self._expensive_ops[mode]\r\n elif check_func('cheap_summaries_train', self._cheap_ops_every_n_secs):\r\n return self._cheap_ops[mode]\r\n return {}", "def get_currently_processed_modules(cls):\n db = cls._core.get_db()\n stmnt = \"SELECT OPE_ID, OPE_OPE_PARENT, OPE_TYPE FROM OPERATIONS \\\n WHERE OPE_TYPE = 'ModuleInstallOperation' \\\n or OPE_TYPE = 'ModuleUninstallOperation' ;\"\n cur = db.query(cls._core,stmnt);\n ret = []\n for row in cur.fetchallmap():\n ret.append(Operation.restore_operation(row).get_meta())\n return ret", "def operation_calls(self):\n return self._operation_calls", "def parse_operations(self, operation_type: str) -> Tuple[Operation]:\n if operation_type is None:\n return tuple()\n query_type: SchemaType = self.types.get(operation_type)\n if query_type is None:\n return tuple()\n return tuple([Operation(f, self.settings) for f in query_type.fields])", "def ListOperations(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def ListOperations(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def ListOperations(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def lookup_ops(self):\n return self._lookup_ops", "def generate_operations(self):\n combinations = self.COMBINATIONS.items()[:self.limit]\n for (term1, term2), type in combinations:\n yield (term1, term2, type)", "def get_op_types(self):\n return self.cur_config['ops']", "def __init__(self, operations = []):\n self.operations = operations", "def add_operations_from(self, obj):\n\n for name in dir(obj):\n op = getattr(obj, name)\n if isinstance(op, Operation):\n self.add_operation(op)", "def _pull_argops(op_dict):\n import inspect\n out = []\n keys = op_dict.keys()\n keys.sort() # Not necessary, but makes scanning the printout easier\n for k in keys:\n # Create a dictionary that will be used to fill the 'code' template\n d = {}\n d[\"enum_name\"] = enum_name = op_dict[k][3:] # <NAME>\n d[\"funct_name\"] = \"%s\" % enum_name.lower() # <name>\n class_name = \"%s4args\" % enum_name\n klass = getattr(_type, class_name, None)\n if klass is None:\n # This operation 
takes no arguments\n d[\"funct_args\"] = d[\"create_args\"] = d[\"set_args\"] = \"\"\n else:\n if type(klass) is dict:\n arg_list = \"enum_value\"\n d[\"create_args\"] = \"args = enum_value\"\n else:\n arg_list = \", \".join(inspect.getargspec(klass.__init__)[0][1:])\n d[\"create_args\"] = \"args = _type.%s(%s)\" % (class_name, arg_list)\n d[\"funct_args\"] = arg_list\n if enum_name.startswith(\"CB_\"):\n d[\"set_args\"] = \"opcb%s=args\" % enum_name.lower()[3:]\n else:\n d[\"set_args\"] = \"op%s=args\" % enum_name.lower()\n if enum_name.startswith(\"CB_\"):\n d[\"argop\"] = \"nfs_cb_argop4\"\n else:\n d[\"argop\"] = \"nfs_argop4\"\n out.append(d)\n return out", "def _register_ops(self):\n ops = []\n ops.append(BatchAppsOps.register(\"shared.home\",\n \"Home\",\n self._home))\n ops.append(BatchAppsOps.register(\"shared.management_portal\",\n \"Management Portal\",\n self._management_portal))\n return ops", "def get_operation_obect(self, method):\n pass", "def __init__(self):\n\n self.operations = {}", "def make_operation_space():\n operation_space = {}\n\n # Set integInfo and integBranch\n operation_space['prepare_delenv'] = rmdmod.PrepareDelEnvOperation()\n\n # Call p4 integ for delete revisions\n operation_space['call_p4_integ'] = rmdmod.CallIntegOperation()\n\n # checkout README and place into a pending cln\n operation_space['create_changelist'] = rmdmod.CreateChangelistOperation()\n\n # open file for edit within changelist\n operation_space['reopen'] = rmdmod.ReopenOperation()\n\n # list history of deleted files\n operation_space['list_history'] = rmdmod.ListDelHistoryOperation()\n\n return operation_space", "def all(self, tmin=None, tmax=None):\n stats = pd.DataFrame(columns=['Value'])\n for k in self.ops.keys():\n stats.loc[k] = (getattr(self, k)(tmin=tmin, tmax=tmax))\n\n return stats", "def get_ops():\n li = [\"EOF\",\"ADD\",\"SUB\",\"MUL\",\"DIV\",\"POW\",\"BITAND\",\"BITOR\",\"CMP\",\"GET\", \\\n \"SET\",\"NUMBER\",\"STRING\",\"GGET\",\"GSET\",\"MOVE\",\"DEF\",\"PASS\", \\\n \"JUMP\",\"CALL\",\"RETURN\",\"IF\",\"DEBUG\",\"EQ\",\"LE\",\"LT\",\"DICT\", \\\n \"LIST\",\"NONE\",\"LEN\",\"LINE\",\"PARAMS\",\"IGET\",\"FILE\",\"NAME\", \\\n \"NE\",\"HAS\",\"RAISE\",\"SETJMP\",\"MOD\",\"LSH\",\"RSH\",\"ITER\",\"DEL\", \\\n \"REGS\",\"BITXOR\", \"IFN\", \"NOT\", \"BITNOT\"]\n dic = {}\n for i in li:\n dic[i] = li.index(i)\n return dic", "def nncf_operations(self) -> List[NNCFOperation]:\n return [op for hook in getattr(self, \"_hooks\") for op in hook.operations]", "def _simple_traversal(self, operation):\r\n results = execute_query('g.e(eid).%s()'%operation, {'eid':self.eid})\r\n return [Element.deserialize(r) for r in results]", "def _create_iterable_operations(self, node, interface_identifier):\n return {\n Identifier('forEach'):\n self._create_operation(Identifier('forEach'),\n arguments=self._create_arguments([\n (Identifier('callback'),\n Identifier('ForEachIteratorCallback')),\n (Identifier('thisArg'), 'any', 'null'),\n ]),\n extended_attributes={\n 'CallWith':\n ('ScriptState', 'ThisValue'),\n 'RaisesException': None,\n 'ImplementedAs': 'forEachForBinding',\n },\n node=node),\n Identifier('entries'):\n self._create_operation(\n Identifier('entries'),\n return_type=SyncIterator.identifier_for(interface_identifier),\n extended_attributes={\n 'CallWith': 'ScriptState',\n 'RaisesException': None,\n 'ImplementedAs': 'entriesForBinding',\n },\n node=node),\n Identifier('keys'):\n self._create_operation(\n Identifier('keys'),\n 
return_type=SyncIterator.identifier_for(interface_identifier),\n extended_attributes={\n 'CallWith': 'ScriptState',\n 'RaisesException': None,\n 'ImplementedAs': 'keysForBinding',\n },\n node=node),\n Identifier('values'):\n self._create_operation(\n Identifier('values'),\n return_type=SyncIterator.identifier_for(interface_identifier),\n extended_attributes={\n 'CallWith': 'ScriptState',\n 'RaisesException': None,\n 'ImplementedAs': 'valuesForBinding',\n },\n node=node),\n }", "def dataObjects(self):\n\t\treturn self._objects", "def get_objects_data(self):\n return dict(result=self.objects)", "def batch_read(self, operations: List[Dict[str, Any]], **kwargs) -> Dict[str, Any]:\n return cd_client.batch_read(DirectoryArn=self._dir_arn, Operations=operations, **kwargs)", "def get_operations(self):\n op = self.act.get_operations()\n op.extend(Character.decr_attr)\n return op", "def find_operations(self, span_kind: str, service: str) -> List[Operation]:\n match_query = [{\"process.serviceName\": service}]\n if span_kind != \"\":\n tag_query = {\"tags\": {\"$elemMatch\": {\"key\": SPAN_KIND_NAME, \"vStr\": span_kind}}}\n match_query.append(tag_query)\n match_stage = {\"$and\": match_query}\n aggregation = [\n {\"$match\": match_stage},\n {\"$unwind\": {\"path\": \"$tags\"}},\n {\"$match\": {\"tags.key\": \"span.kind\"}},\n {\"$group\": {\"_id\": {\"operationName\": \"$operationName\", \"tags\": \"$tags\"}}},\n {\"$replaceRoot\": {\"newRoot\": \"$_id\"}},\n ]\n results = self.collection.aggregate(aggregation)\n return [\n Operation(name=result[\"operationName\"], span_kind=result[\"tags\"][\"vStr\"])\n for result in results\n ]", "def ops():\n\tret = open(os.path.join(SERVER_DIR, 'ops.txt')).read().strip().split('\\n')\n\tret = [unicode(name.lower()) for name in ret]\n\treturn ret", "def __operations(self, conf):\n result = \"\"\"## Operations [back to top](#toc)\nThe operations that this API implements are:\n\"\"\"\n ops = \"\\n\"\n\n for op in conf[\"conf_json\"][1:]:\n params = []\n for p in findall(PARAM_NAME, op[\"url\"]):\n p_type = \"str\"\n p_shape = \".+\"\n if p in op:\n p_type, p_shape = findall(\"^\\s*([^\\(]+)\\((.+)\\)\\s*$\", op[p])[0]\n\n params.append(\n \"<em>%s</em>: type <em>%s</em>, regular expression shape <code>%s</code>\"\n % (p, p_type, p_shape)\n )\n result += \"\\n* [%s](#%s): %s\" % (\n op[\"url\"],\n op[\"url\"],\n op[\"description\"].split(\"\\n\")[0],\n )\n ops += \"\"\"<div id=\"%s\">\n<h3>%s <a href=\"#operations\">back to operations</a></h3>\n\n%s\n\n<p class=\"attr\"><strong>Accepted HTTP method(s)</strong> <span class=\"attr_val method\">%s</span></p>\n<p class=\"attr params\"><strong>Parameter(s)</strong> <span class=\"attr_val\">%s</span></p>\n<p class=\"attr\"><strong>Result fields type</strong><span class=\"attr_val\">%s</span></p>\n<p class=\"attr\"><strong>Example</strong><span class=\"attr_val\"><a target=\"_blank\" href=\"%s\">%s</a></span></p>\n<p class=\"ex attr\"><strong>Exemplar output (in JSON)</strong></p>\n<pre><code>%s</code></pre></div>\"\"\" % (\n op[\"url\"],\n op[\"url\"],\n markdown(op[\"description\"]),\n \", \".join(split(\"\\s+\", op[\"method\"].strip())),\n \"</li><li>\".join(params),\n \", \".join(\n [\n \"%s <em>(%s)</em>\" % (f, t)\n for t, f in findall(FIELD_TYPE_RE, op[\"field_type\"])\n ]\n ),\n conf[\"website\"] + conf[\"base_url\"] + op[\"call\"],\n op[\"call\"],\n op[\"output_json\"],\n )\n return markdown(result) + ops", "def _get_design_mesh_operations(self):\n meshops = []\n try:\n for ds in 
self._app.design_properties[\"MeshRegion\"][\"MeshSetup\"][\"MeshOperations\"]:\n if isinstance(self._app.design_properties[\"MeshRegion\"][\"MeshSetup\"][\"MeshOperations\"][\n ds], (OrderedDict, dict)):\n meshops.append(\n MeshOperation(\n self,\n ds,\n self._app.design_properties[\"MeshRegion\"][\"MeshSetup\"][\"MeshOperations\"][ds],\n \"Icepak\",\n )\n )\n except:\n pass\n return meshops", "def getattr_ops(self):\n return self._getattr_ops", "def GetFileOperations():\n values = __get_current_values()\n to_return = []\n for i in range(int(len(values) / 2)):\n to_return.append((values[2*i].replace(\"\\\\??\\\\\", \"\"), values[2*i+1].replace(\"\\\\??\\\\\", \"\")))\n return to_return", "def get_compute_op_list(job_content):\n op_list = job_content[\"op_list\"]\n op_compute_list = []\n for op in op_list:\n if op[\"type\"] != \"Data\":\n op_compute_list.append(op)\n return op_compute_list", "def get_operagion(self):\n if self.OP_GID not in self._data_dict:\n return None\n return dao.get_operation_by_gid(self._data_dict.get(self.OP_GID, None))", "def operation(self, name):\n\n try:\n return self.operations[name]\n except KeyError:\n return self.operation_not_found(name)", "def write_ops(self):\n return self._write_ops", "def _operation_tree(self):\n\n # initial state\n i = 0\n level = 0\n stack = []\n current = None\n\n def _create_operation(args):\n profile_stats = None\n name = args[0].strip()\n args.pop(0)\n if len(args) > 0 and \"Records produced\" in args[-1]:\n records_produced = int(\n re.search(\"Records produced: (\\\\d+)\", args[-1]).group(1)\n )\n execution_time = float(\n re.search(\"Execution time: (\\\\d+.\\\\d+) ms\", args[-1]).group(1)\n )\n profile_stats = ProfileStats(records_produced, execution_time)\n args.pop(-1)\n return Operation(\n name, None if len(args) == 0 else args[0].strip(), profile_stats\n )\n\n # iterate plan operations\n while i < len(self.plan):\n current_op = self.plan[i]\n op_level = current_op.count(\" \")\n if op_level == level:\n # if the operation level equal to the current level\n # set the current operation and move next\n child = _create_operation(current_op.split(\"|\"))\n if current:\n current = stack.pop()\n current.append_child(child)\n current = child\n i += 1\n elif op_level == level + 1:\n # if the operation is child of the current operation\n # add it as child and set as current operation\n child = _create_operation(current_op.split(\"|\"))\n current.append_child(child)\n stack.append(current)\n current = child\n level += 1\n i += 1\n elif op_level < level:\n # if the operation is not child of current operation\n # go back to it's parent operation\n levels_back = level - op_level + 1\n for _ in range(levels_back):\n current = stack.pop()\n level -= levels_back\n else:\n raise Exception(\"corrupted plan\")\n return stack[0]", "def objects(self):\n return self.obj_to_id.keys()", "def print_operation(operations):\n for operation in operations:\n print ' ',\n change_color_by_tag(operation)\n if operation['ExtAttributes']:\n print_extattributes_of_member(operation['ExtAttributes'])\n print operation['Type'],\n if operation['Arguments']:\n print operation['Name'],\n print_argument(operation['Arguments'])\n else:\n print operation['Name']", "def dump(self):\n return [action.get_data() for action in self.h]", "def ole_objects(self):\n return self.container['ole_objects']", "def operation_counts(self) -> Dict[int, Dict[str, int]]:\n return self._operation_counts", "def destroy_operators(self) -> List[Tuple[str, _OperatorType]]:\n return 
list(self._d_ops.items())", "def callables(self):\n \n if hasattr(self, \"_callables\"):\n return self._callables\n \n # build a list of all the Callable objects\n # The old backend processed all operations first\n # (FIXME: duplicate for the sake of easy checking)\n self._callables = []\n\n for c in self._node.callables():\n if isinstance(c, idlast.Operation):\n self._callables.append(call.operation(self, c))\n \n for c in self._node.callables():\n if isinstance(c, idlast.Attribute):\n self._callables = self._callables + call.read_attributes(self, c)\n if c.readonly(): continue\n self._callables = self._callables + call.write_attributes(self, c)\n \n return self._callables", "def _create_async_iterable_operations(self, node, interface_identifier,\n arguments):\n return {\n Identifier('entries'):\n self._create_operation(\n Identifier('entries'),\n arguments=make_copy(arguments),\n return_type=AsyncIterator.identifier_for(interface_identifier),\n extended_attributes={\n 'CallWith': 'ScriptState',\n 'RaisesException': None,\n 'ImplementedAs': 'entriesForBinding',\n },\n node=node),\n Identifier('keys'):\n self._create_operation(\n Identifier('keys'),\n arguments=make_copy(arguments),\n return_type=AsyncIterator.identifier_for(interface_identifier),\n extended_attributes={\n 'CallWith': 'ScriptState',\n 'RaisesException': None,\n 'ImplementedAs': 'keysForBinding',\n },\n node=node),\n Identifier('values'):\n self._create_operation(\n Identifier('values'),\n arguments=make_copy(arguments),\n return_type=AsyncIterator.identifier_for(interface_identifier),\n extended_attributes={\n 'CallWith': 'ScriptState',\n 'RaisesException': None,\n 'ImplementedAs': 'valuesForBinding',\n },\n node=node),\n }", "def GetOperation(name):\n client = GetClientInstance()\n messages = client.MESSAGES_MODULE\n request = messages.ApikeysOperationsGetRequest(name=name)\n try:\n return client.operations.Get(request)\n except (apitools_exceptions.HttpForbiddenError,\n apitools_exceptions.HttpNotFoundError) as e:\n exceptions.ReraiseError(e, exceptions.OperationErrorException)", "def all(self):\n return (self.__objects)", "def getOp(self):\n return self._OPERATION", "def GetObjects(self): \r\n return self.model.GetObjects()", "def convert_to_model(self, *args):\n operation_types_data, *_ = args\n return [OperationType(**operation_type) for operation_type in operation_types_data]", "def get_mds_ops_data(self, datetime_start, datetime_end, timechunk=datetime.timedelta(hours=1)):\n return self.get_timeseries_data('MDS_OPS_DATA',\n datetime_start,\n datetime_end,\n timechunk=timechunk)", "def create_commandlist(self):\n \n #go through all commands and parse the information\n command_list = []\n for c in self.command_series:\n \n #get start and end frames. 
For \"At frame...\" statements end == start\n if c[0].split()[0] == 'From':\n start = int(re.findall('From frame (\\d+) to*', c[0])[0])\n end = int(re.findall('to frame (\\d+) *', c[0])[0])\n else:\n start = int(re.findall('At frame (\\d+).*', c[0])[0])\n end = int(re.findall('At frame (\\d+).*', c[0])[0])\n \n #For each group of statements parse the commands\n for c2 in c[1]:\n parsed = self.parse_command(c2)\n #if parsing returns a list, it means that the operation has been split into parts\n #mainly to handle large rotations\n if type(parsed) is list:\n interm_steps = np.linspace(start,end,len(parsed)+1).astype(int)\n for i in range(len(interm_steps)-1):\n command_list.append([interm_steps[i], interm_steps[i+1], parsed[i]])\n else:\n command_list.append([start, end, parsed]) \n \n #sort commands by time\n command_list = np.array(command_list)\n command_list = command_list[np.argsort(command_list[:,0]),:]\n \n #create list of dictionaries\n command_list = [{'start': x[0], 'end': x[1], 'operation': x[2][0], 'params': x[2][1:]} for x in command_list]\n self.command_list = command_list", "def ops2alg(ops):\n return Model(cardinality=len(ops[0]), \n operations=dict([\"h\"+str(i),list(ops[i])] for i in range(len(ops))))", "def repair_operators(self) -> List[Tuple[str, _OperatorType]]:\n return list(self._r_ops.items())", "def get_objects(self, oid=None,\n since=None, until=None, last=None, first=None):\n resource = self.kvpath('registry/objects', ('int', oid),\n since=('isobasic', absdatetime(since)),\n until=('isobasic', absdatetime(until)),\n first=('int', first), last=('int', last))\n return self.request('get', resource)", "def operation_invoices(self):\r\n for operation in self:\r\n invoices = self.env['account.invoice'].search([\r\n ('freight_hbl', '=', operation.id),\r\n ('type', 'in', ['out_invoice', 'out_refund']),\r\n ('state', '!=', 'cancel'),\r\n ])\r\n action = self.env.ref('account.action_invoice_tree1').read()[0]\r\n if len(invoices) > 1:\r\n action['domain'] = [('id', 'in', invoices.ids)]\r\n elif len(invoices) == 1:\r\n action['views'] = [(self.env.ref('account.invoice_form').id, 'form')]\r\n action['res_id'] = invoices.ids[0]\r\n else:\r\n action = {'type': 'ir.actions.act_window_close'}\r\n return action", "def __init__(self):\r\n self.operation_map = {}", "def extract_commands(self):\n # import pdb; pdb.set_trace()\n left_i = 0\n right_i = 1\n commands = {}\n cmd = self.cmd\n\n if not cmd:\n return\n while left_i < len(cmd):\n sub_cmd = cmd[left_i:right_i]\n if sub_cmd in self.action_list:\n arg_len, arguments = self.extract_command_arguments(right_i)\n commands[sub_cmd] = arguments\n left_i = right_i + arg_len\n right_i = left_i + 1\n else:\n left_i, right_i = self.update_i(left_i, right_i)\n return commands", "def operation(cls):\n return relationship.many_to_one(cls, 'operation')", "def operation(cls):\n return relationship.many_to_one(cls, 'operation')", "def operators(self):\n return self.domain.operators.keys()", "def getResult(self, data_path, tree_path=None):\r\n otu_table = self.getBiomData(data_path)\r\n\r\n calc_names = []\r\n for calc in self.Calcs:\r\n # add either calc's multiple return value names, or fn name\r\n calc_names.extend(getattr(calc.Metric, 'return_names',\r\n (calc.Metric.__name__,)))\r\n needs_tree = max([c.IsPhylogenetic for c in self.Calcs])\r\n if needs_tree:\r\n tree = self.getTree(tree_path)\r\n else:\r\n tree = None\r\n # calculations\r\n res = []\r\n for c in self.Calcs:\r\n # add either calc's multiple return value names, or fn name\r\n 
metric_res = c(data_path=data_path,\r\n taxon_names=otu_table.ObservationIds,\r\n tree_path=tree,\r\n sample_names=otu_table.SampleIds)\r\n if len(metric_res.shape) == 1:\r\n res.append(metric_res)\r\n elif len(metric_res.shape) == 2:\r\n for met in metric_res.T:\r\n res.append(met)\r\n else:\r\n raise RuntimeError(\"alpha div shape not as expected\")\r\n res_data = array(res).T\r\n\r\n return res_data, otu_table.SampleIds, calc_names", "def all_objects():\n objs = {}\n objs['Section'] = list(h.all_sec())\n objs['Segment'] = []\n for sec in objs['Section']:\n objs['Segment'].extend(list(sec.allseg()))\n objs['PointProcess'] = []\n for seg in objs['Segment']:\n objs['PointProcess'].extend(list(seg.point_processes()))\n \n return objs", "def get_file_operations() -> dict:\n\n from FileWrangler.fileops.CompletelyReplace import CompletelyReplaceUIOperation\n from FileWrangler.fileops.Separator import SeparatorUIOperation\n from FileWrangler.fileops.PatternFinding import PatternExtractingUIOperation\n from FileWrangler.fileops.PathComponents import PathComponentsUIOperation\n operations = [\n CompletelyReplaceUIOperation(),\n SeparatorUIOperation(),\n PatternExtractingUIOperation(),\n PathComponentsUIOperation()\n ]\n return {x.name: x for x in operations}", "def get(self):\n\n return ({\"can-do\": TeproAlgo.getAvailableOperations()}, int(HTTPStatus.OK))", "def operators(self):\n return self._operators" ]
[ "0.7069516", "0.7069516", "0.7069516", "0.7069516", "0.6523942", "0.6325791", "0.63029575", "0.6191638", "0.6191638", "0.61165065", "0.60694075", "0.6018854", "0.60050935", "0.5908322", "0.58018446", "0.5792496", "0.5778409", "0.5778409", "0.5778409", "0.575747", "0.5727349", "0.56417656", "0.56347823", "0.5610372", "0.55806655", "0.556928", "0.5560916", "0.5560916", "0.5560916", "0.5560916", "0.55510026", "0.55353725", "0.553528", "0.55011946", "0.5501132", "0.5487797", "0.54640085", "0.54508096", "0.54476273", "0.54476273", "0.54476273", "0.542754", "0.53650194", "0.5359049", "0.53441405", "0.5310745", "0.5302518", "0.5299355", "0.5279106", "0.52581465", "0.5250412", "0.5247652", "0.5244617", "0.5241486", "0.52305233", "0.5225163", "0.51707655", "0.51680094", "0.5150985", "0.51486975", "0.514368", "0.5111666", "0.51079684", "0.5105753", "0.51032466", "0.5097114", "0.5085001", "0.5065478", "0.5063317", "0.50542116", "0.5047204", "0.50428367", "0.5017753", "0.50087744", "0.5005986", "0.5005432", "0.49976024", "0.49916607", "0.4987815", "0.4980688", "0.49657184", "0.49629876", "0.4961074", "0.495879", "0.49566334", "0.49566332", "0.49446252", "0.4937552", "0.49340695", "0.4931774", "0.49272388", "0.49220088", "0.4910329", "0.4910329", "0.490638", "0.48935148", "0.48928562", "0.48804682", "0.4877122", "0.48612452" ]
0.628271
7
Get a new Operation Id from the Database and assign it to this Operation if this Operation's id is null. Afterwards return the new Id
def set_db_id(self):
    if self._id is None:
        db = self._core.get_db()
        self._id = db.get_seq_next('OPE_GEN')
    return self._id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def operation_id(self) -> Optional[str]:\n return pulumi.get(self, \"operation_id\")", "def get_unique_operation_id(self) -> np.uint64:\n\n counter_key = serialize_key('counter')\n\n # Incrementer row keys start with an \"i\"\n row_key = serialize_key(\"ioperations\")\n append_row = self.table.row(row_key, append=True)\n append_row.increment_cell_value(self.incrementer_family_id,\n counter_key, 1)\n\n # This increments the row entry and returns the value AFTER incrementing\n latest_row = append_row.commit()\n operation_id_b = latest_row[self.incrementer_family_id][counter_key][0][0]\n operation_id = int.from_bytes(operation_id_b, byteorder=\"big\")\n\n return np.uint64(operation_id)", "def get_operation_id(self):\n operation_id = self.yaml_parser.object.get('operationId', None)\n if not operation_id:\n operation_id = self.method + \"-\" + self.path.strip(\"/\").replace(\"/\", \"-\")\n\n return operation_id", "def setOperationId(self, opid) :\n self.operation_id = opid", "def getid_saveifneeded(self):\n #if (not hasattr(self,'id') or self.id == None):\n if (self.id == None):\n self.save()\n return self.id", "def resolve_operation_id(self, operation):\n oid = operation.operation_id\n if \".\" in oid:\n oid = oid.split(\".\")[-1]\n # Append the operation function to this module.\n setattr(self.me, oid, noop)\n return self.me.__name__ + \".\" + oid", "def _id_maybe_add(self, get_sql, add_sql, params=list()):\n try:\n self.cur.execute(get_sql, params)\n return self.cur.fetchone()[0]\n except TypeError:\n self.cur.execute(add_sql, params)\n return self.cur.lastrowid", "def get_or_create_unique_id(self):\n if not self.unique_id:\n self.unique_id = uuid.uuid4().hex\n return self.unique_id", "def operation(cls):\n return relationship.many_to_one(cls, 'operation')", "def operation(cls):\n return relationship.many_to_one(cls, 'operation')", "def opid(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"opid\")", "def id(self) -> Optional[int]:\n return self.__id", "def getid(self):\n if self.Id is None:\n return self.internalid\n else:\n return self.Id", "def getid(self):\n if self.Id is None:\n return self.internalid\n else:\n return self.Id", "def getid(self):\n if self.Id is None:\n return self.internalid\n else:\n return self.Id", "def getid(self):\n if self.Id is None:\n return self.internalid\n else:\n return self.Id", "def getid(self):\n if self.Id is None:\n return self.internalid\n else:\n return self.Id", "def getid(self):\n if self.Id is None:\n return self.internalid\n else:\n return self.Id", "def _get_id(self):\n return self.id", "def id(self):\n return self.odb_id", "def new_mission_id(self):\n newMissionId = self.lastMissionId + 1\n if newMissionId in self.missions.keys():\n # This should never happen but TODO strengthen it just in case\n raise RuntimeError(\"Id of newly created mission already exists.\")\n return newMissionId", "def get_operation_old(operation_name):\n op = operations_api.get_operation(operation_name)\n return op", "def _get_id(self) -> int:\n if len(self._id_pool) == 0:\n raise ArchonError(\"No ids reamining in the pool!\")\n return self._id_pool.pop()", "def lookup_operation(self, result):\r\n if (not isinstance(result, autodiff.tensor.Tensor) or \r\n result.id not in self.operation_map):\r\n return None\r\n return self.operation_map[result.id]", "def newId():\n global lastId\n lastId += 1\n return 'id%d' % lastId", "def get_id(self, desired: int = -1) -> int:\n\n if desired == -1:\n return super().get_id()\n else:\n self._used.add(desired)\n return 
desired", "def GetId(self):\n return int(self.id)", "def get_objective_id(self):\n return Id(self._my_map['objectiveId'])", "def getOperationNameForId(i):\n for key in operations:\n if int(operations[key]) is int(i):\n return key\n return \"Unknown Operation ID %d\" % i", "def getOperationNameForId(i):\n for key in operations:\n if int(operations[key]) is int(i):\n return key\n return \"Unknown Operation ID %d\" % i", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def GetId(self):\r\n\r\n return self.id", "def newid(self, target_table):\n self.new_id[target_table] += 1\n return self.new_id[target_table]", "def ID(self):\n if hasattr(self, 'currentID'):\n return self.currentID\n if hasattr(self, 'callDict'):\n thisID = hashIt(self.callDict)\n if hasattr(self, 'pastInfo'):\n self.pastInfo[thisID] = {'callDict': self.callDict}\n else:\n thisID = None\n self.currentID = thisID\n return thisID", "def __getNewIPpoolID(self):\n return db_main.getHandle().seqNextVal(\"ippool_id_seq\")", "def get_id(self) -> int:\n return self.id", "def get_id(self) -> int:\n return self.id", "def identity(self):\n return self.id", "def GetCommandId(self):\r\n \r\n return self._last_id", "def get_primary_id(self):", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def GetCommandId(self):\r\n\r\n return self._last_id", "def getID(self) -> int:\n ...", "def id(self) -> int:\n\t\t# pylint: disable=invalid-name\n\t\treturn self._oid", "def get_new_oid(cls):\n return OidGenerator.allocate()", "def get_id(self) -> Optional[str]:\n return self.id_", "def restore_operation(cls, operation_record):\n classname = operation_record[\"OPE_TYPE\"]\n module = \"\" #TODO Implement modulename from database if Operation belongs to Module\n is_operation_of_module = False\n exec \"\"\"\ntry:\n type(%(class)s)\nexcept NameError,e:\n is_operation_of_module = True\"\"\"%{'class':classname}\n\n if is_operation_of_module:\n exec \"\"\"\nfrom %(module)s import %(class)s\noperation = %(class)s(cls._core)\"\"\"%{'class':classname,'module':module}\n else:\n exec \"\"\"\noperation 
= %(class)s(cls._core)\"\"\"%{'class':classname}\n\n operation.set_id(operation_record['OPE_ID'])\n db = cls._core.get_db()\n stmnt = \"SELECT OPD_KEY, OPD_VALUE, OPD_TYPE FROM OPERATIONDATA WHERE OPD_OPE_ID = ? ;\"\n cur = db.query(cls._core,stmnt,(operation_record[\"OPE_ID\"],))\n for row in cur.fetchallmap():\n val = row[\"OPD_VALUE\"]\n exec \"\"\"val = %s(val)\"\"\"%row[\"OPD_TYPE\"]\n operation.set_value(row[\"OPD_KEY\"], val)\n return operation", "def getOid(self):\n if self.__state & self.stClean:\n return self.__oid\n else:\n raise SmiError('%s object not fully initialized' % self.__class__.__name__)", "def _getNewImgId(self):\n\n newImgId = COCO_PLUS.IMG_ID\n COCO_PLUS.IMG_ID += 1\n\n return newImgId", "def _add_ID(self, preferred_id):\n self.id = preferred_id\n while self.id in Thing.ID_dict: # unique-ify self.id if necessary\n self.id = self.id + str(random.randint(0, 9))\n Thing.ID_dict[self.id] = self\n return self.id", "def insert_get_last_id(self, sql: str) -> int:\n with self.connection.cursor() as cursor:\n self.connection.ping(reconnect=True)\n cursor.execute(sql)\n last_id = cursor.lastrowid\n self.connection.commit()\n return last_id", "def get_operation(project_id: str, region: str, operation_id: str) -> Operation:\n return get_operation_by_name(\n f\"projects/{project_id}/locations/{region}/operations/{operation_id}\"\n )", "def getID():", "def set_operation(self, operation):\n self._data_dict[self.OP_GID] = operation.gid", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id" ]
[ "0.7058039", "0.6560749", "0.64451057", "0.63077974", "0.6068187", "0.5920608", "0.58303344", "0.58039576", "0.5725634", "0.5725634", "0.56132185", "0.55941796", "0.55935115", "0.55935115", "0.55935115", "0.55935115", "0.55935115", "0.55935115", "0.5580039", "0.55765927", "0.5546043", "0.5518853", "0.54759467", "0.54586345", "0.54543", "0.5450236", "0.54143804", "0.5380736", "0.5378624", "0.5378624", "0.53601855", "0.53601855", "0.53601855", "0.53601855", "0.5357541", "0.53422314", "0.53284097", "0.5292739", "0.52925116", "0.52925116", "0.5285604", "0.5284042", "0.52649546", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.526103", "0.5252569", "0.5251367", "0.5247358", "0.5245609", "0.5233077", "0.52171755", "0.5203093", "0.5200969", "0.52002454", "0.5190226", "0.5183422", "0.5177669", "0.5163797", "0.515913", "0.515913", "0.515913", "0.515913", "0.515913", "0.515913", "0.515913", "0.515913", "0.515913" ]
0.0
-1
Stores this Operation to database. Also saves every user defined value in $_values as long as it is a valid type
def store(self):
    db = self._core.get_db()
    if self._id is None:
        db = self._core.get_db()
        self._id = db.get_seq_next('OPE_GEN')
    stmnt = "UPDATE OR INSERT INTO OPERATIONS (OPE_ID, OPE_OPE_PARENT, OPE_INVOKED, OPE_TYPE) \
                  VALUES (?,?,CURRENT_TIMESTAMP,?) MATCHING (OPE_ID);"
    db.query(self._core,stmnt,(self._id,self._parent,self.__class__.__name__),commit=True)
    stmnt = "UPDATE OR INSERT INTO OPERATIONDATA (OPD_OPE_ID, OPD_KEY, OPD_VALUE, OPD_TYPE) \
                  VALUES ( ?, ?, ?, ?) MATCHING(OPD_OPE_ID,OPD_KEY);"
    for key, value in self._values.items():
        typ = str(type(value)).replace("<type '","",1).replace("'>","",1)
        if typ not in Operation.VALID_STORAGE_TYPES:
            continue
        db.query(self._core,stmnt,(self._id,key,value,typ),commit=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_vals (self):\n raise NotImplementedError", "def save(self, values):", "def persist(self, values):\n pass", "def save(self):\n data = self.serialize()\n\n self.validate(data)\n\n saved_data = DATABASE_CONNECTION.insert(self.__class__.__name__, data)\n\n self.__dict__.update(saved_data)", "def store(\n self, cursor: sqlite3.Cursor, connector_value: int, values: PrimitiveTypes\n ) -> None:", "def save(self, data):\n\t\tif self.value:\n\t\t\tdata['value'] = self.value", "def store(self):\n\n pass", "async def _insert_values(self, model: Model):\n\n raise NotImplementedError", "def save_data(self):\n pass", "def saveProgrammingValuesToDatabase(self):\n #print(\"save programming values\")\n #print(\"username: \", self.user.username)\n #self.user.data.printData()\n self.dbManager.setUserProgramData(self.user.username, self.user.data)", "def save_to_db(self, data, db_operations):\n self.from_dict(data)\n self._id = str(db_operations.insert_one(self.to_dict()).inserted_id)", "def saveData(self):\n pass", "def insert_values():\n pass", "def save(self):\n raise NotImplementedError", "def save(self):\n raise NotImplementedError", "def save(self):\n raise NotImplementedError", "def SaveValues(self, e):\n if (self.output_type is None):\n msg = \"The quantities are not associated with an output type. Choose one and re-save.\"\n ShowMessage(msg, kind='warn')\n return\n self.num_values = len(self.selected_values)\n\n nml = self.mainparent.input_file.namelists[self.mainparent.namelist]\n var_names = [v.name for v in nml.variables]\n\n name = self.output_types[self.output_type] + \"_values\"\n values = list(set(self.selected_values))\n\n if (name in var_names): # name already in namelist, add to old values\n ind = var_names.index(name)\n current_values = nml.variables[ind].value\n values = list(set(current_values + values))\n\n new_variable = Variable(name, values)\n nml.add_variable(new_variable, modify=True)\n\n self.mainparent.statusbar.SetStatusText(\"Added {} {} quantities\".format(self.num_values, self.output_type), 0)", "def save(self):\n args = list(map(self._get_value_or_default, self.COLUMN_TO_FILED))\n columns = list(map(lambda k: k, self.COLUMN_TO_FILED))\n sql = 'INSERT INTO {} ({}) VALUES({});'.format(\n self.TABLE_NAME,\n ', '.join(columns),\n '%s,'.join(' '*len(columns)) + '%s'\n )\n cursor = yield self._pool.execute(sql, args)\n app_log.info('save arg %s', args)\n count = cursor.rowcount\n result = True if count == 1 else False\n return result", "def _save_data(self):\n super()._save_data()\n if self.data:\n self.state['inserted_elements'] = len(SeaLevelRiseMeasure.objects.bulk_create(self.data))\n self.logger.info('Successfully saved %d elements.' 
% self.state['inserted_elements'])\n else:\n self.logger.info('No elements were saved because no elements were available.')\n self.data = None", "def save(self):\n raise NotImplementedError()", "def save(self):\n\n pass", "def _storeData(self, data, table, query=None):\n print ('Storing data')\n conn = dbo.getConnection()\n\n if query == None:\n num_cols = len(data[0])\n cols = ','.join(['%s ' for i in range(0, num_cols)])\n query = \"INSERT INTO \" + table + \" VALUES (\" + cols + \")\"\n\n dbo.execute_query(conn, query, data, multiple=True)\n dbo.closeConnection(conn)\n return", "def save_many(self, values, expires_in=None):\n raise NotImplementedError()", "def save_values(self):\n # TODO: Add self.prefix and extension\n NetworkTables.saveEntries(self.file.get_filename(), prefix='/vision/' + self.name + '_')", "def save (self):\n pass", "def save(self) -> None:\n pass", "def save(self) -> None:\n pass", "def save(self) -> None:\n pass", "def save_data(self):\n db.session.add(self)\n db.session.commit( )", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self, *args, **kwargs):\n pass", "def save_db(self) -> None:", "def save(self)->None:\n database.cursor.execute(\"INSERT INTO votes(question,user_id,value) VALUES(%s,%s,%s) RETURNING id\", (\n self.question,\n self.user,\n self.value\n ))\n super().save()", "def save(self):\r\n if self.table == \"Category\":\r\n Category(self.db).insert_query(self.elements)\r\n elif self.table == \"Brand\":\r\n Brand(self.db).insert_query(self.elements)\r\n elif self.table == \"Store\":\r\n Store(self.db).insert_query(self.elements)\r\n elif self.table == \"Product\":\r\n Product(self.db).insert_query(self.elements)", "def __store(self):\n # connection strings are accessed directly by dbo\n dbo = dbo.connect()\n dbo.save(self.__to_dict())\n # not supre important to call but a nice idea\n dbo.destroy()", "def save(self):\n self.add_statements(self.triples())", "def save_run(self):\n values = get_serializeArray_form_values(self.request)\n\n plates, ichips, aliquots = self.transmogrify_inputs(values['plates'])\n plates = self.remove_empty_plates(plates)\n plates = self.reorder_plates(plates)\n\n solutions = [values[x] for x in values if x.startswith('solution-')]\n\n run = self.context\n run.plates = plates\n run.run_date = values.get('run_date', run.run_date)\n run.solutions = solutions", "def save_state(self) -> None:\n raise NotImplementedError(\"Save state is is not implemented.\")", "def _save_data(self):\n super()._save_data()\n if self.data:\n # FIXES [BUG-034].\n WeatherForecastObservation.objects.all().delete()\n self.state['inserted_elements'] = len(WeatherForecastObservation.objects.bulk_create(self.data))\n self.logger.info('Successfully saved %d elements.' 
% self.state['inserted_elements'])\n else:\n self.logger.info('No elements were saved because no elements were available.')\n self.data = None", "def save(self, *args, **kwargs):\n raise NotImplementedError()", "def save(self, *args, **kwargs):\n raise NotImplementedError('missing data mixin')", "def make_save(self):\n\t\tsave = {}\n\t\tsave['p'] = self.p\n\t\tsave['injail'] = self.injail.copy()\n\t\tsave['tile'] = self.tile.copy()\n\t\tsave['bal'] = self.bal.copy()\n\t\tsave['goojf'] = self.goojf.copy()\n\t\tsave['isalive'] = self.isalive.copy()\n\t\tsave['jailturn'] = self.jailturn.copy()\n\t\tsave['ownedby'] = self.ownedby.copy()\n\t\tsave['numhouse'] = self.numhouse.copy()\n\t\tsave['ismortgaged'] = self.ismortgaged.copy()\n\t\tsave['num'] = self.num\n\t\tsave['numalive'] = self.numalive\n\t\tsave['uid'] = self.uid.copy()\n\t\tsave['freeparkingsum'] = self.freeparkingsum\n\t\tself.autosave = save", "def data_insertion(self, data_dict: Dict):\n\n #self.__create_db()\n self.__create_table()\n\n self.current_state = self.system.insert().values(\n timestamp = data_dict['timestamp'],\n vibration_sensor = data_dict['vibration_sensor'],\n flow = data_dict['flow'],\n pressure = data_dict['pressure'],\n power_consumption = data_dict['power_consumption'],\n failure_times = data_dict['failure_times'],\n operational = data_dict['operational']\n )\n\n self.connection.execute(self.current_state)\n\n if self.max_table_size is not None:\n self.__cleanup_dt()", "def save(self):\n self.__db.commit()", "def save(self, *args, **kwargs) -> None:\n pass", "def save(self, *args, **kwargs) -> None:\n pass", "def save(self, *args, **kwargs) -> None:\n pass", "def save(self):\n # TODO (Pierre): code", "def save(self, *args, **kwargs) -> Any:\n pass", "def saveGridValues( self ):\n assert(self.hasSaveMemory)\n assert(self.notSaved)\n\n self._my_data[self._saveIdx][:self._layout.size] = self._f[:].flatten()\n self._savedLayout = self._current_layout_name\n\n self.notSaved = False", "def _save(self):\n self.logger.debug(\"Saving to persistence\")\n try:\n data = self.persistence_serialize()\n except NotImplementedError:\n # allow backwards compatibility or persisted_values way\n # generate item to be persisted by gathering all variables\n # to be persisted into a dictionary\n data = {persisted_var: getattr(self, persisted_var)\n for persisted_var in self.persisted_values()}\n\n # save generated dictionary under block's id\n self._persistence.save(data, self.id())", "def setValues(self):\n pass", "def setValues(self):\n pass", "def setValues(self):\n pass", "def setValues(self):\n pass", "def setValues(self):\n pass", "def setValues(self):\n pass", "def save(self):\n self.wallet.storage.put(\n \"slp_data_version\", None\n ) # clear key of other older formats.\n data = {\n \"validity\": self.validity,\n \"token_quantities\": {\n k: [[v0, v1] for v0, v1 in v.items()]\n for k, v in self.token_quantities.items()\n },\n \"txo_byaddr\": {\n k.to_storage_string(): list(v) for k, v in self.txo_byaddr.items()\n },\n \"version\": self.DATA_VERSION,\n }\n self.wallet.storage.put(\"slp\", data)", "def values(self, values):\n self.data.values = values", "def save(self, key, value):\n # deepcopy so that later modifications to value aren't reflected in the db\n self.data[key] = copy.deepcopy(value)", "def save_to_settings(self):\n for group_name, group in self.input_elements.items():\n for param, widget in group.items():\n val = widget.get_value()\n # Convert value to integer of float\n try:\n val = float(val)\n if 
val.is_integer():\n val = int(val)\n except:\n pass\n self.settings['pharos'][group_name][param] = val", "def save(self):\n\n # If 'id' wasn't blank, it's added to the list of dirty fields; this\n # way the entry will be updated in the SugarCRM connection.\n if self['id'] != '':\n self._dirty_fields.append('id')\n \n # nvl is the name_value_list, which has the list of attributes.\n nvl = []\n for field in set(self._dirty_fields):\n # Define an individual name_value record.\n nv = {}\n nv['name'] = field\n nv['value'] = self[field]\n nvl.append(nv)\n \n # Use the API's set_entry to update the entry in SugarCRM.\n result = self._module._connection.set_entry(self._module._name, nvl)\n self._fields['id'] = result['id']\n self._dirty_fields = []\n\n return True", "def save(self, *args, **kwargs):\n return", "def store(self, args):\n pass", "def _save (self, expires):\n\n pickled_data = pickle.dumps (self._data, self.pickle_protocol)\n\n self._delete ()\n self._exec (\n \"\"\"\\\n insert into table_name (id, expires, data)\n values (%(id)s, %(expires)s, %(data)s)\n \"\"\",\n data = pickled_data,\n expires = expires\n )", "def save(self):\n self.db.commit()", "def _db_store(self, labels: Sequence[Tuple[int, np.ndarray]], table: str) -> None:\r\n # Labels are expected to be\r\n # [\r\n # (class, points),\r\n # (class, points)\r\n # .\r\n # .\r\n # .\r\n # ]\r\n # Where points are np.arrays\r\n # There should also always be one fish in the scene => len(labels) >= 1\r\n\r\n n_points = np.prod(labels[0][1].shape)\r\n\r\n gen = ((self.n, class_, *points.ravel().round(3)) for class_, points in labels)\r\n\r\n # First two \"?\" are for image id and class respectively, rest are for points\r\n sql_command = (\r\n f'INSERT INTO {table} VALUES {(\"?\",\"?\",*[\"?\" for i in range(n_points)])}'\r\n ).replace(\"'\", \"\")\r\n\r\n self.cursor.executemany(sql_command, gen)", "def save(self):\r\n if self.instance is None:\r\n raise CQLEngineException(\"DML Query intance attribute is None\")\r\n assert type(self.instance) == self.model\r\n\r\n nulled_fields = set()\r\n if self.instance._has_counter or self.instance._can_update():\r\n return self.update()\r\n else:\r\n insert = InsertStatement(self.column_family_name, ttl=self._ttl, timestamp=self._timestamp)\r\n for name, col in self.instance._columns.items():\r\n val = getattr(self.instance, name, None)\r\n if col._val_is_null(val):\r\n if self.instance._values[name].changed:\r\n nulled_fields.add(col.db_field_name)\r\n continue\r\n insert.add_assignment_clause(AssignmentClause(\r\n col.db_field_name,\r\n col.to_database(getattr(self.instance, name, None))\r\n ))\r\n\r\n # skip query execution if it's empty\r\n # caused by pointless update queries\r\n if not insert.is_empty:\r\n self._execute(insert)\r\n\r\n # delete any nulled columns\r\n self._delete_null_columns()", "def values(self, values):\n\n self._values = values", "def values(self, values):\n\n self._values = values", "def save(self):\n self.rpc.call(MsfRpcMethod.CoreSave)", "def _db(self, value):", "def save (self):\n if self.newobj:\n using_sequence = self.sequence ()\n self.keyvals['id'] = using_sequence\n self.seq = using_sequence\n else:\n using_sequence = self.seq\n for key, val in self.keyvals.items ():\n r_key = self.prepare_key (key, using_sequence)\n r.set (r_key, val)\n self.keyvals = {}\n self.newobj = False", "def save(self):\n for t in self.ace_types:\n self.api.api_request(\"PUT\", self.url + t, data={t: self[t]})", "def save_state(self):\n pass", "def save(self):\n 
self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.session.commit()", "def store(self, key, value):\n pass", "def save():", "def _save_state(self):\n try:\n param_value = json.dumps({\n self._TIME_KEY: self.last_timestamp,\n self._STATE_KEY: self.current_state,\n self._CONTEXT_KEY: self.context,\n })\n except TypeError as err:\n raise AppStateError('Could not serialize state for name \\'{}\\'. Error: '\n '{}'.format(self._state_name, str(err)))\n\n @backoff.on_exception(backoff.expo,\n ClientError,\n max_tries=self.MAX_STATE_SAVE_TRIES,\n jitter=backoff.full_jitter)\n def save():\n \"\"\"Function to save the value of the state dictionary to parameter store\"\"\"\n self.SSM_CLIENT.put_parameter(\n Name=self._state_name,\n Description=self._STATE_DESCRIPTION.format(self._app_type, self.function_name),\n Value=param_value,\n Type='SecureString',\n Overwrite=True\n )\n try:\n save()\n except ClientError as err:\n raise AppStateError('Could not save current state to parameter '\n 'store with name \\'{}\\'. Response: '\n '{}'.format(self._state_name, err.response))", "def save(self):\n\n self.__session.commit()", "def save(self):\n\n self.__session.commit()", "def action_store(raw_val):\n if raw_val not in EMPTY_VALUES:\n return auto_type_convert(raw_val)\n else:\n return raw_val", "def save():\n pass", "def save(self):\n db.session.commit()", "def save(self) -> Any:\n if self._unsaved_values:\n requestor = Requestor(local_api_key=self._api_key)\n params = {}\n for k in self._unsaved_values:\n params[k] = getattr(self, k)\n if type(params[k]) is EasyPostObject:\n params[k] = params[k].flatten_unsaved()\n params = {self.snakecase_name(): params}\n url = self.instance_url()\n response, api_key = requestor.request(method=RequestMethod.PATCH, url=url, params=params)\n self.refresh_from(values=response, api_key=api_key)\n\n return self", "def save(self, db):\n pass", "def save(self, *args, **kwargs):\n super(self.__class__, self).save(*args, **kwargs)", "def Save(self) -> None:\n self.__conn.commit()" ]
[ "0.74617493", "0.7196511", "0.7009068", "0.6361609", "0.62717307", "0.6257779", "0.6165036", "0.608339", "0.60653645", "0.60352385", "0.6013189", "0.5978669", "0.59265023", "0.58840144", "0.58840144", "0.58840144", "0.5868349", "0.5846248", "0.58364046", "0.5794738", "0.5792802", "0.57707757", "0.5718734", "0.5717751", "0.57060856", "0.5697715", "0.5697715", "0.5697715", "0.5693904", "0.5675346", "0.5675346", "0.5675346", "0.5675346", "0.5675346", "0.56644595", "0.5652366", "0.5619787", "0.5612274", "0.56110257", "0.55943507", "0.5588275", "0.5551418", "0.55385196", "0.5538236", "0.55362123", "0.5530031", "0.55271536", "0.5521293", "0.5515727", "0.5515727", "0.5515727", "0.5510913", "0.5510045", "0.55095357", "0.55049026", "0.54772395", "0.54772395", "0.54772395", "0.54772395", "0.54772395", "0.54772395", "0.5469215", "0.5462068", "0.54498744", "0.54385895", "0.5435041", "0.5432171", "0.5426199", "0.5424075", "0.54145455", "0.539836", "0.5394503", "0.5392975", "0.5392975", "0.5373807", "0.5368814", "0.53650635", "0.5364264", "0.53566164", "0.5353436", "0.5353436", "0.5353436", "0.5353436", "0.5353436", "0.5353436", "0.5353436", "0.5353436", "0.53526354", "0.53493714", "0.5346257", "0.53336126", "0.5332692", "0.5332692", "0.53250813", "0.53197896", "0.5319028", "0.53066915", "0.53057647", "0.52985334", "0.52978396" ]
0.6750283
3
This method must be overridden by inheriting classes. The code inside this method will be executed when the
def do_workload(self):
    pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def override(self):\n return None", "def __call__(self):\r\n raise NotImplementedError('override me')", "def __call__(self):\n\t\treturn", "def run(self):\n raise Exception('derived class should redefine this function')", "def __call__( self ):\n pass", "def runThis(self):\n print(\"Override method..\")", "def post_execute(self):", "def do_before(self):\r\n pass", "def pre_execute(self):", "def _hook(self):", "def override(self,scope):", "def __call__(self):\n pass", "def __call__(self):\n pass", "def post_init(self):\n\t\tpass", "def __post_init__(self):\n pass", "def on_before_execution(self):\n pass", "def post_process(self):\n pass", "def post_process(self):\n pass", "def post_process(self):\n pass", "def post_process(self):\n pass", "def post_process(self):\n pass", "def postLoad(self):\n pass", "def on(self) -> None:", "def processing(self):\n pass", "def on_run(self):\r\n\r\n\t\tpass", "def __call__(self):\n raise NotImplementedError", "def think(self):\n pass", "def postRun(self):\n pass", "def beforeUpdate(self):", "def on(self):", "def on_execute(self):\n pass", "def _post_init(self):\n pass", "def afterInit(self):", "def __call__(self) -> None:", "def on(self):\n raise NotImplementedError", "def run(self):\n raise NotImplementedError # implement in subclass", "def __post_init__(self):\n super().__post_init__()", "def run(self): \r\n return", "def post_start(self):", "def handle(self):", "def __call__(self):\n raise NotImplementedError()", "def _postprocess(self):", "def post_build(self):", "def done(self):\n assert False, \"Deriving class must implement\"", "def process(self):\n raise NotImplementedError('Method must be implemented by subclass.')", "def final(self):\n pass", "def _post_load(self):\n pass", "def before(self) -> None:\n pass", "def on_load(self):\n pass", "def on_load(self):\n pass", "def callback(self):\n pass # pragma: no cover", "def __post_init__(self) -> 'None':", "def activated(self):", "def post_exec(self):\n raise NotImplementedError(\"Must implement in frontend subclass.\")", "def _update(self):\n raise _InheritanceError('Function not defined')", "def use(self):", "def _post_init(self) -> None:\n return", "def _afterInit(self):\n pass", "def process(self):\n pass", "def process(self):", "def process(self):", "def process(self):", "def PreExecute(self):\n return True", "def post_build(self):\n pass", "def run(self):\r\n pass", "def perform(self):\n pass", "def primary(self):\n ...", "def _prepare(self):", "def _prepare(self):", "def support(self):", "def on_load(self):", "def onUpdated(self):", "def on_run(self):\n pass", "def fire(self):", "def run(self):\n raise NotImplemented(\"Inheriting classes should implement this\")", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def run(self):", "def on_all(self):\n raise NotImplementedError(\"Base class: cannot be called directly\")", "def dummy_update( self ):\r\n pass", "def extension (self):\n assert False, \"To be implemented by child\"", "def do_after(self):\r\n pass", "def _proceed(self):\n raise NotImplementedError", "def on(self) -> None:\n ...", "def onInit(self):\n pass", "def run(self):\n raise NotImplementedError(\"Subclass must implement abstract method\")", "def _on_finalize(self):\n pass", "def _update(self):\n pass", "def begin(self):\n pass", "def _run(self):\n # We usually override this in __init__\n # pylint: disable=method-hidden\n return", "def PostExecute(self):\n return 
True", "def intuit(self):\n raise NotImplemented()", "def _activate(self):\n raise NotImplementedError('Subclasses must implement _activate()')", "def method(self):" ]
[ "0.75245625", "0.7466125", "0.73410434", "0.7315841", "0.7287032", "0.72735256", "0.7255045", "0.72209936", "0.7202259", "0.7187639", "0.71060145", "0.7067497", "0.7067497", "0.70535463", "0.6988847", "0.6977445", "0.6951473", "0.6951473", "0.6951473", "0.6951473", "0.6951473", "0.6925192", "0.6922637", "0.69056153", "0.6902238", "0.6894131", "0.6888244", "0.68729365", "0.6853604", "0.684849", "0.6829954", "0.6825279", "0.6823618", "0.6808933", "0.6794548", "0.67773384", "0.6763133", "0.6758484", "0.6754597", "0.67532533", "0.67504275", "0.67330825", "0.6704194", "0.6697284", "0.6692392", "0.6684243", "0.667754", "0.66752344", "0.66644895", "0.66644895", "0.66598004", "0.66420317", "0.6641442", "0.6635525", "0.6615393", "0.6606878", "0.6603671", "0.65937364", "0.6589668", "0.6578056", "0.6578056", "0.6578056", "0.6570187", "0.6565842", "0.655834", "0.6551", "0.65496325", "0.6543441", "0.6543441", "0.65386724", "0.6538242", "0.6511533", "0.65009403", "0.64912206", "0.64561564", "0.6455675", "0.6455675", "0.6455675", "0.6455675", "0.6455675", "0.6455675", "0.6455675", "0.6455675", "0.6455675", "0.6455675", "0.64552015", "0.64461434", "0.64415425", "0.6441318", "0.6432626", "0.642422", "0.64156544", "0.64116424", "0.640876", "0.6404345", "0.6403967", "0.640324", "0.640092", "0.63893896", "0.638863", "0.6388539" ]
0.0
-1
Sets this operation's values from module metadata
def set_values(self,module):
    if type(module) == dict:
        self.set_value("name",module["name"])
        self.set_value("hrname",module["hrname"])
        self.set_value("version_major",module["version_major"])
        self.set_value("version_minor",module["version_minor"])
        self.set_value("revision",module["revision"])
        if module.has_key("signature"):
            self.set_value("signature",module["signature"])
    elif module.__class__.__name__ == "Module":
        pass #TODO IMPLEMENT / DISCUSS AFTER IMPLEMENTING MODULE-SUBSYSTEM
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_metadata(self, data):\r\n pass", "def __init__(self):\n\n self.operations = {}", "def PopulateModuleMetadata(self, mod, mojom_file):\n mod.name = os.path.basename(mojom_file.file_name)\n mod.path = mojom_file.file_name\n mod.namespace = mojom_file.module_namespace\n if mojom_file.attributes:\n mod.attributes = {attr.key: attr.value for attr in mojom_file.attributes}", "def set_metadata(self, metadata):\n return self.client._perform_json(\n \"PUT\", \"/projects/%s/recipes/%s/metadata\" % (self.project_key, self.recipe_name),\n body=metadata)", "def __init__(self, operations = []):\n self.operations = operations", "def __init__(self, op, op_param_list, op_reg_list):\n self. operation = {\n 'op': op,\n 'op_param_list': op_param_list,\n 'op_reg_list': op_reg_list\n }", "def update_metadata(self):\n self.data[\"keywords\"] = self.repo.topics(self.data.get(\"keywords\", []))\n self.data[\"description\"] = self.data.get(\"description\") or self.repo.description\n self.data[\"codeRepository\"] = (\n self.data.get(\"codeRepository\") or self.repo.html_url\n )\n self.data[\"name\"] = self.data.get(\"name\") or self.repo.name\n self.data[\"issueTracker\"] = (\n self.data.get(\"issueTracker\") or self.repo.issues_url\n )\n self.data[\"license\"] = self.data.get(\"license\") or self.repo.license", "def exec_module(self, **kwargs):\n\n for key in list(self.module_arg_spec.keys()) + ['tags']:\n if hasattr(self, key):\n setattr(self, key, kwargs[key])\n elif kwargs[key] is not None:\n if key == \"location\":\n self.parameters[\"location\"] = kwargs[key]\n elif key == \"plan\":\n self.parameters[\"plan\"] = kwargs[key]\n elif key == \"open_shift_version\":\n self.parameters[\"open_shift_version\"] = kwargs[key]\n elif key == \"public_hostname\":\n self.parameters[\"public_hostname\"] = kwargs[key]\n elif key == \"fqdn\":\n self.parameters[\"fqdn\"] = kwargs[key]\n elif key == \"network_profile\":\n self.parameters[\"network_profile\"] = kwargs[key]\n elif key == \"router_profiles\":\n self.parameters[\"router_profiles\"] = kwargs[key]\n elif key == \"master_pool_profile\":\n ev = kwargs[key]\n if 'vm_size' in ev:\n if ev['vm_size'] == 'standard_d2s_v3':\n ev['vm_size'] = 'Standard_D2s_v3'\n elif ev['vm_size'] == 'standard_d4s_v3':\n ev['vm_size'] = 'Standard_D4s_v3'\n if 'os_type' in ev:\n if ev['os_type'] == 'linux':\n ev['os_type'] = 'Linux'\n elif ev['os_type'] == 'windows':\n ev['os_type'] = 'Windows'\n self.parameters[\"master_pool_profile\"] = ev\n elif key == \"agent_pool_profiles\":\n ev = kwargs[key]\n if 'vm_size' in ev:\n if ev['vm_size'] == 'standard_d2s_v3':\n ev['vm_size'] = 'Standard_D2s_v3'\n elif ev['vm_size'] == 'standard_d4s_v3':\n ev['vm_size'] = 'Standard_D4s_v3'\n if 'os_type' in ev:\n if ev['os_type'] == 'linux':\n ev['os_type'] = 'Linux'\n elif ev['os_type'] == 'windows':\n ev['os_type'] = 'Windows'\n self.parameters[\"agent_pool_profiles\"] = ev\n elif key == \"auth_profile\":\n self.parameters[\"auth_profile\"] = kwargs[key]\n\n old_response = None\n response = None\n\n self.mgmt_client = self.get_mgmt_svc_client(ContainerServiceClient,\n base_url=self._cloud_environment.endpoints.resource_manager)\n\n resource_group = self.get_resource_group(self.resource_group)\n\n if \"location\" not in self.parameters:\n self.parameters[\"location\"] = resource_group.location\n\n old_response = self.get_openshiftmanagedcluster()\n\n if not old_response:\n self.log(\"Open Shift Managed Cluster instance doesn't exist\")\n if self.state == 'absent':\n self.log(\"Old instance didn't 
exist\")\n else:\n self.to_do = Actions.Create\n else:\n self.log(\"Open Shift Managed Cluster instance already exists\")\n if self.state == 'absent':\n self.to_do = Actions.Delete\n elif self.state == 'present':\n self.log(\"Need to check if Open Shift Managed Cluster instance has to be deleted or may be updated\")\n self.to_do = Actions.Update\n\n if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):\n self.log(\"Need to Create / Update the Open Shift Managed Cluster instance\")\n\n if self.check_mode:\n self.results['changed'] = True\n return self.results\n\n response = self.create_update_openshiftmanagedcluster()\n\n if not old_response:\n self.results['changed'] = True\n else:\n self.results['changed'] = old_response.__ne__(response)\n self.log(\"Creation / Update done\")\n elif self.to_do == Actions.Delete:\n self.log(\"Open Shift Managed Cluster instance deleted\")\n self.results['changed'] = True\n\n if self.check_mode:\n return self.results\n\n self.delete_openshiftmanagedcluster()\n # make sure instance is actually deleted, for some Azure resources, instance is hanging around\n # for some time after deletion -- this should be really fixed in Azure.\n while self.get_openshiftmanagedcluster():\n time.sleep(20)\n else:\n self.log(\"Open Shift Managed Cluster instance unchanged\")\n self.results['changed'] = False\n response = old_response\n\n if self.state == 'present':\n self.results.update(self.format_item(response))\n return self.results", "def adjust_custom_op_info(compute_op_info):\n py_module_path = compute_op_info[\"py_module_path\"]\n if os.path.isfile(py_module_path):\n py_module_path, file_name = os.path.split(py_module_path)\n module_name, _ = os.path.splitext(file_name)\n compute_op_info[\"py_module_path\"] = py_module_path\n compute_op_info[\"module_name\"] = module_name", "def setValues(self):\n pass", "def setValues(self):\n pass", "def setValues(self):\n pass", "def setValues(self):\n pass", "def setValues(self):\n pass", "def setValues(self):\n pass", "def set_invocation_metadata(self, items: Tuple[Tuple[str, str]]):\n self._invocation_metadata = items", "def _set_attributes(self):", "def _setModule(self, module):\n self._module = module\n # copy the original module for exploration\n self.n_values = deepcopy(module)\n self.n_values._params[:] = 0", "def __setattr__(self, name, value):\n if isinstance(value, torch.jit.ScriptModule):\n object.__setattr__(self, name, value)\n elif isinstance(value, FrameworkTensor):\n self.role.register_state_tensor(value)\n self.state_attributes[name] = value\n elif isinstance(value, FrameworkLayerModule):\n for param in value.parameters():\n self.role.register_state_tensor(param)\n self.state_attributes[name] = value\n else:\n object.__setattr__(self, name, value)", "def update_metadata(self):\n parser = GenericParser(\n fn_re='{}/(e\\d+s\\d+)_.*/Production.nc'.format(self.data_folder),\n group_names=['sim'],\n group_transforms=[lambda x: x],\n top_fn='',\n step_ps=self.timestep\n )\n meta = gather_metadata('{}/e*/*nc'.format(self.data_folder), parser)\n meta['top_fn'] = sorted(glob('{}/e*/structure.prmtop'.format(self.input_folder)))\n self.meta = meta", "def __init__(self):\r\n self.operation_map = {}", "def __init__(self, rpc, mtype, mname):\n\n self.moduletype = mtype\n self.modulename = mname\n self.rpc = rpc\n self._info = rpc.call(MsfRpcMethod.ModuleInfo, [mtype, mname])\n property_attributes = [\"advanced\", \"evasion\", \"options\", \"required\", \"runoptions\"]\n for k in self._info:\n if k not in 
property_attributes:\n # don't try to set property attributes\n setattr(self, k, self._info.get(k))\n self._moptions = rpc.call(MsfRpcMethod.ModuleOptions, [mtype, mname])\n self._roptions = []\n self._aoptions = []\n self._eoptions = []\n self._runopts = {}\n for o in self._moptions:\n if self._moptions[o]['required']:\n self._roptions.append(o)\n if self._moptions[o]['advanced']:\n self._aoptions.append(o)\n if self._moptions[o]['evasion']:\n self._eoptions.append(o)\n if 'default' in self._moptions[o]:\n self._runopts[o] = self._moptions[o]['default']\n\n if mtype in [\"auxiliary\", \"post\"]:\n d_act = self._info.get('default_action')\n if d_act is not None:\n act = 'ACTION'\n self._moptions[act] = {\"default\": d_act}\n self._runopts[act] = self._moptions[act]['default']", "def set_metadata(self, key, val):\n \n self.metadata[key] = val", "def setMetadata(self, metadata):\n document_properties = self.document_loaded.getDocumentProperties()\n user_defined_properties = document_properties.getUserDefinedProperties()\n new_properties = []\n for prop, value in metadata.items():\n for container in [document_properties, user_defined_properties]:\n current_value = getattr(container, prop, None)\n if current_value is not None:\n if isinstance(current_value, tuple):\n if isinstance(value, list):\n value = tuple(value)\n elif isinstance(value, basestring):\n # BBB: old ERP5 code sends Keywords as a string\n # separated by a whitespace.\n value = tuple(value.split(' '))\n if isinstance(value, type(current_value)):\n setattr(container, prop, value)\n break\n else:\n new_properties.append([prop, value])\n for prop, value in new_properties:\n if isinstance(value, basestring):\n user_defined_properties.addProperty(prop, 0, '')\n user_defined_properties.setPropertyValue(prop, value)\n self.document_loaded.store()\n self.document_loaded.dispose()", "def __setstate__(self,values):\n self.initDefault()\n setter = object.__setattr__\n for value,attr in zip(values,self.persistent):\n setter(self,attr,value)\n if self.dirty_sizeCrc == None:\n self.dirty_sizeCrc = {} #--Use empty dict instead.\n self.refreshDataSizeCrc()", "def set_metadata(self, metadata):\n self.metadata = metadata\n return self", "def __metadata__(self):\n raise NotImplementedError", "def operation(self, operation: str):\n\n self._operation = operation", "def __init__(self, operation_inputs):\n\n full_operation_name = ctx.operation.name\n self.operation_name = full_operation_name.split('.').pop()\n\n # These should not make their way into the Operation inputs.\n os.environ['_PAGINATION_OFFSET'] = \\\n text_type(operation_inputs.pop('pagination_offset', 0))\n os.environ['_PAGINATION_SIZE'] = \\\n text_type(operation_inputs.pop('pagination_size', 1000))\n\n # cloudify client\n self.client_config = get_desired_value(\n 'client', operation_inputs,\n ctx.instance.runtime_properties,\n ctx.node.properties\n )\n\n if self.client_config:\n self.client = CloudifyClient(**self.client_config)\n else:\n self.client = manager.get_rest_client()\n\n # plugins\n self.plugins = get_desired_value(\n 'plugins', operation_inputs,\n ctx.instance.runtime_properties,\n ctx.node.properties\n )\n\n # secrets\n self.secrets = get_desired_value(\n 'secrets', operation_inputs,\n ctx.instance.runtime_properties,\n ctx.node.properties\n )\n\n # resource_config\n self.config = get_desired_value(\n 'resource_config', operation_inputs,\n ctx.instance.runtime_properties,\n ctx.node.properties)\n\n # Blueprint-related properties\n self.blueprint = 
self.config.get('blueprint', {})\n self.blueprint_id = self.blueprint.get('id') or ctx.instance.id\n self.blueprint_file_name = self.blueprint.get('main_file_name')\n self.blueprint_archive = self.blueprint.get('blueprint_archive')\n\n # Deployment-related properties\n self.deployment = self.config.get('deployment', {})\n self.deployment_id = self.deployment.get('id') or ctx.instance.id\n self.deployment_inputs = self.deployment.get('inputs', {})\n self.deployment_outputs = self.deployment.get('outputs')\n self.deployment_all_outputs = self.deployment.get('all_outputs', True)\n self.deployment_logs = self.deployment.get('logs', {})\n\n # Node-instance-related properties\n self.node_instance_proxy = self.config.get('node_instance')\n\n # Execution-related properties\n self.workflow_id = \\\n operation_inputs.get('workflow_id',\n 'create_deployment_environment')\n self.workflow_state = \\\n operation_inputs.get(\n 'workflow_state',\n 'terminated')\n self.reexecute = \\\n self.config.get('reexecute') \\\n or ctx.instance.runtime_properties.get('reexecute') \\\n or False\n\n # Polling-related properties\n self.interval = operation_inputs.get('interval', POLLING_INTERVAL)\n self.state = operation_inputs.get('state', 'terminated')\n self.timeout = operation_inputs.get('timeout', EXECUTIONS_TIMEOUT)\n\n # This ``execution_id`` will be set once execute workflow done\n # successfully\n self.execution_id = None", "def set_params():\n global module \n global ora_inst\n global response_loc\n\n module_args=dict(\n ora_inst=dict(type='str', required=True),\n response_loc=dict(type='str', required=True)\n )\n\n module=AnsibleModule(\n argument_spec=module_args\n )\n\n ora_inst = module.params['ora_inst']\n response_loc = module.params['response_loc']", "def __init__(self, *args, **kwargs):\r\n super(XModuleDescriptor, self).__init__(*args, **kwargs)\r\n # update_version is the version which last updated this xblock v prev being the penultimate updater\r\n # leaving off original_version since it complicates creation w/o any obv value yet and is computable\r\n # by following previous until None\r\n # definition_locator is only used by mongostores which separate definitions from blocks\r\n self.edited_by = self.edited_on = self.previous_version = self.update_version = self.definition_locator = None\r\n self.xmodule_runtime = None", "def metadata(self, metadata):\n\n self._metadata = metadata", "def metadata(self, metadata):\n\n self._metadata = metadata", "def metadata(self, metadata):\n\n self._metadata = metadata", "def metadata(self, metadata):\n\n self._metadata = metadata", "def metadata(self, metadata):\n\n self._metadata = metadata", "def metadata(self, metadata):\n\n self._metadata = metadata", "def metadata(self, metadata):\n\n self._metadata = metadata", "def set_token_metadata(self, metadata):\n self.update_initial_storage(\n token_metadata = sp.big_map(\n {\n 0: sp.record(token_id = 0, token_info = self.normalize_metadata(metadata))\n },\n tkey = sp.TNat,\n tvalue = sp.TRecord(token_id = sp.TNat, token_info = sp.TMap(sp.TString, sp.TBytes))\n )\n )", "def __init__(self):\r\n self.inputs = []\r\n self.op = None\r\n self.const_attr = None\r\n self.name = \"\"\r\n self.dtype = None", "def set_metadata(self, metadata):\n return self.manager.set_metadata(self, metadata)", "def set_metadata(self, metadata):\n return self.manager.set_metadata(self, metadata)", "def _configure(self):\n Values._configure(self)\n self.values = [self.inventory.one, self.inventory.two]\n return", "def 
set_properties_from_api_repr(self, resource):\n self.name = resource.get('name')\n self.number = resource['projectNumber']\n self.labels = resource.get('labels', {})\n self.status = resource['lifecycleState']", "def update_metadata(self, key, value):\n sp.verify(self.is_administrator(sp.sender), FA12_Error.NotAdmin)\n self.data.metadata[key] = value", "def __init__(self, op_name, attr_key, attr_value):\n self.op = relay.op.get(op_name)\n self.attr_key = attr_key\n self.attr_value = attr_value", "def __setattr__(self, key, value):\n super(SPA, self).__setattr__(key, value)\n if isinstance(value, Module):\n if value.label is None:\n value.label = key\n self._modules[key] = value\n for k, (obj, v) in iteritems(value.inputs):\n if type(v) == int:\n value.inputs[k] = (obj, self.get_default_vocab(v))\n self.config[obj].vocab = value.inputs[k][1]\n for k, (obj, v) in iteritems(value.outputs):\n if type(v) == int:\n value.outputs[k] = (obj, self.get_default_vocab(v))\n self.config[obj].vocab = value.outputs[k][1]\n\n value.on_add(self)", "def __init__(self):\n self.inputs = []\n self.op = None\n self.const_attr = None\n self.name = \"\"", "def set_operation(self, operation):\n self._data_dict[self.OP_GID] = operation.gid", "def __init__(self, **kwargs):\n\n super(RefactoringOperation, self).__init__(**kwargs)", "def exec_module(self, **kwargs):\n\n for key in list(self.module_arg_spec.keys()):\n if hasattr(self, key):\n setattr(self, key, kwargs[key])\n elif kwargs[key] is not None:\n if key == \"kind\":\n self.parameters[\"kind\"] = kwargs[key]\n elif key == \"server_key_type\":\n self.parameters[\"server_key_type\"] = _snake_to_camel(kwargs[key], True)\n elif key == \"uri\":\n self.parameters[\"uri\"] = kwargs[key]\n elif key == \"thumbprint\":\n self.parameters[\"thumbprint\"] = kwargs[key]\n elif key == \"creation_date\":\n self.parameters[\"creation_date\"] = kwargs[key]\n\n old_response = None\n response = None\n\n self.mgmt_client = self.get_mgmt_svc_client(SqlManagementClient,\n base_url=self._cloud_environment.endpoints.resource_manager)\n\n resource_group = self.get_resource_group(self.resource_group)\n\n old_response = self.get_serverkey()\n\n if not old_response:\n self.log(\"Server Key instance doesn't exist\")\n if self.state == 'absent':\n self.log(\"Old instance didn't exist\")\n else:\n self.to_do = Actions.Create\n else:\n self.log(\"Server Key instance already exists\")\n if self.state == 'absent':\n self.to_do = Actions.Delete\n elif self.state == 'present':\n self.log(\"Need to check if Server Key instance has to be deleted or may be updated\")\n self.to_do = Actions.Update\n\n if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):\n self.log(\"Need to Create / Update the Server Key instance\")\n\n if self.check_mode:\n self.results['changed'] = True\n return self.results\n\n response = self.create_update_serverkey()\n\n if not old_response:\n self.results['changed'] = True\n else:\n self.results['changed'] = old_response.__ne__(response)\n self.log(\"Creation / Update done\")\n elif self.to_do == Actions.Delete:\n self.log(\"Server Key instance deleted\")\n self.results['changed'] = True\n\n if self.check_mode:\n return self.results\n\n self.delete_serverkey()\n # make sure instance is actually deleted, for some Azure resources, instance is hanging around\n # for some time after deletion -- this should be really fixed in Azure\n while self.get_serverkey():\n time.sleep(20)\n else:\n self.log(\"Server Key instance unchanged\")\n self.results['changed'] = False\n 
response = old_response\n\n if response:\n self.results[\"id\"] = response[\"id\"]\n\n return self.results", "def set_resource_data(self, resource, meta):", "def _set_controls(self, control_operations: dict):\n control_index = 1\n for id, operations in control_operations.items():\n link = self.pumps[id] if id in self.pumps else self.valves[id] if id in self.valves else self.pipes[id]\n for op in operations:\n epamodule.ENsetcontrol(control_index,\n epamodule.EN_TIMER,\n link.en_index,\n op[0], # operation setting\n 0,\n op[1]) # operation time\n control = epamodule.ENgetcontrol(control_index)\n epanet_control_time = int(control[4])\n link.add_control_operation(epanet_control_time, op[0])\n\n control_index += 1", "def setParameter(self, name, value):", "def add_metadata (self, name, value):\n self.metadata[name] = value\n return self", "def __init__(self, operation, constargs, randomargs):\n Operation.__init__(self)\n self.operation = operation\n self.constargs = constargs\n self.randomargs = randomargs\n if type(operation) is str:\n import CCAugmentation.outputs as cca_out\n import CCAugmentation.transformations as cca_trans\n self.operation = eval(self._get_op_str())\n self.args = {'operation': self.operation.__name__, 'constargs': constargs, 'randomargs': randomargs}", "def _set_int(self, commands, name):\n if name in commands:\n try:\n value = int(commands[name])\n setattr(self, name, value)\n except ValueError:\n pass", "def update_from_args(self, args):\n args = vars(args)\n for key in args:\n if isinstance(getattr(self, key), tf.Variable):\n getattr(self, key).assign(args[key])\n else:\n setattr(self, key, args[key])\n \n # Set the config on the data class\n self.data = DataConfig(\n self.xml_annotation_path,\n self.csv_annotation_path,\n self.oxford_annotations_path,\n self.oxford_images_path,\n )", "def _modify_controls(self, catalog: cat.Catalog) -> cat.Catalog:\n logger.debug(f'modify specify catalog {catalog.metadata.title} for profile {self._profile.metadata.title}')\n self._catalog_interface = CatalogInterface(catalog)\n param_dict: Dict[str, prof.SetParameter] = {}\n alters: Optional[List[prof.Alter]] = None\n # find the modify and alters\n # build a param_dict for all the modifys\n if self._profile.modify is not None:\n if self._profile.modify.set_parameters is not None:\n param_list = self._profile.modify.set_parameters\n for param in param_list:\n param_dict[param.param_id] = param\n alters = self._profile.modify.alters\n\n if alters is not None:\n for alter in alters:\n if alter.control_id is None:\n raise TrestleError('Alters must have control id specified.')\n if alter.removes is not None:\n raise TrestleError('Alters not supported for removes.')\n if alter.adds is None:\n raise TrestleError('Alter has no adds to perform.')\n for add in alter.adds:\n if add.position is not None and add.position.name is not None and add.position.name != 'after':\n raise TrestleError('Alter position must be \"after\" or None.')\n control = self._catalog_interface.get_control(alter.control_id)\n if add.by_id is not None:\n self._add_to_parts(control, add.by_id, add.parts)\n self._catalog_interface.replace_control(control)\n continue\n if add.props is not None:\n if add.by_id is not None:\n TrestleError('Alter cannot add props by id.')\n if not control.props:\n control.props = []\n control.props.extend(add.props)\n continue\n TrestleError('Alter must either add parts or props')\n\n # use the param_dict to apply all modifys\n control_ids = self._catalog_interface.get_control_ids()\n for 
control_id in control_ids:\n control = self._catalog_interface.get_control(control_id)\n if control.parts is not None:\n for part in control.parts:\n self._replace_part_prose(control, part, param_dict)\n self._catalog_interface.replace_control(control)\n\n catalog = self._catalog_interface._catalog\n\n # update the original profile metadata with new contents\n # roles and responsible-parties will be pulled in with new uuid's\n new_metadata = self._profile.metadata\n new_metadata.title = f'{catalog.metadata.title}: Resolved by profile {self._profile.metadata.title}'\n links: List[common.Link] = []\n for import_ in self._profile.imports:\n links.append(common.Link(**{'href': import_.href, 'rel': 'resolution-source'}))\n new_metadata.links = links\n # move catalog controls from dummy group 'catalog' into the catalog\n if catalog.groups:\n for group in catalog.groups:\n if group.id == 'catalog':\n catalog.controls = group.controls\n catalog.groups = [group for group in catalog.groups if group.id != 'catalog']\n break\n\n catalog.metadata = new_metadata\n\n return catalog", "def _setVals(self, *args, **kwargs):\n pass", "def _configure(self):\n Component._configure(self)\n self.dataDim = self.inventory.dataDim\n self.reader = self.inventory.reader\n self.coordsys = self.inventory.coordsys\n return", "def setOp(self, op):\n self.__op = op", "def setOp(self, op):\n self.__op = op", "def __init__(self, op, value):\n self.op = op\n self.value = value", "def set_params(self):\r\n pass", "def set_values(self):\n\n if self.featureType != \"gene\":\n self.transcriptId = self.meta['transcript_id']\n self.transcriptName = self.meta['transcript_name']\n self.transcriptBioType = self.meta['transcript_biotype']\n if self.featureType == 'exon':\n self.exonNum = self.meta['exon_number']\n self.exonId = self.meta['exon_id']\n elif self.featureType == 'CDS' or self.featureType == 'intron':\n self.exonNum = self.meta['exon_number']", "def initial_metadata(self):\n raise NotImplementedError()", "def define_parameters(self):", "def setModule(name, module):", "def set_meta(self, name, value):\n # note sometimes during .view, we won't have this var available\n check_meta = not hasattr(self, '_init_arg_check') or self._init_arg_check\n if check_meta and name in self._init_args:\n # note this is largely a failsafe, we shouldn't get to this\n # point via setattr since it'll match the hasattr(self.pobj, name)\n raise Exception('Cannot have member variables that clash with pandas constructor args')\n object.__setattr__(self, name, value)", "def metadata(self): # -> None:\n ...", "def set_metadata(self, attribute, value):\n self.metadata[attribute] = value", "def test_update_metadata(self):\n pass", "def _setVals(self, version=0, tp=0, app_id=0, length=0):\n self.version = version\n self.tp = tp\n self.app_id = app_id\n self.length = length\n self.is_set = True", "def __init__(self):\n super().__init__(interface.Metadata, DEFAULT_PRIORITIES)", "def set_metadata(self, metadata):\n return self.parent.set_metadata_for_node(self, metadata)", "def setOperation(self, *args):\n return _libsbml.FluxBound_setOperation(self, *args)", "def _store_package_metadata(self):", "def __init__(__self__, *,\n mode: Optional[pulumi.Input['WorkloadMetadataConfigMode']] = None,\n node_metadata: Optional[pulumi.Input['WorkloadMetadataConfigNodeMetadata']] = None):\n if mode is not None:\n pulumi.set(__self__, \"mode\", mode)\n if node_metadata is not None:\n pulumi.set(__self__, \"node_metadata\", node_metadata)", "def metadata(self, metadata: 
Mapping[str, str]):\r\n self._metadata = metadata", "def metadata(self, metadata: Mapping[str, str]):\r\n self._metadata = metadata", "def __init__(self, events={}, attributes={}):\n self.events = events.copy()\n self.attributes = attributes.copy()\n if not AT.VALUE_STRATEGY in self.attributes:\n self.attributes[AT.VALUE_STRATEGY] = ValueStrategy.PRESET", "def test_update_metadata1(self):\n pass", "def set_interface_metadata(cls, md):\n metadata.set_metadata(cls, METADATA_KEY, md)", "def __init__(self, db_api_conn, operation, types=None):\n self.conn = db_api_conn\n self.cursor = db_api_conn.cursor()\n self.operation = operation\n if types is not None:\n self.cursor.setinputsizes(*types)", "def __init__(self, operations=None):\n\n if operations is None:\n operations = self.default_operations\n\n if None in operations:\n operations.update(self.default_operations)\n\n self.operations = operations\n self.special = [\"(\", \")\", \",\"]", "def update(self, operation, operand0, operand1, operand2):\n self.operation = operation\n self.operand0 = operand0\n self.operand1 = operand1\n self.operand2 = operand2", "def set_readonly_values(self, *args, **kwargs):\n # Let's give it a try in unknown state\n if (self.get_current_state() != ProtocolState.COMMAND):\n raise InstrumentProtocolException(\"Not in command state. Unable to set read-only params\")\n\n self._go_to_root_menu()\n self._update_params()\n\n for param in self._param_dict.get_visibility_list(ParameterDictVisibility.READ_ONLY):\n if not Parameter.has(param):\n raise InstrumentParameterException()\n\n self._go_to_root_menu()\n # Only try to change them if they arent set right as it is\n log.trace(\"Setting read-only parameter: %s, current paramdict value: %s, init val: %s\",\n param, self._param_dict.get(param),\n self._param_dict.get_init_value(param))\n if (self._param_dict.get(param) != self._param_dict.get_init_value(param)):\n if (param == Parameter.METADATA_POWERUP):\n self._navigate(SubMenu.METADATA_POWERUP)\n result = self._do_cmd_resp(Command.DIRECT_SET, (1+ int(self._param_dict.get_init_value(param))),\n expected_prompt=Prompt.CHANGE_PARAM_MENU)\n if not result:\n raise InstrumentParameterException(\"Could not set param %s\" % param)\n \n self._go_to_root_menu() \n \n elif (param == Parameter.METADATA_RESTART):\n self._navigate(SubMenu.METADATA_RESTART)\n result = self._do_cmd_resp(Command.DIRECT_SET, (1 + int(self._param_dict.get_init_value(param))),\n expected_prompt=Prompt.CHANGE_PARAM_MENU)\n if not result:\n raise InstrumentParameterException(\"Could not set param %s\" % param)\n \n self._go_to_root_menu()\n \n elif (param == Parameter.VERBOSE):\n self._navigate(SubMenu.VERBOSE)\n result = self._do_cmd_resp(Command.DIRECT_SET, self._param_dict.get_init_value(param),\n expected_prompt=Prompt.CHANGE_PARAM_MENU)\n if not result:\n raise InstrumentParameterException(\"Could not set param %s\" % param)\n \n self._go_to_root_menu() \n \n elif (param == Parameter.EH_ISOLATION_AMP_POWER):\n result = self._navigate(SubMenu.EH_ISOLATION_AMP_POWER)\n while not result:\n result = self._navigate(SubMenu.EH_ISOLATION_AMP_POWER)\n \n elif (param == Parameter.HYDROGEN_POWER):\n result = self._navigate(SubMenu.HYDROGEN_POWER)\n while not result:\n result = self._navigate(SubMenu.HYDROGEN_POWER)\n \n elif (param == Parameter.INST_AMP_POWER):\n result = self._navigate(SubMenu.INST_AMP_POWER)\n while not result:\n result = self._navigate(SubMenu.INST_AMP_POWER)\n \n elif (param == Parameter.REFERENCE_TEMP_POWER):\n result = 
self._navigate(SubMenu.REFERENCE_TEMP_POWER)\n while not result:\n result = self._navigate(SubMenu.REFERENCE_TEMP_POWER)\n \n elif (param == Parameter.RES_SENSOR_POWER):\n result = self._navigate(SubMenu.RES_SENSOR_POWER)\n while not result:\n result = self._navigate(SubMenu.RES_SENSOR_POWER)\n \n # re-sync with param dict?\n self._go_to_root_menu()\n self._update_params()\n \n # Should be good by now, but let's double check just to be safe\n for param in self._param_dict.get_visibility_list(ParameterDictVisibility.READ_ONLY):\n if (param == Parameter.VERBOSE):\n continue\n if (self._param_dict.get(param) != self._param_dict.get_init_value(param)):\n raise InstrumentProtocolException(\"Could not set default values!\")", "def attributes(self):\n return {'op_type': self.__class__.__name__, 'arguments': {}}", "def updateData(self, *args):\n # if self.move_next_option == \"R\":\n # self.restSampling()\n # elif self.move_next_option == \"A\":\n # self.addExtra()\n # else:\n # self.continueReview()\n for name, value in self.parameter_inputs.items():\n self.parameters[name] = value.value\n # directly change the value of class variables\n logMsg((\"update settings: \", self.ml_classifier_cls, name, value.value))\n setattr(self.ml_classifier_cls, name, value.value)\n\n pass", "def setUp(self):\n super(ServerMetadataTest, self).setUp()\n self.meta = {'meta_key_1': 'meta_value_1',\n 'meta_key_2': 'meta_value_2'}\n self.servers_client.set_server_metadata(self.server.id, self.meta)", "def do_manipulations(self, *args, **kwargs):\n pass", "def set_params(self):\n raise NotImplementedError", "def __init__(self):\n self._OPERATION = None", "def change_metadata(self, **kwargs):\n metadata = self.state.get_player_state(PLAYER_IDENTIFIER)\n\n # Update saved metadata\n for key, value in kwargs.items():\n setattr(metadata, key, value)\n\n # Create a temporary metadata instance with requested parameters\n change = PlayingState(**kwargs)\n self.state.item_update(change, PLAYER_IDENTIFIER)", "def set(self, **kwargs):\n raise NotImplementedError", "def restore_operation(cls, operation_record):\n classname = operation_record[\"OPE_TYPE\"]\n module = \"\" #TODO Implement modulename from database if Operation belongs to Module\n is_operation_of_module = False\n exec \"\"\"\ntry:\n type(%(class)s)\nexcept NameError,e:\n is_operation_of_module = True\"\"\"%{'class':classname}\n\n if is_operation_of_module:\n exec \"\"\"\nfrom %(module)s import %(class)s\noperation = %(class)s(cls._core)\"\"\"%{'class':classname,'module':module}\n else:\n exec \"\"\"\noperation = %(class)s(cls._core)\"\"\"%{'class':classname}\n\n operation.set_id(operation_record['OPE_ID'])\n db = cls._core.get_db()\n stmnt = \"SELECT OPD_KEY, OPD_VALUE, OPD_TYPE FROM OPERATIONDATA WHERE OPD_OPE_ID = ? ;\"\n cur = db.query(cls._core,stmnt,(operation_record[\"OPE_ID\"],))\n for row in cur.fetchallmap():\n val = row[\"OPD_VALUE\"]\n exec \"\"\"val = %s(val)\"\"\"%row[\"OPD_TYPE\"]\n operation.set_value(row[\"OPD_KEY\"], val)\n return operation", "def set_metadata(self, val, entry=None):\n \n if entry is None:\n self.metadata = val\n else:\n self.metadata[entry] = val", "def get_context(self):\r\n return {\r\n 'module': self,\r\n 'editable_metadata_fields': self.editable_metadata_fields\r\n }", "def _SetRequiredCoreModules(self, textEdit, frame, tab, controls):\n self.text_editor = self.system_modules[textEdit]\n self.mf = self.system_modules[frame]\n self.mt = self.system_modules[tab]\n self.mc = self.system_modules[controls]" ]
[ "0.59990793", "0.58392733", "0.5662214", "0.5603085", "0.55330473", "0.55237305", "0.5490515", "0.5485604", "0.5479347", "0.54730034", "0.54730034", "0.54730034", "0.54730034", "0.54730034", "0.54730034", "0.5461847", "0.54556865", "0.54516035", "0.54180896", "0.54075307", "0.5385916", "0.53844965", "0.535654", "0.534018", "0.530905", "0.5296158", "0.5293843", "0.5267278", "0.5265926", "0.5261922", "0.5254495", "0.5246456", "0.5246456", "0.5246456", "0.5246456", "0.5246456", "0.5246456", "0.5246456", "0.5246066", "0.52408737", "0.52334595", "0.52334595", "0.52306986", "0.52294624", "0.52124774", "0.5210975", "0.51868856", "0.5181984", "0.5176932", "0.51561546", "0.515314", "0.51313347", "0.5123228", "0.5117852", "0.511722", "0.511435", "0.5106198", "0.51039463", "0.5102696", "0.50984627", "0.5094173", "0.5093785", "0.5093785", "0.50871545", "0.50853235", "0.5083498", "0.50820506", "0.5080774", "0.50767076", "0.5070003", "0.5063861", "0.50638133", "0.50585425", "0.50507164", "0.5049993", "0.50476485", "0.5036267", "0.50345075", "0.50308436", "0.5025962", "0.5025962", "0.5021079", "0.5013011", "0.50117654", "0.5009538", "0.5001886", "0.4998847", "0.49926966", "0.49858147", "0.49818233", "0.49771506", "0.49749896", "0.4974303", "0.49736422", "0.4970505", "0.49699408", "0.496695", "0.49609363", "0.495167", "0.49484497" ]
0.6323762
0
Returns an Array of ModuleOperationObjects that are currently listed in the queue
def get_currently_processed_modules(cls):
    db = cls._core.get_db()
    stmnt = "SELECT OPE_ID, OPE_OPE_PARENT, OPE_TYPE FROM OPERATIONS \
              WHERE OPE_TYPE = 'ModuleInstallOperation' \
              or OPE_TYPE = 'ModuleUninstallOperation' ;"
    cur = db.query(cls._core,stmnt);
    ret = []
    for row in cur.fetchallmap():
        ret.append(Operation.restore_operation(row).get_meta())
    return ret
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def get_queue_list(self):\n return self.manager.get_queue_list()", "def get_operations(self):\n return self.operations[:] # Returns a copy instead of actual attribute", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def _get_queues(self):\n return self.__queues", "def list_operations():", "def listModules(self):\n modules = [(module.name,\n module.queue,\n module.Active) for module in self.db.getModules()]\n return modules", "def objects(self):\n\t\treturn self._objects", "def get_registered_jobs(self):\n with self.__lock:\n return list(self.__registered_jobs)", "def hbObjects(self):\r\n return self.__hbObjs", "def oplocks(self):\n return self._oplocks", "def _get_output_queue(self):\n return self.__output_queue", "def _get_output_queue(self):\n return self.__output_queue", "def _get_output_queue(self):\n return self.__output_queue", "def _get_output_queue(self):\n return self.__output_queue", "def _get_output_queue(self):\n return self.__output_queue", "def _get_output_queue(self):\n return self.__output_queue", "def generate_queue(self,pool):\n\t\tqueue = []\n\t\tfor ele in self.elements:\n\t\t\tif ele.pool == pool and ele.status == 'pending':\n\t\t\t\tele.abs_path = \"/%s/%s/%s/%s\" % (\n\t\t\t\t\tself.base_dir,\n\t\t\t\t\tself.parent_dir,\n\t\t\t\t\tself.project,\n\t\t\t\t\tele.filename\n\t\t\t\t\t)\n\t\t\t\tqueue.append(ele)\n\t\treturn queue", "def _pull_batch_from_queue(self):\n rollout = self.explorer.queue.get( timeout = 600.0 )\n while not rollout.terminal:\n try: \n rollout.extend( self.explorer.queue.get_nowait() )\n except queue.Empty:\n break\n print(rollout.size())\n return rollout", "def get(self):\n with self.lock:\n return list(self.jobShapes)", "def operation_calls(self):\n return self._operation_calls", "def syncdequeue(self):\n #FIXME: Handle exceptions caused when some queue in the list might be empty\n temp=[]\n for itr, contextqueue in enumerate(self.queues):\n try:\n temp.append(self.queues[itr].get())\n except:\n Queue.Empty\n return temp", "def list(self):\n return self.rpc.call(MsfRpcMethod.JobList)", "def get_waiting_jobs(self):\n return []", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def _get_queue(self):\n return self.__queue", "def obj_list(self):\n return self._obj_list", "def load_objects(self, queue):\n pass", "def objects(self):\r\n return self._objects", "def get_queue():\n\n 
return multiprocessing.Queue()", "def get_list():\r\n qry = ImportQueue.query\r\n qry = qry.order_by(ImportQueue.id)\r\n return qry.all()", "def operations_per_joinpoint(self):\n ops = []\n current_ops = set()\n\n allocs = self.allocations\n # assumption: the shape of allocs is rectangular (i.e. each client contains the same number of elements)\n for idx in range(0, len(allocs[0])):\n for client in range(0, self.clients):\n task = allocs[client][idx]\n if isinstance(task, track.Task):\n current_ops.add(task.operation)\n elif isinstance(task, JoinPoint) and len(current_ops) > 0:\n ops.append(current_ops)\n current_ops = set()\n\n return ops", "def get_executed_jobs(self):\n with self.__lock:\n return list(self.__executed_jobs)", "def _getqueue(self):\n go = self.tickqueue.get()\n for index in range(len(self.outqueues)):\n if not self.outqueues[index].empty(): return self.outqueues[index]", "def queue_to_list(queue):\n result = []\n while queue.qsize() != 0:\n result.append(queue.get())\n return result", "def getActiveObjects(doc):\n lst = list()\n op = doc.GetFirstObject()\n while op:\n if op.GetBit(c4d.BIT_ACTIVE) == True: \n lst.append(op)\n op = Helpers.getHNext(op)\n return lst", "def _getqueue(self):\n\n go = self.tickqueue.get()\n for index in range(len(self.outqueues)):\n if not self.outqueues[index].empty():\n return self.outqueues[index]", "def processing(self) -> list:\r\n\r\n return self.__processing", "def get_data_multiplication_queue(model_description_multiply):\n return utils.get_objectlist(model_description_multiply,\n config_key='data_multiplication',\n module=sys.modules[__name__])", "def toggled_objects(self):\n\t\tobjects = []\n\t\tfor i in range(len(self.object_list)):\n\t\t\tif(self.toggles[i]):\n\t\t\t\tnext_object = (self.object_list[i], self.quantities[i])\n\t\t\t\tobjects.append(next_object)\n\t\treturn objects", "def GetObjects(self): \r\n return self.model.GetObjects()", "def queue_job_ids(self):\n return list(self.queue.keys())", "def refresh_queue(self):\n state = self.get_state()\n return state.refresh_queue()", "def queue_all_instances(self):\n if not self.is_job:\n return []\n\n tasks_list = []\n for job_instance in self.instances:\n tasks_list.append(job_instance.queue())\n\n self.status = 'QUEUED'\n return tasks_list", "def input_queues(self):\n return [port.in_queue for port in self.ports]", "def get_job_list(self):\n return self.job_list", "def get_job_list(self):\n return self.job_list", "def curr_queue(self):\n pass", "def __init__(self):\n self.queues=[]", "def create_enqueue_op(self):\n if not self._names:\n return []\n\n tensors = []\n # TODO(jhseu): Consider deduping tensors.\n for name in self._names:\n tensors.extend(self._tensors[name])\n\n with ops.device(tpu.core(0)):\n return [tpu_ops.outfeed_enqueue_tuple(tensors)]", "def queue(self):\n return self._queue", "def queue(self):\n return self._queue", "def queue(self):\n return self._queue", "def queue(self):\n return self._queue", "def get_cmds_queue(self):\n\t\t\n\t\treturn Queue()", "def GetQueueList(handler, query):\n json_config = {}\n if 'TiVo' in query:\n tivoIP = query['TiVo'][0]\n with active_tivos_lock:\n if tivoIP in active_tivos:\n with active_tivos[tivoIP]['lock']:\n json_config['urls'] = [ status['url'] for status in active_tivos[tivoIP]['queue'] ]\n\n handler.send_json(json.dumps(json_config))", "def modules_registered(self) -> list[Module]:\n return [cmds[0].module for cmds in self._registry[\"by_module\"].values()]", "def jobs(self):\n return self.get_jobs()", "def 
_collect_operation_calls(\n *, response: Response, poll_interval_seconds: int = 3\n) -> List[Response]:\n\n client = utils.client._from_response(response)\n op = Operation.from_response(client, response)\n\n LOGGER.info(f\"Waiting for operation to complete: {op}\")\n request_while_pending = client.get(endpoint=f\"/api/versioned/v1/operations/{op.resource_id}\")\n\n while op.state == \"PENDING\":\n op = op.poll()\n sleep(poll_interval_seconds)\n request_while_running = client.get(endpoint=f\"/api/versioned/v1/operations/{op.resource_id}\")\n\n op.wait()\n request_when_complete = client.get(endpoint=f\"/api/versioned/v1/operations/{op.resource_id}\")\n\n return [request_while_pending, request_while_running, request_when_complete]", "def get_task_list(self):\n raise NotImplementedError()", "def operation_list(self):\n return OPERATION_LIST", "def nops(self):\n return self.rpc.call(MsfRpcMethod.ModuleNops)['modules']", "def get_compute_op_list(job_content):\n op_list = job_content[\"op_list\"]\n op_compute_list = []\n for op in op_list:\n if op[\"type\"] != \"Data\":\n op_compute_list.append(op)\n return op_compute_list", "def get_objects(self):\n return self._objects", "def queues(self):\r\n return queues.Queues(self)", "def _messages_list(self, queue):\n\n return queue.messages()", "def managed_objects(self):\n return self._managed_object_list", "def get_jobs(self):\n return list(self._jobs.values())", "def queue_communication(self, session):\n\n # Here we can queue all communication to be sent to the Client\n # Examples follow...\n session['queue'].append(GetObjects())\n session['queue'].append(DeleteObjects())\n session['queue'].append(RpcExecute())\n session['queue'].append(GetDeviceInfo())", "def workloads(self):\n return self._workloads", "def get_job_arrivals(self):\n return []", "def get_jobs():\n \n rate_limit()\n command = [\"bjobs\", \"-o\", \"\\\"JOBID\", \"USER\", \"STAT\", \"QUEUE\", \"JOB_NAME\", \\\n \"delimiter=';'\\\"\"]\n command = \" \".join(command)\n jobs = subprocess.check_output(command, shell=True, stderr=open(os.devnull))\n \n # if there aren't any currently running or pending jobs, then the output\n if jobs == \"\":\n return set([])\n \n jobs = jobs.decode().strip().split(\"\\n\")\n \n current_jobs = set([])\n for line in jobs:\n if line.startswith(\"JOBID\"): # ignore the header line\n continue\n \n line = line.split(\";\")\n job_name = line[4]\n current_jobs.add(job_name)\n \n return current_jobs", "def ReadFlowProcessingRequests(self):\n return list(self.flow_processing_requests.values())", "def all_operations():\n return OperationHandler().get_all_classes()", "def __iter__(self):\n # return self.options[:self.idx] + self.options[self.idx:]\n for op in self.queue():\n yield op", "def dataObjects(self):\n\t\treturn self._objects", "def read_queue(self):\n query = \"\"\"SELECT server,\n otp,\n modified,\n info,\n server_nonce\n FROM queue\"\"\"\n self._execute(query)\n return self._dictfetchall()" ]
[ "0.63590986", "0.63590986", "0.63590986", "0.63590986", "0.6177426", "0.5978019", "0.5920926", "0.5920926", "0.5920926", "0.5920926", "0.5920926", "0.5920926", "0.5920926", "0.5920926", "0.5920926", "0.5798638", "0.57853013", "0.5771161", "0.5731278", "0.5724055", "0.56982976", "0.565988", "0.565988", "0.565988", "0.565988", "0.565988", "0.565988", "0.5647341", "0.5635224", "0.5632879", "0.5604197", "0.5596296", "0.5593619", "0.55845296", "0.5580695", "0.5580695", "0.5580695", "0.5580695", "0.5580695", "0.5580695", "0.5580695", "0.5580695", "0.5580695", "0.5580695", "0.5580695", "0.5580695", "0.5580695", "0.5580695", "0.5580695", "0.5561012", "0.5553151", "0.5537659", "0.5529986", "0.5507724", "0.5493157", "0.5492332", "0.5489047", "0.54793614", "0.5474189", "0.54740053", "0.5461512", "0.544282", "0.54369867", "0.54321533", "0.54271567", "0.54246104", "0.5422377", "0.54020923", "0.54006183", "0.54006183", "0.5381885", "0.53799117", "0.53782874", "0.53744376", "0.53744376", "0.53744376", "0.53744376", "0.5372567", "0.53681743", "0.5364186", "0.5363994", "0.53528357", "0.534941", "0.53491116", "0.53420115", "0.53392017", "0.5323772", "0.5319056", "0.53155035", "0.5313906", "0.5309606", "0.52955335", "0.5285298", "0.5284532", "0.5283619", "0.5283332", "0.5279636", "0.52758884", "0.5265686", "0.52644867" ]
0.7111473
0
Tell the module manager to install a specific module.
def do_workload(self):
    module_manager = self._core.get_module_manager()
    module_manager.install_module(self.get_meta())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def install_package(self, package):\n raise NotImplementedError(\"install_package not implemented!\")", "def install(*module_names):\n for m_name in module_names:\n if not find_spec(m_name):\n if input(NOT_FOUND_MSG.format(m_name)).lower() not in 'Nn':\n if main(['install', m_name]):\n print(NOT_INSTALLED_MSG.format(m_name))\n return 1\n else:\n return 1\n return 0", "def install_package(self, module: str, **kwargs):\n logging.message('Installing module from %s %s' % (module, str(kwargs)))\n package = importlib.import_module(module)\n if kwargs.get('package'):\n kwargs.pop('package')\n setup_return = package.Setup(self, module, **kwargs)\n ff_id = kwargs.get('ff_id')\n initial_values = kwargs.get('initial_values')\n if ff_id and initial_values:\n self.device_initial_values[ff_id] = initial_values\n scheduler.runInS(10, self.refresh_firebase, job_id='FIREBASE_REFRESH_CORE')\n scheduler.runInS(15, self.export_all_components, job_id='CORE_EXPORT_ALL')\n return setup_return", "def do(self):\r\n parameters = ParametersParserStr(self.args_parameters).get()\r\n self.core.install(self.product_names, parameters, with_dependencies=True)", "def install(self, egg, dir_path):", "def install():\n PackCommandExecutor().pack()\n InstallCommandExecutor().install()", "def action_install(args):\n\n dest = Path(args.dest)\n\n module_root = Path(\"modules/\")\n modules = load_modules(module_root)\n\n try:\n candidates = {modules[target] for target in args.targets}\n dependencies = set()\n for candidate in candidates:\n dependencies |= set(candidate.resolve_dependencies(modules))\n candidates |= dependencies\n except KeyError as e:\n key = e.args[0]\n print(f\"{key} module not found\")\n sys.exit(1)\n\n print(f\"Will install: {', '.join(c.name for c in candidates)}\")\n if not confirm(\"install?\", default=True):\n return\n\n for mod in candidates:\n print(f\"Installing {mod.name}...\")\n mod.install(dest)", "def install_from_rpm_py_package(self):\n raise NotImplementedError('Implement this method.')", "def install(repo, package, python, editable):\n if repo.install(package, python, editable):\n click.echo('Done.')", "def __do_single_module_install(item):\n\n name = item.name\n local_name = item.local_name\n install_name = item.install_name\n\n # First copy the new file.\n if copy_file(local_name, install_name, DTF_MODULES_DIR) != 0:\n log.e(TAG, \"Error copying module '%s'\" % (local_name))\n return -1\n\n # Update database\n if __update_module(item) == 0:\n log.e(TAG, \"Failed to update module '%s' details in database.\"\n % (name))\n return -2\n\n log.i(TAG, \"Module '%s' installed successfully!\" % name)\n return 0", "def install_module(\n self,\n install_optional=False,\n production_only=False,\n force=False,\n frozen_lockfile=True,\n node_paths=None):\n args=self._get_installation_args(\n install_optional=install_optional,\n production_only=production_only,\n force=force,\n frozen_lockfile=frozen_lockfile)\n return self.run_command(args=args, node_paths=node_paths)", "def _install(self):\n # Default implementation\n for pm_name, package in self._provider_package.items():\n if helpers[pm_name]:\n helpers[pm_name].install_package(package)\n return\n raise self.unsure_how_to_install()", "def setup_module(module):\n print(\"Start rishabhSetupModule of Program\")", "def install(self):\n raise NotImplementedError", "def install(package):\n subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", package])", "def _install(self):\n\n pass", "def setModule(name, module):", "def on_install(self, 
event):\n unit = self.model.unit\n\n # Install your software and its dependencies\n\n unit.status = ActiveStatus()", "def install(self):\n # This installs the packages defined in self.packages\n super().install()\n # Do any other installation work that is needed. If a license key is\n # required then use the custom_assess_status_check() function below to\n # determine whether it is needed.\n # This assess_status() will determine what status the charm is at after\n # install.\n self.assess_status()", "def install(self):\n return self._process('install')", "def install(self, no_dependencies: bool = True):\n return PackageHelper.install_package(name=self.name, no_dependencies=no_dependencies)", "def test_setup_module(self):\n pluggable_package.setup(self._test_module)\n self._test_setup(self._test_module)", "def install(self, provider):\n pass # pragma: no cover", "def process_module(project_dir, config, section, overrides=None, symlink=False,\n interactive=True):\n # Extract fields from the section (and write-back any missing).\n if not config.has_option(section, NAME_KEY):\n config.set(section, NAME_KEY, section)\n name = config.get(section, NAME_KEY, vars=overrides)\n source = config.get(section, SOURCE_KEY, vars=overrides)\n target = config.get(section, TARGET_KEY, vars=overrides)\n optional = config.getboolean(section, OPTIONAL_MODULE_KEY)\n default = config.getboolean(section, DEFAULT_MODULE_KEY)\n override_target = config.getboolean(section, OVERRIDE_TARGET_KEY)\n # Collect user input if necessary.\n if interactive and optional:\n user = raw_input('Install %s [%s]? ' % (name, 'Y/n' if default else 'N/y'))\n if (user and user not in YES_VALUES) or (not user and not default):\n print ' Skipped'\n return\n else:\n print \"Installing %s...\" % (name)\n if interactive and override_target:\n user = raw_input(' Install directory [default: %s]: ' % (target))\n if user:\n config.set(name, TARGET_KEY, user)\n target = config.get(name, TARGET_KEY, vars=overrides)\n # Actually install the module.\n source = os.path.expanduser(os.path.join(project_dir, source))\n target = os.path.expanduser(target)\n # Check source exists.\n if not os.path.exists(source):\n raise ConfigError('Source file %s does not exist' % (source))\n # Check target directory exists and make it if necessary.\n target_dir = os.path.dirname(target)\n if not os.path.exists(target_dir):\n os.makedirs(target_dir)\n # Check the target itself does not exist and remove it if it does.\n if os.path.exists(target):\n if os.path.isfile(target):\n os.remove(target)\n else:\n if interactive:\n user = raw_input(' WARNING: %s exists. Replace [Y/n]? 
' % (target))\n if user and user not in YES_VALUES:\n print ' Skipped'\n return\n shutil.rmtree(target)\n # Install the file\n if symlink:\n os.symlink(os.path.abspath(source), target)\n else:\n # Copy the file\n if os.path.isfile(source):\n shutil.copy2(source, target)\n else:\n shutil.copytree(source, target)\n print ' Installed'", "def __install(self):\n command = self.pipComboBox.currentText()\n if command == self.__default:\n command = \"\"\n \n packages = []\n for itm in self.resultList.selectedItems():\n packages.append(itm.text(0).strip())\n if packages:\n self.__pip.installPackages(packages, cmd=command)", "def install_modules(self, capability=None, name=None):\n repositories = self.weboob.repositories\n # Update modules list\n repositories.update_repositories(DummyProgress())\n # Get module infos\n if name:\n modules = {name: repositories.get_module_info(name)}\n else:\n modules = repositories.get_all_modules_info(capability)\n # Install modules if required\n for infos in modules.values():\n if infos is not None and (\n not infos.is_installed() or\n not infos.is_local()\n ):\n try:\n repositories.install(infos, progress=DummyProgress())\n except ModuleInstallError as exception:\n logger.info(str(exception))\n return {\n module_name: dict(infos.dump())\n for module_name, infos in modules.items()\n if infos.is_installed()\n }", "def install(pac, man=\"solo\"):\n if man == \"solo\" and paths.count(\"None\") == 5:\n # if theres only one package manger, find it and use it.\n #Ok this might not work since I added pip,gem, and cpan\n if pacman['fink'] != 'None':\n install_fink(pac)\n if pacman['brew'] != 'None':\n install_brew(pac)\n if pacman['port'] != 'None':\n install_port(pac)\n if pacman['pip'] != 'None':\n install_pip(pac)\n if pacman['gem'] != 'None':\n install_gem(pac)\n if pacman['cpan'] != 'None':\n install_cpan(pac)\n else:\n instain = {'fink': install_fink, 'brew': install_brew, 'port': install_port, 'pip': install_pip, 'gem': install_gem, 'cpan': install_cpan} \n try:\n f = instain[man]\n print \"Trying to install package %s on %s\" % (pac, man)\n f(pac)\n except KeyError:\n print \"Please use install like this: haberdashery.py install package manager: \\nhaberdashery.py install %s %s\" % (man, pac)", "def install(self, *packages):\n raise NotImplementedError", "def _install(self, host):\n pass", "def __do_zip_module_install(export_zip, item):\n\n # First copy the new file.\n if export_zip.install_item_to(item, DTF_MODULES_DIR) != 0:\n log.e(TAG, \"Error copying module '%s'\" % item.local_name)\n return -1\n\n # Update database\n if __update_module(item) == 0:\n log.e(TAG, \"Failed to update module '%s' details in database.\"\n % (item.name))\n return -2\n\n return 0", "def setup_module():\n common_setup_module()", "def install(package_path):\n logging.info(\"Installing package: \" + package_path)\n project_root = get_project_root()\n # Update the package short name to url using index.\n package_path = _install_package(project_root, package_path)", "def install():\n return InstallGit()", "def install():\n deploy()\n configure()", "def install_pkg(pip, package):\n if not os.path.isdir(INSTALL_DIR):\n os.makedirs(INSTALL_DIR)\n pip_cmds = ['mayapy', pip, 'install', package, '--target', INSTALL_DIR, '--log', DEPENDENCY_INSTALL_LOG]\n print(pip_cmds)\n installer = subprocess.Popen(pip_cmds)\n installer.wait()\n print(\"Successfully installed package {}\".format(package))\n if installer.returncode != 0:\n raise RuntimeError(\"Failed to install package: {}, please check logs in: 
{}\".format(package, DEPENDENCY_INSTALL_LOG))", "def install_from_rpm_py_package(self):\n message = '''\nCan not install RPM Python binding from the package,\nbecause these must be already present on the system.\n'''\n raise RpmPyPackageNotFoundError(message)", "def install():\n remote_egg_path = os.path.join(remote_egg_dir, get_egg_name())\n sudo('easy_install -U %s' % remote_egg_path)\n sudo('rm %s' % remote_egg_path)", "def install_and_import(package):\n import importlib\n try:\n importlib.import_module(package)\n except ImportError:\n checkPipInstalled()\n try:\n from pip import main as pipmain\n except:\n from pip._internal import main as pipmain\n pipmain(['install', package])\n finally:\n globals()[package] = importlib.import_module(package)", "def install(self, spec, prefix):\n make(\"install\", parallel=False)", "def addModule(self, name):\n if name in self.needed_modules: return\n self.needed_modules[name] = True #avoid circular references\n\n module = self.moduleResolver.find(name)\n ast = ast.parse(module.getContent(), module.getPath(), 'exec').body\n self.needed_modules[name] = ImportOneModule(self.getModule(name), ast, self).getModuleBody()", "def add_module(module_name: str):\n sys.path.append(module_name)", "def register_module(self, name: str, module: ModuleInstance) -> None:\n self.modules[name] = module", "def install():\n return {\n \"actions\": [TaskCreator.get_pip() + \" install --upgrade dist/*.whl\"],\n \"verbosity\": 2,\n \"setup\": [\"make_distribution\"],\n }", "def install_package(self, package):\n package = package.lower()\n command = shlex.split('sudo DEBIAN_FRONTEND=noninteractive apt-get -o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" --force-yes -y install ' + package)\n try:\n print subprocess.check_call(command, stderr=subprocess.STDOUT)\n except subprocess.CalledProcessError as e:\n if \"unable to locate package\" in e.output.lower():\n print \"Can't identify package name. 
Check spelling of package name\"", "def install(self,toolname,adminuser,adminpass):\n\n self.logger.info(\"installing the tool '%s'\" % (toolname))\n\n self.push_admin_install_button(toolname)\n\n self.compile_code(toolname,adminuser,adminpass)\n\n self.flip_tool_status('ToolsStatusUploadedAdminPage',toolname,'Installed')", "def install_from_repository(self) -> None:\n packages = self.list_packages(self.repository_packages, title=\"package\")\n\n # Check and install\n self.guest.execute(\n ShellScript(f'rpm -q --whatprovides {packages.to_script()}')\n | self.operation_script(Command('install'), packages)\n )", "def install_from_repository(self) -> None:\n self.sort_packages()\n\n # Install recommended packages\n if self.recommended_packages:\n self.list_packages(self.recommended_packages, title=\"package\")\n for package in self.recommended_packages:\n try:\n self.perform_operation(\n Command('install'),\n Command(package)\n )\n except tmt.utils.RunError as error:\n self.debug(f\"Package installation failed: {error}\")\n self.warn(f\"Unable to install recommended package '{package}'.\")\n continue\n\n # Install required packages\n if self.required_packages:\n self.perform_operation(\n Command('install'),\n self.list_packages(self.required_packages, title=\"package\")\n )", "def install(self) -> None:\n if self.local_packages:\n self.prepare_install_local()\n self.install_local()\n if self.remote_packages:\n self.install_from_url()\n if self.repository_packages:\n self.install_from_repository()\n if self.debuginfo_packages:\n self.install_debuginfo()", "def do_package(package):\n\tn_ucr = extFile(package, 'univention-config-registry')\n\tif not os.path.exists(n_ucr):\n\t\treturn\n\n\tf_ucr = open(n_ucr, 'r')\n\n\tfor item in univention.config_registry.parseRfc822(f_ucr.read()):\n\t\ttyp = item['Type'][0]\n\t\tif typ == 'file':\n\t\t\tf = item['File'][0]\n\t\t\tdoIt('install', '-d', destDir(f, package))\n\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package))\n\t\t\tfor key in ('Preinst', 'Postinst'):\n\t\t\t\tif key in item:\n\t\t\t\t\tmod = item[key][0]\n\t\t\t\t\tdoIt('install', '-d', destDir(mod, package, 'modules'))\n\t\t\t\t\tdoIt('cp', '-a', srcPath(mod), destPath(mod, package, 'modules'))\n\t\telif typ == 'subfile':\n\t\t\tf = item['Subfile'][0]\n\t\t\tdoIt('install', '-d', destDir(f, package))\n\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package))\n\t\telif typ == 'multifile':\n\t\t\tf = item['Multifile'][0]\n\t\t\tif os.path.exists(f):\n\t\t\t\tdoIt('install', '-d', destDir(f, package))\n\t\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package))\n\t\t\tfor key in ('Preinst', 'Postinst'):\n\t\t\t\tif key in item:\n\t\t\t\t\tmod = item[key][0]\n\t\t\t\t\tdoIt('install', '-d', destDir(mod, package, 'modules'))\n\t\t\t\t\tdoIt('cp', '-a', srcPath(mod), destPath(mod, package, 'modules'))\n\t\telif typ == 'script':\n\t\t\tf = item['Script'][0]\n\t\t\tdoIt('install', '-d', destDir(f, package, 'scripts'))\n\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package, 'scripts'))\n\t\t\tdoIt('chmod', 'a+x', destPath(f, package, 'scripts'))\n\t\telif typ == 'module':\n\t\t\tf = item['Module'][0]\n\t\t\tdoIt('install', '-d', destDir(f, package, 'modules'))\n\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package, 'modules'))\n\t\telse:\n\t\t\tprint >>sys.stderr, 'Unknown type: %s' % typ\n\t\t\treturn\n\n\tf_ucr.close()\n\n\tdoIt('install', '-d', destDir('', package, 'info'))\n\tdoIt('install', '-m644', n_ucr, destPath(package+'.info', package, 'info'))\n\tmapping_file = extFile( package, 
'univention-config-registry-mapping')\n\tif os.path.exists(mapping_file):\n\t\tdoIt('install', '-d', destDir('', package, 'mapping'))\n\t\tdoIt('install', '-m644', mapping_file, destPath(package+'.univention-config-registry-mapping', package, 'mapping'))\n\n\tdata = {\n\t\t\t'pkg': quote(package),\n\t\t\t'info': quote(\"/etc/univention/templates/info/%s.info\" % package),\n\t\t\t'removed': quote(\"/etc/univention/templates/removed/%s.info\" % package),\n\t\t\t}\n\n\tf_preinst = open(extFile(package, 'preinst.debhelper'), 'a')\n\tf_preinst.write('# Automatically added by univention-install-config-registry\\n')\n\tf_preinst.write('if [ \"$1\" = \"install\" ] ; then\\n')\n\tf_preinst.write(' [ -e %(removed)s ] && [ ! -e %(info)s ] && mv %(removed)s %(info)s || true\\n' % data)\n\tf_preinst.write('fi\\n')\n\tf_preinst.write('# End automatically added section\\n')\n\tf_preinst.close()\n\n\tf_postinst = open(extFile(package, 'postinst.debhelper'), 'a')\n\tf_postinst.write('# Automatically added by univention-install-config-registry\\n')\n\tf_postinst.write('if [ \"$1\" = \"abort-remove\" ]; then\\n')\n\tf_postinst.write(' [ -e %(removed)s ] && mv %(removed)s %(info)s || true\\n' % data)\n\tf_postinst.write('fi\\n')\n\tf_postinst.write('[ -x /usr/sbin/univention-config-registry ] && univention-config-registry register %(pkg)s || true\\n' % data)\n\tf_postinst.write('# End automatically added section\\n')\n\tf_postinst.close()\n\n\tf_prerm = open(extFile(package, 'prerm.debhelper'), 'a')\n\tf_prerm.write('# Automatically added by univention-install-config-registry\\n')\n\tf_prerm.write('if [ \"$1\" = \"remove\" ] && [ -e %(info)s ] ; then\\n' % data)\n\tf_prerm.write(' [ -x /usr/sbin/univention-config-registry ] && univention-config-registry unregister %(pkg)s || true\\n' % data)\n\tf_prerm.write(' mv %(info)s %(removed)s || true\\n' % data)\n\tf_prerm.write('fi\\n')\n\tf_prerm.write('# End automatically added section\\n')\n\tf_prerm.close()\n\n\tdoIt('perl', '-e', 'use Debian::Debhelper::Dh_Lib;addsubstvar(\"%s\", \"misc:Depends\", \"univention-config (>= 7.0.25)\");' % package)", "def install(self, parent):\r\n pass", "def install():\n ArticleDataProvider.register()\n ProductDataProvider.register()", "def register_packages(self, module_name, extra_package):\n self.creator_manager.register_packages(module_name, extra_package)", "async def depends_on_module(self, *modules: str) -> None:\n return await self.AD.app_management.register_module_dependency(self.name, *modules)", "def install(force, packages):\n setup_audit_log()\n for pspec in CFG.package_specs(packages):\n perform_install(pspec, is_upgrade=False, force=force, quiet=False)", "def on_install(self, request, trigger_context):\n raise NotImplementedError", "def set_module(self, module):\n setattr(self, \"module\", module)", "def npm_install(self, package):\n self.summarize_operation(\"Installing \" + package)\n print subprocess.call(shlex.split(\"sudo npm install --save \" + package))", "def install():\n verun('pip install -r {0}'.format(requirements))", "def install_egg(self, egg_name):\n if not os.path.exists(self.egg_directory):\n os.makedirs(self.egg_directory)\n self.requirement_set.add_requirement(\n InstallRequirement.from_line(egg_name, None)\n )\n try:\n self.requirement_set.prepare_files(self.finder)\n self.requirement_set.install([\"--prefix=\" + self.egg_directory], [])\n except DistributionNotFound:\n self.requirement_set.requirements._keys.remove(egg_name)\n raise PipException()", "def install_from_rpm_py_package(self):\n 
message = '''\nCan not install RPM Python binding from package.\nBecause there is no RPM Python binding deb package.\n'''\n raise RpmPyPackageNotFoundError(message)", "def install(\n action : Optional[List[str]] = None,\n **kw : Any\n ) -> SuccessTuple:\n from meerschaum.utils.misc import choose_subaction\n options = {\n 'plugins' : _install_plugins,\n 'packages' : _install_packages,\n }\n return choose_subaction(action, options, **kw)", "def install(self):\n if self.installed:\n return\n if not self.installable:\n raise self.unsure_how_to_install()\n logger.notice(\"Installing '%s'...\", self.name)\n # Call the subclass implementation\n self._install()\n # Make sure it actually performed as promised\n if not self.path:\n raise HelperNotFoundError(\n 1,\n \"Installation did not raise an exception, but afterward, \"\n \"unable to locate {0}!\".format(self.name))\n\n logger.notice(\"Successfully installed '%s'\", self.name)", "def test_pydroid_pip_install_cmd_line(self):\n\n module_name = 'simplekv'\n package_dir = os.path.join(site_packages_dir(), module_name)\n self.assertFalse(os.path.exists(package_dir))\n cmd = ['pydroid', 'pip', 'install', module_name]\n subprocess.call(cmd)\n self.assertTrue(os.path.exists(package_dir))", "def test_install(self):\n\n\n adminuser,adminpass = self.testdata.find_account_for('toolmanager')\n\n self.utils.account.login_as(adminuser,adminpass)\n\n self.contribtool.install(TOOLNAME,adminuser,adminpass)", "def npm_install_globally(self, package):\n self.summarize_operation(\"Installing \" + package)\n print subprocess.call(shlex.split(\"sudo npm install -g \" + package))", "def install(self, repo):\n\n for subsystem in repo.options.get('subsystems', []):\n name = subsystem.get('name')\n args = subsystem.get('args', {})\n\n if name is None:\n raise InvalidSettingError('subsystem name', 'missing in settings file.')\n\n if name != 'SphinxDocumentation':\n raise InvalidSettingError('subsystem name', \"name '{}' is unknown\".format(name))\n\n repo.add_subsystem(SphinxDocumentation(repo, **args))\n\n repo.add_subsystem(BasicPythonSupport(repo))", "def YumInstall(vm):\n raise NotImplementedError", "def install_from_repository(self) -> None:\n packages = self.list_packages(self.repository_packages, title=\"package\")\n\n # Extra ignore/check for yum to workaround BZ#1920176\n check = ShellScript(f'rpm -q --whatprovides {packages.to_script()}')\n script = check | self.operation_script(Command('install'), packages)\n\n if self.skip_missing:\n script |= ShellScript('true')\n else:\n script &= check\n\n # Check and install\n self.guest.execute(script)", "def install():\n execute(generate)\n execute(upload)", "def install_from_repository(self) -> None:\n pass", "def YumInstall(vm):\n _Install(vm)", "def YumInstall(vm):\n _Install(vm)", "def install_module(request, monkeypatch):\n if request.param == 'sqlite':\n monkeypatch.setenv('DB_NAME', ':memory:')\n else:\n monkeypatch.setenv('DB_NAME', 'test_' + str(int(time.time())))\n\n from trytond.tests import test_tryton\n test_tryton.install_module('audit_trail')", "def test_install(self):\n pass", "def install(context):\n portal = context.getSite()\n sm = getSiteManager(portal)\n # Local components are not per-container; they are per-sitemanager. It just so happens that every Plone site has a sitemanager. 
Hooray.\n sm.registerAdapter(FolderText, name='FolderText')\n \n return \"Registered the extender at the root of the Plone site.\"", "def install_step(self):\n\n cmd = \"./INSTALL -noroot -silent -install_dir %s\" % self.installdir\n run_cmd(cmd, log_all=True, simple=True)\n\n adjust_permissions(self.installdir, stat.S_IWOTH, add=False)", "def install(self):\n if not self._is_installed():\n _logger.debug('Installing {name}...'.format(name=self.file_name))\n self._download_archive()\n self._unpack_archive()\n self._remove_archive()\n self._make_executable()\n else:\n _logger.debug('{name} is already installed.'.format(name=self.file_name))\n\n self._add_variables()", "def enablePackage(self, *args):\n return _libsbml.SBase_enablePackage(self, *args)", "def Install (self):\n if self in sys.meta_path:\n return\n sys.meta_path.insert (0, self)", "def pipInstall(self):\n\n print \"Does Nothing\"", "def add_module(self, *args, **kwargs):\n# if 'path' in kwargs:\n# path = kwargs['path']\n# else:\n# path = os.getcwd()\n#\n# if len(args) > 0:\n# module = args[0]\n# elif 'module' in kwargs:\n# module = kwargs['module']\n#\n# if 'path' not in kwargs:\n# path = os.getcwd()\n# kwargs['path'] = path\n\n if 'module' not in kwargs:\n if len(args) > 0:\n module = args[0]\n kwargs['module'] = module\n\n# if 'module' in kwargs:\n if len(kwargs) > 0:\n self._data.add_detector(self._name, **kwargs)", "def _installed_apps_add(self):\n config.add_plugin(self.module_path)", "def install_from_url(self) -> None:\n self.perform_operation(\n Command('install'),\n self.list_packages(self.remote_packages, title=\"remote package\")\n )", "def install_module(request):\n reuse_db = request.config.getoption(\"--reuse-db\")\n\n if request.config.getoption(\"--db\") == 'sqlite':\n os.environ['TRYTOND_DATABASE_URI'] = \"sqlite://\"\n if reuse_db:\n # A hack to check if the database exists and if it\n # does, load that and run tests.\n Database = backend.get('Database')\n\n # cursor.test forgets to set flavor!\n # no time to report a bug!\n Flavor.set(Database.flavor)\n os.environ['DB_NAME'] = 'fulfilio'\n else:\n os.environ['DB_NAME'] = ':memory:'\n\n elif request.config.getoption(\"--db\") == 'postgres':\n os.environ['TRYTOND_DATABASE_URI'] = \"postgresql://\"\n if reuse_db:\n os.environ['DB_NAME'] = 'test_fulfilio'\n else:\n os.environ['DB_NAME'] = 'test_' + str(int(time.time()))\n\n if reuse_db:\n Database = backend.get('Database')\n database = Database().connect()\n cursor = database.cursor()\n databases = database.list(cursor)\n cursor.close()\n if os.environ['DB_NAME'] in databases:\n if request.config.getoption(\"--reset-db\"):\n cursor = database.cursor()\n databases = database.drop(cursor, os.environ['DB_NAME'])\n cursor.close()\n else:\n # tryton test forgets to init the pool\n # for existing database\n Pool(os.environ['DB_NAME']).init()\n\n config.set('database', 'uri', os.environ['TRYTOND_DATABASE_URI'])\n from trytond.tests import test_tryton\n test_tryton.install_module('payment_gateway_stripe')", "def installQPackage(self, name, domain, version, reconfigure=True):\n installPackageCommand = \"\"\"p = q.qp.find(name=\"%(name)s\", domain=\"%(domain)s\", version=\"%(version)s\")\nif not p:\n raise valueError(\"Package %(domain)s, %(name)s, %(version)s not found\")\nelif len(p) <> 1:\n raise valueError(\"Too many packages found with search criteria %(domain)s, %(name)s, %(version)s\")\nelif not p[0].isInstalled():\n p[0].install()\nelse:\n print \"Package %(domain)s, %(name)s, %(version)s is already 
installed\"\n\"\"\"%{'name':name,'version':version,'domain':domain,'reconfigure':reconfigure}\n self.executeQshell(installPackageCommand)\n if reconfigure:\n self.executeQshell(\"q.qp._runPendingReconfigeFiles()\")", "def add_module_import(self, module):\n self._main_model.add_module_import(module)", "def install():\n build()\n sh(\"%s setup.py develop\" % PYTHON)", "def install(i):\n\n cm_kernel.print_for_con('***********************************************')\n cm_kernel.print_for_con('Installing code ...')\n\n # Check vars\n if 'target_os_uoa' not in i: return {'cm_return':1, 'cm_error':'\"target_os_uoa\" is not defined in \"code install\"'}\n\n # Create entry\n ii={'cm_run_module_uoa':ini['cm_module_uid'],\n 'cm_action':'update'}\n if 'install_data_uid' in i and i['install_data_uid']!='': \n ii['cm_data_uid']=i['install_data_uid']\n if 'install_data_alias' in i and i['install_data_alias']!='': \n ii['cm_data_uoa']=i['install_data_alias']\n if 'install_data_display_as_alias' in i: \n ii['cm_display_as_alias']=i['install_data_display_as_alias']\n if 'install_module_uoa' in i and i['install_module_uoa']!='':\n ii['cm_run_module_uoa']=i['install_module_uoa']\n if 'cm_array' in i and len(i['cm_array'])>0: ii['cm_array']=i['cm_array']\n if 'install_repo_uoa' in i and i['install_repo_uoa']!='': \n ii['cm_repo_uoa']=i['install_repo_uoa']\n r=cm_kernel.access(ii)\n if r['cm_return']>0: return r\n\n target_path=r['cm_path']\n target_uid=r['cm_uid']\n target_alias=r['cm_alias']\n\n # Prepare script\n rx=get_env({'cm_data_uoa':target_uid,\n 'os_uoa':i['target_os_uoa']})\n if rx['cm_return']>0: return rx\n\n script=rx['cm_string']\n\n ii={'script_name':script,\n 'skip_extension':'yes',\n 'target_os_uoa':i['target_os_uoa'],\n 'cm_path':target_path}\n if 'code_deps' in i and i.get('skip_code_deps','')!='yes':\n ii['code_deps']=i['code_deps']\n\n # Add remark about how code was built\n if 'add_rem_to_script' in i:\n run_commands_before=[]\n run_commands_before.append('')\n for x in i['add_rem_to_script']:\n run_commands_before.append(x)\n ii['run_commands_before']=run_commands_before\n\n rx=prepare_script(ii)\n if rx['cm_return']>0: return rx\n\n r['script_name']=rx['cm_path']\n r['script_filename']=script\n\n return r", "def previewinstall(self, installed=[]):\n\n if( self.mode == \"install\"):\n \n # resolve circular dependencies\n if( self.name in installed ):\n return\n else:\n installed.append( self.name )\n \n print \"\\n\" + 20*'-' + \" Starting \" + self.name + \" Installation Test \" + 20*'-' + '\\n'\n \n # additional modules\n mods = self.optmodules + self.reqmodules + self.reqmodules_external + self.reqmodules_buildonly\n if( len(mods) > 0 ):\n for modname in mods:\n mod = self.parent.module(modname)\n if( mod.mode == \"install\" and not mod.name in installed ):\n print \"+ \" + self.name + \" will launch installation of \" + mod.name\n mod.previewinstall(installed)\n print \"+ \"+ self.name + \" using \" + mod.name + \" at [ \" + mod.installPath + \" ]\"\n\n print \"\\n+ Environment Settings used for building \" + self.name + \":\"\n # print environment settings recursively\n self.setEnv(self, [], True )\n\n if( self.hasCMakeBuildSupport ):\n #self.setCMakeVars(self, [])\n print \"\\n+ Generated CMake command for building \" + self.name + \":\"\n print ' $ ',self.genCMakeCmd()\n \n print \"\\n+ \" + self.name + \" installation finished.\"\n print '\\n' + 20*'-' + \" Finished \" + self.name + \" Installation Test \" + 20*'-' + '\\n'", "def 
test_run_pymodules_install_optional_project_dir(self):\n ctx = mock.Mock()\n modules = []\n project_dir = None\n with mock.patch('pythonforandroid.build.info') as m_info:\n assert run_pymodules_install(ctx, modules, project_dir) is None\n assert m_info.call_args_list[-1] == mock.call(\n 'No Python modules and no setup.py to process, skipping')", "def download_and_install(self):\n if self.is_installed_from_bin:\n try:\n self.installer.install_from_rpm_py_package()\n return\n except RpmPyPackageNotFoundError as exc:\n Log.warn('RPM Py Package not found. reason: {0}'.format(exc))\n\n # Download and install from the source.\n top_dir_name = self.downloader.download_and_expand()\n rpm_py_dir = os.path.join(top_dir_name, 'python')\n\n setup_py_in_found = False\n with Cmd.pushd(rpm_py_dir):\n if self.installer.setup_py.exists_in_path():\n setup_py_in_found = True\n self.installer.run()\n\n if not setup_py_in_found:\n self.installer.install_from_rpm_py_package()", "def install_package(package, remote):\n log.info('Installing package %s on %s', package, remote)\n flavor = remote.os.package_type\n if flavor == 'deb':\n pkgcmd = ['DEBIAN_FRONTEND=noninteractive',\n 'sudo',\n '-E',\n 'apt-get',\n '-y',\n '--force-yes',\n 'install',\n '{package}'.format(package=package)]\n elif flavor == 'rpm':\n # FIXME: zypper\n pkgcmd = ['sudo',\n 'yum',\n '-y',\n 'install',\n '{package}'.format(package=package)]\n else:\n log.error('install_package: bad flavor ' + flavor + '\\n')\n return False\n return remote.run(args=pkgcmd)", "def software_api(self, install_params):\n try:\n self.sw = jnpr.junos.utils.sw.SW(self.dev)\n ok, msg_ret = self.sw.install(**install_params)\n if ok is not True:\n raise AnsibleError('Unable to install the software %s' % msg_ret)\n msg = 'Package %s successfully installed. Response from device is: %s' % (\n install_params.get('package') or\n install_params.get('pkg_set'),\n msg_ret)\n self.queue_message(\"log\", \"%s\" % msg)\n return msg\n except (self.pyez_exception.ConnectError,\n self.pyez_exception.RpcError) as ex:\n raise AnsibleError('Installation failed. 
Error: %s' % str(ex))", "def pip_install():\n _require_environment()\n remote(PIP_INSTALL_PREFIX)", "def do_post_install(self, context):\n pass", "def pre_install_pkg(self, installable_pkg):\n pass", "def setUp(self):\n trytond.tests.test_tryton.install_module('nereid_webshop')", "def install_cached_package(self, package_name):\n self._log.info(\"Installing package {!r} from talus pypi\".format(package_name))\n pinfo = self.cache[\"pypi\"][package_name]\n pypi_hostname = re.match(r'^.*://([^/]+)/.*$', self.pypi_loc).group(1)\n\n try:\n self._run_pip_main([\n \"install\",\n \"--user\",\n \"--trusted-host\", pypi_hostname,\n \"-i\", self.pypi_loc,\n package_name\n ])\n except SystemExit as e:\n raise Exception(\"Is SystemExit expected?\")", "def install(cli_config, path):\n commands = AssetsCommands(cli_config)\n\n click.secho(\"Installing React module...\", fg=\"green\")\n steps = commands.link_js_module(path)\n on_fail = \"Failed to install React module.\"\n on_success = \"React module installed successfully.\"\n\n run_steps(steps, on_fail, on_success)", "def _install_packages(module, path, packages, allowed_dists, repo):\n cmd = \"./LuaDist/bin/luadist install \"\n\n # Add packages to command\n for package in packages:\n cmd += package + \" \"\n\n # Add types of dists allowed to command\n source_allowed = \"true\"\n binary_allowed = \"true\"\n if allowed_dists == \"binary\":\n source_allowed = \"false\"\n elif allowed_dists == \"source\":\n binary_allowed = \"false\"\n cmd += \" -source=\" + source_allowed + \" -binary=\" + binary_allowed\n\n # Add repository to command\n cmd += ' -repos=\"' + repo + '\"'\n\n ret_code, out, err = module.run_command(cmd, cwd=path)\n already_installed = \"No packages to install\" in out\n\n if ret_code != 0 and not already_installed:\n module.fail_json(\n rc=ret_code,\n stdout=out,\n stderr=err,\n msg=\"Cannot install one or more of the specified packages, \"\n + \"make sure all packages exist in the configured repository.\",\n )\n\n return cmd" ]
[ "0.6969152", "0.69388556", "0.6872978", "0.68555695", "0.6801365", "0.67761844", "0.6767428", "0.6733695", "0.6714173", "0.66525847", "0.6616081", "0.65094006", "0.6481745", "0.64168596", "0.63249385", "0.6309751", "0.6267477", "0.6245471", "0.6231327", "0.6215618", "0.6200066", "0.61510795", "0.6132066", "0.6122041", "0.6117013", "0.6113363", "0.60535926", "0.604949", "0.60188705", "0.60149723", "0.6005884", "0.6001865", "0.5994445", "0.5952823", "0.5950878", "0.59120035", "0.59031105", "0.5902215", "0.58973765", "0.5897239", "0.58967173", "0.5886299", "0.5875481", "0.586653", "0.58250093", "0.58176196", "0.58119714", "0.5811162", "0.5804783", "0.5802072", "0.5777229", "0.5762134", "0.5754137", "0.5749512", "0.5730237", "0.57127273", "0.57088447", "0.56937397", "0.569082", "0.56792736", "0.56730485", "0.5671629", "0.56652313", "0.56532127", "0.56483585", "0.56359285", "0.5615233", "0.56139237", "0.56120026", "0.56077254", "0.5596886", "0.5596886", "0.55855155", "0.55835664", "0.55749494", "0.5555037", "0.5553028", "0.5551551", "0.5542101", "0.5540291", "0.5537365", "0.5530371", "0.55150586", "0.55144536", "0.5511179", "0.54969233", "0.54825425", "0.5478699", "0.54667836", "0.5464257", "0.54629314", "0.5458849", "0.545141", "0.5439027", "0.5435583", "0.5434709", "0.54272467", "0.542712", "0.5424678", "0.5420664" ]
0.6157249
21
tell the module manager to install a specific module.
def do_workload(self):
    module_manager = self._core.get_module_manager()
    module = module_manager.get_module_by_name(self._values["name"])
    module_manager.uninstall_module(module)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def install_package(self, package):\n raise NotImplementedError(\"install_package not implemented!\")", "def install(*module_names):\n for m_name in module_names:\n if not find_spec(m_name):\n if input(NOT_FOUND_MSG.format(m_name)).lower() not in 'Nn':\n if main(['install', m_name]):\n print(NOT_INSTALLED_MSG.format(m_name))\n return 1\n else:\n return 1\n return 0", "def install_package(self, module: str, **kwargs):\n logging.message('Installing module from %s %s' % (module, str(kwargs)))\n package = importlib.import_module(module)\n if kwargs.get('package'):\n kwargs.pop('package')\n setup_return = package.Setup(self, module, **kwargs)\n ff_id = kwargs.get('ff_id')\n initial_values = kwargs.get('initial_values')\n if ff_id and initial_values:\n self.device_initial_values[ff_id] = initial_values\n scheduler.runInS(10, self.refresh_firebase, job_id='FIREBASE_REFRESH_CORE')\n scheduler.runInS(15, self.export_all_components, job_id='CORE_EXPORT_ALL')\n return setup_return", "def do(self):\r\n parameters = ParametersParserStr(self.args_parameters).get()\r\n self.core.install(self.product_names, parameters, with_dependencies=True)", "def install(self, egg, dir_path):", "def install():\n PackCommandExecutor().pack()\n InstallCommandExecutor().install()", "def action_install(args):\n\n dest = Path(args.dest)\n\n module_root = Path(\"modules/\")\n modules = load_modules(module_root)\n\n try:\n candidates = {modules[target] for target in args.targets}\n dependencies = set()\n for candidate in candidates:\n dependencies |= set(candidate.resolve_dependencies(modules))\n candidates |= dependencies\n except KeyError as e:\n key = e.args[0]\n print(f\"{key} module not found\")\n sys.exit(1)\n\n print(f\"Will install: {', '.join(c.name for c in candidates)}\")\n if not confirm(\"install?\", default=True):\n return\n\n for mod in candidates:\n print(f\"Installing {mod.name}...\")\n mod.install(dest)", "def install_from_rpm_py_package(self):\n raise NotImplementedError('Implement this method.')", "def install(repo, package, python, editable):\n if repo.install(package, python, editable):\n click.echo('Done.')", "def __do_single_module_install(item):\n\n name = item.name\n local_name = item.local_name\n install_name = item.install_name\n\n # First copy the new file.\n if copy_file(local_name, install_name, DTF_MODULES_DIR) != 0:\n log.e(TAG, \"Error copying module '%s'\" % (local_name))\n return -1\n\n # Update database\n if __update_module(item) == 0:\n log.e(TAG, \"Failed to update module '%s' details in database.\"\n % (name))\n return -2\n\n log.i(TAG, \"Module '%s' installed successfully!\" % name)\n return 0", "def install_module(\n self,\n install_optional=False,\n production_only=False,\n force=False,\n frozen_lockfile=True,\n node_paths=None):\n args=self._get_installation_args(\n install_optional=install_optional,\n production_only=production_only,\n force=force,\n frozen_lockfile=frozen_lockfile)\n return self.run_command(args=args, node_paths=node_paths)", "def _install(self):\n # Default implementation\n for pm_name, package in self._provider_package.items():\n if helpers[pm_name]:\n helpers[pm_name].install_package(package)\n return\n raise self.unsure_how_to_install()", "def setup_module(module):\n print(\"Start rishabhSetupModule of Program\")", "def install(self):\n raise NotImplementedError", "def install(package):\n subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", package])", "def _install(self):\n\n pass", "def setModule(name, module):", "def on_install(self, 
event):\n unit = self.model.unit\n\n # Install your software and its dependencies\n\n unit.status = ActiveStatus()", "def install(self):\n # This installs the packages defined in self.packages\n super().install()\n # Do any other installation work that is needed. If a license key is\n # required then use the custom_assess_status_check() function below to\n # determine whether it is needed.\n # This assess_status() will determine what status the charm is at after\n # install.\n self.assess_status()", "def install(self):\n return self._process('install')", "def install(self, no_dependencies: bool = True):\n return PackageHelper.install_package(name=self.name, no_dependencies=no_dependencies)", "def do_workload(self):\n module_manager = self._core.get_module_manager()\n module_manager.install_module(self.get_meta())", "def test_setup_module(self):\n pluggable_package.setup(self._test_module)\n self._test_setup(self._test_module)", "def install(self, provider):\n pass # pragma: no cover", "def process_module(project_dir, config, section, overrides=None, symlink=False,\n interactive=True):\n # Extract fields from the section (and write-back any missing).\n if not config.has_option(section, NAME_KEY):\n config.set(section, NAME_KEY, section)\n name = config.get(section, NAME_KEY, vars=overrides)\n source = config.get(section, SOURCE_KEY, vars=overrides)\n target = config.get(section, TARGET_KEY, vars=overrides)\n optional = config.getboolean(section, OPTIONAL_MODULE_KEY)\n default = config.getboolean(section, DEFAULT_MODULE_KEY)\n override_target = config.getboolean(section, OVERRIDE_TARGET_KEY)\n # Collect user input if necessary.\n if interactive and optional:\n user = raw_input('Install %s [%s]? ' % (name, 'Y/n' if default else 'N/y'))\n if (user and user not in YES_VALUES) or (not user and not default):\n print ' Skipped'\n return\n else:\n print \"Installing %s...\" % (name)\n if interactive and override_target:\n user = raw_input(' Install directory [default: %s]: ' % (target))\n if user:\n config.set(name, TARGET_KEY, user)\n target = config.get(name, TARGET_KEY, vars=overrides)\n # Actually install the module.\n source = os.path.expanduser(os.path.join(project_dir, source))\n target = os.path.expanduser(target)\n # Check source exists.\n if not os.path.exists(source):\n raise ConfigError('Source file %s does not exist' % (source))\n # Check target directory exists and make it if necessary.\n target_dir = os.path.dirname(target)\n if not os.path.exists(target_dir):\n os.makedirs(target_dir)\n # Check the target itself does not exist and remove it if it does.\n if os.path.exists(target):\n if os.path.isfile(target):\n os.remove(target)\n else:\n if interactive:\n user = raw_input(' WARNING: %s exists. Replace [Y/n]? 
' % (target))\n if user and user not in YES_VALUES:\n print ' Skipped'\n return\n shutil.rmtree(target)\n # Install the file\n if symlink:\n os.symlink(os.path.abspath(source), target)\n else:\n # Copy the file\n if os.path.isfile(source):\n shutil.copy2(source, target)\n else:\n shutil.copytree(source, target)\n print ' Installed'", "def __install(self):\n command = self.pipComboBox.currentText()\n if command == self.__default:\n command = \"\"\n \n packages = []\n for itm in self.resultList.selectedItems():\n packages.append(itm.text(0).strip())\n if packages:\n self.__pip.installPackages(packages, cmd=command)", "def install_modules(self, capability=None, name=None):\n repositories = self.weboob.repositories\n # Update modules list\n repositories.update_repositories(DummyProgress())\n # Get module infos\n if name:\n modules = {name: repositories.get_module_info(name)}\n else:\n modules = repositories.get_all_modules_info(capability)\n # Install modules if required\n for infos in modules.values():\n if infos is not None and (\n not infos.is_installed() or\n not infos.is_local()\n ):\n try:\n repositories.install(infos, progress=DummyProgress())\n except ModuleInstallError as exception:\n logger.info(str(exception))\n return {\n module_name: dict(infos.dump())\n for module_name, infos in modules.items()\n if infos.is_installed()\n }", "def install(pac, man=\"solo\"):\n if man == \"solo\" and paths.count(\"None\") == 5:\n # if theres only one package manger, find it and use it.\n #Ok this might not work since I added pip,gem, and cpan\n if pacman['fink'] != 'None':\n install_fink(pac)\n if pacman['brew'] != 'None':\n install_brew(pac)\n if pacman['port'] != 'None':\n install_port(pac)\n if pacman['pip'] != 'None':\n install_pip(pac)\n if pacman['gem'] != 'None':\n install_gem(pac)\n if pacman['cpan'] != 'None':\n install_cpan(pac)\n else:\n instain = {'fink': install_fink, 'brew': install_brew, 'port': install_port, 'pip': install_pip, 'gem': install_gem, 'cpan': install_cpan} \n try:\n f = instain[man]\n print \"Trying to install package %s on %s\" % (pac, man)\n f(pac)\n except KeyError:\n print \"Please use install like this: haberdashery.py install package manager: \\nhaberdashery.py install %s %s\" % (man, pac)", "def install(self, *packages):\n raise NotImplementedError", "def _install(self, host):\n pass", "def __do_zip_module_install(export_zip, item):\n\n # First copy the new file.\n if export_zip.install_item_to(item, DTF_MODULES_DIR) != 0:\n log.e(TAG, \"Error copying module '%s'\" % item.local_name)\n return -1\n\n # Update database\n if __update_module(item) == 0:\n log.e(TAG, \"Failed to update module '%s' details in database.\"\n % (item.name))\n return -2\n\n return 0", "def setup_module():\n common_setup_module()", "def install(package_path):\n logging.info(\"Installing package: \" + package_path)\n project_root = get_project_root()\n # Update the package short name to url using index.\n package_path = _install_package(project_root, package_path)", "def install():\n return InstallGit()", "def install():\n deploy()\n configure()", "def install_pkg(pip, package):\n if not os.path.isdir(INSTALL_DIR):\n os.makedirs(INSTALL_DIR)\n pip_cmds = ['mayapy', pip, 'install', package, '--target', INSTALL_DIR, '--log', DEPENDENCY_INSTALL_LOG]\n print(pip_cmds)\n installer = subprocess.Popen(pip_cmds)\n installer.wait()\n print(\"Successfully installed package {}\".format(package))\n if installer.returncode != 0:\n raise RuntimeError(\"Failed to install package: {}, please check logs in: 
{}\".format(package, DEPENDENCY_INSTALL_LOG))", "def install_from_rpm_py_package(self):\n message = '''\nCan not install RPM Python binding from the package,\nbecause these must be already present on the system.\n'''\n raise RpmPyPackageNotFoundError(message)", "def install():\n remote_egg_path = os.path.join(remote_egg_dir, get_egg_name())\n sudo('easy_install -U %s' % remote_egg_path)\n sudo('rm %s' % remote_egg_path)", "def install_and_import(package):\n import importlib\n try:\n importlib.import_module(package)\n except ImportError:\n checkPipInstalled()\n try:\n from pip import main as pipmain\n except:\n from pip._internal import main as pipmain\n pipmain(['install', package])\n finally:\n globals()[package] = importlib.import_module(package)", "def install(self, spec, prefix):\n make(\"install\", parallel=False)", "def addModule(self, name):\n if name in self.needed_modules: return\n self.needed_modules[name] = True #avoid circular references\n\n module = self.moduleResolver.find(name)\n ast = ast.parse(module.getContent(), module.getPath(), 'exec').body\n self.needed_modules[name] = ImportOneModule(self.getModule(name), ast, self).getModuleBody()", "def add_module(module_name: str):\n sys.path.append(module_name)", "def register_module(self, name: str, module: ModuleInstance) -> None:\n self.modules[name] = module", "def install():\n return {\n \"actions\": [TaskCreator.get_pip() + \" install --upgrade dist/*.whl\"],\n \"verbosity\": 2,\n \"setup\": [\"make_distribution\"],\n }", "def install_package(self, package):\n package = package.lower()\n command = shlex.split('sudo DEBIAN_FRONTEND=noninteractive apt-get -o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" --force-yes -y install ' + package)\n try:\n print subprocess.check_call(command, stderr=subprocess.STDOUT)\n except subprocess.CalledProcessError as e:\n if \"unable to locate package\" in e.output.lower():\n print \"Can't identify package name. 
Check spelling of package name\"", "def install(self,toolname,adminuser,adminpass):\n\n self.logger.info(\"installing the tool '%s'\" % (toolname))\n\n self.push_admin_install_button(toolname)\n\n self.compile_code(toolname,adminuser,adminpass)\n\n self.flip_tool_status('ToolsStatusUploadedAdminPage',toolname,'Installed')", "def install_from_repository(self) -> None:\n packages = self.list_packages(self.repository_packages, title=\"package\")\n\n # Check and install\n self.guest.execute(\n ShellScript(f'rpm -q --whatprovides {packages.to_script()}')\n | self.operation_script(Command('install'), packages)\n )", "def install_from_repository(self) -> None:\n self.sort_packages()\n\n # Install recommended packages\n if self.recommended_packages:\n self.list_packages(self.recommended_packages, title=\"package\")\n for package in self.recommended_packages:\n try:\n self.perform_operation(\n Command('install'),\n Command(package)\n )\n except tmt.utils.RunError as error:\n self.debug(f\"Package installation failed: {error}\")\n self.warn(f\"Unable to install recommended package '{package}'.\")\n continue\n\n # Install required packages\n if self.required_packages:\n self.perform_operation(\n Command('install'),\n self.list_packages(self.required_packages, title=\"package\")\n )", "def install(self) -> None:\n if self.local_packages:\n self.prepare_install_local()\n self.install_local()\n if self.remote_packages:\n self.install_from_url()\n if self.repository_packages:\n self.install_from_repository()\n if self.debuginfo_packages:\n self.install_debuginfo()", "def do_package(package):\n\tn_ucr = extFile(package, 'univention-config-registry')\n\tif not os.path.exists(n_ucr):\n\t\treturn\n\n\tf_ucr = open(n_ucr, 'r')\n\n\tfor item in univention.config_registry.parseRfc822(f_ucr.read()):\n\t\ttyp = item['Type'][0]\n\t\tif typ == 'file':\n\t\t\tf = item['File'][0]\n\t\t\tdoIt('install', '-d', destDir(f, package))\n\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package))\n\t\t\tfor key in ('Preinst', 'Postinst'):\n\t\t\t\tif key in item:\n\t\t\t\t\tmod = item[key][0]\n\t\t\t\t\tdoIt('install', '-d', destDir(mod, package, 'modules'))\n\t\t\t\t\tdoIt('cp', '-a', srcPath(mod), destPath(mod, package, 'modules'))\n\t\telif typ == 'subfile':\n\t\t\tf = item['Subfile'][0]\n\t\t\tdoIt('install', '-d', destDir(f, package))\n\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package))\n\t\telif typ == 'multifile':\n\t\t\tf = item['Multifile'][0]\n\t\t\tif os.path.exists(f):\n\t\t\t\tdoIt('install', '-d', destDir(f, package))\n\t\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package))\n\t\t\tfor key in ('Preinst', 'Postinst'):\n\t\t\t\tif key in item:\n\t\t\t\t\tmod = item[key][0]\n\t\t\t\t\tdoIt('install', '-d', destDir(mod, package, 'modules'))\n\t\t\t\t\tdoIt('cp', '-a', srcPath(mod), destPath(mod, package, 'modules'))\n\t\telif typ == 'script':\n\t\t\tf = item['Script'][0]\n\t\t\tdoIt('install', '-d', destDir(f, package, 'scripts'))\n\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package, 'scripts'))\n\t\t\tdoIt('chmod', 'a+x', destPath(f, package, 'scripts'))\n\t\telif typ == 'module':\n\t\t\tf = item['Module'][0]\n\t\t\tdoIt('install', '-d', destDir(f, package, 'modules'))\n\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package, 'modules'))\n\t\telse:\n\t\t\tprint >>sys.stderr, 'Unknown type: %s' % typ\n\t\t\treturn\n\n\tf_ucr.close()\n\n\tdoIt('install', '-d', destDir('', package, 'info'))\n\tdoIt('install', '-m644', n_ucr, destPath(package+'.info', package, 'info'))\n\tmapping_file = extFile( package, 
'univention-config-registry-mapping')\n\tif os.path.exists(mapping_file):\n\t\tdoIt('install', '-d', destDir('', package, 'mapping'))\n\t\tdoIt('install', '-m644', mapping_file, destPath(package+'.univention-config-registry-mapping', package, 'mapping'))\n\n\tdata = {\n\t\t\t'pkg': quote(package),\n\t\t\t'info': quote(\"/etc/univention/templates/info/%s.info\" % package),\n\t\t\t'removed': quote(\"/etc/univention/templates/removed/%s.info\" % package),\n\t\t\t}\n\n\tf_preinst = open(extFile(package, 'preinst.debhelper'), 'a')\n\tf_preinst.write('# Automatically added by univention-install-config-registry\\n')\n\tf_preinst.write('if [ \"$1\" = \"install\" ] ; then\\n')\n\tf_preinst.write(' [ -e %(removed)s ] && [ ! -e %(info)s ] && mv %(removed)s %(info)s || true\\n' % data)\n\tf_preinst.write('fi\\n')\n\tf_preinst.write('# End automatically added section\\n')\n\tf_preinst.close()\n\n\tf_postinst = open(extFile(package, 'postinst.debhelper'), 'a')\n\tf_postinst.write('# Automatically added by univention-install-config-registry\\n')\n\tf_postinst.write('if [ \"$1\" = \"abort-remove\" ]; then\\n')\n\tf_postinst.write(' [ -e %(removed)s ] && mv %(removed)s %(info)s || true\\n' % data)\n\tf_postinst.write('fi\\n')\n\tf_postinst.write('[ -x /usr/sbin/univention-config-registry ] && univention-config-registry register %(pkg)s || true\\n' % data)\n\tf_postinst.write('# End automatically added section\\n')\n\tf_postinst.close()\n\n\tf_prerm = open(extFile(package, 'prerm.debhelper'), 'a')\n\tf_prerm.write('# Automatically added by univention-install-config-registry\\n')\n\tf_prerm.write('if [ \"$1\" = \"remove\" ] && [ -e %(info)s ] ; then\\n' % data)\n\tf_prerm.write(' [ -x /usr/sbin/univention-config-registry ] && univention-config-registry unregister %(pkg)s || true\\n' % data)\n\tf_prerm.write(' mv %(info)s %(removed)s || true\\n' % data)\n\tf_prerm.write('fi\\n')\n\tf_prerm.write('# End automatically added section\\n')\n\tf_prerm.close()\n\n\tdoIt('perl', '-e', 'use Debian::Debhelper::Dh_Lib;addsubstvar(\"%s\", \"misc:Depends\", \"univention-config (>= 7.0.25)\");' % package)", "def install(self, parent):\r\n pass", "def install():\n ArticleDataProvider.register()\n ProductDataProvider.register()", "def register_packages(self, module_name, extra_package):\n self.creator_manager.register_packages(module_name, extra_package)", "async def depends_on_module(self, *modules: str) -> None:\n return await self.AD.app_management.register_module_dependency(self.name, *modules)", "def install(force, packages):\n setup_audit_log()\n for pspec in CFG.package_specs(packages):\n perform_install(pspec, is_upgrade=False, force=force, quiet=False)", "def on_install(self, request, trigger_context):\n raise NotImplementedError", "def set_module(self, module):\n setattr(self, \"module\", module)", "def npm_install(self, package):\n self.summarize_operation(\"Installing \" + package)\n print subprocess.call(shlex.split(\"sudo npm install --save \" + package))", "def install():\n verun('pip install -r {0}'.format(requirements))", "def install_egg(self, egg_name):\n if not os.path.exists(self.egg_directory):\n os.makedirs(self.egg_directory)\n self.requirement_set.add_requirement(\n InstallRequirement.from_line(egg_name, None)\n )\n try:\n self.requirement_set.prepare_files(self.finder)\n self.requirement_set.install([\"--prefix=\" + self.egg_directory], [])\n except DistributionNotFound:\n self.requirement_set.requirements._keys.remove(egg_name)\n raise PipException()", "def install_from_rpm_py_package(self):\n 
message = '''\nCan not install RPM Python binding from package.\nBecause there is no RPM Python binding deb package.\n'''\n raise RpmPyPackageNotFoundError(message)", "def install(\n action : Optional[List[str]] = None,\n **kw : Any\n ) -> SuccessTuple:\n from meerschaum.utils.misc import choose_subaction\n options = {\n 'plugins' : _install_plugins,\n 'packages' : _install_packages,\n }\n return choose_subaction(action, options, **kw)", "def install(self):\n if self.installed:\n return\n if not self.installable:\n raise self.unsure_how_to_install()\n logger.notice(\"Installing '%s'...\", self.name)\n # Call the subclass implementation\n self._install()\n # Make sure it actually performed as promised\n if not self.path:\n raise HelperNotFoundError(\n 1,\n \"Installation did not raise an exception, but afterward, \"\n \"unable to locate {0}!\".format(self.name))\n\n logger.notice(\"Successfully installed '%s'\", self.name)", "def test_pydroid_pip_install_cmd_line(self):\n\n module_name = 'simplekv'\n package_dir = os.path.join(site_packages_dir(), module_name)\n self.assertFalse(os.path.exists(package_dir))\n cmd = ['pydroid', 'pip', 'install', module_name]\n subprocess.call(cmd)\n self.assertTrue(os.path.exists(package_dir))", "def test_install(self):\n\n\n adminuser,adminpass = self.testdata.find_account_for('toolmanager')\n\n self.utils.account.login_as(adminuser,adminpass)\n\n self.contribtool.install(TOOLNAME,adminuser,adminpass)", "def npm_install_globally(self, package):\n self.summarize_operation(\"Installing \" + package)\n print subprocess.call(shlex.split(\"sudo npm install -g \" + package))", "def install(self, repo):\n\n for subsystem in repo.options.get('subsystems', []):\n name = subsystem.get('name')\n args = subsystem.get('args', {})\n\n if name is None:\n raise InvalidSettingError('subsystem name', 'missing in settings file.')\n\n if name != 'SphinxDocumentation':\n raise InvalidSettingError('subsystem name', \"name '{}' is unknown\".format(name))\n\n repo.add_subsystem(SphinxDocumentation(repo, **args))\n\n repo.add_subsystem(BasicPythonSupport(repo))", "def YumInstall(vm):\n raise NotImplementedError", "def install_from_repository(self) -> None:\n packages = self.list_packages(self.repository_packages, title=\"package\")\n\n # Extra ignore/check for yum to workaround BZ#1920176\n check = ShellScript(f'rpm -q --whatprovides {packages.to_script()}')\n script = check | self.operation_script(Command('install'), packages)\n\n if self.skip_missing:\n script |= ShellScript('true')\n else:\n script &= check\n\n # Check and install\n self.guest.execute(script)", "def install():\n execute(generate)\n execute(upload)", "def install_from_repository(self) -> None:\n pass", "def YumInstall(vm):\n _Install(vm)", "def YumInstall(vm):\n _Install(vm)", "def install_module(request, monkeypatch):\n if request.param == 'sqlite':\n monkeypatch.setenv('DB_NAME', ':memory:')\n else:\n monkeypatch.setenv('DB_NAME', 'test_' + str(int(time.time())))\n\n from trytond.tests import test_tryton\n test_tryton.install_module('audit_trail')", "def test_install(self):\n pass", "def install(context):\n portal = context.getSite()\n sm = getSiteManager(portal)\n # Local components are not per-container; they are per-sitemanager. It just so happens that every Plone site has a sitemanager. 
Hooray.\n sm.registerAdapter(FolderText, name='FolderText')\n \n return \"Registered the extender at the root of the Plone site.\"", "def install_step(self):\n\n cmd = \"./INSTALL -noroot -silent -install_dir %s\" % self.installdir\n run_cmd(cmd, log_all=True, simple=True)\n\n adjust_permissions(self.installdir, stat.S_IWOTH, add=False)", "def install(self):\n if not self._is_installed():\n _logger.debug('Installing {name}...'.format(name=self.file_name))\n self._download_archive()\n self._unpack_archive()\n self._remove_archive()\n self._make_executable()\n else:\n _logger.debug('{name} is already installed.'.format(name=self.file_name))\n\n self._add_variables()", "def enablePackage(self, *args):\n return _libsbml.SBase_enablePackage(self, *args)", "def Install (self):\n if self in sys.meta_path:\n return\n sys.meta_path.insert (0, self)", "def pipInstall(self):\n\n print \"Does Nothing\"", "def add_module(self, *args, **kwargs):\n# if 'path' in kwargs:\n# path = kwargs['path']\n# else:\n# path = os.getcwd()\n#\n# if len(args) > 0:\n# module = args[0]\n# elif 'module' in kwargs:\n# module = kwargs['module']\n#\n# if 'path' not in kwargs:\n# path = os.getcwd()\n# kwargs['path'] = path\n\n if 'module' not in kwargs:\n if len(args) > 0:\n module = args[0]\n kwargs['module'] = module\n\n# if 'module' in kwargs:\n if len(kwargs) > 0:\n self._data.add_detector(self._name, **kwargs)", "def _installed_apps_add(self):\n config.add_plugin(self.module_path)", "def install_from_url(self) -> None:\n self.perform_operation(\n Command('install'),\n self.list_packages(self.remote_packages, title=\"remote package\")\n )", "def install_module(request):\n reuse_db = request.config.getoption(\"--reuse-db\")\n\n if request.config.getoption(\"--db\") == 'sqlite':\n os.environ['TRYTOND_DATABASE_URI'] = \"sqlite://\"\n if reuse_db:\n # A hack to check if the database exists and if it\n # does, load that and run tests.\n Database = backend.get('Database')\n\n # cursor.test forgets to set flavor!\n # no time to report a bug!\n Flavor.set(Database.flavor)\n os.environ['DB_NAME'] = 'fulfilio'\n else:\n os.environ['DB_NAME'] = ':memory:'\n\n elif request.config.getoption(\"--db\") == 'postgres':\n os.environ['TRYTOND_DATABASE_URI'] = \"postgresql://\"\n if reuse_db:\n os.environ['DB_NAME'] = 'test_fulfilio'\n else:\n os.environ['DB_NAME'] = 'test_' + str(int(time.time()))\n\n if reuse_db:\n Database = backend.get('Database')\n database = Database().connect()\n cursor = database.cursor()\n databases = database.list(cursor)\n cursor.close()\n if os.environ['DB_NAME'] in databases:\n if request.config.getoption(\"--reset-db\"):\n cursor = database.cursor()\n databases = database.drop(cursor, os.environ['DB_NAME'])\n cursor.close()\n else:\n # tryton test forgets to init the pool\n # for existing database\n Pool(os.environ['DB_NAME']).init()\n\n config.set('database', 'uri', os.environ['TRYTOND_DATABASE_URI'])\n from trytond.tests import test_tryton\n test_tryton.install_module('payment_gateway_stripe')", "def installQPackage(self, name, domain, version, reconfigure=True):\n installPackageCommand = \"\"\"p = q.qp.find(name=\"%(name)s\", domain=\"%(domain)s\", version=\"%(version)s\")\nif not p:\n raise valueError(\"Package %(domain)s, %(name)s, %(version)s not found\")\nelif len(p) <> 1:\n raise valueError(\"Too many packages found with search criteria %(domain)s, %(name)s, %(version)s\")\nelif not p[0].isInstalled():\n p[0].install()\nelse:\n print \"Package %(domain)s, %(name)s, %(version)s is already 
installed\"\n\"\"\"%{'name':name,'version':version,'domain':domain,'reconfigure':reconfigure}\n self.executeQshell(installPackageCommand)\n if reconfigure:\n self.executeQshell(\"q.qp._runPendingReconfigeFiles()\")", "def add_module_import(self, module):\n self._main_model.add_module_import(module)", "def install():\n build()\n sh(\"%s setup.py develop\" % PYTHON)", "def install(i):\n\n cm_kernel.print_for_con('***********************************************')\n cm_kernel.print_for_con('Installing code ...')\n\n # Check vars\n if 'target_os_uoa' not in i: return {'cm_return':1, 'cm_error':'\"target_os_uoa\" is not defined in \"code install\"'}\n\n # Create entry\n ii={'cm_run_module_uoa':ini['cm_module_uid'],\n 'cm_action':'update'}\n if 'install_data_uid' in i and i['install_data_uid']!='': \n ii['cm_data_uid']=i['install_data_uid']\n if 'install_data_alias' in i and i['install_data_alias']!='': \n ii['cm_data_uoa']=i['install_data_alias']\n if 'install_data_display_as_alias' in i: \n ii['cm_display_as_alias']=i['install_data_display_as_alias']\n if 'install_module_uoa' in i and i['install_module_uoa']!='':\n ii['cm_run_module_uoa']=i['install_module_uoa']\n if 'cm_array' in i and len(i['cm_array'])>0: ii['cm_array']=i['cm_array']\n if 'install_repo_uoa' in i and i['install_repo_uoa']!='': \n ii['cm_repo_uoa']=i['install_repo_uoa']\n r=cm_kernel.access(ii)\n if r['cm_return']>0: return r\n\n target_path=r['cm_path']\n target_uid=r['cm_uid']\n target_alias=r['cm_alias']\n\n # Prepare script\n rx=get_env({'cm_data_uoa':target_uid,\n 'os_uoa':i['target_os_uoa']})\n if rx['cm_return']>0: return rx\n\n script=rx['cm_string']\n\n ii={'script_name':script,\n 'skip_extension':'yes',\n 'target_os_uoa':i['target_os_uoa'],\n 'cm_path':target_path}\n if 'code_deps' in i and i.get('skip_code_deps','')!='yes':\n ii['code_deps']=i['code_deps']\n\n # Add remark about how code was built\n if 'add_rem_to_script' in i:\n run_commands_before=[]\n run_commands_before.append('')\n for x in i['add_rem_to_script']:\n run_commands_before.append(x)\n ii['run_commands_before']=run_commands_before\n\n rx=prepare_script(ii)\n if rx['cm_return']>0: return rx\n\n r['script_name']=rx['cm_path']\n r['script_filename']=script\n\n return r", "def previewinstall(self, installed=[]):\n\n if( self.mode == \"install\"):\n \n # resolve circular dependencies\n if( self.name in installed ):\n return\n else:\n installed.append( self.name )\n \n print \"\\n\" + 20*'-' + \" Starting \" + self.name + \" Installation Test \" + 20*'-' + '\\n'\n \n # additional modules\n mods = self.optmodules + self.reqmodules + self.reqmodules_external + self.reqmodules_buildonly\n if( len(mods) > 0 ):\n for modname in mods:\n mod = self.parent.module(modname)\n if( mod.mode == \"install\" and not mod.name in installed ):\n print \"+ \" + self.name + \" will launch installation of \" + mod.name\n mod.previewinstall(installed)\n print \"+ \"+ self.name + \" using \" + mod.name + \" at [ \" + mod.installPath + \" ]\"\n\n print \"\\n+ Environment Settings used for building \" + self.name + \":\"\n # print environment settings recursively\n self.setEnv(self, [], True )\n\n if( self.hasCMakeBuildSupport ):\n #self.setCMakeVars(self, [])\n print \"\\n+ Generated CMake command for building \" + self.name + \":\"\n print ' $ ',self.genCMakeCmd()\n \n print \"\\n+ \" + self.name + \" installation finished.\"\n print '\\n' + 20*'-' + \" Finished \" + self.name + \" Installation Test \" + 20*'-' + '\\n'", "def 
test_run_pymodules_install_optional_project_dir(self):\n ctx = mock.Mock()\n modules = []\n project_dir = None\n with mock.patch('pythonforandroid.build.info') as m_info:\n assert run_pymodules_install(ctx, modules, project_dir) is None\n assert m_info.call_args_list[-1] == mock.call(\n 'No Python modules and no setup.py to process, skipping')", "def download_and_install(self):\n if self.is_installed_from_bin:\n try:\n self.installer.install_from_rpm_py_package()\n return\n except RpmPyPackageNotFoundError as exc:\n Log.warn('RPM Py Package not found. reason: {0}'.format(exc))\n\n # Download and install from the source.\n top_dir_name = self.downloader.download_and_expand()\n rpm_py_dir = os.path.join(top_dir_name, 'python')\n\n setup_py_in_found = False\n with Cmd.pushd(rpm_py_dir):\n if self.installer.setup_py.exists_in_path():\n setup_py_in_found = True\n self.installer.run()\n\n if not setup_py_in_found:\n self.installer.install_from_rpm_py_package()", "def install_package(package, remote):\n log.info('Installing package %s on %s', package, remote)\n flavor = remote.os.package_type\n if flavor == 'deb':\n pkgcmd = ['DEBIAN_FRONTEND=noninteractive',\n 'sudo',\n '-E',\n 'apt-get',\n '-y',\n '--force-yes',\n 'install',\n '{package}'.format(package=package)]\n elif flavor == 'rpm':\n # FIXME: zypper\n pkgcmd = ['sudo',\n 'yum',\n '-y',\n 'install',\n '{package}'.format(package=package)]\n else:\n log.error('install_package: bad flavor ' + flavor + '\\n')\n return False\n return remote.run(args=pkgcmd)", "def software_api(self, install_params):\n try:\n self.sw = jnpr.junos.utils.sw.SW(self.dev)\n ok, msg_ret = self.sw.install(**install_params)\n if ok is not True:\n raise AnsibleError('Unable to install the software %s' % msg_ret)\n msg = 'Package %s successfully installed. Response from device is: %s' % (\n install_params.get('package') or\n install_params.get('pkg_set'),\n msg_ret)\n self.queue_message(\"log\", \"%s\" % msg)\n return msg\n except (self.pyez_exception.ConnectError,\n self.pyez_exception.RpcError) as ex:\n raise AnsibleError('Installation failed. 
Error: %s' % str(ex))", "def pip_install():\n _require_environment()\n remote(PIP_INSTALL_PREFIX)", "def do_post_install(self, context):\n pass", "def pre_install_pkg(self, installable_pkg):\n pass", "def setUp(self):\n trytond.tests.test_tryton.install_module('nereid_webshop')", "def install_cached_package(self, package_name):\n self._log.info(\"Installing package {!r} from talus pypi\".format(package_name))\n pinfo = self.cache[\"pypi\"][package_name]\n pypi_hostname = re.match(r'^.*://([^/]+)/.*$', self.pypi_loc).group(1)\n\n try:\n self._run_pip_main([\n \"install\",\n \"--user\",\n \"--trusted-host\", pypi_hostname,\n \"-i\", self.pypi_loc,\n package_name\n ])\n except SystemExit as e:\n raise Exception(\"Is SystemExit expected?\")", "def install(cli_config, path):\n commands = AssetsCommands(cli_config)\n\n click.secho(\"Installing React module...\", fg=\"green\")\n steps = commands.link_js_module(path)\n on_fail = \"Failed to install React module.\"\n on_success = \"React module installed successfully.\"\n\n run_steps(steps, on_fail, on_success)", "def _install_packages(module, path, packages, allowed_dists, repo):\n cmd = \"./LuaDist/bin/luadist install \"\n\n # Add packages to command\n for package in packages:\n cmd += package + \" \"\n\n # Add types of dists allowed to command\n source_allowed = \"true\"\n binary_allowed = \"true\"\n if allowed_dists == \"binary\":\n source_allowed = \"false\"\n elif allowed_dists == \"source\":\n binary_allowed = \"false\"\n cmd += \" -source=\" + source_allowed + \" -binary=\" + binary_allowed\n\n # Add repository to command\n cmd += ' -repos=\"' + repo + '\"'\n\n ret_code, out, err = module.run_command(cmd, cwd=path)\n already_installed = \"No packages to install\" in out\n\n if ret_code != 0 and not already_installed:\n module.fail_json(\n rc=ret_code,\n stdout=out,\n stderr=err,\n msg=\"Cannot install one or more of the specified packages, \"\n + \"make sure all packages exist in the configured repository.\",\n )\n\n return cmd" ]
[ "0.6969152", "0.69388556", "0.6872978", "0.68555695", "0.6801365", "0.67761844", "0.6767428", "0.6733695", "0.6714173", "0.66525847", "0.6616081", "0.65094006", "0.6481745", "0.64168596", "0.63249385", "0.6309751", "0.6267477", "0.6245471", "0.6231327", "0.6215618", "0.6200066", "0.6157249", "0.61510795", "0.6132066", "0.6122041", "0.6117013", "0.6113363", "0.60535926", "0.604949", "0.60188705", "0.60149723", "0.6005884", "0.6001865", "0.5994445", "0.5952823", "0.5950878", "0.59120035", "0.59031105", "0.5902215", "0.58973765", "0.5897239", "0.58967173", "0.5886299", "0.5875481", "0.586653", "0.58250093", "0.58176196", "0.58119714", "0.5811162", "0.5804783", "0.5802072", "0.5777229", "0.5762134", "0.5754137", "0.5749512", "0.5730237", "0.57127273", "0.57088447", "0.56937397", "0.569082", "0.56792736", "0.56730485", "0.5671629", "0.56652313", "0.56532127", "0.56483585", "0.56359285", "0.5615233", "0.56139237", "0.56120026", "0.56077254", "0.5596886", "0.5596886", "0.55855155", "0.55835664", "0.55749494", "0.5555037", "0.5553028", "0.5551551", "0.5542101", "0.5540291", "0.5537365", "0.5530371", "0.55150586", "0.55144536", "0.5511179", "0.54969233", "0.54825425", "0.5478699", "0.54667836", "0.5464257", "0.54629314", "0.5458849", "0.545141", "0.5439027", "0.5435583", "0.5434709", "0.54272467", "0.542712", "0.5424678", "0.5420664" ]
0.0
-1
tell the module manager to install a specific module.
def do_workload(self): module_manager = self._core.get_module_manager() module = module_manager.get_module_by_name(self._values["name"]) module_manager.update_module(module)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def install_package(self, package):\n raise NotImplementedError(\"install_package not implemented!\")", "def install(*module_names):\n for m_name in module_names:\n if not find_spec(m_name):\n if input(NOT_FOUND_MSG.format(m_name)).lower() not in 'Nn':\n if main(['install', m_name]):\n print(NOT_INSTALLED_MSG.format(m_name))\n return 1\n else:\n return 1\n return 0", "def install_package(self, module: str, **kwargs):\n logging.message('Installing module from %s %s' % (module, str(kwargs)))\n package = importlib.import_module(module)\n if kwargs.get('package'):\n kwargs.pop('package')\n setup_return = package.Setup(self, module, **kwargs)\n ff_id = kwargs.get('ff_id')\n initial_values = kwargs.get('initial_values')\n if ff_id and initial_values:\n self.device_initial_values[ff_id] = initial_values\n scheduler.runInS(10, self.refresh_firebase, job_id='FIREBASE_REFRESH_CORE')\n scheduler.runInS(15, self.export_all_components, job_id='CORE_EXPORT_ALL')\n return setup_return", "def do(self):\r\n parameters = ParametersParserStr(self.args_parameters).get()\r\n self.core.install(self.product_names, parameters, with_dependencies=True)", "def install(self, egg, dir_path):", "def install():\n PackCommandExecutor().pack()\n InstallCommandExecutor().install()", "def action_install(args):\n\n dest = Path(args.dest)\n\n module_root = Path(\"modules/\")\n modules = load_modules(module_root)\n\n try:\n candidates = {modules[target] for target in args.targets}\n dependencies = set()\n for candidate in candidates:\n dependencies |= set(candidate.resolve_dependencies(modules))\n candidates |= dependencies\n except KeyError as e:\n key = e.args[0]\n print(f\"{key} module not found\")\n sys.exit(1)\n\n print(f\"Will install: {', '.join(c.name for c in candidates)}\")\n if not confirm(\"install?\", default=True):\n return\n\n for mod in candidates:\n print(f\"Installing {mod.name}...\")\n mod.install(dest)", "def install_from_rpm_py_package(self):\n raise NotImplementedError('Implement this method.')", "def install(repo, package, python, editable):\n if repo.install(package, python, editable):\n click.echo('Done.')", "def __do_single_module_install(item):\n\n name = item.name\n local_name = item.local_name\n install_name = item.install_name\n\n # First copy the new file.\n if copy_file(local_name, install_name, DTF_MODULES_DIR) != 0:\n log.e(TAG, \"Error copying module '%s'\" % (local_name))\n return -1\n\n # Update database\n if __update_module(item) == 0:\n log.e(TAG, \"Failed to update module '%s' details in database.\"\n % (name))\n return -2\n\n log.i(TAG, \"Module '%s' installed successfully!\" % name)\n return 0", "def install_module(\n self,\n install_optional=False,\n production_only=False,\n force=False,\n frozen_lockfile=True,\n node_paths=None):\n args=self._get_installation_args(\n install_optional=install_optional,\n production_only=production_only,\n force=force,\n frozen_lockfile=frozen_lockfile)\n return self.run_command(args=args, node_paths=node_paths)", "def _install(self):\n # Default implementation\n for pm_name, package in self._provider_package.items():\n if helpers[pm_name]:\n helpers[pm_name].install_package(package)\n return\n raise self.unsure_how_to_install()", "def setup_module(module):\n print(\"Start rishabhSetupModule of Program\")", "def install(self):\n raise NotImplementedError", "def install(package):\n subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", package])", "def _install(self):\n\n pass", "def setModule(name, module):", "def on_install(self, 
event):\n unit = self.model.unit\n\n # Install your software and its dependencies\n\n unit.status = ActiveStatus()", "def install(self):\n # This installs the packages defined in self.packages\n super().install()\n # Do any other installation work that is needed. If a license key is\n # required then use the custom_assess_status_check() function below to\n # determine whether it is needed.\n # This assess_status() will determine what status the charm is at after\n # install.\n self.assess_status()", "def install(self):\n return self._process('install')", "def install(self, no_dependencies: bool = True):\n return PackageHelper.install_package(name=self.name, no_dependencies=no_dependencies)", "def do_workload(self):\n module_manager = self._core.get_module_manager()\n module_manager.install_module(self.get_meta())", "def test_setup_module(self):\n pluggable_package.setup(self._test_module)\n self._test_setup(self._test_module)", "def install(self, provider):\n pass # pragma: no cover", "def process_module(project_dir, config, section, overrides=None, symlink=False,\n interactive=True):\n # Extract fields from the section (and write-back any missing).\n if not config.has_option(section, NAME_KEY):\n config.set(section, NAME_KEY, section)\n name = config.get(section, NAME_KEY, vars=overrides)\n source = config.get(section, SOURCE_KEY, vars=overrides)\n target = config.get(section, TARGET_KEY, vars=overrides)\n optional = config.getboolean(section, OPTIONAL_MODULE_KEY)\n default = config.getboolean(section, DEFAULT_MODULE_KEY)\n override_target = config.getboolean(section, OVERRIDE_TARGET_KEY)\n # Collect user input if necessary.\n if interactive and optional:\n user = raw_input('Install %s [%s]? ' % (name, 'Y/n' if default else 'N/y'))\n if (user and user not in YES_VALUES) or (not user and not default):\n print ' Skipped'\n return\n else:\n print \"Installing %s...\" % (name)\n if interactive and override_target:\n user = raw_input(' Install directory [default: %s]: ' % (target))\n if user:\n config.set(name, TARGET_KEY, user)\n target = config.get(name, TARGET_KEY, vars=overrides)\n # Actually install the module.\n source = os.path.expanduser(os.path.join(project_dir, source))\n target = os.path.expanduser(target)\n # Check source exists.\n if not os.path.exists(source):\n raise ConfigError('Source file %s does not exist' % (source))\n # Check target directory exists and make it if necessary.\n target_dir = os.path.dirname(target)\n if not os.path.exists(target_dir):\n os.makedirs(target_dir)\n # Check the target itself does not exist and remove it if it does.\n if os.path.exists(target):\n if os.path.isfile(target):\n os.remove(target)\n else:\n if interactive:\n user = raw_input(' WARNING: %s exists. Replace [Y/n]? 
' % (target))\n if user and user not in YES_VALUES:\n print ' Skipped'\n return\n shutil.rmtree(target)\n # Install the file\n if symlink:\n os.symlink(os.path.abspath(source), target)\n else:\n # Copy the file\n if os.path.isfile(source):\n shutil.copy2(source, target)\n else:\n shutil.copytree(source, target)\n print ' Installed'", "def __install(self):\n command = self.pipComboBox.currentText()\n if command == self.__default:\n command = \"\"\n \n packages = []\n for itm in self.resultList.selectedItems():\n packages.append(itm.text(0).strip())\n if packages:\n self.__pip.installPackages(packages, cmd=command)", "def install_modules(self, capability=None, name=None):\n repositories = self.weboob.repositories\n # Update modules list\n repositories.update_repositories(DummyProgress())\n # Get module infos\n if name:\n modules = {name: repositories.get_module_info(name)}\n else:\n modules = repositories.get_all_modules_info(capability)\n # Install modules if required\n for infos in modules.values():\n if infos is not None and (\n not infos.is_installed() or\n not infos.is_local()\n ):\n try:\n repositories.install(infos, progress=DummyProgress())\n except ModuleInstallError as exception:\n logger.info(str(exception))\n return {\n module_name: dict(infos.dump())\n for module_name, infos in modules.items()\n if infos.is_installed()\n }", "def install(pac, man=\"solo\"):\n if man == \"solo\" and paths.count(\"None\") == 5:\n # if theres only one package manger, find it and use it.\n #Ok this might not work since I added pip,gem, and cpan\n if pacman['fink'] != 'None':\n install_fink(pac)\n if pacman['brew'] != 'None':\n install_brew(pac)\n if pacman['port'] != 'None':\n install_port(pac)\n if pacman['pip'] != 'None':\n install_pip(pac)\n if pacman['gem'] != 'None':\n install_gem(pac)\n if pacman['cpan'] != 'None':\n install_cpan(pac)\n else:\n instain = {'fink': install_fink, 'brew': install_brew, 'port': install_port, 'pip': install_pip, 'gem': install_gem, 'cpan': install_cpan} \n try:\n f = instain[man]\n print \"Trying to install package %s on %s\" % (pac, man)\n f(pac)\n except KeyError:\n print \"Please use install like this: haberdashery.py install package manager: \\nhaberdashery.py install %s %s\" % (man, pac)", "def install(self, *packages):\n raise NotImplementedError", "def _install(self, host):\n pass", "def __do_zip_module_install(export_zip, item):\n\n # First copy the new file.\n if export_zip.install_item_to(item, DTF_MODULES_DIR) != 0:\n log.e(TAG, \"Error copying module '%s'\" % item.local_name)\n return -1\n\n # Update database\n if __update_module(item) == 0:\n log.e(TAG, \"Failed to update module '%s' details in database.\"\n % (item.name))\n return -2\n\n return 0", "def setup_module():\n common_setup_module()", "def install(package_path):\n logging.info(\"Installing package: \" + package_path)\n project_root = get_project_root()\n # Update the package short name to url using index.\n package_path = _install_package(project_root, package_path)", "def install():\n return InstallGit()", "def install():\n deploy()\n configure()", "def install_pkg(pip, package):\n if not os.path.isdir(INSTALL_DIR):\n os.makedirs(INSTALL_DIR)\n pip_cmds = ['mayapy', pip, 'install', package, '--target', INSTALL_DIR, '--log', DEPENDENCY_INSTALL_LOG]\n print(pip_cmds)\n installer = subprocess.Popen(pip_cmds)\n installer.wait()\n print(\"Successfully installed package {}\".format(package))\n if installer.returncode != 0:\n raise RuntimeError(\"Failed to install package: {}, please check logs in: 
{}\".format(package, DEPENDENCY_INSTALL_LOG))", "def install_from_rpm_py_package(self):\n message = '''\nCan not install RPM Python binding from the package,\nbecause these must be already present on the system.\n'''\n raise RpmPyPackageNotFoundError(message)", "def install():\n remote_egg_path = os.path.join(remote_egg_dir, get_egg_name())\n sudo('easy_install -U %s' % remote_egg_path)\n sudo('rm %s' % remote_egg_path)", "def install_and_import(package):\n import importlib\n try:\n importlib.import_module(package)\n except ImportError:\n checkPipInstalled()\n try:\n from pip import main as pipmain\n except:\n from pip._internal import main as pipmain\n pipmain(['install', package])\n finally:\n globals()[package] = importlib.import_module(package)", "def install(self, spec, prefix):\n make(\"install\", parallel=False)", "def addModule(self, name):\n if name in self.needed_modules: return\n self.needed_modules[name] = True #avoid circular references\n\n module = self.moduleResolver.find(name)\n ast = ast.parse(module.getContent(), module.getPath(), 'exec').body\n self.needed_modules[name] = ImportOneModule(self.getModule(name), ast, self).getModuleBody()", "def add_module(module_name: str):\n sys.path.append(module_name)", "def register_module(self, name: str, module: ModuleInstance) -> None:\n self.modules[name] = module", "def install():\n return {\n \"actions\": [TaskCreator.get_pip() + \" install --upgrade dist/*.whl\"],\n \"verbosity\": 2,\n \"setup\": [\"make_distribution\"],\n }", "def install_package(self, package):\n package = package.lower()\n command = shlex.split('sudo DEBIAN_FRONTEND=noninteractive apt-get -o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" --force-yes -y install ' + package)\n try:\n print subprocess.check_call(command, stderr=subprocess.STDOUT)\n except subprocess.CalledProcessError as e:\n if \"unable to locate package\" in e.output.lower():\n print \"Can't identify package name. 
Check spelling of package name\"", "def install(self,toolname,adminuser,adminpass):\n\n self.logger.info(\"installing the tool '%s'\" % (toolname))\n\n self.push_admin_install_button(toolname)\n\n self.compile_code(toolname,adminuser,adminpass)\n\n self.flip_tool_status('ToolsStatusUploadedAdminPage',toolname,'Installed')", "def install_from_repository(self) -> None:\n packages = self.list_packages(self.repository_packages, title=\"package\")\n\n # Check and install\n self.guest.execute(\n ShellScript(f'rpm -q --whatprovides {packages.to_script()}')\n | self.operation_script(Command('install'), packages)\n )", "def install_from_repository(self) -> None:\n self.sort_packages()\n\n # Install recommended packages\n if self.recommended_packages:\n self.list_packages(self.recommended_packages, title=\"package\")\n for package in self.recommended_packages:\n try:\n self.perform_operation(\n Command('install'),\n Command(package)\n )\n except tmt.utils.RunError as error:\n self.debug(f\"Package installation failed: {error}\")\n self.warn(f\"Unable to install recommended package '{package}'.\")\n continue\n\n # Install required packages\n if self.required_packages:\n self.perform_operation(\n Command('install'),\n self.list_packages(self.required_packages, title=\"package\")\n )", "def install(self) -> None:\n if self.local_packages:\n self.prepare_install_local()\n self.install_local()\n if self.remote_packages:\n self.install_from_url()\n if self.repository_packages:\n self.install_from_repository()\n if self.debuginfo_packages:\n self.install_debuginfo()", "def do_package(package):\n\tn_ucr = extFile(package, 'univention-config-registry')\n\tif not os.path.exists(n_ucr):\n\t\treturn\n\n\tf_ucr = open(n_ucr, 'r')\n\n\tfor item in univention.config_registry.parseRfc822(f_ucr.read()):\n\t\ttyp = item['Type'][0]\n\t\tif typ == 'file':\n\t\t\tf = item['File'][0]\n\t\t\tdoIt('install', '-d', destDir(f, package))\n\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package))\n\t\t\tfor key in ('Preinst', 'Postinst'):\n\t\t\t\tif key in item:\n\t\t\t\t\tmod = item[key][0]\n\t\t\t\t\tdoIt('install', '-d', destDir(mod, package, 'modules'))\n\t\t\t\t\tdoIt('cp', '-a', srcPath(mod), destPath(mod, package, 'modules'))\n\t\telif typ == 'subfile':\n\t\t\tf = item['Subfile'][0]\n\t\t\tdoIt('install', '-d', destDir(f, package))\n\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package))\n\t\telif typ == 'multifile':\n\t\t\tf = item['Multifile'][0]\n\t\t\tif os.path.exists(f):\n\t\t\t\tdoIt('install', '-d', destDir(f, package))\n\t\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package))\n\t\t\tfor key in ('Preinst', 'Postinst'):\n\t\t\t\tif key in item:\n\t\t\t\t\tmod = item[key][0]\n\t\t\t\t\tdoIt('install', '-d', destDir(mod, package, 'modules'))\n\t\t\t\t\tdoIt('cp', '-a', srcPath(mod), destPath(mod, package, 'modules'))\n\t\telif typ == 'script':\n\t\t\tf = item['Script'][0]\n\t\t\tdoIt('install', '-d', destDir(f, package, 'scripts'))\n\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package, 'scripts'))\n\t\t\tdoIt('chmod', 'a+x', destPath(f, package, 'scripts'))\n\t\telif typ == 'module':\n\t\t\tf = item['Module'][0]\n\t\t\tdoIt('install', '-d', destDir(f, package, 'modules'))\n\t\t\tdoIt('cp', '-a', srcPath(f), destPath(f, package, 'modules'))\n\t\telse:\n\t\t\tprint >>sys.stderr, 'Unknown type: %s' % typ\n\t\t\treturn\n\n\tf_ucr.close()\n\n\tdoIt('install', '-d', destDir('', package, 'info'))\n\tdoIt('install', '-m644', n_ucr, destPath(package+'.info', package, 'info'))\n\tmapping_file = extFile( package, 
'univention-config-registry-mapping')\n\tif os.path.exists(mapping_file):\n\t\tdoIt('install', '-d', destDir('', package, 'mapping'))\n\t\tdoIt('install', '-m644', mapping_file, destPath(package+'.univention-config-registry-mapping', package, 'mapping'))\n\n\tdata = {\n\t\t\t'pkg': quote(package),\n\t\t\t'info': quote(\"/etc/univention/templates/info/%s.info\" % package),\n\t\t\t'removed': quote(\"/etc/univention/templates/removed/%s.info\" % package),\n\t\t\t}\n\n\tf_preinst = open(extFile(package, 'preinst.debhelper'), 'a')\n\tf_preinst.write('# Automatically added by univention-install-config-registry\\n')\n\tf_preinst.write('if [ \"$1\" = \"install\" ] ; then\\n')\n\tf_preinst.write(' [ -e %(removed)s ] && [ ! -e %(info)s ] && mv %(removed)s %(info)s || true\\n' % data)\n\tf_preinst.write('fi\\n')\n\tf_preinst.write('# End automatically added section\\n')\n\tf_preinst.close()\n\n\tf_postinst = open(extFile(package, 'postinst.debhelper'), 'a')\n\tf_postinst.write('# Automatically added by univention-install-config-registry\\n')\n\tf_postinst.write('if [ \"$1\" = \"abort-remove\" ]; then\\n')\n\tf_postinst.write(' [ -e %(removed)s ] && mv %(removed)s %(info)s || true\\n' % data)\n\tf_postinst.write('fi\\n')\n\tf_postinst.write('[ -x /usr/sbin/univention-config-registry ] && univention-config-registry register %(pkg)s || true\\n' % data)\n\tf_postinst.write('# End automatically added section\\n')\n\tf_postinst.close()\n\n\tf_prerm = open(extFile(package, 'prerm.debhelper'), 'a')\n\tf_prerm.write('# Automatically added by univention-install-config-registry\\n')\n\tf_prerm.write('if [ \"$1\" = \"remove\" ] && [ -e %(info)s ] ; then\\n' % data)\n\tf_prerm.write(' [ -x /usr/sbin/univention-config-registry ] && univention-config-registry unregister %(pkg)s || true\\n' % data)\n\tf_prerm.write(' mv %(info)s %(removed)s || true\\n' % data)\n\tf_prerm.write('fi\\n')\n\tf_prerm.write('# End automatically added section\\n')\n\tf_prerm.close()\n\n\tdoIt('perl', '-e', 'use Debian::Debhelper::Dh_Lib;addsubstvar(\"%s\", \"misc:Depends\", \"univention-config (>= 7.0.25)\");' % package)", "def install(self, parent):\r\n pass", "def install():\n ArticleDataProvider.register()\n ProductDataProvider.register()", "def register_packages(self, module_name, extra_package):\n self.creator_manager.register_packages(module_name, extra_package)", "async def depends_on_module(self, *modules: str) -> None:\n return await self.AD.app_management.register_module_dependency(self.name, *modules)", "def install(force, packages):\n setup_audit_log()\n for pspec in CFG.package_specs(packages):\n perform_install(pspec, is_upgrade=False, force=force, quiet=False)", "def on_install(self, request, trigger_context):\n raise NotImplementedError", "def set_module(self, module):\n setattr(self, \"module\", module)", "def npm_install(self, package):\n self.summarize_operation(\"Installing \" + package)\n print subprocess.call(shlex.split(\"sudo npm install --save \" + package))", "def install():\n verun('pip install -r {0}'.format(requirements))", "def install_egg(self, egg_name):\n if not os.path.exists(self.egg_directory):\n os.makedirs(self.egg_directory)\n self.requirement_set.add_requirement(\n InstallRequirement.from_line(egg_name, None)\n )\n try:\n self.requirement_set.prepare_files(self.finder)\n self.requirement_set.install([\"--prefix=\" + self.egg_directory], [])\n except DistributionNotFound:\n self.requirement_set.requirements._keys.remove(egg_name)\n raise PipException()", "def install_from_rpm_py_package(self):\n 
message = '''\nCan not install RPM Python binding from package.\nBecause there is no RPM Python binding deb package.\n'''\n raise RpmPyPackageNotFoundError(message)", "def install(\n action : Optional[List[str]] = None,\n **kw : Any\n ) -> SuccessTuple:\n from meerschaum.utils.misc import choose_subaction\n options = {\n 'plugins' : _install_plugins,\n 'packages' : _install_packages,\n }\n return choose_subaction(action, options, **kw)", "def install(self):\n if self.installed:\n return\n if not self.installable:\n raise self.unsure_how_to_install()\n logger.notice(\"Installing '%s'...\", self.name)\n # Call the subclass implementation\n self._install()\n # Make sure it actually performed as promised\n if not self.path:\n raise HelperNotFoundError(\n 1,\n \"Installation did not raise an exception, but afterward, \"\n \"unable to locate {0}!\".format(self.name))\n\n logger.notice(\"Successfully installed '%s'\", self.name)", "def test_pydroid_pip_install_cmd_line(self):\n\n module_name = 'simplekv'\n package_dir = os.path.join(site_packages_dir(), module_name)\n self.assertFalse(os.path.exists(package_dir))\n cmd = ['pydroid', 'pip', 'install', module_name]\n subprocess.call(cmd)\n self.assertTrue(os.path.exists(package_dir))", "def test_install(self):\n\n\n adminuser,adminpass = self.testdata.find_account_for('toolmanager')\n\n self.utils.account.login_as(adminuser,adminpass)\n\n self.contribtool.install(TOOLNAME,adminuser,adminpass)", "def npm_install_globally(self, package):\n self.summarize_operation(\"Installing \" + package)\n print subprocess.call(shlex.split(\"sudo npm install -g \" + package))", "def install(self, repo):\n\n for subsystem in repo.options.get('subsystems', []):\n name = subsystem.get('name')\n args = subsystem.get('args', {})\n\n if name is None:\n raise InvalidSettingError('subsystem name', 'missing in settings file.')\n\n if name != 'SphinxDocumentation':\n raise InvalidSettingError('subsystem name', \"name '{}' is unknown\".format(name))\n\n repo.add_subsystem(SphinxDocumentation(repo, **args))\n\n repo.add_subsystem(BasicPythonSupport(repo))", "def YumInstall(vm):\n raise NotImplementedError", "def install_from_repository(self) -> None:\n packages = self.list_packages(self.repository_packages, title=\"package\")\n\n # Extra ignore/check for yum to workaround BZ#1920176\n check = ShellScript(f'rpm -q --whatprovides {packages.to_script()}')\n script = check | self.operation_script(Command('install'), packages)\n\n if self.skip_missing:\n script |= ShellScript('true')\n else:\n script &= check\n\n # Check and install\n self.guest.execute(script)", "def install():\n execute(generate)\n execute(upload)", "def install_from_repository(self) -> None:\n pass", "def YumInstall(vm):\n _Install(vm)", "def YumInstall(vm):\n _Install(vm)", "def install_module(request, monkeypatch):\n if request.param == 'sqlite':\n monkeypatch.setenv('DB_NAME', ':memory:')\n else:\n monkeypatch.setenv('DB_NAME', 'test_' + str(int(time.time())))\n\n from trytond.tests import test_tryton\n test_tryton.install_module('audit_trail')", "def test_install(self):\n pass", "def install(context):\n portal = context.getSite()\n sm = getSiteManager(portal)\n # Local components are not per-container; they are per-sitemanager. It just so happens that every Plone site has a sitemanager. 
Hooray.\n sm.registerAdapter(FolderText, name='FolderText')\n \n return \"Registered the extender at the root of the Plone site.\"", "def install_step(self):\n\n cmd = \"./INSTALL -noroot -silent -install_dir %s\" % self.installdir\n run_cmd(cmd, log_all=True, simple=True)\n\n adjust_permissions(self.installdir, stat.S_IWOTH, add=False)", "def install(self):\n if not self._is_installed():\n _logger.debug('Installing {name}...'.format(name=self.file_name))\n self._download_archive()\n self._unpack_archive()\n self._remove_archive()\n self._make_executable()\n else:\n _logger.debug('{name} is already installed.'.format(name=self.file_name))\n\n self._add_variables()", "def enablePackage(self, *args):\n return _libsbml.SBase_enablePackage(self, *args)", "def Install (self):\n if self in sys.meta_path:\n return\n sys.meta_path.insert (0, self)", "def pipInstall(self):\n\n print \"Does Nothing\"", "def add_module(self, *args, **kwargs):\n# if 'path' in kwargs:\n# path = kwargs['path']\n# else:\n# path = os.getcwd()\n#\n# if len(args) > 0:\n# module = args[0]\n# elif 'module' in kwargs:\n# module = kwargs['module']\n#\n# if 'path' not in kwargs:\n# path = os.getcwd()\n# kwargs['path'] = path\n\n if 'module' not in kwargs:\n if len(args) > 0:\n module = args[0]\n kwargs['module'] = module\n\n# if 'module' in kwargs:\n if len(kwargs) > 0:\n self._data.add_detector(self._name, **kwargs)", "def _installed_apps_add(self):\n config.add_plugin(self.module_path)", "def install_from_url(self) -> None:\n self.perform_operation(\n Command('install'),\n self.list_packages(self.remote_packages, title=\"remote package\")\n )", "def install_module(request):\n reuse_db = request.config.getoption(\"--reuse-db\")\n\n if request.config.getoption(\"--db\") == 'sqlite':\n os.environ['TRYTOND_DATABASE_URI'] = \"sqlite://\"\n if reuse_db:\n # A hack to check if the database exists and if it\n # does, load that and run tests.\n Database = backend.get('Database')\n\n # cursor.test forgets to set flavor!\n # no time to report a bug!\n Flavor.set(Database.flavor)\n os.environ['DB_NAME'] = 'fulfilio'\n else:\n os.environ['DB_NAME'] = ':memory:'\n\n elif request.config.getoption(\"--db\") == 'postgres':\n os.environ['TRYTOND_DATABASE_URI'] = \"postgresql://\"\n if reuse_db:\n os.environ['DB_NAME'] = 'test_fulfilio'\n else:\n os.environ['DB_NAME'] = 'test_' + str(int(time.time()))\n\n if reuse_db:\n Database = backend.get('Database')\n database = Database().connect()\n cursor = database.cursor()\n databases = database.list(cursor)\n cursor.close()\n if os.environ['DB_NAME'] in databases:\n if request.config.getoption(\"--reset-db\"):\n cursor = database.cursor()\n databases = database.drop(cursor, os.environ['DB_NAME'])\n cursor.close()\n else:\n # tryton test forgets to init the pool\n # for existing database\n Pool(os.environ['DB_NAME']).init()\n\n config.set('database', 'uri', os.environ['TRYTOND_DATABASE_URI'])\n from trytond.tests import test_tryton\n test_tryton.install_module('payment_gateway_stripe')", "def installQPackage(self, name, domain, version, reconfigure=True):\n installPackageCommand = \"\"\"p = q.qp.find(name=\"%(name)s\", domain=\"%(domain)s\", version=\"%(version)s\")\nif not p:\n raise valueError(\"Package %(domain)s, %(name)s, %(version)s not found\")\nelif len(p) <> 1:\n raise valueError(\"Too many packages found with search criteria %(domain)s, %(name)s, %(version)s\")\nelif not p[0].isInstalled():\n p[0].install()\nelse:\n print \"Package %(domain)s, %(name)s, %(version)s is already 
installed\"\n\"\"\"%{'name':name,'version':version,'domain':domain,'reconfigure':reconfigure}\n self.executeQshell(installPackageCommand)\n if reconfigure:\n self.executeQshell(\"q.qp._runPendingReconfigeFiles()\")", "def add_module_import(self, module):\n self._main_model.add_module_import(module)", "def install():\n build()\n sh(\"%s setup.py develop\" % PYTHON)", "def install(i):\n\n cm_kernel.print_for_con('***********************************************')\n cm_kernel.print_for_con('Installing code ...')\n\n # Check vars\n if 'target_os_uoa' not in i: return {'cm_return':1, 'cm_error':'\"target_os_uoa\" is not defined in \"code install\"'}\n\n # Create entry\n ii={'cm_run_module_uoa':ini['cm_module_uid'],\n 'cm_action':'update'}\n if 'install_data_uid' in i and i['install_data_uid']!='': \n ii['cm_data_uid']=i['install_data_uid']\n if 'install_data_alias' in i and i['install_data_alias']!='': \n ii['cm_data_uoa']=i['install_data_alias']\n if 'install_data_display_as_alias' in i: \n ii['cm_display_as_alias']=i['install_data_display_as_alias']\n if 'install_module_uoa' in i and i['install_module_uoa']!='':\n ii['cm_run_module_uoa']=i['install_module_uoa']\n if 'cm_array' in i and len(i['cm_array'])>0: ii['cm_array']=i['cm_array']\n if 'install_repo_uoa' in i and i['install_repo_uoa']!='': \n ii['cm_repo_uoa']=i['install_repo_uoa']\n r=cm_kernel.access(ii)\n if r['cm_return']>0: return r\n\n target_path=r['cm_path']\n target_uid=r['cm_uid']\n target_alias=r['cm_alias']\n\n # Prepare script\n rx=get_env({'cm_data_uoa':target_uid,\n 'os_uoa':i['target_os_uoa']})\n if rx['cm_return']>0: return rx\n\n script=rx['cm_string']\n\n ii={'script_name':script,\n 'skip_extension':'yes',\n 'target_os_uoa':i['target_os_uoa'],\n 'cm_path':target_path}\n if 'code_deps' in i and i.get('skip_code_deps','')!='yes':\n ii['code_deps']=i['code_deps']\n\n # Add remark about how code was built\n if 'add_rem_to_script' in i:\n run_commands_before=[]\n run_commands_before.append('')\n for x in i['add_rem_to_script']:\n run_commands_before.append(x)\n ii['run_commands_before']=run_commands_before\n\n rx=prepare_script(ii)\n if rx['cm_return']>0: return rx\n\n r['script_name']=rx['cm_path']\n r['script_filename']=script\n\n return r", "def previewinstall(self, installed=[]):\n\n if( self.mode == \"install\"):\n \n # resolve circular dependencies\n if( self.name in installed ):\n return\n else:\n installed.append( self.name )\n \n print \"\\n\" + 20*'-' + \" Starting \" + self.name + \" Installation Test \" + 20*'-' + '\\n'\n \n # additional modules\n mods = self.optmodules + self.reqmodules + self.reqmodules_external + self.reqmodules_buildonly\n if( len(mods) > 0 ):\n for modname in mods:\n mod = self.parent.module(modname)\n if( mod.mode == \"install\" and not mod.name in installed ):\n print \"+ \" + self.name + \" will launch installation of \" + mod.name\n mod.previewinstall(installed)\n print \"+ \"+ self.name + \" using \" + mod.name + \" at [ \" + mod.installPath + \" ]\"\n\n print \"\\n+ Environment Settings used for building \" + self.name + \":\"\n # print environment settings recursively\n self.setEnv(self, [], True )\n\n if( self.hasCMakeBuildSupport ):\n #self.setCMakeVars(self, [])\n print \"\\n+ Generated CMake command for building \" + self.name + \":\"\n print ' $ ',self.genCMakeCmd()\n \n print \"\\n+ \" + self.name + \" installation finished.\"\n print '\\n' + 20*'-' + \" Finished \" + self.name + \" Installation Test \" + 20*'-' + '\\n'", "def 
test_run_pymodules_install_optional_project_dir(self):\n ctx = mock.Mock()\n modules = []\n project_dir = None\n with mock.patch('pythonforandroid.build.info') as m_info:\n assert run_pymodules_install(ctx, modules, project_dir) is None\n assert m_info.call_args_list[-1] == mock.call(\n 'No Python modules and no setup.py to process, skipping')", "def download_and_install(self):\n if self.is_installed_from_bin:\n try:\n self.installer.install_from_rpm_py_package()\n return\n except RpmPyPackageNotFoundError as exc:\n Log.warn('RPM Py Package not found. reason: {0}'.format(exc))\n\n # Download and install from the source.\n top_dir_name = self.downloader.download_and_expand()\n rpm_py_dir = os.path.join(top_dir_name, 'python')\n\n setup_py_in_found = False\n with Cmd.pushd(rpm_py_dir):\n if self.installer.setup_py.exists_in_path():\n setup_py_in_found = True\n self.installer.run()\n\n if not setup_py_in_found:\n self.installer.install_from_rpm_py_package()", "def install_package(package, remote):\n log.info('Installing package %s on %s', package, remote)\n flavor = remote.os.package_type\n if flavor == 'deb':\n pkgcmd = ['DEBIAN_FRONTEND=noninteractive',\n 'sudo',\n '-E',\n 'apt-get',\n '-y',\n '--force-yes',\n 'install',\n '{package}'.format(package=package)]\n elif flavor == 'rpm':\n # FIXME: zypper\n pkgcmd = ['sudo',\n 'yum',\n '-y',\n 'install',\n '{package}'.format(package=package)]\n else:\n log.error('install_package: bad flavor ' + flavor + '\\n')\n return False\n return remote.run(args=pkgcmd)", "def software_api(self, install_params):\n try:\n self.sw = jnpr.junos.utils.sw.SW(self.dev)\n ok, msg_ret = self.sw.install(**install_params)\n if ok is not True:\n raise AnsibleError('Unable to install the software %s' % msg_ret)\n msg = 'Package %s successfully installed. Response from device is: %s' % (\n install_params.get('package') or\n install_params.get('pkg_set'),\n msg_ret)\n self.queue_message(\"log\", \"%s\" % msg)\n return msg\n except (self.pyez_exception.ConnectError,\n self.pyez_exception.RpcError) as ex:\n raise AnsibleError('Installation failed. 
Error: %s' % str(ex))", "def pip_install():\n _require_environment()\n remote(PIP_INSTALL_PREFIX)", "def do_post_install(self, context):\n pass", "def pre_install_pkg(self, installable_pkg):\n pass", "def setUp(self):\n trytond.tests.test_tryton.install_module('nereid_webshop')", "def install_cached_package(self, package_name):\n self._log.info(\"Installing package {!r} from talus pypi\".format(package_name))\n pinfo = self.cache[\"pypi\"][package_name]\n pypi_hostname = re.match(r'^.*://([^/]+)/.*$', self.pypi_loc).group(1)\n\n try:\n self._run_pip_main([\n \"install\",\n \"--user\",\n \"--trusted-host\", pypi_hostname,\n \"-i\", self.pypi_loc,\n package_name\n ])\n except SystemExit as e:\n raise Exception(\"Is SystemExit expected?\")", "def install(cli_config, path):\n commands = AssetsCommands(cli_config)\n\n click.secho(\"Installing React module...\", fg=\"green\")\n steps = commands.link_js_module(path)\n on_fail = \"Failed to install React module.\"\n on_success = \"React module installed successfully.\"\n\n run_steps(steps, on_fail, on_success)", "def _install_packages(module, path, packages, allowed_dists, repo):\n cmd = \"./LuaDist/bin/luadist install \"\n\n # Add packages to command\n for package in packages:\n cmd += package + \" \"\n\n # Add types of dists allowed to command\n source_allowed = \"true\"\n binary_allowed = \"true\"\n if allowed_dists == \"binary\":\n source_allowed = \"false\"\n elif allowed_dists == \"source\":\n binary_allowed = \"false\"\n cmd += \" -source=\" + source_allowed + \" -binary=\" + binary_allowed\n\n # Add repository to command\n cmd += ' -repos=\"' + repo + '\"'\n\n ret_code, out, err = module.run_command(cmd, cwd=path)\n already_installed = \"No packages to install\" in out\n\n if ret_code != 0 and not already_installed:\n module.fail_json(\n rc=ret_code,\n stdout=out,\n stderr=err,\n msg=\"Cannot install one or more of the specified packages, \"\n + \"make sure all packages exist in the configured repository.\",\n )\n\n return cmd" ]
[ "0.6969152", "0.69388556", "0.6872978", "0.68555695", "0.6801365", "0.67761844", "0.6767428", "0.6733695", "0.6714173", "0.66525847", "0.6616081", "0.65094006", "0.6481745", "0.64168596", "0.63249385", "0.6309751", "0.6267477", "0.6245471", "0.6231327", "0.6215618", "0.6200066", "0.6157249", "0.61510795", "0.6132066", "0.6122041", "0.6117013", "0.6113363", "0.60535926", "0.604949", "0.60188705", "0.60149723", "0.6005884", "0.6001865", "0.5994445", "0.5952823", "0.5950878", "0.59120035", "0.59031105", "0.5902215", "0.58973765", "0.5897239", "0.58967173", "0.5886299", "0.5875481", "0.586653", "0.58250093", "0.58176196", "0.58119714", "0.5811162", "0.5804783", "0.5802072", "0.5777229", "0.5762134", "0.5754137", "0.5749512", "0.5730237", "0.57127273", "0.57088447", "0.56937397", "0.569082", "0.56792736", "0.56730485", "0.5671629", "0.56652313", "0.56532127", "0.56483585", "0.56359285", "0.5615233", "0.56139237", "0.56120026", "0.56077254", "0.5596886", "0.5596886", "0.55855155", "0.55835664", "0.55749494", "0.5555037", "0.5553028", "0.5551551", "0.5542101", "0.5540291", "0.5537365", "0.5530371", "0.55150586", "0.55144536", "0.5511179", "0.54969233", "0.54825425", "0.5478699", "0.54667836", "0.5464257", "0.54629314", "0.5458849", "0.545141", "0.5439027", "0.5435583", "0.5434709", "0.54272467", "0.542712", "0.5424678", "0.5420664" ]
0.0
-1
Do work if there is work to do, otherwise check every two seconds for new work.
def run(self): operation_manager = self._core.get_operation_manager() while True: while operation_manager.process_next(): pass sleep(2)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run_checks():\n while True:\n if datetime.now() > core.misc_data.check_date+timedelta(minutes=45):\n for stuff in stuff_to_do:\n threading.Thread(target=stuff).start()\n core.misc_data.check_date = datetime.now() + config.utils.tasks.repeat_every\n time.sleep(5*60*60)", "def work(self):\n time.sleep(random.randint(0, 200) / 100)\n pass", "def work(self):\r\n if self.working:\r\n if self.time == 0:\r\n self.items = [Item.P]\r\n self.working = False\r\n self.time = 4\r\n else:\r\n self.time -= 1", "def run(self):\n print('checking for expired cache items...')\n\n while True:\n # Do something\n self.check()\n time.sleep(self.interval)", "def runnable(self):\n if \"calculations\" not in self.ctx:\n return True # if no calculations have run\n return self.ctx.running_calc < 2 and self.can_restart()", "def _doWork(self) -> bool:\n # grab a job from queur\n self.lock.acquire ()\n jobId = self.nextJob\n self.nextJob = jobId + 1\n self.lock.release ()\n\n # abort if no jobs are left\n if jobId >= len (self.jobs):\n return False\n\n # execute job\n job = self.jobs[jobId]\n lc = job[\"description\"]\n if self.progressCallback != None:\n self.lock.acquire ()\n self.progressCallback (lc, self.executedJobs, len (self.jobs))\n self.lock.release ()\n else:\n print (lc + \" (\" + str (jobId) + \" / \" + str (len (self.jobs)) + \")\\n\")\n if job[\"runner\"] (job) == False:\n return False\n self.executedJobs = self.executedJobs + 1\n\n # continue on jobs\n return True", "def _monitor_loop(self):\n while self._continue_running():\n for wl in self._workloads:\n if not wl.running():\n self.log.info('%-20s FAILED', wl.name())\n self._restart_workload(wl)\n else:\n self.log.info('%-20s OK', wl.name())\n\n time.sleep(self._monitor_delay)", "def do_work(self):", "def do_work(self):\r\n recv = self.check_queues()\r\n proc = 1 if self.process_frame() else 0\r\n for perf in self.perfs:\r\n perf.post_work(recv, proc, len(self.ooo_frames))\r\n return recv > 0 or proc > 0", "def _run_notice_event(look_for_work):\n while True:\n try:\n found = look_for_work()\n if not found:\n break\n except ConcurrentUpdate as e:\n # retry if we had a race-condition while claiming work\n sys.stderr.write('Handling ErmrestConcurrentUpdate exception...\\n')\n pass", "def pending_work(self) -> bool:\n return len(self.ongoing) > 0", "def run(self):\n #=======================================================================\n #\n # TODO: Replace this do-nothing code with some which does something.\n # Don't worry about looping (though you can), since this will be called\n # over and over again by the main appliance loop.\n #\n #=======================================================================\n self.logger.info('Nothing to do; sleeping for a while.')\n sleep(10)\n\n # Return something truthy to continue, anything else to exit.\n return True", "def run_if_refresh(self):\n if self.is_finished():\n self.status.collect = True\n self.run() # self.run_if_collect()\n elif (\n self.server.run_mode.non_modal\n or self.server.run_mode.queue\n or self.server.run_mode.modal\n ):\n self.run_static()\n else:\n self.refresh_job_status()\n if self.status.refresh:\n self.status.suspended = True\n if self.status.busy:\n self.status.refresh = True\n self.run_if_refresh()", "def monitor(self):\r\n while True:\r\n for worker, start_time in self.workers.items():\r\n if (not worker.isAlive() or\r\n self.timeout\r\n and datetime.now() - start_time > self.timeout): \r\n\r\n self.work_count.get_nowait()\r\n self.jobs.task_done()\r\n del 
self.workers[worker]\r\n\r\n time.sleep(1)", "def DoWork():\n def do_work():\n trace_begin(\"do_work\")\n time.sleep(0.25)\n trace_end(\"do_work\")\n do_work()", "def do_work(self):\n raise NotImplementedError", "async def _do_work(self) -> None:\n self.logger.info(\"Starting work on Bundles.\")\n work_claimed = True\n while work_claimed:\n work_claimed = await self._do_work_claim()\n # if we are configured to run once and die, then die\n if self.run_once_and_die:\n sys.exit()\n self.logger.info(\"Ending work on Bundles.\")", "async def _do_work(self) -> None:\n self.logger.info(\"Starting work on Bundles.\")\n work_claimed = True\n while work_claimed:\n work_claimed = await self._do_work_claim()\n # if we are configured to run once and die, then die\n if self.run_once_and_die:\n sys.exit()\n self.logger.info(\"Ending work on Bundles.\")", "def working(self):\n # ----1----\n url, keys, item = self._pool.get_a_task(TPEnum.ITEM_SAVE)\n\n # ----2----\n save_result = self._worker.working(url, keys, item)\n\n # ----3----\n if save_result > 0:\n self._pool.update_number_dict(TPEnum.ITEM_SAVE_SUCC, +1)\n else:\n self._pool.update_number_dict(TPEnum.ITEM_SAVE_FAIL, +1)\n\n # ----4----\n self._pool.finish_a_task(TPEnum.ITEM_SAVE)\n return True", "def work2():\n logging.info(\"work2 doing a job\")\n if random.randint(1, 5) == 1:\n logging.warning(\"something weird happened in work2!\")", "def has_more_work(self):\n return self.done_counter < self.N", "def update_ticker(self):\n while True:\n Thread(target=self.update_data_check).start()\n time.sleep(60)", "def is_at_work(cls, sim_info: SimInfo) -> bool:\n for career in cls.get_all_careers_for_sim_gen(sim_info):\n if career.currently_at_work:\n return True\n return False", "def _do_request(self):\n\n if time.time() < self._next_request:\n return False\n else:\n return True", "def __work__(self):\n while not self.is_done:\n self.refreshSignal.emit()\n time.sleep(0.05)", "def startworking():\r\n #In the future have the manager program or from the website implement this arguments to a route\r\n #the program will download the file from the website\r\n global exe_name\r\n global Task_Conditional\r\n task_data = None\r\n while task_data is None:\r\n task_data = recieve_data_from_server(\"get_task\")\r\n if task_data is None:\r\n time.sleep(5)\r\n else:\r\n exe_name = task_data[\"exe_name\"]\r\n print('Working on the task \"{}\"'.format(exe_name))\r\n get_file(exe_name)\r\n Task_Conditional = task_data[\"Task_conditional\"]\r\n print(\"loading\")\r\n t1 = time.time()\r\n task_divider(task_data[\"first_num\"], task_data[\"last_num\"])\r\n t2 = time.time()\r\n print(\"ready {}\".format(t2-t1))", "def test_notBeforeWhenCheckingForWork(self):\n dbpool, _ignore_qpool, clock, _ignore_performerChosen = self._setupPools()\n fakeNow = datetime.datetime(2012, 12, 12, 12, 12, 12)\n\n # Let's create a couple of work items directly, not via the enqueue\n # method, so that they exist but nobody will try to immediately execute\n # them.\n\n @transactionally(dbpool.pool.connection)\n @inlineCallbacks\n def setup(txn):\n # First, one that's right now.\n yield DummyWorkItem.makeJob(txn, a=1, b=2, notBefore=fakeNow)\n\n # Next, create one that's actually far enough into the past to run.\n yield DummyWorkItem.makeJob(\n txn, a=3, b=4, notBefore=(\n # Schedule it in the past so that it should have already\n # run.\n fakeNow - datetime.timedelta(seconds=20)\n )\n )\n\n # Finally, one that's actually scheduled for the future.\n yield DummyWorkItem.makeJob(\n txn, a=10, b=20, 
notBefore=fakeNow + datetime.timedelta(1000)\n )\n yield setup\n\n # Wait for job\n while len(DummyWorkItem.results) != 2:\n clock.advance(1)\n\n # Work item complete\n self.assertTrue(DummyWorkItem.results == {1: 3, 2: 7})", "def _is_working():\n global _worker\n return _worker is not None and _worker.is_alive()", "def work_refresh(self):\n now = dt.now()\n self.eisenhower_priority()\n p_week = now.isocalendar()[1] - self.work_datetime.isocalendar()[1]\n\n if (1 <= p_week) and (self.priority not in [1, 2]):\n self.time_ntf = now\n else:\n pass", "def process_checked(self):\n if self.chbxProcess.isChecked():\n self.run_worker()\n self.workerTimer.start(int(self.settings.INTERVALWORKER) * 1000)\n else:\n self.workerTimer.stop()", "def _worker():\n try:\n logger.info('Looping...')\n temp_list = []\n for file in ['data_unfcc.csv','data_ebal.csv']:\n temp_list.append(os.path.isfile(file))\n if not all(temp_list):\n print('Starting from scratch...')\n download_source()\n create_database()\n create_index()\n\n time_mod = datetime.strptime(time.ctime(os.stat('data_ebal.csv').st_mtime),'%a %b %d %H:%M:%S %Y')\n time_now = datetime.now()\n\n if (time_now - time_mod).seconds > 3600:\n download_source()\n get_updated_records('unfcc')\n get_updated_records('ebal')\n create_index()\n except Exception as e:\n logger.warning('Main Loop error')", "def check_for_work(self):\n print(\"validator: check for work\")\n self.check_for_analyzers()\n self.check_for_uploads()\n self.check_for_requests()", "def work(self):\r\n worker = self.worker_place_item()\r\n if not worker:\r\n worker = self.worker_take_item()\r\n\r\n if not worker:\r\n for worker in self.workers:\r\n worker.work()\r\n else:\r\n worker = [w for w in self.workers if w is not worker][0]\r\n worker.work()", "def work_sleep(arg):\n time, pids = arg\n sleep(time)\n res = True\n for p in pids:\n res &= psutil.pid_exists(p)\n return res", "def block_while_running():\n runs = is_running()\n while runs:\n runs = is_running()\n time.sleep(10)", "def monitor(self, seconds=1):\r\n\r\n for ts in range(0, seconds):\r\n self.listenFiles = self.listen_files_list(self.listenDir)\r\n FoI = list(set(self.listenFiles)-set(self.logFiles))\r\n if len(FoI) == 0:\r\n time.sleep(1)\r\n else:\r\n self.CHANGE_appendAll() # Can be probamatic for first iter..\r\n return True\r\n\r\n return False", "def _keep_running():\n return True", "async def checkNewLoop(self):\n pass", "def wait_on_job(self, delay=10):\n while self.isJobRunning() == True:\n time.sleep(delay)\n return self.ofile_exists()", "def run(self):\n while True:\n self.current_wifi_clients()\n self._eval_is_someone_home()\n time.sleep(self._interval)", "def _periodic_resync_helper(self):\n while True:\n eventlet.sleep(self.conf.resync_interval)\n if self.needs_resync:\n self.needs_resync = False\n self.sync_state()", "def check(self):\n self.lastcheck = time.time()\n delta = time.time() - self.last\n if delta > 270:\n self.server.restart = True\n self.server.connected = False\n elif delta > 180:\n self.server.printer.raw_message(\"PING :♥\")", "def check_completion(self):\n\n time.sleep(3)\n while self.status == 0:\n pass", "def this_needs_work_test_ensure_our_presence(self):\n self.do_test_ensure_our_presence()", "def run(self):\n while True:\n try:\n if not self._read_new_entries(False):\n time.sleep(0.1)\n self._update_all_tasks()\n except KeyboardInterrupt:\n break", "def wait_for_work(self, early_stop=lambda: False):\n self.work_notifier.acquire()\n\n try:\n while len(self.getDelayedCalls()) == 0 and not 
early_stop():\n self.work_notifier.wait()\n finally:\n self.work_notifier.release()", "def run(self):\n while not self.stop_event.is_set():\n self.manage_cache_tasks()", "def loop_forever(self):\n while self.running:\n time.sleep(0.01)", "def running_loop(self, run_check_ms=None):\r\n if self.board.area.down_click_call is None:\r\n raise SelectError(\"board.area.down_click_call is not set\")\r\n if self.numgame is not None and self.ngame >= self.numgame:\r\n SlTrace.lg(f\"running_loop: ngame={self.ngame} > numgame {self.numgame}\")\r\n self.running = False\r\n self.run = False\r\n return\r\n \r\n self.running = True # Still in game\r\n self.run = True # progressing (not paused)\r\n self.first_time = True \r\n self.game_start_ts = SlTrace.getTs(6)\r\n self.game_control_updates()\r\n if run_check_ms is not None:\r\n self.run_check_ms = run_check_ms\r\n BlinkerMultiState.enable()\r\n \r\n while self.running:\r\n SlTrace.lg(\"running_loop\", \"running_loop\")\r\n self.mw.update()\r\n if ActiveCheck.not_active():\r\n break\r\n SlTrace.lg(\"running_loop active\", \"running_loop\")\r\n self.mw.update_idletasks()\r\n if self.event_check():\r\n continue # Gobble up pending events\r\n \r\n if (self.cmd_stream is not None\r\n and not self.cmd_stream.is_eof()):\r\n self.run_file()\r\n self.first_time = False # Assume file did that\r\n continue # Check if more\r\n else:\r\n if self.first_time:\r\n if not self.start_game():\r\n break\r\n self.first_time = False\r\n if not self.make_move():\r\n break \r\n \r\n SlTrace.lg(\"running_loop after loop\", \"running_loop\")\r\n BlinkerMultiState.disable()\r\n \r\n if self.on_end is not None:\r\n SlTrace.lg(\"running_loop doing on_end\", \"running_loop\")\r\n self.mw.after(0, self.on_end) # After run processing\r", "def run(self):\n while True:\n # Do something\n print('Doing something imporant in the background')\n\n self.loadData()\n time.sleep(self.interval)", "def nanny(self): \n while not self.started and not self.failed:\n eventlet.sleep(.1)\n return not self.failed", "def work(self, job):\n pass", "def run(self):\n last_time = time.time()\n while self.running:\n now_time = time.time()\n interval = now_time - last_time\n last_time = now_time\n self.update(interval)\n time.sleep(Options['update interval'])", "def do_work(self) -> None:\n self._get_btc_eur_15min()\n print(\n f\"1 BTC = {self.btc_eur_15min} EUR\"\n f\"\\t\\t(15min delayed market price)\"\n )\n\n self._get_eur_gbp_last_month()\n print(\n f\"1 EUR = {self.eur_gbp_last_month} GBP\"\n f\"\\t(last month average rate)\"\n )\n\n self._get_btc_gbp_15min()\n print(\n f\"1 BTC = {self.btc_gbp_15min:.6f} GBP\"\n f\"\\t(BTC 15min delayed market price; GBP latest daily average rate)\"\n )", "def doctest_BackgroundWorkerThread_scheduleNextWork():", "def delay_checks(self):\n return False", "def busy_wait(self, seconds):\n end_time = time.perf_counter() + seconds\n while(time.perf_counter() < end_time):\n pass", "def burn_in_finished():\n global trials\n if trials <= 0:\n return True\n trials -= 1\n return False", "def check_thread(self):\n if self.submit_thread.is_alive():\n if self.prev_prog != progressbar.current:\n self.time_remaining()\n self.after(1000, self.check_thread)", "def running(self) -> bool:", "def run(self):\n self.timer.start()\n \n while not Status.is_final(self.status):\n if self.request:\n self.handle_request()\n \n if self.status == Status.RUNNING:\n # Clean up orphaned schedules and undead schedulers.\n # Schedule.objects.orphaned().update(scheduler=None)\n # 
CronSchedule.objects.orphaned().update(scheduler=None)\n \n cron = CronSchedule.objects.unclaimed()[:SCHEDULER_LIMIT]\n simple = Schedule.objects.unclaimed()[:SCHEDULER_LIMIT]\n for schedule in itertools.chain(cron, simple):\n self.log.info('Claiming %s.' % schedule)\n schedule.scheduler = self\n schedule.save()\n self.add(schedule)\n if not Status.is_final(self.status):\n self.wait()\n self.request = Scheduler.objects.get(pk=self.pk).request", "def working(self):\n # ----1----\n task: TaskSave = self._pool.get_a_task(TPEnum.ITEM_SAVE)\n\n # ----2----\n result: ResultSave = self._worker.working(task)\n\n # ----3----\n if result.state_code > 0:\n self._pool.update_number_dict(TPEnum.ITEM_SAVE_SUCC, +1)\n else:\n self._pool.update_number_dict(TPEnum.ITEM_SAVE_FAIL, +1)\n logging.error(\"%s error: %s, %s\", result.excep_class, result.excep_string, str(task))\n\n # ----4----\n self._pool.finish_a_task(TPEnum.ITEM_SAVE)\n\n # return\n return True", "def _do_work(self):\n syslog.syslog('entering _do_work().')\n try:\n signal.signal(signal.SIGALRM, self.__terminate_handler)\n signal.signal(signal.SIGTERM, self.__terminate_handler)\n signal.alarm(self.timeout)\n except Exception as e:\n syslog.syslog(str(e.args))\n self.__startup()\n syslog.syslog('successfully finished the work within time.')\n signal.alarm(0)\n self.__cleanup(success=True)", "def wait_for_time():\n while rospy.Time().now().to_sec() == 0:\n pass", "def run(self): # pragma: no cover\n while True:\n self.update()", "def run(self):\n self.getPrice()\n #time.sleep(5)\n self.compare_price()\n #time.sleep(5)\n if self.__alert_client is True:\n self.send_email()\n #self.display()\n return True\n else:\n #self.display()\n return False", "def wait_for_time():\n while rospy.Time().now().to_sec() == 0:\n pass", "def wait_for_time():\n while rospy.Time().now().to_sec() == 0:\n pass", "def wait_for_time():\n while rospy.Time().now().to_sec() == 0:\n pass", "def loop_once(self):\n while 1:\n if not self._active_nodes:\n # If everything has died stop looping\n self.triggershutdown()\n raise RuntimeError(\"Unexpectedly no active workers available\")\n try:\n eventcall = self.queue.get(timeout=2.0)\n break\n except Empty:\n continue\n callname, kwargs = eventcall\n assert callname, kwargs\n method = \"worker_\" + callname\n call = getattr(self, method)\n self.log(\"calling method\", method, kwargs)\n call(**kwargs)\n if self.sched.tests_finished:\n self.triggershutdown()", "def cooldown_checker(self):\n self.cooldown_tick += 1\n if self.cooldown_tick == self.pattern_cooldown:\n self.wait = False\n self.cooldown_tick = 0", "def run(self):\n\t\t\n\t\twhile self.update():\n\t\t\tpass", "def run_job(job, interrupt_if_necessary):", "def do_cycle(self):\n c.check_running()\n online_models = self.get_online_models()\n if len(online_models) > 0:\n self.process_models(online_models)\n self.print_recording()", "def maybe_schedule_update(self):\n if self.hass and not self._update_scheduled:\n self.hass.add_job(self._schedule_update)", "def precheck(self):\n # making sure it's a time for pull, otherwise just sleep\n if datetime.now() < self.startTime + timedelta(hours=int(self.newsFrequency)):\n logging.info(\"Didn't reach time to wakeup yet, going to sleep\")\n self.sleep()", "def trigger(self):\n if self.timer is None or time.time() - self.last_try > self.min_sec * 2:\n self.timer = time.time()\n self.last_try = time.time()\n return False\n elif time.time() - self.timer > self.min_sec:\n self.reset()\n return True\n else:\n self.last_try = time.time()\n 
return False", "def runJob(self, shouldRestart=False):\n self.inTheMiddle = False\n self.startTime = self.env.now\n while self.workLeft:\n try:\n delta = self.ckptTime\n oci = int(math.sqrt(2*MTBF*delta))\n computeTime = min(oci, self.workLeft)\n if computeTime <= 0:\n self.endTime = self.env.now\n self.actualRunTime = self.endTime - self.startTime\n self.env.exit()\n if shouldRestart:\n yield self.env.timeout(delta) # simulate restart when requested by the bq\n self.resumeCompleted.succeed()\n self.resumeCompleted = self.env.event()\n # Start computing\n start = self.env.now\n self.lastComputeStartTime = start\n self.ProcLog(\"Computing for %d, workleft %d\" % (computeTime, self.workLeft))\n yield self.env.timeout(computeTime)\n if self.workLeft < oci:\n self.workLeft = 0\n self.endTime = self.env.now\n self.actualRunTime = self.endTime - self.startTime\n self.waitForComputeToEnd.succeed()\n self.waitForComputeToEnd = self.env.event()\n self.env.exit()\n self.ProcLog(\"Ckpting, workleft %d\" % (self.workLeft))\n ckptStartTime = self.env.now\n self.inTheMiddle = True\n yield self.env.timeout(delta)\n self.lastCkptInstant = self.env.now\n # Done with ckpting, now\n # first, save the progress made since the last interruption, and\n timeSinceLastInterruption = ckptStartTime - start\n self.workLeft -= timeSinceLastInterruption\n # second, update the latest ckpt time\n self.lastCheckpointTime += timeSinceLastInterruption\n # ... and increment the number of ckpts\n self.startAfresh = True\n self.numCkpts += 1\n self.inTheMiddle = False\n self.ProcLog(\"Done ckpting, work left %d, ckpts %d, lastCkpt %d\" % (self.workLeft, self.numCkpts, self.lastCheckpointTime))\n except simpy.Interrupt as e:\n if e.cause == \"failure\":\n # fallback to the last checkpoint\n if self.inTheMiddle:\n self.inTheMiddle = False\n self.ckptFailures += 1\n #self.ProcLog(\"Checkpointing failure, lastCkpt %d, workLeft %d\" % (self.lastCheckpointTime, self.workLeft))\n self.broken = True\n #self.ProcLog(\"Incurred a failure, work left %d\" % (self.workLeft))\n self.numFailures += 1\n restarting = self.env.process(self.do_restart(self.env.now - start))\n yield restarting\n #self.ProcLog(\"Resumed after failure, work left %d, lost work %d\" % (self.workLeft, self.lostWork))\n self.broken = False\n elif e.cause == \"failureNoRestart\":\n if self.inTheMiddle:\n self.inTheMiddle = False\n self.ckptFailures += 1\n #self.ProcLog(\"Checkpointing failure, lastCkpt %d, workLeft %d\" % (self.lastCheckpointTime, self.workLeft))\n self.broken = False\n self.numFailures += 1\n restarting = self.env.process(self.do_restart(self.env.now - start, True))\n yield restarting\n yield self.waitForBq\n self.ProcLog(\"Resumed after failureNoRestart\")\n # Need to restart the job from its latest ckpt\n shouldRestart = True\n elif e.cause == \"preempt\":\n self.ProcLog(\"Preempted, workLeft %d\" %(self.workLeft))\n self.numOfPreempts += 1\n yield self.waitForBq\n self.ProcLog(\"Resumed after preemption\")\n # Need to restart the job\n shouldRestart = True\n else:\n print(\"Unexpected interrupt in the middle of computing\")\n exit(-1)\n self.workLeft = 0\n self.endTime = self.env.now\n self.actualRunTime = self.endTime - self.startTime", "def doWork():\n #rVal = True\n rc = 0\n printInfo()\n \n filler = getFillerData()\n #debug( \"doWork(): filler = \" + filler )\n requestServerFile( filler )\n\n return rc", "def watch_worker():\n global isFinished, ComputationTime, UsersOnline, N, CurrentIndex, Count\n received_data = request.json\n Count += 
received_data\n if CurrentIndex >= N:\n print 'Second text got ', Count, ' entries of given row.'\n print '--- %s seconds ---' % (time.time() - ComputationTime)\n isFinished = True\n return jsonify(current_row='', current_part='')\n else:\n print 'Current row in second text: ', CurrentIndex / 256\n part = SecondText[CurrentIndex:CurrentIndex+1023]\n CurrentIndex += 1024\n return jsonify(current_row=Row, current_part=part)", "def do_tasks(self):\n\t\twork_ = self.TASK_LIMIT\n\t\twhile True:\n\t\t\tif len(self.tasks) == 0 or work_ <= 0:\n\t\t\t\tbreak\n\t\t\tself.tasks[0].work(self)\n\t\t\tif self.tasks[0].completed:\n\t\t\t\tself.tasks.pop(0)\n\n\t\t\twork_ -= 1", "def run(self):\n try:\n while self._running:\n time.sleep(1)\n finally:\n self._exit()", "def run(self):\n time_to_quit = False\n while True:\n time_to_quit = self.run_to_yield_or_quit()\n if time_to_quit:\n print(self, 'quitting')\n break\n else:\n time.sleep(self.polling_interval)", "def wait_forever(self):\r\n while True:\r\n time.sleep(0.5)", "def continue_work(self):\n if self.worker_thread is not None:\n self.worker_thread.change_state(WorkerState.RUNNING)", "def monitor(self):\n if self.startup():\n time.sleep(0.250)\n self.run()", "def check_if_ok_to_update(self):\n current_time = int(time.time())\n last_refresh = self.last_refresh\n if last_refresh is None:\n last_refresh = 0\n if current_time >= (last_refresh + self.refresh_rate):\n return True\n return False", "def run_timer(self):\n self.min_count += 1\n print(self.min_count)\n if self.min_count % int(self.interval_entry.get()) == 0:\n self.min_count = 0\n # A separate thread to handle scraping\n thread = threading.Thread(target=self.scraper_data, args=())\n thread.setDaemon(True)\n thread.start()\n for entry in self.items_list.get_children():\n item_name = self.items_list.item(entry)[\"values\"][0]\n item_url = self.items_list.item(entry)[\"values\"][1]\n status = s.getStatus(item_name, item_url)\n item_stock = status.get(\"status\")\n item_pstock = status.get(\"pstatus\")\n item_cost = status.get(\"cost\")\n\n self.update_stock_info(\n entry, item_name, item_url, item_stock, item_cost\n )\n if item_stock == \"In Stock\" and item_pstock != \"In Stock\":\n app.update()\n self.items_list.alert(item_name, item_url)\n self.interval_entry.focus_force()\n self.email_addr_entry.focus_force()\n\n self.after(1000, self.run_timer)", "async def check(self):\n if await self.is_water_level_critical():\n _LOGGER.debug(\"Water level critical - pump should be off\")\n else:\n for run in self._runs:\n if run.run_now(self._now):\n _LOGGER.debug(\"Pool pump should be on now: %s\", run)\n await self._switch_pool_pump(STATE_ON)\n return\n # If we arrive here, the pool pump should be off.\n _LOGGER.debug(\"Pool pump should be off\")\n await self._switch_pool_pump(STATE_OFF)", "def run(self):\n self.workhorse_.run()\n try:\n while(True):\n self.workhorse_.heartbeat()\n self.periodic_snapshot()\n except workflow.NoMoreWork:\n print \"Fini.\"\n exit(0)\n exit(-1)", "def timer_startIfNeeded():\n nonlocal b_timerStart\n for k, v in kwargs.items():\n if k == 'timerStart': b_timerStart = bool(v)\n if b_timerStart:\n other.tic()", "def everytime(self):\n return True", "def wait_for_job(job) -> bool:\n job.refresh_from_db()\n is_done = False\n\n while not is_done:\n if job.end_time is None and job.success is None:\n print(f\"Polling {type(job).__name__}s. 
Currently waiting for job id: {job.id}\")\n sleep(20)\n job.refresh_from_db()\n elif job.retried and job.retried_job:\n job = job.retried_job\n elif job.success:\n return True\n else:\n print(f\"{type(job).__name__} {job.id} failed!\")\n return False\n\n return False", "def do_work(self,crash_task):\n import time\n t0=time.time()\n logger.info('*** do_work: worker id=%d tid=%d',self._agent.wid,crash_task.tid)\n _locals = {'tid':crash_task.tid}\n exec_command(self.commands['before_do_work'],_locals)\n exec_command(crash_task.command,_locals)\n exec_command(self.commands['after_do_work'],_locals)\n return CrashResult(time.time()-t0)", "def workTill(self, limitTime):\n if self._working:\n raise Exception(\"Scheduler is already working\")\n \n t0 = time.clock()\n steps = 0\n \n self._working = True\n while (self.step(limitTime)):\n steps += 1\n self.currentTime = limitTime\n self._working = False\n \n dt = time.clock() - t0\n \n return stat(steps, steps / dt if dt > 0 else -1, dt)", "def loop_forever(self):\n self.running = True\n while self.running:\n time.sleep(0.1)", "def run_single(self):\n self.run_sim_time(1)", "def run(self):\n while self._should_run and not self._done_check(self._state):\n try:\n state = self._state.update()\n # Catch any Exception but let any BaseException be raised\n except Exception as error:\n state = self._state.recover(error)\n self._state = state\n if state:\n time.sleep(float(self._state.update_period))", "def loop():\n global loop_idx\n sys.stdout.write('loop index %d/%d\\r\\n' % (loop_idx, _LOOPS))\n time.sleep(0.5)\n loop_idx += 1\n return loop_idx > _LOOPS", "def _run_once(self):\n try:\n self.do_wait()\n self._execute_wakeup_tasks()\n self._trigger_timers()\n except Exception as e:\n Log.error(\"Error occured during _run_once(): \" + e.message)\n Log.error(traceback.format_exc())\n self.should_exit = True" ]
[ "0.6984714", "0.6409769", "0.6315232", "0.62860876", "0.6256095", "0.62228227", "0.61101943", "0.60830945", "0.6035169", "0.60236955", "0.59735036", "0.59445417", "0.5925701", "0.5867354", "0.5861029", "0.5783809", "0.5782785", "0.5782785", "0.5774008", "0.57409346", "0.57183224", "0.57161385", "0.5689258", "0.5688801", "0.5685276", "0.56788754", "0.56751305", "0.5666075", "0.56622195", "0.565795", "0.5655586", "0.5626775", "0.5617098", "0.5611566", "0.5574958", "0.5569478", "0.55659", "0.5562192", "0.5556921", "0.55568314", "0.5547621", "0.5537659", "0.55316675", "0.551145", "0.5508911", "0.550803", "0.5507544", "0.54871386", "0.54868895", "0.54846835", "0.54754764", "0.54695374", "0.5467872", "0.54531145", "0.5450847", "0.54439986", "0.54399025", "0.54096574", "0.53989446", "0.53957975", "0.5393798", "0.5388602", "0.5380597", "0.5378452", "0.5373008", "0.5372585", "0.53652257", "0.53652257", "0.53652257", "0.53490645", "0.53441674", "0.5342836", "0.5335254", "0.5333686", "0.5333235", "0.53237927", "0.53200084", "0.5314187", "0.5312514", "0.5310392", "0.5309068", "0.53021646", "0.52941096", "0.5294056", "0.529264", "0.52874225", "0.5274621", "0.5274384", "0.5273393", "0.52674717", "0.52643645", "0.5262289", "0.5258117", "0.52502954", "0.52487", "0.52486396", "0.52485234", "0.52467346", "0.52464956", "0.5242153" ]
0.5247155
97
Return squarefree decomposition of a polynomial in ``K[X]``.

Examples
========

>>> R, x, y = ring('x y', ZZ)
>>> R.sqf_list(x**5 + 2*x**4*y + x**3*y**2)
(1, [(x + y, 2), (x, 3)])

>>> R, x, y = ring('x y', FF(5))
>>> f = x**5*y**5 + 1
def sqf_list(self, f):
    domain = self.domain

    if domain.is_Field:
        coeff, f = f.LC, f.monic()
    else:
        coeff, f = f.primitive()

    if domain.is_FiniteField:
        return coeff, self._gf_sqf_list(f)

    return coeff, self._rr_yun0_sqf_list(f)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def zzX_sqr(f):\n if poly_univariate_p(f):\n return zzx_sqr(f)\n\n if zzX_zero_p(f):\n return f\n\n df = zzX_degree(f)\n l = poly_level(f)-1\n\n h = []\n\n for i in xrange(0, 2*df+1):\n coeff = zzX_zero(l)\n\n jmin = max(0, i-df)\n jmax = min(i, df)\n\n n = jmax - jmin + 1\n\n jmax = jmin + n // 2 - 1\n\n for j in xrange(jmin, jmax+1):\n coeff = zzX_add(coeff, zzX_mul(f[j], f[i-j]))\n\n coeff = zzX_mul_const(coeff, 2)\n\n if n & 1:\n elem = zzX_sqr(f[jmax+1])\n coeff = zzX_add(coeff, elem)\n\n h.append(coeff)\n\n return h", "def sqf_list(f, all=False):\n coeff, factors = dmp_sqf_list(f.rep, f.lev, f.dom, all=all)\n return coeff, [ (f.per(g), k) for g, k in factors ]", "def zzx_factor_sqf(f, **flags):\n cont, g = zzx_primitive(f)\n\n n = zzx_degree(g)\n\n if n <= 0:\n return cont, []\n\n if poly_LC(g) < 0:\n cont, g = -cont, zzx_neg(g)\n\n if n == 1 or zzx_eisenstein(g):\n return cont, [(g, 1)]\n\n factors = []\n\n if flags.get('cyclotomic', True):\n factors = zzx_cyclotomic_factor(g)\n\n if factors is None:\n factors = zzx_zassenhaus(g)\n\n def compare(f_a, f_b):\n i = len(f_a) - len(f_b)\n\n if not i:\n return cmp(f_a, f_b)\n else:\n return i\n\n return cont, sorted(factors, compare)", "def roots_quintic(f):\n result = []\n\n coeff_5, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n if not all(coeff.is_Rational for coeff in (coeff_5, coeff_4, p_, q_, r_, s_)):\n return result\n\n if coeff_5 != 1:\n f = Poly(f / coeff_5)\n _, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n # Cancel coeff_4 to form x^5 + px^3 + qx^2 + rx + s\n if coeff_4:\n p = p_ - 2*coeff_4*coeff_4/5\n q = q_ - 3*coeff_4*p_/5 + 4*coeff_4**3/25\n r = r_ - 2*coeff_4*q_/5 + 3*coeff_4**2*p_/25 - 3*coeff_4**4/125\n s = s_ - coeff_4*r_/5 + coeff_4**2*q_/25 - coeff_4**3*p_/125 + 4*coeff_4**5/3125\n x = f.gen\n f = Poly(x**5 + p*x**3 + q*x**2 + r*x + s)\n else:\n p, q, r, s = p_, q_, r_, s_\n\n quintic = PolyQuintic(f)\n\n # Eqn standardized. 
Algo for solving starts here\n if not f.is_irreducible:\n return result\n f20 = quintic.f20\n # Check if f20 has linear factors over domain Z\n if f20.is_irreducible:\n return result\n # Now, we know that f is solvable\n for _factor in f20.factor_list()[1]:\n if _factor[0].is_linear:\n theta = _factor[0].root(0)\n break\n d = discriminant(f)\n delta = sqrt(d)\n # zeta = a fifth root of unity\n zeta1, zeta2, zeta3, zeta4 = quintic.zeta\n T = quintic.T(theta, d)\n tol = S(1e-10)\n alpha = T[1] + T[2]*delta\n alpha_bar = T[1] - T[2]*delta\n beta = T[3] + T[4]*delta\n beta_bar = T[3] - T[4]*delta\n\n disc = alpha**2 - 4*beta\n disc_bar = alpha_bar**2 - 4*beta_bar\n\n l0 = quintic.l0(theta)\n Stwo = S(2)\n l1 = _quintic_simplify((-alpha + sqrt(disc)) / Stwo)\n l4 = _quintic_simplify((-alpha - sqrt(disc)) / Stwo)\n\n l2 = _quintic_simplify((-alpha_bar + sqrt(disc_bar)) / Stwo)\n l3 = _quintic_simplify((-alpha_bar - sqrt(disc_bar)) / Stwo)\n\n order = quintic.order(theta, d)\n test = (order*delta.n()) - ( (l1.n() - l4.n())*(l2.n() - l3.n()) )\n # Comparing floats\n if not comp(test, 0, tol):\n l2, l3 = l3, l2\n\n # Now we have correct order of l's\n R1 = l0 + l1*zeta1 + l2*zeta2 + l3*zeta3 + l4*zeta4\n R2 = l0 + l3*zeta1 + l1*zeta2 + l4*zeta3 + l2*zeta4\n R3 = l0 + l2*zeta1 + l4*zeta2 + l1*zeta3 + l3*zeta4\n R4 = l0 + l4*zeta1 + l3*zeta2 + l2*zeta3 + l1*zeta4\n\n Res = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n Res_n = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n\n # Simplifying improves performance a lot for exact expressions\n R1 = _quintic_simplify(R1)\n R2 = _quintic_simplify(R2)\n R3 = _quintic_simplify(R3)\n R4 = _quintic_simplify(R4)\n\n # hard-coded results for [factor(i) for i in _vsolve(x**5 - a - I*b, x)]\n x0 = z**(S(1)/5)\n x1 = sqrt(2)\n x2 = sqrt(5)\n x3 = sqrt(5 - x2)\n x4 = I*x2\n x5 = x4 + I\n x6 = I*x0/4\n x7 = x1*sqrt(x2 + 5)\n sol = [x0, -x6*(x1*x3 - x5), x6*(x1*x3 + x5), -x6*(x4 + x7 - I), x6*(-x4 + x7 + I)]\n\n R1 = R1.as_real_imag()\n R2 = R2.as_real_imag()\n R3 = R3.as_real_imag()\n R4 = R4.as_real_imag()\n\n for i, s in enumerate(sol):\n Res[1][i] = _quintic_simplify(s.xreplace({z: R1[0] + I*R1[1]}))\n Res[2][i] = _quintic_simplify(s.xreplace({z: R2[0] + I*R2[1]}))\n Res[3][i] = _quintic_simplify(s.xreplace({z: R3[0] + I*R3[1]}))\n Res[4][i] = _quintic_simplify(s.xreplace({z: R4[0] + I*R4[1]}))\n\n for i in range(1, 5):\n for j in range(5):\n Res_n[i][j] = Res[i][j].n()\n Res[i][j] = _quintic_simplify(Res[i][j])\n r1 = Res[1][0]\n r1_n = Res_n[1][0]\n\n for i in range(5):\n if comp(im(r1_n*Res_n[4][i]), 0, tol):\n r4 = Res[4][i]\n break\n\n # Now we have various Res values. Each will be a list of five\n # values. We have to pick one r value from those five for each Res\n u, v = quintic.uv(theta, d)\n testplus = (u + v*delta*sqrt(5)).n()\n testminus = (u - v*delta*sqrt(5)).n()\n\n # Evaluated numbers suffixed with _n\n # We will use evaluated numbers for calculation. 
Much faster.\n r4_n = r4.n()\n r2 = r3 = None\n\n for i in range(5):\n r2temp_n = Res_n[2][i]\n for j in range(5):\n # Again storing away the exact number and using\n # evaluated numbers in computations\n r3temp_n = Res_n[3][j]\n if (comp((r1_n*r2temp_n**2 + r4_n*r3temp_n**2 - testplus).n(), 0, tol) and\n comp((r3temp_n*r1_n**2 + r2temp_n*r4_n**2 - testminus).n(), 0, tol)):\n r2 = Res[2][i]\n r3 = Res[3][j]\n break\n if r2 is not None:\n break\n else:\n return [] # fall back to normal solve\n\n # Now, we have r's so we can get roots\n x1 = (r1 + r2 + r3 + r4)/5\n x2 = (r1*zeta4 + r2*zeta3 + r3*zeta2 + r4*zeta1)/5\n x3 = (r1*zeta3 + r2*zeta1 + r3*zeta4 + r4*zeta2)/5\n x4 = (r1*zeta2 + r2*zeta4 + r3*zeta1 + r4*zeta3)/5\n x5 = (r1*zeta1 + r2*zeta2 + r3*zeta3 + r4*zeta4)/5\n result = [x1, x2, x3, x4, x5]\n\n # Now check if solutions are distinct\n\n saw = set()\n for r in result:\n r = r.n(2)\n if r in saw:\n # Roots were identical. Abort, return []\n # and fall back to usual solve\n return []\n saw.add(r)\n\n # Restore to original equation where coeff_4 is nonzero\n if coeff_4:\n result = [x - coeff_4 / 5 for x in result]\n return result", "def zzX_sqf_p(f):\n return zzX_one_p(zzX_gcd(zzX_primitive(f)[1], zzX_diff(f)))", "def zzX_sqf_part(f):\n quo = zzX_quo(f, zzX_gcd(f, zzX_diff(f)))\n return zzX_primitive(quo)[1]", "def _gf_sqf_list(self, f):\n domain = self.domain\n\n n, factors, p = 1, [], int(domain.characteristic)\n m = int(domain.order // p)\n\n while not f.is_ground:\n df = [f.diff(x) for x in self.gens]\n\n if any(_ for _ in df):\n g = f\n for q in df:\n g = self.gcd(g, q)\n h, f, i = f // g, g, 1\n\n while h != 1:\n g = self.gcd(f, h)\n h //= g\n\n if not h.is_ground:\n factors.append((h, i*n))\n\n f //= g\n h = g\n i += 1\n\n n *= p\n\n g = self.zero\n for monom, coeff in f.items():\n g[tuple(_ // p for _ in monom)] = coeff**m\n f = g\n\n return factors", "def _rr_yun0_sqf_list(self, f):\n if f.is_ground:\n return []\n\n result, count = [], 1\n qs = [f.diff(x) for x in self.gens]\n\n g = f\n for q in qs:\n g = self.gcd(g, q)\n\n while f != 1:\n qs = [q // g for q in qs]\n f //= g\n qs = [q - f.diff(x) for x, q in zip(self.gens, qs)]\n\n g = f\n for q in qs:\n g = self.gcd(g, q)\n if g != 1:\n result.append((g, count))\n\n count += 1\n\n return result", "def zzx_sqr(f):\n df, h = zzx_degree(f), []\n\n for i in xrange(0, 2*df+1):\n coeff = INT_ZERO\n\n jmin = max(0, i-df)\n jmax = min(i, df)\n\n n = jmax - jmin + 1\n\n jmax = jmin + n // 2 - 1\n\n for j in xrange(jmin, jmax+1):\n coeff += f[j]*f[i-j]\n\n coeff += coeff\n\n if n & 1:\n elem = f[jmax+1]\n coeff += elem**2\n\n h.append(coeff)\n\n return h", "def zzX_eval_for(f, k, x):\n if k < 0:\n k += poly_level(f) + 1\n\n if k == 1:\n return zzX_eval(f, x)\n\n def rec_eval(g, l):\n if l == k:\n return zzX_eval(g, x)\n else:\n return zzX_strip([ rec_eval(coeff, l+1) for coeff in g ])\n\n return rec_eval(f, 1)", "def zzx_sqf_p(f):\n return zzx_one_p(zzx_gcd(zzx_primitive(f)[1], zzx_diff(f)))", "def zzx_sqf_part(f):\n quo = zzx_quo(f, zzx_gcd(f, zzx_diff(f)))\n return zzx_primitive(quo)[1]", "def quadratic_model(X,F):\r\n \r\n from numpy import flipud, zeros, ones, prod, sum, arange\r\n from numpy.linalg import lstsq\r\n from VyPy.tools import index_set\r\n \r\n M,m = X.shape\r\n \r\n # coefficients\r\n I = flipud( index_set('full',2,m) )\r\n A = zeros([M,I.shape[1]])\r\n for i in range(I.shape[1]):\r\n ind = I[:,i,None]\r\n A[:,i] = prod( X ** ind.T , axis=1 )\r\n \r\n # solve \r\n t = lstsq(A,F)[0]\r\n \r\n # unwrap\r\n be = 
t[1:m+1,:]\r\n Al = zeros([m,m])\r\n for i in range(m+1,I.shape[1]):\r\n ind = I[:,i]\r\n loc = arange(m)[ind != 0]\r\n if len(loc) == 1:\r\n Al[loc,loc] = 2*t[i]\r\n else:\r\n Al[loc[0],loc[1]] = t[i]\r\n Al[loc[1],loc[0]] = t[i]\r\n \r\n return be,Al", "def zzX_eval_list(f, A):\n def rec_eval(g, l, L):\n if l == L:\n return zzx_eval(g, A[-1])\n else:\n h = [ rec_eval(h, l+1, L) for h in g ]\n\n if l <= L - len(A):\n return h\n else:\n return zzx_eval(h, A[-L+l-1])\n\n if not A:\n return f\n\n L = poly_level(f)\n\n if zzX_zero_p(f):\n return zzX_zero(L - len(A))\n\n e = rec_eval(f, 1, L)\n\n if L == len(A):\n return e\n else:\n return zzX_strip(e)", "def qft_recursive(qubits):\n qftcirc = Circuit()\n\n # First add the QFT subroutine above\n qftcirc.add(qft_no_swap(qubits))\n\n # Then add SWAP gates to reverse the order of the qubits:\n for i in range(math.floor(len(qubits) / 2)):\n qftcirc.swap(qubits[i], qubits[-i - 1])\n\n return qftcirc", "def chebyshev_polynomial(X, k):\n print(\"Calculating Chebyshev polynomials up to order {}...\".format(k))\n\n T_k = list()\n T_k.append(sp.eye(X.shape[0]).tocsr())\n T_k.append(X)\n\n def chebyshev_recurrence(T_k_minus_one, T_k_minus_two, X):\n X_ = sp.csr_matrix(X, copy=True)\n return 2 * X_.dot(T_k_minus_one) - T_k_minus_two\n\n for i in range(2, k+1):\n T_k.append(chebyshev_recurrence(T_k[-1], T_k[-2], X))\n\n return T_k", "def roots_quadratic(f):\n\n a, b, c = f.all_coeffs()\n dom = f.get_domain()\n\n def _sqrt(d):\n # remove squares from square root since both will be represented\n # in the results; a similar thing is happening in roots() but\n # must be duplicated here because not all quadratics are binomials\n co = []\n other = []\n for di in Mul.make_args(d):\n if di.is_Pow and di.exp.is_Integer and di.exp % 2 == 0:\n co.append(Pow(di.base, di.exp//2))\n else:\n other.append(di)\n if co:\n d = Mul(*other)\n co = Mul(*co)\n return co*sqrt(d)\n return sqrt(d)\n\n def _simplify(expr):\n if dom.is_Composite:\n return factor(expr)\n else:\n from sympy.simplify.simplify import simplify\n return simplify(expr)\n\n if c is S.Zero:\n r0, r1 = S.Zero, -b/a\n\n if not dom.is_Numerical:\n r1 = _simplify(r1)\n elif r1.is_negative:\n r0, r1 = r1, r0\n elif b is S.Zero:\n r = -c/a\n if not dom.is_Numerical:\n r = _simplify(r)\n\n R = _sqrt(r)\n r0 = -R\n r1 = R\n else:\n d = b**2 - 4*a*c\n A = 2*a\n B = -b/A\n\n if not dom.is_Numerical:\n d = _simplify(d)\n B = _simplify(B)\n\n D = factor_terms(_sqrt(d)/A)\n r0 = B - D\n r1 = B + D\n if a.is_negative:\n r0, r1 = r1, r0\n elif not dom.is_Numerical:\n r0, r1 = [expand_2arg(i) for i in (r0, r1)]\n\n return [r0, r1]", "def vsfun(Q_slm, theta, phi,f=[]):\n vsf_th=numpy.zeros(theta.shape, dtype='complex')\n vsf_ph=numpy.zeros(theta.shape, dtype='complex')\n for (s,l,m) in Q_slm:\n vsh_th,vsh_ph=K(s, l, m, theta, phi)\n c_slm=Q_slm.getBysnm(s, l, m) if not(f) else Q_slm.getBysnm(s, l, m)(f)\n vsf_th=vsf_th+c_slm*vsh_th\n vsf_ph=vsf_ph+c_slm*vsh_ph\n return vsf_th, vsf_ph", "def sqrtx():\n return Operator([[(1.+1.j)/2,(1.-1.j)/2],[(1.-1.j)/2,(1.+1.j)/2]])", "def skolemize(formula, quantified_varible_list):\n formula_type = formula.get_type()\n\n if formula_type in NOT_QUANTIFIERS:\n return skolemize_non_quantifier(formula)\n\n elif formula_type == OperandTypes.T_EXISTS:\n return skolemize_exists(formula, quantified_varible_list)\n\n elif formula_type == OperandTypes.T_FORALL:\n return skolemize_forall(formula, quantified_varible_list)\n\n else:\n raise Exception(\"Skolemize exception: formula must be in nnf, \\\n 
prenex form!\")", "def QR_lsfit(flist, x, y, dy):\n # Initialization\n n = len(x)\n m = len(flist)\n A = np.zeros((n, m), dtype='float64')\n b = np.zeros(n, dtype='float64')\n c = np.zeros(m, dtype='float64')\n dc = np.zeros(m, dtype='float64')\n Rinv = np.zeros((m, m), dtype='float64')\n\n # Fill A and c\n for i in range(n):\n # Weight data by error\n b[i] = y[i] / dy[i]\n\n for j in range(m):\n A[i, j] = flist[j](x[i]) / dy[i]\n\n # Decompose using Given's rotation and solve by in-place backsub\n decomp(A)\n x = solve(A, b)\n\n # Save it in c\n for i in range(m):\n c[i] = x[i]\n\n # Calculate the inverse\n inverse(build_r(A), Rinv)\n\n # Calculate the covariance matrix S\n S = np.dot(Rinv, np.transpose(Rinv))\n\n # Calculate the uncertainties on the coefficients from S\n for i in range(m):\n dc[i] = np.sqrt(S[i, i])\n\n return c, dc", "def sqr(f):\n return f.per(dmp_sqr(f.rep, f.lev, f.dom))", "def eval_K_chol(self, S, sigma_n, sigma_f):\n K = self.eval_K(S)\n K += sigma_n * np.eye(K.shape[0])\n K_chol = jitchol(K)\n return K_chol", "def compute_clique_potentials(self,F):\r\n\r\n for i in self.nodes():\r\n self.node[i]['fac'] = factor([],[],[])\r\n \r\n for f in F.factors: # assign each factor to a clique\r\n for j,data in self.nodes_iter(data=True):\r\n if len(scipy.setdiff1d(f.var,data['clique']) ) ==0:\r\n self.node[j]['fac'] *= f\r\n self.nop += scipy.prod(self.node[j]['fac'].card)\r\n break", "def __test_s_polynomial():\n poly_ring = PolynomialRing(QQ, 'x,y', order='deglex')\n x, y = poly_ring('x'), poly_ring('y')\n g = x ** 3 - 2 * x * y\n h = x ** 2 * y - 2 * y ** 2 + x\n print __s_polynomial(g, h) # Expected -x^2", "def form(func, dist_list, init_search_point, alg):\n \n def SLSQP(func, dist_list, init_search_point):\n \n dim = len(dist_list)\n current_beta = 0\n new_beta = 1\n sig = np.empty((1, dim))\n mu = np.empty((1, dim))\n new_search_point = np.array(init_search_point).reshape((1, dim))\n \n def f_l(x_l):\n return(func([x_l[i,:]*sig[0,i] + mu[0,i] for i in range(0, dim)]))\n \n while abs(current_beta-new_beta) > 0.001:\n current_search_point = new_search_point\n current_beta = new_beta\n for i in range(0, dim):\n if dist_list[i][1] != 'norm':\n mu[0,i], sig[0, i] = Rosenblatt_Transform(dist_list[i][0], current_search_point[0,i])\n else:\n mu[0,i], sig[0, i] = dist_list[i][0].mean(), dist_list[i][0].std()\n \n dist_fun = lambda u: np.linalg.norm(u) \n \n alg = 'SLSQP'\n \n H = lambda u: f_l(u)\n cons = ({'type': 'eq', 'fun': lambda u: -(H(u.reshape(-1,1)))})\n \n result = scipy.optimize.minimize(dist_fun, x0 = current_search_point, constraints = cons, method=alg)\n \n new_beta = result.fun\n u = np.array(result.x).reshape((1,dim))\n \n new_search_point = np.empty((1, dim))\n for i in range(0, dim):\n new_search_point[0,i] = mu[0,i] + u[0,i]*sig[0,i]\n \n beta_value = new_beta \n p_f = sst.norm.cdf(-beta_value)\n iterations = result.nit\n u = result.x\n x = u[:]*sig[0,:] + mu[0,:]\n print(x)\n grad_val = scipy.optimize.approx_fprime(x, func, 0.00000001)\n grad_val = grad_val.reshape((1, dim))\n \n sum1 = np.sum((grad_val[0,:]**2)*(sig[0,:]**2))\n cosines = np.empty((1, dim))\n \n for i in range(0, dim):\n cosines[0,i] = grad_val[0,i]*sig[0,i]/np.sqrt(sum1) \n \n print('------------------------')\n print('First-Order Reliability Analysis')\n print('Algorithm: slsqp solver')\n print('Iterations: {}\\nReliability index = {}\\nProbability of failure = {}'.format(iterations, beta_value, p_f))\n print('------------------------')\n \n return(beta_value, p_f, x, u, mu, sig, cosines) 
\n \n def HL_R(func, dist_list, init_search_point):\n \n iterations = 0\n cur_beta = 3\n new_beta = 0\n dim = len(dist_list)\n global_mean_arr = np.empty((1, dim))\n global_std_arr = np.empty((1, dim))\n new_search_point = np.array(init_search_point).reshape((1, dim))\n \n while abs(cur_beta - new_beta) > 0.001:\n cur_beta = new_beta\n cur_cosines = np.zeros((1, dim))\n new_cosines = np.ones((1, dim))\n \n while max((abs(cur_cosines - new_cosines))[0]) > 0.005:\n \n cur_cosines = new_cosines\n \n cur_search_point = new_search_point\n \n for i in range(0, dim):\n if dist_list[i][1] != 'norm':\n global_mean_arr[0, i], global_std_arr[0, i] = Rosenblatt_Transform(dist_list[i][0], cur_search_point[0,i])\n else:\n global_mean_arr[0, i], global_std_arr[0, i] = dist_list[i][0].mean(), dist_list[i][0].std()\n \n \n grad_val = scipy.optimize.approx_fprime(cur_search_point[0], func, 0.00000001)\n grad_val = grad_val.reshape((1, dim))\n \n sum1 = np.sum((grad_val[0,:]**2)*(global_std_arr[0,:]**2))\n cosines = np.empty((1, dim))\n \n for i in range(0, dim):\n cosines[0,i] = grad_val[0,i]*global_std_arr[0,i]/np.sqrt(sum1)\n \n new_cosines = cosines\n new_search_point = np.empty((1, dim))\n for i in range(0, dim):\n new_search_point[0,i] = global_mean_arr[0,i] - new_cosines[0,i]*global_std_arr[0,i]*cur_beta\n \n iterations = iterations + 1\n \n \n B = Symbol('B')\n coordinates = []\n for i in range(0, dim):\n coordinates.append(global_mean_arr[0, i] - new_cosines[0,i]*global_std_arr[0, i]*B)\n new_beta = float(solve(func(coordinates), B)[0])\n \n cosines = new_cosines \n beta_value = new_beta\n p_f = sst.norm.cdf(-new_beta)\n x = new_search_point\n u = (x[0,:] - global_mean_arr[0,:])/global_std_arr\n \n print('-------------------------')\n print('First-Order Reliability Analysis')\n print('Algorithm: HL-R solver')\n print('Iterations: {}\\nReliability index = {}\\nProbability of failure = {}'.format(iterations, beta_value, p_f))\n print('-------------------------')\n \n return(beta_value, p_f, x, u, global_mean_arr, global_std_arr, cosines)\n \n def HL_RF(func, dist_list, init_search_point):\n\n cur_beta = 3\n new_beta = 0\n dim = len(dist_list)\n\n new_search_point = np.array(init_search_point).reshape((1, dim))\n iterations = 0\n while abs(cur_beta - new_beta) > 0.001 and abs(func(new_search_point[0])) > 0.001:\n global_mean_arr = np.empty((1, dim))\n global_std_arr = np.empty((1, dim))\n cur_beta = new_beta\n cur_search_point = new_search_point\n \n for i in range(0, dim):\n if dist_list[i][1] != 'norm':\n global_mean_arr[0,i], global_std_arr[0, i] = Rosenblatt_Transform(dist_list[i][0], cur_search_point[0,i])\n else:\n global_mean_arr[0,i], global_std_arr[0, i] = dist_list[i][0].mean(), dist_list[i][0].std()\n \n f_val = func(cur_search_point[0])\n \n x_ast = np.empty((1, dim))\n for i in range(0, dim):\n x_ast[0,i] =(cur_search_point[0,i] - global_mean_arr[0,i])/global_std_arr[0,i]\n\n grad_val = scipy.optimize.approx_fprime(cur_search_point[0], func, 0.000001)\n grad_val = grad_val.reshape((1, dim)) \n \n grad_val_ast = np.empty(grad_val.shape)\n for i in range(0, dim):\n grad_val_ast[0,i] = grad_val[0,i]*global_std_arr[0,i]\n \n t1 = 1/np.sum(grad_val_ast[0,:]**2)\n\n t2 = sum(grad_val_ast[0,:]*x_ast[0,:]) - f_val\n \n t3 = t1*t2\n \n new_x_ast = np.empty(x_ast.shape)\n for i in range(0, dim):\n new_x_ast[0,i] = t3*grad_val_ast[0,i]\n u = new_x_ast\n new_beta = np.linalg.norm(new_x_ast)\n \n new_search_point = np.empty((1, dim))\n for i in range(0, dim):\n new_search_point[0,i] = 
new_x_ast[0,i]*global_std_arr[0,i] + global_mean_arr[0,i]\n iterations = iterations + 1\n \n grad_val_ast_sum = sum(grad_val_ast[0,:]**2)\n cosines = grad_val_ast/(grad_val_ast_sum**0.5)\n beta_value = new_beta\n x = new_search_point\n p_f = sst.norm.cdf(-beta_value)\n \n print('-------------------------')\n print('First-Order Reliability Analysis')\n print('Algorithm: HL-RF solver')\n print('Iterations: {}\\nReliability index = {}\\nProbability of failure = {}'.format(iterations, beta_value, p_f))\n print('-------------------------')\n \n return(beta_value, p_f, x, u, global_mean_arr, global_std_arr, cosines)\n \n if alg == 'slsqp':\n return(SLSQP(func, dist_list, init_search_point))\n elif alg == 'HL-R':\n return(HL_R(func, dist_list, init_search_point))\n elif alg == 'HL-RF':\n return(HL_RF(func, dist_list, init_search_point))", "def zzx_zassenhaus(f):\n n = zzx_degree(f)\n\n if n == 1:\n return [f]\n\n A = zzx_max_norm(f)\n b = poly_LC(f)\n B = abs(int(sqrt(n+1))*2**n*A*b)\n C = (n+1)**(2*n)*A**(2*n-1)\n gamma = int(ceil(2*log(C, 2)))\n prime_max = int(2*gamma*log(gamma))\n\n for p in xrange(3, prime_max+1):\n if not isprime(p) or b % p == 0:\n continue\n\n F = gf_from_int_poly(f, p)\n\n if gf_sqf_p(F, p):\n break\n\n l = int(ceil(log(2*B + 1, p)))\n\n modular = []\n\n for ff in gf_factor_sqf(F, p)[1]:\n modular.append(gf_to_int_poly(ff, p))\n\n g = zzx_hensel_lift(p, f, modular, l)\n\n T = set(range(len(g)))\n factors, s = [], 1\n\n while 2*s <= len(T):\n for S in subsets(T, s):\n G, H = [b], [b]\n\n S = set(S)\n\n for i in S:\n G = zzx_mul(G, g[i])\n for i in T-S:\n H = zzx_mul(H, g[i])\n\n G = zzx_trunc(G, p**l)\n H = zzx_trunc(H, p**l)\n\n G_norm = zzx_l1_norm(G)\n H_norm = zzx_l1_norm(H)\n\n if G_norm*H_norm <= B:\n T = T - S\n\n G = zzx_primitive(G)[1]\n f = zzx_primitive(H)[1]\n\n factors.append(G)\n b = poly_LC(f)\n\n break\n else:\n s += 1\n\n return factors + [f]", "def sqf_part(f):\n return f.per(dmp_sqf_part(f.rep, f.lev, f.dom))", "def sqrty():\n return Operator([[(1.+1.j)/2,(-1-1.j)/2],[(1.+1.j)/2,(1.+1.j)/2]])", "def stiffnessMatrix (bsp, knotlist,p, nbquad):\n\n\tS = np.zeros((bsp.N-2, bsp.N-2))\n\t\n\tfor line in range(1, bsp.N-1):\n\t\t\n\t\tfor column in range(1, bsp.N-1):\n\t\t\n\t\t\tfor iknot in range(len(knotlist)-1):\n\t\t\t\tS[line-1, column-1] = S[line-1, column-1] + legendreGauss(bilinearForm, nbquad, knotlist[iknot], knotlist[iknot+1], line, bsp, ind2=column)\n\t\n\treturn S;", "def sqf_list_include(f, all=False):\n factors = dmp_sqf_list_include(f.rep, f.lev, f.dom, all=all)\n return [ (f.per(g), k) for g, k in factors ]", "def find_fks_perfect(keylist):\n global _max_k\n mlist = _get_l1_mlist(len(keylist))\n for m in mlist: # of buckets\n for p in _primes:\n for k in range(3, _max_k):\n hash_f = hash_fun_fks_t(k,p,m)\n if xedhash.is_perfect(keylist, hash_f):\n return hash_f\n del hash_f\n return None", "def hensel_lifting_poly_factorization(f):\n\n domain = f.parent()\n base_domain = domain.base()\n\n if not base_domain.is_ring() or not base_domain == IntegerRing():\n raise ValueError(\"The base domain must be the integer ring\")\n\n if f.degree() < 1 or not f.is_squarefree() or not f.is_primitive():\n raise ValueError(\"f must be a nonconstant, squarefree, primitive polynomial\")\n\n n = f.degree()\n if n == 1:\n return [f]\n\n A = base_domain(f.norm(Infinity))\n b = f.leading_coefficient()\n B = sqrt(n + 1) * 2**n * A * b\n C = (n + 1)**(2*n) * A**(2*n - 1)\n gamma = ceil(2 * log(C, 2))\n\n p = 2\n while p <= 2*gamma*log(gamma):\n if b % p != 0:\n Fp = 
PolynomialRing(GF(p), 'x')\n f_bar = Fp(f)\n if f_bar.is_squarefree():\n break\n p = next_prime(p)\n\n if p > 2*gamma*log(gamma): # Should never happen\n raise RuntimeError(\"Couldn't find such a prime\")\n\n # Modular factorization\n Fp = PolynomialRing(GF(p), 'x')\n f_bar = Fp(f)\n\n modular_factors = berlekamp_poly_factorization(f_bar, squarefree=True)\n\n ZZR = PolynomialRing(ZZ, 'x')\n modular_factors = map(ZZR, modular_factors)\n\n # Hensel lifting\n l = ceil(log(2*B + 1, p))\n modular_factors = __multifactor_hensel_lifting(f, p, l, modular_factors)\n\n # The set of modular factors still to be treated, the set of factors found, and the polynomial f_ still to be\n # factored.\n Zpl = PolynomialRing(IntegerModRing(p ** l), 'x')\n modular_factors = Set(map(Zpl, modular_factors))\n s = 1\n factors = []\n f_ = f\n\n # Factor combination\n while 2*s <= len(modular_factors):\n for S in Subsets(modular_factors, s):\n g_ = ZZR(Zpl(b) * prod(S))\n h_ = ZZR(Zpl(b) * prod(modular_factors.difference(S)))\n\n if g_.norm(1) * h_.norm(1) <= B:\n modular_factors = modular_factors.difference(S)\n factors.append(ZZR(g_ / poly_content(g_))) # Primitive part\n f_ = ZZR(h_ / poly_content(h_))\n b = f_.leading_coefficient()\n break # Exit the for loop and continue the while loop\n\n s += 1\n\n factors.append(f_)\n return factors", "def _root_sum_of_squares(list):\n return sum((el ** 2 for el in list)) ** (0.5)", "def chebyshev_polynomials(adj, k):\n # print(\"Calculating Chebyshev polynomials up to order {}...\".format(k))\n\n adj_normalized = normalize_adj(adj)\n laplacian = sp.eye(adj.shape[0]) - adj_normalized\n largest_eigval, _ = eigsh(laplacian, 1, which='LM')\n scaled_laplacian = (2. / largest_eigval[0]) * laplacian - sp.eye(adj.shape[0])\n\n t_k = list()\n t_k.append(sp.eye(adj.shape[0]))\n t_k.append(scaled_laplacian)\n\n def chebyshev_recurrence(t_k_minus_one, t_k_minus_two, scaled_lap):\n s_lap = sp.csr_matrix(scaled_lap, copy=True)\n return 2 * s_lap.dot(t_k_minus_one) - t_k_minus_two\n\n for i in range(2, k+1):\n t_k.append(chebyshev_recurrence(t_k[-1], t_k[-2], scaled_laplacian))\n\n return sparse_to_tuple(t_k)", "def sq_custom(f,T,a=0,b=0):\n fs=44100\n t=np.linspace(0,T,T*fs)\n A=np.floor(a*fs*T)\n D=np.floor(b*fs*T)\n S1=np.linspace(0,1,A)\n S2=np.ones(T*fs-A-D)\n S3=np.linspace(1,0,D)\n S0=signal.square(2 * np.pi * f * t)\n return(np.hstack((S1,S2,S3))*S0)", "def F(cst, x):\n [u0, v0, u1, v1, u2, v2, coeffs] = cst\n [u, v, g1, g2, g3] = x\n a = g1*u1 - u0\n b = g2*u2 - u0\n c = g3*u - u0\n l = g1*v1 - v0 \n m = g2*v2 - v0\n n = g3*v - v0\n r = g1 - 1\n s = g2 - 1\n t = g3 - 1\n return np.array([\n coeffs[0]*(a**2-l**2) + 2*coeffs[1]*(a*b-l*m) + coeffs[2]*(b**2-m**2) + 2*coeffs[3]*(a*c-l*n) + 2*coeffs[4]*(b*c-m*n) + c**2 - n**2,\n coeffs[0]*(l**2-r**2) + 2*coeffs[1]*(l*m-r*s) + coeffs[2]*(m**2-s**2) + 2*coeffs[3]*(l*n-r*t) + 2*coeffs[4]*(m*n-s*t) + n**2 - t**2,\n coeffs[0]*a*l + coeffs[1]*(l*b+m*a) + coeffs[2]*m*b + coeffs[3]*(l*c+n*a) + coeffs[4]*(m*c+b*n) + c*n,\n coeffs[0]*a*r + coeffs[1]*(r*b+s*a) + coeffs[2]*s*b + coeffs[3]*(r*c+t*a) + coeffs[4]*(s*c+b*t) + c*t,\n coeffs[0]*r*l + coeffs[1]*(l*s+m*r) + coeffs[2]*m*s + coeffs[3]*(l*t+n*r) + coeffs[4]*(m*t+s*n) + t*n \n ])", "def zassenhaus(f):\n # keep leading coefficient\n lf = f.leading_coefficient()\n\n # p-adic factorization\n p, fp_factors = padic_factorization(f)\n if len(fp_factors) == 1:\n return [f]\n\n # purge leading coefficient from factors\n for i,g in enumerate(fp_factors):\n if g.degree() == 0:\n del fp_factors[i]\n 
break\n\n # lift to Mignotte bound\n blm = upper_bound_of_coefficient(f)\n bound = p**(arith1.log(2*blm,p)+1)\n\n # Hensel lifting\n lf_inv_modq = intresidue.IntegerResidueClass(lf, bound).inverse()\n fq = f.coefficients_map(lambda c: (lf_inv_modq*c).minimumAbsolute()) # fq is monic\n fq_factors, q = hensel.lift_upto(fq, fp_factors, p, bound)\n\n return brute_force_search(f, fq_factors, bound)", "def chebyshev_polynomials(adj, k):\n print(\"Calculating Chebyshev polynomials up to order {}...\".format(k))\n\n adj_normalized = normalize_adj(adj)\n laplacian = sp.eye(adj.shape[0]) - adj_normalized\n largest_eigval, _ = eigsh(laplacian, 1, which='LM')\n scaled_laplacian = (2. / largest_eigval[0]) * laplacian - sp.eye(adj.shape[0])\n\n t_k = list()\n t_k.append(sp.eye(adj.shape[0]))\n t_k.append(scaled_laplacian)\n\n def chebyshev_recurrence(t_k_minus_one, t_k_minus_two, scaled_lap):\n s_lap = sp.csr_matrix(scaled_lap, copy=True)\n return 2 * s_lap.dot(t_k_minus_one) - t_k_minus_two\n\n for i in range(2, k+1):\n t_k.append(chebyshev_recurrence(t_k[-1], t_k[-2], scaled_laplacian))\n\n return sparse_to_tuple(t_k)", "def chebyshev_polynomials(adj, k):\n print(\"Calculating Chebyshev polynomials up to order {}...\".format(k))\n\n adj_normalized = normalize_adj(adj)\n laplacian = sp.eye(adj.shape[0]) - adj_normalized\n largest_eigval, _ = eigsh(laplacian, 1, which='LM')\n scaled_laplacian = (2. / largest_eigval[0]) * laplacian - sp.eye(adj.shape[0])\n\n t_k = list()\n t_k.append(sp.eye(adj.shape[0]))\n t_k.append(scaled_laplacian)\n\n def chebyshev_recurrence(t_k_minus_one, t_k_minus_two, scaled_lap):\n s_lap = sp.csr_matrix(scaled_lap, copy=True)\n return 2 * s_lap.dot(t_k_minus_one) - t_k_minus_two\n\n for i in range(2, k+1):\n t_k.append(chebyshev_recurrence(t_k[-1], t_k[-2], scaled_laplacian))\n\n return sparse_to_tuple(t_k)", "def test_simplex_lsq_fitter(self):\n\n class Rosenbrock(Fittable2DModel):\n a = Parameter()\n b = Parameter()\n\n @staticmethod\n def evaluate(x, y, a, b):\n return (a - x) ** 2 + b * (y - x**2) ** 2\n\n x = y = np.linspace(-3.0, 3.0, 100)\n with NumpyRNGContext(_RANDOM_SEED):\n z = Rosenbrock.evaluate(x, y, 1.0, 100.0)\n z += np.random.normal(0.0, 0.1, size=z.shape)\n\n fitter = SimplexLSQFitter()\n r_i = Rosenbrock(1, 100)\n r_f = fitter(r_i, x, y, z)\n\n assert_allclose(r_f.parameters, [1.0, 100.0], rtol=1e-2)", "def js_sq_safe(sol, depth=1, escape_newlines = True):\n if not sol:\n return ''\n \n if isinstance(sol, list):\n l = []\n for el in sol:\n l.append(_js_sq_safe(el, depth, escape_newlines))\n return l\n else:\n return _js_sq_safe(sol, depth, escape_newlines)", "def chebyshev_polynomials(adj, k):\n print(\"Calculating Chebyshev polynomials up to order {}...\".format(k))\n\n adj_normalized = normalize_adj(adj)\n laplacian = sp.eye(adj.shape[0]) - adj_normalized\n largest_eigval, _ = eigsh(laplacian, 1, which='LM')\n scaled_laplacian = (\n 2. 
/ largest_eigval[0]) * laplacian - sp.eye(adj.shape[0])\n\n t_k = list()\n t_k.append(sp.eye(adj.shape[0]))\n t_k.append(scaled_laplacian)\n\n def chebyshev_recurrence(t_k_minus_one, t_k_minus_two, scaled_lap):\n s_lap = sp.csr_matrix(scaled_lap, copy=True)\n return 2 * s_lap.dot(t_k_minus_one) - t_k_minus_two\n\n for i in range(2, k + 1):\n t_k.append(chebyshev_recurrence(t_k[-1], t_k[-2], scaled_laplacian))\n\n return sparse_to_tensor(t_k)", "def roots_quartic(f):\n _, a, b, c, d = f.monic().all_coeffs()\n\n if not d:\n return [S.Zero] + roots([1, a, b, c], multiple=True)\n elif (c/a)**2 == d:\n x, m = f.gen, c/a\n\n g = Poly(x**2 + a*x + b - 2*m, x)\n\n z1, z2 = roots_quadratic(g)\n\n h1 = Poly(x**2 - z1*x + m, x)\n h2 = Poly(x**2 - z2*x + m, x)\n\n r1 = roots_quadratic(h1)\n r2 = roots_quadratic(h2)\n\n return r1 + r2\n else:\n a2 = a**2\n e = b - 3*a2/8\n f = _mexpand(c + a*(a2/8 - b/2))\n aon4 = a/4\n g = _mexpand(d - aon4*(a*(3*a2/64 - b/4) + c))\n\n if f.is_zero:\n y1, y2 = [sqrt(tmp) for tmp in\n roots([1, e, g], multiple=True)]\n return [tmp - aon4 for tmp in [-y1, -y2, y1, y2]]\n if g.is_zero:\n y = [S.Zero] + roots([1, 0, e, f], multiple=True)\n return [tmp - aon4 for tmp in y]\n else:\n # Descartes-Euler method, see [7]\n sols = _roots_quartic_euler(e, f, g, aon4)\n if sols:\n return sols\n # Ferrari method, see [1, 2]\n p = -e**2/12 - g\n q = -e**3/108 + e*g/3 - f**2/8\n TH = Rational(1, 3)\n\n def _ans(y):\n w = sqrt(e + 2*y)\n arg1 = 3*e + 2*y\n arg2 = 2*f/w\n ans = []\n for s in [-1, 1]:\n root = sqrt(-(arg1 + s*arg2))\n for t in [-1, 1]:\n ans.append((s*w - t*root)/2 - aon4)\n return ans\n\n # whether a Piecewise is returned or not\n # depends on knowing p, so try to put\n # in a simple form\n p = _mexpand(p)\n\n\n # p == 0 case\n y1 = e*Rational(-5, 6) - q**TH\n if p.is_zero:\n return _ans(y1)\n\n # if p != 0 then u below is not 0\n root = sqrt(q**2/4 + p**3/27)\n r = -q/2 + root # or -q/2 - root\n u = r**TH # primary root of solve(x**3 - r, x)\n y2 = e*Rational(-5, 6) + u - p/u/3\n if fuzzy_not(p.is_zero):\n return _ans(y2)\n\n # sort it out once they know the values of the coefficients\n return [Piecewise((a1, Eq(p, 0)), (a2, True))\n for a1, a2 in zip(_ans(y1), _ans(y2))]", "def chi2sf(x, k):", "def squareRoot(requestContext, seriesList):\n for series in seriesList:\n series.name = \"squareRoot(%s)\" % (series.name)\n for i,value in enumerate(series):\n series[i] = safePow(value, 0.5)\n return seriesList", "def automorphism_group_QQ_fixedpoints(rational_function, return_functions=False, iso_type=False):\n\n if rational_function.parent().is_field():\n K = rational_function.parent()\n R = K.ring()\n else:\n R = rational_function.parent()\n K = R.fraction_field()\n\n F = R.base_ring()\n\n if F != QQ and F!= ZZ:\n raise TypeError(\"coefficient ring is not the rational numbers or the integers\")\n\n z = R.gen(0)\n phi = R.fraction_field()(rational_function)\n\n f = phi.numerator()\n g = phi.denominator()\n\n #scale f,g so both have integer coefficients\n N = lcm(f.denominator(),g.denominator())\n f = f*N\n g = g*N\n N = gcd(gcd(f.coefficients()), gcd(g.coefficients()))\n f = f/N\n g = g/N\n\n d = max(f.degree(), g.degree())\n\n h = f - g*z\n\n if return_functions:\n elements = [z]\n else:\n elements = [matrix(F, 2, [1,0,0,1])]\n\n rational_roots = h.roots(multiplicities = False)\n\n min_poly = 1\n\n #check if infinity is a fixed point\n if g.degree() < d: #then infinity is a fixed point\n #find elements in W of the form (infinity, y)\n #where W is the set of F-rational 
points (x,y) such that\n #x is fixed by phi and phi(y)=x\n for T in g.roots(multiplicities=False):\n alpha = T\n zeta = -1\n s = (zeta*z + alpha*(1 - zeta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2, [zeta, alpha*(1-zeta), 0, 1]))\n\n for S in h.roots():\n min_poly = min_poly*(z - S[0])**(S[1])\n\n if g.degree() < d: #then infinity is a fixed point so (infinity, S[0])\n alpha = S[0] # is in Z_(1,1)**2\n zeta = -1\n s = (zeta*z + alpha*(1 - zeta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2, [zeta, alpha*(1-zeta), 0, 1]))\n\n #now compute points in W\n preimage = f - g*S[0]\n if preimage.degree() < d: #infinity is in W\n zeta = -1\n alpha = S[0]\n s = (zeta*z + alpha*(1 - zeta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2, [zeta, alpha*(1-zeta), 0, 1]))\n for T in preimage.roots(multiplicities=False):\n if T != S[0]:\n zeta = -1\n alpha = S[0]\n beta = T\n s = ( (alpha - zeta*beta)*z - (alpha*beta)*(1 - zeta))/((1 - zeta)*z + (alpha*zeta - beta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2,\n [(alpha - zeta*beta), - (alpha*beta)*(1 - zeta),\n (1 - zeta), (alpha*zeta - beta)]))\n\n #first look at rational fixed points\n #Subsets is ok since we just needed unordered pairs\n for S in Subsets(rational_roots, 2):\n zeta = -1\n alpha = S[0]\n beta = S[1]\n s = ( (alpha - zeta*beta)*z - (alpha*beta)*(1 - zeta))/((1 - zeta)*z + (alpha*zeta - beta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2,\n [(alpha - zeta*beta), - (alpha*beta)*(1 - zeta),\n (1 - zeta), (alpha*zeta - beta)]))\n\n\n #now consider 2-periodic points\n psi = phi(phi(z))\n f2 = psi.numerator()\n g2 = psi.denominator()\n period2_points = [x for x in (f2 - z*g2).roots(multiplicities=False) if not x in rational_roots]\n for S in Subsets(period2_points, 2):\n zeta = -1\n alpha = S[0]\n beta = S[1]\n s = ( (alpha - zeta*beta)*z - (alpha*beta)*(1 - zeta))/((1 - zeta)*z + (alpha*zeta - beta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2,\n [(alpha - zeta*beta), - (alpha*beta)*(1 - zeta),\n (1 - zeta), (alpha*zeta - beta)]))\n if g2.degree() < f2.degree() and g.degree() == d: #infinity has period 2\n for alpha in period2_points:\n zeta = -1\n s = (zeta*z + alpha*(1 - zeta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2, [zeta, alpha*(1-zeta), 0, 1]))\n factors = (f2 - z*g2).factor()\n L1 = NumberField(z**2 + 1,'i')\n i=L1.gen(0)\n L2 = NumberField(z**2 + 3,'isqrt3')\n isqrt3 = L2.gen(0)\n for psi in factors:\n if psi[0].degree() == 2:\n a = psi[0][2]\n b = psi[0][1]\n c = psi[0][0]\n disc = b**2 - 4*a*c\n s = (-b*z - 2*c)/(2*a*z + b)\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(K(s))\n else:\n elements.append(matrix(F, 2, [-b,-2*c, 2*a, b]))\n if is_square(-disc): #psi[0] generates Q(i)\n alpha = psi[0].change_ring(L1).roots()[0][0]\n beta = alpha.trace() - alpha\n for zeta in [i, -i]:\n a = (alpha - zeta*beta)/(1 - zeta)\n d = (alpha*zeta - beta)/(1 - zeta)\n if a in F and d in F:\n a = F(a)\n d = F(d)\n b = F(-alpha*beta)\n s = ( a*z + b)/(z + d)\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(K(s))\n else:\n elements.append(matrix(F, 
2, [a,b, 1, d]))\n elif is_square(-3*disc): #psi[0] generates Q(zeta_3)\n alpha = psi[0].change_ring(L2).roots()[0][0]\n beta = alpha.trace() - alpha\n for zeta in [F(1)/F(2)*(1 + isqrt3), F(1)/F(2)*(1 - isqrt3),F(1)/F(2)*(-1 + isqrt3), F(1)/F(2)*(-1 - isqrt3)]:\n a = (alpha - zeta*beta)/(1 - zeta)\n d = (alpha*zeta - beta)/(1 - zeta)\n if a in F and d in F:\n a = F(a)\n d = F(d)\n b = F(-alpha*beta)\n s = ( a*z + b)/(z + d)\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(K(s))\n else:\n elements.append(matrix(F, 2, [a,b, 1, d]))\n\n if iso_type:\n return(elements, which_group(elements))\n return(elements)", "def is_sqf(f):\n return dmp_sqf_p(f.rep, f.lev, f.dom)", "def sqf_part(self, f):\n domain = self.domain\n\n if domain.is_FiniteField:\n g = self.one\n for f, _ in self.sqf_list(f)[1]:\n g *= f\n\n return g\n\n if not f:\n return f\n\n gcd = f\n for x in self.gens:\n gcd = self.gcd(gcd, f.diff(x))\n sqf = f // gcd\n\n if domain.is_Field:\n return sqf.monic()\n return sqf.primitive()[1]", "def my_evalf(expr, chop=False):\r\n if type(expr) == list:\r\n try:\r\n return [x.evalf(chop=chop) for x in expr]\r\n except:\r\n return expr\r\n try:\r\n return expr.evalf(chop=chop)\r\n except:\r\n return expr", "def sqf_norm(self, f):\n domain = self.domain\n\n if not domain.is_AlgebraicField:\n raise DomainError(f'ground domain must be algebraic, got {domain}')\n\n new_ring = self.to_ground().inject(*domain.symbols, front=True)\n g = domain.mod.set_ring(new_ring)\n s = 0\n\n while True:\n h = f.inject(front=True)\n r = g.resultant(h)\n\n if r.is_squarefree:\n return s, f, r\n f = f.compose({x: x - domain.unit for x in self.gens})\n s += 1", "def _try_heuristics(f):\n if f.is_ground:\n return []\n if f.is_monomial:\n return [S.Zero]*f.degree()\n\n if f.length() == 2:\n if f.degree() == 1:\n return list(map(cancel, roots_linear(f)))\n else:\n return roots_binomial(f)\n\n result = []\n\n for i in [-1, 1]:\n if not f.eval(i):\n f = f.quo(Poly(f.gen - i, f.gen))\n result.append(i)\n break\n\n n = f.degree()\n\n if n == 1:\n result += list(map(cancel, roots_linear(f)))\n elif n == 2:\n result += list(map(cancel, roots_quadratic(f)))\n elif f.is_cyclotomic:\n result += roots_cyclotomic(f)\n elif n == 3 and cubics:\n result += roots_cubic(f, trig=trig)\n elif n == 4 and quartics:\n result += roots_quartic(f)\n elif n == 5 and quintics:\n result += roots_quintic(f)\n\n return result", "def evaluate_polynomial(tropical_matrix, coefficient_list):\n identity_matrix = get_identity_matrix(tropical_matrix.get_dimension())\n sum_list = []\n sum_list.append(identity_matrix.mult_scalar(coefficient_list[0]))\n for i in range(1, len(coefficient_list)):\n sum_list.append(tropical_matrix.mult_scalar(coefficient_list[i]))\n return get_minimum_sum(sum_list)", "def calc_q_square(self):\n return self._q_x()**2 + self._q_z()**2", "def chebyshev_polynomials(adj, k):\n # print(\"Calculating Chebyshev polynomials up to order {}...\".format(k))\n\n adj_normalized = normalize_adj(adj)\n laplacian = sp.eye(adj.shape[0], dtype=adj.dtype) - adj_normalized\n largest_eigval, _ = eigsh(laplacian, 1, which='LM')\n scaled_laplacian = (2. 
/ largest_eigval[0]) * laplacian - sp.eye(adj.shape[0], dtype=adj.dtype)\n\n t_k = list()\n t_k.append(sp.eye(adj.shape[0], dtype=adj.dtype))\n t_k.append(scaled_laplacian)\n\n def chebyshev_recurrence(t_k_minus_one, t_k_minus_two, scaled_lap):\n s_lap = sp.csr_matrix(scaled_lap, copy=True, dtype=adj.dtype)\n return 2 * s_lap.dot(t_k_minus_one) - t_k_minus_two\n\n for i in range(2, k+1):\n t_k.append(chebyshev_recurrence(t_k[-1], t_k[-2], scaled_laplacian))\n\n return t_k", "def free(x):\n _, p = extract_q_p(x)\n return tf.squeeze(0.5 * tf.reduce_sum(tf.square(p), axis=1))", "def from_QQ_sympy(K1, a, K0):\n return None", "def element_from_poly(self, f):\n n, k = self.n, f.degree()\n if k >= n:\n f = f % self.T\n if f == 0:\n return self.zero()\n d, c = dup_clear_denoms(f.rep.rep, QQ, convert=True)\n c = list(reversed(c))\n ell = len(c)\n z = [ZZ(0)] * (n - ell)\n col = to_col(c + z)\n return self(col, denom=d)", "def zzX_eval(f, x):\n if hasattr(x, '__iter__'):\n return zzX_eval_list(f, x)\n\n if poly_univariate_p(f):\n return zzx_eval(f, x)\n\n if not x:\n return poly_TC(f)\n\n result = poly_LC(f)\n\n for coeff in f[1:]:\n result = zzX_mul_const(result, x)\n result = zzX_add(result, coeff)\n\n return result", "def roots(f, *gens,\n auto=True,\n cubics=True,\n trig=False,\n quartics=True,\n quintics=False,\n multiple=False,\n filter=None,\n predicate=None,\n strict=False,\n **flags):\n from sympy.polys.polytools import to_rational_coeffs\n flags = dict(flags)\n\n if isinstance(f, list):\n if gens:\n raise ValueError('redundant generators given')\n\n x = Dummy('x')\n\n poly, i = {}, len(f) - 1\n\n for coeff in f:\n poly[i], i = sympify(coeff), i - 1\n\n f = Poly(poly, x, field=True)\n else:\n try:\n F = Poly(f, *gens, **flags)\n if not isinstance(f, Poly) and not F.gen.is_Symbol:\n raise PolynomialError(\"generator must be a Symbol\")\n f = F\n except GeneratorsNeeded:\n if multiple:\n return []\n else:\n return {}\n else:\n n = f.degree()\n if f.length() == 2 and n > 2:\n # check for foo**n in constant if dep is c*gen**m\n con, dep = f.as_expr().as_independent(*f.gens)\n fcon = -(-con).factor()\n if fcon != con:\n con = fcon\n bases = []\n for i in Mul.make_args(con):\n if i.is_Pow:\n b, e = i.as_base_exp()\n if e.is_Integer and b.is_Add:\n bases.append((b, Dummy(positive=True)))\n if bases:\n rv = roots(Poly((dep + con).xreplace(dict(bases)),\n *f.gens), *F.gens,\n auto=auto,\n cubics=cubics,\n trig=trig,\n quartics=quartics,\n quintics=quintics,\n multiple=multiple,\n filter=filter,\n predicate=predicate,\n **flags)\n return {factor_terms(k.xreplace(\n {v: k for k, v in bases})\n ): v for k, v in rv.items()}\n\n if f.is_multivariate:\n raise PolynomialError('multivariate polynomials are not supported')\n\n def _update_dict(result, zeros, currentroot, k):\n if currentroot == S.Zero:\n if S.Zero in zeros:\n zeros[S.Zero] += k\n else:\n zeros[S.Zero] = k\n if currentroot in result:\n result[currentroot] += k\n else:\n result[currentroot] = k\n\n def _try_decompose(f):\n \"\"\"Find roots using functional decomposition. \"\"\"\n factors, roots = f.decompose(), []\n\n for currentroot in _try_heuristics(factors[0]):\n roots.append(currentroot)\n\n for currentfactor in factors[1:]:\n previous, roots = list(roots), []\n\n for currentroot in previous:\n g = currentfactor - Poly(currentroot, f.gen)\n\n for currentroot in _try_heuristics(g):\n roots.append(currentroot)\n\n return roots\n\n def _try_heuristics(f):\n \"\"\"Find roots using formulas and some tricks. 
\"\"\"\n if f.is_ground:\n return []\n if f.is_monomial:\n return [S.Zero]*f.degree()\n\n if f.length() == 2:\n if f.degree() == 1:\n return list(map(cancel, roots_linear(f)))\n else:\n return roots_binomial(f)\n\n result = []\n\n for i in [-1, 1]:\n if not f.eval(i):\n f = f.quo(Poly(f.gen - i, f.gen))\n result.append(i)\n break\n\n n = f.degree()\n\n if n == 1:\n result += list(map(cancel, roots_linear(f)))\n elif n == 2:\n result += list(map(cancel, roots_quadratic(f)))\n elif f.is_cyclotomic:\n result += roots_cyclotomic(f)\n elif n == 3 and cubics:\n result += roots_cubic(f, trig=trig)\n elif n == 4 and quartics:\n result += roots_quartic(f)\n elif n == 5 and quintics:\n result += roots_quintic(f)\n\n return result\n\n # Convert the generators to symbols\n dumgens = symbols('x:%d' % len(f.gens), cls=Dummy)\n f = f.per(f.rep, dumgens)\n\n (k,), f = f.terms_gcd()\n\n if not k:\n zeros = {}\n else:\n zeros = {S.Zero: k}\n\n coeff, f = preprocess_roots(f)\n\n if auto and f.get_domain().is_Ring:\n f = f.to_field()\n\n # Use EX instead of ZZ_I or QQ_I\n if f.get_domain().is_QQ_I:\n f = f.per(f.rep.convert(EX))\n\n rescale_x = None\n translate_x = None\n\n result = {}\n\n if not f.is_ground:\n dom = f.get_domain()\n if not dom.is_Exact and dom.is_Numerical:\n for r in f.nroots():\n _update_dict(result, zeros, r, 1)\n elif f.degree() == 1:\n _update_dict(result, zeros, roots_linear(f)[0], 1)\n elif f.length() == 2:\n roots_fun = roots_quadratic if f.degree() == 2 else roots_binomial\n for r in roots_fun(f):\n _update_dict(result, zeros, r, 1)\n else:\n _, factors = Poly(f.as_expr()).factor_list()\n if len(factors) == 1 and f.degree() == 2:\n for r in roots_quadratic(f):\n _update_dict(result, zeros, r, 1)\n else:\n if len(factors) == 1 and factors[0][1] == 1:\n if f.get_domain().is_EX:\n res = to_rational_coeffs(f)\n if res:\n if res[0] is None:\n translate_x, f = res[2:]\n else:\n rescale_x, f = res[1], res[-1]\n result = roots(f)\n if not result:\n for currentroot in _try_decompose(f):\n _update_dict(result, zeros, currentroot, 1)\n else:\n for r in _try_heuristics(f):\n _update_dict(result, zeros, r, 1)\n else:\n for currentroot in _try_decompose(f):\n _update_dict(result, zeros, currentroot, 1)\n else:\n for currentfactor, k in factors:\n for r in _try_heuristics(Poly(currentfactor, f.gen, field=True)):\n _update_dict(result, zeros, r, k)\n\n if coeff is not S.One:\n _result, result, = result, {}\n\n for currentroot, k in _result.items():\n result[coeff*currentroot] = k\n\n if filter not in [None, 'C']:\n handlers = {\n 'Z': lambda r: r.is_Integer,\n 'Q': lambda r: r.is_Rational,\n 'R': lambda r: all(a.is_real for a in r.as_numer_denom()),\n 'I': lambda r: r.is_imaginary,\n }\n\n try:\n query = handlers[filter]\n except KeyError:\n raise ValueError(\"Invalid filter: %s\" % filter)\n\n for zero in dict(result).keys():\n if not query(zero):\n del result[zero]\n\n if predicate is not None:\n for zero in dict(result).keys():\n if not predicate(zero):\n del result[zero]\n if rescale_x:\n result1 = {}\n for k, v in result.items():\n result1[k*rescale_x] = v\n result = result1\n if translate_x:\n result1 = {}\n for k, v in result.items():\n result1[k + translate_x] = v\n result = result1\n\n # adding zero roots after non-trivial roots have been translated\n result.update(zeros)\n\n if strict and sum(result.values()) < f.degree():\n raise UnsolvableFactorError(filldedent('''\n Strict mode: some factors cannot be solved in radicals, so\n a complete list of solutions cannot be returned. 
Call\n roots with strict=False to get solutions expressible in\n radicals (if there are any).\n '''))\n\n if not multiple:\n return result\n else:\n zeros = []\n\n for zero in ordered(result):\n zeros.extend([zero]*result[zero])\n\n return zeros", "def squareform(X, force=\"no\", checks=True):\n\n return ssd.squareform(X, force, checks)", "def zzx_compose_term(f, k):\n if k <= 0:\n raise ValueError(\"'k' must be positive, got %s\" % k)\n if k == 1 or not f:\n return f\n\n result = [f[0]]\n\n for coeff in f[1:]:\n result.extend([0]*(k-1))\n result.append(coeff)\n\n return result", "def secuencia(R, Q, q):\r\n n = 1\r\n r = []\r\n for qq in q:\r\n for qqq in qq.eps:\r\n r.append(qqq)\r\n r = sorted(r)\r\n\r\n for l in r:\r\n print('la l', l)\r\n Qaux = []\r\n for j in range(len(Q)):\r\n notaux = []\r\n notaux.append(Q[j][0]+j*l[0])\r\n notaux.append(Q[j][1]+(j+1)*l[0])\r\n notaux.append(Q[j][2])\r\n Qaux.append(notaux)\r\n # print(Qaux)\r\n Qaux[-1][1] = R[-1][1]\r\n dibuja(R, Qaux, n)\r\n n += 1", "def lsquare_of_sums(inlist):\r\n s = sum(inlist)\r\n return float(s)*s", "def root_factors(f, *gens, filter=None, **args):\n args = dict(args)\n\n F = Poly(f, *gens, **args)\n\n if not F.is_Poly:\n return [f]\n\n if F.is_multivariate:\n raise ValueError('multivariate polynomials are not supported')\n\n x = F.gens[0]\n\n zeros = roots(F, filter=filter)\n\n if not zeros:\n factors = [F]\n else:\n factors, N = [], 0\n\n for r, n in ordered(zeros.items()):\n factors, N = factors + [Poly(x - r, x)]*n, N + n\n\n if N < F.degree():\n G = reduce(lambda p, q: p*q, factors)\n factors.append(F.quo(G))\n\n if not isinstance(f, Poly):\n factors = [ f.as_expr() for f in factors ]\n\n return factors", "def squared(num_list):\n new_list=[]\n for num in num_list:\n sq_num=pow(num,2)\n new_list.append(sq_num)\n return new_list", "def Q(self, k, x):\n g = np.asarray(self.g(k, x))\n Q = g @ g.T\n return Q", "def automorphism_group_FF_alg3(rational_function):\n # define ground field and ambient function field\n if rational_function.parent().is_field():\n K = rational_function.parent()\n R = K.ring()\n else:\n R = rational_function.parent()\n K = R.fraction_field()\n\n F = R.base_ring()\n if not F.is_finite() or not F.is_field():\n raise TypeError(\"coefficient ring is not a finite field\")\n p = F.characteristic()\n q = F.cardinality()\n z = R.gen(0)\n phi = K(rational_function)\n f = phi.numerator()\n g = phi.denominator()\n D = max(f.degree(), g.degree())\n\n # For use in the quadratic extension parts of the algorithm\n E = GF(p**(2*F.degree()),'b')\n b = E.gen(0)\n sigma = F.Hom(E)[0]\n S = PolynomialRing(E, 'w')\n w = S.gen(0)\n Phi = rational_function_coerce(phi, sigma, S)\n\n # Compute the set of distinct F-rational and F-quadratic\n # factors of the fixed point polynomial\n fix = R(f(z) - z*g(z))\n linear_fix = gcd(fix, z**q - z);\n quad_temp = fix.quo_rem(linear_fix)[0]\n residual = gcd(quad_temp, z**q - z)\n while residual.degree() > 0:\n quad_temp = quad_temp.quo_rem(residual)[0]\n residual = gcd(quad_temp, z**q - z)\n quadratic_fix = gcd(quad_temp, z**(q**2) - z).factor()\n\n # Compute the set of distinct F-rational fixed points\n linear_fix_pts = [[ x, F(1)] for x in linear_fix.roots(multiplicities=False)]\n if bool(fix.degree() < D+1):\n linear_fix_pts.append( [F(1),F(0)] )\n n1 = len(linear_fix_pts)\n\n # Coerce quadratic factors into a quadratic extension\n quad_fix_factors = [ rational_function_coerce(poly[0], sigma, S) for poly in quadratic_fix]\n n2 = 2*len(quad_fix_factors)\n\n # Collect pre-image 
data as a list L with entries in the form\n # [fixed point y, F-rational pre-images z != y, polynomial defining the pre-images]\n # Note that we remove the fixed point from its pre-image set and its polynomial\n pre_images = []\n for y in linear_fix_pts:\n if y == [F(1),F(0)]:\n Fpre = [ [x,F(1)] for x in g.roots(multiplicities=False) ]\n pre_images.append([y, Fpre, g])\n else:\n Fpre = [ [x,F(1)] for x in (f - y[0]*g).roots(multiplicities=False) if x != y[0]]\n if y[0] == 0 and f.degree() < g.degree():\n Fpre.append([F(1), F(0)]) # infinity is a pre-image of 0\n elif f.degree() == g.degree() and f.leading_coefficient() == y[0]*g.leading_coefficient():\n Fpre.append([F(1), F(0)]) # infinity is a pre-image of y[0]\n # remove y[0] as a root of pre-image polynomial\n h = (f - y[0]*g).quo_rem(z-y[0])[0]\n h_common = gcd(h, z-y[0])\n while h_common.degree() > 0:\n h = h.quo_rem(z-y[0])[0]\n h_common = gcd(h,z-y[0])\n pre_images.append([y, Fpre, h])\n\n # Initialize the set of automorphisms to contain the identity\n automorphisms = [R(z)]\n automorphisms_quad = []\n\n # order p elements\n # An F-rational fixed point has orbit length 1 or p under the action of an element of\n # order p. An F-quadratic fixed point has orbit length p. The set of F-rational\n # pre-images of fixed points decomposes as a union of orbits of length p.\n if n1%p == 1 and n2%p == 0 and sum(len(x[1]) for x in pre_images)%p == 0:\n # Compute total number of distinct fixed points as a final check for order p auts\n factor_list = fix.factor()\n minimal_fix_poly = R(prod(x[0] for x in factor_list))\n n = sum(x[0].degree() for x in factor_list) + bool(fix.degree() < D+1)\n if n%p == 1:\n automorphisms = automorphisms + order_p_automorphisms(phi, pre_images)\n\n ## nontrivial elements with order prime to p ##\n # case of 2 F-rational fixed points\n for pt_pair in combinations(linear_fix_pts, 2):\n x = pt_pair[0]\n y = pt_pair[1]\n automorphisms = automorphisms + automorphisms_fixing_pair(phi, [x,y], False)\n\n # case of 1 F-rational fixed point and an F-rational pre-image\n for y in pre_images:\n for x in y[1]:\n automorphisms = automorphisms + automorphisms_fixing_pair(phi, [x,y[0]], False)\n\n # case of a pair of quadratic fixed points\n for h in quad_fix_factors:\n quad_fix_pts = [ [x,E(1)] for x in h.roots(multiplicities=False)]\n automorphisms_quad = automorphisms_quad + automorphisms_fixing_pair(Phi, quad_fix_pts, True)\n\n phi_2 = phi(phi(z))\n f_2 = phi_2.numerator()\n g_2 = phi_2.denominator()\n\n period_2 = (f_2(z) - z*g_2(z)).quo_rem(fix)[0]\n factor_list_2 = period_2.factor()\n linear_period_2_pts = [[ x, F(1)] for x in period_2.roots(multiplicities=False)]\n if bool(period_2.degree() < D**2-D):\n linear_period_2_pts.append( [F(1),F(0)] )\n quad_period_2_factors = [rational_function_coerce(poly[0], sigma, S) for poly in factor_list_2 if poly[0].degree() == 2]\n # n2 = n1 + 2*len(quad_fix_factors)\n\n # case of a pair of F-rational period 2 points\n linear_period_2_pairs = []\n while len(linear_period_2_pts) > 0:\n x = linear_period_2_pts.pop(-1)\n if x[1] == 1 and g(x[0]) != 0:\n y = [phi(x[0]), F(1)]\n elif x[1] == 1 or f.degree() > g.degree():\n y = [F(1), F(0)]\n elif f.degree() == g.degree():\n y = [f.leading_coefficient() / g.leading_coefficient(), F(1)]\n else:\n y = [F(0), F(1)]\n\n if x != y:\n linear_period_2_pts.remove(y)\n linear_period_2_pairs.append([x,y])\n\n for pt_pair in linear_period_2_pairs:\n automorphisms = automorphisms + automorphisms_fixing_pair(phi, pt_pair, False)\n\n # case of a pair of 
quadratic period 2 points\n for h in quad_period_2_factors:\n pt_pair = [ [x,E(1)] for x in h.roots(multiplicities=False)]\n if Phi(pt_pair[0][0]) == pt_pair[1][0]:\n automorphisms_quad = automorphisms_quad + automorphisms_fixing_pair(Phi, pt_pair, True)\n\n # Descend coefficients of the quadratic guys back to the base field\n for s in automorphisms_quad:\n automorphisms.append(rational_function_coefficient_descent(s, sigma, R))\n\n return automorphisms", "def full_S(self):\n return kron_list([R.T.dot(R) for R in self.Rs])", "def evaluate_poly(poly, x):\n if len(poly) == 1:\n\t\t#base case\n\t\treturn poly[0]\n else:\n #recursive case\n #the first item in the tuple is the coefficient of X**0, so it's the final value\n #the rest of the items in the tuple need multiplied by X and put in new tuple\n #Yes, I'm cheating and casting a list to a tuple. GFY and your immutability.\n return poly[0] + evaluate_poly(tuple([x * coeff for coeff in poly[1:]]), x)", "def skolemize_exists(formula, quantified_varible_list):\n quantified_variable = formula.get_variable()\n quantified_formula = formula.get_formula()\n\n if not quantified_varible_list:\n return skolemize(quantified_formula.substitute_variable(\\\n quantified_variable, ConstantTerm(get_unique_constant())), \\\n quantified_varible_list)\n else:\n new_function = get_unique_function(quantified_varible_list)\n return skolemize(quantified_formula.substitute_variable(\\\n quantified_variable, new_function), quantified_varible_list)", "def skolemize_forall(formula, quantified_varible_list):\n quantified_variable = formula.get_variable()\n quantified_formula = formula.get_formula()\n\n if not quantified_varible_list:\n quantified_varible_list = [quantified_variable]\n else:\n quantified_varible_list.append(quantified_variable)\n return Forall(quantified_variable, skolemize(quantified_formula, \\\n quantified_varible_list))", "def polyfit_2d(Xu,X):\n\txu = Xu[:,0]\n\tyu = Xu[:,1]\n\tX = np.squeeze(X) # an mx1 vector\n\tM = np.squeeze((np.ones(xu.size),xu,yu,xu**2,xu*yu,yu**2,\n\t\txu**3,xu**2*yu,xu*yu**2,yu**3)) # a mxn matrix\n\tM = M.transpose()\n\tprint(\"solving for the polynomial fitting coefficients...\")\n\tK,resid,rnk,svs = np.linalg.lstsq(M,X,rcond=-1) # k has size n\n\tprint(\"residue:%0.8f\trank:%0.8f\"%(np.sum(resid),rnk))\n\treturn K", "def zzX_compose_term(f, K):\n def rec_compose(g, l):\n if poly_univariate_p(g):\n return zzx_compose_term(g, K[l])\n\n if K[l] <= 0:\n raise ValueError(\"All 'K[i]' must be positive, got %s\" % K[l])\n\n g = [ rec_compose(c, l+1) for c in g ]\n result, L = [g[0]], poly_level(g) - 1\n\n for coeff in g[1:]:\n for i in xrange(1, K[l]):\n result.append(zzX_zero(L))\n\n result.append(coeff)\n\n return result\n\n if all([ k == 1 for k in K ]):\n return f\n else:\n return rec_compose(f, 0)", "def zzX_degree_for(f, k):\n if k < 0:\n k += poly_level(f) + 1\n\n if k == 1:\n return zzX_degree(f)\n\n def rec_degree(g, l):\n if l == k:\n return zzX_degree(g)\n else:\n return max([ rec_degree(coeff, l+1) for coeff in g ])\n\n return rec_degree(f, 1)", "def GramSchmidt(A):\r\n n = len(A)\r\n # Finds the number of lists in the list, which is also the number of rows\r\n m = len(A[0])\r\n # Finds the number of elements in list one, which is also the number of columns\r\n V = A\r\n R = [[0]*n for i in range(n)]\r\n # creates an empty list R with dimensions of n rows and n columns\r\n Q = [[0]*m for i in range(n)]\r\n # creates an empty list Q with dimensions of n rows and m columns\r\n inputStatus = True\r\n # inputStatus is 
true at this point until proven otherwise\r\n for i in range(n):\r\n for j in range(m):\r\n if ((type(A[i][j]) != int) and (type(A[i][j]) != float) and (type(A[i][j]) != complex)):\r\n inputStatus = False\r\n print(\"Invalid Input\")\r\n # this checks each value in the matrix A to make sure it is some time of number, if it isnt a number then the input status will be false \r\n # if the input status is false then an error message will be displayed stating that this is an invalid input\r\n if inputStatus == True:\r\n # if the given list does not fall under the previous if statement then the input status will continue to be true and we can continue to find the QR factorization \r\n for i in range(n):\r\n # for loop which continues as long as there are still lists in A \r\n R[i][i] = norm(V[i])\r\n # Creates the border for the upper triangle matrix R, where each value in the diagonal is the 2 norm of the corresponding vector in the original matrix A \r\n Q[i] = unit(V[i])\r\n # Each vector in Q is the unit vector of the corresponding vector in A \r\n for j in range(i+1,n):\r\n # the position j will be 1 more than the position i \r\n R[j][i] = dot(Q[i],V[j])\r\n # The element in R[i+1][i] is the dot product of Q[i] and V[i+1] \r\n temp = scalarmul(R[j][i],Q[i])\r\n # This is the scalar multiplication of R[i+1][i] and Q[i] which will be labeled as temp \r\n V[j] = subtract(V[j],temp)\r\n # V[j] is the difference between the original V[j] and temp \r\n return[Q,R]", "def integerpolynomialfactorization(f):\n cont = f.content()\n prim = f.primitive_part()\n F = [prim]\n G = prim\n c = 0\n one = G.getRing().one\n while (G.differentiate() and F[c] != one):\n deriv = G.differentiate()\n F.append(F[c].subresultant_gcd(deriv))\n c = c + 1\n G = F[c]\n sqfree_part = F[0].pseudo_floordiv(F[0].subresultant_gcd(F[1])).primitive_part()\n N = zassenhaus(sqfree_part)\n\n if cont != 1:\n result = [(one.scalar_mul(cont) ,1)]\n else:\n result = []\n\n F.reverse()\n e = len(F)\n for factor in N:\n for deg, deriv in enumerate(F):\n if not (deriv.pseudo_mod(factor)):\n result.append((factor, (e-deg)))\n break\n return result", "def evaluate_polynomial(f,x):\n degree = len(f)-1\n ans = 0\n for i in f:\n ans += i*x**degree\n degree -= 1\n return(ans)", "def SSHZ(L, teff):\n return [np.sqrt(L/seffi(teff)), np.sqrt(L/seffo(teff))]", "def getPartitionFunction(self, Tlist):\n\t\tQ = np.ones((len(Tlist)), np.float64) / self.symmetry\n\t\t# Active K-rotor\n\t\trotors = [mode for mode in self.modes if isinstance(mode, RigidRotor)]\n\t\tif len(rotors) == 0:\n\t\t\tTrot = constants.h * constants.c * 100.0 * 1.0 / constants.kB\n\t\t\tQ0 = [math.sqrt(T / Trot) for T in Tlist]\n\t\t\tfor i in range(len(Tlist)):\n\t\t\t\tQ[i] *= Q0[i]\n\t\t# Other modes\n\t\tfor mode in self.modes:\n\t\t\tQ0 = mode.getPartitionFunction(Tlist)\n\t\t\tfor i in range(len(Tlist)):\n\t\t\t\tQ[i] *= Q0[i]\n\t\treturn Q", "def numSquares_bfs(self, n):\n q = [0]\n visited = [False for _ in xrange(n+1)]\n\n level = 0\n while q:\n level += 1\n l = len(q)\n for i in xrange(l):\n for j in xrange(1, int(math.sqrt(n))+1):\n nxt = q[i]+j*j\n if nxt <= n and visited[nxt]:\n continue\n elif nxt < n:\n visited[nxt] = True\n q.append(nxt)\n elif nxt == n:\n return level\n else:\n break\n q = q[l:]\n\n return None", "def _rootsFinder(self, fun, jac, bounds, npoints, method):\n if method == \"regular\":\n step = (bounds[1] - bounds[0]) / (npoints + 1)\n try:\n X0 = np.arange(bounds[0] + step, bounds[1], step)\n except:\n X0 = np.random.uniform(bounds[0], bounds[1], 
npoints)\n elif method == \"random\":\n X0 = np.random.uniform(bounds[0], bounds[1], npoints)\n\n def objFun(X, f, jac):\n g = 0\n j = np.zeros(X.shape)\n i = 0\n for x in X:\n fx = f(x)\n g = g + fx**2\n j[i] = 2 * fx * jac(x)\n i = i + 1\n return g, j\n\n opt = minimize(\n lambda X: objFun(X, fun, jac),\n X0,\n method=\"L-BFGS-B\",\n jac=True,\n bounds=[bounds] * len(X0),\n )\n\n X = opt.x\n np.round(X, decimals=5)\n return np.unique(X)", "def gauss_kl(q_mu, q_sqrt, K=None):\n #print(q_sqrt._tf_array)\n #q_sqrt = q_sqrt._tf_array\n #q_mu = q_mu._tf_array\n\n if K is None:\n white = True\n alpha = q_mu\n else:\n white = False\n Lp = tf.cholesky(K)\n alpha = tf.matrix_triangular_solve(Lp, q_mu, lower=True)\n\n if q_sqrt.get_shape().ndims == 2:\n diag = True\n num_latent = tf.shape(q_sqrt)[1]\n NM = tf.size(q_sqrt)\n Lq = Lq_diag = q_sqrt\n elif q_sqrt.get_shape().ndims == 3:\n diag = False\n num_latent = tf.shape(q_sqrt)[2]\n NM = tf.reduce_prod(tf.shape(q_sqrt)[1:])\n Lq = tf.matrix_band_part(tf.transpose(q_sqrt, (2, 0, 1)), -1, 0) # force lower triangle\n Lq_diag = tf.matrix_diag_part(Lq)\n else: # pragma: no cover\n raise ValueError(\"Bad dimension for q_sqrt: %s\" %\n str(q_sqrt.get_shape().ndims))\n\n # Mahalanobis term: μqᵀ Σp⁻¹ μq\n mahalanobis = tf.reduce_sum(tf.square(alpha))\n \n # Constant term: - N x M\n constant = - tf.cast(NM, float_type)\n\n # Log-determinant of the covariance of q(x):\n logdet_qcov = tf.reduce_sum(tf.log(tf.square(Lq_diag)))\n\n # Trace term: tr(Σp⁻¹ Σq)\n if white:\n trace = tf.reduce_sum(tf.square(Lq))\n else:\n if diag:\n M = tf.shape(Lp)[0]\n Lp_inv = tf.matrix_triangular_solve(Lp, tf.eye(M, dtype=float_type), lower=True)\n K_inv = tf.matrix_triangular_solve(tf.transpose(Lp), Lp_inv, lower=False)\n trace = tf.reduce_sum(tf.expand_dims(tf.matrix_diag_part(K_inv), 1) *\n tf.square(q_sqrt))\n else:\n Lp_tiled = tf.tile(tf.expand_dims(Lp, 0), [num_latent, 1, 1])\n LpiLq = tf.matrix_triangular_solve(Lp_tiled, Lq, lower=True)\n trace = tf.reduce_sum(tf.square(LpiLq))\n\n twoKL = mahalanobis + constant - logdet_qcov + trace\n\n # Log-determinant of the covariance of p(x):\n if not white:\n prior_logdet = tf.cast(num_latent, float_type) * tf.reduce_sum(\n tf.log(tf.square(tf.matrix_diag_part(Lp))))\n twoKL += prior_logdet\n\n return 0.5 * twoKL", "def test_LSQ_SLSQP_with_constraints(self, fitter):\n fitter = fitter()\n\n g1 = models.Gaussian1D(100, 5, stddev=1)\n g1.mean.fixed = True\n fslsqp = SLSQPLSQFitter()\n slsqp_model = fslsqp(g1, self.xdata, self.ydata)\n model = fitter(g1, self.xdata, self.ydata)\n assert_allclose(model.parameters, slsqp_model.parameters, rtol=10 ** (-4))", "def force_list(X, V, iparams, blist, L, gamma, kT, dt, rc):\n force_cube = force_list_inner(X, V, iparams, blist, \\\n L, gamma, kT, dt, rc)\n force_cube -= np.transpose(force_cube, (1, 0, 2))\n return np.sum(force_cube, axis=1)", "def cheb_polynomial(L_tilde, K):\n N = L_tilde.shape[0]\n cheb_polynomials = [np.identity(N), L_tilde.copy()]\n for i in range(2, K):\n cheb_polynomials.append(2 * L_tilde * cheb_polynomials[i - 1] - cheb_polynomials[i - 2])\n\n return cheb_polynomials", "def is_squarefree(self, f):\n if f.is_ground:\n return True\n g = f\n for x in self.gens:\n g = self.gcd(g, f.diff(x))\n if g.is_ground:\n return True\n return False", "def buildSumPolySystem(FF, SM3, m, Rx = False):\n \n #number of bounding variables 'U'\n numBoundVars = m - 3\n if Rx == True: #last summation polynomial will be S_3(x_m, u_(m-2), Rx)\n numBoundVars += 1\n SMPR = PolynomialRing(FF, 'x', 
m + numBoundVars, order='degrevlex')\n \n #X-variables\n variablesX = SMPR.objgens()[1][0:m]\n #bounding variables\n variablesU = SMPR.objgens()[1][m:]\n \n generators = [] \n for k in range(0, numBoundVars):\n if k != 0:\n generators.append(SM3(variablesU[k - 1], variablesU[k], variablesX[k + 1]))\n else:\n generators.append(SM3(variablesX[0], variablesX[1], variablesU[0])) \n \n #Hotfix: in case when we don't need a bounding variable <=> only 1 summation polynomial will be used.\n #And is added manually.\n if len(variablesU) == 0:\n variablesU = [variablesX[0]]\n return generators, variablesX, variablesU, SMPR", "def sparsify(f, arg_types, sparse_rules=None):\n os.environ[\"STREE_PYTHON_FALLBACK\"] = \"1\"\n tree = SymbolTree.create(f)\n handler = tree.get_handler()\n sparse_rules = sparse_rules or {}\n sparsify_tree(handler, arg_types, sparse_rules, f)\n os.unsetenv(\"STREE_PYTHON_FALLBACK\")\n return tree.get_network()", "def convert_list(f, parameters):\n variables = f[0].arguments()\n varpar = list(parameters) + list(variables)\n F = symbolic_expression([i(*variables) for i in f]).function(*varpar)\n lis = flatten([fast_callable(i,vars=varpar).op_list() for i in F], max_level=1)\n deflist = []\n stack = []\n const =[]\n stackcomp=[]\n detail=[]\n for i in lis:\n if i[0] == 'load_arg':\n stack.append(varpar[i[1]])\n elif i[0] == 'ipow':\n if i[1] in NN:\n basis = stack[-1]\n for j in range(i[1]-1):\n\t a=stack.pop(-1)\n\t detail.append(('mul', a, basis))\n\t stack.append(a*basis)\n\t stackcomp.append(stack[-1])\n else:\n detail.append(('pow',stack[-1],i[1]))\n stack[-1]=stack[-1]**i[1]\n stackcomp.append(stack[-1])\n\n elif i[0] == 'load_const':\n const.append(i[1])\n stack.append(i[1])\n elif i == 'mul':\n a=stack.pop(-1)\n b=stack.pop(-1)\n detail.append(('mul', a, b))\n stack.append(a*b)\n stackcomp.append(stack[-1])\n\n elif i == 'div':\n a=stack.pop(-1)\n b=stack.pop(-1)\n detail.append(('div', a, b))\n stack.append(b/a)\n stackcomp.append(stack[-1])\n\n elif i == 'add':\n a=stack.pop(-1)\n b=stack.pop(-1)\n detail.append(('add',a,b))\n stack.append(a+b)\n stackcomp.append(stack[-1])\n\n elif i == 'pow':\n a=stack.pop(-1)\n b=stack.pop(-1)\n detail.append(('pow', b, a))\n stack.append(b**a)\n stackcomp.append(stack[-1])\n\n elif i[0] == 'py_call' and str(i[1])=='log':\n a=stack.pop(-1)\n detail.append(('log', a))\n stack.append(log(a))\n stackcomp.append(stack[-1])\n\n elif i[0] == 'py_call' and str(i[1])=='exp':\n a=stack.pop(-1)\n detail.append(('exp', a))\n stack.append(exp(a))\n stackcomp.append(stack[-1])\n\n elif i[0] == 'py_call' and str(i[1])=='sin':\n a=stack.pop(-1)\n detail.append(('sin', a))\n detail.append(('cos', a))\n stackcomp.append(sin(a))\n stackcomp.append(cos(a))\n stack.append(sin(a))\n\n elif i[0] == 'py_call' and str(i[1])=='cos':\n a=stack.pop(-1)\n detail.append(('sin', a))\n detail.append(('cos', a))\n stackcomp.append(sin(a))\n stackcomp.append(cos(a))\n stack.append(cos(a))\n\n elif i == 'neg':\n a = stack.pop(-1)\n detail.append(('mul', -1, a))\n stack.append(-a)\n stackcomp.append(-a)\n\n return stackcomp,detail", "def BFT(tree):\n queue = [tree]\n bft_nodelist = []\n while queue:\n node = queue.pop(0)\n bft_nodelist.append(node)\n queue += node.nodelist\n return bft_nodelist", "def kets(self: Qss) -> Qss:\n\n new_qs = []\n \n for qs in self.qss:\n new_qs.append(qs.ket())\n\n return Qss(new_qs)", "def zzx_mul_term(f, c, k):\n if not c or not f:\n return []\n else:\n return [ c * coeff for coeff in f ] + [INT_ZERO]*k", "def 
lchisquare(f_obs,f_exp=None):\r\n k = len(f_obs) # number of groups\r\n if f_exp == None:\r\n f_exp = [sum(f_obs)/float(k)] * len(f_obs) # create k bins with = freq.\r\n chisq = 0\r\n for i in range(len(f_obs)):\r\n chisq = chisq + (f_obs[i]-f_exp[i])**2 / float(f_exp[i])\r\n return chisq, chisqprob(chisq, k-1)", "def zzX_reduce(f):\n if zzX_zero_p(f):\n return (1,)*poly_level(f), f\n\n F, H = zzX_to_dict(f), {}\n\n def ilgcd(M):\n g = 0\n\n for m in M:\n g = igcd(g, m)\n\n if g == 1:\n break\n\n return g or 1\n\n M = tuple(map(lambda *row: ilgcd(row), *F.keys()))\n\n if all([ b == 1 for b in M ]):\n return M, f\n\n for m, coeff in F.iteritems():\n N = [ a // b for a, b in zip(m, M) ]\n H[tuple(N)] = coeff\n\n return M, zzX_from_dict(H, len(M))", "def qfunc(x):\n # Error check inputs\n if isinstance(x, np.ndarray):\n if x.dtype == np.complex128:\n raise TypeError(\"complex input not supported\")\n else:\n if isinstance(x, complex):\n raise TypeError(\"complex input not supported\")\n\n Q = 0.5 * erfc(x / np.sqrt(2.0))\n return Q", "def sorm(func, dist_list, init_search_point, alg): \n def SLSQP(func, dist_list, init_search_point):\n \n dim = len(dist_list)\n current_beta = 0\n new_beta = 1\n sig = np.empty((1, dim))\n mu = np.empty((1, dim))\n new_search_point = np.array(init_search_point).reshape((1, dim))\n \n def f_l(x_l):\n return(func([x_l[i,:]*sig[0,i] + mu[0,i] for i in range(0, dim)]))\n \n while abs(current_beta-new_beta) > 0.001:\n current_search_point = new_search_point\n current_beta = new_beta\n for i in range(0, dim):\n if dist_list[i][1] != 'norm':\n mu[0,i], sig[0, i] = Rosenblatt_Transform(dist_list[i][0], current_search_point[0,i])\n else:\n mu[0,i], sig[0, i] = dist_list[i][0].mean(), dist_list[i][0].std()\n \n dist_fun = lambda u: np.linalg.norm(u) \n \n alg = 'SLSQP'\n \n H = lambda u: f_l(u)\n cons = ({'type': 'eq', 'fun': lambda u: -(H(u.reshape(-1,1)))})\n \n result = scipy.optimize.minimize(dist_fun, x0 = current_search_point, constraints = cons, method=alg)\n \n new_beta = result.fun\n u = np.array(result.x).reshape((1,dim))\n \n new_search_point = np.empty((1, dim))\n for i in range(0, dim):\n new_search_point[0,i] = mu[0,i] + u[0,i]*sig[0,i]\n \n beta_value = new_beta \n p_f = sst.norm.cdf(-beta_value)\n iterations = result.nit\n u = result.x\n x = u[:]*sig[0,:] + mu[0,:]\n grad_val = scipy.optimize.approx_fprime(x, func, 0.00000001)\n grad_val = grad_val.reshape((1, dim))\n \n sum1 = np.sum((grad_val[0,:]**2)*(sig[0,:]**2))\n cosines = np.empty((1, dim))\n \n for i in range(0, dim):\n cosines[0,i] = grad_val[0,i]*sig[0,i]/np.sqrt(sum1) \n \n return(beta_value, p_f, x, u, mu, sig, cosines, iterations) \n \n def HL_R(func, dist_list, init_search_point):\n \n iterations = 0\n cur_beta = 3\n new_beta = 0\n dim = len(dist_list)\n global_mean_arr = np.empty((1, dim))\n global_std_arr = np.empty((1, dim))\n new_search_point = np.array(init_search_point).reshape((1, dim))\n \n while abs(cur_beta - new_beta) > 0.001:\n cur_beta = new_beta\n cur_cosines = np.zeros((1, dim))\n new_cosines = np.ones((1, dim))\n \n while max((abs(cur_cosines - new_cosines))[0]) > 0.005:\n \n cur_cosines = new_cosines\n \n cur_search_point = new_search_point\n \n for i in range(0, dim):\n if dist_list[i][1] != 'norm':\n global_mean_arr[0, i], global_std_arr[0, i] = Rosenblatt_Transform(dist_list[i][0], cur_search_point[0,i])\n else:\n global_mean_arr[0, i], global_std_arr[0, i] = dist_list[i][0].mean(), dist_list[i][0].std()\n \n \n grad_val = 
scipy.optimize.approx_fprime(cur_search_point[0], func, 0.00000001)\n grad_val = grad_val.reshape((1, dim))\n \n sum1 = np.sum((grad_val[0,:]**2)*(global_std_arr[0,:]**2))\n cosines = np.empty((1, dim))\n \n for i in range(0, dim):\n cosines[0,i] = grad_val[0,i]*global_std_arr[0,i]/np.sqrt(sum1)\n \n new_cosines = cosines\n new_search_point = np.empty((1, dim))\n for i in range(0, dim):\n new_search_point[0,i] = global_mean_arr[0,i] - new_cosines[0,i]*global_std_arr[0,i]*cur_beta\n \n iterations = iterations + 1\n \n \n B = Symbol('B')\n coordinates = []\n for i in range(0, dim):\n coordinates.append(global_mean_arr[0, i] - new_cosines[0,i]*global_std_arr[0, i]*B)\n new_beta = float(solve(func(coordinates), B)[0])\n \n cosines = new_cosines \n beta_value = new_beta\n p_f = sst.norm.cdf(-new_beta)\n x = new_search_point\n u = (x[0,:] - global_mean_arr[0,:])/global_std_arr\n \n return(beta_value, p_f, x, u, global_mean_arr, global_std_arr, cosines, iterations)\n \n def HL_RF(func, dist_list, init_search_point):\n\n cur_beta = 3\n new_beta = 0\n dim = len(dist_list)\n\n new_search_point = np.array(init_search_point).reshape((1, dim))\n iterations = 0\n while abs(cur_beta - new_beta) > 0.001 and abs(func(new_search_point[0])) > 0.001:\n global_mean_arr = np.empty((1, dim))\n global_std_arr = np.empty((1, dim))\n cur_beta = new_beta\n cur_search_point = new_search_point\n \n for i in range(0, dim):\n if dist_list[i][1] != 'norm':\n global_mean_arr[0,i], global_std_arr[0, i] = Rosenblatt_Transform(dist_list[i][0], cur_search_point[0,i])\n else:\n global_mean_arr[0,i], global_std_arr[0, i] = dist_list[i][0].mean(), dist_list[i][0].std()\n \n f_val = func(cur_search_point[0])\n \n x_ast = np.empty((1, dim))\n for i in range(0, dim):\n x_ast[0,i] =(cur_search_point[0,i] - global_mean_arr[0,i])/global_std_arr[0,i]\n\n grad_val = scipy.optimize.approx_fprime(cur_search_point[0], func, 0.000001)\n grad_val = grad_val.reshape((1, dim)) \n \n grad_val_ast = np.empty(grad_val.shape)\n for i in range(0, dim):\n grad_val_ast[0,i] = grad_val[0,i]*global_std_arr[0,i]\n \n t1 = 1/np.sum(grad_val_ast[0,:]**2)\n\n t2 = sum(grad_val_ast[0,:]*x_ast[0,:]) - f_val\n \n t3 = t1*t2\n \n new_x_ast = np.empty(x_ast.shape)\n for i in range(0, dim):\n new_x_ast[0,i] = t3*grad_val_ast[0,i]\n u = new_x_ast\n new_beta = np.linalg.norm(new_x_ast)\n \n new_search_point = np.empty((1, dim))\n for i in range(0, dim):\n new_search_point[0,i] = new_x_ast[0,i]*global_std_arr[0,i] + global_mean_arr[0,i]\n iterations = iterations + 1\n \n grad_val_ast_sum = sum(grad_val_ast[0,:]**2)\n cosines = grad_val_ast/(grad_val_ast_sum**0.5)\n beta_value = new_beta\n x = new_search_point\n p_f = sst.norm.cdf(-beta_value)\n \n return(beta_value, p_f, x, u, global_mean_arr, global_std_arr, cosines, iterations)\n \n if alg == 'slsqp':\n beta_value, p_f, x, u, mu, sig, cosines, iterations = SLSQP(func, dist_list, init_search_point)\n elif alg == 'HL-R':\n beta_value, p_f, x, u, mu, sig, cosines, iterations = HL_R(func, dist_list, init_search_point)\n elif alg == 'HL-RF':\n beta_value, p_f, x, u, mu, sig, cosines, iterations = HL_RF(func, dist_list, init_search_point)\n \n d = len(dist_list)\n\n R0 = np.eye(d)\n \n for i in range(0, d):\n R0[-1,i] = cosines[0,i]\n \n Q, R = scipy.linalg.rq(R0)\n \n def f_l(x_l):\n return(func([x_l[i]*sig[0,i] + mu[0,i] for i in range(0, d)]))\n \n x = np.array(x).reshape((1, -1))\n u = x[0,:]*sig[0,:] + mu[0,:]\n \n H = nd.Hessian(f_l)(u)\n \n grad_val_standard = (scipy.optimize.approx_fprime(x[0], func, 
0.00000001)[:])*(sig[0,:])\n \n dist_standard = np.linalg.norm(grad_val_standard)\n \n A_1 = 1/dist_standard\n R_transp = np.transpose(R)\n A_2 = R.dot(H)\n A_3 = A_2.dot(R_transp)\n \n A = A_3.dot(A_1)\n \n A = A[0:-1, 0:-1]\n \n k = np.linalg.eig(A)[0]\n \n prod_arr = np.empty((1, len(k)))\n for i in range(0, len(k)):\n prod_arr[0,i] = (1 + beta_value*k[i])**-0.5\n \n p_f_sorm = p_f*np.prod(prod_arr)\n beta_sorm = -1*scipy.stats.norm.ppf(p_f_sorm)\n \n print('-------------------------')\n print('Second-Order Reliability Analysis')\n print('Algorithm:',alg,'solver')\n print('Iterations: {}\\nReliability index = {}\\nProbability of failure = {}'.format(iterations, beta_sorm, p_f_sorm))\n print('-------------------------')\n \n return(beta_sorm, p_f_sorm)", "def sum_squared(variable_list):\n return sum([el * el for el in variable_list])", "def zzX_value(l, f):\n if type(f) is not list:\n return zzX_const(l, f)\n else:\n if not l:\n return f\n else:\n return [zzX_value(l-1, f)]", "def Min(Fun, p, ubRes, conj):\n d = Fun.degree()\n AffFun = Fun.dehomogenize(1)\n R = AffFun.coordinate_ring()\n if R.is_field():\n #want the polynomial ring not the fraction field\n R = R.ring()\n F = R(AffFun[0].numerator())\n G = R(AffFun[0].denominator())\n dG = G.degree()\n if dG > (d+1)/2:\n lowerBound = (-2*(G[dG]).valuation(p)/(2*dG - d + 1) + 1).floor()\n else:\n lowerBound = (-2*(F[d]).valuation(p)/(d-1) + 1).floor()\n upperBound = 2*(ubRes.valuation(p))\n\n if upperBound < lowerBound:\n #There are no possible transformations to reduce the resultant.\n return Fun,conj\n else:\n #Looping over each possible k, we search for transformations to reduce the\n #resultant of F/G\n k = lowerBound\n Qb = PolynomialRing(QQ,'b')\n b = Qb.gen(0)\n Q = PolynomialRing(Qb,'z')\n z = Q.gen(0)\n while k <= upperBound:\n A = (p**k)*z + b\n Ft = Q(F(A) - b*G(A))\n Gt = Q((p**k)*G(A))\n Fcoeffs = Ft.coefficients(sparse=False)\n Gcoeffs = Gt.coefficients(sparse=False)\n coeffs = Fcoeffs + Gcoeffs\n RHS = (d + 1)*k/2\n #If there is some b such that Res(phi^A) < Res(phi), we must have ord_p(c) >\n #RHS for each c in coeffs.\n #Make sure constant coefficients in coeffs satisfy the inequality.\n if all( QQ(c).valuation(p) > RHS for c in coeffs if c.degree() ==0 ):\n #Constant coefficients in coeffs have large enough valuation, so check\n #the rest. We start by checking if simply picking b=0 works\n if all(c(0).valuation(p) > RHS for c in coeffs):\n #A = z*p^k satisfies the inequalities, and F/G is not minimal\n #\"Conjugating by\", p,\"^\", k, \"*z +\", 0\n newconj = matrix(QQ,2,2,[p**k,0,0,1])\n minFun = Fun.conjugate(newconj)\n conj = conj*newconj\n minFun.normalize_coordinates()\n return minFun, conj\n\n #Otherwise we search if any value of b will work. We start by finding a\n #minimum bound on the valuation of b that is necessary. See Theorem 3.3.5\n #in [Molnar, M.Sc. thesis].\n bval = max([bCheck(coeff,RHS,p,b) for coeff in coeffs if coeff.degree() > 0])\n\n #We scale the coefficients in coeffs, so that we may assume ord_p(b) is\n #at least 0\n scaledCoeffs = [coeff(b*(p**bval)) for coeff in coeffs]\n\n #We now scale the inequalities, ord_p(coeff) > RHS, so that coeff is in\n #ZZ[b]\n scale = QQ(max([coeff.denominator() for coeff in scaledCoeffs]))\n normalizedCoeffs = [coeff*scale for coeff in scaledCoeffs]\n scaleRHS = RHS + scale.valuation(p)\n\n #We now search for integers that satisfy the inequality ord_p(coeff) >\n #RHS. See Lemma 3.3.6 in [Molnar, M.Sc. 
thesis].\n bound = (scaleRHS+1).floor()\n bool,sol = blift(normalizedCoeffs,bound,p)\n\n #If bool is true after lifting, we have a solution b, and F/G is not\n #minimal.\n if bool:\n #Rescale, conjugate and return new map\n bsol = QQ(sol*(p**bval))\n #\"Conjugating by \", p,\"^\", k, \"*z +\", bsol\n newconj = matrix(QQ,2,2,[p**k,bsol,0,1])\n minFun = Fun.conjugate(newconj)\n conj = conj*newconj\n\n minFun.normalize_coordinates()\n return minFun, conj\n k = k + 1\n return Fun, conj" ]
[ "0.58898497", "0.5696768", "0.55792755", "0.5505899", "0.53948593", "0.53656983", "0.51418346", "0.51252043", "0.5098417", "0.50928617", "0.50540054", "0.5037502", "0.49890068", "0.48362672", "0.48262566", "0.48185673", "0.4812812", "0.48055914", "0.47933468", "0.47783032", "0.47128808", "0.46580333", "0.4643045", "0.46047652", "0.45826858", "0.45611826", "0.45544285", "0.45417157", "0.45258874", "0.4525435", "0.4515066", "0.4501446", "0.4498214", "0.4475793", "0.44739345", "0.4473146", "0.44538298", "0.44409278", "0.44353056", "0.44353056", "0.44331843", "0.44105673", "0.44086313", "0.44077826", "0.440303", "0.4400325", "0.43901864", "0.43899956", "0.43662187", "0.43612438", "0.43593833", "0.43585038", "0.4355368", "0.43434912", "0.43408465", "0.4336677", "0.43350223", "0.4331704", "0.4325861", "0.4315973", "0.4314618", "0.4294824", "0.42947912", "0.42930356", "0.42873013", "0.4283879", "0.42741436", "0.427137", "0.42678386", "0.42651483", "0.4254541", "0.42520082", "0.4250947", "0.424295", "0.42369068", "0.42359343", "0.42315596", "0.4224182", "0.42214292", "0.42184386", "0.42134973", "0.4212545", "0.42082003", "0.42072257", "0.41979986", "0.41936392", "0.41915646", "0.41833448", "0.4183296", "0.41816655", "0.417956", "0.41748393", "0.41731194", "0.41672352", "0.41634607", "0.41624185", "0.4155199", "0.41527143", "0.41482526", "0.41465145" ]
0.55302864
3
Compute squarefree decomposition of the monic ``f`` in ``GF(q)[X]``.

Notes
=====

Uses a modified version of Musser's algorithm for squarefree
decomposition of univariate polynomials over finite fields.

References
==========
def _gf_sqf_list(self, f):
    domain = self.domain

    n, factors, p = 1, [], int(domain.characteristic)
    m = int(domain.order // p)

    while not f.is_ground:
        df = [f.diff(x) for x in self.gens]

        if any(_ for _ in df):
            g = f
            for q in df:
                g = self.gcd(g, q)
            h, f, i = f // g, g, 1

            while h != 1:
                g = self.gcd(f, h)
                h //= g

                if not h.is_ground:
                    factors.append((h, i*n))

                f //= g
                h = g
                i += 1

        n *= p

        g = self.zero
        for monom, coeff in f.items():
            g[tuple(_ // p for _ in monom)] = coeff**m
        f = g

    return factors
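A minimal, hypothetical usage sketch of the same squarefree decomposition, added for illustration only: it goes through SymPy's public Poly.sqf_list API over GF(5) rather than the internal _gf_sqf_list method above, assumes SymPy is installed, and the expected output noted in the comments is an assumption about ordering and printing.

from sympy import Poly, symbols

x = symbols('x')

# (x + 1)**2 * (x + 2)**5 over GF(5): the factor (x + 2) appears with multiplicity p = 5,
# which exercises the p-th-root step (coeff**m, exponents // p) of the method above.
f = Poly((x + 1)**2 * (x + 2)**5, x, modulus=5)

coeff, factors = f.sqf_list()
print(coeff)    # leading coefficient of the decomposition
print(factors)  # expected (assumed order): [(Poly(x + 1, x, modulus=5), 2), (Poly(x + 2, x, modulus=5), 5)]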
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_squarefree(self, f):\n if f.is_ground:\n return True\n g = f\n for x in self.gens:\n g = self.gcd(g, f.diff(x))\n if g.is_ground:\n return True\n return False", "def sqf_part(self, f):\n domain = self.domain\n\n if domain.is_FiniteField:\n g = self.one\n for f, _ in self.sqf_list(f)[1]:\n g *= f\n\n return g\n\n if not f:\n return f\n\n gcd = f\n for x in self.gens:\n gcd = self.gcd(gcd, f.diff(x))\n sqf = f // gcd\n\n if domain.is_Field:\n return sqf.monic()\n return sqf.primitive()[1]", "def zzX_sqf_p(f):\n return zzX_one_p(zzX_gcd(zzX_primitive(f)[1], zzX_diff(f)))", "def zzX_sqf_part(f):\n quo = zzX_quo(f, zzX_gcd(f, zzX_diff(f)))\n return zzX_primitive(quo)[1]", "def sqf_norm(self, f):\n domain = self.domain\n\n if not domain.is_AlgebraicField:\n raise DomainError(f'ground domain must be algebraic, got {domain}')\n\n new_ring = self.to_ground().inject(*domain.symbols, front=True)\n g = domain.mod.set_ring(new_ring)\n s = 0\n\n while True:\n h = f.inject(front=True)\n r = g.resultant(h)\n\n if r.is_squarefree:\n return s, f, r\n f = f.compose({x: x - domain.unit for x in self.gens})\n s += 1", "def zzx_sqf_part(f):\n quo = zzx_quo(f, zzx_gcd(f, zzx_diff(f)))\n return zzx_primitive(quo)[1]", "def roots_quintic(f):\n result = []\n\n coeff_5, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n if not all(coeff.is_Rational for coeff in (coeff_5, coeff_4, p_, q_, r_, s_)):\n return result\n\n if coeff_5 != 1:\n f = Poly(f / coeff_5)\n _, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n # Cancel coeff_4 to form x^5 + px^3 + qx^2 + rx + s\n if coeff_4:\n p = p_ - 2*coeff_4*coeff_4/5\n q = q_ - 3*coeff_4*p_/5 + 4*coeff_4**3/25\n r = r_ - 2*coeff_4*q_/5 + 3*coeff_4**2*p_/25 - 3*coeff_4**4/125\n s = s_ - coeff_4*r_/5 + coeff_4**2*q_/25 - coeff_4**3*p_/125 + 4*coeff_4**5/3125\n x = f.gen\n f = Poly(x**5 + p*x**3 + q*x**2 + r*x + s)\n else:\n p, q, r, s = p_, q_, r_, s_\n\n quintic = PolyQuintic(f)\n\n # Eqn standardized. 
Algo for solving starts here\n if not f.is_irreducible:\n return result\n f20 = quintic.f20\n # Check if f20 has linear factors over domain Z\n if f20.is_irreducible:\n return result\n # Now, we know that f is solvable\n for _factor in f20.factor_list()[1]:\n if _factor[0].is_linear:\n theta = _factor[0].root(0)\n break\n d = discriminant(f)\n delta = sqrt(d)\n # zeta = a fifth root of unity\n zeta1, zeta2, zeta3, zeta4 = quintic.zeta\n T = quintic.T(theta, d)\n tol = S(1e-10)\n alpha = T[1] + T[2]*delta\n alpha_bar = T[1] - T[2]*delta\n beta = T[3] + T[4]*delta\n beta_bar = T[3] - T[4]*delta\n\n disc = alpha**2 - 4*beta\n disc_bar = alpha_bar**2 - 4*beta_bar\n\n l0 = quintic.l0(theta)\n Stwo = S(2)\n l1 = _quintic_simplify((-alpha + sqrt(disc)) / Stwo)\n l4 = _quintic_simplify((-alpha - sqrt(disc)) / Stwo)\n\n l2 = _quintic_simplify((-alpha_bar + sqrt(disc_bar)) / Stwo)\n l3 = _quintic_simplify((-alpha_bar - sqrt(disc_bar)) / Stwo)\n\n order = quintic.order(theta, d)\n test = (order*delta.n()) - ( (l1.n() - l4.n())*(l2.n() - l3.n()) )\n # Comparing floats\n if not comp(test, 0, tol):\n l2, l3 = l3, l2\n\n # Now we have correct order of l's\n R1 = l0 + l1*zeta1 + l2*zeta2 + l3*zeta3 + l4*zeta4\n R2 = l0 + l3*zeta1 + l1*zeta2 + l4*zeta3 + l2*zeta4\n R3 = l0 + l2*zeta1 + l4*zeta2 + l1*zeta3 + l3*zeta4\n R4 = l0 + l4*zeta1 + l3*zeta2 + l2*zeta3 + l1*zeta4\n\n Res = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n Res_n = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n\n # Simplifying improves performance a lot for exact expressions\n R1 = _quintic_simplify(R1)\n R2 = _quintic_simplify(R2)\n R3 = _quintic_simplify(R3)\n R4 = _quintic_simplify(R4)\n\n # hard-coded results for [factor(i) for i in _vsolve(x**5 - a - I*b, x)]\n x0 = z**(S(1)/5)\n x1 = sqrt(2)\n x2 = sqrt(5)\n x3 = sqrt(5 - x2)\n x4 = I*x2\n x5 = x4 + I\n x6 = I*x0/4\n x7 = x1*sqrt(x2 + 5)\n sol = [x0, -x6*(x1*x3 - x5), x6*(x1*x3 + x5), -x6*(x4 + x7 - I), x6*(-x4 + x7 + I)]\n\n R1 = R1.as_real_imag()\n R2 = R2.as_real_imag()\n R3 = R3.as_real_imag()\n R4 = R4.as_real_imag()\n\n for i, s in enumerate(sol):\n Res[1][i] = _quintic_simplify(s.xreplace({z: R1[0] + I*R1[1]}))\n Res[2][i] = _quintic_simplify(s.xreplace({z: R2[0] + I*R2[1]}))\n Res[3][i] = _quintic_simplify(s.xreplace({z: R3[0] + I*R3[1]}))\n Res[4][i] = _quintic_simplify(s.xreplace({z: R4[0] + I*R4[1]}))\n\n for i in range(1, 5):\n for j in range(5):\n Res_n[i][j] = Res[i][j].n()\n Res[i][j] = _quintic_simplify(Res[i][j])\n r1 = Res[1][0]\n r1_n = Res_n[1][0]\n\n for i in range(5):\n if comp(im(r1_n*Res_n[4][i]), 0, tol):\n r4 = Res[4][i]\n break\n\n # Now we have various Res values. Each will be a list of five\n # values. We have to pick one r value from those five for each Res\n u, v = quintic.uv(theta, d)\n testplus = (u + v*delta*sqrt(5)).n()\n testminus = (u - v*delta*sqrt(5)).n()\n\n # Evaluated numbers suffixed with _n\n # We will use evaluated numbers for calculation. 
Much faster.\n r4_n = r4.n()\n r2 = r3 = None\n\n for i in range(5):\n r2temp_n = Res_n[2][i]\n for j in range(5):\n # Again storing away the exact number and using\n # evaluated numbers in computations\n r3temp_n = Res_n[3][j]\n if (comp((r1_n*r2temp_n**2 + r4_n*r3temp_n**2 - testplus).n(), 0, tol) and\n comp((r3temp_n*r1_n**2 + r2temp_n*r4_n**2 - testminus).n(), 0, tol)):\n r2 = Res[2][i]\n r3 = Res[3][j]\n break\n if r2 is not None:\n break\n else:\n return [] # fall back to normal solve\n\n # Now, we have r's so we can get roots\n x1 = (r1 + r2 + r3 + r4)/5\n x2 = (r1*zeta4 + r2*zeta3 + r3*zeta2 + r4*zeta1)/5\n x3 = (r1*zeta3 + r2*zeta1 + r3*zeta4 + r4*zeta2)/5\n x4 = (r1*zeta2 + r2*zeta4 + r3*zeta1 + r4*zeta3)/5\n x5 = (r1*zeta1 + r2*zeta2 + r3*zeta3 + r4*zeta4)/5\n result = [x1, x2, x3, x4, x5]\n\n # Now check if solutions are distinct\n\n saw = set()\n for r in result:\n r = r.n(2)\n if r in saw:\n # Roots were identical. Abort, return []\n # and fall back to usual solve\n return []\n saw.add(r)\n\n # Restore to original equation where coeff_4 is nonzero\n if coeff_4:\n result = [x - coeff_4 / 5 for x in result]\n return result", "def sqf_part(f):\n return f.per(dmp_sqf_part(f.rep, f.lev, f.dom))", "def zzx_sqf_p(f):\n return zzx_one_p(zzx_gcd(zzx_primitive(f)[1], zzx_diff(f)))", "def vsfun(Q_slm, theta, phi,f=[]):\n vsf_th=numpy.zeros(theta.shape, dtype='complex')\n vsf_ph=numpy.zeros(theta.shape, dtype='complex')\n for (s,l,m) in Q_slm:\n vsh_th,vsh_ph=K(s, l, m, theta, phi)\n c_slm=Q_slm.getBysnm(s, l, m) if not(f) else Q_slm.getBysnm(s, l, m)(f)\n vsf_th=vsf_th+c_slm*vsh_th\n vsf_ph=vsf_ph+c_slm*vsh_ph\n return vsf_th, vsf_ph", "def quo(f, g):\n lev, dom, per, F, G = f.unify(g)\n return per(dmp_quo(F, G, lev, dom))", "def zzx_factor_sqf(f, **flags):\n cont, g = zzx_primitive(f)\n\n n = zzx_degree(g)\n\n if n <= 0:\n return cont, []\n\n if poly_LC(g) < 0:\n cont, g = -cont, zzx_neg(g)\n\n if n == 1 or zzx_eisenstein(g):\n return cont, [(g, 1)]\n\n factors = []\n\n if flags.get('cyclotomic', True):\n factors = zzx_cyclotomic_factor(g)\n\n if factors is None:\n factors = zzx_zassenhaus(g)\n\n def compare(f_a, f_b):\n i = len(f_a) - len(f_b)\n\n if not i:\n return cmp(f_a, f_b)\n else:\n return i\n\n return cont, sorted(factors, compare)", "def _rr_yun0_sqf_list(self, f):\n if f.is_ground:\n return []\n\n result, count = [], 1\n qs = [f.diff(x) for x in self.gens]\n\n g = f\n for q in qs:\n g = self.gcd(g, q)\n\n while f != 1:\n qs = [q // g for q in qs]\n f //= g\n qs = [q - f.diff(x) for x, q in zip(self.gens, qs)]\n\n g = f\n for q in qs:\n g = self.gcd(g, q)\n if g != 1:\n result.append((g, count))\n\n count += 1\n\n return result", "def form_factor( # pylint: disable=arguments-differ\n self, q: float, s: RealOrRealArray, t: RealOrRealArray, couplings: Couplings\n ) -> ComplexOrComplexArray:\n q2 = q**2 * 1e-6\n ss = s * 1e-6\n tt = t * 1e-6\n uu = q2 + MPI0_GEV**2 + 2 * MPI_GEV**2 - ss - tt\n\n ff = self.__form_factor(q2=q2, s=ss, t=tt, u=uu, couplings=couplings)\n return ff * 1e-9", "def sqr(f):\n return f.per(dmp_sqr(f.rep, f.lev, f.dom))", "def zzX_sqr(f):\n if poly_univariate_p(f):\n return zzx_sqr(f)\n\n if zzX_zero_p(f):\n return f\n\n df = zzX_degree(f)\n l = poly_level(f)-1\n\n h = []\n\n for i in xrange(0, 2*df+1):\n coeff = zzX_zero(l)\n\n jmin = max(0, i-df)\n jmax = min(i, df)\n\n n = jmax - jmin + 1\n\n jmax = jmin + n // 2 - 1\n\n for j in xrange(jmin, jmax+1):\n coeff = zzX_add(coeff, zzX_mul(f[j], f[i-j]))\n\n coeff = zzX_mul_const(coeff, 2)\n\n if n & 1:\n elem 
= zzX_sqr(f[jmax+1])\n coeff = zzX_add(coeff, elem)\n\n h.append(coeff)\n\n return h", "def compute_clique_potentials(self,F):\r\n\r\n for i in self.nodes():\r\n self.node[i]['fac'] = factor([],[],[])\r\n \r\n for f in F.factors: # assign each factor to a clique\r\n for j,data in self.nodes_iter(data=True):\r\n if len(scipy.setdiff1d(f.var,data['clique']) ) ==0:\r\n self.node[j]['fac'] *= f\r\n self.nop += scipy.prod(self.node[j]['fac'].card)\r\n break", "def roots_quartic(f):\n _, a, b, c, d = f.monic().all_coeffs()\n\n if not d:\n return [S.Zero] + roots([1, a, b, c], multiple=True)\n elif (c/a)**2 == d:\n x, m = f.gen, c/a\n\n g = Poly(x**2 + a*x + b - 2*m, x)\n\n z1, z2 = roots_quadratic(g)\n\n h1 = Poly(x**2 - z1*x + m, x)\n h2 = Poly(x**2 - z2*x + m, x)\n\n r1 = roots_quadratic(h1)\n r2 = roots_quadratic(h2)\n\n return r1 + r2\n else:\n a2 = a**2\n e = b - 3*a2/8\n f = _mexpand(c + a*(a2/8 - b/2))\n aon4 = a/4\n g = _mexpand(d - aon4*(a*(3*a2/64 - b/4) + c))\n\n if f.is_zero:\n y1, y2 = [sqrt(tmp) for tmp in\n roots([1, e, g], multiple=True)]\n return [tmp - aon4 for tmp in [-y1, -y2, y1, y2]]\n if g.is_zero:\n y = [S.Zero] + roots([1, 0, e, f], multiple=True)\n return [tmp - aon4 for tmp in y]\n else:\n # Descartes-Euler method, see [7]\n sols = _roots_quartic_euler(e, f, g, aon4)\n if sols:\n return sols\n # Ferrari method, see [1, 2]\n p = -e**2/12 - g\n q = -e**3/108 + e*g/3 - f**2/8\n TH = Rational(1, 3)\n\n def _ans(y):\n w = sqrt(e + 2*y)\n arg1 = 3*e + 2*y\n arg2 = 2*f/w\n ans = []\n for s in [-1, 1]:\n root = sqrt(-(arg1 + s*arg2))\n for t in [-1, 1]:\n ans.append((s*w - t*root)/2 - aon4)\n return ans\n\n # whether a Piecewise is returned or not\n # depends on knowing p, so try to put\n # in a simple form\n p = _mexpand(p)\n\n\n # p == 0 case\n y1 = e*Rational(-5, 6) - q**TH\n if p.is_zero:\n return _ans(y1)\n\n # if p != 0 then u below is not 0\n root = sqrt(q**2/4 + p**3/27)\n r = -q/2 + root # or -q/2 - root\n u = r**TH # primary root of solve(x**3 - r, x)\n y2 = e*Rational(-5, 6) + u - p/u/3\n if fuzzy_not(p.is_zero):\n return _ans(y2)\n\n # sort it out once they know the values of the coefficients\n return [Piecewise((a1, Eq(p, 0)), (a2, True))\n for a1, a2 in zip(_ans(y1), _ans(y2))]", "def is_sqf(f):\n return dmp_sqf_p(f.rep, f.lev, f.dom)", "def exquo(f, g):\n lev, dom, per, F, G = f.unify(g)\n return per(dmp_exquo(F, G, lev, dom))", "def qft_recursive(qubits):\n qftcirc = Circuit()\n\n # First add the QFT subroutine above\n qftcirc.add(qft_no_swap(qubits))\n\n # Then add SWAP gates to reverse the order of the qubits:\n for i in range(math.floor(len(qubits) / 2)):\n qftcirc.swap(qubits[i], qubits[-i - 1])\n\n return qftcirc", "def pquo(f, g):\n lev, dom, per, F, G = f.unify(g)\n return per(dmp_pquo(F, G, lev, dom))", "def pexquo(f, g):\n lev, dom, per, F, G = f.unify(g)\n return per(dmp_pexquo(F, G, lev, dom))", "def sub(f, g):\n lev, dom, per, F, G = f.unify(g)\n return per(dmp_sub(F, G, lev, dom))", "def zzx_zassenhaus(f):\n n = zzx_degree(f)\n\n if n == 1:\n return [f]\n\n A = zzx_max_norm(f)\n b = poly_LC(f)\n B = abs(int(sqrt(n+1))*2**n*A*b)\n C = (n+1)**(2*n)*A**(2*n-1)\n gamma = int(ceil(2*log(C, 2)))\n prime_max = int(2*gamma*log(gamma))\n\n for p in xrange(3, prime_max+1):\n if not isprime(p) or b % p == 0:\n continue\n\n F = gf_from_int_poly(f, p)\n\n if gf_sqf_p(F, p):\n break\n\n l = int(ceil(log(2*B + 1, p)))\n\n modular = []\n\n for ff in gf_factor_sqf(F, p)[1]:\n modular.append(gf_to_int_poly(ff, p))\n\n g = zzx_hensel_lift(p, f, modular, l)\n\n T = 
set(range(len(g)))\n factors, s = [], 1\n\n while 2*s <= len(T):\n for S in subsets(T, s):\n G, H = [b], [b]\n\n S = set(S)\n\n for i in S:\n G = zzx_mul(G, g[i])\n for i in T-S:\n H = zzx_mul(H, g[i])\n\n G = zzx_trunc(G, p**l)\n H = zzx_trunc(H, p**l)\n\n G_norm = zzx_l1_norm(G)\n H_norm = zzx_l1_norm(H)\n\n if G_norm*H_norm <= B:\n T = T - S\n\n G = zzx_primitive(G)[1]\n f = zzx_primitive(H)[1]\n\n factors.append(G)\n b = poly_LC(f)\n\n break\n else:\n s += 1\n\n return factors + [f]", "def zzX_heu_gcd(f, g, **flags):\n if poly_univariate_p(f):\n return zzx_heu_gcd(f, g, **flags)\n\n def interpolate(h, x):\n f = []\n\n while not zzX_zero_p(h):\n g = zzX_zz_trunc(h, x)\n f.insert(0, g)\n h = zzX_sub(h, g)\n h = zzX_quo_const(h, x)\n\n return f\n\n def finalize(h, cff, cfg, gcd):\n if zzX_zz_LC(h) > 0:\n h = zzX_mul_const(h, gcd)\n else:\n h = zzX_mul_const(h, -gcd)\n cff = zzX_neg(cff)\n cfg = zzX_neg(cfg)\n\n return h, cff, cfg\n\n zero_f = zzX_zero_p(f)\n zero_g = zzX_zero_p(g)\n\n l = poly_level(f)\n z = zzX_zero(l)\n\n if zero_f and zero_g:\n return z, z, z\n elif zero_f:\n return g, z, zzX_const(l, 1)\n elif zero_g:\n return f, zzX_const(l, 1), z\n\n df = zzX_degree(f)\n dg = zzX_degree(g)\n\n cf = zzX_zz_content(f)\n cg = zzX_zz_content(g)\n\n gcd = igcd(cf, cg)\n\n f = zzX_quo_const(f, gcd)\n g = zzX_quo_const(g, gcd)\n\n f_norm = zzX_max_norm(f)\n g_norm = zzX_max_norm(g)\n\n B = 2*min(f_norm, g_norm) + 29\n\n x = max(min(B, 99*INT_TYPE(isqrt(B))),\n 2*min(f_norm // abs(zzX_zz_LC(f)),\n g_norm // abs(zzX_zz_LC(g))) + 2)\n\n for i in xrange(0, 6):\n ff = zzX_eval(f, x)\n gg = zzX_eval(g, x)\n\n if not (zzX_zero_p(ff) or zzX_zero_p(gg)):\n h, cff, cfg = zzX_heu_gcd(ff, gg, **flags)\n\n h = interpolate(h, x)\n h = zzX_zz_primitive(h)[1]\n\n cff_, r = zzX_div(f, h)\n\n if zzX_zero_p(r):\n cfg_, r = zzX_div(g, h)\n\n if zzX_zero_p(r):\n return finalize(h, cff_, cfg_, gcd)\n\n cff = interpolate(cff, x)\n\n h, r = zzX_div(f, cff)\n\n if zzX_zero_p(r):\n cfg_, r = zzX_div(g, h)\n\n if zzX_zero_p(r):\n return finalize(h, cff, cfg_, gcd)\n\n cfg = interpolate(cfg, x)\n\n h, r = zzX_div(g, cfg)\n\n if zzX_zero_p(r):\n cff_, r = zzX_div(f, h)\n\n if zzX_zero_p(r):\n return finalize(h, cff_, cfg, gcd)\n\n x = INT_TYPE(2.7319*x*isqrt(isqrt(x)))\n\n raise HeuristicGCDFailed('no luck')", "def squareform(X, force=\"no\", checks=True):\n\n return ssd.squareform(X, force, checks)", "def gcd(f, g):\n lev, dom, per, F, G = f.unify(g)\n return per(dmp_gcd(F, G, lev, dom))", "def terms_gcd(f):\n J, F = dmp_terms_gcd(f.rep, f.lev, f.dom)\n return J, f.per(F)", "def get_f_h_gas_comp_out(p: float, s: float) -> float:\n\n return - 1.869892835947070 * 10 ** (-1) * p ** 4 \\\n + 8.223224182177200 * 10 ** (-1) * p ** 3 \\\n + 4.124595239531860 * p ** 2 \\\n - 8.346302788803210 * 10 * p \\\n - 1.016388214044490 * 10 ** 2 * s ** 4 \\\n + 8.652428629143880 * 10 ** 2 * s ** 3 \\\n - 2.574830800631310 * 10 ** 3 * s ** 2 \\\n + 3.462049327009730 * 10 ** 3 * s \\\n + 9.209837906396910 * 10 ** (-1) * p ** 3 * s \\\n - 5.163305566700450 * 10 ** (-1) * p ** 2 * s ** 2 \\\n + 4.076727767130210 * p * s ** 3 \\\n - 8.967168786520070 * p ** 2 * s \\\n - 2.062021416757910 * 10 * p * s ** 2 \\\n + 9.510257675728610 * 10 * p * s \\\n - 1.476914346214130 * 10 ** 3", "def F(cst, x):\n [u0, v0, u1, v1, u2, v2, coeffs] = cst\n [u, v, g1, g2, g3] = x\n a = g1*u1 - u0\n b = g2*u2 - u0\n c = g3*u - u0\n l = g1*v1 - v0 \n m = g2*v2 - v0\n n = g3*v - v0\n r = g1 - 1\n s = g2 - 1\n t = g3 - 1\n return np.array([\n coeffs[0]*(a**2-l**2) 
+ 2*coeffs[1]*(a*b-l*m) + coeffs[2]*(b**2-m**2) + 2*coeffs[3]*(a*c-l*n) + 2*coeffs[4]*(b*c-m*n) + c**2 - n**2,\n coeffs[0]*(l**2-r**2) + 2*coeffs[1]*(l*m-r*s) + coeffs[2]*(m**2-s**2) + 2*coeffs[3]*(l*n-r*t) + 2*coeffs[4]*(m*n-s*t) + n**2 - t**2,\n coeffs[0]*a*l + coeffs[1]*(l*b+m*a) + coeffs[2]*m*b + coeffs[3]*(l*c+n*a) + coeffs[4]*(m*c+b*n) + c*n,\n coeffs[0]*a*r + coeffs[1]*(r*b+s*a) + coeffs[2]*s*b + coeffs[3]*(r*c+t*a) + coeffs[4]*(s*c+b*t) + c*t,\n coeffs[0]*r*l + coeffs[1]*(l*s+m*r) + coeffs[2]*m*s + coeffs[3]*(l*t+n*r) + coeffs[4]*(m*t+s*n) + t*n \n ])", "def refine_complex_root(f, s, t, eps=None, steps=None, fast=False):\n return dmp_refine_complex_root(f.rep, s, t, f.lev, f.dom, eps=eps, steps=steps, fast=fast)", "def sqf_list(f, all=False):\n coeff, factors = dmp_sqf_list(f.rep, f.lev, f.dom, all=all)\n return coeff, [ (f.per(g), k) for g, k in factors ]", "def zzx_div(f, g):\n df = zzx_degree(f)\n dg = zzx_degree(g)\n\n if not g:\n raise ZeroDivisionError(\"polynomial division\")\n elif df < dg:\n return [], f\n\n q, r = [], f\n\n while True:\n dr = zzx_degree(r)\n\n if dr < dg:\n break\n\n lc_r = poly_LC(r)\n lc_g = poly_LC(g)\n\n if lc_r % lc_g != 0:\n break\n\n c, k = lc_r // lc_g, dr - dg\n\n q = zzx_add_term(q, c, k)\n h = zzx_mul_term(g, c, k)\n r = zzx_sub(r, h)\n\n return q, r", "def zzx_heu_gcd(f, g, **flags):\n def interpolate(h, x):\n f = []\n\n while h:\n g = h % x\n\n if g > x // 2:\n g -= x\n\n f.insert(0, g)\n h = (h-g) // x\n\n return f\n\n def finalize(h, cff, cfg, gcd):\n h = zzx_mul_const(h, gcd)\n return h, cff, cfg\n\n if not (f or g):\n return [], [], []\n elif not f:\n return g, [], [1]\n elif not g:\n return f, [1], []\n\n df = zzx_degree(f)\n dg = zzx_degree(g)\n\n cf = zzx_content(f)\n cg = zzx_content(g)\n\n gcd = igcd(cf, cg)\n\n f = [ c // gcd for c in f ]\n g = [ c // gcd for c in g ]\n\n if df == 0 or dg == 0:\n return [gcd], f, g\n\n f_norm = zzx_max_norm(f)\n g_norm = zzx_max_norm(g)\n\n B = 2*min(f_norm, g_norm) + 29\n\n x = max(min(B, 99*INT_TYPE(isqrt(B))),\n 2*min(f_norm // abs(poly_LC(f)),\n g_norm // abs(poly_LC(g))) + 2)\n\n for i in xrange(0, 6):\n ff = zzx_eval(f, x)\n gg = zzx_eval(g, x)\n\n if ff and gg:\n h = igcd(ff, gg)\n\n cff = ff // h\n cfg = gg // h\n\n h = interpolate(h, x)\n h = zzx_primitive(h)[1]\n\n cff_, r = zzx_div(f, h)\n\n if not r:\n cfg_, r = zzx_div(g, h)\n\n if not r:\n return finalize(h, cff_, cfg_, gcd)\n\n cff = interpolate(cff, x)\n\n h, r = zzx_div(f, cff)\n\n if not r:\n cfg_, r = zzx_div(g, h)\n\n if not r:\n return finalize(h, cff, cfg_, gcd)\n\n cfg = interpolate(cfg, x)\n\n h, r = zzx_div(g, cfg)\n\n if not r:\n cff_, r = zzx_div(f, h)\n\n if not r:\n return finalize(h, cff_, cfg, gcd)\n\n x = INT_TYPE(2.7319*x*isqrt(isqrt(x)))\n\n raise HeuristicGCDFailed('no luck')", "def sqf_list(self, f):\n domain = self.domain\n\n if domain.is_Field:\n coeff, f = f.LC, f.monic()\n else:\n coeff, f = f.primitive()\n\n if domain.is_FiniteField:\n return coeff, self._gf_sqf_list(f)\n return coeff, self._rr_yun0_sqf_list(f)", "def fdq2(f, x, h=1e-5):\n return (-3*f(x) + 4*f(x+h) - f(x+2*h))/(2*h)\n raise NotImplementedError(\"Problem 2 Incomplete\")", "def get_f_s_gas(p: float, h: float) -> float:\n return 5.823109493752840 * 10 ** (-2) * p ** 4 \\\n - 3.309666523931270 * 10 ** (-1) * p ** 3 \\\n + 7.700179914440890 * 10 ** (-1) * p ** 2 \\\n - 1.311726004718660 * p \\\n + 1.521486605815750 * 10 ** (-9) * h ** 4 \\\n - 2.703698863404160 * 10 ** (-6) * h ** 3 \\\n + 1.793443775071770 * 10 ** (-3) * h ** 2 \\\n - 
5.227303746767450 * 10 ** (-1) * h \\\n + 1.100368875131490 * 10 ** (-4) * p ** 3 * h \\\n + 5.076769807083600 * 10 ** (-7) * p ** 2 * h ** 2 \\\n + 1.202580329499520 * 10 ** (-8) * p * h ** 3 \\\n - 7.278049214744230 * 10 ** (-4) * p ** 2 * h \\\n - 1.449198550965620 * 10 ** (-5) * p * h ** 2 \\\n + 5.716086851760640 * 10 ** (-3) * p * h \\\n + 5.818448621582900 * 10", "def sturm(f):\n return map(f.per, dmp_sturm(f.rep, f.lev, f.dom))", "def transform(f, p, q, j=0):\n lev, dom, per, F, (P, Q) = f.unify((p, q))\n return per(dmp_transform_in(F, P, Q, j, lev, dom))", "def zzx_sqr(f):\n df, h = zzx_degree(f), []\n\n for i in xrange(0, 2*df+1):\n coeff = INT_ZERO\n\n jmin = max(0, i-df)\n jmax = min(i, df)\n\n n = jmax - jmin + 1\n\n jmax = jmin + n // 2 - 1\n\n for j in xrange(jmin, jmax+1):\n coeff += f[j]*f[i-j]\n\n coeff += coeff\n\n if n & 1:\n elem = f[jmax+1]\n coeff += elem**2\n\n h.append(coeff)\n\n return h", "def test_f_uni(self):\n s = np.array([100.0, 0, 0, 0, 0, 0])\n e = np.array([0.1, -0.05, -0.05, 0, 0, 0])\n f_direct = self.model.f(s, e, self.t, self.T)\n \n sdev = s - np.array([1,1,1,0,0,0]) * np.sum(s[:3]) / 3.0\n se = np.sqrt(3.0/2.0) * la.norm(sdev)\n ee = np.sqrt(2.0/3.0) * la.norm(e)\n\n g_direct = self.smodel.g(se, ee, self.t, self.T)\n \n self.assertTrue(np.isclose(g_direct, f_direct[0]))\n\n self.assertTrue(np.isclose(-g_direct/2.0, f_direct[1]))\n self.assertTrue(np.isclose(-g_direct/2.0, f_direct[2]))\n\n self.assertTrue(np.allclose([0,0,0], f_direct[3:]))", "def quad7(f, xspan):\n\n # gauss nodes for DOE 7\n lag_nodes = np.array([-np.sqrt(2/5), 0, np.sqrt(2/5)])\n\n true_nodes = (xspan[0] + xspan[1]) / 2 + (xspan[1] - xspan[0]) / 2 *lag_nodes", "def zzX_div(f, g):\n if poly_univariate_p(f):\n return zzx_div(f, g)\n\n df = zzX_degree(f)\n dg = zzX_degree(g)\n\n if dg < 0:\n raise ZeroDivisionError(\"polynomial division\")\n\n q, r = zzX_zero_of(f), f\n\n if df < dg:\n return q, r\n\n while True:\n dr = zzX_degree(r)\n\n if dr < dg:\n break\n\n lc_r = poly_LC(r)\n lc_g = poly_LC(g)\n\n c, R = zzX_div(lc_r, lc_g)\n\n if not zzX_zero_p(R):\n break\n\n k = dr - dg\n\n q = zzX_add_term(q, c, k)\n h = zzX_mul_term(g, c, k)\n r = zzX_sub(r, h)\n\n return q, r", "def zassenhaus(f):\n # keep leading coefficient\n lf = f.leading_coefficient()\n\n # p-adic factorization\n p, fp_factors = padic_factorization(f)\n if len(fp_factors) == 1:\n return [f]\n\n # purge leading coefficient from factors\n for i,g in enumerate(fp_factors):\n if g.degree() == 0:\n del fp_factors[i]\n break\n\n # lift to Mignotte bound\n blm = upper_bound_of_coefficient(f)\n bound = p**(arith1.log(2*blm,p)+1)\n\n # Hensel lifting\n lf_inv_modq = intresidue.IntegerResidueClass(lf, bound).inverse()\n fq = f.coefficients_map(lambda c: (lf_inv_modq*c).minimumAbsolute()) # fq is monic\n fq_factors, q = hensel.lift_upto(fq, fp_factors, p, bound)\n\n return brute_force_search(f, fq_factors, bound)", "def zzx_cofactors(f, g, **flags):\n return zzx_heu_gcd(f, g, **flags)", "def hensel_lifting_poly_factorization(f):\n\n domain = f.parent()\n base_domain = domain.base()\n\n if not base_domain.is_ring() or not base_domain == IntegerRing():\n raise ValueError(\"The base domain must be the integer ring\")\n\n if f.degree() < 1 or not f.is_squarefree() or not f.is_primitive():\n raise ValueError(\"f must be a nonconstant, squarefree, primitive polynomial\")\n\n n = f.degree()\n if n == 1:\n return [f]\n\n A = base_domain(f.norm(Infinity))\n b = f.leading_coefficient()\n B = sqrt(n + 1) * 2**n * A * b\n C = (n + 
1)**(2*n) * A**(2*n - 1)\n gamma = ceil(2 * log(C, 2))\n\n p = 2\n while p <= 2*gamma*log(gamma):\n if b % p != 0:\n Fp = PolynomialRing(GF(p), 'x')\n f_bar = Fp(f)\n if f_bar.is_squarefree():\n break\n p = next_prime(p)\n\n if p > 2*gamma*log(gamma): # Should never happen\n raise RuntimeError(\"Couldn't find such a prime\")\n\n # Modular factorization\n Fp = PolynomialRing(GF(p), 'x')\n f_bar = Fp(f)\n\n modular_factors = berlekamp_poly_factorization(f_bar, squarefree=True)\n\n ZZR = PolynomialRing(ZZ, 'x')\n modular_factors = map(ZZR, modular_factors)\n\n # Hensel lifting\n l = ceil(log(2*B + 1, p))\n modular_factors = __multifactor_hensel_lifting(f, p, l, modular_factors)\n\n # The set of modular factors still to be treated, the set of factors found, and the polynomial f_ still to be\n # factored.\n Zpl = PolynomialRing(IntegerModRing(p ** l), 'x')\n modular_factors = Set(map(Zpl, modular_factors))\n s = 1\n factors = []\n f_ = f\n\n # Factor combination\n while 2*s <= len(modular_factors):\n for S in Subsets(modular_factors, s):\n g_ = ZZR(Zpl(b) * prod(S))\n h_ = ZZR(Zpl(b) * prod(modular_factors.difference(S)))\n\n if g_.norm(1) * h_.norm(1) <= B:\n modular_factors = modular_factors.difference(S)\n factors.append(ZZR(g_ / poly_content(g_))) # Primitive part\n f_ = ZZR(h_ / poly_content(h_))\n b = f_.leading_coefficient()\n break # Exit the for loop and continue the while loop\n\n s += 1\n\n factors.append(f_)\n return factors", "def zzX_cofactors(f, g, **flags):\n if poly_univariate_p(f):\n return zzx_heu_gcd(f, g, **flags)\n\n if not flags.get('reduced', True):\n return zzX_heu_gcd(f, g, **flags)\n else:\n K, (f, g) = zzX_multi_reduce(f, g)\n\n return [ zzX_compose_term(h, K)\n for h in zzX_heu_gcd(f, g, **flags) ]", "def fractalTransformationCG(F,G,M=256,N=50,its=16,\n deBruijn=True,return_Q=False):\n assert isinstance(F,DynamicalSystem) and isinstance(G,DynamicalSystem)\n assert F.check_validity(True,False) and G.check_validity(True,False)\n if deBruijn:\n its = int(its)\n if its>32:\n print(\"fractalTransformationCG: Warning: A very long sequence \"+\n \"length has been requested! (2**\",its,\")\")\n else:\n if its<=30:\n its = int(2.0**its)\n else:\n its = int(its)\n rho = F.get_rho()\n tau_L = F.tau(rho,N+1)\n tau_R = F.tau_plus(rho,N+1)\n sigma = np.zeros(N+1,dtype=np.int8)\n X = np.linspace(0.0,1.0,M+1)\n H = X.copy()\n Q = np.zeros(M+1,dtype=np.int)\n Q[0],Q[M] = N,N # since the end points are always correct\n q,x,y = 0,1.0,1.0\n def address_distance(alpha,beta):\n k = np.argmin(alpha==beta)\n return (beta[k]-alpha[k])*0.5**k\n if deBruijn:\n db_2 = DeBruijnGenerator(2,its)\n #for _ in range(db_2.length()): # beware of overflow!\n while not db_2.is_complete(): # this is better\n sigma = np.roll(sigma,1)\n sigma[0] = db_2()\n if sigma[0]==0:\n x = F.if0(x)\n y = G.if0(y)\n else:\n x = F.if1(x)\n y = G.if1(y)\n if sigma[0]==0:\n if address_distance(sigma,tau_L)<0:\n q = 0\n else:\n if address_distance(tau_R,sigma)<0:\n q = 0\n k = int(0.5+x*M)\n # Should really check k is in the right range (i.e. 
0,1,...,M)\n # but this shouldn't happen and is somewhat expensive to check\n if Q[k] < q:\n H[k] = y\n Q[k] = q\n q += 1\n # end while\n else:\n for _ in range(its):\n sigma = np.roll(sigma,1)\n sigma[0] = np.random.randint(2)\n if sigma[0]==0:\n x = F.if0(x)\n y = G.if0(y)\n else:\n x = F.if1(x)\n y = G.if1(y)\n if sigma[0]==0:\n if address_distance(sigma,tau_L)<0:\n q = 0\n else:\n if address_distance(tau_R,sigma)<0:\n q = 0\n k = int(0.5+x*M)\n # Should really check k is in the right range (i.e. 0,1,...,M)\n # but this shouldn't happen and is somewhat expensive to check\n if Q[k] < q:\n H[k] = y\n Q[k] = q\n q += 1\n # end for\n # end if/else\n if return_Q:\n return X,H,Q\n return X,H", "def reduced(f, G):\n lev, dom, per, f, G = f.unify(G)\n return per(dmp_reduced(f, G, lev, dom))", "def generate_fgs_fsw_coefficients(siaf=None, verbose=False, scale=0.06738281367):\n if siaf is None:\n siaf = pysiaf.Siaf('fgs')\n\n instrument = 'FGS'\n\n pre_delivery_dir = os.path.join(JWST_DELIVERY_DATA_ROOT, instrument)\n if not os.path.isdir(pre_delivery_dir):\n os.makedirs(pre_delivery_dir)\n\n for aperture_name in ['FGS1_FULL_OSS', 'FGS2_FULL_OSS']:\n\n aperture = siaf[aperture_name]\n\n # center_offset_x = 1023.5\n # center_offset_y = 1023.5\n center_offset_x = aperture.XSciRef - 1.\n center_offset_y = aperture.YSciRef - 1.\n\n if verbose:\n print('External scale {}'.format(scale))\n print(aperture.get_polynomial_scales())\n\n # get SIAF coefficients\n coefficients = aperture.get_polynomial_coefficients()\n\n ar = coefficients['Sci2IdlX']\n br = coefficients['Sci2IdlY']\n cr = coefficients['Idl2SciX']\n dr = coefficients['Idl2SciY']\n\n a_fsw, b_fsw, c_fsw, d_fsw = polynomial.rescale(ar, br, cr, dr, 1. / scale)\n factor = -1.\n\n if 'FGS1' in aperture_name:\n b_fsw *= -1\n c_fsw = polynomial.flip_y(c_fsw)\n d_fsw = polynomial.flip_y(d_fsw)\n\n a_fsw = polynomial.shift_coefficients(a_fsw, factor * center_offset_x,\n factor * center_offset_y)\n b_fsw = polynomial.shift_coefficients(b_fsw, factor * center_offset_x,\n factor * center_offset_y)\n c_fsw = polynomial.shift_coefficients(c_fsw, factor * center_offset_x,\n factor * center_offset_y)\n d_fsw = polynomial.shift_coefficients(d_fsw, factor * center_offset_x,\n factor * center_offset_y)\n\n a_fsw[0] += center_offset_x\n b_fsw[0] += center_offset_y\n c_fsw[0] += center_offset_x\n d_fsw[0] += center_offset_y\n\n # print FSW coefficients to screen\n fsw_coefficients = Table((c_fsw, d_fsw, a_fsw, b_fsw), names=(\n 'IDEALPTOREALPXCOE', 'IDEALPTOREALPYCOE', 'REALPTOIDEALPXCOE', 'REALPTOIDEALPYCOE'))\n if verbose:\n fsw_coefficients.pprint()\n\n table = Table(names=('parameter_name', 'value'), dtype=(object, float))\n table.add_row(['XOFFSET', center_offset_x])\n table.add_row(['YOFFSET', center_offset_y])\n table.add_row(['PLATESCALE', scale])\n for colname in fsw_coefficients.colnames:\n for i in range(len(fsw_coefficients[colname])):\n table.add_row(['{}_{}'.format(colname, i), fsw_coefficients[colname][i]])\n table['parameter_name'] = np.array(table['parameter_name']).astype(str)\n\n # write to file\n fsw_distortion_file = os.path.join(pre_delivery_dir, 'ifgs{}_distortion_tbl.txt'.format(aperture_name[3]))\n comments = []\n comments.append('FGS distortion coefficients for FSW')\n comments.append('')\n comments.append('Derived from SIAF distortion coefficients.')\n comments.append('')\n comments.append('Generated {} {}'.format(timestamp.isot, timestamp.scale))\n comments.append('by {}'.format(username))\n comments.append('')\n table.meta['comments'] 
= comments\n formats={'parameter_name': '%-20s', 'value': '%+2.6e'}\n table.write(fsw_distortion_file, format='ascii.fixed_width',\n delimiter=',', delimiter_pad=' ', bookend=False,\n overwrite=True, formats=formats)", "def f2chi(f, kptsdf, c, arbfield=1.0):\n # Since the solution we obtain from cg and from iterative scheme is F_k where chi_k = eE/kT * f0(1-f0) * F_k\n # then we need to bring these factors back in to get the right units\n f0 = np.squeeze(kptsdf['k_FD'].values)\n prefactor = arbfield * c.e / c.kb_joule / c.T * f0 * (1 - f0)\n chi = np.squeeze(f) * np.squeeze(prefactor)\n return chi", "def subresultants(f, g):\n lev, dom, per, F, G = f.unify(g)\n R = dmp_subresultants(F, G, lev, dom)\n return map(per, R)", "def roots_quadratic(f):\n\n a, b, c = f.all_coeffs()\n dom = f.get_domain()\n\n def _sqrt(d):\n # remove squares from square root since both will be represented\n # in the results; a similar thing is happening in roots() but\n # must be duplicated here because not all quadratics are binomials\n co = []\n other = []\n for di in Mul.make_args(d):\n if di.is_Pow and di.exp.is_Integer and di.exp % 2 == 0:\n co.append(Pow(di.base, di.exp//2))\n else:\n other.append(di)\n if co:\n d = Mul(*other)\n co = Mul(*co)\n return co*sqrt(d)\n return sqrt(d)\n\n def _simplify(expr):\n if dom.is_Composite:\n return factor(expr)\n else:\n from sympy.simplify.simplify import simplify\n return simplify(expr)\n\n if c is S.Zero:\n r0, r1 = S.Zero, -b/a\n\n if not dom.is_Numerical:\n r1 = _simplify(r1)\n elif r1.is_negative:\n r0, r1 = r1, r0\n elif b is S.Zero:\n r = -c/a\n if not dom.is_Numerical:\n r = _simplify(r)\n\n R = _sqrt(r)\n r0 = -R\n r1 = R\n else:\n d = b**2 - 4*a*c\n A = 2*a\n B = -b/A\n\n if not dom.is_Numerical:\n d = _simplify(d)\n B = _simplify(B)\n\n D = factor_terms(_sqrt(d)/A)\n r0 = B - D\n r1 = B + D\n if a.is_negative:\n r0, r1 = r1, r0\n elif not dom.is_Numerical:\n r0, r1 = [expand_2arg(i) for i in (r0, r1)]\n\n return [r0, r1]", "def fock_ueg(sys, G):\n nbsf = sys.nbasis\n nq = len(sys.qvecs)\n assert nq == len(sys.vqvec)\n\n Fock = numpy.zeros((2, nbsf, nbsf), dtype=numpy.complex128)\n Gkpq = numpy.zeros((2, nq), dtype=numpy.complex128)\n Gpmq = numpy.zeros((2, nq), dtype=numpy.complex128)\n\n for s in [0, 1]:\n coulomb_greens_function(nq, sys.ikpq_i, sys.ikpq_kpq,\n sys.ipmq_i, sys.ipmq_pmq,\n Gkpq[s], Gpmq[s], G[s])\n\n J = build_J_opt(nq, sys.vqvec, sys.vol, sys.nbasis,\n sys.ikpq_i, sys.ikpq_kpq, sys.ipmq_i, sys.ipmq_pmq,\n Gkpq, Gpmq)\n\n K = build_K_opt(nq, sys.vqvec, sys.vol, sys.nbasis,\n sys.ikpq_i, sys.ikpq_kpq, sys.ipmq_i, sys.ipmq_pmq,\n G)\n\n for s in [0, 1]:\n Fock[s] = sys.H1[s] + J[s] + K[s]\n\n return Fock", "def automorphism_group_FF_alg3(rational_function):\n # define ground field and ambient function field\n if rational_function.parent().is_field():\n K = rational_function.parent()\n R = K.ring()\n else:\n R = rational_function.parent()\n K = R.fraction_field()\n\n F = R.base_ring()\n if not F.is_finite() or not F.is_field():\n raise TypeError(\"coefficient ring is not a finite field\")\n p = F.characteristic()\n q = F.cardinality()\n z = R.gen(0)\n phi = K(rational_function)\n f = phi.numerator()\n g = phi.denominator()\n D = max(f.degree(), g.degree())\n\n # For use in the quadratic extension parts of the algorithm\n E = GF(p**(2*F.degree()),'b')\n b = E.gen(0)\n sigma = F.Hom(E)[0]\n S = PolynomialRing(E, 'w')\n w = S.gen(0)\n Phi = rational_function_coerce(phi, sigma, S)\n\n # Compute the set of distinct F-rational and F-quadratic\n # 
factors of the fixed point polynomial\n fix = R(f(z) - z*g(z))\n linear_fix = gcd(fix, z**q - z);\n quad_temp = fix.quo_rem(linear_fix)[0]\n residual = gcd(quad_temp, z**q - z)\n while residual.degree() > 0:\n quad_temp = quad_temp.quo_rem(residual)[0]\n residual = gcd(quad_temp, z**q - z)\n quadratic_fix = gcd(quad_temp, z**(q**2) - z).factor()\n\n # Compute the set of distinct F-rational fixed points\n linear_fix_pts = [[ x, F(1)] for x in linear_fix.roots(multiplicities=False)]\n if bool(fix.degree() < D+1):\n linear_fix_pts.append( [F(1),F(0)] )\n n1 = len(linear_fix_pts)\n\n # Coerce quadratic factors into a quadratic extension\n quad_fix_factors = [ rational_function_coerce(poly[0], sigma, S) for poly in quadratic_fix]\n n2 = 2*len(quad_fix_factors)\n\n # Collect pre-image data as a list L with entries in the form\n # [fixed point y, F-rational pre-images z != y, polynomial defining the pre-images]\n # Note that we remove the fixed point from its pre-image set and its polynomial\n pre_images = []\n for y in linear_fix_pts:\n if y == [F(1),F(0)]:\n Fpre = [ [x,F(1)] for x in g.roots(multiplicities=False) ]\n pre_images.append([y, Fpre, g])\n else:\n Fpre = [ [x,F(1)] for x in (f - y[0]*g).roots(multiplicities=False) if x != y[0]]\n if y[0] == 0 and f.degree() < g.degree():\n Fpre.append([F(1), F(0)]) # infinity is a pre-image of 0\n elif f.degree() == g.degree() and f.leading_coefficient() == y[0]*g.leading_coefficient():\n Fpre.append([F(1), F(0)]) # infinity is a pre-image of y[0]\n # remove y[0] as a root of pre-image polynomial\n h = (f - y[0]*g).quo_rem(z-y[0])[0]\n h_common = gcd(h, z-y[0])\n while h_common.degree() > 0:\n h = h.quo_rem(z-y[0])[0]\n h_common = gcd(h,z-y[0])\n pre_images.append([y, Fpre, h])\n\n # Initialize the set of automorphisms to contain the identity\n automorphisms = [R(z)]\n automorphisms_quad = []\n\n # order p elements\n # An F-rational fixed point has orbit length 1 or p under the action of an element of\n # order p. An F-quadratic fixed point has orbit length p. 
The set of F-rational\n # pre-images of fixed points decomposes as a union of orbits of length p.\n if n1%p == 1 and n2%p == 0 and sum(len(x[1]) for x in pre_images)%p == 0:\n # Compute total number of distinct fixed points as a final check for order p auts\n factor_list = fix.factor()\n minimal_fix_poly = R(prod(x[0] for x in factor_list))\n n = sum(x[0].degree() for x in factor_list) + bool(fix.degree() < D+1)\n if n%p == 1:\n automorphisms = automorphisms + order_p_automorphisms(phi, pre_images)\n\n ## nontrivial elements with order prime to p ##\n # case of 2 F-rational fixed points\n for pt_pair in combinations(linear_fix_pts, 2):\n x = pt_pair[0]\n y = pt_pair[1]\n automorphisms = automorphisms + automorphisms_fixing_pair(phi, [x,y], False)\n\n # case of 1 F-rational fixed point and an F-rational pre-image\n for y in pre_images:\n for x in y[1]:\n automorphisms = automorphisms + automorphisms_fixing_pair(phi, [x,y[0]], False)\n\n # case of a pair of quadratic fixed points\n for h in quad_fix_factors:\n quad_fix_pts = [ [x,E(1)] for x in h.roots(multiplicities=False)]\n automorphisms_quad = automorphisms_quad + automorphisms_fixing_pair(Phi, quad_fix_pts, True)\n\n phi_2 = phi(phi(z))\n f_2 = phi_2.numerator()\n g_2 = phi_2.denominator()\n\n period_2 = (f_2(z) - z*g_2(z)).quo_rem(fix)[0]\n factor_list_2 = period_2.factor()\n linear_period_2_pts = [[ x, F(1)] for x in period_2.roots(multiplicities=False)]\n if bool(period_2.degree() < D**2-D):\n linear_period_2_pts.append( [F(1),F(0)] )\n quad_period_2_factors = [rational_function_coerce(poly[0], sigma, S) for poly in factor_list_2 if poly[0].degree() == 2]\n # n2 = n1 + 2*len(quad_fix_factors)\n\n # case of a pair of F-rational period 2 points\n linear_period_2_pairs = []\n while len(linear_period_2_pts) > 0:\n x = linear_period_2_pts.pop(-1)\n if x[1] == 1 and g(x[0]) != 0:\n y = [phi(x[0]), F(1)]\n elif x[1] == 1 or f.degree() > g.degree():\n y = [F(1), F(0)]\n elif f.degree() == g.degree():\n y = [f.leading_coefficient() / g.leading_coefficient(), F(1)]\n else:\n y = [F(0), F(1)]\n\n if x != y:\n linear_period_2_pts.remove(y)\n linear_period_2_pairs.append([x,y])\n\n for pt_pair in linear_period_2_pairs:\n automorphisms = automorphisms + automorphisms_fixing_pair(phi, pt_pair, False)\n\n # case of a pair of quadratic period 2 points\n for h in quad_period_2_factors:\n pt_pair = [ [x,E(1)] for x in h.roots(multiplicities=False)]\n if Phi(pt_pair[0][0]) == pt_pair[1][0]:\n automorphisms_quad = automorphisms_quad + automorphisms_fixing_pair(Phi, pt_pair, True)\n\n # Descend coefficients of the quadratic guys back to the base field\n for s in automorphisms_quad:\n automorphisms.append(rational_function_coefficient_descent(s, sigma, R))\n\n return automorphisms", "def _msqrd_v_f_f(s, t, model: SingleRhNeutrinoModel, mf: float):\n mx = model.mx\n u = 0.5 * np.tan(2 * model.theta)\n\n return (\n -2\n * u**2\n * GF**2\n * (\n 2 * mf**4 * (1 - 4 * SW**2 + 8 * SW**4)\n + 2 * mf**2 * (mx**2 - s - 2 * (1 - 4 * SW**2 + 8 * SW**4) * t)\n + (1 - 4 * SW**2 + 8 * SW**4)\n * (s**2 + 2 * s * t + 2 * t**2 - mx**2 * (s + 2 * t))\n )\n )", "def evolve_fqe_givens(wfn: Wavefunction, u: np.ndarray) -> Wavefunction:\n wfn = evolve_fqe_givens_sector(wfn, u, sector='alpha')\n wfn = evolve_fqe_givens_sector(wfn, u, sector='beta')\n return wfn", "def SCG(f, gradf, x, optargs=(), maxiters=500, max_f_eval=np.inf, display=True, xtol=None, ftol=None, gtol=None):\r\n if xtol is None:\r\n xtol = 1e-6\r\n if ftol is None:\r\n ftol = 1e-6\r\n if gtol is None:\r\n gtol 
= 1e-5\r\n\r\n sigma0 = 1.0e-8\r\n fold = f(x, *optargs) # Initial function value.\r\n function_eval = 1\r\n fnow = fold\r\n gradnew = gradf(x, *optargs) # Initial gradient.\r\n if any(np.isnan(gradnew)):\r\n raise UnexpectedInfOrNan, \"Gradient contribution resulted in a NaN value\"\r\n current_grad = np.dot(gradnew, gradnew)\r\n gradold = gradnew.copy()\r\n d = -gradnew # Initial search direction.\r\n success = True # Force calculation of directional derivs.\r\n nsuccess = 0 # nsuccess counts number of successes.\r\n beta = 1.0 # Initial scale parameter.\r\n betamin = 1.0e-15 # Lower bound on scale.\r\n betamax = 1.0e15 # Upper bound on scale.\r\n status = \"Not converged\"\r\n\r\n flog = [fold]\r\n\r\n iteration = 0\r\n\r\n len_maxiters = len(str(maxiters))\r\n if display:\r\n print ' {0:{mi}s} {1:11s} {2:11s} {3:11s}'.format(\"I\", \"F\", \"Scale\", \"|g|\", mi=len_maxiters)\r\n exps = exponents(fnow, current_grad)\r\n p_iter = iteration\r\n\r\n # Main optimization loop.\r\n while iteration < maxiters:\r\n\r\n # Calculate first and second directional derivatives.\r\n if success:\r\n mu = np.dot(d, gradnew)\r\n if mu >= 0:\r\n d = -gradnew\r\n mu = np.dot(d, gradnew)\r\n kappa = np.dot(d, d)\r\n sigma = sigma0 / np.sqrt(kappa)\r\n xplus = x + sigma * d\r\n gplus = gradf(xplus, *optargs)\r\n theta = np.dot(d, (gplus - gradnew)) / sigma\r\n\r\n # Increase effective curvature and evaluate step size alpha.\r\n delta = theta + beta * kappa\r\n if delta <= 0:\r\n delta = beta * kappa\r\n beta = beta - theta / kappa\r\n\r\n alpha = -mu / delta\r\n\r\n # Calculate the comparison ratio.\r\n xnew = x + alpha * d\r\n fnew = f(xnew, *optargs)\r\n function_eval += 1\r\n\r\n# if function_eval >= max_f_eval:\r\n# status = \"maximum number of function evaluations exceeded\"\r\n# break\r\n# return x, flog, function_eval, status\r\n\r\n Delta = 2.*(fnew - fold) / (alpha * mu)\r\n if Delta >= 0.:\r\n success = True\r\n nsuccess += 1\r\n x = xnew\r\n fnow = fnew\r\n else:\r\n success = False\r\n fnow = fold\r\n\r\n # Store relevant variables\r\n flog.append(fnow) # Current function value\r\n\r\n iteration += 1\r\n if display:\r\n print_out(len_maxiters, fnow, current_grad, beta, iteration)\r\n n_exps = exponents(fnow, current_grad)\r\n if iteration - p_iter >= 20 * np.random.rand():\r\n a = iteration >= p_iter * 2.78\r\n b = np.any(n_exps < exps)\r\n if a or b:\r\n p_iter = iteration\r\n print ''\r\n if b:\r\n exps = n_exps\r\n\r\n if success:\r\n # Test for termination\r\n\r\n if (np.abs(fnew - fold) < ftol):\r\n status = 'converged - relative reduction in objective'\r\n break\r\n# return x, flog, function_eval, status\r\n elif (np.max(np.abs(alpha * d)) < xtol):\r\n status = 'converged - relative stepsize'\r\n break\r\n else:\r\n # Update variables for new position\r\n gradold = gradnew\r\n gradnew = gradf(x, *optargs)\r\n current_grad = np.dot(gradnew, gradnew)\r\n fold = fnew\r\n # If the gradient is zero then we are done.\r\n if current_grad <= gtol:\r\n status = 'converged - relative reduction in gradient'\r\n break\r\n # return x, flog, function_eval, status\r\n\r\n # Adjust beta according to comparison ratio.\r\n if Delta < 0.25:\r\n beta = min(4.0 * beta, betamax)\r\n if Delta > 0.75:\r\n beta = max(0.5 * beta, betamin)\r\n\r\n # Update search direction using Polak-Ribiere formula, or re-start\r\n # in direction of negative gradient after nparams steps.\r\n if nsuccess == x.size:\r\n d = -gradnew\r\n# beta = 1. 
# TODO: betareset!!\r\n nsuccess = 0\r\n elif success:\r\n Gamma = np.dot(gradold - gradnew, gradnew) / (mu)\r\n d = Gamma * d - gradnew\r\n else:\r\n # If we get here, then we haven't terminated in the given number of\r\n # iterations.\r\n status = \"maxiter exceeded\"\r\n\r\n if display:\r\n print_out(len_maxiters, fnow, current_grad, beta, iteration)\r\n print \"\"\r\n print status\r\n return x, flog, function_eval, status", "def f_q_hisano(self, mchi):\n w = self.MW**2/mchi**2\n def gH(x):\n bx = np.sqrt(1-x/4+0*1j)\n out = np.real_if_close(-2/bx * (2 + 2*x - x**2) * np.arctan(2*bx/np.sqrt(x))\\\n + 2*np.sqrt(x) * (2 - x*np.log(x)))\n return out\n return (self.alpha)**2/(4*self.Mh**2*self.sw**4) * ((self.dchi**2 - 1)/(8*self.MW) * gH(w))", "def Pup(self, f):\n d, g, a = f.unwrap()\n assert g == self or g == self.dual\n a = g.upsample(d)(a)\n if g.dimension == 1:\n a = [a]\n gg = self.refine()\n c = tuple(type(f)(0, gg, _) for _ in a)\n return comp.Form(d, c)", "def retarded_gf(h_ao, s_ao, energy, gamma_left, gamma_right):\n return np.linalg.inv(energy*s_ao - h_ao + (1j/2.)*(gamma_left + gamma_right))", "def fdq1(f, x, h=1e-5):\n return (f(x+h) - f(x))/h\n \n raise NotImplementedError(\"Problem 2 Incomplete\")", "def sdg(self, q):\n if isinstance(q, QuantumRegister):\n instructions = InstructionSet()\n for j in range(q.size):\n instructions.add(self.sdg((q, j)))\n return instructions\n\n self._check_qubit(q)\n return self._attach(SdgGate(q, self))", "def generic_fsim_gate(\n fsim_angles: Dict[str, float], qubits: Tuple[cirq.GridQubit, cirq.GridQubit]\n) -> List[cirq.OP_TREE]:\n q_0, q_1 = qubits\n g_f = [\n cirq.Z(q_0)\n ** (\n -(\n fsim_angles[\"delta_minus_off_diag\"]\n + fsim_angles[\"delta_minus_diag\"]\n - 2 * fsim_angles[\"delta_plus\"]\n )\n / np.pi\n / 4.0\n ),\n cirq.Z(q_1)\n ** (\n (\n fsim_angles[\"delta_minus_off_diag\"]\n + fsim_angles[\"delta_minus_diag\"]\n + 2 * fsim_angles[\"delta_plus\"]\n )\n / np.pi\n / 4.0\n ),\n ] # type: List[cirq.OP_TREE]\n\n if not np.isclose(fsim_angles[\"phi\"], 0):\n g_f.append(cirq.CZ(q_0, q_1) ** (-fsim_angles[\"phi\"] / np.pi))\n\n if not np.isclose(fsim_angles[\"theta\"], 0):\n g_f.append(cirq.ISWAP(q_0, q_1) ** (-fsim_angles[\"theta\"] / (np.pi / 2.0)))\n\n g_f.append(\n cirq.Z(q_0)\n ** (-(fsim_angles[\"delta_minus_diag\"] - fsim_angles[\"delta_minus_off_diag\"]) / np.pi / 4.0)\n )\n g_f.append(\n cirq.Z(q_1)\n ** ((fsim_angles[\"delta_minus_diag\"] - fsim_angles[\"delta_minus_off_diag\"]) / np.pi / 4.0)\n )\n return g_f", "def qs_discretize(pqs_f):\n # does not integrate over x!\n # Becomes problem if segments are variable or inputs not uniform in x-direction\n return np.array([np.sum(np.split(pqs_f, GATE.ii_z[1:-1], axis=0)[i], axis=0)*dz/\\\n (GATE.z_coords[GATE.ii_z[i+1]]-GATE.z_coords[GATE.ii_z[i]])\\\n for i in range(n_z)])", "def hessian(f, s, p, dx=1e-6, gmix=False, k =['All']):\n import numpy\n N = (p.m['n'] - 1)\n H = numpy.zeros(shape=(N,N))\n for m in range(1, N + 1):\n for z in range(1, N + 1):\n H[m - 1, z - 1] = FD(f, s, p, 2, z, m, dx, gmix, k)\n \n return H", "def zzx_mod_gcd(f, g, **flags):\n if not (f or g):\n return [], [], []\n elif not f:\n return g, [], [1]\n elif not g:\n return f, [1], []\n\n n = zzx_degree(f)\n m = zzx_degree(g)\n\n cf = zzx_content(f)\n cg = zzx_content(g)\n\n gcd = igcd(cf, cg)\n\n f = [ c // gcd for c in f ]\n g = [ c // gcd for c in g ]\n\n if n == 0 or m == 0:\n return [gcd], f, g\n\n A = max(zzx_abs(f) + zzx_abs(g))\n b = igcd(poly_LC(f), poly_LC(g))\n\n B = int(ceil(2**n*A*b*int(sqrt(n + 
1))))\n k = int(ceil(2*b*log((n + 1)**n*A**(2*n), 2)))\n l = int(ceil(log(2*B + 1, 2)))\n\n prime_max = max(int(ceil(2*k*log(k))), 51)\n\n while True:\n while True:\n primes = set([])\n unlucky = set([])\n\n ff, gg, hh = {}, {}, {}\n\n while len(primes) < l:\n p = randprime(3, prime_max+1)\n\n if (p in primes) or (b % p == 0):\n continue\n\n F = gf_from_int_poly(f, p)\n G = gf_from_int_poly(g, p)\n\n H = gf_gcd(F, G, p)\n\n primes.add(p)\n\n ff[p] = F\n gg[p] = G\n hh[p] = H\n\n e = min([ gf_degree(h) for h in hh.itervalues() ])\n\n for p in set(primes):\n if gf_degree(hh[p]) != e:\n primes.remove(p)\n unlucky.add(p)\n\n del ff[p]\n del gg[p]\n del hh[p]\n\n if len(primes) < l // 2:\n continue\n\n while len(primes) < l:\n p = randprime(3, prime_max+1)\n\n if (p in primes) or (p in unlucky) or (b % p == 0):\n continue\n\n F = gf_from_int_poly(f, p)\n G = gf_from_int_poly(g, p)\n\n H = gf_gcd(F, G, p)\n\n if gf_degree(H) != e:\n unlucky.add(p)\n else:\n primes.add(p)\n\n ff[p] = F\n gg[p] = G\n hh[p] = H\n\n break\n\n fff, ggg = {}, {}\n\n for p in primes:\n fff[p] = gf_quo(ff[p], hh[p], p)\n ggg[p] = gf_quo(gg[p], hh[p], p)\n\n F, G, H = [], [], []\n\n crt_mm, crt_e, crt_s = crt1(primes)\n\n for i in xrange(0, e + 1):\n C = [ b * poly_nth(hh[p], i) for p in primes ]\n c = crt2(primes, C, crt_mm, crt_e, crt_s, True)\n\n H.insert(0, c)\n\n H = zzx_strip(H)\n\n for i in xrange(0, zzx_degree(f) - e + 1):\n C = [ poly_nth(fff[p], i) for p in primes ]\n c = crt2(primes, C, crt_mm, crt_e, crt_s, True)\n\n F.insert(0, c)\n\n for i in xrange(0, zzx_degree(g) - e + 1):\n C = [ poly_nth(ggg[p], i) for p in primes ]\n c = crt2(primes, C, crt_mm, crt_e, crt_s, True)\n\n G.insert(0, c)\n\n H_norm = zzx_l1_norm(H)\n\n F_norm = zzx_l1_norm(F)\n G_norm = zzx_l1_norm(G)\n\n if H_norm*F_norm <= B and H_norm*G_norm <= B:\n break\n\n return zzx_mul_const(H, gcd), F, G", "def zzx_quo(f, g):\n return zzx_div(f, g)[0]", "def half_gcdex(f, g):\n lev, dom, per, F, G = f.unify(g)\n s, h = dmp_half_gcdex(F, G, dom)\n return per(s), per(h)", "def fqs(tokens, q):\n\n totalLength = len(tokens)\n for length in range(1, totalLength + 1): #need to look at all possible subsets of tokens\n for start in range(totalLength - length + 1):\n\n end = start + length\n pTokens = tokens[start:end] #the subphrase we're examining\n\n b = 0.\n if not start: #phrase begins at the start of the sentence\n b += 1.\n if end == totalLength: #phrase ends at end of tokens\n b += 1.\n\n f = q**(2. - b)*(1 - q)**(length - 1.) 
#this is our formulation for probability\n\n #spit out a tuple of coordinates and prob of the phrase, but only if prob is nonzero\n if f:\n yield [start, end] , f", "def g(self, t, s, u):\n P, g = s\n return np.matrix([self.Toc * P])", "def construct_hessian(f, mesh=None, op=DefaultOptions()):\n if mesh is None:\n mesh = f.function_space().mesh()\n dim = mesh.topological_dimension()\n assert dim in (2, 3)\n P1_ten = TensorFunctionSpace(mesh, \"CG\", 1)\n n = FacetNormal(mesh)\n\n # Integration by parts applied to the Hessian definition\n if op.hessian_recovery == 'parts':\n H = TrialFunction(P1_ten)\n τ = TestFunction(P1_ten)\n a = inner(tau, H)*dx\n L = -inner(div(τ), grad(f))*dx\n for i in range(dim):\n for j in range(dim):\n L += τ[i, j]*n[j]*f.dx(i)*ds\n\n H = Function(P1_ten)\n solve(a == L, H, solver_parameters=op.hessian_solver_parameters)\n\n # Double L2 projection, using a mixed formulation for the gradient and Hessian\n elif op.hessian_recovery == 'dL2':\n P1_vec = VectorFunctionSpace(mesh, \"CG\", 1)\n V = P1_ten*P1_vec\n H, g = TrialFunctions(V)\n τ, φ = TestFunctions(V)\n a = inner(τ, H)*dx\n a += inner(φ, g)*dx\n a += inner(div(τ), g)*dx\n for i in range(dim):\n for j in range(dim):\n a += -g[i]*τ[i, j]*n[j]*ds\n\n # L = inner(grad(f), φ)*dx\n L = f*dot(φ, n)*ds - f*div(φ)*dx # enables f to be P0\n\n q = Function(V)\n solve(a == L, q) # TODO: Solver parameters?\n H = q.split()[0]\n\n return H", "def zzX_mul(f, g):\n if poly_univariate_p(f):\n return zzx_mul(f, g)\n\n if f == g:\n return zzX_sqr(f)\n\n if zzX_zero_p(f):\n return f\n if zzX_zero_p(g):\n return g\n\n df = zzX_degree(f)\n dg = zzX_degree(g)\n\n h, l = [], poly_level(f)-1\n\n for i in xrange(0, df+dg+1):\n coeff = zzX_zero(l)\n\n for j in xrange(max(0, i-dg), min(df, i)+1):\n coeff = zzX_add(coeff, zzX_mul(f[j], g[i-j]))\n\n h.append(coeff)\n\n return h", "def cqt(X, fs, n_bins=48, fmin=27.5, fmax=\"nyq\", gamma=20):\n # nyquist\n if fmax == \"nyq\":\n fmax = fs / 2.\n multiscale, shift, window_lens = nsgcwin(fmin, fmax, n_bins, fs,\n len(X), gamma)\n fbas = fs * np.cumsum(shift[1:]) / len(X)\n fbas = fbas[:len(window_lens) // 2 - 1]\n bins = window_lens.shape[0] // 2 - 1\n window_lens[1:bins + 1] = window_lens[bins + 2]\n window_lens[bins + 2:] = window_lens[1:bins + 1][::-1]\n norm = 2. 
* window_lens[:bins + 2] / float(len(X))\n norm = np.concatenate((norm, norm[1:-1][::-1]))\n multiscale = [norm[ii] * multiscale[ii] for ii in range(2 * (bins + 1))]\n\n c = nsgtf_real(X, multiscale, shift, window_lens)\n c_dc = c[0]\n c_nyq = c[-1]\n c_sub = c[1:-1]\n c = np.vstack(c_sub)\n return c, c_dc, c_nyq, multiscale, shift, window_lens", "def calc_f_g(mu, x, t, ro, inv_a):\n z = inv_a*pow(x,2)\n f = 1 - pow(x,2)/ro*stump_C(z) \n g = t - 1/sqrt(mu)*pow(x,3)*stump_S(z)\n return f, g", "def automorphism_group_QQ_fixedpoints(rational_function, return_functions=False, iso_type=False):\n\n if rational_function.parent().is_field():\n K = rational_function.parent()\n R = K.ring()\n else:\n R = rational_function.parent()\n K = R.fraction_field()\n\n F = R.base_ring()\n\n if F != QQ and F!= ZZ:\n raise TypeError(\"coefficient ring is not the rational numbers or the integers\")\n\n z = R.gen(0)\n phi = R.fraction_field()(rational_function)\n\n f = phi.numerator()\n g = phi.denominator()\n\n #scale f,g so both have integer coefficients\n N = lcm(f.denominator(),g.denominator())\n f = f*N\n g = g*N\n N = gcd(gcd(f.coefficients()), gcd(g.coefficients()))\n f = f/N\n g = g/N\n\n d = max(f.degree(), g.degree())\n\n h = f - g*z\n\n if return_functions:\n elements = [z]\n else:\n elements = [matrix(F, 2, [1,0,0,1])]\n\n rational_roots = h.roots(multiplicities = False)\n\n min_poly = 1\n\n #check if infinity is a fixed point\n if g.degree() < d: #then infinity is a fixed point\n #find elements in W of the form (infinity, y)\n #where W is the set of F-rational points (x,y) such that\n #x is fixed by phi and phi(y)=x\n for T in g.roots(multiplicities=False):\n alpha = T\n zeta = -1\n s = (zeta*z + alpha*(1 - zeta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2, [zeta, alpha*(1-zeta), 0, 1]))\n\n for S in h.roots():\n min_poly = min_poly*(z - S[0])**(S[1])\n\n if g.degree() < d: #then infinity is a fixed point so (infinity, S[0])\n alpha = S[0] # is in Z_(1,1)**2\n zeta = -1\n s = (zeta*z + alpha*(1 - zeta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2, [zeta, alpha*(1-zeta), 0, 1]))\n\n #now compute points in W\n preimage = f - g*S[0]\n if preimage.degree() < d: #infinity is in W\n zeta = -1\n alpha = S[0]\n s = (zeta*z + alpha*(1 - zeta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2, [zeta, alpha*(1-zeta), 0, 1]))\n for T in preimage.roots(multiplicities=False):\n if T != S[0]:\n zeta = -1\n alpha = S[0]\n beta = T\n s = ( (alpha - zeta*beta)*z - (alpha*beta)*(1 - zeta))/((1 - zeta)*z + (alpha*zeta - beta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2,\n [(alpha - zeta*beta), - (alpha*beta)*(1 - zeta),\n (1 - zeta), (alpha*zeta - beta)]))\n\n #first look at rational fixed points\n #Subsets is ok since we just needed unordered pairs\n for S in Subsets(rational_roots, 2):\n zeta = -1\n alpha = S[0]\n beta = S[1]\n s = ( (alpha - zeta*beta)*z - (alpha*beta)*(1 - zeta))/((1 - zeta)*z + (alpha*zeta - beta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2,\n [(alpha - zeta*beta), - (alpha*beta)*(1 - zeta),\n (1 - zeta), (alpha*zeta - beta)]))\n\n\n #now consider 2-periodic points\n psi = phi(phi(z))\n f2 = psi.numerator()\n g2 = psi.denominator()\n period2_points = [x for x in (f2 - 
z*g2).roots(multiplicities=False) if not x in rational_roots]\n for S in Subsets(period2_points, 2):\n zeta = -1\n alpha = S[0]\n beta = S[1]\n s = ( (alpha - zeta*beta)*z - (alpha*beta)*(1 - zeta))/((1 - zeta)*z + (alpha*zeta - beta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2,\n [(alpha - zeta*beta), - (alpha*beta)*(1 - zeta),\n (1 - zeta), (alpha*zeta - beta)]))\n if g2.degree() < f2.degree() and g.degree() == d: #infinity has period 2\n for alpha in period2_points:\n zeta = -1\n s = (zeta*z + alpha*(1 - zeta))\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(s)\n else:\n elements.append(matrix(F, 2, [zeta, alpha*(1-zeta), 0, 1]))\n factors = (f2 - z*g2).factor()\n L1 = NumberField(z**2 + 1,'i')\n i=L1.gen(0)\n L2 = NumberField(z**2 + 3,'isqrt3')\n isqrt3 = L2.gen(0)\n for psi in factors:\n if psi[0].degree() == 2:\n a = psi[0][2]\n b = psi[0][1]\n c = psi[0][0]\n disc = b**2 - 4*a*c\n s = (-b*z - 2*c)/(2*a*z + b)\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(K(s))\n else:\n elements.append(matrix(F, 2, [-b,-2*c, 2*a, b]))\n if is_square(-disc): #psi[0] generates Q(i)\n alpha = psi[0].change_ring(L1).roots()[0][0]\n beta = alpha.trace() - alpha\n for zeta in [i, -i]:\n a = (alpha - zeta*beta)/(1 - zeta)\n d = (alpha*zeta - beta)/(1 - zeta)\n if a in F and d in F:\n a = F(a)\n d = F(d)\n b = F(-alpha*beta)\n s = ( a*z + b)/(z + d)\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(K(s))\n else:\n elements.append(matrix(F, 2, [a,b, 1, d]))\n elif is_square(-3*disc): #psi[0] generates Q(zeta_3)\n alpha = psi[0].change_ring(L2).roots()[0][0]\n beta = alpha.trace() - alpha\n for zeta in [F(1)/F(2)*(1 + isqrt3), F(1)/F(2)*(1 - isqrt3),F(1)/F(2)*(-1 + isqrt3), F(1)/F(2)*(-1 - isqrt3)]:\n a = (alpha - zeta*beta)/(1 - zeta)\n d = (alpha*zeta - beta)/(1 - zeta)\n if a in F and d in F:\n a = F(a)\n d = F(d)\n b = F(-alpha*beta)\n s = ( a*z + b)/(z + d)\n if s(phi(z)) == phi(s(z)):\n if return_functions:\n elements.append(K(s))\n else:\n elements.append(matrix(F, 2, [a,b, 1, d]))\n\n if iso_type:\n return(elements, which_group(elements))\n return(elements)", "def zzX_sub(f, g):\n if poly_univariate_p(f):\n return zzx_sub(f, g)\n\n if zzX_zero_p(g):\n return f\n if zzX_zero_p(f):\n return zzX_neg(g)\n\n df = zzX_degree(f)\n dg = zzX_degree(g)\n\n if df == dg:\n return zzX_strip([ zzX_sub(a, b) for a, b in zip(f, g) ])\n else:\n k = abs(df - dg)\n\n if df > dg:\n h, f = f[:k], f[k:]\n else:\n h, g = zzX_neg(g[:k]), g[k:]\n\n return h + [ zzX_sub(a, b) for a, b in zip(f, g) ]", "def gaussSeidel(f, x0, N=int(1e3), eps=1e-6, solutionHistory=False):\r\n start_time = perf_counter()\r\n xn = x0\r\n fn = f(x0)\r\n\r\n if solutionHistory:\r\n xHist = xn.copy()\r\n funHist = np.array([fn])\r\n\r\n for n in range(N):\r\n xlast = xn.copy()\r\n for i in range(len(xn)):\r\n\r\n def dirFcn(x):\r\n xInternal = xn.copy()\r\n xInternal[i] = x\r\n return f(xInternal)\r\n\r\n res = minimize_scalar(dirFcn)\r\n xn[i] = res.x\r\n\r\n if solutionHistory:\r\n xHist = np.vstack((xHist, xn))\r\n funHist = np.vstack((funHist, res.fun))\r\n # debug\r\n # print(f\"Gauss: {f(xn)}, it: {n+1}\")\r\n if (f(xn) - 0 < eps):\r\n break\r\n\r\n end_time = perf_counter()\r\n retval = {\r\n 'x': xn,\r\n 'fun': res.fun,\r\n 'nit': n + 1,\r\n 'time': end_time - start_time\r\n }\r\n\r\n if solutionHistory:\r\n retval['xHist'] = xHist\r\n retval['funHist'] = funHist\r\n\r\n\r\n return retval", "def quo_ground(f, 
c):\n return f.per(dmp_quo_ground(f.rep, f.dom.convert(c), f.lev, f.dom))", "def create_clique_tree(g):\r\n \r\n g2 = g.copy()\r\n clq_ind = []# For each clique, a list of nodes whose elimination would lead to each from that clique\r\n tree = nx.Graph()\r\n tree.add_nodes_from(range(g.number_of_nodes()))\r\n \r\n while g2.number_of_nodes() != 0:\r\n n = min_fill_node(g2) # other option is min_neighbor_node\r\n eliminate_var(n, g2,clq_ind,tree)\r\n tree = prune_tree(tree)\r\n return tree", "def test_qft_reconstruction(self, interface):\n circuit = qft_circuit(3, interface=interface)\n bits, recipes = circuit()\n shadow = ClassicalShadow(bits, recipes)\n\n state = shadow.global_snapshots()\n assert state.shape == (10000, 8, 8)\n\n state = np.mean(state, axis=0)\n expected = np.exp(np.arange(8) * 2j * np.pi / 8) / np.sqrt(8)\n expected = np.outer(expected, np.conj(expected))\n\n assert qml.math.allclose(state, expected, atol=1e-1)", "def simplify (self):\n if (self.debug): print(f'enter fraction.simplify')\n hcf = find_hcf(self.value[0], self.value[1])\n self.value = (self.value[0] // hcf.product(), self.value[1] // hcf.product())\n return", "def trans_chisq(m):\n m = asmatrix(m)\n grand_sum, row_sums, col_sums = m.sum(), m.sum(1), m.sum(0)\n result = m * sqrt(grand_sum)\n result /= row_sums\n result /= sqrt(col_sums)\n return result", "def singularities(f, x):\n from ..functions import log, sign\n from ..solvers import solve\n from .limits import Limit\n\n f, x = sympify(f), sympify(x)\n guess, res = set(), set()\n\n assert x.is_Symbol\n\n if f.is_number:\n return set()\n if f.is_polynomial(x):\n return set()\n if f.func in (Add, Mul):\n guess = guess.union(*[singularities(a, x) for a in f.args])\n elif isinstance(f, Pow):\n if f.exp.is_number and f.exp.is_negative:\n guess = {s[x] for s in solve(f.base, x) if s[x].is_real}\n else:\n guess |= singularities(log(f.base)*f.exp, x)\n elif f.func in (log, sign) and len(f.args) == 1:\n guess |= singularities(f.args[0], x)\n guess |= {s[x] for s in solve(f.args[0], x) if s[x].is_real}\n else:\n raise NotImplementedError\n\n for s in guess:\n l = Limit(f, x, s, dir=Reals)\n try:\n r = l.doit()\n if r.is_infinite:\n raise PoleError\n raise NotImplementedError\n except PoleError:\n res.add(s)\n\n return res", "def gravg(f, seismic):\n\n fk = seismic.KneeFrequency\n a = seismic.LowFrequencyLevel\n gamma = seismic.Gamma\n rho = seismic.Rho\n # factor to account for correlation between masses\n # and the height of the mirror above the ground\n beta = seismic.Beta\n h = seismic.TestMassHeight\n c_rayleigh = seismic.RayleighWaveSpeed\n\n if 'Omicron' in seismic:\n omicron = seismic.Omicron\n else:\n omicron = 1\n\n # a sort of theta function (Fermi distr.)\n coeff = 3**(-gamma*f)/(3**(-gamma*f) + 3**(-gamma*fk))\n\n # modelization of seismic noise (vertical)\n ground = a*coeff + a*(1-coeff)*(fk/f)**2\n if 'Site' in seismic and seismic.Site == 'LLO':\n ground = a*coeff*(fk/f) + a*(1-coeff)*(fk/f)**2\n\n # effective GG spring frequency, with G gravitational\n fgg = sqrt(const.G * rho) / (2*pi)\n\n # fixed numerical factors, 5/9/06, PF\n n = (beta*2*pi*(fgg**2/f**2)*ground)**2\n\n # The following two terms are corrections due to Jan Harms\n # https://git.ligo.org/rana-adhikari/CryogenicLIGO/issues/45\n # (1) projection of NN force onto the direction of the arm\n n = n * 1/2\n # (2) exponential cutoff at frequency (seismic speed)/(test mass height)\n n = n * exp(-4*pi*f*h/c_rayleigh)\n\n # Feedforward cancellation\n n /= (omicron**2)\n\n return n", "def 
test_multi_branches(self):\n sgf = \"\"\"\n (;FF[4]GM[1]SZ[19];B[aa];W[bb](;B[cc];W[dd](;B[ad];W[bd])\n (;B[ee];W[ff]))\n (;B[hh];W[gg])\n (;B[ii];W[jj]))\n \"\"\"\n coll = parseSgf(sgf)\n self.assertEqual(coll,\n [[{'SZ': '19', 'GM': '1', 'FF': '4'}, {'B': 'aa'},\n {'W': 'bb'},\n [[{'B': 'cc'}, {'W': 'dd'}, [[{'B': 'ad'}, {'W': 'bd'}], [{'B': 'ee'}, {'W': 'ff'}]]],\n [{'B': 'hh'}, {'W': 'gg'}],\n [{'B': 'ii'}, {'W': 'jj'}]],\n ]])\n self.assertEqual(self._trim_sgf_whitespace(sgf), makeSgf(coll))", "def zzX_gcd(f, g, **flags):\n return zzX_cofactors(f, g, **flags)[0]", "def refine_real_root(f, s, t, eps=None, steps=None, fast=False):\n return dmp_refine_real_root(f.rep, s, t, f.lev, f.dom, eps=eps, steps=steps, fast=fast)", "def qfFunction(f, x, N):\r\n return ssstats.binom.ppf(x, N, f)", "def sqf_norm(f):\n s, g, r = dmp_sqf_norm(f.rep, f.lev, f.dom)\n return s, f.per(g), f.per(r, dom=f.dom.dom)", "def s(self, q):\n if isinstance(q, QuantumRegister):\n instructions = InstructionSet()\n for j in range(q.size):\n instructions.add(self.s((q, j)))\n return instructions\n\n self._check_qubit(q)\n return self._attach(SGate(q, self))", "def test_simple_branches(self):\n sgf = \"\"\"\n (;FF[4]GM[1]SZ[19];B[aa];W[bb](;B[cc];W[dd];B[ee])(;B[hh];W[hg]))\n \"\"\"\n coll = parseSgf(sgf)\n self.assertEqual(coll,\n [[{'SZ': '19', 'GM': '1', 'FF': '4'}, {'B': 'aa'}, {'W': 'bb'},\n [[{'B': 'cc'}, {'W': 'dd'}, {'B': 'ee'}], [{'B': 'hh'}, {'W': 'hg'}]]]])\n self.assertEqual(self._trim_sgf_whitespace(sgf), makeSgf(coll))", "def rem(f, g):\n lev, dom, per, F, G = f.unify(g)\n return per(dmp_rem(F, G, lev, dom))", "def resultant(f, g):\n lev, dom, per, F, G = f.unify(g)\n return per(dmp_resultant(F, G, lev, dom), lower=True)", "def hoggar_fiducial():\n fiducial = qt.Qobj(np.array([-1 + 2j, 1, 1, 1, 1, 1, 1, 1])).unit()\n fiducial.dims = [[2,2,2],[1,1,1]]\n return fiducial", "def algo_4_3(sf,FV):\n F = set()\n V = FV.keys()\n \n for s in sf:\n if isVariable(s, V):\n if EMPTY_SYMBOL_UNI in FV[s]:\n F |= (FV[s]- set([EMPTY_SYMBOL_UNI]))\n else:\n F |= FV[s]\n break\n else:\n assert s[0] == s[-1]\n assert s[0] == \"'\" or s[0] == '\"'\n\n s = s[1:-1] #字面量 去除引号\n F.add(s[0]) #取第一个符号作为首终结符\n break\n return F", "def optg(self, ff='mmff94', n=1000):\n immff = F\n if ff in ['mmff94']:\n props = AllChem.MMFFGetMoleculeProperties(self.mol)\n immff = T\n angs = _get_angles_csp2(self.mol)\n self.es = []\n if not hasattr(self,'cids'):\n cids = [-1]\n else:\n cids = self.cids\n for cid in cids:\n for cycle in [0,1]:\n \"\"\"\n minization is split into 2 parts\n a) The first part tries to correct some ill local geometries in conformers,\n realized through constraints in angles and will be iterated for maximally\n 200 steps;\n b) Normal geometry minization without constraints, number of iterations: `n-200\n \"\"\"\n if immff:\n ff = AllChem.MMFFGetMoleculeForceField(self.mol, props, confId=cid)\n else:\n ff = AllChem.UFFGetMoleculeForceField(self.mol, confId=cid)\n ff.Initialize()\n if cycle == 0:\n _n = 200\n ## The two lines below are essential to obtain reasonable conformer geometries\n ## If not present, then conformer with some angle centered on sp2-C may be ~90\n ## degrees\n for i,j,k in angs:\n ff.MMFFAddAngleConstraint(i,j,k, F, 95, 145, 9999.) 
# relative=False\n ## Here, in essense, we manually constrain such angles to be within the range\n ## of [95,145] degree\n else:\n _n = n - 200\n if n > 0:\n converged = ff.Minimize(maxIts=_n, forceTol=0.0001, energyTol=1e-05)\n #RETURNS: 0 if the optimization converged, 1 if more iterations are required.\n self.es.append( ff.CalcEnergy() )\n #res = AllChem.MMFFOptimizeMoleculeConfs(self.mol, numThreads=1, maxIters=n)\n self.optg0 = T", "def suprec(f, g, Bc=None):\n from numpy import product\n if Bc is None: Bc = secross()\n n = product(f.shape)\n y = cerode(f,g,Bc,n);\n return y", "def pruneRecursiveFeatures(v, newFeatures, s):\n # create the node feature matrix holding both v and new features\n allFeatures = {}\n for node in v.keys():\n allFeatures[node] = []\n appendFeatures(allFeatures, v)\n appendFeatures(allFeatures, newFeatures)\n # vertical logarithmic binning\n p = 0.5\n logFeatures = verticalLogBinning(allFeatures, p)\n\n # construct feature graph (s-friend)\n numFeatures = len(logFeatures.values()[0])\n featureGraph = TUNGraph.New() # the s-friend feature graph\n for i in range(numFeatures):\n featureGraph.AddNode(i)\n for i in range(numFeatures):\n featureI = getIthFeature(logFeatures, i)\n for j in range(i + 1, numFeatures):\n featureJ = getIthFeature(logFeatures, j)\n if isSimilar(featureI, featureJ, s):\n if not featureGraph.IsEdge(i, j):\n featureGraph.AddEdge(i, j)\n # summarize connected component\n retainedIdx = []\n wcc = TCnComV()\n GetWccs(featureGraph, wcc) # get all weakly connected components\n for i in range(0, wcc.Len()):\n retainedIdx.append(wcc[i][0])\n retainedIdx = sorted(retainedIdx)\n # return retained features\n retained = {}\n for node in v.keys():\n retained[node] = []\n startIdxNewFeatures = len(v.values()[0])\n for i in retainedIdx:\n # if the retained feature is from new features, add to retained feature\n if i >= startIdxNewFeatures:\n appendFeatures(retained, newFeatures, i - startIdxNewFeatures)\n return retained" ]
[ "0.6348645", "0.6082395", "0.60524696", "0.60362184", "0.5977433", "0.5936218", "0.5890577", "0.58653134", "0.5856254", "0.57929945", "0.5784882", "0.5635512", "0.56248885", "0.554542", "0.55379504", "0.5530155", "0.5527634", "0.55048215", "0.54990107", "0.5371403", "0.53676456", "0.53638583", "0.5350854", "0.53342074", "0.53170127", "0.53049266", "0.5300341", "0.52219623", "0.5218056", "0.5211141", "0.5193275", "0.5189077", "0.51665175", "0.5145722", "0.51420975", "0.5138823", "0.51231134", "0.5090266", "0.50823", "0.50795317", "0.5077796", "0.507433", "0.5073059", "0.5071542", "0.507093", "0.50632644", "0.5059606", "0.5050104", "0.50483936", "0.504326", "0.5005861", "0.49990052", "0.49954462", "0.49858272", "0.49715015", "0.49670076", "0.49509445", "0.49456272", "0.4945366", "0.4945263", "0.49448067", "0.49365565", "0.49304837", "0.49224764", "0.49217433", "0.49186006", "0.49156106", "0.4903831", "0.49034372", "0.49013916", "0.49007267", "0.48957026", "0.48914102", "0.48864186", "0.48839834", "0.4882798", "0.48725688", "0.48664227", "0.48653948", "0.48548588", "0.48519406", "0.48482978", "0.4843348", "0.4838339", "0.48344803", "0.48344576", "0.48293754", "0.48232067", "0.48191777", "0.48181924", "0.48168403", "0.48116183", "0.4808764", "0.48059613", "0.47935915", "0.47901225", "0.4782", "0.47757307", "0.47747648", "0.4768052" ]
0.6371153
0
Compute squarefree decomposition of ``f`` in a zero-characteristic ring ``K[X]``.
def _rr_yun0_sqf_list(self, f):
    if f.is_ground:
        return []

    result, count = [], 1
    qs = [f.diff(x) for x in self.gens]

    g = f
    for q in qs:
        g = self.gcd(g, q)

    while f != 1:
        qs = [q // g for q in qs]
        f //= g
        qs = [q - f.diff(x) for x, q in zip(self.gens, qs)]

        g = f
        for q in qs:
            g = self.gcd(g, q)
        if g != 1:
            result.append((g, count))

        count += 1

    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def vsfun(Q_slm, theta, phi,f=[]):\n vsf_th=numpy.zeros(theta.shape, dtype='complex')\n vsf_ph=numpy.zeros(theta.shape, dtype='complex')\n for (s,l,m) in Q_slm:\n vsh_th,vsh_ph=K(s, l, m, theta, phi)\n c_slm=Q_slm.getBysnm(s, l, m) if not(f) else Q_slm.getBysnm(s, l, m)(f)\n vsf_th=vsf_th+c_slm*vsh_th\n vsf_ph=vsf_ph+c_slm*vsh_ph\n return vsf_th, vsf_ph", "def f(k):\n return k * k * pk(k, suppression) * spherical_jn(0, k * r)", "def f(k):\n return k * k * k * k * pk(k, suppression) * spherical_jn(0, k * r)", "def f(k):\n return k * k * k * pk(k, suppression) * spherical_jn(1, k * r)", "def f(k):\n return k * k * k * k * pk(k, suppression) * spherical_jn(2, k * r)", "def roots_quintic(f):\n result = []\n\n coeff_5, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n if not all(coeff.is_Rational for coeff in (coeff_5, coeff_4, p_, q_, r_, s_)):\n return result\n\n if coeff_5 != 1:\n f = Poly(f / coeff_5)\n _, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n # Cancel coeff_4 to form x^5 + px^3 + qx^2 + rx + s\n if coeff_4:\n p = p_ - 2*coeff_4*coeff_4/5\n q = q_ - 3*coeff_4*p_/5 + 4*coeff_4**3/25\n r = r_ - 2*coeff_4*q_/5 + 3*coeff_4**2*p_/25 - 3*coeff_4**4/125\n s = s_ - coeff_4*r_/5 + coeff_4**2*q_/25 - coeff_4**3*p_/125 + 4*coeff_4**5/3125\n x = f.gen\n f = Poly(x**5 + p*x**3 + q*x**2 + r*x + s)\n else:\n p, q, r, s = p_, q_, r_, s_\n\n quintic = PolyQuintic(f)\n\n # Eqn standardized. Algo for solving starts here\n if not f.is_irreducible:\n return result\n f20 = quintic.f20\n # Check if f20 has linear factors over domain Z\n if f20.is_irreducible:\n return result\n # Now, we know that f is solvable\n for _factor in f20.factor_list()[1]:\n if _factor[0].is_linear:\n theta = _factor[0].root(0)\n break\n d = discriminant(f)\n delta = sqrt(d)\n # zeta = a fifth root of unity\n zeta1, zeta2, zeta3, zeta4 = quintic.zeta\n T = quintic.T(theta, d)\n tol = S(1e-10)\n alpha = T[1] + T[2]*delta\n alpha_bar = T[1] - T[2]*delta\n beta = T[3] + T[4]*delta\n beta_bar = T[3] - T[4]*delta\n\n disc = alpha**2 - 4*beta\n disc_bar = alpha_bar**2 - 4*beta_bar\n\n l0 = quintic.l0(theta)\n Stwo = S(2)\n l1 = _quintic_simplify((-alpha + sqrt(disc)) / Stwo)\n l4 = _quintic_simplify((-alpha - sqrt(disc)) / Stwo)\n\n l2 = _quintic_simplify((-alpha_bar + sqrt(disc_bar)) / Stwo)\n l3 = _quintic_simplify((-alpha_bar - sqrt(disc_bar)) / Stwo)\n\n order = quintic.order(theta, d)\n test = (order*delta.n()) - ( (l1.n() - l4.n())*(l2.n() - l3.n()) )\n # Comparing floats\n if not comp(test, 0, tol):\n l2, l3 = l3, l2\n\n # Now we have correct order of l's\n R1 = l0 + l1*zeta1 + l2*zeta2 + l3*zeta3 + l4*zeta4\n R2 = l0 + l3*zeta1 + l1*zeta2 + l4*zeta3 + l2*zeta4\n R3 = l0 + l2*zeta1 + l4*zeta2 + l1*zeta3 + l3*zeta4\n R4 = l0 + l4*zeta1 + l3*zeta2 + l2*zeta3 + l1*zeta4\n\n Res = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n Res_n = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n\n # Simplifying improves performance a lot for exact expressions\n R1 = _quintic_simplify(R1)\n R2 = _quintic_simplify(R2)\n R3 = _quintic_simplify(R3)\n R4 = _quintic_simplify(R4)\n\n # hard-coded results for [factor(i) for i in _vsolve(x**5 - a - I*b, x)]\n x0 = z**(S(1)/5)\n x1 = sqrt(2)\n x2 = sqrt(5)\n x3 = sqrt(5 - x2)\n x4 = I*x2\n x5 = x4 + I\n x6 = I*x0/4\n x7 = x1*sqrt(x2 + 5)\n sol = [x0, -x6*(x1*x3 - x5), x6*(x1*x3 + x5), -x6*(x4 + x7 - I), x6*(-x4 + x7 + I)]\n\n R1 = R1.as_real_imag()\n R2 = R2.as_real_imag()\n R3 = R3.as_real_imag()\n R4 = R4.as_real_imag()\n\n for i, s in enumerate(sol):\n Res[1][i] = _quintic_simplify(s.xreplace({z: R1[0] 
+ I*R1[1]}))\n Res[2][i] = _quintic_simplify(s.xreplace({z: R2[0] + I*R2[1]}))\n Res[3][i] = _quintic_simplify(s.xreplace({z: R3[0] + I*R3[1]}))\n Res[4][i] = _quintic_simplify(s.xreplace({z: R4[0] + I*R4[1]}))\n\n for i in range(1, 5):\n for j in range(5):\n Res_n[i][j] = Res[i][j].n()\n Res[i][j] = _quintic_simplify(Res[i][j])\n r1 = Res[1][0]\n r1_n = Res_n[1][0]\n\n for i in range(5):\n if comp(im(r1_n*Res_n[4][i]), 0, tol):\n r4 = Res[4][i]\n break\n\n # Now we have various Res values. Each will be a list of five\n # values. We have to pick one r value from those five for each Res\n u, v = quintic.uv(theta, d)\n testplus = (u + v*delta*sqrt(5)).n()\n testminus = (u - v*delta*sqrt(5)).n()\n\n # Evaluated numbers suffixed with _n\n # We will use evaluated numbers for calculation. Much faster.\n r4_n = r4.n()\n r2 = r3 = None\n\n for i in range(5):\n r2temp_n = Res_n[2][i]\n for j in range(5):\n # Again storing away the exact number and using\n # evaluated numbers in computations\n r3temp_n = Res_n[3][j]\n if (comp((r1_n*r2temp_n**2 + r4_n*r3temp_n**2 - testplus).n(), 0, tol) and\n comp((r3temp_n*r1_n**2 + r2temp_n*r4_n**2 - testminus).n(), 0, tol)):\n r2 = Res[2][i]\n r3 = Res[3][j]\n break\n if r2 is not None:\n break\n else:\n return [] # fall back to normal solve\n\n # Now, we have r's so we can get roots\n x1 = (r1 + r2 + r3 + r4)/5\n x2 = (r1*zeta4 + r2*zeta3 + r3*zeta2 + r4*zeta1)/5\n x3 = (r1*zeta3 + r2*zeta1 + r3*zeta4 + r4*zeta2)/5\n x4 = (r1*zeta2 + r2*zeta4 + r3*zeta1 + r4*zeta3)/5\n x5 = (r1*zeta1 + r2*zeta2 + r3*zeta3 + r4*zeta4)/5\n result = [x1, x2, x3, x4, x5]\n\n # Now check if solutions are distinct\n\n saw = set()\n for r in result:\n r = r.n(2)\n if r in saw:\n # Roots were identical. Abort, return []\n # and fall back to usual solve\n return []\n saw.add(r)\n\n # Restore to original equation where coeff_4 is nonzero\n if coeff_4:\n result = [x - coeff_4 / 5 for x in result]\n return result", "def F(k0):\r\n assert((not np.any(np.isnan(k0))) and np.all(np.isfinite(k0)) and\r\n np.all(np.isreal(k0))),\\\r\n \"k0 must be real, finite and not NaN\"\r\n assert(len(k0) == 4), \"K must have length 4\"\r\n assert(hasattr(F, '__call__')), \\\r\n \"F must be a callable function\"\r\n k1 = np.array([k0[0], k0[1]])\r\n k2 = np.array([k0[2], k0[3]])\r\n f1 = k1 - np.array([f(t + dt / 3,\r\n qn + (dt / 12) * (5 * k1 - k2), r, e, w)])\r\n f2 = k2 - np.array([f(t + dt,\r\n qn + (dt / 4) * (3 * k1 + k2), r, e, w)])\r\n f3 = np.reshape(np.array([f1, f2]), (4,))\r\n return f3", "def chi2sf(x, k):", "def is_squarefree(self, f):\n if f.is_ground:\n return True\n g = f\n for x in self.gens:\n g = self.gcd(g, f.diff(x))\n if g.is_ground:\n return True\n return False", "def F(cst, x):\n [u0, v0, u1, v1, u2, v2, coeffs] = cst\n [u, v, g1, g2, g3] = x\n a = g1*u1 - u0\n b = g2*u2 - u0\n c = g3*u - u0\n l = g1*v1 - v0 \n m = g2*v2 - v0\n n = g3*v - v0\n r = g1 - 1\n s = g2 - 1\n t = g3 - 1\n return np.array([\n coeffs[0]*(a**2-l**2) + 2*coeffs[1]*(a*b-l*m) + coeffs[2]*(b**2-m**2) + 2*coeffs[3]*(a*c-l*n) + 2*coeffs[4]*(b*c-m*n) + c**2 - n**2,\n coeffs[0]*(l**2-r**2) + 2*coeffs[1]*(l*m-r*s) + coeffs[2]*(m**2-s**2) + 2*coeffs[3]*(l*n-r*t) + 2*coeffs[4]*(m*n-s*t) + n**2 - t**2,\n coeffs[0]*a*l + coeffs[1]*(l*b+m*a) + coeffs[2]*m*b + coeffs[3]*(l*c+n*a) + coeffs[4]*(m*c+b*n) + c*n,\n coeffs[0]*a*r + coeffs[1]*(r*b+s*a) + coeffs[2]*s*b + coeffs[3]*(r*c+t*a) + coeffs[4]*(s*c+b*t) + c*t,\n coeffs[0]*r*l + coeffs[1]*(l*s+m*r) + coeffs[2]*m*s + coeffs[3]*(l*t+n*r) + coeffs[4]*(m*t+s*n) + t*n \n ])", 
"def zzx_compose_term(f, k):\n if k <= 0:\n raise ValueError(\"'k' must be positive, got %s\" % k)\n if k == 1 or not f:\n return f\n\n result = [f[0]]\n\n for coeff in f[1:]:\n result.extend([0]*(k-1))\n result.append(coeff)\n\n return result", "def F2K(T_F):\n return 5./9.*(T_F+459.67)", "def compute_clique_potentials(self,F):\r\n\r\n for i in self.nodes():\r\n self.node[i]['fac'] = factor([],[],[])\r\n \r\n for f in F.factors: # assign each factor to a clique\r\n for j,data in self.nodes_iter(data=True):\r\n if len(scipy.setdiff1d(f.var,data['clique']) ) ==0:\r\n self.node[j]['fac'] *= f\r\n self.nop += scipy.prod(self.node[j]['fac'].card)\r\n break", "def zzX_compose_term(f, K):\n def rec_compose(g, l):\n if poly_univariate_p(g):\n return zzx_compose_term(g, K[l])\n\n if K[l] <= 0:\n raise ValueError(\"All 'K[i]' must be positive, got %s\" % K[l])\n\n g = [ rec_compose(c, l+1) for c in g ]\n result, L = [g[0]], poly_level(g) - 1\n\n for coeff in g[1:]:\n for i in xrange(1, K[l]):\n result.append(zzX_zero(L))\n\n result.append(coeff)\n\n return result\n\n if all([ k == 1 for k in K ]):\n return f\n else:\n return rec_compose(f, 0)", "def sqf_norm(self, f):\n domain = self.domain\n\n if not domain.is_AlgebraicField:\n raise DomainError(f'ground domain must be algebraic, got {domain}')\n\n new_ring = self.to_ground().inject(*domain.symbols, front=True)\n g = domain.mod.set_ring(new_ring)\n s = 0\n\n while True:\n h = f.inject(front=True)\n r = g.resultant(h)\n\n if r.is_squarefree:\n return s, f, r\n f = f.compose({x: x - domain.unit for x in self.gens})\n s += 1", "def zzX_eval_for(f, k, x):\n if k < 0:\n k += poly_level(f) + 1\n\n if k == 1:\n return zzX_eval(f, x)\n\n def rec_eval(g, l):\n if l == k:\n return zzX_eval(g, x)\n else:\n return zzX_strip([ rec_eval(coeff, l+1) for coeff in g ])\n\n return rec_eval(f, 1)", "def sqf_part(f):\n return f.per(dmp_sqf_part(f.rep, f.lev, f.dom))", "def f2chi(f, kptsdf, c, arbfield=1.0):\n # Since the solution we obtain from cg and from iterative scheme is F_k where chi_k = eE/kT * f0(1-f0) * F_k\n # then we need to bring these factors back in to get the right units\n f0 = np.squeeze(kptsdf['k_FD'].values)\n prefactor = arbfield * c.e / c.kb_joule / c.T * f0 * (1 - f0)\n chi = np.squeeze(f) * np.squeeze(prefactor)\n return chi", "def kf(f, amp, tr_fac, del_fac, t):\n T = 1 / f\n delay = del_fac * T\n tr = tr_fac * T\n beta = 1 - (2 * tr / T)\n\n t_T1 = 0\n t_T2 = (T * (1 - beta) / 4)\n t_T3 = (T * (1 + beta) / 4)\n t_T4 = (T * (3 - beta) / 4)\n t_T5 = (T * (3 + beta) / 4)\n t_T6 = T\n\n t = np.mod(t - delay, T)\n if t_T1 <= t < t_T2:\n f_value = amp * np.sin((2 * np.pi * t) / (T * (1 - beta)))\n elif t_T2 <= t < t_T3:\n f_value = amp\n elif t_T3 <= t < t_T4:\n f_value = amp * np.sin(\n (2 * np.pi * (t - (beta * T / 2))) / (T * (1 - beta)))\n elif t_T4 <= t < t_T5:\n f_value = -amp\n elif t_T5 <= t <= t_T6:\n f_value = amp * np.sin((2 * np.pi * (t - beta * T)) / (T * (1 - beta)))\n return f_value\n # ------------------------------------------\n\n\n # sinusoidal (sinusiodal) function that is continuous in acc", "def refine_complex_root(f, s, t, eps=None, steps=None, fast=False):\n return dmp_refine_complex_root(f.rep, s, t, f.lev, f.dom, eps=eps, steps=steps, fast=fast)", "def getEssentialMatrix(K, F):\n E = np.dot(K.T, np.dot(F, K))\n u, s, v = np.linalg.svd(E)\n\n # We correct the singular values of the E matrix\n s_new = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 0]]).reshape(3, 3)\n final_E = np.dot(u, np.dot(s_new, v))\n return final_E", "def 
getForm(F, butch, t, dt, u0, bcs=None):\n\n v = F.arguments()[0]\n V = v.function_space()\n assert V == u0.function_space()\n\n A = numpy.array([[Constant(aa) for aa in arow] for arow in butch.A])\n c = numpy.array([Constant(ci) for ci in butch.c])\n\n num_stages = len(c)\n num_fields = len(V)\n\n Vbig = numpy.prod([V for i in range(num_stages)])\n # Silence a warning about transfer managers when we\n # coarsen coefficients in V\n push_parent(V.dm, Vbig.dm)\n vnew = TestFunction(Vbig)\n k = Function(Vbig)\n if len(V) == 1:\n u0bits = [u0]\n vbits = [v]\n if num_stages == 1:\n vbigbits = [vnew]\n kbits = [k]\n else:\n vbigbits = split(vnew)\n kbits = split(k)\n else:\n u0bits = split(u0)\n vbits = split(v)\n vbigbits = split(vnew)\n kbits = split(k)\n\n kbits_np = numpy.zeros((num_stages, num_fields), dtype=\"object\")\n\n for i in range(num_stages):\n for j in range(num_fields):\n kbits_np[i, j] = kbits[i*num_fields+j]\n\n Ak = A @ kbits_np\n\n Fnew = Zero()\n\n for i in range(num_stages):\n repl = {t: t + c[i] * dt}\n for j, (ubit, vbit, kbit) in enumerate(zip(u0bits, vbits, kbits)):\n repl[ubit] = ubit + dt * Ak[i, j]\n repl[vbit] = vbigbits[num_fields * i + j]\n repl[TimeDerivative(ubit)] = kbits_np[i, j]\n if (len(ubit.ufl_shape) == 1):\n for kk, kbitbit in enumerate(kbits_np[i, j]):\n repl[TimeDerivative(ubit[kk])] = kbitbit\n repl[ubit[kk]] = repl[ubit][kk]\n repl[vbit[kk]] = repl[vbit][kk]\n Fnew += replace(F, repl)\n\n bcnew = []\n gblah = []\n\n if bcs is None:\n bcs = []\n for bc in bcs:\n if isinstance(bc.domain_args[0], str):\n boundary = bc.domain_args[0]\n else:\n boundary = ()\n try:\n for j in bc.sub_domain:\n boundary += j\n except TypeError:\n boundary = (bc.sub_domain,)\n gfoo = expand_derivatives(diff(bc._original_arg, t))\n if len(V) == 1:\n for i in range(num_stages):\n gcur = replace(gfoo, {t: t + c[i] * dt})\n try:\n gdat = interpolate(gcur, V)\n except NotImplementedError:\n gdat = project(gcur, V)\n gblah.append((gdat, gcur))\n bcnew.append(DirichletBC(Vbig[i], gdat, boundary))\n else:\n sub = bc.function_space_index()\n for i in range(num_stages):\n gcur = replace(gfoo, {t: t + butch.c[i] * dt})\n try:\n gdat = interpolate(gcur, V.sub(sub))\n except NotImplementedError:\n gdat = project(gcur, V)\n gblah.append((gdat, gcur))\n bcnew.append(DirichletBC(Vbig[sub + num_fields * i],\n gdat, boundary))\n\n return Fnew, k, bcnew, gblah", "def sqf_part(self, f):\n domain = self.domain\n\n if domain.is_FiniteField:\n g = self.one\n for f, _ in self.sqf_list(f)[1]:\n g *= f\n\n return g\n\n if not f:\n return f\n\n gcd = f\n for x in self.gens:\n gcd = self.gcd(gcd, f.diff(x))\n sqf = f // gcd\n\n if domain.is_Field:\n return sqf.monic()\n return sqf.primitive()[1]", "def chi2sf_inplace(x, k):", "def forcing_full(self):\n if not self._fr or not self._frstar:\n raise ValueError('Need to compute Fr, Fr* first.')\n f1 = self._k_ku * Matrix(self.u) + self._f_k\n return -Matrix([f1, self._f_d, self._f_dnh])", "def _gf_sqf_list(self, f):\n domain = self.domain\n\n n, factors, p = 1, [], int(domain.characteristic)\n m = int(domain.order // p)\n\n while not f.is_ground:\n df = [f.diff(x) for x in self.gens]\n\n if any(_ for _ in df):\n g = f\n for q in df:\n g = self.gcd(g, q)\n h, f, i = f // g, g, 1\n\n while h != 1:\n g = self.gcd(f, h)\n h //= g\n\n if not h.is_ground:\n factors.append((h, i*n))\n\n f //= g\n h = g\n i += 1\n\n n *= p\n\n g = self.zero\n for monom, coeff in f.items():\n g[tuple(_ // p for _ in monom)] = coeff**m\n f = g\n\n return factors", "def 
eval_K_chol(self, S, sigma_n, sigma_f):\n K = self.eval_K(S)\n K += sigma_n * np.eye(K.shape[0])\n K_chol = jitchol(K)\n return K_chol", "def sqr(f):\n return f.per(dmp_sqr(f.rep, f.lev, f.dom))", "def roots_quartic(f):\n _, a, b, c, d = f.monic().all_coeffs()\n\n if not d:\n return [S.Zero] + roots([1, a, b, c], multiple=True)\n elif (c/a)**2 == d:\n x, m = f.gen, c/a\n\n g = Poly(x**2 + a*x + b - 2*m, x)\n\n z1, z2 = roots_quadratic(g)\n\n h1 = Poly(x**2 - z1*x + m, x)\n h2 = Poly(x**2 - z2*x + m, x)\n\n r1 = roots_quadratic(h1)\n r2 = roots_quadratic(h2)\n\n return r1 + r2\n else:\n a2 = a**2\n e = b - 3*a2/8\n f = _mexpand(c + a*(a2/8 - b/2))\n aon4 = a/4\n g = _mexpand(d - aon4*(a*(3*a2/64 - b/4) + c))\n\n if f.is_zero:\n y1, y2 = [sqrt(tmp) for tmp in\n roots([1, e, g], multiple=True)]\n return [tmp - aon4 for tmp in [-y1, -y2, y1, y2]]\n if g.is_zero:\n y = [S.Zero] + roots([1, 0, e, f], multiple=True)\n return [tmp - aon4 for tmp in y]\n else:\n # Descartes-Euler method, see [7]\n sols = _roots_quartic_euler(e, f, g, aon4)\n if sols:\n return sols\n # Ferrari method, see [1, 2]\n p = -e**2/12 - g\n q = -e**3/108 + e*g/3 - f**2/8\n TH = Rational(1, 3)\n\n def _ans(y):\n w = sqrt(e + 2*y)\n arg1 = 3*e + 2*y\n arg2 = 2*f/w\n ans = []\n for s in [-1, 1]:\n root = sqrt(-(arg1 + s*arg2))\n for t in [-1, 1]:\n ans.append((s*w - t*root)/2 - aon4)\n return ans\n\n # whether a Piecewise is returned or not\n # depends on knowing p, so try to put\n # in a simple form\n p = _mexpand(p)\n\n\n # p == 0 case\n y1 = e*Rational(-5, 6) - q**TH\n if p.is_zero:\n return _ans(y1)\n\n # if p != 0 then u below is not 0\n root = sqrt(q**2/4 + p**3/27)\n r = -q/2 + root # or -q/2 - root\n u = r**TH # primary root of solve(x**3 - r, x)\n y2 = e*Rational(-5, 6) + u - p/u/3\n if fuzzy_not(p.is_zero):\n return _ans(y2)\n\n # sort it out once they know the values of the coefficients\n return [Piecewise((a1, Eq(p, 0)), (a2, True))\n for a1, a2 in zip(_ans(y1), _ans(y2))]", "def zzX_sqf_p(f):\n return zzX_one_p(zzX_gcd(zzX_primitive(f)[1], zzX_diff(f)))", "def roots_quadratic(f):\n\n a, b, c = f.all_coeffs()\n dom = f.get_domain()\n\n def _sqrt(d):\n # remove squares from square root since both will be represented\n # in the results; a similar thing is happening in roots() but\n # must be duplicated here because not all quadratics are binomials\n co = []\n other = []\n for di in Mul.make_args(d):\n if di.is_Pow and di.exp.is_Integer and di.exp % 2 == 0:\n co.append(Pow(di.base, di.exp//2))\n else:\n other.append(di)\n if co:\n d = Mul(*other)\n co = Mul(*co)\n return co*sqrt(d)\n return sqrt(d)\n\n def _simplify(expr):\n if dom.is_Composite:\n return factor(expr)\n else:\n from sympy.simplify.simplify import simplify\n return simplify(expr)\n\n if c is S.Zero:\n r0, r1 = S.Zero, -b/a\n\n if not dom.is_Numerical:\n r1 = _simplify(r1)\n elif r1.is_negative:\n r0, r1 = r1, r0\n elif b is S.Zero:\n r = -c/a\n if not dom.is_Numerical:\n r = _simplify(r)\n\n R = _sqrt(r)\n r0 = -R\n r1 = R\n else:\n d = b**2 - 4*a*c\n A = 2*a\n B = -b/A\n\n if not dom.is_Numerical:\n d = _simplify(d)\n B = _simplify(B)\n\n D = factor_terms(_sqrt(d)/A)\n r0 = B - D\n r1 = B + D\n if a.is_negative:\n r0, r1 = r1, r0\n elif not dom.is_Numerical:\n r0, r1 = [expand_2arg(i) for i in (r0, r1)]\n\n return [r0, r1]", "def hessian(f, s, p, dx=1e-6, gmix=False, k =['All']):\n import numpy\n N = (p.m['n'] - 1)\n H = numpy.zeros(shape=(N,N))\n for m in range(1, N + 1):\n for z in range(1, N + 1):\n H[m - 1, z - 1] = FD(f, s, p, 2, z, m, dx, gmix, 
k)\n \n return H", "def refine_real_root(f, s, t, eps=None, steps=None, fast=False):\n return dmp_refine_real_root(f.rep, s, t, f.lev, f.dom, eps=eps, steps=steps, fast=fast)", "def planck_f(nu, T):\n return ((2*h*nu**3)/(c**2))*(1./(np.exp((h*nu)/(k*T))-1))", "def kruskal(Grafo,diferencia):\n edges = list()\n #print(diferencia,\"la diferencia\" )\n for i in range(len(Grafo)): # collect the edges in G\n for v,w in Grafo[i]:\n if (w!=-1):\n edges.append((i,v,w))\n # sort the edges in ascending order w.r.t weights in the edges\n edges.sort(key=lambda x: x[2])## se organiza por peso \n ans,sans = [ list() for i in range(len(Grafo)) ],0\n df = dforest(len(Grafo))\n i = 0\n contador=0\n while i!=len(edges):\n u,v,w = edges[i]\n if df.find(u)!=df.find(v):\n df.union(u,v)\n contador+=1\n if(contador==diferencia):\n #print (w,\"pinche w\")\n return w\n\n i += 1", "def skelmrec(f, B=None):\n from numpy import ravel\n if B is None: B = secross()\n y = binary(intersec(f, 0))\n for r in xrange(f.max(),1,-1):\n y = dilate(union(y,binary(f,r)), B)\n y = union(y, binary(f,1))\n return y", "def zzx_sub_term(f, c, k=0):\n if not c:\n return f\n\n n = len(f)\n m = n-k-1\n\n if k == n-1:\n return zzx_strip([f[0]-c] + f[1:])\n else:\n if k >= n:\n return [-c] + [INT_ZERO]*(k-n) + f\n else:\n return f[:m] + [f[m]-c] + f[m+1:]", "def zzX_sqf_part(f):\n quo = zzX_quo(f, zzX_gcd(f, zzX_diff(f)))\n return zzX_primitive(quo)[1]", "def _m_to_F_on_basis(self, la):\n Sym = self._kBoundedRing.ambient()\n kB = Sym.kBoundedSubspace(self.k, t=1)\n h = kB.khomogeneous()\n ks = kB.kschur()\n return sum( h(ks(x)).coefficient(la) * self(x) for x in PartitionsGreatestLE(sum(la), self.k))", "def zzx_sqf_part(f):\n quo = zzx_quo(f, zzx_gcd(f, zzx_diff(f)))\n return zzx_primitive(quo)[1]", "def ccz_equivalent_function(f, V):\n n = int(log(len(f), 2))\n mask = sum(int(1 << i) for i in range(0, n))\n L_map = FastLinearMapping(get_generating_matrix(V, 2*n).transpose())\n graph_f = [(x << n) | f[x] for x in range(0, 2**n)]\n graph_g = [L_map(word) for word in graph_f]\n g = [-1 for x in range(0, 2**n)]\n for word in graph_g:\n x, y = word >> n, word & mask\n g[x] = y\n if -1 in g:\n raise Exception(\"V was not contained in the Walsh zeroes of f!\")\n else:\n return g", "def create_sccs_kosaraju_dfs(directed_graph, nontrivial):\n\n Logging.log(\"\\nStarting\")\n stack = []\n sccs_trivial, visited = list(), dict()\n for vertex in directed_graph.get_vertices().keys():\n if visited.get(vertex) is None:\n Logging.log(\"Vertex {0} not visited, go deep\", vertex)\n kh.fill_order_dfd_sccs(directed_graph, vertex, visited, stack)\n else:\n Logging.log(\"Vertex {0} already visited, skipping\", vertex)\n\n reversed_graph = get_reversed_graph(directed_graph)\n\n visited = dict()\n for i in reversed(stack):\n if visited.get(i) is None:\n sccs_trivial.append(set())\n kh.visit_dfs_sccs(reversed_graph, i, visited, sccs_trivial[-1])\n\n if nontrivial:\n return filter_nontrivial(sccs_trivial, directed_graph)\n else:\n return sccs_trivial", "def y_fs(self, k):\n theta = self.T_cmb/2.7\n chi = k*theta**2./(self.Omega_m) # k in h/Mpc (it is right! 
Don't worry)\n f_nu = np.sum(np.atleast_1d(self.f_nu))\n f_cb = self.f_cb\n \n y_fs = 17.2*f_nu*(1.+0.488*f_nu**(-7./6.))*(self.massive_nu*chi/f_nu)**2.\n \n return y_fs", "def element_from_poly(self, f):\n n, k = self.n, f.degree()\n if k >= n:\n f = f % self.T\n if f == 0:\n return self.zero()\n d, c = dup_clear_denoms(f.rep.rep, QQ, convert=True)\n c = list(reversed(c))\n ell = len(c)\n z = [ZZ(0)] * (n - ell)\n col = to_col(c + z)\n return self(col, denom=d)", "def evolve_fqe_givens(wfn: Wavefunction, u: np.ndarray) -> Wavefunction:\n wfn = evolve_fqe_givens_sector(wfn, u, sector='alpha')\n wfn = evolve_fqe_givens_sector(wfn, u, sector='beta')\n return wfn", "def skelm(f, B=None, option=\"binary\"):\n from string import upper\n from numpy import asarray\n if B is None: B = secross()\n assert isbinary(f),'Input binary image only'\n option = upper(option)\n k1,k2 = limits(f)\n y = gray(intersec(f, k1),'uint16')\n iszero = asarray(y)\n nb = sesum(B,0)\n for r in xrange(1,65535):\n ero = erode(f,nb)\n if isequal(ero, iszero): break\n f1 = openth( ero, B)\n nb = sedilate(nb, B)\n y = union(y, gray(f1,'uint16',r))\n if option == 'BINARY':\n y = binary(y)\n return y", "def fock_tensor(\n S,\n alpha,\n cutoff,\n choi_r=np.arcsinh(1.0),\n check_symplectic=True,\n sf_order=False,\n rtol=1e-05,\n atol=1e-08,\n):\n # Check the matrix is symplectic\n if check_symplectic:\n if not is_symplectic(S, rtol=rtol, atol=atol):\n raise ValueError(\"The matrix S is not symplectic\")\n\n # And that S and alpha have compatible dimensions\n m, _ = S.shape\n l = m // 2\n if l != len(alpha):\n raise ValueError(\"The matrix S and the vector alpha do not have compatible dimensions\")\n # Check if S corresponds to an interferometer, if so use optimized routines\n if np.allclose(S @ S.T, np.identity(m), rtol=rtol, atol=atol) and np.allclose(\n alpha, 0, rtol=rtol, atol=atol\n ):\n reU = S[:l, :l]\n imU = S[:l, l:]\n U = reU - 1j * imU\n Ub = np.block([[0 * U, -U], [-U.T, 0 * U]])\n tensor = interferometer(Ub, cutoff)\n else:\n # Construct the covariance matrix of l two-mode squeezed vacua pairing modes i and i+l\n ch = np.cosh(choi_r) * np.identity(l)\n sh = np.sinh(choi_r) * np.identity(l)\n zh = np.zeros([l, l])\n Schoi = np.block([[ch, sh, zh, zh], [sh, ch, zh, zh], [zh, zh, ch, -sh], [zh, zh, -sh, ch]])\n # And then its Choi expanded symplectic\n S_exp = expand(S, list(range(l)), 2 * l) @ Schoi\n # And this is the corresponding covariance matrix\n cov = S_exp @ S_exp.T\n alphat = np.array(list(alpha) + ([0] * l))\n x = 2 * alphat.real\n p = 2 * alphat.imag\n mu = np.concatenate([x, p])\n\n tensor = state_vector(\n mu,\n cov,\n normalize=False,\n cutoff=cutoff,\n hbar=2,\n check_purity=False,\n choi_r=choi_r,\n )\n\n if sf_order:\n sf_indexing = tuple(chain.from_iterable([[i, i + l] for i in range(l)]))\n return tensor.transpose(sf_indexing)\n\n return tensor", "def suprec(f, g, Bc=None):\n from numpy import product\n if Bc is None: Bc = secross()\n n = product(f.shape)\n y = cerode(f,g,Bc,n);\n return y", "def root_factors(f, *gens, filter=None, **args):\n args = dict(args)\n\n F = Poly(f, *gens, **args)\n\n if not F.is_Poly:\n return [f]\n\n if F.is_multivariate:\n raise ValueError('multivariate polynomials are not supported')\n\n x = F.gens[0]\n\n zeros = roots(F, filter=filter)\n\n if not zeros:\n factors = [F]\n else:\n factors, N = [], 0\n\n for r, n in ordered(zeros.items()):\n factors, N = factors + [Poly(x - r, x)]*n, N + n\n\n if N < F.degree():\n G = reduce(lambda p, q: p*q, factors)\n 
factors.append(F.quo(G))\n\n if not isinstance(f, Poly):\n factors = [ f.as_expr() for f in factors ]\n\n return factors", "def smoothSpectrum(f, X_f, r_oct):\n X_f_out = np.zeros(np.shape(X_f))\n for n in range(np.shape(f)[0]):\n # standard deviation\n sigma = f[n] / r_oct / np.pi\n # Gaussian window with the center frequnecy f[n] an dstandard deviation\n w = np.exp( -(f-f[n])**2 / (2*sigma**2) )\n w = w / np.sum(w, axis=0)\n X_f_out[n] = np.sum(w * X_f)\n \n return X_f_out", "def qft_recursive(qubits):\n qftcirc = Circuit()\n\n # First add the QFT subroutine above\n qftcirc.add(qft_no_swap(qubits))\n\n # Then add SWAP gates to reverse the order of the qubits:\n for i in range(math.floor(len(qubits) / 2)):\n qftcirc.swap(qubits[i], qubits[-i - 1])\n\n return qftcirc", "def zzx_sqf_p(f):\n return zzx_one_p(zzx_gcd(zzx_primitive(f)[1], zzx_diff(f)))", "def algo_4_3(sf,FV):\n F = set()\n V = FV.keys()\n \n for s in sf:\n if isVariable(s, V):\n if EMPTY_SYMBOL_UNI in FV[s]:\n F |= (FV[s]- set([EMPTY_SYMBOL_UNI]))\n else:\n F |= FV[s]\n break\n else:\n assert s[0] == s[-1]\n assert s[0] == \"'\" or s[0] == '\"'\n\n s = s[1:-1] #字面量 去除引号\n F.add(s[0]) #取第一个符号作为首终结符\n break\n return F", "def zzX_sqr(f):\n if poly_univariate_p(f):\n return zzx_sqr(f)\n\n if zzX_zero_p(f):\n return f\n\n df = zzX_degree(f)\n l = poly_level(f)-1\n\n h = []\n\n for i in xrange(0, 2*df+1):\n coeff = zzX_zero(l)\n\n jmin = max(0, i-df)\n jmax = min(i, df)\n\n n = jmax - jmin + 1\n\n jmax = jmin + n // 2 - 1\n\n for j in xrange(jmin, jmax+1):\n coeff = zzX_add(coeff, zzX_mul(f[j], f[i-j]))\n\n coeff = zzX_mul_const(coeff, 2)\n\n if n & 1:\n elem = zzX_sqr(f[jmax+1])\n coeff = zzX_add(coeff, elem)\n\n h.append(coeff)\n\n return h", "def zassenhaus(f):\n # keep leading coefficient\n lf = f.leading_coefficient()\n\n # p-adic factorization\n p, fp_factors = padic_factorization(f)\n if len(fp_factors) == 1:\n return [f]\n\n # purge leading coefficient from factors\n for i,g in enumerate(fp_factors):\n if g.degree() == 0:\n del fp_factors[i]\n break\n\n # lift to Mignotte bound\n blm = upper_bound_of_coefficient(f)\n bound = p**(arith1.log(2*blm,p)+1)\n\n # Hensel lifting\n lf_inv_modq = intresidue.IntegerResidueClass(lf, bound).inverse()\n fq = f.coefficients_map(lambda c: (lf_inv_modq*c).minimumAbsolute()) # fq is monic\n fq_factors, q = hensel.lift_upto(fq, fp_factors, p, bound)\n\n return brute_force_search(f, fq_factors, bound)", "def full_S(self):\n return kron_list([R.T.dot(R) for R in self.Rs])", "def zzx_factor_sqf(f, **flags):\n cont, g = zzx_primitive(f)\n\n n = zzx_degree(g)\n\n if n <= 0:\n return cont, []\n\n if poly_LC(g) < 0:\n cont, g = -cont, zzx_neg(g)\n\n if n == 1 or zzx_eisenstein(g):\n return cont, [(g, 1)]\n\n factors = []\n\n if flags.get('cyclotomic', True):\n factors = zzx_cyclotomic_factor(g)\n\n if factors is None:\n factors = zzx_zassenhaus(g)\n\n def compare(f_a, f_b):\n i = len(f_a) - len(f_b)\n\n if not i:\n return cmp(f_a, f_b)\n else:\n return i\n\n return cont, sorted(factors, compare)", "def fs_c(self, percent=0.9, N=None):\n\t\tif not 0 <= percent <= 1:\n\t\t\traise ValueError(\"Percent should be a real number between 0 and 1.\")\n\t\tif N:\t\t\t\n\t\t\tif not isinstance(N, (int, numpy.int64)) or N<=0:\n\t\t\t\traise ValueError(\"N should be a positive integer.\")\n\t\t\tN = min(N, self.rank) # maybe we should notify the user?\n\t\t\t# S = numpy.zeros((self._numitems, N))\n\t\t# else:\n\t\tself.k = 1 + numpy.flatnonzero(numpy.cumsum(self.L) >= sum(self.L)*percent)[0]\n\t\t\t# S = 
numpy.zeros((self._numitems, self.k))\t\t\n\t\t# the sign of the square root can be either way; singular value vs. eigenvalue\n\t\t# numpy.fill_diagonal(S, -numpy.sqrt(self.E) if self.cor else self.s)\n\t\tnum2ret = N if N else self.k\n\t\ts = -numpy.sqrt(self.L) if self.cor else self.s\n\t\tS = scipy.linalg.diagsvd(s[:num2ret], len(self.Q), num2ret)\n\t\tself.G = _mul(self.D_c, self.Q.T, S) # important! note the transpose on Q\n\t\treturn self.G", "def f_exact(n, k):\n def fact(m):\n return math.factorial(m)\n\n partition = part(n, k)\n\n total = 0\n for p in partition:\n product = 1\n nodes_left = n\n counts = dict([(x, len(list(y))) for x, y in itertools.groupby(p)])\n for num in p:\n product *= fact(num - 1) * comb(nodes_left, num)\n nodes_left -= num\n for num in counts:\n product /= fact(counts[num])\n\n total += product\n return int(total)", "def zzX_sub_term(f, c, k=0):\n return zzX_add_term(f, zzX_neg(c), k)", "def k1(self, f, t, u, usereverse=False):\n\n if usereverse:\n output, storagez, storagew = f(t, u)\n\n self.storagez.append(storagez)\n self.storagew.append(storagew)\n\n return output\n else:\n return f(t, u)", "def sigma_f(nuc, temp=300.0, group_struct=None, phi_g=None, xs_cache=None):\n xs_cache = cache.xs_cache if xs_cache is None else xs_cache\n _prep_cache(xs_cache, group_struct, phi_g)\n if isinstance(nuc, collectionsAbc.Iterable) and not isinstance(nuc, basestring):\n return _atom_mass_channel(sigma_f, nuc, temp=temp, xs_cache=xs_cache)\n nuc = nucname.id(nuc)\n key = (nuc, rxname.id(\"fission\"), temp)\n return xs_cache[key]", "def S(k):\n\n from scipy.special import binom as binomial\n\n TrueHset = [0]\n if k > 1:\n for j in np.arange(k, 0, -1, dtype=int):\n TrueHset = list(set(TrueHset) | set([binomial(j, 2) + x for x in S(k - j)]))\n return TrueHset", "def kf_continuous(f, amp, tr_fac, del_fac, t):\n T = 1 / f\n T_sinu = 0.5 * T\n delay = del_fac * T\n tr = tr_fac * T\n beta = 1 - (2 * tr / T)\n\n t_T1 = 0\n t_T2 = (T * (1 - beta) / 4)\n t_T3 = (T * (1 + beta) / 4)\n t_T4 = (T * (3 - beta) / 4)\n t_T5 = (T * (3 + beta) / 4)\n t_T6 = T\n\n t = np.mod(t - delay, T)\n if t_T1 <= t < t_T2:\n f_value = amp / 2 + amp / 2 * np.cos(\n (2 * np.pi * t) / (T_sinu * (1 - beta)))\n elif t_T2 <= t < t_T3:\n f_value = 0\n elif t_T3 <= t < t_T4:\n f_value = -amp / 2 - amp / 2 * np.cos(\n (2 * np.pi * (t - (beta * T / 2))) / (T_sinu * (1 - beta)))\n elif t_T4 <= t < t_T5:\n f_value = 0\n elif t_T5 <= t <= t_T6:\n f_value = amp / 2 + amp / 2 * np.cos(\n (2 * np.pi * (t - beta * T)) / (T_sinu * (1 - beta)))\n return f_value\n # ------------------------------------------\n\n\n # revolving wing kinematics with sinusiodal ramp function", "def f3(self,k,cosTheta):\n n = 3\n m = 1\n\n m1term = self.g1 * (2.*n + 1.)*(self.f2(k[1],k[2],cosTheta[1,2])/2.) 
* \\\n (self.sumDotProd(1,n, 1,m, k,cosTheta)/self.sumDotProd(1,m, 1,m, k,cosTheta)) + \\\n (self.sumDotProd(1,n, 1,n, k,cosTheta)*self.sumDotProd(1,m, m+1,n, k,cosTheta)) / \\\n (self.sumDotProd(1,m, 1,m, k,cosTheta)*self.sumDotProd(m+1,n, m+1,n,k,cosTheta)) * \\\n self.g2(k[1],k[2],cosTheta[1,2])\n m = 2\n m2term = self.g2(k[1],k[1],cosTheta[0,1]) * (2.*n + 1.)* self.f1 * \\\n (self.sumDotProd(1,n, 1,m, k,cosTheta)/self.sumDotProd(1,m, 1,m, k,cosTheta)) + \\\n (self.sumDotProd(1,n, 1,n, k,cosTheta)*self.sumDotProd(1,m, m+1,n, k,cosTheta)) / \\\n (self.sumDotProd(1,m, 1,m, k,cosTheta)*self.sumDotProd(m+1,n, m+1,n,k,cosTheta)) * \\\n self.g1\n return (m1term+m2term)/((n-1.)*(2.*n+3))", "def Q(self, k, x):\n g = np.asarray(self.g(k, x))\n Q = g @ g.T\n return Q", "def derivert(f, k):\r\n \r\n return(k*f)", "def calculate_k_SVD(smooth_spreadsheet_matrix, k):\n U_unitary_matrix, singular_value, V_unitary_matrix = linalg.svd(smooth_spreadsheet_matrix)\n S_full_squared_matrix = np.zeros((k, k))\n np.fill_diagonal(S_full_squared_matrix, np.sqrt(singular_value[:k]))\n U_unitary_matrix = U_unitary_matrix[:, :k]\n return U_unitary_matrix, S_full_squared_matrix", "def fs_r(self, percent=0.9, N=None):\n\t\tif not 0 <= percent <= 1:\n\t\t\traise ValueError(\"Percent should be a real number between 0 and 1.\")\n\t\tif N:\n\t\t\tif not isinstance(N, (int, numpy.int64)) or N<=0:\n\t\t\t\traise ValueError(\"N should be a positive integer.\")\n\t\t\tN = min(N, self.rank)\n\t\t\t# S = numpy.zeros((self._numitems, N))\n\t\t# else:\n\t\tself.k = 1 + numpy.flatnonzero(numpy.cumsum(self.L) >= sum(self.L)*percent)[0]\n\t\t\t# S = numpy.zeros((self._numitems, self.k))\t\t\n\t\t# the sign of the square root can be either way; singular value vs. eigenvalue\n\t\t# numpy.fill_diagonal(S, -numpy.sqrt(self.E) if self.cor else self.s)\n\t\tnum2ret = N if N else self.k\n\t\ts = -numpy.sqrt(self.L) if self.cor else self.s\n\t\tS = scipy.linalg.diagsvd(s[:num2ret], self._numitems, num2ret)\t\t\n\t\tself.F = _mul(self.D_r, self.P, S)\n\t\treturn self.F", "def zzX_zeros_of(f, k, d=0):\n if poly_univariate_p(f):\n return [INT_ZERO]*k\n\n l = poly_level(f)-d\n\n if not k:\n return []\n else:\n return [ zzX_zero(l) for i in xrange(k) ]", "def stability(X, g_x_func, s, p, k):\n import numpy\n s.update_state(s, p, X = X, phase = k, Force_Update=True)\n H = hessian(g_x_func, s, p, dx=1e-6, gmix=True, k=k)\n Heig = numpy.linalg.eig(H)[0]\n HBeig = (Heig > 0.0)\n return numpy.all(HBeig)", "def zzx_zassenhaus(f):\n n = zzx_degree(f)\n\n if n == 1:\n return [f]\n\n A = zzx_max_norm(f)\n b = poly_LC(f)\n B = abs(int(sqrt(n+1))*2**n*A*b)\n C = (n+1)**(2*n)*A**(2*n-1)\n gamma = int(ceil(2*log(C, 2)))\n prime_max = int(2*gamma*log(gamma))\n\n for p in xrange(3, prime_max+1):\n if not isprime(p) or b % p == 0:\n continue\n\n F = gf_from_int_poly(f, p)\n\n if gf_sqf_p(F, p):\n break\n\n l = int(ceil(log(2*B + 1, p)))\n\n modular = []\n\n for ff in gf_factor_sqf(F, p)[1]:\n modular.append(gf_to_int_poly(ff, p))\n\n g = zzx_hensel_lift(p, f, modular, l)\n\n T = set(range(len(g)))\n factors, s = [], 1\n\n while 2*s <= len(T):\n for S in subsets(T, s):\n G, H = [b], [b]\n\n S = set(S)\n\n for i in S:\n G = zzx_mul(G, g[i])\n for i in T-S:\n H = zzx_mul(H, g[i])\n\n G = zzx_trunc(G, p**l)\n H = zzx_trunc(H, p**l)\n\n G_norm = zzx_l1_norm(G)\n H_norm = zzx_l1_norm(H)\n\n if G_norm*H_norm <= B:\n T = T - S\n\n G = zzx_primitive(G)[1]\n f = zzx_primitive(H)[1]\n\n factors.append(G)\n b = poly_LC(f)\n\n break\n else:\n s += 1\n\n return factors + 
[f]", "def compute_F(self, r: float, suppression: Suppression) -> Tuple[float, float]:\n moments = self.model.get_moments(suppression)\n suppression_factor = None\n if suppression == Suppression.PEAKS:\n raise ValueError(f'Bad suppression method: {suppression}')\n elif suppression == Suppression.RAW:\n suppression_factor = self.model.grid.sampling_cutoff\n \n if r == 0:\n # Treat the special case\n return 0.0, 0.0\n \n pk = self.model.powerspectrum\n min_k = self.model.min_k\n max_k = self.model.max_k\n if suppression == suppression.SAMPLING:\n # No need to go far out into the tail of the suppressing Gaussian\n # At k = n k_0, the suppression is exp(-n^2/2)\n # At n = 6, this is ~10^-8, which seems like a good place to stop\n max_k = min(max_k, self.model.grid.sampling_cutoff * 6)\n\n # Construct the list of domains\n halfoscillation = 5.76345919689455 # Half oscillation of j_2(x)\n oscillation1 = 9.09501133047638 # 1 oscillation of j_2(x)\n oscillation10 = 65.92794150295865 # 10 oscillations of j_2(x)\n halfosc = halfoscillation / r\n osc1 = oscillation1 / r\n osc10 = oscillation10 / r\n domains = self.generate_domains(min_k, max_k, moments.k4peak, osc1, osc10, suppression_factor)\n\n # Define integration functions\n def f(k):\n \"\"\"Straight function to integrate\"\"\"\n return k * k * k * k * pk(k, suppression) * spherical_jn(2, k * r)\n low_osc = self.gen_low_osc(f, \"F\", r)\n\n def hi_osc(min_k: float, max_k: float) -> Tuple[float, float]:\n \"\"\"Compute integrals for highly-oscillatory functions\"\"\"\n def f_sin1(k):\n return k * pk(k, suppression)\n\n def f_cos2(k):\n return k * k * pk(k, suppression)\n\n def f_sin3(k):\n return k * k * k * pk(k, suppression)\n\n # Perform the integrations using sin and cos quadrature\n sin1_result = quad(f_sin1, min_k, max_k, weight='sin', wvar=r,\n epsrel=self.err_rel, epsabs=self.err_abs, full_output=1, limit=70)\n cos2_result = quad(f_cos2, min_k, max_k, weight='cos', wvar=r,\n epsrel=self.err_rel, epsabs=self.err_abs, full_output=1, limit=70)\n sin3_result = quad(f_sin3, min_k, max_k, weight='sin', wvar=r,\n epsrel=self.err_rel, epsabs=self.err_abs, full_output=1, limit=70)\n # Check for any warnings\n if len(sin1_result) == 4 and 'roundoff error is detected' not in sin1_result[-1]:\n print('Warning when integrating F_sin1(r) at r =', r)\n print(sin1_result[-1])\n if len(cos2_result) == 4 and 'roundoff error is detected' not in cos2_result[-1]:\n print('Warning when integrating F_cos2(r) at r =', r)\n print(cos2_result[-1])\n if len(sin3_result) == 4 and 'roundoff error is detected' not in sin3_result[-1]:\n print('Warning when integrating F_sin3(r) at r =', r)\n print(sin3_result[-1])\n\n # Construct the result\n int_result = 3 * sin1_result[0] / (r * r * r) - 3 * cos2_result[0] / (r * r) - sin3_result[0] / r\n err_est = 3 * sin1_result[1] / (r * r * r) + 3 * cos2_result[1] / (r * r) + sin3_result[1] / r\n\n return int_result, err_est\n\n # Define selector function\n def selector(min_k: float, max_k: float) -> Callable:\n \"\"\"Returns the function to use to perform integration on the given domain\"\"\"\n if max_k < osc1 or (max_k - min_k) * r < 2 * pi:\n return low_osc\n return hi_osc\n\n # Perform integration\n result, err = self.perform_integral(domains, selector)\n\n return result, err", "def f(cls, R, K):\n bits = tuple(xor_streams(cls.expand_bits(R), K))\n Bs = nslice(bits, 6)\n Ss = [cls.s_box(i, bits) for i, bits in enumerate(Bs)]\n C = list(itertools.chain.from_iterable(Ss))\n return cls.permute(C, cls._sbox_permutation)", "def 
evolve_fqe_givens_sector(wfn: Wavefunction, u: np.ndarray,\n sector='alpha') -> Wavefunction:\n if sector == 'alpha':\n sigma = 0\n elif sector == 'beta':\n sigma = 1\n else:\n raise ValueError(\"Bad section variable. Either (alpha) or (beta)\")\n\n if u.shape[0] != wfn.norb():\n raise ValueError(\n \"unitary is not specified for the correct number of orbitals\")\n\n rotations, diagonal = givens_decomposition_square(u.copy())\n # Iterate through each layer and time evolve by the appropriate\n # fermion operators\n out = copy.deepcopy(wfn)\n for layer in rotations:\n for givens in layer:\n i, j, theta, phi = givens\n if not np.isclose(phi, 0):\n op = of.FermionOperator(\n ((2 * j + sigma, 1), (2 * j + sigma, 0)), coefficient=-phi)\n out = out.time_evolve(1.0, op, inplace=True)\n if not np.isclose(theta, 0):\n op = of.FermionOperator(((2 * i + sigma, 1),\n (2 * j + sigma, 0)),\n coefficient=-1j * theta) + \\\n of.FermionOperator(((2 * j + sigma, 1),\n (2 * i + sigma, 0)),\n coefficient=1j * theta)\n out = out.time_evolve(1.0, op, inplace=True)\n\n # evolve the last diagonal phases\n for idx, final_phase in enumerate(diagonal):\n if not np.isclose(final_phase, 1.0):\n op = of.FermionOperator(\n ((2 * idx + sigma, 1), (2 * idx + sigma, 0)),\n -np.angle(final_phase))\n out = out.time_evolve(1.0, op, inplace=True)\n\n return out", "def test_f_uni(self):\n s = np.array([100.0, 0, 0, 0, 0, 0])\n e = np.array([0.1, -0.05, -0.05, 0, 0, 0])\n f_direct = self.model.f(s, e, self.t, self.T)\n \n sdev = s - np.array([1,1,1,0,0,0]) * np.sum(s[:3]) / 3.0\n se = np.sqrt(3.0/2.0) * la.norm(sdev)\n ee = np.sqrt(2.0/3.0) * la.norm(e)\n\n g_direct = self.smodel.g(se, ee, self.t, self.T)\n \n self.assertTrue(np.isclose(g_direct, f_direct[0]))\n\n self.assertTrue(np.isclose(-g_direct/2.0, f_direct[1]))\n self.assertTrue(np.isclose(-g_direct/2.0, f_direct[2]))\n\n self.assertTrue(np.allclose([0,0,0], f_direct[3:]))", "def squareform(X, force=\"no\", checks=True):\n\n return ssd.squareform(X, force, checks)", "def SIDFT(X,D):\n N=len(X)\n x=np.zeros(N,'complex')\n for n in range(0,N,1):\n for k in range(0,N,1):\n x[n]=x[n]+np.exp(-1j*2*np.pi*k*D/N)*X[k]*np.exp(1j*2*np.pi*k*n/N)\n return x/N", "def convertKelvinToFarenheit(K):\n if isinstance(K, str) == True:\n raise ValueError(\"Kelvin cannot be a string value\")\n if isinstance(K,complex) == True:\n raise ValueError(\"Kelvin cannot be a complex value\")\n if isinstance(K,int) == True:\n raise ValueError(\"Kelvin should be a float value, example: 320.00\")\n \n F = (K * 9/5) - 459.67\n return F", "def evolve_fqe_givens_unrestricted(wfn: Wavefunction,\n u: np.ndarray) -> Wavefunction:\n rotations, diagonal = givens_decomposition_square(u.copy())\n out = copy.deepcopy(wfn)\n # Iterate through each layer and time evolve by the appropriate\n # fermion operators\n for layer in rotations:\n for givens in layer:\n i, j, theta, phi = givens\n if not np.isclose(phi, 0):\n op = of.FermionOperator(((j, 1), (j, 0)), coefficient=-phi)\n out = out.time_evolve(1.0, op, inplace=True)\n if not np.isclose(theta, 0):\n op = of.FermionOperator(\n ((i, 1),\n (j, 0)), coefficient=-1j * theta) + of.FermionOperator(\n ((j, 1), (i, 0)), coefficient=1j * theta)\n out = out.time_evolve(1.0, op, inplace=True)\n\n # evolve the last diagonal phases\n for idx, final_phase in enumerate(diagonal):\n if not np.isclose(final_phase, 1.0):\n op = of.FermionOperator(((idx, 1), (idx, 0)),\n -np.angle(final_phase))\n out = out.time_evolve(1.0, op, inplace=True)\n\n return out", "def _try_decompose(f):\n 
factors, roots = f.decompose(), []\n\n for currentroot in _try_heuristics(factors[0]):\n roots.append(currentroot)\n\n for currentfactor in factors[1:]:\n previous, roots = list(roots), []\n\n for currentroot in previous:\n g = currentfactor - Poly(currentroot, f.gen)\n\n for currentroot in _try_heuristics(g):\n roots.append(currentroot)\n\n return roots", "def _msqrd_v_f_f(s, t, model: SingleRhNeutrinoModel, mf: float):\n mx = model.mx\n u = 0.5 * np.tan(2 * model.theta)\n\n return (\n -2\n * u**2\n * GF**2\n * (\n 2 * mf**4 * (1 - 4 * SW**2 + 8 * SW**4)\n + 2 * mf**2 * (mx**2 - s - 2 * (1 - 4 * SW**2 + 8 * SW**4) * t)\n + (1 - 4 * SW**2 + 8 * SW**4)\n * (s**2 + 2 * s * t + 2 * t**2 - mx**2 * (s + 2 * t))\n )\n )", "def F(N,k=0) :\n accum = 0.0\n for i in xrange(1,N+1-k) :\n accum += (1.0+F(N-1,k+i-1))/N\n return accum", "def _sigmainf(N, h, m, dW, Km0, Pm0):\n M = m*(m-1)/2\n Im = broadcast_to(np.eye(m), (N, m, m))\n IM = broadcast_to(np.eye(M), (N, M, M))\n Ims0 = np.eye(m**2)\n factor1 = broadcast_to((2.0/h)*np.dot(Km0, Ims0 - Pm0), (N, M, m**2))\n factor2 = _kp2(Im, _dot(dW, _t(dW)))\n factor3 = broadcast_to(np.dot(Ims0 - Pm0, Km0.T), (N, m**2, M))\n return 2*IM + _dot(_dot(factor1, factor2), factor3)", "def richards_equation(x, s, gradient, kfun):\n return -kfun(x, s) * (gradient + 1)", "def shear_tensor(self, f):\n shear = self.einsum(\"qa,qb->qab\", [self.e, self.e])\n shear = self.einsum(\"q,qab->ab\", [f, shear])\n return shear", "def reduced(f, G):\n lev, dom, per, f, G = f.unify(G)\n return per(dmp_reduced(f, G, lev, dom))", "def eval_K(self, S):\n K = (self.eigenfunctions[self.X] * S[None, :]) @ \\\n self.eigenfunctions[self.X].T # shape (n,n)\n return K", "def get_f_s_gas(p: float, h: float) -> float:\n return 5.823109493752840 * 10 ** (-2) * p ** 4 \\\n - 3.309666523931270 * 10 ** (-1) * p ** 3 \\\n + 7.700179914440890 * 10 ** (-1) * p ** 2 \\\n - 1.311726004718660 * p \\\n + 1.521486605815750 * 10 ** (-9) * h ** 4 \\\n - 2.703698863404160 * 10 ** (-6) * h ** 3 \\\n + 1.793443775071770 * 10 ** (-3) * h ** 2 \\\n - 5.227303746767450 * 10 ** (-1) * h \\\n + 1.100368875131490 * 10 ** (-4) * p ** 3 * h \\\n + 5.076769807083600 * 10 ** (-7) * p ** 2 * h ** 2 \\\n + 1.202580329499520 * 10 ** (-8) * p * h ** 3 \\\n - 7.278049214744230 * 10 ** (-4) * p ** 2 * h \\\n - 1.449198550965620 * 10 ** (-5) * p * h ** 2 \\\n + 5.716086851760640 * 10 ** (-3) * p * h \\\n + 5.818448621582900 * 10", "def sol(s, k):\n f = [0]*26\n for x in s:\n f[ord(x)-97] -= 1\n # We store the negative of the frequencies\n \n heapq.heapify(f)\n # Make it a max heap\n \n while k and f:\n d = heapq.heappop(f)\n heapq.heappush(f, d+1)\n # Reduce the max frequency by 1 and k by 1 till k exists\n k-=1\n \n res = 0\n for x in f:\n res += x**2\n # Return the result, we dont care for the '-' since its gets squared\n return res", "def f(self, (k,t), (J,q,dq), **params):\n f = 0.*q\n return f", "def pruneRecursiveFeatures(v, newFeatures, s):\n # create the node feature matrix holding both v and new features\n allFeatures = {}\n for node in v.keys():\n allFeatures[node] = []\n appendFeatures(allFeatures, v)\n appendFeatures(allFeatures, newFeatures)\n # vertical logarithmic binning\n p = 0.5\n logFeatures = verticalLogBinning(allFeatures, p)\n\n # construct feature graph (s-friend)\n numFeatures = len(logFeatures.values()[0])\n featureGraph = TUNGraph.New() # the s-friend feature graph\n for i in range(numFeatures):\n featureGraph.AddNode(i)\n for i in range(numFeatures):\n featureI = getIthFeature(logFeatures, i)\n for 
j in range(i + 1, numFeatures):\n featureJ = getIthFeature(logFeatures, j)\n if isSimilar(featureI, featureJ, s):\n if not featureGraph.IsEdge(i, j):\n featureGraph.AddEdge(i, j)\n # summarize connected component\n retainedIdx = []\n wcc = TCnComV()\n GetWccs(featureGraph, wcc) # get all weakly connected components\n for i in range(0, wcc.Len()):\n retainedIdx.append(wcc[i][0])\n retainedIdx = sorted(retainedIdx)\n # return retained features\n retained = {}\n for node in v.keys():\n retained[node] = []\n startIdxNewFeatures = len(v.values()[0])\n for i in retainedIdx:\n # if the retained feature is from new features, add to retained feature\n if i >= startIdxNewFeatures:\n appendFeatures(retained, newFeatures, i - startIdxNewFeatures)\n return retained", "def saff_kuijlaars(N):\n k = np.arange(N)\n h = -1 + 2.0 * k / (N - 1)\n theta = np.arccos(h)\n phi = np.zeros_like(h)\n for i in range(1, N - 1):\n phi[i] = (phi[i - 1] + 3.6 / np.sqrt(N * (1 - h[i]**2))) % (2.0 * np.pi)\n\n return sph2car(np.ones_like(theta), theta, phi)", "def SSPRK3(t, h, state, f, kwargs):\n dim = len(state)//2\n S = [ np.asarray(state) ] + [ np.zeros_like(state) for i in range(4) ]\n C = [ [ (0,0.5) ], [ (1, 0.5) ], [ (0,2./3.), (2,1./6.) ], [ (3,0.5) ] ]\n B = [ (0.5, 0, 0), (0.5, 1, h/2.), (1./6., 2, h), (0.5, 3, h/2.) ]\n \n for i in range(4):\n b, k, dt = B[i]\n A = f(t + dt, S[k], kwargs)\n S[i+1] = b * (S[k] + h * A ) \n for j, c in C[i]:\n S[i+1] += c * S[j]\n return S[-1]", "def spec_helm_decomp(k,Cu,Cv,GM=False):\n dk = k[1]-k[0]\n s = np.log(k)\n\n Fphi = np.zeros_like(Cu)\n Fpsi = np.zeros_like(Cu)\n Cphi = np.zeros_like(Cu)\n Cpsi = np.zeros_like(Cu)\n\n # assume GM for decomposing into wave and vortex\n if GM:\n gm = np.load(\"/Users/crocha/Projects/dp_spectra/GM/gm_omega_star.npz\")\n f2omg2 = gm['rgm']\n ks = gm['k']*1.e3\n\n for i in range(s.size-1):\n\n ds = np.diff(s[i:])\n\n sh = sinh(s[i]-s[i:])\n ch = cosh(s[i]-s[i:])\n\n # the function to integrate\n Fp = Cu[i:]*sh + Cv[i:]*ch\n Fs = Cv[i:]*sh + Cu[i:]*ch\n\n # integrate using Simpson's rule\n Fpsi[i] = integrate.simps(Fs,s[i:])\n Fphi[i] = integrate.simps(Fp,s[i:])\n\n # zero out unphysical values\n Fpsi[Fpsi < 0.] = 0.\n Fphi[Fphi < 0.] 
= 0.\n\n # compute rotational and divergent components\n Cpsi = Fpsi - Fphi + Cu\n Cphi = Fphi - Fpsi + Cv\n\n if GM:\n\n f2omg2i = np.interp(k,ks,f2omg2)\n\n Cv_w = f2omg2i*Fphi - Fpsi + Cv\n Cv_v = Cv - Cv_w\n \n kdkromg = diff_central(ks, f2omg2)\n kdkromg = np.interp(k,ks[1:-1],kdkromg)\n\n dFphi = diff_central(k, Fphi)\n #dFphi = np.gradient(Fphi,k)\n dFphi = np.interp(k,k[1:-1],dFphi.real)\n E_w = Fphi - k*dFphi\n\n Cu_w = -k*kdkromg*Fphi + f2omg2i*(-Fpsi+Cv) + Fphi\n Cu_v = Cu - Cu_w\n\n Cb_w = E_w - (Cu_w + Cv_w)/2.\n\n return Cpsi,Cphi, Cu_w,Cv_w, Cu_v,Cv_v, E_w, Cb_w\n\n else:\n return Cpsi,Cphi", "def zzx_sqr(f):\n df, h = zzx_degree(f), []\n\n for i in xrange(0, 2*df+1):\n coeff = INT_ZERO\n\n jmin = max(0, i-df)\n jmax = min(i, df)\n\n n = jmax - jmin + 1\n\n jmax = jmin + n // 2 - 1\n\n for j in xrange(jmin, jmax+1):\n coeff += f[j]*f[i-j]\n\n coeff += coeff\n\n if n & 1:\n elem = f[jmax+1]\n coeff += elem**2\n\n h.append(coeff)\n\n return h", "def construct_hessian(f, mesh=None, op=DefaultOptions()):\n if mesh is None:\n mesh = f.function_space().mesh()\n dim = mesh.topological_dimension()\n assert dim in (2, 3)\n P1_ten = TensorFunctionSpace(mesh, \"CG\", 1)\n n = FacetNormal(mesh)\n\n # Integration by parts applied to the Hessian definition\n if op.hessian_recovery == 'parts':\n H = TrialFunction(P1_ten)\n τ = TestFunction(P1_ten)\n a = inner(tau, H)*dx\n L = -inner(div(τ), grad(f))*dx\n for i in range(dim):\n for j in range(dim):\n L += τ[i, j]*n[j]*f.dx(i)*ds\n\n H = Function(P1_ten)\n solve(a == L, H, solver_parameters=op.hessian_solver_parameters)\n\n # Double L2 projection, using a mixed formulation for the gradient and Hessian\n elif op.hessian_recovery == 'dL2':\n P1_vec = VectorFunctionSpace(mesh, \"CG\", 1)\n V = P1_ten*P1_vec\n H, g = TrialFunctions(V)\n τ, φ = TestFunctions(V)\n a = inner(τ, H)*dx\n a += inner(φ, g)*dx\n a += inner(div(τ), g)*dx\n for i in range(dim):\n for j in range(dim):\n a += -g[i]*τ[i, j]*n[j]*ds\n\n # L = inner(grad(f), φ)*dx\n L = f*dot(φ, n)*ds - f*div(φ)*dx # enables f to be P0\n\n q = Function(V)\n solve(a == L, q) # TODO: Solver parameters?\n H = q.split()[0]\n\n return H", "def kl_ucb(self, T, f):\n def index_func(x):\n return x.Sa / x.Na + np.sqrt(f(x.t)*2 / x.Na)\n return self.Index_Policy(T, index_func)", "def sub(f, g):\n lev, dom, per, F, G = f.unify(g)\n return per(dmp_sub(F, G, lev, dom))", "def k_corona(G, k, core_number=None):\n\n def func(v, k, c):\n return c[v] == k and k == sum(1 for w in G[v] if c[w] >= k)\n\n return _core_subgraph(G, func, k, core_number)", "def transformation_matrix_frenet(self, kappa, phi, s):\n if kappa == 0.0:\n # See Design and Kinematic Modeling of Constant Curvature Continuum Robots: A Review\n # the entries (limits) of the 4th column in case kappa = 0 can be calculated by using L'Hopital's rule\n return np.array([[cos(phi)*cos(kappa*s), -sin(phi), cos(phi)*sin(kappa*s), 0],\n [sin(phi)*cos(kappa*s), cos(phi), sin(phi)*sin(kappa*s), 0],\n [-sin(kappa*s), 0, cos(kappa*s), s],\n [0, 0, 0, 1]])\n else:\n return np.array([[cos(phi)*cos(kappa*s), -sin(phi), cos(phi)*sin(kappa*s), cos(phi)*(1-cos(kappa*s))/kappa],\n [sin(phi)*cos(kappa*s), cos(phi), sin(phi)*sin(kappa*s), sin(phi)*(1-cos(kappa*s))/kappa],\n [-sin(kappa*s), 0, cos(kappa*s), sin(kappa*s)/kappa],\n [0, 0, 0, 1]])" ]
[ "0.5876295", "0.5828523", "0.58153415", "0.57921195", "0.5763062", "0.5645726", "0.5505161", "0.54806006", "0.54626274", "0.5399158", "0.5388178", "0.53604126", "0.53176093", "0.5301814", "0.52727515", "0.5272213", "0.52575696", "0.52096814", "0.52035517", "0.51917213", "0.5188052", "0.51871717", "0.5180236", "0.51745504", "0.5173033", "0.5161864", "0.5138789", "0.5123245", "0.51199764", "0.5080659", "0.5068957", "0.50634605", "0.50558406", "0.5045818", "0.50434196", "0.5041392", "0.5040921", "0.503504", "0.50020903", "0.49695855", "0.49649096", "0.4957321", "0.49528503", "0.4938257", "0.49322328", "0.4927195", "0.4917878", "0.49132833", "0.4911717", "0.49083978", "0.49063855", "0.49058077", "0.489513", "0.48918718", "0.489113", "0.4887197", "0.48753327", "0.48654565", "0.48526192", "0.48465875", "0.48449764", "0.4836151", "0.48219556", "0.4818391", "0.481684", "0.48090118", "0.4801262", "0.4799627", "0.4795654", "0.47875762", "0.47872692", "0.47817817", "0.4773431", "0.47636244", "0.47604874", "0.47579792", "0.47557172", "0.47544", "0.47542897", "0.4751668", "0.47505948", "0.47442603", "0.47386193", "0.47355142", "0.47345248", "0.4733136", "0.47321576", "0.47290123", "0.47271514", "0.47225976", "0.47225678", "0.47215462", "0.47027397", "0.4697441", "0.46954116", "0.46938744", "0.4690298", "0.46891764", "0.46884948", "0.4687358", "0.46872815" ]
0.0
-1
Return ``True`` if ``f`` is a squarefree polynomial in ``K[X]``.

Examples
========

>>> _, x, y = ring('x y', ZZ)

>>> ((x + y)**2).is_squarefree
False
>>> (x**2 + y**2).is_squarefree
True
def is_squarefree(self, f):
    if f.is_ground:
        return True
    g = f
    for x in self.gens:
        g = self.gcd(g, f.diff(x))
        if g.is_ground:
            return True
    return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_squarefree_hilbert_number(n):\n return is_hilbert_number(n) and is_hilbert_squarefree_number(n)", "def isNodeSheaf(_session, _node):\n return checkIncToSets(_session, _node, [keynodes.info.stype_sheaf], sc.SC_A_CONST | sc.SC_POS)", "def is_sqf(f):\n return dmp_sqf_p(f.rep, f.lev, f.dom)", "def is_quantifier_free(formula: Formula) -> bool:\r\n # Task 11.3.1\r\n\r\n if is_quantifier(formula.root):\r\n return False\r\n\r\n if is_binary(formula.root):\r\n return is_quantifier_free(formula.first) and is_quantifier_free(formula.second)\r\n\r\n if is_unary(formula.root):\r\n return is_quantifier_free(formula.first)\r\n\r\n return True", "def is_quantifier_free(formula):\n assert type(formula) is Formula\n # Task 11.3.1\n if is_constant(formula.root) or is_variable(formula.root) or is_relation(formula.root) or is_equality(formula.root):\n return True\n\n if is_quantifier(formula.root):\n return False\n\n is_first = is_quantifier_free(formula.first)\n if is_binary(formula.root):\n return is_first and is_quantifier_free(formula.second)\n\n return is_first", "def is_square(q_1: Qs) -> bool:\n\n return math.sqrt(q_1.dim).is_integer()", "def is_clique(G,S): #set of vertices where every pair in the set forms an edge \n for v in S:\n if list(set(S)&set(neighbors(G,v))) != []: #[] <-- empty list\n return False\n \n return True", "def _can_do_sum_of_squares(n, k):\n if k < 1:\n return False\n if n < 0:\n return False\n if n == 0:\n return True\n if k == 1:\n return is_square(n)\n if k == 2:\n if n in (1, 2):\n return True\n if isprime(n):\n if n % 4 == 1:\n return 1 # signal that it was prime\n return False\n else:\n f = factorint(n)\n for p, m in f.items():\n # we can proceed iff no prime factor in the form 4*k + 3\n # has an odd multiplicity\n if (p % 4 == 3) and m % 2:\n return False\n return True\n if k == 3:\n if (n//4**multiplicity(4, n)) % 8 == 7:\n return False\n # every number can be written as a sum of 4 squares; for k > 4 partitions\n # can be 0\n return True", "def isSqrt(self):\n return _libsbml.ASTNode_isSqrt(self)", "def is_symbolic(self: Q) -> bool:\n\n symbolic = False\n\n if (\n hasattr(self.t, \"free_symbols\")\n or hasattr(self.x, \"free_symbols\")\n or hasattr(self.y, \"free_symbols\")\n or hasattr(self.z, \"free_symbols\")\n ):\n symbolic = True\n\n return symbolic", "def __contains__(self, f) :\n if self.__disc is infinity :\n return True\n \n (s, l) = f\n\n (a, b, c) = apply_GL_to_form(self.__p1list[l], s)\n if not c % self.__level == 0 :\n return False\n \n disc = 4*a*c - b**2\n if disc == 0 :\n return gcd([a,b,c]) < self._indefinite_content_bound()\n else :\n return disc < self.__disc", "def is_primitive_root(g,n):\n\t# SAGE equivalent is mod(g,n).is_primitive_root() in IntegerMod class\n\tif gcd(g,n) != 1: return False # Not in the group of units\n\torder = euler_phi(n)\n\tif carmichael_lambda(n) != order: return False # Group of units isn't cyclic\n\torderfacts = prime_divisors(order)\n\tfor fact in orderfacts:\n\t\tif pow(g,order//fact,n) == 1: return False\n\treturn True", "def has_xfree(self, s: set[Basic]):\n # protect O(1) containment check by requiring:\n if type(s) is not set:\n raise TypeError('expecting set argument')\n return any(a in s for a in iterfreeargs(self))", "def is_square(N):\n if N < 0:\n print(\"N is negative number @is_square in ModulesFactorization.\")\n sys.exit()\n\n sqrt_N=round(math.sqrt(N))\n if N == sqrt_N*sqrt_N:\n return True\n else:\n return False", "def __contains__(self, f) :\n if self.__disc is infinity :\n return True\n \n (s, l) = 
f\n\n (a, _, c) = apply_GL_to_form(self.__p1list[l], s)\n if not c % self.__level == 0 :\n return False\n\n return a + c < self.index()", "def is_perfect_square():", "def is_square(N):\n return N == round(N**(0.5))**2", "def is_hilbert_squarefree_number(n):\n ubound = math.ceil(n / 2)\n for a in range(5, ubound + 1):\n if is_hilbert_square(a) and n % a == 0:\n return False\n return True", "def is_square(x):\n\n if x < 0:\n return False\n if math.pow(int(math.sqrt(x)), 2) == x:\n return True", "def has_path_sum(self, k):\n\n return self.has_path_sum_helper(self.root, k)", "def bfs(graph: np.ndarray, row: int, s: int, t: int, parent: list) -> bool:\r\n visited = [False] * row\r\n queue = []\r\n queue.append(s)\r\n visited[s] = True\r\n\r\n while queue:\r\n\r\n u = queue.pop(0)\r\n\r\n for ind, val in enumerate(graph[u]):\r\n if visited[ind] is False and val > 0:\r\n queue.append(ind)\r\n visited[ind] = True\r\n parent[ind] = u\r\n\r\n return True if visited[t] else False", "def stability(X, g_x_func, s, p, k):\n import numpy\n s.update_state(s, p, X = X, phase = k, Force_Update=True)\n H = hessian(g_x_func, s, p, dx=1e-6, gmix=True, k=k)\n Heig = numpy.linalg.eig(H)[0]\n HBeig = (Heig > 0.0)\n return numpy.all(HBeig)", "def sroot(n):\n\n return int(n ** 0.5) == n ** 0.5", "def isSymmetric(self, root: TreeNode) -> bool:\n return Solution().isMirror(root, root)", "def is_triangular(k):\n sum = 0\n \n for number in range(1,k+1):\n sum += number\n if sum == k:\n return True\n if sum > k:\n return False", "def is_semileaf(self):\n if self._leftchild and self._rightchild:\n return False\n if not self._leftchild and not self._rightchild:\n return False\n return True", "def isSetStoichiometry(self):\n return _libsbml.SpeciesReference_isSetStoichiometry(self)", "def in_family(ls, sol):\r\n familia = sym(sol)\r\n for k in range(1, len(familia)):\r\n if familia[k] in ls:\r\n return True\r\n return False", "def is_stump(self):\n if self.is_leaf():\n return False\n return self.left_subtree.is_leaf() and self.right_subtree.is_leaf()", "def is_straight(hand):\n # same suite\n suite = hand[0][1]\n vals = []\n for c in hand:\n vals.append(cards[c[0]])\n # check if vals are consecutive or not\n if is_contiguous(vals):\n return True\n else:\n return False", "def isSC(g, v):\n\n a = [False]*(v) # mark vertices not visited\n \n DFS(g, 0, a) # perform depth-first search\n\n if any(i == False for i in a): # see if we have visited all vertices\n return False\n\n gr = tranpose(g) # create a reversed (tranposed) graph.\n \n a = [False]*(v) # again, mark vertices visited\n\n gr = DFS(gr, 0,v) # DFS for gr\n\n if any(i == False for i in v):\n return False\n\n return True", "def is_root(self, p):\n return self.root() == p", "def sparsify(f, arg_types, sparse_rules=None):\n os.environ[\"STREE_PYTHON_FALLBACK\"] = \"1\"\n tree = SymbolTree.create(f)\n handler = tree.get_handler()\n sparse_rules = sparse_rules or {}\n sparsify_tree(handler, arg_types, sparse_rules, f)\n os.unsetenv(\"STREE_PYTHON_FALLBACK\")\n return tree.get_network()", "def has_single_root(self):\n root = self.left_root\n if root != NULL and self.right_sib(root) == NULL:\n return True\n return False", "def has_scoring(self):\n return not self.get_node('//Scoring') is None", "def test_sym_sqrtm(self): \n # create random symmetric n x n matrix\n n = 5\n A = 5.0 * 2.0*(torch.rand(n,n) - 0.5)\n A = A + A.T\n\n # reference implementation of scipy\n sqA_scipy = sla.sqrtm(A.numpy())\n isqA_scipy = sla.inv(sla.sqrtm(A.numpy()))\n # my own implementation using 
pure torch functions\n sqA,isqA = (x.numpy() for x in _sym_sqrtm(A))\n \n self.assertTrue(np.isclose(sqA, sqA_scipy).all())\n self.assertTrue(np.isclose(isqA, isqA_scipy).all())", "def is_FSAL(self):\n if np.all(self.A[-1,:]==self.b): return True\n else: return False", "def perfect_square(num: int) -> bool:\n return math.sqrt(num) * math.sqrt(num) == num", "def is_square(n):\r\n m = int(sqrt(n))\r\n return m * m == n", "def roots_quintic(f):\n result = []\n\n coeff_5, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n if not all(coeff.is_Rational for coeff in (coeff_5, coeff_4, p_, q_, r_, s_)):\n return result\n\n if coeff_5 != 1:\n f = Poly(f / coeff_5)\n _, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n # Cancel coeff_4 to form x^5 + px^3 + qx^2 + rx + s\n if coeff_4:\n p = p_ - 2*coeff_4*coeff_4/5\n q = q_ - 3*coeff_4*p_/5 + 4*coeff_4**3/25\n r = r_ - 2*coeff_4*q_/5 + 3*coeff_4**2*p_/25 - 3*coeff_4**4/125\n s = s_ - coeff_4*r_/5 + coeff_4**2*q_/25 - coeff_4**3*p_/125 + 4*coeff_4**5/3125\n x = f.gen\n f = Poly(x**5 + p*x**3 + q*x**2 + r*x + s)\n else:\n p, q, r, s = p_, q_, r_, s_\n\n quintic = PolyQuintic(f)\n\n # Eqn standardized. Algo for solving starts here\n if not f.is_irreducible:\n return result\n f20 = quintic.f20\n # Check if f20 has linear factors over domain Z\n if f20.is_irreducible:\n return result\n # Now, we know that f is solvable\n for _factor in f20.factor_list()[1]:\n if _factor[0].is_linear:\n theta = _factor[0].root(0)\n break\n d = discriminant(f)\n delta = sqrt(d)\n # zeta = a fifth root of unity\n zeta1, zeta2, zeta3, zeta4 = quintic.zeta\n T = quintic.T(theta, d)\n tol = S(1e-10)\n alpha = T[1] + T[2]*delta\n alpha_bar = T[1] - T[2]*delta\n beta = T[3] + T[4]*delta\n beta_bar = T[3] - T[4]*delta\n\n disc = alpha**2 - 4*beta\n disc_bar = alpha_bar**2 - 4*beta_bar\n\n l0 = quintic.l0(theta)\n Stwo = S(2)\n l1 = _quintic_simplify((-alpha + sqrt(disc)) / Stwo)\n l4 = _quintic_simplify((-alpha - sqrt(disc)) / Stwo)\n\n l2 = _quintic_simplify((-alpha_bar + sqrt(disc_bar)) / Stwo)\n l3 = _quintic_simplify((-alpha_bar - sqrt(disc_bar)) / Stwo)\n\n order = quintic.order(theta, d)\n test = (order*delta.n()) - ( (l1.n() - l4.n())*(l2.n() - l3.n()) )\n # Comparing floats\n if not comp(test, 0, tol):\n l2, l3 = l3, l2\n\n # Now we have correct order of l's\n R1 = l0 + l1*zeta1 + l2*zeta2 + l3*zeta3 + l4*zeta4\n R2 = l0 + l3*zeta1 + l1*zeta2 + l4*zeta3 + l2*zeta4\n R3 = l0 + l2*zeta1 + l4*zeta2 + l1*zeta3 + l3*zeta4\n R4 = l0 + l4*zeta1 + l3*zeta2 + l2*zeta3 + l1*zeta4\n\n Res = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n Res_n = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n\n # Simplifying improves performance a lot for exact expressions\n R1 = _quintic_simplify(R1)\n R2 = _quintic_simplify(R2)\n R3 = _quintic_simplify(R3)\n R4 = _quintic_simplify(R4)\n\n # hard-coded results for [factor(i) for i in _vsolve(x**5 - a - I*b, x)]\n x0 = z**(S(1)/5)\n x1 = sqrt(2)\n x2 = sqrt(5)\n x3 = sqrt(5 - x2)\n x4 = I*x2\n x5 = x4 + I\n x6 = I*x0/4\n x7 = x1*sqrt(x2 + 5)\n sol = [x0, -x6*(x1*x3 - x5), x6*(x1*x3 + x5), -x6*(x4 + x7 - I), x6*(-x4 + x7 + I)]\n\n R1 = R1.as_real_imag()\n R2 = R2.as_real_imag()\n R3 = R3.as_real_imag()\n R4 = R4.as_real_imag()\n\n for i, s in enumerate(sol):\n Res[1][i] = _quintic_simplify(s.xreplace({z: R1[0] + I*R1[1]}))\n Res[2][i] = _quintic_simplify(s.xreplace({z: R2[0] + I*R2[1]}))\n Res[3][i] = _quintic_simplify(s.xreplace({z: R3[0] + I*R3[1]}))\n Res[4][i] = _quintic_simplify(s.xreplace({z: R4[0] + I*R4[1]}))\n\n for i in range(1, 5):\n for j in 
range(5):\n Res_n[i][j] = Res[i][j].n()\n Res[i][j] = _quintic_simplify(Res[i][j])\n r1 = Res[1][0]\n r1_n = Res_n[1][0]\n\n for i in range(5):\n if comp(im(r1_n*Res_n[4][i]), 0, tol):\n r4 = Res[4][i]\n break\n\n # Now we have various Res values. Each will be a list of five\n # values. We have to pick one r value from those five for each Res\n u, v = quintic.uv(theta, d)\n testplus = (u + v*delta*sqrt(5)).n()\n testminus = (u - v*delta*sqrt(5)).n()\n\n # Evaluated numbers suffixed with _n\n # We will use evaluated numbers for calculation. Much faster.\n r4_n = r4.n()\n r2 = r3 = None\n\n for i in range(5):\n r2temp_n = Res_n[2][i]\n for j in range(5):\n # Again storing away the exact number and using\n # evaluated numbers in computations\n r3temp_n = Res_n[3][j]\n if (comp((r1_n*r2temp_n**2 + r4_n*r3temp_n**2 - testplus).n(), 0, tol) and\n comp((r3temp_n*r1_n**2 + r2temp_n*r4_n**2 - testminus).n(), 0, tol)):\n r2 = Res[2][i]\n r3 = Res[3][j]\n break\n if r2 is not None:\n break\n else:\n return [] # fall back to normal solve\n\n # Now, we have r's so we can get roots\n x1 = (r1 + r2 + r3 + r4)/5\n x2 = (r1*zeta4 + r2*zeta3 + r3*zeta2 + r4*zeta1)/5\n x3 = (r1*zeta3 + r2*zeta1 + r3*zeta4 + r4*zeta2)/5\n x4 = (r1*zeta2 + r2*zeta4 + r3*zeta1 + r4*zeta3)/5\n x5 = (r1*zeta1 + r2*zeta2 + r3*zeta3 + r4*zeta4)/5\n result = [x1, x2, x3, x4, x5]\n\n # Now check if solutions are distinct\n\n saw = set()\n for r in result:\n r = r.n(2)\n if r in saw:\n # Roots were identical. Abort, return []\n # and fall back to usual solve\n return []\n saw.add(r)\n\n # Restore to original equation where coeff_4 is nonzero\n if coeff_4:\n result = [x - coeff_4 / 5 for x in result]\n return result", "def is_symmetric(tree):\n\n def is_symmetric_helper(subtree_0, subtree_1):\n if not subtree_0 and not subtree_1:\n return True\n elif subtree_0 and subtree_1:\n if (subtree_0.data == subtree_1.data\n and is_symmetric_helper(subtree_0.left, subtree_1.right)\n and is_symmetric_helper(subtree_0.right, subtree_1.left)):\n return True\n return False\n\n return not tree or is_symmetric_helper(tree.left, tree.right)", "def squareform(X, force=\"no\", checks=True):\n\n return ssd.squareform(X, force, checks)", "def test_symplectic_multimode(self, tol):\n r = [0.543] * 4\n phi = [0.123] * 4\n S = symplectic.squeezing(r, phi)\n\n # the symplectic matrix\n O = symplectic.sympmat(4)\n\n assert np.allclose(S @ O @ S.T, O, atol=tol, rtol=0)", "def isprimitive(g,n):\r\n\t# SAGE equivalent is mod(g,n).is_primitive_root() in IntegerMod class\r\n\tif gcd(g,n) != 1: return False # Not in the group of units\r\n\torder = euler_phi(n)\r\n\tif carmichaellambda(n) != order: return False # Group of units isn't cyclic\r\n\torderfacts = prime_divisors(order)\r\n\toldfact = 1\r\n\tfor fact in orderfacts:\r\n\t\tif fact!=oldfact:\r\n\t\t\tif pow(g,order/fact,n) == 1: return False\r\n\t\t\toldfact = fact\r\n\treturn True", "def isSStx(tx):\n try:\n checkSStx(tx)\n\n except Exception as e:\n log.debug(\"isSStx: {}\".format(e))\n\n else:\n return True", "def is_simple(self):\n return self.upper_binary_tree() == self.lower_binary_tree()", "def is_root(self, n):\n return n == self._root", "def isSetStoichiometryMath(self):\n return _libsbml.SpeciesReference_isSetStoichiometryMath(self)", "def test_simple_branches(self):\n sgf = \"\"\"\n (;FF[4]GM[1]SZ[19];B[aa];W[bb](;B[cc];W[dd];B[ee])(;B[hh];W[hg]))\n \"\"\"\n coll = parseSgf(sgf)\n self.assertEqual(coll,\n [[{'SZ': '19', 'GM': '1', 'FF': '4'}, {'B': 'aa'}, {'W': 'bb'},\n [[{'B': 'cc'}, {'W': 'dd'}, 
{'B': 'ee'}], [{'B': 'hh'}, {'W': 'hg'}]]]])\n self.assertEqual(self._trim_sgf_whitespace(sgf), makeSgf(coll))", "def is_triangle(x):\n solution = solve_quad(1, 1, -2*x)\n return max(solution) % 1 == 0", "def verif_stochastic(sv, tree=Special):\r\n err=\"\"\r\n if tree==Special: # explore all program\r\n for nod in sv.Object.values():\r\n for c,v in nod.clauses:\r\n \r\n # explore conditions\r\n A=c[1] \r\n if A:\r\n for sto in Stochastic: \r\n if applied(A[0], sto): # fatal error\r\n err+=\"\\n\"+Err_not_allowed_cond+\"\\n \"+sto+ \"-->\" + A[0] # *** Syntax error: operator not allowed in a condition ***\r\n if applied(A[0], Call): # verify volatile calls\r\n for y in Volatile_calls:\r\n if A[0].startswith(Call+Obr+Quote+y+Obr): # fatal error \r\n err+=\"\\n\"+Err_not_allowed_cond+\"\\n \"+y+ \"-->\"+ A[0] # *** Syntax error: operator not allowed in a condition ***\r\n \r\n # explore values\r\n verif_stochastic(sv, tree=v)\r\n \r\n if err:\r\n print(err)\r\n raise ReferenceError\r\n \r\n else: # explore a single object\r\n op,A,B=tree \r\n if op==Comma and A: # a list: recursively explore each element\r\n for x in A:\r\n if x: verif_stochastic(sv, tree=x)\r\n else:\r\n for x in [A, B]:\r\n if x:\r\n for sto in Stochastic: \r\n if applied(x[0], sto): # fatal error\r\n err+=\"\\n\"+Err_not_allowed_expr+\"\\n \"+sto+ \"-->\"+ tree_join(tree) # *** Syntax error: operator not allowed in an expression ***\r\n if applied(x[0], Call): # verify volatile calls\r\n for y in Volatile_calls: \r\n if x[0].startswith(Call+Obr+Quote+y+Obr): # fatal error \r\n err+=\"\\n\"+Err_not_allowed_expr+\"\\n \"+y+ \"-->\"+tree_join(tree) # *** Syntax error: operator not allowed in an expression ***\r\n verif_stochastic(sv, tree=x) # recursively explore each term\r\n \r\n if err:\r\n print(err)\r\n raise ReferenceError", "def cheapCheck(t: Ticket) -> bool:\n t_staker = stakers[t.sender]\n\n valid_Q_j = t.proof.Q_j == t_staker.address\n valid_vs = t_staker.weight() > t.proof.vs >= 1\n\n return valid_Q_j && valid_vs", "def is_perfect(self) -> bool:\n if self.root is None: # If tree is empty\n return True\n\n h = self.height()\n return self.is_perfect_helper(self.root, 0, h)", "def test_symplectic(self, tol):\n r = 0.543\n phi = 0.123\n S = symplectic.squeezing(r, phi)\n\n # the symplectic matrix\n O = np.array([[0, 1], [-1, 0]])\n\n assert np.allclose(S @ O @ S.T, O, atol=tol, rtol=0)", "def is_skew_component_fusion(self) -> bool:\n fcell = self.first_cell\n scell = self.second_cell\n if self._fuse_row:\n skew_ob = GriddedPerm((0, 1), (fcell, scell))\n else:\n skew_ob = GriddedPerm((0, 1), (fcell, scell))\n return skew_ob in self._tiling.obstructions", "def test_weakest_squad(self):\n\n sq = choose_squad(\"weakest_squad\", self.army)\n self.assertIs(sq, self.army.squads[1])", "def is_square(apositiveint):\n x = apositiveint // 2\n seen = set([x])\n while x * x != apositiveint:\n x = (x + (apositiveint // x)) // 2\n if x in seen: return False\n seen.add(x)\n return True", "def test_get_sqrrect_sqr_all_float(self):\n result = get_squarerectangle_type(2.22, 2.22, 2.22, 2.22,)\n self.assertEqual(result, 'square')", "def is_sra(val):\n return sra_regexp.match(val)", "def isSetSpeciesReference(self):\n return _libsbml.MultiASTPlugin_isSetSpeciesReference(self)", "def is_square(n):\n if type(n) is not int:\n raise ValueError(\"Wrong given type, should be integer instead\")\n return n > -1 and math.sqrt(n) == int(math.sqrt(n))", "def find_fantasy(self, fantasy: Fantasy, obj: Union[Statement, Fantasy]) -> bool:\n if fantasy 
is None:\n return True # all objects have top level as parent\n fant = obj.fantasy\n while fant is not None:\n fant = fant.fantasy\n if fant is fantasy:\n return True\n return False", "def is_perfect(self) -> bool:\n #binary search tree==empty\n if self.root is None:\n return True\n\n # loop binary search tree\n height = self.height()\n return self.is_perfect_helper(self.root, 0, height)", "def has_multiple_roots(self):\n root = self.left_root\n if root != NULL and self.right_sib(root) != NULL:\n return True\n return False", "def is_root(self, p):\n return self.root() == p", "def is_root(self, p):\n return self.root() == p", "def is_root(self, p):\n return self.root() == p", "def has_seven(k):\n\tif k % 10 == 7:\n\t\treturn True\n\telif k < 10:\n\t\treturn False\n\telse:\n\t\treturn has_seven(k // 10)", "def is_root(self):\n return self.unpack_word(0x2) & 0x0004 > 0", "def zzX_sqf_p(f):\n return zzX_one_p(zzX_gcd(zzX_primitive(f)[1], zzX_diff(f)))", "def is_triangular(k):\n n = 1\n x = k\n while True:\n if x == 0:\n return True\n break\n elif x < 0:\n return False\n break\n else:\n x -= n\n n += 1", "def XCAFDoc_ShapeTool_IsSimpleShape(*args):\n return _XCAFDoc.XCAFDoc_ShapeTool_IsSimpleShape(*args)", "def isValid(self, s: str) -> bool:\n stack = list()\n for c in s:\n if c in Solution.corresponding_parenthesis:\n stack.append(Solution.corresponding_parenthesis[c])\n elif not stack or stack.pop() != c:\n return False\n return not stack", "def IsSimpleShape(*args):\n return _XCAFDoc.XCAFDoc_ShapeTool_IsSimpleShape(*args)", "def is_posn_above_fish(p: Posn, f: SwimmingFish) -> bool:\n return p.y > f.posn.y and left_edge(f) <= p.x <= right_edge(f)", "def is_rational(symbol):\n return isa(symbol, fractions.Fraction) or _is_real(symbol)", "def is_root(self,p):\n return self.root() == p", "def test_multi_branches(self):\n sgf = \"\"\"\n (;FF[4]GM[1]SZ[19];B[aa];W[bb](;B[cc];W[dd](;B[ad];W[bd])\n (;B[ee];W[ff]))\n (;B[hh];W[gg])\n (;B[ii];W[jj]))\n \"\"\"\n coll = parseSgf(sgf)\n self.assertEqual(coll,\n [[{'SZ': '19', 'GM': '1', 'FF': '4'}, {'B': 'aa'},\n {'W': 'bb'},\n [[{'B': 'cc'}, {'W': 'dd'}, [[{'B': 'ad'}, {'W': 'bd'}], [{'B': 'ee'}, {'W': 'ff'}]]],\n [{'B': 'hh'}, {'W': 'gg'}],\n [{'B': 'ii'}, {'W': 'jj'}]],\n ]])\n self.assertEqual(self._trim_sgf_whitespace(sgf), makeSgf(coll))", "def __bool__(self):\n for root, products in self.rel_paths():\n if products:\n return True\n return False", "def is_simplex(self):\n return self.is_compact() and (self.dim()+1==self.n_vertices())", "def isFunctionalCompartment(*args):\n return _libsbml.SBO_isFunctionalCompartment(*args)", "def is_symmetric_mode(beta, k0, g, a_over_d, h):\r\n lhs = ((cmath.sqrt(beta**2 - k0**2) / k0)\r\n * cmath.tanh(g/2 * cmath.sqrt(beta**2 - k0**2)))\r\n rhs = a_over_d * cmath.tan(k0 * h)\r\n return floats_are_equal(lhs, rhs, tol=1e-4)", "def is_hash(fhash):\n\n # Intentionally doing if/else statement for ease of testing and reading\n if re.match(re_md5, fhash):\n return True\n elif re.match(re_sha1, fhash):\n return True\n elif re.match(re_sha256, fhash):\n return True\n elif re.match(re_sha512, fhash):\n return True\n elif re.match(re_ssdeep, fhash):\n return True\n else:\n return False", "def has_curry(tree, userlambdas=[]):\n return has_deco([\"curry\"], tree, userlambdas)", "def is_square(matrix):\n return is_matrix(matrix) and matrix.shape[0] == matrix.shape[1]", "def test_is_symplectic():\n theta = np.pi / 6\n r = np.arcsinh(1.0)\n phi = np.pi / 8\n S = symplectic.rotation(theta)\n assert symplectic.is_symplectic(S)\n S 
= symplectic.squeezing(r, theta)\n assert symplectic.is_symplectic(S)\n S = symplectic.beam_splitter(theta, phi)\n assert symplectic.is_symplectic(S)\n S = symplectic.two_mode_squeezing(r, theta)\n assert symplectic.is_symplectic(S)\n A = np.array([[2.0, 3.0], [4.0, 6.0]])\n assert not symplectic.is_symplectic(A)\n A = np.identity(3)\n assert not symplectic.is_symplectic(A)\n A = np.array([[2.0, 3.0], [4.0, 6.0], [4.0, 6.0]])\n assert not symplectic.is_symplectic(A)", "def f_boolean(node, pos, size, context, v):\n if xpath.tools.nodesetp(v):\n return len(v) > 0\n elif xpath.tools.numberp(v):\n if v == 0 or v != v:\n return False\n return True\n elif xpath.tools.stringp(v):\n return v != ''\n\n return v", "def is_square(self):\n return self.shape[0] == self.shape[1]", "def is_square(self):\n return self.shape[0] == self.shape[1]", "def has_seven(k):\n \n if k % 10 == 7:\n return True\n else:\n if k<10:\n return False\n return has_seven(k//10)", "def is_symmetric(self, root):\n \n if not root: return True\n return self.is_symmetric_recursive(root)", "def is_inside_canvas(quadtree, shift, canvas_size):\r\n\r\n stack = [quadtree.root]\r\n w, h = canvas_size\r\n sh_w, sh_h = shift\r\n\r\n while stack:\r\n v = stack.pop()\r\n\r\n a, b, c, d = v.value # a 4 tuple, left upper-coord and right-bottom coordinates\r\n if not ((a + sh_w < 0) or (c + sh_w > w) or (b + sh_h < 0) or (d + sh_h > h)):\r\n continue\r\n\r\n if v.is_leaf():\r\n return False\r\n\r\n stack += v.get_children_list()\r\n\r\n return True", "def single_quad_op_sparse(n_modes, mode, quadrature, hbar, trunc):\n if trunc < 1 or not isinstance(trunc, int):\n raise ValueError(\"Fock space truncation must be a positive integer.\")\n\n b = boson_ladder_sparse(1, 0, 0, trunc)\n\n if quadrature == 'q':\n op = numpy.sqrt(hbar / 2) * (b + b.conj().T)\n elif quadrature == 'p':\n op = -1j * numpy.sqrt(hbar / 2) * (b - b.conj().T)\n\n Id = [scipy.sparse.identity(trunc, dtype=complex, format='csc')]\n operator_list = Id * mode + [op] + Id * (n_modes - mode - 1)\n operator = kronecker_operators(operator_list)\n\n return operator", "def fn(p, q):\n if not p or not q: return p is q\n return fn(p.left, q.left) and p.val == q.val and fn(p.right, q.right)", "def has_shape(node):\n allowed_shapes = (\n pm.nt.Mesh,\n pm.nt.NurbsCurve,\n pm.nt.NurbsSurface\n )\n\n has_it = False\n\n children = node.getChildren()\n while len(children) and not has_it:\n child = children.pop(0)\n if isinstance(child, allowed_shapes):\n has_it = True\n break\n children += child.getChildren()\n\n return has_it", "def contains(self, g, strict=True):\n if not isinstance(g, Permutation):\n return False\n if g.size != self.degree:\n if strict:\n return False\n g = Permutation(g, size=self.degree)\n if g in self.generators:\n return True\n return bool(self.coset_factor(g.array_form, True))", "def semileaf(self):\n if self._leftchild and not self._rightchild:\n return True\n if self._rightchild and not self._leftchild:\n return True\n return False", "def modular_squareroot_in_FQ2(value: FQ2) -> Optional[FQ2]:\n candidate_squareroot = value ** ((FQ2_ORDER + 8) // 16)\n check = candidate_squareroot ** 2 / value\n if check in EIGTH_ROOTS_OF_UNITY[::2]:\n x1 = candidate_squareroot / EIGTH_ROOTS_OF_UNITY[EIGTH_ROOTS_OF_UNITY.index(check) // 2]\n x2 = -x1\n x1_re, x1_im = x1.coeffs\n x2_re, x2_im = x2.coeffs\n return x1 if (x1_im > x2_im or (x1_im == x2_im and x1_re > x2_re)) else x2\n return None", "def zzX_sqf_part(f):\n quo = zzX_quo(f, zzX_gcd(f, zzX_diff(f)))\n return 
zzX_primitive(quo)[1]", "def bfs(self, initialSt, goalSt): # Breadth­First Search\n self.__reset_all_variables()\n\n start = time.perf_counter()\n\n frontier = deque() # deque will be treated as a queue\n frontier.append(initialSt)\n explored = set()\n frontier_U_explored = set() # for fasten up the lookup time\n\n max_frontier_size = 0\n max_ram_used = psutil.virtual_memory().used\n\n while len(frontier) != 0:\n currentState = frontier.popleft()\n explored.add(currentState)\n frontier_U_explored.add(currentState)\n\n if goalSt == currentState:\n end = time.perf_counter()\n self.__success(initialSt,\n currentState,\n len(explored)-1,\n len(frontier),\n max_frontier_size,\n frontier[-1].depth,\n end-start,\n max_ram_used,\n \"bfs\")\n return True\n\n for child in currentState.children():\n if child not in frontier_U_explored:\n frontier.append(child)\n\n max_frontier_size = len(frontier) if len(\n frontier) > max_frontier_size else max_frontier_size\n max_ram_used = psutil.virtual_memory().used if psutil.virtual_memory(\n ).used > max_ram_used else max_ram_used\n return False" ]
[ "0.56360364", "0.5465797", "0.5397887", "0.5356589", "0.533036", "0.51120263", "0.50754285", "0.5052217", "0.49830848", "0.49745223", "0.49274656", "0.4885014", "0.48708126", "0.484963", "0.48122284", "0.47914714", "0.47727177", "0.47523925", "0.47462133", "0.47416347", "0.47316313", "0.47047514", "0.47031182", "0.4694216", "0.46798366", "0.46722856", "0.46672094", "0.4665157", "0.46398547", "0.46220466", "0.46148542", "0.46018225", "0.45955884", "0.4570583", "0.457045", "0.45639583", "0.45612666", "0.4560077", "0.45531046", "0.45418832", "0.45374998", "0.4529607", "0.45285222", "0.45280957", "0.45212322", "0.4507305", "0.44963312", "0.44920737", "0.44878006", "0.44835067", "0.44785982", "0.44653252", "0.4461542", "0.44586474", "0.4456669", "0.44540268", "0.44487306", "0.44368193", "0.4425075", "0.44236934", "0.4413396", "0.44129118", "0.44099802", "0.44096908", "0.44059062", "0.44059062", "0.44059062", "0.44048867", "0.4401681", "0.44005072", "0.43976283", "0.4386524", "0.43862936", "0.43841127", "0.43838254", "0.43731683", "0.43665993", "0.435268", "0.4347043", "0.43458176", "0.43438834", "0.43427262", "0.43426126", "0.4339035", "0.43389386", "0.4338916", "0.4335729", "0.43348324", "0.43348324", "0.43341377", "0.4332452", "0.43321782", "0.43312857", "0.43305147", "0.43225527", "0.43223777", "0.4318187", "0.43148857", "0.43148604", "0.43129137" ]
0.74997777
0
Returns squarefree part of a polynomial in ``K[X]``.

Examples
========

>>> R, x, y = ring('x y', ZZ)

>>> R.sqf_part(x**3 + 2*x**2*y + x*y**2)
x**2 + x*y
def sqf_part(self, f):
    domain = self.domain

    if domain.is_FiniteField:
        g = self.one
        for f, _ in self.sqf_list(f)[1]:
            g *= f
        return g

    if not f:
        return f

    gcd = f
    for x in self.gens:
        gcd = self.gcd(gcd, f.diff(x))
    sqf = f // gcd

    if domain.is_Field:
        return sqf.monic()
    return sqf.primitive()[1]
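A minimal, self-contained sketch (not part of the dataset record) of the identity the code above uses: over ZZ or a field, the squarefree part of f is f divided by gcd(f, df/dx1, ..., df/dxn). It reproduces the docstring example with plain SymPy rather than the record's ring API; the SymPy calls and names are illustrative assumptions.

# Illustrative sketch only; assumes SymPy is available (not part of the record).
from sympy import symbols, diff, gcd, quo

x, y = symbols('x y')
f = x**3 + 2*x**2*y + x*y**2              # f = x*(x + y)**2

g = gcd(gcd(f, diff(f, x)), diff(f, y))   # gcd of f with all its partial derivatives
sqf = quo(f, g)                           # exact quotient, the squarefree part

print(sqf)                                # x**2 + x*y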
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def zzX_sqf_part(f):\n quo = zzX_quo(f, zzX_gcd(f, zzX_diff(f)))\n return zzX_primitive(quo)[1]", "def rfpart(x):\n return 1 - fpart(x)", "def sqf_part(f):\n return f.per(dmp_sqf_part(f.rep, f.lev, f.dom))", "def zzx_sqf_part(f):\n quo = zzx_quo(f, zzx_gcd(f, zzx_diff(f)))\n return zzx_primitive(quo)[1]", "def rfpart(x):\n return 1 - Util.fpart(x)", "def zzX_eval_for(f, k, x):\n if k < 0:\n k += poly_level(f) + 1\n\n if k == 1:\n return zzX_eval(f, x)\n\n def rec_eval(g, l):\n if l == k:\n return zzX_eval(g, x)\n else:\n return zzX_strip([ rec_eval(coeff, l+1) for coeff in g ])\n\n return rec_eval(f, 1)", "def zzX_sqr(f):\n if poly_univariate_p(f):\n return zzx_sqr(f)\n\n if zzX_zero_p(f):\n return f\n\n df = zzX_degree(f)\n l = poly_level(f)-1\n\n h = []\n\n for i in xrange(0, 2*df+1):\n coeff = zzX_zero(l)\n\n jmin = max(0, i-df)\n jmax = min(i, df)\n\n n = jmax - jmin + 1\n\n jmax = jmin + n // 2 - 1\n\n for j in xrange(jmin, jmax+1):\n coeff = zzX_add(coeff, zzX_mul(f[j], f[i-j]))\n\n coeff = zzX_mul_const(coeff, 2)\n\n if n & 1:\n elem = zzX_sqr(f[jmax+1])\n coeff = zzX_add(coeff, elem)\n\n h.append(coeff)\n\n return h", "def splineBasis(K, x, degree=3):\n # Function written by M. Defferrard, taken verbatim (except for function\n # name), from \n # https://github.com/mdeff/cnn_graph/blob/master/lib/models.py#L662\n if np.isscalar(x):\n x = np.linspace(0, 1, x)\n\n # Evenly distributed knot vectors.\n kv1 = x.min() * np.ones(degree)\n kv2 = np.linspace(x.min(), x.max(), K-degree+1)\n kv3 = x.max() * np.ones(degree)\n kv = np.concatenate((kv1, kv2, kv3))\n\n # Cox - DeBoor recursive function to compute one spline over x.\n def cox_deboor(k, d):\n # Test for end conditions, the rectangular degree zero spline.\n if (d == 0):\n return ((x - kv[k] >= 0) & (x - kv[k + 1] < 0)).astype(int)\n\n denom1 = kv[k + d] - kv[k]\n term1 = 0\n if denom1 > 0:\n term1 = ((x - kv[k]) / denom1) * cox_deboor(k, d - 1)\n\n denom2 = kv[k + d + 1] - kv[k + 1]\n term2 = 0\n if denom2 > 0:\n term2 = ((-(x - kv[k + d + 1]) / denom2) * cox_deboor(k + 1, d - 1))\n\n return term1 + term2\n\n # Compute basis for each point\n basis = np.column_stack([cox_deboor(k, degree) for k in range(K)])\n basis[-1,-1] = 1\n return basis", "def Q(self, k, x):\n g = np.asarray(self.g(k, x))\n Q = g @ g.T\n return Q", "def __test_s_polynomial():\n poly_ring = PolynomialRing(QQ, 'x,y', order='deglex')\n x, y = poly_ring('x'), poly_ring('y')\n g = x ** 3 - 2 * x * y\n h = x ** 2 * y - 2 * y ** 2 + x\n print __s_polynomial(g, h) # Expected -x^2", "def partition_pair_to_spart(part_pair):\n part_star = list(part_pair[0])\n part_circ_star = list(part_pair[1])\n add_zeros = len(part_circ_star) - len(part_star)\n if add_zeros != 0:\n new_star = part_star + [0]\n else:\n new_star = part_star\n diff_list = [a - b for a, b in zip(part_circ_star, new_star)]\n fermionic_parts = []\n bosonic_parts = []\n for k in range(len(diff_list)):\n if diff_list[k] == 0:\n bosonic_parts += [part_circ_star[k]]\n elif diff_list[k] == 1:\n fermionic_parts += [new_star[k]]\n else:\n raise Exception(\"This should not happen.\")\n # sparts = Superpartitions()\n return _Superpartitions([fermionic_parts, bosonic_parts])", "def chebyshev_polynomial(X, k):\n print(\"Calculating Chebyshev polynomials up to order {}...\".format(k))\n\n T_k = list()\n T_k.append(sp.eye(X.shape[0]).tocsr())\n T_k.append(X)\n\n def chebyshev_recurrence(T_k_minus_one, T_k_minus_two, X):\n X_ = sp.csr_matrix(X, copy=True)\n return 2 * X_.dot(T_k_minus_one) - T_k_minus_two\n\n for i in 
range(2, k+1):\n T_k.append(chebyshev_recurrence(T_k[-1], T_k[-2], X))\n\n return T_k", "def construct_qsp_model(poly_deg, measurement=\"z\"):\n theta_input = tf.keras.Input(shape=(1,), dtype=tf.float32, name=\"theta\")\n qsp = QSP(poly_deg, measurement=measurement)\n real_parts = qsp(theta_input)\n model = tf.keras.Model(inputs=theta_input, outputs=real_parts)\n optimizer = tf.keras.optimizers.Adam(learning_rate=0.1)\n loss = tf.keras.losses.MeanSquaredError()\n model.compile(optimizer=optimizer, loss=loss)\n return model", "def find_partitions(V,k):\n k_subs = k_subset(V,k)\n k_subs = uniq_subsets(k_subs)\n\n return k_subs", "def gschmidt(order,point,weight=0,trunc=1):\n\n printer(0,'Computing polynomials ...')\n\n point = np.atleast_2d(np.transpose(point)).T\n if not np.any(weight): weight = 1/point.shape[0]\n dim = point.shape[1]\n\n # Creates the tensor product of univariate polynomials\n\n nbrPoly = order+1\n expo = indextens(order,dim,trunc)\n nbrPoly = expo.shape[1]\n coef = sparse.eye(nbrPoly)\n base = Polynomial(expo,coef)\n\n # Computes modified Gram-Schmidt algorithm\n\n V = base.eval(point)\n V = np.transpose(np.sqrt(weight)*V.T)\n R = np.linalg.qr(V,'r')\n\n if V.shape[0]<nbrPoly: raise Exception('Underdetermined system')\n coef = linalg.lapack.dtrtri(R)[0].T\n coef[0,0] = 1\n\n poly = Polynomial(expo,coef,1)\n printer(1,'Computing polynomials 100 %')\n return poly", "def roots_quintic(f):\n result = []\n\n coeff_5, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n if not all(coeff.is_Rational for coeff in (coeff_5, coeff_4, p_, q_, r_, s_)):\n return result\n\n if coeff_5 != 1:\n f = Poly(f / coeff_5)\n _, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n # Cancel coeff_4 to form x^5 + px^3 + qx^2 + rx + s\n if coeff_4:\n p = p_ - 2*coeff_4*coeff_4/5\n q = q_ - 3*coeff_4*p_/5 + 4*coeff_4**3/25\n r = r_ - 2*coeff_4*q_/5 + 3*coeff_4**2*p_/25 - 3*coeff_4**4/125\n s = s_ - coeff_4*r_/5 + coeff_4**2*q_/25 - coeff_4**3*p_/125 + 4*coeff_4**5/3125\n x = f.gen\n f = Poly(x**5 + p*x**3 + q*x**2 + r*x + s)\n else:\n p, q, r, s = p_, q_, r_, s_\n\n quintic = PolyQuintic(f)\n\n # Eqn standardized. 
Algo for solving starts here\n if not f.is_irreducible:\n return result\n f20 = quintic.f20\n # Check if f20 has linear factors over domain Z\n if f20.is_irreducible:\n return result\n # Now, we know that f is solvable\n for _factor in f20.factor_list()[1]:\n if _factor[0].is_linear:\n theta = _factor[0].root(0)\n break\n d = discriminant(f)\n delta = sqrt(d)\n # zeta = a fifth root of unity\n zeta1, zeta2, zeta3, zeta4 = quintic.zeta\n T = quintic.T(theta, d)\n tol = S(1e-10)\n alpha = T[1] + T[2]*delta\n alpha_bar = T[1] - T[2]*delta\n beta = T[3] + T[4]*delta\n beta_bar = T[3] - T[4]*delta\n\n disc = alpha**2 - 4*beta\n disc_bar = alpha_bar**2 - 4*beta_bar\n\n l0 = quintic.l0(theta)\n Stwo = S(2)\n l1 = _quintic_simplify((-alpha + sqrt(disc)) / Stwo)\n l4 = _quintic_simplify((-alpha - sqrt(disc)) / Stwo)\n\n l2 = _quintic_simplify((-alpha_bar + sqrt(disc_bar)) / Stwo)\n l3 = _quintic_simplify((-alpha_bar - sqrt(disc_bar)) / Stwo)\n\n order = quintic.order(theta, d)\n test = (order*delta.n()) - ( (l1.n() - l4.n())*(l2.n() - l3.n()) )\n # Comparing floats\n if not comp(test, 0, tol):\n l2, l3 = l3, l2\n\n # Now we have correct order of l's\n R1 = l0 + l1*zeta1 + l2*zeta2 + l3*zeta3 + l4*zeta4\n R2 = l0 + l3*zeta1 + l1*zeta2 + l4*zeta3 + l2*zeta4\n R3 = l0 + l2*zeta1 + l4*zeta2 + l1*zeta3 + l3*zeta4\n R4 = l0 + l4*zeta1 + l3*zeta2 + l2*zeta3 + l1*zeta4\n\n Res = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n Res_n = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n\n # Simplifying improves performance a lot for exact expressions\n R1 = _quintic_simplify(R1)\n R2 = _quintic_simplify(R2)\n R3 = _quintic_simplify(R3)\n R4 = _quintic_simplify(R4)\n\n # hard-coded results for [factor(i) for i in _vsolve(x**5 - a - I*b, x)]\n x0 = z**(S(1)/5)\n x1 = sqrt(2)\n x2 = sqrt(5)\n x3 = sqrt(5 - x2)\n x4 = I*x2\n x5 = x4 + I\n x6 = I*x0/4\n x7 = x1*sqrt(x2 + 5)\n sol = [x0, -x6*(x1*x3 - x5), x6*(x1*x3 + x5), -x6*(x4 + x7 - I), x6*(-x4 + x7 + I)]\n\n R1 = R1.as_real_imag()\n R2 = R2.as_real_imag()\n R3 = R3.as_real_imag()\n R4 = R4.as_real_imag()\n\n for i, s in enumerate(sol):\n Res[1][i] = _quintic_simplify(s.xreplace({z: R1[0] + I*R1[1]}))\n Res[2][i] = _quintic_simplify(s.xreplace({z: R2[0] + I*R2[1]}))\n Res[3][i] = _quintic_simplify(s.xreplace({z: R3[0] + I*R3[1]}))\n Res[4][i] = _quintic_simplify(s.xreplace({z: R4[0] + I*R4[1]}))\n\n for i in range(1, 5):\n for j in range(5):\n Res_n[i][j] = Res[i][j].n()\n Res[i][j] = _quintic_simplify(Res[i][j])\n r1 = Res[1][0]\n r1_n = Res_n[1][0]\n\n for i in range(5):\n if comp(im(r1_n*Res_n[4][i]), 0, tol):\n r4 = Res[4][i]\n break\n\n # Now we have various Res values. Each will be a list of five\n # values. We have to pick one r value from those five for each Res\n u, v = quintic.uv(theta, d)\n testplus = (u + v*delta*sqrt(5)).n()\n testminus = (u - v*delta*sqrt(5)).n()\n\n # Evaluated numbers suffixed with _n\n # We will use evaluated numbers for calculation. 
Much faster.\n r4_n = r4.n()\n r2 = r3 = None\n\n for i in range(5):\n r2temp_n = Res_n[2][i]\n for j in range(5):\n # Again storing away the exact number and using\n # evaluated numbers in computations\n r3temp_n = Res_n[3][j]\n if (comp((r1_n*r2temp_n**2 + r4_n*r3temp_n**2 - testplus).n(), 0, tol) and\n comp((r3temp_n*r1_n**2 + r2temp_n*r4_n**2 - testminus).n(), 0, tol)):\n r2 = Res[2][i]\n r3 = Res[3][j]\n break\n if r2 is not None:\n break\n else:\n return [] # fall back to normal solve\n\n # Now, we have r's so we can get roots\n x1 = (r1 + r2 + r3 + r4)/5\n x2 = (r1*zeta4 + r2*zeta3 + r3*zeta2 + r4*zeta1)/5\n x3 = (r1*zeta3 + r2*zeta1 + r3*zeta4 + r4*zeta2)/5\n x4 = (r1*zeta2 + r2*zeta4 + r3*zeta1 + r4*zeta3)/5\n x5 = (r1*zeta1 + r2*zeta2 + r3*zeta3 + r4*zeta4)/5\n result = [x1, x2, x3, x4, x5]\n\n # Now check if solutions are distinct\n\n saw = set()\n for r in result:\n r = r.n(2)\n if r in saw:\n # Roots were identical. Abort, return []\n # and fall back to usual solve\n return []\n saw.add(r)\n\n # Restore to original equation where coeff_4 is nonzero\n if coeff_4:\n result = [x - coeff_4 / 5 for x in result]\n return result", "def zzX_degree_for(f, k):\n if k < 0:\n k += poly_level(f) + 1\n\n if k == 1:\n return zzX_degree(f)\n\n def rec_degree(g, l):\n if l == k:\n return zzX_degree(g)\n else:\n return max([ rec_degree(coeff, l+1) for coeff in g ])\n\n return rec_degree(f, 1)", "def zzX_sqf_p(f):\n return zzX_one_p(zzX_gcd(zzX_primitive(f)[1], zzX_diff(f)))", "def zzX_compose_term(f, K):\n def rec_compose(g, l):\n if poly_univariate_p(g):\n return zzx_compose_term(g, K[l])\n\n if K[l] <= 0:\n raise ValueError(\"All 'K[i]' must be positive, got %s\" % K[l])\n\n g = [ rec_compose(c, l+1) for c in g ]\n result, L = [g[0]], poly_level(g) - 1\n\n for coeff in g[1:]:\n for i in xrange(1, K[l]):\n result.append(zzX_zero(L))\n\n result.append(coeff)\n\n return result\n\n if all([ k == 1 for k in K ]):\n return f\n else:\n return rec_compose(f, 0)", "def calc_q_square(self):\n return self._q_x()**2 + self._q_z()**2", "def zzx_factor_sqf(f, **flags):\n cont, g = zzx_primitive(f)\n\n n = zzx_degree(g)\n\n if n <= 0:\n return cont, []\n\n if poly_LC(g) < 0:\n cont, g = -cont, zzx_neg(g)\n\n if n == 1 or zzx_eisenstein(g):\n return cont, [(g, 1)]\n\n factors = []\n\n if flags.get('cyclotomic', True):\n factors = zzx_cyclotomic_factor(g)\n\n if factors is None:\n factors = zzx_zassenhaus(g)\n\n def compare(f_a, f_b):\n i = len(f_a) - len(f_b)\n\n if not i:\n return cmp(f_a, f_b)\n else:\n return i\n\n return cont, sorted(factors, compare)", "def build_rightpart():\n # build in 1: (K dec)\n apply_card(\"put\", 1)\n apply_slot(1, \"dec\")\n apply_card(\"K\", 1)\n\n # build in 0: greg\n build_greg(0)\n\n # smash together to get (greg (K dec)) in 0\n smash()\n\n # copy it to 1.\n apply_card(\"put\", 1)\n apply_slot(1, \"zero\")\n apply_card(\"get\", 1)\n\n # build horace in 0.\n build_horace(0)\n\n # smash together to get (horace (greg (K dec))) in 0.\n smash()\n\n # Wrap with an S.\n apply_card(\"S\", 0)\n\n # build ian in 1.\n build_ian(1)\n\n # smash together to get ((S (horace (greg (K dec)))) ian) in 0.\n smash()", "def zzx_compose_term(f, k):\n if k <= 0:\n raise ValueError(\"'k' must be positive, got %s\" % k)\n if k == 1 or not f:\n return f\n\n result = [f[0]]\n\n for coeff in f[1:]:\n result.extend([0]*(k-1))\n result.append(coeff)\n\n return result", "def pmf(self, k):\n if k % 1 != 0:\n k = int(k)\n if k < 0 and k <= self.n:\n return 0\n q = 1 - self.p\n co = (self.factorial(self.n) 
/ ((self.factorial(self.n-k)\n * self.factorial(k))))\n q2 = q ** (self.n - k)\n return co * (self.p ** k) * q2", "def free(x):\n _, p = extract_q_p(x)\n return tf.squeeze(0.5 * tf.reduce_sum(tf.square(p), axis=1))", "def part_recur(ckt, initial, w):\n partition_set = []\n# partition_mech = KLPart.KLPartition()\n# convert_Gate(ckt, partition_mech)\n print \"Diving into C++\"\n# (a, b) = partition_mech.partition_once(KLPart.StringVector(list(set(initial))))\n (a, b) = partition(ckt, list(set(initial)))\n print \"Coming back up\"\n if len(get_inputs(ckt, a)) > w and len(a) > 3:\n partition_set = partition_set + part_recur(ckt, a, w)\n else:\n partition_set.append(a)\n if len(get_inputs(ckt, b)) > w and len(b) > 3:\n partition_set = partition_set + part_recur(ckt, b, w)\n else:\n partition_set.append(b)\n return partition_set", "def evaluate_quadratic(shape,x):\n d = ((shape.a*x)** 2) + (shape.b * x) + shape.c\n return d", "def getPartitionFunction(self, Tlist):\n\t\tQ = np.ones((len(Tlist)), np.float64) / self.symmetry\n\t\t# Active K-rotor\n\t\trotors = [mode for mode in self.modes if isinstance(mode, RigidRotor)]\n\t\tif len(rotors) == 0:\n\t\t\tTrot = constants.h * constants.c * 100.0 * 1.0 / constants.kB\n\t\t\tQ0 = [math.sqrt(T / Trot) for T in Tlist]\n\t\t\tfor i in range(len(Tlist)):\n\t\t\t\tQ[i] *= Q0[i]\n\t\t# Other modes\n\t\tfor mode in self.modes:\n\t\t\tQ0 = mode.getPartitionFunction(Tlist)\n\t\t\tfor i in range(len(Tlist)):\n\t\t\t\tQ[i] *= Q0[i]\n\t\treturn Q", "def fpart(x):\n return x - np.floor(x)", "def getskx(self,whichsol_,skx_):\n _skx_minlength = self.getnumvar()\n if self.getnumvar() > 0 and skx_ is not None and len(skx_) != self.getnumvar():\n raise ValueError(\"Array argument skx is not long enough: Is %d, expected %d\" % (len(skx_),self.getnumvar()))\n if isinstance(skx_,numpy.ndarray) and not skx_.flags.writeable:\n raise ValueError(\"Argument skx must be writable\")\n if skx_ is not None:\n _skx_tmp = (ctypes.c_int32 * len(skx_))()\n else:\n _skx_tmp = None\n res = __library__.MSK_XX_getskx(self.__nativep,whichsol_,_skx_tmp)\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n if skx_ is not None: skx_[:] = [ stakey(v) for v in _skx_tmp[0:len(skx_)] ]", "def roots_quadratic(f):\n\n a, b, c = f.all_coeffs()\n dom = f.get_domain()\n\n def _sqrt(d):\n # remove squares from square root since both will be represented\n # in the results; a similar thing is happening in roots() but\n # must be duplicated here because not all quadratics are binomials\n co = []\n other = []\n for di in Mul.make_args(d):\n if di.is_Pow and di.exp.is_Integer and di.exp % 2 == 0:\n co.append(Pow(di.base, di.exp//2))\n else:\n other.append(di)\n if co:\n d = Mul(*other)\n co = Mul(*co)\n return co*sqrt(d)\n return sqrt(d)\n\n def _simplify(expr):\n if dom.is_Composite:\n return factor(expr)\n else:\n from sympy.simplify.simplify import simplify\n return simplify(expr)\n\n if c is S.Zero:\n r0, r1 = S.Zero, -b/a\n\n if not dom.is_Numerical:\n r1 = _simplify(r1)\n elif r1.is_negative:\n r0, r1 = r1, r0\n elif b is S.Zero:\n r = -c/a\n if not dom.is_Numerical:\n r = _simplify(r)\n\n R = _sqrt(r)\n r0 = -R\n r1 = R\n else:\n d = b**2 - 4*a*c\n A = 2*a\n B = -b/A\n\n if not dom.is_Numerical:\n d = _simplify(d)\n B = _simplify(B)\n\n D = factor_terms(_sqrt(d)/A)\n r0 = B - D\n r1 = B + D\n if a.is_negative:\n r0, r1 = r1, r0\n elif not dom.is_Numerical:\n r0, r1 = [expand_2arg(i) for i in (r0, r1)]\n\n return [r0, r1]", "def Sk(self, x, k):\n self._check(x, k)\n\n 
from math import atan, pi, log\n log_x = log(x) # base e\n # This is from equation 32 on page 978 of Riesel-Gohl.\n term1 = self.msum / (2 * log_x) + \\\n (1 / pi) * atan(pi / log_x)\n\n # This is from equation 19 on page 975\n term2 = sum(self.Tk(x, v) for v in range(1, k + 1))\n return term1 + term2", "def bezier_surface(points, part=10):\n nU = points.shape[0]\n nV = points.shape[1]\n nPoints = nU*nV\n\n xPoints = np.array([p[0] for p in points.reshape(nPoints, 3)])\n yPoints = np.array([p[1] for p in points.reshape(nPoints, 3)])\n zPoints = np.array([p[2] for p in points.reshape(nPoints, 3)])\n\n u = np.linspace(0, 1, part)\n v = np.linspace(0, 1, part)\n\n polynomial_array = np.array([[bernstein_surface(i, j, nU - 1, nV - 1, u, v) for j in\n range(nV)] for i in range(nU)]).reshape(nPoints, part ** 2)\n\n xvals = []\n yvals = []\n zvals = []\n\n for j in range(len(polynomial_array[0])):\n xvals.append(sum([polynomial_array[i][j] * xPoints[i] for i in range(nPoints)]))\n yvals.append(sum([polynomial_array[i][j] * yPoints[i] for i in range(nPoints)]))\n zvals.append(sum([polynomial_array[i][j] * zPoints[i] for i in range(nPoints)]))\n\n return (xvals, yvals, zvals)", "def K(self, X, X2=None, which_parts='all'):\r\n if which_parts == 'all':\r\n which_parts = [True] * self.num_parts\r\n assert X.shape[1] == self.input_dim\r\n if X2 is None:\r\n target = np.zeros((X.shape[0], X.shape[0]))\r\n [p.K(X[:, i_s], None, target=target) for p, i_s, part_i_used in zip(self.parts, self.input_slices, which_parts) if part_i_used]\r\n else:\r\n target = np.zeros((X.shape[0], X2.shape[0]))\r\n [p.K(X[:, i_s], X2[:, i_s], target=target) for p, i_s, part_i_used in zip(self.parts, self.input_slices, which_parts) if part_i_used]\r\n return target", "def splmake(xk, yk, order=3, kind='smoothest', conds=None):\n yk = np.asanyarray(yk)\n\n order = int(order)\n if order < 0:\n raise ValueError(\"order must not be negative\")\n if order == 0:\n return xk, yk[:-1], order\n elif order == 1:\n return xk, yk, order\n\n try:\n func = eval('_find_%s' % kind)\n except:\n raise NotImplementedError\n\n # the constraint matrix\n B = _fitpack._bsplmat(order, xk)\n coefs = func(xk, yk, order, conds, B)\n return xk, coefs, order", "def to_quadratic_program(self) -> QuadraticProgram:\n mdl = Model(name=\"Number partitioning\")\n x = {i: mdl.binary_var(name=\"x_{0}\".format(i)) for i in range(len(self._number_set))}\n mdl.add_constraint(\n mdl.sum(num * (-2 * x[i] + 1) for i, num in enumerate(self._number_set)) == 0\n )\n op = from_docplex_mp(mdl)\n return op", "def G_quotient(self, r, b=-1, label_swap_xy=False):\n # Sagemath uses the convention {1:E, 0:N} when reading partition from a path sequence, so we have to swap '0's and '1's\n p_list = [Partition(zero_one=invert_zero_one(wire)) for wire in self.G_abacus(r,b)]\n # Reflect the order of partitions in the $b=-1$ case `G_quotient` to account for differences in conventions for cell colouring \n # for compatibility with `Partition.quotient`.\n if label_swap_xy:\n p_list = [p_list[0]] + p_list[:0:-1]\n # Cast the list of partitions in the quotient as a `PartitionTuple` for compatibility with the `Partition.quotient` method\n return PartitionTuple(p_list)", "def rkha_basis(p: float, tau: float, k: K) -> Callable[[X], object]:\n\n # TODO: Improve typing of this function. 
The nlsa.function_algebra module\n # does not know that we can pass arrays of points in X as function\n # arguments, which is what we do throughout this module for efficiency.\n w = rkha_weights(p, tau)\n lam = w(k)\n phi = fourier_basis(k)\n psi = fun.mul(lam, phi)\n return psi", "def zzx_sub_term(f, c, k=0):\n if not c:\n return f\n\n n = len(f)\n m = n-k-1\n\n if k == n-1:\n return zzx_strip([f[0]-c] + f[1:])\n else:\n if k >= n:\n return [-c] + [INT_ZERO]*(k-n) + f\n else:\n return f[:m] + [f[m]-c] + f[m+1:]", "def modular_squareroot_in_FQ2(value: FQ2) -> Optional[FQ2]:\n candidate_squareroot = value ** ((FQ2_ORDER + 8) // 16)\n check = candidate_squareroot ** 2 / value\n if check in EIGTH_ROOTS_OF_UNITY[::2]:\n x1 = candidate_squareroot / EIGTH_ROOTS_OF_UNITY[EIGTH_ROOTS_OF_UNITY.index(check) // 2]\n x2 = -x1\n x1_re, x1_im = x1.coeffs\n x2_re, x2_im = x2.coeffs\n return x1 if (x1_im > x2_im or (x1_im == x2_im and x1_re > x2_re)) else x2\n return None", "def vsfun(Q_slm, theta, phi,f=[]):\n vsf_th=numpy.zeros(theta.shape, dtype='complex')\n vsf_ph=numpy.zeros(theta.shape, dtype='complex')\n for (s,l,m) in Q_slm:\n vsh_th,vsh_ph=K(s, l, m, theta, phi)\n c_slm=Q_slm.getBysnm(s, l, m) if not(f) else Q_slm.getBysnm(s, l, m)(f)\n vsf_th=vsf_th+c_slm*vsh_th\n vsf_ph=vsf_ph+c_slm*vsh_ph\n return vsf_th, vsf_ph", "def least_sqr_fit(self,x, y):\n A = np.array([ x, np.ones(len(x))])\n # linearly generated sequence\n a,f,g,h = np.linalg.lstsq(A.T,y) # obtaining the parameters\n print 'de gevonden rechte = %.10f x + %.10f' %(a[0], a[1])\n lined = map(lambda g: a[0]*g +a[1],x) # regression line\n return lined , a", "def partition_gdf(df, k, terms):\n return __partition_gdf_recursive(df, df.index, k, terms)", "def raise_spline_parts(self, n_spline_parts=None):\n if n_spline_parts is None:\n # usual case\n self._parameters['n_parts_x'] *= self._parameters['kx']\n\n # TODO: introduce parameter `ku` and handle it here\n # (and in CollocationSystem.get_guess())\n npu = self._parameters['n_parts_u']\n npu *= self._parameters['kx']\n nx = self.masterobject.dyn_sys.n_states\n # this should prevent the input signal from getting too much ripple\n np.clip(npu, 0, nx*3)\n self._parameters['n_parts_u'] = npu\n else:\n # this is used by processing first_guess\n assert isinstance(n_spline_parts, auxiliary.Container)\n self._parameters['n_parts_x'] = n_spline_parts.x\n self._parameters['n_parts_u'] = n_spline_parts.u\n\n return self.n_parts_x", "def Get(self,k:int): \n ### get partitions depending on the partition schemes C that depends on k!\n return subsets_k(list(range(self._n)),k)", "def gram_schmidt(S, start_col=0):\n Q = S.copy()\n k = S.shape[1]\n assert k > 1 and start_col >= 0\n start_col = min(S.shape[1], start_col)\n if Q.dtype != np.float32 and Q.dtype != np.float64:\n Q = Q.astype(np.float64)\n\n if start_col == 0:\n Q[:, 0] = normalize_vector(Q[:, 0])\n\n uu = []\n for i in range(start_col + 1, k):\n Q[:, i] = S[:, i]\n for j in range(0, i):\n u = Q[:, j]\n v = Q[:, i]\n if len(uu) <= j:\n uu.append(u.T.dot(u))\n Q[:, i] -= u * (u.T.dot(v) / uu[j])\n\n Q[:, i] = normalize_vector(Q[:, i])\n # Re-project Q[:, i] to the orthogonal complement of Q[:, :i] to make sure they stay orthogonal.\n Q[:, i] = Q[:, i] - Q[:, :i].dot(Q[:, :i].T.dot(Q[:, i]))\n\n return Q", "def fpart(x):\n return x - math.floor(x)", "def __simplifyRecurse(self): # TODO make this work with ^\n # check if we're a + node, a * node, or a simple node\n if isinstance(self.poly, (int, float, Variable.Variable)):\n return self\n elif 
self.poly[0] == \"+\":\n self.__handlePowPlus()\n\n newPoly = Polynomial()\n newPoly.poly = [\"+\"]\n for branch in self.poly[1:]:\n simplifiedBranch = ensurePoly(branch).__simplifyRecurse()\n if not simplifiedBranch.isSimple():\n for additiveTerm in simplifiedBranch.poly[1:]:\n newPoly.poly.append(additiveTerm)\n else:\n newPoly.poly.append(simplifiedBranch)\n return newPoly\n elif self.poly[0] == \"*\":\n self.__handlePowTimes()\n\n nonSimple, simple = self.__partitionSimpleAndNonSimpleBranches()\n\n if len(nonSimple) == 0: # this means our * node gives a monomial!\n return Polynomial(input=self)\n else:\n # do the full distribution\n simplified = self.__distribute(nonSimple, simple)\n newPoly = Polynomial()\n newPoly.poly = [\"+\"]\n for branch in simplified.poly[1:]:\n recursive = ensurePoly(branch).__simplifyRecurse()\n if recursive.poly[0] == \"*\":\n newPoly.poly.append(recursive)\n elif recursive.poly[0] == \"+\":\n newPoly.poly.extend(recursive.poly[1:])\n return newPoly\n\n elif self.poly[0] == \"^\":\n # this case will only be hit when we have a ^ node as the root\n newPoly = Polynomial()\n newPoly.poly = [\"*\"]\n for _ in range(self.poly[2]):\n newPoly.poly.append(self.poly[1])\n\n simp = newPoly.__simplifyRecurse()\n return simp", "def eval_f(self, part, t):\n\n N = self.params.nparts\n\n Emat = np.diag([1, 1, -2])\n f = self.dtype_f(((3, self.params.nparts), self.init[1], self.init[2]))\n\n f.elec[:] = self.get_interactions(part)\n\n for n in range(N):\n f.elec[:, n] += self.params.omega_E ** 2 / (part.q[n] / part.m[n]) * np.dot(Emat, part.pos[:, n])\n f.magn[:, n] = self.params.omega_B * np.array([0, 0, 1])\n\n return f", "def quartic_potential(x):\n k1=1\n k2=10\n return (k1*x**4)-(k2*x**2)", "def gram_schmidt(basis):\n b1 = basis[0]\n b2 = basis[1]\n\n basis1 = b1 / sqrt(innerprod_q2(b1, b1))\n b2 = b2 - innerprod_q2(basis1, b2) * basis1\n basis2 = b2 / sqrt(innerprod_q2(b2, b2))\n\n basis_o = [basis1, basis2]\n\n return (basis_o)", "def element_from_poly(self, f):\n n, k = self.n, f.degree()\n if k >= n:\n f = f % self.T\n if f == 0:\n return self.zero()\n d, c = dup_clear_denoms(f.rep.rep, QQ, convert=True)\n c = list(reversed(c))\n ell = len(c)\n z = [ZZ(0)] * (n - ell)\n col = to_col(c + z)\n return self(col, denom=d)", "def eval_K(self, S):\n K = (self.eigenfunctions[self.X] * S[None, :]) @ \\\n self.eigenfunctions[self.X].T # shape (n,n)\n return K", "def zzx_sqr(f):\n df, h = zzx_degree(f), []\n\n for i in xrange(0, 2*df+1):\n coeff = INT_ZERO\n\n jmin = max(0, i-df)\n jmax = min(i, df)\n\n n = jmax - jmin + 1\n\n jmax = jmin + n // 2 - 1\n\n for j in xrange(jmin, jmax+1):\n coeff += f[j]*f[i-j]\n\n coeff += coeff\n\n if n & 1:\n elem = f[jmax+1]\n coeff += elem**2\n\n h.append(coeff)\n\n return h", "def metis_partition(\n g,\n k,\n extra_cached_hops=0,\n reshuffle=False,\n balance_ntypes=None,\n balance_edges=False,\n mode=\"k-way\",\n):\n assert mode in (\n \"k-way\",\n \"recursive\",\n ), \"'mode' can only be 'k-way' or 'recursive'\"\n node_part = metis_partition_assignment(\n g, k, balance_ntypes, balance_edges, mode\n )\n if node_part is None:\n return None\n\n # Then we split the original graph into parts based on the METIS partitioning results.\n return partition_graph_with_halo(\n g, node_part, extra_cached_hops, reshuffle\n )[0]", "def partition(self, head: ListNode, x: int) -> ListNode:\n\n if not head:\n return head\n\n pre = dummy = ListNode(\"X\")\n dummy.next = head\n\n # First find where is the partition node\n while pre and pre.next:\n if 
pre.next.val >= x: # find the first partition point\n break\n else:\n pre = pre.next\n else: # if find no partition point just return head\n return head\n\n partition_head = partition_tail = pre.next # partition can have length\n check = partition_tail.next\n while check:\n if check.val < x:\n this_node = check # record current check\n check = check.next # move check to next\n\n # move this current node between pre and partition head\n pre.next = this_node\n this_node.next = partition_head\n\n partition_tail.next = check # link partition_tail to next node\n pre = pre.next # move pre after squeeze in\n else:\n partition_tail = partition_tail.next # extend the partition part\n check = check.next\n return dummy.next", "def get_bspline_basis(self, knots, degree=3, periodic=False):\n nknots = len(knots)\n y_dummy = np.zeros(nknots)\n\n knots, coeffs, degree = si.splrep(knots, y_dummy, k=degree, s=0,\n per=periodic)\n ncoeffs = len(coeffs)\n bsplines = []\n for ispline in range(nknots):\n coeffs = [1.0 if ispl == ispline else 0.0 for ispl in range(ncoeffs)]\n bsplines.append((knots, coeffs, degree))\n return bsplines", "def zzx_sqf_p(f):\n return zzx_one_p(zzx_gcd(zzx_primitive(f)[1], zzx_diff(f)))", "def qft_recursive(qubits):\n qftcirc = Circuit()\n\n # First add the QFT subroutine above\n qftcirc.add(qft_no_swap(qubits))\n\n # Then add SWAP gates to reverse the order of the qubits:\n for i in range(math.floor(len(qubits) / 2)):\n qftcirc.swap(qubits[i], qubits[-i - 1])\n\n return qftcirc", "def quadratic_model(X,F):\r\n \r\n from numpy import flipud, zeros, ones, prod, sum, arange\r\n from numpy.linalg import lstsq\r\n from VyPy.tools import index_set\r\n \r\n M,m = X.shape\r\n \r\n # coefficients\r\n I = flipud( index_set('full',2,m) )\r\n A = zeros([M,I.shape[1]])\r\n for i in range(I.shape[1]):\r\n ind = I[:,i,None]\r\n A[:,i] = prod( X ** ind.T , axis=1 )\r\n \r\n # solve \r\n t = lstsq(A,F)[0]\r\n \r\n # unwrap\r\n be = t[1:m+1,:]\r\n Al = zeros([m,m])\r\n for i in range(m+1,I.shape[1]):\r\n ind = I[:,i]\r\n loc = arange(m)[ind != 0]\r\n if len(loc) == 1:\r\n Al[loc,loc] = 2*t[i]\r\n else:\r\n Al[loc[0],loc[1]] = t[i]\r\n Al[loc[1],loc[0]] = t[i]\r\n \r\n return be,Al", "def rational_quadratic(input_dim, variance=1., lengthscale=1., power=1.):\r\n part = parts.rational_quadratic.RationalQuadratic(input_dim, variance, lengthscale, power)\r\n return kern(input_dim, [part])", "def chebyshev_polynomials(adj, k):\n print(\"Calculating Chebyshev polynomials up to order {}...\".format(k))\n\n adj_normalized = normalize_adj(adj)\n laplacian = sp.eye(adj.shape[0]) - adj_normalized\n largest_eigval, _ = eigsh(laplacian, 1, which='LM')\n scaled_laplacian = (\n 2. 
/ largest_eigval[0]) * laplacian - sp.eye(adj.shape[0])\n\n t_k = list()\n t_k.append(sp.eye(adj.shape[0]))\n t_k.append(scaled_laplacian)\n\n def chebyshev_recurrence(t_k_minus_one, t_k_minus_two, scaled_lap):\n s_lap = sp.csr_matrix(scaled_lap, copy=True)\n return 2 * s_lap.dot(t_k_minus_one) - t_k_minus_two\n\n for i in range(2, k + 1):\n t_k.append(chebyshev_recurrence(t_k[-1], t_k[-2], scaled_laplacian))\n\n return sparse_to_tensor(t_k)", "def kepler(x, k=1.0):\n assert(x.shape[2] == 1)\n\n q,p = extract_q_p(x)\n # The derivative of r wrt q is 1/sqrt(sum(q^2)), which is singular in 0.\n # Cutoff r so that it is > eps.\n eps = 1e-5\n r = tf.sqrt(tf.reduce_sum(tf.square(q), axis=1) + eps)\n return tf.squeeze(0.5 * tf.reduce_sum(tf.square(p), axis=1) + k / r)", "def sqrtx():\n return Operator([[(1.+1.j)/2,(1.-1.j)/2],[(1.-1.j)/2,(1.+1.j)/2]])", "def f(cls, R, K):\n bits = tuple(xor_streams(cls.expand_bits(R), K))\n Bs = nslice(bits, 6)\n Ss = [cls.s_box(i, bits) for i, bits in enumerate(Bs)]\n C = list(itertools.chain.from_iterable(Ss))\n return cls.permute(C, cls._sbox_permutation)", "def chebyshev_polynomials(adj, k):\n # print(\"Calculating Chebyshev polynomials up to order {}...\".format(k))\n\n adj_normalized = normalize_adj(adj)\n laplacian = sp.eye(adj.shape[0]) - adj_normalized\n largest_eigval, _ = eigsh(laplacian, 1, which='LM')\n scaled_laplacian = (2. / largest_eigval[0]) * laplacian - sp.eye(adj.shape[0])\n\n t_k = list()\n t_k.append(sp.eye(adj.shape[0]))\n t_k.append(scaled_laplacian)\n\n def chebyshev_recurrence(t_k_minus_one, t_k_minus_two, scaled_lap):\n s_lap = sp.csr_matrix(scaled_lap, copy=True)\n return 2 * s_lap.dot(t_k_minus_one) - t_k_minus_two\n\n for i in range(2, k+1):\n t_k.append(chebyshev_recurrence(t_k[-1], t_k[-2], scaled_laplacian))\n\n return sparse_to_tuple(t_k)", "def get_reg_part(reg_doc):\n\n potential_parts = []\n potential_parts.extend(\n # FR notice\n node.attrib['PART'] for node in reg_doc.xpath('//REGTEXT'))\n potential_parts.extend(\n # e-CFR XML, under PART/EAR\n node.text.replace('Pt.', '').strip()\n for node in reg_doc.xpath('//PART/EAR')\n if 'Pt.' 
in node.text)\n potential_parts.extend(\n # e-CFR XML, under FDSYS/HEADING\n node.text.replace('PART', '').strip()\n for node in reg_doc.xpath('//FDSYS/HEADING')\n if 'PART' in node.text)\n potential_parts.extend(\n # e-CFR XML, under FDSYS/GRANULENUM\n node.text.strip() for node in reg_doc.xpath('//FDSYS/GRANULENUM'))\n potential_parts = [p for p in potential_parts if p.strip()]\n\n if potential_parts:\n return potential_parts[0]", "def subpartition_bsp(self, min_width, min_height):\n \n def split_horizontal(p):\n ul_x, ul_y = p.ul_pos\n \n split_pos = (random.choice(\n list(range(ul_x + min_width, ul_x + p.width - min_width + 1))), ul_y)\n \n split_x, split_y = split_pos\n \n return([Partition(p.ul_pos, split_x-ul_x, p.height), \n Partition(split_pos, ul_x + p.width - split_x, p.height)])\n \n def split_vertical(p):\n ul_x, ul_y = p.ul_pos\n \n split_pos = (ul_x, random.choice(\n list(range(ul_y + min_height, ul_y + p.height - min_height + 1))))\n \n split_x, split_y = split_pos\n \n return([Partition(p.ul_pos, p.width, split_y-ul_y), \n Partition(split_pos, p.width, ul_y + p.height - split_y)])\n \n\n \n if self.width < min_width or self.height < min_height:\n raise PartitionException(\"Partition too small!\")\n \n splith = (self.width > 2*min_width)\n splitv = (self.height > 2*min_height)\n \n new_partitions = None\n \n if splith and splitv:\n new_partitions = random.choice([\n split_horizontal, split_vertical])(self)\n \n elif splith:\n new_partitions = split_horizontal(self)\n \n elif splitv:\n new_partitions = split_vertical(self)\n \n else:\n return [self]\n \n return list(flatten([p.subpartition_bsp(min_width, min_height) \n for p in new_partitions]))", "def chi2sf(x, k):", "def get_basisfunc(self, k, j, knots):\n def basisfunction(u, k=k, j=j, knots=knots):\n \"\"\"\n Method to evaluate the the basis function N^k with index j at point u.\n u (float): the point where to evaluate the basis function\n k (int): the degree of the basis function\n j (int): the index of the basis function we want to evaluate\n knots (array): knot sequence u_i, where i=0,...,K\n \"\"\"\n if k == 0:\n return 1 if knots[j] <= u < knots[j+1] \\\n else 0\n else:\n try:\n a0 = 0 if knots[j+k] == knots[j] \\\n else (u - knots[j])/(knots[j+k]-knots[j])\n a1 = 0 if knots[j+k+1] == knots[j+1] \\\n else (knots[j+k+1] - u)/(knots[j+k+1] - knots[j+1])\n basisfunc = a0 * basisfunction(u, k=k-1) \\\n + a1 * basisfunction(u, k=k-1, j=j+1)\n except IndexError:\n numBasisfunc = len(knots) - 1 - k\n return 'Invalid index. There are no more than {} basis functions for the given problem, choose an ' \\\n 'index lower than the number of basis functions.'.format(numBasisfunc)\n return basisfunc\n return basisfunction", "def basis_function(self, basis_function_index, polynomial_order):\n if polynomial_order == 0: # base case\n return self.basis_null(basis_function_index)\n else:\n # recursion formula from Hughes et. al. 2004, p. 
4140\n first_num = self.xi - self.knot_vector[basis_function_index]\n first_den = self.knot_vector[basis_function_index + polynomial_order] - self.knot_vector[basis_function_index]\n first_basis = self.basis_function(basis_function_index, polynomial_order - 1)\n\n second_num = self.knot_vector[basis_function_index + polynomial_order + 1] - self.xi\n second_den = self.knot_vector[basis_function_index + polynomial_order + 1] - self.knot_vector[basis_function_index + 1]\n second_basis = self.basis_function(basis_function_index + 1, polynomial_order - 1)\n\n with np.errstate(divide = 'ignore', invalid = 'ignore'): # ignore divide by zero errors, the np.where calls bypass them\n first_term = np.where(np.not_equal(first_den, 0), first_num * first_basis / first_den, 0)\n second_term = np.where(np.not_equal(second_den, 0), (second_num * second_basis / second_den), 0)\n\n return first_term + second_term", "def polyfit_2d(Xu,X):\n\txu = Xu[:,0]\n\tyu = Xu[:,1]\n\tX = np.squeeze(X) # an mx1 vector\n\tM = np.squeeze((np.ones(xu.size),xu,yu,xu**2,xu*yu,yu**2,\n\t\txu**3,xu**2*yu,xu*yu**2,yu**3)) # a mxn matrix\n\tM = M.transpose()\n\tprint(\"solving for the polynomial fitting coefficients...\")\n\tK,resid,rnk,svs = np.linalg.lstsq(M,X,rcond=-1) # k has size n\n\tprint(\"residue:%0.8f\trank:%0.8f\"%(np.sum(resid),rnk))\n\treturn K", "def Min(Fun, p, ubRes, conj):\n d = Fun.degree()\n AffFun = Fun.dehomogenize(1)\n R = AffFun.coordinate_ring()\n if R.is_field():\n #want the polynomial ring not the fraction field\n R = R.ring()\n F = R(AffFun[0].numerator())\n G = R(AffFun[0].denominator())\n dG = G.degree()\n if dG > (d+1)/2:\n lowerBound = (-2*(G[dG]).valuation(p)/(2*dG - d + 1) + 1).floor()\n else:\n lowerBound = (-2*(F[d]).valuation(p)/(d-1) + 1).floor()\n upperBound = 2*(ubRes.valuation(p))\n\n if upperBound < lowerBound:\n #There are no possible transformations to reduce the resultant.\n return Fun,conj\n else:\n #Looping over each possible k, we search for transformations to reduce the\n #resultant of F/G\n k = lowerBound\n Qb = PolynomialRing(QQ,'b')\n b = Qb.gen(0)\n Q = PolynomialRing(Qb,'z')\n z = Q.gen(0)\n while k <= upperBound:\n A = (p**k)*z + b\n Ft = Q(F(A) - b*G(A))\n Gt = Q((p**k)*G(A))\n Fcoeffs = Ft.coefficients(sparse=False)\n Gcoeffs = Gt.coefficients(sparse=False)\n coeffs = Fcoeffs + Gcoeffs\n RHS = (d + 1)*k/2\n #If there is some b such that Res(phi^A) < Res(phi), we must have ord_p(c) >\n #RHS for each c in coeffs.\n #Make sure constant coefficients in coeffs satisfy the inequality.\n if all( QQ(c).valuation(p) > RHS for c in coeffs if c.degree() ==0 ):\n #Constant coefficients in coeffs have large enough valuation, so check\n #the rest. We start by checking if simply picking b=0 works\n if all(c(0).valuation(p) > RHS for c in coeffs):\n #A = z*p^k satisfies the inequalities, and F/G is not minimal\n #\"Conjugating by\", p,\"^\", k, \"*z +\", 0\n newconj = matrix(QQ,2,2,[p**k,0,0,1])\n minFun = Fun.conjugate(newconj)\n conj = conj*newconj\n minFun.normalize_coordinates()\n return minFun, conj\n\n #Otherwise we search if any value of b will work. We start by finding a\n #minimum bound on the valuation of b that is necessary. See Theorem 3.3.5\n #in [Molnar, M.Sc. 
thesis].\n bval = max([bCheck(coeff,RHS,p,b) for coeff in coeffs if coeff.degree() > 0])\n\n #We scale the coefficients in coeffs, so that we may assume ord_p(b) is\n #at least 0\n scaledCoeffs = [coeff(b*(p**bval)) for coeff in coeffs]\n\n #We now scale the inequalities, ord_p(coeff) > RHS, so that coeff is in\n #ZZ[b]\n scale = QQ(max([coeff.denominator() for coeff in scaledCoeffs]))\n normalizedCoeffs = [coeff*scale for coeff in scaledCoeffs]\n scaleRHS = RHS + scale.valuation(p)\n\n #We now search for integers that satisfy the inequality ord_p(coeff) >\n #RHS. See Lemma 3.3.6 in [Molnar, M.Sc. thesis].\n bound = (scaleRHS+1).floor()\n bool,sol = blift(normalizedCoeffs,bound,p)\n\n #If bool is true after lifting, we have a solution b, and F/G is not\n #minimal.\n if bool:\n #Rescale, conjugate and return new map\n bsol = QQ(sol*(p**bval))\n #\"Conjugating by \", p,\"^\", k, \"*z +\", bsol\n newconj = matrix(QQ,2,2,[p**k,bsol,0,1])\n minFun = Fun.conjugate(newconj)\n conj = conj*newconj\n\n minFun.normalize_coordinates()\n return minFun, conj\n k = k + 1\n return Fun, conj", "def partsphere(self, x):\r\n self.counter += 1\r\n # return np.random.rand(1)[0]**0 * sum(x**2) + 1 * np.random.rand(1)[0]\r\n dim = len(x)\r\n x = array([x[i % dim] for i in range(2*dim)])\r\n N = 8\r\n i = self.counter % dim\r\n #f = sum(x[i:i + N]**2)\r\n f = sum(x[np.random.randint(dim, size=N)]**2)\r\n return f", "def quadraticize(self, x, u, k):\n num_players = len(u)\n\n # Congert to torch.Tensor format.\n x_torch = torch.from_numpy(x).requires_grad_(True)\n u_torch = [torch.from_numpy(ui).requires_grad_(True) for ui in u]\n\n # Evaluate cost here.\n cost_torch = self.__call__(x_torch, u_torch, k)\n cost = cost_torch.item()\n\n # Compute gradients (and store numpy versions).\n grad_x_torch = torch.autograd.grad(\n cost_torch, x_torch, create_graph=True, allow_unused=True)[0]\n grad_u_torch = [\n torch.autograd.grad(\n cost_torch, ui_torch, create_graph=True, allow_unused=True)[0]\n for ui_torch in u_torch]\n\n # Compute Hessians (and store numpy versions), and be careful to\n # catch Nones (which indicate cost not depending on a particular\n # variable).\n hess_x = np.zeros((len(x), len(x)))\n grad_x = np.zeros((len(x), 1))\n if grad_x_torch is not None:\n grad_x = grad_x_torch.detach().numpy().copy()\n for ii in range(len(x)):\n hess_row = torch.autograd.grad(\n grad_x_torch[ii, 0], x_torch, retain_graph=True)[0]\n hess_x[ii, :] = hess_row.detach().numpy().copy().T\n\n hess_u = []\n for ii in range(num_players):\n hess_ui = np.zeros((len(u[ii]), len(u[ii])))\n grad_ui_torch = grad_u_torch[ii]\n if grad_ui_torch is not None:\n grad_ui = grad_ui_torch.detach().numpy().copy()\n for dim in range(len(u[ii])):\n hess_row = torch.autograd.grad(\n grad_ui_torch[dim, 0], u_torch[ii], retain_graph=True)[0]\n hess_ui[dim, :] = hess_row.detach().numpy().copy().T\n\n hess_u.append(hess_ui)\n\n return cost, grad_x, hess_x, hess_u", "def build_leftpart():\n # build kelly.\n build_kelly()\n # copy kelly to 3.\n copy(0, 3)\n\n # build june in slots 0,1,2\n build_june()\n # copy kelly to slot 1\n copy(3, 1)\n\n # smash together to get (june kelly) in 0\n smash()\n # copy (june kelly) to 1\n copy(0, 1)\n # build horace in 0\n build_horace(0)\n # smash together to get (horace (june kelly)) in 0\n smash()\n # wrap with an S for the whole left part.\n apply_card(\"S\", 0)", "def steenrod_chain(p, s, q, bockstein=False, shape='simplex'):\n\n def filter_homogeneous(element):\n homogeneous = {}\n for k, v in element.items():\n if 
len(set(elmt.dimension for elmt in k)) == 1:\n homogeneous[k] = v\n return element.create(homogeneous)\n\n surj = Surjection.steenrod_operation(p, s, q, bockstein=bockstein)\n b = int(bockstein)\n\n if p == 2:\n d = q + s\n else:\n d = q + 2 * s * (p - 1) - b\n\n if shape == 'simplex':\n element = Simplicial.standard_element(-d, torsion=p)\n elif shape == 'cube':\n element = Cubical.standard_element(-d, torsion=p)\n\n return filter_homogeneous(surj(element))", "def evaluate_poly(poly, x):\n if len(poly) == 1:\n\t\t#base case\n\t\treturn poly[0]\n else:\n #recursive case\n #the first item in the tuple is the coefficient of X**0, so it's the final value\n #the rest of the items in the tuple need multiplied by X and put in new tuple\n #Yes, I'm cheating and casting a list to a tuple. GFY and your immutability.\n return poly[0] + evaluate_poly(tuple([x * coeff for coeff in poly[1:]]), x)", "def sqf_list(f, all=False):\n coeff, factors = dmp_sqf_list(f.rep, f.lev, f.dom, all=all)\n return coeff, [ (f.per(g), k) for g, k in factors ]", "def chebyshev_polynomials(adj, k):\n print(\"Calculating Chebyshev polynomials up to order {}...\".format(k))\n\n adj_normalized = normalize_adj(adj)\n laplacian = sp.eye(adj.shape[0]) - adj_normalized\n largest_eigval, _ = eigsh(laplacian, 1, which='LM')\n scaled_laplacian = (2. / largest_eigval[0]) * laplacian - sp.eye(adj.shape[0])\n\n t_k = list()\n t_k.append(sp.eye(adj.shape[0]))\n t_k.append(scaled_laplacian)\n\n def chebyshev_recurrence(t_k_minus_one, t_k_minus_two, scaled_lap):\n s_lap = sp.csr_matrix(scaled_lap, copy=True)\n return 2 * s_lap.dot(t_k_minus_one) - t_k_minus_two\n\n for i in range(2, k+1):\n t_k.append(chebyshev_recurrence(t_k[-1], t_k[-2], scaled_laplacian))\n\n return sparse_to_tuple(t_k)", "def chebyshev_polynomials(adj, k):\n print(\"Calculating Chebyshev polynomials up to order {}...\".format(k))\n\n adj_normalized = normalize_adj(adj)\n laplacian = sp.eye(adj.shape[0]) - adj_normalized\n largest_eigval, _ = eigsh(laplacian, 1, which='LM')\n scaled_laplacian = (2. 
/ largest_eigval[0]) * laplacian - sp.eye(adj.shape[0])\n\n t_k = list()\n t_k.append(sp.eye(adj.shape[0]))\n t_k.append(scaled_laplacian)\n\n def chebyshev_recurrence(t_k_minus_one, t_k_minus_two, scaled_lap):\n s_lap = sp.csr_matrix(scaled_lap, copy=True)\n return 2 * s_lap.dot(t_k_minus_one) - t_k_minus_two\n\n for i in range(2, k+1):\n t_k.append(chebyshev_recurrence(t_k[-1], t_k[-2], scaled_laplacian))\n\n return sparse_to_tuple(t_k)", "def spline_linear(x, f, x_k, x_ki):\n A = (x_ki - x) / (x_ki - x_k)\n B = (x - x_k) / (x_ki - x_k)\n \n return A*f(x_k) + B*f(x_ki)", "def star(self):\n new_spart = flatten(map(list, self))\n new_spart.sort(reverse=True)\n while 0 in new_spart:\n new_spart.remove(0)\n new_spart = _BosonicPartitions(new_spart)\n return new_spart", "def sketch_und_part(self):\n if (self.dimension == '3D'):\n #Sketch Wuerfel zeichnen\n self.sketch_Wuerfel = self.model.ConstrainedSketch(\n name='Seitenansicht_Wuerfel',\n sheetSize=200.0)\n self.sketch_Wuerfel.rectangle(\n point1=(-self.laenge_x/2.0, -self.laenge_y/2.0),\n point2=(self.laenge_x/2.0, self.laenge_y/2.0))\n #Part Wuerfel generieren\n self.part_Wuerfel = self.model.Part(\n name=self.name+'_Wuerfel',\n dimensionality=THREE_D,\n type=DEFORMABLE_BODY)\n self.part_Wuerfel.BaseSolidExtrude(\n sketch=self.sketch_Wuerfel,\n depth=self.laenge_z/2.0) #z-Symmetrie\n #Sketch Pore zeichnen (fuer Quader und Zylinder)\n self.sketch_Pore = self.model.ConstrainedSketch(\n name='Seitenansicht_Pore',\n sheetSize=200.0)\n if (self.typ_Pore == 'Quader'):\n self.sketch_Pore.rectangle(\n point1=(-self.porenparameter_x/2.0, -self.porenparameter_y/2.0),\n point2=(self.porenparameter_x/2.0, self.porenparameter_y/2.0))\n elif (self.typ_Pore == 'Zylinder'):\n self.sketch_Pore.EllipseByCenterPerimeter(\n center=(0.0, 0.0),\n axisPoint1=(self.porenparameter_x/2.0, 0.0),\n axisPoint2=(0.0, self.porenparameter_y/2.0))\n elif (self.typ_Pore == 'Ellipsoid' ):\n matlab.ellipsoidIgesOut(\n self.porenparameter_x,\n self.porenparameter_y,\n self.porenparameter_z,\n 'Ellipsoid')\n # if (self.porenparameter_x == self.porenparameter_z):\n # self.sketch_Pore.ConstructionLine(\n # point1=(0.0, -100.0),\n # point2=(0.0, 100.0))\n # self.sketch_Pore.EllipseByCenterPerimeter(\n # center=(0.0, 0.0),\n # axisPoint1=(self.porenparameter_x/2.0, 0.0),\n # axisPoint2=(0.0, self.porenparameter_y/2.0))\n # self.sketch_Pore.autoTrimCurve(\n # curve1=self.sketch_Pore.geometry[3],\n # point1=(-self.porenparameter_x/2.0, 0.0))\n # self.sketch_Pore.Line(\n # point1=(0.0, self.porenparameter_y/2.0),\n # point2=(0.0, -self.porenparameter_y/2.0))\n else:\n print('typ_Pore Error!')\n #Part Pore generieren\n if (self.typ_Pore == 'Ellipsoid' ):\n # if (self.porenparameter_x == self.porenparameter_z):\n # self.part_Pore.BaseSolidRevolve(\n # sketch=self.sketch_Pore,\n # angle=360.0,\n # flipRevolveDirection=OFF)\n self.iges_Datei = mdb.openIges(\n 'Ellipsoid.igs',\n msbo=False,\n trimCurve=DEFAULT,\n scaleFromFile=OFF)\n self.model.PartFromGeometryFile(\n name=self.name+'_Pore',\n geometryFile=self.iges_Datei,\n combine=False,\n stitchTolerance=1.0,\n dimensionality=THREE_D,\n type=DEFORMABLE_BODY,\n convertToAnalytical=1,\n stitchEdges=1,\n scale=1) # Skalierung\n self.part_Pore = self.model.parts[self.name+'_Pore']\n self.part_Pore.AddCells(\n faceList = self.part_Pore.faces,\n flipped=False)\n del self.iges_Datei\n os.remove('abaqus_read_iges0.log') #Arbeitsordner aufraeumen\n os.remove('temp-Ellipsoid-new.sat')\n os.remove('Ellipsoid.igs')\n elif (self.typ_Pore == 
'Quader' or 'Zylinder'):\n self.part_Pore = self.model.Part(\n name=self.name+'_Pore',\n dimensionality=THREE_D,\n type=DEFORMABLE_BODY)\n self.part_Pore.BaseSolidExtrude(\n sketch=self.sketch_Pore,\n depth=self.porenparameter_z)\n #Assemble\n self.assembly = self.model.rootAssembly\n self.assembly.DatumCsysByDefault(CARTESIAN)\n self.assembly.Instance(\n name=self.name+'_Pore',\n part=self.part_Pore,\n dependent=ON)\n self.assembly.Instance(\n name=self.name+'_Wuerfel',\n part=self.part_Wuerfel,\n dependent=ON)\n #Translation\n self.assembly.translate(\n instanceList=(self.name+'_Wuerfel', ),\n vector=(0.0, 0.0, -self.laenge_z/2.0))\n if (self.typ_Pore == 'Ellipsoid'):\n self.assembly.translate(\n instanceList=(self.name+'_Pore', ),\n vector=(0.0, 0.0, 0.0))\n elif (self.typ_Pore == 'Quader' or 'Zylinder'):\n self.assembly.translate(\n instanceList=(self.name+'_Pore', ),\n vector=(0.0, 0.0, -self.porenparameter_z/2.0))\n #Rotation\n self.assembly.rotate(\n instanceList=(self.name+'_Pore', ),\n axisPoint=(0.0, 0.0, 0.0),\n axisDirection=(1.0, 0.0, 0.0),\n angle=self.porenparameter_rx)\n self.assembly.rotate(\n instanceList=(self.name+'_Pore', ),\n axisPoint=(0.0, 0.0, 0.0),\n axisDirection=(0.0, 1.0, 0.0),\n angle=self.porenparameter_ry)\n self.assembly.rotate(\n instanceList=(self.name+'_Pore', ),\n axisPoint=(0.0, 0.0, 0.0),\n axisDirection=(0.0, 0.0,1.0),\n angle=self.porenparameter_rz)\n #Schneiden\n self.assembly.InstanceFromBooleanCut(\n name='RVE',\n instanceToBeCut=self.assembly.instances[self.name+'_Wuerfel'],\n cuttingInstances=(self.assembly.instances[self.name+'_Pore'], ),\n originalInstances=SUPPRESS)\n self.assembly.deleteFeatures((self.name+'_Wuerfel', self.name+'_Pore', ))\n # del self.model.parts[self.name+'_Wuerfel']\n # del self.model.parts[self.name+'_Pore']\n self.part_RVE = self.model.parts[self.name]\n elif (self.dimension == '2D'):\n #Sketch Wuerfel zeichnen\n self.sketch_Wuerfel = self.model.ConstrainedSketch(\n name='Seitenansicht_Wuerfel',\n sheetSize=200.0)\n self.sketch_Wuerfel.rectangle(\n point1=(0.0, 0.0),\n point2=(self.laenge_x/2.0, self.laenge_y/2.0)) #x- und y-Symmetrie\n #Part Wuerfel generieren\n self.part_Wuerfel = self.model.Part(\n name=self.name+'_Wuerfel',\n dimensionality=TWO_D_PLANAR,\n type=DEFORMABLE_BODY)\n self.part_Wuerfel.BaseShell(sketch=self.sketch_Wuerfel)\n #Sketch Pore zeichnen\n self.sketch_Pore = self.model.ConstrainedSketch(\n name='Seitenansicht_Pore',\n sheetSize=200.0)\n if (self.typ_Pore == 'Ellipsoid'):\n self.sketch_Pore.ConstructionLine(\n point1=(0.0, -100.0),\n point2=(0.0, 100.0))\n self.sketch_Pore.EllipseByCenterPerimeter(\n center=(0.0, 0.0),\n axisPoint1=(self.porenparameter_x/2.0, 0.0),\n axisPoint2=(0.0, self.porenparameter_y/2.0))\n self.sketch_Pore.autoTrimCurve(\n curve1=self.sketch_Pore.geometry[3],\n point1=(-self.porenparameter_x/2.0, 0.0))\n self.sketch_Pore.Line(\n point1=(0.0, self.porenparameter_y/2.0),\n point2=(0.0, -self.porenparameter_y/2.0))\n elif (self.typ_Pore == 'Quader'):\n self.sketch_Pore.rectangle(\n point1=(-self.porenparameter_x/2.0, -self.porenparameter_y/2.0),\n point2=(self.porenparameter_x/2.0, self.porenparameter_y/2.0))\n elif (self.typ_Pore == 'Zylinder'):\n self.sketch_Pore.EllipseByCenterPerimeter(\n center=(0.0, 0.0),\n axisPoint1=(self.porenparameter_x/2.0, 0.0),\n axisPoint2=(0.0, self.porenparameter_y/2.0))\n else:\n print('typ_Pore Error!')\n #Part Pore generieren\n self.part_Pore = self.model.Part(\n name=self.name+'_Pore',\n dimensionality=TWO_D_PLANAR,\n 
type=DEFORMABLE_BODY)\n self.part_Pore.BaseShell(sketch=self.sketch_Pore)\n #Assemble\n self.assembly = self.model.rootAssembly\n self.assembly.DatumCsysByDefault(CARTESIAN)\n self.assembly.Instance(\n name=self.name+'_Wuerfel',\n part=self.part_Wuerfel,\n dependent=ON)\n self.assembly.Instance(\n name=self.name+'_Pore',\n part=self.part_Pore,\n dependent=ON)\n self.assembly.rotate(\n instanceList=(self.name+'_Pore', ),\n axisPoint=(0.0, 0.0, self.laenge_z/2.0),\n axisDirection=(0.0, 0.0, self.laenge_z/2.0+1),\n angle=self.porenparameter_rz)\n self.assembly.InstanceFromBooleanCut(\n name='RVE',\n instanceToBeCut=self.assembly.instances[self.name+'_Wuerfel'],\n cuttingInstances=(self.assembly.instances[self.name+'_Pore'], ),\n originalInstances=SUPPRESS)\n self.assembly.deleteFeatures((self.name+'_Wuerfel', self.name+'_Pore', ))\n del self.model.parts[self.name+'_Wuerfel']\n #del self.model.parts[self.name+'_Pore']\n self.part_RVE = self.model.parts[self.name]\n else:\n print('dimension Error!')", "def findQ(H_s):\n nl,dl = symToTransferFn(H_s)\n syst = sp.lti(nl,dl)\n p1,p2 = syst.poles[0], syst.poles[1]\n return np.sqrt(abs(p1*p2))/abs(p1+p2)", "def spltopp(xk, cvals, k):\n return ppform.fromspline(xk, cvals, k)", "def better_partition_parallel(graph, part1, part2, independent_set_extraction_strategy):\n\n best = part2\n for i in range(len(part1)):\n if better_partition(graph, part1[i], best, independent_set_extraction_strategy):\n best = part1[i]\n\n return best", "def subsquares(x):\n return subsquares.subsquares(x)", "def euclidean_dist_quadratic_expansion(x: Tensor, y: Tensor) -> Tensor:\n eps = torch.tensor(\n torch.finfo(x.dtype).eps,\n device=x.device,\n dtype=x.dtype,\n )\n\n # using einsum is slightly faster than `torch.pow(x, 2).sum(-1)`\n xnorm = torch.einsum(\"...ij,...ij->...i\", x, x)\n ynorm = torch.einsum(\"...ij,...ij->...i\", y, y)\n\n n = xnorm.unsqueeze(-1) + ynorm.unsqueeze(-2)\n\n # x @ y.mT\n prod = torch.einsum(\"...ik,...jk->...ij\", x, y)\n\n # important: remove negative values that give NaN in backward\n return torch.sqrt(torch.clamp(n - 2.0 * prod, min=eps))", "def f_exact(n, k):\n def fact(m):\n return math.factorial(m)\n\n partition = part(n, k)\n\n total = 0\n for p in partition:\n product = 1\n nodes_left = n\n counts = dict([(x, len(list(y))) for x, y in itertools.groupby(p)])\n for num in p:\n product *= fact(num - 1) * comb(nodes_left, num)\n nodes_left -= num\n for num in counts:\n product /= fact(counts[num])\n\n total += product\n return int(total)", "def roots_quartic(f):\n _, a, b, c, d = f.monic().all_coeffs()\n\n if not d:\n return [S.Zero] + roots([1, a, b, c], multiple=True)\n elif (c/a)**2 == d:\n x, m = f.gen, c/a\n\n g = Poly(x**2 + a*x + b - 2*m, x)\n\n z1, z2 = roots_quadratic(g)\n\n h1 = Poly(x**2 - z1*x + m, x)\n h2 = Poly(x**2 - z2*x + m, x)\n\n r1 = roots_quadratic(h1)\n r2 = roots_quadratic(h2)\n\n return r1 + r2\n else:\n a2 = a**2\n e = b - 3*a2/8\n f = _mexpand(c + a*(a2/8 - b/2))\n aon4 = a/4\n g = _mexpand(d - aon4*(a*(3*a2/64 - b/4) + c))\n\n if f.is_zero:\n y1, y2 = [sqrt(tmp) for tmp in\n roots([1, e, g], multiple=True)]\n return [tmp - aon4 for tmp in [-y1, -y2, y1, y2]]\n if g.is_zero:\n y = [S.Zero] + roots([1, 0, e, f], multiple=True)\n return [tmp - aon4 for tmp in y]\n else:\n # Descartes-Euler method, see [7]\n sols = _roots_quartic_euler(e, f, g, aon4)\n if sols:\n return sols\n # Ferrari method, see [1, 2]\n p = -e**2/12 - g\n q = -e**3/108 + e*g/3 - f**2/8\n TH = Rational(1, 3)\n\n def _ans(y):\n w = sqrt(e + 2*y)\n 
arg1 = 3*e + 2*y\n arg2 = 2*f/w\n ans = []\n for s in [-1, 1]:\n root = sqrt(-(arg1 + s*arg2))\n for t in [-1, 1]:\n ans.append((s*w - t*root)/2 - aon4)\n return ans\n\n # whether a Piecewise is returned or not\n # depends on knowing p, so try to put\n # in a simple form\n p = _mexpand(p)\n\n\n # p == 0 case\n y1 = e*Rational(-5, 6) - q**TH\n if p.is_zero:\n return _ans(y1)\n\n # if p != 0 then u below is not 0\n root = sqrt(q**2/4 + p**3/27)\n r = -q/2 + root # or -q/2 - root\n u = r**TH # primary root of solve(x**3 - r, x)\n y2 = e*Rational(-5, 6) + u - p/u/3\n if fuzzy_not(p.is_zero):\n return _ans(y2)\n\n # sort it out once they know the values of the coefficients\n return [Piecewise((a1, Eq(p, 0)), (a2, True))\n for a1, a2 in zip(_ans(y1), _ans(y2))]", "def qs_discretize(pqs_f):\n # does not integrate over x!\n # Becomes problem if segments are variable or inputs not uniform in x-direction\n return np.array([np.sum(np.split(pqs_f, GATE.ii_z[1:-1], axis=0)[i], axis=0)*dz/\\\n (GATE.z_coords[GATE.ii_z[i+1]]-GATE.z_coords[GATE.ii_z[i]])\\\n for i in range(n_z)])", "def zzX_div(f, g):\n if poly_univariate_p(f):\n return zzx_div(f, g)\n\n df = zzX_degree(f)\n dg = zzX_degree(g)\n\n if dg < 0:\n raise ZeroDivisionError(\"polynomial division\")\n\n q, r = zzX_zero_of(f), f\n\n if df < dg:\n return q, r\n\n while True:\n dr = zzX_degree(r)\n\n if dr < dg:\n break\n\n lc_r = poly_LC(r)\n lc_g = poly_LC(g)\n\n c, R = zzX_div(lc_r, lc_g)\n\n if not zzX_zero_p(R):\n break\n\n k = dr - dg\n\n q = zzX_add_term(q, c, k)\n h = zzX_mul_term(g, c, k)\n r = zzX_sub(r, h)\n\n return q, r", "def _get_split_spectrum(T,WK):\n\n n_samples, n_frames, n_frequencies = T.shape\n U = np.zeros([n_samples, n_samples, n_frames, n_frequencies], dtype=np.complex64)\n \n for l in range(n_frequencies):\n for n in range(n_samples):\n _T = np.zeros([n_samples, n_frames], dtype=np.complex64)\n _T[n,:] = T[n,:,l]\n inv_WK = np.linalg.inv(WK[:,:,l])\n U[n,:,:,l] = np.dot(inv_WK, _T)\n \n return U", "def part(n, k, prev_parts=None):\n if prev_parts is None:\n prev_parts = {}\n if n < k or k < 1:\n raise Exception(\"Invalid partition args\")\n if k == 1:\n return [[n]]\n if n == k:\n return [[1 for i in range(n)]]\n parts = []\n for i in range(math.ceil(float(n) / float(k)), n - k + 2):\n others = deepcopy(prev_parts.get((n - i, k - 1), part(n - i, k - 1, prev_parts)))\n for other in others:\n other.append(i)\n parts.extend(others)\n deduplicated = set(tuple(sorted(x)) for x in parts)\n uniq_parts = []\n for dedup in deduplicated:\n uniq_parts.append(list(dedup))\n if (n, k) not in prev_parts:\n prev_parts[(n, k)] = uniq_parts\n return uniq_parts", "def get_partitions(cliques,cut=1):\n cliques.sort(key=len)\n k, m = divmod(len(cliques), cut)\n return list(cliques[i*k+min(i, m):(i+1)*k+min(i+1, m)] for i in range(cut))", "def Qc_fit(x, a, b, c, d, e, f, g, h, i, k):\n x1 = x[0] # I\n x2 = x[1] # dT\n m = (i * x1 ** 4 + a * x1 ** 3 + b * x1 ** 2 + c * x1 + d)\n b = (k * x1 ** 4 + e * x1 ** 3 + f * x1 ** 2 + g * x1 + h)\n return m * x2 + b", "def RTSpace( ref_el , deg ):\n sd = ref_el.get_spatial_dimension()\n\n vec_Pkp1 = polynomial_set.ONPolynomialSet( ref_el , deg+1 , (sd,) )\n\n dimPkp1 = expansions.polynomial_dimension( ref_el , deg+1 )\n dimPk = expansions.polynomial_dimension( ref_el , deg )\n dimPkm1 = expansions.polynomial_dimension( ref_el , deg-1 )\n\n vec_Pk_indices = reduce( lambda a,b: a+b , \\\n [ list(range(i*dimPkp1,i*dimPkp1+dimPk)) \\\n for i in range(sd) ] )\n vec_Pk_from_Pkp1 = vec_Pkp1.take( vec_Pk_indices 
)\n\n Pkp1 = polynomial_set.ONPolynomialSet( ref_el , deg + 1 )\n PkH = Pkp1.take( list(range(dimPkm1,dimPk)) )\n\n Q = quadrature.make_quadrature( ref_el , 2 * deg + 2 )\n\n # have to work on this through \"tabulate\" interface\n # first, tabulate PkH at quadrature points\n Qpts = numpy.array( Q.get_points() )\n Qwts = numpy.array( Q.get_weights() )\n\n zero_index = tuple( [ 0 for i in range(sd) ] )\n\n PkH_at_Qpts = PkH.tabulate( Qpts )[zero_index]\n Pkp1_at_Qpts = Pkp1.tabulate( Qpts )[zero_index]\n\n PkHx_coeffs = numpy.zeros( (PkH.get_num_members() , \\\n sd, \\\n Pkp1.get_num_members()) , \"d\" )\n\n import time\n t1 = time.time()\n for i in range( PkH.get_num_members() ):\n for j in range( sd ):\n fooij = PkH_at_Qpts[i,:] * Qpts[:,j] * Qwts\n PkHx_coeffs[i,j,:] = numpy.dot( Pkp1_at_Qpts , fooij )\n\n PkHx = polynomial_set.PolynomialSet( ref_el , \\\n deg , \\\n deg + 1 , \\\n vec_Pkp1.get_expansion_set() , \\\n PkHx_coeffs , \\\n vec_Pkp1.get_dmats() )\n\n return polynomial_set.polynomial_set_union_normalized( vec_Pk_from_Pkp1 , PkHx )", "def pmf(self, k):\n\n if k < 0 or k > self.n:\n return 0\n\n k = int(k)\n\n c = (Binomial.factorial(self.n)) / \\\n (Binomial.factorial(k) * self.factorial((self.n - k)))\n\n return c * pow(self.p, k) * pow((1 - self.p), (self.n - k))", "def zzX_add_term(f, c, k=0):\n if poly_univariate_p(f):\n return zzx_add_term(f, c, k)\n\n if zzX_zero_p(c):\n return f\n\n n = len(f)\n m = n-k-1\n\n if k == n-1:\n return zzX_strip([zzX_add(f[0], c)] + f[1:])\n else:\n if k >= n:\n return [c] + zzX_zeros_of(f, k-n, 1) + f\n else:\n return f[:m] + [zzX_add(f[m], c)] + f[m+1:]" ]
[ "0.59264237", "0.5611723", "0.55813473", "0.5479996", "0.5371169", "0.51204914", "0.50550634", "0.50538117", "0.49067897", "0.47249427", "0.4711605", "0.46479887", "0.46400973", "0.4625992", "0.4623234", "0.45929667", "0.4567981", "0.45518592", "0.45361853", "0.4516747", "0.45131138", "0.44943917", "0.44654816", "0.44605464", "0.44399968", "0.4434695", "0.4429431", "0.44201025", "0.4414089", "0.4408546", "0.44010952", "0.43749136", "0.43719319", "0.43671557", "0.43639106", "0.43524355", "0.4340055", "0.43380484", "0.4337063", "0.432966", "0.43237856", "0.43208975", "0.43195146", "0.4315024", "0.43080863", "0.42991492", "0.42973348", "0.42958668", "0.42555875", "0.42513824", "0.42511693", "0.42423725", "0.42404127", "0.42359343", "0.42354676", "0.42306444", "0.42298535", "0.42262602", "0.4219306", "0.42185313", "0.42061222", "0.4202698", "0.4195686", "0.41913864", "0.41757333", "0.4170929", "0.41688028", "0.41670737", "0.41667798", "0.41650388", "0.4157371", "0.4151027", "0.4147652", "0.41423607", "0.41408843", "0.41373274", "0.41344273", "0.41298676", "0.41259605", "0.412352", "0.412352", "0.41188243", "0.41183177", "0.41120198", "0.40970403", "0.40963796", "0.4096041", "0.40913948", "0.40878007", "0.40842718", "0.40833116", "0.40829757", "0.4082603", "0.40820348", "0.40749034", "0.4071819", "0.40690532", "0.406773", "0.40659878", "0.4062031" ]
0.50990486
6
Square-free norm of ``f`` in ``K[X]``, useful over algebraic domains. Returns ``s``, ``f``, ``r``, such that ``g(x) = f(x - s*a)`` and ``r(x) = Norm(g(x))`` is a square-free polynomial over ``K``, where ``a`` is the algebraic extension of ``K``. Examples ======== >>> _, x, y = ring('x y', QQ.algebraic_field(I)) >>> (x*y + y**2).sqf_norm() (1, x*y - I*x + y**2 - 3*I*y - 2, x**2*y**2 + x**2 + 2*x*y**3 + 2*x*y + y**4 + 5*y**2 + 4)
def sqf_norm(self, f): domain = self.domain if not domain.is_AlgebraicField: raise DomainError(f'ground domain must be algebraic, got {domain}') new_ring = self.to_ground().inject(*domain.symbols, front=True) g = domain.mod.set_ring(new_ring) s = 0 while True: h = f.inject(front=True) r = g.resultant(h) if r.is_squarefree: return s, f, r f = f.compose({x: x - domain.unit for x in self.gens}) s += 1
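A minimal worked check of the doctest in the query above, using plain sympy instead of the ring API from the snippet (an assumption made purely for illustration): shifting both generators by the extension's generator I (so s = 1) and taking the resultant against its minimal polynomial z**2 + 1 reproduces the listed square-free norm.

```python
# Illustrative sketch only; plain sympy stands in for the ring API above (assumption).
from sympy import I, expand, resultant, symbols

x, y, z = symbols('x y z')

f = x*y + y**2
# One shift (s = 1): replace every generator g by g - a, with a = I.
g = expand(f.subs({x: x - I, y: y - I}))
# g == x*y - I*x + y**2 - 3*I*y - 2

# Norm over QQ(I): resultant of g (with I renamed to z) against z**2 + 1.
r = expand(resultant(g.subs(I, z), z**2 + 1, z))
print(r)  # x**2*y**2 + x**2 + 2*x*y**3 + 2*x*y + y**4 + 5*y**2 + 4
```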
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sqf_norm(f):\n s, g, r = dmp_sqf_norm(f.rep, f.lev, f.dom)\n return s, f.per(g), f.per(r, dom=f.dom.dom)", "def squared_frobenius_norm(x):\n # http://mathworld.wolfram.com/FrobeniusNorm.html\n # The gradient of KL[p,q] is not defined when p==q. The culprit is\n # tf.norm, i.e., we cannot use the commented out code.\n # return tf.square(tf.norm(x, ord=\"fro\", axis=[-2, -1]))\n return tf.reduce_sum(tf.square(x), axis=[-2, -1])", "def sqr(f):\n return f.per(dmp_sqr(f.rep, f.lev, f.dom))", "def normsq(self):\n return sum(x**2 for x in self.data)", "def norm(self):\n mag_squared = self._sum_of_squares()\n return sqrt(mag_squared)", "def _rsq(self):\n return self._ss_reg / self._ss_tot", "def normFro(X):\n return norm(X)", "def norm(self):\n C = np.prod([F.T @ F for F in self.factors], axis=0)\n return np.sqrt(np.sum(C))", "def tree_l2_norm(tree_x, squared=False):\n squared_tree = tree_map(jnp.square, tree_x)\n sqnorm = tree_sum(squared_tree)\n if squared:\n return sqnorm\n else:\n return jnp.sqrt(sqnorm)", "def normsq(self):\n return abs(sum(self._ar * self._ar))", "def rsqrt(data):\n return _make.rsqrt(data)", "def norm_sqr(x):\n return inner_prod(x, x)[0]", "def test_sym_sqrtm(self): \n # create random symmetric n x n matrix\n n = 5\n A = 5.0 * 2.0*(torch.rand(n,n) - 0.5)\n A = A + A.T\n\n # reference implementation of scipy\n sqA_scipy = sla.sqrtm(A.numpy())\n isqA_scipy = sla.inv(sla.sqrtm(A.numpy()))\n # my own implementation using pure torch functions\n sqA,isqA = (x.numpy() for x in _sym_sqrtm(A))\n \n self.assertTrue(np.isclose(sqA, sqA_scipy).all())\n self.assertTrue(np.isclose(isqA, isqA_scipy).all())", "def zzX_sqf_p(f):\n return zzX_one_p(zzX_gcd(zzX_primitive(f)[1], zzX_diff(f)))", "def scalar_sqrt(self, dst, src):\n return self._scalar_single_func('sqrt', dst, src)", "def norm(self):\n\t\treturn np.sqrt(self.normSq())", "def score_sqrt(self, params):\n\n if type(params) is not MixedLMParams:\n params = MixedLMParams.from_packed(params, self.k_fe,\n self.use_sqrt)\n\n score_full = self.score_full(params)\n params_vec = params.get_packed(use_sqrt=True)\n\n lin, quad = self._reparam()\n\n scr = 0.\n for i in range(len(params_vec)):\n v = lin[i] + 2 * np.dot(quad[i], params_vec)\n scr += score_full[i] * v\n\n if self._freepat is not None:\n return self._freepat.get_packed() * scr\n else:\n return scr", "def weight_norm(W, s):\n _eps = numpy_floatX(1e-5)\n W_norms = tensor.sqrt((W * W).sum(axis=0, keepdims=True) + _eps)\n W_norms_s = W_norms * s # do this first to ensure proper broadcasting\n return W / W_norms_s", "def sparse_square_norm(A: SparseTensor, out: torch.Tensor) -> torch.Tensor:\n if not A.is_csr:\n raise RuntimeError(\"Squared norm can only be applied on CSR tensors\")\n if not check_same_dtype(A, out):\n raise ValueError(\"All data-types must match\")\n if A.shape[0] != out.shape[0]:\n raise ValueError(\"Dimension 0 of A must match the length of tensor 'out'\")\n\n return norm_sq(A.indexptr, A.data, out)", "def norm(self):\n return sqrt(self.dot(self))", "def norm(x):\n return inner_prod(x, x)[0].sqrt_()", "def f_norm(self,G):\n if isinstance(G,np.ndarray):\n nu = np.outer(self.nu_array,1./G**2)\n # sigma_inv = np.outer(1./self.sigma,1./G**2)\n else:\n nu = self.nu_array/G**2\n # sigma_inv = 1./self.sigma*1./G**2\n #f = self.A*np.sqrt(2.*self.a/np.pi)*(1.+(1./self.a/nu)**self.p)*np.sqrt(nu)*np.exp(-self.a*nu/2.)\n f = self.f_nu(nu)\n\n #norm = np.trapz(f,np.log(sigma_inv),axis=0)\n norm = trapz2(f,np.log(1./self.sigma))\n return norm", "def rsq(self):\n return 
np.squeeze(self._rsq)", "def sqrt(self):\n return type(self)(self.parent(),\n self._simplify(self._express.sqrt()))", "def hessian_sqrt(self, params):\n\n if type(params) is not MixedLMParams:\n params = MixedLMParams.from_packed(params, self.k_fe,\n self.use_sqrt)\n\n score0 = self.score_full(params)\n hess0 = self.hessian_full(params)\n\n params_vec = params.get_packed(use_sqrt=True)\n\n lin, quad = self._reparam()\n k_tot = self.k_fe + self.k_re2\n\n # Convert Hessian to new coordinates\n hess = 0.\n for i in range(k_tot):\n hess += 2 * score0[i] * quad[i]\n for i in range(k_tot):\n vi = lin[i] + 2*np.dot(quad[i], params_vec)\n for j in range(k_tot):\n vj = lin[j] + 2*np.dot(quad[j], params_vec)\n hess += hess0[i, j] * np.outer(vi, vj)\n\n return hess", "def norm(self) -> float:\n return self.squared_norm()**0.5", "def sqrt(self: Float[LinearOperator, \"*batch M N\"]) -> Float[LinearOperator, \"*batch M N\"]:\n return self.__class__(self._diag.sqrt())", "def norm(self):\n\t\treturn math.sqrt(self.norm2())", "def norm(x):\r\n return sqrt(np.numerical.sum(x**2))", "def sqrt(self: Float[LinearOperator, \"*batch M N\"]) -> Float[LinearOperator, \"*batch M N\"]:\n return ConstantDiagLinearOperator(self.diag_values.sqrt(), diag_shape=self.diag_shape)", "def norm(alpha, F):\n return inner_product(alpha, F, alpha)", "def squared_norm(self, x_tensors=None):\n if x_tensors is None:\n x_tensors = self.x_tensors()\n\n return numpy.sum([squared_L2_norm(t) for t in x_tensors])", "def normkernel(S, T, n):\n\n k1 = kernel(S, S, n)\n k2 = kernel(T, T, n)\n res = kernel(S, T, n) / sqrt(k1 * k2)\n\n return res", "def zzX_sqf_part(f):\n quo = zzX_quo(f, zzX_gcd(f, zzX_diff(f)))\n return zzX_primitive(quo)[1]", "def normSq(self):\n\t\treturn self.x*self.x+self.y*self.y", "def norm(self):\n return math.sqrt(self.dotProduct(self))", "def norm(self):\n norm = self.scalarProduct(self) ** 0.5\n return norm", "def normalequ(self):\n tx = self.train_x\n y = self.train_y\n if self.regularizer is None:\n return np.linalg.solve(np.dot(tx.T, tx), np.dot(tx.T, y))\n elif self.regularizer.name is 'Ridge':\n G = np.eye(tx.shape[1])\n G[0, 0] = 0\n hes = np.dot(tx.T, tx) + self.regularizer_p * G\n return np.linalg.solve(hes, np.dot(tx.T, y))\n else:\n raise NotImplementedError", "def norm(self):\n return np.sqrt(np.dot(self._data, self._data))", "def sqrt(self):\n\n\t\t# Maintain state of self and create new trace variable new_var\n\t\tnew_var = Var(self.val, self.der)\n\t\treturn new_var.__pow__(0.5)", "def sqrt(self):\n # There might be a conversion factor from taking the square root of the unit\n new_value = math.sqrt(self._value)\n new_unit = self.unit.sqrt()\n unit_factor = self.unit.conversion_factor_to(new_unit*new_unit)\n if unit_factor != 1.0:\n new_value *= math.sqrt(unit_factor)\n return Quantity(value=new_value, unit=new_unit)", "def norm(x):\n return np.sqrt(norm2(x))", "def norm(u, w, a, b):\n u_1, u_2 = u\n return sqrt(dot((u_1, u_2), (u_1, u_2), w, a, b))", "def get_fisher_rao_norm_squared(self, sess, x_test, y_test):\n pred_np = self.get_prediction(sess, x_test)\n dl_df_np = self.get_dl_df(sess, x_test, y_test)\n prod = pred_np * dl_df_np\n inner_prod_vector = np.sum(prod, axis = 0)\n inner_prod_squared = inner_prod_vector * inner_prod_vector\n return np.mean(inner_prod_squared)", "def normF2(X):\r\n # pass\r\n if X.shape[0]*X.shape[1] == 0:\r\n return 0\r\n return LA.norm(X, 'fro')**2", "def sqnorm(self, d):\n ###TODO\n total = 0.0\n for i in d:\n total = total + (d[i] * d[i])\n return total", "def 
square_norm(x):\n return np.linalg.norm(x) ** 2", "def roots_quadratic(f):\n\n a, b, c = f.all_coeffs()\n dom = f.get_domain()\n\n def _sqrt(d):\n # remove squares from square root since both will be represented\n # in the results; a similar thing is happening in roots() but\n # must be duplicated here because not all quadratics are binomials\n co = []\n other = []\n for di in Mul.make_args(d):\n if di.is_Pow and di.exp.is_Integer and di.exp % 2 == 0:\n co.append(Pow(di.base, di.exp//2))\n else:\n other.append(di)\n if co:\n d = Mul(*other)\n co = Mul(*co)\n return co*sqrt(d)\n return sqrt(d)\n\n def _simplify(expr):\n if dom.is_Composite:\n return factor(expr)\n else:\n from sympy.simplify.simplify import simplify\n return simplify(expr)\n\n if c is S.Zero:\n r0, r1 = S.Zero, -b/a\n\n if not dom.is_Numerical:\n r1 = _simplify(r1)\n elif r1.is_negative:\n r0, r1 = r1, r0\n elif b is S.Zero:\n r = -c/a\n if not dom.is_Numerical:\n r = _simplify(r)\n\n R = _sqrt(r)\n r0 = -R\n r1 = R\n else:\n d = b**2 - 4*a*c\n A = 2*a\n B = -b/A\n\n if not dom.is_Numerical:\n d = _simplify(d)\n B = _simplify(B)\n\n D = factor_terms(_sqrt(d)/A)\n r0 = B - D\n r1 = B + D\n if a.is_negative:\n r0, r1 = r1, r0\n elif not dom.is_Numerical:\n r0, r1 = [expand_2arg(i) for i in (r0, r1)]\n\n return [r0, r1]", "def norm(self, T=None):\n T = T or self.T\n x = T.gen\n A = self.numerator(x=x)\n return T.resultant(A) // self.denom ** self.n", "def gauss_kl(q_mu, q_sqrt, K=None):\n #print(q_sqrt._tf_array)\n #q_sqrt = q_sqrt._tf_array\n #q_mu = q_mu._tf_array\n\n if K is None:\n white = True\n alpha = q_mu\n else:\n white = False\n Lp = tf.cholesky(K)\n alpha = tf.matrix_triangular_solve(Lp, q_mu, lower=True)\n\n if q_sqrt.get_shape().ndims == 2:\n diag = True\n num_latent = tf.shape(q_sqrt)[1]\n NM = tf.size(q_sqrt)\n Lq = Lq_diag = q_sqrt\n elif q_sqrt.get_shape().ndims == 3:\n diag = False\n num_latent = tf.shape(q_sqrt)[2]\n NM = tf.reduce_prod(tf.shape(q_sqrt)[1:])\n Lq = tf.matrix_band_part(tf.transpose(q_sqrt, (2, 0, 1)), -1, 0) # force lower triangle\n Lq_diag = tf.matrix_diag_part(Lq)\n else: # pragma: no cover\n raise ValueError(\"Bad dimension for q_sqrt: %s\" %\n str(q_sqrt.get_shape().ndims))\n\n # Mahalanobis term: μqᵀ Σp⁻¹ μq\n mahalanobis = tf.reduce_sum(tf.square(alpha))\n \n # Constant term: - N x M\n constant = - tf.cast(NM, float_type)\n\n # Log-determinant of the covariance of q(x):\n logdet_qcov = tf.reduce_sum(tf.log(tf.square(Lq_diag)))\n\n # Trace term: tr(Σp⁻¹ Σq)\n if white:\n trace = tf.reduce_sum(tf.square(Lq))\n else:\n if diag:\n M = tf.shape(Lp)[0]\n Lp_inv = tf.matrix_triangular_solve(Lp, tf.eye(M, dtype=float_type), lower=True)\n K_inv = tf.matrix_triangular_solve(tf.transpose(Lp), Lp_inv, lower=False)\n trace = tf.reduce_sum(tf.expand_dims(tf.matrix_diag_part(K_inv), 1) *\n tf.square(q_sqrt))\n else:\n Lp_tiled = tf.tile(tf.expand_dims(Lp, 0), [num_latent, 1, 1])\n LpiLq = tf.matrix_triangular_solve(Lp_tiled, Lq, lower=True)\n trace = tf.reduce_sum(tf.square(LpiLq))\n\n twoKL = mahalanobis + constant - logdet_qcov + trace\n\n # Log-determinant of the covariance of p(x):\n if not white:\n prior_logdet = tf.cast(num_latent, float_type) * tf.reduce_sum(\n tf.log(tf.square(tf.matrix_diag_part(Lp))))\n twoKL += prior_logdet\n\n return 0.5 * twoKL", "def norm(q):\n normq = q/amplitude(q)\n if q.ndim == 1:\n normq = normq.flatten()\n return normq", "def sqf_part(self, f):\n domain = self.domain\n\n if domain.is_FiniteField:\n g = self.one\n for f, _ in self.sqf_list(f)[1]:\n g *= f\n\n return 
g\n\n if not f:\n return f\n\n gcd = f\n for x in self.gens:\n gcd = self.gcd(gcd, f.diff(x))\n sqf = f // gcd\n\n if domain.is_Field:\n return sqf.monic()\n return sqf.primitive()[1]", "def zzx_sqf_p(f):\n return zzx_one_p(zzx_gcd(zzx_primitive(f)[1], zzx_diff(f)))", "def norm(self, X, G):\n raise NotImplementedError", "def norm_squared(q_1: Q) -> Q:\n\n end_q_type = f\"||{q_1.q_type}||²\"\n\n qxq = _commuting_products(q_1, q_1)\n\n n_q = Q(q_type=end_q_type, representation=q_1.representation)\n n_q.t = qxq[\"tt\"] + qxq[\"xx+yy+zz\"]\n\n return n_q", "def sorm(func, dist_list, init_search_point, alg): \n def SLSQP(func, dist_list, init_search_point):\n \n dim = len(dist_list)\n current_beta = 0\n new_beta = 1\n sig = np.empty((1, dim))\n mu = np.empty((1, dim))\n new_search_point = np.array(init_search_point).reshape((1, dim))\n \n def f_l(x_l):\n return(func([x_l[i,:]*sig[0,i] + mu[0,i] for i in range(0, dim)]))\n \n while abs(current_beta-new_beta) > 0.001:\n current_search_point = new_search_point\n current_beta = new_beta\n for i in range(0, dim):\n if dist_list[i][1] != 'norm':\n mu[0,i], sig[0, i] = Rosenblatt_Transform(dist_list[i][0], current_search_point[0,i])\n else:\n mu[0,i], sig[0, i] = dist_list[i][0].mean(), dist_list[i][0].std()\n \n dist_fun = lambda u: np.linalg.norm(u) \n \n alg = 'SLSQP'\n \n H = lambda u: f_l(u)\n cons = ({'type': 'eq', 'fun': lambda u: -(H(u.reshape(-1,1)))})\n \n result = scipy.optimize.minimize(dist_fun, x0 = current_search_point, constraints = cons, method=alg)\n \n new_beta = result.fun\n u = np.array(result.x).reshape((1,dim))\n \n new_search_point = np.empty((1, dim))\n for i in range(0, dim):\n new_search_point[0,i] = mu[0,i] + u[0,i]*sig[0,i]\n \n beta_value = new_beta \n p_f = sst.norm.cdf(-beta_value)\n iterations = result.nit\n u = result.x\n x = u[:]*sig[0,:] + mu[0,:]\n grad_val = scipy.optimize.approx_fprime(x, func, 0.00000001)\n grad_val = grad_val.reshape((1, dim))\n \n sum1 = np.sum((grad_val[0,:]**2)*(sig[0,:]**2))\n cosines = np.empty((1, dim))\n \n for i in range(0, dim):\n cosines[0,i] = grad_val[0,i]*sig[0,i]/np.sqrt(sum1) \n \n return(beta_value, p_f, x, u, mu, sig, cosines, iterations) \n \n def HL_R(func, dist_list, init_search_point):\n \n iterations = 0\n cur_beta = 3\n new_beta = 0\n dim = len(dist_list)\n global_mean_arr = np.empty((1, dim))\n global_std_arr = np.empty((1, dim))\n new_search_point = np.array(init_search_point).reshape((1, dim))\n \n while abs(cur_beta - new_beta) > 0.001:\n cur_beta = new_beta\n cur_cosines = np.zeros((1, dim))\n new_cosines = np.ones((1, dim))\n \n while max((abs(cur_cosines - new_cosines))[0]) > 0.005:\n \n cur_cosines = new_cosines\n \n cur_search_point = new_search_point\n \n for i in range(0, dim):\n if dist_list[i][1] != 'norm':\n global_mean_arr[0, i], global_std_arr[0, i] = Rosenblatt_Transform(dist_list[i][0], cur_search_point[0,i])\n else:\n global_mean_arr[0, i], global_std_arr[0, i] = dist_list[i][0].mean(), dist_list[i][0].std()\n \n \n grad_val = scipy.optimize.approx_fprime(cur_search_point[0], func, 0.00000001)\n grad_val = grad_val.reshape((1, dim))\n \n sum1 = np.sum((grad_val[0,:]**2)*(global_std_arr[0,:]**2))\n cosines = np.empty((1, dim))\n \n for i in range(0, dim):\n cosines[0,i] = grad_val[0,i]*global_std_arr[0,i]/np.sqrt(sum1)\n \n new_cosines = cosines\n new_search_point = np.empty((1, dim))\n for i in range(0, dim):\n new_search_point[0,i] = global_mean_arr[0,i] - new_cosines[0,i]*global_std_arr[0,i]*cur_beta\n \n iterations = iterations + 1\n \n \n B = 
Symbol('B')\n coordinates = []\n for i in range(0, dim):\n coordinates.append(global_mean_arr[0, i] - new_cosines[0,i]*global_std_arr[0, i]*B)\n new_beta = float(solve(func(coordinates), B)[0])\n \n cosines = new_cosines \n beta_value = new_beta\n p_f = sst.norm.cdf(-new_beta)\n x = new_search_point\n u = (x[0,:] - global_mean_arr[0,:])/global_std_arr\n \n return(beta_value, p_f, x, u, global_mean_arr, global_std_arr, cosines, iterations)\n \n def HL_RF(func, dist_list, init_search_point):\n\n cur_beta = 3\n new_beta = 0\n dim = len(dist_list)\n\n new_search_point = np.array(init_search_point).reshape((1, dim))\n iterations = 0\n while abs(cur_beta - new_beta) > 0.001 and abs(func(new_search_point[0])) > 0.001:\n global_mean_arr = np.empty((1, dim))\n global_std_arr = np.empty((1, dim))\n cur_beta = new_beta\n cur_search_point = new_search_point\n \n for i in range(0, dim):\n if dist_list[i][1] != 'norm':\n global_mean_arr[0,i], global_std_arr[0, i] = Rosenblatt_Transform(dist_list[i][0], cur_search_point[0,i])\n else:\n global_mean_arr[0,i], global_std_arr[0, i] = dist_list[i][0].mean(), dist_list[i][0].std()\n \n f_val = func(cur_search_point[0])\n \n x_ast = np.empty((1, dim))\n for i in range(0, dim):\n x_ast[0,i] =(cur_search_point[0,i] - global_mean_arr[0,i])/global_std_arr[0,i]\n\n grad_val = scipy.optimize.approx_fprime(cur_search_point[0], func, 0.000001)\n grad_val = grad_val.reshape((1, dim)) \n \n grad_val_ast = np.empty(grad_val.shape)\n for i in range(0, dim):\n grad_val_ast[0,i] = grad_val[0,i]*global_std_arr[0,i]\n \n t1 = 1/np.sum(grad_val_ast[0,:]**2)\n\n t2 = sum(grad_val_ast[0,:]*x_ast[0,:]) - f_val\n \n t3 = t1*t2\n \n new_x_ast = np.empty(x_ast.shape)\n for i in range(0, dim):\n new_x_ast[0,i] = t3*grad_val_ast[0,i]\n u = new_x_ast\n new_beta = np.linalg.norm(new_x_ast)\n \n new_search_point = np.empty((1, dim))\n for i in range(0, dim):\n new_search_point[0,i] = new_x_ast[0,i]*global_std_arr[0,i] + global_mean_arr[0,i]\n iterations = iterations + 1\n \n grad_val_ast_sum = sum(grad_val_ast[0,:]**2)\n cosines = grad_val_ast/(grad_val_ast_sum**0.5)\n beta_value = new_beta\n x = new_search_point\n p_f = sst.norm.cdf(-beta_value)\n \n return(beta_value, p_f, x, u, global_mean_arr, global_std_arr, cosines, iterations)\n \n if alg == 'slsqp':\n beta_value, p_f, x, u, mu, sig, cosines, iterations = SLSQP(func, dist_list, init_search_point)\n elif alg == 'HL-R':\n beta_value, p_f, x, u, mu, sig, cosines, iterations = HL_R(func, dist_list, init_search_point)\n elif alg == 'HL-RF':\n beta_value, p_f, x, u, mu, sig, cosines, iterations = HL_RF(func, dist_list, init_search_point)\n \n d = len(dist_list)\n\n R0 = np.eye(d)\n \n for i in range(0, d):\n R0[-1,i] = cosines[0,i]\n \n Q, R = scipy.linalg.rq(R0)\n \n def f_l(x_l):\n return(func([x_l[i]*sig[0,i] + mu[0,i] for i in range(0, d)]))\n \n x = np.array(x).reshape((1, -1))\n u = x[0,:]*sig[0,:] + mu[0,:]\n \n H = nd.Hessian(f_l)(u)\n \n grad_val_standard = (scipy.optimize.approx_fprime(x[0], func, 0.00000001)[:])*(sig[0,:])\n \n dist_standard = np.linalg.norm(grad_val_standard)\n \n A_1 = 1/dist_standard\n R_transp = np.transpose(R)\n A_2 = R.dot(H)\n A_3 = A_2.dot(R_transp)\n \n A = A_3.dot(A_1)\n \n A = A[0:-1, 0:-1]\n \n k = np.linalg.eig(A)[0]\n \n prod_arr = np.empty((1, len(k)))\n for i in range(0, len(k)):\n prod_arr[0,i] = (1 + beta_value*k[i])**-0.5\n \n p_f_sorm = p_f*np.prod(prod_arr)\n beta_sorm = -1*scipy.stats.norm.ppf(p_f_sorm)\n \n print('-------------------------')\n print('Second-Order Reliability Analysis')\n 
print('Algorithm:',alg,'solver')\n print('Iterations: {}\\nReliability index = {}\\nProbability of failure = {}'.format(iterations, beta_sorm, p_f_sorm))\n print('-------------------------')\n \n return(beta_sorm, p_f_sorm)", "def r_sq(data, fit):\n ss_res = np.sum((data - fit) ** 2)\n ss_tot = np.sum((data - np.mean(data)) ** 2)\n\n return 1 - (ss_res / ss_tot)", "def sqrt(data):\n return _make.sqrt(data)", "def norm_squared_of_vector(q_1: Q):\n\n end_q_type = f\"|V({q_1.q_type})|²\"\n\n qxq = _commuting_products(q_1, q_1)\n\n nv_q = Q(q_type=end_q_type, representation=q_1.representation)\n nv_q.t = qxq[\"xx+yy+zz\"]\n\n return nv_q", "def isSqrt(self):\n return _libsbml.ASTNode_isSqrt(self)", "def squared_norm(self) -> float:\n return self.__real**2 + self.__img[0]**2 + self.__img[1]**2 + self.__img[2]**2", "def is_sqf(f):\n return dmp_sqf_p(f.rep, f.lev, f.dom)", "def sqnorm(v):\n res = 0\n for elt in v:\n for coef in elt:\n res += coef ** 2\n return res", "def norm(self):\n raise NotImplementedError", "def frobeniusNorm(X):\n accum = 0\n V = np.reshape(X,X.size)\n for i in xrange(V.size):\n accum += abs(V[i] ** 2)\n return np.sqrt(accum)", "def sqrt(x):\n return 0.0", "def srwf(xi):\n\treturn np.sqrt(wienergain(xi)) # SRWF gain function.", "def symsqrt_v2(A, func='symeig'):\n if func == 'symeig':\n s, v = A.symeig(eigenvectors=True) # This is faster in GPU than CPU, fails gradcheck. See https://github.com/pytorch/pytorch/issues/30578\n elif func == 'svd':\n _, s, v = A.svd() # But this passes torch.autograd.gradcheck()\n else:\n raise ValueError()\n\n above_cutoff = s > s.max() * s.size(-1) * torch.finfo(s.dtype).eps\n\n ### This doesn't work for batched version\n\n ### This does but fails gradcheck because of inpalce\n\n ### This seems to be equivalent to above, work for batch, and pass inplace. 
CHECK!!!!\n s = torch.where(above_cutoff, s, torch.zeros_like(s))\n\n sol =torch.matmul(torch.matmul(v,torch.diag_embed(s.sqrt(),dim1=-2,dim2=-1)),v.transpose(-2,-1))\n\n return sol", "def norm(self):\n return numpy.linalg.norm(self.values)", "def normalized_extended_euclidean_algorithm(f, g, normal=None):\n\n if f.parent() is not g.parent():\n raise ValueError(\"Arguments should belong to the same ring\")\n\n domain = f.parent()\n\n if not domain.is_euclidean_domain():\n raise ValueError(\"Arguments should belong to an euclidean domain\")\n\n if normal is None:\n if domain is ZZ:\n normal = lambda z: z.abs()\n elif is_PolynomialRing(domain) and domain.base().is_field():\n normal = lambda f: f.parent().zero() if f.is_zero() else f.quo_rem(f.lc())[0]\n else:\n raise ValueError(\"No default implementation for normal found, a value must be provided\")\n\n q = [domain.zero()]\n rho = [__lu(f, normal), __lu(g, normal)]\n r = [normal(f), normal(g)]\n s = [domain.one().quo_rem(rho[0])[0], domain.zero()]\n t = [domain.zero(), domain.one().quo_rem(rho[1])[0]]\n\n i = 1\n while r[i] != domain.zero():\n q.append(r[i - 1].quo_rem(r[i])[0])\n rho.append(__lu(r[i - 1] - q[i] * r[i], normal))\n r.append((r[i - 1] - q[i] * r[i]).quo_rem(rho[-1])[0])\n s.append((s[i - 1] - q[i] * s[i]).quo_rem(rho[-1])[0])\n t.append((t[i - 1] - q[i] * t[i]).quo_rem(rho[-1])[0])\n i += 1\n\n return r, s, t, q", "def norm(self):", "def my_sqrt(x):\n square_root = x**(0.5)\n return square_root", "def normalizeL2(f):\r\n \r\n f=np.array(f)\r\n fsum=np.sum(np.abs(f))\r\n if fsum==0:\r\n fnorm=f\r\n else:\r\n fnorm=f/np.sqrt(np.sum(np.abs(f)**2))\r\n \r\n return fnorm", "def sqrt(obj):\n\tif isinstance(obj, Variable):\n \t\tnew_Variable = Variable(obj.val, obj.der)\n \t\treturn new_Variable.__pow__(0.5)\n\telse:\n\t\treturn np.sqrt(obj)", "def norm(x):\n return np.sqrt(np.sum(x ** 2))", "def norm(self):\n return np.linalg.norm(self.values)", "def S(a, b):\n return np.sqrt(np.dot(a, a) + np.dot(b, b) - 2*np.dot(a, b))", "def norm(self):\n if self._coord_format != constants.MatrixCoordinateDefault:\n self._logger.error(\"invalid coordinate format\")\n raise NotImplementedError(\"invalid coordinate format\")\n\n if self._dtype == complex:\n def __map(m):\n return m[2].real ** 2 + m[2].imag ** 2\n else:\n def __map(m):\n return m[2] ** 2\n\n n = self._data.map(\n __map\n ).reduce(\n lambda a, b: a + b\n )\n\n return math.sqrt(n)", "def sqrt(a):", "def _compute_squared_recon_error(tensor, kruskal_factors, norm_tensor):\n return tensorly.tenalg.norm(tensor - kruskal_to_tensor(kruskal_factors), 2) / norm_tensor", "def sqrt_hessian(self, module, g_inp, g_out):\n return self._sqrt_hessian(module, g_inp, g_out)", "def is_squarefree(self, f):\n if f.is_ground:\n return True\n g = f\n for x in self.gens:\n g = self.gcd(g, f.diff(x))\n if g.is_ground:\n return True\n return False", "def eval_K(self, S):\n K = (self.eigenfunctions[self.X] * S[None, :]) @ \\\n self.eigenfunctions[self.X].T # shape (n,n)\n return K", "def symsqrt_v1(A, func='symeig'):\n ## https://github.com/pytorch/pytorch/issues/25481#issuecomment-576493693\n ## perform the decomposition\n ## Recall that for Sym Real matrices, SVD, EVD coincide, |λ_i| = σ_i, so\n ## for PSD matrices, these are equal and coincide, so we can use either.\n if func == 'symeig':\n s, v = A.symeig(eigenvectors=True) # This is faster in GPU than CPU, fails gradcheck. 
See https://github.com/pytorch/pytorch/issues/30578\n elif func == 'svd':\n _, s, v = A.svd() # But this passes torch.autograd.gradcheck()\n else:\n raise ValueError()\n\n ## truncate small components\n good = s > s.max(-1, True).values * s.size(-1) * torch.finfo(s.dtype).eps\n components = good.sum(-1)\n common = components.max()\n unbalanced = common != components.min()\n if common < s.size(-1):\n s = s[..., :common]\n v = v[..., :common]\n if unbalanced:\n good = good[..., :common]\n if unbalanced:\n s = s.where(good, torch.zeros((), device=s.device, dtype=s.dtype))\n return (v * s.sqrt().unsqueeze(-2)) @ v.transpose(-2, -1)", "def _do_sqrt(x, prec=None, extend=True, all=False):\n if prec:\n if x >= 0:\n return RealField(prec)(x).sqrt(all=all)\n else:\n return ComplexField(prec)(x).sqrt(all=all)\n if x == -1:\n from sage.symbolic.pynac import I\n z = I\n else:\n z = SR(x) ** one_half\n\n if all:\n if z:\n return [z, -z]\n else:\n return [z]\n return z", "def get_s( self ):\n\n # initialize scaling factor as unknown variable, assuming it's real and\n # greater than zero\n _s = Symbol( 's', real = True, positive = True )\n\n # solve for scaling factor (first argument is expression set equal to zero)\n s = solve( self.a * _s ** self.n + self.b * _s - 1, _s )\n\n # save result as float\n self.s = float( s[ 0 ] )", "def convert_sqrt(node, **kwargs):\n return create_basic_op_node('Sqrt', node, kwargs)", "def get_bprop_sqrt(self):\n mul_func = P.Mul()\n fill_func = P.Fill()\n div_op = P.RealDiv()\n sqrt = P.Sqrt()\n dtype = P.DType()\n\n def bprop(x, out, dout):\n temp = div_op(fill_func(dtype(x), shape_op(x), 0.5), sqrt(x))\n dx = mul_func(dout, temp)\n return (dx,)\n return bprop", "def eval_K_chol(self, S, sigma_n, sigma_f):\n K = self.eval_K(S)\n K += sigma_n * np.eye(K.shape[0])\n K_chol = jitchol(K)\n return K_chol", "def squareform(X, force=\"no\", checks=True):\n\n return ssd.squareform(X, force, checks)", "def vsfun(Q_slm, theta, phi,f=[]):\n vsf_th=numpy.zeros(theta.shape, dtype='complex')\n vsf_ph=numpy.zeros(theta.shape, dtype='complex')\n for (s,l,m) in Q_slm:\n vsh_th,vsh_ph=K(s, l, m, theta, phi)\n c_slm=Q_slm.getBysnm(s, l, m) if not(f) else Q_slm.getBysnm(s, l, m)(f)\n vsf_th=vsf_th+c_slm*vsh_th\n vsf_ph=vsf_ph+c_slm*vsh_ph\n return vsf_th, vsf_ph", "def zzx_sqf_part(f):\n quo = zzx_quo(f, zzx_gcd(f, zzx_diff(f)))\n return zzx_primitive(quo)[1]", "def norm(self):\n return np.linalg.norm(self.ravel())", "def sqrtsigned(x):\n return tf.sign(x) * tf.sqrt(tf.abs(x))", "def sqf_normal(a, b, c, steps=False):\n ABC = _remove_gcd(a, b, c)\n sq = tuple(square_factor(i) for i in ABC)\n sqf = A, B, C = tuple([i//j**2 for i,j in zip(ABC, sq)])\n pc = igcd(A, B)\n A /= pc\n B /= pc\n pa = igcd(B, C)\n B /= pa\n C /= pa\n pb = igcd(A, C)\n A /= pb\n B /= pb\n\n A *= pa\n B *= pb\n C *= pc\n\n if steps:\n return (sq, sqf, (A, B, C))\n else:\n return A, B, C", "def Norm(self):\n \n return sqrt(sum([sum(abs(x)**2) for x in self.__ObjList]))", "def sqrt(self, a):\n raise NotImplementedError", "def sqrt(tensor):\n raise NotImplementedError", "def fs_r(self, percent=0.9, N=None):\n\t\tif not 0 <= percent <= 1:\n\t\t\traise ValueError(\"Percent should be a real number between 0 and 1.\")\n\t\tif N:\n\t\t\tif not isinstance(N, (int, numpy.int64)) or N<=0:\n\t\t\t\traise ValueError(\"N should be a positive integer.\")\n\t\t\tN = min(N, self.rank)\n\t\t\t# S = numpy.zeros((self._numitems, N))\n\t\t# else:\n\t\tself.k = 1 + numpy.flatnonzero(numpy.cumsum(self.L) >= sum(self.L)*percent)[0]\n\t\t\t# S = 
numpy.zeros((self._numitems, self.k))\t\t\n\t\t# the sign of the square root can be either way; singular value vs. eigenvalue\n\t\t# numpy.fill_diagonal(S, -numpy.sqrt(self.E) if self.cor else self.s)\n\t\tnum2ret = N if N else self.k\n\t\ts = -numpy.sqrt(self.L) if self.cor else self.s\n\t\tS = scipy.linalg.diagsvd(s[:num2ret], self._numitems, num2ret)\t\t\n\t\tself.F = _mul(self.D_r, self.P, S)\n\t\treturn self.F", "def RMSE(F1: Array, F2: Array, rel_tol: float = 1e-3, norm=True) -> float:\n x, y = F1.sum(), F2.sum()\n assert abs((x - y) / x) < rel_tol, \"arrays do not have same sum (up to rel. tol.)\"\n # assert np.isclose(F1.sum(), F2.sum()), 'arrays should have same sum'\n\n N = np.prod(F1.shape)\n\n diff = F1 - F2\n power = diff.power(2) if sp.issparse(diff) else np.power(diff, 2)\n out = np.sqrt(power.sum() / N)\n\n if norm:\n out *= N / x\n\n return out" ]
[ "0.6828397", "0.6347827", "0.58674204", "0.58420646", "0.57301825", "0.56841624", "0.56582105", "0.56257766", "0.5603542", "0.55873054", "0.5552461", "0.55447716", "0.55371946", "0.5521278", "0.5477447", "0.54771346", "0.5461877", "0.54501307", "0.54418135", "0.53796417", "0.53401095", "0.5336934", "0.530898", "0.53083265", "0.53063637", "0.5299868", "0.52738804", "0.52732944", "0.52694875", "0.5263808", "0.52574515", "0.52343297", "0.52302945", "0.51968837", "0.51962614", "0.5196183", "0.51944244", "0.51814157", "0.51577365", "0.5152607", "0.51211023", "0.5106413", "0.509934", "0.5072237", "0.5065311", "0.5061671", "0.50513995", "0.5050703", "0.50448424", "0.50424373", "0.50323766", "0.5024649", "0.50216067", "0.5013772", "0.5012232", "0.5009696", "0.49964872", "0.49957946", "0.4990836", "0.49879783", "0.49736595", "0.49674723", "0.49635315", "0.49571526", "0.4948787", "0.49410725", "0.49385437", "0.49288207", "0.4922953", "0.4922473", "0.4921782", "0.49022603", "0.48907554", "0.48826706", "0.4881639", "0.48808828", "0.48792905", "0.48763713", "0.48526558", "0.48472965", "0.4836203", "0.48339748", "0.48308986", "0.4830495", "0.4824479", "0.48209357", "0.48097083", "0.48075646", "0.4795829", "0.47940317", "0.47856045", "0.47842985", "0.47745597", "0.47681072", "0.47662503", "0.47621793", "0.47449082", "0.47426614", "0.47333387", "0.47234356" ]
0.7036989
0
tries: number of times to retry starting the broker; < 0 means infinitely many. delay: number of seconds to wait after the first failed attempt. backoff: factor by which the delay will be multiplied after each failure.
def startSTOMPBroker(config, serverUpEvent, tries=-1, delay=1, backoff=1.5): #stomp broker mtries = tries mdelay = delay coilserver = None from coilmq.config import config as coilconfig if config.has_section('coilmq'): for k,v in config.items('coilmq'): coilconfig.set('coilmq', k, v) logger.debug("Set %s to %s for coilmq config." % (k,v)) while True: try: coilserver = coilmq.start.server_from_config(coilconfig) logger.info("Stomp server listening on %s:%s" % \ coilserver.server_address) serverUpEvent.set() coilserver.serve_forever() except IOError as ex: logger.error("Exception while starting coilmq broker: '%s'", ex) if mtries != 0: logger.debug("Retrying coilmq startup in %.1f seconds...", mdelay) time.sleep(mdelay) mdelay *= backoff mtries -= 1 else: logger.debug("Ran out of trials (tried %d times) for coilmq startup. Giving up.", tries) break finally: if coilserver: coilserver.server_close()
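A tiny sketch of the tries/delay/backoff semantics described in the query — a hypothetical helper, not part of the broker code above: each failed attempt multiplies the wait by the backoff factor, and a negative tries value retries forever.

```python
# Hypothetical helper, only to illustrate the tries/delay/backoff parameters above.
def retry_delays(tries, delay=1.0, backoff=1.5):
    """Yield successive sleep times; tries < 0 keeps yielding forever."""
    while tries != 0:
        yield delay
        delay *= backoff
        tries -= 1

print(list(retry_delays(5)))  # [1.0, 1.5, 2.25, 3.375, 5.0625]
```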
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_retry_delay(self, last_delay):\n return last_delay * 2", "def retry(times):\n return repeat_with_success_at_least(times, 1)", "def retry(tries, delay=3, backoff=2):\n tries = math.floor(tries)\n if tries < 0:\n raise ValueError(\"tries must be 0 or greater\")\n\n\n def deco_retry(f):\n @wraps(f)\n def f_retry(*args, **kwargs):\n mtries, mdelay = tries, delay # make mutable\n err = None\n while mtries > 0:\n print(\"Trial Number:\" + str(mtries))\n try:\n rv = f(*args, **kwargs)\n except DBException as e:\n print(\"Retry..\")\n mtries -= 1 # consume an attempt\n time.sleep(mdelay) # wait...\n mdelay += backoff # make future wait longer\n err = e\n\n # except Exception as e:\n # print(str(e))\n # mtries -= 1 # consume an attempt\n # time.sleep(mdelay) # wait...\n # mdelay += backoff # make future wait longer\n # err = e\n else:\n return rv\n raise err\n\n return f_retry # true decorator -> decorated function\n\n return deco_retry # @retry(arg[, ...]) -> true decorator", "def __init__(self, tries , exceptions=None, delay=0.01):\n self.tries = tries\n if exceptions is None:\n exceptions = Retry.default_exceptions\n self.exceptions = exceptions\n self.delay = delay", "def default_backoff(retries, max_retries):\n\n time.sleep(random.random() * (max_retries - retries) / max_retries * 2)", "def __init__(self, tries, exceptions=None, delay=1):\r\n self.tries = tries\r\n if exceptions is None:\r\n exceptions = retry.default_exceptions\r\n self.exceptions = exceptions\r\n self.delay = delay", "def _delay(self, n=None):", "def retry(exceptions=Exception, tries=3, delay=1):\n\n def retry_decorator(func):\n def func_wrapper(*args, **kwargs):\n _tries = tries\n while _tries:\n try:\n return func(*args, **kwargs)\n except exceptions as e:\n _tries -= 1\n if not _tries:\n raise\n\n time.sleep(delay)\n\n return func_wrapper\n\n return retry_decorator", "def retry(initial_delay,\n max_delay,\n factor=2.0,\n jitter=0.25,\n is_retriable=None):\n if factor < 1:\n raise ValueError('factor must be >= 1; was %f' % (factor,))\n\n if jitter >= 1:\n raise ValueError('jitter must be < 1; was %f' % (jitter,))\n\n # Generator to compute the individual delays\n def delays():\n delay = initial_delay\n while delay <= max_delay:\n yield delay * random.uniform(1 - jitter, 1 + jitter)\n delay *= factor\n\n def wrap(fn):\n \"\"\"Wrapper function factory invoked by decorator magic.\"\"\"\n\n def wrapped_fn(*args, **kwargs):\n \"\"\"The actual wrapper function that applies the retry logic.\"\"\"\n for delay in delays():\n try:\n return fn(*args, **kwargs)\n except Exception as e: # pylint: disable=broad-except)\n if is_retriable is None:\n continue\n\n if is_retriable(e):\n time.sleep(delay)\n else:\n raise\n return fn(*args, **kwargs)\n\n return wrapped_fn\n\n return wrap", "def backoff_time(attempt, retry_backoff=2., max_delay=30.):\n delay = retry_backoff * (2 ** attempt)\n # Add +-25% of variation.\n delay += delay * ((random.random() - 0.5) / 2.)\n return min(delay, max_delay)", "def retry(exception, tries=10, delay=1, backoff=2, max_delay=30):\n def deco_retry(f):\n @wraps(f)\n def f_retry(*args, **kwargs):\n m_tries, m_delay = tries, delay\n while m_tries > 1:\n try:\n return f(*args, **kwargs)\n except exception:\n time.sleep(min(m_delay, max_delay))\n m_tries -= 1\n m_delay *= backoff\n return f(*args, **kwargs)\n return f_retry # true decorator\n return deco_retry", "def set_retry_timeout(self, retry_timeout):", "def retry_query(tries=3, delay=1):\n\n def retry_wrapper(func):\n \"\"\"Wrapper function.\n 
:params func: function to call\n :return: wrapper function\n \"\"\"\n\n @functools.wraps(func)\n def inner(*args, **kwargs):\n \"\"\"Inner wrapper function\n :params *args: list of different arguments\n *kwargs: dictionary of different arguments\n \"\"\"\n\n mtries = tries\n mdelay = delay\n\n while mtries:\n try:\n return func(*args, **kwargs)\n except Exception: # pylint: disable=broad-except\n if mtries:\n time.sleep(mdelay)\n mtries -= 1\n\n return inner\n\n return retry_wrapper", "def retry_exception(num, delay, func, exception=Exception, *args, **kwargs):\n i = 0\n while i <= num:\n try:\n func(*args, **kwargs)\n time.sleep(delay)\n except exception: # pylint: disable=broad-except\n i += 1\n continue\n return\n raise StopIteration(\"Function did not finished successfully\")", "def retries(self, count: int):\n if count < 0:\n raise ValueError(\"negative\")\n\n self._retries = count", "def retry(self, times):\n return Retry((requests.ConnectionError, requests.Timeout), times)", "def retry(ExceptionToCheck, tries=3, delay=3, backoff=2):\n\n def deco_retry(f):\n @wraps(f)\n def f_retry(*args, **kwargs):\n mtries, mdelay = tries, delay\n while mtries > 1:\n try:\n return f(*args, **kwargs)\n except ExceptionToCheck as e:\n logging.warning('%s, Retrying in %d seconds...', str(e), mdelay)\n time.sleep(mdelay)\n mtries -= 1\n mdelay *= backoff\n return f(*args, **kwargs)\n\n return f_retry\n\n return deco_retry", "def determine_sleep_times(self):\n\n determined_sleep_time = \\\n random.randrange(self.dns_conf.min_backoff_range,\n self.dns_conf.max_backoff_range)\n\n backoff = [(2 ** i) * determined_sleep_time for i in\n range(0, self.dns_conf.retries)]\n\n return backoff", "def sleep_for(self):\n return max(0, (self._retry_after - datetime.now()).total_seconds())", "def backoff(\n max_tries=constants.BACKOFF_DEFAULT_MAXTRIES,\n delay=constants.BACKOFF_DEFAULT_DELAY,\n factor=constants.BACKOFF_DEFAULT_FACTOR,\n exception_handler=always_retry,\n before_delay_handler=noop,\n after_delay_handler=noop):\n if max_tries <= 0:\n raise ValueError((\n 'Max tries must be greater than 0; got {!r}'\n ).format(max_tries))\n\n if delay <= 0:\n raise ValueError((\n 'Delay must be greater than 0; got {!r}'\n ).format(delay))\n\n if factor <= 1:\n raise ValueError((\n 'Backoff factor must be greater than 1; got {!r}'\n ).format(factor))\n\n def outter(f):\n def inner(*args, **kwargs):\n m_max_tries, m_delay = max_tries, delay # make mutable\n while m_max_tries > 0:\n try:\n retval = f(*args, **kwargs)\n except Exception as ex:\n m_max_tries -= 1 # consume an attempt\n if m_max_tries < 0:\n # run out of tries\n raise\n if exception_handler(ex):\n logger.info(\n (\n 'backoff retry for: %r (max_tries=%r, '\n 'delay=%r, factor=%r)'\n ),\n f,\n max_tries,\n delay,\n factor\n )\n before_delay_handler(ex)\n time.sleep(m_delay) # wait...\n after_delay_handler(ex)\n m_delay *= factor # make future wait longer\n else:\n # exception handler gave up\n raise\n else:\n # done without errors\n return retval\n return inner\n return outter", "def retry(exceptions, tries=3, delay=2, _logger=logger()):\n\n def deco_retry(f):\n @wraps(f)\n def f_retry(*args, **kwargs):\n mtries, mdelay = tries, delay\n while mtries > 1:\n try:\n return f(*args, **kwargs)\n except exceptions as e:\n msg = '{}, Retrying in {} seconds...'.format(e, mdelay)\n _logger.warning(msg)\n time.sleep(mdelay)\n mtries -= 1\n return f(*args, **kwargs)\n\n return f_retry # true decorator\n\n return deco_retry", "def retry(exception, tries=10, delay=3, 
backoff=0.1):\n def deco_retry(f):\n @wraps(f)\n def f_retry(*args, **kwargs):\n mtries, mdelay = tries, delay\n while mtries > 1:\n try:\n return f(*args, **kwargs)\n except exception as ex:\n print \"{0}, Retrying in {1} seconds...\".format(ex, mdelay)\n time.sleep(mdelay)\n mtries -= 1\n mdelay *= backoff\n return f(*args, **kwargs)\n return f_retry # true decorator\n return deco_retry", "def retry(attempts_number, delay=0, step=0, max_delay=-1,\n retry_on=Exception, logger=None):\n\n def decorator(func):\n\n @functools.wraps(func)\n def wrapper(*args, **kwargs):\n current_logger = logger\n\n attempts = 1\n retry_delay = delay\n\n try:\n if isinstance(args[0], object):\n current_logger = args[0].get_logger()\n except (AttributeError, IndexError):\n pass\n\n if isinstance(retry_on, (types.FunctionType,\n types.MethodType,)):\n catch_strategy = CatchFunctionStrategy(retry_on)\n else:\n catch_strategy = CatchExceptionStrategy(retry_on)\n\n while attempts <= attempts_number or attempts_number < 0:\n try:\n return func(*args, **kwargs)\n except Exception as e:\n if catch_strategy.need_to_retry(e):\n if attempts >= attempts_number >= 0:\n raise\n elif current_logger:\n retry_count = \"inf\" if attempts_number < 0 \\\n else attempts_number - 1\n\n current_logger.warning(\n \"Retry: Call to %(fn)s failed due to \"\n \"%(exc_class)s: %(exc)s, retry \"\n \"attempt #%(retry_no)s/\"\n \"%(retry_count)s after %(delay)ss\",\n dict(fn=func.__name__,\n exc=str(e),\n retry_no=attempts,\n exc_class=e.__class__.__name__,\n retry_count=retry_count,\n delay=retry_delay))\n time.sleep(retry_delay)\n attempts += 1\n retry_delay += step\n if 0 <= max_delay < retry_delay:\n retry_delay = max_delay\n else:\n raise\n return wrapper\n return decorator", "def backoff(start_sleep_time=0.1, border_sleep_time=30, factor=2, jitter=True):\n if start_sleep_time < 0.001:\n logger.warning('start_sleep_time fewer than 0.001 and will be set to 0.001')\n start_sleep_time = 0.001\n\n def decorator(target):\n @wraps(target)\n def retry(*args, **kwargs):\n attempt = 0\n while True:\n sleep_time = _sleep_time(start_sleep_time, border_sleep_time, factor, attempt, jitter)\n try:\n attempt += 1\n sleep(sleep_time)\n ret = target(*args, **kwargs)\n except Exception as e:\n logger.error(f'Exception is catched {e}')\n logger.warning(f'Wait fo {sleep_time} seconds and try again')\n else:\n return ret\n return retry\n return decorator", "def retry_connect(redis_cfg, tries=300, base_delay=4.):\n for i in range(tries):\n try:\n r = redis.StrictRedis(**redis_cfg)\n r.ping()\n return r\n except redis.ConnectionError as e:\n if i == tries - 1:\n raise\n else:\n delay = base_delay * (1 + (os.getpid() % 10) / 9)\n print(f'WARNING: could not connect to {redis_cfg}. Retrying after {delay} sec ({i+2}/{tries}). 
Error {e}')\n time.sleep(delay)", "def retry(tries, delay=3, backoff=2, except_on=(Exception, )):\n\n tries = math.floor(tries)\n\n def decorator(f):\n def f_retry(*args, **kwargs):\n return function_retry(\n tries, delay, backoff, except_on, f, *args, **kwargs)\n return f_retry # true decorator -> decorated function\n return decorator # @retry(arg[, ...]) -> true decorator", "def retry(func, repeat=3, delay=tickTime * 2):\n\twhile repeat:\n\t\tresult = func()\n\n\t\tif result is None and delay and repeat != 1:\n\t\t\tsleep(delay)\n\n\t\telse:\n\t\t\treturn result\n\n\t\trepeat -= 1", "def retry(callback, retries, sleep=0.5, catch=Exception, *args, **kwargs):\n r = 0\n while r < retries:\n r += 1\n try:\n return callback(*args, **kwargs)\n except catch as c:\n if r == retries:\n raise c\n else:\n time.sleep(r * sleep)", "def set_initial_delay(cls, initial_delay):\n LOGGER.debug(\"Updating initial delay to {} seconds\".format(initial_delay))\n # See https://twistedmatrix.com/documents/19.10.0/api/twisted.internet.protocol.ReconnectingClientFactory.html\n cls.initialDelay = initial_delay", "def __init__(self, retry_count):\n self.retry_count = retry_count", "def testGoodRetry(self):\n self.p = start_short_timeout_app_process()\n gateway = JavaGateway()\n connections = gateway._gateway_client.deque\n try:\n # Call #1\n gateway.jvm.System.currentTimeMillis()\n str_connection = str(connections[0])\n\n # Call #2 after, should not create new connections if the system is\n # not too slow :-)\n gateway.jvm.System.currentTimeMillis()\n self.assertEqual(1, len(connections))\n str_connection2 = str(connections[0])\n self.assertEqual(str_connection, str_connection2)\n\n sleep(0.5)\n gateway.jvm.System.currentTimeMillis()\n self.assertEqual(1, len(connections))\n str_connection3 = str(connections[0])\n # A new connection was automatically created.\n self.assertNotEqual(str_connection, str_connection3)\n except Py4JError:\n self.fail(\"Should retry automatically by default.\")\n finally:\n gateway.shutdown()\n self.p.join()", "def connect(self, num_retry_attempts=1):\n pass", "def retry(\n self, n: int, /, *args, error: Catchable = Exception, sleep=None, **kwargs\n ) -> \"fn\":\n\n func = self._mod.retry(n, self, error=error, sleep=sleep)\n return func(*args, **kwargs)", "def retry(ExceptionToCheck, tries=3, delay=3, backoff=2, logger=None):\n def deco_retry(f):\n\n @wraps(f)\n def f_retry(*args, **kwargs):\n mtries, mdelay = tries, delay\n while mtries > 1:\n try:\n return f(*args, **kwargs)\n except ExceptionToCheck, e:\n msg = \"%s, Retrying in %d seconds...\" % (str(e), mdelay)\n if logger:\n logger.warning(msg)\n else:\n print msg\n time.sleep(mdelay)\n mtries -= 1\n mdelay *= backoff\n return f(*args, **kwargs)\n\n return f_retry # true decorator\n\n return deco_retry", "def retry(exception_to_check, tries=4, delay=0.5, backoff=2):\n\n def deco_retry(f):\n def f_retry(*args, **kwargs):\n mtries, mdelay = tries, delay\n try_one_last_time = True\n while mtries > 1:\n try:\n return f(*args, **kwargs)\n try_one_last_time = False\n break\n except exception_to_check, e:\n msg = \"%s, Retrying in %d seconds...\" % (str(e), mdelay)\n logging.warning(msg)\n time.sleep(mdelay)\n mtries -= 1\n mdelay *= backoff\n if try_one_last_time:\n return f(*args, **kwargs)\n return\n return f_retry\n return deco_retry", "def retry_after(self, delay: float, request_method: Callable, *args: Any, **kwargs: Any) -> 'NetworkResponse':\n raise NotImplementedError # pragma: no cover", "def 
retry(exception_to_check=AssertionError, tries=100, delay=.1):\n def deco_retry(f):\n def f_retry(*args, **kwargs):\n mtries, mdelay = tries, delay\n while mtries > 0:\n try:\n return f(*args, **kwargs)\n except exception_to_check, e:\n log.info('%s, Retrying in %s seconds...' % (str(e), mdelay))\n time.sleep(mdelay)\n mtries -= 1\n try_time = float(tries*delay)\n raise exception_to_check('tried for %1.1f seconds, gave up' % try_time)\n return f_retry\n return deco_retry", "def retry(nattempts, exception=None):\n \n def tryIt(func):\n def wrapper(*args, **kwargs):\n attempts = 0\n while attempts < nattempts - 1:\n try:\n return func(*args, **kwargs)\n except (exception if exception is not None else Exception):\n attempts += 1\n return func(*args, **kwargs)\n return wrapper\n return tryIt", "def backoffState(self, tick):\n #assert (self.mState == States.BackOff)\n self.mBackoffIteration += 1\n if self.mBackoffIteration > self.MAX_BACKOFF_COUNT:\n ResultsSingleton.getInstance().recordError()\n self.mBackoffIteration = self.MAX_BACKOFF_COUNT\n #r = Random()\n R = np.random.uniform(0,1,1)[0] * ((2**self.mBackoffIteration) - 1)\n delay = R * self.bitTicks(self.BACKOFF_BITS)\n # convert delay to an int??\n self.mNextTickForRetryAfterBackoff = ((tick + int(delay)))\n #print(\"DELAY\")\n #print(self.mNextTickForRetryAfterBackoff)\n self.mState = self.States.BackOffWaiting", "def exponential_backoff(max_tries, max_sleep=20):\n return [random.random() * min(max_sleep, (2**i - 1)) for i in range(0, max_tries)]", "def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):\n def deco_retry(f):\n\n @wraps(f)\n def f_retry(*args, **kwargs):\n mtries, mdelay = tries, delay\n while mtries > 1:\n try:\n return f(*args, **kwargs)\n except ExceptionToCheck, e:\n msg = \"%s, Retrying in %d seconds...\" % (str(e), mdelay)\n if logger:\n logger.warning(msg)\n else:\n print msg\n time.sleep(mdelay)\n mtries -= 1\n mdelay *= backoff\n return f(*args, **kwargs)\n\n return f_retry # true decorator\n\n return deco_retry", "def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):\n def deco_retry(f):\n\n @wraps(f)\n def f_retry(*args, **kwargs):\n mtries, mdelay = tries, delay\n while mtries > 1:\n try:\n return f(*args, **kwargs)\n except ExceptionToCheck as e:\n msg = \"%s, Retrying in %d seconds...\" % (str(e), mdelay)\n if logger:\n logger.warning(msg)\n else:\n print(msg)\n sleep(mdelay)\n mtries -= 1\n mdelay *= backoff\n return f(*args, **kwargs)\n\n return f_retry # true decorator\n\n return deco_retry", "def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):\n def deco_retry(f):\n\n @wraps(f)\n def f_retry(*args, **kwargs):\n mtries, mdelay = tries, delay\n while mtries > 1:\n try:\n return f(*args, **kwargs)\n except ExceptionToCheck, e:\n msg = \"%s, Retrying in %d seconds...\" % (str(e), mdelay)\n if logger:\n logger.warning(msg)\n else:\n print(msg)\n time.sleep(mdelay)\n mtries -= 1\n mdelay *= backoff\n return f(*args, **kwargs)\n\n return f_retry # true decorator\n\n return deco_retry", "def delay(self, delay=None):\n if delay is None:\n return self._delayvalue\n self._delayvalue = int(delay)", "def retry(func, *args, **kwargs):\n\n # config\n backoff = 1. 
+ random.random() * 0.1\n max_backoff = 32\n max_retries = 5\n\n # try to make the request\n for i in range(max_retries):\n try:\n # return on success\n return func(*args, **kwargs)\n except Exception:\n # sleep on failure\n time.sleep(backoff)\n backoff = 2 * backoff if backoff < max_backoff else backoff\n \n # max retries exceeded\n raise RuntimeError('The connection to the server timed out.')", "async def _wait_retry(self) -> None:\n # Sleep 2^tries + 0…tries*3 seconds between retries\n self.retry_task = asyncio.create_task(\n asyncio.sleep(2 ** min(9, self.tries) + random.randint(0, self.tries * 3))\n )\n await self.retry_task\n self.retry_task = None", "def __init__(self, application, tries, retryable=None, highwater=2<<20,\n log_after_try_count=1, delay=0, delay_factor=2):\n self.application = application\n self.tries = tries\n\n if retryable is None:\n retryable = (TransientError, ConflictError, RetryException,)\n\n if not isinstance(retryable, (list, tuple)):\n retryable = [retryable]\n\n self.retryable = tuple(retryable)\n self.highwater = highwater\n self.delay = delay\n self.delay_factor = delay_factor\n self.log_after_try_count = log_after_try_count", "def _wait_for_port(self, delay=0.1, attempts=20):\n while attempts > 0:\n s = socket.socket()\n try:\n s.connect((self.host, self.port))\n except Exception:\n time.sleep(delay)\n attempts -= 1\n else:\n return\n finally:\n s.close()\n raise RuntimeError(\"Port %d is not open\" % self.port)", "def repeat_delay(self):\n return self.app.config.WORKER_REAP_DELAY", "def default_delay(self) -> int:\n return DEFAULT_DELAY", "def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):\r\n def deco_retry(f):\r\n\r\n @wraps(f)\r\n def f_retry(*args, **kwargs):\r\n mtries, mdelay = tries, delay\r\n while mtries > 1:\r\n try:\r\n return f(*args, **kwargs)\r\n except ExceptionToCheck as e:\r\n msg = \"%s, Retrying in %d seconds...\" % (str(e), mdelay)\r\n if logger:\r\n logger.warning(msg)\r\n else:\r\n print (msg)\r\n time.sleep(mdelay)\r\n mtries -= 1\r\n mdelay *= backoff\r\n return f(*args, **kwargs)\r\n return f_retry # true decorator\r\n return deco_retry", "def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):\r\n def deco_retry(f):\r\n\r\n @wraps(f)\r\n def f_retry(*args, **kwargs):\r\n mtries, mdelay = tries, delay\r\n while mtries > 1:\r\n try:\r\n return f(*args, **kwargs)\r\n except ExceptionToCheck, e:\r\n msg = \"%s, Retrying in %d seconds...\" % (str(e), mdelay)\r\n if logger:\r\n logger.warning(msg)\r\n else:\r\n print msg\r\n time.sleep(mdelay)\r\n mtries -= 1\r\n mdelay *= backoff\r\n return f(*args, **kwargs)\r\n\r\n return f_retry # true decorator\r\n\r\n return deco_retry", "def _exponential_backoff(backoff=0.1, max_delay=5):\n attempt = 0\n while True:\n delay = backoff * (2 ** attempt)\n if delay > max_delay:\n \"\"\"prevent redundant calculations\"\"\"\n break\n attempt += 1\n yield delay\n while True:\n yield max_delay", "def GetBackoff(self, retry_backoff, tries):\n if retry_backoff > 1:\n return retry_backoff * (2 ** (tries - 1))\n else:\n return retry_backoff", "async def _sleep_backoff(\n self, settings: Dict[str, Any], transport: AsyncHttpTransport[HTTPRequestType, AsyncHTTPResponseType]\n ) -> None:\n backoff = self.get_backoff_time(settings)\n if backoff <= 0:\n return\n await transport.sleep(backoff)", "def test_connectionLostBackoffDelayDoubles(self):\r\n self.pm.startService()\r\n self.pm.addProcess(\"foo\", [\"foo\"])\r\n self.reactor.advance(self.pm.threshold - 1) #9s\r\n 
self.assertIn(\"foo\", self.pm.protocols)\r\n self.assertEqual(self.pm.delay[\"foo\"], self.pm.minRestartDelay)\r\n # process dies within the threshold and should not restart immediately\r\n self.pm.protocols[\"foo\"].processEnded(Failure(ProcessDone(0)))\r\n self.assertEqual(self.pm.delay[\"foo\"], self.pm.minRestartDelay * 2)", "def min_wait_between_retries(self) -> ConfigNodePropertyInteger:\n return self._min_wait_between_retries", "def call_with_retries(function, retry_count, retry_delay):\n logger.info(\"Calling function: %s with retry count: %s, retry_delay: %s\",\n function, retry_count, retry_delay)\n for retry in range(1, int(retry_count) + 1):\n logger.info(\"Attempt number: %s\", retry)\n try:\n return function()\n # pylint: disable=broad-except\n except Exception as verify_exception:\n logger.info(\"Verify exception: %s\", verify_exception)\n time.sleep(float(retry_delay))\n if retry > int(retry_count):\n logger.info(\"Exceeded max retries! Reraising last exception\")\n raise\n assert False, \"Should never get here.\"", "def retry_wait_backoff(fn_check, fail_msg, max_wait=20):\n sleep_time = 0.1\n total_waited = 0.0\n while total_waited < max_wait:\n if fn_check():\n break\n log.info('{0}, retrying in {1:.2f}s'.format(fail_msg, sleep_time))\n total_waited += sleep_time\n time.sleep(sleep_time)\n sleep_time = min(sleep_time * 2, 5, max_wait - total_waited)\n else:\n raise TimeoutError('{0} after {1:.2f}s'.format(fail_msg, max_wait))", "def retry_wait_backoff(fn_check, fail_msg, max_wait=20):\n sleep_time = 0.1\n total_waited = 0.0\n while total_waited < max_wait:\n if fn_check():\n break\n log.info('{0}, retrying in {1:.2f}s'.format(fail_msg, sleep_time))\n total_waited += sleep_time\n time.sleep(sleep_time)\n sleep_time = min(sleep_time * 2, 5, max_wait - total_waited)\n else:\n raise TimeoutError('{0} after {1:.2f}s'.format(fail_msg, max_wait))", "def retry_timer(which_retry, retry_base_interval, mode = None):\n\n if mode == None:\n mode = 'random'\n\n if mode == 'random':\n retry_wait_interval = retry_base_interval * random.random()\n elif mode == 'multiply':\n retry_wait_interval = which_retry * retry_base_interval\n elif mode == 'multirand':\n retry_wait_interval = which_retry * retry_base_interval * random.random()\n\n return {'mode': mode, 'interval': retry_wait_interval, 'retry': which_retry }", "def declare_retry_queue(self, delay):\n # type: (BaseRetryPolicy, int) -> str\n\n delay_in_ms = int(delay * 1000)\n retry_queue_name = '{}.{}.{}'.format(\n self.consumer.queue_name, self.retry_queue_suffix, delay_in_ms)\n\n # To avoid frequent queue create and destroy for low retry delays\n queue_ttl = delay_in_ms * 2\n if queue_ttl < self.min_retry_queue_ttl:\n queue_ttl = self.min_retry_queue_ttl\n\n self.consumer.channel.queue_declare(\n callback=None,\n queue=retry_queue_name,\n durable=self.consumer.durable,\n nowait=True,\n arguments={\n 'x-dead-letter-exchange': '',\n 'x-dead-letter-routing-key': self.consumer.queue_name,\n 'x-message-ttl': delay_in_ms,\n 'x-expires': queue_ttl\n })\n logger.warning(\n 'Retry queue \"{}\" is created/redeclared'.format(retry_queue_name))\n return retry_queue_name", "def test_exp_backoff():\n stream = ReconnectingTweetStream('user', 'pass', initial_wait=1, max_wait=5,\n error_cb=error_callback)\n # A connection failure should happen automatically because of patch\n assert_raises(ConnectionError, stream.next)\n # By now, callback should have been invoked 3 times (1s, 2s, 4s)\n assert callback_invoked == 3", "def test_retry_run(self):\n 
pass", "def url_socket_retry(func, *args, **kw):\n min_delay = 1\n max_delay = 32\n max_attempts = 4\n\n for idx, delay in enumerate(\n backoff_delays(min_delay, max_delay, jitter=True)):\n try:\n return func(*args, **kw)\n except HTTPError as err:\n if not (err.status == 503 and 'Slow Down' in err.reason):\n raise\n if idx == max_attempts - 1:\n raise\n except URLError as err:\n if not isinstance(err.reason, socket.error):\n raise\n if err.reason.errno not in (104, 110):\n raise\n if idx == max_attempts - 1:\n raise\n\n time.sleep(delay)", "def get_backoff_seconds(self) -> int:\n max_instances = self.get_max_instances()\n instances = max_instances if max_instances is not None else self.get_instances()\n if instances == 0:\n return 1\n else:\n return int(ceil(10.0 / instances))", "async def sleep(cls, delay: float) -> None:", "def __init__(self, timeout, tries):\r\n self._timeout = timeout\r\n self._tries = tries", "def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):\n def deco_retry(f):\n def f_retry(*args, **kwargs):\n mtries, mdelay = tries, delay\n try_one_last_time = True\n while mtries > 1:\n try:\n return f(*args, **kwargs)\n try_one_last_time = False\n break\n except ExceptionToCheck, e:\n if logger:\n msg = getMessage(\"en\", \"retrying-notification\").format(str(e), mdelay)\n logger.warning(msg)\n time.sleep(mdelay)\n mtries -= 1\n mdelay *= backoff\n if try_one_last_time:\n return f(*args, **kwargs)\n return\n return f_retry # true decorator\n return deco_retry", "def retry(func):\n # ... retry MAX_RETRIES times\n # ...\n # make sure you include this for testing:\n # except Exception as exc:\n # print(exc)\n # ...\n # and use wraps to preserve docstring\n #\n @wraps(func)\n def wrapper(*args, **kwargs):\n\n tries = MAX_RETRIES\n while tries > 0:\n try:\n return func(*args, **kwargs)\n except Exception as err:\n print(err)\n\n tries -= 1\n\n raise MaxRetriesException\n\n return wrapper", "def __init__(self, delay=0):\n self.delay = delay", "def testBadRetry(self):\n self.p = start_example_app_process()\n gateway = JavaGateway(\n gateway_parameters=GatewayParameters(read_timeout=0.250))\n try:\n value = gateway.entry_point.getNewExample().sleepFirstTimeOnly(500)\n self.fail(\n \"Should never retry once the first command went through.\"\n \"number of calls made: {0}\".format(value))\n except Py4JError:\n self.assertTrue(True)\n finally:\n gateway.shutdown()\n self.p.join()", "def get_retry_count(self):\r\n return self.retried_nomax + self.retried_withmax", "def delay(ms: int, /) -> None:", "def fair_use_delay(self):\n if foo._error_count > 1:\n delay = foo(120, 30 * (2 ** (foo._error_count - 2)))\n else:\n delay = foo._next_call_timestamp - foo(foo.time())\n if delay > 0 and foo.respect_fair_use_policy:\n foo.info('Sleeping for %s seconds' % delay)\n foo.sleep(delay)", "def retries(self) -> int:\n return self._retries", "def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):\n\tdef deco_retry(f):\n\t\t@wraps(f)\n\t\tdef f_retry(*args, **kwargs):\n\t\t\tmtries, mdelay = tries, delay\n\t\t\twhile mtries > 1:\n\t\t\t\ttry:\n\t\t\t\t\treturn f(*args, **kwargs)\n\t\t\t\texcept ExceptionToCheck, e:\n\t\t\t\t\tmsg = \"func: '{}' > exc: {}, Retrying in {} seconds...\".format(str(f.__name__), str(e), mdelay)\n\t\t\t\t\tif logger:\n\t\t\t\t\t\tlogger.warning(msg)\n\t\t\t\t\telse:\n\t\t\t\t\t\tprint msg\n\t\t\t\t\ttime.sleep(mdelay)\n\t\t\t\t\tmtries -= 1\n\t\t\t\t\tmdelay *= backoff\n\t\t\treturn f(*args, **kwargs)\n\t\treturn f_retry\t# true 
decorator\n\treturn deco_retry", "def retry(self):\n return self._retry", "def retry(num=5):\n s = requests.Session()\n retries = Retry(total=num, backoff_factor=0.1,\n status_forcelist=[500, 502, 503, 504])\n s.mount('http://', HTTPAdapter(max_retries=retries))\n\n return s", "def delay(self):\n # well, so this is really bad practice\n # but since the nature of this app \n # I have to make assumptions around time..\n is_delayed_trader = self.delayed\n now = time.time()\n if not is_delayed_trader or self.message_arrival_estimate is None:\n self.message_arrival_estimate = now + self.default_delay\n delay = self.default_delay\n else:\n current_arrival_estimate = now + self.__delay\n if self.message_arrival_estimate > current_arrival_estimate:\n diff = self.message_arrival_estimate - current_arrival_estimate\n delay = diff + self.__delay\n self.message_arrival_estimate = now + delay \n else: \n self.message_arrival_estimate = current_arrival_estimate\n delay = self.__delay\n delay = round(delay, 4)\n log.debug('trader %s: message delay %s.' % (self.tag, delay))\n return delay", "def _RunWithRetries(self, callback, error_matcher):\n for i in xrange(FLAGS.gcloud_num_retries):\n try:\n return callback()\n except Exception as e: # pylint: disable=broad-except\n if not error_matcher(e):\n raise\n # Use randomized exponential backoff, like methods in\n # googleapiclient.http.\n retry_seconds = random.random() * 2**(i + 1)\n logging.warning('Request raised an error: %s\\n'\n 'Will retry in %f seconds.', e, retry_seconds)\n time.sleep(retry_seconds)\n\n return callback()", "def test_producer_stop_waiting_to_retry(self):\n clock = MemoryReactorClock()\n client = Mock(reactor=clock)\n f = Failure(BrokerNotAvailableError())\n ret = [fail(f)]\n client.send_produce_request.side_effect = ret\n client.topic_partitions = {self.topic: [0, 1, 2, 3]}\n client.metadata_error_for_topic.return_value = False\n msgs = [self.msg(\"one\"), self.msg(\"two\")]\n batch_n = 2\n\n producer = Producer(client, batch_every_n=batch_n, batch_send=True)\n d = producer.send_messages(self.topic, msgs=msgs)\n # At first, there's no result. Have to retry due to first failure\n self.assertNoResult(d)\n # Advance the clock, some, but not enough to retry\n clock.advance(producer._retry_interval / 2)\n # Stop the producer before the retry\n producer.stop()\n self.failureResultOf(d, tid_CancelledError)", "def retry(retry_times=3, interval=0.5, exceptions=Exception):\n def _decorator(func):\n @wraps(func)\n def _wrapped_func(*args, **kwargs):\n for attempt in range(1, retry_times + 1):\n try:\n return func(*args, **kwargs)\n except exceptions: # pylint: disable=broad-except\n if attempt < retry_times:\n logger.debug(\"%s failed in No. %d attempt\", func, attempt)\n import traceback\n import time\n logger.debug(traceback.format_exc())\n time.sleep(interval)\n else:\n raise # End of retry. 
Re-raise the exception as-is.\n return _wrapped_func\n return _decorator", "async def wait_random(max_delay: int = 10) -> float:\n actual_delay: float = random.uniform(0, max_delay)\n await asyncio.sleep(actual_delay)\n return actual_delay", "def test_select_current_delay_factor():\n connection = FakeBaseConnection(global_delay_factor=4, fast_cli=False)\n assert connection.select_delay_factor(10) == 10", "def enqueue_retry(queue, retry=3):\n count = 0\n while count < retry:\n try:\n yield WorkerQueues.get(queue)\n except (OSError, BusyLoadingError):\n time.sleep(2**count)\n count += 1\n break", "def extend_backoff(durations, max_sleep=20):\n durations.append(random.random() * min(max_sleep, (2**len(durations) - 1)))", "def retry(action, attempts=5, sleeptime=60, max_sleeptime=5 * 60,\n sleepscale=1.5, jitter=1, retry_exceptions=(Exception,),\n cleanup=None, args=(), kwargs={}, log_args=True):\n assert callable(action)\n assert not cleanup or callable(cleanup)\n\n action_name = getattr(action, '__name__', action)\n if log_args and (args or kwargs):\n log_attempt_args = (\"retry: calling %s with args: %s,\"\n \" kwargs: %s, attempt #%d\",\n action_name, args, kwargs)\n else:\n log_attempt_args = (\"retry: calling %s, attempt #%d\",\n action_name)\n\n if max_sleeptime < sleeptime:\n log.debug(\"max_sleeptime %d less than sleeptime %d\",\n max_sleeptime, sleeptime)\n\n n = 1\n for _ in retrier(attempts=attempts, sleeptime=sleeptime,\n max_sleeptime=max_sleeptime, sleepscale=sleepscale,\n jitter=jitter):\n try:\n logfn = log.info if n != 1 else log.debug\n logfn_args = log_attempt_args + (n, )\n logfn(*logfn_args)\n return action(*args, **kwargs)\n except retry_exceptions:\n log.debug(\"retry: Caught exception: \", exc_info=True)\n if cleanup:\n cleanup()\n if n == attempts:\n log.info(\"retry: Giving up on %s\", action_name)\n raise\n continue\n finally:\n n += 1", "async def wait_random(max_delay: int = 10) -> float:\n delay = random.uniform(0, max_delay)\n await asyncio.sleep(delay)\n return delay", "def delay():\r\n time.sleep(2)", "def retry(times: int, except_callback: Optional[Callable[..., Any]] = None):\n\n def wrap(func):\n @wraps(func)\n def retry_it(*args, **kwargs):\n nonlocal times\n if times < 0: # forever\n times = 1 << 32\n\n for i in range(1, times + 1):\n try:\n r = func(*args, **kwargs)\n return r\n except Exception as err:\n if except_callback is not None:\n except_callback(err, i)\n\n if i == times:\n raise err\n\n return retry_it\n\n return wrap", "async def wait_random(max_delay: int = 10) -> float:\n delay = random.uniform(0, max_delay)\n await asyncio.sleep(delay)\n\n return delay", "def retrier(attempts=5, sleeptime=10, max_sleeptime=300, sleepscale=1.5, jitter=1):\n jitter = jitter or 0 # py35 barfs on the next line if jitter is None\n if jitter > sleeptime:\n # To prevent negative sleep times\n raise Exception('jitter ({}) must be less than sleep time ({})'.format(jitter, sleeptime))\n\n sleeptime_real = sleeptime\n for _ in range(attempts):\n log.debug(\"attempt %i/%i\", _ + 1, attempts)\n\n yield sleeptime_real\n\n if jitter:\n sleeptime_real = sleeptime + random.uniform(-jitter, jitter)\n # our jitter should scale along with the sleeptime\n jitter = jitter * sleepscale\n else:\n sleeptime_real = sleeptime\n\n sleeptime *= sleepscale\n\n if sleeptime_real > max_sleeptime:\n sleeptime_real = max_sleeptime\n\n # Don't need to sleep the last time\n if _ < attempts - 1:\n log.debug(\"sleeping for %.2fs (attempt %i/%i)\", sleeptime_real, _ + 1, attempts)\n 
time.sleep(sleeptime_real)", "def getRetryCount():\n return int(webapp2.get_request().headers.get('X-Appengine-TaskRetryCount', 0))", "def wait(delay=2):\n time.sleep(delay)", "def default_delay(self):\n return DEFAULT_DELAY", "def retry_after(self):\n return self._retry_after", "def wait_until(condition, delay, max_attempts):\n attempt = 0\n while not condition() and attempt < max_attempts:\n attempt += 1\n time.sleep(delay)\n\n if attempt >= max_attempts:\n raise Exception(\"Condition is still False after {} attempts.\".format(max_attempts))", "def auto_retry(fun):\n\n @functools.wraps(fun)\n def decorated(instance, *args, **kwargs):\n \"\"\"Wrapper around a decorated function.\"\"\"\n cfg = instance._retry_config\n remaining_tries = cfg.retry_attempts\n current_wait = cfg.retry_wait\n retry_backoff = cfg.retry_backoff\n last_error = None\n\n while remaining_tries >= 0:\n try:\n return fun(instance, *args, **kwargs)\n except socket.error as e:\n last_error = e\n instance._retry_logger.warning('Connection failed: %s', e)\n\n remaining_tries -= 1\n if remaining_tries == 0:\n # Last attempt\n break\n\n # Wait a bit\n time.sleep(current_wait)\n current_wait *= retry_backoff\n\n # All attempts failed, let's raise the last error.\n raise last_error\n\n return decorated", "def connect_retry_interval(self) -> int:\n return pulumi.get(self, \"connect_retry_interval\")", "def wait_for_kafka_connection(delay=5):\n while True:\n try:\n kafka = KafkaProducer(bootstrap_servers=KAFKA_BROKERS)\n LOGGER.info('Connection to kafka cluster established')\n kafka.close()\n break\n except:\n LOGGER.error('Can not connect to kafka cluster')\n time.sleep(delay)" ]
[ "0.68163663", "0.67791754", "0.6515797", "0.64622295", "0.64621955", "0.6419269", "0.6208343", "0.6206229", "0.61321574", "0.60527027", "0.60336995", "0.6022903", "0.594526", "0.5920799", "0.58964133", "0.5895078", "0.5889684", "0.5877148", "0.5873413", "0.58332604", "0.5820755", "0.579556", "0.57905287", "0.57700413", "0.575844", "0.57461435", "0.5746022", "0.57102484", "0.5704346", "0.56945366", "0.56938654", "0.5675756", "0.5672719", "0.5643842", "0.5642079", "0.5623642", "0.5617596", "0.5616433", "0.55953395", "0.55878145", "0.5579193", "0.55732036", "0.5565798", "0.55506694", "0.55423117", "0.55416405", "0.55359685", "0.5528022", "0.55170333", "0.5511304", "0.55095935", "0.55094695", "0.55085385", "0.5499867", "0.5488797", "0.5471203", "0.5465653", "0.5434102", "0.54306734", "0.54306734", "0.54144835", "0.54116994", "0.5400306", "0.5388235", "0.5371071", "0.5366387", "0.5363892", "0.5358639", "0.5356065", "0.5340602", "0.533758", "0.5333721", "0.53181964", "0.53170806", "0.5309279", "0.52811944", "0.5269926", "0.52646714", "0.52644455", "0.52592504", "0.5253911", "0.5241211", "0.524053", "0.5237464", "0.5227023", "0.522653", "0.5210372", "0.5208136", "0.5202759", "0.518968", "0.5185257", "0.51799285", "0.5177741", "0.5174514", "0.5162429", "0.5140063", "0.5139289", "0.51309836", "0.5121584", "0.51105636", "0.510954" ]
0.0
-1
Start twisted event loop and the fun should begin... brokerTimeout how long to wait for a broker a negative number upon failure. Otherwise, it never returns.
def start(config, brokerTimeout = 60.0): manager = multiprocessing.Manager() serverUpEvent = manager.Event() broker = multiprocessing.Process(target=startSTOMPBroker, args=(config,serverUpEvent)) broker.daemon = True broker.name = 'STOMP-Broker' broker.start() serverUpEvent.wait(brokerTimeout) if not serverUpEvent.is_set(): logger.fatal("Broker not available after %.1f seconds. Giving up", brokerTimeout) return -1 #host side logic host = config.get('Broker', 'host') port = int(config.get('Broker', 'port')) username = config.get('Broker', 'username') password = config.get('Broker', 'password') hostEngine = HostStompEngine(config) stompProtocolFactory = StompProtocolFactory(hostEngine, username, password) HostXMLRPCService(config).makeEngineAccesible(hostEngine) reactor.connectTCP(host, port, stompProtocolFactory) reactor.run()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def on_running():\n log.msg('reactor_loop Starting')\n try:\n conn = client.connect(reactor)\n si446x_do = Si446xComponent(conn)\n conn.addCallback(si446x_do.start)\n conn.addErrback(si446x_do.on_error)\n except error.DBusException, e:\n log.msg('reactor_loop Setup Error: {}'.format(e))\n reactor.stop()", "def start(self):\n if not self._connected:\n self._client.connect(self._addr, port=self._port, keepalive=60, bind_address=\"\")\n self._client.loop_start()\n self._connected = True\n logger.info(\"Connection with MQTT Broker at %s:%d estabilished.\", self._addr, self._port)", "def test_main():\n\n listener = Qe2ServerListener('', 4000)\n reactor.run()", "def reactor_loop():\n def on_running():\n \"\"\"\n called when the twisted reactor is running\n \"\"\"\n log.msg('reactor_loop Starting')\n try:\n conn = client.connect(reactor)\n si446x_do = Si446xComponent(conn)\n conn.addCallback(si446x_do.start)\n conn.addErrback(si446x_do.on_error)\n except error.DBusException, e:\n log.msg('reactor_loop Setup Error: {}'.format(e))\n reactor.stop()\n\n signal.signal(signal.SIGINT, SIGINT_CustomEventHandler)\n signal.signal(signal.SIGHUP, SIGINT_CustomEventHandler)\n reactor.callWhenRunning(on_running)\n reactor.run()", "def connect(self):\n\t\tself.printed_sub = False\n\t\tself.client.connect(BROKER)\n\t\tself.client.loop_forever()", "async def async_start(self) -> None:\n\n self._shutdown = False\n\n # Start up the LifeSOS interface\n self._baseunit.start()\n\n # Connect to the MQTT broker\n self._mqtt_was_connected = False\n if self._config.mqtt.uri.port:\n self._mqtt.connect_async(\n self._config.mqtt.uri.hostname,\n self._config.mqtt.uri.port,\n keepalive=Translator.KEEP_ALIVE)\n else:\n self._mqtt.connect_async(\n self._config.mqtt.uri.hostname,\n keepalive=Translator.KEEP_ALIVE)\n\n # Start processing MQTT messages\n self._mqtt.loop_start()", "def start_call_back_loop(loop: asyncio.AbstractEventLoop) -> None:\n asyncio.set_event_loop(loop)\n loop.run_forever()", "def start(self) -> None:\n conn_manager = ConnectionManager(broker_host=self.broker_host, queue=self.queue)\n channel = conn_manager.start_channel()\n channel.basic_consume(queue=self.queue, on_message_callback=self.callback)\n\n try:\n print(\"PV Simulator...\")\n channel.start_consuming()\n except KeyboardInterrupt:\n pass", "def test_runsUntilAsyncCallback(self):\n timePassed = []\n def main(reactor):\n finished = defer.Deferred()\n reactor.callLater(1, timePassed.append, True)\n reactor.callLater(2, finished.callback, None)\n return finished\n r = _FakeReactor()\n exitError = self.assertRaises(\n SystemExit, task.react, main, _reactor=r)\n self.assertEqual(0, exitError.code)\n self.assertEqual(timePassed, [True])\n self.assertEqual(r.seconds(), 2)", "def test_eventloop_api_reactor(self):\n from twisted.internet import reactor\n _main.no_setup()\n self.assertIdentical(_main._reactor, reactor)", "def run_reactor(self):\n self.reactor.run()", "def init(\n self,\n ) -> bool:\n success = True\n try:\n self.client = mqtt.Client(client_id=\"Draco\", protocol=mqtt.MQTTv5)\n self.client.on_connect = self.on_connect\n self.client.on_message = self.on_message\n self.client.connect(\n host=self._config[\"broker_ip\"], port=self._config[\"broker_port\"]\n )\n self.client.loop_start()\n\n except Exception as error:\n print(f\"Process {self._pid} - \" + repr(error))\n success = False\n return success", "def _start_in_thread(self):\n return spawn_waitready(self._listen, self.start)[0]", "def __call__(self, timeout=None, *args, **kwargs):\n if 
timeout:\n self.timeout = timeout\n started_observer = self.start(timeout, *args, **kwargs)\n if started_observer:\n return started_observer.await_done(*args, **kwargs)\n # TODO: raise ConnectionObserverFailedToStart", "def start(self):\n self._connect()\n self._init_exchange()\n self._init_queue()\n self._bind_queue()", "def _mqttActor(self) -> bool:\n\t\tself.isStopped = False\n\t\tself.messageHandler and self.messageHandler.logging(self.mqttClient, logging.INFO, 'MQTT: client started')\n\t\twhile not self.isStopped:\n\t\t\tself.mqttClient.loop_forever()\t# Will return when disconnect() is called\n\t\tif self.messageHandler:\n\t\t\tself.messageHandler.onShutdown(self)\n\t\treturn True", "def run(self) -> None:\n\t\tself.messageHandler and self.messageHandler.logging(self.mqttClient, logging.DEBUG, f'MQTT: client name: {self.clientID}')\n\t\tself.mqttClient = mqtt.Client(client_id=self.clientID, clean_session=False if self.clientID else True)\t# clean_session=False is defined by TS-0010\n\n\t\t# Enable SSL\n\t\tif self.useTLS:\n\t\t\tself.mqttClient.tls_set(ca_certs=self.caFile, cert_reqs=ssl.CERT_REQUIRED if self.verifyCertificate else ssl.CERT_NONE)\n\n\t\t# Set username/password\n\t\tif self.username and self.password:\n\t\t\tself.mqttClient.username_pw_set(self.username, self.password)\n\t\t\n\t\tself.mqttClient.on_connect \t\t= self._onConnect\n\t\tself.mqttClient.on_disconnect\t= self._onDisconnect\n\t\tself.mqttClient.on_log\t\t\t= self._onLog\n\t\tself.mqttClient.on_subscribe\t= self._onSubscribe\n\t\tself.mqttClient.on_unsubscribe\t= self._onUnsubscribe\n\t\tself.mqttClient.on_message\t\t= self._onMessage\n\n\t\ttry:\n\t\t\tself.messageHandler and self.messageHandler.logging(self.mqttClient, logging.DEBUG, f'MQTT: connecting to host:{self.address}, port:{self.port}, keepalive: {self.keepalive}, bind: {self.bindIF}')\n\t\t\tself.mqttClient.connect(host=self.address, port=self.port, keepalive=self.keepalive, bind_address=self.bindIF)\n\t\texcept Exception as e:\n\t\t\tif self.messageHandler:\n\t\t\t\tself.messageHandler.logging(self.mqttClient, logging.ERROR, f'MQTT: cannot connect to broker: {e}')\n\t\t\t\tself.messageHandler.onError(self, -1)\n\n\t\t# Actually start the actor to run the MQTT client as a thread\n\t\tself.actor = BackgroundWorkerPool.newActor(self._mqttActor, name='MQTTClient').start()", "async def async_connect(self) -> None:\n # pylint: disable-next=import-outside-toplevel\n import paho.mqtt.client as mqtt\n\n result: int | None = None\n try:\n result = await self.hass.async_add_executor_job(\n self._mqttc.connect,\n self.conf[CONF_BROKER],\n self.conf.get(CONF_PORT, DEFAULT_PORT),\n self.conf.get(CONF_KEEPALIVE, DEFAULT_KEEPALIVE),\n )\n except OSError as err:\n _LOGGER.error(\"Failed to connect to MQTT server due to exception: %s\", err)\n\n if result is not None and result != 0:\n _LOGGER.error(\n \"Failed to connect to MQTT server: %s\", mqtt.error_string(result)\n )\n\n self._mqttc.loop_start()", "def test_reactor_stop_unblocks_EventualResult(self):\n program = \"\"\"\\\nimport os, threading, signal, time, sys\n\nfrom twisted.internet.defer import Deferred\nfrom twisted.internet import reactor\n\nimport crochet\ncrochet.setup()\n\n@crochet.run_in_reactor\ndef run():\n reactor.callLater(0.1, reactor.stop)\n return Deferred()\n\ner = run()\ntry:\n er.wait(timeout=10)\nexcept crochet.ReactorStopped:\n sys.exit(23)\n\"\"\"\n process = subprocess.Popen([sys.executable, \"-c\", program],\n cwd=crochet_directory)\n self.assertEqual(process.wait(), 23)", "def 
test_reactor_stop_unblocks(self):\n program = \"\"\"\\\nimport os, threading, signal, time, sys\n\nfrom twisted.internet.defer import Deferred\nfrom twisted.internet import reactor\n\nimport crochet\ncrochet.setup()\n\n@crochet.%s\ndef run():\n reactor.callLater(0.1, reactor.stop)\n return Deferred()\n\ntry:\n er = run()\nexcept crochet.ReactorStopped:\n sys.exit(23)\n\"\"\" % (self.DECORATOR_CALL, )\n process = subprocess.Popen([sys.executable, \"-c\", program],\n cwd=crochet_directory)\n self.assertEqual(process.wait(), 23)", "def start():\n server = current_server()\n logger.info('Starting Flexx event loop.')\n server.start()", "def run(self):\n\n if self._quit_time is not None:\n self.schedule_quit_time(self._quit_time)\n\n # Run the broker until it, and thus the whole scale client, have a stop event fully propagated\n self.__broker.run()", "def test_connect_success():\n\n t = Thread(target=setup_socket)\n t.start()\n\n data_sender = DataSender('127.0.0.1', 12345)\n server_response = data_sender.notify('test')\n\n assert server_response == 'ok'\n\n data_sender.close()\n t.join()", "def start(self):\n if not self._host:\n print(\"No host selected, starting local instance.\")\n self._database = quartjes.controllers.database.default_database()\n self._stock_exchange = quartjes.controllers.stock_exchange2.StockExchange2()\n else:\n reactor.callLater(0, self._connect) #@UndefinedVariable\n if not reactor.running: #@UndefinedVariable\n self._reactor_thread = ClientConnector._ReactorThread()\n self._reactor_thread.start()\n self._factory.wait_for_connection()\n\n self._database = self.get_service_interface(\"database\")\n self._stock_exchange = self.get_service_interface(\"stock_exchange\")", "def acqstart(self):\n return 0", "async def twisted_sleep(delay: float, twisted_reactor: \"SygnalReactor\") -> None:\n deferred: Deferred[None] = Deferred()\n twisted_reactor.callLater(delay, deferred.callback, None)\n await deferred", "async def server_loop(host=None, port=23, evt=None, protocol_factory=TelnetServer, shell=None, log=None, **kwds):\n \"\"\"\n :param float connect_maxwait: If the remote end is not compliant, or\n otherwise confused by our demands, the shell continues anyway after the\n greater of this value has elapsed. 
A client that is not answering\n option negotiation will delay the start of the shell by this amount.\n \"\"\"\n\n protocol_factory = protocol_factory or TelnetServer\n l = await anyio.create_tcp_listener(local_host=host, local_port=port)\n log = log or logging.getLogger(__name__)\n if shell is None:\n async def shell(_s):\n while True:\n await anyio.sleep(99999)\n async def serve(s):\n async with protocol_factory(s, log=log, **kwds) as stream:\n await shell(stream)\n\n log.info('Server ready on {0}:{1}'.format(host, port))\n if evt is not None:\n evt.set()\n await l.serve(serve)", "def run(self, timeout=None):\n self.factory.manager.run()\n\n wait_connect = threading.Event()\n self.factory.on_ready(lambda _: wait_connect.set())\n\n if not wait_connect.wait(timeout):\n raise Exception('Failed to connect to ROS')", "def start_tor_with_timer(reactor, config, control_port, tor_binary, data_dir,\n bridges, timeout):\n error_msg = \"Bootstrapping has exceeded the timeout limit...\"\n with_timeout = deferred_timeout(timeout, e=error_msg)(start_tor)\n try:\n setup = yield with_timeout(reactor, config, control_port, tor_binary,\n data_dir, process_cb=setup_done,\n process_eb=setup_fail)\n except TimeoutError, te:\n log.err(te)\n defer.returnValue(None)\n #except Exception, e:\n # log.err(e)\n # defer.returnValue(None)\n else:\n state = yield remove_public_relays(setup, bridges)\n defer.returnValue(state)", "def run(self):\n\n if reactor.running:\n return\n\n self._thread = threading.Thread(target=reactor.run, args=(False,))\n self._thread.daemon = True\n self._thread.start()", "def start_task():\n get_results_from_message_queue()\n test_all_servers_connection()", "def run_client(self, event_loop, irc_client):\n # Deliberately written in \"synchronous\" style with run_until_complete()\n # instead of await because async generators don't work in Python 3.5.\n with self.mock_open_connection():\n # Start the client\n run_fut = event_loop.create_task(irc_client.run())\n event_loop.run_until_complete(irc_client.connected.wait())\n # Allow the test to run\n yield\n # Cleanly end the read loop and wait for client to exit\n irc_client.disconnect()\n event_loop.run_until_complete(run_fut)", "def test_run_in_reactor_thread(self):\n myreactor = FakeReactor()\n c = EventLoop(lambda: myreactor, lambda f, g: None)\n c.no_setup()\n calls = []\n\n @c.run_in_reactor\n def func(a, b, c):\n self.assertTrue(myreactor.in_call_from_thread)\n calls.append((a, b, c))\n\n func(1, 2, c=3)\n self.assertEqual(calls, [(1, 2, 3)])", "def start(self) -> None:\n loop = asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n self.wserver = websockets.serve(self.__producer_handler, port=self.port, loop=loop)\n try:\n # run server forever\n self.server = asyncio.get_event_loop()\n self.server.run_until_complete(self.wserver)\n self.server.run_forever()\n except Exception:\n self.close()\n\n loop.run_forever()", "def run(self):\n\n if reactor.running:\n misc.formatted_print('RosBridgeWebSockComms\\t|\\tTwisted reactor is already running', None, 'error')\n return\n\n self._thread = threading.Thread(target=reactor.run, args=(False,))\n self._thread.daemon = True\n self._thread.start()", "async def do_startup(config, output_fname, event_loop):\n\n log.info(\"Creating AMQP receive channel ...\")\n rcv_trans, rcv_proto, rcv_chan = await make_amqp_channel(config)\n\n log.info(\"Setting up event exchange ...\")\n await rcv_chan.exchange_declare(\n exchange_name=config.event_exchange, type_name=\"fanout\"\n )\n\n logger = 
EventLogger(config, output_fname, event_loop)\n\n for signame in [\"SIGINT\", \"SIGTERM\", \"SIGHUP\"]:\n signum = getattr(signal, signame)\n handler = partial(term_handler, signame=signame, loop=event_loop)\n event_loop.add_signal_handler(signum, handler)\n\n log.info(\"Setting up AMQP receiver ...\")\n bm_callback = partial(handle_broker_message, logger)\n await make_receiver_queue(bm_callback, rcv_chan, config, \"\")\n\n return rcv_trans, rcv_proto", "async def run(self) -> None:\n await self._mqtt.connect()\n LOGGER.info(\"Connected to MQTT Broker\")\n\n async with ClientSession() as websession:\n try:\n await self._setup_bridge(websession)\n except aiohue.errors.Unauthorized:\n LOGGER.error(\"Bridge rejected username. Please use --discover\")\n self.halt()\n return\n await self._publish_bridge_status()\n await self.main(websession)\n\n LOGGER.info(\"Disconnecting from MQTT Broker\")\n await self._publish_bridge_status(online=False)\n await self._mqtt.disconnect()", "def test_connect_deferred(self):\n er = EventualResult(None, None)\n self.assertRaises(TimeoutError, er.wait, 0)\n d = Deferred()\n er._connect_deferred(d)\n self.assertRaises(TimeoutError, er.wait, 0)\n d.callback(123)\n self.assertEqual(er.wait(0.1), 123)", "async def twisted_sleep(delay, twisted_reactor):\n deferred: Deferred[None] = Deferred()\n twisted_reactor.callLater(delay, deferred.callback, None)\n await deferred", "async def test_startup_errors_should_halt_ready():\n network = Network()\n network.set_config(url=\"http://nowhere.at.all.localdomain:6865/\", connect_timeout=1)\n\n client = network.aio_party(\"SillyParty\")\n\n # place the ready() call BEFORE the run\n f = ensure_future(wait_for(client.ready(), 2))\n\n try:\n await network.aio_run(keep_open=False)\n except Exception:\n pass\n\n try:\n await f\n raise AssertionError(\n \"client.ready() should not have ended because there is nothing to connect to\"\n )\n except asyncio.TimeoutError:\n raise AssertionError(\n \"client.ready() did not abort with an appropriate exception and was killed because \"\n \"it was taking too long\"\n )\n except CancelledError:\n LOG.info(\"Successfully terminated because ready() was cancelled.\")", "def start(self):\n l.debug(\"Initializing the MQTT connection...\")\n self._mqtt_client.connect(self.domain, self.port, keepalive=30)\n\n # Starts a new thread that handles mqtt protocol and calls us back via callbacks\n l.debug(\"(Re)Starting the MQTT loop.\")\n self._mqtt_client.loop_stop(True)\n self._mqtt_client.loop_start()\n self.connect_event.wait()\n\n # Subscribe to the corresponding topics ...\n self.device_topic = build_device_request_topic(self.target_device_uuid)\n self.client_response_topic = build_client_response_topic(self.user_id, self._app_id)\n self.user_topic = build_client_user_topic(self.user_id)\n\n l.info(f\"Subscribing to topic: {self.device_topic}\")\n self._mqtt_client.subscribe(self.device_topic)\n self.subscribe_event.wait()\n self.subscribe_event.clear()\n\n l.info(f\"Subscribing to topic: {self.client_response_topic}\")\n self._mqtt_client.subscribe(self.client_response_topic)\n self.subscribe_event.wait()\n self.subscribe_event.clear()\n\n l.info(f\"Subscribing to topic: {self.user_topic}\")\n self._mqtt_client.subscribe(self.user_topic)\n self.subscribe_event.wait()\n self.subscribe_event.clear()", "def begin_handling(self, butterfly: Butterfly):\n res = self.net.handle(butterfly)\n return self._event_loop.create_task(res)", "def _connect(self):\n #print(\"Connecting...\")\n self._connection = 
reactor.connectTCP(self.host, self.port, self.factory) #@UndefinedVariable", "def startReactor(self):\n reactor = self.options.get(RunnerOptions.reactor)\n if reactor is None:\n reactor = defaultReactor\n reactor.install()\n self.options[RunnerOptions.reactor] = reactor\n\n reactor.callWhenRunning(self.whenRunning)\n\n self.log.info(\"Starting reactor...\")\n reactor.run()", "def start(self):\n if self._pumping:\n return\n self._pumping = True\n self._global_reactor.callLater(0, self._pump_once)", "def connect(self):\n #print(\"try to connect connect\")\n if self._loop is not None and not self._loop.ready():\n #print(\"RE\")\n raise RuntimeError(\"Already (auto-re)connecting\")\n self._loop = gevent.spawn(self._run)", "def run_forever(self):\n reactor.run()", "def run_forever(self):\n reactor.run()", "def run(self):\n try:\n\n self._connection = self.connect()\n self._connection.ioloop.start()\n except (KeyboardInterrupt, SystemExit):\n self.stop()\n except Exception as e:\n logger.warn(\"Exception: %s\", str(e))\n logger.warn(\"Exception caught on rabbit consumer for process: %s with consumer id %s\", threading.current_thread, str(self.consumer_id))\n self.internal_error_queue.put(self.consumer_id)", "async def start(self) -> None:\n while self.producer is None:\n try:\n self.producer = self._producer_factory(\n bootstrap_servers=self.bootstrap_servers,\n ssl_cafile=self.ssl_cafile,\n ssl_certfile=self.ssl_certfile,\n ssl_keyfile=self.ssl_keyfile,\n security_protocol='SSL',\n value_serializer=lambda v: json.dumps(v).encode('utf-8'),\n )\n except kafka.errors.NoBrokersAvailable:\n await trio.sleep(self.connect_interval_secs)\n else:\n logger.info('kafka-ready: %s', self.producer)\n async with self.has_producer:\n self.has_producer.notify_all()", "def connectionMade (self) :\r\n self.state = 'wait_hello'\r\n self.handshake_timeout = reactor.callLater(HANDSHAKE_TIMEOUT, self.err, \"handshake timeout expired\")\r\n self.log(\"connected\")", "def test_connectEvent(self):\n reactor = self.buildReactor()\n\n self.listen(reactor, ServerFactory.forProtocol(Protocol))\n connected = []\n\n class CheckConnection(Protocol):\n def connectionMade(self):\n connected.append(self)\n reactor.stop()\n\n clientFactory = Stop(reactor)\n clientFactory.protocol = CheckConnection\n\n needsRunningReactor(reactor, lambda: self.connect(reactor, clientFactory))\n\n reactor.run()\n\n self.assertTrue(connected)", "def test_runsUntilSyncCallback(self):\n def main(reactor):\n return defer.succeed(None)\n r = _FakeReactor()\n exitError = self.assertRaises(\n SystemExit, task.react, main, _reactor=r)\n self.assertEqual(0, exitError.code)\n self.assertEqual(r.seconds(), 0)", "def _run(self):\n #print(\"try to connect run\")\n while True:\n self._connect()\n while not self.connected and self.auto_retry is not None:\n gevent.sleep(self.auto_retry)\n self._connect()\n if self.connected:\n self.run()\n if self.auto_retry is None:\n break", "def test_startService(self):\r\n self.pm.addProcess(\"foo\", [\"foo\"])\r\n # Schedule the process to start\r\n self.pm.startService()\r\n # advance the reactor to start the process\r\n self.reactor.advance(0)\r\n self.assertTrue(\"foo\" in self.pm.protocols)", "def main():\n #define the callbacks\n mqttc.on_message = on_message\n mqttc.on_connect = on_connect\n mqttc.on_publish = on_publish\n mqttc.on_subscribe = on_subscribe\n \n mqttc.will_set(willtopic, payload=\"offline\", qos=0, retain=True)\n mqttc.reconnect_delay_set(delay=3, delay_max=30, exponential_backoff=True)\n \n try:\n 
mqttc.connect(\"mqtt.localdomain\", 1883, 60)\n except Exception, e:\n print(\"XBMC MQTT -- MQTT connection failed: %s\" % (str(e)))\n sys.exit(1)\n \n while True:\n try:\n mqttc.loop_forever()\n except socket.error:\n print(\"XBMC MQTT --MQTT server disconnected; sleeping\")\n time.sleep(5)\n xbmc.executebuiltin('Notification(Error, mqtt disconnected pls chk,5000,'+mqtt_logo+')\\'') \n except:\n raise", "def start(self):\n if self._callable:\n self._is_running = True\n self._run_client()", "def do_start(self, args) :\r\n if not self.wait2start:\r\n Thread(target=self.start_loop).start()\r\n self.wait2start = True\r\n else:\r\n self.__Logger.warn(\"Waiting for simulators to be ready. To force start, type \\\"forcestart\\\"\")", "def run(self):\n self.loop.spawn_callback(self.main)\n self.loop.start()\n if self.exc_info:\n six.reraise(*self.exc_info)", "def _run_loop(self):\n loop = asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n\n self._server = websockets.serve(self._log_message, self._host, self._port)\n\n loop.run_until_complete(self._server)\n loop.run_forever()", "def use_twisted(app):\n activity.EventLoop <<= activity.TwistedEventLoop\n REACTOR_INIT.notify(app)", "def start_actor(actor: Actor):\n actor.start()\n actor.connect_control()\n actor.connect_data()\n actor.send_control(StartMessage('system'))\n _ = actor.receive_control(2000)", "def start(self):\n assert(self._cbs is not None)\n self._as.start() # start the server", "def run(on_create):\n from twisted.internet import reactor\n\n # multiple, configurable transports, either via dict-like config, or\n # from native Twisted endpoints\n transports = [\n {\n \"type\": \"websocket\",\n \"url\": \"ws://127.0.0.1:8080/ws\"\n }\n ]\n\n # a connection connects and automatically reconnects WAMP client\n # transports to a WAMP router. 
A connection has a listener system\n # where user code can hook into different events : on_join\n connection = Connection(on_create, realm='public',\n transports=transports, reactor=reactor)\n\n # the following returns a deferred that fires when the connection is\n # finally done: either by explicit close by user code, or by error or\n # when stop reconnecting\n done = connection.connect()\n\n def finish(res):\n print(res)\n reactor.stop()\n\n done.addBoth(finish)\n\n reactor.run()", "def start(self):\n\t\tif self._send_greenlet is None:\n\t\t\tself._send_greenlet = gevent.spawn(self._send_loop)", "def start_tor(reactor, config, control_port, tor_binary, data_dir,\n report=None, progress=updates,\n process_cb=None, process_eb=None):\n try:\n from functools import partial\n from twisted.internet.endpoints import TCP4ClientEndpoint\n from ooni.lib.txtorcon import TorProtocolFactory\n from ooni.lib.txtorcon import TorProcessProtocol\n except ImportError, ie:\n log.err(ie)\n\n ## TODO: add option to specify an already existing torrc, which\n ## will require prior parsing to enforce necessary lines\n (torrc, data_dir, to_delete) = write_torrc(config, data_dir)\n\n log.msg(\"Starting Tor ...\")\n log.msg(\"Using the following as our torrc:\\n%s\" % config.create_torrc())\n if report is None:\n report = {'torrc': config.create_torrc()}\n else:\n report.update({'torrc': config.create_torrc()})\n\n end_point = TCP4ClientEndpoint(reactor, 'localhost', control_port)\n connection_creator = partial(end_point.connect, TorProtocolFactory())\n process_protocol = TorProcessProtocol(connection_creator, progress)\n process_protocol.to_delete = to_delete\n\n if process_cb is not None and process_eb is not None:\n process_protocol.connected_cb.addCallbacks(process_cb, process_eb)\n\n reactor.addSystemEventTrigger('before', 'shutdown',\n partial(delete_files_or_dirs, to_delete))\n try:\n transport = reactor.spawnProcess(process_protocol,\n tor_binary,\n args=(tor_binary,'-f',torrc),\n env={'HOME': data_dir},\n path=data_dir)\n transport.closeStdin()\n except RuntimeError, e:\n log.err(\"Starting Tor failed:\")\n process_protocol.connected_cb.errback(e)\n except NotImplementedError, e:\n url = \"http://starship.python.net/crew/mhammond/win32/Downloads.html\"\n log.msg(\"Running bridget on Windows requires pywin32: %s\" % url)\n process_protocol.connected_cb.errback(e)\n\n return process_protocol.connected_cb", "async def _async_start_addon(self) -> None:\n addon_manager: AddonManager = get_addon_manager(self.hass)\n\n try:\n await addon_manager.async_schedule_start_addon()\n # Sleep some seconds to let the add-on start properly before connecting.\n for _ in range(ADDON_SETUP_TIMEOUT_ROUNDS):\n await asyncio.sleep(ADDON_SETUP_TIMEOUT)\n try:\n if not (ws_address := self.ws_address):\n discovery_info = await self._async_get_addon_discovery_info()\n ws_address = self.ws_address = build_ws_address(\n discovery_info[\"host\"], discovery_info[\"port\"]\n )\n await validate_input(self.hass, {CONF_URL: ws_address})\n except (AbortFlow, CannotConnect) as err:\n LOGGER.debug(\n \"Add-on not ready yet, waiting %s seconds: %s\",\n ADDON_SETUP_TIMEOUT,\n err,\n )\n else:\n break\n else:\n raise FailedConnect(\"Failed to start Matter Server add-on: timeout\")\n finally:\n # Continue the flow after show progress when the task is done.\n self.hass.async_create_task(\n self.hass.config_entries.flow.async_configure(flow_id=self.flow_id)\n )", "def attach_message_bus(self):\n print(\"Connecting to Mycroft message bus\")\n 
self.client = MessageBusClient()\n print(\"Calling client.run_in_thread()\")\n try:\n self.client.run_in_thread()\n except Exception as e:\n print(\"ERROR: run_in_thread() failed - is Mycroft running?\")\n sys.exit(1)", "def startSTOMPBroker(config, serverUpEvent, tries=-1, delay=1, backoff=1.5):\n #stomp broker\n mtries = tries\n mdelay = delay\n coilserver = None\n from coilmq.config import config as coilconfig\n if config.has_section('coilmq'):\n for k,v in config.items('coilmq'):\n coilconfig.set('coilmq', k, v)\n logger.debug(\"Set %s to %s for coilmq config.\" % (k,v))\n while True:\n try:\n coilserver = coilmq.start.server_from_config(coilconfig)\n logger.info(\"Stomp server listening on %s:%s\" % \\\n coilserver.server_address)\n serverUpEvent.set()\n coilserver.serve_forever()\n except IOError as ex:\n logger.error(\"Exception while starting coilmq broker: '%s'\", ex)\n if mtries != 0: \n logger.debug(\"Retrying coilmq startup in %.1f seconds...\", mdelay)\n time.sleep(mdelay)\n mdelay *= backoff\n mtries -= 1\n else:\n logger.debug(\"Ran out of trials (tried %d times) for coilmq startup. Giving up.\", tries)\n break\n finally:\n if coilserver: coilserver.server_close()", "def main():\n usage = \"usage: %prog [options] channels\"\n parser = OptionParser(usage=usage)\n\n (options, args) = parser.parse_args()\n\n if len(args) < 1:\n parser.print_help()\n return 2\n\n # do stuff\n # This runs the program in the foreground. We tell the reactor to connect\n # over TCP using a given factory, and once the reactor is started, it will\n # open that connection.\n reactor.connectTCP(HOST, PORT, MyFirstIRCFactory(args))\n # Since we're running in the foreground anyway, show what's happening by\n # logging to stdout.\n log.startLogging(sys.stdout)\n # And this starts the reactor running. This call blocks until everything is\n # done, because this runs the whole twisted mainloop.\n reactor.run()", "def start(self):\n if self._chan is not None:\n try:\n self._chan.start_consume()\n except ChannelError:\n log.info('Subscriber is already started')\n\n else:\n self.gl = spawn(self.listen)", "def __init__(self, reactor=None):\n self.Setup()\n self.ServiceEnabled = settings.SERVICE_ENABLED\n self.peer_zero_count = 0 # track the number of times PeerCheckLoop saw a Peer count of zero. Reset e.g. after 3 times\n self.connection_queue = []\n self.reactor = twisted_reactor\n self.incoming_server_running = False\n self.forced_disconnect_by_us = 0\n self.peers_connecting = 0\n\n # for testability\n if reactor:\n self.reactor = reactor", "def _wait_what(self, expected):\r\n \r\n self._msg_server(cb.WAITWHATSERVER % (expected))", "def start(self, autologin=True, autoreconnect=False):\n self.autologin = autologin\n self.autoreconnect = autoreconnect\n if self.loop.is_running():\n self.add_task(self._handler())\n logger.info(\n \"The client's event loop was already running. 
\"\n \"The client will run as a new task on the loop.\"\n )\n return True\n else:\n self.loop.run_until_complete(self._handler())\n return False", "def run_starter(self, expect_to_fail=False):", "def connect(self):\n self.start()", "async def test_waiting_for_client_setup_fails(\n hass: HomeAssistant,\n mqtt_client_mock: MqttMockPahoClient,\n) -> None:\n hass.state = CoreState.starting\n await hass.async_block_till_done()\n\n entry = MockConfigEntry(\n domain=mqtt.DOMAIN,\n data={\"broker\": \"test-broker\"},\n state=ConfigEntryState.NOT_LOADED,\n )\n entry.add_to_hass(hass)\n\n async def _async_just_in_time_subscribe() -> Callable[[], None]:\n assert not await mqtt.async_wait_for_mqtt_client(hass)\n\n hass.async_add_job(_async_just_in_time_subscribe)\n assert entry.state == ConfigEntryState.NOT_LOADED\n\n # Simulate MQTT setup fails before the client would become available\n mqtt_client_mock.connect.side_effect = Exception\n assert not await hass.config_entries.async_setup(entry.entry_id)\n assert entry.state == ConfigEntryState.SETUP_ERROR", "def test_async_function(self):\n myreactor = FakeReactor()\n c = EventLoop(lambda: myreactor, lambda f, g: None)\n c.no_setup()\n calls = []\n\n @c.run_in_reactor\n async def go():\n self.assertTrue(myreactor.in_call_from_thread)\n calls.append(1)\n return 23\n\n self.assertEqual((go().wait(0.1), go().wait(0.1)), (23, 23))\n self.assertEqual(len(calls), 2)\n self.assertFalse(inspect.iscoroutinefunction(go))", "def test_defaultReactor(self):\n def main(reactor):\n self.passedReactor = reactor\n return defer.succeed(None)\n\n reactor = _FakeReactor()\n with NoReactor():\n installReactor(reactor)\n exitError = self.assertRaises(SystemExit, task.react, main, [])\n self.assertEqual(0, exitError.code)\n self.assertIs(reactor, self.passedReactor)", "def mqttloop(self): \n self.log.info(u\"==> Start MQTT loop\")\n while not self.stopplugin.isSet():\n self.stopplugin.wait(3)\n self.MQTTClient.disconnect()\n self.MQTTClient.loop_stop()", "def set_ready(self, timeout=None):\r\n\r\n # Send a message to the server, and wait for confirmation of receipt.\r\n success, reply = self._wait_for_reply(cb.CLIENTREADY, \\\r\n cb.CLIENTGOGOGO, timeout=timeout)\r\n \r\n return success", "async def test_waiting_for_client_timeout(\n hass: HomeAssistant,\n) -> None:\n hass.state = CoreState.starting\n await hass.async_block_till_done()\n\n entry = MockConfigEntry(\n domain=mqtt.DOMAIN,\n data={\"broker\": \"test-broker\"},\n state=ConfigEntryState.NOT_LOADED,\n )\n entry.add_to_hass(hass)\n\n assert entry.state == ConfigEntryState.NOT_LOADED\n # returns False after timeout\n assert not await mqtt.async_wait_for_mqtt_client(hass)", "def start(self):\n retries=0\n while retries<=self.RETRIES and self._running==False:\n try:\n print(\"Trying to start server\")\n self._s.bind((self.address, self._listenToPort))\n #self._s.listen(5) #not required for UDP\n self._running = True\n print(\"Running\")\n\n except Exception as e:\n #self.stop()\n if retries<self.RETRIES:\n print(\"starting server failed, retrying...\",e)\n sleep(1)\n else:\n print(\"Server Failed \",e)\n return False\n retries=retries+1\n\n\n self._listen=True\n self._t1.start() #_listenForDataThread\n return True", "def test_execute_check_tcp(delay):\n port = port_for.select_random()\n check = check_tcp(port)\n\n assert check() is False\n process = execute(\n [SERVICE, '--delay', str(delay), 'tcp', '--port', str(port)],\n [check_tcp(port)],\n timeout=1 + delay)\n assert check() is True\n assert process.poll() is None # 
Still running.\n process.kill()", "def test_add_channel_starts_loop(self):\n self.notifier.add_channel(Mock())\n self.notifier_start_mock.assert_called_once()", "def cb_test( self, ):\r\n # this shows how to run stuff in the helper -- call thru queue, post to queue\r\n self.post_to_queue( \"call\", self.helper_thread.test_test_ports , ( ) )", "def run(self):\n self.connect()\n self.run_forever()", "def _connect(self) -> None:\n if self._agent_thread.is_alive():\n raise ValueError(\"Agent already running.\")\n self._agent_thread.start()\n\n while not self._agent.runtime.is_running: # check agent completely running\n time.sleep(0.01)", "def run(self, event_loop):\n protocol_factory = lambda: self.PROTOCOL_CLS(self)\n coro = event_loop.create_server(protocol_factory, port=self.port)\n event_loop.run_until_complete(coro)", "def start(self):\n loop = aio.get_event_loop()\n\n if self._with_subscribers:\n # Start the server to listen to events\n self.registry = SubscriptionRegistry()\n server = self.registry.server\n xx = aio.ensure_future(server)\n\n if self._with_discovery:\n # Start the server to listen to new devices\n addrinfo = socket.getaddrinfo(UPNP_ADDR, None)[0]\n sock = socket.socket(addrinfo[0], socket.SOCK_DGRAM)\n # Allow multiple copies of this program on one machine\n sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n listen = loop.create_datagram_endpoint(\n partial(UPnP,loop,UPNP_ADDR,self._found_device,self.upnp),\n sock=sock\n )\n xx = aio.ensure_future(listen)\n\n if self._with_discovery or self._with_subscribers:\n xx = aio.ensure_future(self.real_start())", "def run_loop(self):\r\n server_log.info('Server now accepting client connections.')\r\n while not self.clients_done():\r\n asyncore.loop(timeout=config[\"server_timeout\"], count=config[\"server_loop_count\"])", "def _connect_later(self, wait_time):\n # Trivial function, but useful for unit testing\n self._io_loop.call_later(wait_time, self._connect, True)", "def run(self, addr='127.0.0.1', port=8888, loop=None):\n if loop is None:\n loop = asyncio.get_event_loop()\n\n callback = functools.partial(self.dispatch, 'connect')\n start_server = websockets.serve(callback, addr, port, loop=loop)\n coro = loop.run_until_complete(start_server)\n\n try:\n loop.run_forever()\n except KeyboardInterrupt:\n pass\n # cleanup, just to be safe\n coro.close()\n loop.run_until_complete(coro.wait_closed())\n loop.close()", "def run(self):\n self.active_tasks, self.coros_result = {}, {}\n try:\n self.event_loop = asyncio.get_event_loop()\n except Exception:\n self.event_loop = asyncio.new_event_loop()\n asyncio.set_event_loop(self.event_loop)\n self.master_queue = asyncio.Queue()\n self.queue_listen_task = asyncio.Task(self.receive_queue_coro())\n self.is_initialized = True\n try:\n self.get_event_loop().run_until_complete(self.queue_listen_task)\n except CancelledError:\n logging.debug(\"Closing the Thread.\")", "def do_start(self):\n threading.Thread(group = None, \n target = self._subscribe_message, name = \"RabbitMQSubscribeThread\") .start()\n threading.Thread(group = None, \n target = self._publish_message, name = \"RabbitMQPublishThread\").start()", "def run_message_loop(self):\n raise NotImplementedError", "def test_begin(mock_machine,mock_network,mock_umqtt):\n ab = AppSwitch.CAppSwitch(app_device)\n ab.begin()\n # checking subscribes\n subscribe_calls = [\n call(b'contX/switch/1/cmnd/version'),\n call(b'contX/switch/1/cmnd/repoversion'),\n call(b'contX/switch/1/cmnd/download'),\n call(b'contX/switch/1/cmnd/install'),\n 
call(b'contX/switch/1/cmnd/memfree'),\n call(b'contX/switch/1/cmnd/memalloc'),\n call(b'contX/switch/1/cmnd/reboot'),\n call(b'contX/switch/1/cmnd/getip'),\n call(b'contX/switch/1/cmnd/state'),\n call(b'contX/switch/1/cmnd/state1')\n ]\n mock_umqtt.assert_has_calls(subscribe_calls)", "def test_eventloop_api(self):\n from twisted.python.log import startLoggingWithObserver\n from crochet import _shutdown\n self.assertIsInstance(_main, EventLoop)\n self.assertEqual(_main.setup, setup_crochet)\n self.assertEqual(_main.no_setup, no_setup)\n self.assertEqual(_main.run_in_reactor, run_in_reactor)\n self.assertEqual(_main.wait_for, wait_for)\n self.assertIdentical(_main._atexit_register, _shutdown.register)\n self.assertIdentical(\n _main._startLoggingWithObserver, startLoggingWithObserver)\n self.assertIdentical(_main._watchdog_thread, _shutdown._watchdog)", "async def test_run_without_launching(self):\n\n port = get_first_available_port(7860, 7870)\n\n io = gr.Interface(lambda s: s, gr.Textbox(), gr.Textbox()).queue()\n\n config = uvicorn.Config(app=io.app, port=port, log_level=\"warning\")\n\n server = Server(config=config)\n server.run_in_thread()\n\n try:\n async with websockets.connect(f\"ws://localhost:{port}/queue/join\") as ws:\n completed = False\n while not completed:\n msg = json.loads(await ws.recv())\n if msg[\"msg\"] == \"send_data\":\n await ws.send(json.dumps({\"data\": [\"Victor\"], \"fn_index\": 0}))\n if msg[\"msg\"] == \"send_hash\":\n await ws.send(\n json.dumps({\"fn_index\": 0, \"session_hash\": \"shdce\"})\n )\n if msg[\"msg\"] == \"process_completed\":\n completed = True\n assert msg[\"output\"][\"data\"][0] == \"Victor\"\n finally:\n server.close()", "def _mqtt_on_connect(\n self,\n _mqttc: mqtt.Client,\n _userdata: None,\n _flags: dict[str, int],\n result_code: int,\n properties: mqtt.Properties | None = None,\n ) -> None:\n # pylint: disable-next=import-outside-toplevel\n import paho.mqtt.client as mqtt\n\n if result_code != mqtt.CONNACK_ACCEPTED:\n _LOGGER.error(\n \"Unable to connect to the MQTT broker: %s\",\n mqtt.connack_string(result_code),\n )\n return\n\n self.connected = True\n dispatcher_send(self.hass, MQTT_CONNECTED)\n _LOGGER.info(\n \"Connected to MQTT server %s:%s (%s)\",\n self.conf[CONF_BROKER],\n self.conf.get(CONF_PORT, DEFAULT_PORT),\n result_code,\n )\n\n self.hass.create_task(self._async_resubscribe())\n\n if birth := self.conf.get(CONF_BIRTH_MESSAGE, DEFAULT_BIRTH):\n\n async def publish_birth_message(birth_message: PublishMessage) -> None:\n await self._ha_started.wait() # Wait for Home Assistant to start\n await self._discovery_cooldown() # Wait for MQTT discovery to cool down\n # Update subscribe cooldown period to a shorter time\n self._subscribe_debouncer.set_timeout(SUBSCRIBE_COOLDOWN)\n await self.async_publish(\n topic=birth_message.topic,\n payload=birth_message.payload,\n qos=birth_message.qos,\n retain=birth_message.retain,\n )\n\n birth_message = PublishMessage(**birth)\n asyncio.run_coroutine_threadsafe(\n publish_birth_message(birth_message), self.hass.loop\n )\n else:\n # Update subscribe cooldown period to a shorter time\n self._subscribe_debouncer.set_timeout(SUBSCRIBE_COOLDOWN)" ]
[ "0.6220351", "0.62159324", "0.61877704", "0.6160593", "0.61132985", "0.59028834", "0.58682054", "0.5843688", "0.5822215", "0.5798466", "0.57124496", "0.57084095", "0.566796", "0.56646776", "0.56603914", "0.5605297", "0.55968124", "0.55797726", "0.55603266", "0.55498093", "0.5531244", "0.5510697", "0.551031", "0.5508833", "0.5504746", "0.54844993", "0.54838544", "0.5482441", "0.546809", "0.54662997", "0.54557925", "0.5451461", "0.54459524", "0.5430605", "0.5411214", "0.54016465", "0.5390656", "0.5376448", "0.53647137", "0.53585744", "0.5355091", "0.5354466", "0.535348", "0.5344444", "0.534225", "0.532511", "0.53193784", "0.53193784", "0.5315732", "0.5311879", "0.53061455", "0.53059405", "0.5298914", "0.5285996", "0.5278686", "0.52769005", "0.52762", "0.5275943", "0.52448606", "0.5240247", "0.5235783", "0.5233642", "0.52268064", "0.5223447", "0.52222586", "0.5215293", "0.52116746", "0.5208539", "0.52003866", "0.51952827", "0.5191461", "0.5182851", "0.51817703", "0.5173075", "0.51710796", "0.51675296", "0.516203", "0.5161089", "0.5151446", "0.51456416", "0.5142798", "0.51374334", "0.5125905", "0.5122713", "0.5115725", "0.5113631", "0.51099765", "0.5107809", "0.50996125", "0.5098706", "0.5097959", "0.5082956", "0.5082725", "0.508177", "0.5079071", "0.5075954", "0.5073031", "0.5072444", "0.50684536", "0.5067362" ]
0.6690256
0
Initializes the object with a tuple of information.
def __init__(self, info=None): self.astral = None if info is None: self.name = 'Greenwich' self.country = 'England' self._latitude = 51.168 self._longitude = 0 self._timezone_group = 'Europe' self._timezone_location = 'London' else: self._latitude = 0 self._longitude = 0 self._timezone_group = '' self._timezone_location = '' try: self.name = info[0].encode('utf-8') self.country = info[1].encode('utf-8') self.latitude = info[2] self.longitude = info[3] self.timezone = info[4] except: pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, fields):\n self.__init_handle_by_constructor__(_make.TupleType, fields)", "def __init__(self, *args):\n _snap.TAttrPair_swiginit(self, _snap.new_TAttrPair(*args))", "def __init__(self, config: Tuple):", "def __init__(self):\n\n data_extract=DataExtracter()\n self.data = tuple()", "def initialize(self, tup):\n self.set_io_dims(tup)", "def init(self, *args, **kwds):\n pass", "def __init__(self, (u, v, o)):\r\n self.u = u\r\n self.v = v\r\n self.o = o", "def __init__(self, *args):\n if len(args) == 1 and isinstance(args[0], str):\n self._data = tuple(int(i) for i in str(args[0]).split(\".\"))\n elif len(args) == 1 and isinstance(args[0], Iterable):\n self._data = tuple(int(i) for i in args[0])\n else:\n self._data = tuple(int(i) for i in args)", "def __init__(self, a, b, t):\n\t\tself.a = a\n\t\tself.b = b\n\t\tself.t = t", "def __init__(self):\n self.info = dict()", "def initialize(self, **kwargs):", "def __init__(self, *values):\n if (len(values) == 1) and (type(values[0]) in SequenceTypes):\n values = values[0]\n self.values = tuple(values)\n self.fast_validate = (5, self.values)", "def init(self) -> None:", "def __init__(self, grid_tuples):\n super(ParamGrid, self).__init__()\n self.grid = OrderedDict(grid_tuples)", "def __init__(self):\n self._values = {\n 'typeName': None,\n 'message': None,\n 'hasFullStack': True,\n }\n self._initialize()", "def __init__(self, keys, values):\n self.keys = keys\n self.values = values", "def __init__(self, x=0, y=0, z=0):\n if type(x) is tuple:\n self.x = x[0]\n self.y = x[1]\n self.z = x[2]\n elif type(x) is vector:\n self.x = x.x\n self.y = x.y\n self.z = x.z\n else:\n self.x = x\n self.y = y\n self.z = z", "def __init__(self, **kwds):\n raise NotImplementedError", "def __init__(self, seqid, pos, refr, alt, **kwargs):\n self._seqid = seqid\n self._pos = pos\n self._refr = refr\n self._alt = alt\n self.info = dict()\n for key, value in kwargs.items():\n self.info[key] = value", "def __init__(self, **kwargs):\n self.x = kwargs.get('x', 0.0)\n self.y = kwargs.get('y', 0.0)\n self.z = kwargs.get('z', 0.0)\n self.t = kwargs.get('t', 0.0)\n \"\"\" End of the struct contents\"\"\"\n\n \"\"\" Run code to convert struct contents into self.data_dict \"\"\"\n self._create_dict()\n\n # Parent needed so that it can be a SubElement of something\n self.parent = None\n\n self.STRUCTNAME = STRUCTNAME", "def __init__(self, (u, v)):\r\n self.u = u\r\n self.v = v", "def init(self, parameters):\n pass", "def __init__(self, left, right):\n self._left = tuple(left)\n self._right = tuple(right)", "def __init__(self, msg_id=0, xtd=0, rtr=0, ndata=0, data=() ):\r\n self.msg_id = msg_id\r\n self.rtr = rtr\r\n self.xtd = xtd\r\n self.ndata = ndata\r\n self.data = data # tuple with length 0..8\r\n self.timestamp = time.time() # Timestamp of object creation\r", "def __getinitargs__(self):\n\n return (self.name, self.value)", "def __init__(self, data):\n assert len(data) == 6\n for i in range(5):\n assert data[i].keys() == data[5].keys()\n if isinstance(data, tuple):\n self.weights = data\n else:\n self.weights = tuple(data)", "def initialize(cls):", "def __init__(self):\n self._values = {\n 'ver': 2,\n 'name': None,\n 'duration': None,\n 'success': True,\n }\n self._initialize()", "def __init__(self, error: List[Error] = None, info: Dict = None):\n self.error = error if error is not None else []\n self.info = info if info is not None else {}\n self.is_valid = True", "def __init__(self, data_info, slot_temp):\n self.ID = data_info['ID']\n self.turn_domain = 
data_info['turn_domain']\n self.turn_id = data_info['turn_id']\n self.dialog_history = data_info['dialog_history']\n self.turn_belief = data_info['turn_belief']\n self.gating_label = data_info['gating_label']\n self.turn_uttr = data_info['turn_uttr']\n self.generate_y = data_info[\"generate_y\"]\n self.num_total_seqs = len(self.dialog_history)\n self.slot_temp = slot_temp", "def __init__(self):\n self._exchange_params_by_currency_id = None # type: Optional[ExchangeParams]\n self._utility_params_by_good_id = None # type: Optional[UtilityParams]\n self._transaction_fees = None # type: Optional[Dict[str, int]]\n self._quantity_shift = QUANTITY_SHIFT", "def initialize(self, *args, **kwargs):", "def __init__(self, orbit_category, event_ref, user_ticket_pairs, chatroom_ref, cost_estimate, status):\n\n super().__init__()\n self.orbit_category = orbit_category\n self.event_ref = event_ref\n self.user_ticket_pairs = user_ticket_pairs\n self.chatroom_ref = chatroom_ref\n self.cost_estimate = cost_estimate\n self.status = status", "def __init__(self,data):\n\t\tself.data = tuple([tuple(x) if isiterable(x) else (x,) for x in data])\n\t\tself.rows = len(self.data)\n\t\tself.cols = len(self.data[0]) if len(self.data)>0 else 0", "def __init__(self, s, a, b, c, d, t = None):\n self.s = s\n self.a = a\n self.b = b\n self.c = c\n self.d = d\n self.t = t", "def __init__(self, initial_data=[]):\n hdict.__init__(self)\n\n for elt in initial_data:\n self.add(elt)", "def init(self) -> None:\n ...", "def initialize(self, *args, **kwargs):\n pass", "def __init__(self, index, atoms, values):\n self.atoms = tuple(atoms)\n self.values = tuple(values)\n self.compiled = []\n self.n = [index]", "def __init__(self, name: str, values: List[Tuple[str, int]]) -> None:\n super().__init__(name)\n self.values = values", "def from_tuple(cls, tpl):\n obj = cls()\n obj._x = tpl[0]\n obj._y = tpl[1]\n obj._z = tpl[2]\n return obj", "def initialise(self):", "def initialize(self, **kwargs: Any) -> None:\n pass", "def __init__(self):\n\t\tsuper().__init__()\n\t\t\n\t\t# Typically a list of data here\n\t\t# Typically a dict of header keys and values here", "def initialise(self, **kwargs):\n pass", "def from_tuple(cls, t):\n return cls(t[0], t[1])", "def __init__(self, data={}):\n self._update_(data)", "def __init__(self, pairdata):\n\n self.data = deepcopy(pairdata)\n gear = {'z': self.data['z'], 'x': self.data['x'], 'alpha_n': self.data['alpha_n'], 'beta': self.data['beta'],\n 'm_n': self.data['m_n'], 'rho_f': self.data['rho_f'], 'd_s': self.data['d_s'], 'c': self.data['c'],\n 'b': self.data['b']}\n\n self.gear = self.__set_gear(gear)", "def ani_init(self):\n self.line.set_data([], [])\n self.galactic_centre.set_data([], [])\n self.impactor.set_data([], [])\n self.time_text.set_text(\"\")\n self.KE_text.set_text(\"\")\n self.GPE_text.set_text(\"\")\n self.energy_text.set_text(\"\")\n return (\n self.line,\n self.galactic_centre,\n self.impactor,\n self.time_text,\n self.KE_text,\n self.GPE_text,\n self.energy_text,\n )\n # One might hope that there was a better way to return objects", "def __init__(self, desc = {}):\n (\n self.x,\n self.y,\n self.z\n ) = itemgetter('x','y','z')(desc['position'])\n (\n self.rotX,\n self.rotY,\n self.rotZ\n ) = itemgetter('x','y','z')(desc['rotation'])", "def initialize(self):\n\t\tpass", "def from_tuple(cls, t):\n if (isinstance(t, tuple) and len(t) == 3):\n l, n, r = t\n return cls(n, l, r)\n else:\n raise ValueError(\"Expected a 3-tuple!\")", "def __init__(self, a, b, h):\n self.a = a\n self.b = b\n 
self.h = h", "def _init(self):\n raise NotImplementedError", "def initialize(self) -> None:\n pass", "def __init__(self):\n # Dict of minecraft object in form of \"dict[id] = name\"\n self.data_values = dict()\n self.parser = self.setup_parser()", "def __init__(self):\n\t\tself.state = None\n\t\tself.info = None\n\t\tself.next = None", "def __init__(self, device_pair_identifier, device_pair_info):\n\n self.raw_info = device_pair_info\n self.identifier = device_pair_identifier\n self.watch_udid = device_pair_info[\"watch\"][\"udid\"]\n self.phone_udid = device_pair_info[\"phone\"][\"udid\"]", "def __init__(self, data: dict = {}):\n pass", "def init(self):\n raise NotImplementedError", "def init(self):\n raise NotImplementedError", "def __init__(self, *args):\n this = _libsbml.new_InitialAssignment(*args)\n try: self.this.append(this)\n except: self.this = this", "def __init__(self, *args):\n _snap.TAttr_swiginit(self, _snap.new_TAttr(*args))", "def __init__(self, data: dict):\n self._data = {\n '': 'Location', # this is required\n 'street': '',\n 'suburb': '',\n 'location': '',\n 'stop': ''\n }\n\n self._data.update(data)", "def initialize(self, args):\n\t\tpass", "def __init__(self, *args: Union[List[AtomKey], HKT], **kwargs: str) -> None:\n ...", "def __init__(self) -> None:\n # Values are already set on __new__.\n # Override this method when value modification on initialization is\n # required.\n raise NotImplementedError()", "def __init__(self):\n self.metadata = {}\n self.geometry = {'array': None, \n 'geom': None, \n 'wkt': None}", "def __init__(self, first_name, last_name, address):\n\n self.first_name = first_name\n self.last_name = last_name\n self.address = address\n\n # Creates dictionary for each student with the label & info.\n\n self.info = {\n 'first name': self.first_name,\n 'last name': self.last_name,\n 'address': self.address,\n }", "def __init__(self):\n self._distance_data = []\n self._location_data = []\n self._package_data = []", "def __init__(self):\n self.relation = ''\n self.attributes = []\n self.attribute_types = dict()\n self.attribute_data = dict()\n self.comment = []\n self.data = []\n pass", "def initialize(self):\r\n pass", "def initialize(self):\r\n pass", "def __init__(self):\n\n # initialise the empty mappings dictionary\n self.data = {\n 'loan_id': None,\n 'product': None,\n 'origination_date': None,\n 'reversion_date': None,\n 'rate_term': None,\n 'loan_amount': None,\n 'initial_rate': None,\n 'reversion_rate': None,\n 'term': None,\n 'interest_only_amount': None,\n 'upfront_fees': None,\n 'upfront_costs': None,\n 'entity_eir': None\n }", "def __init__(self, *args):\n this = _libsbml.new_ListOfInitialAssignments(*args)\n try: self.this.append(this)\n except: self.this = this", "def __init__(self):\n self.key2value = {}\n self.key2time = {}", "def __init__(self):\n log.msg(\"Initializing Twitch parser.\")\n\n # initialize our data members\n self.streams = tuple()\n self.crc32 = 0", "def test_init(self, r, attribute_tup):\n num_wires, num_gates, gate_types, gate_sizes, depth, shots = attribute_tup\n\n assert r.num_wires == num_wires\n assert r.num_gates == num_gates\n assert r.depth == depth\n assert r.shots == shots\n assert r.gate_types == gate_types\n assert r.gate_sizes == gate_sizes", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def __init__(self, *args, **kwds):\n if args or kwds:\n 
super(WorldModelInfo, self).__init__(*args, **kwds)\n #message fields cannot be None, assign default values for those that are\n if self.header is None:\n self.header = std_msgs.msg.Header()\n if self.obstacleinfo is None:\n self.obstacleinfo = nubot_common.msg.ObstaclesInfo()\n if self.oppinfo is None:\n self.oppinfo = nubot_common.msg.ObstaclesInfo()\n if self.robotinfo is None:\n self.robotinfo = []\n if self.ballinfo is None:\n self.ballinfo = []\n if self.coachinfo is None:\n self.coachinfo = nubot_common.msg.CoachInfo()\n if self.pass_cmd is None:\n self.pass_cmd = nubot_common.msg.PassCommands()\n else:\n self.header = std_msgs.msg.Header()\n self.obstacleinfo = nubot_common.msg.ObstaclesInfo()\n self.oppinfo = nubot_common.msg.ObstaclesInfo()\n self.robotinfo = []\n self.ballinfo = []\n self.coachinfo = nubot_common.msg.CoachInfo()\n self.pass_cmd = nubot_common.msg.PassCommands()", "def __init__(self, gets: Iterable[Get[_Product]]) -> None:\n self.gets = tuple(gets)", "def __init__(self, a, b, c):\r\n self.a = a\r\n self.b = b\r\n self.c = c", "def __init__(self, data, v1, v2):\n self.data = data\n self.v1 = v1\n self.v2 = v2", "def __init__(self, *args, **kwargs) -> None:\n pass", "def __init__(self, *args, **kwargs) -> None:\n pass", "def __init__(self, a, b):\n\t\tself.a = a\n\t\tself.b = b", "def __init__(self, a, b):\n\t\tself.a = a\n\t\tself.b = b", "def initialize(self):\n pass", "def initialize(self):\n pass", "def initialize(self):\n pass", "def initialize(self):\n pass", "def initialize(self):\n pass", "def initialize(self):\n raise NotImplementedError" ]
[ "0.76838994", "0.6909169", "0.69027334", "0.6892625", "0.6837433", "0.6632949", "0.65825474", "0.656346", "0.6476574", "0.6431456", "0.6426662", "0.6349654", "0.6341158", "0.63148826", "0.6312479", "0.62745905", "0.6267313", "0.62587535", "0.6253988", "0.6251733", "0.6246671", "0.62452847", "0.6241837", "0.6233956", "0.62241673", "0.6221004", "0.6216889", "0.6207879", "0.61951375", "0.619341", "0.61901504", "0.61883175", "0.6174178", "0.61677176", "0.6147908", "0.6144092", "0.61401397", "0.6136998", "0.61309797", "0.613059", "0.61298597", "0.6127435", "0.61092836", "0.60974556", "0.6086211", "0.607887", "0.60770106", "0.6076661", "0.60735726", "0.6057453", "0.60566884", "0.6056496", "0.6050062", "0.6048419", "0.60445195", "0.6043851", "0.6043105", "0.60381275", "0.6025702", "0.60178316", "0.60178316", "0.60111046", "0.6005937", "0.6004607", "0.5998205", "0.59953064", "0.5989962", "0.5989662", "0.5984801", "0.5983377", "0.5981632", "0.5978795", "0.5978795", "0.5976906", "0.5976224", "0.597473", "0.5974384", "0.5973927", "0.59732187", "0.59732187", "0.59732187", "0.59732187", "0.59732187", "0.59732187", "0.59732187", "0.59732187", "0.5965136", "0.5963864", "0.596354", "0.5961921", "0.59617084", "0.59617084", "0.595854", "0.595854", "0.59500164", "0.59500164", "0.59500164", "0.59500164", "0.59500164", "0.5948367" ]
0.6001026
64
Calculates the time in the morning when the sun is a certain number of degrees below the horizon. By default this is 6 degrees but can be changed
def dawn(self, date=None, local=True):
    if self.astral is None:
        self.astral = Astral()
    if date is None:
        date = datetime.date.today()
    dawn = self.astral.dawn_utc(date, self.latitude, self.longitude)
    if local:
        return dawn.astimezone(self.tz)
    else:
        return dawn
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def equation_of_time(cls, date):\n offset = cls.sine(cls.mean_position(date, cls.ANOMALISTIC_YEAR))\n equation_sun = (offset * angle(57, 18, 0) * (14/360 - (abs(offset) / 1080)))\n return ((cls.daily_motion(date) / 360) * (equation_sun / 360) * cls.SIDEREAL_YEAR)", "def sundial_time(cls, tee):\n date = Clock.fixed_from_moment(tee)\n time = mod(tee, 1)\n q = ifloor(4 * time)\n if q == 0:\n a = cls.sunset(date - 1)\n b = cls.sunrise(date)\n t = Clock.days_from_hours(-6)\n elif q == 3:\n a = cls.sunset(date)\n b = cls.sunrise(date + 1)\n t = Clock.days_from_hours(18)\n else:\n a = cls.sunrise(date)\n b = cls.sunset(date)\n t = Clock.days_from_hours(6)\n return a + (2 * (b - a) * (time - t))", "def morning_twilight_6(self, date=None):\n self.site.horizon = self.horizon6\n self._set_site_date(date)\n r_date = self.site.next_rising(self.sun)\n r_date = self.date_to_local(r_date.datetime())\n return r_date", "def get_solar_time(longitude_deg, min_date, hour_date, day_date):\n solar_time_min = hour_date * 60 + min_date + 4 * longitude_deg + get_equation_of_time(day_date)\n\n return solar_time_min/60", "def what_night_is_it():\n d = datetime.datetime.utcnow() - datetime.timedelta(7 / 24 + 0.5)\n tonight = int(d.strftime('%Y%m%d'))\n return tonight", "def MoonPhase(time):\n return PairLongitude(Body.Moon, Body.Sun, time)", "def _solar_time(date, lon, lat):\n # IDL is computing time_t as hours and fractional minutes\n time_t = ee.Date(date).get('hour').add(\n ee.Date(date).get('minute').divide(60))\n\n # This will return the hour floating point value\n # time_t = ee.Date(date).get('hour').add(ee.Date(date).getFraction('hour'))\n\n # CGM - DOY and hour could be images in order to make these expressions\n julian = _to_jd(date)\n\n # Sunrise time\n julian_ = time_t.divide(24.0).add(julian)\n j_cen = julian_.add(0.5 - 2451545.0).divide(36525.0)\n # CGM - Does the mod happen before or after the multiply\n lon_sun = j_cen.multiply(0.0003032).add(36000.76983) \\\n .multiply(j_cen).mod(360.0).add(280.46646).subtract(360)\n an_sun = j_cen.multiply(-0.0001537).add(35999.05029) \\\n .multiply(j_cen).add(357.52911)\n ecc = j_cen.multiply(0.0000001267).add(0.000042037) \\\n .multiply(j_cen).multiply(-1).add(0.016708634)\n ob_ecl = j_cen.multiply(-0.001813).add(0.00059) \\\n .multiply(j_cen).add(46.815) \\\n .multiply(j_cen).multiply(-1).add(21.448) \\\n .divide(60.0).add(26).divide(60).add(23)\n ob_corr = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).cos() \\\n .multiply(0.00256).add(ob_ecl)\n var_y = ob_corr.divide(2.0).multiply(deg2rad).tan().multiply(\n ob_corr.divide(2.0).multiply(deg2rad).tan())\n eq_t = (\n lon_sun.multiply(2.0).multiply(deg2rad).sin().multiply(var_y)\n .subtract(an_sun.multiply(deg2rad).sin().multiply(ecc).multiply(2.0))\n .add(an_sun.multiply(deg2rad).sin()\n .multiply(lon_sun.multiply(2.0).multiply(deg2rad).cos())\n .multiply(var_y).multiply(ecc).multiply(4.0))\n .subtract(lon_sun.multiply(4.0).multiply(deg2rad).sin()\n .multiply(var_y).multiply(var_y).multiply(0.5))\n .subtract(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(ecc).multiply(ecc).multiply(1.25))\n .multiply(4.0).multiply(rad2deg))\n sun_eq = (\n an_sun.multiply(deg2rad).sin().multiply(\n j_cen.multiply(0.000014).add(0.004817)\n .multiply(j_cen).multiply(-1).add(1.914602))\n .add(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(j_cen.multiply(-0.000101).add(0.019993)))\n .add(an_sun.multiply(3.0).multiply(deg2rad).sin().multiply(0.000289)))\n sun_true = sun_eq.add(lon_sun)\n sun_app = 
j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).sin() \\\n .multiply(-0.00478).subtract(0.00569).add(sun_true)\n\n # CGM - Intentionally not converting back to degrees\n d = ob_corr.multiply(deg2rad).sin() \\\n .multiply(sun_app.multiply(deg2rad).sin()) \\\n .asin()\n\n # CGM - Functions below are lat/lon dependent and can be written as\n # ee.Image expressions\n # CGM - d is still in radians, not converting\n ha_t = lat.expression(\n 'acos((cos(90.833 * pi / 180) / (cos(lat) * cos(d))) - tan(lat) * tan(d))'\n ' * (180 / pi)',\n {'lat': lat, 'd': d, 'pi': math.pi})\n\n # print('\\n{:10s} {:.12f}'.format('julian_', julian_.getInfo()))\n # print('{:10s} {:.12f}'.format('time_t', time_t.getInfo()))\n # print('{:10s} {:.12f}'.format('j_cen', j_cen.getInfo()))\n # print('{:10s} {:.12f}'.format('lon_sun', lon_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('an_sun', an_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('ecc', ecc.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_ecl', ob_ecl.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_corr', ob_corr.getInfo()))\n # print('{:10s} {:.12f}'.format('var_y', var_y.getInfo()))\n # print('{:10s} {:.12f}'.format('eq_t', eq_t.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_eq', sun_eq.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_true', sun_true.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_app', sun_app.getInfo()))\n # print('{:10s} {:.12f}'.format('d', d.getInfo()))\n\n return d, eq_t, ha_t", "def horiz_angle(time, data):\n\n # TODO What should 0deg be? Set it to inline w/ target? facing target?\n\n # direction of the sun. measured in degrees counted clockwise from north.\n azimuth = data[time]['azimuth']\n\n h_angle = (azimuth / 2 - 90)\n\n # returns answer between -180 and 180 degrees\n return round(((h_angle + 180) % 360) - 180, 4)", "def time_interval( self ):\n begin = self.begin; end = self.end\n if end - begin < 600*self.hour_switch:\n return 600\n if end - begin < 86400*self.day_switch:\n return 3600\n elif end - begin < 86400*7*self.week_switch:\n return 86400\n else:\n return 86400*7", "def _unit_hr(self):\n return self.time_base * 60.0", "def unit_hr(self):\n return self.time_base * 60.0", "def alt_sunrise(cls, date):\n rise = cls.UJJAIN.dawn(date, angle(0, 47, 0))\n return 1/24 * 1/60 * iround(rise * 24 * 60)", "def _unit_wk(self):\n return ((self.time_base * 60.0) * 24.0) * 7", "def mask_nighttime(lon, lat, date=date, mask_daytime=mask_daytime,\n ref_date=datetime.datetime(1899, 12, 31, 12),\n buffer_hours=buffer_hours, debug=False):\n # --- get lat and lon values from columns\n if debug:\n print((\"--- (s4-1) %s seconds ---\" % (time.time() - start_time)))\n # --- get sunrise and sunset for location\n o = ephem.Observer()\n # set lat (decimal?), lon (decimal?), and date (UTC)\n o.lat = str(lat)\n o.long = str(lon)\n o.date = date\n # planetary body\n s = ephem.Sun()\n if debug:\n print((\"--- (s4-2) %s seconds ---\" % (time.time() - start_time)))\n\n # Compute sun vs observer\n s.compute()\n if debug:\n print((\"--- (s4-3) %s seconds ---\" % (time.time() - start_time)))\n\n # Work out if day or night based on sunrises and sunsets\n mask_value = 0\n try:\n\n # get sunrise time and date\n next_rising = o.next_rising(s)\n next_setting = o.next_setting(s)\n\n # convert to datetime.datetime\n next_rising = add_days(ref_date, next_rising)\n next_setting = add_days(ref_date, next_setting)\n\n # did the sun last rise or set? (inc. 
any adjustments)\n sun_last_rose = False\n if next_setting < next_rising:\n sun_last_rose = True\n\n # Add buffer to rising/setting if provided with buffer_hours\n if buffer_hours != 0:\n\n # Calculate last rise\n previous_rising = o.previous_rising(s)\n # convert to datetime.datetime\n previous_rising = add_days(ref_date, previous_rising)\n # Calculate last setting\n previous_setting = o.previous_setting(s)\n # convert to datetime.datetime\n previous_setting = add_days(ref_date, previous_setting)\n\n # Calculate absolute difference\n time_from_rise = (date-previous_rising).total_seconds()\n time_till_set = (date-next_setting).total_seconds()\n time_from_set = (date-previous_setting).total_seconds()\n time_till_rise = (date-next_rising).total_seconds()\n\n # If absolutely difference less than buffer\n if abs(time_from_rise)/60./60. <= buffer_hours:\n mask_value = 1\n elif abs(time_till_set)/60./60. <= buffer_hours:\n mask_value = 1\n elif abs(time_from_set)/60./60. < buffer_hours:\n mask_value = 1\n elif abs(time_till_rise)/60./60. < buffer_hours:\n mask_value = 1\n\n # --- Check if daytime or nighttime and mask if condition met.\n if sun_last_rose:\n if mask_daytime:\n # ... and has not set yet, it must be daytime\n if (date < next_setting):\n mask_value = 1\n\n # if the sun last set... (mask nighttime is default)\n else:\n # if mask nighttime (aka not mask_daytime)\n if not mask_daytime:\n # ... and has not risen yet, it must be nighttime\n if (date < next_rising):\n mask_value = 1\n\n # Add gotcha for locations where sun is always up.\n except AlwaysUpError:\n if mask_daytime:\n mask_value = 1\n\n # Add gotcha for locations where sun is always down.\n except NeverUpError:\n if not mask_daytime:\n mask_value = 1\n\n except:\n print('FAIL')\n sys.exit()\n\n # Mask value in array\n return mask_value", "def computeSunTime(self, latitude, longitude, startDate, endDate): \n self.sun = sun(lat=latitude, long=longitude)\n dateTime = datetime.datetime.combine(startDate, datetime.time(hour=8))\n while dateTime.date() <= endDate: \n daytimeStart, daytimeEnd = self.computeDaytimeStartEnd(dateTime)\n self.sunriseTimeDict[dateTime.date()] = daytimeStart\n self.sunsetTimeDict[dateTime.date()] = daytimeEnd\n dateTime += self.oneDayDelta", "def unit_wk(self):\n return ((self.time_base * 60.0) * 24.0) * 7", "def Horizon(time, observer, ra, dec, refraction):\n if not (Refraction.Airless.value <= refraction.value <= Refraction.JplHorizons.value):\n raise Error('Invalid refraction type')\n\n latrad = math.radians(observer.latitude)\n lonrad = math.radians(observer.longitude)\n decrad = math.radians(dec)\n rarad = ra * _HOUR2RAD\n\n sinlat = math.sin(latrad)\n coslat = math.cos(latrad)\n sinlon = math.sin(lonrad)\n coslon = math.cos(lonrad)\n sindc = math.sin(decrad)\n cosdc = math.cos(decrad)\n sinra = math.sin(rarad)\n cosra = math.cos(rarad)\n\n # Calculate three mutually perpendicular unit vectors\n # in equatorial coordinates: uze, une, uwe.\n #\n # uze = The direction of the observer's local zenith (straight up).\n # une = The direction toward due north on the observer's horizon.\n # uwe = The direction toward due west on the observer's horizon.\n #\n # HOWEVER, these are uncorrected for the Earth's rotation due to the time of day.\n #\n # The components of these 3 vectors are as follows:\n # [0] = x = direction from center of Earth toward 0 degrees longitude (the prime meridian) on equator.\n # [1] = y = direction from center of Earth toward 90 degrees west longitude on equator.\n # [2] = z = 
direction from center of Earth toward the north pole.\n\n uze = [coslat*coslon, coslat*sinlon, sinlat]\n une = [-sinlat*coslon, -sinlat*sinlon, coslat]\n uwe = [sinlon, -coslon, 0.0]\n\n # Correct the vectors uze, une, uwe for the Earth's rotation by calculating\n # sideral time. Call spin() for each uncorrected vector to rotate about\n # the Earth's axis to yield corrected unit vectors uz, un, uw.\n # Multiply sidereal hours by -15 to convert to degrees and flip eastward\n # rotation of the Earth to westward apparent movement of objects with time.\n\n angle = -15.0 * _sidereal_time(time)\n uz = _spin(angle, uze)\n un = _spin(angle, une)\n uw = _spin(angle, uwe)\n\n # Convert angular equatorial coordinates (RA, DEC) to\n # cartesian equatorial coordinates in 'p', using the\n # same orientation system as uze, une, uwe.\n\n p = [cosdc*cosra, cosdc*sinra, sindc]\n\n # Use dot products of p with the zenith, north, and west\n # vectors to obtain the cartesian coordinates of the body in\n # the observer's horizontal orientation system.\n #\n # pz = zenith component [-1, +1]\n # pn = north component [-1, +1]\n # pw = west component [-1, +1]\n\n pz = p[0]*uz[0] + p[1]*uz[1] + p[2]*uz[2]\n pn = p[0]*un[0] + p[1]*un[1] + p[2]*un[2]\n pw = p[0]*uw[0] + p[1]*uw[1] + p[2]*uw[2]\n\n # proj is the \"shadow\" of the body vector along the observer's flat ground.\n proj = math.sqrt(pn*pn + pw*pw)\n\n # Calculate az = azimuth (compass direction clockwise from East.)\n if proj > 0.0:\n # If the body is not exactly straight up/down, it has an azimuth.\n # Invert the angle to produce degrees eastward from north.\n az = math.degrees(-math.atan2(pw, pn))\n if az < 0:\n az += 360\n else:\n # The body is straight up/down, so it does not have an azimuth.\n # Report an arbitrary but reasonable value.\n az = 0.0\n\n # zd = the angle of the body away from the observer's zenith.\n zd = math.degrees(math.atan2(proj, pz))\n hor_ra = ra\n hor_dec = dec\n\n if refraction != Refraction.Airless:\n zd0 = zd\n refr = RefractionAngle(refraction, 90.0 - zd)\n zd -= refr\n if refr > 0.0 and zd > 3.0e-4:\n zdrad = math.radians(zd)\n sinzd = math.sin(zdrad)\n coszd = math.cos(zdrad)\n zd0rad = math.radians(zd0)\n sinzd0 = math.sin(zd0rad)\n coszd0 = math.cos(zd0rad)\n\n pr = [(((p[j] - coszd0 * uz[j]) / sinzd0)*sinzd + uz[j]*coszd) for j in range(3)]\n proj = math.sqrt(pr[0]*pr[0] + pr[1]*pr[1])\n if proj > 0:\n hor_ra = _RAD2HOUR * math.atan2(pr[1], pr[0])\n if hor_ra < 0:\n hor_ra += 24\n else:\n hor_ra = 0\n hor_dec = math.degrees(math.atan2(pr[2], proj))\n\n return HorizontalCoordinates(az, 90.0 - zd, hor_ra, hor_dec)", "def sun_earth_test(stepper_type, dt):\n # numerical params\n T = 0\n\n # physical params\n R = common.M_S/common.M_E\n MS = np.array([R, 1])\n G = common.get_G(common.M_E, common.AU, common.YR)\n f = common.get_f(G, MS)\n period = np.sqrt(4 * np.pi**2 / (G * sum(MS)) * (1 + 1 / R)**3)\n\n T_F = 2 * period\n V_E = np.sqrt(G * R / (1 + 1/R))\n ys = np.array([\n -1 / R, 0, 0, -V_E / R,\n 1, 0, 0, V_E\n ], dtype=np.float64)\n earth_pos = [ys[4:6]]\n solver = stepper_type(f, T, ys, T_F, max_step=dt, G=G, Ms=MS,\n get_accel=common.get_accel, get_jerk=common.get_jerk\n )\n times = [T]\n while solver.status == 'running':\n solver.step()\n earth_pos.append(np.copy(solver.y[4:6]))\n times.append(solver.t)\n earth_arr = np.array(earth_pos)\n times_arr = np.array(times)\n exact_earth = np.array(list(zip(\n np.cos(2 * np.pi / period * times_arr),\n np.sin(2 * np.pi / period * times_arr)\n )))\n return 
np.sqrt(sum(common.l2_norm(earth_arr, exact_earth))**2)", "def wind_chill(T_a, v):\r\n return 13.12 + 0.6215*(T_a) - 11.37*(v)**0.16 + 0.3965*(T_a)*(v)**0.16", "def morning_star(self):\n self.data['morning_star'] = ((self.data['Open'].shift(2) > self.data['Close'].shift(2)) & \\\n ((self.data['Open'].shift(2) - self.data['Close'].shift(2))/\\\n (0.001+self.data['High'].shift(2)-self.data['Low'].shift(2))>.6) &\\\n (self.data['Close'].shift(2) > self.data['Open'].shift(1)) & \\\n (self.data['Open'].shift(1)>self.data['Close'].shift(1)) & \\\n ((self.data['High'].shift(1)-self.data['Low'].shift(1)) > \\\n (3*(self.data['Close'].shift(1)-self.data['Open'].shift(1))))&\\\n (self.data['Close']>self.data['Open'])&\\\n (self.data['Open']>self.data['Open'].shift(1)))", "def lunar_phase(cls, tee):\n return mod(cls.lunar_longitude(tee) - cls.hindu_solar_longitude(tee), 360)", "def hmstora(rah,ram,ras):\n\thrs = (float(rah)+(float(ram)/60)+(float(ras)/3600.0)) % 24\n\n\treturn 15*hrs", "def _clock_day(self):\n return int(self._shifted_time / 86400)", "def hours_of_daylight(date, axis=23.44, latitude=47.61):\n days = (date - pd.datetime(2000, 12, 21)).days\n m = (1. - np.tan(np.radians(latitude))\n * np.tan(np.radians(axis) * np.cos(days * 2 * np.pi / 365.25)))\n return 24. * np.degrees(np.arccos(1 - np.clip(m, 0, 2))) / 180.", "def ra_dec_calculate(self) -> dict:\n for sec in range(self.delta_time):\n if 0 < self.ra_start + self.one_sec_walk_ra < 360 * 3600:\n self.ra = self.ra_start + self.one_sec_walk_ra\n self.ra_start = self.ra\n else:\n self.ra = self.ra_start + self.one_sec_walk_ra - 360 * 3600\n self.ra_start = self.ra\n if self.ra_dec_min < self.ra < self.ra_dec_max:\n self.dec = self.dec_start - self.one_sec_walk_dec\n self.dec_start = self.dec\n else:\n self.dec = self.dec_start + self.one_sec_walk_dec\n self.dec_start = self.dec\n\n ra_res = f'{int(self.ra // (3600 * 15))}:{int((self.ra % 3600) // 60)}:' \\\n f'{round(float((self.ra % 3600) % 60), 1)}'\n dec_res = f'{int(self.dec // 3600)}:{int((self.dec % 3600) // 60)}:' \\\n f'{round(float((self.dec % 3600) % 60), 1)}'\n moon = {\n 'ra': ra_res,\n 'dec': dec_res\n }\n return moon", "def _clock_time(self):\n return self._shifted_time % (24*3600)", "def morning_twilight_18(self, date=None):\n self.site.horizon = self.horizon18\n self._set_site_date(date)\n r_date = self.site.next_rising(self.sun)\n r_date = self.date_to_local(r_date.datetime())\n return r_date", "def wind_heat_transfer_coefficient(self) -> float:\n\n return 3.8 + 2 * self.wind_speed\n # return 4.5 + 2.9 * self.wind_speed", "def compute_windchill(t,v):\n a = 35.74\n b = 0.6215\n c = 35.75\n d = 0.4275\n v16 = v**0.16\n wci = a+(b*t)-(c*v16)+(d*t*v16)\n return wci", "def omega_sun_snodgrass90(lat):\n return differential_rotation(lat, 14.71, -2.39, -1.78)", "def get_nightly_end_time():\n month = time.localtime().tm_mon\n if np.abs(month - 6) > 2:\n end_night = 8\n else:\n end_night = 7\n return end_night # local Tucson time the following morning", "def time(self) -> float:\n return self.state.game_loop / 22.4 # / (1/1.4) * (1/16)", "def _get_sun_rise_set_time(self, sun_time):\n if sun_time:\n return datetime.fromtimestamp(sun_time).strftime(self.time_format)\n return sun_time", "def get_nightly_start_time():\n return 14 # 2PM local Tucson time", "def get_radiation():\n sun_pos = get_sun_position()\n if sun_pos <= POSITION_MIN or sun_pos >= POSITION_MAX:\n return 0\n else:\n # Calculate a new delta.\n delta = random.randint(0, RADIATION_DELTA)\n if random.random() > 0.5:\n 
delta = -1 * delta\n # Calculate the radiation based on the sun position.\n new_radiation = round(-0.1279 * pow(sun_pos, 2) + 46.05 * sun_pos - 3100)\n # Apply the delta and return the value.\n return new_radiation + delta", "def wind_speed(self):\r\n return self._yesterdays_weather.get_average_wind_speed()", "def night_center(self, date=None):\n sunset = self.sunset(date=date)\n sunrise = self.sunrise(date=sunset)\n center = sunset + timedelta(0, (sunrise - sunset).total_seconds() / 2.0)\n center = self.date_to_local(center)\n return center", "def get_mean_sun_angles(self) -> (float, float):\n # Get MTD XML file\n root, _ = self.read_mtd()\n\n # Open zenith and azimuth angle\n zenith_angle = float(root.findtext(\".//SolarZenith\"))\n azimuth_angle = float(root.findtext(\".//SolarAzimuth\"))\n\n return azimuth_angle, zenith_angle", "def sunrise(cls, date):\n return (date + Clock.days_from_hours(6) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n ((1577917828/1582237828 / 360) *\n (cls.ascensional_difference(date, cls.LOCATION) +\n (1/4 * cls.solar_sidereal_difference(date)))))", "def calc_time(self, distance):\r\n if distance < 400:\r\n return 2*math.sqrt(distance / 1406.25)\r\n else:\r\n distance -= 400\r\n return distance / 750 + 16 / 15", "def _unit_day(self):\n return (self.time_base * 60.0) * 24.0", "def getHeadingTime(self) -> float:\n return self.timestep_cached_heading_tm", "def day_length(day_of_year, lat):\n day_hours = np.deg2rad(lat)\n declination = 23.45 * np.sin(np.deg2rad(360.0 * (283.0 + day_of_year) / 365.0))\n const_day = -np.tan(day_hours) * np.tan(np.deg2rad(declination)) <= -1.0\n day_hours[const_day] = 24.0\n const_night = -np.tan(day_hours) * np.tan(np.deg2rad(declination)) >= 1.0\n day_hours[const_night] = 0.0\n day_night = ~((const_day) | (const_night))\n hour_angle = np.rad2deg(np.arccos(-np.tan(day_hours[day_night]) *\n np.tan(np.deg2rad(declination))))\n day_hours[day_night] = (hour_angle / 7.5)\n return day_hours", "def hours_of_daylight(date, axis=23.44, latitude=47.61):\n days = (date - pd.datetime(2000, 12, 21)).days\n m = (1. - np.tan(np.radians(latitude))* np.tan(np.radians(axis) * np.cos(days * 2 *\n np.pi / 365.25)))\n return 24. 
* np.degrees(np.arccos(1 - np.clip(m, 0, 2))) / 180.", "def main():\n current_time = datetime.datetime.now()\n is_night = False\n\n while True:\n sleep(HOUR_DURATION)\n current_time += datetime.timedelta(hours=HOUR_DURATION)\n light_changed = False\n\n if (current_time.hour >= NIGHT_STARTS or current_time.hour < DAY_STARTS) and not is_night:\n is_night = True\n light_changed = True\n elif DAY_STARTS <= current_time.hour < NIGHT_STARTS and is_night:\n is_night = False\n light_changed = True\n\n if light_changed:\n if is_night:\n write_file_and_screen(\"Se ha hecho de noche\", \"horas.txt\")\n else:\n write_file_and_screen(\"Se ha hecho de dia\", \"horas.txt\")\n\n write_file_and_screen(\"La hora actual es: {}\".format(current_time), \"horas.txt\")", "def _sunrise_sunset(self, seconds=None, milliseconds=None, hour=None, freq=None, temp_start=None, temp_end=None, setting=True):\n FUDGE_FACTOR = 0.86\n if hour==None:\n # Work out what the defaults should be\n ## MOVE IT INSIDE THE Override values.\n t0 = temp_start.split('K')[0]\n t1 = temp_end.split('K')[0]\n if t0 > t1:\n temp_step = -100\n x_start = 0\n x_step_amount = 1\n else:\n temp_step = 100\n x_start = 60\n x_step_amount = -1\n temp_0 = int(t0)\n temp_n = int(t1)\n # You can override these defaults if either temp_start or temp_end is set\n if temp_start:\n try:\n _exists = NAMED_COLOURS[temp_start.lower()]\n except (TypeError,ValueError): # Means the starting temp has NOT been provided, use default\n pass\n except KeyError:\n logging.warning(\"Sunrise/sunset: Your starting colour temperature '{}' is not a valid colour temperature\".format(temp_start))\n if temp_end:\n try:\n _exists = NAMED_COLOURS[temp_end.lower()]\n except (TypeError, ValueError): # Means the ending temp has NOT been provided, use default\n pass\n except KeyError:\n logging.warning(\"Sunrise/sunset: Your ending colour temperature '{}' is not a valid colour temperature\".format(temp_end))\n\n #Add in a fudge factor to cater for CPU doing other things:\n #Calculate our z scaling factor:\n target_time = self.clean_time_in_milliseconds(seconds, milliseconds, default_seconds=1, minimum_milliseconds=1000)\n z_factor = (target_time*FUDGE_FACTOR) / 2.564949357\n x_step = x_start\n #And run the loop\n t1 = time.time()\n check = True #We only check the current values on the first run\n for temp in xrange(temp_0,temp_n,temp_step):\n if self._sequence_stop_signal: #Bail if sequence should stop\n return None\n k = u\"%sk\" % temp\n self.fade(k, fade_time=((100+z_factor)/(65-x_step)), check=check) #ms, slows down as sunset progresses\n x_step += x_step_amount\n check=False\n t2 = time.time()\n logging.info(\"%ss, target=%ss\" % ((t2-t1),target_time/1000.0))\n else:\n temp_0=temp_start[0].split('K')[0]\n\t temp_n=temp_end[0].split('K')[0]\n if self.p_alarm != []:\n self.teardown_alarm()\n process_alarm=[]\n for tt in range(0,len(hour)):\n milliseconds=0\n proc_hour=hour[tt]\n\t\talarm_arg=(proc_hour,temp_0,temp_n,FUDGE_FACTOR,freq,seconds[tt],milliseconds)\n \n process_alarm.append(Process(target=self.schedule_alarm,args=alarm_arg))\n [pp.start() for pp in process_alarm] # Start processes in the background which contain the schedule of the alarm\n self.p_alarm=process_alarm", "def calculation_time_analysis():\n\tfrom . 
import spectra as sp\n\tp_dict = {'Bfield':700,'rb85frac':1,'Btheta':88*np.pi/180,'Bphi':0*np.pi/180,'lcell':75e-3,'T':84,'Dline':'D2','Elem':'Cs'}\n\tchiL,chiR,chiZ = sp.calc_chi([-3500],p_dict)\n\t\n\tfor angle in [0, np.pi/32, np.pi/16, np.pi/8, np.pi/4, np.pi/2]:\n\t\tprint(('Angle (degrees): ',angle*180/np.pi))\n\t\tRotMat, n1, n2 = solve_diel(chiL,chiR,chiZ,angle)", "def unit_day(self):\n return (self.time_base * 60.0) * 24.0", "def time_step(self):\n\n rho_rel = np.abs(self.rho_dt / self.rho)\n rho_rel_max = np.max(rho_rel)\n e_rel = np.abs(self.e_dt / self.e)\n e_rel_max = np.max(e_rel)\n x_rel = np.abs(self.u / self.dx)\n x_rel_max = np.max(x_rel)\n y_rel = np.abs(self.w / self.dy)\n y_rel_max = np.max(y_rel)\n rel = [rho_rel_max, e_rel_max, x_rel_max, y_rel_max]\n delta = np.max(np.abs(rel))\n\n if 0.1 <= delta <= 1e3:\n self.dt = self.p / delta\n else:\n self.dt = self.p", "def lunarperigee(time):\n dtor = np.pi / 180\n t1 = 1 + time\n t2 = t1 * t1\n t3 = t2 * t1\n perigee = (\n 334.329653 * dtor\n + 4069.0340329575 * dtor * t1\n - 0.010325 * dtor * t2\n - 1.2e-5 * dtor * t3\n )\n return perigee", "def dawn_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n \n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dawn(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n \n dawn = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dawn", "def moon_phase(self, date):\n \n jd = self._julianday(date.day, date.month, date.year)\n DT = pow((jd - 2382148), 2) / (41048480*86400)\n T = (jd + DT - 2451545.0) / 36525\n T2 = pow(T,2)\n T3 = pow(T,3)\n D = 297.85 + (445267.1115*T) - (0.0016300*T2) + (T3/545868)\n D = radians(self._proper_angle(D))\n M = 357.53 + (35999.0503*T)\n M = radians(self._proper_angle(M))\n M1 = 134.96 + (477198.8676*T) + (0.0089970*T2) + (T3/69699)\n M1 = radians(self._proper_angle(M1))\n elong = degrees(D) + 6.29*sin(M1)\n elong -= 2.10*sin(M)\n elong += 1.27*sin(2*D - M1)\n elong += 0.66*sin(2*D)\n elong = self._proper_angle(elong)\n moon = int(floor(((elong + 6.43) / 360) * 28))\n if moon == 28:\n moon = 0\n \n return moon", "def _get_wall_clock_step_time_threshold(self):\n if self.constants.physical:\n sim = self.mujoco_simulation.mj_sim\n return float(sim.nsubsteps) * sim.model.opt.timestep\n else:\n # No minimum threshold for 
simulation.\n return 0", "def calc_swd(lon, lat, hour, doy):\n\n lon = -lon\n sda = 0.409 * np.cos(2. * np.pi * (doy - 173.) / 365.)\n sinlea = np.sin(2. * np.pi * lat / 360.) * np.sin(sda) - \\\n np.cos(2. * np.pi * lat / 360.) * np.cos(sda) * \\\n np.cos(2. * np.pi * (hour*3600.) / 86400. - 2. * np.pi * lon / 360.)\n sinlea = np.maximum(sinlea, 1e-9)\n Tr = (0.6 + 0.2 * sinlea)\n swin = 1368. * Tr * sinlea\n\n return swin", "def hour_hands():\n secX = (HYP * math.cos(math.radians(move_sec))) + 200\n secY = (HYP * math.sin(math.radians(move_sec))) + 200\n pygame.draw.line(screen, RED, (200, 200), (secX, secY), 2)\n\n \"\"\"Runs and Draws the Minutes-Hand\"\"\"\n minX = (HYP * math.cos(math.radians(move_min))) + 200\n minY = (HYP * math.sin(math.radians(move_min))) + 200\n pygame.draw.line(screen, BLACK, (200, 200), (minX, minY), 3)\n\n \"\"\"Runs and Draws the Hours-Hand\"\"\"\n hrX = (HYP * math.cos(math.radians(move_hr))/1.5) + 200\n hrY = (HYP * math.sin(math.radians(move_hr))/1.5) + 200\n pygame.draw.line(screen, BLACK, (200, 200), (hrX, hrY), 3)", "def timer_lights_on_off_room1():\n localtime = time.localtime(time.time())[3] # Hour of the day\n day_number = days_since_start()\n if day_number < 30:\n return True # Lights On\n elif day_number >= 30 and day_number < 60:\n if localtime >= 10 and localtime < 16:\n return False # Lights Off\n else:\n return True # Lights On\n elif day_number >= 60 and day_number < 90:\n if localtime >= 6 and localtime < 18:\n return False # Lights Off\n else:\n return True # Lights On\n else:\n if localtime >= 0 and localtime < 6:\n return True # Lights On\n else:\n return False # Lights Off", "def zodiac(cls, tee):\n return quotient(float(cls.solar_longitude(tee)), 30) + 1", "def update(self, time):\n\n delta_J2000 = self.time - constant.J2000_DATE\n n_days_J2000 = delta_J2000.days + delta_J2000.seconds/86400\n\n mean_lon_sun = 280.460 + 0.9856474*n_days_J2000\n mean_lon_sun %= 360.0\n mean_lon_sun *= constant.DEG_TO_RAD\n\n mean_anomaly_sun = 357.528 + 0.9856003*n_days_J2000\n mean_anomaly_sun %= 360.0\n mean_anomaly_sun *= constant.DEG_TO_RAD\n\n ecliptic_lon_sun = ( mean_lon_sun/constant.DEG_TO_RAD +\n 1.915*math.sin(mean_anomaly_sun) +\n 0.020*math.sin(2.0*mean_anomaly_sun) )\n ecliptic_lon_sun *= constant.DEG_TO_RAD\n\n dist_earth_to_sun = (1.00014 -\n 0.01671*math.cos(mean_anomaly_sun) -\n 0.00014*math.cos(2.0*mean_anomaly_sun) )\n dist_earth_to_sun *= constant.AU_TO_KM\n\n obliquity_ecliptic = 23.439 - 0.0000004*n_days_J2000\n obliquity_ecliptic *= constant.DEG_TO_RAD\n\n x_J2000_sun = math.cos(ecliptic_lon_sun)\n y_J2000_sun = math.cos(obliquity_ecliptic)*math.sin(ecliptic_lon_sun)\n z_J2000_sun = math.sin(obliquity_ecliptic)*math.sin(ecliptic_lon_sun)\n\n self.direction = vt.Vector([x_J2000_sun, y_J2000_sun, z_J2000_sun])\n self.distance = dist_earth_to_sun\n self.time = time", "def moon_phase(\n datetime_index,\n epsilon=1e-6,\n epoch=2444237.905,\n ecliptic_longitude_epoch=278.833540,\n ecliptic_longitude_perigee=282.596403,\n eccentricity=0.016718,\n moon_mean_longitude_epoch=64.975464,\n moon_mean_perigee_epoch=349.383063,\n):\n # set time to Noon if not otherwise given, as midnight is confusingly close to previous day\n if np.sum(datetime_index.hour) == 0:\n datetime_index = datetime_index + pd.Timedelta(hours=12)\n days = datetime_index.to_julian_date() - epoch\n\n # Mean anomaly of the Sun\n a = (360 / 365.2422) * days\n N = a - 360.0 * np.floor(a / 360.0)\n N = N + ecliptic_longitude_epoch - ecliptic_longitude_perigee\n # Convert from 
perigee coordinates to epoch 1980\n M = a - 360.0 * np.floor(N / 360.0)\n\n m = torad(M)\n e = m.copy()\n while 1:\n delta = e - eccentricity * np.sin(e) - m\n e = e - delta / (1.0 - eccentricity * np.cos(e))\n if abs(delta).max() <= epsilon:\n break\n\n Ec = sqrt((1 + eccentricity) / (1 - eccentricity)) * np.tan(e / 2.0)\n # True anomaly\n Ec = 2 * todeg(np.arctan(Ec))\n # Suns's geometric ecliptic longuitude\n a = Ec + ecliptic_longitude_perigee\n lambda_sun = a - 360.0 * np.floor(a / 360.0)\n\n # Calculation of the Moon's position\n\n # Moon's mean longitude\n a = 13.1763966 * days + moon_mean_longitude_epoch\n moon_longitude = a - 360.0 * np.floor(a / 360.0)\n\n # Moon's mean anomaly\n a = moon_longitude - 0.1114041 * days - moon_mean_perigee_epoch\n MM = a - 360.0 * np.floor(a / 360.0)\n\n # Moon's ascending node mean longitude\n # MN = fixangle(c.node_mean_longitude_epoch - 0.0529539 * day)\n\n evection = 1.2739 * np.sin(torad(2 * (moon_longitude - lambda_sun) - MM))\n\n # Annual equation\n annual_eq = 0.1858 * np.sin(torad(M))\n\n # Correction term\n A3 = 0.37 * np.sin(torad(M))\n\n MmP = MM + evection - annual_eq - A3\n\n # Correction for the equation of the centre\n mEc = 6.2886 * np.sin(torad(MmP))\n\n # Another correction term\n A4 = 0.214 * np.sin(torad(2 * MmP))\n\n # Corrected longitude\n lP = moon_longitude + evection + mEc - annual_eq + A4\n\n # Variation\n variation = 0.6583 * np.sin(torad(2 * (lP - lambda_sun)))\n\n # True longitude\n lPP = lP + variation\n\n # Calculation of the phase of the Moon\n\n # Age of the Moon, in degrees\n moon_age = lPP - lambda_sun\n\n # Phase of the Moon\n moon_phase = (1 - np.cos(torad(moon_age))) / 2.0\n return moon_phase\n # return pd.Series(moon_phase, index=datetime_index)", "def lunar_day_from_moment(cls, tee):\n return quotient(cls.lunar_phase(tee), 12) + 1", "def morning_twilight_12(self, date=None):\n self.site.horizon = self.horizon12\n self._set_site_date(date)\n r_date = self.site.next_rising(self.sun)\n r_date = self.date_to_local(r_date.datetime())\n return r_date", "def get_chime_time(self):\n actual_time = datetime(year=self.start_time.year, month=self.start_time.month, day=self.start_time.day,\n hour=self.start_time.hour, minute=0, second=0, microsecond=0)\n if self.start_time.minute > 30:\n actual_time = actual_time + timedelta(hours=1)\n return actual_time", "def timer_lights_on_off_room2():\n localtime = time.localtime(time.time())[3] # Hour of the day\n day_number = days_since_start()\n if day_number < 30:\n return True # Lights On\n elif day_number >= 30 and day_number < 60:\n if localtime >= 10 and localtime < 16:\n return False # Lights Off\n else:\n return True # Lights On\n elif day_number >= 60 and day_number < 90:\n if localtime >= 6 and localtime < 18:\n return False # Lights Off\n else:\n return True # Lights On\n else:\n if localtime >= 0 and localtime < 6:\n return True # Lights On\n else:\n return False # Lights Off", "def lunar_day_at_or_after(cls, k, tee):\n phase = (k - 1) * 12\n tau = tee + ((1/360) * mod(phase - cls.lunar_phase(tee), 360) * cls.SYNODIC_MONTH)\n a = max(tee, tau - 2)\n b = tau + 2\n return invert_angular(cls.lunar_phase, phase, a, b)", "def calc_optimal_spacing(sun_properties, tilt_angle, module_length):\n h = module_length * sin(tilt_angle)\n D1 = h / tan(radians(sun_properties.worst_sh))\n D = max(D1 * cos(radians(180 - sun_properties.worst_Az)), D1 * cos(radians(sun_properties.worst_Az - 180)))\n return D", "def _unit_mo(self):\n return (((self.time_base * 60.0) * 24.0) * 365.0) / 12", 
"def get_weather_with_time(time):\n global DARK\n\n if TIME in range(6, 9):\n DARK = False\n return 1\n elif TIME in range(9, 13):\n return 2\n elif TIME in range(13, 16):\n return 3\n elif TIME in range(16, 19):\n if HAS_RAINCOAT:\n return 4\n else:\n if not NICE_WEATHER:\n add_strength(False, 10)\n return 5\n\n elif TIME in range(19, 22):\n if HAS_RAINCOAT:\n return 7\n else:\n if not NICE_WEATHER:\n add_strength(False, 10)\n return 6\n\n else: # 9 - 6am\n DARK = True\n if HAS_FLASHLIGHT:\n return 9\n else:\n return 8", "def time_interval_prop(self, time_step, nsteps):\n world.time = TimeAxis(0.0, int(nsteps), float(time_step))\n print(\"Setting time\")", "def test_check_sun_above_horizon():\n pass", "def time_period(s,h=30):\n\n t = 0\n\n old_z, pass_1 = 0, None\n\n while(True):\n k1 = h*sdot(s)\n k2 = h*sdot(s+k1/2)\n k3 = h*sdot(s+k2/2)\n k4 = h*sdot(s+k3)\n\n s = s+(k1+2*k2+2*k3+k4)/6\n t = t+h\n\n if (s[2]>=0 and old_z<0):\n dt = -s[2]/s[5]\n t2 = t+dt\n\n if pass_1 is None:\n pass_1 = t2\n else:\n return t2-pass_1\n\n old_z = s[2]", "def _work_hour_value(self):\n if self.month_workdays == 0 or self.workday_hours == 0:\n self.work_hour_value = 0\n else:\n self.work_hour_value = round(self.wage / self.month_workdays / self.workday_hours, 2)", "def hp(self):\n minute_int, second = divmod(self.minute, 1)\n if self.positive:\n return self.degree + (minute_int / 100) + (second * 0.006)\n else:\n return -(self.degree + (minute_int / 100) + (second * 0.006))", "def wall_time(self):", "def time_function(t):\n\n omega = np.pi\n return np.sin(omega * t) + np.sin(10 * omega * t) + np.sin(20 * omega * t)", "def Elongation(body, time):\n angle = PairLongitude(body, Body.Sun, time)\n if angle > 180.0:\n visibility = Visibility.Morning\n esep = 360.0 - angle\n else:\n visibility = Visibility.Evening\n esep = angle\n angle = AngleFromSun(body, time)\n return ElongationEvent(time, visibility, angle, esep)", "def evening_twilight_6(self, date=None):\n self.site.horizon = self.horizon6\n self._set_site_date(date)\n r_date = self.site.next_setting(self.sun)\n r_date = self.date_to_local(r_date.datetime())\n return r_date", "def _shifted_time(self):\n return self.sim_time + self.options.time.start_clocktime", "def get_time_in_round() -> int:\n # FIXME - returning negative value for projectiles\n return store.round_time", "def half_step_time(self):\n\n return self.full_step_time() * self.half_to_full_step_time_ratio", "def hp(self):\n if self.positive:\n return self.degree + (self.minute / 100) + (self.second / 10000)\n else:\n return -(self.degree + (self.minute / 100) + (self.second / 10000))", "def civil_twilight(topos, earth, sun, time):\n\n location = earth + topos\n astrocentric = location.at(time).observe(sun).apparent()\n alt, _, _ = astrocentric.altaz('standard')\n return alt.degrees <= -6.0 # definition of civil twilight", "def windcal(v,u):\r\n \r\n ws = (u**2 + v**2)**0.5\r\n wd = np.arctan2(u,v)\r\n wd_ang = wd *180/np.pi\r\n wd_ang = wd_ang + 180\r\n\r\n return wd_ang,ws", "def get_duration_heat(hobo, start, stop):\n alltimes = []\n heat = 0\n numsecs = 0\n for i, t in enumerate(hobo[\"Temp\"][start:stop]):\n if t > TEMP_FLOOR:\n alltimes.append(hobo[\"Time\"][start:stop][i])\n heat += 1/60.0 * t\n numsecs+=1\n if len(alltimes) < 1 : return (timedelta(0),0,0)\n begin = min(alltimes)\n end = max(alltimes)\n return(end-begin,numsecs, heat)", "def time(self):\n return sum(self._interval) * .5", "def day_from_moment(cls, tee):\n return quotient(cls.lunar_phase(tee), 12) + 1", "def 
run_hour(self) -> int:\n return self.timestamp.hour", "def show_time(self):\n hour = str(datetime.datetime.now().strftime(\"%H\"))\n minute = str(datetime.datetime.now().strftime(\"%M\"))\n\n hour1 = int(hour[0])\n hour2 = int(hour[1])\n minute1 = int(minute[0])\n minute2 = int(minute[1])\n\n self.light_number(self.numbers[hour1], [0, 5])\n self.light_number(self.numbers[hour2], [0, 0])\n self.light_number(self.numbers[minute1], [5, 5])\n self.light_number(self.numbers[minute2], [5, 0])", "def timeStep(self):\n return self.params['h']", "def unit_mo(self):\n return (((self.time_base * 60.0) * 24.0) * 365.0) / 12", "def new_moon_before(cls, tee):\n varepsilon = pow(2, -1000)\n tau = tee - ((1/360) * cls.lunar_phase(tee) * cls.SYNODIC_MONTH)\n return binary_search(tau - 1, min(tee, tau + 1),\n lambda l, u: cls.zodiac(l) == cls.zodiac(u) or u - l < varepsilon,\n lambda x: cls.lunar_phase(x) < 180)", "def initialtime_hour(self):\n return self._get_time_info([\"Initial_Time_H\", \"initialTimeHour\"])", "def __init__(self, h=0, m=0, s=0, after_sunrise=False, after_sunset=False):\n assert (after_sunrise and after_sunset) == False, \\\n \"Must not specify both after_sunrise and after_sunset\"\n \n self.time = (h, m, s)\n self.after_sunrise = after_sunrise\n self.after_sunset = after_sunset", "def _calcPlungerMoveTime(self, move_steps):\n sd = self.sim_state\n start_speed = sd['start_speed']\n top_speed = sd['top_speed']\n cutoff_speed = sd['cutoff_speed']\n slope = sd['slope']\n microstep = sd['microstep']\n\n slope *= 2500.0\n if microstep:\n move_steps = move_steps / 8.0\n theo_top_speed = sqrt((4.0 * move_steps*slope) + start_speed ** 2.0)\n # If theoretical top speed will not exceed cutoff speed\n if theo_top_speed < cutoff_speed:\n move_t = theo_top_speed - (start_speed/slope)\n else:\n theo_top_speed = sqrt(((2.0*move_steps*slope) +\n ((start_speed**2.0+cutoff_speed**2.0)/2.0)))\n # If theoretical top speed with exceed cutoff speed but not\n # reach the set top speed\n if cutoff_speed < theo_top_speed < top_speed:\n move_t = ((1 / slope) * (2.0 * theo_top_speed - start_speed -\n cutoff_speed))\n # If start speed, top speed, and cutoff speed are all the same\n elif start_speed == top_speed == cutoff_speed:\n move_t = (2.0 * move_steps) / top_speed\n # Otherwise, calculate time spent in each phase (start, constant,\n # ramp down)\n else:\n ramp_up_halfsteps = ((top_speed ** 2.0 - start_speed ** 2.0) /\n (2.0 * slope))\n ramp_down_halfsteps = ((top_speed ** 2.0 - cutoff_speed ** 2.0) /\n (2.0 * slope))\n if (ramp_up_halfsteps + ramp_down_halfsteps) < (2.0 * top_speed):\n ramp_up_t = (top_speed - start_speed) / slope\n ramp_down_t = (top_speed - cutoff_speed) / slope\n constant_halfsteps = (2.0 * move_steps - ramp_up_halfsteps -\n ramp_down_halfsteps)\n constant_t = constant_halfsteps / top_speed\n move_t = ramp_up_t + ramp_down_t + constant_t\n return move_t", "def get_night(self, time):\n \n if time.tzinfo is not None:\n # time is aware\n \n # convert time to station time zone\n time = time.astimezone(self.time_zone)\n \n if time.hour < 12:\n time -= datetime.timedelta(hours=12)\n \n return time.date()", "def rhe(m):\n \n m = m*u.kg.to(u.M_sun)\n \n logr = np.full(m.shape,0)\n \n iless = np.where(m<=2.5)\n igreater = np.where(m>2.5)\n \n logr[iless] = 3.0965 - 2.013*np.log10(m[iless])\n logr[igreater] = 0.0557*(np.log10(m[igreater])-0.172)**-2.5\n return (10**logr)*u.Rsun.to(u.m)", "def stormEnd(tp, Ks, F, Fp, presHead, thetaSat, thetaInit, endingTime):\n\n numeratorLN = Fp + 
np.absolute(presHead)*(thetaSat - thetaInit)\n denomLN = F + np.absolute(presHead)*(thetaSat - thetaInit)\n naturalLog = np.log(numeratorLN/denomLN)\n\n product1 = np.absolute(presHead)*(thetaSat - thetaInit)*naturalLog\n brackets = F - Fp + product1\n\n product2 = (1/Ks)*brackets\n time = tp + product2 - endingTime\n return time", "def from_min_to_day(time):\n return str(round(int(time) / (60 * 8), 1))", "def fahrenheit(T_in_celsius):\n return (T_in_celsius * 9 / 5) + 32", "def calculate_dew_point(temp, hum):\n return temp - (100 - hum) / 5", "def seasonal_pattern(season_time):\r\n return np.where(season_time < 0.4,\r\n np.cos(season_time * 2 * np.pi),\r\n 1 / np.exp(3 * season_time))", "def seasonal_pattern(season_time):\r\n return np.where(season_time < 0.4,\r\n np.cos(season_time * 2 * np.pi),\r\n 1 / np.exp(3 * season_time))" ]
[ "0.6528656", "0.63950557", "0.60731083", "0.5948047", "0.58831996", "0.58726454", "0.586126", "0.58426833", "0.58409107", "0.5811609", "0.57901573", "0.5728057", "0.56637347", "0.56392944", "0.56149286", "0.5611691", "0.55851465", "0.5576015", "0.55250823", "0.5508363", "0.5496339", "0.5484336", "0.5480274", "0.54623485", "0.5459303", "0.54575485", "0.5407863", "0.5407617", "0.5406497", "0.53856826", "0.5381907", "0.5359786", "0.53371555", "0.5320649", "0.5308598", "0.53072464", "0.530641", "0.5300217", "0.5299178", "0.5293525", "0.52881694", "0.5287512", "0.5280069", "0.52794737", "0.5263685", "0.52558106", "0.5254811", "0.5251531", "0.5242317", "0.5238359", "0.52323884", "0.52277243", "0.5221858", "0.5221761", "0.52170944", "0.5215655", "0.52118087", "0.5206533", "0.52062696", "0.5198106", "0.51975125", "0.5189848", "0.5187714", "0.5179213", "0.5173651", "0.51597846", "0.5159717", "0.51584", "0.51497996", "0.51425415", "0.5136089", "0.51306427", "0.51197517", "0.5118599", "0.51069885", "0.51027703", "0.5089823", "0.508705", "0.5077411", "0.5074757", "0.5067561", "0.50606775", "0.50545335", "0.5049823", "0.5049559", "0.5046349", "0.50427336", "0.5041238", "0.5037288", "0.50369674", "0.5030613", "0.50302124", "0.50299925", "0.50258726", "0.50131094", "0.5012278", "0.5008274", "0.50076914", "0.50074977", "0.5006946", "0.5006946" ]
0.0
-1
Return sunrise time. Calculates the time in the morning when the sun is 0.833 degrees below the horizon. This is to account for refraction.
def sunrise(self, date=None, local=True):
    if self.astral is None:
        self.astral = Astral()
    if date is None:
        date = datetime.date.today()
    sunrise = self.astral.sunrise_utc(date, self.latitude, self.longitude)
    if local:
        return sunrise.astimezone(self.tz)
    else:
        return sunrise
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def alt_sunrise(cls, date):\n rise = cls.UJJAIN.dawn(date, angle(0, 47, 0))\n return 1/24 * 1/60 * iround(rise * 24 * 60)", "def sundial_time(cls, tee):\n date = Clock.fixed_from_moment(tee)\n time = mod(tee, 1)\n q = ifloor(4 * time)\n if q == 0:\n a = cls.sunset(date - 1)\n b = cls.sunrise(date)\n t = Clock.days_from_hours(-6)\n elif q == 3:\n a = cls.sunset(date)\n b = cls.sunrise(date + 1)\n t = Clock.days_from_hours(18)\n else:\n a = cls.sunrise(date)\n b = cls.sunset(date)\n t = Clock.days_from_hours(6)\n return a + (2 * (b - a) * (time - t))", "def _get_sun_rise_set_time(self, sun_time):\n if sun_time:\n return datetime.fromtimestamp(sun_time).strftime(self.time_format)\n return sun_time", "def sunrise(cls, date):\n return (date + Clock.days_from_hours(6) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n ((1577917828/1582237828 / 360) *\n (cls.ascensional_difference(date, cls.LOCATION) +\n (1/4 * cls.solar_sidereal_difference(date)))))", "def sunrise(self):\r\n try:\r\n return str(self.connect()['sys']['sunrise'])\r\n except:\r\n return '@weather_sunrise'", "def sunrise_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n sunrise = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return sunrise", "def equation_of_time(cls, date):\n offset = cls.sine(cls.mean_position(date, cls.ANOMALISTIC_YEAR))\n equation_sun = (offset * angle(57, 18, 0) * (14/360 - (abs(offset) / 1080)))\n return ((cls.daily_motion(date) / 360) * (equation_sun / 360) * cls.SIDEREAL_YEAR)", "def estimate_sunrise_sunset(self, date):\n\n if self.source_type != 'solar':\n raise ValueError(\"You can only estimate sunrise and sunset for \"\n \"solar sources.\")\n\n date = pd.Timestamp(date)\n historic_data = self.data\n # The range is 14 days ago to the end of yesterday\n start_date = date - datetime.timedelta(days=14)\n end_date = date - datetime.timedelta(hours=1)\n\n # We grab all hours where actual power is greater than 0\n relevant_data = historic_data[start_date:end_date]\n daylight_data = relevant_data[relevant_data['actuals'] > 0]\n\n # We do this to stop a warning from appearing, we know it's a copy\n daylight_data.is_copy = False\n daylight_data['hours'] = daylight_data.index.hour\n\n # Find the min and max hour for each day 
where we have positive\n # observed power generation.\n sunrises = daylight_data.groupby(daylight_data.index.date).min()['hours']\n sunsets = daylight_data.groupby(daylight_data.index.date).max()['hours']\n\n # We round in order to have an integer value for sunrise and sunset.\n average_sunrise = int(max(round(sunrises.mean()) - 1, 0))\n average_sunset = int(min(round(sunsets.mean()) + 1, 23))\n\n return average_sunrise, average_sunset", "def sun_set_rise_times(self, date=None):\n rstimes = (self.sunset(date=date),\n self.evening_twilight_12(date=date),\n self.evening_twilight_18(date=date),\n self.morning_twilight_18(date=date),\n self.morning_twilight_12(date=date),\n self.sunrise(date=date))\n return rstimes", "def estimate_sunrise_sunset(self, date, verbose=True):\n if self.source_type != 'solar':\n raise ValueError(\"You can only estimate sunrise and sunset for \"\n \"solar sources.\")\n\n date = pd.Timestamp(date)\n\n if self.diurnal_pattern is None:\n if verbose:\n print(\"Warning: Source {} has no diurnal pattern, estimating \"\n \"sunrise and sunset using average of past data.\"\n .format(self.name), file=sys.stderr)\n return Source.estimate_sunrise_sunset(self, date)\n\n if verbose:\n print(\"{} {}: Using Diurnal Pattern to estimate sunrise and sunset\"\n .format(self.name, date.date()))\n\n diurnal_pattern = self.diurnal_pattern\n daily_pattern = diurnal_pattern[date:date+datetime.timedelta(hours=23)]\n\n sunrise, sunset = None, None\n\n # This will walk through finding first sun hour and first night hour\n for hour, pattern in enumerate(daily_pattern.values):\n if sunrise is None and pattern > 0:\n sunrise = hour\n\n # If sun has risen, and we have not found night and we reach a 0\n if sunrise is not None and sunset is None and pattern == 0:\n sunset = hour\n\n if sunrise is None and sunset is None:\n raise ValueError(\"No solar power was generated on {}\".format(date))\n\n return sunrise, sunset", "def _sunrise_sunset(self, seconds=None, milliseconds=None, hour=None, freq=None, temp_start=None, temp_end=None, setting=True):\n FUDGE_FACTOR = 0.86\n if hour==None:\n # Work out what the defaults should be\n ## MOVE IT INSIDE THE Override values.\n t0 = temp_start.split('K')[0]\n t1 = temp_end.split('K')[0]\n if t0 > t1:\n temp_step = -100\n x_start = 0\n x_step_amount = 1\n else:\n temp_step = 100\n x_start = 60\n x_step_amount = -1\n temp_0 = int(t0)\n temp_n = int(t1)\n # You can override these defaults if either temp_start or temp_end is set\n if temp_start:\n try:\n _exists = NAMED_COLOURS[temp_start.lower()]\n except (TypeError,ValueError): # Means the starting temp has NOT been provided, use default\n pass\n except KeyError:\n logging.warning(\"Sunrise/sunset: Your starting colour temperature '{}' is not a valid colour temperature\".format(temp_start))\n if temp_end:\n try:\n _exists = NAMED_COLOURS[temp_end.lower()]\n except (TypeError, ValueError): # Means the ending temp has NOT been provided, use default\n pass\n except KeyError:\n logging.warning(\"Sunrise/sunset: Your ending colour temperature '{}' is not a valid colour temperature\".format(temp_end))\n\n #Add in a fudge factor to cater for CPU doing other things:\n #Calculate our z scaling factor:\n target_time = self.clean_time_in_milliseconds(seconds, milliseconds, default_seconds=1, minimum_milliseconds=1000)\n z_factor = (target_time*FUDGE_FACTOR) / 2.564949357\n x_step = x_start\n #And run the loop\n t1 = time.time()\n check = True #We only check the current values on the first run\n for temp in 
xrange(temp_0,temp_n,temp_step):\n if self._sequence_stop_signal: #Bail if sequence should stop\n return None\n k = u\"%sk\" % temp\n self.fade(k, fade_time=((100+z_factor)/(65-x_step)), check=check) #ms, slows down as sunset progresses\n x_step += x_step_amount\n check=False\n t2 = time.time()\n logging.info(\"%ss, target=%ss\" % ((t2-t1),target_time/1000.0))\n else:\n temp_0=temp_start[0].split('K')[0]\n\t temp_n=temp_end[0].split('K')[0]\n if self.p_alarm != []:\n self.teardown_alarm()\n process_alarm=[]\n for tt in range(0,len(hour)):\n milliseconds=0\n proc_hour=hour[tt]\n\t\talarm_arg=(proc_hour,temp_0,temp_n,FUDGE_FACTOR,freq,seconds[tt],milliseconds)\n \n process_alarm.append(Process(target=self.schedule_alarm,args=alarm_arg))\n [pp.start() for pp in process_alarm] # Start processes in the background which contain the schedule of the alarm\n self.p_alarm=process_alarm", "def computeSunTime(self, latitude, longitude, startDate, endDate): \n self.sun = sun(lat=latitude, long=longitude)\n dateTime = datetime.datetime.combine(startDate, datetime.time(hour=8))\n while dateTime.date() <= endDate: \n daytimeStart, daytimeEnd = self.computeDaytimeStartEnd(dateTime)\n self.sunriseTimeDict[dateTime.date()] = daytimeStart\n self.sunsetTimeDict[dateTime.date()] = daytimeEnd\n dateTime += self.oneDayDelta", "def morning_twilight_6(self, date=None):\n self.site.horizon = self.horizon6\n self._set_site_date(date)\n r_date = self.site.next_rising(self.sun)\n r_date = self.date_to_local(r_date.datetime())\n return r_date", "def _solar_time(date, lon, lat):\n # IDL is computing time_t as hours and fractional minutes\n time_t = ee.Date(date).get('hour').add(\n ee.Date(date).get('minute').divide(60))\n\n # This will return the hour floating point value\n # time_t = ee.Date(date).get('hour').add(ee.Date(date).getFraction('hour'))\n\n # CGM - DOY and hour could be images in order to make these expressions\n julian = _to_jd(date)\n\n # Sunrise time\n julian_ = time_t.divide(24.0).add(julian)\n j_cen = julian_.add(0.5 - 2451545.0).divide(36525.0)\n # CGM - Does the mod happen before or after the multiply\n lon_sun = j_cen.multiply(0.0003032).add(36000.76983) \\\n .multiply(j_cen).mod(360.0).add(280.46646).subtract(360)\n an_sun = j_cen.multiply(-0.0001537).add(35999.05029) \\\n .multiply(j_cen).add(357.52911)\n ecc = j_cen.multiply(0.0000001267).add(0.000042037) \\\n .multiply(j_cen).multiply(-1).add(0.016708634)\n ob_ecl = j_cen.multiply(-0.001813).add(0.00059) \\\n .multiply(j_cen).add(46.815) \\\n .multiply(j_cen).multiply(-1).add(21.448) \\\n .divide(60.0).add(26).divide(60).add(23)\n ob_corr = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).cos() \\\n .multiply(0.00256).add(ob_ecl)\n var_y = ob_corr.divide(2.0).multiply(deg2rad).tan().multiply(\n ob_corr.divide(2.0).multiply(deg2rad).tan())\n eq_t = (\n lon_sun.multiply(2.0).multiply(deg2rad).sin().multiply(var_y)\n .subtract(an_sun.multiply(deg2rad).sin().multiply(ecc).multiply(2.0))\n .add(an_sun.multiply(deg2rad).sin()\n .multiply(lon_sun.multiply(2.0).multiply(deg2rad).cos())\n .multiply(var_y).multiply(ecc).multiply(4.0))\n .subtract(lon_sun.multiply(4.0).multiply(deg2rad).sin()\n .multiply(var_y).multiply(var_y).multiply(0.5))\n .subtract(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(ecc).multiply(ecc).multiply(1.25))\n .multiply(4.0).multiply(rad2deg))\n sun_eq = (\n an_sun.multiply(deg2rad).sin().multiply(\n j_cen.multiply(0.000014).add(0.004817)\n .multiply(j_cen).multiply(-1).add(1.914602))\n 
.add(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(j_cen.multiply(-0.000101).add(0.019993)))\n .add(an_sun.multiply(3.0).multiply(deg2rad).sin().multiply(0.000289)))\n sun_true = sun_eq.add(lon_sun)\n sun_app = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).sin() \\\n .multiply(-0.00478).subtract(0.00569).add(sun_true)\n\n # CGM - Intentionally not converting back to degrees\n d = ob_corr.multiply(deg2rad).sin() \\\n .multiply(sun_app.multiply(deg2rad).sin()) \\\n .asin()\n\n # CGM - Functions below are lat/lon dependent and can be written as\n # ee.Image expressions\n # CGM - d is still in radians, not converting\n ha_t = lat.expression(\n 'acos((cos(90.833 * pi / 180) / (cos(lat) * cos(d))) - tan(lat) * tan(d))'\n ' * (180 / pi)',\n {'lat': lat, 'd': d, 'pi': math.pi})\n\n # print('\\n{:10s} {:.12f}'.format('julian_', julian_.getInfo()))\n # print('{:10s} {:.12f}'.format('time_t', time_t.getInfo()))\n # print('{:10s} {:.12f}'.format('j_cen', j_cen.getInfo()))\n # print('{:10s} {:.12f}'.format('lon_sun', lon_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('an_sun', an_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('ecc', ecc.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_ecl', ob_ecl.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_corr', ob_corr.getInfo()))\n # print('{:10s} {:.12f}'.format('var_y', var_y.getInfo()))\n # print('{:10s} {:.12f}'.format('eq_t', eq_t.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_eq', sun_eq.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_true', sun_true.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_app', sun_app.getInfo()))\n # print('{:10s} {:.12f}'.format('d', d.getInfo()))\n\n return d, eq_t, ha_t", "def sunrise(self, date=None):\n self.site.horizon = self.horizon\n self._set_site_date(date)\n r_date = self.site.next_rising(self.sun)\n r_date = self.date_to_local(r_date.datetime())\n return r_date", "def computeDaytimeStartEnd(self, date):\n dayStartTime = datetime.datetime.combine(date.date(), datetime.time())\n #compute sunrise time for that date\n (h, m, s) = self.sun.sunrise(when=date)\n time_delta = datetime.timedelta(hours=h, minutes=m, seconds=s)\n sunrise_datetime = dayStartTime + time_delta\n #print(sunrise_datetime) \n #compute sunset time for that date \n (h, m, s) = self.sun.sunset(when=date)\n time_delta = datetime.timedelta(hours=h, minutes=m, seconds=s)\n sunset_datetime = dayStartTime + time_delta\n \n return (sunrise_datetime, sunset_datetime)", "async def sunrise(self, aware=False, today=False, days_offset=0) -> dt.datetime:\n return await self.AD.sched.sunrise(aware, today=today, days_offset=days_offset)", "def mask_nighttime(lon, lat, date=date, mask_daytime=mask_daytime,\n ref_date=datetime.datetime(1899, 12, 31, 12),\n buffer_hours=buffer_hours, debug=False):\n # --- get lat and lon values from columns\n if debug:\n print((\"--- (s4-1) %s seconds ---\" % (time.time() - start_time)))\n # --- get sunrise and sunset for location\n o = ephem.Observer()\n # set lat (decimal?), lon (decimal?), and date (UTC)\n o.lat = str(lat)\n o.long = str(lon)\n o.date = date\n # planetary body\n s = ephem.Sun()\n if debug:\n print((\"--- (s4-2) %s seconds ---\" % (time.time() - start_time)))\n\n # Compute sun vs observer\n s.compute()\n if debug:\n print((\"--- (s4-3) %s seconds ---\" % (time.time() - start_time)))\n\n # Work out if day or night based on sunrises and sunsets\n mask_value = 0\n try:\n\n # get sunrise time and date\n next_rising = o.next_rising(s)\n next_setting = o.next_setting(s)\n\n # convert 
to datetime.datetime\n next_rising = add_days(ref_date, next_rising)\n next_setting = add_days(ref_date, next_setting)\n\n # did the sun last rise or set? (inc. any adjustments)\n sun_last_rose = False\n if next_setting < next_rising:\n sun_last_rose = True\n\n # Add buffer to rising/setting if provided with buffer_hours\n if buffer_hours != 0:\n\n # Calculate last rise\n previous_rising = o.previous_rising(s)\n # convert to datetime.datetime\n previous_rising = add_days(ref_date, previous_rising)\n # Calculate last setting\n previous_setting = o.previous_setting(s)\n # convert to datetime.datetime\n previous_setting = add_days(ref_date, previous_setting)\n\n # Calculate absolute difference\n time_from_rise = (date-previous_rising).total_seconds()\n time_till_set = (date-next_setting).total_seconds()\n time_from_set = (date-previous_setting).total_seconds()\n time_till_rise = (date-next_rising).total_seconds()\n\n # If absolutely difference less than buffer\n if abs(time_from_rise)/60./60. <= buffer_hours:\n mask_value = 1\n elif abs(time_till_set)/60./60. <= buffer_hours:\n mask_value = 1\n elif abs(time_from_set)/60./60. < buffer_hours:\n mask_value = 1\n elif abs(time_till_rise)/60./60. < buffer_hours:\n mask_value = 1\n\n # --- Check if daytime or nighttime and mask if condition met.\n if sun_last_rose:\n if mask_daytime:\n # ... and has not set yet, it must be daytime\n if (date < next_setting):\n mask_value = 1\n\n # if the sun last set... (mask nighttime is default)\n else:\n # if mask nighttime (aka not mask_daytime)\n if not mask_daytime:\n # ... and has not risen yet, it must be nighttime\n if (date < next_rising):\n mask_value = 1\n\n # Add gotcha for locations where sun is always up.\n except AlwaysUpError:\n if mask_daytime:\n mask_value = 1\n\n # Add gotcha for locations where sun is always down.\n except NeverUpError:\n if not mask_daytime:\n mask_value = 1\n\n except:\n print('FAIL')\n sys.exit()\n\n # Mask value in array\n return mask_value", "def morning_twilight_18(self, date=None):\n self.site.horizon = self.horizon18\n self._set_site_date(date)\n r_date = self.site.next_rising(self.sun)\n r_date = self.date_to_local(r_date.datetime())\n return r_date", "def GetSunriseSunset(latitude_deg, longitude_deg, utc_datetime, timezone):\n\n # Day of the year\n day = solar.GetDayOfYear(utc_datetime)\n\n # Solar hour angle\n SHA = ((timezone)* 15.0 - longitude_deg)\n\n # Time adjustment\n TT = (279.134+0.985647*day)*math.pi/180\n\n # Time adjustment in hours\n time_adst = ((5.0323 - 100.976*math.sin(TT)+595.275*math.sin(2*TT)+\n 3.6858*math.sin(3*TT) - 12.47*math.sin(4*TT) - 430.847*math.cos(TT)+\n 12.5024*math.cos(2*TT) + 18.25*math.cos(3*TT))/3600)\n \n # Time of noon\n TON = (12 + (SHA/15.0) - time_adst)\n \n sunn = (math.pi/2-(23.45*math.pi/180)*math.tan(latitude_deg*math.pi/180)*\n math.cos(2*math.pi*day/365.25))*(180/(math.pi*15))\n\n # Sunrise_time in hours\n sunrise_time = (TON - sunn + time_adst)\n \n # Sunset_time in hours\n sunset_time = (TON + sunn - time_adst) \n\n sunrise_time_dt = date_with_decimal_hour(utc_datetime, sunrise_time) \n sunset_time_dt = date_with_decimal_hour(utc_datetime, sunset_time) \n\n return sunrise_time_dt, sunset_time_dt", "def risetime_calc(self):\n\n # given the transmitter's 20%-80% risetime, and assuming a\n # Gaussian impulse response, calculate the 10%-90% risetime\n # cell G3\n\n #self.tx_1090_rise = 1.518*self.tx_2080_rise #Fix 1 : Formula not same as in Cell T7\n self.tx_1090_rise = 329*1000/self.tx_2080_rise\n \n # calculate the 
effective risetimes for the fiber channel, given\n # the bandwidths calculated in the previous section, assuming\n # a Gaussian impulse response model\n self.cd_1090_rise = 0.48E6 / self.bw_cd\n self.md_1090_rise = 0.48E6 / self.bw_md\n\n # calculate the risetime for the link receiver, given its\n # bandwidth and assuming a single pole impulse response\n # Cell T7\n self.rx_1090_rise = 0.329E6/self.rx_bw\n\n # calculate the risetime for the test receiver used for transmitter\n # eye displays, given its bandwidth and assuming a single pole\n # response\n self.rx_txeye_1090_rise = 0.329E6 / self.txeye_rx_bw\n\n # calculate Te from column H and Tc from column I\n tr_tx_2 = self.tx_1090_rise**2*self.l_1\n tr_rx_2 = self.rx_1090_rise**2*self.l_1\n tr_cd_2 = np.square(self.cd_1090_rise)\n tr_md_2 = np.square(self.md_1090_rise)\n self.te = np.sqrt(tr_cd_2 + tr_md_2 + tr_tx_2) # column H\n \n self.tc = np.sqrt(tr_cd_2 + tr_md_2 + tr_tx_2 + tr_rx_2) # column I\n \n\n # end of GbE10..risetime_calc", "def calc_rise_time(self, contact, start_time):\n # The percentage from and to of the rise time calculations\n # for example 0.1 of the final value to 0.9 of it\n from_percent = 0.1\n to_percent = 0.9\n times = self.get_data_set(\"time\")\n assert times[0] <= start_time < times[-1]\n # The index to start looking from\n starting_index = find_index_of_closets_value(self.get_data_set(\"time\"), start_time)\n # Get relevant voltages\n voltages = self.get_data_set(\"%s InnerVoltage\" % contact)[starting_index:]\n final_voltage = voltages[-1]\n from_index = find_index_of_closets_value(voltages, from_percent * final_voltage)\n to_index = find_index_of_closets_value(voltages[from_index:], to_percent * final_voltage)\n rise_time = times[starting_index+from_index+to_index] - times[starting_index+from_index]\n return rise_time", "def seconds_since_midnight(time):\n return time.hour * 3600 + time.minute * 60 + time.second", "def unit_hr(self):\n return self.time_base * 60.0", "def output(self):\n if self.after_sunrise:\n return \"%02d:%02d:%02dR\" % self.time\n if self.after_sunset:\n return \"%02d:%02d:%02dT\" % self.time\n return \"%02d:%02d:%02d\" % self.time", "def _unit_hr(self):\n return self.time_base * 60.0", "def _shifted_time(self):\n return self.sim_time + self.options.time.start_clocktime", "def night_center(self, date=None):\n sunset = self.sunset(date=date)\n sunrise = self.sunrise(date=sunset)\n center = sunset + timedelta(0, (sunrise - sunset).total_seconds() / 2.0)\n center = self.date_to_local(center)\n return center", "def get_nightly_start_time():\n return 14 # 2PM local Tucson time", "def get_time(cls):\n now = rospy.Time.now()\n return now.secs + now.nsecs*(10**-9) # time in seconds", "def get_sky_ir_temperature(self) -> float:\n self.serial.write(b\"S!\")\n sky_ir_temp = self.__extract_int(self.__read_response(1)[0], b\"!1\")\n\n return round(sky_ir_temp / 100, 2)", "def get_solar_time(longitude_deg, min_date, hour_date, day_date):\n solar_time_min = hour_date * 60 + min_date + 4 * longitude_deg + get_equation_of_time(day_date)\n\n return solar_time_min/60", "def get_mean_sun_angles(self) -> (float, float):\n # Get MTD XML file\n root, _ = self.read_mtd()\n\n # Open zenith and azimuth angle\n zenith_angle = float(root.findtext(\".//SolarZenith\"))\n azimuth_angle = float(root.findtext(\".//SolarAzimuth\"))\n\n return azimuth_angle, zenith_angle", "def initialTime(self):\n return self.params['t0']", "def sunset(cls, date):\n return (date + Clock.days_from_hours(18) + \n ((cls.UJJAIN.longitude - 
cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n (((1577917828/1582237828) / 360) *\n (- cls.ascensional_difference(date, cls.LOCATION) +\n (3/4 * cls.solar_sidereal_difference(date)))))", "def _clock_time(self):\n return self._shifted_time % (24*3600)", "def civil_twilight(topos, earth, sun, time):\n\n location = earth + topos\n astrocentric = location.at(time).observe(sun).apparent()\n alt, _, _ = astrocentric.altaz('standard')\n return alt.degrees <= -6.0 # definition of civil twilight", "def moon_rise(self, date=None):\n self._set_site_date(date)\n moonrise = self.site.next_rising(self.moon)\n moonrise = self.date_to_local(moonrise.datetime())\n ## if moonrise < self.sunset():\n ## moonrise = None\n return moonrise", "def get_time_in_round() -> int:\n # FIXME - returning negative value for projectiles\n return store.round_time", "def sunset_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n sunset = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return sunset", "def startTime(self) -> float:\n try: return self.times[0]\n except IndexError: return 0.0", "def what_night_is_it():\n d = datetime.datetime.utcnow() - datetime.timedelta(7 / 24 + 0.5)\n tonight = int(d.strftime('%Y%m%d'))\n return tonight", "def _prev_shifted_time(self):\n return self._prev_sim_time + self.options.time.start_clocktime", "def sunlongitude(time):\n B0 = 36000.7695\n C0 = 280.4659\n # fmt: off\n A = np.array([19147e-4, 200e-4, 48e-4, 20e-4, 18e-4, 18e-4, \\\n 15e-4, 13e-4, 7e-4, 7e-4, 7e-4, 6e-4, \\\n 5e-4, 5e-4, 4e-4, 4e-4])\n B = np.array([35999.050, 71998.1, 1934, 32964, 19, \\\n 445267, 45038, 22519, 65929, 3035, \\\n 9038, 33718, 155, 2281, 29930, \\\n 31557])\n C = np.array([267.520, 265.1, 145, 158, 159, 208, \\\n 254., 352, 45, 110, 64, 316, \\\n 118., 221, 48, 161])\n # fmt: on\n RAD = 0.0174532925199433\n A[0] = 1.9147 - 0.0048 * time\n tempb = (B * time + C) * RAD\n amp = A * np.cos(tempb)\n sunlon = np.sum(amp)\n sunlon = (sunlon + B0 * time + C0) * RAD\n return sunlon", "def get_time(self):\n return self.get_timed() / 10.0", "def wind_speed(self):\r\n return self._yesterdays_weather.get_average_wind_speed()", "def get_radiation():\n sun_pos = get_sun_position()\n if sun_pos <= POSITION_MIN or sun_pos >= POSITION_MAX:\n return 0\n else:\n # 
Calculate a new delta.\n delta = random.randint(0, RADIATION_DELTA)\n if random.random() > 0.5:\n delta = -1 * delta\n # Calculate the radiation based on the sun position.\n new_radiation = round(-0.1279 * pow(sun_pos, 2) + 46.05 * sun_pos - 3100)\n # Apply the delta and return the value.\n return new_radiation + delta", "def SunPosition(time):\n # Correct for light travel time from the Sun.\n # Otherwise season calculations (equinox, solstice) will all be early by about 8 minutes!\n adjusted_time = time.AddDays(-1.0 / C_AUDAY)\n earth2000 = _CalcEarth(adjusted_time)\n sun2000 = [-earth2000.x, -earth2000.y, -earth2000.z]\n\n # Convert to equatorial Cartesian coordinates of date.\n stemp = _precession(sun2000, adjusted_time, _PrecessDir.From2000)\n sun_ofdate = _nutation(stemp, adjusted_time, _PrecessDir.From2000)\n\n # Convert equatorial coordinates to ecliptic coordinates.\n true_obliq = math.radians(adjusted_time._etilt().tobl)\n return _RotateEquatorialToEcliptic(sun_ofdate, true_obliq, time)", "def MoonPhase(time):\n return PairLongitude(Body.Moon, Body.Sun, time)", "def _clock_day(self):\n return int(self._shifted_time / 86400)", "def dawn_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n \n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dawn(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n \n dawn = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dawn", "def timeCalc(image):\n telheader = astropy.io.fits.open(image)\n UT = telheader[0].header['UT']\n secs = float(UT[6:10])\n mins = float(UT[3:5])\n hours = float(UT[0:2])\n time = secs+mins*60.+hours*(60.*60.)\n\n return time", "def time(self):\n try:\n if self.single_date:\n return self.stime\n else:\n return self.stime + (self.etime - self.stime) / 2\n except TypeError:\n return None", "def _get_twilight(self, datetime_today, lon, lat, alt):\r\n\r\n dusk = self._astral.dusk_utc(datetime_today, lat, lon,\r\n observer_elevation=alt,\r\n depression=self.twilight_alt)\r\n\r\n dawn = self._astral.dawn_utc(datetime_today + _delta_dt, lat, lon,\r\n observer_elevation=alt,\r\n depression=self.twilight_alt)\r\n\r\n return dusk, dawn", "def sunset(self, seconds=None, milliseconds=None, temp_start=None, temp_end=None):\n return self.run_sequence(self._sunrise_sunset, seconds=seconds, 
milliseconds=milliseconds, temp_start=temp_start, temp_end=temp_end, setting=True)", "def rahukaalam_utc(self, date, latitude, longitude):\n \n if date is None:\n date = datetime.date.today()\n\n try:\n sunrise = self.sunrise_utc(date, latitude, longitude)\n sunset = self.sunset_utc(date, latitude, longitude)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n \n octant_duration = (sunset - sunrise) / 8\n\n # Mo,Sa,Fr,We,Th,Tu,Su\n octant_index = [1,6,4,5,3,2,7]\n \n weekday = date.weekday()\n octant = octant_index[weekday]\n \n start = sunrise + (octant_duration * octant)\n end = start + octant_duration\n \n return {'start': start, 'end': end}", "def generate_sunrise_sunset_chart(\n sunrise: datetime.datetime,\n sunset: datetime.datetime,\n current: datetime.datetime,\n):\n\n fig, ax = plt.subplots()\n\n # The bar should shart at sunrise and end at sunset\n # Black bar showing sunset\n ax.barh([1], [time_to_float(sunset)+0.75], color=\"black\")\n # First make an orange bar for the sunset\n ax.barh([1], [time_to_float(sunset)], color=\"orange\")\n # Then make a black bar for sunset\n ax.barh([1], [time_to_float(sunrise)], color=\"black\")\n \n\n # Vertical line to show current time\n ax.axvline(x=time_to_float(current), linewidth=1, color=\"black\", ls=\"--\")\n\n # x-axis labels should be the time\n ax.set_xticks(\n [time_to_float(sunrise), 12, time_to_float(sunset)],\n )\n ax.set_xticklabels(\n [format_time(sunrise), \"12:00\", format_time(sunset)],\n )\n\n # Don't make the graph too wide\n ax.set_xlim([time_to_float(sunrise)-0.75, time_to_float(sunset)+0.75])\n\n # No y-axis labels required\n ax.set_yticks([])\n\n plt.tight_layout()\n\n fig.set_size_inches(7, 3)\n fig.savefig(\"./assets/sun-chart.png\")", "def get_time(self):\n return numpy.linspace(self.header.time_gate_start, \\\n self.header.time_gate_stop, self.num_time_bins())", "def dusk_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dusk(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n dusk = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dusk", "def get_night(self, time):\n \n if time.tzinfo is not None:\n # time is aware\n \n # convert time to station time zone\n time = 
time.astimezone(self.time_zone)\n \n if time.hour < 12:\n time -= datetime.timedelta(hours=12)\n \n return time.date()", "def hmstora(rah,ram,ras):\n\thrs = (float(rah)+(float(ram)/60)+(float(ras)/3600.0)) % 24\n\n\treturn 15*hrs", "def calculate_time(start_time):\r\n return round(time() - start_time, 2)", "def _unit_wk(self):\n return ((self.time_base * 60.0) * 24.0) * 7", "def initialtime_hour(self):\n return self._get_time_info([\"Initial_Time_H\", \"initialTimeHour\"])", "def time_of_day(self):\n return self.time_of_day_value", "def __init__(self, h=0, m=0, s=0, after_sunrise=False, after_sunset=False):\n assert (after_sunrise and after_sunset) == False, \\\n \"Must not specify both after_sunrise and after_sunset\"\n \n self.time = (h, m, s)\n self.after_sunrise = after_sunrise\n self.after_sunset = after_sunset", "def unit_wk(self):\n return ((self.time_base * 60.0) * 24.0) * 7", "def morning_twilight_12(self, date=None):\n self.site.horizon = self.horizon12\n self._set_site_date(date)\n r_date = self.site.next_rising(self.sun)\n r_date = self.date_to_local(r_date.datetime())\n return r_date", "def omega_sun_snodgrass90(lat):\n return differential_rotation(lat, 14.71, -2.39, -1.78)", "def time(self):\n return sum(self._interval) * .5", "def update(self, time):\n\n delta_J2000 = self.time - constant.J2000_DATE\n n_days_J2000 = delta_J2000.days + delta_J2000.seconds/86400\n\n mean_lon_sun = 280.460 + 0.9856474*n_days_J2000\n mean_lon_sun %= 360.0\n mean_lon_sun *= constant.DEG_TO_RAD\n\n mean_anomaly_sun = 357.528 + 0.9856003*n_days_J2000\n mean_anomaly_sun %= 360.0\n mean_anomaly_sun *= constant.DEG_TO_RAD\n\n ecliptic_lon_sun = ( mean_lon_sun/constant.DEG_TO_RAD +\n 1.915*math.sin(mean_anomaly_sun) +\n 0.020*math.sin(2.0*mean_anomaly_sun) )\n ecliptic_lon_sun *= constant.DEG_TO_RAD\n\n dist_earth_to_sun = (1.00014 -\n 0.01671*math.cos(mean_anomaly_sun) -\n 0.00014*math.cos(2.0*mean_anomaly_sun) )\n dist_earth_to_sun *= constant.AU_TO_KM\n\n obliquity_ecliptic = 23.439 - 0.0000004*n_days_J2000\n obliquity_ecliptic *= constant.DEG_TO_RAD\n\n x_J2000_sun = math.cos(ecliptic_lon_sun)\n y_J2000_sun = math.cos(obliquity_ecliptic)*math.sin(ecliptic_lon_sun)\n z_J2000_sun = math.sin(obliquity_ecliptic)*math.sin(ecliptic_lon_sun)\n\n self.direction = vt.Vector([x_J2000_sun, y_J2000_sun, z_J2000_sun])\n self.distance = dist_earth_to_sun\n self.time = time", "def sun_single_day(date):\r\n\r\n\tsun = l.sun(date=date, local=True)\r\n\tsunrise = sun['sunrise']\r\n\tsunset = sun['sunset']\r\n\tday_length = str(sunset-sunrise)\r\n\tsolar_noon = l.solar_noon(date=date, local=True)\r\n\tsolar_zenith = l.solar_elevation(solar_noon.replace(tzinfo=None))\r\n\r\n\treturn {'sunrise':sunrise, 'sunset': sunset, 'daylength': day_length, 'solar_noon': solar_noon, 'zenith': solar_zenith}", "def time_NEURON():\n recorded_time = h.Vector()\n recorded_time.record(h._ref_t)\n return recorded_time", "def _normalizeTime(self, t : float) -> float:\n return (t - self.t0)/self.tau", "def Horizon(time, observer, ra, dec, refraction):\n if not (Refraction.Airless.value <= refraction.value <= Refraction.JplHorizons.value):\n raise Error('Invalid refraction type')\n\n latrad = math.radians(observer.latitude)\n lonrad = math.radians(observer.longitude)\n decrad = math.radians(dec)\n rarad = ra * _HOUR2RAD\n\n sinlat = math.sin(latrad)\n coslat = math.cos(latrad)\n sinlon = math.sin(lonrad)\n coslon = math.cos(lonrad)\n sindc = math.sin(decrad)\n cosdc = math.cos(decrad)\n sinra = math.sin(rarad)\n cosra = math.cos(rarad)\n\n 
# Calculate three mutually perpendicular unit vectors\n # in equatorial coordinates: uze, une, uwe.\n #\n # uze = The direction of the observer's local zenith (straight up).\n # une = The direction toward due north on the observer's horizon.\n # uwe = The direction toward due west on the observer's horizon.\n #\n # HOWEVER, these are uncorrected for the Earth's rotation due to the time of day.\n #\n # The components of these 3 vectors are as follows:\n # [0] = x = direction from center of Earth toward 0 degrees longitude (the prime meridian) on equator.\n # [1] = y = direction from center of Earth toward 90 degrees west longitude on equator.\n # [2] = z = direction from center of Earth toward the north pole.\n\n uze = [coslat*coslon, coslat*sinlon, sinlat]\n une = [-sinlat*coslon, -sinlat*sinlon, coslat]\n uwe = [sinlon, -coslon, 0.0]\n\n # Correct the vectors uze, une, uwe for the Earth's rotation by calculating\n # sideral time. Call spin() for each uncorrected vector to rotate about\n # the Earth's axis to yield corrected unit vectors uz, un, uw.\n # Multiply sidereal hours by -15 to convert to degrees and flip eastward\n # rotation of the Earth to westward apparent movement of objects with time.\n\n angle = -15.0 * _sidereal_time(time)\n uz = _spin(angle, uze)\n un = _spin(angle, une)\n uw = _spin(angle, uwe)\n\n # Convert angular equatorial coordinates (RA, DEC) to\n # cartesian equatorial coordinates in 'p', using the\n # same orientation system as uze, une, uwe.\n\n p = [cosdc*cosra, cosdc*sinra, sindc]\n\n # Use dot products of p with the zenith, north, and west\n # vectors to obtain the cartesian coordinates of the body in\n # the observer's horizontal orientation system.\n #\n # pz = zenith component [-1, +1]\n # pn = north component [-1, +1]\n # pw = west component [-1, +1]\n\n pz = p[0]*uz[0] + p[1]*uz[1] + p[2]*uz[2]\n pn = p[0]*un[0] + p[1]*un[1] + p[2]*un[2]\n pw = p[0]*uw[0] + p[1]*uw[1] + p[2]*uw[2]\n\n # proj is the \"shadow\" of the body vector along the observer's flat ground.\n proj = math.sqrt(pn*pn + pw*pw)\n\n # Calculate az = azimuth (compass direction clockwise from East.)\n if proj > 0.0:\n # If the body is not exactly straight up/down, it has an azimuth.\n # Invert the angle to produce degrees eastward from north.\n az = math.degrees(-math.atan2(pw, pn))\n if az < 0:\n az += 360\n else:\n # The body is straight up/down, so it does not have an azimuth.\n # Report an arbitrary but reasonable value.\n az = 0.0\n\n # zd = the angle of the body away from the observer's zenith.\n zd = math.degrees(math.atan2(proj, pz))\n hor_ra = ra\n hor_dec = dec\n\n if refraction != Refraction.Airless:\n zd0 = zd\n refr = RefractionAngle(refraction, 90.0 - zd)\n zd -= refr\n if refr > 0.0 and zd > 3.0e-4:\n zdrad = math.radians(zd)\n sinzd = math.sin(zdrad)\n coszd = math.cos(zdrad)\n zd0rad = math.radians(zd0)\n sinzd0 = math.sin(zd0rad)\n coszd0 = math.cos(zd0rad)\n\n pr = [(((p[j] - coszd0 * uz[j]) / sinzd0)*sinzd + uz[j]*coszd) for j in range(3)]\n proj = math.sqrt(pr[0]*pr[0] + pr[1]*pr[1])\n if proj > 0:\n hor_ra = _RAD2HOUR * math.atan2(pr[1], pr[0])\n if hor_ra < 0:\n hor_ra += 24\n else:\n hor_ra = 0\n hor_dec = math.degrees(math.atan2(pr[2], proj))\n\n return HorizontalCoordinates(az, 90.0 - zd, hor_ra, hor_dec)", "def tt(self):\n return self.MJD + self.tt_ut1 + 2400000.5", "def evening_twilight_6(self, date=None):\n self.site.horizon = self.horizon6\n self._set_site_date(date)\n r_date = self.site.next_setting(self.sun)\n r_date = self.date_to_local(r_date.datetime())\n 
return r_date", "def get_chime_time(self):\n actual_time = datetime(year=self.start_time.year, month=self.start_time.month, day=self.start_time.day,\n hour=self.start_time.hour, minute=0, second=0, microsecond=0)\n if self.start_time.minute > 30:\n actual_time = actual_time + timedelta(hours=1)\n return actual_time", "def estimateTime(numparts, maskpixrad=None):\n\t#min time 60 sec vs. 289 from model\n\t#linear time 0 sec vs. -1.1587 from model\n\t#quadradic time March 14, 2008\n\tx = float(maskpixrad*numparts*2.0)\n\testtime = ( 26.83 + 0.001809 * x + 1.8542e-09 * x**2 )\n\t#ln(y) = -13.182 + 1.531 * ln(x) ==>\n\t#esttime = 1.884e-6 * (x**1.531) + 26.0\n\treturn esttime", "def orig_time(self) -> float:\n return ntp_to_system_time(self.orig_timestamp)", "def goto_sun(self, seconds_ahead = 0, blocking = True):\n assert self.is_initialized\n solar_ephemeris = self.devices['solar_ephemeris']\n tracking_mirror_positioner = self.controllers['tracking_mirror_positioner']\n #self.set_windings('on')\n #start tracking time\n t0 = time.time()\n #get current sun location\n jd_now, el_now, az_now = solar_ephemeris.update()\n #predict where sun will be at next control point\n jd_future, el_future, az_future = solar_ephemeris.predict(seconds_ahead, jd_now)\n #send start event\n info = OrderedDict()\n info['timestamp'] = t0\n info['seconds_ahead'] = seconds_ahead\n info['jd_now'] = jd_now\n info['az_now'] = az_now\n info['el_now'] = el_now\n info['jd_future'] = jd_future\n info['az_future'] = az_future\n info['el_future'] = el_future\n \n self._send_event(\"SOLAR_TRACKER_GOTO_SUN_STARTED\", info)\n if blocking:\n tracking_mirror_positioner.goto(az_target = az_future,\n el_target = el_future,\n blocking = blocking,\n )\n t1 = time.time()\n used_t = t1-t0\n #send end event\n info = OrderedDict()\n info['timestamp'] = t1\n info['az_pos'] = self.az_pos\n info['el_pos'] = self.el_pos\n info['used_time'] = used_t\n self._send_event(\"SOLAR_TRACKER_GOTO_SUN_COMPLETED\", info)\n return used_t\n else:\n tracking_mirror_positioner.goto(az_target = az_future,\n el_target = el_future,\n blocking = blocking,\n )", "def flattop_risefall_1ns(t, params):\n params['risefall'] = 1e-9\n return flattop_risefall(t, params)", "def lunar_phase(cls, tee):\n return mod(cls.lunar_longitude(tee) - cls.hindu_solar_longitude(tee), 360)", "def get_time_step(self):\n for body in self.bodies:\n # If body is a Satelite\n if body.name == \"Satelite\":\n # Assuming that acceleration for a small times step is constant\n t = 0.01 * norm(body.velocity) / norm(body.acc)\n if t < self.t:\n return t\n return self.t", "def sun_earth_test(stepper_type, dt):\n # numerical params\n T = 0\n\n # physical params\n R = common.M_S/common.M_E\n MS = np.array([R, 1])\n G = common.get_G(common.M_E, common.AU, common.YR)\n f = common.get_f(G, MS)\n period = np.sqrt(4 * np.pi**2 / (G * sum(MS)) * (1 + 1 / R)**3)\n\n T_F = 2 * period\n V_E = np.sqrt(G * R / (1 + 1/R))\n ys = np.array([\n -1 / R, 0, 0, -V_E / R,\n 1, 0, 0, V_E\n ], dtype=np.float64)\n earth_pos = [ys[4:6]]\n solver = stepper_type(f, T, ys, T_F, max_step=dt, G=G, Ms=MS,\n get_accel=common.get_accel, get_jerk=common.get_jerk\n )\n times = [T]\n while solver.status == 'running':\n solver.step()\n earth_pos.append(np.copy(solver.y[4:6]))\n times.append(solver.t)\n earth_arr = np.array(earth_pos)\n times_arr = np.array(times)\n exact_earth = np.array(list(zip(\n np.cos(2 * np.pi / period * times_arr),\n np.sin(2 * np.pi / period * times_arr)\n )))\n return np.sqrt(sum(common.l2_norm(earth_arr, 
exact_earth))**2)", "def get_time(self):\n start=''\n end=''\n time=''\n times=self.times\n print(times[self.istep])\n if self.istep > 0:\n start=ncEarth.beginstr % times[self.istep].isoformat()\n\n\n if self.istep < len(times)-2:\n end = ncEarth.endstr % times[self.istep+1].isoformat()\n\n if start is not '' or end is not '':\n time=ncEarth.timestr % {'begin':start,'end':end}\n\n return time", "def half_step_time(self):\n\n return self.full_step_time() * self.half_to_full_step_time_ratio", "def sim_step(self):\n return traci.simulation.getCurrentTime()/1000 # milliseconds to seconds", "def getHeadingTime(self) -> float:\n return self.timestep_cached_heading_tm", "def gettime():\n return libruss.russ_gettime()", "def calc_swd(lon, lat, hour, doy):\n\n lon = -lon\n sda = 0.409 * np.cos(2. * np.pi * (doy - 173.) / 365.)\n sinlea = np.sin(2. * np.pi * lat / 360.) * np.sin(sda) - \\\n np.cos(2. * np.pi * lat / 360.) * np.cos(sda) * \\\n np.cos(2. * np.pi * (hour*3600.) / 86400. - 2. * np.pi * lon / 360.)\n sinlea = np.maximum(sinlea, 1e-9)\n Tr = (0.6 + 0.2 * sinlea)\n swin = 1368. * Tr * sinlea\n\n return swin", "def get_vsolar(self):\n return self.read_register(4098, 1, 3)", "def time_interval( self ):\n begin = self.begin; end = self.end\n if end - begin < 600*self.hour_switch:\n return 600\n if end - begin < 86400*self.day_switch:\n return 3600\n elif end - begin < 86400*7*self.week_switch:\n return 86400\n else:\n return 86400*7", "def get_time():\n # Use this one for production:\n now_time = pendulum.now(tz=pendulum.timezone(\"America/New_York\"))\n # Use this one for testing and modify as needed:\n # now_time = pendulum.datetime(2019, 7, 21, 20, 00, tz='America/New_York')\n\n return now_time", "def get_time(self) -> float:\n raise NotImplementedError()", "def get_time(self) -> float:\n self.rocket.update()\n return self.rocket.time", "def raw_rain_sensor_temp(self) -> int:\n self._update_analog_value_cache()\n return self.analog_cache.rain_sensor_temp", "def calculate_pv_output(dt: datetime, sunrise: datetime, sunset: datetime) -> int:\n\n distance_to_zenith = (sunset - sunrise) / 2\n zenith = sunrise + distance_to_zenith\n dist_to_zenith_seconds = distance_to_zenith.total_seconds()\n\n zenith_percentage = abs(zenith - dt).total_seconds() / dist_to_zenith_seconds\n\n sun_intensity = zenith_percentage ** 2\n output = PV_MAX_TOTAL_OUTPUT_KW - (PV_MAX_TOTAL_OUTPUT_KW * sun_intensity)\n\n return int(output)", "def compute_time_step():\n\n dt = Hydro.compute_time_step()\n\n return dt", "def get_time_round(date):\r\n return int(date / self.timeframe) * self.timeframe" ]
[ "0.7141806", "0.7119291", "0.70426834", "0.68516636", "0.6563814", "0.64964134", "0.6399623", "0.6321392", "0.63076687", "0.60870713", "0.5994337", "0.59702235", "0.5949885", "0.5923036", "0.5906305", "0.5899862", "0.5849699", "0.57508343", "0.57498085", "0.5634182", "0.55695426", "0.5560819", "0.5534628", "0.5530005", "0.5525836", "0.5521706", "0.5469477", "0.5446264", "0.54422927", "0.5418754", "0.54136485", "0.5401768", "0.5384227", "0.5382641", "0.5379776", "0.53786516", "0.5358492", "0.5343965", "0.5331119", "0.5308618", "0.52943045", "0.52723473", "0.5261115", "0.5259306", "0.5251353", "0.5247045", "0.52451724", "0.52360034", "0.52212566", "0.5218366", "0.5216309", "0.5209245", "0.5185816", "0.51804215", "0.5160225", "0.5147242", "0.5144603", "0.5141436", "0.5137429", "0.5136018", "0.512954", "0.51277906", "0.51218086", "0.511175", "0.50911754", "0.5084243", "0.50833374", "0.5081585", "0.5075172", "0.5074979", "0.5070587", "0.5056715", "0.5051505", "0.5049562", "0.5048928", "0.5045774", "0.50413597", "0.5040506", "0.503611", "0.50348544", "0.50328577", "0.50305086", "0.5020845", "0.5012889", "0.5008646", "0.5008368", "0.5004543", "0.4984058", "0.49760476", "0.4970081", "0.49684533", "0.49637595", "0.49630564", "0.4961508", "0.49573368", "0.4955301", "0.49517998", "0.49468407", "0.49424687", "0.4941758" ]
0.53256434
39
Calculates the solar noon (the time when the sun is at its highest point).
def solar_noon(self, date=None, local=True):
        if self.astral is None:
            self.astral = Astral()
        if date is None:
            date = datetime.date.today()
        noon = self.astral.solar_noon_utc(date, self.longitude)
        if local:
            return noon.astimezone(self.tz)
        else:
            return noon
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solar_noon_local(LonDegE):\n return 12.", "def solar_noon_utc(self, date, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n newt = self._jday_to_jcentury(julianday + 0.5 + longitude / 360.0)\n\n eqtime = self._eq_of_time(newt)\n timeUTC = 720.0 + (longitude * 4.0) - eqtime\n\n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n noon = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return noon", "def _solar_time(date, lon, lat):\n # IDL is computing time_t as hours and fractional minutes\n time_t = ee.Date(date).get('hour').add(\n ee.Date(date).get('minute').divide(60))\n\n # This will return the hour floating point value\n # time_t = ee.Date(date).get('hour').add(ee.Date(date).getFraction('hour'))\n\n # CGM - DOY and hour could be images in order to make these expressions\n julian = _to_jd(date)\n\n # Sunrise time\n julian_ = time_t.divide(24.0).add(julian)\n j_cen = julian_.add(0.5 - 2451545.0).divide(36525.0)\n # CGM - Does the mod happen before or after the multiply\n lon_sun = j_cen.multiply(0.0003032).add(36000.76983) \\\n .multiply(j_cen).mod(360.0).add(280.46646).subtract(360)\n an_sun = j_cen.multiply(-0.0001537).add(35999.05029) \\\n .multiply(j_cen).add(357.52911)\n ecc = j_cen.multiply(0.0000001267).add(0.000042037) \\\n .multiply(j_cen).multiply(-1).add(0.016708634)\n ob_ecl = j_cen.multiply(-0.001813).add(0.00059) \\\n .multiply(j_cen).add(46.815) \\\n .multiply(j_cen).multiply(-1).add(21.448) \\\n .divide(60.0).add(26).divide(60).add(23)\n ob_corr = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).cos() \\\n .multiply(0.00256).add(ob_ecl)\n var_y = ob_corr.divide(2.0).multiply(deg2rad).tan().multiply(\n ob_corr.divide(2.0).multiply(deg2rad).tan())\n eq_t = (\n lon_sun.multiply(2.0).multiply(deg2rad).sin().multiply(var_y)\n .subtract(an_sun.multiply(deg2rad).sin().multiply(ecc).multiply(2.0))\n .add(an_sun.multiply(deg2rad).sin()\n .multiply(lon_sun.multiply(2.0).multiply(deg2rad).cos())\n .multiply(var_y).multiply(ecc).multiply(4.0))\n .subtract(lon_sun.multiply(4.0).multiply(deg2rad).sin()\n .multiply(var_y).multiply(var_y).multiply(0.5))\n .subtract(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(ecc).multiply(ecc).multiply(1.25))\n .multiply(4.0).multiply(rad2deg))\n sun_eq = (\n an_sun.multiply(deg2rad).sin().multiply(\n j_cen.multiply(0.000014).add(0.004817)\n .multiply(j_cen).multiply(-1).add(1.914602))\n .add(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(j_cen.multiply(-0.000101).add(0.019993)))\n .add(an_sun.multiply(3.0).multiply(deg2rad).sin().multiply(0.000289)))\n sun_true = sun_eq.add(lon_sun)\n sun_app = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).sin() \\\n .multiply(-0.00478).subtract(0.00569).add(sun_true)\n\n # CGM - Intentionally not converting back to degrees\n d = ob_corr.multiply(deg2rad).sin() \\\n .multiply(sun_app.multiply(deg2rad).sin()) \\\n .asin()\n\n # CGM - Functions below are lat/lon dependent and can be written as\n # ee.Image expressions\n # CGM - d is still in radians, not converting\n ha_t = lat.expression(\n 
'acos((cos(90.833 * pi / 180) / (cos(lat) * cos(d))) - tan(lat) * tan(d))'\n ' * (180 / pi)',\n {'lat': lat, 'd': d, 'pi': math.pi})\n\n # print('\\n{:10s} {:.12f}'.format('julian_', julian_.getInfo()))\n # print('{:10s} {:.12f}'.format('time_t', time_t.getInfo()))\n # print('{:10s} {:.12f}'.format('j_cen', j_cen.getInfo()))\n # print('{:10s} {:.12f}'.format('lon_sun', lon_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('an_sun', an_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('ecc', ecc.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_ecl', ob_ecl.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_corr', ob_corr.getInfo()))\n # print('{:10s} {:.12f}'.format('var_y', var_y.getInfo()))\n # print('{:10s} {:.12f}'.format('eq_t', eq_t.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_eq', sun_eq.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_true', sun_true.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_app', sun_app.getInfo()))\n # print('{:10s} {:.12f}'.format('d', d.getInfo()))\n\n return d, eq_t, ha_t", "def solar_time_index(self):\n return self.data.solar_time_index", "def omega_sun_snodgrass90(lat):\n return differential_rotation(lat, 14.71, -2.39, -1.78)", "def equation_of_time(cls, date):\n offset = cls.sine(cls.mean_position(date, cls.ANOMALISTIC_YEAR))\n equation_sun = (offset * angle(57, 18, 0) * (14/360 - (abs(offset) / 1080)))\n return ((cls.daily_motion(date) / 360) * (equation_sun / 360) * cls.SIDEREAL_YEAR)", "def get_vsolar(self):\n return self.read_register(4098, 1, 3)", "def update(self, time):\n\n delta_J2000 = self.time - constant.J2000_DATE\n n_days_J2000 = delta_J2000.days + delta_J2000.seconds/86400\n\n mean_lon_sun = 280.460 + 0.9856474*n_days_J2000\n mean_lon_sun %= 360.0\n mean_lon_sun *= constant.DEG_TO_RAD\n\n mean_anomaly_sun = 357.528 + 0.9856003*n_days_J2000\n mean_anomaly_sun %= 360.0\n mean_anomaly_sun *= constant.DEG_TO_RAD\n\n ecliptic_lon_sun = ( mean_lon_sun/constant.DEG_TO_RAD +\n 1.915*math.sin(mean_anomaly_sun) +\n 0.020*math.sin(2.0*mean_anomaly_sun) )\n ecliptic_lon_sun *= constant.DEG_TO_RAD\n\n dist_earth_to_sun = (1.00014 -\n 0.01671*math.cos(mean_anomaly_sun) -\n 0.00014*math.cos(2.0*mean_anomaly_sun) )\n dist_earth_to_sun *= constant.AU_TO_KM\n\n obliquity_ecliptic = 23.439 - 0.0000004*n_days_J2000\n obliquity_ecliptic *= constant.DEG_TO_RAD\n\n x_J2000_sun = math.cos(ecliptic_lon_sun)\n y_J2000_sun = math.cos(obliquity_ecliptic)*math.sin(ecliptic_lon_sun)\n z_J2000_sun = math.sin(obliquity_ecliptic)*math.sin(ecliptic_lon_sun)\n\n self.direction = vt.Vector([x_J2000_sun, y_J2000_sun, z_J2000_sun])\n self.distance = dist_earth_to_sun\n self.time = time", "def solar_noon_utc(LonDegE):\n _timezone = array([-180, -172.5, -157.5, -142.5, -127.5, -112.5, -97.5, -82.5, -67.5, -52.5, -37.5, -22.5, -7.5, 7.5, 22.5, 37.5, 52.5, 67.5, 82.5, 97.5, 112.5, 127.5, 142.5, 157.5, 172.5, 180]).repeat(2, 0)[1:-1].reshape(-1, 2)\n for i, (low, high) in enumerate(_timezone):\n if LonDegE >= low:\n if LonDegE <= high:\n return 12 -(-12 + i)", "def time_NEURON():\n recorded_time = h.Vector()\n recorded_time.record(h._ref_t)\n return recorded_time", "def moon_phase(\n datetime_index,\n epsilon=1e-6,\n epoch=2444237.905,\n ecliptic_longitude_epoch=278.833540,\n ecliptic_longitude_perigee=282.596403,\n eccentricity=0.016718,\n moon_mean_longitude_epoch=64.975464,\n moon_mean_perigee_epoch=349.383063,\n):\n # set time to Noon if not otherwise given, as midnight is confusingly close to previous day\n if np.sum(datetime_index.hour) == 0:\n datetime_index = 
datetime_index + pd.Timedelta(hours=12)\n days = datetime_index.to_julian_date() - epoch\n\n # Mean anomaly of the Sun\n a = (360 / 365.2422) * days\n N = a - 360.0 * np.floor(a / 360.0)\n N = N + ecliptic_longitude_epoch - ecliptic_longitude_perigee\n # Convert from perigee coordinates to epoch 1980\n M = a - 360.0 * np.floor(N / 360.0)\n\n m = torad(M)\n e = m.copy()\n while 1:\n delta = e - eccentricity * np.sin(e) - m\n e = e - delta / (1.0 - eccentricity * np.cos(e))\n if abs(delta).max() <= epsilon:\n break\n\n Ec = sqrt((1 + eccentricity) / (1 - eccentricity)) * np.tan(e / 2.0)\n # True anomaly\n Ec = 2 * todeg(np.arctan(Ec))\n # Suns's geometric ecliptic longuitude\n a = Ec + ecliptic_longitude_perigee\n lambda_sun = a - 360.0 * np.floor(a / 360.0)\n\n # Calculation of the Moon's position\n\n # Moon's mean longitude\n a = 13.1763966 * days + moon_mean_longitude_epoch\n moon_longitude = a - 360.0 * np.floor(a / 360.0)\n\n # Moon's mean anomaly\n a = moon_longitude - 0.1114041 * days - moon_mean_perigee_epoch\n MM = a - 360.0 * np.floor(a / 360.0)\n\n # Moon's ascending node mean longitude\n # MN = fixangle(c.node_mean_longitude_epoch - 0.0529539 * day)\n\n evection = 1.2739 * np.sin(torad(2 * (moon_longitude - lambda_sun) - MM))\n\n # Annual equation\n annual_eq = 0.1858 * np.sin(torad(M))\n\n # Correction term\n A3 = 0.37 * np.sin(torad(M))\n\n MmP = MM + evection - annual_eq - A3\n\n # Correction for the equation of the centre\n mEc = 6.2886 * np.sin(torad(MmP))\n\n # Another correction term\n A4 = 0.214 * np.sin(torad(2 * MmP))\n\n # Corrected longitude\n lP = moon_longitude + evection + mEc - annual_eq + A4\n\n # Variation\n variation = 0.6583 * np.sin(torad(2 * (lP - lambda_sun)))\n\n # True longitude\n lPP = lP + variation\n\n # Calculation of the phase of the Moon\n\n # Age of the Moon, in degrees\n moon_age = lPP - lambda_sun\n\n # Phase of the Moon\n moon_phase = (1 - np.cos(torad(moon_age))) / 2.0\n return moon_phase\n # return pd.Series(moon_phase, index=datetime_index)", "def lunarperigee(time):\n dtor = np.pi / 180\n t1 = 1 + time\n t2 = t1 * t1\n t3 = t2 * t1\n perigee = (\n 334.329653 * dtor\n + 4069.0340329575 * dtor * t1\n - 0.010325 * dtor * t2\n - 1.2e-5 * dtor * t3\n )\n return perigee", "def zodiac(cls, tee):\n return quotient(float(cls.solar_longitude(tee)), 30) + 1", "def alt_sunrise(cls, date):\n rise = cls.UJJAIN.dawn(date, angle(0, 47, 0))\n return 1/24 * 1/60 * iround(rise * 24 * 60)", "def hindu_lunar_station(date):\n critical = HinduDate.sunrise(date)\n return quotient(HinduLunarDate.longitude(critical), angle(0, 800, 0)) + 1", "def sunset(cls, date):\n return (date + Clock.days_from_hours(18) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n (((1577917828/1582237828) / 360) *\n (- cls.ascensional_difference(date, cls.LOCATION) +\n (3/4 * cls.solar_sidereal_difference(date)))))", "def SunPosition(time):\n # Correct for light travel time from the Sun.\n # Otherwise season calculations (equinox, solstice) will all be early by about 8 minutes!\n adjusted_time = time.AddDays(-1.0 / C_AUDAY)\n earth2000 = _CalcEarth(adjusted_time)\n sun2000 = [-earth2000.x, -earth2000.y, -earth2000.z]\n\n # Convert to equatorial Cartesian coordinates of date.\n stemp = _precession(sun2000, adjusted_time, _PrecessDir.From2000)\n sun_ofdate = _nutation(stemp, adjusted_time, _PrecessDir.From2000)\n\n # Convert equatorial coordinates to ecliptic coordinates.\n true_obliq = math.radians(adjusted_time._etilt().tobl)\n return 
_RotateEquatorialToEcliptic(sun_ofdate, true_obliq, time)", "def calculate_sun_earth_distance(doy):\n #The eccentricity of the Earth's orbit is currently about 0.0167 (wiki)\n ec=0.0167\n d=1+ec*np.sin(2*np.pi*(doy-93.5)/365)\n return d", "def solar_model():\n \n latitude, longitude, timezone, elevation = location_input()\n year, time = time_input()\n\n lat_r = latitude/180*np.pi\n lon_r = longitude/180*np.pi \n n = 0\n for i in range(1900,year):\n if i%4 == 0:\n n += 366\n else:\n n+=365\n JulD = n + time + 2415018.5 - (timezone)/24\n LT = time - int(time)\n JC = (JulD - 2451545) / 36525\n x = 46.815 + JC * (0.00059 - JC * 0.001813)\n M_OE = 23 + (26 + (21.448 - JC * x) / 60) / 60\n EEO = 0.016708634 - JC * (0.000042037 + 0.0000001267 * JC)\n GMAS = 357.52911 + JC * (35999.05029 - 0.0001537 * JC)\n GMAS_r = m.radians(GMAS)\n GMLS = (280.46646 + JC * (36000.76983 + JC * 0.0003032))%360\n GMLS_r = m.radians(GMLS)\n Obliq_C = M_OE + 0.00256 * np.cos((125.04 - 1934.136 * JC) / 180 * np.pi)\n Obliq_C_r = m.radians(Obliq_C)\n SEC = np.sin(GMAS_r) * (1.914602 - JC * (0.004817 + 0.000014 * JC)) + np.sin(2 * GMAS_r) * (0.019993 - 0.000101 * JC) + np.sin(3 * GMAS_r) * 0.000289\n STL = GMLS + SEC\n SAL = STL - 0.00569 - 0.00478 * np.sin((125.04 - 1934.136 * JC) / 180 * np.pi)\n SAL_r = m.radians(SAL)\n sin_Delta = np.sin(Obliq_C_r) * np.sin(SAL_r)\n Delta_r = np.arcsin(sin_Delta) #in radians \n Var_y = np.tan((Obliq_C / 2) / 180 * np.pi) * np.tan((Obliq_C / 2) / 180 * np.pi)\n EOT_prime = Var_y * np.sin(2 * GMLS_r) - 2 * EEO * np.sin(GMAS_r) + 4 * EEO * Var_y * np.sin(GMAS_r) * np.cos(2 * GMLS_r) - 0.5 * Var_y * Var_y * np.sin(4 * GMLS_r) - 1.25 * EEO * EEO * np.sin(2 * GMAS_r)\n EOT = 4 * EOT_prime / np.pi * 180 \n TST = (LT * 1440 + EOT + 4 * longitude - 60 * timezone)%1440\n if TST / 4 < 0:\n Omega = TST/4+180\n else:\n Omega = TST/4 - 180 \n Omega_r = m.radians(Omega)\n \n cos_Zenith = np.sin(lat_r) * np.sin(Delta_r) + np.cos(lat_r) * np.cos(Delta_r) * np.cos(Omega_r)\n Zenith_r = np.arccos(cos_Zenith) #in radians\n Aprime_r = np.arccos((np.sin(lat_r) * np.cos(Zenith_r) - np.sin(Delta_r)) / (np.cos(lat_r) * np.sin(Zenith_r)))\n Aprime = Aprime_r / np.pi * 180\n if Omega > 0:\n Azimuth = (Aprime + 180) % 360 #in degrees\n else:\n Azimuth = (540 - Aprime) % 360 #in degrees \n Azimuth_r = Azimuth / 180 * np.pi\n Elev_angle = (np.pi)/2 - Zenith_r\n\n \n # calculate incidence angle\n # Beta is equal to angle of tilted surface to horizontal (in radians)\n Beta = 45 # in degrees\n Beta_r = m.radians(Beta)\n \n cos_incidence = np.sin(Delta_r)* np.sin(lat_r) * np.cos(Beta_r) - np.sin(Delta_r) * np.cos(lat_r) * np.sin(Beta_r) * np.cos(Azimuth_r) + np.cos(Delta_r) * np.cos(lat_r) * np.cos(Beta_r) * np.cos(Omega_r) + np.cos(Delta_r) * np.sin(lat_r) * np.sin(Beta_r) * np.cos(Azimuth_r) * np.cos(Omega_r) + np.cos(Delta_r) * np.sin(Beta_r) * np.sin(Azimuth_r) * np.sin(Omega_r) \n incidence_ang_r = np.arccos(cos_incidence)\n \n return Delta_r, lat_r, Omega_r, Zenith_r, Azimuth_r, Elev_angle", "def sunlongitude(time):\n B0 = 36000.7695\n C0 = 280.4659\n # fmt: off\n A = np.array([19147e-4, 200e-4, 48e-4, 20e-4, 18e-4, 18e-4, \\\n 15e-4, 13e-4, 7e-4, 7e-4, 7e-4, 6e-4, \\\n 5e-4, 5e-4, 4e-4, 4e-4])\n B = np.array([35999.050, 71998.1, 1934, 32964, 19, \\\n 445267, 45038, 22519, 65929, 3035, \\\n 9038, 33718, 155, 2281, 29930, \\\n 31557])\n C = np.array([267.520, 265.1, 145, 158, 159, 208, \\\n 254., 352, 45, 110, 64, 316, \\\n 118., 221, 48, 161])\n # fmt: on\n RAD = 0.0174532925199433\n A[0] = 1.9147 - 0.0048 * 
time\n tempb = (B * time + C) * RAD\n amp = A * np.cos(tempb)\n sunlon = np.sum(amp)\n sunlon = (sunlon + B0 * time + C0) * RAD\n return sunlon", "def sunrise(cls, date):\n return (date + Clock.days_from_hours(6) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n ((1577917828/1582237828 / 360) *\n (cls.ascensional_difference(date, cls.LOCATION) +\n (1/4 * cls.solar_sidereal_difference(date)))))", "def nancay():\n return coord.EarthLocation(lat=47.376511*u.deg, lon=2.1924002*u.deg)", "def apparent_magnitude(sat, topos, earth, sun, time):\n\n position = earth + sat\n observer = earth + topos\n barycentric_o = position.at(time).observe(observer)\n barycentric_s = position.at(time).observe(sun)\n phase_angle = barycentric_o.separation_from(barycentric_s).radians\n _, _, distance = barycentric_o.radec()\n term_1 = -1.3 # standard satellite intrinsic magnitude\n term_2 = +5.0 * np.log10(distance.km / 1000.)\n arg = np.sin(phase_angle) + (np.pi - phase_angle) * np.cos(phase_angle)\n term_3 = -2.5 * np.log10(arg)\n return term_1 + term_2 + term_3", "def sidereal_zodiac(tee):\n return quotient(int(sidereal_solar_longitude(tee)), 30) + 1", "def sidereal_solar_longitude(tee):\n return mod(Solar.solar_longitude(tee) - Astro.precession(tee) + SIDEREAL_START, 360)", "def earth_tide(theta, lamda, gtime):\n\n global dsz, dcz, dsl, dcl, ssz, scz, ssl, scl, dpar, sdist # bpos common block\n global h, k, l # love common block\n h = [0.6114, 0.2891, 0.175]\n k = [0.304, 0.09421, 0.043]\n l = [0.0832, 0.0145, 0.0103]\n\n global azt, azs # azimut common block\n global etmut # tdiff common block\n global moon # sunny common block\n moon = 0\n # hardwire these - you can only send it ONE droptime\n deltat = 1\n NPT = 1\n\n temp_time = num2date(gtime)\n\n YY = temp_time.year\n MO = temp_time.month\n DD = temp_time.day\n HH = temp_time.hour\n MM = temp_time.minute\n SS = temp_time.second\n # Initialize variables\n irl = 1\n iflag = 0\n ntotl = 1\n iget = [0, 0, 0, 0, 0, 0, 0] # ' !!!\n ispc = [0, 0, 0, 0] # ' !!!\n ntw = [1, 0, 0] # ' !!!\n ioptn = 't'\n ielement = 0\n # \tdata statements for input and output unit numbers (on terminal I/O)\n inun = 5\n ioun = 6\n nptpb = 6\n\n yr1 = YY - 1900\n day1 = date2num(datetime(YY, MO, DD))\n # \tfind times in hours from 0 hr, 1 jan 1900\n # matlab:\n ts = (\n SS / 3600\n + MM / 60\n + HH\n + 24 * (day1 - 1)\n + 8760 * yr1\n + 24 * np.fix((yr1 - 1) / 4)\n )\n # python:\n dj = date_to_julian_day(datetime(YY, MO, DD))\n djref = date_to_julian_day(datetime(1899, 12, 31, 0, 0, 0))\n delta_dj = (\n dj - djref\n ) # difference in days from current date (0hr) to 0hr, 1 jan 1900\n delta_djhr = float(delta_dj) * 24.0 + HH - 12.0 + MM / 60.0 + SS / 3600.0\n te = ts + (NPT - 1) * deltat / 3600\n d = deltat / 3600\n # terms=(te-ts)/d + 1\n terms = NPT\n\n # done asking questions - begin execution\n i = 1\n tt = ts\n sph(theta, lamda, 0)\n etmut = 41.184 + yr1 - 70\n # matlab:\n # t = (tt+12 + (etmut/3600))/876600\n t = (delta_djhr + etmut / 3600) / 876600\n # t is ephemeris time in julian centuries from 12 hr 0 jan 1900\n ephem(t)\n\n # calculate normalized gravity tides\n [grav, tilt, strain, gdc] = elastd(ntw)\n\n gravtide = 1.0e5 * grav\n # convert m/s² to mgal: 1m/s² = 100 gal = 100 000 mgal\n\n iflag = 1\n\n iterms = np.fix(terms)\n i = 1\n return gravtide", "def nyquist(self):\n return 1 / (2 * np.median(np.diff(self.lc.time)))", "def sundial_time(cls, tee):\n date = Clock.fixed_from_moment(tee)\n time = mod(tee, 1)\n q = ifloor(4 * 
time)\n if q == 0:\n a = cls.sunset(date - 1)\n b = cls.sunrise(date)\n t = Clock.days_from_hours(-6)\n elif q == 3:\n a = cls.sunset(date)\n b = cls.sunrise(date + 1)\n t = Clock.days_from_hours(18)\n else:\n a = cls.sunrise(date)\n b = cls.sunset(date)\n t = Clock.days_from_hours(6)\n return a + (2 * (b - a) * (time - t))", "def solar_sidereal_difference(cls, date):\n return cls.daily_motion(date) * cls.rising_sign(date)", "def power_output_existing_solar_rule(_m, g, y, s, t):\r\n\r\n if y != m.Y.last():\r\n return (- m.sigma_1[g, y, s, t] + m.sigma_6[g, y, s, t]\r\n - m.lamb[self.k(m, g), y, s, t]\r\n + (m.DELTA[y] * m.RHO[y, s] * m.C_MC[g, y])\r\n == 0)\r\n\r\n else:\r\n return (- m.sigma_1[g, y, s, t] + m.sigma_6[g, y, s, t]\r\n - m.lamb[self.k(m, g), y, s, t]\r\n + (m.DELTA[y] * m.RHO[y, s] * (1 + (1 / m.INTEREST_RATE)) * m.C_MC[g, y])\r\n == 0)", "def night_center(self, date=None):\n sunset = self.sunset(date=date)\n sunrise = self.sunrise(date=sunset)\n center = sunset + timedelta(0, (sunrise - sunset).total_seconds() / 2.0)\n center = self.date_to_local(center)\n return center", "def nanos(self) -> int:\n return pulumi.get(self, \"nanos\")", "def nanos(self) -> int:\n return pulumi.get(self, \"nanos\")", "def solar_longitude(cls, tee):\n return cls.true_position(tee, cls.SIDEREAL_YEAR, 14/360, cls.ANOMALISTIC_YEAR, 1/42)", "def solar_elevation(self, dateandtime, latitude, longitude):\n \n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n\n zone = -dateandtime.utcoffset().seconds / 3600.0\n utc_datetime = dateandtime.astimezone(pytz.utc)\n timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + (utc_datetime.second / 3600)\n \n JD = self._julianday(dateandtime.day, dateandtime.month, dateandtime.year)\n t = self._jday_to_jcentury(JD + timenow / 24.0)\n theta = self._sun_declination(t)\n Etime = self._eq_of_time(t)\n \n eqtime = Etime\n solarDec = theta # in degrees\n \n solarTimeFix = eqtime - (4.0 * longitude) + (60 * zone)\n trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + dateandtime.second / 60.0 + solarTimeFix\n # in minutes\n \n while trueSolarTime > 1440:\n trueSolarTime = trueSolarTime - 1440\n \n hourangle = trueSolarTime / 4.0 - 180.0\n # Thanks to Louis Schwarzmayr for the next line:\n if hourangle < -180:\n hourangle = hourangle + 360.0\n \n harad = radians(hourangle)\n \n csz = sin(radians(latitude)) * sin(radians(solarDec)) + \\\n cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad)\n \n if csz > 1.0:\n csz = 1.0\n elif csz < -1.0:\n csz = -1.0\n \n zenith = degrees(acos(csz))\n \n azDenom = (cos(radians(latitude)) * sin(radians(zenith)))\n \n if (abs(azDenom) > 0.001):\n azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))) / azDenom\n \n if abs(azRad) > 1.0:\n if azRad < 0:\n azRad = -1.0\n else:\n azRad = 1.0\n \n azimuth = 180.0 - degrees(acos(azRad))\n \n if hourangle > 0.0:\n azimuth = -azimuth\n else:\n if latitude > 0.0:\n azimuth = 180.0\n else:\n azimuth = 0\n \n if azimuth < 0.0:\n azimuth = azimuth + 360.0\n \n exoatmElevation = 90.0 - zenith\n\n if exoatmElevation > 85.0:\n refractionCorrection = 0.0\n else:\n te = tan(radians(exoatmElevation))\n if exoatmElevation > 5.0:\n refractionCorrection = 58.1 / te - 0.07 / (te * te * te) + 0.000086 / (te * te * te * te * te)\n elif exoatmElevation > -0.575:\n step1 = (-12.79 + exoatmElevation * 0.711)\n step2 = (103.4 + exoatmElevation * (step1))\n step3 = (-518.2 + exoatmElevation * (step2))\n refractionCorrection = 1735.0 
+ exoatmElevation * (step3)\n else:\n refractionCorrection = -20.774 / te\n \n refractionCorrection = refractionCorrection / 3600.0\n \n solarzen = zenith - refractionCorrection\n \n solarelevation = 90.0 - solarzen\n \n return solarelevation", "def sun_single_day(date):\r\n\r\n\tsun = l.sun(date=date, local=True)\r\n\tsunrise = sun['sunrise']\r\n\tsunset = sun['sunset']\r\n\tday_length = str(sunset-sunrise)\r\n\tsolar_noon = l.solar_noon(date=date, local=True)\r\n\tsolar_zenith = l.solar_elevation(solar_noon.replace(tzinfo=None))\r\n\r\n\treturn {'sunrise':sunrise, 'sunset': sunset, 'daylength': day_length, 'solar_noon': solar_noon, 'zenith': solar_zenith}", "def tropical_longitude(cls, fixed_date):\n days = ifloor(fixed_date - OldHindu.EPOCH)\n precession = 27 - abs(54 - mod(27 + (108 * 600/1577917828 * days), 108))\n return mod(cls.solar_longitude(fixed_date) - precession, 360)", "def solar_azimuth(self, dateandtime, latitude, longitude):\n \n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n zone = -dateandtime.utcoffset().seconds / 3600.0\n utc_datetime = dateandtime.astimezone(pytz.utc)\n timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + (utc_datetime.second / 3600)\n\n JD = self._julianday(dateandtime.day, dateandtime.month, dateandtime.year)\n t = self._jday_to_jcentury(JD + timenow / 24.0)\n theta = self._sun_declination(t)\n Etime = self._eq_of_time(t)\n \n eqtime = Etime\n solarDec = theta # in degrees\n \n solarTimeFix = eqtime - (4.0 * longitude) + (60 * zone)\n trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + dateandtime.second / 60.0 + solarTimeFix\n # in minutes\n \n while trueSolarTime > 1440:\n trueSolarTime = trueSolarTime - 1440\n \n hourangle = trueSolarTime / 4.0 - 180.0\n # Thanks to Louis Schwarzmayr for the next line:\n if hourangle < -180:\n hourangle = hourangle + 360.0\n \n harad = radians(hourangle)\n \n csz = sin(radians(latitude)) * sin(radians(solarDec)) + \\\n cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad)\n \n if csz > 1.0:\n csz = 1.0\n elif csz < -1.0:\n csz = -1.0\n \n zenith = degrees(acos(csz))\n \n azDenom = (cos(radians(latitude)) * sin(radians(zenith)))\n \n if (abs(azDenom) > 0.001):\n azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))) / azDenom\n \n if abs(azRad) > 1.0:\n if azRad < 0:\n azRad = -1.0\n else:\n azRad = 1.0\n \n azimuth = 180.0 - degrees(acos(azRad))\n \n if hourangle > 0.0:\n azimuth = -azimuth\n else:\n if latitude > 0.0:\n azimuth = 180.0\n else:\n azimuth = 0#\n \n if azimuth < 0.0:\n azimuth = azimuth + 360.0\n \n return azimuth", "def _compute_solar_torque(self):\n pass", "def atmospheric_ion_neutral_collision_frequency(self):\n nu = 3.8e-11*self.msis[\"nn\"]\n return nu", "def psi_inf(n):\n v = np.array([sqrt(2 / a) * sin(n * pi * (i - a) / (2 * a)) for i in np.arange(-a, a + dx, dx)])\n v = v / np.linalg.norm(v) # normalize\n return v", "def ayanamsha(tee):\n return Solar.solar_longitude(tee) - sidereal_solar_longitude(tee)", "def to_fixed(self):\n approx = OldHindu.EPOCH - 3 + ifloor(((self.year + HinduSolarDate.SOLAR_ERA) + ((self.month - 1) / 12)) * self.MEAN_SIDEREAL_YEAR)\n begin = next_int(approx, lambda i: sidereal_zodiac(self.sunset(i)) == self.month)\n return begin + self.day - 1", "def solar_time_index(self):\n if self._solar_time_index is None:\n with Resource(self.solar_fpath) as res:\n self._solar_time_index = res.time_index\n return self._solar_time_index", "def Nsat(self, m):\n result = (m - self.kappa 
* self.mMinHod)\n if result>0.:\n result /= self.m1\n result **= self.alpha\n result *= self.Ncen(m)\n else:\n result = 0.\n return result", "def power_output_candidate_solar_rule(_m, g, y, s, t):\r\n\r\n if y != m.Y.last():\r\n return (- m.sigma_1[g, y, s, t] + m.sigma_7[g, y, s, t]\r\n - m.lamb[self.k(m, g), y, s, t]\r\n + ((m.DELTA[y] * m.RHO[y, s]) * (m.C_MC[g, y] - (m.baseline[y] * m.permit_price[y])))\r\n == 0)\r\n\r\n else:\r\n return (- m.sigma_1[g, y, s, t] + m.sigma_7[g, y, s, t]\r\n - m.lamb[self.k(m, g), y, s, t]\r\n + ((m.DELTA[y] * m.RHO[y, s]) * (1 + (1 / m.INTEREST_RATE)) * (\r\n m.C_MC[g, y] - (m.baseline[y] * m.permit_price[y])))\r\n == 0)", "def MoonPhase(time):\n return PairLongitude(Body.Moon, Body.Sun, time)", "def dynamic_viscosity_of_air(self) -> float:\n\n return (1.458 * (10 ** (-6)) * (self.ambient_temperature**1.5)) / (\n self.ambient_temperature + 110.4\n )", "def _earth_distance(time='now'):\n return get_earth(time).radius", "def goto_sun(self, seconds_ahead = 0, blocking = True):\n assert self.is_initialized\n solar_ephemeris = self.devices['solar_ephemeris']\n tracking_mirror_positioner = self.controllers['tracking_mirror_positioner']\n #self.set_windings('on')\n #start tracking time\n t0 = time.time()\n #get current sun location\n jd_now, el_now, az_now = solar_ephemeris.update()\n #predict where sun will be at next control point\n jd_future, el_future, az_future = solar_ephemeris.predict(seconds_ahead, jd_now)\n #send start event\n info = OrderedDict()\n info['timestamp'] = t0\n info['seconds_ahead'] = seconds_ahead\n info['jd_now'] = jd_now\n info['az_now'] = az_now\n info['el_now'] = el_now\n info['jd_future'] = jd_future\n info['az_future'] = az_future\n info['el_future'] = el_future\n \n self._send_event(\"SOLAR_TRACKER_GOTO_SUN_STARTED\", info)\n if blocking:\n tracking_mirror_positioner.goto(az_target = az_future,\n el_target = el_future,\n blocking = blocking,\n )\n t1 = time.time()\n used_t = t1-t0\n #send end event\n info = OrderedDict()\n info['timestamp'] = t1\n info['az_pos'] = self.az_pos\n info['el_pos'] = self.el_pos\n info['used_time'] = used_t\n self._send_event(\"SOLAR_TRACKER_GOTO_SUN_COMPLETED\", info)\n return used_t\n else:\n tracking_mirror_positioner.goto(az_target = az_future,\n el_target = el_future,\n blocking = blocking,\n )", "def mask_nighttime(lon, lat, date=date, mask_daytime=mask_daytime,\n ref_date=datetime.datetime(1899, 12, 31, 12),\n buffer_hours=buffer_hours, debug=False):\n # --- get lat and lon values from columns\n if debug:\n print((\"--- (s4-1) %s seconds ---\" % (time.time() - start_time)))\n # --- get sunrise and sunset for location\n o = ephem.Observer()\n # set lat (decimal?), lon (decimal?), and date (UTC)\n o.lat = str(lat)\n o.long = str(lon)\n o.date = date\n # planetary body\n s = ephem.Sun()\n if debug:\n print((\"--- (s4-2) %s seconds ---\" % (time.time() - start_time)))\n\n # Compute sun vs observer\n s.compute()\n if debug:\n print((\"--- (s4-3) %s seconds ---\" % (time.time() - start_time)))\n\n # Work out if day or night based on sunrises and sunsets\n mask_value = 0\n try:\n\n # get sunrise time and date\n next_rising = o.next_rising(s)\n next_setting = o.next_setting(s)\n\n # convert to datetime.datetime\n next_rising = add_days(ref_date, next_rising)\n next_setting = add_days(ref_date, next_setting)\n\n # did the sun last rise or set? (inc. 
any adjustments)\n sun_last_rose = False\n if next_setting < next_rising:\n sun_last_rose = True\n\n # Add buffer to rising/setting if provided with buffer_hours\n if buffer_hours != 0:\n\n # Calculate last rise\n previous_rising = o.previous_rising(s)\n # convert to datetime.datetime\n previous_rising = add_days(ref_date, previous_rising)\n # Calculate last setting\n previous_setting = o.previous_setting(s)\n # convert to datetime.datetime\n previous_setting = add_days(ref_date, previous_setting)\n\n # Calculate absolute difference\n time_from_rise = (date-previous_rising).total_seconds()\n time_till_set = (date-next_setting).total_seconds()\n time_from_set = (date-previous_setting).total_seconds()\n time_till_rise = (date-next_rising).total_seconds()\n\n # If absolutely difference less than buffer\n if abs(time_from_rise)/60./60. <= buffer_hours:\n mask_value = 1\n elif abs(time_till_set)/60./60. <= buffer_hours:\n mask_value = 1\n elif abs(time_from_set)/60./60. < buffer_hours:\n mask_value = 1\n elif abs(time_till_rise)/60./60. < buffer_hours:\n mask_value = 1\n\n # --- Check if daytime or nighttime and mask if condition met.\n if sun_last_rose:\n if mask_daytime:\n # ... and has not set yet, it must be daytime\n if (date < next_setting):\n mask_value = 1\n\n # if the sun last set... (mask nighttime is default)\n else:\n # if mask nighttime (aka not mask_daytime)\n if not mask_daytime:\n # ... and has not risen yet, it must be nighttime\n if (date < next_rising):\n mask_value = 1\n\n # Add gotcha for locations where sun is always up.\n except AlwaysUpError:\n if mask_daytime:\n mask_value = 1\n\n # Add gotcha for locations where sun is always down.\n except NeverUpError:\n if not mask_daytime:\n mask_value = 1\n\n except:\n print('FAIL')\n sys.exit()\n\n # Mask value in array\n return mask_value", "def molar_mass_dry_air():\n return 28.9647", "def solar_meter(self):\n return self._solar_meter", "def N(latitude):\n return a/math.sqrt(1-e2*pow(math.sin(latitude),2.0))", "def hindu_solar_longitude_at_or_after(lam, tee):\n tau = tee + (HinduSolarDate.SIDEREAL_YEAR * (1 / 360) * mod(lam - HinduDate.solar_longitude(tee), 360))\n a = max(tee, tau - 5)\n b = tau +5\n return invert_angular(HinduDate.solar_longitude, lam, a, b)", "def new_moon_before(cls, tee):\n varepsilon = pow(2, -1000)\n tau = tee - ((1/360) * cls.lunar_phase(tee) * cls.SYNODIC_MONTH)\n return binary_search(tau - 1, min(tee, tau + 1),\n lambda l, u: cls.zodiac(l) == cls.zodiac(u) or u - l < varepsilon,\n lambda x: cls.lunar_phase(x) < 180)", "def get_solar_time(longitude_deg, min_date, hour_date, day_date):\n solar_time_min = hour_date * 60 + min_date + 4 * longitude_deg + get_equation_of_time(day_date)\n\n return solar_time_min/60", "def lunar_day_from_moment(cls, tee):\n return quotient(cls.lunar_phase(tee), 12) + 1", "def Ni_find(t):\r\n return ep(t) - 1", "def calc_swd(lon, lat, hour, doy):\n\n lon = -lon\n sda = 0.409 * np.cos(2. * np.pi * (doy - 173.) / 365.)\n sinlea = np.sin(2. * np.pi * lat / 360.) * np.sin(sda) - \\\n np.cos(2. * np.pi * lat / 360.) * np.cos(sda) * \\\n np.cos(2. * np.pi * (hour*3600.) / 86400. - 2. * np.pi * lon / 360.)\n sinlea = np.maximum(sinlea, 1e-9)\n Tr = (0.6 + 0.2 * sinlea)\n swin = 1368. 
* Tr * sinlea\n\n return swin", "def moon_phase(self, date):\n \n jd = self._julianday(date.day, date.month, date.year)\n DT = pow((jd - 2382148), 2) / (41048480*86400)\n T = (jd + DT - 2451545.0) / 36525\n T2 = pow(T,2)\n T3 = pow(T,3)\n D = 297.85 + (445267.1115*T) - (0.0016300*T2) + (T3/545868)\n D = radians(self._proper_angle(D))\n M = 357.53 + (35999.0503*T)\n M = radians(self._proper_angle(M))\n M1 = 134.96 + (477198.8676*T) + (0.0089970*T2) + (T3/69699)\n M1 = radians(self._proper_angle(M1))\n elong = degrees(D) + 6.29*sin(M1)\n elong -= 2.10*sin(M)\n elong += 1.27*sin(2*D - M1)\n elong += 0.66*sin(2*D)\n elong = self._proper_angle(elong)\n moon = int(floor(((elong + 6.43) / 360) * 28))\n if moon == 28:\n moon = 0\n \n return moon", "def moonlongitude(time):\n B0 = 481267.8809\n C0 = 218.3162\n # fmt: off\n A = np.array([62888.e-4, 12740.e-4, 6583.e-4, 2136.e-4, 1851.e-4, \\\n 1144.e-4, 588.e-4, 571.e-4, 533.e-4, 458.e-4, 409.e-4, \\\n 347.e-4, 304.e-4, 154.e-4, 125.e-4, 110.e-4, 107.e-4, \\\n 100.e-4, 85.e-4, 79.e-4, 68.e-4, 52.e-4, 50.e-4, 40.e-4, \\\n 40.e-4, 40.e-4, 38.e-4, 37.e-4, 28.e-4, 27.e-4, 26.e-4, \\\n 24.e-4, 23.e-4, 22.e-4, 21.e-4, 21.e-4, 21.e-4, 18.e-4, \\\n 16.e-4, 12.e-4, 11.e-4, 9.e-4, 8.e-4, 7.e-4, 7.e-4, \\\n 7.e-4, 7.e-4, 6.e-4, 6.e-4, 5.e-4, 5.e-4, 5.e-4, \\\n 4.e-4, 4.e-4, 3.e-4, 3.e-4, 3.e-4, 3.e-4, 3.e-4, \\\n 3.e-4, 3.e-4])\n B = np.array([477198.868, 413335.35, 890534.22, 954397.74, \\\n 35999.05, 966404.0, 63863.5, 377336.3, \\\n 1367733.1, 854535.2, 441199.8, 445267.1, \\\n 513197.9, 75870, 1443603, 489205, 1303870, \\\n 1431597, 826671, 449334, 926533, 31932, \\\n 481266, 1331734, 1844932, 133, 1781068, \\\n 541062, 1934, 918399, 1379739, 99863, \\\n 922466, 818536, 990397, 71998, 341337, \\\n 401329, 1856938, 1267871, 1920802, 858602, \\\n 1403732, 790672, 405201, 485333, 27864, \\\n 111869, 2258267, 1908795, 1745069, 509131, \\\n 39871, 12006, 958465, 381404, 349472, \\\n 1808933, 549197, 4067, 2322131.])\n C = np.array([44.963, 10.74, 145.70, 179.93, 87.53, 276.5, \\\n 124.2, 13.2, 280.7, 148.2, 47.4, 27.9, 222.5, \\\n 41, 52, 142, 246, 315, 111, 188, \\\n 323, 107, 205, 283, 56, 29, 21, \\\n 259, 145, 182, 17, 122, 163, 151, \\\n 357, 85, 16, 274, 152, 249, 186, \\\n 129, 98, 114, 50, 186, 127, 38, \\\n 156, 90, 24, 242, 223, 187, 340, \\\n 354, 337, 58, 220, 70, 191])\n # fmt: on\n RAD = 0.0174532925199433\n tempb = (B * time + C) * RAD\n amp = A * np.cos(tempb)\n moonlon = np.sum(amp)\n moonlon = (moonlon + B0 * time + C0) * RAD\n return moonlon", "def mean_earth_sun_distance(utc_datetime): \n\n return (1 - (0.0335 * math.sin(360 * ((solar.GetDayOfYear(utc_datetime)) - 94)) / (365)))", "def sun_earth_test(stepper_type, dt):\n # numerical params\n T = 0\n\n # physical params\n R = common.M_S/common.M_E\n MS = np.array([R, 1])\n G = common.get_G(common.M_E, common.AU, common.YR)\n f = common.get_f(G, MS)\n period = np.sqrt(4 * np.pi**2 / (G * sum(MS)) * (1 + 1 / R)**3)\n\n T_F = 2 * period\n V_E = np.sqrt(G * R / (1 + 1/R))\n ys = np.array([\n -1 / R, 0, 0, -V_E / R,\n 1, 0, 0, V_E\n ], dtype=np.float64)\n earth_pos = [ys[4:6]]\n solver = stepper_type(f, T, ys, T_F, max_step=dt, G=G, Ms=MS,\n get_accel=common.get_accel, get_jerk=common.get_jerk\n )\n times = [T]\n while solver.status == 'running':\n solver.step()\n earth_pos.append(np.copy(solver.y[4:6]))\n times.append(solver.t)\n earth_arr = np.array(earth_pos)\n times_arr = np.array(times)\n exact_earth = np.array(list(zip(\n np.cos(2 * np.pi / period * times_arr),\n np.sin(2 * np.pi / period * 
times_arr)\n )))\n return np.sqrt(sum(common.l2_norm(earth_arr, exact_earth))**2)", "def to_fixed(self):\n begin = ifloor((self.year + self.SOLAR_ERA + ((self.month - 1)/12)) * self.SIDEREAL_YEAR + OldHindu.EPOCH)\n return self.day - 1 + next_int(begin - 3, lambda d: self.zodiac(self.sunrise(d + 1)) == self.month)", "def to_fixed(self):\n approx = (OldHindu.EPOCH + self.MEAN_SIDEREAL_YEAR * (self.year + self.LUNAR_ERA + ((self.month - 1) / 12)))\n s = ifloor(approx -\n 1/360 * self.MEAN_SIDEREAL_YEAR *\n (mod(sidereal_solar_longitude(approx) -\n (self.month - 1) * 30 + 180, 360) - 180))\n k = self.day_from_moment(s + Clock.days_from_hours(6))\n if (3 < k < 27):\n temp = k\n else:\n mid = self.from_fixed(s - 15)\n if ((mid.month != self.month) or (mid.leap_month and not self.leap_month)):\n temp = mod(k + 15, 30) - 15\n else:\n temp = mod(k - 15, 30) + 15\n est = s + self.day - temp\n tau = est - mod(self.day_from_moment(est + Clock.days_from_hours(6)) - self.day + 15, 30) + 15\n date = next_int(tau - 1,\n lambda d: (self.day_from_moment(self.alt_sunrise(d)) in\n [self.day, amod(self.day + 1, 30)]))\n return (date + 1) if self.leap_day else date", "def get_state_planet_or_moon(body, t):\n # t = Time.now().utc.datetime\n\n b = None # I don't like red\n exec('b = ephem.{:s}()'.format(body.title()))\n b.compute(ephem.Date(t), epoch='2000')\n\n # fix zero padding\n ra = str(b.a_ra)\n if ra.index(':') == 1:\n ra = '0' + ra\n dec = str(b.a_dec)\n if dec.index(':') == 1:\n dec = '0' + dec\n elif dec.index(':') == 2 and dec[0] == '-':\n dec = '-0' + dec[1:]\n\n # compute rates in arcsec/s:\n dt = datetime.timedelta(seconds=1)\n b.compute(ephem.Date(t + dt), epoch='2000')\n ra_p1 = b.a_ra*180.0/np.pi*3600.0\n dec_p1 = b.a_dec*180.0/np.pi*3600.0\n b.compute(ephem.Date(t - dt), epoch='2000')\n ra_m1 = b.a_ra*180.0/np.pi*3600.0\n dec_m1 = b.a_dec*180.0/np.pi*3600.0\n\n ra_rate = (ra_p1 - ra_m1)/2.0\n dec_rate = (dec_p1 - dec_m1)/2.0\n\n return ra, dec, ra_rate, dec_rate", "def _compute_solar_torque(self, curr_date):\n if self._to_add[2]:\n ratio = self.SolarModel.getLightingRatio(self.satPos_i,\n self.in_frame,\n curr_date)\n\n sunPos = self.inertial2Sat.applyTo(\n self.sun.getPVCoordinates(curr_date,\n self.in_frame).getPosition())\n sunPos = np.array([sunPos.x, sunPos.y, sunPos.z], dtype='float64')\n\n CoM = self.meshDA['CoM_np']\n normal = self.meshDA['Normal_np']\n area = self.meshDA['Area_np']\n coefs = self.meshDA['Coefs_np']\n\n sunSatVector = self.satPos_s + CoM - sunPos\n r = np.linalg.norm(sunSatVector, axis=1)\n rawP = ratio * self.K_REF / (r**2)\n flux = (rawP / r)[:, None] * sunSatVector\n # eliminate arrays where zero flux\n fluxNorm = np.linalg.norm(flux, axis=1)\n Condflux = fluxNorm**2 > Precision.SAFE_MIN\n flux = flux[Condflux]\n normal = normal[Condflux]\n\n # dot product for multidimensional arrays:\n dot = np.einsum('ij,ij->i', flux, normal)\n dot[dot > 0] = dot[dot > 0] * (-1.0)\n if dot.size > 0:\n normal[dot > 0] = normal[dot > 0] * (-1.0)\n\n cN = 2 * area * dot * (coefs[:, 2] / 3 - coefs[:, 1] * dot / fluxNorm)\n cS = (area * dot / fluxNorm) * (coefs[:, 1] - 1)\n force = cN[:, None] * normal + cS[:, None] * flux\n\n sT = np.sum(np.cross(CoM, force), axis=0)\n\n self._sTorque = Vector3D(float(sT[0]), float(sT[1]), float(sT[2]))\n\n else:\n self._sTorque = Vector3D.ZERO", "def mesha_samkranti(g_year):\n jan1 = GregorianDate.new_year(g_year)\n return hindu_solar_longitude_at_or_after(0, jan1)", "def civil_twilight(topos, earth, sun, time):\n\n location = earth + topos\n 
astrocentric = location.at(time).observe(sun).apparent()\n alt, _, _ = astrocentric.altaz('standard')\n return alt.degrees <= -6.0 # definition of civil twilight", "def lunar_phase(cls, tee):\n return mod(cls.lunar_longitude(tee) - cls.hindu_solar_longitude(tee), 360)", "def sectional_moment(self,strain,na_z):\r\n\t\treturn self.steel_total_moment(strain,na_z) + \\\r\n\t\t\tself.concrete_total_moment(strain,na_z)", "def ephem(t):\n global dsz, dcz, dsl, dcl, ssz, scz, ssl, scl, dpar, sdist # bpos common block\n #\n # common block containing the difference ephemeris minus\n # universal time, in seconds. if this is not known it should\n # be set to zero, and the argument to the program should be\n # universal rather than ephemeris time.\n #\n global etmut # tdiff common block\n # common block containing the instruction on which ephemeris to compute\n # moon = 0 - both sun and moon\n # 1 - moon only\n # 2 - sun only\n global moon\n\n pi20 = 62.8318530717958\n # compute universal time in hours\n ts = 876600 * t - 12 - (etmut / 3600)\n hr = np.mod(ts, 24)\n # compute obliquity of the ecliptic\n w = 0.409319747 - 0.0002271107 * t\n cosw = np.cos(w)\n sinw = np.sin(w)\n t2 = t * t\n if moon != 1:\n # compute solar constants for given t\n hs = 4.881627982482 + 628.3319508731 * t + 0.523598775578 * 10 ** (-5) * t2\n hs = np.mod(np.mod(hs, pi20) + pi20, pi20)\n ps = 4.908229466993 + 0.03000526416690 * t + 0.790246300201 * 10 ** (-5) * t2\n es = 0.01675104 - 0.00004180 * t - 0.000000126 * t2\n psig = 0.2617993877971 * (hr - 12.0) + hs\n chmp = np.cos(hs - ps)\n shmp = np.sin(hs - ps)\n ls = hs + shmp * es * (2.0 + 2.5 * es * chmp)\n sls = np.sin(ls)\n cz = sinw * sls\n sz = np.sqrt(1.0 - cz ** 2)\n psis = math.atan2(cosw * sls, np.cos(ls))\n rbarr = 1.0 + es * (chmp + es * (chmp - shmp) * (chmp + shmp))\n ll = psis - psig\n scz = cz\n ssz = sz\n ssl = np.sin(ll)\n scl = np.cos(ll)\n sdist = 1 / rbarr\n\n # compute lunar constants for given t\n\n if moon == 2:\n return\n hm = 4.7199666 + 8399.7091449 * t - 0.0000198 * t2\n pm = 5.83515154 + 71.01804120839 * t - 0.180205 * 10 ** (-3) * t2\n nm = 4.523601515 - 33.75714624 * t + 0.3626406335 * 10 ** (-4) * t2\n # bl bls bf bd are the fundamental arguments of browns theory\n bl = hm - pm\n bls = hs - ps\n bf = hm - nm\n bd = hm - hs\n # lunar lat long and parallax from brown. 
latter two from\n # improved lunar ephemeris, latitude from ras paper of 1908...\n tlongm = (\n hm\n + 0.10976 * np.sin(bl)\n - 0.02224 * np.sin(bl - 2.0 * bd)\n + 0.01149 * np.sin(2.0 * bd)\n + 0.00373 * np.sin(2.0 * bl)\n - 0.00324 * np.sin(bls)\n - 0.00200 * np.sin(2.0 * bf)\n - 0.00103 * np.sin(2.0 * bl - 2.0 * bd)\n - 0.00100 * np.sin(bl + bls - 2.0 * bd)\n + 0.00093 * np.sin(bl + 2.0 * bd)\n - 0.00080 * np.sin(bls - 2.0 * bd)\n + 0.00072 * np.sin(bl - bls)\n - 0.00061 * np.sin(bd)\n - 0.00053 * np.sin(bl + bls)\n )\n tlatm = (\n 0.08950 * np.sin(bf)\n + 0.00490 * np.sin(bl + bf)\n - 0.00485 * np.sin(bf - bl)\n - 0.00303 * np.sin(bf - 2.0 * bd)\n + 0.00097 * np.sin(2.0 * bd + bf - bl)\n - 0.00081 * np.sin(bl + bf - 2.0 * bd)\n + 0.00057 * np.sin(bf + 2.0 * bd)\n )\n plx = (\n 3422.45\n + 186.54 * np.cos(bl)\n + 34.31 * np.cos(bl - 2.0 * bd)\n + 28.23 * np.cos(2.0 * bd)\n + 10.17 * np.cos(2.0 * bl)\n + 3.09 * np.cos(bl + 2.0 * bd)\n + 1.92 * np.cos(bls - 2.0 * bd)\n + 1.44 * np.cos(bl + bls - 2.0 * bd)\n + 1.15 * np.cos(bl - bls)\n - 0.98 * np.cos(bd)\n - 0.95 * np.cos(bl + bls)\n - 0.71 * np.cos(bl - 2.0 * bf)\n + 0.62 * np.cos(3.0 * bl)\n + 0.60 * np.cos(bl - 4.0 * bd)\n )\n sinmla = np.sin(tlatm)\n cosmla = np.cos(tlatm)\n sinmln = np.sin(tlongm)\n cosmln = np.cos(tlongm)\n # ...convert from celestial lat and long according to explan suppl of\n # ......na and le page 26\n cz = cosmla * sinmln * sinw + sinmla * cosw\n sz = np.sqrt(1.0 - cz ** 2)\n at1 = cosmla * sinmln * cosw - sinmla * sinw\n at2 = cosmla * cosmln\n ram = math.atan2(at1, at2)\n ll = ram - psig\n dcz = cz\n dsz = sz\n dsl = np.sin(ll)\n dcl = np.cos(ll)\n dpar = plx\n\n # ------------------------------------------------------------------", "def prada(self):\n scale_factor = 1.0 / (1.0 + self.snapshot.header.redshift)\n r200c_physical = self.r200c * scale_factor / 1000.0 # units Mpc\n\n v200 = (\n (self.snapshot.const.G * self.m200c)\n / r200c_physical\n * self.snapshot.const.Mpc ** 2\n / 1000.0 ** 2\n ) ** 0.5 # units km/s\n\n def y(x, vmax, v200):\n func = np.log(1 + x) - (x / (1 + x))\n return ((0.216 * x) / func) ** 0.5 - (vmax / v200)\n\n concentration = np.zeros((len(self.vmax)))\n for halo in range(self.N_halos):\n if v200[halo] > self.vmax[halo]:\n concentration[halo] = -9999.0\n else:\n try:\n concentration[halo] = newton(\n y, x0=5.0, args=(self.vmax[halo], v200[halo])\n )\n except:\n concentration[halo] = -9999.0\n\n return concentration", "def to_fixed(self):\n approx = OldHindu.EPOCH + (self.SIDEREAL_YEAR * (self.year + self.LUNAR_ERA + ((self.month - 1) / 12)))\n s = ifloor(approx - ((1/360) * self.SIDEREAL_YEAR * mod(self.hindu_solar_longitude(approx) - ((self.month - 1) * 30) + 180, 360) - 180))\n k = self.lunar_day_from_moment(s + Clock.days_from_hours(6))\n if (3 < k < 27):\n temp = k\n else:\n mid = self.lunar_from_fixed(s - 15)\n if ((mid.month != self.month) or\n (mid.leap_month and not self.leap_month)):\n temp = mod(k + 15, 30) - 15\n else:\n temp = mod(k - 15, 30) + 15\n est = s + self.day - temp\n tau = est - mod(self.lunar_day_from_moment(est + Clock.days_from_hours(6)) - self.day + 15, 30) + 15\n date = next_int(tau - 1, lambda d: self.lunar_day_from_moment(self.sunrise(d)) in [self.day, amod(self.day + 1, 30)])\n return date + 1 if self.leap_day else date", "def _P(time='now'):\n obstime = parse_time(time)\n\n # Define the frame where its Z axis is aligned with geocentric north\n geocentric = PrecessedGeocentric(equinox=obstime, obstime=obstime)\n\n return 
_sun_north_angle_to_z(geocentric)", "def what_night_is_it():\n d = datetime.datetime.utcnow() - datetime.timedelta(7 / 24 + 0.5)\n tonight = int(d.strftime('%Y%m%d'))\n return tonight", "def nanos(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"nanos\")", "def nanos(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"nanos\")", "def calc_worst_hour(latitude, weather_data, solar_window_solstice):\n if latitude > 0:\n northern_solstice = weather_data.query('month == 12 & day == 21')\n worst_hour = northern_solstice[northern_solstice.hour == (12 - round(solar_window_solstice/2))].index[0]\n else:\n southern_solstice = weather_data.query('month == 6 & day == 21')\n worst_hour = southern_solstice[southern_solstice.hour == (12 - round(solar_window_solstice/2))].index[0]\n\n return worst_hour", "def _L0(time='now'):\n obstime = parse_time(time)\n\n # Calculate the de-tilt longitude of the meridian due to the Sun's sidereal rotation\n dlon_meridian = Longitude(_DLON_MERIDIAN + (obstime - _J2000) * 14.1844*u.deg/u.day)\n\n # Calculate the de-tilt longitude of the Earth\n dlon_earth = _detilt_lon(get_earth(obstime))\n\n return Longitude(dlon_earth - dlon_meridian)", "def ST_zero_Jy(self):\n c = 1e-8 * Constants.c.to('m/s').value\n f = 1e5 / c * self.lpivot.to('AA').value ** 2 * self.ST_zero_flux.value\n return f * Unit('Jy')", "def nze(self) -> int:", "def nze(self) -> int:", "def yoga(date):\n return ifloor(mod((HinduSolarDate.longitude(date) + HinduLunarDate.longitude(date)) / angle(0, 800, 0), 27)) + 1", "def rising_sign(cls, fixed_date):\n i = quotient(float(cls.tropical_longitude(fixed_date)), 30)\n return [1670/1800, 1795/1800, 1935/1800, 1935/1800, 1795/1800, 1670/1800][mod(i, 6)]", "def sun_is_down(check_time, observatory):\n sun = get_sun(check_time).transform_to(AltAz(obstime=check_time, location=observatory))\n return sun.alt.value < -14", "def sunrise(self):\r\n try:\r\n return str(self.connect()['sys']['sunrise'])\r\n except:\r\n return '@weather_sunrise'", "def get_townsend_pi(time, rates):\n return 16 * (rates**2) * time * numpy.exp(-(4 * rates * time))", "def solvate(self):\n\n pass", "def getZeros(self, times):\n T = convertTimes(times = times, log = self.log)\n T[T == 0.] 
= 1.e-16 # replace by epsilon to allow division\n return -self.getLnDisc(times = T)/T", "def solar_angles(df, lat, lon, alt=0):\n\n jd = pd.Timestamp(df).to_julian_date()\n\n # offset (2451543.5)\n d_offset = pd.Timestamp('1999-12-31 00:00:00').to_julian_date()\n\n d = jd - d_offset\n\n\n # Keplerian elements for the sun (geocentric)\n w = 282.9404 + 4.70935E-5 * d # longitude of perihelion [degrees]\n a = 1.0 # mean distance [AU]\n e = 0.016709 - 1.151E-9 * d # eccentricity [-]\n M = np.mod(356.0470 + 0.9856002585 * d, 360.0) # mean anomaly [degrees]\n L = w + M # Sun's mean longitude [degrees]\n oblecl = 23.4393 - 3.563E-7 * d # Sun's obliquity of the eliptic [degrees]\n\n # Auxiliary angle [degrees]\n E = M + (180.0 / np.pi) * e * np.sin(np.deg2rad(M)) * (1.0 + e * np.cos(np.deg2rad(M)))\n\n # Rectangular coordinates in the plane of the ecliptic (x-axis toward perihelion)\n x = np.cos(np.deg2rad(E)) - e\n y = np.sin(np.deg2rad(E)) * np.sqrt(1 - (e ** 2))\n\n # Distance (r) and true anomaly (v)\n r = np.sqrt((x ** 2) + (y ** 2))\n v = np.rad2deg(np.arctan2(y, x))\n\n # Longitude of the sun\n lon_sun = v + w\n\n # Ecliptic rectangular coordinates\n xeclip = r * np.cos(np.deg2rad(lon_sun))\n yeclip = r * np.sin(np.deg2rad(lon_sun))\n zeclip = 0.0\n\n # Rotate coordinates to equatorial rectangular coordinates\n xequat = xeclip\n yequat = yeclip * np.cos(np.deg2rad(oblecl)) + zeclip * np.sin(np.deg2rad(oblecl))\n zequat = yeclip * np.sin(np.deg2rad(23.4406)) + zeclip * np.cos(np.deg2rad(oblecl))\n\n # Convert equatorial rectangular coordinates to right-ascension (RA) and declination\n r = np.sqrt(xequat ** 2 + yequat ** 2 + zequat ** 2) - (alt / 149598000.0)\n RA = np.rad2deg(np.arctan2(yequat, xequat))\n delta = np.rad2deg(np.arcsin(zequat / r))\n\n # Calculate local siderial time\n uth = df.hour + (df.minute / 60.0) + (df.second / 3600.0)\n gmst0 = np.mod(L + 180.0, 360.0) / 15.0\n sidtime = gmst0 + uth + (lon / 15.0)\n\n # Replace RA with hour-angle (HA)\n HA = sidtime * 15.0 - RA\n\n # Convert to rectangular coordinates\n x = np.cos(np.deg2rad(HA)) * np.cos(np.deg2rad(delta))\n y = np.sin(np.deg2rad(HA)) * np.cos(np.deg2rad(delta))\n z = np.sin(np.deg2rad(delta))\n\n # Rotate along an axis going East-West\n xhor = x * np.cos(np.deg2rad(90.0 - lat)) - z * np.sin(np.deg2rad(90.0 - lat))\n yhor = y\n zhor = x * np.sin(np.deg2rad(90.0 - lat)) + z * np.cos(np.deg2rad(90.0 - lat))\n\n # Find azimuthal and elevation angles\n azimuthal = np.rad2deg(np.arctan2(yhor, xhor)) + 180.0\n elevation = np.rad2deg(np.arcsin(zhor))\n\n zenith = 90.0 - elevation\n\n return np.column_stack((zenith, elevation, azimuthal))", "def _compute_solar_torque(self, curr_date):\n if self._to_add[2]:\n inertial2Sat = self.spacecraft_state.getAttitude().getRotation()\n\n ratio = self.SolarModel.getLightingRatio(self.satPos_i,\n self.in_frame,\n curr_date)\n\n sunPos = inertial2Sat.applyTo(\n self.sun.getPVCoordinates(curr_date,\n self.in_frame).getPosition())\n self._sTorque = Vector3D.ZERO\n\n iterator = itertools.izip(self.meshDA['CoM'],\n self.meshDA['Normal'],\n self.meshDA['Area'],\n self.meshDA['Coefs'])\n\n for CoM, normal, area, coefs in iterator:\n position = self.satPos_s.add(CoM)\n\n # compute flux in inertial frame\n sunSatVector = \\\n position.subtract(sunPos)\n r2 = sunSatVector.getNormSq()\n\n rawP = ratio * self.K_REF / r2\n flux = Vector3D(rawP / sqrt(r2), sunSatVector)\n\n # compute Radiation Pressure Force:\n if flux.getNormSq() > Precision.SAFE_MIN:\n # illumination (we are not in umbra)\n # rotate 
flux to spacecraft frame:\n dot = self.V3_dot(normal, flux)\n\n if dot > 0:\n # the solar array is illuminated backward,\n # fix signs to compute contribution correctly\n dot = -dot\n normal = normal.negate()\n absorbCoeff = coefs[0]\n specularReflCoeff = coefs[1]\n diffuseReflCoeff = 1 - (absorbCoeff + specularReflCoeff)\n try:\n assert(diffuseReflCoeff >= 0)\n except AssertionError:\n raise AssertionError(\n \"Negative diffuse reflection coefficient not possible!\")\n psr = flux.getNorm()\n # Vallado's equation uses different parameters which are\n # related to our parameters as:\n # cos (phi) = - dot / (psr*area)\n # n = N (n...unit vector)\n # s = -fluxSat / psr (s...unit vector)\n cN = 2 * area * dot * (diffuseReflCoeff / 3 -\n specularReflCoeff * dot / psr)\n cS = (area * dot / psr) * (specularReflCoeff - 1)\n Force = Vector3D(float(cN), normal, float(cS), flux)\n # Force already in spacecraft frame. No need to convert\n self._sTorque = self._sTorque.add(self.V3_cross(CoM, Force))\n\n else:\n self._sTorque = Vector3D.ZERO", "def big_psi(sun_pos, sat_3d_pos):\n return np.arccos(np.dot(sun_pos.T, sat_3d_pos) / (vector_magnitude(sun_pos[0], sun_pos[1], sun_pos[2]) * vector_magnitude(sat_3d_pos[0], sat_3d_pos[1], sat_3d_pos[2])))", "def checkSun(ontology_sun):\n elevation = ontology_sun.has_elevation[0] #gets the elevation value of the Sun in the ontology. \n azimuth = ontology_sun.has_azimuth[0] #gets the azimuth value of the Sun in the ontology. \n intensity = ontology_sun.has_intensity[0] #gets the intensity value of the Sun in the ontology.\n return xosc.Sun(intensity,azimuth,elevation)", "def _get_storm_velocities_missing(\n storm_object_table,\n e_folding_radius_metres=DEFAULT_VELOCITY_EFOLD_RADIUS_METRES):\n\n east_velocities_m_s01 = storm_object_table[\n tracking_utils.EAST_VELOCITY_COLUMN].values\n\n north_velocities_m_s01 = storm_object_table[\n tracking_utils.NORTH_VELOCITY_COLUMN].values\n\n if not numpy.any(numpy.isnan(east_velocities_m_s01)):\n return storm_object_table\n\n unique_times_unix_sec, orig_to_unique_indices = numpy.unique(\n storm_object_table[tracking_utils.VALID_TIME_COLUMN].values,\n return_inverse=True)\n\n num_times = len(unique_times_unix_sec)\n\n # Use neighbouring storms at same time to estimate missing velocities.\n for j in range(num_times):\n these_indices = numpy.where(orig_to_unique_indices == j)[0]\n if not numpy.any(numpy.isnan(east_velocities_m_s01[these_indices])):\n continue\n\n (east_velocities_m_s01[these_indices],\n north_velocities_m_s01[these_indices]\n ) = _estimate_velocity_by_neigh(\n x_coords_metres=storm_object_table[\n CENTROID_X_COLUMN].values[these_indices],\n y_coords_metres=storm_object_table[\n CENTROID_Y_COLUMN].values[these_indices],\n x_velocities_m_s01=east_velocities_m_s01[these_indices],\n y_velocities_m_s01=north_velocities_m_s01[these_indices],\n e_folding_radius_metres=e_folding_radius_metres)\n\n if not numpy.any(numpy.isnan(east_velocities_m_s01)):\n return storm_object_table.assign(**{\n tracking_utils.EAST_VELOCITY_COLUMN: east_velocities_m_s01,\n tracking_utils.NORTH_VELOCITY_COLUMN: north_velocities_m_s01\n })\n\n # Use all storms at same time to estimate missing velocities.\n for j in range(num_times):\n these_indices = numpy.where(orig_to_unique_indices == j)[0]\n if not numpy.any(numpy.isnan(east_velocities_m_s01[these_indices])):\n continue\n\n (east_velocities_m_s01[these_indices],\n north_velocities_m_s01[these_indices]\n ) = _estimate_velocity_by_neigh(\n x_coords_metres=storm_object_table[\n 
CENTROID_X_COLUMN].values[these_indices],\n y_coords_metres=storm_object_table[\n CENTROID_Y_COLUMN].values[these_indices],\n x_velocities_m_s01=east_velocities_m_s01[these_indices],\n y_velocities_m_s01=north_velocities_m_s01[these_indices],\n e_folding_radius_metres=numpy.nan)\n\n if not numpy.any(numpy.isnan(east_velocities_m_s01)):\n return storm_object_table.assign(**{\n tracking_utils.EAST_VELOCITY_COLUMN: east_velocities_m_s01,\n tracking_utils.NORTH_VELOCITY_COLUMN: north_velocities_m_s01\n })\n\n # Use neighbouring storms at all times to estimate missing velocities.\n for j in range(num_times):\n these_indices = numpy.where(orig_to_unique_indices == j)[0]\n if not numpy.any(numpy.isnan(east_velocities_m_s01[these_indices])):\n continue\n\n these_east_velocities_m_s01, these_north_velocities_m_s01 = (\n _estimate_velocity_by_neigh(\n x_coords_metres=storm_object_table[CENTROID_X_COLUMN].values,\n y_coords_metres=storm_object_table[CENTROID_Y_COLUMN].values,\n x_velocities_m_s01=east_velocities_m_s01 + 0.,\n y_velocities_m_s01=north_velocities_m_s01 + 0.,\n e_folding_radius_metres=e_folding_radius_metres)\n )\n\n east_velocities_m_s01[these_indices] = these_east_velocities_m_s01[\n these_indices]\n north_velocities_m_s01[these_indices] = these_north_velocities_m_s01[\n these_indices]\n\n if not numpy.any(numpy.isnan(east_velocities_m_s01)):\n return storm_object_table.assign(**{\n tracking_utils.EAST_VELOCITY_COLUMN: east_velocities_m_s01,\n tracking_utils.NORTH_VELOCITY_COLUMN: north_velocities_m_s01\n })\n\n # Use all storms at all times to estimate missing velocities.\n for j in range(num_times):\n these_indices = numpy.where(orig_to_unique_indices == j)[0]\n if not numpy.any(numpy.isnan(east_velocities_m_s01[these_indices])):\n continue\n\n these_east_velocities_m_s01, these_north_velocities_m_s01 = (\n _estimate_velocity_by_neigh(\n x_coords_metres=storm_object_table[CENTROID_X_COLUMN].values,\n y_coords_metres=storm_object_table[CENTROID_Y_COLUMN].values,\n x_velocities_m_s01=east_velocities_m_s01 + 0.,\n y_velocities_m_s01=north_velocities_m_s01 + 0.,\n e_folding_radius_metres=numpy.nan)\n )\n\n east_velocities_m_s01[these_indices] = these_east_velocities_m_s01[\n these_indices]\n north_velocities_m_s01[these_indices] = these_north_velocities_m_s01[\n these_indices]\n\n if not numpy.any(numpy.isnan(east_velocities_m_s01)):\n return storm_object_table.assign(**{\n tracking_utils.EAST_VELOCITY_COLUMN: east_velocities_m_s01,\n tracking_utils.NORTH_VELOCITY_COLUMN: north_velocities_m_s01\n })\n\n # Replace missing velocities with defaults.\n nan_indices = numpy.where(numpy.isnan(east_velocities_m_s01))[0]\n east_velocities_m_s01[nan_indices] = DEFAULT_EAST_VELOCITY_M_S01\n north_velocities_m_s01[nan_indices] = DEFAULT_NORTH_VELOCITY_M_S01\n\n return storm_object_table.assign(**{\n tracking_utils.EAST_VELOCITY_COLUMN: east_velocities_m_s01,\n tracking_utils.NORTH_VELOCITY_COLUMN: north_velocities_m_s01\n })", "def getSolar():\n ina = INA219(address=int('0x44', 16))\n sol_bus_v = ina.getBusVoltage_V()\n sol_shunt_mv = ina.getShuntVoltage_mV()\n sol_curr_ma = ina.getCurrent_mA()\n sol_volt_v = (ina.getBusVoltage_V() + ina.getShuntVoltage_mV() / 1000)\n sol_power_mw = ina.getPower_mW()\n return sol_volt_v, sol_curr_ma", "def solar_azimuth(self, dateandtime=None):\n\n if self.astral is None:\n self.astral = Astral()\n\n if dateandtime is None:\n dateandtime = datetime.datetime.now(tz=self.tz)\n \n return self.astral.solar_azimuth(dateandtime, self.latitude, self.longitude)", "def 
Nsat(self, m):\n result = (m - self.kappa * self.mCut)\n if result>0.:\n result /= self.m1\n result **= self.alpha\n result *= self.Ncen(m)\n else:\n result = 0.\n return result", "def estimate_sunrise_sunset(self, date, verbose=True):\n if self.source_type != 'solar':\n raise ValueError(\"You can only estimate sunrise and sunset for \"\n \"solar sources.\")\n\n date = pd.Timestamp(date)\n\n if self.diurnal_pattern is None:\n if verbose:\n print(\"Warning: Source {} has no diurnal pattern, estimating \"\n \"sunrise and sunset using average of past data.\"\n .format(self.name), file=sys.stderr)\n return Source.estimate_sunrise_sunset(self, date)\n\n if verbose:\n print(\"{} {}: Using Diurnal Pattern to estimate sunrise and sunset\"\n .format(self.name, date.date()))\n\n diurnal_pattern = self.diurnal_pattern\n daily_pattern = diurnal_pattern[date:date+datetime.timedelta(hours=23)]\n\n sunrise, sunset = None, None\n\n # This will walk through finding first sun hour and first night hour\n for hour, pattern in enumerate(daily_pattern.values):\n if sunrise is None and pattern > 0:\n sunrise = hour\n\n # If sun has risen, and we have not found night and we reach a 0\n if sunrise is not None and sunset is None and pattern == 0:\n sunset = hour\n\n if sunrise is None and sunset is None:\n raise ValueError(\"No solar power was generated on {}\".format(date))\n\n return sunrise, sunset" ]
[ "0.6816998", "0.66865885", "0.6304783", "0.6012745", "0.60126984", "0.59854287", "0.592383", "0.585429", "0.5843106", "0.5810727", "0.58008647", "0.5759068", "0.5738313", "0.57033736", "0.570194", "0.56586534", "0.5638657", "0.5631576", "0.5618385", "0.5601817", "0.55713826", "0.55486596", "0.552072", "0.5519554", "0.55015796", "0.5499403", "0.54887146", "0.5464477", "0.5449517", "0.5447395", "0.5447132", "0.54467803", "0.54467803", "0.54356337", "0.54350466", "0.54340696", "0.5433409", "0.54333764", "0.54201436", "0.5389852", "0.538593", "0.53787833", "0.5370324", "0.5356593", "0.5342005", "0.5312221", "0.5310917", "0.5310474", "0.5299721", "0.52922916", "0.52905494", "0.5285112", "0.5284906", "0.5272643", "0.5272352", "0.52620685", "0.5252269", "0.5243369", "0.5239879", "0.5221649", "0.52160645", "0.5200024", "0.5197512", "0.5190975", "0.5181375", "0.51802456", "0.5158112", "0.5128135", "0.5127656", "0.5125659", "0.5123375", "0.5108874", "0.51028585", "0.5101231", "0.51004505", "0.5100154", "0.509112", "0.50831354", "0.50831354", "0.5079778", "0.5061979", "0.5061307", "0.5058663", "0.5058663", "0.5051523", "0.5044744", "0.5033029", "0.5024348", "0.50209194", "0.5015247", "0.50133365", "0.5007794", "0.5005464", "0.49906677", "0.49833307", "0.49794018", "0.4970615", "0.4970185", "0.4967969", "0.49673373" ]
0.6865988
0
Calculates sunset time (the time in the evening when the sun is a 0.833 degrees below the horizon. This is to account for refraction.)
def sunset(self, date=None, local=True): if self.astral is None: self.astral = Astral() if date is None: date = datetime.date.today() sunset = self.astral.sunset_utc(date, self.latitude, self.longitude) if local: return sunset.astimezone(self.tz) else: return sunset
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sundial_time(cls, tee):\n date = Clock.fixed_from_moment(tee)\n time = mod(tee, 1)\n q = ifloor(4 * time)\n if q == 0:\n a = cls.sunset(date - 1)\n b = cls.sunrise(date)\n t = Clock.days_from_hours(-6)\n elif q == 3:\n a = cls.sunset(date)\n b = cls.sunrise(date + 1)\n t = Clock.days_from_hours(18)\n else:\n a = cls.sunrise(date)\n b = cls.sunset(date)\n t = Clock.days_from_hours(6)\n return a + (2 * (b - a) * (time - t))", "def sunset(cls, date):\n return (date + Clock.days_from_hours(18) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n (((1577917828/1582237828) / 360) *\n (- cls.ascensional_difference(date, cls.LOCATION) +\n (3/4 * cls.solar_sidereal_difference(date)))))", "def sunset_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n sunset = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return sunset", "def _get_sun_rise_set_time(self, sun_time):\n if sun_time:\n return datetime.fromtimestamp(sun_time).strftime(self.time_format)\n return sun_time", "def alt_sunrise(cls, date):\n rise = cls.UJJAIN.dawn(date, angle(0, 47, 0))\n return 1/24 * 1/60 * iround(rise * 24 * 60)", "def GetSunriseSunset(latitude_deg, longitude_deg, utc_datetime, timezone):\n\n # Day of the year\n day = solar.GetDayOfYear(utc_datetime)\n\n # Solar hour angle\n SHA = ((timezone)* 15.0 - longitude_deg)\n\n # Time adjustment\n TT = (279.134+0.985647*day)*math.pi/180\n\n # Time adjustment in hours\n time_adst = ((5.0323 - 100.976*math.sin(TT)+595.275*math.sin(2*TT)+\n 3.6858*math.sin(3*TT) - 12.47*math.sin(4*TT) - 430.847*math.cos(TT)+\n 12.5024*math.cos(2*TT) + 18.25*math.cos(3*TT))/3600)\n \n # Time of noon\n TON = (12 + (SHA/15.0) - time_adst)\n \n sunn = (math.pi/2-(23.45*math.pi/180)*math.tan(latitude_deg*math.pi/180)*\n math.cos(2*math.pi*day/365.25))*(180/(math.pi*15))\n\n # Sunrise_time in hours\n sunrise_time = (TON - sunn + time_adst)\n \n # Sunset_time in hours\n sunset_time = (TON + sunn - time_adst) \n\n sunrise_time_dt = date_with_decimal_hour(utc_datetime, sunrise_time) \n sunset_time_dt = date_with_decimal_hour(utc_datetime, sunset_time) \n\n return sunrise_time_dt, sunset_time_dt", "def equation_of_time(cls, date):\n offset = cls.sine(cls.mean_position(date, cls.ANOMALISTIC_YEAR))\n equation_sun = (offset * 
angle(57, 18, 0) * (14/360 - (abs(offset) / 1080)))\n return ((cls.daily_motion(date) / 360) * (equation_sun / 360) * cls.SIDEREAL_YEAR)", "def MoonPhase(time):\n return PairLongitude(Body.Moon, Body.Sun, time)", "def _sunrise_sunset(self, seconds=None, milliseconds=None, hour=None, freq=None, temp_start=None, temp_end=None, setting=True):\n FUDGE_FACTOR = 0.86\n if hour==None:\n # Work out what the defaults should be\n ## MOVE IT INSIDE THE Override values.\n t0 = temp_start.split('K')[0]\n t1 = temp_end.split('K')[0]\n if t0 > t1:\n temp_step = -100\n x_start = 0\n x_step_amount = 1\n else:\n temp_step = 100\n x_start = 60\n x_step_amount = -1\n temp_0 = int(t0)\n temp_n = int(t1)\n # You can override these defaults if either temp_start or temp_end is set\n if temp_start:\n try:\n _exists = NAMED_COLOURS[temp_start.lower()]\n except (TypeError,ValueError): # Means the starting temp has NOT been provided, use default\n pass\n except KeyError:\n logging.warning(\"Sunrise/sunset: Your starting colour temperature '{}' is not a valid colour temperature\".format(temp_start))\n if temp_end:\n try:\n _exists = NAMED_COLOURS[temp_end.lower()]\n except (TypeError, ValueError): # Means the ending temp has NOT been provided, use default\n pass\n except KeyError:\n logging.warning(\"Sunrise/sunset: Your ending colour temperature '{}' is not a valid colour temperature\".format(temp_end))\n\n #Add in a fudge factor to cater for CPU doing other things:\n #Calculate our z scaling factor:\n target_time = self.clean_time_in_milliseconds(seconds, milliseconds, default_seconds=1, minimum_milliseconds=1000)\n z_factor = (target_time*FUDGE_FACTOR) / 2.564949357\n x_step = x_start\n #And run the loop\n t1 = time.time()\n check = True #We only check the current values on the first run\n for temp in xrange(temp_0,temp_n,temp_step):\n if self._sequence_stop_signal: #Bail if sequence should stop\n return None\n k = u\"%sk\" % temp\n self.fade(k, fade_time=((100+z_factor)/(65-x_step)), check=check) #ms, slows down as sunset progresses\n x_step += x_step_amount\n check=False\n t2 = time.time()\n logging.info(\"%ss, target=%ss\" % ((t2-t1),target_time/1000.0))\n else:\n temp_0=temp_start[0].split('K')[0]\n\t temp_n=temp_end[0].split('K')[0]\n if self.p_alarm != []:\n self.teardown_alarm()\n process_alarm=[]\n for tt in range(0,len(hour)):\n milliseconds=0\n proc_hour=hour[tt]\n\t\talarm_arg=(proc_hour,temp_0,temp_n,FUDGE_FACTOR,freq,seconds[tt],milliseconds)\n \n process_alarm.append(Process(target=self.schedule_alarm,args=alarm_arg))\n [pp.start() for pp in process_alarm] # Start processes in the background which contain the schedule of the alarm\n self.p_alarm=process_alarm", "def sunset(self):\r\n try:\r\n return str(self.connect()['sys']['sunset'])\r\n except:\r\n return '@weather_sunset'", "def _solar_time(date, lon, lat):\n # IDL is computing time_t as hours and fractional minutes\n time_t = ee.Date(date).get('hour').add(\n ee.Date(date).get('minute').divide(60))\n\n # This will return the hour floating point value\n # time_t = ee.Date(date).get('hour').add(ee.Date(date).getFraction('hour'))\n\n # CGM - DOY and hour could be images in order to make these expressions\n julian = _to_jd(date)\n\n # Sunrise time\n julian_ = time_t.divide(24.0).add(julian)\n j_cen = julian_.add(0.5 - 2451545.0).divide(36525.0)\n # CGM - Does the mod happen before or after the multiply\n lon_sun = j_cen.multiply(0.0003032).add(36000.76983) \\\n .multiply(j_cen).mod(360.0).add(280.46646).subtract(360)\n an_sun = 
j_cen.multiply(-0.0001537).add(35999.05029) \\\n .multiply(j_cen).add(357.52911)\n ecc = j_cen.multiply(0.0000001267).add(0.000042037) \\\n .multiply(j_cen).multiply(-1).add(0.016708634)\n ob_ecl = j_cen.multiply(-0.001813).add(0.00059) \\\n .multiply(j_cen).add(46.815) \\\n .multiply(j_cen).multiply(-1).add(21.448) \\\n .divide(60.0).add(26).divide(60).add(23)\n ob_corr = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).cos() \\\n .multiply(0.00256).add(ob_ecl)\n var_y = ob_corr.divide(2.0).multiply(deg2rad).tan().multiply(\n ob_corr.divide(2.0).multiply(deg2rad).tan())\n eq_t = (\n lon_sun.multiply(2.0).multiply(deg2rad).sin().multiply(var_y)\n .subtract(an_sun.multiply(deg2rad).sin().multiply(ecc).multiply(2.0))\n .add(an_sun.multiply(deg2rad).sin()\n .multiply(lon_sun.multiply(2.0).multiply(deg2rad).cos())\n .multiply(var_y).multiply(ecc).multiply(4.0))\n .subtract(lon_sun.multiply(4.0).multiply(deg2rad).sin()\n .multiply(var_y).multiply(var_y).multiply(0.5))\n .subtract(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(ecc).multiply(ecc).multiply(1.25))\n .multiply(4.0).multiply(rad2deg))\n sun_eq = (\n an_sun.multiply(deg2rad).sin().multiply(\n j_cen.multiply(0.000014).add(0.004817)\n .multiply(j_cen).multiply(-1).add(1.914602))\n .add(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(j_cen.multiply(-0.000101).add(0.019993)))\n .add(an_sun.multiply(3.0).multiply(deg2rad).sin().multiply(0.000289)))\n sun_true = sun_eq.add(lon_sun)\n sun_app = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).sin() \\\n .multiply(-0.00478).subtract(0.00569).add(sun_true)\n\n # CGM - Intentionally not converting back to degrees\n d = ob_corr.multiply(deg2rad).sin() \\\n .multiply(sun_app.multiply(deg2rad).sin()) \\\n .asin()\n\n # CGM - Functions below are lat/lon dependent and can be written as\n # ee.Image expressions\n # CGM - d is still in radians, not converting\n ha_t = lat.expression(\n 'acos((cos(90.833 * pi / 180) / (cos(lat) * cos(d))) - tan(lat) * tan(d))'\n ' * (180 / pi)',\n {'lat': lat, 'd': d, 'pi': math.pi})\n\n # print('\\n{:10s} {:.12f}'.format('julian_', julian_.getInfo()))\n # print('{:10s} {:.12f}'.format('time_t', time_t.getInfo()))\n # print('{:10s} {:.12f}'.format('j_cen', j_cen.getInfo()))\n # print('{:10s} {:.12f}'.format('lon_sun', lon_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('an_sun', an_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('ecc', ecc.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_ecl', ob_ecl.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_corr', ob_corr.getInfo()))\n # print('{:10s} {:.12f}'.format('var_y', var_y.getInfo()))\n # print('{:10s} {:.12f}'.format('eq_t', eq_t.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_eq', sun_eq.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_true', sun_true.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_app', sun_app.getInfo()))\n # print('{:10s} {:.12f}'.format('d', d.getInfo()))\n\n return d, eq_t, ha_t", "def _normalizeTime(self, t : float) -> float:\n return (t - self.t0)/self.tau", "def estimate_sunrise_sunset(self, date):\n\n if self.source_type != 'solar':\n raise ValueError(\"You can only estimate sunrise and sunset for \"\n \"solar sources.\")\n\n date = pd.Timestamp(date)\n historic_data = self.data\n # The range is 14 days ago to the end of yesterday\n start_date = date - datetime.timedelta(days=14)\n end_date = date - datetime.timedelta(hours=1)\n\n # We grab all hours where actual power is greater than 0\n relevant_data = historic_data[start_date:end_date]\n 
daylight_data = relevant_data[relevant_data['actuals'] > 0]\n\n # We do this to stop a warning from appearing, we know it's a copy\n daylight_data.is_copy = False\n daylight_data['hours'] = daylight_data.index.hour\n\n # Find the min and max hour for each day where we have positive\n # observed power generation.\n sunrises = daylight_data.groupby(daylight_data.index.date).min()['hours']\n sunsets = daylight_data.groupby(daylight_data.index.date).max()['hours']\n\n # We round in order to have an integer value for sunrise and sunset.\n average_sunrise = int(max(round(sunrises.mean()) - 1, 0))\n average_sunset = int(min(round(sunsets.mean()) + 1, 23))\n\n return average_sunrise, average_sunset", "def sunset(self, seconds=None, milliseconds=None, temp_start=None, temp_end=None):\n return self.run_sequence(self._sunrise_sunset, seconds=seconds, milliseconds=milliseconds, temp_start=temp_start, temp_end=temp_end, setting=True)", "def calc_swd(lon, lat, hour, doy):\n\n lon = -lon\n sda = 0.409 * np.cos(2. * np.pi * (doy - 173.) / 365.)\n sinlea = np.sin(2. * np.pi * lat / 360.) * np.sin(sda) - \\\n np.cos(2. * np.pi * lat / 360.) * np.cos(sda) * \\\n np.cos(2. * np.pi * (hour*3600.) / 86400. - 2. * np.pi * lon / 360.)\n sinlea = np.maximum(sinlea, 1e-9)\n Tr = (0.6 + 0.2 * sinlea)\n swin = 1368. * Tr * sinlea\n\n return swin", "def lunar_phase(cls, tee):\n return mod(cls.lunar_longitude(tee) - cls.hindu_solar_longitude(tee), 360)", "def computeDaytimeStartEnd(self, date):\n dayStartTime = datetime.datetime.combine(date.date(), datetime.time())\n #compute sunrise time for that date\n (h, m, s) = self.sun.sunrise(when=date)\n time_delta = datetime.timedelta(hours=h, minutes=m, seconds=s)\n sunrise_datetime = dayStartTime + time_delta\n #print(sunrise_datetime) \n #compute sunset time for that date \n (h, m, s) = self.sun.sunset(when=date)\n time_delta = datetime.timedelta(hours=h, minutes=m, seconds=s)\n sunset_datetime = dayStartTime + time_delta\n \n return (sunrise_datetime, sunset_datetime)", "def sunrise(cls, date):\n return (date + Clock.days_from_hours(6) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n ((1577917828/1582237828 / 360) *\n (cls.ascensional_difference(date, cls.LOCATION) +\n (1/4 * cls.solar_sidereal_difference(date)))))", "def stormEnd(tp, Ks, F, Fp, presHead, thetaSat, thetaInit, endingTime):\n\n numeratorLN = Fp + np.absolute(presHead)*(thetaSat - thetaInit)\n denomLN = F + np.absolute(presHead)*(thetaSat - thetaInit)\n naturalLog = np.log(numeratorLN/denomLN)\n\n product1 = np.absolute(presHead)*(thetaSat - thetaInit)*naturalLog\n brackets = F - Fp + product1\n\n product2 = (1/Ks)*brackets\n time = tp + product2 - endingTime\n return time", "def sun_set_rise_times(self, date=None):\n rstimes = (self.sunset(date=date),\n self.evening_twilight_12(date=date),\n self.evening_twilight_18(date=date),\n self.morning_twilight_18(date=date),\n self.morning_twilight_12(date=date),\n self.sunrise(date=date))\n return rstimes", "def calculate_pv_output(dt: datetime, sunrise: datetime, sunset: datetime) -> int:\n\n distance_to_zenith = (sunset - sunrise) / 2\n zenith = sunrise + distance_to_zenith\n dist_to_zenith_seconds = distance_to_zenith.total_seconds()\n\n zenith_percentage = abs(zenith - dt).total_seconds() / dist_to_zenith_seconds\n\n sun_intensity = zenith_percentage ** 2\n output = PV_MAX_TOTAL_OUTPUT_KW - (PV_MAX_TOTAL_OUTPUT_KW * sun_intensity)\n\n return int(output)", "def __init__(self, h=0, m=0, s=0, after_sunrise=False, 
after_sunset=False):\n assert (after_sunrise and after_sunset) == False, \\\n \"Must not specify both after_sunrise and after_sunset\"\n \n self.time = (h, m, s)\n self.after_sunrise = after_sunrise\n self.after_sunset = after_sunset", "def seconds_since_midnight(time):\n return time.hour * 3600 + time.minute * 60 + time.second", "def civil_twilight(topos, earth, sun, time):\n\n location = earth + topos\n astrocentric = location.at(time).observe(sun).apparent()\n alt, _, _ = astrocentric.altaz('standard')\n return alt.degrees <= -6.0 # definition of civil twilight", "def computeSunTime(self, latitude, longitude, startDate, endDate): \n self.sun = sun(lat=latitude, long=longitude)\n dateTime = datetime.datetime.combine(startDate, datetime.time(hour=8))\n while dateTime.date() <= endDate: \n daytimeStart, daytimeEnd = self.computeDaytimeStartEnd(dateTime)\n self.sunriseTimeDict[dateTime.date()] = daytimeStart\n self.sunsetTimeDict[dateTime.date()] = daytimeEnd\n dateTime += self.oneDayDelta", "def sunrise(self):\r\n try:\r\n return str(self.connect()['sys']['sunrise'])\r\n except:\r\n return '@weather_sunrise'", "def sky(seed=425, th=150, old=False):\n \n # impact parameters\n M = 3e7*u.Msun\n B = 19.95*u.kpc\n #B = 20.08*u.kpc\n V = 190*u.km/u.s\n phi = coord.Angle(0*u.deg)\n th = 150\n theta = coord.Angle(th*u.deg)\n Tenc = 0.01*u.Gyr\n T = 0.5*u.Gyr\n dt = 0.05*u.Myr\n rs = 0*u.pc\n \n old_label = ''\n np.random.seed(seed)\n observer = {'z_sun': 27.*u.pc, 'galcen_distance': 8.3*u.kpc, 'roll': 60*u.deg, 'galcen_coord': coord.SkyCoord(ra=300*u.deg, dec=-90*u.deg, frame='icrs')}\n vobs = {'vcirc': 220*u.km/u.s, 'vlsr': [0, 0, 0]*u.km/u.s}\n wangle = 180*u.deg\n \n if old:\n old_label = '_old_up'\n observer = {'z_sun': -2000.*u.pc, 'galcen_distance': 8.3*u.kpc, 'roll': 50*u.deg, 'galcen_coord': coord.SkyCoord(ra=300*u.deg, dec=-90*u.deg, frame='icrs')}\n vobs = {'vcirc': 220*u.km/u.s, 'vlsr': [0,0,0]*u.km/u.s}\n \n # impact parameters\n M = 3e7*u.Msun\n B = 20.06*u.kpc\n V = 190*u.km/u.s\n phi = coord.Angle(0*u.deg)\n th = 155\n theta = coord.Angle(th*u.deg)\n Tenc = 0.01*u.Gyr\n T = 0.55*u.Gyr\n dt = 0.05*u.Myr\n #dt = 1*u.Myr\n rs = 0*u.pc\n \n # potential parameters\n potential = 3\n Vh = 220*u.km/u.s\n q = 1*u.Unit(1)\n rhalo = 20*u.pc\n par_pot = np.array([Vh.si.value, q.value, rhalo.si.value])\n \n # setup tube\n Nstar = 1400\n wx = 30*u.kpc\n wy = 0*u.pc\n wz = 0*u.pc\n sx = 0*u.km/u.s\n \n xphi = np.linspace(-0.3*np.pi,0.3*np.pi, Nstar)\n xphi0 = np.linspace(-0.1*np.pi, 0.1*np.pi, 1000)\n xphi1 = np.linspace(-0.28*np.pi, -0.1*np.pi, 200)\n xphi2 = np.linspace(0.1*np.pi, 0.32*np.pi, 200)\n xphi = np.concatenate([xphi1, xphi0, xphi2])\n \n xr = 20*u.kpc + np.random.randn(Nstar)*0.0*u.kpc\n x = np.sin(xphi) * xr\n y = np.cos(xphi) * xr\n z = x * 0\n vx = -np.cos(xphi) * Vh# * 0.94\n vy = np.sin(xphi) * Vh #* 0.97\n vz = vx * 0\n # closest to impact\n ienc = np.argmin(np.abs(x))\n \n # generate stream model\n potential_perturb = 1\n par_perturb = np.array([M.si.value, 0., 0., 0.])\n x1, x2, x3, v1, v2, v3 = interact.interact(par_perturb, B.si.value, phi.rad, V.si.value, theta.rad, Tenc.si.value, T.si.value, dt.si.value, par_pot, potential, potential_perturb, x.si.value, y.si.value, z.si.value, vx.si.value, vy.si.value, vz.si.value)\n stream = {}\n stream['x'] = (np.array([x1, x2, x3])*u.m).to(u.pc)\n stream['v'] = (np.array([v1, v2, v3])*u.m/u.s).to(u.km/u.s)\n \n # sky coordinates\n xgal = coord.Galactocentric(stream['x'], **observer)\n xeq = xgal.transform_to(coord.ICRS)\n veq_ = 
gc.vgal_to_hel(xeq, stream['v'], **vobs)\n veq = [None] * 3\n veq[0] = veq_[0].to(u.mas/u.yr)\n veq[1] = veq_[1].to(u.mas/u.yr)\n veq[2] = veq_[2].to(u.km/u.s)\n \n # unperturbed stream\n par_perturb = np.array([0*M.si.value, 0., 0., 0.])\n x1, x2, x3, v1, v2, v3 = interact.interact(par_perturb, B.si.value, phi.rad, V.si.value, theta.rad, Tenc.si.value, T.si.value, dt.si.value, par_pot, potential, potential_perturb, x.si.value, y.si.value, z.si.value, vx.si.value, vy.si.value, vz.si.value)\n stream0 = {}\n stream0['x'] = (np.array([x1, x2, x3])*u.m).to(u.pc)\n stream0['v'] = (np.array([v1, v2, v3])*u.m/u.s).to(u.km/u.s)\n \n # sky coordinates\n xgal0 = coord.Galactocentric(stream0['x'], **observer)\n xeq0 = xgal0.transform_to(coord.ICRS)\n veq0_ = gc.vgal_to_hel(xeq0, stream0['v'], **vobs)\n veq0 = [None] * 3\n veq0[0] = veq0_[0].to(u.mas/u.yr)\n veq0[1] = veq0_[1].to(u.mas/u.yr)\n veq0[2] = veq0_[2].to(u.km/u.s)\n \n # rotate to native coordinate system\n R = find_greatcircle(xeq0.ra.deg[::10], xeq0.dec.deg[::10])\n xi0, eta0 = myutils.rotate_angles(xeq0.ra, xeq0.dec, R)\n xi0 = coord.Angle(xi0*u.deg)\n \n # place gap at xi~0\n xioff = xi0[ienc]\n xi0 -= xioff\n \n xi, eta = myutils.rotate_angles(xeq.ra, xeq.dec, R)\n xi = coord.Angle(xi*u.deg)\n xi -= xioff\n \n vlabel = ['$\\mu_{\\\\alpha_\\star}$ [mas yr$^{-1}$]','$\\mu_{\\delta}$ [mas yr$^{-1}$]', '$V_r$ [km s$^{-1}$]']\n ylims = [[-0.5, 0.5], [-0.5, 0.5], [-25,25]]\n color = '0.35'\n ms = 4\n \n # plotting\n plt.close()\n fig, ax = plt.subplots(5,1,figsize=(12,12), sharex=True)\n \n plt.sca(ax[0])\n g = Table(fits.getdata('/home/ana/projects/GD1-DR2/output/gd1_members.fits'))\n plt.scatter(g['phi1']+40, g['phi2'], s=g['pmem']*2, c=g['pmem'], cmap=mpl.cm.binary, vmin=0.5, vmax=1.1)\n \n plt.xlim(-45,45)\n plt.ylim(-10,10)\n plt.gca().set_aspect('equal')\n plt.ylabel('$\\phi_1$ [deg]')\n \n plt.sca(ax[1])\n plt.plot(xi.wrap_at(wangle), eta, 'o', mec='none', color=color, ms=ms)\n \n plt.ylabel('$\\phi_1$ [deg]')\n plt.ylim(-10,10)\n plt.gca().set_aspect('equal')\n \n xeqs = [xeq.ra, xeq.dec, xeq.distance.to(u.kpc)]\n for i in range(3):\n plt.sca(ax[i+2])\n \n # interpolate expected kinematics from an unperturbed stream\n vexp = np.interp(xi.wrap_at(wangle), xi0.wrap_at(wangle), veq0[i].value) * veq0[i].unit\n plt.plot(xi.wrap_at(wangle), veq[i]-vexp, 'o', mec='none', color=color, ms=ms)\n \n plt.ylabel('$\\Delta$ {}'.format(vlabel[i]))\n plt.ylim(*ylims[i])\n\n plt.xlabel('$\\phi_2$ [deg]')\n \n plt.tight_layout()\n plt.savefig('../plots/spur_morphology_sky{}.png'.format(old_label))", "def mask_nighttime(lon, lat, date=date, mask_daytime=mask_daytime,\n ref_date=datetime.datetime(1899, 12, 31, 12),\n buffer_hours=buffer_hours, debug=False):\n # --- get lat and lon values from columns\n if debug:\n print((\"--- (s4-1) %s seconds ---\" % (time.time() - start_time)))\n # --- get sunrise and sunset for location\n o = ephem.Observer()\n # set lat (decimal?), lon (decimal?), and date (UTC)\n o.lat = str(lat)\n o.long = str(lon)\n o.date = date\n # planetary body\n s = ephem.Sun()\n if debug:\n print((\"--- (s4-2) %s seconds ---\" % (time.time() - start_time)))\n\n # Compute sun vs observer\n s.compute()\n if debug:\n print((\"--- (s4-3) %s seconds ---\" % (time.time() - start_time)))\n\n # Work out if day or night based on sunrises and sunsets\n mask_value = 0\n try:\n\n # get sunrise time and date\n next_rising = o.next_rising(s)\n next_setting = o.next_setting(s)\n\n # convert to datetime.datetime\n next_rising = add_days(ref_date, 
next_rising)\n next_setting = add_days(ref_date, next_setting)\n\n # did the sun last rise or set? (inc. any adjustments)\n sun_last_rose = False\n if next_setting < next_rising:\n sun_last_rose = True\n\n # Add buffer to rising/setting if provided with buffer_hours\n if buffer_hours != 0:\n\n # Calculate last rise\n previous_rising = o.previous_rising(s)\n # convert to datetime.datetime\n previous_rising = add_days(ref_date, previous_rising)\n # Calculate last setting\n previous_setting = o.previous_setting(s)\n # convert to datetime.datetime\n previous_setting = add_days(ref_date, previous_setting)\n\n # Calculate absolute difference\n time_from_rise = (date-previous_rising).total_seconds()\n time_till_set = (date-next_setting).total_seconds()\n time_from_set = (date-previous_setting).total_seconds()\n time_till_rise = (date-next_rising).total_seconds()\n\n # If absolutely difference less than buffer\n if abs(time_from_rise)/60./60. <= buffer_hours:\n mask_value = 1\n elif abs(time_till_set)/60./60. <= buffer_hours:\n mask_value = 1\n elif abs(time_from_set)/60./60. < buffer_hours:\n mask_value = 1\n elif abs(time_till_rise)/60./60. < buffer_hours:\n mask_value = 1\n\n # --- Check if daytime or nighttime and mask if condition met.\n if sun_last_rose:\n if mask_daytime:\n # ... and has not set yet, it must be daytime\n if (date < next_setting):\n mask_value = 1\n\n # if the sun last set... (mask nighttime is default)\n else:\n # if mask nighttime (aka not mask_daytime)\n if not mask_daytime:\n # ... and has not risen yet, it must be nighttime\n if (date < next_rising):\n mask_value = 1\n\n # Add gotcha for locations where sun is always up.\n except AlwaysUpError:\n if mask_daytime:\n mask_value = 1\n\n # Add gotcha for locations where sun is always down.\n except NeverUpError:\n if not mask_daytime:\n mask_value = 1\n\n except:\n print('FAIL')\n sys.exit()\n\n # Mask value in array\n return mask_value", "def dusk_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dusk(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n dusk = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dusk", "def estimate_sunrise_sunset(self, date, verbose=True):\n if self.source_type != 'solar':\n raise ValueError(\"You can only 
estimate sunrise and sunset for \"\n \"solar sources.\")\n\n date = pd.Timestamp(date)\n\n if self.diurnal_pattern is None:\n if verbose:\n print(\"Warning: Source {} has no diurnal pattern, estimating \"\n \"sunrise and sunset using average of past data.\"\n .format(self.name), file=sys.stderr)\n return Source.estimate_sunrise_sunset(self, date)\n\n if verbose:\n print(\"{} {}: Using Diurnal Pattern to estimate sunrise and sunset\"\n .format(self.name, date.date()))\n\n diurnal_pattern = self.diurnal_pattern\n daily_pattern = diurnal_pattern[date:date+datetime.timedelta(hours=23)]\n\n sunrise, sunset = None, None\n\n # This will walk through finding first sun hour and first night hour\n for hour, pattern in enumerate(daily_pattern.values):\n if sunrise is None and pattern > 0:\n sunrise = hour\n\n # If sun has risen, and we have not found night and we reach a 0\n if sunrise is not None and sunset is None and pattern == 0:\n sunset = hour\n\n if sunrise is None and sunset is None:\n raise ValueError(\"No solar power was generated on {}\".format(date))\n\n return sunrise, sunset", "def night_center(self, date=None):\n sunset = self.sunset(date=date)\n sunrise = self.sunrise(date=sunset)\n center = sunset + timedelta(0, (sunrise - sunset).total_seconds() / 2.0)\n center = self.date_to_local(center)\n return center", "def sun_earth_test(stepper_type, dt):\n # numerical params\n T = 0\n\n # physical params\n R = common.M_S/common.M_E\n MS = np.array([R, 1])\n G = common.get_G(common.M_E, common.AU, common.YR)\n f = common.get_f(G, MS)\n period = np.sqrt(4 * np.pi**2 / (G * sum(MS)) * (1 + 1 / R)**3)\n\n T_F = 2 * period\n V_E = np.sqrt(G * R / (1 + 1/R))\n ys = np.array([\n -1 / R, 0, 0, -V_E / R,\n 1, 0, 0, V_E\n ], dtype=np.float64)\n earth_pos = [ys[4:6]]\n solver = stepper_type(f, T, ys, T_F, max_step=dt, G=G, Ms=MS,\n get_accel=common.get_accel, get_jerk=common.get_jerk\n )\n times = [T]\n while solver.status == 'running':\n solver.step()\n earth_pos.append(np.copy(solver.y[4:6]))\n times.append(solver.t)\n earth_arr = np.array(earth_pos)\n times_arr = np.array(times)\n exact_earth = np.array(list(zip(\n np.cos(2 * np.pi / period * times_arr),\n np.sin(2 * np.pi / period * times_arr)\n )))\n return np.sqrt(sum(common.l2_norm(earth_arr, exact_earth))**2)", "def calc_optimal_spacing(sun_properties, tilt_angle, module_length):\n h = module_length * sin(tilt_angle)\n D1 = h / tan(radians(sun_properties.worst_sh))\n D = max(D1 * cos(radians(180 - sun_properties.worst_Az)), D1 * cos(radians(sun_properties.worst_Az - 180)))\n return D", "def get_time_step(self):\n for body in self.bodies:\n # If body is a Satelite\n if body.name == \"Satelite\":\n # Assuming that acceleration for a small times step is constant\n t = 0.01 * norm(body.velocity) / norm(body.acc)\n if t < self.t:\n return t\n return self.t", "def update(self, time):\n\n delta_J2000 = self.time - constant.J2000_DATE\n n_days_J2000 = delta_J2000.days + delta_J2000.seconds/86400\n\n mean_lon_sun = 280.460 + 0.9856474*n_days_J2000\n mean_lon_sun %= 360.0\n mean_lon_sun *= constant.DEG_TO_RAD\n\n mean_anomaly_sun = 357.528 + 0.9856003*n_days_J2000\n mean_anomaly_sun %= 360.0\n mean_anomaly_sun *= constant.DEG_TO_RAD\n\n ecliptic_lon_sun = ( mean_lon_sun/constant.DEG_TO_RAD +\n 1.915*math.sin(mean_anomaly_sun) +\n 0.020*math.sin(2.0*mean_anomaly_sun) )\n ecliptic_lon_sun *= constant.DEG_TO_RAD\n\n dist_earth_to_sun = (1.00014 -\n 0.01671*math.cos(mean_anomaly_sun) -\n 0.00014*math.cos(2.0*mean_anomaly_sun) )\n dist_earth_to_sun *= 
constant.AU_TO_KM\n\n obliquity_ecliptic = 23.439 - 0.0000004*n_days_J2000\n obliquity_ecliptic *= constant.DEG_TO_RAD\n\n x_J2000_sun = math.cos(ecliptic_lon_sun)\n y_J2000_sun = math.cos(obliquity_ecliptic)*math.sin(ecliptic_lon_sun)\n z_J2000_sun = math.sin(obliquity_ecliptic)*math.sin(ecliptic_lon_sun)\n\n self.direction = vt.Vector([x_J2000_sun, y_J2000_sun, z_J2000_sun])\n self.distance = dist_earth_to_sun\n self.time = time", "def time(tp, Ks, F, Fp, presHead, thetaSat, thetaInit):\n\n numeratorLN = Fp + np.absolute(presHead)*(thetaSat - thetaInit)\n denomLN = F + np.absolute(presHead)*(thetaSat - thetaInit)\n naturalLog = np.log(numeratorLN/denomLN)\n\n product1 = np.absolute(presHead)*(thetaSat - thetaInit)*naturalLog\n brackets = F - Fp + product1\n\n product2 = (1/Ks)*brackets\n time = tp + product2\n return time", "def stump_S(z) :\n\n if z > 0:\n sz = sqrt(z) \n return (sz - sin(sz))/pow(sz,3)\n elif z < 0 :\n s_z = sqrt(-z) \n # According to the equation the denominatori is pow(sqrt(z),3)\n return (sinh(s_z) - s_z)/pow(s_z,3)\n else :\n return 0.1666666666666666", "def sunrise_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n sunrise = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return sunrise", "def earth_tide(theta, lamda, gtime):\n\n global dsz, dcz, dsl, dcl, ssz, scz, ssl, scl, dpar, sdist # bpos common block\n global h, k, l # love common block\n h = [0.6114, 0.2891, 0.175]\n k = [0.304, 0.09421, 0.043]\n l = [0.0832, 0.0145, 0.0103]\n\n global azt, azs # azimut common block\n global etmut # tdiff common block\n global moon # sunny common block\n moon = 0\n # hardwire these - you can only send it ONE droptime\n deltat = 1\n NPT = 1\n\n temp_time = num2date(gtime)\n\n YY = temp_time.year\n MO = temp_time.month\n DD = temp_time.day\n HH = temp_time.hour\n MM = temp_time.minute\n SS = temp_time.second\n # Initialize variables\n irl = 1\n iflag = 0\n ntotl = 1\n iget = [0, 0, 0, 0, 0, 0, 0] # ' !!!\n ispc = [0, 0, 0, 0] # ' !!!\n ntw = [1, 0, 0] # ' !!!\n ioptn = 't'\n ielement = 0\n # \tdata statements for input and output unit numbers (on terminal I/O)\n inun = 5\n ioun = 6\n nptpb = 6\n\n yr1 = YY - 1900\n day1 = date2num(datetime(YY, MO, DD))\n # \tfind times in hours from 0 hr, 1 jan 1900\n # matlab:\n ts = (\n SS / 3600\n + MM 
/ 60\n + HH\n + 24 * (day1 - 1)\n + 8760 * yr1\n + 24 * np.fix((yr1 - 1) / 4)\n )\n # python:\n dj = date_to_julian_day(datetime(YY, MO, DD))\n djref = date_to_julian_day(datetime(1899, 12, 31, 0, 0, 0))\n delta_dj = (\n dj - djref\n ) # difference in days from current date (0hr) to 0hr, 1 jan 1900\n delta_djhr = float(delta_dj) * 24.0 + HH - 12.0 + MM / 60.0 + SS / 3600.0\n te = ts + (NPT - 1) * deltat / 3600\n d = deltat / 3600\n # terms=(te-ts)/d + 1\n terms = NPT\n\n # done asking questions - begin execution\n i = 1\n tt = ts\n sph(theta, lamda, 0)\n etmut = 41.184 + yr1 - 70\n # matlab:\n # t = (tt+12 + (etmut/3600))/876600\n t = (delta_djhr + etmut / 3600) / 876600\n # t is ephemeris time in julian centuries from 12 hr 0 jan 1900\n ephem(t)\n\n # calculate normalized gravity tides\n [grav, tilt, strain, gdc] = elastd(ntw)\n\n gravtide = 1.0e5 * grav\n # convert m/s² to mgal: 1m/s² = 100 gal = 100 000 mgal\n\n iflag = 1\n\n iterms = np.fix(terms)\n i = 1\n return gravtide", "def generate_sunrise_sunset_chart(\n sunrise: datetime.datetime,\n sunset: datetime.datetime,\n current: datetime.datetime,\n):\n\n fig, ax = plt.subplots()\n\n # The bar should shart at sunrise and end at sunset\n # Black bar showing sunset\n ax.barh([1], [time_to_float(sunset)+0.75], color=\"black\")\n # First make an orange bar for the sunset\n ax.barh([1], [time_to_float(sunset)], color=\"orange\")\n # Then make a black bar for sunset\n ax.barh([1], [time_to_float(sunrise)], color=\"black\")\n \n\n # Vertical line to show current time\n ax.axvline(x=time_to_float(current), linewidth=1, color=\"black\", ls=\"--\")\n\n # x-axis labels should be the time\n ax.set_xticks(\n [time_to_float(sunrise), 12, time_to_float(sunset)],\n )\n ax.set_xticklabels(\n [format_time(sunrise), \"12:00\", format_time(sunset)],\n )\n\n # Don't make the graph too wide\n ax.set_xlim([time_to_float(sunrise)-0.75, time_to_float(sunset)+0.75])\n\n # No y-axis labels required\n ax.set_yticks([])\n\n plt.tight_layout()\n\n fig.set_size_inches(7, 3)\n fig.savefig(\"./assets/sun-chart.png\")", "def setStopTime(self, t1):\n self._simulator_.update(t1=t1)\n return", "def initialTime(self):\n return self.params['t0']", "def break_time(self):\n\t\ts = timedelta()\n\t\tfor i in xrange(1, len(self.toggles)-1, 2):\n\t\t\ts += self.toggles[i+1] - self.toggles[i]\n\n\t\t# If not working need to add the last period of time\n\t\tif not self.status():\n\t\t\ts += datetime.now() - self.toggles[-1]\n\t\treturn s", "def time_period(s,h=30):\n\n t = 0\n\n old_z, pass_1 = 0, None\n\n while(True):\n k1 = h*sdot(s)\n k2 = h*sdot(s+k1/2)\n k3 = h*sdot(s+k2/2)\n k4 = h*sdot(s+k3)\n\n s = s+(k1+2*k2+2*k3+k4)/6\n t = t+h\n\n if (s[2]>=0 and old_z<0):\n dt = -s[2]/s[5]\n t2 = t+dt\n\n if pass_1 is None:\n pass_1 = t2\n else:\n return t2-pass_1\n\n old_z = s[2]", "def flattop_risefall(t, params):\n risefall = tf.cast(params['risefall'].get_value(), dtype=tf.float64)\n t_final = tf.cast(params['t_final'].get_value(), dtype=tf.float64)\n t_up = risefall\n t_down = t_final - risefall\n return (1 + tf.math.erf((t - t_up) / risefall)) / 2 * \\\n (1 + tf.math.erf((-t + t_down) / risefall)) / 2", "def SunPosition(time):\n # Correct for light travel time from the Sun.\n # Otherwise season calculations (equinox, solstice) will all be early by about 8 minutes!\n adjusted_time = time.AddDays(-1.0 / C_AUDAY)\n earth2000 = _CalcEarth(adjusted_time)\n sun2000 = [-earth2000.x, -earth2000.y, -earth2000.z]\n\n # Convert to equatorial Cartesian coordinates of date.\n stemp = 
_precession(sun2000, adjusted_time, _PrecessDir.From2000)\n sun_ofdate = _nutation(stemp, adjusted_time, _PrecessDir.From2000)\n\n # Convert equatorial coordinates to ecliptic coordinates.\n true_obliq = math.radians(adjusted_time._etilt().tobl)\n return _RotateEquatorialToEcliptic(sun_ofdate, true_obliq, time)", "def kts(self):\n return CAL_TO_J * 0.0077 * (self.rho/1000.0) * (self.rho/1000.0)", "def get_power_production_current_hour(self):\n if self.pulses_produced_1h is None:\n return None\n elif self.pulses_produced_1h == 0:\n return 0.0\n return self.pulses_to_kWs(self.pulses_produced_1h, 3600)", "def new_moon_before(cls, tee):\n varepsilon = pow(2, -1000)\n tau = tee - ((1/360) * cls.lunar_phase(tee) * cls.SYNODIC_MONTH)\n return binary_search(tau - 1, min(tee, tau + 1),\n lambda l, u: cls.zodiac(l) == cls.zodiac(u) or u - l < varepsilon,\n lambda x: cls.lunar_phase(x) < 180)", "def _calculate_strehl(self):\n\n self.strehl = np.exp(-1*((2*np.pi/self.science_wavelength)*self.high_order_wfe)**2)", "def set_stop_time(self, stop_time = 650*pq.ms):\n self.tstop = float(stop_time.rescale(pq.ms))", "def set_stop_time(self, stop_time = 650*pq.ms):\n self.tstop = float(stop_time.rescale(pq.ms))", "def subtract_sine(t, y, P):\n f = np.zeros(y.size)\n amp, phase, offset = find_sine_params(t.jyear, y, P)\n params = (P, amp, phase, offset)\n f += sinefunc(t.jyear, P, amp, phase, offset)\n return y - f, params", "def st(self):\n # sidereal time polynomial coefficients in arcseconds\n sidereal_time = np.array([0.014506, 4612.156534, 1.3915817, -4.4e-7,\n -2.9956e-05, -3.68e-08])\n ST = self.polynomial_sum(sidereal_time, self.T)\n # get earth rotation angle and convert to arcseconds\n return np.mod(ST + self.era*self.deg2asec, self.turnasec)/self.turnasec", "def timespan(self):\n center = self._half_temp_res + self._shifts * self._half_temp_res\n return Timespan(start=center - self._half_temp_res, end=center + self._half_temp_res)", "def HMStime(s):\n if s < 60.:\n return '%.2f s' % s\n elif s < 3600.:\n return '%d:%.2f' % (int(s / 60 % 60), s % 60)\n else:\n return '%d:%d:%.2f' % (int(s / 3600), int(s / 60 % 60), s % 60)", "def _shifted_time(self):\n return self.sim_time + self.options.time.start_clocktime", "def _unit_hr(self):\n return self.time_base * 60.0", "def flattop_risefall_1ns(t, params):\n params['risefall'] = 1e-9\n return flattop_risefall(t, params)", "def t0shft(t,P,t0):\n t = t.copy()\n dt = 0\n\n t -= t0 # Shifts the timeseries s.t. transits are at 0,P,2P ...\n dt -= t0\n\n # The first transit is at t = nFirstTransit * P\n nFirstTrans = np.ceil(t[0]/P) \n dt -= nFirstTrans*P \n\n return dt", "def omega_sun_snodgrass90(lat):\n return differential_rotation(lat, 14.71, -2.39, -1.78)", "def overheads(NPT, DIT, NDIT):\n ov = 360. + 120. + NPT*NDIT*(DIT + 80. + 15.)\n print 'Telescope time in h = ', ov/3600.", "def _normalizeDeltaTime(self, dt : float) -> float:\n return dt / self.tau", "def mu_sun(m2_over_m1: float) -> float:\n\n mu = K * np.sqrt(1. 
+ m2_over_m1)\n return mu * mu", "def sunset(cls, date):\n return cls.UJJAIN.dusk(date, 0)", "def rough_time_estimate(m1, m2, flow, fudge_length=1.1, fudge_min=0.02):\n m = m1 + m2\n msun = m * lal.MTSUN_SI\n t = 5.0 / 256.0 * m * m * msun / (m1 * m2) / (numpy.pi * msun * flow) ** (8.0 / 3.0)\n\n # fudge factoriness\n return .022 if t < 0 else (t + fudge_min) * fudge_length", "def _prev_shifted_time(self):\n return self._prev_sim_time + self.options.time.start_clocktime", "def get_state_planet_or_moon(body, t):\n # t = Time.now().utc.datetime\n\n b = None # I don't like red\n exec('b = ephem.{:s}()'.format(body.title()))\n b.compute(ephem.Date(t), epoch='2000')\n\n # fix zero padding\n ra = str(b.a_ra)\n if ra.index(':') == 1:\n ra = '0' + ra\n dec = str(b.a_dec)\n if dec.index(':') == 1:\n dec = '0' + dec\n elif dec.index(':') == 2 and dec[0] == '-':\n dec = '-0' + dec[1:]\n\n # compute rates in arcsec/s:\n dt = datetime.timedelta(seconds=1)\n b.compute(ephem.Date(t + dt), epoch='2000')\n ra_p1 = b.a_ra*180.0/np.pi*3600.0\n dec_p1 = b.a_dec*180.0/np.pi*3600.0\n b.compute(ephem.Date(t - dt), epoch='2000')\n ra_m1 = b.a_ra*180.0/np.pi*3600.0\n dec_m1 = b.a_dec*180.0/np.pi*3600.0\n\n ra_rate = (ra_p1 - ra_m1)/2.0\n dec_rate = (dec_p1 - dec_m1)/2.0\n\n return ra, dec, ra_rate, dec_rate", "def wind_shear(self):\n return self.flow_field.wind_shear", "def unit_hr(self):\n return self.time_base * 60.0", "def time(self) -> float:\n return self.state.game_loop / 22.4 # / (1/1.4) * (1/16)", "def unit_sun_r(sun_pos):\n return sun_pos / vector_magnitude(sun_pos[0], sun_pos[1], sun_pos[2])", "def unsetTimeUnits(self):\n return _libsbml.KineticLaw_unsetTimeUnits(self)", "def morletft(s, w, w0, dt):\n \n p = 0.75112554446494251 # pi**(-1.0/4.0)\n wavelet = np.zeros((s.shape[0], w.shape[0]))\n pos = w > 0\n\n for i in range(s.shape[0]):\n n = normalization(s[i], dt)\n wavelet[i][pos] = n * p * np.exp(-(s[i] * w[pos] - w0)**2 / 2.0)\n \n return wavelet", "def zero_timings(self):\r\n self.step = 0\r\n self.current_T = 0.0", "def si_2_kts(vals):\n return vals * 3600.0 / meters_per_nautical_mile", "def _clock_time(self):\n return self._shifted_time % (24*3600)", "def hmstora(rah,ram,ras):\n\thrs = (float(rah)+(float(ram)/60)+(float(ras)/3600.0)) % 24\n\n\treturn 15*hrs", "def sweep50T(self):\n return 35.6", "def sidereal_solar_longitude(tee):\n return mod(Solar.solar_longitude(tee) - Astro.precession(tee) + SIDEREAL_START, 360)", "def _format_seconds_since_midnight(self, s):\n return \"%02d:%02d:%02d\" % (s / 3600, (s / 60) % 60, s % 60)", "def slant_time(sx, sy, sz, rx, ry, rz, v):\n d = np.sqrt((rx - sx) ** 2 + (ry - sy) ** 2)\n h = rz - sz\n r = np.sqrt(d ** 2 + h ** 2)\n return r / v", "def ayanamsha(tee):\n return Solar.solar_longitude(tee) - sidereal_solar_longitude(tee)", "def half_step_time(self):\n\n return self.full_step_time() * self.half_to_full_step_time_ratio", "def sun_single_day(date):\r\n\r\n\tsun = l.sun(date=date, local=True)\r\n\tsunrise = sun['sunrise']\r\n\tsunset = sun['sunset']\r\n\tday_length = str(sunset-sunrise)\r\n\tsolar_noon = l.solar_noon(date=date, local=True)\r\n\tsolar_zenith = l.solar_elevation(solar_noon.replace(tzinfo=None))\r\n\r\n\treturn {'sunrise':sunrise, 'sunset': sunset, 'daylength': day_length, 'solar_noon': solar_noon, 'zenith': solar_zenith}", "def apparent_magnitude(sat, topos, earth, sun, time):\n\n position = earth + sat\n observer = earth + topos\n barycentric_o = position.at(time).observe(observer)\n barycentric_s = position.at(time).observe(sun)\n 
phase_angle = barycentric_o.separation_from(barycentric_s).radians\n _, _, distance = barycentric_o.radec()\n term_1 = -1.3 # standard satellite intrinsic magnitude\n term_2 = +5.0 * np.log10(distance.km / 1000.)\n arg = np.sin(phase_angle) + (np.pi - phase_angle) * np.cos(phase_angle)\n term_3 = -2.5 * np.log10(arg)\n return term_1 + term_2 + term_3", "def get_elapsed_seconds():\n\tutcnow = datetime.utcnow()\n\tmidnight_utc = datetime.combine(utcnow.date(), time(0))\n\tdelta = utcnow - midnight_utc\n\treturn delta.total_seconds()", "def generate_sunsets(self, nyears=13, day_pad=50):\n\n # Set observatory horizon to zero\n doff = ephem.Date(0)-ephem.Date('1858/11/17')\n\n self.obs.horizon = 0.\n\n # Swipe dates to match sims_skybrightness_pre365\n mjd_start = self.mjd\n mjd_end = np.arange(mjd_start, mjd_start+365.25*nyears+day_pad+366, 366).max()\n step = 0.25\n mjds = np.arange(mjd_start, mjd_end+step, step)\n setting = mjds*0.\n\n # Stupid Dublin Julian Date\n djds = mjds - doff\n sun = ephem.Sun()\n\n for i, (mjd, djd) in enumerate(zip(mjds, djds)):\n sun.compute(djd)\n setting[i] = self.obs.previous_setting(sun, start=djd, use_center=True)\n setting = setting + doff\n\n # zomg, round off crazy floating point precision issues\n setting_rough = np.round(setting*100.)\n u, indx = np.unique(setting_rough, return_index=True)\n self.setting_sun_mjds = setting[indx]\n left = np.searchsorted(self.setting_sun_mjds, mjd_start)\n self.setting_sun_mjds = self.setting_sun_mjds[left:]", "def dynstall_mhh_steady(t, u, p):\n # Inputs\n U = u['U'](t)\n alpha_34 = u['alpha_34'](t)\n return dynstall_mhh_steady_simple(U, alpha_34, p)", "def time(self):\n try:\n if self.single_date:\n return self.stime\n else:\n return self.stime + (self.etime - self.stime) / 2\n except TypeError:\n return None", "def svpice(t):\n A0=0.7859063157e0\n A1=0.357924232e-1\n A2=-0.1292820828e-3\n A3=0.5937519208e-6\n A4=0.4482949133e-9\n A5=0.2176664827e-10\n T = t - 273.16\n e = pow(10.0,A0+T*(A1 + T*(A2 + T*(A3 + T*(A4 + T*A5)))))\n return e", "def lunar_day_at_or_after(cls, k, tee):\n phase = (k - 1) * 12\n tau = tee + ((1/360) * mod(phase - cls.lunar_phase(tee), 360) * cls.SYNODIC_MONTH)\n a = max(tee, tau - 2)\n b = tau + 2\n return invert_angular(cls.lunar_phase, phase, a, b)", "def getHeadingTime(self) -> float:\n return self.timestep_cached_heading_tm", "def ephem(t):\n global dsz, dcz, dsl, dcl, ssz, scz, ssl, scl, dpar, sdist # bpos common block\n #\n # common block containing the difference ephemeris minus\n # universal time, in seconds. 
if this is not known it should\n # be set to zero, and the argument to the program should be\n # universal rather than ephemeris time.\n #\n global etmut # tdiff common block\n # common block containing the instruction on which ephemeris to compute\n # moon = 0 - both sun and moon\n # 1 - moon only\n # 2 - sun only\n global moon\n\n pi20 = 62.8318530717958\n # compute universal time in hours\n ts = 876600 * t - 12 - (etmut / 3600)\n hr = np.mod(ts, 24)\n # compute obliquity of the ecliptic\n w = 0.409319747 - 0.0002271107 * t\n cosw = np.cos(w)\n sinw = np.sin(w)\n t2 = t * t\n if moon != 1:\n # compute solar constants for given t\n hs = 4.881627982482 + 628.3319508731 * t + 0.523598775578 * 10 ** (-5) * t2\n hs = np.mod(np.mod(hs, pi20) + pi20, pi20)\n ps = 4.908229466993 + 0.03000526416690 * t + 0.790246300201 * 10 ** (-5) * t2\n es = 0.01675104 - 0.00004180 * t - 0.000000126 * t2\n psig = 0.2617993877971 * (hr - 12.0) + hs\n chmp = np.cos(hs - ps)\n shmp = np.sin(hs - ps)\n ls = hs + shmp * es * (2.0 + 2.5 * es * chmp)\n sls = np.sin(ls)\n cz = sinw * sls\n sz = np.sqrt(1.0 - cz ** 2)\n psis = math.atan2(cosw * sls, np.cos(ls))\n rbarr = 1.0 + es * (chmp + es * (chmp - shmp) * (chmp + shmp))\n ll = psis - psig\n scz = cz\n ssz = sz\n ssl = np.sin(ll)\n scl = np.cos(ll)\n sdist = 1 / rbarr\n\n # compute lunar constants for given t\n\n if moon == 2:\n return\n hm = 4.7199666 + 8399.7091449 * t - 0.0000198 * t2\n pm = 5.83515154 + 71.01804120839 * t - 0.180205 * 10 ** (-3) * t2\n nm = 4.523601515 - 33.75714624 * t + 0.3626406335 * 10 ** (-4) * t2\n # bl bls bf bd are the fundamental arguments of browns theory\n bl = hm - pm\n bls = hs - ps\n bf = hm - nm\n bd = hm - hs\n # lunar lat long and parallax from brown. latter two from\n # improved lunar ephemeris, latitude from ras paper of 1908...\n tlongm = (\n hm\n + 0.10976 * np.sin(bl)\n - 0.02224 * np.sin(bl - 2.0 * bd)\n + 0.01149 * np.sin(2.0 * bd)\n + 0.00373 * np.sin(2.0 * bl)\n - 0.00324 * np.sin(bls)\n - 0.00200 * np.sin(2.0 * bf)\n - 0.00103 * np.sin(2.0 * bl - 2.0 * bd)\n - 0.00100 * np.sin(bl + bls - 2.0 * bd)\n + 0.00093 * np.sin(bl + 2.0 * bd)\n - 0.00080 * np.sin(bls - 2.0 * bd)\n + 0.00072 * np.sin(bl - bls)\n - 0.00061 * np.sin(bd)\n - 0.00053 * np.sin(bl + bls)\n )\n tlatm = (\n 0.08950 * np.sin(bf)\n + 0.00490 * np.sin(bl + bf)\n - 0.00485 * np.sin(bf - bl)\n - 0.00303 * np.sin(bf - 2.0 * bd)\n + 0.00097 * np.sin(2.0 * bd + bf - bl)\n - 0.00081 * np.sin(bl + bf - 2.0 * bd)\n + 0.00057 * np.sin(bf + 2.0 * bd)\n )\n plx = (\n 3422.45\n + 186.54 * np.cos(bl)\n + 34.31 * np.cos(bl - 2.0 * bd)\n + 28.23 * np.cos(2.0 * bd)\n + 10.17 * np.cos(2.0 * bl)\n + 3.09 * np.cos(bl + 2.0 * bd)\n + 1.92 * np.cos(bls - 2.0 * bd)\n + 1.44 * np.cos(bl + bls - 2.0 * bd)\n + 1.15 * np.cos(bl - bls)\n - 0.98 * np.cos(bd)\n - 0.95 * np.cos(bl + bls)\n - 0.71 * np.cos(bl - 2.0 * bf)\n + 0.62 * np.cos(3.0 * bl)\n + 0.60 * np.cos(bl - 4.0 * bd)\n )\n sinmla = np.sin(tlatm)\n cosmla = np.cos(tlatm)\n sinmln = np.sin(tlongm)\n cosmln = np.cos(tlongm)\n # ...convert from celestial lat and long according to explan suppl of\n # ......na and le page 26\n cz = cosmla * sinmln * sinw + sinmla * cosw\n sz = np.sqrt(1.0 - cz ** 2)\n at1 = cosmla * sinmln * cosw - sinmla * sinw\n at2 = cosmla * cosmln\n ram = math.atan2(at1, at2)\n ll = ram - psig\n dcz = cz\n dsz = sz\n dsl = np.sin(ll)\n dcl = np.cos(ll)\n dpar = plx\n\n # ------------------------------------------------------------------", "def time_function(t):\n\n omega = np.pi\n return np.sin(omega * t) + 
np.sin(10 * omega * t) + np.sin(20 * omega * t)", "def delayS(self):\n e = self.ecc()\n cE = np.cos(self.E())\n sE = np.sin(self.E())\n sOmega = np.sin(self.omega())\n cOmega = np.cos(self.omega())\n TM2 = self.M2.value*Tsun\n\n sDelay = -2*TM2* np.log(1-e*cE-self.SINI*(sOmega*(cE-e)+\n (1-e**2)**0.5*cOmega*sE))\n return sDelay", "def time_step(self):\n\n rho_rel = np.abs(self.rho_dt / self.rho)\n rho_rel_max = np.max(rho_rel)\n e_rel = np.abs(self.e_dt / self.e)\n e_rel_max = np.max(e_rel)\n x_rel = np.abs(self.u / self.dx)\n x_rel_max = np.max(x_rel)\n y_rel = np.abs(self.w / self.dy)\n y_rel_max = np.max(y_rel)\n rel = [rho_rel_max, e_rel_max, x_rel_max, y_rel_max]\n delta = np.max(np.abs(rel))\n\n if 0.1 <= delta <= 1e3:\n self.dt = self.p / delta\n else:\n self.dt = self.p", "def startTime(self) -> float:\n try: return self.times[0]\n except IndexError: return 0.0", "def check_time(self, m, s):\r\n if m*60 + s > 5400:\r\n self.unit.s = 0\r\n self.unit.m = 90\r\n return\r\n if s < 0:\r\n s = 0\r\n if m < 0:\r\n m = 0\r\n self.unit.s = s\r\n self.unit.m = m", "def sunlongitude(time):\n B0 = 36000.7695\n C0 = 280.4659\n # fmt: off\n A = np.array([19147e-4, 200e-4, 48e-4, 20e-4, 18e-4, 18e-4, \\\n 15e-4, 13e-4, 7e-4, 7e-4, 7e-4, 6e-4, \\\n 5e-4, 5e-4, 4e-4, 4e-4])\n B = np.array([35999.050, 71998.1, 1934, 32964, 19, \\\n 445267, 45038, 22519, 65929, 3035, \\\n 9038, 33718, 155, 2281, 29930, \\\n 31557])\n C = np.array([267.520, 265.1, 145, 158, 159, 208, \\\n 254., 352, 45, 110, 64, 316, \\\n 118., 221, 48, 161])\n # fmt: on\n RAD = 0.0174532925199433\n A[0] = 1.9147 - 0.0048 * time\n tempb = (B * time + C) * RAD\n amp = A * np.cos(tempb)\n sunlon = np.sum(amp)\n sunlon = (sunlon + B0 * time + C0) * RAD\n return sunlon" ]
[ "0.74437636", "0.6815162", "0.6379348", "0.6302862", "0.6234464", "0.61841786", "0.61803925", "0.591679", "0.57105523", "0.57086027", "0.5666004", "0.5648924", "0.56333864", "0.5610499", "0.55877644", "0.55800503", "0.556043", "0.5549913", "0.55466384", "0.5520775", "0.54911834", "0.54817915", "0.54291177", "0.53982097", "0.53865033", "0.53768975", "0.53671443", "0.53644484", "0.5362223", "0.5326342", "0.53084445", "0.52740103", "0.5268591", "0.5255494", "0.5247835", "0.5242759", "0.5223092", "0.52214146", "0.5218471", "0.51957417", "0.5191143", "0.5182483", "0.5175497", "0.51677126", "0.5160168", "0.515681", "0.5154205", "0.51535386", "0.5152495", "0.51512825", "0.5143489", "0.5143489", "0.5140794", "0.51381123", "0.51364225", "0.5124006", "0.5122363", "0.51074934", "0.51042664", "0.51032364", "0.50998574", "0.5093214", "0.5092504", "0.5090204", "0.5089804", "0.508517", "0.5084945", "0.5081613", "0.5064296", "0.5061935", "0.50571305", "0.50540465", "0.5053448", "0.5052789", "0.5034102", "0.5032226", "0.50310063", "0.50281537", "0.50249875", "0.5022111", "0.50179017", "0.50174314", "0.5017121", "0.5016773", "0.50110626", "0.50048244", "0.5004688", "0.50043213", "0.50017905", "0.5000991", "0.49983254", "0.4989315", "0.49872953", "0.49828702", "0.4981121", "0.49803352", "0.49791005", "0.4978969", "0.49758312", "0.49757347" ]
0.5021731
80
Calculates the dusk time (the time in the evening when the sun is a certain number of degrees below the horizon. By default this is 6 degrees but can be changed)
def dusk(self, date=None, local=True):
    if self.astral is None:
        self.astral = Astral()

    if date is None:
        date = datetime.date.today()

    dusk = self.astral.dusk_utc(date, self.latitude, self.longitude)

    if local:
        return dusk.astimezone(self.tz)
    else:
        return dusk
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dusk_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dusk(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n dusk = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dusk", "def sundial_time(cls, tee):\n date = Clock.fixed_from_moment(tee)\n time = mod(tee, 1)\n q = ifloor(4 * time)\n if q == 0:\n a = cls.sunset(date - 1)\n b = cls.sunrise(date)\n t = Clock.days_from_hours(-6)\n elif q == 3:\n a = cls.sunset(date)\n b = cls.sunrise(date + 1)\n t = Clock.days_from_hours(18)\n else:\n a = cls.sunrise(date)\n b = cls.sunset(date)\n t = Clock.days_from_hours(6)\n return a + (2 * (b - a) * (time - t))", "def time_for_travel(self):\n return great_circle(self.pickupcoords, self.dropoffcoords).miles * 3600 / 25", "def time(self) -> float:\n return self.state.game_loop / 22.4 # / (1/1.4) * (1/16)", "def alt_sunrise(cls, date):\n rise = cls.UJJAIN.dawn(date, angle(0, 47, 0))\n return 1/24 * 1/60 * iround(rise * 24 * 60)", "def calc_swd(lon, lat, hour, doy):\n\n lon = -lon\n sda = 0.409 * np.cos(2. * np.pi * (doy - 173.) / 365.)\n sinlea = np.sin(2. * np.pi * lat / 360.) * np.sin(sda) - \\\n np.cos(2. * np.pi * lat / 360.) * np.cos(sda) * \\\n np.cos(2. * np.pi * (hour*3600.) / 86400. - 2. * np.pi * lon / 360.)\n sinlea = np.maximum(sinlea, 1e-9)\n Tr = (0.6 + 0.2 * sinlea)\n swin = 1368. 
* Tr * sinlea\n\n return swin", "def _unit_wk(self):\n return ((self.time_base * 60.0) * 24.0) * 7", "def sweep50T(self):\n return 35.6", "def unit_wk(self):\n return ((self.time_base * 60.0) * 24.0) * 7", "def what_night_is_it():\n d = datetime.datetime.utcnow() - datetime.timedelta(7 / 24 + 0.5)\n tonight = int(d.strftime('%Y%m%d'))\n return tonight", "def get_time_in_round() -> int:\n # FIXME - returning negative value for projectiles\n return store.round_time", "def compute_windchill(t,v):\n a = 35.74\n b = 0.6215\n c = 35.75\n d = 0.4275\n v16 = v**0.16\n wci = a+(b*t)-(c*v16)+(d*t*v16)\n return wci", "def dawn_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n \n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dawn(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n \n dawn = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dawn", "def equation_of_time(cls, date):\n offset = cls.sine(cls.mean_position(date, cls.ANOMALISTIC_YEAR))\n equation_sun = (offset * angle(57, 18, 0) * (14/360 - (abs(offset) / 1080)))\n return ((cls.daily_motion(date) / 360) * (equation_sun / 360) * cls.SIDEREAL_YEAR)", "def getUpTime(self):\n return self.__upTime + time() - self.__fingerTime", "def overheads(NPT, DIT, NDIT):\n ov = 360. + 120. + NPT*NDIT*(DIT + 80. 
+ 15.)\n print 'Telescope time in h = ', ov/3600.", "def calc_time(self, distance):\r\n if distance < 400:\r\n return 2*math.sqrt(distance / 1406.25)\r\n else:\r\n distance -= 400\r\n return distance / 750 + 16 / 15", "def unit_day(self):\n return (self.time_base * 60.0) * 24.0", "def half_step_time(self):\n\n return self.full_step_time() * self.half_to_full_step_time_ratio", "def calc_optimal_spacing(sun_properties, tilt_angle, module_length):\n h = module_length * sin(tilt_angle)\n D1 = h / tan(radians(sun_properties.worst_sh))\n D = max(D1 * cos(radians(180 - sun_properties.worst_Az)), D1 * cos(radians(sun_properties.worst_Az - 180)))\n return D", "def _unit_day(self):\n return (self.time_base * 60.0) * 24.0", "def wind_chill(T_a, v):\r\n return 13.12 + 0.6215*(T_a) - 11.37*(v)**0.16 + 0.3965*(T_a)*(v)**0.16", "def _get_twilight(self, datetime_today, lon, lat, alt):\r\n\r\n dusk = self._astral.dusk_utc(datetime_today, lat, lon,\r\n observer_elevation=alt,\r\n depression=self.twilight_alt)\r\n\r\n dawn = self._astral.dawn_utc(datetime_today + _delta_dt, lat, lon,\r\n observer_elevation=alt,\r\n depression=self.twilight_alt)\r\n\r\n return dusk, dawn", "def time(self):\n return sum(self._interval) * .5", "def omega_sun_snodgrass90(lat):\n return differential_rotation(lat, 14.71, -2.39, -1.78)", "def calculate_dew_point(temp, hum):\n return temp - (100 - hum) / 5", "def sim_step(self):\n return traci.simulation.getCurrentTime()/1000 # milliseconds to seconds", "def sweep50W(self):\n return 25.9", "def wind_speed(self):\r\n return self._yesterdays_weather.get_average_wind_speed()", "def virtual_round(self):\n return math.ceil(self.virtual_time())", "def lunarperigee(time):\n dtor = np.pi / 180\n t1 = 1 + time\n t2 = t1 * t1\n t3 = t2 * t1\n perigee = (\n 334.329653 * dtor\n + 4069.0340329575 * dtor * t1\n - 0.010325 * dtor * t2\n - 1.2e-5 * dtor * t3\n )\n return perigee", "def hindu_lunar_station(date):\n critical = HinduDate.sunrise(date)\n return quotient(HinduLunarDate.longitude(critical), angle(0, 800, 0)) + 1", "def time_step(self) -> float:\n return self._timestep", "def time(self, step: int) -> float:\n return self._start_time + self._parameters.dt*(step - self._start_step)", "def hp(self):\n if self.positive:\n return self.degree + (self.minute / 100) + (self.second / 10000)\n else:\n return -(self.degree + (self.minute / 100) + (self.second / 10000))", "def get_time_step(self):\n for body in self.bodies:\n # If body is a Satelite\n if body.name == \"Satelite\":\n # Assuming that acceleration for a small times step is constant\n t = 0.01 * norm(body.velocity) / norm(body.acc)\n if t < self.t:\n return t\n return self.t", "def _unit_hr(self):\n return self.time_base * 60.0", "def get_time_walking(self):\n return self.time_step_to_enqueue - self.time_entered", "def getHeadingTime(self) -> float:\n return self.timestep_cached_heading_tm", "def method_compute_timestep(self):\n\n myg = self.cc_data.grid\n\n cfl = self.rp.get_param(\"driver.cfl\")\n\n u = self.cc_data.get_var(\"x-velocity\")\n v = self.cc_data.get_var(\"y-velocity\")\n\n # the timestep is min(dx/|u|, dy|v|)\n xtmp = ytmp = 1.e33\n if not abs(u).max() == 0:\n xtmp = myg.dx/abs(u.v()).max()\n if not abs(v).max() == 0:\n ytmp = myg.dy/abs(v.v()).max()\n\n dt = cfl*min(xtmp, ytmp)\n\n # We need an alternate timestep that accounts for buoyancy, to\n # handle the case where the velocity is initially zero.\n rho = self.cc_data.get_var(\"density\")\n rho0 = self.base[\"rho0\"]\n rhoprime = self.make_prime(rho, rho0)\n\n g = 
self.rp.get_param(\"lm-atmosphere.grav\")\n\n F_buoy = (abs(rhoprime*g).v()/rho.v()).max()\n\n dt_buoy = np.sqrt(2.0*myg.dx/F_buoy)\n\n self.dt = min(dt, dt_buoy)\n if self.verbose > 0:\n print(f\"timestep is {dt}\")", "def GAME_TIME_ADVANCE(dt):", "def drillTime(matID, thickness_mm, W, FWHM_mm):\n return thickness_mm / drillSpeed(matID, W, FWHM_mm)", "def timeStep(self):\n return self.params['h']", "def get_nightly_start_time():\n return 14 # 2PM local Tucson time", "def wind_heat_transfer_coefficient(self) -> float:\n\n return 3.8 + 2 * self.wind_speed\n # return 4.5 + 2.9 * self.wind_speed", "def hp(self):\n minute_int, second = divmod(self.minute, 1)\n if self.positive:\n return self.degree + (minute_int / 100) + (second * 0.006)\n else:\n return -(self.degree + (minute_int / 100) + (second * 0.006))", "def set_step_time(self, us):\n if us < 20: # 20 us is the shortest possible for esp8266\n self.step_time = 20\n else:\n self.step_time = us", "def earth_tide(theta, lamda, gtime):\n\n global dsz, dcz, dsl, dcl, ssz, scz, ssl, scl, dpar, sdist # bpos common block\n global h, k, l # love common block\n h = [0.6114, 0.2891, 0.175]\n k = [0.304, 0.09421, 0.043]\n l = [0.0832, 0.0145, 0.0103]\n\n global azt, azs # azimut common block\n global etmut # tdiff common block\n global moon # sunny common block\n moon = 0\n # hardwire these - you can only send it ONE droptime\n deltat = 1\n NPT = 1\n\n temp_time = num2date(gtime)\n\n YY = temp_time.year\n MO = temp_time.month\n DD = temp_time.day\n HH = temp_time.hour\n MM = temp_time.minute\n SS = temp_time.second\n # Initialize variables\n irl = 1\n iflag = 0\n ntotl = 1\n iget = [0, 0, 0, 0, 0, 0, 0] # ' !!!\n ispc = [0, 0, 0, 0] # ' !!!\n ntw = [1, 0, 0] # ' !!!\n ioptn = 't'\n ielement = 0\n # \tdata statements for input and output unit numbers (on terminal I/O)\n inun = 5\n ioun = 6\n nptpb = 6\n\n yr1 = YY - 1900\n day1 = date2num(datetime(YY, MO, DD))\n # \tfind times in hours from 0 hr, 1 jan 1900\n # matlab:\n ts = (\n SS / 3600\n + MM / 60\n + HH\n + 24 * (day1 - 1)\n + 8760 * yr1\n + 24 * np.fix((yr1 - 1) / 4)\n )\n # python:\n dj = date_to_julian_day(datetime(YY, MO, DD))\n djref = date_to_julian_day(datetime(1899, 12, 31, 0, 0, 0))\n delta_dj = (\n dj - djref\n ) # difference in days from current date (0hr) to 0hr, 1 jan 1900\n delta_djhr = float(delta_dj) * 24.0 + HH - 12.0 + MM / 60.0 + SS / 3600.0\n te = ts + (NPT - 1) * deltat / 3600\n d = deltat / 3600\n # terms=(te-ts)/d + 1\n terms = NPT\n\n # done asking questions - begin execution\n i = 1\n tt = ts\n sph(theta, lamda, 0)\n etmut = 41.184 + yr1 - 70\n # matlab:\n # t = (tt+12 + (etmut/3600))/876600\n t = (delta_djhr + etmut / 3600) / 876600\n # t is ephemeris time in julian centuries from 12 hr 0 jan 1900\n ephem(t)\n\n # calculate normalized gravity tides\n [grav, tilt, strain, gdc] = elastd(ntw)\n\n gravtide = 1.0e5 * grav\n # convert m/s² to mgal: 1m/s² = 100 gal = 100 000 mgal\n\n iflag = 1\n\n iterms = np.fix(terms)\n i = 1\n return gravtide", "def time(n):\n steps = 3 + math.ceil(n/5.0)*2\n return steps", "def _get_wall_clock_step_time_threshold(self):\n if self.constants.physical:\n sim = self.mujoco_simulation.mj_sim\n return float(sim.nsubsteps) * sim.model.opt.timestep\n else:\n # No minimum threshold for simulation.\n return 0", "def time_interval( self ):\n begin = self.begin; end = self.end\n if end - begin < 600*self.hour_switch:\n return 600\n if end - begin < 86400*self.day_switch:\n return 3600\n elif end - begin < 86400*7*self.week_switch:\n return 86400\n 
else:\n return 86400*7", "def unit_hr(self):\n return self.time_base * 60.0", "def sweep25W(self):\n return 28.8", "def wind_bearing(self) -> float:\r\n return self._first_timeserie[\"data\"][\"instant\"][\"details\"][\r\n \"wind_from_direction\"\r\n ]", "def compute_dewpoint(t,h):\n\n tempC = (t-32)*5/9 #convert from deg F to deg C\n rh = h/100\n\n b = 18.678\n c = 257.14 #deg C\n\n gamma = math.log(rh)+(b*tempC)/(c+tempC)\n tdp = c*gamma/(b-gamma)\n\n tdp_F = 9/5*tdp +32 #convert from deg C to deg F\n\n return tdp_F", "def time_step(self):\n\n rho_rel = np.abs(self.rho_dt / self.rho)\n rho_rel_max = np.max(rho_rel)\n e_rel = np.abs(self.e_dt / self.e)\n e_rel_max = np.max(e_rel)\n x_rel = np.abs(self.u / self.dx)\n x_rel_max = np.max(x_rel)\n y_rel = np.abs(self.w / self.dy)\n y_rel_max = np.max(y_rel)\n rel = [rho_rel_max, e_rel_max, x_rel_max, y_rel_max]\n delta = np.max(np.abs(rel))\n\n if 0.1 <= delta <= 1e3:\n self.dt = self.p / delta\n else:\n self.dt = self.p", "def break_time(self):\n\t\ts = timedelta()\n\t\tfor i in xrange(1, len(self.toggles)-1, 2):\n\t\t\ts += self.toggles[i+1] - self.toggles[i]\n\n\t\t# If not working need to add the last period of time\n\t\tif not self.status():\n\t\t\ts += datetime.now() - self.toggles[-1]\n\t\treturn s", "def _clock_day(self):\n return int(self._shifted_time / 86400)", "def _STEPS2TIME(step):\n return step/1000.", "def get_van_Der_Waals_radius(self):\n return self.van_Der_Waals_radius", "def _unit_sec(self):\n return self.time_base / 60.0", "def suspected_per_hour(self):\r\n return (3600.*(self.circ_suspected+self.strm_suspected\r\n +self.circ_failed+self.strm_failed))/self.current_uptime()", "def timestep(self) -> Optional[float]:\n dt = None\n if len(self.time) > 1 and self.is_equidistant:\n dt = (self.time[1] - self.time[0]).total_seconds() # type: ignore\n return dt", "def ut1(self):\n return self.MJD + 2400000.5", "def dt(self):\n return self._data_writer.get_simulation_time_step_ms()", "def theta_v_time():\n pass", "def get_solar_time(longitude_deg, min_date, hour_date, day_date):\n solar_time_min = hour_date * 60 + min_date + 4 * longitude_deg + get_equation_of_time(day_date)\n\n return solar_time_min/60", "def _calcPlungerMoveTime(self, move_steps):\n sd = self.sim_state\n start_speed = sd['start_speed']\n top_speed = sd['top_speed']\n cutoff_speed = sd['cutoff_speed']\n slope = sd['slope']\n microstep = sd['microstep']\n\n slope *= 2500.0\n if microstep:\n move_steps = move_steps / 8.0\n theo_top_speed = sqrt((4.0 * move_steps*slope) + start_speed ** 2.0)\n # If theoretical top speed will not exceed cutoff speed\n if theo_top_speed < cutoff_speed:\n move_t = theo_top_speed - (start_speed/slope)\n else:\n theo_top_speed = sqrt(((2.0*move_steps*slope) +\n ((start_speed**2.0+cutoff_speed**2.0)/2.0)))\n # If theoretical top speed with exceed cutoff speed but not\n # reach the set top speed\n if cutoff_speed < theo_top_speed < top_speed:\n move_t = ((1 / slope) * (2.0 * theo_top_speed - start_speed -\n cutoff_speed))\n # If start speed, top speed, and cutoff speed are all the same\n elif start_speed == top_speed == cutoff_speed:\n move_t = (2.0 * move_steps) / top_speed\n # Otherwise, calculate time spent in each phase (start, constant,\n # ramp down)\n else:\n ramp_up_halfsteps = ((top_speed ** 2.0 - start_speed ** 2.0) /\n (2.0 * slope))\n ramp_down_halfsteps = ((top_speed ** 2.0 - cutoff_speed ** 2.0) /\n (2.0 * slope))\n if (ramp_up_halfsteps + ramp_down_halfsteps) < (2.0 * top_speed):\n ramp_up_t = (top_speed - start_speed) / 
slope\n ramp_down_t = (top_speed - cutoff_speed) / slope\n constant_halfsteps = (2.0 * move_steps - ramp_up_halfsteps -\n ramp_down_halfsteps)\n constant_t = constant_halfsteps / top_speed\n move_t = ramp_up_t + ramp_down_t + constant_t\n return move_t", "def daysinunit(self, unit):\n\t\treturn float(self.daysinbase)/self.unitsize(unit)", "def unit_sec(self):\n return self.time_base / 60.0", "def k_Sw07(wind_second_moment, temp_C):\n\n U2 = wind_second_moment\n\n Sc = schmidt_number(temp_C)\n k = (0.27 * U2) * (660 / Sc) ** 0.5\n\n return k", "def sunset(cls, date):\n return (date + Clock.days_from_hours(18) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n (((1577917828/1582237828) / 360) *\n (- cls.ascensional_difference(date, cls.LOCATION) +\n (3/4 * cls.solar_sidereal_difference(date)))))", "def get_time(self):\n return self.get_timed() / 10.0", "def _get_delta_time(r0):\n\n s1 = random() # To pick time\n epsilon = 0.001 # To avoid division by zero\n lam = (1 / (r0 + epsilon))\n return lam * pow(e, -lam * s1)", "def target_temperature_step(self):\n return PRECISION_HALVES", "def wind_speed(self):\n return self.flow_field.wind_speed", "def night_center(self, date=None):\n sunset = self.sunset(date=date)\n sunrise = self.sunrise(date=sunset)\n center = sunset + timedelta(0, (sunrise - sunset).total_seconds() / 2.0)\n center = self.date_to_local(center)\n return center", "def get_dt(radius: float, mean_speed: float, drpf: float = 0.01) -> float:\n if mean_speed == 0:\n mean_speed = 1\n return drpf * radius / mean_speed\n return drpf * radius / mean_speed", "def sky(seed=425, th=150, old=False):\n \n # impact parameters\n M = 3e7*u.Msun\n B = 19.95*u.kpc\n #B = 20.08*u.kpc\n V = 190*u.km/u.s\n phi = coord.Angle(0*u.deg)\n th = 150\n theta = coord.Angle(th*u.deg)\n Tenc = 0.01*u.Gyr\n T = 0.5*u.Gyr\n dt = 0.05*u.Myr\n rs = 0*u.pc\n \n old_label = ''\n np.random.seed(seed)\n observer = {'z_sun': 27.*u.pc, 'galcen_distance': 8.3*u.kpc, 'roll': 60*u.deg, 'galcen_coord': coord.SkyCoord(ra=300*u.deg, dec=-90*u.deg, frame='icrs')}\n vobs = {'vcirc': 220*u.km/u.s, 'vlsr': [0, 0, 0]*u.km/u.s}\n wangle = 180*u.deg\n \n if old:\n old_label = '_old_up'\n observer = {'z_sun': -2000.*u.pc, 'galcen_distance': 8.3*u.kpc, 'roll': 50*u.deg, 'galcen_coord': coord.SkyCoord(ra=300*u.deg, dec=-90*u.deg, frame='icrs')}\n vobs = {'vcirc': 220*u.km/u.s, 'vlsr': [0,0,0]*u.km/u.s}\n \n # impact parameters\n M = 3e7*u.Msun\n B = 20.06*u.kpc\n V = 190*u.km/u.s\n phi = coord.Angle(0*u.deg)\n th = 155\n theta = coord.Angle(th*u.deg)\n Tenc = 0.01*u.Gyr\n T = 0.55*u.Gyr\n dt = 0.05*u.Myr\n #dt = 1*u.Myr\n rs = 0*u.pc\n \n # potential parameters\n potential = 3\n Vh = 220*u.km/u.s\n q = 1*u.Unit(1)\n rhalo = 20*u.pc\n par_pot = np.array([Vh.si.value, q.value, rhalo.si.value])\n \n # setup tube\n Nstar = 1400\n wx = 30*u.kpc\n wy = 0*u.pc\n wz = 0*u.pc\n sx = 0*u.km/u.s\n \n xphi = np.linspace(-0.3*np.pi,0.3*np.pi, Nstar)\n xphi0 = np.linspace(-0.1*np.pi, 0.1*np.pi, 1000)\n xphi1 = np.linspace(-0.28*np.pi, -0.1*np.pi, 200)\n xphi2 = np.linspace(0.1*np.pi, 0.32*np.pi, 200)\n xphi = np.concatenate([xphi1, xphi0, xphi2])\n \n xr = 20*u.kpc + np.random.randn(Nstar)*0.0*u.kpc\n x = np.sin(xphi) * xr\n y = np.cos(xphi) * xr\n z = x * 0\n vx = -np.cos(xphi) * Vh# * 0.94\n vy = np.sin(xphi) * Vh #* 0.97\n vz = vx * 0\n # closest to impact\n ienc = np.argmin(np.abs(x))\n \n # generate stream model\n potential_perturb = 1\n par_perturb = np.array([M.si.value, 0., 0., 0.])\n x1, x2, x3, 
v1, v2, v3 = interact.interact(par_perturb, B.si.value, phi.rad, V.si.value, theta.rad, Tenc.si.value, T.si.value, dt.si.value, par_pot, potential, potential_perturb, x.si.value, y.si.value, z.si.value, vx.si.value, vy.si.value, vz.si.value)\n stream = {}\n stream['x'] = (np.array([x1, x2, x3])*u.m).to(u.pc)\n stream['v'] = (np.array([v1, v2, v3])*u.m/u.s).to(u.km/u.s)\n \n # sky coordinates\n xgal = coord.Galactocentric(stream['x'], **observer)\n xeq = xgal.transform_to(coord.ICRS)\n veq_ = gc.vgal_to_hel(xeq, stream['v'], **vobs)\n veq = [None] * 3\n veq[0] = veq_[0].to(u.mas/u.yr)\n veq[1] = veq_[1].to(u.mas/u.yr)\n veq[2] = veq_[2].to(u.km/u.s)\n \n # unperturbed stream\n par_perturb = np.array([0*M.si.value, 0., 0., 0.])\n x1, x2, x3, v1, v2, v3 = interact.interact(par_perturb, B.si.value, phi.rad, V.si.value, theta.rad, Tenc.si.value, T.si.value, dt.si.value, par_pot, potential, potential_perturb, x.si.value, y.si.value, z.si.value, vx.si.value, vy.si.value, vz.si.value)\n stream0 = {}\n stream0['x'] = (np.array([x1, x2, x3])*u.m).to(u.pc)\n stream0['v'] = (np.array([v1, v2, v3])*u.m/u.s).to(u.km/u.s)\n \n # sky coordinates\n xgal0 = coord.Galactocentric(stream0['x'], **observer)\n xeq0 = xgal0.transform_to(coord.ICRS)\n veq0_ = gc.vgal_to_hel(xeq0, stream0['v'], **vobs)\n veq0 = [None] * 3\n veq0[0] = veq0_[0].to(u.mas/u.yr)\n veq0[1] = veq0_[1].to(u.mas/u.yr)\n veq0[2] = veq0_[2].to(u.km/u.s)\n \n # rotate to native coordinate system\n R = find_greatcircle(xeq0.ra.deg[::10], xeq0.dec.deg[::10])\n xi0, eta0 = myutils.rotate_angles(xeq0.ra, xeq0.dec, R)\n xi0 = coord.Angle(xi0*u.deg)\n \n # place gap at xi~0\n xioff = xi0[ienc]\n xi0 -= xioff\n \n xi, eta = myutils.rotate_angles(xeq.ra, xeq.dec, R)\n xi = coord.Angle(xi*u.deg)\n xi -= xioff\n \n vlabel = ['$\\mu_{\\\\alpha_\\star}$ [mas yr$^{-1}$]','$\\mu_{\\delta}$ [mas yr$^{-1}$]', '$V_r$ [km s$^{-1}$]']\n ylims = [[-0.5, 0.5], [-0.5, 0.5], [-25,25]]\n color = '0.35'\n ms = 4\n \n # plotting\n plt.close()\n fig, ax = plt.subplots(5,1,figsize=(12,12), sharex=True)\n \n plt.sca(ax[0])\n g = Table(fits.getdata('/home/ana/projects/GD1-DR2/output/gd1_members.fits'))\n plt.scatter(g['phi1']+40, g['phi2'], s=g['pmem']*2, c=g['pmem'], cmap=mpl.cm.binary, vmin=0.5, vmax=1.1)\n \n plt.xlim(-45,45)\n plt.ylim(-10,10)\n plt.gca().set_aspect('equal')\n plt.ylabel('$\\phi_1$ [deg]')\n \n plt.sca(ax[1])\n plt.plot(xi.wrap_at(wangle), eta, 'o', mec='none', color=color, ms=ms)\n \n plt.ylabel('$\\phi_1$ [deg]')\n plt.ylim(-10,10)\n plt.gca().set_aspect('equal')\n \n xeqs = [xeq.ra, xeq.dec, xeq.distance.to(u.kpc)]\n for i in range(3):\n plt.sca(ax[i+2])\n \n # interpolate expected kinematics from an unperturbed stream\n vexp = np.interp(xi.wrap_at(wangle), xi0.wrap_at(wangle), veq0[i].value) * veq0[i].unit\n plt.plot(xi.wrap_at(wangle), veq[i]-vexp, 'o', mec='none', color=color, ms=ms)\n \n plt.ylabel('$\\Delta$ {}'.format(vlabel[i]))\n plt.ylim(*ylims[i])\n\n plt.xlabel('$\\phi_2$ [deg]')\n \n plt.tight_layout()\n plt.savefig('../plots/spur_morphology_sky{}.png'.format(old_label))", "def wahrscheinlichkeit(Zeit):\n # Nach Zeit TIME soll spätestens ein neues Ereignis kommen!\n WKEIT = Zeit/MAX_TIME\n return WKEIT", "def native_wind_speed(self) -> float:\r\n return self._first_timeserie[\"data\"][\"instant\"][\"details\"][\"wind_speed\"]", "def unit_of_measurement(self):\n return UNIT_GALLONS_PER_MINUTE", "def phi_up(self):\n return 0.5 * (self.phi + 10 * (self.phi / 30.0) ** 2) / 180.0 * np.pi", "def calculate_pv_output(dt: datetime, sunrise: 
datetime, sunset: datetime) -> int:\n\n distance_to_zenith = (sunset - sunrise) / 2\n zenith = sunrise + distance_to_zenith\n dist_to_zenith_seconds = distance_to_zenith.total_seconds()\n\n zenith_percentage = abs(zenith - dt).total_seconds() / dist_to_zenith_seconds\n\n sun_intensity = zenith_percentage ** 2\n output = PV_MAX_TOTAL_OUTPUT_KW - (PV_MAX_TOTAL_OUTPUT_KW * sun_intensity)\n\n return int(output)", "def ra_dec_calculate(self) -> dict:\n for sec in range(self.delta_time):\n if 0 < self.ra_start + self.one_sec_walk_ra < 360 * 3600:\n self.ra = self.ra_start + self.one_sec_walk_ra\n self.ra_start = self.ra\n else:\n self.ra = self.ra_start + self.one_sec_walk_ra - 360 * 3600\n self.ra_start = self.ra\n if self.ra_dec_min < self.ra < self.ra_dec_max:\n self.dec = self.dec_start - self.one_sec_walk_dec\n self.dec_start = self.dec\n else:\n self.dec = self.dec_start + self.one_sec_walk_dec\n self.dec_start = self.dec\n\n ra_res = f'{int(self.ra // (3600 * 15))}:{int((self.ra % 3600) // 60)}:' \\\n f'{round(float((self.ra % 3600) % 60), 1)}'\n dec_res = f'{int(self.dec // 3600)}:{int((self.dec % 3600) // 60)}:' \\\n f'{round(float((self.dec % 3600) % 60), 1)}'\n moon = {\n 'ra': ra_res,\n 'dec': dec_res\n }\n return moon", "def si_2_kts(vals):\n return vals * 3600.0 / meters_per_nautical_mile", "def _normalizeDeltaTime(self, dt : float) -> float:\n return dt / self.tau", "def litres(time):\n return int(time / 2)", "def calculate_time(start_time):\r\n return round(time() - start_time, 2)", "def get_duration(self):\n return (self.stop_day - self.start_day) * (24 * 60) \\\n + (self.stop_hour - self.start_hour) * 60", "def test_scalar_dewpoint_from_relative_humidity():\n td = dewpoint_from_relative_humidity(10.6 * units.degC, 0.37)\n assert_almost_equal(td, 26. * units.degF, 0)", "def tt(self):\n return self.MJD + self.tt_ut1 + 2400000.5", "def deg2dms(dy):\n\n from numpy import floor\n dy_deg = floor(dy)\n dy_min = floor((dy-dy_deg)*60.)\n dy_sec = (dy-dy_deg-dy_min/60.)*3600.\n return dy_deg,dy_min,dy_sec", "def get_time_round(date):\r\n return int(date / self.timeframe) * self.timeframe", "def estimateTime(numparts, maskpixrad=None):\n\t#min time 60 sec vs. 289 from model\n\t#linear time 0 sec vs. 
-1.1587 from model\n\t#quadradic time March 14, 2008\n\tx = float(maskpixrad*numparts*2.0)\n\testtime = ( 26.83 + 0.001809 * x + 1.8542e-09 * x**2 )\n\t#ln(y) = -13.182 + 1.531 * ln(x) ==>\n\t#esttime = 1.884e-6 * (x**1.531) + 26.0\n\treturn esttime", "def k_Wa92(wind_second_moment, temp_C):\n\n U2 = wind_second_moment\n\n Sc = schmidt_number(temp_C)\n k = (0.31 * U2) * (660 / Sc) ** 0.5\n\n return k", "def dechours(self, yr, mn, d, t):\n dt = datetime(int(yr), int(mn), int(d))\n if self.epochhours == 0:\n self.epochhours = self.datehours(dt)\n\n e = self.datehours(dt)\n result=-1.0\n if len(t) > 0:\n (h, m) = t.split(':')\n fh = float(h)\n fm = float(m)\n r = fh + fm/60.0\n result = e + round(r, 2)\n\n # print \"dechours: \" + str(result) + \" yr=\" + str(yr) + \" mn=\" + str(mn) + \" d=\" + str(d) + \" \" + t\n return result", "def rough_time_estimate(m1, m2, flow, fudge_length=1.1, fudge_min=0.02):\n m = m1 + m2\n msun = m * lal.MTSUN_SI\n t = 5.0 / 256.0 * m * m * msun / (m1 * m2) / (numpy.pi * msun * flow) ** (8.0 / 3.0)\n\n # fudge factoriness\n return .022 if t < 0 else (t + fudge_min) * fudge_length", "def ventilation_rate_per_second(self):\n return self.volume * self.outdoor_air_ventilation * 1000 / 3600", "def day_length(day_of_year, lat):\n day_hours = np.deg2rad(lat)\n declination = 23.45 * np.sin(np.deg2rad(360.0 * (283.0 + day_of_year) / 365.0))\n const_day = -np.tan(day_hours) * np.tan(np.deg2rad(declination)) <= -1.0\n day_hours[const_day] = 24.0\n const_night = -np.tan(day_hours) * np.tan(np.deg2rad(declination)) >= 1.0\n day_hours[const_night] = 0.0\n day_night = ~((const_day) | (const_night))\n hour_angle = np.rad2deg(np.arccos(-np.tan(day_hours[day_night]) *\n np.tan(np.deg2rad(declination))))\n day_hours[day_night] = (hour_angle / 7.5)\n return day_hours", "def sun_utc(self, date, latitude, longitude):\n \n dawn = self.dawn_utc(date, latitude, longitude)\n sunrise = self.sunrise_utc(date, latitude, longitude)\n noon = self.solar_noon_utc(date, longitude)\n sunset = self.sunset_utc(date, latitude, longitude)\n dusk = self.dusk_utc(date, latitude, longitude)\n \n return {'dawn': dawn, 'sunrise': sunrise, 'noon': noon, 'sunset': sunset, 'dusk': dusk}" ]
[ "0.6691584", "0.6364968", "0.6130741", "0.59310466", "0.583694", "0.5834618", "0.5786861", "0.5731586", "0.56987673", "0.5689336", "0.568822", "0.56476253", "0.56455123", "0.56278443", "0.5611392", "0.5609227", "0.5550629", "0.5508177", "0.55058354", "0.5498458", "0.54921347", "0.547559", "0.5461327", "0.54512614", "0.54122394", "0.5406073", "0.5405952", "0.53978395", "0.5381898", "0.53758276", "0.53741527", "0.53348416", "0.5328854", "0.5325596", "0.531836", "0.53109455", "0.5309194", "0.5286783", "0.5278188", "0.5270068", "0.5264539", "0.52633965", "0.52458996", "0.5239786", "0.5233789", "0.52310663", "0.5226949", "0.5223877", "0.5222017", "0.52153236", "0.5213725", "0.5210451", "0.5204997", "0.52023125", "0.5184939", "0.5179109", "0.51756436", "0.5174882", "0.5166736", "0.5162409", "0.5162242", "0.5160006", "0.5136086", "0.5125863", "0.5122937", "0.5122574", "0.5112161", "0.5110072", "0.51075464", "0.5106801", "0.5105611", "0.510403", "0.5095896", "0.50913185", "0.5079821", "0.5076772", "0.50753313", "0.5072943", "0.50634015", "0.50599295", "0.5059222", "0.5056818", "0.50500256", "0.50466603", "0.5040582", "0.5034383", "0.5028854", "0.50288016", "0.50201076", "0.50193167", "0.5015344", "0.5015316", "0.5012272", "0.50098884", "0.5009176", "0.5006999", "0.5002435", "0.49969506", "0.4996496", "0.49960524", "0.49944496" ]
0.0
-1
Returns dawn, sunrise, noon, sunset and dusk as a dictionary.
def sun(self, date=None, local=True):
    if self.astral is None:
        self.astral = Astral()

    if date is None:
        date = datetime.date.today()

    sun = self.astral.sun_utc(date, self.latitude, self.longitude)

    if local:
        for key, dt in sun.items():
            sun[key] = dt.astimezone(self.tz)

    return sun
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sun_utc(self, date, latitude, longitude):\n \n dawn = self.dawn_utc(date, latitude, longitude)\n sunrise = self.sunrise_utc(date, latitude, longitude)\n noon = self.solar_noon_utc(date, longitude)\n sunset = self.sunset_utc(date, latitude, longitude)\n dusk = self.dusk_utc(date, latitude, longitude)\n \n return {'dawn': dawn, 'sunrise': sunrise, 'noon': noon, 'sunset': sunset, 'dusk': dusk}", "def dt():\n wheels = [-2, -1, 0, 1, 2]\n stations = [1,2,3,4]\n sectors = range(1,13)\n sectors4 = range(1,15) # more sectors for station 4\n\n dt_info = {\n \"wheels\":wheels,\n \"stations\":stations,\n \"sectors\": sectors,\n \"sectors4\":sectors4}\n\n return dt_info", "def get_times():\n server = datetime.datetime.now().astimezone()\n local = server.astimezone(dwx.dwx_tz())\n target = post_time_today(local)\n return {'server': server, 'local': local, 'target': target}", "def _get_information(self):\n weather_dict = {}\n table_body = self.climate_table\n\n rows = table_body.find_all('tr')\n months = [col.get_text() for col in rows[0].find_all('td')[1:]]\n\n for row in rows[1:]:\n cols = row.find_all('td')\n key = cols[0].get_text()\n value_getter = self._value_getters_by_key.get(key, self._get_remote_workers)\n\n weather_dict.update({key: [(months[i],) + value_getter(col) for i, col in enumerate(cols[1:])]})\n\n return weather_dict", "def get_entry(self):\n return {'Time String': self.get_time_string(),\n 'Mode': self.mode,\n 'Radars': self.radar}", "def calculate_statistics(self) -> Dict[str, Tuple[str, float]]:\n tempDict = {\n 'max_start': ('', -1),\n 'max_end': ('', -1),\n 'max_time_low_availability': ('', -1),\n 'max_time_low_unoccupied': ('', -1)\n }\n\n\n\n return {\n 'max_start': ('', -1),\n 'max_end': ('', -1),\n 'max_time_low_availability': ('', -1),\n 'max_time_low_unoccupied': ('', -1)\n }", "def dates(self):\n #{{{ function to return start and end times for a station\n return self.wfdates.keys()", "def get_time() -> dict:\n prague = pytz.timezone('Europe/Prague')\n now = prague.localize(datetime.now())\n fmt = '%H:%M'\n au_tz = pytz.timezone('Australia/Sydney')\n sydney = now.astimezone(au_tz).strftime(fmt)\n lon_tz = pytz.timezone('Europe/London')\n london = now.astimezone(lon_tz).strftime(fmt)\n ny_tz = pytz.timezone('US/Eastern')\n ny = now.astimezone(ny_tz).strftime(fmt)\n sf_tz = pytz.timezone('US/Pacific')\n sf = now.astimezone(sf_tz).strftime(fmt)\n return {\"sydney\": sydney, \"london\": london, \"ny\": ny, \"sf\": sf}", "def state_attributes(self):\n return {\n ATTR_WEATHER_ATTRIBUTION: ATTRIBUTION,\n ATTR_STATION: self.probe.get_data('station_name'),\n ATTR_UPDATED: '{} {}'.format(self.probe.get_data('update_date'),\n self.probe.get_data('update_time')),\n }", "def raw_data() -> Dict:\n return {\"neighbourhood\":\"Buttes-Montmartre\",\"room_type\":\"Entire home/apt\",\"minimum_nights\":1.555,\"mois\":2,\"voyageurs\":2.5,\"chambres\":1,\"lits\":1,\"salle_de_bains\":1}", "def create_state_dict(self):\n return {\n 'resting': self.resting,\n 'moving': self.moving,\n 'animated resting': self.animated_resting,\n 'autoresting': self.auto_resting,\n 'automoving': self.auto_moving,\n 'battle resting': self.battle_resting,\n 'attack': self.attack,\n 'enemy attack': self.enemy_attack,\n c.RUN_AWAY: self.run_away,\n c.VICTORY_DANCE: self.victory_dance,\n c.KNOCK_BACK: self.knock_back,\n c.FADE_DEATH: self.fade_death\n }", "def getMyInfoAsDict(self):\n list = ['name', 'version', 'systemSize', 'xMax', \n 'yMax', 'currentRound', 'currentHoursLeft']\n d = self.getSelectedAttr(list)\n 
return d", "def device_state_attributes(self):\n # TODO: convert RH from Elk to AH ?\n #if self.current_humidity > 0:\n # humidity = self.current_humidity\n data = {\n 'hidden': self._hidden,\n 'temp_unit' : self.temperature_unit,\n }\n if self._device.temp_outside is not None and self._device.temp_outside > -460:\n data['temp_outside'] = self._device.temp_outside\n if self._device.temp_3 is not None and self._device.temp_3 > -460:\n data['temp_3'] = self._device.temp_3\n if self._device.temp_4 is not None and self._device.temp_4 > -460:\n data['temp_4'] = self._device.temp_4\n return data", "def GetState(self):\n self.update()\n d = {}\n for n in ['imgtype', 'object', 'path', 'filename', 'nextfile', 'lastfile', 'filectr', 'observer', 'filter', 'filterid',\n 'guider', 'mirror', 'lastact', 'initialized', 'errors', 'highcap', 'preamp', 'hsspeed', 'vsspeed', 'cycletime',\n 'readouttime', 'mode', 'cool', 'tset', 'settemp', 'temp', 'tempstatus', 'imaging', 'shuttermode', 'exptime',\n 'xmin', 'xmax', 'ymin', 'ymax', 'roi', 'xbin', 'ybin', 'isdark', 'MonitorActive', 'lastact']:\n d[n] = self.__dict__.get(n)\n return d", "def as_dict(self):\n return {\"metadata\": self.metadata.as_dict(),\n \"rosters\": {\"home\": self.initial_rosters[\"home\"].as_dict(),\n \"away\": self.initial_rosters[\"away\"].as_dict()},\n \"events\": [e.as_dict() for e in self.events]}", "def _get_simulation_info(self) -> dict:\n # Just a stub for now\n return {}", "def make_image_dict(self):\n sprite_sheet = setup.GFX['treasurechest']\n image_dict = {'closed': self.get_image(0, 0, 32, 32, sprite_sheet),\n 'opened': self.get_image(32, 0, 32, 32, sprite_sheet)}\n\n return image_dict", "def ra_dec_calculate(self) -> dict:\n for sec in range(self.delta_time):\n if 0 < self.ra_start + self.one_sec_walk_ra < 360 * 3600:\n self.ra = self.ra_start + self.one_sec_walk_ra\n self.ra_start = self.ra\n else:\n self.ra = self.ra_start + self.one_sec_walk_ra - 360 * 3600\n self.ra_start = self.ra\n if self.ra_dec_min < self.ra < self.ra_dec_max:\n self.dec = self.dec_start - self.one_sec_walk_dec\n self.dec_start = self.dec\n else:\n self.dec = self.dec_start + self.one_sec_walk_dec\n self.dec_start = self.dec\n\n ra_res = f'{int(self.ra // (3600 * 15))}:{int((self.ra % 3600) // 60)}:' \\\n f'{round(float((self.ra % 3600) % 60), 1)}'\n dec_res = f'{int(self.dec // 3600)}:{int((self.dec % 3600) // 60)}:' \\\n f'{round(float((self.dec % 3600) % 60), 1)}'\n moon = {\n 'ra': ra_res,\n 'dec': dec_res\n }\n return moon", "def get_schedule_dict(state_data):\n\n times = np.array(list((zip(state_data['tstart'], state_data['tstop'])))).reshape((-1,))\n pitch = np.array(list((zip(state_data['pitch'], state_data['pitch'])))).reshape((-1,))\n roll = np.array(list((zip(state_data['off_nom_roll'], state_data['off_nom_roll'])))).reshape((-1,))\n clocking = np.array(list((zip(state_data['clocking'], state_data['clocking'])))).reshape((-1,))\n vid_board = np.array(list((zip(state_data['vid_board'], state_data['vid_board'])))).reshape((-1,))\n ccd_count = np.array(list((zip(state_data['ccd_count'], state_data['ccd_count'])))).reshape((-1,))\n fep_count = np.array(list((zip(state_data['fep_count'], state_data['fep_count'])))).reshape((-1,))\n sim_z = np.array(list((zip(state_data['simpos'], state_data['simpos'])))).reshape((-1,))\n\n schedule = {'pitch': pitch,\n 'roll': roll,\n 'fep_count': fep_count,\n 'ccd_count': ccd_count,\n 'clocking': clocking,\n 'vid_board': vid_board,\n 'sim_z': sim_z}\n\n return times, schedule", "def get_information(self):\n 
info_dict = dict()\n info_dict['run'] = self._runNumber\n info_dict['reduced'] = self._isReduced\n if self._slicerKey is None:\n # regular reduced data\n info_dict['slicer_key'] = None\n else:\n # chopped run\n info_dict['slicer_key'] = self._slicerKey\n info_dict['workspaces'] = self._choppedWorkspaceNameList[:]\n info_dict['raw_files'] = self._choppedNeXusFileList[:]\n if self._reducedFiles is not None:\n info_dict['files'] = self._reducedFiles[:]\n else:\n info_dict['files'] = None\n\n return info_dict", "def sun_single_day(date):\r\n\r\n\tsun = l.sun(date=date, local=True)\r\n\tsunrise = sun['sunrise']\r\n\tsunset = sun['sunset']\r\n\tday_length = str(sunset-sunrise)\r\n\tsolar_noon = l.solar_noon(date=date, local=True)\r\n\tsolar_zenith = l.solar_elevation(solar_noon.replace(tzinfo=None))\r\n\r\n\treturn {'sunrise':sunrise, 'sunset': sunset, 'daylength': day_length, 'solar_noon': solar_noon, 'zenith': solar_zenith}", "def instruments():\n instr_dict = {}\n #\n instr_dict['LRISr'] = 2**0\n instr_dict['LRISb'] = 2**1\n instr_dict['Kastb'] = 2**2\n instr_dict['shane_kast_red'] = 2**3\n instr_dict['shane_kast_red_ret'] = 2**3\n instr_dict['DEIMOS'] = 2**4\n instr_dict['NIRSPEC'] = 2**5\n instr_dict['GMOS'] = 2**6\n instr_dict['DBSP'] = 2**7\n #\n return instr_dict", "def device_state_attributes(self):\n attr = {}\n attr[\"enabled\"] = self._controller.enabled\n attr[\"zone_count\"] = len(self._controller._zones)\n attr[\"zones\"] = \"\"\n current = self._controller.runs.current_run\n if current is not None:\n attr[\"current_zone\"] = current.index + 1\n attr[\"current_name\"] = current.zone.name\n attr[\"current_start\"] = dt.as_local(current.start_time)\n attr[\"current_duration\"] = str(current.duration)\n attr[\"time_remaining\"] = str(current.time_remaining)\n attr[\"percent_complete\"] = current.percent_complete\n else:\n attr[\"current_schedule\"] = RES_NOT_RUNNING\n attr[\"percent_complete\"] = 0\n\n next = self._controller.runs.next_run\n if next is not None:\n attr[\"next_zone\"] = next.index + 1\n attr[\"next_name\"] = next.zone.name\n attr[\"next_start\"] = dt.as_local(next.start_time)\n attr[\"next_duration\"] = str(next.duration)\n else:\n attr[\"next_schedule\"] = RES_NONE\n\n return attr", "def get_dates():\n return {\n \"years\": range(datetime.date.today().year, datetime.date.today().year + 5),\n \"months\": range(1, 13),\n \"days\": range(1, 32)\n }", "def as_dict(self) -> dict[str, str]:\n return {\n LENGTH: self.length_unit,\n ACCUMULATED_PRECIPITATION: self.accumulated_precipitation_unit,\n MASS: self.mass_unit,\n PRESSURE: self.pressure_unit,\n TEMPERATURE: self.temperature_unit,\n VOLUME: self.volume_unit,\n WIND_SPEED: self.wind_speed_unit,\n }", "def extra_state_attributes(self) -> dict[str, int | str]:\n return {\n ATTR_STATION_ID: self._station_id,\n ATTR_STATION_NAME: self._get_station_name(),\n }", "def get_state(self) -> Dict:\n return {\n \"patience\": self.patience,\n \"cooldown\": self.cooldown,\n \"cooldown_counter\": self.cooldown_counter,\n \"mode\": self.mode,\n \"threshold\": self.threshold,\n \"threshold_mode\": self.threshold_mode,\n \"best\": self.best,\n \"num_bad_epochs\": self.num_bad_epochs,\n \"mode_worse\": self.mode_worse,\n \"last_epoch\": self.last_epoch,\n }", "def reset_state():\n return {'seer_info':[],\n 's_found_w_prev_night':False}", "def times(self):\n ret = {}\n for tag in self.TIMETAGLIST:\n if self.has_tag(tag):\n try:\n ret[tag] = safeInt(self.tag(tag))\n except TypeError:\n pass\n return ret", "def 
device_state_attributes(self):\n # Move these to Thermostat Device and make them global\n return {\n \"current_humidity\": self._current_humidity,\n \"status\": self._current_state,\n \"program\": self._current_program,\n \"away_mode\": self._away\n }", "def get_hours_per_unit_snap(now):\n print(\"/\"*50)\n print(\"GET hours_per_unit SNAP\")\n print(\"/\"*50)\n plant_settings = PlantSetting.objects.latest('timestamp')\n # print(\"SETINGS\",settings.timestamp)\n print(\"NOW: \", now)\n # preventing processing data before start of defined shift\n start, shift = get_shift_info(plant_settings, now)\n print(\"Start: \", start)\n print(\"Shift: \", shift)\n\n if start > now:\n print(\"NOT IN SHIFT\")\n return\n hours_per_unit_dict = main(start, now)\n hours_per_unit_dict['shift'] = shift\n\n return hours_per_unit_dict", "def get_perfect_information(self):\n state = {}\n state[\"chips\"] = [self.game.players[i].in_chips for i in range(self.player_num)]\n state[\"public_card\"] = (\n self.game.public_card.get_index() if self.game.public_card else None\n )\n state[\"hand_cards\"] = [\n self.game.players[i].hand.get_index() for i in range(self.player_num)\n ]\n state[\"current_round\"] = self.game.round_counter\n state[\"current_player\"] = self.game.game_pointer\n state[\"legal_actions\"] = self.game.get_legal_actions()\n return state", "def _get_dict_weather_data(self, weather_current):\n\n returned_dict = dict()\n returned_dict[\"weather_status\"] = weather_current.get_detailed_status()\n\n time_format = '%H:%M'\n if self.am_pm_time:\n time_format = '%I:%M %p'\n\n returned_dict[\"sunset\"] = datetime.fromtimestamp(weather_current.get_sunset_time()).strftime(time_format)\n returned_dict[\"sunrise\"] = datetime.fromtimestamp(weather_current.get_sunrise_time()).strftime(time_format)\n\n returned_dict[\"temperature\"] = int(round(weather_current.get_temperature(unit=self.temp_unit)[\"temp\"]))\n returned_dict[\"temperature_min\"] = int(round(weather_current.get_temperature(unit=self.temp_unit)[\"temp_min\"]))\n returned_dict[\"temperature_max\"] = int(round(weather_current.get_temperature(unit=self.temp_unit)[\"temp_max\"]))\n\n returned_dict[\"pressure\"] = weather_current.get_pressure()[\"press\"]\n returned_dict[\"sea_level_pressure\"] = weather_current.get_pressure()[\"sea_level\"]\n\n returned_dict[\"humidity\"] = weather_current.get_humidity()\n\n wind = weather_current.get_wind()\n wind_deg = wind.get(\"deg\", None)\n wind_speed = wind.get(\"speed\", None)\n returned_dict[\"wind_deg\"] = wind_deg\n returned_dict[\"wind_speed\"] = wind_speed\n\n snow_current = weather_current.get_snow()\n snow_current = snow_current.get('all', None)\n rain_current = weather_current.get_rain()\n rain_current = rain_current.get('all', None)\n returned_dict[\"rainfall\"] = rain_current\n returned_dict[\"snow\"] = snow_current\n\n returned_dict[\"clouds_coverage\"] = weather_current.get_clouds()\n\n return returned_dict", "def get(self):\n\t\treturn {\n\t\t\t'system': self.get_system_information(),\n\t\t\t'cpu': self.get_cpu_stats(),\n\t\t\t'gpu': self.get_gpu_stats(),\n\t\t\t'ram': self.get_ram_stats(),\n\t\t\t'storage': self.get_storage_stats(),\n\t\t\t'battery': self.get_battery_stats(),\n\t\t\t'temps': self.get_temperatures()\n\t\t}", "def device_state_attributes(self):\n attributes = {}\n\n if self._type == \"weather\":\n attributes[\"data\"] = self._connector.get_condition_hourly()\n elif self._type == \"weather_report\":\n attributes[\"data\"] = self._connector.get_weather_report()\n elif self._type == 
\"temperature\":\n attributes[\"data\"] = self._connector.get_temperature_hourly()\n elif self._type == \"dewpoint\":\n attributes[\"data\"] = self._connector.get_dewpoint_hourly()\n elif self._type == \"pressure\":\n attributes[\"data\"] = self._connector.get_pressure_hourly()\n elif self._type == \"wind_speed\":\n attributes[\"data\"] = self._connector.get_wind_speed_hourly()\n elif self._type == \"wind_direction\":\n attributes[\"data\"] = self._connector.get_wind_direction_hourly()\n elif self._type == \"wind_gusts\":\n attributes[\"data\"] = self._connector.get_wind_gusts_hourly()\n elif self._type == \"precipitation\":\n attributes[\"data\"] = self._connector.get_precipitation_hourly()\n elif self._type == \"precipitation_probability\":\n attributes[\"data\"] = self._connector.get_precipitation_probability_hourly()\n elif self._type == \"precipitation_duration\":\n attributes[\"data\"] = self._connector.get_precipitation_duration_hourly()\n elif self._type == \"cloud_coverage\":\n attributes[\"data\"] = self._connector.get_cloud_coverage_hourly()\n elif self._type == \"visibility\":\n attributes[\"data\"] = self._connector.get_visibility_hourly()\n elif self._type == \"sun_duration\":\n attributes[\"data\"] = self._connector.get_sun_duration_hourly()\n elif self._type == \"sun_irradiance\":\n attributes[\"data\"] = self._connector.get_sun_irradiance_hourly()\n elif self._type == \"fog_probability\":\n attributes[\"data\"] = self._connector.get_fog_probability_hourly()\n elif self._type == \"humidity\":\n attributes[\"data\"] = self._connector.get_humidity_hourly()\n\n attributes[ATTR_ISSUE_TIME] = self._connector.infos[ATTR_ISSUE_TIME]\n attributes[ATTR_LATEST_UPDATE] = self._connector.infos[ATTR_LATEST_UPDATE]\n attributes[ATTR_STATION_ID] = self._connector.infos[ATTR_STATION_ID]\n attributes[ATTR_STATION_NAME] = self._connector.infos[ATTR_STATION_NAME]\n attributes[ATTR_ATTRIBUTION] = ATTRIBUTION\n return attributes", "def get_results(self) -> Dict:\n results = {}\n results['time'] = np.array(self._results['time'])\n results['norm'] = np.array(self._results['norm'])\n results['bond_dimensions'] = np.array(self._results['bond_dimensions'])\n results['dynamics'] = self._results['dynamics']\n results['pt_bond_dimensions'] = self._results['pt_bond_dimensions']\n return results", "def extra_state_attributes(self):\n return {\n ATTR_ATTRIBUTION: ATTRIBUTION,\n ATTR_STATION: self.probe.get_data(\"station_name\"),\n ATTR_UPDATED: self.probe.last_update.isoformat(),\n }", "def device_state_attributes(self):\n attr = {}\n attr['remote_lock'] = self.remote_lock\n attr['power_state'] = self._power_state\n attr['heating_active'] = self._is_heating_active\n attr['auto_override'] = self.auto_override\n attr['sensor_mode'] = self.sensor_mode\n attr['external_sensor_temprange'] = self.external_temp\n attr['deadzone_sensor_temprange'] = self.deadzone_sensor_temprange\n attr['loop_mode'] = self._loop_mode\n attr['roomtemp_offset'] = self.roomtemp_offset\n attr['anti_freeze_function'] = self.anti_freeze_function\n attr['poweron_mem'] = self.poweron_mem\n attr['external_temp'] = self.external_temp\n attr['clock_hour'] = self.clock_hour\n attr['clock_min'] = self.clock_min\n attr['clock_sec'] = self.clock_sec\n attr['day_of_week'] = self.day_of_week\n attr['week_day'] = self.week_day\n attr['week_end'] = self.week_end\n return attr", "def getParsedDic(self):\n return {}", "def _get_dict_weather_data(self, weather_data):\n returned_dict = dict()\n returned_dict[\"weather_status\"] = 
weather_data.detailed_status\n \n returned_dict[\"sunset\"] = self._get_sun_rise_set_time(weather_data.sunset_time())\n returned_dict[\"sunrise\"] = self._get_sun_rise_set_time(weather_data.sunrise_time())\n \n temp, temp_max, temp_min = self._get_temperatures_vaĺues(weather_data.temperature(unit=self.temp_unit))\n returned_dict[\"temperature\"] = temp\n returned_dict[\"temperature_min\"] = temp_min\n returned_dict[\"temperature_max\"] = temp_max\n \n returned_dict[\"pressure\"] = weather_data.pressure[\"press\"]\n returned_dict[\"sea_level_pressure\"] = weather_data.pressure[\"sea_level\"]\n returned_dict[\"humidity\"] = weather_data.humidity\n \n current_wind = weather_data.wind(self.wind_speed_unit)\n returned_dict[\"wind_deg\"] = current_wind.get(\"deg\", None)\n returned_dict[\"wind_speed\"] = current_wind.get(\"speed\", None)\n \n returned_dict[\"snow\"] = self._get_snow_rain_value(weather_data.snow)\n returned_dict[\"rainfall\"] = self._get_snow_rain_value(weather_data.rain)\n returned_dict[\"clouds_coverage\"] = weather_data.clouds\n\n return returned_dict", "def _stateDict(self):\n\n data = {}\n # if self.currentState[4]:\n # data['action'] = 'BRAK'\n # else:\n data['action'] = 'MCTL'\n data['speed'] = float(self.speed)\n data['steerAngle'] = float(self.steering_angle)\n\n return data", "def getTimeStamps():\n\n # Initialize\n results = dict()\n\n # UT time\n ut = utils.getUT(pointing=True).split()\n results['utday'] = ut[0]\n results['ut'] = float(ut[1])\n\n # year/month/day/second\n utStamp = time.gmtime()\n utHour = maybeAddAZero(utStamp[3])\n utMin = maybeAddAZero(utStamp[4])\n utSec = maybeAddAZero(utStamp[5])\n results['timeLab'] = ''.join([commands.yearMonthDay(),'_',utHour,utMin,utSec])\n\n # Done\n return results", "def device_state_attributes(self):\n if self._sensor_type == DEVICE_TYPE_DOORBELL:\n return {\n ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION,\n ATTR_LAST_TRIP_TIME: self._camera_data[\"last_ring\"],\n }\n if (\n self._camera_data[\"event_object\"] is not None\n and len(self._camera_data[\"event_object\"]) > 0\n ):\n detected_object = self._camera_data[\"event_object\"][0]\n _LOGGER.debug(\n f\"OBJECTS: {self._camera_data['event_object']} on {self._name}\"\n )\n else:\n detected_object = \"None Identified\"\n return {\n ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION,\n ATTR_LAST_TRIP_TIME: self._camera_data[\"last_motion\"],\n ATTR_EVENT_SCORE: self._camera_data[\"event_score\"],\n ATTR_EVENT_LENGTH: self._camera_data[\"event_length\"],\n ATTR_EVENT_OBJECT: detected_object,\n }", "def standard_variables(self):\n\t\tstd_vars = {\n\t\t\t'time': {\n\t\t\t\t'local': datetime.datetime.now(),\n\t\t\t\t'utc': datetime.datetime.utcnow()\n\t\t\t}\n\t\t}\n\t\treturn std_vars", "def _get_ogd_stations():\n return {r[\"Station\"] for r in ZamgData.current_observations()}", "def dictionary():\r\n kluisDict = dict.fromkeys(range(1, 21))\r\n\r\n with open('FietsStalling.txt', 'r+') as readFile:\r\n for line in readFile: # kluizen uit bestand lezen\r\n splitLine = line.split(' ') # regels opdelen\r\n kluisNummer = int(splitLine[0].strip(';')) # eerste getal is kluisnummer\r\n OVNummer = int(splitLine[3].strip('\\n')) # laatste getal is OV nummer\r\n dateTime = splitLine[1] + ' ' + splitLine[2].strip(',') # middelste deel is datum en tijd\r\n kluisDict[kluisNummer] = (dateTime, OVNummer) # keys zijn kluisnummers, values zijn OV nummer en datetime\r\n return kluisDict", "def _get_twilight(self, datetime_today, lon, lat, alt):\r\n\r\n dusk = self._astral.dusk_utc(datetime_today, lat, lon,\r\n 
observer_elevation=alt,\r\n depression=self.twilight_alt)\r\n\r\n dawn = self._astral.dawn_utc(datetime_today + _delta_dt, lat, lon,\r\n observer_elevation=alt,\r\n depression=self.twilight_alt)\r\n\r\n return dusk, dawn", "def states_to_dict(self):\n test_cases = {}\n i = 0\n for element in self.states:\n test_cases[\"st\" + str(i)] = {}\n test_cases[\"st\" + str(i)][\"model\"] = int(element[0])\n test_cases[\"st\" + str(i)][\"duration\"] = int(element[1])\n test_cases[\"st\" + str(i)][\"temp\"] = int(element[2])\n i += 1\n\n return test_cases", "def getGM2MIsd(self):\n return { 'pga' : self.__constants['pga']['SMMI'],\n 'pgv' : self.__constants['pgv']['SMMI'],\n 'psa03' : self.__constants['psa03']['SMMI'],\n 'psa10' : self.__constants['psa10']['SMMI'],\n 'psa30' : self.__constants['psa30']['SMMI'] }", "def output(self):\n return {\n \"id\": self.id,\n \"name\": self.name,\n \"type\": 2,\n \"enabled\": 1,\n \"days_of_week\": self.days,\n \"time\": self.time.output()\n }", "def stations_dict(self):\n return self.__stations_dict", "def extra_state_attributes(self):\n return {\n ATTR_DURATION: self.cycle.duration,\n ATTR_CYCLE_LAST_UPDATED: self.cycle.last_updated.isoformat(),\n ATTR_MODE: self.cycle.mode.name.lower(),\n ATTR_START_HOUR: self.cycle.start_hour,\n }", "def device_state_attributes(self):\n attr = {}\n attr[\"enabled\"] = self._zone.enabled and self._controller.enabled\n attr[\"status\"] = self._zone.status\n attr[\"schedule_count\"] = len(self._zone.schedules)\n attr[\"schedules\"] = \"\"\n attr[\"adjustment\"] = self._zone.adjustment.as_string\n current = self._zone.runs.current_run\n if current is not None:\n if current.schedule is not None:\n attr[\"current_schedule\"] = current.schedule.schedule_index + 1\n attr[\"current_name\"] = current.schedule.name\n else:\n attr[\"current_schedule\"] = RES_MANUAL\n attr[\"current_name\"] = RES_MANUAL\n attr[\"current_start\"] = dt.as_local(current.start_time)\n attr[\"current_duration\"] = str(current.duration)\n attr[\"time_remaining\"] = str(current.time_remaining)\n attr[\"percent_complete\"] = current.percent_complete\n else:\n attr[\"current_schedule\"] = RES_NOT_RUNNING\n attr[\"percent_complete\"] = 0\n\n next = self._zone.runs.next_run\n if next is not None:\n if next.schedule is not None:\n attr[\"next_schedule\"] = next.schedule.schedule_index + 1\n attr[\"next_name\"] = next.schedule.name\n else:\n attr[\"next_schedule\"] = RES_MANUAL\n attr[\"next_name\"] = RES_MANUAL\n attr[\"next_start\"] = dt.as_local(next.start_time)\n attr[\"next_duration\"] = str(next.duration)\n else:\n attr[\"next_schedule\"] = RES_NONE\n\n return attr", "def sample_simulation() -> Dict[str, Tuple[str, float]]:\n sim = Simulation('stations.json', 'sample_rides.csv')\n sim.run(datetime(2017, 6, 1, 8, 0, 0),\n datetime(2017, 6, 1, 9, 0, 0))\n\n return sim.calculate_statistics()", "def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]:\n # type: () -> Dict[Text: Union[Dict, List[Dict]]]\n return {\n \"fecha_hora\": [\n self.from_entity(entity=\"time\"),\n\n ]\n }", "def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]:\n # type: () -> Dict[Text: Union[Dict, List[Dict]]]\n return {\n \"fecha_hora\": [\n self.from_entity(entity=\"time\"),\n\n ]\n }", "def get_operation_times(self):\n self.write(\"TIMERS?\")\n timers = {}\n timers['psu'] = int(re.search(r\"\\d+\", self.read()).group())\n timers['laser'] = int(re.search(r\"\\d+\", self.read()).group())\n timers['laser_above_1A'] = int(re.search(r\"\\d+\", self.read()).group())\n 
self.read() # an empty line is at the end.\n return timers", "def to_dict(self, with_tag = False):\n\n res_dict = {'runtimes' : self.run_times,\n 'tmean' : self.tmean,\n 'tstdev' : self.tstdev}\n\n if with_tag:\n res_dict = {\"{0}_{1}\".format(self.tag, _k) : _v for _k, _v in res_dict.items()}\n\n return res_dict", "def device_state_attributes(self):\n return {\n ATTR_ATTRIBUTION: ATTRIBUTION,\n ATTR_LAST_UPDATE: self.metoffice_now.date if self.metoffice_now else None,\n ATTR_SENSOR_ID: self._type,\n ATTR_SITE_ID: self.metoffice_site_id if self.metoffice_site_id else None,\n ATTR_SITE_NAME: self.metoffice_site_name\n if self.metoffice_site_name\n else None,\n }", "def stats(self):\n\n res = self.read_block(REG_STATS, 9)\n\n ret = {\n \"completed_cycles\": (res[1] << 8) + (res[0] << 0),\n \"last_boot\": {\n \"retries\": res[2],\n \"duration\": (res[6] << 24) + (res[5] << 16) + (res[4] << 8) + (res[3] << 0)\n },\n \"forced_shutdowns\": (res[8] << 8) + (res[7] << 0)\n }\n\n return ret", "def extra_state_attributes(self) -> dict[str, Any]:\n return {\n \"heat_demand\": self._device.heat_demand,\n \"heat_demands\": self._device.heat_demands,\n \"relay_demands\": self._device.relay_demands,\n \"system_mode\": self._device.system_mode,\n \"tpi_params\": self._device.tpi_params,\n # \"faults\": self._device.faultlog,\n }", "def device_state_attributes(self):\n if self.ticker is not None:\n return {\n ATTR_VOLUME_24H: self.ticker.values.get(\"volume\"),\n ATTR_ATTRIBUTION: ATTRIBUTION,\n ATTR_HIGH: self.ticker.values.get(\"high\"),\n ATTR_LOW: self.ticker.values.get(\"low\"),\n ATTR_VWAP: self.ticker.values.get(\"vwap\")\n }", "def timing_default(self):\n\n return {\"runtimes\": [], \"dates\": []}", "def get_state(self):\n return {\n \"board\": self.board,\n \"player\": self.player,\n \"winner\": self.winner\n }", "def device_state_attributes(self):\n if self._data is not None:\n return {\n \"阳历\": self._data.yangli,\n \"阴历\": self._data.yinli,\n \"五行\": self._data.wuxing,\n \"冲煞\": self._data.chongsha,\n \"百忌\": self._data.baiji,\n \"吉神\": self._data.jishen,\n \"宜\": self._data.yi,\n \"凶神\": self._data.xiongshen,\n \"忌\": self._data.ji,\n }", "def extra_state_attributes(self) -> Mapping[str, str]:\n if (update_time := self.coordinator.data[\"last_update\"]) is not None:\n update_time = update_time.isoformat()\n return {\n ATTR_STATION: self.coordinator.data[\"Name\"],\n ATTR_UPDATED: update_time,\n }", "def get_pump_times(self, start):\n pumps_dict = {}\n for pump in self.pumps:\n dataframe_ = pd.DataFrame()\n time = []\n command = []\n for i in range(len(pump.start_intervals)):\n t_on = pump.start_intervals[i].epanet_on_time\n t_off = pump.start_intervals[i].epanet_off_time\n time += [start + t_on * pd.Timedelta(\"1S\"),\n start + t_off * pd.Timedelta(\"1S\")]\n command += [1, 0]\n dataframe_['Time'] = time\n dataframe_[pump.link_id] = command\n pumps_dict[pump.link_id] = dataframe_\n return pumps_dict", "def data(self):\n return dict(name=self.name, ra=self.ra, dec=self.dec, mjd=self.mjd,\n type_=self.type, cosmo= (self.cosmo.name if self.cosmo is not None else None),\n zcmb = self.zcmb, zcmb_err=self.zcmb_err)", "def getStateData(self):\n data = {}\n # Traffic lights\n lights = {}\n for light in self._lights:\n lights[light.id] = light.getStateData()\n data['trafficLights'] = lights\n\n # Cars\n cars = {}\n for car in self._cars:\n cars[car.id] = car.getStateData()\n cars[car.id].update(car.getDescriptionData())\n data['cars'] = cars\n\n # Enter queue\n enterQueue = []\n for car in 
self._enterQueue:\n enterQueue.append(car.getStateData())\n enterQueue[-1].update(car.getDescriptionData())\n data['enterQueue'] = enterQueue\n\n # Fields\n data['time'] = kts46.utils.timedelta2str(self.time)\n data['lastCarGenerationTime'] = kts46.utils.timedelta2str(self._lastCarGenerationTime)\n data['lastCarId'] = self._lastCarId\n\n # Result.\n return data", "def state_attributes(self) -> Dict[str, Any]:\n return {**super(WarmupThermostat, self).state_attributes, **self.attributes}", "def get_states(self):\n states = {}\n if hasattr(self, 'random_mask_state'):\n states['random_mask_state'] = self.random_mask_state.get_state()\n if hasattr(self, 'deformrandomstate'):\n states['deformrandomstate'] = self.deformrandomstate.get_state()\n states['randomstate'] = self.randomstate.get_state()\n return states", "def sun_set_rise_times(self, date=None):\n rstimes = (self.sunset(date=date),\n self.evening_twilight_12(date=date),\n self.evening_twilight_18(date=date),\n self.morning_twilight_18(date=date),\n self.morning_twilight_12(date=date),\n self.sunrise(date=date))\n return rstimes", "def get_state(self):\n return self.agents, self.foods, self.viruses, self.masses, self.time", "def get_attributes(self):\n \n retdict = {}\n retdict['s'] = str(self.s)\n if self.t != None:\n retdict['t'] = str(self.t)\n retdict['a'] = str(self.a)\n retdict['b'] = str(self.b)\n retdict['c'] = str(self.c)\n retdict['d'] = str(self.d)\n return retdict", "def get_timeslots(self):\n\n timeorder = self.xml().find(\".//TIME_ORDER\")\n try:\n timeslots = {\n slot.attrib[\"TIME_SLOT_ID\"]: slot.attrib[\"TIME_VALUE\"]\n for slot in timeorder.findall(\"TIME_SLOT\")\n }\n except AttributeError:\n timeslots = {}\n return timeslots", "def scrap_data(self, device):\n data = {}\n for i in zip(self.id, self.gpio):\n try:\n GPIO.setmode(GPIO.BOARD)\n GPIO.setup(int(i[1]), GPIO.OUT)\n data[i[0]] = [datetime.datetime.now(), GPIO.input(int(i[1])), i[1]]\n except:\n data[i[0]] = [datetime.datetime.now(), None, i[1]]\n\n return data", "def dataForMonitoring(self):\n dict = MinderBase.dataForMonitoring(self)\n \n dict['nTests'] = len(self.tests.keys())\n dict['done'] = self.isDone()\n dict['nTestsSuccess'] = len([s for s in self.finishedTests if s.result == 0])\n dict['nTestsFailure'] = len([s for s in self.finishedTests if s.result != 0])\n dict['nRetries'] = self.errorStateCurRetry\n dict['ppFailure'] = (self.postProcessingResult == 'error')\n dict['ppSuccess'] = (self.postProcessingResult == 'success')\n\n return dict", "def get_general_info(self):\n table = Character._get_main_page(self._sprites).find('table', {'class': 'stripe'})\n\n result = []\n for td in table.find_all('td'):\n if td.get_text() != '':\n result.append(td.get_text())\n\n return {\n result[0]: result[1],\n result[2]: result[3],\n result[4]: result[5]\n }", "def stadates(self,start=False,end=False):\n #{{{ function to return start and end times for a station\n\n if not start: return self.stachan_cache.keys()\n\n\n cache = {}\n\n if not end: end = stock.now()\n if start > end: end = stock.now()\n start = float(start)\n end = float(end)\n\n for sta in self.stachan_cache:\n for chan in self.stachan_cache[sta]:\n for date in self.stachan_cache[sta][chan]['dates']:\n\n if date[1] == -1:\n\n if date[0] <= start: cache[sta] = 1\n if date[0] <= end: cache[sta] = 1\n\n else:\n\n if date[0] <= start and start <= date[1]: cache[sta] = 1\n if date[0] <= end and end <= date[1]: cache[sta] = 1\n if start <= date[0] and date[1] <= end: cache[sta] = 1\n\n print cache.keys()\n 
return cache.keys()", "def get_state_dict(self) -> Dict[Text, Text]:\n # get the containerStatus.state dict\n container_state: Dict = self._container_status_dict.get(KubernetesResource.Keys.STATE)\n\n # create a dictionary to hold the state values\n state_dict: Dict = dict()\n\n # if there is no state dict, return the empty dict object\n if not container_state:\n return state_dict\n\n # handle the \"running\" state\n if \"running\" in container_state:\n running: Dict = container_state[\"running\"]\n state_dict[\"container-state\"] = \"running\"\n state_dict[\"container-started-at\"] = running[\"startedAt\"]\n\n # handle the terminated state\n elif \"terminated\" in container_state:\n terminated: Dict = container_state[\"terminated\"]\n state_dict[\"container-state\"] = \"terminated\"\n state_dict[\"container-state-reason\"] = terminated[\"reason\"]\n state_dict[\"container-started-at\"] = terminated[\"startedAt\"]\n state_dict[\"container-finished-at\"] = terminated[\"finishedAt\"]\n\n # handle the waiting state\n elif \"waiting\" in container_state:\n waiting: Dict = container_state[\"waiting\"]\n state_dict[\"container-state\"] = \"waiting\"\n state_dict[\"container-state-reason\"] = waiting[\"reason\"]\n\n return state_dict", "def _get_options(self) -> Dict[str, Any]:\n # TODO: handle holidays as well\n return {\n \"growth\": self.growth,\n \"changepoints\": self.changepoints and list(self.changepoints.astype('str')),\n \"n_changepoints\": self.n_changepoints,\n \"changepoint_range\": self.changepoint_range,\n \"changepoint_prior_scale\": self.changepoint_prior_scale,\n \"mcmc_samples\": self.mcmc_samples,\n \"interval_width\": self.interval_width,\n \"uncertainty_samples\": self.uncertainty_samples,\n \"yearly_seasonality\": self.yearly_seasonality,\n \"weekly_seasonality\": self.weekly_seasonality,\n \"daily_seasonality\": self.daily_seasonality,\n \"seasonality_mode\": self.seasonality_mode,\n \"seasonality_prior_scale\": self.seasonality_prior_scale,\n\n \"seasonalities\": self.seasonalities,\n \"extra_regressors\": self.extra_regressors\n }", "def device_state_attributes(self):\n res = {}\n res[ATTR_DEPARTURE] = self._departure\n res[ATTR_DURATION] = self._duration\n res[ATTR_DELAY] = self._delay\n res[ATTR_CONNECTIONS] = self._connections\n res[ATTR_DESCRIPTION] = self._description\n res[ATTR_START_TIME] = self._start_time\n res[ATTR_DETAIL] = self._detail\n return res", "def gameState(self):\n gameState = {\"counter\" : {\"Team1\" : self.counter[\"Team1\"], \"Team2\" : self.counter[\"Team2\"]},\\\n \"lastChanged\" : self.lastChanged,\\\n \"wonRounds\" : {\"Team1\" : self.wonRounds[\"Team1\"], \"Team2\" : self.wonRounds[\"Team2\"]},\\\n \"wonGames\" : {\"Team1\" : self.wonGames[\"Team1\"], \"Team2\" : self.wonGames[\"Team2\"]},\\\n \"currentMaxPoints\" : self.currentMaxPoints,\\\n \"sidesChanged\" : self.sidesChanged,\\\n \"playerPositions\" : self.playerPositions,\\\n \"servePosition\" : self.servePosition,\\\n \"playerColors\" : self.playerColors,\\\n \"undoStack\" : self._undoStack,\\\n \"redoStack\" : self._redoStack,\\\n \"observers\" : self.__observers,\\\n \"gameName\" : self._getGameName()}\n return gameState", "def get_powerups() -> tuple:\n return tuple(PowerUp.powers.keys())", "def device_info(self) -> Dict[str, any]:\n return {\n \"identifiers\": {\n (\n DOMAIN,\n self._api.information.serial,\n SynoSurveillanceStation.INFO_API_KEY,\n )\n },\n \"name\": \"Surveillance Station\",\n \"manufacturer\": \"Synology\",\n \"model\": self._api.information.model,\n 
\"sw_version\": self._version,\n \"via_device\": (DOMAIN, self._api.information.serial),\n }", "def getMI2GMsd(self):\n return { 'pga' : 10**self.__constants['pga']['SPGM'],\n 'pgv' : 10**self.__constants['pgv']['SPGM'],\n 'psa03' : 10**self.__constants['psa03']['SPGM'],\n 'psa10' : 10**self.__constants['psa10']['SPGM'],\n 'psa30' : 10**self.__constants['psa30']['SPGM'] }", "def parse_uptime():\n\tr = subprocess.check_output([\"uptime\"])\n\tparsed_dict = {}\n\n\t#load average over past minute\n\n\t# code for linux\n\tuptime_values = re.split(\", \", r)\n\tload_averages = re.split(\"load average: \", uptime_values[3])\n\tparsed_dict[\"load\"] = re.split(\", \",load_averages[1])[0]\n\t\n\t# code for Unix (Mac)\n\t# uptime_values = re.split(\", \", r)\n\t# load_averages = re.split(\"load averages: \", uptime_values[3])\n\t# parsed_dict[\"load\"] = re.split(\" \",load_averages[1])[0].replace(',', '.')\n\n\tparsed_dict[\"users\"] = uptime_values[2]\n\tparsed_dict[\"uptime\"] = re.split(\"up \", uptime_values[0])[1]\n\t# US formated datetime to be displayed in top right corner\n\tparsed_dict[\"date\"] = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')\n\t# Server IP Adress\n\tparsed_dict[\"ip\"] = socket.gethostbyname(socket.gethostname())\n\t# Time to be displayed in alert container\n\n\treturn parsed_dict", "def as_dict(self):\n species_dict = dict()\n species_dict['force_field'] = self.force_field\n species_dict['is_ts'] = self.is_ts\n if self.e_elect is not None:\n species_dict['e_elect'] = self.e_elect\n if self.e0 is not None:\n species_dict['e0'] = self.e0\n species_dict['arkane_file'] = self.arkane_file\n if self.yml_path is not None:\n species_dict['yml_path'] = self.yml_path\n if self.is_ts:\n species_dict['ts_methods'] = self.ts_methods\n species_dict['ts_guesses'] = [tsg.as_dict() for tsg in self.ts_guesses]\n species_dict['ts_conf_spawned'] = self.ts_conf_spawned\n species_dict['ts_number'] = self.ts_number\n species_dict['ts_report'] = self.ts_report\n species_dict['rxn_label'] = self.rxn_label\n species_dict['successful_methods'] = self.successful_methods\n species_dict['unsuccessful_methods'] = self.unsuccessful_methods\n species_dict['chosen_ts_method'] = self.chosen_ts_method\n species_dict['chosen_ts'] = self.chosen_ts\n if self.run_time is not None:\n species_dict['run_time'] = self.run_time.total_seconds()\n species_dict['t1'] = self.t1\n species_dict['label'] = self.label\n species_dict['long_thermo_description'] = self.long_thermo_description\n species_dict['multiplicity'] = self.multiplicity\n if self.number_of_radicals is not None:\n species_dict['number_of_radicals'] = self.number_of_radicals\n species_dict['charge'] = self.charge\n species_dict['generate_thermo'] = self.generate_thermo\n if self.opt_level is not None:\n species_dict['opt_level'] = self.opt_level\n if self.final_xyz is not None:\n species_dict['final_xyz'] = self.final_xyz\n species_dict['number_of_rotors'] = self.number_of_rotors\n species_dict['rotors_dict'] = self.rotors_dict\n species_dict['external_symmetry'] = self.external_symmetry\n species_dict['optical_isomers'] = self.optical_isomers\n species_dict['neg_freqs_trshed'] = self.neg_freqs_trshed\n if self.conf_is_isomorphic is not None:\n species_dict['conf_is_isomorphic'] = self.conf_is_isomorphic\n if self.bond_corrections is not None:\n species_dict['bond_corrections'] = self.bond_corrections\n if self.mol is not None:\n species_dict['mol'] = self.mol.toAdjacencyList()\n if self.initial_xyz is not None:\n species_dict['initial_xyz'] = 
self.initial_xyz\n if self.checkfile is not None:\n species_dict['checkfile'] = self.checkfile\n if self.most_stable_conformer is not None:\n species_dict['most_stable_conformer'] = self.most_stable_conformer\n if self.cheap_conformer is not None:\n species_dict['cheap_conformer'] = self.cheap_conformer\n if self.recent_md_conformer is not None:\n species_dict['recent_md_conformer'] = self.recent_md_conformer\n if self.svpfit_output_file is not None:\n species_dict['svpfit_output_file'] = self.svpfit_output_file\n if self._radius is not None:\n species_dict['radius'] = self._radius\n if self.conformers:\n species_dict['conformers'] = self.conformers\n species_dict['conformer_energies'] = self.conformer_energies\n if self.conformers_before_opt is not None:\n species_dict['conformers_before_opt'] = self.conformers_before_opt\n if self.bdes is not None:\n species_dict['bdes'] = self.bdes\n return species_dict", "def asDict(self) -> dict:\n return {\n \"predominant_occlusion\": self.predominantOcclusion.value,\n \"estimations\": {\n \"chin\": self.chin,\n \"mouth\": self.mouth,\n \"clear\": self.clear,\n \"correct\": self.correct,\n \"partially\": self.partially,\n \"full\": self.full,\n },\n }", "def current_settings(self):\n return {\n 'power_state': self.power_state,\n 'brightness': self.brightness,\n }", "def device_state_attributes(self):\n data = {}\n if self._is_dimmable and self._brightness_pct:\n data = {ATTR_BRIGHTNESS_PCT: self._brightness_pct}\n data.update({#'alarm': self._alarm,\n 'operation_mode': self.operation_mode,\n 'rssi': self._rssi,\n 'occupancy': self._occupancy,\n 'wattage_override': self._wattage_override,\n 'id': self._id})\n return data", "def clocks(self):\n clks = {}\n idx = 0\n for clk in self._info.mOCLFrequency:\n if clk != 0:\n clks[\"clock\" + str(idx)] = {\"frequency\": clk}\n idx += 1\n return clks", "def get_info(self):\n # REMARK: it would be possible to use AtypicalEvent.__dict__,\n # but we'll stick to this solution if more info need to be added later\n\n dict_event_info = {}\n dict_event_info['name'] = self.name\n dict_event_info['date_start'] = self.date_start\n dict_event_info['date_end'] = self.date_end\n dict_event_info['duration'] = self.duration\n dict_event_info['type_event'] = self.type_event\n dict_event_info['is_atypical'] = self.is_atypical\n\n return dict_event_info", "def dic_info(self):\n\n info = {}\n try:\n info[\"DIC\"] = self.mc.DIC\n info[\"deviance\"] = np.mean(self.mc.db.trace(\"deviance\")(), axis=0)\n info[\"pD\"] = info[\"DIC\"] - info[\"deviance\"]\n except pm.ZeroProbability:\n info[\"DIC\"] = np.nan\n info[\"deviance\"] = np.nan\n info[\"pD\"] = np.nan\n\n return info", "def make_times(night, runs, observatory, times, full, instrument, okwrite):\n\n # use this to check times are vaguely right. time of runs\n # must lie between 06.00 local time on date corresponding to\n # start of night date and 1.5 days later. 
Has picked up a\n # few erroneously dated nights on the TNT.\n mjd_ref = Time(night).mjd - observatory.lon.degree/360 + 0.25\n\n tdata = {}\n with open(times if okwrite else os.devnull,'w') as tout:\n for run in runs:\n if full:\n print(f'Analysing times for run {run}')\n dfile = os.path.join(night, run)\n try:\n ntotal = 0\n if instrument == 'HiPERCAM':\n rtime = hcam.hcam.Rtime(dfile)\n else:\n rtime = hcam.ucam.Rtime(dfile)\n\n # Find first good time, has to roughly match the start\n # date of the night because some times can just be\n # junk\n not_alerted = True\n for n, tdat in enumerate(rtime):\n if instrument == 'HiPERCAM':\n time, tinfo, tflag = tdat\n expose = 1000000\n for tmid,texp,tiflag in tinfo:\n expose = min(round(texp,3),expose)\n else:\n time, tinfo = tdat[:2]\n tflag = time.good\n expose = round(time.expose,3)\n\n if instrument == 'HiPERCAM' or tflag:\n mjd_start = time.mjd\n tdelta = mjd_start-mjd_ref\n if tdelta > 0 and tdelta < 1.5:\n ts = Time(mjd_start, format=\"mjd\", precision=2)\n ut_start = ts.hms_custom\n n_start = n+1\n if expose >= 0 and expose < 2000:\n break\n elif not_alerted and (tdelta < 0 or tdelta > 1.5):\n # maximum one warning per run\n not_alerted = False\n print(f' Bad time: tdelta = {tdelta} < 0 or > 1.5 on time {n} of {dfile}')\n else:\n ntotal = 0\n raise hcam.HipercamError(f'No good times found in {dfile}')\n\n # Find last good time. First we just go for times near the\n # end of the run. Failing that, we try again from the start,\n # to account for runs with time stamp issues.\n if instrument == 'HiPERCAM':\n nback = 4\n elif rtime.header['MODE'] == 'DRIFT':\n # ultracam or hipercam\n win = rtime.win[0]\n nyu = win.ny*rtime.ybin\n nback = int((1033/nyu + 1) / 2) + 3\n elif rtime.header['MODE'] == 'UDRIFT':\n # ultraspec\n win = rtime.win[0]\n nyu = win.ny*rtime.ybin\n nback = int((1037/nyu + 1) / 2) + 3\n else:\n # non drift mode\n nback = 4\n\n if instrument == 'HiPERCAM':\n ntotal = rtime.ntotal()\n else:\n nbytes = os.stat(dfile + '.dat').st_size\n ntotal = nbytes // rtime.framesize\n\n if instrument != 'HiPERCAM' and ntotal > 20000:\n # this is a risk-reducing strategy in case the end\n # of a long ultracam or ultraspec run is\n # corrupt. Better to look at more than the\n # necessary number of frames if it prevents us\n # from having to wind through the whole lot.\n nback = max(nback, 500)\n\n # next statement basically resets the frame\n # we are on\n nreset = max(1, ntotal - nback)\n rtime.set(nreset)\n\n flast = False\n for n, tdat in enumerate(rtime):\n if instrument == 'HiPERCAM':\n time, tinfo, tflag = tdat\n nexpose = 1000000\n for tmid,texp,tiflag in tinfo:\n nexpose = min(round(texp,3),expose)\n else:\n time, tinfo = tdat[:2]\n tflag = time.good\n nexpose = round(time.expose,3)\n\n if instrument == 'HiPERCAM' or tflag:\n mjd = time.mjd\n if mjd >= mjd_start and mjd < mjd_start + 0.4:\n mjd_end = mjd\n ts = Time(mjd_end, format=\"mjd\", precision=2)\n ut_end = ts.hms_custom\n n_end = nreset + n\n if nexpose < 2000:\n expose = max(expose, nexpose)\n flast = True\n\n if not flast:\n # no good time found near end. 
There must be\n # one or we wouldn't get to this point, so\n # grind it out the hard way by going through\n # the whole run, which can be slow.\n rtime.set()\n for n, tdat in enumerate(rtime):\n if instrument == 'HiPERCAM':\n time, tinfo, tflag = tdat\n nexpose = 1000000\n for tmid,texp,tiflag in tinfo:\n nexpose = min(round(texp,3),expose)\n else:\n time, tinfo = tdat[:2]\n tflag = time.good\n nexpose = round(time.expose,3)\n\n if tflag:\n mjd = time.mjd\n if mjd >= mjd_start and mjd < mjd_start + 0.4:\n mjd_end = mjd\n ts = Time(mjd_end, format=\"mjd\", precision=2)\n ut_end = ts.hms_custom\n n_end = n + 1\n if nexpose < 2000:\n expose = max(expose, nexpose)\n\n nok = n_end-n_start+1\n if n_end > n_start:\n cadence = round(86400*(mjd_end-mjd_start)/(n_end-n_start),3)\n tdata[run] = [ut_start,mjd_start,ut_end,mjd_end,cadence,expose,nok,ntotal]\n else:\n cadence = 'UNDEF'\n tdata[run] = [ut_start,mjd_start,ut_end,mjd_end,'',expose,nok,ntotal]\n tout.write(f'{run} {ut_start} {mjd_start} {ut_end} {mjd_end} {cadence} {expose} {nok} {ntotal}\\n')\n\n except hcam.ucam.PowerOnOffError:\n # Power on/off\n tdata[run] = ['power-on-off',]\n tout.write(f'{run} power-on-off\\n')\n if full: print(f'{run} was a power-on or -off')\n\n except hcam.HipercamError:\n # No good times\n tdata[run] = ['','','','','','',0,ntotal]\n tout.write(f'{run} UNDEF UNDEF UNDEF UNDEF UNDEF UNDEF 0 {ntotal}\\n')\n if full:\n exc_type, exc_value, exc_traceback = sys.exc_info()\n traceback.print_tb(exc_traceback, limit=1)\n traceback.print_exc()\n print(f'No good times found for {run}; ntotal = {ntotal}')\n\n except:\n # some other failure\n exc_type, exc_value, exc_traceback = sys.exc_info()\n traceback.print_tb(exc_traceback, limit=1)\n traceback.print_exc()\n print(\"Problem on run = \", dfile)\n\n # Load of undefined\n tdata[run] = 8*['']\n tout.write(f'{run} {\" \".join(8*[\"UNDEF\"])}\\n')\n\n if okwrite:\n print('Written timing data to',times)\n\n return tdata", "def data(self):\n dico = {}\n for d_ in [\"flux\",\"var\",\"lbda\",\"mjd\",\"bandname\",\"zp\",\"zpsys\"]:\n dico[d_] = self.get(d_)\n return dico", "def dictOfDraws(self):\n return dict()", "def device_state_attributes(self):\n attributes = {\n ATTR_DEVICE_TYPE: self._device_type,\n ATTR_ATTRIBUTION: ATTRIBUTION\n }\n\n if not self._coordinator.data:\n return attributes\n\n # reformat date/time\n try:\n str_start = self._coordinator.data['start']\n str_end = self._coordinator.data['end']\n dt_start = datetime.strptime(str_start, INPUT_DATETIME_FORMAT)\n dt_end = datetime.strptime(str_end, INPUT_DATETIME_FORMAT)\n attributes[ATTR_START_DATETIME] = datetime.strftime(dt_start, ATTR_DATETIME_FORMAT)\n attributes[ATTR_END_DATETIME] = datetime.strftime(dt_end, ATTR_DATETIME_FORMAT)\n except:\n _LOGGER.warning(\"Failed to reformat datetime object\")\n\n return attributes", "def getGM2MIsd(self):\r\n return {self._pga: self._constants[self._pga]['SMMI'],\r\n self._pgv: self._constants[self._pgv]['SMMI'],\r\n self._sa03: self._constants[self._sa03]['SMMI'],\r\n self._sa10: self._constants[self._sa10]['SMMI'],\r\n self._sa30: self._constants[self._sa30]['SMMI']}", "def device_state_attributes(self):\n # attributes = super().device_state_attributes\n attributes = {ATTR_UNIT_OF_MEASUREMENT: self._unit}\n return attributes", "def datetime_skeletons(self) -> localedata.LocaleDataDict:\n return self._data['datetime_skeletons']" ]
[ "0.6244639", "0.5948716", "0.5810174", "0.5733287", "0.5685426", "0.5663543", "0.56323534", "0.5594207", "0.55913526", "0.5588648", "0.5566428", "0.555051", "0.5539472", "0.5533902", "0.5526651", "0.5480028", "0.5478169", "0.54696566", "0.5455117", "0.54445684", "0.5444169", "0.54380393", "0.54355836", "0.5417831", "0.54154325", "0.5409622", "0.54042137", "0.54016465", "0.53939325", "0.53409976", "0.5335453", "0.5333065", "0.53159213", "0.5284123", "0.5271666", "0.526858", "0.5264775", "0.5264197", "0.5235405", "0.5230689", "0.52304107", "0.5199759", "0.518758", "0.51869535", "0.5182663", "0.51775044", "0.51744574", "0.5167337", "0.5161217", "0.5158477", "0.5151023", "0.5150847", "0.51500887", "0.5149788", "0.5148112", "0.5148112", "0.51408285", "0.51382804", "0.51331246", "0.51290214", "0.51161045", "0.51116383", "0.5109652", "0.509885", "0.5094097", "0.50936437", "0.50924104", "0.50916845", "0.50877017", "0.5082879", "0.50823534", "0.507674", "0.5075953", "0.5070842", "0.507023", "0.50652206", "0.5058887", "0.5056957", "0.50545406", "0.5053766", "0.5041317", "0.50410867", "0.5036692", "0.5034514", "0.50327426", "0.50323987", "0.50220853", "0.50139064", "0.50097615", "0.5008134", "0.5005728", "0.5004297", "0.5001748", "0.4998482", "0.4997552", "0.49900877", "0.49891156", "0.4982401", "0.49819732", "0.49805048", "0.49800086" ]
0.0
-1
Calculates the period of rahukaalam.
def rahukaalam(self, date=None, local=True):
    if self.astral is None:
        self.astral = Astral()

    if date is None:
        date = datetime.date.today()

    rahukaalam = self.astral.rahukaalam_utc(date, self.latitude, self.longitude)

    if local:
        for key, dt in rahukaalam.items():
            rahukaalam[key] = dt.astimezone(self.tz)

    return rahukaalam
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def period(self) -> int:", "def generate_rapel(self):\n config = self.env['ka_hr_payroll.config'].default_config()\n last_period = self.get_last_period(self.status_id.id, self.company_payroll_id.id, config=config)\n if last_period:\n date_done = datetime.strptime(self.date_done, DATETIME_FORMAT)\n\n if date_done.day > config.date_end:\n date_pay = date_done + relativedelta(months=1)\n else:\n date_pay = date_done\n\n data_rapel = {\n 'new_period_id': self.id,\n 'old_period_id': last_period.id,\n 'date_start': get_utc_timezone(self.date_start + ' 00:00:00'),\n 'date_end': self.date_done,\n 'year_pay': str(date_pay.year),\n 'month_pay': date_pay.month,\n 'status_id': self.status_id.id,\n 'company_payroll_id': self.company_payroll_id.id,\n }\n\n rapel_period = self.env['ka_hr_payroll.rapel.tunjangan.khusus.period'].create(data_rapel)\n self.rapel_id = rapel_period\n\n for line in self.line_ids:\n line.generate_rapel(last_period.id, rapel_period.id)\n\n self.state_rapel = '2'\n self.env.user.notify_info(\"{0}, berhasil dibuat!\".format(rapel_period.name))\n else:\n raise ValidationError(\n \"Tunjangan khusus periode sebelumnya tidak ditemukan! Anda tidak bisa melanjutkan aksi ini.\")", "def _period( self ):\r\n\treturn 2 * pi * sqrt( self.orbital_elements[0]**3 / self.mu_central_body )\r\n\t# http://en.wikipedia.org/wiki/Orbital_period#Calculation\r", "def periodCheck(data):", "def define_period():\n\n def_period = {'lt': 5820, # 97 minutes\n 'lon': 6240, # 104 minutes\n 'angle': 5820}\n\n return def_period", "def hmstora(rah,ram,ras):\n\thrs = (float(rah)+(float(ram)/60)+(float(ras)/3600.0)) % 24\n\n\treturn 15*hrs", "def period(self, value: int, /) -> None:", "def period(self):\n return float(self._period) / 1000", "def answer():\r\n start = time.time()\r\n result = 0\r\n # lengths of sequences to calculate to find period\r\n lengths = [200, 200, 1000, 5000, 20000, 200, 300000, 10**6, 10**7]\r\n for n, length in zip(range(2, 11), lengths):\r\n u = fast_ulam(2 * n + 1, 10**11, length)\r\n print(f'U(2, {2 * n + 1}) =', u)\r\n result += u\r\n # print results\r\n print('Result:', result)\r\n print('Runtime:', time.time() - start, 'seconds')", "def periods_in_a_year(self) -> float:\n return self.length / self.yearfrac", "def real_period(self):\n return max(\n self.period * self.PERIOD_MARGIN_FACTOR -\n (self.max_lag if self.max_lag else self.lag * self.LAG_MARGIN_FACTOR),\n 0.0)", "def period(self):\n return self.__period", "def calcula_variancias(self):\n for index in range(1, self.n_rodadas+1):\n self.var_x1 += (self.x1_med_rodada[index] - self.x1_med_total) ** 2\n self.var_w1 += (self.w1_med_rodada[index] - self.w1_med_total) ** 2\n self.var_nq1 += (self.nq1_med_rodada[index] - self.nq1_med_total) ** 2\n self.var_ns1 += (self.ns1_med_rodada[index] - self.ns1_med_total) ** 2\n self.var_n1 += (self.n1_med_rodada[index] - self.n1_med_total) ** 2\n self.var_t1 += (self.t1_med_rodada[index] - self.t1_med_total) ** 2\n self.var_w1_med += (self.var_w1_med_rodada[index] - self.var_w1_med_total) ** 2\n\n self.var_x2 += (self.x2_med_rodada[index] - self.x2_med_total) ** 2\n self.var_w2 += (self.w2_med_rodada[index] - self.w2_med_total) ** 2\n self.var_nq2 += (self.nq2_med_rodada[index] - self.nq2_med_total) ** 2\n self.var_ns2 += (self.ns2_med_rodada[index] - self.ns2_med_total) ** 2\n self.var_n2 += (self.n2_med_rodada[index] - self.n2_med_total) ** 2\n self.var_t2 += (self.t2_med_rodada[index] - self.t2_med_total) ** 2\n self.var_w2_med += (self.var_w2_med_rodada[index] - self.var_w2_med_total) ** 
2\n\n self.var_x1 /= (self.n_rodadas - 1)\n self.var_w1 /= (self.n_rodadas - 1)\n self.var_nq1 /= (self.n_rodadas - 1)\n self.var_ns1 /= (self.n_rodadas - 1)\n self.var_n1 /= (self.n_rodadas - 1)\n self.var_t1 /= (self.n_rodadas - 1)\n self.var_w1_med /= (self.n_rodadas - 1)\n\n self.var_x2 /= (self.n_rodadas - 1)\n self.var_w2 /= (self.n_rodadas - 1)\n self.var_nq2 /= (self.n_rodadas - 1)\n self.var_ns2 /= (self.n_rodadas - 1)\n self.var_n2 /= (self.n_rodadas - 1)\n self.var_t2 /= (self.n_rodadas - 1)\n self.var_w2_med /= (self.n_rodadas - 1)", "def get_periods():\n return [\n relativedelta(),\n relativedelta(days=6),\n relativedelta(months=1),\n relativedelta(months=3),\n relativedelta(years=1),\n relativedelta(years=5)\n ]", "def value_ret_calendar_period(self, year: int, month: int = None) -> float:\n if month is None:\n period = str(year)\n else:\n period = '-'.join([str(year), str(month).zfill(2)])\n rtn = self.tsdf.copy().pct_change()\n rtn = rtn.loc[period] + 1\n return float(rtn.apply(np.cumprod, axis='index').iloc[-1] - 1)", "def EstimatePeriod(response):\n #is a bit shoddy, requires long time periods to produce consistent results\n \n \n roots = np.array([])\n for i in range(len(response[1])):\n try:\n if response[1][i] == 0:\n roots = np.append(roots, response[0][i])\n \n #tests for sign change\n elif response[1][i] * response[1][i+1] < 0:\n roots = np.append(roots, response[0][i])\n \n else:\n pass\n \n except IndexError:\n pass\n \n #from root(N) = t_0 + N*T/2, and sum of series in N. NB a divsion by N is\n #implicit in the mean\n roots = 2 * (roots - roots[0])\n period = 2 * np.mean(roots)/(len(roots) + 1)\n \n #could add error calculation in future\n return period", "def get_duration(period, aor, e):\n return 0.25 * period * np.sqrt(1 - e**2) / aor", "def test_period_average():\n\n time_point = datetime(2012, 12, 31)\n period = 25\n spy = DEFAULT_ASSET_FACTORY.make_asset(\"SPY\")\n\n weatherman = weathermen.period_average(CALENDAR)\n forecast = weatherman(DEFAULT_ASSET_FACTORY, time_point, period)\n\n assert is_close(forecast.cagr(spy), .152)", "def annualize_rets(r, periods_per_year):\n compounded_growth = (1+r).prod()\n n_periods = r.shape[0]\n return compounded_growth**(periods_per_year/n_periods)-1", "def get_period_from_grainy_time(time_value):\n\n if time_value.grain == 4: # \"Day\"\n start_date = end_date = string_to_date(time_value.value)\n elif time_value.grain == 3: # \"Week\"\n # Add 6 days to get the end of the week.\n start_date = string_to_date(time_value.value)\n end_date = start_date + timedelta(days=6)\n elif time_value.grain == 2: # \"Month\"\n # Get the day of the end of the month.\n start_date = string_to_date(time_value.value)\n days_in_month = monthrange(start_date.year, start_date.month)[1]\n end_date = start_date.replace(day=days_in_month)\n # TODO: What is period.grain == 1?\n elif time_value.grain == 0: # \"Year\"\n # Get the day of the end of the year.\n start_date = string_to_date(time_value.value)\n end_date = start_date.replace(month=12, day=31)\n\n return (start_date, end_date)", "def update_period(self):\n return 0.1", "def period(self):\n return f\"{self.measurement_date.year}/{self.measurement_date.month}\"", "def get_chart_period(self,req):\n now=int(DATE())\n period=INT(req.period) # allow for it having been a string\n if period>9999: # assume it is a month\n if period<(now//100): # a valid complete previous month\n prior=True# this is a previous month\n else:\n period=now//100 # default to current month\n prior=False\n 
start=period*100+1\n end=self.nextperiod(period)*100+1\n else: # assume it is a year\n if period and (period<(now//10000)): # a prior year\n prior=True# this is a previous year\n else:\n##\n# period=now//100 # default to current month\n# prior=False\n# start=period*100+1\n# end=self.nextperiod(period)*100+1\n##\n period=now//10000 # default to current year\n prior=False\n start=period*10000+101\n end=self.nextperiod(period)*10000+101\n return period,start,end,prior", "def AkkarRhypo2014(M, T, Rhypo, rake, vs30, stddev_type='Total'):\r\n period = [0., 0.01, 0.02, 0.03, 0.04, 0.05, 0.075, 0.1, 0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.22, 0.24,\r\n 0.26, 0.28, 0.3, 0.32, 0.34, 0.36, 0.38, 0.4, 0.42, 0.44, 0.46, 0.48, 0.5, 0.55, 0.6, 0.65, 0.7, 0.75, 0.8, 0.85, 0.9,\r\n 0.95, 1., 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2., 2.2, 2.4, 2.6, 2.8, 3., 3.2, 3.4, 3.6, 3.8, 4., -10.]\r\n n_periods = len(period) - 1\r\n \r\n # - Compute Sa and sigma with predefined periods\r\n if type(T) is int and T == 1000:\r\n Sa, Sigma = np.zeros(n_periods), np.zeros(n_periods)\r\n for ipT in range(n_periods):\r\n Sa[ipT], Sigma[ipT] = AkkarEtAlRhypo2014_sub(ipT, M, Rhypo, rake, vs30, stddev_type=stddev_type)\r\n return period[:-1], Sa, Sigma\r\n else:\r\n n_T = len(T)\r\n Sa, Sigma = np.zeros(n_T), np.zeros(n_T)\r\n for i in range(n_T):\r\n Ti = T[i]\r\n if Ti not in period: # The user defined period requires interpolation\r\n ip_low = np.where(period < Ti)[0][-1]\r\n ip_high = np.where(period > Ti)[0][0]\r\n T_low = period[ip_low]\r\n T_high = period[ip_high]\r\n \r\n Sa_low, Sigma_low = AkkarEtAlRhypo2014_sub(ip_low, M, Rhypo, rake, vs30, stddev_type=stddev_type)\r\n Sa_high, Sigma_high = AkkarEtAlRhypo2014_sub(ip_high, M, Rhypo, rake, vs30, stddev_type=stddev_type)\r\n x = [np.log(T_low), np.log(T_high)]\r\n Y_sa = [np.log(Sa_low), np.log(Sa_high)]\r\n Y_sigma = [np.log(Sigma_low), np.log(Sigma_high)]\r\n Sa[i] = np.exp(np.interp(np.log(Ti), x, Y_sa))\r\n Sigma[i] = np.exp(np.interp(np.log(Ti), x, Y_sigma))\r\n else:\r\n ip_T = np.where(period == Ti)[0][0]\r\n Sa[i], Sigma[i] = AkkarEtAlRhypo2014_sub(ip_T, M, Rhypo, rake, vs30, stddev_type=stddev_type)\r\n return Sa, Sigma", "def __get_period(self):\n return self.__period", "def _getTimePeriod(self):\n if isinstance(self.period, tuple):\n period = self.soapCustomDateRange % \\\n (self.soapCustomDate % (self.period[1].day,\n self.period[1].month,\n self.period[1].year),\n self.soapCustomDate % (self.period[0].day,\n self.period[0].month,\n self.period[0].year))\n else:\n period = self.soapPredefinedTime % self.period\n self.logger.debug(\"period = %s\", period)\n return period", "def _calculate_period(vals):\n\tif len(vals) < 4:\n\t\treturn None\n\t# if self.firmware['major'] < 16:\n\t# \treturn ((vals[3] << 24) | (vals[2] << 16) | (vals[1] << 8) | vals[0]) / 12e6\n\t# else:\n\treturn self._calculate_float(vals)", "def rama(gregorian_year):\n return hindu_lunar_event(1, 9, Clock.days_from_hours(12), gregorian_year)", "def f_precios_masivos(p0_fini, p1_ffin, p2_gran, p3_inst, p4_oatk, p5_ginc):\n\n def f_datetime_range_fx(p0_start, p1_end, p2_inc, p3_delta):\n \"\"\"\n Parameters\n ----------\n p0_start\n p1_end\n p2_inc\n p3_delta\n Returns\n -------\n ls_resultado\n Debugging\n ---------\n \"\"\"\n\n ls_result = []\n nxt = p0_start\n\n while nxt <= p1_end:\n ls_result.append(nxt)\n if p3_delta == 'minutes':\n nxt += timedelta(minutes=p2_inc)\n elif p3_delta == 'hours':\n nxt += timedelta(hours=p2_inc)\n elif p3_delta == 'days':\n nxt += 
timedelta(days=p2_inc)\n\n return ls_result\n\n # inicializar api de OANDA\n\n api = API(access_token=p4_oatk)\n\n gn = {'S30': 30, 'S10': 10, 'S5': 5, 'M1': 60, 'M5': 60 * 5, 'M15': 60 * 15,\n 'M30': 60 * 30, 'H1': 60 * 60, 'H4': 60 * 60 * 4, 'H8': 60 * 60 * 8,\n 'D': 60 * 60 * 24, 'W': 60 * 60 * 24 * 7, 'M': 60 * 60 * 24 * 7 * 4}\n\n # -- para el caso donde con 1 peticion se cubran las 2 fechas\n if int((p1_ffin - p0_fini).total_seconds() / gn[p2_gran]) < 4999:\n\n # Fecha inicial y fecha final\n f1 = p0_fini.strftime('%Y-%m-%dT%H:%M:%S')\n f2 = p1_ffin.strftime('%Y-%m-%dT%H:%M:%S')\n\n # Parametros pra la peticion de precios\n params = {\"granularity\": p2_gran, \"price\": \"M\", \"dailyAlignment\": 16, \"from\": f1,\n \"to\": f2}\n\n # Ejecutar la peticion de precios\n a1_req1 = instruments.InstrumentsCandles(instrument=p3_inst, params=params)\n a1_hist = api.request(a1_req1)\n\n # Para debuging\n # print(f1 + ' y ' + f2)\n lista = list()\n\n # Acomodar las llaves\n for i in range(len(a1_hist['candles']) - 1):\n lista.append({'TimeStamp': a1_hist['candles'][i]['time'],\n 'Open': a1_hist['candles'][i]['mid']['o'],\n 'High': a1_hist['candles'][i]['mid']['h'],\n 'Low': a1_hist['candles'][i]['mid']['l'],\n 'Close': a1_hist['candles'][i]['mid']['c']})\n\n # Acomodar en un data frame\n r_df_final = pd.DataFrame(lista)\n r_df_final = r_df_final[['TimeStamp', 'Open', 'High', 'Low', 'Close']]\n r_df_final['TimeStamp'] = pd.to_datetime(r_df_final['TimeStamp'])\n r_df_final['Open'] = pd.to_numeric(r_df_final['Open'], errors='coerce')\n r_df_final['High'] = pd.to_numeric(r_df_final['High'], errors='coerce')\n r_df_final['Low'] = pd.to_numeric(r_df_final['Low'], errors='coerce')\n r_df_final['Close'] = pd.to_numeric(r_df_final['Close'], errors='coerce')\n\n return r_df_final\n\n # -- para el caso donde se construyen fechas secuenciales\n else:\n\n # hacer series de fechas e iteraciones para pedir todos los precios\n fechas = f_datetime_range_fx(p0_start=p0_fini, p1_end=p1_ffin, p2_inc=p5_ginc,\n p3_delta='minutes')\n\n # Lista para ir guardando los data frames\n lista_df = list()\n\n for n_fecha in range(0, len(fechas) - 1):\n\n # Fecha inicial y fecha final\n f1 = fechas[n_fecha].strftime('%Y-%m-%dT%H:%M:%S')\n f2 = fechas[n_fecha + 1].strftime('%Y-%m-%dT%H:%M:%S')\n\n # Parametros pra la peticion de precios\n params = {\"granularity\": p2_gran, \"price\": \"M\", \"dailyAlignment\": 16, \"from\": f1,\n \"to\": f2}\n\n # Ejecutar la peticion de precios\n a1_req1 = instruments.InstrumentsCandles(instrument=p3_inst, params=params)\n a1_hist = api.request(a1_req1)\n\n # Para debuging\n print(f1 + ' y ' + f2)\n lista = list()\n\n # Acomodar las llaves\n for i in range(len(a1_hist['candles']) - 1):\n lista.append({'TimeStamp': a1_hist['candles'][i]['time'],\n 'Open': a1_hist['candles'][i]['mid']['o'],\n 'High': a1_hist['candles'][i]['mid']['h'],\n 'Low': a1_hist['candles'][i]['mid']['l'],\n 'Close': a1_hist['candles'][i]['mid']['c']})\n\n # Acomodar en un data frame\n pd_hist = pd.DataFrame(lista)\n pd_hist = pd_hist[['TimeStamp', 'Open', 'High', 'Low', 'Close']]\n pd_hist['TimeStamp'] = pd.to_datetime(pd_hist['TimeStamp'])\n\n # Ir guardando resultados en una lista\n lista_df.append(pd_hist)\n\n # Concatenar todas las listas\n r_df_final = pd.concat([lista_df[i] for i in range(0, len(lista_df))])\n\n # resetear index en dataframe resultante porque guarda los indices del dataframe pasado\n r_df_final = r_df_final.reset_index(drop=True)\n r_df_final['Open'] = pd.to_numeric(r_df_final['Open'], 
errors='coerce')\n r_df_final['High'] = pd.to_numeric(r_df_final['High'], errors='coerce')\n r_df_final['Low'] = pd.to_numeric(r_df_final['Low'], errors='coerce')\n r_df_final['Close'] = pd.to_numeric(r_df_final['Close'], errors='coerce')\n\n return r_df_final", "def calculo(self):\n return self.peso / (self.altura * self.altura)", "def get_periods(a,t):\n ex = get_extrema(a,t)[1]\n \n l = ipol(ex,0)\n \n diff = np.diff(l)\n \n return diff", "def to_period(self, time_unit=base_unit):\r\n tuc = time_unit_conversion\r\n scale_factor = (float(tuc['s']) / tuc[time_unit])\r\n\r\n return np.int64((1 / self) * scale_factor)", "def _get_period(self, n_sec, n_nsec, p_sec, p_nsec):\n return self._get_time(n_sec, n_nsec) - self._get_time(p_sec, p_nsec)", "def compute_angams(self, compute_lagnams=True):\n\n # INITIALISE VARIABLES\n self.jd_sunrise = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.jd_sunset = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.jd_moonrise = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.jd_moonset = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.solar_month = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.solar_month_day = [None] * jyotisha.panchangam.temporal.MAX_SZ\n\n solar_month_sunrise = [None] * jyotisha.panchangam.temporal.MAX_SZ\n\n self.lunar_month = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.month_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.tithi_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.tithi_sunrise = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.nakshatram_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.nakshatram_sunrise = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.yogam_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.yogam_sunrise = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.karanam_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.rashi_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.lagna_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n\n self.weekday = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.kaalas = [dict() for _x in range(jyotisha.panchangam.temporal.MAX_SZ)]\n daily_panchaangas = [None] * jyotisha.panchangam.temporal.MAX_SZ\n\n self.fest_days = {}\n self.festivals = [[] for _x in range(jyotisha.panchangam.temporal.MAX_SZ)]\n\n # Computing solar month details for Dec 31\n # rather than Jan 1, since we have an always increment\n # solar_month_day at the start of the loop across every day in\n # year\n daily_panchangam_start = daily.Panchangam(city=self.city, julian_day=self.jd_start - 1,\n ayanamsha_id=self.ayanamsha_id)\n daily_panchangam_start.compute_solar_day()\n self.solar_month[1] = daily_panchangam_start.solar_month\n solar_month_day = daily_panchangam_start.solar_month_day\n\n if self.solar_month[1] != 9:\n logging.error(self.solar_month[1])\n raise (ValueError('Dec 31 does not appear to be Dhanurmasa!'))\n\n month_start_after_sunset = False\n\n #############################################################\n # Compute all parameters -- sun/moon latitude/longitude etc #\n #############################################################\n\n for d in range(jyotisha.panchangam.temporal.MAX_SZ):\n self.weekday[d] = (self.weekday_start + d - 1) % 7\n\n for d in range(-1, jyotisha.panchangam.temporal.MAX_DAYS_PER_YEAR + 2):\n [y, m, dt, t] = swe.revjul(self.jd_start + d - 1)\n\n # checking @ 6am local - can we do any better?\n local_time = tz(self.city.timezone).localize(datetime(y, m, dt, 6, 0, 0))\n # compute 
offset from UTC in hours\n tz_off = (datetime.utcoffset(local_time).days * 86400 +\n datetime.utcoffset(local_time).seconds) / 3600.0\n\n # What is the jd at 00:00 local time today?\n jd = self.jd_start - (tz_off / 24.0) + d - 1\n\n # TODO: Eventually, we are shifting to an array of daily panchangas. Reason: Better modularity.\n # The below block is temporary code to make the transition seamless.\n daily_panchaangas[d + 1] = daily.Panchangam(city=self.city, julian_day=jd + 1, ayanamsha_id=self.ayanamsha_id)\n daily_panchaangas[d + 1].compute_sun_moon_transitions()\n daily_panchaangas[d + 1].compute_solar_month()\n self.jd_sunrise[d + 1] = daily_panchaangas[d + 1].jd_sunrise\n self.jd_sunset[d + 1] = daily_panchaangas[d + 1].jd_sunset\n self.jd_moonrise[d + 1] = daily_panchaangas[d + 1].jd_moonrise\n self.jd_moonset[d + 1] = daily_panchaangas[d + 1].jd_moonset\n self.solar_month[d + 1] = daily_panchaangas[d + 1].solar_month_sunset\n\n solar_month_sunrise[d + 1] = daily_panchaangas[d + 1].solar_month_sunrise\n\n if (d <= 0):\n continue\n # This is just to initialise, since for a lot of calculations,\n # we require comparing with tomorrow's data. This computes the\n # data for day 0, -1.\n\n # Solar month calculations\n if month_start_after_sunset is True:\n solar_month_day = 0\n month_start_after_sunset = False\n\n solar_month_end_jd = None\n if self.solar_month[d] != self.solar_month[d + 1]:\n solar_month_day = solar_month_day + 1\n if self.solar_month[d] != solar_month_sunrise[d + 1]:\n month_start_after_sunset = True\n [_m, solar_month_end_jd] = jyotisha.panchangam.temporal.get_angam_data(\n self.jd_sunrise[d], self.jd_sunrise[d + 1], jyotisha.panchangam.temporal.SOLAR_MONTH,\n ayanamsha_id=self.ayanamsha_id)[0]\n elif solar_month_sunrise[d] != self.solar_month[d]:\n # sankrAnti!\n # sun moves into next rAshi before sunset\n solar_month_day = 1\n [_m, solar_month_end_jd] = jyotisha.panchangam.temporal.get_angam_data(\n self.jd_sunrise[d], self.jd_sunrise[d + 1], jyotisha.panchangam.temporal.SOLAR_MONTH,\n ayanamsha_id=self.ayanamsha_id)[0]\n else:\n solar_month_day = solar_month_day + 1\n solar_month_end_jd = None\n\n # if self.solar_month[d-1] != self.solar_month[d]:\n # # We have a sUrya sankrAnti between yest. 
and today's sunsets\n # solar_month_day = 1\n # if solar_month_sunrise[d] == self.solar_month[d]:\n # #the sankrAnti happened before today's sunrise\n # #so search for the end time between yesterday and\n # #today's sunrises\n # [_m, solar_month_end_jd] = helper_functions.get_angam_data(self.jd_sunrise[d-1],\n # self.jd_sunrise[d],SOLAR_MONTH)[0]\n # else:\n # #the sankrAnti happens after today's sunrise\n # #so search for the end time between today and\n # #tomorrow's sunrises\n # [_m, solar_month_end_jd] = helper_functions.get_angam_data(self.jd_sunrise[d],\n # self.jd_sunrise[d + 1],SOLAR_MONTH)[0]\n # #print ('-----',revjul(jd = solar_month_end_jd, tz_off = tz_off))\n # else:\n # solar_month_day += 1\n # solar_month_end_jd = None\n\n if solar_month_end_jd is None:\n solar_month_end_time = ''\n else:\n solar_month_end_time = '\\\\mbox{%s{\\\\tiny\\\\RIGHTarrow}\\\\textsf{%s}}' % (\n jyotisha.panchangam.temporal.NAMES['RASHI_NAMES'][self.script][_m], jyotisha.panchangam.temporal.Time(\n 24 * (solar_month_end_jd - jd)).toString(format=self.fmt))\n\n # logging.debug(jyotisha.panchangam.temporal.NAMES)\n\n self.month_data[d] = '\\\\sunmonth{%s}{%d}{%s}' % (\n jyotisha.panchangam.temporal.NAMES['RASHI_NAMES'][self.script][self.solar_month[d]],\n solar_month_day, solar_month_end_time)\n self.solar_month_day[d] = solar_month_day\n\n # KARADAYAN NOMBU -- easy to check here\n if solar_month_end_jd is not None: # month ends today\n if (self.solar_month[d] == 12 and solar_month_day == 1) or \\\n (self.solar_month[d] == 11 and solar_month_day != 1):\n self.fest_days['ta:kAraDaiyAn2 nOn2bu'] = [d]\n\n # Compute the various kaalas\n # Sunrise/sunset and related stuff (like rahu, yama)\n YAMAGANDA_OCTETS = [4, 3, 2, 1, 0, 6, 5]\n RAHUKALA_OCTETS = [7, 1, 6, 4, 5, 3, 2]\n GULIKAKALA_OCTETS = [6, 5, 4, 3, 2, 1, 0]\n\n self.kaalas[d] = {\n 'prAtaH sandhyA': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunset[d - 1], self.jd_sunrise[d], 14, 15),\n 'prAtaH sandhyA end': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 4, 15),\n 'prAtah': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 0, 5),\n 'saGgava': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 1, 5),\n 'madhyAhna': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 2, 5),\n 'mAdhyAhnika sandhyA': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 5, 15),\n 'mAdhyAhnika sandhyA end': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 13, 15),\n 'aparAhna': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 3, 5),\n 'sAyAhna': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 4, 5),\n 'sAyaM sandhyA': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 14, 15),\n 'sAyaM sandhyA end': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunset[d], self.jd_sunrise[d + 1], 1, 15),\n 'rAtri yAma 1': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunset[d], self.jd_sunrise[d + 1], 1, 4),\n 'zayana': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunset[d], self.jd_sunrise[d + 1], 3, 8),\n 'dinAnta': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunset[d], self.jd_sunrise[d + 1], 18.25, 30),\n 'rahu': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d],\n RAHUKALA_OCTETS[self.weekday[d]], 8),\n 'yama': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], 
self.jd_sunset[d],\n YAMAGANDA_OCTETS[self.weekday[d]], 8),\n 'gulika': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d],\n GULIKAKALA_OCTETS[self.weekday[d]], 8)\n }\n\n # Compute all the anga datas\n self.tithi_data[d] = jyotisha.panchangam.temporal.get_angam_data(self.jd_sunrise[d], self.jd_sunrise[d + 1],\n jyotisha.panchangam.temporal.TITHI,\n ayanamsha_id=self.ayanamsha_id)\n self.tithi_sunrise[d] = self.tithi_data[d][0][0]\n self.nakshatram_data[d] = jyotisha.panchangam.temporal.get_angam_data(self.jd_sunrise[d],\n self.jd_sunrise[d + 1],\n jyotisha.panchangam.temporal.NAKSHATRAM,\n ayanamsha_id=self.ayanamsha_id)\n self.nakshatram_sunrise[d] = self.nakshatram_data[d][0][0]\n self.yogam_data[d] = jyotisha.panchangam.temporal.get_angam_data(self.jd_sunrise[d], self.jd_sunrise[d + 1],\n jyotisha.panchangam.temporal.YOGAM,\n ayanamsha_id=self.ayanamsha_id)\n self.yogam_sunrise[d] = self.yogam_data[d][0][0]\n self.karanam_data[d] = jyotisha.panchangam.temporal.get_angam_data(self.jd_sunrise[d],\n self.jd_sunrise[d + 1],\n jyotisha.panchangam.temporal.KARANAM,\n ayanamsha_id=self.ayanamsha_id)\n self.rashi_data[d] = jyotisha.panchangam.temporal.get_angam_data(self.jd_sunrise[d], self.jd_sunrise[d + 1],\n jyotisha.panchangam.temporal.RASHI,\n ayanamsha_id=self.ayanamsha_id)\n if compute_lagnams:\n self.lagna_data[d] = get_lagna_data(self.jd_sunrise[d], self.city.latitude,\n self.city.longitude, tz_off, ayanamsha_id=self.ayanamsha_id)", "def getaccidentesRangoHoras(analyzer, Start_Time, End_Time): \n lst = om.values(analyzer['dateIndex'], minKey(analyzer), maxKey(analyzer))\n dicc_severidad = {\"1\":0,\"2\":0,\"3\":0,\"4\":0}\n for i in range(lt.size(lst)):\n accidentes_dia = lt.getElement(lst, i)['severityIndex'][\"table\"]\n cantidad_accidentes = lt.size(accidentes_dia) \n total_severidad = total_severidad_hora(cantidad_accidentes, accidentes_dia, Start_Time, End_Time, dicc_severidad)\n total_accidentes = 0\n for severidad in dicc_severidad:\n total_accidentes += dicc_severidad[severidad]\n for severidad in total_severidad: \n porcentaje = round(int(dicc_severidad[severidad]) / total_accidentes, 2)\n dicc_severidad[severidad] = (\"Cantidad accidentes: \" + str(dicc_severidad[severidad]), \"Porcentaje: \"+str((porcentaje * 100))) \n return dicc_severidad", "def _get_prorata_interval_rate(self, cr, uid, change_date, context=None):\n month_days = calendar.monthrange(change_date.year,\n change_date.month)[1]\n start_date = add_months(change_date, 1)\n end_date = start_date.replace(day=month_days)\n used_days = month_days - change_date.day\n ptx = self._prorata_rate(used_days, month_days)\n\n return start_date, end_date, ptx", "def period(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"period\")", "def period(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"period\")", "def period(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"period\")", "def period(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"period\")", "def period(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"period\")", "def period(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"period\")", "def period(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"period\")", "def period(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"period\")", "def get_period_length(self) -> int:\n return (dataset.max_date - dataset.min_date).days + 1", "def calmar_ratio(returns, 
period=DAILY):\n\n temp_max_dd = max_drawdown(returns=returns)\n if temp_max_dd < 0:\n temp = annual_return(\n returns=returns,\n period=period\n ) / abs(max_drawdown(returns=returns))\n else:\n return np.nan\n\n if np.isinf(temp):\n return np.nan\n\n return temp", "def periodKepler(semi_axis, M_planet):\n M_sol_kg = 1.9891e30\n M_sol_G = 2.959139768995959e-04\n M_grav = M_sol_G * M_planet / M_sol_kg\n period = np.sqrt(((2 * np.pi)**2 * semi_axis**3) / (M_sol_G + M_grav))\n return(period)", "def _prorata_rate(self, days_used, days_in_month):\n return (100 * days_used // days_in_month) / 100.0", "def annualize_vol(self,r, periods_per_year):\n return r.std() * (periods_per_year ** 0.5)", "def get_obital_periods(self):\n text = \"\"\n for body in self.bodies:\n if body.name in PLANET_LIST:\n try: # If it has done more than one lap\n # Get the period in days\n period = round(\n body.revs[1] / (body.revs[0] * 3600 * 24), 2)\n # Get the fracion of a year\n fraction = round(period / 365, 2)\n\n text += (body.name + \": \" + str(period) + \" days (\" +\n str(fraction) + \" times the real Earth's period).\\n\")\n except:\n pass\n # Write to text file.\n with open('orbital-periods.txt', 'w') as f:\n f.write(text)", "def annual_return(returns, period=DAILY):\n\n if returns.size < 1:\n return np.nan\n\n try:\n ann_factor = ANNUALIZATION_FACTORS[period]\n except KeyError:\n raise ValueError(\n \"period cannot be '{}'. \"\n \"Must be '{}', '{}', or '{}'\".format(\n period, DAILY, WEEKLY, MONTHLY\n )\n )\n\n num_years = float(len(returns)) / ann_factor\n df_cum_rets = cum_returns(returns, starting_value=100)\n start_value = 100\n end_value = df_cum_rets.iloc[-1]\n\n total_return = (end_value - start_value) / start_value\n annual_return = (1. + total_return) ** (1 / num_years) - 1\n\n return annual_return", "def analysis(self):\n df = self.loading(datafix=False)\n noIrrad = df[df['Global Horiz'] == -999]\n noIrradDays = noIrrad.index.date\n noDataIrrad = len(noIrradDays)\n totalIrrad = len(df['Global Horiz'])\n percDataIrrad = (noDataIrrad/totalIrrad) * 100\n\n yearsIrrad = sorted(set(df.index.year.values))\n\n print('\\nIntervalo de dados de medição: {0:d} a {1:d}'.format(\n min(yearsIrrad), max(yearsIrrad)))\n\n print('Número de linhas sem dados de irradiação: {0}'.format(\n noDataIrrad))\n print('Número total de linhas: {0}'.format(totalIrrad))\n print('Porcentagem de linhas sem dados de irradiação: {0:2.4f} %'.format(\n percDataIrrad))\n\n print('\\nDias do ano sem registro de irradiação:')\n for i in sorted(set(noIrradDays)):\n print(i.strftime('%d/%m/%Y'))\n\n code = [0, 1, 2, 5, 6]\n numberbyCode = {i: len(df[df[\"Code\"] == i]) for i in code}\n idbyCode = {0: 'no data', 1: 'sun below horizon',\n 2: 'satellite assessment', 5: 'interpolation in time', 6: 'forecast'}\n\n for i in numberbyCode.keys():\n print(\"{0}: {1} - {2:2.1f}%\".format(\n idbyCode[i], numberbyCode[i], (numberbyCode[i] / totalIrrad)*100))\n\n df.info().to_string()", "def get_period(self):\n raise NotImplementedError('Agent is an abstract base class')", "def quantaHandledByPeriod(self, period):\n\n start_datetime = self.start_time.to_python_datetime()\n end_datetime = self.end_time.to_python_datetime()\n\n total_quanta = 0\n\n # Iterate through the quanta of the period, while the starting_quanta is less\n # than the ending quanta\n\n quanta_start_time = period.start_time\n while quanta_start_time < period.end_time:\n quanta_end_time = quanta_start_time + timedelta(minutes=granularity_in_minutes())\n\n if start_datetime <= 
quanta_start_time < end_datetime:\n if start_datetime < quanta_end_time <= end_datetime:\n total_quanta = total_quanta + 1\n\n quanta_start_time = quanta_start_time + timedelta(minutes=granularity_in_minutes())\n\n return total_quanta", "def EstimatePeriodAndPlot(ID,\n f_in=\"../data/mira_asas/\",\n f_out=\"diag_figs/mira_plots/\"):\n print ID\n star = np.loadtxt(f_in + ID + \".dat\",\n usecols=(0,1,2),skiprows=0)\n ctimes = star[star[:,1] > 29.5,0]\n star = star[star[:,1] < 29.5,:]\n cvals = np.array(np.max(star[:,1])) * np.ones(ctimes.shape[0])\n ## estimate period\n freqs = lomb.get_freqs2(star[:,0])\n rss = lomb.lomb(star[:,0],star[:,1],star[:,2],freqs)\n period = 1. / freqs[np.argmin(rss)]\n ## make figure\n fig = plt.figure()\n ax = fig.add_subplot(211)\n ax.plot(star[:,0],-star[:,1],'o',color=\"gray\",alpha=.5)\n ax.plot(ctimes,-cvals,'ro',alpha=.5)\n ax.set_yticklabels(np.abs(ax.get_yticks()))\n ax.set_xlabel('Time')\n ax.set_ylabel('Magnitude')\n ax2 = fig.add_subplot(212)\n ax2.plot(star[:,0] % period,-star[:,1],'o',color=\"gray\",alpha=.5)\n ax2.plot(ctimes % period,-cvals,'ro',alpha=.5)\n ax2.set_yticklabels(np.abs(ax2.get_yticks()))\n ax2.set_xlabel('Phase')\n ax2.set_ylabel('Magnitude')\n plt.savefig(f_out + ID + \".pdf\")\n plt.close()\n return period", "def arithmetic_ret(self) -> float:\n return float(np.log(self.tsdf).diff().mean() * self.periods_in_a_year)", "def find_period(self, h_0, h_1, value_t, value_t_minus_1,t): \n if math.isclose(value_t, h_0, abs_tol=0.00001): #rounding errors in python, accepts a small diff.\n direction_start = False if h_1-h_0 < 0 else True\n direction_end = False if value_t-value_t_minus_1 < 0 else True\n if direction_start is direction_end:\n self._periodicity = t-self._t_start", "def testPeriod1(self):\n rsi = std.rsi_calc(self.gldf['Gain'], self.gldf['Loss'], 1)\n self.assertGreater(rsi.iloc[1], 99.0, \"RSI value was not close to 100 for all gain.\")\n self.assertLess(rsi.iloc[3], 0.01, \"RSI value was not close to 0 for all loss.\")", "def get_timeperiod(self, object_name, user_key = None):\n\t\treturn self.get_object('timeperiod',object_name, user_key = user_key)", "def internal_rate_of_return(proforma):\n return np.irr(proforma['Yearly Net Value'].values)", "def _unit_mo(self):\n return (((self.time_base * 60.0) * 24.0) * 365.0) / 12", "def orbital_period(self):\n return self._orbital_period", "def arithmetic_ret_func(self, months_from_last: int = None, from_date: dt.date = None, to_date: dt.date = None,\n periods_in_a_year_fixed: int = None) -> float:\n earlier, later = self.calc_range(months_from_last, from_date, to_date)\n if periods_in_a_year_fixed:\n time_factor = periods_in_a_year_fixed\n else:\n fraction = (later - earlier).days / 365.25\n how_many = self.tsdf.loc[earlier:later].count(numeric_only=True)\n time_factor = how_many / fraction\n return float(np.log(self.tsdf.loc[earlier:later]).diff().mean() * time_factor)", "def angular_momentum(self, AM):\n # Printing the amplitude to command line\n amplitude = max(AM)-min(AM)\n print('Amplitude of angular momentum during %i year(s): %g[AU²/yr²]' \\\n %(self.t, amplitude))\n # Creating an axis for the time steps\n x = np.linspace(0, self.t, self.N*self.t+1)\n # Initializing the figure\n plt.figure(figsize=(10, 10))\n # Creating the plot\n plt.plot(x, AM)\n # Decorating the plot\n plt.suptitle('Total angular momentum in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²/yr²]', fontsize=16)\n plt.legend(['AM'])", "def analyze(data):\n 
## Do welch periodogram here\n pass", "def experiment_periods():\n # Temperature readings start March 22 2004, loads start Feb 01 2004.\n # period1_start = pd.Period(\"2004-02-01 00:00\", \"H\")\n # period1_end = pd.Period(\"2005-07-01 00:00\", \"H\")\n # period2_start = pd.Period(\"2005-10-01 00:00\", \"H\")\n # period2_end = pd.Period(\"2006-10-01 00:00\", \"H\")\n period1_start = datetime.datetime.strptime(\"2004-02-01 00:00\", \"%Y-%m-%d %H:%M\")\n period1_end = datetime.datetime.strptime(\"2005-07-01 00:00\", \"%Y-%m-%d %H:%M\")\n period2_start = datetime.datetime.strptime(\"2005-10-01 00:00\", \"%Y-%m-%d %H:%M\")\n period2_end = datetime.datetime.strptime(\"2006-10-01 00:00\", \"%Y-%m-%d %H:%M\")\n return ((period1_start, period1_end), (period2_start, period2_end))", "def compute_periodpayoff(self):\n logger.debug(u\"{} Period Payoff\".format(self.joueur))\n self.currentperiod.EXPERIENCE_NOM_COURT_periodpayoff = 0\n\n # cumulative payoff since the first period\n if self.currentperiod.EXPERIENCE_NOM_COURT_period < 2:\n self.currentperiod.EXPERIENCE_NOM_COURT_cumulativepayoff = \\\n self.currentperiod.EXPERIENCE_NOM_COURT_periodpayoff\n else: \n previousperiod = self.periods[self.currentperiod.EXPERIENCE_NOM_COURT_period - 1]\n self.currentperiod.EXPERIENCE_NOM_COURT_cumulativepayoff = \\\n previousperiod.EXPERIENCE_NOM_COURT_cumulativepayoff + \\\n self.currentperiod.EXPERIENCE_NOM_COURT_periodpayoff\n\n # we store the period in the self.periodes dictionnary\n self.periods[self.currentperiod.EXPERIENCE_NOM_COURT_period] = self.currentperiod\n\n logger.debug(u\"{} Period Payoff {}\".format(\n self.joueur,\n self.currentperiod.EXPERIENCE_NOM_COURT_periodpayoff))", "def ramadan(year, eve=None):\n jd = islamic.to_jd_gregorianyear(year, 9, 1)\n if eve:\n jd = jd = 1\n return gregorian.from_jd(jd)", "def R_adp(data):\n printer('S_adp = ?')\n printer('R_adp = | (U_iso_xxx - U_iso_obs) / U_iso_obs |')\n printer('mean = sum((U_iso_xxx - U_iso_obs) / U_iso_obs) / n')\n printer('abs = sum(R_adp) / n\\n')\n printer('(geometric mean is used)\\n')\n\n printer(' | ADP_calc / ADP_obs | APD_tls / ADP_obs')\n printer(' |--------------------|-------------------')\n printer(' Atom | S_adp | R_adp | S_adp | R_adp')\n printer(' ===============================================')\n S_sum = []\n R_sum = []\n S_sum_tls = []\n R_sum_tls = []\n for atom in data['exp'].atoms:\n if not atom.element == 'H':\n U_rel_calc = cg.Uiso(atom.adp['cart_sum'])\n U_rel_obs = cg.Uiso(atom.adp['cart_meas'])\n R_adp = (U_rel_calc - U_rel_obs) / U_rel_obs\n R_sum.append(R_adp)\n S_adp = ws06(atom.adp['cart_sum'], atom.adp['cart_meas'])\n S_sum.append(S_adp)\n\n U_rel_tls = cg.Uiso(atom.adp['cart_ext'])\n R_tls = (U_rel_tls - U_rel_obs) / U_rel_obs\n R_sum_tls.append(R_tls)\n\n S_tls = ws06(atom.adp['cart_ext'], atom.adp['cart_meas'])\n S_sum_tls.append(S_tls)\n\n printer(' {0:5s}| {1:4.2f} | {2:4.2f} | {3:4.2f} | {4:4.2f}'.format(atom.name,\n S_adp,\n abs(R_adp),\n S_tls,\n abs(R_tls)))\n\n printer(' ------|----------|---------|----------|--------')\n printer(' {0:5s}| {1:4.2f} | {2:4.2f} | {3:4.2f} | {4:4.2f}'.format('mean',\n np.mean(S_sum),\n np.mean(R_sum),\n np.mean(S_sum_tls),\n np.mean(R_sum_tls)))\n printer(' {0:5s}| {1:4.2f} | {2:4.2f} | {3:4.2f} | {4:4.2f}'.format('abs',\n np.mean(S_sum),\n np.mean([abs(i) for i in R_sum]),\n np.mean(S_sum_tls),\n np.mean(\n [abs(i) for i in R_sum_tls])))\n printer(' {0:5s}| {1:4.2f} | {2:4.2f} | {3:4.2f} | {4:4.2f}'.format('SD',\n np.std(S_sum),\n np.std(R_sum),\n np.std(S_sum_tls),\n 
np.std(R_sum_tls)))\n if config.arg('correlate'):\n printer('\\n\\'mean R_adp (ADP_calc / ADP_obs)\\' can be\\ninterpreted as the ratio 1 - (ADP_int / ADP_obs).')\n else:\n printer('\\n\\'mean R_adp (ADP_tls / ADP_obs)\\' can be\\ninterpreted as the ratio 1 - (ADP_obs / ADP_int).')", "def get_period(signal, signal_sr):\n\n # perform a sanity check\n if signal is None:\n raise ValueError(\"Input signal cannot be None\")\n\n # transform the signal to the hilbert space\n hy = hilbert(signal)\n\n ey = np.sqrt(signal ** 2 + hy ** 2)\n min_time = 1.0 / signal_sr\n tot_time = len(ey) * min_time\n pow_ft = np.abs(fft(ey))\n peak_freq = pow_ft[3: int(len(pow_ft) / 2)]\n peak_freq_pos = peak_freq.argmax()\n peak_freq_val = 2 * pi * (peak_freq_pos + 2) / tot_time\n period = 2 * pi / peak_freq_val\n\n return np.array([period])", "def orbitalPeriod_fromRad(r, muPlanet = 3.986e14):\n\tt = 2*np.pi*np.sqrt(r**3/muPlanet)\n\treturn t", "def holding_period_map(dbal):\n year = em.aggregate_returns(dbal.pct_change(), 'yearly')\n year_start = 0\n\n table = \"<table class='table table-hover table-condensed table-striped'>\"\n table += \"<tr><th>Years</th>\"\n\n for i in range(len(year)):\n table += \"<th>{}</th>\".format(i+1)\n table += \"</tr>\"\n\n for the_year, _ in year.items(): # Iterates years\n table += f\"<tr><th>{the_year}</th>\" # New table row\n\n for years_held in range(1, len(year)+1): # Iterates years held\n if years_held <= len(year.iloc[year_start:year_start + years_held]):\n ret = em.annual_return(year.iloc[year_start:year_start + years_held], 'yearly')\n table += \"<td>{:.0f}</td>\".format(ret * 100)\n table += \"</tr>\"\n year_start +=1\n display(HTML(table))", "def calculate(self):\n self._emi_months = self._period * 12\n self._total_interest = math.ceil(self._loan_amount * self._period * self._rate / 100)\n self._total_amount_pi = float(self._loan_amount + self._total_interest)\n self._emi_amount = math.ceil(self._total_amount_pi / self._emi_months)\n return self", "def get_period(self):\n # res\n if self._cacheExpiration <= YAPI.GetTickCount():\n if self.load(YAPI._yapiContext.GetCacheValidity()) != YAPI.SUCCESS:\n return YPwmOutput.PERIOD_INVALID\n res = self._period\n return res", "def imposto_de_renda(salario_anual):\n saldo = salario_anual\n aliquotas_faixas = {0.275: 55_976.16, 0.225: 45_012.60, 0.15: 33_919.80, 0.075: 22_847.76, 0: 0}\n ir = 0\n desconto_simplificado = 0.25 # 25% de desconto simplificado\n for aliquota, faixa in aliquotas_faixas.items():\n delta = max(saldo - faixa, 0)\n ir += delta * aliquota * (1 - desconto_simplificado)\n saldo = min(saldo, faixa)\n return ir", "def fft_find_period(data):\n fourier = np.fft.fft(data) # 傅里叶变换\n modulus = np.abs(fourier) # 振幅?自相关?\n max_ind = np.argmax(modulus[1:]) + 1\n n = len(data)\n timestep = 1 # 最小分辨率\n freq_ary = np.fft.fftfreq(n, timestep) # 频率\n period = abs(np.round(1 / freq_ary[max_ind], 0)) # 周期=1/频率\n return period", "def solar_ppa():\n per_kwh = 0.196 # [$/kWh]\n\n return per_kwh", "def periods(self) -> localedata.LocaleDataDict:\n try:\n return self._data['day_periods']['stand-alone']['wide']\n except KeyError:\n return localedata.LocaleDataDict({}) # pragma: no cover", "def execQ9():\n frame = pan.DataFrame(data, columns=['Product', 'Price', 'Period'])\n banana = frame[(dFrame.Series_title_1 == \"Bananas, 1kg\") & (dFrame.Period >= 2012.01) & (dFrame.Period < 2013.01)]\n average = banana['Price'].mean()\n return average", "def period(self) -> str:\n\t\t# pylint: disable=unsubscriptable-object\n\t\treturn 
self.value[0]", "def period_chart(self,req,period,start,end,prior):\n # fetch the raw data\n period,start,end,prior=self.get_chart_period(req)\n todate='' if prior else 'to date'\n year=int(str(period)[:4])\n now=int(DATE())\n if req.alltime and (period in (now//10000,now//100)):\n req.title=f\"{req.alltime} {req._pl_chartkind}\"\n elif period>9999:\n date=DATE(period*100+1)\n req.title=f\"{req.alltime} {req._pl_chartkind} for {date.datetime.strftime('%B')} {year} {todate}\"\n else:\n req.title=f\"{req.alltime} {req._pl_chartkind} for {year} {todate}\"\n raw=self.list(asObjects=False,sql=self.sql)\n # process the raw data, so it is ready for the template\n req.data=[]\n for i in raw:\n try:\n ob=self.get(i[\"page\"])\n ob.plays=i[\"sum(times)\"] # monthly score is stored temporarily as self.plays\n req.data.append(ob)\n # is this the currently playing/paused track?\n if self.player.list and (ob.uid == self.transport.uid):\n req._pl_index=ob.uid # the display will use this to hilite the track \n except: # we have a deleted item - ignore it\n pass\n# for i in req.data:\n# print(i.uid, i.name, i.times)\n # set more constants for the template to use\n req.period=period\n req._pl_prevperiod=self.prevperiod(period)\n if prior:\n req._pl_nextperiod=self.nextperiod(period) \n req._pl_len=len(req.data)\n req._pl_start=0\n # and return the template\n return self.charts(req)", "def get_sma(self,period):\n #df=pandas.DataFrame()\n sma=self.close.rolling(period).mean()\n return sma", "def risk_metric_period(cls,\n start_session,\n end_session,\n algorithm_returns,\n benchmark_returns,\n algorithm_leverages):\n\n algorithm_returns = algorithm_returns[\n (algorithm_returns.index >= start_session) &\n (algorithm_returns.index <= end_session)\n ]\n\n # Benchmark needs to be masked to the same dates as the algo returns\n benchmark_returns = benchmark_returns[\n (benchmark_returns.index >= start_session) &\n (benchmark_returns.index <= algorithm_returns.index[-1])\n ]\n\n benchmark_period_returns = ep.cum_returns(benchmark_returns).iloc[-1]\n algorithm_period_returns = ep.cum_returns(algorithm_returns).iloc[-1]\n\n alpha, beta = ep.alpha_beta_aligned(\n algorithm_returns.values,\n benchmark_returns.values,\n )\n benchmark_volatility = ep.annual_volatility(benchmark_returns)\n\n sharpe = ep.sharpe_ratio(algorithm_returns)\n\n # The consumer currently expects a 0.0 value for sharpe in period,\n # this differs from cumulative which was np.nan.\n # When factoring out the sharpe_ratio, the different return types\n # were collapsed into `np.nan`.\n # TODO: Either fix consumer to accept `np.nan` or make the\n # `sharpe_ratio` return type configurable.\n # In the meantime, convert nan values to 0.0\n if pd.isnull(sharpe):\n sharpe = 0.0\n\n sortino = ep.sortino_ratio(\n algorithm_returns.values,\n _downside_risk=ep.downside_risk(algorithm_returns.values),\n )\n\n rval = {\n 'algorithm_period_return': algorithm_period_returns,\n 'benchmark_period_return': benchmark_period_returns,\n 'treasury_period_return': 0,\n 'excess_return': algorithm_period_returns,\n 'alpha': alpha,\n 'beta': beta,\n 'sharpe': sharpe,\n 'sortino': sortino,\n 'period_label': end_session.strftime(\"%Y-%m\"),\n 'trading_days': len(benchmark_returns),\n 'algo_volatility': ep.annual_volatility(algorithm_returns),\n 'benchmark_volatility': benchmark_volatility,\n 'max_drawdown': ep.max_drawdown(algorithm_returns.values),\n 'max_leverage': algorithm_leverages.max(),\n }\n\n # check if a field in rval is nan or inf, and replace it with None\n # except 
period_label which is always a str\n return {\n k: (\n None\n if k != 'period_label' and not np.isfinite(v) else\n v\n )\n for k, v in iteritems(rval)\n }", "def Aperiodic(self):\n return self._with_axiom('Aperiodic')", "def tau_ruptura( largo_falla, beta = 2500 ):\n\n # se chequean unidades de medida de beta, tiene que estar en metros/s, no km/s\n if beta < 1000:\n beta = beta * 1000\n else: \n beta = beta\n\n tau = largo_falla/( 0.8 * beta )\n\n return tau", "def reverts_per_yr(self):\n\n text = self.text()\n\n lyr, lmonth, lday = self.last_date_on_pg(text)\n\n now = str(datetime.datetime.now())\n nyr, nmonth, nday = int(now[0:4]), int(now[5:7]), int(now[8:10])\n\n total_days = 365*(nyr-lyr) + 30.5*(nmonth-lmonth) + (nday-lday)\n\n num_reverts = len(re.findall(\"Reverted\",text))\n return 365*(num_reverts/total_days)", "def rythm_hist(duration_ohe):\n assert duration_ohe.ndim == 2\n return duration_ohe.sum(axis=0) / duration_ohe.sum()", "def time_period(s,h=30):\n\n t = 0\n\n old_z, pass_1 = 0, None\n\n while(True):\n k1 = h*sdot(s)\n k2 = h*sdot(s+k1/2)\n k3 = h*sdot(s+k2/2)\n k4 = h*sdot(s+k3)\n\n s = s+(k1+2*k2+2*k3+k4)/6\n t = t+h\n\n if (s[2]>=0 and old_z<0):\n dt = -s[2]/s[5]\n t2 = t+dt\n\n if pass_1 is None:\n pass_1 = t2\n else:\n return t2-pass_1\n\n old_z = s[2]", "def modulation_lifetime(r, freq=1):\n return np.sqrt(1 / np.abs(r) ** 2 - 1) / (2 * np.pi * freq)", "def phasor_from_lifetime(tau, freq=1):\n return 1 / (1 - 1j * 2 * np.pi * freq * tau)", "def _unit_yr(self):\n return ((self.time_base * 60.0) * 24.0) * 365.0", "def calcul_next(self):\n if not self.valide: # si c'est plus en fonction pas besoin de calcul complemnetaire\n return None\n initial = self.date\n if self.periodicite == 'u':\n return None\n finale = None\n if self.periodicite == 'j':\n finale = initial + datetime.timedelta(days=self.intervalle)\n if self.periodicite == 's':\n finale = initial + datetime.timedelta(weeks=self.intervalle)\n if self.periodicite == 'm':\n finale = initial + relativedelta(months=self.intervalle)\n if self.periodicite == 'a':\n finale = initial + relativedelta(years=self.intervalle)\n # on verifie que la date limite n'est pas dépasséee\n if self.date_limite is not None and finale > self.date_limite:\n finale = None\n return finale", "def E_Dynamic_MavkoEtAl2009(rhob,DTS,PR):\n E = (2*(rhob*1000)*((304800/DTS)**2)*(1+PR))/1000000\n return E", "def years_to_pay(self) -> float:\n return round(self.term / self.term_multiplier * self.n_periods / 12, 1)", "def calc_peakt(self, trial_dur):\n if trial_dur <= 11.0:\n peakt = 0.5375*trial_dur + 6.09625\n else:\n peakt = 11.75\n return peakt", "def ra_dec_calculate(self) -> dict:\n for sec in range(self.delta_time):\n if 0 < self.ra_start + self.one_sec_walk_ra < 360 * 3600:\n self.ra = self.ra_start + self.one_sec_walk_ra\n self.ra_start = self.ra\n else:\n self.ra = self.ra_start + self.one_sec_walk_ra - 360 * 3600\n self.ra_start = self.ra\n if self.ra_dec_min < self.ra < self.ra_dec_max:\n self.dec = self.dec_start - self.one_sec_walk_dec\n self.dec_start = self.dec\n else:\n self.dec = self.dec_start + self.one_sec_walk_dec\n self.dec_start = self.dec\n\n ra_res = f'{int(self.ra // (3600 * 15))}:{int((self.ra % 3600) // 60)}:' \\\n f'{round(float((self.ra % 3600) % 60), 1)}'\n dec_res = f'{int(self.dec // 3600)}:{int((self.dec % 3600) // 60)}:' \\\n f'{round(float((self.dec % 3600) % 60), 1)}'\n moon = {\n 'ra': ra_res,\n 'dec': dec_res\n }\n return moon", "def calculer_moment_quadratique(longueur_base, hauteur_base_bras):\n return 
longueur_base * pow(hauteur_base_bras, 3) / 12", "def PyRana_WE(data_path, output_path, date_specs):\n\n (start_month, start_year, end_month, end_year) = date_specs\n months_in_series = ((end_year - start_year) * 12) + (end_month - start_month)\n\n # start and enddate as datetimes for datetime functionality\n start_date = datetime(start_year, start_month, 1).date()\n end_date = datetime(end_year, end_month, 1).date()\n\n # at each timestep, keep track of how negative the depletion has gotten\n depletion_ledger = np.zeros((2525, 2272), dtype=float)\n # initialize depletion counter at zero.\n depletion_counter = np.zeros((2525, 2272), dtype=float)\n # keep track of the maximum depletion map\n max_depletion = np.zeros((2525, 2272), dtype=float)\n # to compare with SSEB\n total_eta = np.zeros((2525, 2272), dtype=float)\n\n for i in range(months_in_series + 1):\n\n # count up from the start date by months...\n date = start_date + relativedelta(months=+i)\n\n precip = os.path.join(data_path, \"tot_precip_{}_{}.tif\".format(date.month, date.year))\n eta = os.path.join(data_path, \"tot_eta_{}_{}.tif\".format(date.month, date.year))\n\n # array, transform, dimensions, projection, data type\n precip_arr, transform, dim, proj, dt = raster_extract(precip)\n eta_arr, transform, dim, proj, dt = raster_extract(eta)\n\n total_eta += eta_arr\n\n # this month's change in depletion\n depletion_delta = depletion_calc(eta_arr, precip_arr)\n\n # add to the running depletion tally\n print depletion_delta.shape\n print depletion_counter.shape\n depletion_counter += depletion_delta\n depletion_ledger += depletion_delta\n\n # for any values that become negative, make them zero. Assume runoff...Wang-Erlandsson (2016)\n # todo - uncomment to ONLY allow positive depletions\n depletion_counter[depletion_counter < 0.0] = 0.0\n\n # newmax_bool = [depletion_counter > max_depletion]\n # newmax = depletion_counter[newmax_bool == True]\n newmax = np.maximum(depletion_counter, max_depletion)\n\n max_depletion = newmax\n\n # for each monthly timestep, take the cumulative depletion condition and output it as a raster\n depletion_name = \"pyrana_cumulative_depletion_{}_{}.tif\".format(date.year, date.month)\n write_raster(depletion_counter, transform, output_path, depletion_name, dim, proj, dt)\n\n # output the maximum depletion\n max_depletion_name = 'pyrana_max_depletion_{}_{}.tif'.format(start_date.year, end_date.year)\n write_raster(max_depletion, transform, output_path, max_depletion_name, dim, proj, dt)\n\n # output total SSEBop (to test wheter it looks like the netcdf file)\n total_eta_name = \"total_eta_{}_{}.tif\".format(start_date.year, end_date.year)\n write_raster(total_eta, transform, output_path, total_eta_name, dim, proj, dt)", "def angular1(brdf_settings):\n # const\n scaleconst = 2*np.pi/366\n\n locals().update(brdf_settings)\n\n def scale(x, a=5, b=10, xmin=-1, xmax=1):\n \"\"\"\n rescale the sin\n a new min\n b = new max\n xmin = min of x\n xmax = max of x\n \"\"\"\n return (b - a)*(x - xmin)/(xmax - xmin) + a\n\n t = np.linspace(0, 2*np.pi, 366)\n\n\n noise = np.random.normal(0, 2*np.pi/100.0, size=366)\n\n szaMAX = 60\n szaMIN = 10\n sza_off = 0.5*np.pi # in pi\n\n sza_t = np.sin(noise + t + sza_off)\n SZA = scale(sza_t, a=szaMIN, b=szaMAX)\n\n\n # noisy it a bit?\n\n \"\"\"\n vza cycle\n \"\"\"\n vzaMAX = 45\n vzaMIN = 0\n vza_cycle = 6 # in days\n\n vza_t = np.sin(noise + t/(vza_cycle/366.0))\n VZA = scale(vza_t, a=vzaMIN, b=vzaMAX)\n\n \"\"\"\n raa cycle\n \"\"\"\n raaMAX = 360\n raaMIN = 0\n raa_cycle = 
32 # in days\n\n raa_t = np.sin(t/(raa_cycle/366.0))\n RAA = scale(noise + vza_t, a=raaMAX, b=raaMIN)\n\n\n \"\"\"\n only need to return kernels really\n \"\"\"\n kerns = Kernels(VZA, SZA, RAA,\n LiType='Sparse', doIntegrals=False,\n normalise=True, RecipFlag=True, RossHS=False, MODISSPARSE=True,\n RossType='Thick',nbar=0.0)\n return kerns, VZA, SZA, RAA", "def api_asset_calculate_revenue():\n periods = request.args.getlist(\"period\")\n\n daily_response = requests.get(CBR_DAILY_URL)\n key_indicators_response = requests.get(CBR_INDICATORS_URL)\n currency_rates = parse_cbr_currency_base_daily(daily_response.text)\n currency_rates.update(parse_cbr_key_indicators(key_indicators_response.text))\n\n result = {}\n for period in periods:\n result[period] = app.bank.calculate_revenue(int(period), currency_rates)\n return result, 200", "def renewal_period(self) -> Optional[float]:\n return pulumi.get(self, \"renewal_period\")" ]
[ "0.68010175", "0.60649586", "0.59108144", "0.58845204", "0.58199346", "0.5665051", "0.56304914", "0.562789", "0.5617035", "0.55706686", "0.5552009", "0.5500564", "0.5400052", "0.53994274", "0.53955257", "0.5372322", "0.5367489", "0.5362566", "0.5361659", "0.5354063", "0.5348334", "0.53367764", "0.53275985", "0.5325581", "0.5285131", "0.52690095", "0.5265332", "0.52617276", "0.5260178", "0.52552384", "0.52425057", "0.5200841", "0.5198092", "0.5189105", "0.5188161", "0.51741105", "0.5173804", "0.5173804", "0.5173804", "0.5173804", "0.5173804", "0.5173804", "0.5173804", "0.5173804", "0.5173769", "0.5171194", "0.51518524", "0.5138542", "0.512783", "0.51240957", "0.51162577", "0.5115427", "0.5114606", "0.51133716", "0.51102036", "0.50933945", "0.5089979", "0.5085824", "0.5085403", "0.5070392", "0.50632775", "0.50618964", "0.50578624", "0.5054848", "0.50408524", "0.5032497", "0.50311464", "0.502848", "0.50282556", "0.50202453", "0.50196856", "0.50193435", "0.5011295", "0.5007872", "0.5002789", "0.49961695", "0.49957314", "0.49706528", "0.49660173", "0.4965207", "0.49635446", "0.49593842", "0.49557766", "0.49535602", "0.4952825", "0.4951067", "0.49438426", "0.49390057", "0.49363685", "0.49357682", "0.49338633", "0.49325877", "0.4925446", "0.49163607", "0.49120757", "0.49027222", "0.49024174", "0.4895066", "0.48805484", "0.4872949", "0.48713535" ]
0.0
-1
Calculates the solar azimuth angle for a specific date/time.
def solar_azimuth(self, dateandtime=None):
    if self.astral is None:
        self.astral = Astral()

    if dateandtime is None:
        dateandtime = datetime.datetime.now(tz=self.tz)

    return self.astral.solar_azimuth(dateandtime, self.latitude, self.longitude)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solar_azimuth(self, dateandtime, latitude, longitude):\n \n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n zone = -dateandtime.utcoffset().seconds / 3600.0\n utc_datetime = dateandtime.astimezone(pytz.utc)\n timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + (utc_datetime.second / 3600)\n\n JD = self._julianday(dateandtime.day, dateandtime.month, dateandtime.year)\n t = self._jday_to_jcentury(JD + timenow / 24.0)\n theta = self._sun_declination(t)\n Etime = self._eq_of_time(t)\n \n eqtime = Etime\n solarDec = theta # in degrees\n \n solarTimeFix = eqtime - (4.0 * longitude) + (60 * zone)\n trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + dateandtime.second / 60.0 + solarTimeFix\n # in minutes\n \n while trueSolarTime > 1440:\n trueSolarTime = trueSolarTime - 1440\n \n hourangle = trueSolarTime / 4.0 - 180.0\n # Thanks to Louis Schwarzmayr for the next line:\n if hourangle < -180:\n hourangle = hourangle + 360.0\n \n harad = radians(hourangle)\n \n csz = sin(radians(latitude)) * sin(radians(solarDec)) + \\\n cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad)\n \n if csz > 1.0:\n csz = 1.0\n elif csz < -1.0:\n csz = -1.0\n \n zenith = degrees(acos(csz))\n \n azDenom = (cos(radians(latitude)) * sin(radians(zenith)))\n \n if (abs(azDenom) > 0.001):\n azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))) / azDenom\n \n if abs(azRad) > 1.0:\n if azRad < 0:\n azRad = -1.0\n else:\n azRad = 1.0\n \n azimuth = 180.0 - degrees(acos(azRad))\n \n if hourangle > 0.0:\n azimuth = -azimuth\n else:\n if latitude > 0.0:\n azimuth = 180.0\n else:\n azimuth = 0#\n \n if azimuth < 0.0:\n azimuth = azimuth + 360.0\n \n return azimuth", "def getAzimuthAngle(self):\n return self._azimuth", "def azimuth(vv, v0, v1):\n with np.errstate(divide='ignore', invalid='ignore'):\n n0 = np.cross(v0, v1)\n n0 /= np.dual.norm(n0, axis=-1)[..., np.newaxis]\n nn = np.cross(v0, vv)\n nn /= np.dual.norm(nn, axis=-1)[..., np.newaxis]\n\n azi = np.arccos(np.sum(nn * n0, -1))\n if len(np.shape(azi)) > 0:\n azi[np.dot(vv, n0) < 0] *= -1\n # arbitrary angle where vv is (anti)parallel to v0\n azi[np.isnan(azi)] = 0\n elif np.isnan(azi):\n return 0\n elif np.dot(vv, v0) < 1 and azi > 0:\n azi *= -1\n\n return azi", "def solar_elevation(self, dateandtime, latitude, longitude):\n \n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n\n zone = -dateandtime.utcoffset().seconds / 3600.0\n utc_datetime = dateandtime.astimezone(pytz.utc)\n timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + (utc_datetime.second / 3600)\n \n JD = self._julianday(dateandtime.day, dateandtime.month, dateandtime.year)\n t = self._jday_to_jcentury(JD + timenow / 24.0)\n theta = self._sun_declination(t)\n Etime = self._eq_of_time(t)\n \n eqtime = Etime\n solarDec = theta # in degrees\n \n solarTimeFix = eqtime - (4.0 * longitude) + (60 * zone)\n trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + dateandtime.second / 60.0 + solarTimeFix\n # in minutes\n \n while trueSolarTime > 1440:\n trueSolarTime = trueSolarTime - 1440\n \n hourangle = trueSolarTime / 4.0 - 180.0\n # Thanks to Louis Schwarzmayr for the next line:\n if hourangle < -180:\n hourangle = hourangle + 360.0\n \n harad = radians(hourangle)\n \n csz = sin(radians(latitude)) * sin(radians(solarDec)) + \\\n cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad)\n \n if csz > 1.0:\n csz = 1.0\n elif csz < -1.0:\n csz = -1.0\n \n zenith = 
degrees(acos(csz))\n \n azDenom = (cos(radians(latitude)) * sin(radians(zenith)))\n \n if (abs(azDenom) > 0.001):\n azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))) / azDenom\n \n if abs(azRad) > 1.0:\n if azRad < 0:\n azRad = -1.0\n else:\n azRad = 1.0\n \n azimuth = 180.0 - degrees(acos(azRad))\n \n if hourangle > 0.0:\n azimuth = -azimuth\n else:\n if latitude > 0.0:\n azimuth = 180.0\n else:\n azimuth = 0\n \n if azimuth < 0.0:\n azimuth = azimuth + 360.0\n \n exoatmElevation = 90.0 - zenith\n\n if exoatmElevation > 85.0:\n refractionCorrection = 0.0\n else:\n te = tan(radians(exoatmElevation))\n if exoatmElevation > 5.0:\n refractionCorrection = 58.1 / te - 0.07 / (te * te * te) + 0.000086 / (te * te * te * te * te)\n elif exoatmElevation > -0.575:\n step1 = (-12.79 + exoatmElevation * 0.711)\n step2 = (103.4 + exoatmElevation * (step1))\n step3 = (-518.2 + exoatmElevation * (step2))\n refractionCorrection = 1735.0 + exoatmElevation * (step3)\n else:\n refractionCorrection = -20.774 / te\n \n refractionCorrection = refractionCorrection / 3600.0\n \n solarzen = zenith - refractionCorrection\n \n solarelevation = 90.0 - solarzen\n \n return solarelevation", "def fun_azimuth(self):\n\n energy_kev = self.energy_kev.get()\n hkl = self.hkl_magnetic.get()\n hkl = hkl.replace(',', ' ') # remove commas\n hkl = hkl.replace('(', '').replace(')', '') # remove brackets\n hkl = hkl.replace('[', '').replace(']', '') # remove brackets\n hkl = np.fromstring(hkl, sep=' ')\n\n azi = self.azim_zero.get()\n azi = azi.replace(',', ' ') # remove commas\n azi = azi.replace('(', '').replace(')', '') # remove brackets\n azi = azi.replace('[', '').replace(']', '') # remove brackets\n azi = np.fromstring(azi, sep=' ')\n\n pol = self.polval.get()\n if pol == u'\\u03c3-\\u03c3':\n pol = 's-s'\n elif pol == u'\\u03c3-\\u03c0':\n pol = 's-p'\n elif pol == u'\\u03c0-\\u03c3':\n pol = 'p-s'\n else:\n pol = 'p-p'\n\n F0 = self.resF0.get()\n F1 = self.resF1.get()\n F2 = self.resF2.get()\n\n isres = self.isres.get()\n if isres:\n # Resonant scattering\n self.xtl.Plot.simulate_azimuth_resonant(\n hkl,\n energy_kev=energy_kev,\n azim_zero=azi,\n polarisation=pol,\n F0=F0, F1=F1, F2=F2)\n plt.show()\n else:\n # Non-Resonant scattering\n self.xtl.Plot.simulate_azimuth_nonresonant(\n hkl,\n energy_kev=energy_kev,\n azim_zero=azi,\n polarisation=pol)\n plt.show()", "def azimuth(self, other, projected=True):\n x0, y0 = self.x, self.y\n if self.crs != other.crs:\n x1, y1 = other.get_vertex(self.crs)[:2]\n else:\n x1, y1 = other.x, other.y\n\n if (x0, y0) == (x1, y1):\n az = np.nan\n elif projected and not isinstance(self.crs, GeographicalCRS):\n az = 90.0 - math.atan2(y1-y0, x1-x0)*180.0/math.pi\n az = (az+180) % 360 - 180\n else:\n lon0, lat0 = self.crs.project(x0, y0, inverse=True)\n lon1, lat1 = self.crs.project(x1, y1, inverse=True)\n az, _, _ = self.crs.inverse(lon0, lat0, lon1, lat1)\n return az", "def azimuth(poly):\n num = len(poly) - 1\n vec = unit_normal(poly[0], poly[1], poly[num])\n vec_azi = np.array([vec[0], vec[1], 0])\n vec_n = np.array([0, 1, 0])\n # update by Santosh\n # angle2vecs gives the smallest angle between the vectors\n # so for a west wall angle2vecs will give 90\n # the following 'if' statement will make sure 270 is returned\n x_vector = vec_azi[0]\n if x_vector < 0:\n return 360 - angle2vecs(vec_azi, vec_n)\n else:\n return angle2vecs(vec_azi, vec_n)", "def azimuth(self, right: GeoSpatialValue) -> ir.FloatingValue:\n return ops.GeoAzimuth(self, right).to_expr()", 
"def horiz_angle(time, data):\n\n # TODO What should 0deg be? Set it to inline w/ target? facing target?\n\n # direction of the sun. measured in degrees counted clockwise from north.\n azimuth = data[time]['azimuth']\n\n h_angle = (azimuth / 2 - 90)\n\n # returns answer between -180 and 180 degrees\n return round(((h_angle + 180) % 360) - 180, 4)", "def set_azimuth(self):\n self.azimuth = self.Calculations.convert_to_azimuth( self.declination, self.right_ascension, self.Latitude, self.LHA)\n if self.azimuth < 0:\n self.azimuth = self.azimuth + 360.0\n return self.azimuth\n else:\n pass\n return self.azimuth\n print('azimuth set to', self.azimuth)", "def get_azimuth(self, degrees=True):\n if degrees:\n return math.degrees(self.current_location.az)\n else:\n return self.current_location.az", "def IAngle(a, b, t):\n \n # http://www.engineersedge.com/material_science/moment-inertia-gyration-7.htm\n d = b - t \n y = b - (t*(2*d + a) + d**2)/(2*(d+a))\n I = 1/3 * (t*y**3 + a*(b-y)**3 - (a-t)*(b-y-t)**3)\n return I", "def calculate_orbiting_angle(orbiting_center, raft):\n\n # note the negative sign before the first component, the y component\n # it is to make the orbiting angle in a right-handed coordiante.\n angle = np.arctan2(-(raft[1] - orbiting_center[1]), (raft[0] - orbiting_center[0])) * 180 / np.pi\n\n return angle", "def calc_angle_of_incidence(g, lat, ha, tilt, teta_z):\n # surface normal vector\n n_E = sin(tilt)*sin(teta_z)\n n_N = sin(tilt)*cos(teta_z)\n n_Z = cos(tilt)\n # solar vector\n s_E = -cos(g)*sin(ha)\n s_N = sin(g)*cos(lat) - cos(g)*sin(lat)*cos(ha)\n s_Z = cos(g)*cos(lat)*cos(ha) + sin(g)*sin(lat)\n\n # angle of incidence\n teta_B = acos(n_E*s_E + n_N*s_N + n_Z*s_Z)\n return teta_B", "def azimuth(source):\n srcAzEl = subarrayControl.s.azel(source, 0.0);\n return srcAzEl[0];", "def rotated_equatorial_hour_angle(hour, location):\n equatorial_angle = equatorial_hour_angle(hour, location)\n equatorial_angle_from_solar_noon = equatorial_angle - np.pi\n # Angle currently is angle referenced from solar noon, positive (pm) towards the east.\n # Change to mathematical angle, anticlockwise from 0 in the east.\n return np.pi / 2 - equatorial_angle_from_solar_noon", "def azimuth_update(self):\n self.current_azimuth = self.azimuth_encoder.get_degrees()\n azimuth_error = self.azimuth - float(self.current_azimuth)\n # print('goal azimuth', self.azimuth, 'current azimuth', self.azimuth_encoder.get_degrees(), 'difference in azimuth', azimuth_error)\n if azimuth_error >0:\n # print('positive azimuth')\n self.azimuth_motor.set_direction(1)\n elif azimuth_error > 0:\n # print('negative azimuth')\n self.azimuth_motor.set_direction(0)\n azimuth_error = abs(azimuth_error)\n self.azimuth_error = azimuth_error\n if azimuth_error >= 0:\n self.azimuth_motor.set_speed(0)\n if azimuth_error >= 35:\n self.azimuth_motor.set_speed(1)\n if azimuth_error >= 40:\n self.azimuth_motor.set_speed(2)\n if azimuth_error >= 80:\n self.azimuth_motor.set_speed(3)\n if azimuth_error >= 160:\n self.azimuth_motor.set_speed(4)\n if azimuth_error >= 280:\n self.azimuth_motor.set_speed(5)\n self.azimuth_error = azimuth_error\n print('debug_azimuth', self.current_azimuth, self.azimuth_error, self.azimuth_motor.speed)\n return self.azimuth_error", "def get_azimuth(self):\n self.degrees = self.azimuth_encoder.get_degrees()\n self.tele_azimuth = self.Calculations.convert_degrees(self.degrees)\n return self.tele_azimuth", "def azimuth_speed(self, degrees = True):\n return self.angularSpeed(self.future_location.az, 
self.old_location.az)", "def angle(self, angle: int, time: int = 0, /) -> None:", "def get_azimuth(self, p, az):\n az.value = self._get_azimuth(p, az.value)", "def setAzimuthAngle(self, angle):\n angle = int(round(angle))\n if angle != self._azimuth:\n self._azimuth = angle\n self._updateLight()\n self.sigAzimuthAngleChanged.emit()", "def _angle_of_attack(self, rel_wind, blade_chord):\n # blade_chord_vector - (relative_wind + pi)\n # rel_oposite = rel_wind.rotated(math.pi)\n aoa_rad = rel_wind.theta - blade_chord.theta\n aoa_rad = vec.normalize_angle(aoa_rad)\n aoa_360 = aoa_rad * 360 / math.tau\n return aoa_rad, aoa_360", "def polar_angle(self, p0, p1=None):\n if p1 == None:\n p1 = anchor\n y_span = p0[1] - p1[1]\n x_span = p0[0] - p1[0]\n return atan2(y_span, x_span)", "def calc_surface_azimuth(xdir, ydir, B):\n B = radians(B)\n teta_z = degrees(asin(xdir / sin(B)))\n # set the surface azimuth with on the sing convention (E,N)=(+,+)\n if xdir < 0:\n if ydir <0:\n surface_azimuth = 180 + teta_z # (xdir,ydir) = (-,-)\n else: surface_azimuth = 360 + teta_z # (xdir,ydir) = (-,+)\n elif ydir < 0:\n surface_azimuth = 180 + teta_z # (xdir,ydir) = (+,-)\n else: surface_azimuth = teta_z # (xdir,ydir) = (+,+)\n return surface_azimuth # degree", "def platform_auto_calibrate_azimuth_servo(self):\n self._platform_auto_calibrate_check()\n self.platform.auto_calibrate_azimuth_servo()", "def do_azangle(self):\n angle_1, angle_2 = cbp.potentiometer.main()\n current_angle = angle_2\n #print(current_angle)\n self.azangle = current_angle\n return current_angle", "def imu_get_azimuth(self):\n return self.imu.get_azimuth()", "def create_azimuthal_polarization(dim, rotation):\n theta_array = np.zeros((dim, dim))\n\n for i in range(np.size(theta_array, 0)):\n for j in range(np.size(theta_array, 1)):\n x = -dim / 2 + i\n y = -dim / 2 + j\n # perform roation\n th = math.pi*rotation/180.0\n x = np.cos(th)*x - np.sin(th)*y\n y = np.sin(th)*x + np.cos(th)*y\n\n rot = math.atan2(x, y) + math.pi/2\n # factor = (rot % (2*math.pi))\n theta_array[i][j] = (rot % (2 * math.pi))\n return theta_array", "def leaf_azimuth(size=1, phyllotactic_angle=180, phyllotactic_deviation=15, plant_orientation=0, spiral=False):\n if size == 1:\n return plant_orientation\n if spiral:\n main = numpy.arange(0, size) * phyllotactic_angle\n else:\n it = cycle((0, phyllotactic_angle))\n main = numpy.array([it.next() for i in xrange(size)])\n azim = plant_orientation + main + (numpy.random.random(size) - 0.5) * 2 * phyllotactic_deviation\n azim = azim % 360\n return numpy.where(azim <= 180, azim, azim - 360)", "def parangle(ra, dec, utdate, uttime, site, verbose=False):\n # degrees per radian\n degrad = 180. 
* u.deg /(np.pi * u.rad)\n\n l_ra = ra.strip()\n l_dec = dec.strip()\n if '-' not in l_dec and l_dec[0] != '+':\n l_dec = '+' + l_dec\n\n # Coordinate object\n coord = SkyCoord(l_ra,l_dec,frame='icrs',unit = (u.hr, u.deg))\n\n # Observation time\n obs_time = Time(utdate + 'T' + uttime, format='isot', scale='utc')\n\n # Location\n location = EarthLocation.of_site(site)\n if verbose:\n print('Site: ', location)\n\n altaz = coord.transform_to(AltAz(obstime=obs_time, location=location))\n if verbose:\n print('Alt/Az: ', altaz.alt.deg, altaz.az.deg)\n\n # Hour angle\n ha = np.arcsin(-np.sin(altaz.az) * np.cos(altaz.alt) / np.cos(coord.dec))\n if verbose:\n print('HA: ', ha)\n\n # Parallactic angle\n parang = -degrad * np.arctan2(-np.sin(ha),\n np.cos(coord.dec) * np.tan(location.lat) - np.sin(coord.dec) * np.cos(ha))\n\n return parang", "def angle(self):\n return arccos(dot((self.a - self.o) / self.r, (self.b - self.o) / self.r))", "def _calc_solar_from_clouds_and_angle(hr, ds_path):\n # Solar radiation [W/m^2] incident on top of atmosphere\n Q_o = 1368.0\n # Cloud model based on Dobson and Smith, table 5\n # SEA -- May 2010 : redid the cloud parametrization based on UBC\n # Solar data (/ocean/shared/SoG/met/solar/) fitting Q to cos_Z\n # (not Q/cos_Z as Kate did). Allen and Wolfe (2013). (0) no\n # clouds, (1) 1/10 cloud fraction (10) 100% clouds. Four sig\n # figs are what comes out of matlab but standard deviations are\n # 40W/m2 for low cloud fraction to 120 W/m2 for 6-9 cloud\n # fraction to 85 W/m2 for completely cloudy.\n cloud_consts = SimpleNamespace(\n A=numpy.array(\n [\n 0.6337,\n 0.6149,\n 0.5861,\n 0.5512,\n 0.5002,\n 0.4649,\n 0.4225,\n 0.3669,\n 0.2468,\n 0.1981,\n 0.0841,\n ]\n ),\n B=numpy.array(\n [\n 0.1959,\n 0.2119,\n 0.2400,\n 0.2859,\n 0.3192,\n 0.3356,\n 0.3339,\n 0.3490,\n 0.4427,\n 0.3116,\n 0.2283,\n ]\n ),\n )\n # Local standard time\n ## WARNING: .to(\"PST\") may be fragile and incorrect for summer-time dates\n lst = hr.to(\"PST\")\n # day_time is in seconds, LST\n day_time = (lst - lst.floor(\"day\")).seconds\n # hour of day as degrees from noon\n hour = (day_time / 3600 - 12) * 15\n # day is year-day\n day = (lst - lst.floor(\"year\")).days\n # solar declination [radians]\n declination = (\n 23.45 * numpy.pi / 180 * numpy.sin((284 + day) / 365.25 * 2 * numpy.pi)\n )\n # Latitude of approximate centre of model domain in radians\n lat = numpy.pi * 50 / 180\n # solar elevation\n elev_sin = numpy.sin(declination) * numpy.sin(lat)\n elev_cos = numpy.cos(declination) * numpy.cos(lat)\n cos_Z = elev_sin + elev_cos * numpy.cos(numpy.pi / 180 * hour)\n # cos of -hour_angle in radians\n hour_angle = numpy.tan(lat) * numpy.tan(declination)\n # assume we are south of the Arctic Circle\n day_length = numpy.arccos(-hour_angle) / 15 * 2 * 180 / numpy.pi\n sunrise = 12 - 0.5 * day_length # hours\n sunset = 12 + 0.5 * day_length # hours\n Qso = Q_o * (1 + 0.033 * numpy.cos(day / 365.25 * 2 * numpy.pi))\n with xarray.open_dataset(ds_path) as ds:\n cf_value = ds.percentcloud * 10\n fcf = numpy.floor(cf_value).astype(int) # integer below cf value\n fcf = xarray.where(fcf == 10, 9, fcf).data\n ccf = fcf + 1 # integer above cf value\n if (sunrise > day_time / 3600) or (day_time / 3600 > sunset):\n # nighttime\n return xarray.zeros_like(ds.percentcloud)\n return (\n Qso\n * (\n cloud_consts.A[fcf] * (ccf - cf_value)\n + cloud_consts.A[ccf] * (cf_value - fcf)\n + (\n cloud_consts.B[fcf] * (ccf - cf_value)\n + cloud_consts.B[ccf] * (cf_value - fcf)\n )\n * cos_Z\n )\n * cos_Z\n )", "def 
equatorial_hour_angle(hour, location):\n equatorial_angle = (hour - location.timezone) * 2 * np.pi / 24 + (np.deg2rad(location.longitude))\n logging.getLogger(\"hour.angle.equ\").debug(\"For hour %d, equatorial angle %g\" % (hour, np.rad2deg(equatorial_angle)))\n return equatorial_angle", "def calculate_attitude_angle(self):\n return np.arctan(np.pi * (1 - self.eccentricity_ratio ** 2) / (4 * self.eccentricity_ratio))", "def azizen(self):\n # x0,y0 array pixel coordinates relative to cx,cy\n# ndy0,ndx0=img.shape\n ndy0=self.ndy0\n ndx0=self.ndx0\n x0,y0=np.meshgrid(np.linspace(0,ndx0-1,ndx0)-self.cx,np.linspace(0,ndy0-1,ndy0)-self.cy)\n r0=np.sqrt(x0**2+y0**2)/self.pr0 # fractional radial distance from 0,0\n# self.roi=np.s_[ystart:ystart+self.ny0,xstart:xstart+self.nx0]\n # why not model the zenith angle dependence with polynomial directly\n # rather than linear interpolation between roots.\n roots=np.zeros(51)\n rr=np.arange(51)/100.0\n for i,ref in enumerate(rr):\n roots[i]=np.real(np.roots([self.c3,0,self.c2,0,self.c1,-ref])[-1])\n theta0 = np.interp(r0/2,rr,roots)\n \n phi0 = np.arctan2(x0,y0) - self.rot ####phi (i.e., azimuth) is reckoned with -pi corresponding to north, increasing clockwise, NOTE: pysolar use sub-standard definition\n phi0 = phi0%(2*np.pi)\n\n #####correction for the tilt of the camera\n k=np.array((np.sin(self.azm),np.cos(self.azm),0))\n a=np.array([np.sin(theta0)*np.cos(phi0),np.sin(theta0)*np.sin(phi0),np.cos(theta0)]); \n a = np.transpose(a,[1,2,0])\n b=np.cos(self.beta)*a + np.sin(self.beta)*np.cross(k,a,axisb=2) \\\n + np.reshape(np.outer(np.dot(a,k),k),(self.ndy0,self.ndx0,3))*(1-np.cos(self.beta))\n theta0=np.arctan(np.sqrt(b[:,:,0]**2+b[:,:,1]**2)/b[:,:,2])\n phi0=np.arctan2(b[:,:,1],b[:,:,0])%(2*np.pi)\n# max_theta *= deg2rad \n# valid0 = (theta0<max_theta) & (theta0>0); \n# theta0[valid0]=np.nan;\n self.theta0,self.phi0=theta0,phi0", "def convergence_angle(self):\n return np.arctan2(self.radius, self.focal_length)", "def calcScatterAngleOld(R, PHI, THETA, sun_rotation):\n \n H_rot = atmo_utils.calcRotationMatrix(sun_rotation)\n\n X_ = R * np.sin(THETA) * np.cos(PHI)\n Y_ = R * np.sin(THETA) * np.sin(PHI)\n Z_ = R * np.cos(THETA)\n \n XYZ_dst = np.vstack((X_.ravel(), Y_.ravel(), Z_.ravel(), np.ones(R.size)))\n XYZ_src_ = np.dot(H_rot, XYZ_dst)\n \n Z_rotated = XYZ_src_[2, :]\n R_rotated = np.sqrt(np.sum(XYZ_src_[:3, :]**2, axis=0))\n \n angle = np.arccos(Z_rotated/(R_rotated+amitibo.eps(R_rotated)))\n \n return angle", "def set_azimuth(self, phi: \"float\") -> \"void\":\n return _beamforming_swig.phasedarray_sptr_set_azimuth(self, phi)", "def atan(self, x):\n return self.arctan(x)", "def get_rotationalAngularPosition(self, t): # returns [rad]\n angle = self.theta0 + self.rotationalAngularVelocity * t # angular position [rad]\n return angle", "def arctan3(y, x):\n theta = np.arctan2(y,x)\n if type(theta) == np.ndarray:\n theta[theta < 0.0] += 2 * np.pi\n else:\n if theta < 0.0: theta += 2 * np.pi\n return theta", "def angle(z):", "def set_azimuth(self, phi: \"float\") -> \"void\":\n return _beamforming_swig.phasedarray_set_azimuth(self, phi)", "def atan (cls, x) :\n return Angle_R (math.atan (x))", "def getAltAz(arr,header,time,location):\n\tsoln = wcs.WCS(header)\n\tcoords = cartesian([arange(arr.shape[1]),arange(arr.shape[0])])\n\tworld = soln.wcs_pix2world(coords,0)\n\tradec = SkyCoord(ra=world[:,0],dec=world[:,1],frame='icrs',unit='deg')\n\taltaz = radec.transform_to(AltAz(obstime=time,location=telescope))\n\treturn 
altaz.alt.deg,altaz.az.deg,coords[:,0],coords[:,1]", "def scalar_earth_angle( lat1, lon1, lat2, lon2):\n theta1 = lat1 *dtor\n phi1 = lon1 *dtor\n theta2 = lat2 * dtor\n phi2 = lon2 * dtor\n p1 = numpy.vstack((cos(theta1)*cos(phi1),cos(theta1)*sin(phi1),sin( theta1))).T\n p2 = numpy.vstack((cos(theta2)*cos(phi2), cos( theta2)* sin( phi2), sin( theta2))).T\n dsq = ((p1-p2)**2).sum(-1)\n return numpy.arccos((2 -dsq)/2.)/dtor", "def diffraction_angle_for(self, wavelength: float = 532., theta: float = 0.):\n return np.arcsin(np.sin(-theta / 180. * np.pi)\n - self.interference * wavelength / 1000. / self.grating) * 180 / np.pi + theta", "def AngleFromSun(body, time):\n if body == Body.Earth:\n raise EarthNotAllowedError()\n sv = GeoVector(Body.Sun, time, True)\n bv = GeoVector(body, time, True)\n return AngleBetween(sv, bv)", "def parang (hourangle, declination, latitude):\n\n return -np.arctan2 (-np.sin (hourangle),\n np.cos (declination) * np.tan (latitude)\n - np.sin (declination) * np.cos (hourangle))", "def altAz2RADec(azim, elev, jd, lat, lon):\n\n azim = np.radians(azim)\n elev = np.radians(elev)\n lat = np.radians(lat)\n lon = np.radians(lon)\n \n # Calculate hour angle\n ha = np.arctan2(-np.sin(azim), np.tan(elev)*np.cos(lat) - np.cos(azim)*np.sin(lat))\n\n # Calculate Local Sidereal Time\n lst = np.radians(JD2LST(jd, np.degrees(lon))[0])\n \n # Calculate right ascension\n ra = (lst - ha)%(2*np.pi)\n\n # Calculate declination\n dec = np.arcsin(np.sin(lat)*np.sin(elev) + np.cos(lat)*np.cos(elev)*np.cos(azim))\n\n return np.degrees(ra), np.degrees(dec)", "def calc_optimal_angle(teta_z, latitude, transmissivity):\n if transmissivity <= 0.15:\n gKt = 0.977\n elif 0.15 < transmissivity <= 0.7:\n gKt = 1.237 - 1.361 * transmissivity\n else:\n gKt = 0.273\n Tad = 0.98 # transmittance-absorptance product of the diffuse radiation\n Tar = 0.97 # transmittance-absorptance product of the reflected radiation\n Pg = 0.2 # ground reflectance of 0.2\n l = radians(latitude)\n a = radians(teta_z)\n b = atan((cos(a) * tan(l)) * (1 / (1 + ((Tad * gKt - Tar * Pg) / (2 * (1 - gKt)))))) # eq.(11)\n return abs(b)", "def solar_elevation(self, dateandtime=None):\n\n if self.astral is None:\n self.astral = Astral()\n\n if dateandtime is None:\n dateandtime = datetime.datetime.now(tz=self.tz)\n\n return self.astral.solar_elevation(dateandtime, self.latitude, self.longitude)", "def _solar_time(date, lon, lat):\n # IDL is computing time_t as hours and fractional minutes\n time_t = ee.Date(date).get('hour').add(\n ee.Date(date).get('minute').divide(60))\n\n # This will return the hour floating point value\n # time_t = ee.Date(date).get('hour').add(ee.Date(date).getFraction('hour'))\n\n # CGM - DOY and hour could be images in order to make these expressions\n julian = _to_jd(date)\n\n # Sunrise time\n julian_ = time_t.divide(24.0).add(julian)\n j_cen = julian_.add(0.5 - 2451545.0).divide(36525.0)\n # CGM - Does the mod happen before or after the multiply\n lon_sun = j_cen.multiply(0.0003032).add(36000.76983) \\\n .multiply(j_cen).mod(360.0).add(280.46646).subtract(360)\n an_sun = j_cen.multiply(-0.0001537).add(35999.05029) \\\n .multiply(j_cen).add(357.52911)\n ecc = j_cen.multiply(0.0000001267).add(0.000042037) \\\n .multiply(j_cen).multiply(-1).add(0.016708634)\n ob_ecl = j_cen.multiply(-0.001813).add(0.00059) \\\n .multiply(j_cen).add(46.815) \\\n .multiply(j_cen).multiply(-1).add(21.448) \\\n .divide(60.0).add(26).divide(60).add(23)\n ob_corr = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).cos() \\\n 
.multiply(0.00256).add(ob_ecl)\n var_y = ob_corr.divide(2.0).multiply(deg2rad).tan().multiply(\n ob_corr.divide(2.0).multiply(deg2rad).tan())\n eq_t = (\n lon_sun.multiply(2.0).multiply(deg2rad).sin().multiply(var_y)\n .subtract(an_sun.multiply(deg2rad).sin().multiply(ecc).multiply(2.0))\n .add(an_sun.multiply(deg2rad).sin()\n .multiply(lon_sun.multiply(2.0).multiply(deg2rad).cos())\n .multiply(var_y).multiply(ecc).multiply(4.0))\n .subtract(lon_sun.multiply(4.0).multiply(deg2rad).sin()\n .multiply(var_y).multiply(var_y).multiply(0.5))\n .subtract(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(ecc).multiply(ecc).multiply(1.25))\n .multiply(4.0).multiply(rad2deg))\n sun_eq = (\n an_sun.multiply(deg2rad).sin().multiply(\n j_cen.multiply(0.000014).add(0.004817)\n .multiply(j_cen).multiply(-1).add(1.914602))\n .add(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(j_cen.multiply(-0.000101).add(0.019993)))\n .add(an_sun.multiply(3.0).multiply(deg2rad).sin().multiply(0.000289)))\n sun_true = sun_eq.add(lon_sun)\n sun_app = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).sin() \\\n .multiply(-0.00478).subtract(0.00569).add(sun_true)\n\n # CGM - Intentionally not converting back to degrees\n d = ob_corr.multiply(deg2rad).sin() \\\n .multiply(sun_app.multiply(deg2rad).sin()) \\\n .asin()\n\n # CGM - Functions below are lat/lon dependent and can be written as\n # ee.Image expressions\n # CGM - d is still in radians, not converting\n ha_t = lat.expression(\n 'acos((cos(90.833 * pi / 180) / (cos(lat) * cos(d))) - tan(lat) * tan(d))'\n ' * (180 / pi)',\n {'lat': lat, 'd': d, 'pi': math.pi})\n\n # print('\\n{:10s} {:.12f}'.format('julian_', julian_.getInfo()))\n # print('{:10s} {:.12f}'.format('time_t', time_t.getInfo()))\n # print('{:10s} {:.12f}'.format('j_cen', j_cen.getInfo()))\n # print('{:10s} {:.12f}'.format('lon_sun', lon_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('an_sun', an_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('ecc', ecc.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_ecl', ob_ecl.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_corr', ob_corr.getInfo()))\n # print('{:10s} {:.12f}'.format('var_y', var_y.getInfo()))\n # print('{:10s} {:.12f}'.format('eq_t', eq_t.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_eq', sun_eq.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_true', sun_true.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_app', sun_app.getInfo()))\n # print('{:10s} {:.12f}'.format('d', d.getInfo()))\n\n return d, eq_t, ha_t", "def era(self):\n # earth rotation angle using Universal Time\n J = self.MJD - 51544.5\n fraction = np.mod(J, self.turn)\n theta = np.mod(0.7790572732640 + 0.00273781191135448*J, self.turn)\n return self.turndeg*np.mod(theta + fraction, self.turn)", "def to_axisangle(self) -> Tuple[np.ndarray, float]:\n angle = np.arccos((self.A.trace()-1)/2)\n axis = np.zeros(3)\n if angle!=0:\n axis = np.array([self.A[2, 1]-self.A[1, 2], self.A[0, 2]-self.A[2, 0], self.A[1, 0]-self.A[0, 1]])/(2*np.sin(angle))\n return axis, angle", "def angle(self):\n return atan2(self.v.y, self.v.x)", "def rotate_orbit(self):\n try:\n ang = self.orbit_speed * self.time_scale / self.refresh_rate\n self.obj.rotate(angle=ang, axis=vector(0, 1, 0), origin=self.star.obj.pos)\n self.sum_ang += ang\n except ZeroDivisionError:\n print(\"ERROR: REFRESH_RATE is 0\")\n except (AttributeError, TypeError):\n print(\"ERROR: wrong arguments type while initializing!!\")", "def angle3pt(\n ax: float, ay: float, bx: float, by: float, cx: float, cy: 
float\n ) -> float:\n ang = math.degrees(math.atan2(cy - by, cx - bx) - math.atan2(ay - by, ax - bx))\n return ang + 360 if ang < 0 else ang", "def computeYaw(Vx, Vy):\n #print(Vx, Vy)\n if Vx > 0:\n if Vy > 0:\n angle = (math.degrees(math.atan2(Vy,Vx)))#+ how far it is from the x axis)\n #print(angle)\n return angle \n elif Vy < 0:\n angle = (math.degrees(math.atan2(Vy,Vx)) )#- how far from x axis)\n #print(angle)\n return angle\n else:\n #print(math.degrees(math.atan2(Vy,Vx)))\n return math.degrees(math.atan2(Vy,Vx))", "def test_az_za_astropy():\n\n Nside = 128\n\n altitude = 0.0\n loc = EarthLocation.from_geodetic(longitude, latitude, altitude)\n\n obs = observatory.Observatory(latitude, longitude, nside=Nside)\n\n t0 = Time(2458684.453187554, format=\"jd\")\n obs.set_fov(180)\n\n zen = AltAz(alt=Angle(\"90d\"), az=Angle(\"0d\"), obstime=t0, location=loc)\n\n zen_radec = zen.transform_to(ICRS())\n center = [zen_radec.ra.deg, zen_radec.dec.deg]\n northloc = EarthLocation.from_geodetic(lat=\"90.d\", lon=\"0d\", height=0.0)\n north_radec = AltAz(\n alt=\"90.0d\", az=\"0.0d\", obstime=t0, location=northloc\n ).transform_to(ICRS())\n yvec = np.array([north_radec.ra.deg, north_radec.dec.deg])\n za, az, inds = obs.calc_azza(center, yvec, return_inds=True)\n\n ra, dec = hp.pix2ang(Nside, inds, lonlat=True)\n\n altaz_astropy = ICRS(\n ra=Angle(ra, unit=\"deg\"), dec=Angle(dec, unit=\"deg\")\n ).transform_to(AltAz(obstime=t0, location=loc))\n\n za0 = altaz_astropy.zen.rad\n az0 = altaz_astropy.az.rad\n\n if environ.get(\"VIS\", False):\n hmap = np.zeros(12 * Nside ** 2) + hp.UNSEEN\n hmap[inds] = np.unwrap(az0 - az)\n import IPython\n\n IPython.embed()\n\n print(np.degrees(za0 - za))\n assert np.allclose(za0, za, atol=1e-4)\n assert np.allclose(\n np.unwrap(az0 - az), 0.0, atol=3e-4\n ) # About 1 arcmin precision. 
Worst is at the southern horizon.", "def calculate_angles(self, x, y):\n Oimat = inv(self.Omat)\n Mat = self.pixel_size * inv(self.Dmat) * Oimat\n polar_angles = []\n azimuthal_angles = []\n for i in range(len(x)):\n peak = Oimat * (vec(x[i], y[i]) - self.Cvec)\n v = norm(Mat * peak)\n polar_angle = np.arctan(v / self.distance)\n polar_angles.append(polar_angle)\n azimuthal_angles.append(np.arctan2(-peak[1, 0], peak[2, 0]))\n return (np.array(polar_angles) * degrees,\n np.array(azimuthal_angles) * degrees)", "def equation_of_time(cls, date):\n offset = cls.sine(cls.mean_position(date, cls.ANOMALISTIC_YEAR))\n equation_sun = (offset * angle(57, 18, 0) * (14/360 - (abs(offset) / 1080)))\n return ((cls.daily_motion(date) / 360) * (equation_sun / 360) * cls.SIDEREAL_YEAR)", "def avl_angle(self):\n dif_height = (self.heights[5] - self.heights[7])\n dif_position = (self.positions[0][7] - self.positions[0][5])\n angle = atan(dif_height / dif_position) / 1.5 * 180 / pi\n return angle", "def resolution_azimuth(self) -> Optional[float]:\n return self._get_property(RESOLUTION_AZIMUTH_PROP, float)", "def get_compass_dir_azimuth(azimuth, resolution='intercardinal', format='short'):\n if azimuth < 0:\n azimuth += 360\n if format not in ['short', 'long']:\n raise KeyError(f'Direction format {format} is not supported')\n if resolution not in ['cardinal', 'intercardinal', 'meteorological']:\n raise KeyError(f'Direction resolution {resolution} is not supported')\n if resolution == 'cardinal':\n angles = np.arange(0, 360 + 90, 90)\n if format == 'long':\n points = LONG_CARDINAL_POINTS\n else:\n points = SHORT_CARDINAL_POINTS\n elif resolution == 'intercardinal':\n angles = np.arange(0, 360 + 45, 45)\n if format == 'long':\n points = LONG_INTERCARDINAL_POINTS\n else:\n points = SHORT_INTERCARDINAL_POINTS\n elif resolution == 'meteorological':\n angles = np.arange(0, 360 + 22.5, 22.5)\n if format == 'long':\n points = LONG_METEOROLOGICAL_POINTS\n else:\n points = SHORT_METEOROLOGICAL_POINTS\n\n adiff = abs(azimuth - angles)\n i = adiff.argmin()\n return points[i]", "def atanh(self, x):\n return self.arctanh(x)", "def _get_target_azimuths(radar_in):\n sweep_start = radar_in.sweep_start_ray_index['data'][0]\n sweep_end = radar_in.sweep_end_ray_index['data'][0]\n target_azimuths = np.sort(\n radar_in.azimuth['data'][sweep_start:sweep_end+1])\n az_tol = np.median(target_azimuths[1:]-target_azimuths[:-1])\n\n return target_azimuths, az_tol", "def get_altaz(ra,dec,jd=None,lat = 37.9183, lon = -122.1067, alt = 304, equinox='J2000'):\n if jd: t = ap.time.Time(jd,format='jd')\n else: t = ap.time.Time(time.time(),format='unix')\n l = ap.coordinates.EarthLocation(lat=lat*u.deg,\n lon=lon*u.deg,height=alt*u.m)\n f = ap.coordinates.AltAz(obstime=t,location=l)\n c = ap.coordinates.SkyCoord(ra, dec, frame='fk5',unit='deg',equinox=equinox)\n altaz = c.transform_to(f)\n return altaz.alt.deg, altaz.az.deg", "def angle(self):\n self.convert_window(\"Angle\", \"degree\", [\"arcminute\", \"arcsecond\", \"circle\", \"degree\", \"gon\", \"gradian\", \"mil(Nato)\", \"mil(Soviet Union)\", \"mil(Sweden)\", \"octant\", \"quadrant\", \"radian\", \"revolution\", \"sextant\", \"sign\", \"turn\"])", "def _calculate_angle(x0, y0, x1, y1):\n if x0 == y0 == x1 == y1 == 0:\n return 0\n\n if x1 - x0 > 0: # pointing to the right semi-plane\n angle = atan((y1 - y0) / (x1 - x0))\n elif x1 - x0 < 0 and y1 - y0 >= 0: # adding pi if pointing to the left-bottom quart\n angle = pi + atan((y1 - y0) / (x1 - x0))\n elif x1 - x0 < 0 and y1 - y0 < 0: # 
subtract pi if pointing to the left-upper quart\n angle = -pi + atan((y1 - y0) / (x1 - x0))\n else: # zerodevision handle\n if y1 - y0 > 0: # pointing down\n angle = pi / 2\n else: # pointing up\n angle = -pi / 2\n\n return angle", "def _get_angle(point1, point2):\n ydelta = point2[0] - point1[0]\n xdelta = point2[1] - point1[1]\n if xdelta == 0:\n hypot = np.sqrt(xdelta ** 2 + ydelta ** 2)\n theta = np.arcsin(ydelta / hypot)\n elif ydelta == 0:\n hypot = np.sqrt(xdelta ** 2 + ydelta ** 2)\n theta = np.arccos(xdelta / hypot)\n else:\n theta = np.arctan(ydelta / xdelta)\n return theta", "def _angle(u, v, w, d='+'):\n vu = np.arctan2(u[1] - v[1], u[0] - v[0])\n vw = np.arctan2(w[1] - v[1], w[0] - v[0])\n phi = vw - vu\n if phi < 0:\n phi += 2 * np.pi\n if d == '-':\n phi = 2 * np.pi - phi\n return np.round(phi, 6)", "def check_angle_of_arcs(self):\n\n if self.thin_arc_start_angle >= 3600:\n self.thin_arc_start_angle %= 360\n self.thin_arc_start_angle += 360\n\n elif self.thin_arc_start_angle <= -3600:\n self.thin_arc_start_angle %= 360\n self.thin_arc_start_angle -= 360\n\n if self.thin_arc_end_angle >= 3600:\n self.thin_arc_end_angle %= 360\n self.thin_arc_end_angle += 360\n\n elif self.thin_arc_end_angle <= -3600:\n self.thin_arc_end_angle %= 360\n self.thin_arc_end_angle -= 360\n\n if self.thick_arc_start_angle >= 3600:\n self.thick_arc_start_angle %= 360\n self.thick_arc_start_angle += 360\n\n elif self.thick_arc_start_angle <= -3600:\n self.thick_arc_start_angle %= 360\n self.thick_arc_start_angle -= 360\n\n if self.thick_arc_end_angle >= 3600:\n self.thick_arc_end_angle %= 360\n self.thick_arc_end_angle += 360\n\n elif self.thick_arc_end_angle <= -3600:\n self.thick_arc_end_angle %= 360\n self.thick_arc_end_angle -= 360", "def point_to_point_azimuth(point0, point1, out=None):\n azimuth_in_rads = point_to_point_angle(point0, point1, out=out)\n if out is None:\n return (np.pi * 0.5 - azimuth_in_rads) * 180.0 / np.pi\n else:\n np.subtract(np.pi * 0.5, azimuth_in_rads, out=out)\n return np.multiply(out, 180.0 / np.pi, out=out)", "def solar_angles(df, lat, lon, alt=0):\n\n jd = pd.Timestamp(df).to_julian_date()\n\n # offset (2451543.5)\n d_offset = pd.Timestamp('1999-12-31 00:00:00').to_julian_date()\n\n d = jd - d_offset\n\n\n # Keplerian elements for the sun (geocentric)\n w = 282.9404 + 4.70935E-5 * d # longitude of perihelion [degrees]\n a = 1.0 # mean distance [AU]\n e = 0.016709 - 1.151E-9 * d # eccentricity [-]\n M = np.mod(356.0470 + 0.9856002585 * d, 360.0) # mean anomaly [degrees]\n L = w + M # Sun's mean longitude [degrees]\n oblecl = 23.4393 - 3.563E-7 * d # Sun's obliquity of the eliptic [degrees]\n\n # Auxiliary angle [degrees]\n E = M + (180.0 / np.pi) * e * np.sin(np.deg2rad(M)) * (1.0 + e * np.cos(np.deg2rad(M)))\n\n # Rectangular coordinates in the plane of the ecliptic (x-axis toward perihelion)\n x = np.cos(np.deg2rad(E)) - e\n y = np.sin(np.deg2rad(E)) * np.sqrt(1 - (e ** 2))\n\n # Distance (r) and true anomaly (v)\n r = np.sqrt((x ** 2) + (y ** 2))\n v = np.rad2deg(np.arctan2(y, x))\n\n # Longitude of the sun\n lon_sun = v + w\n\n # Ecliptic rectangular coordinates\n xeclip = r * np.cos(np.deg2rad(lon_sun))\n yeclip = r * np.sin(np.deg2rad(lon_sun))\n zeclip = 0.0\n\n # Rotate coordinates to equatorial rectangular coordinates\n xequat = xeclip\n yequat = yeclip * np.cos(np.deg2rad(oblecl)) + zeclip * np.sin(np.deg2rad(oblecl))\n zequat = yeclip * np.sin(np.deg2rad(23.4406)) + zeclip * np.cos(np.deg2rad(oblecl))\n\n # Convert equatorial rectangular coordinates to 
right-ascension (RA) and declination\n r = np.sqrt(xequat ** 2 + yequat ** 2 + zequat ** 2) - (alt / 149598000.0)\n RA = np.rad2deg(np.arctan2(yequat, xequat))\n delta = np.rad2deg(np.arcsin(zequat / r))\n\n # Calculate local siderial time\n uth = df.hour + (df.minute / 60.0) + (df.second / 3600.0)\n gmst0 = np.mod(L + 180.0, 360.0) / 15.0\n sidtime = gmst0 + uth + (lon / 15.0)\n\n # Replace RA with hour-angle (HA)\n HA = sidtime * 15.0 - RA\n\n # Convert to rectangular coordinates\n x = np.cos(np.deg2rad(HA)) * np.cos(np.deg2rad(delta))\n y = np.sin(np.deg2rad(HA)) * np.cos(np.deg2rad(delta))\n z = np.sin(np.deg2rad(delta))\n\n # Rotate along an axis going East-West\n xhor = x * np.cos(np.deg2rad(90.0 - lat)) - z * np.sin(np.deg2rad(90.0 - lat))\n yhor = y\n zhor = x * np.sin(np.deg2rad(90.0 - lat)) + z * np.cos(np.deg2rad(90.0 - lat))\n\n # Find azimuthal and elevation angles\n azimuthal = np.rad2deg(np.arctan2(yhor, xhor)) + 180.0\n elevation = np.rad2deg(np.arcsin(zhor))\n\n zenith = 90.0 - elevation\n\n return np.column_stack((zenith, elevation, azimuthal))", "def azalt(ra, dec):\n\tx = rectanglize(ra, dec)\n\ty = np.dot(R_1, x)\n\tz = np.dot(R_2, y)\n\treturn sphericalize(z)", "def thetaCal(opposite, adjacent):\n opposite = opposite * (-1)\n theta = math.atan2(opposite, adjacent) # * (180 / 3.1415)\n theta = math.degrees(theta)\n theta = round(theta, 2)\n\n if theta < 0:\n theta = 180 + theta\n theta = theta + 180\n theta = round(theta, 2)\n return theta", "def calcNadirAngle(ele):\n\n nadeg = np.arcsin(6378.0/26378.0 * np.cos(ele/180.*np.pi)) * 180./np.pi\n\n return nadeg", "def angle(self):\n return angle(self.force, self.forceXYZ, self.excited_axis,\n self.distance, self.distanceXYZ)", "def angle_to( self, vector3 ):\n # make sure neither vector is zero-length\n sm = self.magnitude\n vm = vector3.magnitude\n if abs(sm) < self.EPSILON or abs(vm) < self.EPSILON:\n raise ZeroDivisionError(\n \"can't calculate angle between zero-length vectors!\" )\n \n # calculation will fail if vectors have same heading\n # catch error and return zero\n try:\n return math.degrees( math.acos(self.dot(vector3) / (sm * vm)) )\n except ValueError:\n # test whether direction is same or opposite\n if Vector3( self ).add( vector3 ).magnitude < sm:\n return 180.0\n return 0.0", "def getAngle(self):\n x, y = self.components\n return math.atan2(y, x)", "def deltaAngle(x, y):\n return math.atan2(math.sin(x-y), math.cos(x-y))", "def ayanamsha(tee):\n return Solar.solar_longitude(tee) - sidereal_solar_longitude(tee)", "def day_angle(day):\n return 2*pi*( day - 1 )/365", "def atan(data):\n return _make.atan(data)", "def compute_angle_in_rad(location1, location2):\n return np.arctan2(location1[0] - location2[0], location1[1] - location2[1])", "def angle_diff(self, i):\n (h0, k0, l0) = [int(np.rint(x)) for x in self.hkl(i)]\n polar0 = self.unit_cell.two_theta((h0, k0, l0), self.wavelength)\n return np.abs(self.polar(i) - polar0)", "def angle(self):\n self._normalise()\n norm = np.linalg.norm(self.vector)\n return self._wrap_angle(2.0 * atan2(norm,self.scalar))", "def test_angle():\n # radians\n theta_coord = 45. 
* coord.degrees\n theta_astro = astropy.coordinates.Angle(pi/4., units.radian)\n\n # degrees\n np.testing.assert_almost_equal(theta_coord.rad, theta_astro.rad, decimal=12)\n np.testing.assert_almost_equal(theta_coord / coord.degrees, theta_astro.degree, decimal=12)\n np.testing.assert_almost_equal(theta_coord / coord.hours, theta_astro.hour, decimal=12)\n np.testing.assert_almost_equal(theta_coord / coord.arcmin, theta_astro.arcminute, decimal=12)\n np.testing.assert_almost_equal(theta_coord / coord.arcsec, theta_astro.arcsec, decimal=12)\n\n # Other constructors\n theta_astro2 = astropy.coordinates.Angle(23.09, units.arcsec)\n theta_coord2 = coord.Angle(23.09, coord.arcsec)\n np.testing.assert_almost_equal(theta_coord2.rad, theta_astro2.rad, decimal=12)\n\n theta_astro3 = astropy.coordinates.Angle(-0.17, unit='rad')\n theta_coord3 = coord._Angle(-0.17)\n np.testing.assert_almost_equal(theta_coord3.rad, theta_astro3.rad, decimal=12)\n\n # astropy wrapping uses a different convention than we do. Their argument is\n # the upper end of the target range, not the center.\n theta_astro4 = theta_astro3.wrap_at(360 * units.deg)\n theta_coord4 = theta_coord3.wrap(180 * coord.degrees)\n np.testing.assert_almost_equal(theta_coord4.rad, theta_astro4.rad, decimal=12)\n\n theta_astro5 = theta_astro3.wrap_at(-100 * units.deg)\n theta_coord5 = theta_coord3.wrap(-280 * coord.degrees)\n np.testing.assert_almost_equal(theta_coord5.rad, theta_astro5.rad, decimal=12)\n\n theta_astro6 = astropy.coordinates.Angle('03:34:12', unit='hourangle')\n theta_coord6 = coord.Angle.from_hms('03:34:12')\n np.testing.assert_almost_equal(theta_coord6.rad, theta_astro6.rad, decimal=12)\n\n theta_astro7 = astropy.coordinates.Angle('03:34:12', unit='deg')\n theta_coord7 = coord.Angle.from_dms('03:34:12')\n np.testing.assert_almost_equal(theta_coord7.rad, theta_astro7.rad, decimal=12)\n\n # Their default arguments to to_string are different from ours, but can make them compatible.\n print('theta_astro6.hms = ',theta_astro6.to_string(sep=':', pad=True))\n print('theta_coord6.hms = ',theta_coord6.hms())\n assert theta_coord6.hms() == theta_astro6.to_string(sep=':', pad=True)\n\n print('theta_astro7.dms = ',theta_astro7.to_string(sep=':', pad=True))\n print('theta_coord7.dms = ',theta_coord7.dms())\n assert theta_coord7.dms() == theta_astro7.to_string(sep=':', pad=True)\n\n print('theta_astro6.hms = ',theta_astro6.to_string())\n print('theta_coord6.hms = ',theta_coord6.hms(sep='hms', pad=False))\n assert theta_coord6.hms(sep='hms', pad=False) == theta_astro6.to_string()\n\n print('theta_astro7.hms = ',theta_astro7.to_string())\n print('theta_coord7.hms = ',theta_coord7.dms(sep='dms', pad=False))\n assert theta_coord7.dms(sep='dms', pad=False) == theta_astro7.to_string()", "def _altaz_rotation(self, jd):\n R_lon = rot_z(- self.longitude.radians - jd.gast * TAU / 24.0)\n return einsum('ij...,jk...,kl...->il...', self.R_lat, R_lon, jd.M)", "def calculate_angles():\n time = request.args.get('time')\n\n result = Helpers.validate_and_parse_input(time)\n if result:\n hour, minute = result\n\n hour_angle = 0.5 * (hour * 60 + minute)\n minute_angle = 6 * minute\n\n angle = abs(hour_angle - minute_angle)\n angle = min(360 - angle, angle)\n DatastoreClient(kind='clock_angle_logs').log_to_datastore(time, angle)\n\n return Helpers.success(angle)\n else:\n DatastoreClient(kind='clock_angle_logs').log_to_datastore(time, 'bad_request')\n return Helpers.bad_request(r\"query parameter time should follow regex ^\\d{1,2}:\\d{1,2}$ and value should be 
\"\n r\"between 00:00 and 23:59\")", "def calcAnnualWeightedAveInsolation(latitude, slope, azimuth):\n\tdf = calcTotalInsolation(latitude, slope, azimuth)\n\treturn np.dot(\n\t\tnp.array([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]),\n\t\tdf['insolation_tilted']\n\t\t) / 365.0", "def angle(self) -> float:\n ...", "def test_rotation_angle(self):\n\n self.test_shape.azimuth_placement_angle = [45, 135, 225, 315]\n test_volume = self.test_shape.volume()\n self.test_shape.rotation_angle = 180\n assert self.test_shape.volume() == pytest.approx(test_volume * 0.5)", "def determine_in_plane_angle(self, qxy, qz=0.0, theta_incident=0.0):\n \n k = self.get_k()\n if theta_incident==None:\n # Use internal value\n theta_incident = self.theta_incident\n theta_incident_rad = np.radians(theta_incident)\n \n from scipy.optimize import fsolve\n \n def equations(p, qxy=qxy, qz=qz, theta_incident=theta_incident, k=k):\n \n # The variable we are fitting for\n omega_rad, = p\n \n # Non-fit values: qxy, qz, k, theta_incident, k\n \n return ( (qxy*cos(omega_rad))**2 + (qxy*sin(omega_rad)+k*cos(theta_incident_rad))**2 + (qz-k*sin(theta_incident_rad))**2 - k**2 )\n\n \n omega_rad, = fsolve(equations, ( np.radians(5.0) ) )\n #print( 'omega_rad = %.2f (err = %.4f)' % ( omega_rad, equations((omega_rad, )) ) )\n \n omega = abs( np.degrees(omega_rad) )\n #print( 'omega = %.2f (err = %.4f)' % ( omega, equations((omega_rad, )) ) )\n \n \n return omega", "def angle_with(self, some_site):\n Δx, Δy = some_site - self\n if Δx == 0:\n if Δy != 0:\n return -2 * np.pi / 3\n elif Δy == 0:\n if Δx != 0:\n return 0\n else:\n return 2 * np.pi / 3", "def Arc( x, y0, y1, r):\n return 0.5 * r*r * ( np.arctan( (y1).astype(float)/(x).astype(float) ) - np.arctan( (y0).astype(float)/(x).astype(float) ) )", "def angle(*args):\n if len(args) < 1:\n return 0.0\n elif len(args) == 1:\n return np.arctan2(args[0][1], args[0][0])\n else:\n v1 = args[0].flatten()\n v2 = args[1].flatten()\n return np.arccos(np.dot(v1, v2) / (norm(v1) * norm(v2)))", "def getAngle(self):\n return self.vector.angle" ]
[ "0.78250027", "0.67446834", "0.6695796", "0.64115757", "0.6395283", "0.63717794", "0.63140875", "0.62267244", "0.6145953", "0.6141899", "0.6060071", "0.598316", "0.5980102", "0.59746766", "0.5884954", "0.58290213", "0.5812057", "0.58084035", "0.57966334", "0.5779464", "0.5756145", "0.57466495", "0.5734621", "0.5720032", "0.5689982", "0.5673885", "0.5667952", "0.56595176", "0.565421", "0.5652813", "0.56457806", "0.5644773", "0.5640393", "0.5621939", "0.5602482", "0.5587706", "0.5580266", "0.55660665", "0.55480635", "0.5538841", "0.5536773", "0.55247563", "0.5503322", "0.5502376", "0.5472387", "0.5459578", "0.5446177", "0.5407133", "0.5406907", "0.5399454", "0.53963006", "0.53952765", "0.53947735", "0.53934246", "0.5383062", "0.5345392", "0.53378", "0.5330391", "0.53298175", "0.5324938", "0.532034", "0.53194094", "0.53175455", "0.53140104", "0.53135955", "0.5309276", "0.5307775", "0.52960056", "0.52925086", "0.52813816", "0.5281014", "0.5280651", "0.52790946", "0.52737445", "0.52736056", "0.52713126", "0.52680004", "0.5258554", "0.52546024", "0.52537847", "0.5252015", "0.5251143", "0.5244174", "0.524411", "0.5236371", "0.5232533", "0.52297765", "0.5229164", "0.5217008", "0.52149993", "0.52131903", "0.52123016", "0.5202067", "0.519779", "0.5197785", "0.51952827", "0.51877916", "0.51877195", "0.518617", "0.51830375" ]
0.7764275
1
Calculates the solar elevation angle for a specific time.
def solar_elevation(self, dateandtime=None):
    if self.astral is None:
        self.astral = Astral()

    if dateandtime is None:
        dateandtime = datetime.datetime.now(tz=self.tz)

    return self.astral.solar_elevation(dateandtime, self.latitude, self.longitude)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solar_elevation(self, dateandtime, latitude, longitude):\n \n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n\n zone = -dateandtime.utcoffset().seconds / 3600.0\n utc_datetime = dateandtime.astimezone(pytz.utc)\n timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + (utc_datetime.second / 3600)\n \n JD = self._julianday(dateandtime.day, dateandtime.month, dateandtime.year)\n t = self._jday_to_jcentury(JD + timenow / 24.0)\n theta = self._sun_declination(t)\n Etime = self._eq_of_time(t)\n \n eqtime = Etime\n solarDec = theta # in degrees\n \n solarTimeFix = eqtime - (4.0 * longitude) + (60 * zone)\n trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + dateandtime.second / 60.0 + solarTimeFix\n # in minutes\n \n while trueSolarTime > 1440:\n trueSolarTime = trueSolarTime - 1440\n \n hourangle = trueSolarTime / 4.0 - 180.0\n # Thanks to Louis Schwarzmayr for the next line:\n if hourangle < -180:\n hourangle = hourangle + 360.0\n \n harad = radians(hourangle)\n \n csz = sin(radians(latitude)) * sin(radians(solarDec)) + \\\n cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad)\n \n if csz > 1.0:\n csz = 1.0\n elif csz < -1.0:\n csz = -1.0\n \n zenith = degrees(acos(csz))\n \n azDenom = (cos(radians(latitude)) * sin(radians(zenith)))\n \n if (abs(azDenom) > 0.001):\n azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))) / azDenom\n \n if abs(azRad) > 1.0:\n if azRad < 0:\n azRad = -1.0\n else:\n azRad = 1.0\n \n azimuth = 180.0 - degrees(acos(azRad))\n \n if hourangle > 0.0:\n azimuth = -azimuth\n else:\n if latitude > 0.0:\n azimuth = 180.0\n else:\n azimuth = 0\n \n if azimuth < 0.0:\n azimuth = azimuth + 360.0\n \n exoatmElevation = 90.0 - zenith\n\n if exoatmElevation > 85.0:\n refractionCorrection = 0.0\n else:\n te = tan(radians(exoatmElevation))\n if exoatmElevation > 5.0:\n refractionCorrection = 58.1 / te - 0.07 / (te * te * te) + 0.000086 / (te * te * te * te * te)\n elif exoatmElevation > -0.575:\n step1 = (-12.79 + exoatmElevation * 0.711)\n step2 = (103.4 + exoatmElevation * (step1))\n step3 = (-518.2 + exoatmElevation * (step2))\n refractionCorrection = 1735.0 + exoatmElevation * (step3)\n else:\n refractionCorrection = -20.774 / te\n \n refractionCorrection = refractionCorrection / 3600.0\n \n solarzen = zenith - refractionCorrection\n \n solarelevation = 90.0 - solarzen\n \n return solarelevation", "def solar_azimuth(self, dateandtime=None):\n\n if self.astral is None:\n self.astral = Astral()\n\n if dateandtime is None:\n dateandtime = datetime.datetime.now(tz=self.tz)\n \n return self.astral.solar_azimuth(dateandtime, self.latitude, self.longitude)", "def solar_azimuth(self, dateandtime, latitude, longitude):\n \n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n zone = -dateandtime.utcoffset().seconds / 3600.0\n utc_datetime = dateandtime.astimezone(pytz.utc)\n timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + (utc_datetime.second / 3600)\n\n JD = self._julianday(dateandtime.day, dateandtime.month, dateandtime.year)\n t = self._jday_to_jcentury(JD + timenow / 24.0)\n theta = self._sun_declination(t)\n Etime = self._eq_of_time(t)\n \n eqtime = Etime\n solarDec = theta # in degrees\n \n solarTimeFix = eqtime - (4.0 * longitude) + (60 * zone)\n trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + dateandtime.second / 60.0 + solarTimeFix\n # in minutes\n \n while trueSolarTime > 1440:\n trueSolarTime = trueSolarTime - 1440\n 
\n hourangle = trueSolarTime / 4.0 - 180.0\n # Thanks to Louis Schwarzmayr for the next line:\n if hourangle < -180:\n hourangle = hourangle + 360.0\n \n harad = radians(hourangle)\n \n csz = sin(radians(latitude)) * sin(radians(solarDec)) + \\\n cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad)\n \n if csz > 1.0:\n csz = 1.0\n elif csz < -1.0:\n csz = -1.0\n \n zenith = degrees(acos(csz))\n \n azDenom = (cos(radians(latitude)) * sin(radians(zenith)))\n \n if (abs(azDenom) > 0.001):\n azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))) / azDenom\n \n if abs(azRad) > 1.0:\n if azRad < 0:\n azRad = -1.0\n else:\n azRad = 1.0\n \n azimuth = 180.0 - degrees(acos(azRad))\n \n if hourangle > 0.0:\n azimuth = -azimuth\n else:\n if latitude > 0.0:\n azimuth = 180.0\n else:\n azimuth = 0#\n \n if azimuth < 0.0:\n azimuth = azimuth + 360.0\n \n return azimuth", "def era(self):\n # earth rotation angle using Universal Time\n J = self.MJD - 51544.5\n fraction = np.mod(J, self.turn)\n theta = np.mod(0.7790572732640 + 0.00273781191135448*J, self.turn)\n return self.turndeg*np.mod(theta + fraction, self.turn)", "def AngleFromSun(body, time):\n if body == Body.Earth:\n raise EarthNotAllowedError()\n sv = GeoVector(Body.Sun, time, True)\n bv = GeoVector(body, time, True)\n return AngleBetween(sv, bv)", "def angle(self, angle: int, time: int = 0, /) -> None:", "def ayanamsha(tee):\n return Solar.solar_longitude(tee) - sidereal_solar_longitude(tee)", "def vert_angle(time, data, height, distance):\n\n altitude = float(data[time]['altitude'])\n\n return round((degrees(atan2(height, distance)) - altitude) / 2, 4)", "def horiz_angle(time, data):\n\n # TODO What should 0deg be? Set it to inline w/ target? facing target?\n\n # direction of the sun. measured in degrees counted clockwise from north.\n azimuth = data[time]['azimuth']\n\n h_angle = (azimuth / 2 - 90)\n\n # returns answer between -180 and 180 degrees\n return round(((h_angle + 180) % 360) - 180, 4)", "def diffraction_angle_for(self, wavelength: float = 532., theta: float = 0.):\n return np.arcsin(np.sin(-theta / 180. * np.pi)\n - self.interference * wavelength / 1000. 
/ self.grating) * 180 / np.pi + theta", "def altAz2RADec(azim, elev, jd, lat, lon):\n\n azim = np.radians(azim)\n elev = np.radians(elev)\n lat = np.radians(lat)\n lon = np.radians(lon)\n \n # Calculate hour angle\n ha = np.arctan2(-np.sin(azim), np.tan(elev)*np.cos(lat) - np.cos(azim)*np.sin(lat))\n\n # Calculate Local Sidereal Time\n lst = np.radians(JD2LST(jd, np.degrees(lon))[0])\n \n # Calculate right ascension\n ra = (lst - ha)%(2*np.pi)\n\n # Calculate declination\n dec = np.arcsin(np.sin(lat)*np.sin(elev) + np.cos(lat)*np.cos(elev)*np.cos(azim))\n\n return np.degrees(ra), np.degrees(dec)", "def solarelevation_function_overcast(latitude_deg, longitude_deg, utc_datetime,\n elevation = elevation_default, temperature_celsius = 25,\n pressure_millibars = 1013.25):\n altitude = solar.GetAltitude(latitude_deg, longitude_deg,utc_datetime, elevation, temperature_celsius,pressure_millibars)\n return ((-0.0067133) + (0.78600 * (math.sin(altitude)))) + (0.22401 * (0.5 * (1 - math.cos(2 * altitude))))", "def lunarperigee(time):\n dtor = np.pi / 180\n t1 = 1 + time\n t2 = t1 * t1\n t3 = t2 * t1\n perigee = (\n 334.329653 * dtor\n + 4069.0340329575 * dtor * t1\n - 0.010325 * dtor * t2\n - 1.2e-5 * dtor * t3\n )\n return perigee", "def get_rotationalAngularPosition(self, t): # returns [rad]\n angle = self.theta0 + self.rotationalAngularVelocity * t # angular position [rad]\n return angle", "def equatorial_hour_angle(hour, location):\n equatorial_angle = (hour - location.timezone) * 2 * np.pi / 24 + (np.deg2rad(location.longitude))\n logging.getLogger(\"hour.angle.equ\").debug(\"For hour %d, equatorial angle %g\" % (hour, np.rad2deg(equatorial_angle)))\n return equatorial_angle", "def getAltitudeAngle(self):\n return self._altitude", "def calc_angle_of_incidence(g, lat, ha, tilt, teta_z):\n # surface normal vector\n n_E = sin(tilt)*sin(teta_z)\n n_N = sin(tilt)*cos(teta_z)\n n_Z = cos(tilt)\n # solar vector\n s_E = -cos(g)*sin(ha)\n s_N = sin(g)*cos(lat) - cos(g)*sin(lat)*cos(ha)\n s_Z = cos(g)*cos(lat)*cos(ha) + sin(g)*sin(lat)\n\n # angle of incidence\n teta_B = acos(n_E*s_E + n_N*s_N + n_Z*s_Z)\n return teta_B", "def get_azimuth(self):\n self.degrees = self.azimuth_encoder.get_degrees()\n self.tele_azimuth = self.Calculations.convert_degrees(self.degrees)\n return self.tele_azimuth", "def _angle_of_attack(self, rel_wind, blade_chord):\n # blade_chord_vector - (relative_wind + pi)\n # rel_oposite = rel_wind.rotated(math.pi)\n aoa_rad = rel_wind.theta - blade_chord.theta\n aoa_rad = vec.normalize_angle(aoa_rad)\n aoa_360 = aoa_rad * 360 / math.tau\n return aoa_rad, aoa_360", "def calculate_angles():\n time = request.args.get('time')\n\n result = Helpers.validate_and_parse_input(time)\n if result:\n hour, minute = result\n\n hour_angle = 0.5 * (hour * 60 + minute)\n minute_angle = 6 * minute\n\n angle = abs(hour_angle - minute_angle)\n angle = min(360 - angle, angle)\n DatastoreClient(kind='clock_angle_logs').log_to_datastore(time, angle)\n\n return Helpers.success(angle)\n else:\n DatastoreClient(kind='clock_angle_logs').log_to_datastore(time, 'bad_request')\n return Helpers.bad_request(r\"query parameter time should follow regex ^\\d{1,2}:\\d{1,2}$ and value should be \"\n r\"between 00:00 and 23:59\")", "def get_altaz(ra,dec,jd=None,lat = 37.9183, lon = -122.1067, alt = 304, equinox='J2000'):\n if jd: t = ap.time.Time(jd,format='jd')\n else: t = ap.time.Time(time.time(),format='unix')\n l = ap.coordinates.EarthLocation(lat=lat*u.deg,\n lon=lon*u.deg,height=alt*u.m)\n f = 
ap.coordinates.AltAz(obstime=t,location=l)\n c = ap.coordinates.SkyCoord(ra, dec, frame='fk5',unit='deg',equinox=equinox)\n altaz = c.transform_to(f)\n return altaz.alt.deg, altaz.az.deg", "def do_azangle(self):\n angle_1, angle_2 = cbp.potentiometer.main()\n current_angle = angle_2\n #print(current_angle)\n self.azangle = current_angle\n return current_angle", "def _calc_solar_from_clouds_and_angle(hr, ds_path):\n # Solar radiation [W/m^2] incident on top of atmosphere\n Q_o = 1368.0\n # Cloud model based on Dobson and Smith, table 5\n # SEA -- May 2010 : redid the cloud parametrization based on UBC\n # Solar data (/ocean/shared/SoG/met/solar/) fitting Q to cos_Z\n # (not Q/cos_Z as Kate did). Allen and Wolfe (2013). (0) no\n # clouds, (1) 1/10 cloud fraction (10) 100% clouds. Four sig\n # figs are what comes out of matlab but standard deviations are\n # 40W/m2 for low cloud fraction to 120 W/m2 for 6-9 cloud\n # fraction to 85 W/m2 for completely cloudy.\n cloud_consts = SimpleNamespace(\n A=numpy.array(\n [\n 0.6337,\n 0.6149,\n 0.5861,\n 0.5512,\n 0.5002,\n 0.4649,\n 0.4225,\n 0.3669,\n 0.2468,\n 0.1981,\n 0.0841,\n ]\n ),\n B=numpy.array(\n [\n 0.1959,\n 0.2119,\n 0.2400,\n 0.2859,\n 0.3192,\n 0.3356,\n 0.3339,\n 0.3490,\n 0.4427,\n 0.3116,\n 0.2283,\n ]\n ),\n )\n # Local standard time\n ## WARNING: .to(\"PST\") may be fragile and incorrect for summer-time dates\n lst = hr.to(\"PST\")\n # day_time is in seconds, LST\n day_time = (lst - lst.floor(\"day\")).seconds\n # hour of day as degrees from noon\n hour = (day_time / 3600 - 12) * 15\n # day is year-day\n day = (lst - lst.floor(\"year\")).days\n # solar declination [radians]\n declination = (\n 23.45 * numpy.pi / 180 * numpy.sin((284 + day) / 365.25 * 2 * numpy.pi)\n )\n # Latitude of approximate centre of model domain in radians\n lat = numpy.pi * 50 / 180\n # solar elevation\n elev_sin = numpy.sin(declination) * numpy.sin(lat)\n elev_cos = numpy.cos(declination) * numpy.cos(lat)\n cos_Z = elev_sin + elev_cos * numpy.cos(numpy.pi / 180 * hour)\n # cos of -hour_angle in radians\n hour_angle = numpy.tan(lat) * numpy.tan(declination)\n # assume we are south of the Arctic Circle\n day_length = numpy.arccos(-hour_angle) / 15 * 2 * 180 / numpy.pi\n sunrise = 12 - 0.5 * day_length # hours\n sunset = 12 + 0.5 * day_length # hours\n Qso = Q_o * (1 + 0.033 * numpy.cos(day / 365.25 * 2 * numpy.pi))\n with xarray.open_dataset(ds_path) as ds:\n cf_value = ds.percentcloud * 10\n fcf = numpy.floor(cf_value).astype(int) # integer below cf value\n fcf = xarray.where(fcf == 10, 9, fcf).data\n ccf = fcf + 1 # integer above cf value\n if (sunrise > day_time / 3600) or (day_time / 3600 > sunset):\n # nighttime\n return xarray.zeros_like(ds.percentcloud)\n return (\n Qso\n * (\n cloud_consts.A[fcf] * (ccf - cf_value)\n + cloud_consts.A[ccf] * (cf_value - fcf)\n + (\n cloud_consts.B[fcf] * (ccf - cf_value)\n + cloud_consts.B[ccf] * (cf_value - fcf)\n )\n * cos_Z\n )\n * cos_Z\n )", "def getEdgeAngle():\n '''\n returns angle a\n a\n ◿\n b c\n '''\n ANGLE_OFFSET = 8 # How far off the angle measurements are in degrees.\n THRESHOLD = 220 # How much light must be reflected to 'notice' the desk.\n angle = 0\n while angle < panTilt.TLT_RANGE:\n angle += 1\n panTilt.tilt(int(angle))\n deskDetected = ir.readWithDelay()\n # print \"Angle:\", angle + ANGLE_OFFSET, \", ir reading:\", deskDetected\n if deskDetected > THRESHOLD or angle == panTilt.TLT_RANGE:\n # print \"-----------------------\"\n break # Break out of looking downwards loop\n panTilt.up() # Look up 
again\n return 90 - angle - ANGLE_OFFSET", "def set_azimuth(self):\n self.azimuth = self.Calculations.convert_to_azimuth( self.declination, self.right_ascension, self.Latitude, self.LHA)\n if self.azimuth < 0:\n self.azimuth = self.azimuth + 360.0\n return self.azimuth\n else:\n pass\n return self.azimuth\n print('azimuth set to', self.azimuth)", "def IAngle(a, b, t):\n \n # http://www.engineersedge.com/material_science/moment-inertia-gyration-7.htm\n d = b - t \n y = b - (t*(2*d + a) + d**2)/(2*(d+a))\n I = 1/3 * (t*y**3 + a*(b-y)**3 - (a-t)*(b-y-t)**3)\n return I", "def fun_azimuth(self):\n\n energy_kev = self.energy_kev.get()\n hkl = self.hkl_magnetic.get()\n hkl = hkl.replace(',', ' ') # remove commas\n hkl = hkl.replace('(', '').replace(')', '') # remove brackets\n hkl = hkl.replace('[', '').replace(']', '') # remove brackets\n hkl = np.fromstring(hkl, sep=' ')\n\n azi = self.azim_zero.get()\n azi = azi.replace(',', ' ') # remove commas\n azi = azi.replace('(', '').replace(')', '') # remove brackets\n azi = azi.replace('[', '').replace(']', '') # remove brackets\n azi = np.fromstring(azi, sep=' ')\n\n pol = self.polval.get()\n if pol == u'\\u03c3-\\u03c3':\n pol = 's-s'\n elif pol == u'\\u03c3-\\u03c0':\n pol = 's-p'\n elif pol == u'\\u03c0-\\u03c3':\n pol = 'p-s'\n else:\n pol = 'p-p'\n\n F0 = self.resF0.get()\n F1 = self.resF1.get()\n F2 = self.resF2.get()\n\n isres = self.isres.get()\n if isres:\n # Resonant scattering\n self.xtl.Plot.simulate_azimuth_resonant(\n hkl,\n energy_kev=energy_kev,\n azim_zero=azi,\n polarisation=pol,\n F0=F0, F1=F1, F2=F2)\n plt.show()\n else:\n # Non-Resonant scattering\n self.xtl.Plot.simulate_azimuth_nonresonant(\n hkl,\n energy_kev=energy_kev,\n azim_zero=azi,\n polarisation=pol)\n plt.show()", "def getAltAz(arr,header,time,location):\n\tsoln = wcs.WCS(header)\n\tcoords = cartesian([arange(arr.shape[1]),arange(arr.shape[0])])\n\tworld = soln.wcs_pix2world(coords,0)\n\tradec = SkyCoord(ra=world[:,0],dec=world[:,1],frame='icrs',unit='deg')\n\taltaz = radec.transform_to(AltAz(obstime=time,location=telescope))\n\treturn altaz.alt.deg,altaz.az.deg,coords[:,0],coords[:,1]", "def calculate_orbiting_angle(orbiting_center, raft):\n\n # note the negative sign before the first component, the y component\n # it is to make the orbiting angle in a right-handed coordiante.\n angle = np.arctan2(-(raft[1] - orbiting_center[1]), (raft[0] - orbiting_center[0])) * 180 / np.pi\n\n return angle", "def pointing_dir_earth (self, time):\n\n return self.vect_from_lspe_to_earth (self.pointing_dir_lspe (time),\n time)", "def solarelevation_function_clear(latitude_deg, longitude_deg, utc_datetime,temperature_celsius = 25,\n pressure_millibars = 1013.25, elevation = elevation_default):\n altitude = solar.GetAltitude(latitude_deg, longitude_deg,utc_datetime, elevation, temperature_celsius,pressure_millibars) \n return (0.038175 + (1.5458 * (math.sin(altitude))) + ((-0.59980) * (0.5 * (1 - math.cos(2 * (altitude))))))", "def altitude(press, altimeter=29.92126):\n AS = altimeter*inHg2PA\n print(AS, press**(L*R/g/M))\n h = -(press**(L*R/g/M) - AS**(L*R/g/M))*T0/L/(P0**(L*R/g/M))\n return h/ft2m", "def rotated_equatorial_hour_angle(hour, location):\n equatorial_angle = equatorial_hour_angle(hour, location)\n equatorial_angle_from_solar_noon = equatorial_angle - np.pi\n # Angle currently is angle referenced from solar noon, positive (pm) towards the east.\n # Change to mathematical angle, anticlockwise from 0 in the east.\n return np.pi / 2 - equatorial_angle_from_solar_noon", "def 
lunar_phase(cls, tee):\n return mod(cls.lunar_longitude(tee) - cls.hindu_solar_longitude(tee), 360)", "def getAzimuthAngle(self):\n return self._azimuth", "async def get_altaz(self, **kwargs: Any) -> Tuple[float, float]:\n if self.observer is not None:\n alt_az = self.observer.altaz(Time.now(), self._telescope.position)\n return float(alt_az.alt.degree), float(alt_az.az.degree)\n else:\n raise ValueError(\"No observer given.\")", "def solar_angles(df, lat, lon, alt=0):\n\n jd = pd.Timestamp(df).to_julian_date()\n\n # offset (2451543.5)\n d_offset = pd.Timestamp('1999-12-31 00:00:00').to_julian_date()\n\n d = jd - d_offset\n\n\n # Keplerian elements for the sun (geocentric)\n w = 282.9404 + 4.70935E-5 * d # longitude of perihelion [degrees]\n a = 1.0 # mean distance [AU]\n e = 0.016709 - 1.151E-9 * d # eccentricity [-]\n M = np.mod(356.0470 + 0.9856002585 * d, 360.0) # mean anomaly [degrees]\n L = w + M # Sun's mean longitude [degrees]\n oblecl = 23.4393 - 3.563E-7 * d # Sun's obliquity of the eliptic [degrees]\n\n # Auxiliary angle [degrees]\n E = M + (180.0 / np.pi) * e * np.sin(np.deg2rad(M)) * (1.0 + e * np.cos(np.deg2rad(M)))\n\n # Rectangular coordinates in the plane of the ecliptic (x-axis toward perihelion)\n x = np.cos(np.deg2rad(E)) - e\n y = np.sin(np.deg2rad(E)) * np.sqrt(1 - (e ** 2))\n\n # Distance (r) and true anomaly (v)\n r = np.sqrt((x ** 2) + (y ** 2))\n v = np.rad2deg(np.arctan2(y, x))\n\n # Longitude of the sun\n lon_sun = v + w\n\n # Ecliptic rectangular coordinates\n xeclip = r * np.cos(np.deg2rad(lon_sun))\n yeclip = r * np.sin(np.deg2rad(lon_sun))\n zeclip = 0.0\n\n # Rotate coordinates to equatorial rectangular coordinates\n xequat = xeclip\n yequat = yeclip * np.cos(np.deg2rad(oblecl)) + zeclip * np.sin(np.deg2rad(oblecl))\n zequat = yeclip * np.sin(np.deg2rad(23.4406)) + zeclip * np.cos(np.deg2rad(oblecl))\n\n # Convert equatorial rectangular coordinates to right-ascension (RA) and declination\n r = np.sqrt(xequat ** 2 + yequat ** 2 + zequat ** 2) - (alt / 149598000.0)\n RA = np.rad2deg(np.arctan2(yequat, xequat))\n delta = np.rad2deg(np.arcsin(zequat / r))\n\n # Calculate local siderial time\n uth = df.hour + (df.minute / 60.0) + (df.second / 3600.0)\n gmst0 = np.mod(L + 180.0, 360.0) / 15.0\n sidtime = gmst0 + uth + (lon / 15.0)\n\n # Replace RA with hour-angle (HA)\n HA = sidtime * 15.0 - RA\n\n # Convert to rectangular coordinates\n x = np.cos(np.deg2rad(HA)) * np.cos(np.deg2rad(delta))\n y = np.sin(np.deg2rad(HA)) * np.cos(np.deg2rad(delta))\n z = np.sin(np.deg2rad(delta))\n\n # Rotate along an axis going East-West\n xhor = x * np.cos(np.deg2rad(90.0 - lat)) - z * np.sin(np.deg2rad(90.0 - lat))\n yhor = y\n zhor = x * np.sin(np.deg2rad(90.0 - lat)) + z * np.cos(np.deg2rad(90.0 - lat))\n\n # Find azimuthal and elevation angles\n azimuthal = np.rad2deg(np.arctan2(yhor, xhor)) + 180.0\n elevation = np.rad2deg(np.arcsin(zhor))\n\n zenith = 90.0 - elevation\n\n return np.column_stack((zenith, elevation, azimuthal))", "def _earth_distance(time='now'):\n return get_earth(time).radius", "def do_altangle(self):\n nave = 10000\n x, y, z, angle = cbp.phidget.main(nave)\n current_angle = angle\n #print(current_angle)\n self.altangle = current_angle\n return current_angle", "def avl_angle(self):\n dif_height = (self.heights[5] - self.heights[7])\n dif_position = (self.positions[0][7] - self.positions[0][5])\n angle = atan(dif_height / dif_position) / 1.5 * 180 / pi\n return angle", "def parangle(ra, dec, utdate, uttime, site, verbose=False):\n # degrees per radian\n 
degrad = 180. * u.deg /(np.pi * u.rad)\n\n l_ra = ra.strip()\n l_dec = dec.strip()\n if '-' not in l_dec and l_dec[0] != '+':\n l_dec = '+' + l_dec\n\n # Coordinate object\n coord = SkyCoord(l_ra,l_dec,frame='icrs',unit = (u.hr, u.deg))\n\n # Observation time\n obs_time = Time(utdate + 'T' + uttime, format='isot', scale='utc')\n\n # Location\n location = EarthLocation.of_site(site)\n if verbose:\n print('Site: ', location)\n\n altaz = coord.transform_to(AltAz(obstime=obs_time, location=location))\n if verbose:\n print('Alt/Az: ', altaz.alt.deg, altaz.az.deg)\n\n # Hour angle\n ha = np.arcsin(-np.sin(altaz.az) * np.cos(altaz.alt) / np.cos(coord.dec))\n if verbose:\n print('HA: ', ha)\n\n # Parallactic angle\n parang = -degrad * np.arctan2(-np.sin(ha),\n np.cos(coord.dec) * np.tan(location.lat) - np.sin(coord.dec) * np.cos(ha))\n\n return parang", "def air_exchange(self, room: Room, time: float) -> _VectorisedFloat:\n return 0.", "def spin_axis_earth (self, time):\n\n return self.vect_from_lspe_to_earth (self.spin_axis_lspe (time),\n time)", "def convergence_angle(self):\n return np.arctan2(self.radius, self.focal_length)", "def calcNadirAngle(ele):\n\n nadeg = np.arcsin(6378.0/26378.0 * np.cos(ele/180.*np.pi)) * 180./np.pi\n\n return nadeg", "def angle(z):", "def _solar_time(date, lon, lat):\n # IDL is computing time_t as hours and fractional minutes\n time_t = ee.Date(date).get('hour').add(\n ee.Date(date).get('minute').divide(60))\n\n # This will return the hour floating point value\n # time_t = ee.Date(date).get('hour').add(ee.Date(date).getFraction('hour'))\n\n # CGM - DOY and hour could be images in order to make these expressions\n julian = _to_jd(date)\n\n # Sunrise time\n julian_ = time_t.divide(24.0).add(julian)\n j_cen = julian_.add(0.5 - 2451545.0).divide(36525.0)\n # CGM - Does the mod happen before or after the multiply\n lon_sun = j_cen.multiply(0.0003032).add(36000.76983) \\\n .multiply(j_cen).mod(360.0).add(280.46646).subtract(360)\n an_sun = j_cen.multiply(-0.0001537).add(35999.05029) \\\n .multiply(j_cen).add(357.52911)\n ecc = j_cen.multiply(0.0000001267).add(0.000042037) \\\n .multiply(j_cen).multiply(-1).add(0.016708634)\n ob_ecl = j_cen.multiply(-0.001813).add(0.00059) \\\n .multiply(j_cen).add(46.815) \\\n .multiply(j_cen).multiply(-1).add(21.448) \\\n .divide(60.0).add(26).divide(60).add(23)\n ob_corr = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).cos() \\\n .multiply(0.00256).add(ob_ecl)\n var_y = ob_corr.divide(2.0).multiply(deg2rad).tan().multiply(\n ob_corr.divide(2.0).multiply(deg2rad).tan())\n eq_t = (\n lon_sun.multiply(2.0).multiply(deg2rad).sin().multiply(var_y)\n .subtract(an_sun.multiply(deg2rad).sin().multiply(ecc).multiply(2.0))\n .add(an_sun.multiply(deg2rad).sin()\n .multiply(lon_sun.multiply(2.0).multiply(deg2rad).cos())\n .multiply(var_y).multiply(ecc).multiply(4.0))\n .subtract(lon_sun.multiply(4.0).multiply(deg2rad).sin()\n .multiply(var_y).multiply(var_y).multiply(0.5))\n .subtract(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(ecc).multiply(ecc).multiply(1.25))\n .multiply(4.0).multiply(rad2deg))\n sun_eq = (\n an_sun.multiply(deg2rad).sin().multiply(\n j_cen.multiply(0.000014).add(0.004817)\n .multiply(j_cen).multiply(-1).add(1.914602))\n .add(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(j_cen.multiply(-0.000101).add(0.019993)))\n .add(an_sun.multiply(3.0).multiply(deg2rad).sin().multiply(0.000289)))\n sun_true = sun_eq.add(lon_sun)\n sun_app = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).sin() \\\n 
.multiply(-0.00478).subtract(0.00569).add(sun_true)\n\n # CGM - Intentionally not converting back to degrees\n d = ob_corr.multiply(deg2rad).sin() \\\n .multiply(sun_app.multiply(deg2rad).sin()) \\\n .asin()\n\n # CGM - Functions below are lat/lon dependent and can be written as\n # ee.Image expressions\n # CGM - d is still in radians, not converting\n ha_t = lat.expression(\n 'acos((cos(90.833 * pi / 180) / (cos(lat) * cos(d))) - tan(lat) * tan(d))'\n ' * (180 / pi)',\n {'lat': lat, 'd': d, 'pi': math.pi})\n\n # print('\\n{:10s} {:.12f}'.format('julian_', julian_.getInfo()))\n # print('{:10s} {:.12f}'.format('time_t', time_t.getInfo()))\n # print('{:10s} {:.12f}'.format('j_cen', j_cen.getInfo()))\n # print('{:10s} {:.12f}'.format('lon_sun', lon_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('an_sun', an_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('ecc', ecc.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_ecl', ob_ecl.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_corr', ob_corr.getInfo()))\n # print('{:10s} {:.12f}'.format('var_y', var_y.getInfo()))\n # print('{:10s} {:.12f}'.format('eq_t', eq_t.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_eq', sun_eq.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_true', sun_true.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_app', sun_app.getInfo()))\n # print('{:10s} {:.12f}'.format('d', d.getInfo()))\n\n return d, eq_t, ha_t", "def azel2radec(az,el,mjd,lat=47.8781,lon=-87.6298):\n \n T_UT1 = (mjd-51544.5)/36525;\n ThetaGMST = 67310.54841 + (876600*3600 + 8640184.812866)*T_UT1 + \\\n .093104*(T_UT1**2) - (6.2e-6)*(T_UT1**3)\n ThetaGMST = np.mod((np.mod(ThetaGMST,86400*(ThetaGMST/np.abs(ThetaGMST)))/240),360)\n ThetaLST = ThetaGMST + lon\n \n DEC = asind(sind(el)*sind(lat)+cosd(el)*cosd(lat)*cosd(az))\n LHA = atand2(-sind(az)*cosd(el)/cosd(DEC), \n (sind(el)-sind(DEC)*sind(lat))/(cosd(DEC)*cosd(lat)))*(180/np.pi);\n RA = np.mod(ThetaLST-LHA,360);\n \n return RA,DEC", "def calculate_attitude_angle(self):\n return np.arctan(np.pi * (1 - self.eccentricity_ratio ** 2) / (4 * self.eccentricity_ratio))", "def calc_optimal_angle(teta_z, latitude, transmissivity):\n if transmissivity <= 0.15:\n gKt = 0.977\n elif 0.15 < transmissivity <= 0.7:\n gKt = 1.237 - 1.361 * transmissivity\n else:\n gKt = 0.273\n Tad = 0.98 # transmittance-absorptance product of the diffuse radiation\n Tar = 0.97 # transmittance-absorptance product of the reflected radiation\n Pg = 0.2 # ground reflectance of 0.2\n l = radians(latitude)\n a = radians(teta_z)\n b = atan((cos(a) * tan(l)) * (1 / (1 + ((Tad * gKt - Tar * Pg) / (2 * (1 - gKt)))))) # eq.(11)\n return abs(b)", "def getFinalLarmorAngle(self):\n return np.degrees(self.theta_L_array[-1])", "def calcScatterAngleOld(R, PHI, THETA, sun_rotation):\n \n H_rot = atmo_utils.calcRotationMatrix(sun_rotation)\n\n X_ = R * np.sin(THETA) * np.cos(PHI)\n Y_ = R * np.sin(THETA) * np.sin(PHI)\n Z_ = R * np.cos(THETA)\n \n XYZ_dst = np.vstack((X_.ravel(), Y_.ravel(), Z_.ravel(), np.ones(R.size)))\n XYZ_src_ = np.dot(H_rot, XYZ_dst)\n \n Z_rotated = XYZ_src_[2, :]\n R_rotated = np.sqrt(np.sum(XYZ_src_[:3, :]**2, axis=0))\n \n angle = np.arccos(Z_rotated/(R_rotated+amitibo.eps(R_rotated)))\n \n return angle", "def azimuth_update(self):\n self.current_azimuth = self.azimuth_encoder.get_degrees()\n azimuth_error = self.azimuth - float(self.current_azimuth)\n # print('goal azimuth', self.azimuth, 'current azimuth', self.azimuth_encoder.get_degrees(), 'difference in azimuth', azimuth_error)\n if azimuth_error >0:\n # 
print('positive azimuth')\n self.azimuth_motor.set_direction(1)\n elif azimuth_error > 0:\n # print('negative azimuth')\n self.azimuth_motor.set_direction(0)\n azimuth_error = abs(azimuth_error)\n self.azimuth_error = azimuth_error\n if azimuth_error >= 0:\n self.azimuth_motor.set_speed(0)\n if azimuth_error >= 35:\n self.azimuth_motor.set_speed(1)\n if azimuth_error >= 40:\n self.azimuth_motor.set_speed(2)\n if azimuth_error >= 80:\n self.azimuth_motor.set_speed(3)\n if azimuth_error >= 160:\n self.azimuth_motor.set_speed(4)\n if azimuth_error >= 280:\n self.azimuth_motor.set_speed(5)\n self.azimuth_error = azimuth_error\n print('debug_azimuth', self.current_azimuth, self.azimuth_error, self.azimuth_motor.speed)\n return self.azimuth_error", "def Elongation(body, time):\n angle = PairLongitude(body, Body.Sun, time)\n if angle > 180.0:\n visibility = Visibility.Morning\n esep = 360.0 - angle\n else:\n visibility = Visibility.Evening\n esep = angle\n angle = AngleFromSun(body, time)\n return ElongationEvent(time, visibility, angle, esep)", "def RefractionAngle(refraction, altitude):\n if altitude < -90.0 or altitude > +90.0:\n return 0.0 # No attempt to correct an invalid altitude\n\n if refraction == Refraction.Normal or refraction == Refraction.JplHorizons:\n # http://extras.springer.com/1999/978-1-4471-0555-8/chap4/horizons/horizons.pdf\n # JPL Horizons says it uses refraction algorithm from\n # Meeus \"Astronomical Algorithms\", 1991, p. 101-102.\n # I found the following Go implementation:\n # https://github.com/soniakeys/meeus/blob/master/v3/refraction/refract.go\n # This is a translation from the function \"Saemundsson\" there.\n # I found experimentally that JPL Horizons clamps the angle to 1 degree below the horizon.\n # This is important because the 'refr' formula below goes crazy near hd = -5.11.\n hd = max(altitude, -1.0)\n refr = (1.02 / math.tan(math.radians((hd+10.3/(hd+5.11))))) / 60.0\n\n if refraction == Refraction.Normal and altitude < -1.0:\n # In \"normal\" mode we gradually reduce refraction toward the nadir\n # so that we never get an altitude angle less than -90 degrees.\n # When horizon angle is -1 degrees, the factor is exactly 1.\n # As altitude approaches -90 (the nadir), the fraction approaches 0 linearly.\n refr *= (altitude + 90.0) / 89.0\n else:\n # No refraction, or the refraction option is invalid.\n refr = 0.0\n return refr", "def thetaCal(opposite, adjacent):\n opposite = opposite * (-1)\n theta = math.atan2(opposite, adjacent) # * (180 / 3.1415)\n theta = math.degrees(theta)\n theta = round(theta, 2)\n\n if theta < 0:\n theta = 180 + theta\n theta = theta + 180\n theta = round(theta, 2)\n return theta", "def vect_from_lspe_to_earth (self, vector, time):\n\n position_coord = self.lspe_coordinates (time)\n # The following code has been optimized:\n # position_vector = coord_to_pointing (position_coord)\n # angle = np.arccos (np.dot (self.spin_axis_lspe (time), position_vector))\n # and is therefore a one-line assignment: \n angle = np.pi * 0.5 - position_coord[0]\n rot_axis = np.array ([-np.sin (position_coord[1]),\n np.cos (position_coord[1]),\n 0])\n\n return rotate (vector, angle, rot_axis)", "def offset_to_altaz(xoff, yoff, azimuth, altitude):\n #Deal with situations where offset = 0?\n\n d = sqrt(xoff*xoff+yoff*yoff)\n pos = np.where(d==0)\n d=1e-12 * u.deg # add a very small offset to prevent math errors\n\n q = arctan(d.to(u.rad).value)\n\n sq = sin(q)\n xp1 = xoff * (sq/d)\n yp1 = yoff * (sq/d)\n zp1 = cos(q)\n\n cx = sin(altitude)\n sx = 
cos(altitude)\n\n xp0 = cx*xp1 - sx*zp1\n yp0 = yp1\n zp0 = sx*xp1 + cx*zp1\n\n obj_altitude = arcsin(zp0)\n obj_altitude[pos]=altitude\n obj_azimuth = arctan2(yp0,-xp0) + azimuth\n obj_azimuth[pos] = azimuth\n\n #if obj_azimuth.value < 0.:\n # obj_azimuth += 2.*pi\n #elif obj_azimuth.value >= (2.*pi ):\n # obj_azimuth -= 2.*pi\n\n return obj_altitude,obj_azimuth", "def Rotation_ECL_EQD(time):\n rot = Rotation_EQD_ECL(time)\n return InverseRotation(rot)", "def get_surface_elevation(wind_lat, wind_lon):\n # Load the NetCDF file containing the geopotential of Europe.\n nc = Dataset(path_join(era5_data_dir, geopotential_file_name))\n \n # Read the variables from the netCDF file.\n geopot_lat = nc.variables['latitude'][:]\n geopot_lon = nc.variables['longitude'][:]\n \n \n # Check if wind and geopotential data use same grid.\n assert np.array_equal(geopot_lat, wind_lat) and np.array_equal(geopot_lon, wind_lon), \\\n \"Requested latitudes and/or longitudes do not correspond to those in the NetCDF file.\"\n\n geopot_z = nc.variables['z'][0, :, :]\n nc.close()\n\n surface_elevation = geopot_z/9.81\n print(\"Minimum and maximum elevation found are respectively {:.1f}m and {:.1f}m, removing those below zero.\"\n .format(np.amin(surface_elevation), np.amax(surface_elevation)))\n\n # Get rid of negative elevation values.\n for i, row in enumerate(surface_elevation):\n for j, val in enumerate(row):\n if val < 0.:\n surface_elevation[i, j] = 0.\n\n return surface_elevation", "def azimuth(self, right: GeoSpatialValue) -> ir.FloatingValue:\n return ops.GeoAzimuth(self, right).to_expr()", "def elevation(self):\n return self.altitude - self.heightAboveGround", "def _altaz_rotation(self, jd):\n R_lon = rot_z(- self.longitude.radians - jd.gast * TAU / 24.0)\n return einsum('ij...,jk...,kl...->il...', self.R_lat, R_lon, jd.M)", "def lspe_coordinates (self, time):\n\n return (self.base_lat,\n self.base_long\n + time * 2 * np.pi * (1 + 1 / self.rev_days) / SECONDS_PER_DAY)", "def parang (hourangle, declination, latitude):\n\n return -np.arctan2 (-np.sin (hourangle),\n np.cos (declination) * np.tan (latitude)\n - np.sin (declination) * np.cos (hourangle))", "def angle(self) -> float:\n ...", "def elevation(x, y):\n file = os.path.abspath(\"..\") + \"\\Shape\\Shape.vrt\"\n layer = gdal.Open(file)\n gt = layer.GetGeoTransform()\n rasterx = int((x - gt[0]) / gt[1])\n rastery = int((y - gt[3]) / gt[5])\n print('elevation =', layer.GetRasterBand(1).ReadAsArray(rasterx, rastery, 1, 1)[0][0], 'm above sea level')", "def _calc_delta_theta(self):\n\n # Difference between the vehicle angle and the trajectory angle\n next_index = self.index + 5\n\n while next_index >= len(self.x_trajectory):\n next_index = next_index - 1\n\n self.trajec_angle = math.atan2((self.y_trajectory[next_index]\n - self.y_trajectory[self.index]),\n (self.x_trajectory[next_index]\n - self.x_trajectory[self.index]))\n # to set trajec_angle between [0,2pi]\n if self.trajec_angle < 0:\n self.trajec_angle = math.pi + self.trajec_angle + math.pi\n\n self.delta_theta = self.trajec_angle - self.theta\n # if the difference is bigger than 180 is because\n # someone went throug a lap\n\n if self.delta_theta > math.pi:\n self.delta_theta = self.delta_theta - 2 * math.pi\n\n if self.delta_theta < -math.pi:\n self.delta_theta = self.delta_theta + 2 * math.pi\n\n return self.delta_theta", "def calc_incidence_angle():\n \n Delta_r, lat_r, Omega_r, Zenith_r, Azimuth_r, Elev_angle = solar_model()\n \n # Beta is equal to angle of tilted surface to horizontal (in 
radians)\n roof_slopes_west = section_coordinates()\n Beta_r = np.arctan(roof_slopes_west) \n incidence_angles_west = np.zeros(101)\n \n \n for i in range(0,len(roof_slopes_west)):\n incidence_angles_west[i] = np.arccos(np.sin(Delta_r)* np.sin(lat_r) * np.cos(Beta_r[i]) - np.sin(Delta_r) * np.cos(lat_r) * np.sin(Beta_r[i]) * np.cos(Azimuth_r) + np.cos(Delta_r) * np.cos(lat_r) * np.cos(Beta_r[i]) * np.cos(Omega_r) + np.cos(Delta_r) * np.sin(lat_r) * np.sin(Beta_r[i]) * np.cos(Azimuth_r) * np.cos(Omega_r) + np.cos(Delta_r) * np.sin(Beta_r[i]) * np.sin(Azimuth_r) * np.sin(Omega_r))", "def leaf_azimuth(size=1, phyllotactic_angle=180, phyllotactic_deviation=15, plant_orientation=0, spiral=False):\n if size == 1:\n return plant_orientation\n if spiral:\n main = numpy.arange(0, size) * phyllotactic_angle\n else:\n it = cycle((0, phyllotactic_angle))\n main = numpy.array([it.next() for i in xrange(size)])\n azim = plant_orientation + main + (numpy.random.random(size) - 0.5) * 2 * phyllotactic_deviation\n azim = azim % 360\n return numpy.where(azim <= 180, azim, azim - 360)", "def day_angle(day):\n return 2*pi*( day - 1 )/365", "def declination_degree(utc_datetime, TY = TY_default ): \n return 23.45 * math.sin((2 * math.pi / (TY)) * ((solar.GetDayOfYear(utc_datetime)) - 81))", "def earth_tide(theta, lamda, gtime):\n\n global dsz, dcz, dsl, dcl, ssz, scz, ssl, scl, dpar, sdist # bpos common block\n global h, k, l # love common block\n h = [0.6114, 0.2891, 0.175]\n k = [0.304, 0.09421, 0.043]\n l = [0.0832, 0.0145, 0.0103]\n\n global azt, azs # azimut common block\n global etmut # tdiff common block\n global moon # sunny common block\n moon = 0\n # hardwire these - you can only send it ONE droptime\n deltat = 1\n NPT = 1\n\n temp_time = num2date(gtime)\n\n YY = temp_time.year\n MO = temp_time.month\n DD = temp_time.day\n HH = temp_time.hour\n MM = temp_time.minute\n SS = temp_time.second\n # Initialize variables\n irl = 1\n iflag = 0\n ntotl = 1\n iget = [0, 0, 0, 0, 0, 0, 0] # ' !!!\n ispc = [0, 0, 0, 0] # ' !!!\n ntw = [1, 0, 0] # ' !!!\n ioptn = 't'\n ielement = 0\n # \tdata statements for input and output unit numbers (on terminal I/O)\n inun = 5\n ioun = 6\n nptpb = 6\n\n yr1 = YY - 1900\n day1 = date2num(datetime(YY, MO, DD))\n # \tfind times in hours from 0 hr, 1 jan 1900\n # matlab:\n ts = (\n SS / 3600\n + MM / 60\n + HH\n + 24 * (day1 - 1)\n + 8760 * yr1\n + 24 * np.fix((yr1 - 1) / 4)\n )\n # python:\n dj = date_to_julian_day(datetime(YY, MO, DD))\n djref = date_to_julian_day(datetime(1899, 12, 31, 0, 0, 0))\n delta_dj = (\n dj - djref\n ) # difference in days from current date (0hr) to 0hr, 1 jan 1900\n delta_djhr = float(delta_dj) * 24.0 + HH - 12.0 + MM / 60.0 + SS / 3600.0\n te = ts + (NPT - 1) * deltat / 3600\n d = deltat / 3600\n # terms=(te-ts)/d + 1\n terms = NPT\n\n # done asking questions - begin execution\n i = 1\n tt = ts\n sph(theta, lamda, 0)\n etmut = 41.184 + yr1 - 70\n # matlab:\n # t = (tt+12 + (etmut/3600))/876600\n t = (delta_djhr + etmut / 3600) / 876600\n # t is ephemeris time in julian centuries from 12 hr 0 jan 1900\n ephem(t)\n\n # calculate normalized gravity tides\n [grav, tilt, strain, gdc] = elastd(ntw)\n\n gravtide = 1.0e5 * grav\n # convert m/s² to mgal: 1m/s² = 100 gal = 100 000 mgal\n\n iflag = 1\n\n iterms = np.fix(terms)\n i = 1\n return gravtide", "def pointing_pos_lspe (self, time):\n\n return (self.elevation, self.start_angle + time * self.omega_rot)", "def zodiac(cls, tee):\n return quotient(float(cls.solar_longitude(tee)), 30) + 1", "def 
sidereal_solar_longitude(tee):\n return mod(Solar.solar_longitude(tee) - Astro.precession(tee) + SIDEREAL_START, 360)", "def azimuth(poly):\n num = len(poly) - 1\n vec = unit_normal(poly[0], poly[1], poly[num])\n vec_azi = np.array([vec[0], vec[1], 0])\n vec_n = np.array([0, 1, 0])\n # update by Santosh\n # angle2vecs gives the smallest angle between the vectors\n # so for a west wall angle2vecs will give 90\n # the following 'if' statement will make sure 270 is returned\n x_vector = vec_azi[0]\n if x_vector < 0:\n return 360 - angle2vecs(vec_azi, vec_n)\n else:\n return angle2vecs(vec_azi, vec_n)", "def solve_atmospheric_entry(self, radius, velocity, density, strength, angle,\n init_altitude=100e3, ts=0.01, dt=0.05, tmax=120, radians=False):\n # RK4 solver\n def RK4(f, u0, t0, t_max, dt, args=()):\n \"\"\" Implement RK4 time-stepping to solve du/dt = f(t, u), given the RHS vector f,\n initial condition u0, start time t0, termination time t_max, and the timestep dt\n \"\"\"\n u = np.array(u0)\n t = np.array(t0)\n u_all = [u0]\n t_all = [t0]\n while t+dt < t_max:\n k1 = dt*f(t, u, *args)\n k2 = dt*f(t + 0.5*dt, u + 0.5*k1, *args)\n k3 = dt*f(t + 0.5*dt, u + 0.5*k2, *args)\n k4 = dt*f(t + dt, u + k3, *args)\n u = u + (1/6)*(k1 + 2*k2 + 2*k3 + k4)\n u_all.append(u)\n t = t + dt\n t_all.append(t)\n if u[3] <= 0:\n break # terminate at ground\n return np.array(u_all), np.array(t_all)\n\n # initial condition\n v0 = velocity\n m0 = (4/3) * np.pi * radius**3 * density\n if radians:\n theta0 = angle\n else:\n theta0 = angle * np.pi / 180\n z0 = init_altitude\n x0 = 0\n r0 = radius\n state0 = np.array([v0, m0, theta0, z0, x0, r0])\n\n # run solver\n t0 = 0\n sol = RK4(self.system, state0, t0, tmax, ts, args=(strength, density))\n\n # convert angles back to degrees if specfied at input\n if not radians:\n sol[0][:, 2] = sol[0][:, 2] * 180 / np.pi\n\n # interpolate results at the output timestep\n if dt == ts:\n t_out = sol[1].T\n sol_out = sol[0].T\n else:\n t_sol = sol[1]\n N = floor(t_sol[-1] * 0.9999 / dt)\n t_out = np.hstack([np.linspace(t_sol[0], N*dt, N+1), t_sol[-1]])\n sol_out = np.array([np.interp(t_out, t_sol, sol[0][:, j])\n for j in range(len(state0))])\n\n return pd.DataFrame({'velocity': sol_out[0, :],\n 'mass': sol_out[1, :],\n 'angle': sol_out[2, :],\n 'altitude': sol_out[3, :],\n 'distance': sol_out[4, :],\n 'radius': sol_out[5, :],\n 'time': t_out}, index=range(len(t_out)))", "def azimuth(source):\n srcAzEl = subarrayControl.s.azel(source, 0.0);\n return srcAzEl[0];", "def angle_diff(self, i):\n (h0, k0, l0) = [int(np.rint(x)) for x in self.hkl(i)]\n polar0 = self.unit_cell.two_theta((h0, k0, l0), self.wavelength)\n return np.abs(self.polar(i) - polar0)", "def solar_model():\n \n latitude, longitude, timezone, elevation = location_input()\n year, time = time_input()\n\n lat_r = latitude/180*np.pi\n lon_r = longitude/180*np.pi \n n = 0\n for i in range(1900,year):\n if i%4 == 0:\n n += 366\n else:\n n+=365\n JulD = n + time + 2415018.5 - (timezone)/24\n LT = time - int(time)\n JC = (JulD - 2451545) / 36525\n x = 46.815 + JC * (0.00059 - JC * 0.001813)\n M_OE = 23 + (26 + (21.448 - JC * x) / 60) / 60\n EEO = 0.016708634 - JC * (0.000042037 + 0.0000001267 * JC)\n GMAS = 357.52911 + JC * (35999.05029 - 0.0001537 * JC)\n GMAS_r = m.radians(GMAS)\n GMLS = (280.46646 + JC * (36000.76983 + JC * 0.0003032))%360\n GMLS_r = m.radians(GMLS)\n Obliq_C = M_OE + 0.00256 * np.cos((125.04 - 1934.136 * JC) / 180 * np.pi)\n Obliq_C_r = m.radians(Obliq_C)\n SEC = np.sin(GMAS_r) * (1.914602 - JC * 
(0.004817 + 0.000014 * JC)) + np.sin(2 * GMAS_r) * (0.019993 - 0.000101 * JC) + np.sin(3 * GMAS_r) * 0.000289\n STL = GMLS + SEC\n SAL = STL - 0.00569 - 0.00478 * np.sin((125.04 - 1934.136 * JC) / 180 * np.pi)\n SAL_r = m.radians(SAL)\n sin_Delta = np.sin(Obliq_C_r) * np.sin(SAL_r)\n Delta_r = np.arcsin(sin_Delta) #in radians \n Var_y = np.tan((Obliq_C / 2) / 180 * np.pi) * np.tan((Obliq_C / 2) / 180 * np.pi)\n EOT_prime = Var_y * np.sin(2 * GMLS_r) - 2 * EEO * np.sin(GMAS_r) + 4 * EEO * Var_y * np.sin(GMAS_r) * np.cos(2 * GMLS_r) - 0.5 * Var_y * Var_y * np.sin(4 * GMLS_r) - 1.25 * EEO * EEO * np.sin(2 * GMAS_r)\n EOT = 4 * EOT_prime / np.pi * 180 \n TST = (LT * 1440 + EOT + 4 * longitude - 60 * timezone)%1440\n if TST / 4 < 0:\n Omega = TST/4+180\n else:\n Omega = TST/4 - 180 \n Omega_r = m.radians(Omega)\n \n cos_Zenith = np.sin(lat_r) * np.sin(Delta_r) + np.cos(lat_r) * np.cos(Delta_r) * np.cos(Omega_r)\n Zenith_r = np.arccos(cos_Zenith) #in radians\n Aprime_r = np.arccos((np.sin(lat_r) * np.cos(Zenith_r) - np.sin(Delta_r)) / (np.cos(lat_r) * np.sin(Zenith_r)))\n Aprime = Aprime_r / np.pi * 180\n if Omega > 0:\n Azimuth = (Aprime + 180) % 360 #in degrees\n else:\n Azimuth = (540 - Aprime) % 360 #in degrees \n Azimuth_r = Azimuth / 180 * np.pi\n Elev_angle = (np.pi)/2 - Zenith_r\n\n \n # calculate incidence angle\n # Beta is equal to angle of tilted surface to horizontal (in radians)\n Beta = 45 # in degrees\n Beta_r = m.radians(Beta)\n \n cos_incidence = np.sin(Delta_r)* np.sin(lat_r) * np.cos(Beta_r) - np.sin(Delta_r) * np.cos(lat_r) * np.sin(Beta_r) * np.cos(Azimuth_r) + np.cos(Delta_r) * np.cos(lat_r) * np.cos(Beta_r) * np.cos(Omega_r) + np.cos(Delta_r) * np.sin(lat_r) * np.sin(Beta_r) * np.cos(Azimuth_r) * np.cos(Omega_r) + np.cos(Delta_r) * np.sin(Beta_r) * np.sin(Azimuth_r) * np.sin(Omega_r) \n incidence_ang_r = np.arccos(cos_incidence)\n \n return Delta_r, lat_r, Omega_r, Zenith_r, Azimuth_r, Elev_angle", "def get_angel(coordinates):\n x = coordinates[0]\n y = coordinates[1]\n\n if x == 0:\n if y < 0:\n return 0\n else:\n return math.pi\n\n if y == 0:\n if x < 0:\n return (3 * math.pi) / 2\n else:\n return math.pi / 2\n\n if x >= 0:\n if y >= 0:\n return ((math.pi / 2) + math.atan(abs(y)/abs(x)))\n else:\n return math.atan(abs(x)/abs(y))\n else:\n if y >= 0:\n return math.pi + math.atan(abs(x)/abs(y))\n else:\n return (3/2) * math.pi + math.atan(abs(y)/abs(x))", "def get_azimuth(self, degrees=True):\n if degrees:\n return math.degrees(self.current_location.az)\n else:\n return self.current_location.az", "def EclipticLongitude(body, time):\n if body == Body.Sun:\n raise InvalidBodyError()\n hv = HelioVector(body, time)\n eclip = Ecliptic(hv)\n return eclip.elon", "def equation_of_time(cls, date):\n offset = cls.sine(cls.mean_position(date, cls.ANOMALISTIC_YEAR))\n equation_sun = (offset * angle(57, 18, 0) * (14/360 - (abs(offset) / 1080)))\n return ((cls.daily_motion(date) / 360) * (equation_sun / 360) * cls.SIDEREAL_YEAR)", "def diffuse_underclear(latitude_deg, longitude_deg, utc_datetime, elevation = elevation_default, \n temperature_celsius = 25, pressure_millibars = 1013.25, TL=TL_default): \n DT = ((-21.657) + (41.752 * (TL)) + (0.51905 * (TL) * (TL)))\n altitude = solar.GetAltitude(latitude_deg, longitude_deg,utc_datetime, elevation, temperature_celsius,pressure_millibars)\n\n return mean_earth_sun_distance(utc_datetime) * DT * altitude", "def scalar_earth_angle( lat1, lon1, lat2, lon2):\n theta1 = lat1 *dtor\n phi1 = lon1 *dtor\n theta2 = lat2 * dtor\n phi2 = lon2 * dtor\n 
p1 = numpy.vstack((cos(theta1)*cos(phi1),cos(theta1)*sin(phi1),sin( theta1))).T\n p2 = numpy.vstack((cos(theta2)*cos(phi2), cos( theta2)* sin( phi2), sin( theta2))).T\n dsq = ((p1-p2)**2).sum(-1)\n return numpy.arccos((2 -dsq)/2.)/dtor", "def altaz2radec(az, alt, t):\n if not isinstance(t, Time):\n try:\n t = Time(t)\n except:\n raise ValueError(\"\\n\\t=== Time syntax should be 'YYYY-MM-DD hh:mm:ss' ===\") \n frame = coord.AltAz(obstime=t, location=nancay())\n altaz = coord.SkyCoord(az*u.deg, alt*u.deg, frame=frame)\n radec = altaz.transform_to(coord.FK5(equinox='J2000'))\n return radec.ra.rad, radec.dec.rad", "def cal_eta(self):\n\n if not self.check_def(['E','px','py','pz']):\n sys.exit('Particle error: Quadri impulsion not define (error for eta routine)')\n \n theta=math.acos(self.pz/math.sqrt(self.px**2+self.py**2+self.pz**2))\n self.eta=-math.log(math.tan(theta/2.0))", "def altitude_speed(self, degrees = True):\n return self.angularSpeed(self.future_location.al, self.old_location.al)", "def getAngle(self):\n return self.vector.angle", "def angle(self) -> int:", "def path(e,a,q,i,period, timediff, offset):\n b = a*np.sqrt(1-e**2)\n c=np.sqrt(a**2-b**2)\n #t = ((timediff*np.pi*b)/period)\n #t1 = ((timediff)*np.pi*b)/period\n #print(t1-t)\n #t1 = (((timediff*np.pi*b)/period) + e*np.sin(t))%(2*np.pi)\n #print(t)\n t1= angle_helper(((timediff % period)/period)*2*np.pi,e)\n theta=0 #need a real theta value \n xcoord = a*np.cos(t1+offset)*np.cos(theta)-b*np.sin(t1+offset)*np.sin(theta)-c*np.cos(theta) #c term is to adjust for the focus\n ycoord = a*np.cos(t1+offset)*np.sin(theta)*np.cos(i)+b*np.sin(t1+offset)*np.cos(theta)*np.cos(i)-c*np.sin(theta)*np.cos(i)\n zcoord = a*np.cos(t1+offset)*np.sin(theta)*np.sin(i)+b*np.sin(t1+offset)*np.cos(theta)*np.sin(i)-c*np.sin(theta)*np.sin(i)\n \n \n return xcoord, ycoord, zcoord", "def getAngle(self):\n return self.articulateEncoder.getDistance()+self.angleOffset", "def getEta(self, pose):\n vector_x = np.cos(self.ori) * (pose.x - self.pos.x) + np.sin(self.ori) * (pose.y - self.pos.y)\n vector_y = -np.sin(self.ori) * (pose.x - self.pos.x) + np.cos(self.ori) * (pose.y - self.pos.y)\n eta = math.atan2(vector_y, vector_x)\n return eta", "def sunlongitude(time):\n B0 = 36000.7695\n C0 = 280.4659\n # fmt: off\n A = np.array([19147e-4, 200e-4, 48e-4, 20e-4, 18e-4, 18e-4, \\\n 15e-4, 13e-4, 7e-4, 7e-4, 7e-4, 6e-4, \\\n 5e-4, 5e-4, 4e-4, 4e-4])\n B = np.array([35999.050, 71998.1, 1934, 32964, 19, \\\n 445267, 45038, 22519, 65929, 3035, \\\n 9038, 33718, 155, 2281, 29930, \\\n 31557])\n C = np.array([267.520, 265.1, 145, 158, 159, 208, \\\n 254., 352, 45, 110, 64, 316, \\\n 118., 221, 48, 161])\n # fmt: on\n RAD = 0.0174532925199433\n A[0] = 1.9147 - 0.0048 * time\n tempb = (B * time + C) * RAD\n amp = A * np.cos(tempb)\n sunlon = np.sum(amp)\n sunlon = (sunlon + B0 * time + C0) * RAD\n return sunlon", "def get_azimuth(self, p, az):\n az.value = self._get_azimuth(p, az.value)", "def angle(self):\n return atan2(self.v.y, self.v.x)", "def parse_azimuth_elevation(filename):\n match = REGEX.match(filename)\n return int(match.group(1)), int(match.group(2))", "def angle(self):\n return angle(self.force, self.forceXYZ, self.excited_axis,\n self.distance, self.distanceXYZ)" ]
[ "0.71530664", "0.67977107", "0.67431724", "0.6220381", "0.61309683", "0.6082073", "0.6003304", "0.5945914", "0.5920471", "0.58973986", "0.5858497", "0.58510685", "0.58294576", "0.579033", "0.57864755", "0.57316357", "0.5688304", "0.56844896", "0.56828004", "0.56804633", "0.5671778", "0.5669164", "0.56561786", "0.5654852", "0.5646939", "0.56376356", "0.56368685", "0.56348133", "0.56116825", "0.56085527", "0.56065714", "0.55959934", "0.5587656", "0.55820113", "0.55731606", "0.55611455", "0.555582", "0.55508935", "0.5548347", "0.5547848", "0.5528379", "0.5514326", "0.5507361", "0.5505419", "0.5501826", "0.5494058", "0.5485265", "0.54774", "0.5470609", "0.5463211", "0.54447156", "0.54443717", "0.541046", "0.5409892", "0.5404317", "0.5397656", "0.5395752", "0.5394808", "0.53898203", "0.5388496", "0.5357156", "0.5351554", "0.53396225", "0.5336658", "0.53300303", "0.5325911", "0.53208", "0.5314729", "0.53139865", "0.53135717", "0.53096485", "0.52958375", "0.52834845", "0.5279573", "0.52762663", "0.5273967", "0.5268291", "0.5262842", "0.5257204", "0.5255028", "0.52547467", "0.5252761", "0.5242228", "0.5233508", "0.52306324", "0.52287245", "0.5227326", "0.52227527", "0.52076", "0.52061784", "0.52047634", "0.51854515", "0.5178779", "0.5177113", "0.51669705", "0.51604533", "0.5156272", "0.5154766", "0.51508605", "0.5149578" ]
0.70600396
1
Access to each timezone group. For example London is in timezone group Europe. Attribute lookup is case insensitive
def __getattr__(self, key):
    key = str(key).lower().encode('utf-8')
    for name, value in self._groups.items():
        if name == key:
            return value
    raise AttributeError('Group \'%s\' not found' % key)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_athlete_zones(self):\n pass", "def grouping_timezone(self):\n return tz.gettz(self._summariser.grouping_time_zone)", "def associate_timezones_to_countries(self):\n\t\t\n\t\tresult = {}\n\t\twith open(\"/usr/share/zoneinfo/zone.tab\", \"r\") as f:\n\t\t\tfor line in f.readlines():\n\t\t\t\tif line[0] == \"#\": continue\n\t\t\t\t\n\t\t\t\tline = line.replace(\"\\n\",\"\").split(\"\\t\")\n\t\t\t\tif not line[0] in result: result[line[0]] = line[2]\n\t\t\n\t\treturn result", "def time_zones(self) -> localedata.LocaleDataDict:\n return self._data['time_zones']", "def get_timezone_list():\n return pytz.country_timezones('US')", "def test_all_time_zones_choices(self):\n # Obtain a timezone that is in pytz.all_timezones, but not in pytz.common_timezones\n timezones = set(pytz.all_timezones) - set(pytz.common_timezones)\n timezone = timezones.pop()\n\n choices = {\n choice[0]\n for choice in TimeZoneField.get_all_choices()\n }\n\n self.assertTrue(timezone in choices)", "def GetTimezones():\n return GetDataFromCsvFile('timezones.csv')", "def test_aws_service_api_availability_zones_get(self):\n pass", "def timezone():\n \n pass", "def test_timezones(self):\n a_user = User.objects.create()\n user = VSBUser.objects.create(user=a_user)\n\n today_datetime = timezone.datetime.today()\n today_datetime = timezone.datetime(year=today_datetime.year, month=today_datetime.month, day=today_datetime.day)\n\n tomorrow_late_EST = timezone.make_aware(today_datetime + timezone.timedelta(hours=23), timezone=pytz.timezone('US/Eastern'))\n tomorrow_last_UTC = (tomorrow_late_EST + timezone.timedelta(minutes=10)).astimezone(pytz.utc)\n ETC_event = CalenderEvent.objects.create(user=user, time=tomorrow_late_EST)\n UTC_event = CalenderEvent.objects.create(user=user, time=tomorrow_last_UTC)\n\n received = util.bucket_calenderevents(user.calenderevent_set)\n\n self.assertEqual(received, [[ETC_event, UTC_event]], msg=\"CalenderEvents.timezones: Timezones failed to align.\")", "def Timezones():\n return sorted(list(PytzCache._zmap.values()))", "def test_common_time_zones_choices(self):\n # Obtain a timezone that is in pytz.all_timezones, but not in pytz.common_timezones\n timezones = set(pytz.all_timezones) - set(pytz.common_timezones)\n timezone = timezones.pop()\n\n choices = {\n choice[0]\n for choice in TimeZoneField.get_common_choices()\n }\n self.assertTrue(timezone not in choices)", "def list_zone_names():\n get_name = lambda a: a.get_name()\n return map(get_name, list_zones())", "def set_time_by_timezone(df):\n df = set_city_time_by_timezone(df, 1078, 3)\n df = set_city_time_by_timezone(df, 22390, 4)\n df = set_city_time_by_timezone(df, 22430, 4)\n df = set_city_time_by_timezone(df, 22438, 5)\n return df", "def test_list_zones_by_admin_group_name(list_zone_context, shared_zone_test_context):\n result = shared_zone_test_context.list_zones_client.list_zones(name_filter=f\"list-zones-group{shared_zone_test_context.partition_id}\", search_by_admin_group=True, status=200)\n retrieved = result[\"zones\"]\n\n assert_that(retrieved, has_length(5))\n assert_that(retrieved, has_item(has_entry(\"name\", list_zone_context.search_zone1[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"name\", list_zone_context.search_zone2[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"name\", list_zone_context.search_zone3[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"name\", list_zone_context.non_search_zone1[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"name\", 
list_zone_context.non_search_zone2[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"adminGroupName\", list_zone_context.list_zones_group[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"backendId\", \"func-test-backend\")))\n\n assert_that(result[\"nameFilter\"], is_(f\"list-zones-group{shared_zone_test_context.partition_id}\"))", "def get_zones(self, context):\n # handling zones method in RPC\n response = self.dns_manager.get_zones(context)\n return response", "def get_zone(self, conn, host):\n fl = 'name=\"%s\"' % host\n request = conn.instances().aggregatedList(project=PROJECT, filter=fl)\n \twhile request is not None:\n \t\tresponse = request.execute()\n \t\tzones = response.get('items', {})\n \t\tfor zone in zones.values():\n \t\t\tfor inst in zone.get('instances', []):\n \t\t\t\tif inst['name'] == host:\n \t\t\t\t\treturn inst['zone'].split(\"/\")[-1]\n \t\trequest = conn.instances().aggregatedList_next(previous_request=request, previous_response=response)\n \traise Exception(\"Unable to determin the zone for instance %s\" % (host))", "def test_list_zones_by_admin_group_name_with_wildcard(list_zone_context, shared_zone_test_context):\n result = shared_zone_test_context.list_zones_client.list_zones(name_filter=f\"*group{shared_zone_test_context.partition_id}\", search_by_admin_group=True, status=200)\n retrieved = result[\"zones\"]\n\n assert_that(retrieved, has_length(5))\n assert_that(retrieved, has_item(has_entry(\"name\", list_zone_context.search_zone1[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"name\", list_zone_context.search_zone2[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"name\", list_zone_context.search_zone3[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"name\", list_zone_context.non_search_zone1[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"name\", list_zone_context.non_search_zone2[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"adminGroupName\", list_zone_context.list_zones_group[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"backendId\", \"func-test-backend\")))\n\n assert_that(result[\"nameFilter\"], is_(f\"*group{shared_zone_test_context.partition_id}\"))", "def display_tzname(self):\n return settings.TIME_ZONES_BY_LANG.get(self.language, settings.TIME_ZONE)", "def test_calendar_query_timezone(self):\n TimezoneCache.create()\n self.addCleanup(TimezoneCache.clear)\n\n tzid1 = \"Etc/GMT+1\"\n tz1 = Component(None, pycalendar=readVTZ(tzid1))\n\n calendar_properties = (\n davxml.GETETag(),\n caldavxml.CalendarData(),\n )\n\n query_timerange = caldavxml.TimeRange(\n start=\"%04d1001T000000Z\" % (DateTime.getToday().getYear(),),\n end=\"%04d1101T000000Z\" % (DateTime.getToday().getYear(),),\n )\n\n query = caldavxml.CalendarQuery(\n davxml.PropertyContainer(*calendar_properties),\n caldavxml.Filter(\n caldavxml.ComponentFilter(\n caldavxml.ComponentFilter(\n query_timerange,\n name=\"VEVENT\",\n ),\n name=\"VCALENDAR\",\n ),\n ),\n caldavxml.TimeZone.fromCalendar(tz1),\n )\n\n def got_xml(doc):\n if not isinstance(doc.root_element, davxml.MultiStatus):\n self.fail(\"REPORT response XML root element is not multistatus: %r\" % (doc.root_element,))\n\n return self.calendar_query(query, got_xml)", "def compute_zones(self):\n path = '/os-availability-zone/detail'\n res = self.compute.call(path, 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack availability zone: %s' % truncate(res))\n return res[0]['availabilityZoneInfo']", "def get_timezones() 
-> set[str]:\n return available_timezones() - UNAVAILABLE_TIMEZONES", "def public_get_time_zones(\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = PublicGetTimeZones.create(\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def get_members(self, context):\n zone_obj = self.dns_manager.get_members(context)\n return zone_obj", "def list_zones(self):\n\n return [zone[\"zone\"] for zone in list(self._zones.values())]", "def test_account_returns_list_of_zones(self):\n account = Account('test-account')\n a_zone = Zone('azone.com')\n b_zone = Zone('bzone.com')\n c_zone = Zone('czone.com')\n account.add_zone(a_zone)\n account.add_zone(b_zone)\n account.add_zone(c_zone)\n\n self.assertDictEqual(account.zones, {\n 'azone.com': a_zone,\n 'bzone.com': b_zone,\n 'czone.com': c_zone,\n })", "def list_zones(self, kwargs):\n verbose = kwargs.get(\"verbose\", False)\n\n if not verbose:\n attributes = [\"dc\", \"objectClass\"]\n else:\n attributes = ALL\n\n self.display(\n self.engine.query(\n self.engine.ZONES_FILTER(),\n attributes, base=','.join([\"CN=MicrosoftDNS,DC=DomainDNSZones\", self.engine.base_dn])\n ),\n verbose\n )", "def _map_timezones():\n tz_map = {}\n todo = HAYSTACK_TIMEZONES_SET.copy()\n for full_tz in pytz.all_timezones:\n # Finished case:\n if not bool(todo): # pragma: no cover\n # This is nearly impossible for us to cover, and an unlikely case.\n break\n\n # Case 1: exact match\n if full_tz in todo:\n tz_map[full_tz] = full_tz # Exact match\n todo.discard(full_tz)\n continue\n\n # Case 2: suffix match after '/'\n if '/' not in full_tz:\n continue\n\n (prefix, suffix) = full_tz.split('/',1)\n # Case 2 exception: full timezone contains more than one '/' -> ignore\n if '/' in suffix:\n continue\n\n if suffix in todo:\n tz_map[suffix] = full_tz\n todo.discard(suffix)\n continue\n\n return tz_map", "def meta_zones(self) -> localedata.LocaleDataDict:\n return self._data['meta_zones']", "def get_availability_zones(self, context, filters=None, fields=None,\n sorts=None, limit=None, marker=None,\n page_reverse=False):", "def get_tz(self):\n for _, element in etree.iterparse(self.source):\n if element.tag == TIMEZONE:\n return float(element.text or 0.0)", "async def get_zones(self) -> list[str] | None:\n data: list[dict[str, str]] | None = await self.api.get(self._endpoint())\n\n if data is None:\n return None\n\n return [zone[\"name\"] for zone in data]", "def get_zones(self, latitude, longitude):\n result = self.__request(\n \"GET\",\n \"https://api.voiapp.io/v1/zones?lat={}&lng={}\".format(latitude, longitude),\n )\n if result and \"zones\" in result:\n return result[\"zones\"]", "def test_tzinfo(self):\n if tzset is None:\n raise SkipTest(\"Platform cannot change timezone; unable to verify offsets.\")\n\n def testForTimeZone(name, expectedOffsetDST, expectedOffsetSTD):\n setTZ(name)\n\n localDST = mktime((2006, 6, 30, 0, 0, 0, 4, 181, 1))\n localSTD = mktime((2007, 1, 31, 0, 0, 0, 2, 31, 0))\n\n tzDST = FixedOffsetTimeZone.fromLocalTimeStamp(localDST)\n tzSTD = FixedOffsetTimeZone.fromLocalTimeStamp(localSTD)\n\n self.assertEqual(tzDST.tzname(localDST), \"UTC{}\".format(expectedOffsetDST))\n self.assertEqual(tzSTD.tzname(localSTD), \"UTC{}\".format(expectedOffsetSTD))\n\n self.assertEqual(tzDST.dst(localDST), timedelta(0))\n self.assertEqual(tzSTD.dst(localSTD), 
timedelta(0))\n\n def timeDeltaFromOffset(offset):\n assert len(offset) == 5\n\n sign = offset[0]\n hours = int(offset[1:3])\n minutes = int(offset[3:5])\n\n if sign == \"-\":\n hours = -hours\n minutes = -minutes\n else:\n assert sign == \"+\"\n\n return timedelta(hours=hours, minutes=minutes)\n\n self.assertEqual(\n tzDST.utcoffset(localDST), timeDeltaFromOffset(expectedOffsetDST)\n )\n self.assertEqual(\n tzSTD.utcoffset(localSTD), timeDeltaFromOffset(expectedOffsetSTD)\n )\n\n addTZCleanup(self)\n\n # UTC\n testForTimeZone(\"UTC+00\", \"+0000\", \"+0000\")\n # West of UTC\n testForTimeZone(\"EST+05EDT,M4.1.0,M10.5.0\", \"-0400\", \"-0500\")\n # East of UTC\n testForTimeZone(\"CEST-01CEDT,M4.1.0,M10.5.0\", \"+0200\", \"+0100\")\n # No DST\n testForTimeZone(\"CST+06\", \"-0600\", \"-0600\")", "def availability_zones(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"availability_zones\")", "def zones(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"zones\")", "def zones(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"zones\")", "def get_time_zones(\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = GetTimeZones.create(\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def test_get_activity_zones(self):\n zones = self.client.get_activity_zones(99895560)\n print zones\n self.assertEquals(1, len(zones))\n self.assertIsInstance(zones[0], model.PaceActivityZone)\n\n # Indirectly\n activity = self.client.get_activity(99895560)\n self.assertEquals(len(zones), len(activity.zones))\n self.assertEquals(zones[0].score, activity.zones[0].score)", "def _lsInTimezone(self, timezone, stat):\n # Set the timezone to a well-known value so the timestamps are\n # predictable.\n os.environ['TZ'] = timezone\n time.tzset()\n return ls.lsLine('foo', stat)", "def get_zonerecords(self):\n\n response = self.call(method='getZoneRecords', args=[self.domainname, self.subdomain])\n records = []\n for r in response:\n record = self.zonerecord(\n domain=self.domainname,\n subdomain=self.subdomain,\n record_id=r['record_id'],\n type=r['type'],\n ttl=r['ttl'],\n priority=r['priority'],\n rdata=r['rdata']\n )\n records.append(record)\n return records", "def __getitem__(self, key):\n \n key = str(key).lower().encode('utf-8')\n for group in self._groups.values():\n try:\n return group[key]\n except KeyError:\n pass\n\n raise KeyError('Unrecognised city name - %s' % key)", "def timezone(self):\n tz_data = self._router_request(\n self._make_request_data(\n 'getTimeZone',\n data=dict()\n )\n )\n\n return tz_data['data']", "async def public_get_time_zones_async(\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = PublicGetTimeZones.create(\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def getVampAll(self):\n for i in range(1, 4):\n self.getAllZoneAllParam(str(i))\n\t#return(self._pzones)", "def list_zones(pattern=None):\n zlist = []\n cmd = [CMD_ZONEADM, \"list\", \"-pc\"]\n proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)\n stdout, stderr = proc.communicate()\n ret = proc.returncode\n\n if ret:\n raise 
OSError(\"%s exited with exit code %d. stderr: '%s.'\" %\n (str(cmd), ret, stderr))\n\n def set_attr(zone, attr, line):\n \"\"\"just a helper function \"\"\"\n zone.set_attr(attr, line[attr])\n\n # line format:\n # zoneid:zonename:state:zonepath:uuid:brand:ip-type:r/w:file-mac-profile\n for line in str(stdout).split(\"\\n\"):\n if not line:\n continue\n line = line.split(\":\")\n\n if pattern and not(re.match(pattern, line[ZONE_ENTRY['ZNAME']])):\n continue # skip entries that does not pass regexp\n\n tmp_zone = Zone(line[ZONE_ENTRY['ZNAME']])\n for item in ZONE_ENTRY.values():\n set_attr(tmp_zone, item, line)\n\n zlist.append(tmp_zone)\n\n return zlist", "def __loadTimezones(self):\n try:\n fd = open(ZONEINFO_FILE)\n content = fd.readlines()\n fd.close()\n\n result = []\n\n for line in content:\n if line.startswith(\"#\"):\n continue\n\n parts = line.strip().split()\n\n if len(parts) < 3:\n continue\n\n result.append(parts[2])\n\n result.sort()\n\n return result\n except Exception as e:\n self.__logger.critical(\"Failed to load Timezones list\")\n raise ZKVMError(\"POSTINSTALL\", \"TIMEZONE\", \"TIMEZONE_LIST\")", "def sc_dns_zone_aliases(self):\n return self._sc_dns_zone_aliases", "def _get_group_attributes(self, index):\n\n g_case = (None, None, -1)\n for group in self.group_slots:\n if group[0] == index:\n g_case = group[1]\n break\n return g_case", "def get_geo_data(self):\n # Get all countries and create a dictionary by name\n countries_shp = shpreader.natural_earth(\n resolution='10m',\n category='cultural',\n name='admin_0_countries',\n )\n self.countries = list(shpreader.Reader(countries_shp).records())\n self.countries_by_name = {}\n self.countries_by_iso_a2 = {}\n for country in shpreader.Reader(countries_shp).records():\n self.countries_by_name[country.attributes['NAME_LONG']] = country\n self.countries_by_iso_a2[country.attributes['ISO_A2']] = country\n\n # Get all states and create a dictionary by name\n states_provinces_shp = shpreader.natural_earth(\n resolution='50m',\n category='cultural',\n name='admin_1_states_provinces',\n )\n# full_list = list(shpreader.Reader(states_provinces_shp).records())\n# self.states = [x for x in full_list if x.attributes['type_en'] == 'State']\n self.states = list(shpreader.Reader(states_provinces_shp).records())\n self.states_by_name = {}\n for state in self.states:\n self.states_by_name[state.attributes['name']] = state\n\n # Get all timezones and create a dictionary by name\n timezones_shp = shpreader.natural_earth(\n resolution='10m',\n category='cultural',\n name='time_zones',\n )\n self.timezones = list(shpreader.Reader(timezones_shp).records())\n self.timezones_by_name = {}\n for timezone in shpreader.Reader(timezones_shp).records():\n # Try to get the actual name. 
Something like `Europe/Berlin`\n timezone_name = timezone.attributes['tz_name1st']\n # If there is no name, we default to the utc offset name `-5` `+4.5`\n if timezone_name == '':\n timezone_name = timezone.attributes['name']\n\n if timezone_name not in self.timezones_by_name.keys():\n self.timezones_by_name[timezone_name] = timezone", "def time_zone_name(self):\n return icemac.addressbook.preferences.utils.get_time_zone_name()", "def time_zone(self):\n # type: () -> string_types\n return self._time_zone", "async def test_has_group_address_localtime(self):\n xknx = XKNX()\n self.datetime = DateTime(\n xknx,\n \"TestDateTime\",\n group_address=\"1/2/3\",\n group_address_state=\"1/2/4\",\n localtime=True,\n )\n assert self.datetime.has_group_address(GroupAddress(\"1/2/3\"))\n # group_address_state ignored when using localtime\n assert not self.datetime.has_group_address(GroupAddress(\"1/2/4\"))", "def timezone(self):\n return self.data.get(\"timezone\")", "def zones(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"zones\")", "def alerts_forecast_zone(self: SimpleNWS) -> List[Dict[str, Any]]:\n return self._alerts_forecast_zone", "def get_effective_hostgroups(self):\n # TODO: This function is incomplete and untested\n # TODO: Need error handling when object defines hostgroups but hostgroup does not exist\n result = []\n hostgroup_list = []\n # Case 1 and Case 2:\n tmp = self._get_effective_attribute('hostgroups')\n for i in tmp.split(','):\n if i == '': continue\n i = Hostgroup.objects.get_by_shortname(i)\n if not i in result: result.append(i)\n '''\n # Case 1\n if self.has_key('hostgroups'):\n grp = self['hostgroups']\n grp = grp.split(',')\n for i in grp:\n i = i.strip('+')\n i = Hostgroup.objects.get_by_shortname(i)\n if not i in result: result.append(i)\n # Case 2:\n if not self.has_key('hostgroups') or self['hostgroups'].startswith('+'):\n parents = self.get_effective_parents()\n for parent in parents:\n parent_results += parent.get_effective_hostgroups()\n '''\n # Case 3:\n if self.has_key('host_name'):\n # We will use hostgroup_list in case 4 and 5 as well\n hostgroup_list = Hostgroup.objects.filter(members__has_field=self['host_name'])\n for hg in hostgroup_list:\n if hg not in result:\n result.append( hg )\n # Case 4: \n for hg in hostgroup_list:\n if not hg.has_key('hostgroup_name'): continue\n grp = Hostgroup.objects.filter(hostgroup_members__has_field=hg['hostgroup_name'])\n for i in grp:\n if i not in result:\n result.append(i )\n # Case 5:\n for hg in hostgroup_list:\n if not hg.has_key('hostgroup_name'): continue\n grp = Hostgroup.objects.filter(use__has_field=hg['hostgroup_name'])\n for i in grp:\n if i not in result:\n result.append(i )\n \n return result", "def getZoneRecords(self, filters=[]):\n return self._getRecords('zone', filters)", "def astimezone(self, tz=LOCAL):\n if tz is None:\n tz = LOCAL\n tz = parser.get_timezone(tz)\n return super(self.__class__, self).astimezone(tz)", "def localize(self, dt):\n\n #\n # TODO: implement various RRULE styles (at least common ones..)\n # possibly move rrule parsing into own classes because it's used by VEVENT as well\n # TODO: move get x-th day of month, first sunday, etc in separate functions\n\n logging.debug('localizing %s for timezone %s', (dt, self.tzid))\n\n cur_timezone = None\n cur_timestamp = None\n\n for t in self._times:\n dtstart = t['DTSTART']\n\n if 'RRULE' in t.keys():\n target_date = None\n vals = {}\n for k in t['RRULE'].split(';'):\n (key, value) = k.split('=')\n vals[key] = value\n\n if 'FREQ' 
in vals.keys():\n if vals['FREQ'] == 'YEARLY':\n month = int(vals['BYMONTH'])\n day = vals['BYDAY']\n\n if not day.isnumeric():\n wd = day[-2:]\n if day[:1] == \"-\":\n cnt = int(day[1:2])\n year = datetime.today().year\n month = (month + 1) % 12\n if month == 1:\n year += 1\n\n start_date = datetime(year, int(month), 1)\n\n day_num = start_date.weekday()\n day_num_target = VTIMEZONE._weekdays.index(wd)\n days_ago = (7 + day_num - day_num_target) % 7\n if days_ago == 0:\n days_ago = 7\n target_date = start_date - timedelta(days=days_ago + ((cnt-1)*7))\n\n else:\n cnt = int(day[:1])\n\n start_date = datetime(datetime.today().year, int(month), 1)\n\n day_num = start_date.weekday()\n day_num_target = VTIMEZONE._weekdays.index(wd)\n days_ago = (7 + day_num_target - day_num) % 7\n if days_ago == 0:\n days_ago = 7\n target_date = start_date + timedelta(days=days_ago + ((cnt-1)*7))\n\n if target_date is not None:\n if cur_timestamp is None:\n cur_timestamp = target_date\n cur_timezone = t\n else:\n if target_date.date() < dt.date():\n if cur_timestamp.date() > dt.date() or target_date.date() > cur_timestamp.date():\n cur_timestamp = target_date\n cur_timezone = t\n else:\n logging.error('RRULE not implemented yet, no localization possible (%s)' % t['RRULE'])\n\n logging.debug('decided on timezone offset: %s' % cur_timezone['TZOFFSETTO'])\n\n m = re.search(r'([+-])?(\\d\\d)(\\d\\d)', cur_timezone['TZOFFSETTO'])\n\n if m.group(1) == \"-\":\n dt -= timedelta(hours=int(m.group(2)), minutes=int(m.group(3)))\n else:\n dt += timedelta(hours=int(m.group(2)), minutes=int(m.group(3)))\n\n logging.debug('localized to %s' % dt)\n return dt", "def taxi_zones(path, storage_options=None):\n zdf = pd.read_csv(path, storage_options=storage_options)\n zdf = zdf.drop(\"OBJECTID\", axis=\"columns\")\n zdf = zdf.set_index(\"LocationID\")\n return zdf", "def get_time_attr_map(t):\n now = datetime.datetime.now()\n if t + datetime.timedelta(hours=3) > now:\n return get_map(\"main_list_white\")\n if t + datetime.timedelta(days=3) > now:\n return get_map(\"main_list_lg\")\n else:\n return get_map(\"main_list_dg\")", "def timezone_offset_country():\r\n\r\n return _random.choice(\r\n [\r\n 'Eniwetoa',\r\n 'Hawaii',\r\n 'Alaska',\r\n 'Pacific',\r\n 'Mountain',\r\n 'Central',\r\n 'Eastern',\r\n 'Atlantic',\r\n 'Canada',\r\n 'Brazilia',\r\n 'Buenos Aries',\r\n 'Mid-Atlantic',\r\n 'Cape Verdes',\r\n 'Greenwich Mean Time',\r\n 'Dublin',\r\n 'Berlin',\r\n 'Rome',\r\n 'Israel',\r\n 'Cairo',\r\n 'Moscow',\r\n 'Kuwait',\r\n 'Abu Dhabi',\r\n 'Muscat',\r\n 'Islamabad',\r\n 'Karachi',\r\n 'Almaty',\r\n 'Dhaka',\r\n 'Bangkok, Jakarta',\r\n 'Hong Kong',\r\n 'Beijing',\r\n 'Tokyo',\r\n 'Osaka',\r\n 'Sydney',\r\n 'Melbourne',\r\n 'Guam',\r\n 'Magadan',\r\n 'Soloman Islands',\r\n 'Fiji',\r\n 'Wellington',\r\n 'Auckland',\r\n ]\r\n )", "def get_timzone_offset(self, timezone):\n raise NotImplementedError", "def _format_zone_list(self, instance_list):\n result = []\n if instance_list is not None:\n if \"items\" in instance_list:\n items = instance_list[\"items\"]\n for item in items:\n result.append(self._process_instance(item))\n return result", "def alerts_county_zone(self: SimpleNWS) -> List[Dict[str, Any]]:\n return self._alerts_county_zone", "def refresh_all_info(self):\n # Do not use uuid as it's not available in state configured\n # Do not use self.get_attr as it would call refresh_all_info again :-)\n state_cmd = [CMD_ZONEADM, \"-z\",\n self._zone_attr[ZONE_ENTRY['ZNAME']], \"list\", \"-p\"]\n\n line_items = 
str(getoutputs(state_cmd)).split(\":\")\n for val in ZONE_ENTRY.values():\n # our ZONE_MAPING reflects _zone_attr\n self._zone_attr[val] = line_items[val]\n\n # other comes later net/anet\n extra_info = ['autoboot', 'brand', 'ip-type', 'bootargs', 'file-mac-profile', 'pool', 'limitpriv', 'scheduling-class', 'hostid', 'fs-allowed']\n info_cmd = [CMD_ZONECFG, \"-z\", self._zone_attr[ZONE_ENTRY['ZNAME']], \"info\"]\n line_items = str(getoutputs(info_cmd)).split(\"\\n\")\n\n for line in line_items:\n for attr in extra_info:\n if line.startswith(attr+\":\"):\n self._zone_attr[attr] = line[line.find(':')+1:].strip()", "def get_router_availability_zones(self, router):\n return [self._get_router_az_obj(router).name]", "async def _timein_city(self, *, city_name):\n\t\t\n\t\tapiKey = self.settings['api_key']\n\t\tif \".com\" in apiKey:\n\t\t\tawait self.bot.say(\"You have to set your API key, see data/timein/settings.json for details\")\n\t\t\treturn\n\t\t\n\t\turl = 'http://api.timezonedb.com/v2/list-time-zone?key=' + apiKey + '&format=xml'\n\t\t\n\t\tcity = city_name.replace(' ', '_')\n\t\t\n\t\turl += '&zone=*' + city + '*'\n\t\t\n\t\tasync with aiohttp.get(url) as response:\n\t\t\tsoupObject = BeautifulSoup(await response.text(), \"html.parser\")\n\t\tmessage = ''\n\t\t\n\t\tstatus = soupObject.find('status').get_text()\n\t\tif status != 'OK':\n\t\t\tmessage += 'Request failed. Details:\\n```'\n\t\t\tmessage += status + '\\n'\n\t\t\tmessage += soupObject.find('message').get_text()\n\t\t\tmessage += '```\\nTry searching for a capital or other major city'\n\t\telse:\n\t\t\tzones = soupObject.find_all('zone')\n\t\t\tfor zone in zones:\n\t\t\t\tnewmessage = ''\n\t\t\t\tnewmessage += ':flag_' + zone.find('countrycode').get_text().lower() + ': '\n\t\t\t\tnewmessage += zone.find('countryname').get_text() + '\\n'\n\t\t\t\tnewmessage += zone.find('zonename').get_text() + '\\n'\n\t\t\t\tunixtime = zone.find('timestamp').get_text()\n\t\t\t\tprettyTime = datetime.datetime.utcfromtimestamp(int(unixtime)).strftime('%Y-%m-%d %H:%M:%S')\t\t\t\t\n\t\t\t\tnewmessage += prettyTime + '\\n'\n\t\t\t\tmessage += newmessage + '\\n'\n\t\t\n\t\tawait self.bot.say(message)", "def info(self, req):\n items = api.get_zone_capabilities(req.environ['nova.context'])\n\n zone = dict(name=FLAGS.zone_name)\n caps = FLAGS.zone_capabilities\n for cap in caps:\n key, value = cap.split('=')\n zone[key] = value\n for item, (min_value, max_value) in items.iteritems():\n zone[item] = \"%s,%s\" % (min_value, max_value)\n return dict(zone=zone)", "def index(self, req):\n # Ask the ZoneManager in the Scheduler for most recent data,\n # or fall-back to the database ...\n items = api.get_zone_list(req.environ['nova.context'])\n items = common.limited(items, req)\n items = [_scrub_zone(item) for item in items]\n return dict(zones=items)", "def get_all_db_zone(self, context):\n zone_objs = self.dns_manager.get_all_db_zone(context)\n return zone_objs", "def get_all_db_region(self, context):\n zone_objs = self.dns_manager.get_all_db_region(context)\n return zone_objs", "def test_get_groups(self):\n pass", "def test_get_groups(self):\n pass", "async def _timezone(self, ctx: commands.Context, tz: str = None):\n self.check_if_exist(ctx.guild)\n\n self.daily_guilds[str(ctx.guild.id)][\"tz\"] = tz\n self.daily_info.update(\"guilds\", self.daily_guilds)\n await ctx.reply(\"New daily timezone is {0}\".format(tz))", "def recurrence_time_zone(self):\n return self.__recurrence_time_zone", "def hotColdZones(playerID, group = 'hitting'):\n #find his current 
team and if he's a hitter or pitcher then\n zoneData = get('person',{ 'ver':'v1' , 'personId':playerID,'hydrate':['stats(group={},type={})'.format(group,'hotColdZones'),'currentTeam']})\n zonesData = {}\n for stat in zoneData.get('people')[0].get('stats'):\n for types in stat.get('splits'):\n zonesData[types.get('stat')['name']] = types.get('stat')['zones']\n #create a list of Zones, up to 9\n #using that list of zones make a strike zone\n #make a list of strike zone data for each value\n return zonesData", "async def get_time_zones_async(\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = GetTimeZones.create(\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )", "def map(self):\n return self.map_digis(self.group)", "def getTimeZoneDict():\n if not len(TimeZoneDict):\n for tz_descr in map(str.split, TimeZoneStr.split('\\n')):\n tz_offset = int(float(tz_descr[0]) * 3600)\n for tz_code in tz_descr[1:]:\n TimeZoneDict[tz_code] = tz_offset\n return TimeZoneDict", "def parse_groups(self):\n\n data = []\n ads_by_data = []\n for date_time, group in self.groups:\n for ad in group:\n ads_by_data.append({\"ad\": ad})\n date_key = self.date_to_string(date_time)\n data.append({date_key: ads_by_data})\n ads_by_data = []\n\n return data", "def zones(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"zones\")", "def zones(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"zones\")", "def timezone(self, latitude, longitude):\n root = self._api_call('GET', 'timezone', lat=latitude, lng=longitude)\n return { 'timezoneId': root[0].find('timezoneId').text,\n 'rawOffset': root[0].find('rawOffset').text, }", "def test_calendar_query_timezone_id(self):\n TimezoneCache.create()\n self.addCleanup(TimezoneCache.clear)\n\n tzid1 = \"Etc/GMT+1\"\n\n calendar_properties = (\n davxml.GETETag(),\n caldavxml.CalendarData(),\n )\n\n query_timerange = caldavxml.TimeRange(\n start=\"%04d1001T000000Z\" % (DateTime.getToday().getYear(),),\n end=\"%04d1101T000000Z\" % (DateTime.getToday().getYear(),),\n )\n\n query = caldavxml.CalendarQuery(\n davxml.PropertyContainer(*calendar_properties),\n caldavxml.Filter(\n caldavxml.ComponentFilter(\n caldavxml.ComponentFilter(\n query_timerange,\n name=\"VEVENT\",\n ),\n name=\"VCALENDAR\",\n ),\n ),\n caldavxml.TimeZoneID.fromString(tzid1),\n )\n\n def got_xml(doc):\n if not isinstance(doc.root_element, davxml.MultiStatus):\n self.fail(\"REPORT response XML root element is not multistatus: %r\" % (doc.root_element,))\n\n return self.calendar_query(query, got_xml)", "def test_calendar_query_wrong_timezone_elements(self):\n TimezoneCache.create()\n self.addCleanup(TimezoneCache.clear)\n\n tzid1 = \"Etc/GMT+1\"\n tz1 = Component(None, pycalendar=readVTZ(tzid1))\n\n calendar_properties = (\n davxml.GETETag(),\n caldavxml.CalendarData(),\n )\n\n query_timerange = caldavxml.TimeRange(\n start=\"%04d1001T000000Z\" % (DateTime.getToday().getYear(),),\n end=\"%04d1101T000000Z\" % (DateTime.getToday().getYear(),),\n )\n\n query = caldavxml.CalendarQuery(\n davxml.PropertyContainer(*calendar_properties),\n caldavxml.Filter(\n caldavxml.ComponentFilter(\n caldavxml.ComponentFilter(\n query_timerange,\n name=\"VEVENT\",\n ),\n name=\"VCALENDAR\",\n ),\n ),\n caldavxml.TimeZone.fromCalendar(tz1),\n )\n 
query.children += (caldavxml.TimeZoneID.fromString(tzid1),)\n\n result = yield self.calendar_query(query, got_xml=None, expected_code=responsecode.BAD_REQUEST)\n self.assertTrue(\"Only one of\" in result)", "def groups(self):\n #return self.get('{}/groups'.format(ApiVersion.A1.value))\n return self.get('{}/groups'.format(ApiVersion.CM1.value))", "def calculate_continent_daywise(countries_daywise_df):", "def test_get_country_states(self):\n pass", "def SecurityZone(self) -> _n_6_t_7:", "def SecurityZone(self) -> _n_6_t_7:", "def tz_arg(timezone: str):\n for tz in all_timezones:\n if tz.lower().endswith(timezone.lower()):\n return tz\n return None", "async def _timein_country(self, country_code):\n\t\t\n\t\tapiKey = self.settings['api_key']\n\t\tif \".com\" in apiKey:\n\t\t\tawait self.bot.say(\"You have to set your API key, see data/timein/settings.json for details\")\n\t\t\treturn\n\t\t\n\t\turl = 'http://api.timezonedb.com/v2/list-time-zone?key=' + apiKey + '&format=xml'\n\t\tflag = ':flag_'\n\n\t\tif country_code.lower() == 'use':\n\t\t\turl += '&country=US&zone=*New_York*'\n\t\t\tflag += 'us: EAST '\n\t\telif country_code.lower() == 'usw':\n\t\t\turl += '&country=US&zone=*Los_Angeles*'\n\t\t\tflag += 'us: WEST '\n\t\telif country_code.lower() == 'test':\n\t\t\turl += '&zone=*auckland*'\n\t\t\tflag += 'nz: '\n\t\telif len(country_code) != 2 or ' ' in country_code == False:\n\t\t\tawait self.bot.say(\"Country code must be 2 letters and from this list https://timezonedb.com/country-codes\")\n\t\t\treturn\n\t\telse:\n\t\t\tif country_code == 'UK' or country_code == 'uk':\n\t\t\t\tcountry_code = 'GB'\n\t\t\turl += '&country=' + country_code\n\t\t\tflag += country_code.lower() + ': '\n\t\t\t\n\t\tasync with aiohttp.get(url) as response:\n\t\t\tsoupObject = BeautifulSoup(await response.text(), \"html.parser\")\n\t\tmessage = ''\n\t\t\n\t\tstatus = soupObject.find('status').get_text()\n\t\tif status != 'OK':\n\t\t\tmessage += 'Request failed. 
Details:\\n```'\n\t\t\tmessage += status + '\\n'\n\t\t\tmessage += soupObject.find('message').get_text()\n\t\t\tmessage += '```\\nMake sure country code is from the list at https://timezonedb.com/country-codes'\n\t\telse:\n\t\t\tzones = soupObject.find_all('zone')\n\t\t\tfor zone in zones:\n\t\t\t\tnewmessage = ''\n\t\t\t\tnewmessage += flag\n\t\t\t\tnewmessage += zone.find('countryname').get_text() + '\\n'\n\t\t\t\tnewmessage += zone.find('zonename').get_text() + '\\n'\n\t\t\t\tunixtime = zone.find('timestamp').get_text()\n\t\t\t\tprettyTime = datetime.datetime.fromtimestamp(int(unixtime)).strftime('%Y-%m-%d %H:%M:%S')\n\t\t\t\tnewmessage += prettyTime + '\\n'\n\t\t\t\tmessage += newmessage + '\\n'\n\t\t\n\t\tawait self.bot.say(message)", "def time_zone(self) -> str:\n return pulumi.get(self, \"time_zone\")", "def getRapPerZone():\n\tdicoZone = {}\n\ttry:\n\t\tdicoZone = json.load(codecs.open(settings.APDICOZONE,'r',encoding='utf-8'))\n\texcept Exception as e:\n\t\tOperationalError(source=\"Rap Per Zone - dico loading\", error=str(e)).save()\n\t\treturn {}\n\n\tresult = {}\n\n\t# prefetch to avoid n+1 queries\n\tfor rap in RogueAccessPoint.objects.areUp().filter(closestAp__isnull=False).prefetch_related('closestAp'):\n\t\tif rap.closestAp != None:\n\t\t\tclosestApName = rap.closestAp.name\n\t\t\tfor tag,zone in dicoZone.items():\n\t\t\t\tif tag in closestApName:\n\t\t\t\t\tif zone not in result:\n\t\t\t\t\t\tresult[zone] = []\n\t\t\t\t\tresult[zone].append(rap)\n\t\t\t\t\tbreak\n\n\n\treturn result", "def get_zones(region=None, key=None, keyid=None, profile=None):\n conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)\n\n return [z.name for z in conn.get_all_zones()]", "def test_time_zone() -> None:\n schema = vol.Schema(cv.time_zone)\n\n with pytest.raises(vol.MultipleInvalid):\n schema(\"America/Do_Not_Exist\")\n\n schema(\"America/Los_Angeles\")\n schema(\"UTC\")", "def check_guest_timezone(self):\n guest_timezone = None\n for sample in TimeoutingSampler(\n config.SAMPLER_TIMEOUT, config.SAMPLER_SLEEP,\n ll_vms.get_vm_obj, self.vm_name, all_content=True\n ):\n guest_timezone = sample.get_guest_time_zone()\n if guest_timezone:\n break\n logger.info(\n \"Guest timezone name is '%s', offset: '%s'\",\n guest_timezone.get_name(),\n guest_timezone.get_utc_offset()\n )\n testflow.step(\"Check if guest agent reports timezone name\")\n assert len(guest_timezone.get_name()) > 0, 'Timezone name is empty'\n testflow.step(\"Check if guest agent reports UTC offset\")\n assert len(guest_timezone.get_utc_offset()) > 0, \"UTC offset is empty\"", "def test_zone_name(self):\n zone = Zone('test.example.com')\n self.assertEqual(zone.name, 'test.example.com')" ]
[ "0.5835346", "0.57775724", "0.57361513", "0.5698909", "0.55084604", "0.55003285", "0.545441", "0.5390807", "0.53817374", "0.5315582", "0.526018", "0.52240425", "0.51877755", "0.5176649", "0.516878", "0.51196074", "0.5056693", "0.50521755", "0.49813348", "0.49468496", "0.49349907", "0.49266613", "0.49249437", "0.49219984", "0.49091083", "0.48892498", "0.48613378", "0.4858069", "0.4854292", "0.4831495", "0.48306042", "0.48224452", "0.4820185", "0.4799136", "0.47949764", "0.47811714", "0.47811714", "0.4762613", "0.47585434", "0.4757025", "0.47352344", "0.47287852", "0.47281706", "0.4716935", "0.47141737", "0.47077683", "0.4698405", "0.46883747", "0.46840724", "0.4677138", "0.46716386", "0.4669053", "0.4650032", "0.4648532", "0.46405005", "0.46388945", "0.46373096", "0.46344164", "0.46278182", "0.4595137", "0.4591892", "0.4586627", "0.45750454", "0.45745057", "0.4573268", "0.45588857", "0.45549992", "0.45522037", "0.45231718", "0.4522528", "0.45136586", "0.4512207", "0.45121157", "0.45114276", "0.45114276", "0.45093924", "0.4505513", "0.4503418", "0.45031038", "0.44985878", "0.44942567", "0.44938534", "0.44925827", "0.44925827", "0.44889066", "0.44872105", "0.44859645", "0.44854558", "0.4481774", "0.44739795", "0.4472964", "0.4472964", "0.44702056", "0.44691402", "0.4466435", "0.4453564", "0.4451755", "0.44486699", "0.44413984", "0.4440487" ]
0.45075953
76
Lookup a city within all timezone groups. Item lookup is case insensitive.
def __getitem__(self, key): key = str(key).lower().encode('utf-8') for group in self._groups.values(): try: return group[key] except KeyError: pass raise KeyError('Unrecognised city name - %s' % key)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_cities(self, city_name: str = None):", "def get_cities(self, city_name: str = \"\"):", "async def _timein_city(self, *, city_name):\n\t\t\n\t\tapiKey = self.settings['api_key']\n\t\tif \".com\" in apiKey:\n\t\t\tawait self.bot.say(\"You have to set your API key, see data/timein/settings.json for details\")\n\t\t\treturn\n\t\t\n\t\turl = 'http://api.timezonedb.com/v2/list-time-zone?key=' + apiKey + '&format=xml'\n\t\t\n\t\tcity = city_name.replace(' ', '_')\n\t\t\n\t\turl += '&zone=*' + city + '*'\n\t\t\n\t\tasync with aiohttp.get(url) as response:\n\t\t\tsoupObject = BeautifulSoup(await response.text(), \"html.parser\")\n\t\tmessage = ''\n\t\t\n\t\tstatus = soupObject.find('status').get_text()\n\t\tif status != 'OK':\n\t\t\tmessage += 'Request failed. Details:\\n```'\n\t\t\tmessage += status + '\\n'\n\t\t\tmessage += soupObject.find('message').get_text()\n\t\t\tmessage += '```\\nTry searching for a capital or other major city'\n\t\telse:\n\t\t\tzones = soupObject.find_all('zone')\n\t\t\tfor zone in zones:\n\t\t\t\tnewmessage = ''\n\t\t\t\tnewmessage += ':flag_' + zone.find('countrycode').get_text().lower() + ': '\n\t\t\t\tnewmessage += zone.find('countryname').get_text() + '\\n'\n\t\t\t\tnewmessage += zone.find('zonename').get_text() + '\\n'\n\t\t\t\tunixtime = zone.find('timestamp').get_text()\n\t\t\t\tprettyTime = datetime.datetime.utcfromtimestamp(int(unixtime)).strftime('%Y-%m-%d %H:%M:%S')\t\t\t\t\n\t\t\t\tnewmessage += prettyTime + '\\n'\n\t\t\t\tmessage += newmessage + '\\n'\n\t\t\n\t\tawait self.bot.say(message)", "def cities(self):\n objs = models.storage.all()\n tmp = []\n for key, value in objs.items():\n name = key.split('.')\n if name[0] == \"City\":\n if value.state_id == str(self.id):\n tmp.append(objs[key])\n return tmp", "def city_finder(location):\n import requests\n import json\n from greyd import config\n latitude, longitude = location.split(\",\")\n result_city = \"\"\n geonames_url = f\"http://api.geonames.org/findNearbyPlaceNameJSON?lat={latitude}&lng={longitude}&username={config.GEONAMES_USERNAME}\" # noqa pylint: disable=line-too-long\n\n for _ in range(5):\n request_map_api = requests.get(geonames_url)\n map_json_parse = json.loads(request_map_api.text)\n try:\n result_city = map_json_parse[\"geonames\"][0][\"adminName1\"]\n break\n except IndexError:\n result_city = \"\"\n\n return result_city", "def test_city_country(self):\n dublin_ireland = city_country('dublin', 'ireland')\n self.assertEqual(dublin_ireland, 'Dublin, Ireland')", "def test_city_country(self):\n santiago_chile = get_city_name('santiago', 'chile')\n self.assertEqual(santiago_chile, 'Santiago, Chile')", "def filter_city(input_city: str) -> str:\n # input_city = string.capwords(input_city.lower())\n result = filterString(input_city).cities\n return result", "def test_city_country(self):\n formatted_city = get_full_city(\"santiago\", \"chile\")\n self.assertEqual(formatted_city, \"Santiago, Chile\")", "def find_by_city(city, cat, keywords):\n print \"--------------------------------------------------------\"\n city_items = CityItems(city, cat)\n r = requests.get(city_items.url)\n if r.status_code == 200:\n scraper = HtmlScraper(r.text)\n for path in scraper.item_paths:\n item = scraper.scrape_item(path, keywords)\n if item:\n print item\n city_items.add_item(item)\n else:\n print 'ERROR: Invalid city: {}'.format(city)\n return city_items", "def match_city(self, city, dpt_code, zip_code = None):\n city = format_str_city_insee(city)\n dpt_code = dpt_code.rjust(2, '0')\n if zip_code:\n 
zip_code.rjust(5, '0')\n # Based on zip code and city name\n ls_matching = []\n found_indicator = False\n if zip_code:\n if zip_code in self.dict_corr_zip_insee:\n for city_insee, zip_insee, dpt_insee, code_insee in self.dict_corr_zip_insee[zip_code]:\n if city == city_insee:\n ls_matching.append((city_insee, zip_insee, code_insee))\n found_indicator = True\n if found_indicator:\n return (ls_matching, 'zip_city_match')\n # If no exact zip, city match: check if city name in insee city names\n for city_insee, zip_insee, dpt_insee, code_insee in self.dict_corr_zip_insee[zip_code]:\n if city in city_insee:\n ls_matching.append((city_insee, zip_insee, code_insee))\n found_indicator = True\n if found_indicator:\n return (ls_matching, 'zip_city_in_match(es)')\n # Based on dpt code and city name\n for city_insee, zip_insee, dpt_insee, code_insee in self.dict_corr_dpt_insee[dpt_code]:\n if city == city_insee:\n ls_matching.append((city_insee, zip_insee, code_insee))\n found_indicator = True\n if found_indicator:\n return (ls_matching, 'dpt_city_match')\n # If no exact dpt, city match: check if city name in insee city names\n for city_insee, zip_insee, dpt_insee, code_insee in self.dict_corr_dpt_insee[dpt_code]:\n if city in city_insee:\n ls_matching.append((city_insee, zip_insee, code_insee))\n found_indicator = True\n if found_indicator:\n return (ls_matching, 'dpt_city_in_match(es)')\n # No match\n return (None, 'no_match')", "def get_city(address):\n geolocator = Nominatim(user_agent=\"specify_your_app_name_here\")\n \n while True:\n try:\n location = geolocator.geocode(address)\n break\n except Exception:\n None\n \n city = citipy.nearest_city(location.latitude, location.longitude)\n return [city.city_name.title(), city.country_code.title()]", "def find_airport_code_by_city(city):\n airports = get_airports()\n\n if city == 'London':\n return 'LHR'\n\n for airport_code in airports:\n if airports[airport_code].lower() == city.lower():\n return airport_code\n return None", "def all_capital_city(state):\n\n\tfor state_key in states:\n\t\tif state_key.lower() == state.lower():\n\t\t\tif capital_cities.get(states[state_key]):\n\t\t\t\treturn [capital_cities[states[state_key]], state_key]\n\treturn None", "def get_city(self, name: str):\n key = name.lower()\n try:\n return self._cities[key]\n except KeyError:\n city = City(name=name, state=self)\n self._cities[key] = city\n return city", "def get_city(self, territory_id: str = \"\"):", "def get_city(self, territory_id: str = \"\"):", "def test_find_cities(self):\n\n # Given\n game_state: CarcassonneGameState = CarcassonneGameState()\n\n city_one_side_straight_road = base_tiles[\"city_top_straight_road\"].turn(3)\n city_with_road = inns_and_cathedrals_tiles[\"ic_15\"].turn(3)\n\n game_state.board = [[None for column in range(2)] for row in range(1)]\n\n game_state.board[0][0] = city_with_road\n game_state.board[0][1] = city_one_side_straight_road\n\n # When\n cities: [City] = CityUtil.find_cities(\n game_state=game_state,\n coordinate=Coordinate(0, 0)\n )\n\n # Then\n self.assertEqual(1, len(cities))\n self.assertEqual(2, len(cities[0].city_positions))\n self.assertTrue(cities[0].finished)", "def city_update(self):\n self.city = self.city_finder(self.location.__str__())", "def cities(self):\n return [value for value in models.storage.all(City).values()\n if value.state_id == self.id]", "def search_engine(city_name):\n\n API_Key = \"zIGuOeUd0aE4O621Gj1KGDc6JiZ3PAGb\"\n http_request = 
f\"http://dataservice.accuweather.com/locations/v1/cities/search?apikey={API_Key}&q={city_name}&language=pt-br\"\n\n search_request = requests.get(http_request)\n\n if search_request.status_code != 200:\n print(f\"It was not possible to retrive information about {city_name}\")\n\n else:\n search_response = search_request.json()\n print(f\"Obtaining information about the weather in {city_name}\")\n\n return search_response[0]", "def cities(self):\n from models import storage\n city_list = []\n cities_dict = storage.all(cls=\"City\")\n for k, v in cities_dict.items():\n if v.get(\"state_id\") == self.id:\n city_list.append(v)\n return city_list", "def city(self) -> str:\n return pulumi.get(self, \"city\")", "def test_city_country(self):\n santiago_chile = city_country('santiago', 'chile')\n self.assertEqual(santiago_chile, 'Santiago, Chile')", "def test_city_country(self):\n santiago_chile = city_country('santiago', 'chile')\n self.assertEqual(santiago_chile, 'Santiago, Chile')", "def test_city_country(self):\n santiago_chile = city_country('santiago', 'chile')\n self.assertEqual(santiago_chile, 'Santiago, Chile')", "async def test_get_location_data(self):\n for city_name in ['dublin', 'London', 'Copenhagen']:\n response = await self.http_client.fetch(request=HTTPRequest(\n url=self.get_url(path=\"/location-data/{}\".format(city_name)),\n method='GET'\n ))\n self.assertEqual(response.code, HTTPStatus.OK)\n self.check_city_response(response, city_name.lower())", "def city(self):\n # type: () -> string_types\n return self._city", "def city(self):\n\n try:\n city = self.status.place[\"full_name\"].strip(r\",[A-Z ]\")\n except TypeError:\n city = None\n if not city:\n try:\n city = self.metadata.as_dict.get(\"user_city\").get(\"google_geocoding\")\n except (TypeError, AttributeError):\n city = None\n return city", "def GetCity():\n IPinfoRequest = requests.get('https://ipinfo.io/')\n IPinfo = IPinfoRequest.json()\n City = IPinfo['city']\n return(City)", "def test_find_city(self):\n\n # Given\n game_state: CarcassonneGameState = CarcassonneGameState()\n\n city_top = base_tiles[\"city_top\"]\n city_bottom = city_top.turn(2)\n\n game_state.board = [[None for column in range(1)] for row in range(2)]\n\n game_state.board[0][0] = city_bottom\n game_state.board[1][0] = city_top\n\n # When\n city: City = CityUtil.find_city(\n game_state=game_state,\n city_position=CoordinateWithSide(Coordinate(0, 0), Side.BOTTOM)\n )\n\n # Then\n self.assertTrue(city.finished)\n self.assertEqual(2, len(city.city_positions))\n self.assertIn(CoordinateWithSide(Coordinate(0, 0), Side.BOTTOM), city.city_positions)\n self.assertIn(CoordinateWithSide(Coordinate(1, 0), Side.TOP), city.city_positions)", "def test_city_country(self):\n\t\tformatted_address = city_country('santiago', 'chile')\n\t\tself.assertEqual(formatted_address, 'Santiago, Chile')", "def find_city(self, text):\n\n result = ''\n textarr = text.split()\n for i in range(0, len(textarr)):\n\n if textarr[i] in self.cities:\n\n if len(result) < 2:\n result = result + textarr[i]\n else:\n result = result + ' ' + textarr[i]\n\n if result in self.cities:\n\n # ends the function if the city is found\n if (result + '\\n') in self.cities and result != 'in':\n return result\n\n else:\n result_list = result.split()\n del result_list[0]\n result = ' '.join(w for w in result_list).lstrip(' ')\n\n # ends the function if the city is found\n if (result + '\\n') in self.cities and result != 'in':\n return result\n\n return None", "def test_city_country(self):\n formatted_name = 
city_country('santiago', 'chile')\n self.assertEqual(formatted_name, 'Santiago, Chile')", "def get_by_city():\n while True:\n try:\n for key in CITY_DICT:\n print(key)\n city = input(\"Enter city name: \")\n lat, long = CITY_DICT.get(city.capitalize())\n return lat, long\n except TypeError:\n print(\"~\" * 50)\n print(\"Please enter the city name correctly\")", "def test_get_country_by_geo_location(self):\n pass", "def city(self) -> Optional[str]:\n return pulumi.get(self, \"city\")", "def __getitem__(self, key):\n \n city = self._citydb[key]\n city.astral = self\n return city", "def get_city_info(g, city_name):\n flag = 0\n for key in g.city_dict:\n if(g.city_dict[key].get_name() == city_name):\n print g.city_dict[key].get_info()\n flag = 1\n \n if(flag == 0):\n print (\"Invalid Input\")", "def city(self):\n if \"city\" in self._prop_dict:\n return self._prop_dict[\"city\"]\n else:\n return None", "def city(self):\n if \"city\" in self._prop_dict:\n return self._prop_dict[\"city\"]\n else:\n return None", "def get_datacenter_city(self, node):\n if self._datacenter_cache is None:\n self.populate_datacenter_cache()\n location = self._datacenter_cache[node.datacenter_id].location\n location = location.lower()\n location = location.split(\",\")[0]\n return location", "def get_city_by_name(request, city_prefix):\n cities = City.objects.filter(city_name__istartswith=city_prefix)[:5]\n serializer = AllCitiesSerializer(cities, many=True)\n return Response(serializer.data)", "def city_country_select():\n q = \"\"\"\n SELECT airport_city, airport_country\n FROM airport\n GROUP BY airport_city, airport_country\"\"\"\n cursor = connection.cursor()\n cursor.execute(q)\n city_country = cursor.fetchall()\n cursor.close()\n return city_country", "def build(self):\n allow_bare = AllowBareCityName(blocklist=self.bare_name_blocklist)\n\n iter_keys = CityKeyIter(allow_bare)\n\n # Deduped cities.\n cities = WOFLocality.clean_us_cities()\n\n logger.info('Indexing US cities.')\n\n for row in tqdm(cities):\n\n # Key -> id(s)\n for key in map(keyify, iter_keys(row)):\n self.add_key(key, row.wof_id)\n\n # ID -> city\n self.add_location(row.wof_id, CityMatch(row))", "def is_city_of_london(self):\n return \"local.city-of-london\" in self.slug", "def test_single_word_boston(self):\n result = location.lookup_location('Boston GB')\n\n self.assertEqual(result['country'], 'GB')", "def return_city(n):\n if n == 1:\n return \"San Francisco\"\n elif n == 2:\n return \"Los Angeles\"\n elif n == 3:\n return \"Las Vegas\"\n elif n == 4:\n return \"Portland\"\n elif n == 5:\n return \"San Diego\"\n else:\n return \"Seattle\"", "def city(self):\n return self._city", "def city(self):\n return self._city", "def city(self):\n return self._city", "def city(self):\n return self._city", "def city(self):\n return self._city", "def test_double_word_coombe_martin(self):\n result = location.lookup_location('Combe Martin GB')\n\n self.assertEqual(result['country'], 'GB')", "def parse_china(self, pyName=None):\n self.status = {}\n\n extractMethod = lambda d: {\n k: v for k,\n v in d.items() if k in [\n \"quName\",\n \"stateDetailed\",\n \"tem1\",\n \"tem2\",\n \"windState\"]}\n if pyName in [\"xisha\", \"nansha\", \"diaoyudao\"]:\n for city in self.root.findall(\"city\"):\n if city.get(\"pyName\") == pyName:\n self.status[\"cityname\"] = city.get(\"cityname\")\n self.status[\"stateDetailed\"] = city.get(\"stateDetailed\")\n self.status[\"temLow\"] = city.get(\"tem2\")\n self.status[\"temHigh\"] = city.get(\"tem1\")\n 
self.status[\"windState\"] = city.get(\"windState\")\n\n break\n\n return self.status\n else:\n for city in self.root.findall(\"city\"):\n self.status[city.get(\"cityname\")] = extractMethod(city.attrib)\n\n return self.status", "def get_city_by_name(self, name):\n query = \"SELECT _id, name, country, lat, lon FROM cities INNER JOIN coord \" \\\n \"ON cities.coord = coord.id WHERE name = '{}';\".format(name)\n self.select_from_bd(query)\n cities_list = self.cursor.fetchall()\n for loop, city in enumerate(cities_list):\n cities_list[loop]['lat'] = float('%g' % round(city['lat'], 2))\n cities_list[loop]['lon'] = float('%g' % round(city['lon'], 2))\n return cities_list", "def test_city_country(self):\n your_location = location_name(\"lviv\", \"ukraine\")\n self.assertEqual(your_location, \"Lviv, Ukraine\")", "def GetUsCities():\n return GetDataFromCsvFile('us_cities.csv')", "def get_city(doc = None, cursor = None):\n\tif cursor is None and doc is not None:\n\t\treturn doc['details']['city']\n\telif doc is None and cursor is not None:\n\t\tallcities = list()\n\t\tfor thisdoc in cursor:\n\t\t\tallcities.append(thisdoc['details']['city'])\n\t\treturn allcities\n\telse:\n\t\tprint \"Supply any one argument only!\"", "def find_city(city, dbsession):\n\n\t# Since we're creating the FK relation based on ID, and hence the casing has no bearing on \n\t# whether the city record associates with the address, I'm upcasing the city to prevent dupes.\n\tcity = str(city)\n\tcity = city.upper()\n\n\tresult = dbsession.query(db.City).filter_by(city_name=city).first()\n\n\tif result is None:\n\t\t# Create a new instance of city\n\t\tcity_object = db.City(city)\n\t\t# I'm adding the city without committing the transaction since it would also\n\t\t# commit the address insert transaction that's still open in routes.py.\n\t\tdbsession.add(city_object)\n\t\treturn city_object\n\telse:\n\t\t# Assign the existing user object to the variable\n\t\treturn result", "def city_by_id(city_id):\n cities_values = storage.all(\"City\").values()\n for obj in cities_values:\n if obj.id == city_id:\n return jsonify(obj.to_dict())\n abort(404)", "def get_coordinates_from_city(self, city):\n return self.cities_dict.get(city)", "def parse_city(self, response):\n j = response.body\n cs = json.loads(j)\n \n if cs:\n for v in cs:\n cid = v['city_id']\n url = \"http://api.qaym.com/0.1/cities/\"+cid+\"/items/key=\"+key\n yield scrapy.Request(url, callback=self.parse_restaurant)", "def cities(self):\n from models.engine.file_storage import FileStorage\n from models.city import City\n fs = FileStorage.all(City)\n city_list = []\n for key, value in fs.items():\n if 'City' in key and self.id == value.state_id:\n '''Append City instances maybe fucked up here!!!'''\n city_list.append(value)\n return city_list", "def get_airports(cities):\n ## Database connection, db, collection\n conn = pymongo.Connection()\n db=conn.flight_db\n ap = db.airports\n\n airport_list = []\n for city in cities:\n c = ap.find({\n 'city':{'$regex':'^'+city, '$options':'i'}\n })\n for info in c:\n airport_list.append(info['city'] + ': ' + info['code'])\n print '%s - %s' % (info['city'], info['code'])\n conn.disconnect()\n\n return airport_list", "def test_correct_city(self, ip_address, city_correct):\n city, country = get_geo(ip_address=ip_address)\n self.assertEqual(city, city_correct)", "def process_city(state, city, locations=Locations, perror=None, do_exit=None):\n c=ConfigParser.ConfigParser()\n c.read(locations)\n l=c.options('US_%s' % state)\n d = {}\n 
condition_station = None\n zone = None\n for x in l:\n # info: city condition-station zone radar-code\n info = string.split(c.get('US_%s' % state, x))\n if city == string.lower(info[0]):\n if verbose:\n print 'info:', info\n if info[1] != '-'*len(info[1]):\n condition_station = info[1]\n\n if info[2] != '-'*len(info[2]):\n zone = string.upper(info[2])\n zone = zone[3:]\n\n return (condition_station, zone)\n\n if perror:\n dp_io.eprintf(\"Don't know this state/city: %s/%s\\n\",\n self.state,\n self.city)\n if do_exit:\n sys.exit(1)\n \n return None", "def get_cities():\n _, cities = API.cities(limit=1000)\n result = []\n for city in cities['results']:\n result.append(city['city'])\n return result", "def get_place_details(self):\n self.google_api_url = 'https://maps.googleapis.com/maps/api/place/details/json?placeid={}&key={}'.format(self.place_id, api_key)\n self.r = requests.get(url=self.google_api_url)\n self.data = self.r.json()\n self.address_components = self.data['result']['address_components']\n\n for i in self.address_components:\n if i['types'][0] == 'locality':\n self.city = (i['long_name'])\n return (self.city)\n else:\n pass", "async def excursion_by_city_search(city_name: str, lang='en', limit=10) -> dict:\n city_params = {'lang': lang, 'name': city_name, 'limit': 100}\n cities = await excursion_instance.get_cities(city_params)\n for city in cities:\n if city.get('name') == city_name:\n data = {'city_id': city.get('region_id'),\n 'country_id': city.get('country_id'),\n 'limit': limit}\n return await excursion_instance.get_excursions(data)", "def do_search(search):\n import StringIO\n from x84.bbs import echo, getch\n disp_msg('SEARChiNG')\n resp = requests.get(u'http://apple.accuweather.com'\n + u'/adcbin/apple/Apple_find_city.asp',\n params=(('location', search),))\n locations = list()\n if resp is None:\n disp_notfound()\n elif resp.status_code != 200:\n # todo: logger.error\n echo(u'\\r\\n' + u'StAtUS COdE: %s\\r\\n\\r\\n' % (resp.status_code,))\n echo(repr(resp.content))\n echo(u'\\r\\n\\r\\n' + 'PRESS ANY kEY')\n getch()\n else:\n xml_stream = StringIO.StringIO(resp.content)\n locations = list([dict(elem.attrib.items())\n for _event, elem in ET.iterparse(xml_stream)\n if elem.tag == 'location'])\n if 0 == len(locations):\n disp_notfound()\n else:\n disp_found(len(locations))\n return locations", "def expected_city_names_fixture():\n return {'b', 'a', 'c'}", "def city_by_state(state_id):\n city_list = []\n state_obj = storage.get(\"State\", state_id)\n\n if state_obj is None:\n abort(404)\n for obj in state_obj.cities:\n city_list.append(obj.to_json())\n\n return jsonify(city_list)", "def get_random_city(self):\n psycopg2.extensions.register_type(psycopg2.extensions.UNICODE, self.cursor)\n psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY, self.cursor)\n query = \"SELECT _id, name, country, lat, lon FROM cities INNER JOIN coord \" \\\n \"ON cities.coord = coord.id ORDER BY random() LIMIT 1;\"\n self.select_from_bd(query)\n return self.cursor.fetchone()", "def city():\r\n _cursor.execute('SELECT DISTINCT(name) FROM ca_cities where name is not null order by random() limit 1;')\r\n return _cursor.fetchone()[0].decode(\"utf-8\")", "def test_city_country(self):\n formatted_name = make_formatted_name('santiago', 'chile')\n self.assertEqual(formatted_name, 'Santiago, Chile')", "def test_single_word_exeter(self):\n result = location.lookup_location('Exeter GB')\n\n self.assertEqual(result['country'], 'GB')", "def GetWeatherByCity(City):\n WeatherUrl = 
\"http://api.openweathermap.org/data/2.5/weather?q=\"+ City + \"&appid=b4bacbe2dc824431289800439f1ec3df&units=metric\" \n WeatherRequest = requests.get(WeatherUrl)\n WeatherInfo = WeatherRequest.json()\n if ('main' in WeatherInfo):\n pass\n else:\n print(\"Invalid City Name\")\n exit() \n Temp = WeatherInfo['main']['temp']\n Humidity = WeatherInfo['main']['humidity']\n Description = WeatherInfo['weather'][0]['description']\n return(Temp, Humidity, Description)", "def parse_citystate(self):\n \n index = self.index\n \n if self.words[index]['tag'] != Vocabulary.NAME:\n return None, None, 0, 0\n \n if self.words[index]['word'] == 'mt':\n city = \"mountain\"\n else:\n city = self.words[index]['word']\n start = index\n \n index += 1\n if index == self.length:\n return None, None, 0, 0\n \n if self.words[index]['word'] == ',':\n index += 1\n if index == self.length:\n return None, None, 0, 0\n elif self.words[index]['tag'] == Vocabulary.NAME: \n # Hack\n state, n = self.state_hack(index)\n if n > 0:\n index += n\n return city, state, index - start + 1, index\n \n #if self.words[index]['word'] == 'medical doctor':\n #return city, \"ISO3166-2:US-MD\", index - start + 1, index\n try:\n state = self._state_dict[self.words[index]['word']]\n return city, state, index - start + 1, index\n except:\n city += ' ' + self.words[index]['word']\n index += 1\n if index == self.length:\n return None, None, 0, 0\n \n if self.words[index]['word'] == ',':\n index += 1\n if index == self.length:\n return None, None, 0, 0\n\n # Hack\n state, n = self.state_hack(index)\n if n > 0:\n index += n\n if index == self.length: index -= 1 # Hack\n return city, state, index - start + 1, index\n \n if self.words[index]['tag'] not in [Vocabulary.NAME, Vocabulary.ACRONYM]:\n return None, None, 0, 0\n \n try:\n state = self._state_dict[self.words[index]['word']]\n return city, state, index - start + 1, index\n except: \n return None, None, 0, 0", "def test_city_country_population(self):\n santiago_chile = get_city_name('santiago', 'chile','5000000')\n self.assertEqual(santiago_chile, 'Santiago, Chile - population 5000000')", "def continent(name=None):\n ldb = location_db()\n try:\n return ldb.find_continent(country=name)\n except LocationNotFound:\n return ldb.find_continent(country=ldb.find(name=name)[\"country\"])", "def test_triple_word_weston_super_mare(self):\n result = location.lookup_location('Weston Super Mare GB')\n\n self.assertEqual(result['country'], 'GB')", "def get_city_job(html):\n soup = BeautifulSoup(html, 'html.parser')\n city = soup.find(class_=\"subtle loc\").get_text()\n if city:\n return city\n return None", "def ad_rep_city_state(obj):\n return '%s, %s' % (obj.ad_rep.geolocation_object.us_city.name,\n obj.ad_rep.geolocation_object.us_state.abbreviation)", "def cities(city_id=None):\n cities_id = storage.get('City', city_id)\n if cities_id:\n return jsonify(cities_id.to_dict())\n abort(404)", "def city(self):\r\n try:\r\n return str(self.connect()['name'])\r\n except:\r\n return '@weather_city'", "def GetWorldCities():\n return GetDataFromCsvFile('world_cities.csv')", "def get_city_points(city):\n for item in coordinate_list:\n if item[0] == city:\n return (item[1], item[2])", "def cityAll(state_id):\n ll = []\n state = storage.get(\"State\", str(state_id))\n if state is None:\n abort(404)\n xx = storage.all(\"City\").values()\n for yy in xx:\n if yy.state_id == str(state_id):\n ll.append(yy.to_dict())\n return jsonify(ll)", "def city(city_id):\n\n if storage.get(\"City\", city_id) is not None:\n return 
jsonify(storage.get(\"City\", city_id).to_dict())\n else:\n abort(404)", "def city():\r\n cursor.execute('SELECT city FROM american_cities \\\r\n order by RANDOM() limit 1;')\r\n return cursor.fetchone()[0]", "def cities_by_states():\n states = storage.all(State).values()\n return render_template('8-cities_by_states.html', states=states)", "def city_country(city, country):\n full_city = city + \", \" + country\n return full_city.title()", "def test_get_currency_by_geo_location(self):\n pass", "def cities_by_states():\n states = storage.all(State)\n cities = storage.all(City)\n return render_template('8-cities_by_states.html', states=states,\n cities=cities)", "def cities(self):\r\n return self._store.keys()", "def search(query: str):\n try:\n # Search with user query.\n # TODO: Handle list with multiple data.\n data: List = pycountry.countries.search_fuzzy(query)\n\n # extract alpha2 value\n _, _, alpha_2, _ = utils.extract_fuzzy_country_data(data)\n\n # Get a list of timezone names.\n result = utils.get_timezones(alpha_2)\n\n payload: List = []\n\n # If length is greater than one, show terminal menu.\n if len(result) > 1:\n entry = utils.handle_interaction(result)\n\n payload.append(entry)\n\n return utils.get_local_time(payload)\n except LookupError:\n return console.print(\n \"Couldn't resolve your query, please try other keywords.:x:\"\n )\n\n return utils.get_local_time(result)", "async def report_by_city(city: str):\n return DF[DF[\"city\"] == city.title()].to_dict(orient=\"records\")", "def city_by_id(city_id):\n\n fetched_obj = storage.get(\"City\", str(city_id))\n\n if fetched_obj is None:\n abort(404)\n\n return jsonify(fetched_obj.to_json())", "def city_country(city_name, country_name):\n city_country_combo = city_name + ', ' + country_name\n return city_country_combo.title()" ]
[ "0.65049624", "0.63830185", "0.6244422", "0.61362547", "0.61103976", "0.5962785", "0.5956759", "0.5944463", "0.5939915", "0.5865012", "0.58639216", "0.58566123", "0.5840458", "0.58180654", "0.58092016", "0.58036107", "0.58036107", "0.5790917", "0.57779676", "0.5772209", "0.57627183", "0.5737695", "0.56962794", "0.56928736", "0.56928736", "0.56928736", "0.568141", "0.56795925", "0.5674642", "0.5673135", "0.56692076", "0.5660838", "0.5610339", "0.5595903", "0.5572494", "0.5551363", "0.55490845", "0.5544842", "0.5498342", "0.54858667", "0.54858667", "0.54835147", "0.5451459", "0.5446115", "0.5436845", "0.5423738", "0.54137534", "0.53926414", "0.53782654", "0.53782654", "0.53782654", "0.53782654", "0.53782654", "0.5375658", "0.5365621", "0.5363515", "0.5338359", "0.53286386", "0.5326346", "0.5319699", "0.5317529", "0.5316986", "0.52997434", "0.52964026", "0.52932304", "0.5281405", "0.52457833", "0.5240094", "0.5231634", "0.52017605", "0.5199539", "0.5197381", "0.51963717", "0.5188361", "0.51711303", "0.5156189", "0.51546115", "0.5152509", "0.5151043", "0.514185", "0.51399285", "0.5139263", "0.5133573", "0.51295024", "0.51232946", "0.51208246", "0.5117131", "0.51059467", "0.5098867", "0.50973874", "0.50908905", "0.50907224", "0.5090616", "0.509", "0.5076496", "0.50736666", "0.50649256", "0.5064499", "0.5059809", "0.5039994" ]
0.61644584
3
Initialise the city database and set the default depression.
def __init__(self): self._citydb = CityDB() self._depression = 6 # Set default depression in degrees
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setUp(self):\n self.my_city = City()", "def initialize():\n sql_db = SQLConnection()\n with SQLCursor(sql_db) as cur:\n cur.execute('SELECT position from govt_info')\n row = cur.fetchone()\n for pos in Government.positions:\n if row is None or len(row) != len(Government.positions):\n cur.execute('INSERT OR IGNORE INTO govt_info (position) VALUES (?);', (pos,))", "def reset_db(self):\n self._cities_distance.drop()", "def initCitys(self):\n self.cities = []\n for vertex in self.metaGraph:\n self.cities.append(vertex)", "def setUp(self):\n\n Cafe.query.delete()\n City.query.delete()\n\n sf = City(**CITY_DATA)\n db.session.add(sf)\n\n cafe = Cafe(**CAFE_DATA)\n db.session.add(cafe)\n\n db.session.commit()\n\n self.cafe = cafe", "def setUp(self):\n\n Cafe.query.delete()\n City.query.delete()\n\n sf = City(**CITY_DATA)\n db.session.add(sf)\n\n cafe = Cafe(**CAFE_DATA)\n db.session.add(cafe)\n\n db.session.commit()\n\n self.cafe = cafe", "def init_db() -> None:\n conn = sqlite3.connect('../Utils/map_storage.db')\n cursor = conn.cursor()\n\n with conn:\n station_cmd = \"\"\"CREATE TABLE IF NOT EXISTS\n nodes(city TEXT, name TEXT, is_station TEXT, x INT, y INT, zone TEXT)\"\"\"\n\n cursor.execute(station_cmd)\n\n connection_cmd = \"\"\"CREATE TABLE IF NOT EXISTS\n connections(city TEXT, name_1 TEXT, name_2 TEXT, color TEXT)\"\"\"\n\n cursor.execute(connection_cmd)", "def init_database(self):\n # init_database(self.engine)", "def __init__(self):\n self._zipcode = None\n self._city = None", "def init_db():\n db.drop_all()\n db.create_all()\n seed_companies()\n seed_emission_reports()\n seed_reduction_targets()\n seed_milestones()", "def initialise_bdd(self):\n print(fr.FR[1])\n self.base.create_database(\"sql/p5.sql\")\n print(fr.FR[2])\n self.category_table.save_category()\n print(fr.FR[3])", "def __init_database(self):\n from admin.database import init_db\n init_db()", "def init_database(self):\n init_database(self.engine)", "def initialize():\n DATABASE.connect()\n DATABASE.drop_tables([Journal], safe=True)\n DATABASE.create_tables([Journal], safe=True)\n DATABASE.close()", "def setUp(self):\n\n City.query.delete()\n Cafe.query.delete()\n\n sf = City(**CITY_DATA)\n db.session.add(sf)\n\n cafe = Cafe(**CAFE_DATA)\n db.session.add(cafe)\n\n db.session.commit()\n\n self.cafe_id = cafe.id", "def setUpClass(cls):\n cls.city = City()", "def set_tour(self, city_list=None):\n self.cities = city_list or \\\n random.sample(range(len(self.x_points)), len(self.y_points))\n self.distance = 0\n self.fitness = 0", "def initialize():\n db.connect()\n db.create_tables([Expense], safe=True)", "def __post_init__(self):\n self.dbase = databases.Database(\n self.dsn,\n min_size=self.min_size,\n max_size=self.max_size\n )\n self.engine, self.meta = self.get_engine_metadata()", "def setUp(self):\n\n Cafe.query.delete()\n City.query.delete()\n\n sf = City(**CITY_DATA)\n db.session.add(sf)\n\n cafe = Cafe(**CAFE_DATA)\n db.session.add(cafe)\n\n db.session.commit()\n\n self.cafe_id = cafe.id", "def populate_cities():\n if City.query.filter_by(name=CITIES[0]).first():\n return\n\n for city in CITIES:\n _add_city(city)", "def init_db():\n db.drop_all()\n db.create_all()\n\n print(\"Initialized Connect 4 Database.\")", "def __init__(self, city):\r\n self.city = city", "def initialise(self):\n self.set_up()", "def init_db():\n\tdb.drop_all()\n\tdb.create_all()\n\n\tprint(\"Initialized Database.\")\n\treturn", "def city(self, city):\n self._city = city", "def populate_db(self):\n # Get donors\n log.info(\"Populating 
donors.\")\n\n self.r.hmset('Thomas', {'donations': '500', 'email': 'thomas@thomas.com', 'city': 'Athens', 'state': 'GA', 'zip': 30606})\n\n self.r.hmset('Ted', {'donations': '1', 'email': 'ted@ted.com', 'city': 'Memphis', 'state': 'TN', 'zip': 38104})\n\n self.r.hmset(\"Bailey\", {'donations': '1000', 'email': 'bailey@bailey.com', 'city': 'Washington', 'state': 'DC', 'zip': 12345})", "def __init__(self):\n\t\tDBHelper.initialize() #initiate dababase helper", "def initialize(self):\n self.population.initialize()\n self.cache.initialize()\n if self.storage:\n self.storage.initialize()", "def setup_database(self):\n self.db.setup_database()", "def initialise_db(prefill=True, clear=False):\n db.DB.create_all()\n\n if clear:\n prompt = raw_input(\n 'Are you sure you wish to clear the entire database? '\n )\n\n if prompt.lower() in ['yes', 'y']:\n models.Affiliation.query.delete()\n models.Announcement.query.delete()\n models.Battels.query.delete()\n models.CardTransaction.query.delete()\n models.College.query.delete()\n models.Log.query.delete()\n models.Photo.query.delete()\n models.Statistic.query.delete()\n models.Ticket.query.delete()\n models.Transaction.query.delete()\n models.TransactionItem.query.delete()\n models.User.query.delete()\n models.Voucher.query.delete()\n models.Waiting.query.delete()\n\n if prefill:\n db.DB.session.add_all(static.COLLEGES)\n db.DB.session.add_all(static.AFFILIATIONS)\n db.DB.session.commit()", "def test_init(self, fixture_environment):\n\n # Generate city object\n city_object = cit.City(environment=fixture_environment)\n\n # Check inheritance from citydistrict object of pycity\n assert city_object._kind == 'citydistrict'", "def __init__(self):\r\n date_time('Connecting to local database ...')\r\n\r\n self.conn = sqlite3.connect(DATABASE_PATH)\r\n self.cursor = self.conn.cursor()\r\n\r\n # Set up database\r\n self.cursor.execute('PRAGMA synchronous = OFF')\r\n self.cursor.execute('PRAGMA journal_mode = OFF')\r\n self.cursor.execute('PRAGMA locking_mode = EXCLUSIVE')\r\n self.cursor.execute('PRAGMA count_changes = FALSE')\r\n\r\n self.cursor.execute('CREATE TABLE IF NOT EXISTS citations (id INTEGER PRIMARY KEY, citation TEXT UNIQUE);')", "def __init__(self, dbfile=\"eom_default_db.sqlite\", init_db=False):\n self.sql = None\n missing = not os.path.exists(dbfile)\n self.sql = sqlite3.connect(dbfile, detect_types = sqlite3.PARSE_DECLTYPES)\n self.sql.text_factory = str\n if missing or init_db:\n self.init_rpki_rtr_tables()\n self.init_rib_tables()\n self.init_analysis_tables()", "def initialize(self):\r\n state_name = self.state\r\n\r\n state_name = state_name.lower()\r\n\r\n response = requests.get(\"https://cdn-api.co-vin.in/api/v2/admin/location/states\") \r\n\r\n if response.ok:\r\n\r\n df = pd.DataFrame(json.loads(response.text)[\"states\"]) \r\n\r\n state = process.extractOne(state_name, df[\"state_name\"].tolist()) # fuzzy match to get best state match \r\n\r\n self.state_id = df.loc[df.state_name == state[0],[\"state_id\"]].values[0][0] \r\n self.load_districts()", "def city(self, city):\n\n self._city = city", "def city(self, city):\n\n self._city = city", "def city(self, city):\n\n self._city = city", "def city(self, city):\n\n self._city = city", "def city(self, city):\n\n self._city = city", "def city(self, city):\n\n self._city = city", "def city(self, city):\n\n self._city = city", "def city(self, city):\n\n self._city = city", "def city(self, city):\n\n self._city = city", "def tearDown(self):\n del self.my_city", "def initialize_database():\n # 
TODO: Refactor the funtime library\n this.db = Store(this.host).create_lib(this.store_name).get_store()", "def set_initial_values(self):\n\n pass", "def initialize():\n DATABASE.connect()\n DATABASE.create_tables([User, Entry], safe=True)\n DATABASE.close()", "def initialize(self):\n\n db = dict()\n\n db['meta'] = Meta(None)\n db['race'] = Race(None, None, None, None, None)\n db['track'] = Track(None, None)\n db['classes'] = set([])\n db['teams'] = set([])\n db['drivers'] = set([])\n\n self.db = db", "def __init__(self):\n self._db = db\n # Connect to DB\n self._db.connect()\n # Create tables\n self._db.create_tables([Teachers, Parents, Tutors, Students, Homework, Groups, StudentsGroups, Courses])\n # Create filling entries\n self.__create_dummies()\n self._db.close()", "def initialize():\n\n db.connect() # Se conecta\n db.create_tables([Entry], safe=True) # Crea las tablas\n # safe=true evita crear modelos ya creados", "def init_population(self):\n pass", "def _initialiseDefault(self):\n\n # Empty old properties\n if not self.properties.empty:\n self.properties.drop(self.properties.index, inplace=True)\n # Empty old fields\n if not self.fields.empty:\n self.fields.drop(self.fields.index, inplace=True)\n\n # Empty functions\n self.functions = {}\n\n initConf.initialProps(self.properties, self._jsondata, PropertyID)\n initConf.initialField(self.fields, self._jsondata, FieldID)\n initConf.initialFunc(self.functions, self._jsondata, FunctionID)", "def fill_db(self, data):\n check_input_params(data, self.DB)\n self.db = data[self.DB]", "def _post_init(self):\n self.set_new_record_state(False)", "def city_update(self):\n self.city = self.city_finder(self.location.__str__())", "def populate_database(self):\n self.insert_products()\n self.insert_categories()\n self.insert_products_categories()\n self.insert_stores()\n self.insert_products_stores()", "def setup(self):\n print(\"INIT DATA\")\n\n self.nutella = Product.objects.create(name=\"nutella\", nutriscore=\"e\")", "def init(self):\n self.db.connect()\n try:\n self.db.create_tables([JambiModel], safe=True)\n JambiModel.create(ref='0')\n self.logger.info('Database initialized')\n except IntegrityError:\n self.logger.info('Database was already initialized')\n self.db.close()", "def setUp(self):\n try:\n # Get default data from medical_forum_data_dump.sql, populate tables and connect to DB\n ENGINE.populate_tables()\n self.connection = ENGINE.connect()\n\n # In case of error/exception in populating tables, clear all tables data\n except Exception as exception:\n print(exception)\n ENGINE.clear()", "def setUp(self):\n resume.objects.create(\n first_name='Nicholas',\n last_name='Bielinski',\n )\n experience.objects.create(\n title='Helpdesk Technician',\n location='L3 Technologies',\n start_date='6/26/2017',\n end_date='present',\n description='blah blah blah'\n )\n education.objects.create(\n institution_name='UNH Manchester',\n location='Manchester',\n degree='Bachelor',\n major='CIS',\n gpa = '3.5'\n )", "def initCitys(self, metaGraph):\n self.cities = []\n for vertex in metaGraph:\n self.cities.append(vertex)", "def initialisation(self):\n self.create_variables()\n self.create_placeholders()\n self.build_model()\n self.reset_lr(None, True)\n self.build_loss()\n self.initialised = True", "def initialize():\n db.connect()\n db.create_tables([Entry], safe=True)", "def initialize():\n db.connect()\n db.create_tables([Entry], safe=True)", "def setUp(self):\n self.ds = DictionaryStore(Camper)\n self.engine = Engine(self.ds)", "def dbinit( *args, 
**kwargs ):", "def set_city_count(self, city_count):\n self.city_count = city_count", "def init_database(self):\n try:\n DatabaseCreation.create_database()\n except BaseExceptionHandler as base_exception_handler:\n self.logger.error(message=base_exception_handler.error_message)", "def startup(self):\n self.load_up_initial_db(TIMESTAMP_PARSE_DICT)\n self.add_numeric_cols()", "def init():\n database.create_tables([Tracker])\n database.commit()", "def init_db():\n db.drop_all()\n db.configure_mappers()\n db.create_all()\n db.session.commit()", "def initial_db_setup() -> None:\n db_filename = \"twdft.db\"\n db_path = os.path.join(TWDFT_DATA_DIR, db_filename)\n csv_filename = \"sites.csv\"\n csv_path = os.path.join(TWDFT_DATA_DIR, csv_filename)\n db_is_new = not os.path.exists(db_path)\n sites_csv = os.path.join(TWDFT_DATA_DIR, csv_filename)\n\n if db_is_new:\n with sqlite3.connect(db_path) as conn:\n c = conn.cursor()\n\n # first we create a site object\n c.execute(\n \"\"\"\n CREATE TABLE site(\n id INTEGER PRIMARY KEY,\n name TEXT,\n site_type TEXT,\n sub_category TEXT,\n address_1 TEXT,\n address_2 TEXT,\n town TEXT,\n county TEXT,\n country TEXT,\n postcode TEXT,\n site_category TEXT,\n freq_target TEXT,\n created TEXT,\n notes TEXT,\n last_inspection TEXT,\n next_inspection TEXT,\n pfsp_approval TEXT,\n pfsp_expiry TEXT,\n unlocode TEXT,\n pfso TEXT,\n pso TEXT,\n pfsa_approval TEXT,\n pfsa_expiry TEXT,\n team TEXT,\n created_by TEXT,\n last_updated TEXT,\n updated_by TEXT,\n afp_loc TEXT,\n rdf TEXT,\n classification TEXT,\n article24 TEXT,\n psa_approval TEXT,\n inspection_due TEXT\n )\n \"\"\"\n )\n conn.commit()\n\n # next we want an inspection table\n\n c.execute(\n \"\"\"\n CREATE TABLE inspection(\n id INTEGER PRIMARY KEY,\n site INTEGER,\n date TEXT,\n status TEXT,\n time TEXT,\n FOREIGN KEY(site) REFERENCES site(id)\n )\n \"\"\"\n )\n conn.commit()\n\n # next we want an inspector table\n c.execute(\n \"\"\"\n create table inspector(\n id integer primary key,\n first_name text,\n last_name text\n )\n \"\"\"\n )\n conn.commit()\n\n for i in INSPECTORS:\n first = i.split(\" \")[0]\n last = i.split(\" \")[1]\n c.execute(\n \"INSERT INTO inspector(first_name, last_name) VALUES (?,?)\",\n (first, last),\n )\n\n # a table that links inspectors with inspections\n c.execute(\n \"\"\"\n CREATE TABLE inspector_inspections(\n inspector INTEGER,\n inspection INTEGER,\n FOREIGN KEY (inspector) REFERENCES inspector(id),\n FOREIGN KEY (inspection) REFERENCES inspection(id)\n )\n \"\"\"\n )\n conn.commit()\n\n for site in map(Site._make, csv.reader(open(csv_path, \"r\"))):\n try:\n c.execute(\n f\"\"\"\n INSERT INTO site VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)\"\"\",\n (\n int(site.id.replace(\",\", \"\")),\n site.name,\n site.site_type,\n site.sub_category,\n site.address_1,\n site.address_2,\n site.town,\n site.county,\n site.country,\n site.postcode,\n site.site_category,\n site.freq_target,\n site.created,\n site.notes,\n site.last_inspection,\n site.next_inspection,\n site.pfsp_approval,\n site.pfsp_expiry,\n site.unlocode,\n site.pfso,\n site.pso,\n site.pfsa_approval,\n site.pfsa_expiry,\n site.team,\n site.created_by,\n site.last_updated,\n site.updated_by,\n site.afp_loc,\n site.rdf,\n site.classification,\n site.article24,\n site.psa_approval,\n site.inspection_due,\n ),\n )\n except sqlite3.IntegrityError as e:\n print(\"That hasnae worked\", site.inspection_due)", "def populate(self):\n\n NUM_COUNTRIES = 2 # random.randint(1, 4)\n\n # find a suitable 
hex\n with Timer(\"Creating initial data\", debug=self.debug):\n\n for i in range(NUM_COUNTRIES):\n country, provinces, pops = create_country(self, self.map)\n country.determine_tax_policy()\n self.countries.append(country)", "def __init_geoip2__(self) -> None:\n try:\n self.geoip2 = GeoIP2()\n logging.info(\"GeoIP2 - successfully initialised database reader\")\n except GeoIP2Exception as ex:\n raise MiddlewareNotUsed(f\"GeoError initialising GeoIP2: {ex}\") from ex", "def initialize():\n DATABASE.connect()\n DATABASE.create_tables([User], safe=True)\n DATABASE.close()", "def main():\n config = configparser.ConfigParser()\n config.read('dwh.cfg')\n\n conn = psycopg2.connect(\"host={} dbname={} user={} password={} port={}\".format(*config['STAGE'].values()))\n cur = conn.cursor()\n \n #remove the existing tables\n drop_tables(cur, conn)\n \n #replace the tables with new ones\n create_tables(cur, conn)\n \n #add missing postcode value into table\n default_missing_values(cur, conn)\n \n conn.close()", "def init_module():\n global geoip_db_reader\n if geoip_db_reader:\n raise RuntimeError(\"Attempted to double initialize geoip module\")\n\n database_path = get_setting('geoip', 'DatabaseFilePath')\n geoip_db_reader = geoip2.database.Reader(database_path)\n\n # Ensure the reader is properly initialized\n try:\n geoip_db_reader.country('127.0.0.1')\n except geoip2.errors.AddressNotFoundError:\n pass", "def initialize_db(self) -> None:\n if not self.check_schema_initialized():\n self._create_genes_table()\n self._create_meta_data_table()", "def __init__(self, cities):\n self.cities = copy.copy(cities)\n self.compute_fitness()", "def initdb():\n db.drop_all()\n db.configure_mappers()\n db.create_all()\n db.session.commit()", "def initialize_database():\n # Create the schema\n Base.metadata.create_all(engine)\n\n # Create a connection/database session\n session = Session()\n\n # Now, create a few restaurants:\n cupcake = Restaurant(name=\"Cupcakes\")\n five_guys = Restaurant(name=\"Five Guys\")\n ihop = Restaurant(name=\"IHOP\")\n\n # And a few users:\n mike = User(name=\"Mike\")\n ryan = User(name=\"Ryan\")\n\n # And finally a few votes:\n mike.preferences.append(Preference(vote=\"+1\", restaurant=five_guys))\n ryan.preferences.append(Preference(vote=\"+0\", restaurant=five_guys))\n ryan.preferences.append(Preference(vote=\"-0\", restaurant=cupcake))\n\n session.add(mike)\n session.add(ryan)\n session.add(ihop)\n\n session.commit()\n\n session.close()", "def build(self):\n allow_bare = AllowBareCityName(blocklist=self.bare_name_blocklist)\n\n iter_keys = CityKeyIter(allow_bare)\n\n # Deduped cities.\n cities = WOFLocality.clean_us_cities()\n\n logger.info('Indexing US cities.')\n\n for row in tqdm(cities):\n\n # Key -> id(s)\n for key in map(keyify, iter_keys(row)):\n self.add_key(key, row.wof_id)\n\n # ID -> city\n self.add_location(row.wof_id, CityMatch(row))", "def initdb_command():\n db.drop_all()\n db.create_all()\n if LOAD_DUMMY_DATA:\n setup_dummy_data()\n\n print('Initialized the database.')", "def __init__(self):\n\n self.__path = \"DataBase.db\"\n self.__admins = []\n self.__update_admin_cache()", "def init_database():\n database.init(DATABASE_NAME)\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON')\n if not database.table_exists([Customer]):\n database.create_tables([Customer])\n database.close()", "def initialize():\n \n db.connect()\n db.create_tables([Product], safe=True)", "def init(username, password):\r\n click.echo('Initializing the database...')\r\n 
db.create_all()\r\n\r\n admin = Admin.query.first()\r\n if admin:\r\n click.echo('The adminstrator already exists, updating...')\r\n admin.username = username\r\n admin.set_password(password)\r\n else:\r\n click.echo('Creating the temporary administrator account..')\r\n admin = Admin(\r\n username=username,\r\n blog_title='Bluelog',\r\n blog_sub_title=\"No, I'm the real thing.\",\r\n name='Admin',\r\n about='Anything about you'\r\n )\r\n admin.set_password(password)\r\n db.session.add(admin)\r\n\r\n category = Category.query.first()\r\n if category is None:\r\n click.echo('Creating the default category...')\r\n category = Category(name='默认')\r\n db.session.add(category)\r\n\r\n db.session.commit()\r\n click.echo('Done.')", "def on_init(self):\n self.model.maze.initialize(os.path.join(\n config.value['src']['data'], 'maze.csv'))", "def init():\n\n # delete existing file\n if os.path.exists(DBFILE):\n os.remove(DBFILE)\n\n db = sqlite3.connect(DBFILE)\n # create tables\n create(db, PARAGRAPH, \"paragraph\")\n create(db, QUESTION, \"question\")\n create(db, ANSWER, \"answer\")\n\n return db", "def populate_database(telescope_name, instrument_name):\n telescope = Telescope.objects.create(\n name=telescope_name, latitude=25.0, longitude=45.0)\n instrument = Instrument.objects.create(\n name=instrument_name, telescope=telescope)\n for year_int in (2012, 2013):\n for month_int in range(1, 13):\n for night_int in (1, monthrange(year_int, month_int)[1]):\n ut_date = date(year_int, month_int, night_int)\n night = Night.objects.create(\n ut_date=ut_date, instrument=instrument, observers='Smith')\n Exposure.objects.create(\n night=night, run_number=1, ut_start=time(10, 0, 0),\n exposed=20.0, ra=60.0, dec=30.0, object_exp=True)\n Exposure.objects.create(\n night=night, run_number=2, ut_start=time(11, 0, 0),\n exposed=30.0, ra=90.0, dec=0.0, object_exp=True)\n Exposure.objects.create(\n night=night, run_number=3, ut_start=time(12, 0, 0),\n exposed=40.0, ra=120.0, dec=-30.0, object_exp=False)", "def manual_enter(self):\n self._dbconnect = sqlite3.connect(self._db_file)\n\n # Set row_factory to access columns by name\n self._dbconnect.row_factory = sqlite3.Row\n\n # Create a cursor to work with the db\n self._cursor = self._dbconnect.cursor()", "def strict_startup(self):\n self.load_up_initial_db(TIMESTAMP_PARSE_DICT)\n self.clean()\n self.add_numeric_cols()", "def __init__(self):\n\n catalog.connect_database()\n catalog.clean_db()\n self.viewer = Viewer()\n self.editor = Editor()", "def initDatabase(self):\n\n try:\n self.dbase = workflowManager(self.wf_name)\n\n working = self.config['working_d']\n self.dbase.initWorkflow(self.wf_name, working)\n\n except sqlite3.Error as error:\n self.logger.warning('Database %s: %s', self.wf_name, error.args)\n #print('Database %s: %s', self.wf_name, error.args)\n\n return", "def __init__(self):\n self.conn = psycopg2.connect(dbname=DB, user=DB_USER, password=DB_PW, host=HOST, port=PORT)\n self.categories = self.fill_category()\n self.fill_products()", "def create_city():\n city = {}\n city['biysk'] = {}\n city['biysk']['barnaul'] = 9\n city['biysk']['novosibirsk'] = 11\n city['biysk']['belokurikha'] = 8\n city['barnaul'] = {}\n city['barnaul']['tomsk'] = 4\n city['belokurikha'] = {}\n city['belokurikha']['novosibirsk'] = 2\n city['novosibirsk'] = {}\n city['novosibirsk']['barnaul'] = 2\n city['novosibirsk']['tomsk'] = 5\n city['novosibirsk']['omsk'] = 20\n city['tomsk'] = {}\n city['tomsk']['krasnoyarsk'] = 6\n city['krasnoyarsk'] = {}\n city['krasnoyarsk']['omsk'] = 
7\n city['omsk'] = {}\n return city", "def boot(self):\n self._columns = ()\n self._creates = {}\n\n self._sql = \"\"\n self._sql_binding = \"\"\n self._bindings = ()\n\n self._updates = ()\n\n self._wheres = ()\n self._order_by = ()\n self._group_by = ()\n self._joins = ()\n self._having = ()\n\n self._aggregates = ()\n\n self._limit = False\n self._offset = False\n self.set_action(\"select\")", "def tearDown(self):\n\n Cafe.query.delete()\n City.query.delete()\n db.session.commit()", "def tearDown(self):\n\n Cafe.query.delete()\n City.query.delete()\n db.session.commit()" ]
[ "0.6182034", "0.6132606", "0.60481834", "0.60060346", "0.5950479", "0.5950479", "0.59373367", "0.59160274", "0.5874279", "0.58517987", "0.5830317", "0.58109874", "0.5791189", "0.57841307", "0.5774677", "0.57559514", "0.5754031", "0.57528317", "0.5746476", "0.5727893", "0.567058", "0.5652014", "0.563074", "0.5621083", "0.5593378", "0.5565594", "0.55648994", "0.5560749", "0.55539143", "0.55507696", "0.55453473", "0.5506629", "0.54969263", "0.54882413", "0.54857135", "0.5484853", "0.5484853", "0.5484853", "0.5484853", "0.5484853", "0.5484853", "0.5484853", "0.5484853", "0.5484853", "0.5483044", "0.548144", "0.5480779", "0.5460964", "0.54460657", "0.54447347", "0.5425916", "0.54116964", "0.5406263", "0.53961986", "0.539266", "0.53503895", "0.533275", "0.5329411", "0.5326916", "0.5321469", "0.5307176", "0.53066427", "0.5306077", "0.5289003", "0.5289003", "0.5277366", "0.52572274", "0.5250956", "0.52490014", "0.5243931", "0.52334017", "0.5228255", "0.5222423", "0.52212477", "0.5192904", "0.5177749", "0.5177473", "0.5176771", "0.51752037", "0.5170749", "0.5166319", "0.5154599", "0.514069", "0.5138889", "0.51351976", "0.51316875", "0.51288366", "0.5124398", "0.51219314", "0.51217717", "0.5121541", "0.511834", "0.51145893", "0.51134586", "0.5107958", "0.51038074", "0.50886756", "0.5080999", "0.5079138", "0.5079138" ]
0.77128774
0
Returns the City instance specified by ``key``.
def __getitem__(self, key):
        city = self._citydb[key]
        city.astral = self
        return city
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_city(self, name: str):\n key = name.lower()\n try:\n return self._cities[key]\n except KeyError:\n city = City(name=name, state=self)\n self._cities[key] = city\n return city", "def __getitem__(self, key):\n \n key = str(key).lower().encode('utf-8')\n for group in self._groups.values():\n try:\n return group[key]\n except KeyError:\n pass\n\n raise KeyError('Unrecognised city name - %s' % key)", "def retrieve_city(city_id):\n city = storage.get('City', city_id)\n if city:\n return city.to_dict()\n abort(404)", "def fetch(cls, key):\n return cls(_key=key, **(cls._dbag[key]))", "def get_city(city_id):\n city = storage.get(\"City\", city_id)\n if city is None:\n abort(404)\n return jsonify(city.to_json())", "def city_by_id(city_id):\n\n fetched_obj = storage.get(\"City\", str(city_id))\n\n if fetched_obj is None:\n abort(404)\n\n return jsonify(fetched_obj.to_json())", "def get_city(city_id):\n city = storage.get(\"City\", city_id)\n if city is None:\n abort(404)\n return jsonify(city.to_dict())", "def __getitem__(self, key):\n if key.isdigit():\n customer = self.request.db.query(models.Klant).get(key)\n if customer:\n return Customer(self, key, customer=customer)\n raise KeyError", "def get_city(self, city_id):\n city = self.city_repo.get_by_id(city_id)\n\n resource = self.city_repo.dump(city)\n return dict(city=resource), [], SUCCESS", "def retrieve_city(city_id):\n obj = models.storage.get(\"City\", city_id)\n if obj is not None:\n return jsonify(obj.to_dict())\n else:\n abort(404)", "def find(cls, key):\r\n return cls.query().get(key)", "def get(self, key):\n if self.db is None:\n self._init()\n return self.db[key]", "def __getitem__(cls, key):\n return cls(cls._nameToValue[key])", "def get_case(self, key: str):\n case = self.cases.get(key)\n if not hasattr(case, 'case_id'):\n message = \"get_case(): Case key {} does not have a case_id\"\n logmessage(message.format(key))\n else:\n logmessage(\"get_case(): \" + \"Retrieved case {}\".format(str(case)))\n return case", "def city_by_id(city_id):\n cities_values = storage.all(\"City\").values()\n for obj in cities_values:\n if obj.id == city_id:\n return jsonify(obj.to_dict())\n abort(404)", "def get(self, key):\n # Initialize key variables\n result = self.cache.get(key)\n\n # Return\n return result", "def get_city(self, territory_id: str = \"\"):", "def get_city(self, territory_id: str = \"\"):", "def city(city_id):\n\n if storage.get(\"City\", city_id) is not None:\n return jsonify(storage.get(\"City\", city_id).to_dict())\n else:\n abort(404)", "def get(self, key):\n return self._cache[key]", "def find_city(city, dbsession):\n\n\t# Since we're creating the FK relation based on ID, and hence the casing has no bearing on \n\t# whether the city record associates with the address, I'm upcasing the city to prevent dupes.\n\tcity = str(city)\n\tcity = city.upper()\n\n\tresult = dbsession.query(db.City).filter_by(city_name=city).first()\n\n\tif result is None:\n\t\t# Create a new instance of city\n\t\tcity_object = db.City(city)\n\t\t# I'm adding the city without committing the transaction since it would also\n\t\t# commit the address insert transaction that's still open in routes.py.\n\t\tdbsession.add(city_object)\n\t\treturn city_object\n\telse:\n\t\t# Assign the existing user object to the variable\n\t\treturn result", "def __getitem__(self, key):\n return type(self)(self.origin, typeof(key))", "def get(self, id):\n\n session = Session()\n city = session.query(Cities).get(id)\n if city:\n response = dict(data=city.get_as_dict())\n 
else:\n return \"City with id={} does not exist!\".format(id), HTTP_NOT_FOUND_CODE\n\n return response, HTTP_OK_CODE", "def show_city(city_id=None):\n city = storage.get(City, city_id)\n\n if city is None:\n abort(404)\n\n return jsonify(city.to_dict())", "def __getitem__(self, key):\n self._remove_expired()\n\n cache_entry = self._d.get(key, None)\n log.debug(\"__getitem__: {}\".format(cache_entry))\n\n return cache_entry", "def get(self, key):\n return self.container[key]", "def get(self, key):\n person = self._data.get(key)\n\n if not person:\n raise NotFoundError(\"{} could not be found\".format(key))\n\n return Person(key, person)", "def city(self):\n if \"city\" in self._prop_dict:\n return self._prop_dict[\"city\"]\n else:\n return None", "def city(self):\n if \"city\" in self._prop_dict:\n return self._prop_dict[\"city\"]\n else:\n return None", "def __getitem__(self, key):\n log.info(\"querying post %s, %s:%s\" % (self.blog, self.date, key))\n try:\n post = DBSession.query(Post).filter_by(blog=self.blog, date=self.date, slug=key).one()\n # make location aware\n post.__parent__ = self\n return post\n except NoResultFound:\n raise KeyError", "def _get_node(self, key):\n\n index = self._hash_function(key) % self.capacity # Get the index by hashing the key\n node = self._buckets[index].contains(key) # Get the node with the key (if it exists)\n return node", "def __getitem__(self, key):\n\n bucket_key = self.key_for_bucket(key)\n return self.buckets[bucket_key][key]", "def city(self):\n return self._city", "def city(self):\n return self._city", "def city(self):\n return self._city", "def city(self):\n return self._city", "def city(self):\n return self._city", "def lookup(self, key):\n return self.root.lookup(key)", "def get(self, key):\n raise NotImplementedError(\"get must be implemented in your cache class\")", "def _single_getitem(self, key):\n return getattr(self._cpp_obj, self._getter)(key)", "def __getitem__(self, key: T) -> T:\n return self.lookup(key)", "def get(self, key):\n dkey = digest(key)\n # if this node has it, return it\n if self.storage.get(dkey) is not None:\n return defer.succeed(self.storage.get(dkey))\n node = Node(dkey)\n nearest = self.protocol.router.findNeighbors(node)\n if len(nearest) == 0:\n self.log.warning(\"There are no known neighbors to get key %s\" % key)\n return defer.succeed(None)\n spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)\n return spider.find()", "def __getattr__(self, key):\n return Field(key, self._mappings[key])", "def __getitem__(self, key):\n return self", "def get(self, key):\n if key in self._db:\n return self._db[key]\n else:\n return None", "def get(self, key):\n with self._lock:\n self._check_expire()\n\n obj = self._obj_cache[key]\n self._log.debug(\"getting object '%s' (type=%s). 
\"\n \"updating access time.\",\n key, type(obj))\n self._obj_last_access[key] = time.time()\n return obj", "def __getitem__(self, key: Position) -> Tile:\n (x, y) = key\n if 0 <= x < self.width or 0 <= y < self.height:\n raise KeyError\n return self._tiles[x + y * self.width]", "def city(self) -> str:\n return pulumi.get(self, \"city\")", "def get(self, key):\n if key is None:\n return None # None is not a valid key\n return get_from_subtree(self.root, key)", "def get(self, key: str) -> Any:\n return self.db.get(key)", "def cget(self, key):\n return self._widget_cget(key, cook=False)", "def get(self, key):\n if key is None:\n return None\n return self.cache_data.get(key, None)", "def get(self, key):\n return self._store.get(key, None)", "def get(self, key):\n return self._store.get(key, None)", "def lookup(self, key):", "def city(self) -> Optional[str]:\n return pulumi.get(self, \"city\")", "def find_city(city, used_city, min_path):\n for key in city[used_city].keys():\n if city[used_city][key] == min_path:\n return key", "def get(self, key):\n return self[key]", "def __getitem__(self, key):\n if key in ('x','y','z'):\n return self.asDict()[key]\n else:\n return self.coords.__getitem__(key)", "def __getitem__(self, key):\n for db in self.db:\n if db.name == key:\n return db\n raise IndexError", "def __getitem__(self, key):\n return self.get_models()[str(key)]", "def city_id(city_id):\n selected_city = storage.get(City, city_id)\n if selected_city is not None:\n if request.method == 'GET':\n return make_response(jsonify(selected_city.to_dict()))\n if request.method == 'DELETE':\n selected_city.delete()\n storage.save()\n return make_response(jsonify({}), 200)\n if request.method == 'PUT':\n ignore_keys = ['id', 'state_id', 'created_at', 'updated_at']\n if request.get_json():\n for name, value in request.get_json().items():\n if name not in ignore_keys:\n if hasattr(selected_city, name):\n setattr(selected_city, name, value)\n selected_city.save()\n storage.save()\n put_response = jsonify(selected_city.to_dict())\n return make_response(put_response, 200)\n else:\n error_message = jsonify(error=\"Not a JSON\")\n return make_response(error_message, 400)\n else:\n abort(404)", "def retrieve_city_in_state(state_id):\n\n city_list = []\n if storage.get(\"State\", state_id) is not None:\n for city in storage.get(\"State\", state_id).cities:\n city_list.append(city.to_dict())\n return make_response(jsonify(city_list))\n else:\n return abort(404)", "def get(self, key: K) -> Optional[V]:\n return self.mget([key])[0]", "def get(self, key):\n _filter = {'_id': key}\n doc = self.collection.find_one(_filter)\n\n if doc and not self._verify_timeout(doc):\n return self._unpickle(doc['value'])", "def GetCityFromAirportId(self, airprot_id):\n return self.airports.set_index('airport_id').loc[airprot_id]['city']", "def __getitem__(self, key):\n return self.query(key)", "def get(key):\n return Cache.cache_connector.get(key)", "def __getitem__(self, key):\n return self()[key]", "def get(self, key):\n if key is None:\n raise TypeError\n\n index = self.__get_cache_set_index(key)\n cache_set = self.cache_sets[index]\n h_key = self.__ensure_hashable_key(key)\n return cache_set.get(h_key)", "def _map___getitem__(self, key):\n if not isinstance(key, self.keytype):\n raise KeyError('type of key should be ' + repr(self.keytype) + ' but got ' + repr(type(key)))\n if key not in self:\n raise KeyError('key not found')\n return self.second(self.find(key))", "def __getitem__(self, key):\n result = self.tree[key]\n if result 
is not None:\n \"\"\"This needs to be deep-copied in order not to change the elements in the map via the reference, but\n return the value as in SetlX.\n The index 2 from key implies stands for the value as key-value-pairs are represented as lists of length 2\"\"\"\n return copy.deepcopy(result.key[2])", "def cityId(city_id):\n yy = storage.get(\"City\", str(city_id))\n if yy is None:\n abort(404)\n return jsonify(yy.to_dict())", "def __getitem__(self, key):\n result = mongo['readable-api'].foo.find_one({\"foo\": key})\n if result:\n return self.make_child(key)\n return None", "def cities_id(city_id):\n city = storage.get(City, city_id)\n if city:\n return jsonify(city.to_dict())\n else:\n abort(404)", "def get_entity_by_key(cls, key):\n db_key = \"entity:\" + str(key)\n result = cls.db.hgetall(db_key)\n return (Entity.build(result) if type(result) is dict else None)", "def find_one_bykey(cls, keydict, defaultval = None):\n return cls.dbm().modelclass_find_one_bykey(cls, keydict, defaultval)", "def get(self, key):\n try:\n\n item = self._item_to_dict(self.client.get_item(**self._prepare_get_request(str(key))))\n\n # If item is empty, nothing in cache\n if not item:\n return None\n\n # If current time beyond expiry, nothing to return\n if time()>float(item[self._expiry_field.name]):\n return None\n\n return self.load_object(b64decode(item.get(self._value_field.name)))\n\n except Exception as e:\n logging.info('Error getting object from DynamoDB table %s (%s): %s',self.table_name,e.__class__.__name__,e)\n return None", "def getGeoInfo(self, key):\n\n return [entry for entry in self._GeoInfos if entry.name == key]", "def get(self, key):\n return getattr(self, key)", "def __getitem__(self, key):\n\t\tif not self._is_valid_key(key):\n\t\t\traise KeyError\n\t\t\n\t\tx, y = self._index_from_key(key)\n\t\treturn self._board[x][y]", "def from_api_key(cls, api_key):\n SELECTSQL = \"SELECT * FROM accounts WHERE api_key=:api_key;\"\n with sqlite3.connect(cls.dbpath) as connection:\n connection.row_factory = sqlite3.Row\n cursor = connection.cursor()\n cursor.execute(SELECTSQL, {\"api_key\": api_key})\n dictrow = cursor.fetchone()\n if dictrow:\n return cls(**dictrow)\n return None", "def __getitem__(self, key: Union[int, str]) -> Node:\r\n node: Node = None\r\n if isinstance(key, int):\r\n node = self._nodes.get(key)\r\n if isinstance(key, str):\r\n node = self._node_name_map.get(key)\r\n\r\n if node is None:\r\n raise IndexError(\"Invalid key.\")\r\n\r\n return node", "def get(self, key):\n if key:\n return self.cache_data.get(key)\n else:\n return None", "def __getitem__(self, key):\n for sen in self.__s:\n if sen.name == key or sen.key == key:\n return sen\n raise KeyError(key)", "def get(self, key):\n # Your code here\n\n idx = self.hash_index(key)\n\n # check if the index is in range\n if idx >= 0 and idx < self.capacity:\n curr_node = self.hash_table[idx]\n\n # check if any node at index exists\n if curr_node is None:\n return None\n\n # if there's already something at this index\n while curr_node is not None:\n \n # check to see if there is an entry at this index whose key matches the provided key\n while curr_node.key is not key:\n curr_node = curr_node.next\n \n # if we never found an entry with a matching key, return None\n if curr_node.key is not key or curr_node is None:\n return None\n else:\n return curr_node.value\n \n \n # otherwise return None if the index is not in range\n else:\n return None", "def get(self, key: Hashable) -> Any: # type: ignore\n try:\n return[key]\n except 
(KeyError, TypeError):\n if self.default_factory is None:\n raise KeyError(f'{key} is not in {self.__class__}')\n else:\n try:\n return self.default_factory()\n except TypeError:\n return self.default_factory", "def __getitem__(self,key):\n result = None\n # check if it's tin the cache first\n if key in self._cache:\n result = self._cache[key]\n else:\n # it's not in the cache so retrieve it\n result = self._get_from_tree(key)\n # remove None values\n result = [x for x in result if x is not None]\n self._cache[key] = result\n\n return result", "def get(cls, subdomain, key):\n key_name = subdomain + ':' + key\n return cls.get_by_key_name(key_name)", "def get(self, key):\n return self.cache_data.get(key)", "def get(self, key):", "def get(self, key):", "def cities(self):\n objs = models.storage.all()\n tmp = []\n for key, value in objs.items():\n name = key.split('.')\n if name[0] == \"City\":\n if value.state_id == str(self.id):\n tmp.append(objs[key])\n return tmp", "def get_cache(self, key):\n return self.r.get(key)", "def get(self, key):\n return self.code_table[key]", "def get(self, key):\n raise NotImplementedError", "def get(self, key):\n dkey = digest(key)\n _log.debug(\"Server:get %s\" % base64.b64encode(dkey))\n # if this node has it, return it\n exists, value = self.storage.get(dkey)\n if exists:\n return defer.succeed(value)\n node = Node(dkey)\n nearest = self.protocol.router.findNeighbors(node)\n if len(nearest) == 0:\n self.log.warning(\"There are no known neighbors to get key %s\" % key)\n return defer.succeed(None)\n spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)\n return spider.find()", "def _lookup(self, key):\n\n if key in self.position and key in self.info:\n # If the key exists in both position and info, treat it as a list to intersect.\n return self._skill_list(key)\n if key in self.position:\n return self.position[key]\n if key in self.info:\n return self.info[key]\n\n raise KeyError(f\"Invalid Key: {key}\")", "def getCache(self, key):\n return self._cache.get(key, None)", "def fetch(self,key):\n try:\n return self.__content[key]\n except KeyError:\n return None" ]
[ "0.6803799", "0.64028823", "0.61282915", "0.6064786", "0.598396", "0.5972638", "0.59683293", "0.59556264", "0.5922655", "0.5832542", "0.5792213", "0.5784531", "0.57582283", "0.573862", "0.57019925", "0.5670712", "0.56591344", "0.56591344", "0.56481266", "0.56079686", "0.5563027", "0.55553216", "0.5541962", "0.5518856", "0.5516858", "0.55150664", "0.5490744", "0.54855865", "0.54855865", "0.54367745", "0.54000705", "0.5396576", "0.5378624", "0.5378624", "0.5378624", "0.5378624", "0.5378624", "0.5353413", "0.5343711", "0.5340546", "0.5335899", "0.5329854", "0.53275317", "0.5325276", "0.53246874", "0.53234804", "0.53098273", "0.53044033", "0.5298269", "0.52798605", "0.5278349", "0.5272944", "0.5262816", "0.5262816", "0.52579665", "0.5252874", "0.52464867", "0.52460307", "0.5239612", "0.5236716", "0.523652", "0.522516", "0.52244264", "0.5222627", "0.52160096", "0.52159435", "0.52150774", "0.51989704", "0.51962453", "0.5192902", "0.519152", "0.51903564", "0.5190132", "0.51813203", "0.5180733", "0.5180154", "0.517515", "0.51739734", "0.5162538", "0.5150318", "0.51494366", "0.5148245", "0.5138518", "0.5134851", "0.51337", "0.51332694", "0.51291305", "0.5119433", "0.51169693", "0.51151067", "0.51098", "0.51098", "0.5108754", "0.510609", "0.5101847", "0.5076503", "0.50747854", "0.5072306", "0.507192", "0.5063729" ]
0.769488
0
Calculate all the info for the sun at once.
def sun_utc(self, date, latitude, longitude):
        dawn = self.dawn_utc(date, latitude, longitude)
        sunrise = self.sunrise_utc(date, latitude, longitude)
        noon = self.solar_noon_utc(date, longitude)
        sunset = self.sunset_utc(date, latitude, longitude)
        dusk = self.dusk_utc(date, latitude, longitude)

        return {'dawn': dawn, 'sunrise': sunrise, 'noon': noon, 'sunset': sunset, 'dusk': dusk}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def computeSunTime(self, latitude, longitude, startDate, endDate): \n self.sun = sun(lat=latitude, long=longitude)\n dateTime = datetime.datetime.combine(startDate, datetime.time(hour=8))\n while dateTime.date() <= endDate: \n daytimeStart, daytimeEnd = self.computeDaytimeStartEnd(dateTime)\n self.sunriseTimeDict[dateTime.date()] = daytimeStart\n self.sunsetTimeDict[dateTime.date()] = daytimeEnd\n dateTime += self.oneDayDelta", "def apply_sun_presets(args, weather):\n if args.sun is not None:\n if args.sun in SUN_PRESETS:\n weather.sun_altitude_angle = SUN_PRESETS[args.sun][0]\n weather.sun_azimuth_angle = SUN_PRESETS[args.sun][1]\n else:\n print(\"[ERROR]: Command [--sun | -s] '\" + args.sun + \"' not known\")\n sys.exit(1)", "def func(self):\n account = self.account\n city_name = 'Phoenix' if not self.args else self.args\n a = Astral()\n a.solar_depression = 'civil'\n city = a[city_name]\n if not city:\n return\n timezone = city.timezone\n sun = city.sun(date=datetime.date.today(), local=True)\n\n account.msg('Information for %s/%s\\n' % (city_name, city.region))\n account.msg('Timezone: %s' % timezone)\n account.msg('Latitude: %.02f; Longitude: %.02f' % (city.latitude, city.longitude))\n account.msg('Dawn: %s' % str(sun['dawn']))\n account.msg('Sunrise: %s' % str(sun['sunrise']))\n account.msg('Noon: %s' % str(sun['noon']))\n account.msg('Sunset: %s' % str(sun['sunset']))\n account.msg('Dusk: %s' % str(sun['dusk']))", "def run_all(self):\n self.formatter.section_start('Firmware info')\n self.analyse_firmware_id() # Always do this first!\n # If the chip has not panicked, the preserved\n # block is populated with random values, therefore\n # until the magic_value is implemented, do a try and except\n self.analyse_panic_state()\n self.analyse_slt() # Kind of pointless but why not.\n self.formatter.section_end()", "def sun_single_day(date):\r\n\r\n\tsun = l.sun(date=date, local=True)\r\n\tsunrise = sun['sunrise']\r\n\tsunset = sun['sunset']\r\n\tday_length = str(sunset-sunrise)\r\n\tsolar_noon = l.solar_noon(date=date, local=True)\r\n\tsolar_zenith = l.solar_elevation(solar_noon.replace(tzinfo=None))\r\n\r\n\treturn {'sunrise':sunrise, 'sunset': sunset, 'daylength': day_length, 'solar_noon': solar_noon, 'zenith': solar_zenith}", "def update(self, time):\n\n delta_J2000 = self.time - constant.J2000_DATE\n n_days_J2000 = delta_J2000.days + delta_J2000.seconds/86400\n\n mean_lon_sun = 280.460 + 0.9856474*n_days_J2000\n mean_lon_sun %= 360.0\n mean_lon_sun *= constant.DEG_TO_RAD\n\n mean_anomaly_sun = 357.528 + 0.9856003*n_days_J2000\n mean_anomaly_sun %= 360.0\n mean_anomaly_sun *= constant.DEG_TO_RAD\n\n ecliptic_lon_sun = ( mean_lon_sun/constant.DEG_TO_RAD +\n 1.915*math.sin(mean_anomaly_sun) +\n 0.020*math.sin(2.0*mean_anomaly_sun) )\n ecliptic_lon_sun *= constant.DEG_TO_RAD\n\n dist_earth_to_sun = (1.00014 -\n 0.01671*math.cos(mean_anomaly_sun) -\n 0.00014*math.cos(2.0*mean_anomaly_sun) )\n dist_earth_to_sun *= constant.AU_TO_KM\n\n obliquity_ecliptic = 23.439 - 0.0000004*n_days_J2000\n obliquity_ecliptic *= constant.DEG_TO_RAD\n\n x_J2000_sun = math.cos(ecliptic_lon_sun)\n y_J2000_sun = math.cos(obliquity_ecliptic)*math.sin(ecliptic_lon_sun)\n z_J2000_sun = math.sin(obliquity_ecliptic)*math.sin(ecliptic_lon_sun)\n\n self.direction = vt.Vector([x_J2000_sun, y_J2000_sun, z_J2000_sun])\n self.distance = dist_earth_to_sun\n self.time = time", "def get_sun_report(**kwargs):\n vars = {'FFX' : 1, 'ZZZ' : 'END',\n 'xxy' : kwargs['year'], 'type' : kwargs['report_type']}\n if 'location' in 
kwargs:\n if kwargs['location'].state:\n vars['st'] = kwargs['location'].state.upper()\n if kwargs['location'].locality:\n vars['place'] = kwargs['location'].locality\n else:\n vars['st'] = kwargs.pop('st','')\n vars['place'] = kwargs.pop('place','')\n vars = urlencode(vars)\n try:\n lines = urllib2.urlopen(urllib2.Request(URL, vars)).readlines()\n except IOError, e:\n conf.log('warning','RequestError: %s'%e)\n raise RequestError, e\n\n data = {}\n def liner(line):\n while 1:\n if not line: break\n part = line[:9].replace(' ','None').replace(' ','')\n rise,set = part[:4],part[4:]\n if rise == 'None' or not rise: rise = None\n else: rise = time(int(rise[:2]),int(rise[2:]))\n if set == 'None' or not set: set = None\n else: set = time(int(set[:2]),int(set[2:]))\n yield rise,set\n line = line[11:]\n for l in lines:\n l = l.strip()\n if not l: continue\n try:\n day = int(l[:2])\n except ValueError:\n continue\n for i,(r,s) in enumerate(liner(l[2:].strip())):\n try:\n data[( date(int(kwargs['year']), i, int(day)) )] = ( r, s )\n except ValueError:\n continue\n return data", "def fetch_sundata(self, date: datetime) -> Sundata:\n pass", "def time_info(input_file):\n original_path = os.getcwd() #set original directory\n save_path = input_file['save_path']\n planet = input_file['exoplanet'] #set exoplanet name\n print '\\nObtain the images .... \\n'\n print 'Change to ', save_path\n os.chdir(save_path) #change to save directory where is our scvience images\n images = sorted(glob.glob('AB'+input_file['exoplanet']+'*.fits'))\n print '\\nImages = \\n',images\n tempo_loc = [] #time object\n SUN = [] #Sun coordinate object\n ra_sun, dec_sun, dsun = np.zeros(len(images)),np.zeros(len(images)),np.zeros(len(images)) #sun coordinates\n JD = np.zeros(len(images)) #julian date from time object\n ST = np.zeros(len(images))\n HJD = np.zeros(len(images))\n #create the exoplanet object coordianate\n exoplanet = SkyCoord(dec=input_file['DEC'],ra=input_file['RA'],unit=('deg','deg'),frame=input_file['frame'])\n print '\\nObtain data info from header ....\\n'\n for i in range(len(images)):\n hdr = fits.getheader(images[i])\n UTC = hdr['date-obs']+'T'+hdr['UT'] #string that contain the time in UTC in isot format\n tempo_loc.append(Time(UTC,scale=input_file['scale-time'],format='isot',location=(input_file['lon-obs'],input_file['lat-obs'])))#,input_data['altitude'])))\n JD[i] = tempo_loc[i].jd\n ST[i] = tempo_loc[i].sidereal_time('apparent').hour\n SUN.append(get_sun(tempo_loc[i]))\n ra_sun[i],dec_sun[i] = SUN[i].ra.deg, SUN[i].dec.deg\n dsun[i] = SUN[i].distance.value\n HJD[i] = use.hjd_date(JD[i],dsun[i],dec_sun[i],ra_sun[i],exoplanet.dec.deg,exoplanet.ra.deg,circular_orbit=input_file['circular_orbit'])\n use.update_progress((i+1.)/len(images))\n print '\\n.... done.\\n'\n print '\\n Time from header = \\n'\n #print '\\nImages ** UTC (YYYY-MM-DDTHH:MM:SS) ** JD (7d.5d) ** ST (hours) ** ST (HH:MM:SS) ** Sun Coordinate (epoch,RA,DEC,Distance) (deg,deg,AU) \\n'\n ST_string = []\n for i in range(len(images)):\n ST1 = int(ST[i])\n ST2 = int((ST[i]-ST1)*60.)\n ST3 = (((ST[i]-ST1)*60.)-ST2)*60\n ST_string.append(str(ST1)+':'+str(ST2)+':'+str(ST3))\n tempo_loc[i] = tempo_loc[i].value\n use.update_progress((i+1.)/len(images))\n #print images[i], ' ** ',tempo_loc[i], ' ** ', JD[i], ' ** ', ST[i],' ** ',ST_string[i],' ** ',sun_loc[i],' ** ',HJD[i]\n print '\\nSave data file ... 
\\n'\n data = DataFrame([images,tempo_loc,list(JD),list(ST),list(ST_string),list(ra_sun),list(dec_sun),list(dsun),list(HJD)]).T\n data.columns=['images','UTC','JD','ST','ST_isot','RA_SUN','DEC_SUN','D_SUN','HJD']\n print data\n data.to_csv('results.csv')\n os.chdir(original_path)\n return", "def __cal_aod(self, year, month, day):\n print 'Calculate...'\n logging.info('[calculate]->Calculate...')\n\n t = datetime.datetime(year, month, day)\n\n ddir = self.aodSetting.data_dir\n wdir = self.aodSetting.p_aot_dir\n ascdir = self.aodSetting.ascii_dir\n aotdir = self.aodSetting.aot_dir\n\n stations = self.aodSetting.stations\n\n # Calculate AOD\n print 'Calculate AOD...'\n logging.info('[calculate]->Calculate AOD...')\n\n for stId in stations.getstIds():\n station = stations.get(stId)\n fn = station.stId\n k7fn = path.join(self.aodSetting.merge_dir, fn, t.strftime('%Y%m'), fn + \"_\" +\n t.strftime(\"%Y%m%d\") + \"_merge.K7\")\n if not os.path.exists(k7fn):\n continue\n print '[{0}]: Ready'.format(fn)\n logging.info('[calculate]->[{0}]: Ready'.format(fn))\n nsu_dir = path.join(ascdir, fn, t.strftime('%Y%m'))\n nsufn = path.join(nsu_dir, fn + \"_\" +\n t.strftime(\"%Y%m%d\") + '.NSU')\n if not os.path.exists(nsufn):\n if not os.path.exists(nsu_dir):\n os.makedirs(nsu_dir)\n rr = spdata.decode(k7fn)\n r = spdata.extract(rr, 'NSU')\n spdata.save(r, nsufn)\n print '[{0}]: Output nsu file'.format(fn)\n logging.info('[calculate]->[{0}]: Output nsu file'.format(fn))\n\n # check if the external program and the parameter files are ready\n validated = True\n exefn = self.aodSetting.p_aot_exe\n if not os.path.exists(exefn):\n print '[{0}]: Not Found Aot program, {1}'.format(fn, exefn)\n logging.warn(\n '[calculate]->[{0}]: Not Found Aot program, {1}'.format(fn, exefn))\n validated = False\n\n inputfn = self.aodSetting.p_aot_input\n if not os.path.exists(inputfn):\n print '[{0}]: Not Found input parameter data, {1}'.format(fn, inputfn)\n logging.warn(\n '[calculate]->[{0}]: Not Found input parameter data, {1}'.format(fn, inputfn))\n validated = False\n\n ozonefn = self.aodSetting.p_aot_ozone\n if not os.path.exists(ozonefn):\n print '[{0}]: Not Found ozone data, {1}'.format(fn, ozonefn)\n logging.warn(\n '[calculate]->[{0}]: Not Found input parameter data, {1}'.format(fn, inputfn))\n validated = False\n\n calfn = path.join(self.aodSetting.p_cal_dir,\n \"calibr\" + station.calibr + \".cal\")\n if not os.path.exists(calfn):\n print '[{0}]: Not Found calculation paramter data, {1}'.format(fn, calfn)\n logging.warn(\n '[calculate]->[{0}]: Not Found calculation paramter data, {1}'.format(fn, calfn))\n validated = False\n\n if validated:\n tao_dir = path.join(aotdir, fn, t.strftime('%Y%m'))\n if not os.path.exists(tao_dir):\n os.makedirs(tao_dir)\n taofn = path.join(tao_dir, fn + \"_\" +\n t.strftime(\"%Y%m%d\") + '.tao')\n lat = station.lat\n lon = station.lon\n alt = station.alt\n\n spdata.cal_aot(wdir, calfn, taofn, nsufn,\n lat, lon, alt, alpha=1)\n print '[{0}] => {1}'.format(fn, taofn)\n logging.info('[calculate]->[{0}] => {1}'.format(fn, taofn))\n else:\n print '[{0}]: Abort'.format(fn)\n logging.warn('[calculate]->[{0}]: Abort'.format(fn))\n\n print 'Calculate Done!'\n logging.info('[calculate]->Calculate Done!')", "def update_root_statistics_and_totals(self):\n\n self.average_radius = 0\n self.total_root_length = 0\n\n total_radius = 0\n\n for root in self.root_dict.values():\n\n root.calculate_root_statistics()\n\n self.total_root_length += root.total_length\n\n total_radius += root.total_length * 
root.average_radius\n\n self.average_radius = total_radius / self.total_root_length", "def system_info():\n requirements = get_requirements(\"sunpy\")\n groups = get_keys_list(requirements)\n extra_groups = get_extra_groups(groups, ['all', 'dev'])\n base_reqs = get_keys_list(requirements['required'])\n extra_reqs = get_keys_list(requirements['all'])\n missing_packages, installed_packages = find_dependencies(package=\"sunpy\", extras=extra_groups)\n extra_prop = {\"System\": platform.system(),\n \"Arch\": f\"{platform.architecture()[0]}, ({platform.processor()})\",\n \"Python\": platform.python_version(),\n \"sunpy\": version(\"sunpy\")}\n sys_prop = {**installed_packages, **missing_packages, **extra_prop}\n print(\"==============================\")\n print(\"sunpy Installation Information\")\n print(\"==============================\")\n print()\n print(\"General\")\n print(\"#######\")\n if sys_prop['System'] == \"Linux\":\n print(f\"OS: {distro.name()} ({distro.version()}, Linux {platform.release()})\")\n elif sys_prop['System'] == \"Darwin\":\n print(f\"OS: Mac OS {platform.mac_ver()[0]}\")\n elif sys_prop['System'] == \"Windows\":\n print(f\"OS: Windows {platform.release()} {platform.version()}\")\n else:\n print(\"Unknown OS\")\n for sys_info in ['Arch', 'sunpy']:\n print(f'{sys_info}: {sys_prop[sys_info]}')\n print(f'Installation path: {distribution(\"sunpy\")._path}')\n print()\n print(\"Required Dependencies\")\n print(\"#####################\")\n for req in base_reqs:\n print(f'{req}: {sys_prop[req]}')\n print()\n print(\"Optional Dependencies\")\n print(\"#####################\")\n for extra_req in extra_reqs:\n print(f'{extra_req}: {sys_prop[extra_req]}')", "def get_mean_sun_angles(self) -> (float, float):\n # Get MTD XML file\n root, _ = self.read_mtd()\n\n # Open zenith and azimuth angle\n zenith_angle = float(root.findtext(\".//SolarZenith\"))\n azimuth_angle = float(root.findtext(\".//SolarAzimuth\"))\n\n return azimuth_angle, zenith_angle", "def sun(xs, ys, s, n):\n yellow = (255, 255, 0) # sun color\n\n circle(screen, yellow, (xs, ys), 30 * s) # sun body\n for k in range(n + 1): # sun rays on the upper side of the sun\n polygon(screen, yellow,\n [(xs + 45 * s * np.cos(np.pi / n * (k - 1 / 2)), ys - 45 * s * np.sin(np.pi / n * (k - 1 / 2))),\n (xs + 30 * s * np.cos(np.pi * (k - 1) / n), ys - 30 * s * np.sin(np.pi * (k - 1) / n)),\n (xs + 30 * s * np.cos(np.pi * k / n), ys - 30 * s * np.sin(np.pi * k / n))], 0)\n for k in range(n + 1): # sun rays on the lower side of the sun\n polygon(screen, yellow,\n [(xs + 45 * s * np.cos(np.pi / n * (k - 1 / 2)), ys + 45 * s * np.sin(np.pi / n * (k - 1 / 2))),\n (xs + 30 * s * np.cos(np.pi * (k - 1) / n), ys + 30 * s * np.sin(np.pi * (k - 1) / n)),\n (xs + 30 * s * np.cos(np.pi * k / n), ys + 30 * s * np.sin(np.pi * k / n))], 0)", "def checkSun(ontology_sun):\n elevation = ontology_sun.has_elevation[0] #gets the elevation value of the Sun in the ontology. \n azimuth = ontology_sun.has_azimuth[0] #gets the azimuth value of the Sun in the ontology. 
\n intensity = ontology_sun.has_intensity[0] #gets the intensity value of the Sun in the ontology.\n return xosc.Sun(intensity,azimuth,elevation)", "def forecast_weather(self):\n pass", "def get_sun_features(image): # Use grayscale images, outside val: NaN\r\n ratio = sun_isoperimetric_ratio(image)\r\n sun_features = {\"sun_circularity_ratio\": ratio}\r\n return sun_features", "def main():\n # Constants\n groundstation_name = 'Wallops Antenna'\n groundstation_address = 'Radar Road, Temperanceville, VA 23442'\n satnum = 25544 # ISS = 25544\n saturl=\"http://www.celestrak.com/NORAD/elements/stations.txt\"\n gs_minimum_elevation_angle = 10.0\n\n # Alternate constants\n gs_alt_lat = 37.854886 # Only needed if address not found\n gs_alt_lon = -75.512936 # Ditto\n gs_alt_el_meters = 3.8 # Ditto\n gs_alt_tz_offset_seconds = -18000.0 # Ditto\n gs_tzname = 'US/Eastern'\n\n # Construct the ground station info\n try:\n # Try to use the address...\n gs = GroundStation.from_address(groundstation_address, \\\n groundstation_name, \\\n gs_minimum_elevation_angle)\n except:\n # Otherwise, use explicit location data...\n gs = GroundStation.from_location(gs_alt_lat, gs_alt_lon, \\\n gs_alt_el_meters, \\\n gs_tzname, \\\n groundstation_name, \\\n gs_minimum_elevation_angle)\n\n # Times we need\n now = datetime.now()\n gs_today = gs.get_tz().localize(datetime(now.year, now.month, now.day))\n gs_today_start = gs.get_tz().localize(datetime(now.year, now.month, now.day, \\\n 0, 0, 0)) \n gs_today_end = gs.get_tz().localize(datetime(now.year, now.month, now.day, \\\n 23, 59, 59))\n\n # Get the InviewCalculator and compute the inviews\n st = SatelliteTle(satnum, tle_url=saturl)\n ic = InviewCalculator(gs, st)\n inviews = ic.compute_inviews(gs_today_start, gs_today_end)\n\n # Print the results\n print_satellite_header(st)\n print_inview_header(gs.get_minimum_elevation_angle(), gs_today, gs)\n print_inviews(gs, inviews)\n print_azeltables(inviews, ic)", "def perform_calculations(collector):\n result = {}\n try:\n radius, mass = Calculator.calculate_radius_mass(collector)\n result['radius'] = radius\n result['mass'] = mass\n average_density = Calculator.calculate_average_density(radius,\n mass)\n result['average_density'] = average_density\n escape_velocity = Calculator.calculate_escape_velocity(radius,\n mass)\n result['escape_velocity'] = escape_velocity\n earth_similarity_index = Calculator.calculate_esi_index(\n radius, mass, collector.get_average_temperature())\n result['earth_similarity_index'] = earth_similarity_index\n except NoDataError:\n pass\n\n try:\n avg_atm_molar_mass = Calculator.calculate_molar_mass(collector)\n except NoDataError:\n avg_atm_molar_mass = None\n if avg_atm_molar_mass is not None and avg_atm_molar_mass <= 0:\n logging.getLogger('Analyzer').debug('Molar mass <= 0: %d',\n avg_atm_molar_mass)\n avg_atm_molar_mass = None\n\n if avg_atm_molar_mass is not None:\n result['avg_atm_molar_mass'] = avg_atm_molar_mass\n avg_molecule_mass = avg_atm_molar_mass / Calculator.A\n result['avg_molecule_mass'] = avg_molecule_mass\n specific_gas_const = Calculator.R / avg_atm_molar_mass\n result['specific_gas_const'] = specific_gas_const\n\n try:\n speed_of_sound = Kundt.speed_of_sound(collector.kundt)\n result['speed_of_sound'] = speed_of_sound\n\n if avg_atm_molar_mass is None:\n # All further calculations require valid molar mass\n return result\n\n # Since calculate_molar_mass already uses get_average_temperature\n # and get_ground_pressure, it's safe to use these functions here\n # without worrying 
about NoDataError\n adiabatic_index = Calculator.calculate_adiabatic_index(\n collector, speed_of_sound, avg_atm_molar_mass)\n result['adiabatic_index'] = adiabatic_index\n\n atmosphere_density = (adiabatic_index *\n collector.get_ground_pressure() /\n speed_of_sound ** 2)\n result['atmosphere_density'] = atmosphere_density\n\n refractive_index = (3 * avg_atm_molar_mass *\n collector.get_ground_pressure() /\n atmosphere_density / Calculator.R /\n collector.get_average_temperature() - 2) ** 0.5\n result['refractive_index'] = refractive_index\n\n molar_refractivity = (avg_atm_molar_mass /\n atmosphere_density *\n (refractive_index ** 2 - 1) /\n (refractive_index ** 2 + 2))\n result['molar_refractivity'] = molar_refractivity\n\n atm_speed_of_light = Calculator.C / refractive_index\n result['atm_speed_of_light'] = atm_speed_of_light\n except NoDataError:\n pass\n\n return result", "def calculate():\n con = mdb.connect(constants.sql_.IP, constants.sql_.USER, constants.sql_.PASS,\n constants.sql_.DB)\n# dicti = {}\n liste = mdb_get_table(constants.sql_tables.cron.name)\n# with con:\n# cur = con.cursor()\n# sql = 'SELECT * FROM '+constants.sql_tables.cron.name\n# cur.execute(sql)\n# results = cur.fetchall()\n# field_names = [i[0] for i in cur.description]\n# j = 0\n# for row in results:\n# for i in range(0, len(row)):\n# dicti[field_names[i]] = row[i]\n# liste.append(dicti)\n# dicti = {}\n# j = j + 1\n# con.close\n time = localtime()\n HOME.date = strftime(\"%Y-%m-%d 00:00:00\", time)\n # check for daylight saving\n if getattr(localtime(), 'tm_isdst') > 0:\n delta = 2\n else:\n delta = 1\n sunrise = ((HOME.next_rising(ephem.Sun())).datetime() +\n datetime.timedelta(hours=delta, minutes=0, seconds=0))\n sunset = ((HOME.next_setting(ephem.Sun())).datetime() +\n datetime.timedelta(hours=delta, minutes=0, seconds=0))\n for eintrag in liste:\n dynamic = False\n for setting in eintrag:\n if setting == \"Sonne\" and str(eintrag.get(\"Sonne\")) <> \"None\":\n dynamic = True\n if str(eintrag.get(\"Sonne\")) == \"rise\":\n time = sunrise.replace(second=0)\n else:\n time = sunset.replace(second=0)\n elif setting == \"Rohtime\" and str(eintrag.get(\"Rohtime\")) <> \"None\":\n dynamic = True\n time = eintrag.get(\"Rohtime\")\n for setting in eintrag:\n if setting == \"offset\" and str(eintrag.get(\"offset\")) <> \"None\":\n time = time + datetime.timedelta(hours=0, minutes=int(eintrag.get(\"offset\")),\n seconds=0)\n if setting == \"Zufall\" and str(eintrag.get(\"Zufall\")) <> \"None\":\n time = (time +\n datetime.timedelta(hours=0,\n minutes=random.randrange(int(eintrag.get(\"Zufall\"))),\n seconds=0))\n if dynamic:\n with con:\n #time = time - datetime.timedelta(seconds=int(str(time)[6:]))\n cur = con.cursor()\n sql = ('UPDATE %s SET Time = \"%s\" WHERE Id = \"%s\"'\n % (constants.sql_tables.cron.name, str(time), str(eintrag.get(\"Id\"))))\n cur.execute(sql)\n con.close\n return True", "def _calc_(self):\n self.data = []\n all_xyz_data = self.Var.data.get_xyz_data()\n all_cols = self.Var.data.get_xyz_cols()\n\n # Loop over all the xyz data and cols we have\n for xyz_data, cols in zip(all_xyz_data, all_cols):\n\n at_crds = np.array([i[cols[0] != 'Ne'] for i in xyz_data])\n self.natom = len(at_crds[0])\n self.nstep = len(at_crds)\n self.step_data = {}\n\n # Calculate the nearest neighbour lists for each step\n for step in range(self.nstep):\n self.step_data[step] = {}\n\n # Get coords\n crds = at_crds[step]\n\n # Get distances between neighbours\n self.get_distances(crds)\n\n # Get a sorted list of atom 
indices by distance\n self.get_nearest_atom_inds()\n\n # If we have some molecule metadata\n if 'atoms_per_molecule' in self.Var.metadata:\n self.at_per_mol = self.Var.metadata['atoms_per_molecule']\n self.nmol = mol_utils.get_nmol(self.natom, self.at_per_mol)\n self.reshape_at_dist()\n self.get_nearest_atom_inds_per_mol()\n self.step_data[step]['closest_atoms_mol_grouped'] = self.closest_at_per_mol\n self.step_data[step]['distances_mol_grouped'] = self.all_dist_per_mol\n\n # Save data in dict\n self.step_data[step]['distances'] = self.all_dist\n self.step_data[step]['closest_atom_indices'] = self.closest_ats\n\n self.data.append(self.step_data)\n\n return self.data", "def generate_sunsets(self, nyears=13, day_pad=50):\n\n # Set observatory horizon to zero\n doff = ephem.Date(0)-ephem.Date('1858/11/17')\n\n self.obs.horizon = 0.\n\n # Swipe dates to match sims_skybrightness_pre365\n mjd_start = self.mjd\n mjd_end = np.arange(mjd_start, mjd_start+365.25*nyears+day_pad+366, 366).max()\n step = 0.25\n mjds = np.arange(mjd_start, mjd_end+step, step)\n setting = mjds*0.\n\n # Stupid Dublin Julian Date\n djds = mjds - doff\n sun = ephem.Sun()\n\n for i, (mjd, djd) in enumerate(zip(mjds, djds)):\n sun.compute(djd)\n setting[i] = self.obs.previous_setting(sun, start=djd, use_center=True)\n setting = setting + doff\n\n # zomg, round off crazy floating point precision issues\n setting_rough = np.round(setting*100.)\n u, indx = np.unique(setting_rough, return_index=True)\n self.setting_sun_mjds = setting[indx]\n left = np.searchsorted(self.setting_sun_mjds, mjd_start)\n self.setting_sun_mjds = self.setting_sun_mjds[left:]", "def derive_features(self):\n\n temp = int(self.stop_id)\n\n while temp not in self.stops_latlon.keys():\n if temp < 7692:\n temp += 1\n else:\n while temp not in self.stops_latlon.keys():\n temp -= 1\n\n self.latitude = self.stops_latlon[temp][0]\n self.longitude = self.stops_latlon[temp][1]\n\n self.distance_centre = FormatInput.haversine(self.latitude, self.longitude)\n\n self.cluster = FormatInput.map_stop_to_cluster(self.cluster_map, self.stop_id)\n\n self.holiday = FormatInput.add_holiday(self.date)", "def calc_resources(self):\n self.popula = self.energy = self.popula_used = self.energy_used = 0\n self.cnt_public = self.cnt_shop = self.cnt_1 = self.cnt_2 = self.cnt_3 = self.cnt_4 = self.cnt_5 = self.cnt_office = 0\n self.popula += self.extra_pop\n for i in range(20):\n b = self.b[i]\n if b == 'T':\n self.popula += self.f[i] * 2\n self.energy_used += 1\n elif b == 'O':\n self.popula_used += 1\n self.energy_used += 1\n self.cnt_office += self.f[i]\n elif b == 'U':\n self.popula_used += 1\n self.cnt_public += 1\n elif b == 'S':\n self.energy_used += 1\n self.cnt_shop += 1\n elif b == '1':\n self.popula += 1\n self.energy += 1\n self.popula_used += 1\n self.cnt_1 += 1\n elif b == '2':\n self.popula_used += 1\n self.cnt_2 += 1\n elif b == '3':\n self.popula_used += 1\n self.cnt_3 += 1\n elif b == '4':\n self.popula += 2\n self.popula_used += 1\n self.cnt_4 += 1\n elif b == '5':\n self.energy += 2\n self.popula_used += 1\n self.cnt_5 += 1\n elif b == 'A':\n self.energy += 2\n self.popula_used += 1\n elif b == 'F':\n self.energy += 3\n self.popula_used += 1\n elif b == 'G':\n self.popula += 1\n if 'tvst' in args.exp:\n self.popula += self.cnt_shop\n if 'ward' in args.exp:\n self.popula += 3\n if 'elec' in args.exp:\n self.energy += 3\n if 'capi' in args.exp:\n self.popula_used += 2\n if 'fire' in args.exp:\n self.popula_used += 1\n if 'park' in args.exp:\n self.popula_used += 1", 
"def update_only_total_statistics(self):\n\n self.average_radius = 0\n self.total_root_length = 0\n\n total_radius = 0\n\n for root in self.root_dict.values():\n\n self.total_root_length += root.total_length\n\n total_radius += root.total_length * root.average_radius\n\n self.average_radius = total_radius / self.total_root_length", "def triangulate_analytic_sun_at_center(self,r1,x2,y2,r2,x3,y3,r3):\n gamma=(r1**2+x2**2+y2**2-r2**2)/(2.0*x2)\n\ta=(y2**2)/(float(x2**2))\n\tb=-2.0*gamma*y2/x2\n\tc=gamma**2-r1**2\n\ty_plus=(-b+np.sqrt((b**2)-4*a*c))/(2.0*a)\n\ty_minus=(-b-np.sqrt((b**2)-4*a*c))/(2.0*a)\n x_plus=gamma-y_plus*y2/float(x2)\n x_minus=gamma-y_minus*y2/float(x2)\n difference_plus=(x_plus-x3)**2+(y_plus-y3)**2-r3**2\n difference_minus=(x_minus-x3)**2+(y_minus-y3)**2-r3**2\n if abs(difference_minus) < abs(difference_plus):\n print \"Difference minus\", difference_minus\n print x_minus, y_minus\n return x_minus, x_plus, difference_minus\n else:\n print \"Difference plus\", difference_plus\n print x_plus, y_plus\n return x_plus, y_plus, difference_plus", "def get_forecasts(api_key, lat, lng):\n current_time = datetime.datetime.now()\n forecast = forecastio.load_forecast(api_key, lat, lng, time=current_time)\n result = {}\n for day in forecast.daily().data:\n sunrise = pytz.utc.localize(day.sunriseTime)\n sundown = pytz.utc.localize(day.sunsetTime)\n print('Sun up: {}, sun down: {}, moon phase: {}'.format(sunrise, sundown, day.moonPhase))\n day = forecast.daily().data[0]\n result['sunrise'] = pytz.utc.localize(day.sunriseTime).replace(tzinfo=datetime.timezone.utc).astimezone(tz=None)\n result['sunset'] = pytz.utc.localize(day.sunsetTime).replace(tzinfo=datetime.timezone.utc).astimezone(tz=None)\n result['moonphase'] = day.moonPhase\n return result", "def calculate_all_distances(self):\n self.close_distance = self.calculate_distance(self.close_distance_factor)\n self.medium_distance = self.calculate_distance(self.medium_distance_factor)\n self.far_distance = self.calculate_distance(self.far_distance_factor)", "def calculate_all_metrcis(self):\n self.calculate_gc_metrcis()\n self.calculate_sam_metrics()\n self.calculate_classification_metrics()\n self.calculate_losses()", "def run_analysis(self):\n ### skip some snapshots for testing purposes\n nskip = 199\n read_char.skip_snapshots(self.hfile, self.ifile, nskip)\n ### read in the first two steps (required for velocity related computations\n xs_old, ys_old, lx_old, ly_old, tstep_old, natoms_old = read_char.read_snapshot(self.hfile, self.ifile)\n x_old = xs_old*lx_old\n y_old = ys_old*ly_old\n xs,ys,lx,ly,tstep,natoms = read_char.read_snapshot(self.hfile, self.ifile)\n x = xs*lx\n y = ys*ly\n ### loop over all steps of the input file\n for step in range(nskip+1,self.nsteps-1):\n print step\n ### read in coordinates (as required)\n xs_new,ys_new,lx_new,ly_new,tstep_new,natoms_new = read_char.read_snapshot(self.hfile, self.ifile)\n x_new = xs_new*lx_new\n y_new = ys_new*ly_new\n ### compute further current per/atom quantities\n phi = misc_tools.compute_orientation(x,y,lx,ly,self.npol)\n vx,vy = misc_tools.compute_velocity(x_old,y_old, x_new, y_new, lx, ly, tstep_old, tstep_new, natoms)\n ### start desired analysis methods\n # density\n if self.density_flag:\n self.density.compute(step,x,y,lx,ly,natoms, plot = 'False')\n # number fluctuations\n if self.nf_flag:\n self.numberfluctuation.compute(step,xs,ys, plot = 'False')\n # voronoi density\n if self.voronoi_flag:\n self.voronoidensity.compute(step,x,y,lx,ly,natoms, plot = 'False')\n # 
velocity / worticity\n if self.velocity_flag:\n self.velocityworticity.compute(step,x,y,vx,vy,natoms,lx,ly, plot = 'False')\n # orientation / velocity\n if self.orientvel_flag:\n self.orientvel.compute(step,x,y,vx,vy,phi,natoms, plot = 'False')\n # defect points\n if self.pointdefects_flag:\n self.pointdefects.compute(step,x,y,phi,lx,ly,natoms)\n ### move coordinate arrays\n xs_old = np.copy(xs)\n ys_old = np.copy(ys)\n x_old = np.copy(x)\n y_old = np.copy(y)\n tstep_old = tstep\n xs = np.copy(xs_new)\n ys = np.copy(ys_new)\n x = np.copy(x_new)\n y = np.copy(y_new)\n tstep = tstep_new\n return", "def get_sunspot_data(yy, time1):\n master = []\n num_of_ss = np.max(yy.flatten()) # get number of different SS's\n centroids = []\n sizes = []\n numbers = []\n\n for i in np.arange(1, num_of_ss + 1): # for each SS:\n temp_sunspot = SunSpot(1, 1, 1)\n copy_yy = np.array(yy, copy = True)\n copy_yy[copy_yy != i] = 0 # get only points == i\n copy_yy[copy_yy == i] = 1\n\n indices_x, indices_y = np.where(yy == i)\n\n max_lat = np.max(indices_x)\n min_lat = np.min(indices_x)\n mean_lat = max_lat - (max_lat - min_lat)/2\n \n max_lon = np.max(indices_y)\n min_lon = np.min(indices_y)\n mean_lon = max_lon - (max_lon - min_lon)/2\n \n temp_sunspot.mask = copy_yy\n temp_sunspot.centroid = [mean_lon, mean_lat]\n temp_sunspot.size = len(indices_x)\n temp_sunspot.number = i\n temp_sunspot.x_points = indices_x\n temp_sunspot.y_points = indices_y\n temp_sunspot.timestamp = time1\n temp_sunspot.min_x = min_lon\n temp_sunspot.max_x = max_lon\n temp_sunspot.min_y = min_lat\n temp_sunspot.max_y = max_lat\n\n master.append(temp_sunspot)\n\n return num_of_ss, master", "def main():\n \n ## Determine whether to query for the sunset or sunrise\n if datetime.now().hour >= 20:\n ## Run sunrise tweets after 8PM\n type = 'sunrise'\n else:\n ## Any earlier, run sunset tweets (by default run at 12PM)\n type = 'sunset'\n \n ## Iterate through the time series and states\n log_df = TWEET_HISTORY_DF.copy()\n for loc in c.LOCATIONS.keys():\n \n ## Instantiate a class to do the tweetin'\n MySunTweeter = SunTweeter(loc, type, log_df)\n MySunTweeter.send_tweet()\n \n ## Save the log to use in the next iteration of the loop\n log_df = MySunTweeter.log_df\n \n ## Overwrite the log with the updated records\n log_df.to_csv(\"log/SunsetWx_full_tweet_log.csv\",\n index = False)", "def rainfall_event(self):\n\n # assign local variables\n datatype = 'strds'\n increment = str(self.rain_interval)+' minutes'\n raster = 'raster'\n iterations = int(self.rain_duration)/int(self.rain_interval)\n rain_excess = 'rain_excess'\n net_difference = 'net_difference'\n\n # create raster space time datasets\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.elevation_timeseries,\n title=self.elevation_title,\n description=self.elevation_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.depth_timeseries,\n title=self.depth_title,\n description=self.depth_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.erdep_timeseries,\n title=self.erdep_title,\n description=self.erdep_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.flux_timeseries,\n title=self.flux_title,\n description=self.flux_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n 
type=datatype,\n temporaltype=self.temporaltype,\n output=self.difference_timeseries,\n title=self.difference_title,\n description=self.difference_description,\n overwrite=True)\n\n # register the initial digital elevation model\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.elevation_timeseries,\n maps=self.elevation,\n start=self.start,\n increment=increment,\n flags='i',\n overwrite=True)\n\n # create evolution object\n evol = Evolution(elevation=self.elevation,\n precipitation=self.precipitation,\n start=self.start,\n rain_intensity=self.rain_intensity,\n rain_interval=self.rain_interval,\n rain_duration=self.rain_duration,\n walkers=self.walkers,\n runoff=self.runoff,\n mannings=self.mannings,\n detachment=self.detachment,\n transport=self.transport,\n shearstress=self.shearstress,\n density=self.density,\n mass=self.mass,\n grav_diffusion=self.grav_diffusion,\n erdepmin=self.erdepmin,\n erdepmax=self.erdepmax,\n k_factor=self.k_factor,\n c_factor=self.c_factor,\n m=self.m,\n n=self.n,\n threads=self.threads,\n fill_depressions=self.fill_depressions)\n\n # determine mode and run model\n if self.mode == 'simwe_mode':\n (evolved_elevation, time, depth, erosion_deposition,\n difference) = evol.erosion_deposition()\n # remove relative timestamps from r.sim.water and r.sim.sediment\n gscript.run_command(\n 'r.timestamp',\n map=depth,\n date='none')\n gscript.run_command(\n 'r.timestamp',\n map=erosion_deposition,\n date='none')\n\n elif self.mode == \"usped_mode\":\n (evolved_elevation, time, depth, erosion_deposition,\n difference) = evol.usped()\n\n elif self.mode == \"rusle_mode\":\n (evolved_elevation, time, depth, sediment_flux,\n difference) = evol.rusle()\n\n else:\n raise RuntimeError(\n '{mode} mode does not exist').format(mode=self.mode)\n\n # register the evolved maps\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.elevation_timeseries,\n maps=evolved_elevation,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.depth_timeseries,\n maps=depth,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n try:\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.erdep_timeseries,\n maps=erosion_deposition,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n except (NameError, CalledModuleError):\n pass\n try:\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.flux_timeseries,\n maps=sediment_flux,\n start=evol.start,\n increment=increment,\n flags='i', overwrite=True)\n except (NameError, CalledModuleError):\n pass\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.difference_timeseries,\n maps=difference,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n\n # run the landscape evolution model\n # as a series of rainfall intervals in a rainfall event\n i = 1\n while i < iterations:\n\n # update the elevation\n evol.elevation = evolved_elevation\n print evol.elevation\n\n # update time\n evol.start = time\n print evol.start\n\n # derive excess water (mm/hr) from rainfall rate (mm/hr)\n # plus the depth (m) per rainfall interval (min)\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{rain_excess}\"\n \"={rain_intensity}\"\n \"+{depth}\"\n \"/1000.\"\n \"/{rain_interval}\"\n \"*60.\".format(\n rain_excess=rain_excess,\n rain_intensity=self.rain_intensity,\n depth=depth,\n rain_interval=self.rain_interval),\n overwrite=True)\n\n # update excess 
rainfall\n rain_intensity = 'rain_intensity'\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{rain_intensity} = {rain_excess}\".format(\n rain_intensity='rain_intensity',\n rain_excess=rain_excess),\n overwrite=True)\n evol.rain_intensity = rain_intensity\n\n # determine mode and run model\n if self.mode == \"simwe_mode\":\n (evolved_elevation, time, depth, erosion_deposition,\n difference) = evol.erosion_deposition()\n # remove relative timestamps\n # from r.sim.water and r.sim.sediment\n gscript.run_command(\n 'r.timestamp',\n map=depth,\n date='none')\n gscript.run_command(\n 'r.timestamp',\n map=erosion_deposition,\n date='none')\n\n elif self.mode == \"usped_mode\":\n (evolved_elevation, time, depth, erosion_deposition,\n difference) = evol.usped()\n\n elif self.mode == \"rusle_mode\":\n (evolved_elevation, time, depth, sediment_flux,\n difference) = evol.rusle()\n\n else:\n raise RuntimeError(\n '{mode} mode does not exist').format(mode=self.mode)\n\n # register the evolved maps\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.elevation_timeseries,\n maps=evolved_elevation,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.depth_timeseries,\n maps=depth,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n try:\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.erdep_timeseries,\n maps=erosion_deposition,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n except (NameError, CalledModuleError):\n pass\n try:\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.flux_timeseries,\n maps=sediment_flux,\n start=evol.start,\n increment=increment,\n flags='i', overwrite=True)\n except (NameError, CalledModuleError):\n pass\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.difference_timeseries,\n maps=difference,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n\n # remove temporary maps\n gscript.run_command(\n 'g.remove',\n type='raster',\n name=['rain_excess'],\n flags='f')\n\n i = i+1\n\n # compute net elevation change\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{net_difference}\"\n \"={evolved_elevation}-{elevation}\".format(\n net_difference=net_difference,\n elevation=self.elevation,\n evolved_elevation=evol.elevation),\n overwrite=True)\n gscript.write_command(\n 'r.colors',\n map=net_difference,\n rules='-',\n stdin=difference_colors)", "def calc(self):\n\t\tfor neuron in self.neurons.items():\n\t\t\tneuron.calculate()", "def sun(self, date=None, local=True):\n \n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n sun = self.astral.sun_utc(date, self.latitude, self.longitude)\n\n if local:\n for key, dt in sun.items():\n sun[key] = dt.astimezone(self.tz)\n\n return sun", "def get_angam_data(jd_sunrise, jd_sunrise_tmrw, angam_type, ayanamsha_id=swe.SIDM_LAHIRI):\n swe.set_sid_mode(ayanamsha_id)\n\n w_moon = angam_type['w_moon']\n w_sun = angam_type['w_sun']\n arc_len = angam_type['arc_len']\n\n num_angas = int(360.0 / arc_len)\n\n # Compute angam details\n angam_now = get_angam(jd_sunrise, angam_type, ayanamsha_id=ayanamsha_id)\n angam_tmrw = get_angam(jd_sunrise_tmrw, angam_type, ayanamsha_id=ayanamsha_id)\n\n angams_list = []\n\n num_angas_today = (angam_tmrw - angam_now) % num_angas\n\n if num_angas_today == 0:\n # The angam does not change until sunrise tomorrow\n return [(angam_now, 
None)]\n else:\n lmoon = (swe.calc_ut(jd_sunrise, swe.MOON)[0] - swe.get_ayanamsa(jd_sunrise)) % 360\n\n lsun = (swe.calc_ut(jd_sunrise, swe.SUN)[0] - swe.get_ayanamsa(jd_sunrise)) % 360\n\n lmoon_tmrw = (swe.calc_ut(jd_sunrise_tmrw, swe.MOON)[0] -\n swe.get_ayanamsa(jd_sunrise_tmrw)) % 360\n\n lsun_tmrw = (swe.calc_ut(jd_sunrise_tmrw, swe.SUN)[0] -\n swe.get_ayanamsa(jd_sunrise_tmrw)) % 360\n\n for i in range(num_angas_today):\n angam_remaining = arc_len * (i + 1) - (((lmoon * w_moon +\n lsun * w_sun) % 360) % arc_len)\n\n # First compute approximate end time by essentially assuming\n # the speed of the moon and the sun to be constant\n # throughout the day. Therefore, angam_remaining is computed\n # just based on the difference in longitudes for sun and\n # moon today and tomorrow.\n approx_end = jd_sunrise + angam_remaining / (((lmoon_tmrw - lmoon) % 360) * w_moon +\n ((lsun_tmrw - lsun) % 360) * w_sun)\n\n # Initial guess value for the exact end time of the angam\n x0 = approx_end\n\n # What is the target (next) angam? It is needed to be passed\n # to get_angam_float for zero-finding. If the target angam\n # is say, 12, then we need to subtract 12 from the value\n # returned by get_angam_float, so that this function can be\n # passed as is to a zero-finding method like brentq or\n # newton. Since we have a good x0 guess, it is easy to\n # bracket the function in an interval where the function\n # changes sign. Therefore, brenth can be used, as suggested\n # in the scipy documentation.\n target = (angam_now + i - 1) % num_angas + 1\n\n # Approximate error in calculation of end time -- arbitrary\n # used to bracket the root, for brenth\n TDELTA = 0.05\n try:\n t_act = brentq(get_angam_float, x0 - TDELTA, x0 + TDELTA,\n args=(angam_type, -target, ayanamsha_id, False))\n except ValueError:\n logging.warning('Unable to bracket! 
Using approximate t_end itself.')\n logging.warning(locals())\n t_act = approx_end\n angams_list.extend([((angam_now + i - 1) % num_angas + 1, t_act)])\n return angams_list", "def annual_summary(self):\n \n #Initialize dict with info about all of year's storms\n hurdat_year = {'id':[],'operational_id':[],'name':[],'max_wspd':[],'min_mslp':[],'category':[],'ace':[]}\n \n #Search for corresponding entry in keys\n count_ss_pure = 0\n count_ss_partial = 0\n iterate_id = 1\n for key in self.dict.keys():\n\n #Retrieve info about storm\n temp_name = self.dict[key]['name']\n temp_vmax = np.array(self.dict[key]['vmax'])\n temp_mslp = np.array(self.dict[key]['mslp'])\n temp_type = np.array(self.dict[key]['type'])\n temp_time = np.array(self.dict[key]['date'])\n temp_ace = self.dict[key]['ace']\n\n #Get indices of all tropical/subtropical time steps\n idx = np.where((temp_type == 'SS') | (temp_type == 'SD') | (temp_type == 'TD') | (temp_type == 'TS') | (temp_type == 'HU'))\n\n #Get times during existence of trop/subtrop storms\n if len(idx[0]) == 0: continue\n trop_time = temp_time[idx]\n if 'season_start' not in hurdat_year.keys():\n hurdat_year['season_start'] = trop_time[0]\n hurdat_year['season_end'] = trop_time[-1]\n\n #Get max/min values and check for nan's\n np_wnd = np.array(temp_vmax[idx])\n np_slp = np.array(temp_mslp[idx])\n if len(np_wnd[~np.isnan(np_wnd)]) == 0:\n max_wnd = np.nan\n max_cat = -1\n else:\n max_wnd = int(np.nanmax(temp_vmax[idx]))\n max_cat = convert_category(np.nanmax(temp_vmax[idx]))\n if len(np_slp[~np.isnan(np_slp)]) == 0:\n min_slp = np.nan\n else:\n min_slp = int(np.nanmin(temp_mslp[idx]))\n\n #Append to dict\n hurdat_year['id'].append(key)\n hurdat_year['name'].append(temp_name)\n hurdat_year['max_wspd'].append(max_wnd)\n hurdat_year['min_mslp'].append(min_slp)\n hurdat_year['category'].append(max_cat)\n hurdat_year['ace'].append(temp_ace)\n hurdat_year['operational_id'].append(self.dict[key]['operational_id'])\n \n #Handle operational vs. 
non-operational storms\n\n #Check for purely subtropical storms\n if 'SS' in temp_type and True not in np.isin(temp_type,['TD','TS','HU']):\n count_ss_pure += 1\n\n #Check for partially subtropical storms\n if 'SS' in temp_type:\n count_ss_partial += 1\n\n #Add generic season info\n hurdat_year['season_storms'] = len(hurdat_year['name'])\n narray = np.array(hurdat_year['max_wspd'])\n narray = narray[~np.isnan(narray)]\n hurdat_year['season_named'] = len(narray[narray>=34])\n hurdat_year['season_hurricane'] = len(narray[narray>=65])\n hurdat_year['season_major'] = len(narray[narray>=100])\n hurdat_year['season_ace'] = np.sum(hurdat_year['ace'])\n hurdat_year['season_subtrop_pure'] = count_ss_pure\n hurdat_year['season_subtrop_partial'] = count_ss_partial\n \n #Return object\n return hurdat_year", "def run_daily(self, doy, Ta, Rew=1.0):\n \"\"\" update physiology and leaf area of planttypes and canopy\"\"\"\n for pt in self.planttypes:\n if pt.LAImax > 0.0:\n PsiL = (pt.Roots.h_root - self.z) / 100.0 # MPa\n pt.update_daily(doy, Ta, PsiL=PsiL, Rew=Rew) # updates pt properties\n\n # total leaf area index [m2 m-2]\n self.LAI = sum([pt.LAI for pt in self.planttypes])\n # total leaf area density [m2 m-3]\n self.lad = sum([pt.lad for pt in self.planttypes])\n # layerwise mean leaf characteristic dimension [m]\n self.leaf_length = sum([pt.leafp['lt'] * pt.lad for pt in self.planttypes]) / (self.lad + EPS)\n\n \"\"\" normalized flow statistics in canopy with new lad \"\"\"\n if self.Switch_Eflow and self.planttypes[0].Switch_lai:\n self.micromet.normalized_flow_stats(self.z, self.lad, self.hc)", "def update_totals(self):\n # Reset counts to 0\n self.total_f = self.total_s = self.total_intra = self.total_mac_regular = self.total_mac_infected = \\\n self.total_mac_activated = self.total_regular_fast = self.total_regular_slow = self.total_infected_fast = \\\n self.total_infected_slow = self.total_activated_fast = self.total_activated_slow = self.total_f_degree = \\\n self.total_s_degree = self.total_activation = 0\n self.total_f_o2 = self.total_s_o2 = 0.0\n\n for node in self.node_list.values():\n # Get values from node\n fast_in_node = node.subpopulations[BACTERIA_FAST]\n slow_in_node = node.subpopulations[BACTERIA_SLOW]\n intra_in_node = node.subpopulations[BACTERIA_INTRACELLULAR]\n reg_mac_in_node = node.subpopulations[MACROPHAGE_REGULAR]\n inf_mac_in_node = node.subpopulations[MACROPHAGE_INFECTED]\n act_mac_in_node = node.subpopulations[MACROPHAGE_ACTIVATED]\n degree = node.degree\n o2_tens = node.oxygen_tension\n # Update relevant totals\n self.total_f += fast_in_node\n self.total_s += slow_in_node\n self.total_intra += intra_in_node\n self.total_mac_regular += reg_mac_in_node\n self.total_mac_infected += inf_mac_in_node\n self.total_mac_activated += act_mac_in_node\n self.total_regular_fast += fast_in_node * reg_mac_in_node\n self.total_regular_slow += slow_in_node * reg_mac_in_node\n self.total_infected_fast += fast_in_node * inf_mac_in_node\n self.total_infected_slow += slow_in_node * inf_mac_in_node\n self.total_activated_fast += fast_in_node * act_mac_in_node\n self.total_activated_slow += slow_in_node * act_mac_in_node\n # TODO - check usage of degree\n self.total_f_degree += fast_in_node * degree\n self.total_s_degree += slow_in_node * degree\n self.total_f_o2 += fast_in_node * (1/o2_tens)\n self.total_s_o2 += slow_in_node * o2_tens\n self.total_activation += reg_mac_in_node * inf_mac_in_node", "def propagate(satellite):", "def cal_topology_feature(self):\n self.NPL()\n 
self.topo_efficiency_cal()\n self.efficiency_cal()\n self.cluster_cal()\n self.topo_diameter()\n self.spatial_diameter()", "def calculate(self):", "def _get_information(self):\n weather_dict = {}\n table_body = self.climate_table\n\n rows = table_body.find_all('tr')\n months = [col.get_text() for col in rows[0].find_all('td')[1:]]\n\n for row in rows[1:]:\n cols = row.find_all('td')\n key = cols[0].get_text()\n value_getter = self._value_getters_by_key.get(key, self._get_remote_workers)\n\n weather_dict.update({key: [(months[i],) + value_getter(col) for i, col in enumerate(cols[1:])]})\n\n return weather_dict", "def _calculate_salinity(self):\n params = self.parameters.keys()\n if 'seawater_salinity' in params:\n return\n else:\n if 'water_specific_conductance' in params:\n T = 25.0\n cond = self.data['water_specific_conductance'].rescale(\n sq.mScm).magnitude\n elif 'water_electrical_conductivity' in params:\n current_unit = self.data['water_temperature'].units\n temp_celsius = self.data['water_temperature'].rescale(pq.degC)\n temp_celsius += self._temperature_offset(current_unit, pq.degC)\n T = temp_celsius.magnitude\n cond = self.data['water_electrical_conductivity'].rescale(\n sq.mScm).magnitude\n else:\n return\n\n if 'water_depth_non_vented' in params:\n P = self.data['water_depth_non_vented'].rescale(\n sq.dbar).magnitude + (pq.atm).rescale(sq.dbar).magnitude\n elif 'water_depth_vented' in params:\n P = self.data['water_depth_vented'].rescale(sq.dbar).magnitude\n else:\n P = (pq.atm).rescale(sq.dbar).magnitude\n\n R = cond / 42.914\n sal = seawater.salt(R, T, P)\n\n self.set_standard_unit('seawater_salinity', sq.psu)\n self.data['seawater_salinity'] = sal * sq.psu", "def _obtain_data(self):\n (self.data_df, self.column_df, self.station_name, self.log_file, self.station_lat, self.station_lon,\n self.station_elev, self.ws_anemometer_height, self.missing_fill_value, self.script_mode,\n self.auto_mode, self.fill_mode, self.metadata_mode, self.generate_bokeh, self.metadata_df,\n metadata_series) = input_functions.obtain_data(self.config_path, self.metadata_path)\n\n if self.script_mode == 1: # correcting data\n self.mc_iterations = 1000 # Number of iters for MC simulation of thornton running solar radiation gen\n else:\n self.mc_iterations = 50 # if we're not correcting data then only do a few iterations to save time\n\n print(\"\\nSystem: Raw data successfully extracted from station file.\")\n\n # Extract individual variables from data frame back into to numpy arrays.\n self.data_year = np.array(self.data_df.year)\n self.data_month = np.array(self.data_df.month)\n self.data_day = np.array(self.data_df.day)\n self.data_tavg = np.array(self.data_df.tavg)\n self.data_tmax = np.array(self.data_df.tmax)\n self.data_tmin = np.array(self.data_df.tmin)\n self.data_tdew = np.array(self.data_df.tdew)\n self.data_ea = np.array(self.data_df.ea)\n self.data_rhavg = np.array(self.data_df.rhavg)\n self.data_rhmax = np.array(self.data_df.rhmax)\n self.data_rhmin = np.array(self.data_df.rhmin)\n self.data_rs = np.array(self.data_df.rs)\n self.data_ws = np.array(self.data_df.ws)\n self.data_precip = np.array(self.data_df.precip)\n\n self.output_file_path = \"correction_files/\" + self.station_name + \"_output\" + \".xlsx\"", "def rainfall_series(self):\n\n # assign local temporal variables\n datatype = 'strds'\n increment = str(self.rain_interval)+\" minutes\"\n raster = 'raster'\n rain_excess = 'rain_excess'\n net_difference = 'net_difference'\n #iterations = sum(1 for row in precip)\n\n # create a 
raster space time dataset\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.elevation_timeseries,\n title=self.elevation_title,\n description=self.elevation_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.depth_timeseries,\n title=self.depth_title,\n description=self.depth_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.erdep_timeseries,\n title=self.erdep_title,\n description=self.erdep_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.flux_timeseries,\n title=self.flux_title,\n description=self.flux_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.difference_timeseries,\n title=self.difference_title,\n description=self.difference_description,\n overwrite=True)\n\n # register the initial digital elevation model\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.elevation_timeseries,\n maps=self.elevation,\n start=self.start,\n increment=increment,\n flags='i',\n overwrite=True)\n\n # create evolution object\n evol = Evolution(\n elevation=self.elevation,\n precipitation=self.precipitation,\n start=self.start,\n rain_intensity=self.rain_intensity,\n rain_interval=self.rain_interval,\n walkers=self.walkers,\n runoff=self.runoff,\n mannings=self.mannings,\n detachment=self.detachment,\n transport=self.transport,\n shearstress=self.shearstress,\n density=self.density,\n mass=self.mass,\n grav_diffusion=self.grav_diffusion,\n erdepmin=self.erdepmin,\n erdepmax=self.erdepmax,\n k_factor=self.k_factor,\n c_factor=self.c_factor,\n m=self.m,\n n=self.n,\n threads=self.threads,\n fill_depressions=self.fill_depressions)\n\n # open txt file with precipitation data\n with open(evol.precipitation) as csvfile:\n\n # check for header\n has_header = csv.Sniffer().has_header(csvfile.read(1024))\n\n # rewind\n csvfile.seek(0)\n\n # skip header\n if has_header:\n next(csvfile)\n\n # parse time and precipitation\n precip = csv.reader(csvfile, delimiter=',', skipinitialspace=True)\n\n # initial run\n initial = next(precip)\n evol.start = initial[0]\n evol.rain_intensity = 'rain_intensity'\n # compute rainfall intensity (mm/hr)\n # from rainfall observation (mm)\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{rain_intensity}\"\n \"={rain_observation}\"\n \"/{rain_interval}\"\n \"*60.\".format(\n rain_intensity=evol.rain_intensity,\n rain_observation=float(initial[1]),\n rain_interval=self.rain_interval),\n overwrite=True)\n\n # determine mode and run model\n if self.mode == \"simwe_mode\":\n (evolved_elevation, time, depth, erosion_deposition,\n difference) = evol.erosion_deposition()\n # remove relative timestamps\n # from r.sim.water and r.sim.sediment\n gscript.run_command(\n 'r.timestamp',\n map=depth,\n date='none')\n gscript.run_command(\n 'r.timestamp',\n map=erosion_deposition,\n date='none')\n\n elif self.mode == \"usped_mode\":\n (evolved_elevation, time, depth, erosion_deposition,\n difference) = evol.usped()\n\n elif self.mode == \"rusle_mode\":\n (evolved_elevation, time, depth, sediment_flux,\n difference) = evol.rusle()\n\n else:\n raise RuntimeError(\n '{mode} mode does not exist').format(mode=self.mode)\n\n # register the evolved maps\n gscript.run_command(\n 't.register',\n type=raster,\n 
input=self.elevation_timeseries,\n maps=evolved_elevation,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.depth_timeseries,\n maps=depth,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n try:\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.erdep_timeseries,\n maps=erosion_deposition,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n except (NameError, CalledModuleError):\n pass\n try:\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.flux_timeseries,\n maps=sediment_flux,\n start=evol.start,\n increment=increment,\n flags='i', overwrite=True)\n except (NameError, CalledModuleError):\n pass\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.difference_timeseries,\n maps=difference,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n\n # run the landscape evolution model for each rainfall record\n for row in precip:\n\n # update the elevation\n evol.elevation=evolved_elevation\n\n # update time\n evol.start=row[0]\n\n # compute rainfall intensity (mm/hr)\n # from rainfall observation (mm)\n rain_intensity = 'rain_intensity'\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{rain_intensity}\"\n \"={rain_observation}\"\n \"/{rain_interval}\"\n \"*60.\".format(\n rain_intensity=rain_intensity,\n rain_observation=float(row[1]),\n rain_interval=self.rain_interval),\n overwrite=True)\n\n # derive excess water (mm/hr) from rainfall rate (mm/hr)\n # plus the depth (m) per rainfall interval (min)\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{rain_excess}\"\n \"={rain_intensity}\"\n \"+{depth}\"\n \"/1000.\"\n \"/{rain_interval}\"\n \"*60.\".format(\n rain_excess=rain_excess,\n rain_intensity=rain_intensity,\n depth=depth,\n rain_interval=self.rain_interval),\n overwrite=True)\n\n # update excess rainfall\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{rain_intensity} = {rain_excess}\".format(\n rain_intensity='rain_intensity',\n rain_excess=rain_excess),\n overwrite=True)\n evol.rain_intensity = rain_intensity\n\n # determine mode and run model\n if self.mode == \"simwe_mode\":\n (evolved_elevation, time, depth, erosion_deposition,\n difference) = evol.erosion_deposition()\n # remove relative timestamps\n # from r.sim.water and r.sim.sediment\n gscript.run_command(\n 'r.timestamp',\n map=depth,\n date='none')\n gscript.run_command(\n 'r.timestamp',\n map=erosion_deposition,\n date='none')\n\n elif self.mode == \"usped_mode\":\n (evolved_elevation, time, depth, erosion_deposition,\n difference) = evol.usped()\n\n elif self.mode == \"rusle_mode\":\n (evolved_elevation, time, depth, sediment_flux,\n difference) = evol.rusle()\n\n else:\n raise RuntimeError(\n '{mode} mode does not exist').format(mode=self.mode)\n\n # register the evolved maps\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.elevation_timeseries,\n maps=evolved_elevation,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.depth_timeseries,\n maps=depth,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n try:\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.erdep_timeseries,\n maps=erosion_deposition,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n except (NameError, CalledModuleError):\n pass\n try:\n gscript.run_command(\n 
't.register',\n type=raster,\n input=self.flux_timeseries,\n maps=sediment_flux,\n start=evol.start,\n increment=increment,\n flags='i', overwrite=True)\n except (NameError, CalledModuleError):\n pass\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.difference_timeseries,\n maps=difference,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n\n # remove temporary maps\n gscript.run_command(\n 'g.remove',\n type='raster',\n name=['rain_excess'],\n flags='f')\n\n # compute net elevation change\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{net_difference}\"\n \"= {evolved_elevation}-{elevation}\".format(\n net_difference=net_difference,\n elevation=self.elevation,\n evolved_elevation=evol.elevation),\n overwrite=True)\n gscript.write_command(\n 'r.colors',\n map=net_difference,\n rules='-',\n stdin=difference_colors)", "def cminfo_compute():\n from hera_mc import cm_sysutils \n h = cm_sysutils.Handling()\n cminfo = h.get_cminfo_correlator()\n snap_to_ant = {}\n ant_to_snap = {}\n for antn, ant in enumerate(cminfo['antenna_numbers']):\n name = cminfo['antenna_names'][antn]\n for pol in cminfo['correlator_inputs'][antn]:\n if pol.startswith('e'):\n e_pol = pol\n if pol.startswith('n'):\n n_pol = pol\n ant_to_snap[ant] = {}\n if e_pol != 'None':\n snapi_e, channel_e = snap_part_to_host_input(cminfo['correlator_inputs'][antn][0])\n ant_to_snap[ant]['e'] = {'host': snapi_e, 'channel': channel_e}\n if snapi_e not in snap_to_ant.keys():\n snap_to_ant[snapi_e] = [None] * 6\n snap_to_ant[snapi_e][channel_e] = name + 'E'\n if n_pol != 'None':\n snapi_n, channel_n = snap_part_to_host_input(cminfo['correlator_inputs'][antn][1])\n ant_to_snap[ant]['n'] = {'host': snapi_n, 'channel': channel_n}\n if snapi_n not in snap_to_ant.keys():\n snap_to_ant[snapi_n] = [None] * 6\n snap_to_ant[snapi_n][channel_n] = name + 'N'\n return snap_to_ant, ant_to_snap", "def extract_energies(self):\n path2save = 'Analysis/energies.pkl'\n #check, if I have to extract them, or they are already extracted. This the latter case, load them.\n if os.path.exists(path2save):\n print(\"extraction of the polarizaion has already been done. 
Loading polarizations from from pkl\")\n # TODO delete to check if exists above and do load without doing\n with open('Analysis/energies.pkl', 'rb') as fid:\n [self.E0_plus, self.E0_0, self.E0_minus,\n self.V0_plus, self.V0_0, self.V0_minus,\n self.V_env_plus, self.V_env_0, self.V_env_minus,\n self.E_env_plus, self.E_env_0, self.E_env_minus,\n self.n_mols] \\\n = pickle.load(fid)\n else:\n print('Energies are being extracting and will be saved to pkl')\n for i, radius in enumerate(self.radii):\n self.E_sd_plus[radius] = {}\n self.E_sd_0[radius] = {}\n self.E_sd_minus[radius] = {}\n\n self.E_sum_env_plus[radius] = {}\n self.E_sum_env_0[radius] = {}\n self.E_sum_env_minus[radius] = {}\n\n self.V0_plus[radius] = {}\n self.V0_0[radius] = {}\n self.V0_minus[radius] = {}\n\n self.E_env_plus[radius] = {}\n self.E_env_0[radius] = {}\n self.E_env_minus[radius] = {}\n\n self.V_env_plus[radius] = {}\n self.V_env_0[radius] = {}\n self.V_env_minus[radius] = {}\n\n self.n_mols[radius] = {}\n\n for j, core_id in enumerate(self.core_ids):\n #path2file_ip = \\\n # 'Analysis/' + self.dict_radii_folder_IP[radius] + '/Matrix-analysis-IP_' \\\n # + self.mol_name + '-Mol_' + str(core_id) + '_C_1.yml'\n\n path2file_ip = \\\n 'Analysis/IP_by_radius/' + self.dict_radii_folder_IP[radius]\\\n + '/Matrix-analysis-IP_' + self.mol_name + '.yml' # new\n path2file_ea = \\\n 'Analysis/EA_by_radius/' + self.dict_radii_folder_EA[radius]\\\n + '/Matrix-analysis-EA_' + self.mol_name + '.yml'\n\n # IP. Charged states: \"+\" and \"0\"\n with open(path2file_ip) as fid:\n ip_dict = yaml.load(fid, Loader=yaml.SafeLoader)\n with open(path2file_ea) as fid:\n ea_dict = yaml.load(fid, Loader=yaml.SafeLoader)\n\n\n # number of mols extraction\n self.n_mols[radius][core_id] = len(ip_dict[int(core_id)]['energies'])\n\n # sd extraction. 
E_sd = E_0 + V_0\n self.E_sd_plus[radius][core_id] = ip_dict[int(core_id)]['energies'][int(core_id)]['total_e_charged'] #new\n self.E_sd_0[radius][core_id] = ip_dict[core_id]['energies'][int(core_id)]['total_e_uncharged']\n self.E_sd_minus[radius][core_id] = ea_dict[int(core_id)]['energies'][int(core_id)]['total_e_charged']\n # E_0\n self.E0_plus[core_id] = ip_dict[int(core_id)]['energies'][int(core_id)]['total_e_charged_vacuum']\n self.E0_0[core_id] = ip_dict[int(core_id)]['energies'][int(core_id)]['total_e_uncharged_vacuum']\n self.E0_minus[core_id] = ea_dict[int(core_id)]['energies'][int(core_id)]['total_e_charged_vacuum']\n # # E_0_vacuum\n # self.E0_plus_vacuum[core_id] =\n # self.E0_0_vacuum[core_id] =\n # self.E0_minus_vacuum[core_id] =\n\n\n # V_0\n self.V0_plus[radius][core_id] = self.E_sd_plus[radius][core_id] - self.E0_plus[core_id]\n self.V0_0[radius][core_id] = self.E_sd_0[radius][core_id] - self.E0_0[core_id]\n self.V0_minus[radius][core_id] = self.E_sd_minus[radius][core_id] - self.E0_minus[core_id]\n\n # E_sum_env = \\sum_i\\ne 0 E_i \\sum_{j=0}^{N} V_{ij}\n ip_env_sub_dict = ip_dict[int(core_id)]['energies']#new\n del ip_env_sub_dict[int(core_id)]\n # del ip_env_sub_dict['info'] # TODO: do I need to dlt this?\n\n\n ea_env_sub_dict = ea_dict[int(core_id)]['energies'] # new\n del ea_env_sub_dict[int(core_id)]\n # del ea_env_sub_dict['info'] # TODO: do I need to dlt this?\n\n # tmp = ip_env_sub_dict['energies'][]\n\n list_total_e_env_plus = [ip_env_sub_dict[env_id]['total_e_charged'] for env_id in ip_env_sub_dict]\n self.E_sum_env_plus[radius][int(core_id)] = np.sum(list_total_e_env_plus) if not list_total_e_env_plus == [] else 0.0\n list_total_e_env_0 = [ip_env_sub_dict[env_id]['total_e_uncharged'] for env_id in ip_env_sub_dict]\n self.E_sum_env_0[radius][int(core_id)] = np.sum(list_total_e_env_0) if not list_total_e_env_0 == [] else 0.0\n list_total_e_env_minus = [ea_env_sub_dict[env_id]['total_e_charged'] for env_id in ea_env_sub_dict]\n self.E_sum_env_minus[radius][int(core_id)] = np.sum(list_total_e_env_minus) if not list_total_e_env_minus == [] else 0.0\n\n # E_env = \\sum_i \\ne 0 E_i. sum of DFT env energies.\n list_vacuum_env_e_plus = [ip_env_sub_dict[env_id]['total_e_charged_vacuum'] for env_id in ip_env_sub_dict]\n self.E_env_plus[radius][int(core_id)] = np.sum(list_vacuum_env_e_plus) if not list_vacuum_env_e_plus == [] else 0.0\n list_vacuum_env_e_0 = [ip_env_sub_dict[env_id]['total_e_uncharged_vacuum'] for env_id in ip_env_sub_dict]\n self.E_env_0[radius][int(core_id)] = np.sum(list_vacuum_env_e_0) if not list_vacuum_env_e_0 == [] else 0.0\n list_vacuum_env_e_minus = [ea_env_sub_dict[env_id]['total_e_charged_vacuum'] for env_id in ea_env_sub_dict]\n self.E_env_minus[radius][int(core_id)] = np.sum(list_vacuum_env_e_minus) if not list_vacuum_env_e_minus == [] else 0.0\n\n # V_env = 0.5 (\\sum_{i=1} \\sum_{j=1} V_{ij}). classical interaction of env. 
mols\n self.V_env_plus[radius][core_id] = 0.5 * (self.E_sum_env_plus[radius][core_id]\n - self.E_env_plus[radius][core_id]\n - self.V0_plus[radius][core_id])\n\n self.V_env_0[radius][core_id] = 0.5 * (self.E_sum_env_0[radius][core_id]\n - self.E_env_0[radius][core_id]\n - self.V0_0[radius][core_id])\n\n self.V_env_minus[radius][core_id] = 0.5 * (self.E_sum_env_minus[radius][core_id]\n - self.E_env_minus[radius][core_id]\n - self.V0_minus[radius][core_id])\n\n\n append_dict_with_mean(self.V0_plus, self.V0_0, self.V0_minus,\n self.V_env_plus, self.V_env_0, self.V_env_minus,\n self.E_env_plus, self.E_env_0, self.E_env_minus,\n self.E0_plus, self.E0_0, self.E0_minus,\n self.n_mols) # compute and add \"mean\" to all mentioned dicts\n\n with open('Analysis/energies.pkl', 'wb') as fid:\n pickle.dump([self.E0_plus, self.E0_0, self.E0_minus,\n self.V0_plus, self.V0_0, self.V0_minus,\n self.V_env_plus, self.V_env_0, self.V_env_minus,\n self.E_env_plus, self.E_env_0, self.E_env_minus,\n self.n_mols],\n fid)\n print(\"Energies are extracted and dumped to pkl\")", "def calcAll():\n global macd_objects\n global data\n\n for macd in macd_objects:\n try:\n if macd.pair not in data:\n data[macd.pair] = fetch(macd.pair) # get data\n data[macd.pair] = parse_data(data[macd.pair]) # in each pair is stored sdf-data itself\n\n except Exception as err:\n return jsonpify(err)\n\n sdf = macd.calculate_coefficient(data[macd.pair][macd.time_period])\n\n data = dict() # empty data\n return jsonpify([m.__dict__ for m in macd_objects])", "def __init__(self):\n\n self.Cp_air0 = config_earth.earth_properties['Cp_air0']\n self.Rsp_air = config_earth.earth_properties['Rsp_air']\n\n self.d = config_earth.balloon_properties['d']\n self.vol = math.pi*4/3*pow((self.d/2),3) #volume m^3\n self.surfArea = math.pi*self.d*self.d #m^2\n self.cs_area = math.pi*self.d*self.d/4.0 #m^2\n\n #self.emissEnv = config_earth.balloon_properties['emissEnv']\n self.areaDensityEnv = config_earth.balloon_properties['areaDensityEnv']\n self.mp = config_earth.balloon_properties['mp']\n self.mdot = 0\n self.massEnv = config_earth.balloon_properties['mEnv']\n self.Upsilon = config_earth.balloon_properties['Upsilon']\n\n self.vent = config_earth.simulation['vent']\n self.coord = config_earth.simulation['start_coord']\n self.t = config_earth.simulation['start_time']\n self.lat = math.radians(self.coord['lat'])\n self.Ls = self.t.timetuple().tm_yday\n self.min_alt = config_earth.simulation['min_alt']\n\n self.vm_coeff = .1 #virtual mass coefficient\n self.k = self.massEnv*config_earth.balloon_properties['cp'] #thermal mass coefficient\n\n self.dt = config_earth.dt", "def compute_values(self, update_statistics=False):\n\n self.compute_iterations()\n self.axsec = sum([one.axsec for one in self])\n self.xsec = sum([one.xsec for one in self])\n self.xerrc = sum([one.xerrc for one in self])\n self.xerru = math.sqrt(sum([one.xerru**2 for one in self]))\n\n self.nevents = sum([one.nevents for one in self])\n self.nw = sum([one.nw for one in self])\n self.maxit = len(self.yerr_iter) # \n self.nunwgt = sum([one.nunwgt for one in self]) \n self.wgt = 0\n self.luminosity = min([0]+[one.luminosity for one in self])\n if update_statistics:\n self.run_statistics.aggregate_statistics([_.run_statistics for _ in self])", "def getAllPoints(self):\n self.getOrigin()\n self.computesWingsMeshPoints()\n self.computesFuselageMeshPoints()\n self.readsMaterials()\n self.assembleMatrices()\n self.computesWingConnexions()\n\n logger.debug(self.aircraftTotalMass)\n # 
self.plotSectionsPoints() # for debugging", "def calibrateData(data, cal, antennas, sourceInfo, file=True, niter=None):\n # Loop over data\n for iant, dant in data.items():\n # Write results to a file\n writeOutputFile = False\n if file != False and file <> None:\n # Set file name\n writeOutputFile = True\n\n # date the output file to avoid having to parse huge files later on\n today = dt.date.today()\n dateStr = \"%i%02i%02i\" % (today.timetuple()[0], today.timetuple()[1], today.timetuple()[2]) \n \n if file == True:\n outputFileRoot = '%s_%.2d_%s.dat' % (RPNT_RESULTS, antennas[iant], dateStr)\n else:\n outputFileRoot = \"%s_%.2d_%s.dat\" % (file, antennas[iant], dateStr)\n\n # Open file\n fout = open(outputFileRoot, \"a\")\n fout.write(\"# Pointing data for antenna %d : %s\\n\" % (antennas[iant], time.asctime()))\n f=commands.freqSetup()\n fout.write(\"# Rest Frequency : %d\\n\" % f[0])\n fout.write(\"# UT : %s\\n\" % utils.getUT(timestamp=True))\n fout.write(\"# Source %s\\n\" % sourceInfo['name'])\n fout.write(\"#\\n\");\n fout.write(\"# Iter offset(az) offset(el) Amp sigma Az El\\n\");\n fout.write(\"# (arcmin) (arcmin) (Jy) (Jy) (deg) (deg)\\n\");\n\n # Get az/el\n mpAz = utils.getAntennaMp(antennas[iant]) + \".AntennaCommon.Drive.Track.actualAzimuth\"\n mpEl = utils.getAntennaMp(antennas[iant]) + \".AntennaCommon.Drive.Track.actualElevation\"\n antaz = commands.queryDouble(mpAz)\n antel = commands.queryDouble(mpEl)\n\n # Initialize\n cal[iant] = list()\n\n # Compute mean amplitude\n for d in dant:\n # Initialize\n sum = 0.0\n sumw = 0.0\n nwindows = len(d['use'])\n weights = np.zeros(nwindows)\n\n # Compute weighted average\n x = []\n for i in range(nwindows):\n if d['use'][i]:\n sum += d['amp'][i] * d['wt'][i]\n sumw += d['wt'][i]\n x.append(d['amp'][i])\n\n # Save data\n result = dict()\n if sumw > 0.0:\n # result['amp'] = sum / sumw\n x = sorted(x)\n n1 = len(x) / 2\n n2 = (len(x)-1)/ 2\n result['amp'] = 0.5 * (x[n1] + x[n2])\n result['fwhm'] = getFWHM(antennas[iant], sourceInfo['lofreq'])\n result['offaz'] = d['offaz']\n result['offel'] = d['offel']\n result['sigma'] = 1.0 / math.sqrt(sumw)\n cal[iant].append(result)\n\n # Write data\n if writeOutputFile and (niter == None or niter == d['niter']):\n fout.write(\"%6s %10.3f %10.3f %10.3f %10.3f %10.3f %10.3f\\n\" % \\\n (str(d['niter']), result['offaz'], result['offel'], result['amp'], result['sigma'], antaz, antel))\n\n # Close file\n fout.close()", "def main():\n\tshow_program_intro()\n\tbyte_lines = read_rain_gauge_sunnyside_school()\n\t#print_rain_guage_output(byte_lines)\n\ttotals_dict = parse_regex_daily_total(byte_lines)\n\ttotals_list = sort_rain_dictionary(totals_dict)\n\thighest_rainfall = get_day_highest_rainfall(totals_list)\n\tprint_highest_rainfall(highest_rainfall)\n\tyear_highest_rain = get_year_with_most_rain(totals_list)\n\tprint_year_most_rain(year_highest_rain)", "def _get_value(self, info):\n\n for function, data in info.items():\n for dimension, run_data in data.items():\n rezultat, local_mins, populations, fabicrated_args = run_data\n\n # prepare function\n function.set_args(fabicrated_args)\n run_f = function(dimensions=dimension)\n\n # get the best run\n best_run = None\n for run_id, value in populations.items():\n if not best_run:\n best_run = value\n elif (self._get_fit_pop(value, run_f, local_mins) <\n self._get_fit_pop(best_run, run_f, local_mins)):\n best_run = value\n\n # compute for the best run\n\n return 12", "def getAll(self):\n # Get VT\n self.getVT()\n # Process VT data\n self.processVT()\n # 
Get reverse DNS\n self.getRDNS()\n # Get passivetotal\n self.getPT()\n # Get Geolocation\n self.getGeo()\n # Get Shodan\n self.getShodan()", "def addCalcMethodVars(df, latitude, azimuth, slope):\n\tdf['a'] = 0.409 + (0.5016 * np.sin(np.deg2rad(df['sunset_hour_angle'] - 60)))\n\tdf['a_prime'] = df['a'] - df['diffuse_fraction']\n\tdf['b'] = 0.6609 - (0.4767 * np.sin(np.deg2rad(df['sunset_hour_angle'] - 60)))\n\tdf['d'] = np.sin(np.deg2rad(df['sunset_hour_angle'])) - np.deg2rad(df['sunset_hour_angle'] * np.cos(np.deg2rad(df['sunset_hour_angle'])))\n\tdf['A'] = np.cos(np.deg2rad(slope)) + (np.tan(np.deg2rad(latitude)) * np.cos(np.deg2rad(azimuth)) * np.sin(np.deg2rad(slope)))\n\tdf['B'] = (np.cos(np.deg2rad(df['sunset_hour_angle'])) * np.cos(np.deg2rad(slope))) + (np.tan(np.deg2rad(df['declination'])) * np.sin(np.deg2rad(slope)) * np.cos(np.deg2rad(azimuth)))\n\tdf['C'] = np.sin(np.deg2rad(slope)) * np.sin(np.deg2rad(azimuth)) / np.cos(np.deg2rad(latitude))\n\tdf['omega_sr_abs'] = np.absolute(\n\t\tnp.minimum(\n\t\t\tdf['sunset_hour_angle'], \n\t\t\tnp.rad2deg(np.arccos(((df['A'] * df['B']) + (df['C'] * np.sqrt((df['A'] ** 2) - (df['B'] ** 2) + (df['C'] ** 2)))) / ((df['A'] ** 2) + (df['C'] ** 2))))\n\t\t\t)\n\t\t)\n\tdf['omega_sr'] = np.where(\n\t\t((df['A'] > 0.0) & (df['B'] > 0)) | (df['A'] >= df['B']), \n\t\t-df['omega_sr_abs'], \n\t\tdf['omega_sr_abs']\n\t\t)\n\tdf['omega_ss_abs'] = np.absolute(\n\t\tnp.minimum(\n\t\t\tdf['sunset_hour_angle'], \n\t\t\tnp.rad2deg(np.arccos(((df['A'] * df['B']) - (df['C'] * np.sqrt((df['A'] ** 2) - (df['B'] ** 2) + (df['C'] ** 2)))) / ((df['A'] ** 2) + (df['C'] ** 2))))\n\t\t\t)\n\t\t)\n\tdf['omega_ss'] = np.where(\n\t\t((df['A'] > 0.0) & (df['B'] > 0)) | (df['A'] >= df['B']), \n\t\tdf['omega_ss_abs'], \n\t\t-df['omega_ss_abs']\n\t\t)\n\tdf['D'] = np.where(\n\t\tdf['omega_ss'] >= df['omega_sr'],\n\t\tnp.maximum(0.0,\n\t\t\t((1 / (2 * df['d'])) * \\\n\t\t (np.deg2rad(((df['b'] * df['A'] / 2) - (df['a_prime'] * df['B'])) * (df['omega_ss'] - df['omega_sr'])) + \\\n\t\t \t\t\t (((df['a_prime'] * df['A']) - (df['b'] * df['B'])) * (np.sin(np.deg2rad(df['omega_ss'])) - np.sin(np.deg2rad(df['omega_sr'])))) - \\\n\t\t\t\t\t\t (df['a_prime'] * df['C'] * (np.cos(np.deg2rad(df['omega_ss'])) - np.cos(np.deg2rad(df['omega_sr'])))) + \\\n\t\t\t\t\t\t ((df['b'] * df['A'] / 2) * ((np.sin(np.deg2rad(df['omega_ss'])) * np.cos(np.deg2rad(df['omega_ss']))) - \\\n\t\t\t\t\t\t \t\t \t\t\t\t\t (np.sin(np.deg2rad(df['omega_sr'])) * np.cos(np.deg2rad(df['omega_sr']))))) + \\\n\t\t\t\t\t\t ((df['b'] * df['C'] / 2) * (((np.sin(np.deg2rad(df['omega_ss']))) ** 2) - ((np.sin(np.deg2rad(df['omega_sr']))) ** 2)))\n\t\t\t\t\t\t )\n\t\t )\n\t\t\t),\n\t\tnp.maximum(0.0,\n\t\t\t((1 / (2 * df['d'])) * \\\n\t\t\t (np.deg2rad(((df['b'] * df['A'] / 2) - (df['a_prime'] * df['B'])) * \\\n\t\t\t\t\t\t (df['omega_ss'] - (-df['sunset_hour_angle']))) + \\\n\t\t\t (((df['a_prime'] * df['A']) - (df['b'] * df['B'])) * \\\n\t\t\t (np.sin(np.deg2rad(df['omega_ss'])) - np.sin(np.deg2rad(-df['sunset_hour_angle'])))) - \\\n\t\t\t (df['a_prime'] * df['C'] * (np.cos(np.deg2rad(df['omega_ss'])) - np.cos(np.deg2rad(-df['sunset_hour_angle'])))) + \\\n\t\t\t ((df['b'] * df['A'] / 2) * ((np.sin(np.deg2rad(df['omega_ss'])) * \\\n\t\t\t\t\t\t \t\t \t\t np.cos(np.deg2rad(df['omega_ss']))) - \\\n\t\t\t\t\t\t \t\t \t\t (np.sin(np.deg2rad(-df['sunset_hour_angle'])) * np.cos(np.deg2rad(-df['sunset_hour_angle']))))) + \\\n\t\t\t ((df['b'] * df['C'] / 2) * (((np.sin(np.deg2rad(df['omega_ss']))) ** 2) - 
((np.sin(np.deg2rad(-df['sunset_hour_angle']))) ** 2)))\n\t\t\t )\n\t\t\t ) + \\\n\t\t\t((1 / (2 * df['d'])) * (np.deg2rad(((df['b'] * df['A'] / 2) - (df['a_prime'] * df['B'])) * (df['sunset_hour_angle'] - df['omega_sr'])) + \\\n\t\t\t\t\t\t \t\t \t(((df['a_prime'] * df['A']) - (df['b'] * df['B'])) * (np.sin(np.deg2rad(df['sunset_hour_angle'])) - np.sin(np.deg2rad(df['omega_sr'])))) - \\\n\t\t\t\t\t\t \t\t \t(df['a_prime'] * df['C'] * (np.cos(np.deg2rad(df['sunset_hour_angle'])) - np.cos(np.deg2rad(df['omega_sr'])))) + \\\n\t\t\t\t\t\t \t\t \t((df['b'] * df['A'] / 2) * ((np.sin(np.deg2rad(df['sunset_hour_angle'])) * np.cos(np.deg2rad(df['sunset_hour_angle']))) - \\\n\t\t\t\t\t\t \t\t \t (np.sin(np.deg2rad(df['omega_sr'])) * np.cos(np.deg2rad(df['omega_sr']))))) + \\\n\t\t\t\t\t\t \t\t \t((df['b'] * df['C'] / 2) * (((np.sin(np.deg2rad(df['sunset_hour_angle']))) ** 2) - ((np.sin(np.deg2rad(df['omega_sr']))) ** 2)))\n\t\t\t\t\t\t \t\t \t)\n\t\t\t)\n\t\t\t)\n\t\t)\n\tdf['r_bar'] = df['D'] + \\\n\t\t\t\t (df['diffuse_fraction'] * \\\n\t\t\t\t (1 + np.cos(np.deg2rad(slope))) / 2) + \\\n\t\t\t\t (df['albedo'] * \\\n\t\t\t\t (1 - np.cos(np.deg2rad(slope))) / 2)\n\treturn df", "def full_analysis(self):\n print('FULL ANALYSIS\\n' +\n '----------------------------------\\n')\n #print('Basic Statistics') # Remove this and run 'basic_stats'\n results.append('FULL ANALYSIS\\n' +\n '----------------------------------\\n')\n print('Basic Information\\n' +\n '----------------------------')\n results.append('Basic Information\\n' +\n '----------------------------')\n self.info_density()\n self.calc_total_rows()\n self.show_empty()\n self.calc_null()\n self.calc_col_len()\n self.calc_row_len()\n self.calc_col_info()\n self.regex_info()", "def info():\n\n\t info = \"This package determines the day of the week.\"\n\t print(info)", "def update(self):\n url = '/weather/summary?version=2&lat={}&lon={}' \\\n .format(self.lat, self.lon)\n self.result = self.api.get(url)['weather']['summary'][0]", "def estimate_sunrise_sunset(self, date, verbose=True):\n if self.source_type != 'solar':\n raise ValueError(\"You can only estimate sunrise and sunset for \"\n \"solar sources.\")\n\n date = pd.Timestamp(date)\n\n if self.diurnal_pattern is None:\n if verbose:\n print(\"Warning: Source {} has no diurnal pattern, estimating \"\n \"sunrise and sunset using average of past data.\"\n .format(self.name), file=sys.stderr)\n return Source.estimate_sunrise_sunset(self, date)\n\n if verbose:\n print(\"{} {}: Using Diurnal Pattern to estimate sunrise and sunset\"\n .format(self.name, date.date()))\n\n diurnal_pattern = self.diurnal_pattern\n daily_pattern = diurnal_pattern[date:date+datetime.timedelta(hours=23)]\n\n sunrise, sunset = None, None\n\n # This will walk through finding first sun hour and first night hour\n for hour, pattern in enumerate(daily_pattern.values):\n if sunrise is None and pattern > 0:\n sunrise = hour\n\n # If sun has risen, and we have not found night and we reach a 0\n if sunrise is not None and sunset is None and pattern == 0:\n sunset = hour\n\n if sunrise is None and sunset is None:\n raise ValueError(\"No solar power was generated on {}\".format(date))\n\n return sunrise, sunset", "def mask_nighttime(lon, lat, date=date, mask_daytime=mask_daytime,\n ref_date=datetime.datetime(1899, 12, 31, 12),\n buffer_hours=buffer_hours, debug=False):\n # --- get lat and lon values from columns\n if debug:\n print((\"--- (s4-1) %s seconds ---\" % (time.time() - start_time)))\n # --- get sunrise and sunset for 
location\n o = ephem.Observer()\n # set lat (decimal?), lon (decimal?), and date (UTC)\n o.lat = str(lat)\n o.long = str(lon)\n o.date = date\n # planetary body\n s = ephem.Sun()\n if debug:\n print((\"--- (s4-2) %s seconds ---\" % (time.time() - start_time)))\n\n # Compute sun vs observer\n s.compute()\n if debug:\n print((\"--- (s4-3) %s seconds ---\" % (time.time() - start_time)))\n\n # Work out if day or night based on sunrises and sunsets\n mask_value = 0\n try:\n\n # get sunrise time and date\n next_rising = o.next_rising(s)\n next_setting = o.next_setting(s)\n\n # convert to datetime.datetime\n next_rising = add_days(ref_date, next_rising)\n next_setting = add_days(ref_date, next_setting)\n\n # did the sun last rise or set? (inc. any adjustments)\n sun_last_rose = False\n if next_setting < next_rising:\n sun_last_rose = True\n\n # Add buffer to rising/setting if provided with buffer_hours\n if buffer_hours != 0:\n\n # Calculate last rise\n previous_rising = o.previous_rising(s)\n # convert to datetime.datetime\n previous_rising = add_days(ref_date, previous_rising)\n # Calculate last setting\n previous_setting = o.previous_setting(s)\n # convert to datetime.datetime\n previous_setting = add_days(ref_date, previous_setting)\n\n # Calculate absolute difference\n time_from_rise = (date-previous_rising).total_seconds()\n time_till_set = (date-next_setting).total_seconds()\n time_from_set = (date-previous_setting).total_seconds()\n time_till_rise = (date-next_rising).total_seconds()\n\n # If absolutely difference less than buffer\n if abs(time_from_rise)/60./60. <= buffer_hours:\n mask_value = 1\n elif abs(time_till_set)/60./60. <= buffer_hours:\n mask_value = 1\n elif abs(time_from_set)/60./60. < buffer_hours:\n mask_value = 1\n elif abs(time_till_rise)/60./60. < buffer_hours:\n mask_value = 1\n\n # --- Check if daytime or nighttime and mask if condition met.\n if sun_last_rose:\n if mask_daytime:\n # ... and has not set yet, it must be daytime\n if (date < next_setting):\n mask_value = 1\n\n # if the sun last set... (mask nighttime is default)\n else:\n # if mask nighttime (aka not mask_daytime)\n if not mask_daytime:\n # ... 
and has not risen yet, it must be nighttime\n if (date < next_rising):\n mask_value = 1\n\n # Add gotcha for locations where sun is always up.\n except AlwaysUpError:\n if mask_daytime:\n mask_value = 1\n\n # Add gotcha for locations where sun is always down.\n except NeverUpError:\n if not mask_daytime:\n mask_value = 1\n\n except:\n print('FAIL')\n sys.exit()\n\n # Mask value in array\n return mask_value", "def sun_set_rise_times(self, date=None):\n rstimes = (self.sunset(date=date),\n self.evening_twilight_12(date=date),\n self.evening_twilight_18(date=date),\n self.morning_twilight_18(date=date),\n self.morning_twilight_12(date=date),\n self.sunrise(date=date))\n return rstimes", "def main(name, line1, line2, orbital_filename):\n #name = \"TERRA\"\n #line1 = \"1 25994U 99068A 16048.43680378 .00000258 00000-0 67198-4 0 9999\"\n #line2 = \"2 25994 98.1982 124.4247 0001352 105.3907 254.7441 14.57126067859938\"\n satellite = ephem.readtle(name, line1, line2)\n \n\n # Landsat 8\n #name = \"Landsat8\"\n #line1=\"1 39084U 13008A 16051.82349873 .00000188 00000-0 51829-4 0 9999\"\n #line2=\"2 39084 98.1988 123.2603 0001265 89.4360 270.6984 14.57110027160810\"\n #LD8 = ephem.readtle(name, line1, line2)\n \n\n sun = ephem.Sun()\n fov = np.radians(68.6)\n\n \"\"\"\n Make pandas dataframe to store swath information\n \"\"\"\n import pandas as pd\n data = {\"DateTime\": [],\"DOY\":[],\"Month\": [],\n \"orbit_id\":[], \"ground_lat\": [], \n \"ground_lon\": [], \"swath_width\": []}\n swaths = pd.DataFrame(data)\n swaths.set_index(keys=\"DateTime\")\n # generate shapefile\n\n orbit_id = 0\n # need to do splitted by hemisphere unfortunately..\n for orbit in make_an_orbit(satellite):\n #import pdb; pdb.set_trace()\n if len(orbit) > 1:\n \"\"\"\n So worth doing processing on orbit...\n\n \"\"\"\n sun = ephem.Sun()\n\n print(orbit[0].datetime)\n\n for overpass in orbit:\n overpass.only_daytime_overpasses(sun)\n overpass.derive_swath_width(fov)\n \"\"\"\n Create a tempoary dataframe for this orbit\n \"\"\"\n epoch = datetime.datetime(1970, 1, 1)\n #import pdb; pdb.set_trace()\n tmp_d = {\"DateTime\": [(o.datetime - epoch).total_seconds() for o in orbit],\n \"DOY\":[int(o.datetime.strftime('%j')) for o in orbit],\n \"Month\": [o.datetime.month for o in orbit],\n \"orbit_id\": orbit_id * np.ones(len(orbit)),\n \"ground_lat\": [o.lat for o in orbit],\n \"ground_lon\": [o.long for o in orbit],\n \"swath_width\": [o.swath_width for o in orbit]}\n tmp = pd.DataFrame(tmp_d)\n tmp.set_index(keys=\"DateTime\")\n #import pdb; pdb.set_trace()\n orbit_id +=1 \n \"\"\"\n Append to main dataframe\n \"\"\"\n swaths = swaths.append(tmp)\n #swaths.set_index(keys=\"DateTime\")\n\n \"\"\"\n Save the DataFrame to a file\n \"\"\"\n swaths = swaths.set_index(keys=\"DateTime\")\n #swaths.set_index(keys=\"DateTime\")\n #import pdb; pdb.set_trace()\n swaths.to_csv(orbital_filename, header=True)", "def _compute_energies(self):\n\n from scipy import weave\n\n start_time = time.time()\n\n # Temporary storage for computed phi and psi angles.\n phi = units.Quantity(numpy.zeros([self.nstates], numpy.float64), units.radians)\n psi = units.Quantity(numpy.zeros([self.nstates], numpy.float64), units.radians)\n\n # Compute reference energies.\n for replica_index in range(self.nstates):\n # Compute reference energy once.\n reference_energy = self.reference_state.reduced_potential(self.replica_coordinates[replica_index], platform=self.energy_platform)\n self.u_kl[replica_index,:] = reference_energy\n\n # Compute torsion angles.\n for 
replica_index in range(self.nstates): \n # Compute torsion angles.\n phi[replica_index] = self._compute_torsion(self.replica_coordinates[replica_index], 4, 6, 8, 14) \n psi[replica_index] = self._compute_torsion(self.replica_coordinates[replica_index], 6, 8, 14, 16)\n\n\n # Compute torsion energies.\n code = \"\"\"\n for(int replica_index = 0; replica_index < nstates; replica_index++) {\n double phi = PHI1(replica_index);\n double psi = PSI1(replica_index);\n long state_index = 1;\n for(int phi_index = 0; phi_index < nbins; phi_index++) {\n for(int psi_index = 0; psi_index < nbins; psi_index++) { \n // Compute torsion angles\n double phi0 = phi_index * delta;\n double psi0 = psi_index * delta;\n\n // Compute torsion energies.\n U_KL2(replica_index,state_index) += kappa*cos(phi-phi0) + kappa*cos(psi-psi0);\n state_index += 1;\n }\n }\n }\n \"\"\"\n\n # Stage input temporarily.\n nstates = self.nstates\n nbins = self.nbins\n delta = self.delta / units.radians\n kappa = self.kappa\n phi = phi / units.radians\n psi = psi / units.radians\n u_kl = self.u_kl\n try:\n # Execute inline C code with weave.\n info = weave.inline(code, ['nstates', 'nbins', 'delta', 'kappa', 'phi', 'psi', 'u_kl'], headers=['<math.h>', '<stdlib.h>'], verbose=2)\n self.u_kl = u_kl\n except:\n for replica_index in range(self.nstates): \n # Compute torsion restraint energies for all states. \n state_index = 1 \n for phi_index in range(self.nbins):\n phi0 = float(phi_index) * self.delta / units.radians \n for psi_index in range(self.nbins):\n psi0 = float(psi_index) * self.delta / units.radians\n # Compute torsion energies.\n self.u_kl[replica_index,state_index] += (self.kappa)*math.cos(phi[replica_index]-phi0) + (self.kappa)*math.cos(psi[replica_index]-psi0)\n #print \"(%6d,%6d) : %16s %16s : %16.1f %16.1f\" % (phi_index, psi_index, str(phi), str(psi), self.u_kl[replica_index,state_index], self.states[state_index].reduced_potential(self.replica_coordinates[replica_index]))\n # Increment state index.\n state_index += 1\n \n end_time = time.time()\n elapsed_time = end_time - start_time\n time_per_energy = elapsed_time / float(self.nstates)**2 \n if self.verbose: print \"Time to compute all energies %.3f s (%.3f per energy calculation).\\n\" % (elapsed_time, time_per_energy)\n\n return", "def finalize(self):\n for (phi_i, clsweights) in self.weights.iteritems():\n for (cls, weight) in clsweights.iteritems():\n weight.average(self.time)", "def calculations():\r\n\t\r\n\tpayload, avionics, booster = weight_input()\r\n\r\n\tdrogue_size, drogue_force = drogue_calc()\r\n\tmain_size, main_force = main_calc(avionics, booster, drogue_force) #total mass, payload detaches\r\n\r\n\tprint(\"Drogue is diameter is \" + str(drogue_size) + \" inches\")\r\n\tprint(\"Main is diameter is \" + str(main_size) + \" inches\")", "def getPressures(self, flaggedmeteo, useWeatherStations=True, scaleHeight=500.):\n self._weather = self.asdm.weatherTable().get()\n self._station = self.asdm.stationTable().get()\n self._antenna = self.asdm.antennaTable().get()\n antennas = []\n wStationId = {}\n wStationName = {}\n wStationDistance = {}\n flagged_meteo = flaggedmeteo.split()\n count = {}\n self.meanDeltaPressure = {}\n \n centralStationId = Tag(0)\n #for r in self._station:\n # if str(r.name()) == \"MeteoCentral\":\n # centralStationId = r.stationId()\n for r in self._station:\n if str(r.name()) == \"MeteoTB2\":\n centralStationId = r.stationId()\n \n if centralStationId == Tag(0):\n print(\"== no central station\")\n return\n refPos = 
self.asdm.stationTable().getRowByKey(centralStationId).position()\n refVector = pl.array([refPos[0].get(),refPos[1].get(),refPos[2].get()])\n for row in self._antenna:\n ant = row.name()\n antennas.append(ant)\n count[ant] = 0\n self.meanDeltaPressure[ant] = 0\n if useWeatherStations:\n stationId = row.stationId()\n r0 = self.asdm.stationTable().getRowByKey(stationId)\n\n d2min = 1e12\n for r in self._station:\n if (str(r.type()) == 'WEATHER_STATION') and (str(r.name()) not in flagged_meteo):\n d2 = 0\n for i in range(3):\n d2 += (r0.position()[i].get()-r.position()[i].get())**2\n if d2 < d2min: \n rows = self.asdm.weatherTable().getByContext(r.stationId())\n # test th epressure\n if rows[0].pressure().get() > 1000:\n # \n wStationName[ant] = r.name()\n wStationId[ant] = r.stationId()\n wStationDistance[ant] = sqrt(d2)\n d2min = d2\n print('%s/%s : Weather station %15s distance %10.2f m' \\\n %(ant, r0.name(), wStationName[ant], wStationDistance[ant])) \n \n self.deltaPressures = {}\n self.centralPressure = {}\n self.centralWaterPressure = {}\n self.centralTemperature = {}\n self.minPressure = 1e10\n self.maxPressure = -1e10\n \n for row in self.asdm.calDataTable().get():\n if str(row.calType()) == \"CAL_WVR\":\n scan = row.scanSet()[0]\n if scan not in list(self.scanArrayTimes.keys()):\n start = row.startTimeObserved().get()\n end = row.endTimeObserved().get()\n\n self.deltaPressures[scan] = {}\n rows = self.asdm.weatherTable().getByContext(centralStationId)\n for r in rows:\n ttt = r.timeInterval().start().get()\n if (ttt > start) and (ttt < end):\n found = True\n self.centralPressure[scan] = r.pressure().get()\n self.centralTemperature[scan] = r.temperature().get()\n for wvrrow in self.asdm.calWVRTable().get():\n #print wvrrow.calDataId(), row.calDataId()\n if wvrrow.antennaName() == self.refAntenna:\n if wvrrow.calDataId() == row.calDataId():\n water = wvrrow.water().get() # meters\n break\n # assuming scale height of 1000m\n scaleHeight = 1000.\n self.centralWaterPressure[scan] = self.centralTemperature[scan]*water*1000./217.*100*(1000./scaleHeight) ## in pascals.\n print(\"=== scan %2s pres %7.3f mb temp %7.3f K w %6.3f mm ppH2O %7.3f mb\" %\\\n (scan, self.centralPressure[scan]/100., self.centralTemperature[scan], water*1000, self.centralWaterPressure[scan]/100.))\n self.minPressure = min(self.minPressure, self.centralPressure[scan])\n self.maxPressure = max(self.minPressure, self.centralPressure[scan])\n\n for ant in antennas:\n # print \"antenna \", ant \n water = 0\n for wvrrow in self.asdm.calWVRTable().get():\n if wvrrow.antennaName() == ant:\n if wvrrow.calDataId() == row.calDataId():\n water = wvrrow.water().get() # meters\n break\n temp = self.centralTemperature[scan]\n water_pressure = temp*water*1000./217.*100.*(1000./scaleHeight) # pascals\n self.deltaPressures[scan][ant] = \\\n - (water_pressure-self.centralWaterPressure[scan] ) \n if useWeatherStations:\n rows = self.asdm.weatherTable().getByContext(wStationId[ant])\n sRow = self.asdm.stationTable().getRowByKey(wStationId[ant])\n pos = sRow.position()\n padVector = pl.array([pos[0].get(),pos[1].get(),pos[2].get()]) \n diffVector = padVector - refVector\n diffHeight = sqrt(padVector[0]**2+padVector[1]**2+padVector[2]**2)\n diffHeight -= sqrt(refVector[0]**2+refVector[1]**2+refVector[2]**2)\n found = False\n pres = 0\n for r in rows:\n ttt = r.timeInterval().start().get()\n if (ttt > start) and (ttt < end):\n found = True\n pres = r.pressure().get()\n temp = r.temperature().get()\n if found:\n 
self.deltaPressures[scan][ant] += \\\n pres - self.centralPressure[scan]*(1.-6.5e-3/293.5*diffHeight)**5.26 \n # if scan>1:\n self.meanDeltaPressure[ant] += self.deltaPressures[scan][ant]\n count[ant] += 1\n\n for ant in list(count.keys()):\n self.meanDeltaPressure[ant] /= count[ant]", "def refresh_all_info(self):\n # Do not use uuid as it's not available in state configured\n # Do not use self.get_attr as it would call refresh_all_info again :-)\n state_cmd = [CMD_ZONEADM, \"-z\",\n self._zone_attr[ZONE_ENTRY['ZNAME']], \"list\", \"-p\"]\n\n line_items = str(getoutputs(state_cmd)).split(\":\")\n for val in ZONE_ENTRY.values():\n # our ZONE_MAPING reflects _zone_attr\n self._zone_attr[val] = line_items[val]\n\n # other comes later net/anet\n extra_info = ['autoboot', 'brand', 'ip-type', 'bootargs', 'file-mac-profile', 'pool', 'limitpriv', 'scheduling-class', 'hostid', 'fs-allowed']\n info_cmd = [CMD_ZONECFG, \"-z\", self._zone_attr[ZONE_ENTRY['ZNAME']], \"info\"]\n line_items = str(getoutputs(info_cmd)).split(\"\\n\")\n\n for line in line_items:\n for attr in extra_info:\n if line.startswith(attr+\":\"):\n self._zone_attr[attr] = line[line.find(':')+1:].strip()", "def goto_sun(self, seconds_ahead = 0, blocking = True):\n assert self.is_initialized\n solar_ephemeris = self.devices['solar_ephemeris']\n tracking_mirror_positioner = self.controllers['tracking_mirror_positioner']\n #self.set_windings('on')\n #start tracking time\n t0 = time.time()\n #get current sun location\n jd_now, el_now, az_now = solar_ephemeris.update()\n #predict where sun will be at next control point\n jd_future, el_future, az_future = solar_ephemeris.predict(seconds_ahead, jd_now)\n #send start event\n info = OrderedDict()\n info['timestamp'] = t0\n info['seconds_ahead'] = seconds_ahead\n info['jd_now'] = jd_now\n info['az_now'] = az_now\n info['el_now'] = el_now\n info['jd_future'] = jd_future\n info['az_future'] = az_future\n info['el_future'] = el_future\n \n self._send_event(\"SOLAR_TRACKER_GOTO_SUN_STARTED\", info)\n if blocking:\n tracking_mirror_positioner.goto(az_target = az_future,\n el_target = el_future,\n blocking = blocking,\n )\n t1 = time.time()\n used_t = t1-t0\n #send end event\n info = OrderedDict()\n info['timestamp'] = t1\n info['az_pos'] = self.az_pos\n info['el_pos'] = self.el_pos\n info['used_time'] = used_t\n self._send_event(\"SOLAR_TRACKER_GOTO_SUN_COMPLETED\", info)\n return used_t\n else:\n tracking_mirror_positioner.goto(az_target = az_future,\n el_target = el_future,\n blocking = blocking,\n )", "def sun_earth_test(stepper_type, dt):\n # numerical params\n T = 0\n\n # physical params\n R = common.M_S/common.M_E\n MS = np.array([R, 1])\n G = common.get_G(common.M_E, common.AU, common.YR)\n f = common.get_f(G, MS)\n period = np.sqrt(4 * np.pi**2 / (G * sum(MS)) * (1 + 1 / R)**3)\n\n T_F = 2 * period\n V_E = np.sqrt(G * R / (1 + 1/R))\n ys = np.array([\n -1 / R, 0, 0, -V_E / R,\n 1, 0, 0, V_E\n ], dtype=np.float64)\n earth_pos = [ys[4:6]]\n solver = stepper_type(f, T, ys, T_F, max_step=dt, G=G, Ms=MS,\n get_accel=common.get_accel, get_jerk=common.get_jerk\n )\n times = [T]\n while solver.status == 'running':\n solver.step()\n earth_pos.append(np.copy(solver.y[4:6]))\n times.append(solver.t)\n earth_arr = np.array(earth_pos)\n times_arr = np.array(times)\n exact_earth = np.array(list(zip(\n np.cos(2 * np.pi / period * times_arr),\n np.sin(2 * np.pi / period * times_arr)\n )))\n return np.sqrt(sum(common.l2_norm(earth_arr, exact_earth))**2)", "def addCalcSolarVars(df, 
latitude):\n\tdf['sunset_hour_angle'] = np.rad2deg(np.arccos(-np.tan(np.deg2rad(latitude)) * \\\n\t\t\t\t\t\t\t\t\t\t\t\t np.tan(np.deg2rad(df['declination']))))\n\tdf['ET_insol'] = (24 / np.pi) * \\\n\t\t\t\t\t (df['Isc_prime'] / 1000) * \\\n\t\t\t\t\t ((np.cos(np.deg2rad(latitude)) * np.cos(np.deg2rad(df['declination'])) * np.sin(np.deg2rad(df['sunset_hour_angle']))) + \\\n\t \t\t\t\t (np.deg2rad(df['sunset_hour_angle']) * np.sin(np.deg2rad(latitude)) * np.sin(np.deg2rad(df['declination']))))\n\tdf['clearness'] = df['insolation_horizontal'] / df['ET_insol']\n\t# Calculate diffuse fraction\n\tdf['diffuse_fraction'] = (df['insolation_horizontal'] * (1.39 - (4.027 * df['clearness']) + (5.531 * (df['clearness'] ** 2)) - (3.108 * (df['clearness'] ** 3)))) / df['insolation_horizontal']\n\treturn df", "def initialise(self):\n for i in range(self.nx):\n self.T[:, i] = (\n self.t_sun\n + self.mu\n * self.m_u\n * self.nabla\n * self.g\n * (self.y - self.y_max)\n / self.kb\n )\n self.P = self.p_sun * (self.T / self.t_sun) ** (1 / self.nabla)\n\n if self.Gaussian_perturbation:\n x_mean = 6e6\n y_mean = 2e6\n sigma = 8e5\n xx, yy = np.meshgrid(self.x, self.y)\n gaussian = self.t_sun * np.exp(\n -((xx - x_mean) ** 2 + (yy - y_mean) ** 2) / (2 * sigma ** 2)\n )\n self.T[:, :] = self.T[:, :] + gaussian\n\n self.rho[:, :] = self.P * self.mu * self.m_u / (self.kb * self.T[:, :])\n self.e[:, :] = self.P[:, :] / (self.Y - 1)", "def _calc(self):\r\n tot_sum: float = 0 # ? total sum of the noise values\r\n max_amp: float = 0 # ? keep the sum in [0,1]\r\n amp: float = 1.0 # ? amplitude of each noise value\r\n freq: float = 1.0 # ? frequency for getting the detailed noise\r\n\r\n # for each octave we twice the frequency and multiply the amplitude \r\n # by persistance to get the detailed noise value\r\n # to keep the final sum value in the range [0, 1] we keep track of the \r\n # max amplitude (sum of all the amplitudes)\r\n for octave in range(self.octaves):\r\n noise_obj = PerlinNoise(self.inp_x*freq, self.inp_y*freq, self.inp_z*freq)\r\n # ? 
multiply the noise value by the amplitude\r\n tot_sum += noise_obj.val() * amp\r\n max_amp += amp\r\n\r\n amp *= self.persist\r\n freq *= 2.0 # double the freq each iteration\r\n\r\n # value is in the range [0,1]\r\n self.value = tot_sum / max_amp", "def get_ships_analysis(self):\n \n # Get SHIPS times\n times = self.search_ships()\n if len(times) <= 1:\n raise RuntimeError('SHIPS data is unavailable for the requested storm.')\n \n # Declare dict\n new_dict = {\n 'time': [],\n 'mslp': [],\n 'type': [],\n 'vmax': [],\n 'wmo_basin': [],\n }\n for attr in ['name', 'id', 'operational_id', 'year', 'season', 'basin', 'realtime']:\n new_dict[attr] = self[attr]\n new_dict['ace'] = 0.0\n \n # Construct data\n for time in times:\n ships = self.get_ships(time)\n if ships is None: continue\n if np.isnan(ships.lat[0]) or np.isnan(ships.lon[0]): continue\n\n # Add relevant variables\n new_dict['time'].append(time)\n new_dict['mslp'].append(np.nan)\n for key in ships.dict.keys():\n if key in ['fhr', 'vmax_noland_kt', 'vmax_lgem_kt']: continue\n\n # Special handling for storm type\n if key == 'storm_type':\n subtropical_flag = False\n derived_type = 'EX'\n try:\n if ships.dict['storm_type'][0] == 'SUBT':\n subtropical_flag = True\n derived_type = get_storm_type(ships.dict['vmax_land_kt'][0], subtropical_flag)\n if ships.dict['storm_type'][0] not in ['TROP', 'SUBT']:\n derived_type = 'EX'\n except:\n pass\n new_dict['type'].append(derived_type)\n\n # vmax handling\n elif key == 'vmax_land_kt':\n new_dict['vmax'].append(ships.dict[key][0])\n\n # Normal handling\n elif key in new_dict:\n new_dict[key].append(ships.dict[key][0])\n else:\n new_dict[key] = [ships.dict[key][0]]\n \n # Derive ACE\n if not np.isnan(new_dict['vmax'][-1]):\n new_dict['ace'] += accumulated_cyclone_energy(new_dict['vmax'][-1])\n\n # Derive basin\n new_dict['wmo_basin'].append(get_basin(new_dict['lat'][-1],\n new_dict['lon'][-1],\n self.basin))\n\n # Add other attributes\n new_dict['source_info'] = 'SHIPS Analysis'\n new_dict['source_method'] = 'UCAR SHIPS Archive'\n new_dict['source_url'] = 'https://hurricanes.ral.ucar.edu/'\n new_dict['invest'] = False\n new_dict['source'] = 'ships'\n new_dict['prob_2day'] = 'N/A'\n new_dict['prob_7day'] = 'N/A'\n new_dict['risk_2day'] = 'N/A'\n new_dict['risk_7day'] = 'N/A'\n \n return Storm(new_dict)", "def calculate_system_performance(self):\n\n self._calculate_high_order_wfe()\n self._calculate_strehl()", "def calculate(self):\n pass", "def make_data(args):\n mass_MJ = 1.142\n radius_RJ = 1.138\n gravity_SI = 23.970 \n Rs_Rsun = 0.805\n inc = 85.71\n t0 = 2454037.612\n sma = 8.839304998 # semi major axis in stellar radiu\n orb_per = 2.21857545 #in days\n ecc = 0.0041\n w_peri = -24.1 # longiutude of periastron\n limbdark = \"linear\"\n \n u_limbdark = [0.35]\n \n num_transit = 1\n \n dates = [2458383.77055943, 2458383.77384704, 2458383.77707875,\n 2458383.78030307, 2458383.78358918, 2458383.78681399,\n 2458383.79004101, 2458383.79326712, 2458383.79655574,\n 2458383.79984545, 2458383.80307906, 2458383.80629228,\n 2458383.80958299, 2458383.8128124 , 2458383.81603942,\n 2458383.81925973, 2458383.82248474, 2458383.82577195,\n 2458383.82900097, 2458383.83223048, 2458383.8354501 ,\n 2458383.83874811, 2458383.84196822, 2458383.84520053,\n 2458383.84847654, 2458383.85170346, 2458383.85493727,\n 2458383.85821578, 2458383.86144419, 2458383.86466921,\n 2458383.86790322, 2458383.87118233, 2458383.87441074,\n 2458383.87763435, 2458383.88092406, 2458383.88414957],\n #don't forget the coma at the end if there 
is only one transit !!!!!\n \n\n\n # Wmean = [2400.695909757236,2328.5343131275904,1972.9809993156186,\n # 1927.2107049022654,]\n # Wmean = [1634.5200937047302,1600.8109822367207],[1670.071564637037,1634.5459486709924,1600.8124596368639],\n Wmean = [2328.5343131275904], \n orderstot = [33]\n orders = [33],\n # orderstot = [46,47,48]\n # orders = [47,48],[46,47,48],\n \n # Vfiles = [\"Vcorr47_DRS2.txt\",\n # \"Vcorr48_DRS2.txt\",\n # ],[\"Vcorr46_Jun19-1_DRS2.txt\",\n # \"Vcorr47_Jun19-1_DRS2.txt\",\n # \"Vcorr48_Jun19-1_DRS2.txt\"\n # ],\n Vfiles = [\"V33_CO.txt\"], \n \n Ifiles = [\"I33_CO.txt\"],\n \n # if Stdfiles are not needed, for example with the Brogi likelihood, \n # uncomment the next line\n #Stdfiles = []\n Stdfiles = [\"Std33_CO.txt\"],\n \n lambdas = np.array([[ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [2291.84518119, 2362.55271775],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [1939.42197854, 1998.81548771],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [1758.50261646, 1812.39702422],\n [1718.50054581, 1771.64067835],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [1512.43747007, 1558.89713666],\n [1484.77586677, 1528.30354258],\n [1457.06015806, 1498.88570675],\n [1429.75333156, 1470.19096444],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [1306.967007 , 1343.21643463],\n [1285.02046052, 1320.56072659],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [1167.78440327, 1198.13940642],\n [1150.59417256, 1178.48372217],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. ],\n [ 0. , 0. 
]])\n\n return dict(\n mass_MJ=mass_MJ,\n radius_RJ=radius_RJ,\n\t\tgravity_SI = gravity_SI,\n\t\tRs_Rsun = Rs_Rsun,\n\t\tinc = inc,\n\t\tt0 = t0,\n\t\tsma = sma,\n\t\torb_per = orb_per,\n\t\tecc = ecc,\n\t\tw_peri = w_peri,\n Wmean = Wmean,\n\t\tlimbdark = limbdark,\n\t\tu_limbdark = u_limbdark,\n\t\tdates = dates,\n\t\tlambdas = lambdas,\n orders = orders,\n orderstot=orderstot,\n num_transit=num_transit,\n\t\tVfiles = Vfiles,\n\t\tIfiles = Ifiles,\n\t\tStdfiles = Stdfiles\n\t\t )", "def direct(sun_pos, grid):\n\n # for each pixel at top of grid pass sun rays in\n for i in xrange(grid.gr.shape[0]):\n \"\"\"\n Make an array starting at loc\n \"\"\"\n xpos = i * grid.xres\n ypos = grid.zres * grid.zsize\n pos = np.array(xpos, ypos)\n direction = pos - sun_pos / np.norm(pos - sun_pos) # this location minus \n r = ray(pos, direction)\n \"\"\"\n The ray now travels down through the canopy being\n altered by transmission and reflectance\n\n amount of scattering vs absorption is determined by leaf area density\n\n \"\"\"", "def update(self):\n url = '/weather/current/minutely' \\\n '?version=2&lat={}&lon={}&city={}&county={}&village={}' \\\n .format(self.lat, self.lon, self.city, self.county, self.village)\n self.result = self.api.get(url)['weather']['minutely'][0]", "def calculate_vars(self):\n pass", "def Analyze(self):\n \n self._analyzeLogs()\n for user in self._start_times:\n self._result[user] = self._zipTimes(user)", "def updateTemperature(self):\n sumv2 = 0\n for atom in self.atoms:\n sumv2 += atom.vx**2 + atom.vy**2 + atom.vz**2\n self.currentTemp = (self.m/(3*self.numAtoms*self.kb))*sumv2\n self.temperatures.append(self.currentTemp)", "def _calc(self):\r\n u = self._fadefunc(self.xf)\r\n v = self._fadefunc(self.yf)\r\n w = self._fadefunc(self.zf)\r\n\r\n # populate the hashes dict\r\n self._hash()\r\n \r\n # once the hash dict is populated, start calculating the dot product between \r\n # the gradient vector and the distance vectors, which is done in the _grad method.\r\n # finally linearly interpolate the values to get the avg value\r\n # first interpolate in the x-dir, then in y-dir\r\n x1: float = self._lerp(self._grad(self.hashes[\"aaa\"], self.xf, self.yf, self.zf),\r\n self._grad(self.hashes[\"baa\"], self.xf - 1, self.yf, self.zf), u)\r\n\r\n x2: float = self._lerp(self._grad(self.hashes[\"aba\"], self.xf, self.yf - 1, self.zf),\r\n self._grad(self.hashes[\"bba\"], self.xf - 1, self.yf - 1, self.zf), u)\r\n\r\n # the first y-dir lerp\r\n y1: float = self._lerp(x1, x2, v)\r\n\r\n x1: float = self._lerp(self._grad(self.hashes[\"aab\"], self.xf, self.yf, self.zf - 1),\r\n self._grad(self.hashes[\"bab\"], self.xf - 1, self.yf, self.zf - 1), u)\r\n\r\n x2: float = self._lerp(self._grad(self.hashes[\"abb\"], self.xf, self.yf - 1, self.zf - 1),\r\n self._grad(self.hashes[\"bbb\"], self.xf-1, self.yf-1, self.zf-1), u)\r\n\r\n # the second y-dir lerp\r\n y2: float = self._lerp(x1, x2, v)\r\n\r\n # the final noise value, which will be in the range [0, 1]\r\n self.value = (self._lerp(y1, y2, w) + 1)/2\r\n return self.value", "def get_geo_data(self):\n # Get all countries and create a dictionary by name\n countries_shp = shpreader.natural_earth(\n resolution='10m',\n category='cultural',\n name='admin_0_countries',\n )\n self.countries = list(shpreader.Reader(countries_shp).records())\n self.countries_by_name = {}\n self.countries_by_iso_a2 = {}\n for country in shpreader.Reader(countries_shp).records():\n self.countries_by_name[country.attributes['NAME_LONG']] = country\n 
self.countries_by_iso_a2[country.attributes['ISO_A2']] = country\n\n # Get all states and create a dictionary by name\n states_provinces_shp = shpreader.natural_earth(\n resolution='50m',\n category='cultural',\n name='admin_1_states_provinces',\n )\n# full_list = list(shpreader.Reader(states_provinces_shp).records())\n# self.states = [x for x in full_list if x.attributes['type_en'] == 'State']\n self.states = list(shpreader.Reader(states_provinces_shp).records())\n self.states_by_name = {}\n for state in self.states:\n self.states_by_name[state.attributes['name']] = state\n\n # Get all timezones and create a dictionary by name\n timezones_shp = shpreader.natural_earth(\n resolution='10m',\n category='cultural',\n name='time_zones',\n )\n self.timezones = list(shpreader.Reader(timezones_shp).records())\n self.timezones_by_name = {}\n for timezone in shpreader.Reader(timezones_shp).records():\n # Try to get the actual name. Something like `Europe/Berlin`\n timezone_name = timezone.attributes['tz_name1st']\n # If there is no name, we default to the utc offset name `-5` `+4.5`\n if timezone_name == '':\n timezone_name = timezone.attributes['name']\n\n if timezone_name not in self.timezones_by_name.keys():\n self.timezones_by_name[timezone_name] = timezone", "def draw_sun():\n lisandro.penup()\n lisandro.goto(40, 90)\n lisandro.begin_fill()\n lisandro.circle(150) # draws out a circle with a radius of 150 for the sun.\n lisandro.end_fill()\n lisandro.hideturtle()", "def calculate(self):\r\n\r\n pass", "def calculate(self):\r\n pass", "def do_setup(self, ants): \n log_filter = LogFilter()\n getLogger().addFilter(log_filter)\n\n self.hills = []\n self.directions = []\n\n self.seen = [] #areas that have been seen, use this to avoid repetition\n self.unseen = []\n self.stepped_on = []\n\n self.intent = {}\n self.lc = {} #center of mass for a location\n self.i = {} #number of iterations for an ant\n\n for row in range(ants.rows):\n for col in range(ants.cols):\n self.unseen.append((row, col))\n self.intent[(row,col)] = Intent.GATHER\n\n self.lc[(row,col)] = (-1.0,-1.0) #set up center of mass\n self.i[(row,col)] = -1", "def sun_on_date(location, date):\n\n sun_api_url = \"http://api.geonames.org/timezoneJSON?lat=\" + str(location[0]) + \\\n \"&lng=\" + str(location[1]) + \"&date=\" + date + \"&username=demo\"\n # use my name as username, if it does not work with demo\n sun_api_url_response = requests.get(sun_api_url)\n sun_data = sun_api_url_response.json()\n try:\n sunrise = sun_data[\"dates\"][0][\"sunrise\"][11:16]\n sunset = sun_data[\"dates\"][0][\"sunset\"][11:16]\n return sunrise, sunset\n except:\n print sun_data[\"status\"][\"message\"]\n print \"Change usarname to my name to make it work\"\n return exit()", "def start_simulation(self):\n\n\t\tif self.objects==10:#Adding colors for planet\n\t\t\tself.col_planet()\n\t\t\t\n\t\tfor step in range(self.steps-1):#iterator=all simulation steps\n\t\t\tvis.rate(600)#frames per sec\n\t\t\ti=0\n\t\t\tprint self.dt\n\t\t\tos.system('clear')\n\t\t\tprint \"==========================\\n\", \"Date: \",datetime(1930,12,24)+timedelta(seconds=step*self.dt) ,\"\\n==========================\"\n\t\t\tprint \"Steps: \",self.steps,\"Objects: \", self.objects, \"\\ndt: \",round(float(self.times[self.objects+1]/86400),5),\"days\\n==========================\"\n\t\t\tfor planet in self.ob:# iterator = all planets and sun\n\t\t\t\tx,y,z = (self.positions[i][step+1][0], self.positions[i][step+1][1], self.positions[i][step+1][2])\n\t\t\t\tplanet.pos = (x,y,z)#updating 
positions\n\t\t\t\tr = ((self.positions[0][step+1][0]-x)**2 + (self.positions[0][step+1][1]-y)**2 + (self.positions[0][step+1][2]-z)**2)**0.5#lenght from sun\n\t\t\t\tprint self.names[i], \"=\", r,\"AU\"\n\t\t\t\ti += 1\n\t\t\t\tself.p2.pos = (self.positions[0][step+1][0], self.positions[0][step+1][1], self.positions[0][step+1][2])#moving sun center\n\t\t\t\tself.p.pos = (self.center[0][step+1][0], self.center[0][step+1][1], self.center[0][step+1][2])#moving solar system mass center\n\t\t\t\tself.sun.pos = (self.positions[0][step+1][0], self.positions[0][step+1][1], self.positions[0][step+1][2])#moving sun \n\t\t\tprint \"==========================\\nBarycenter =\", round(((self.positions[0][step+1][0]-self.center[0][step+1][0])**2 + (self.positions[0][step+1][1]-self.center[0][step+1][1])**2 + (self.positions[0][step+1][2]-self.center[0][step+1][2])**2)**0.5,8),\"AU\"", "def update_variables(self):\n self.dl21 = self.l21-self.l11; self.dl22 = self.l22-self.l12; self.dl23 = self.l23-self.l13;\n self.kappa1, self.phi1, self.seg_len1 = self.configuration_space(self.l11, self.l12, self.l13, self.d, self.n)\n self.kappa2, self.phi2, self.seg_len2 = self.configuration_space(self.dl21, self.dl22, self.dl23, self.d, self.n)\n # aquire transformation matrices and tips for segment 1 and 2\n self.T01_bishop = self.transformation_matrix_bishop(self.kappa1, self.phi1, self.seg_len1)\n self.T12_bishop = self.transformation_matrix_bishop(self.kappa2, self.phi2, self.seg_len2)\n self.T02_bishop = np.matmul(self.T01_bishop, self.T12_bishop)\n self.T01_frenet = self.transformation_matrix_frenet(self.kappa1, self.phi1, self.seg_len1)\n self.T12_frenet = self.transformation_matrix_frenet(self.kappa2, self.phi2, self.seg_len2)\n self.T02_frenet = np.matmul(self.T01_frenet, self.T12_frenet)\n self.tip_vec1 = np.matmul(self.T01_bishop, self.base)[0:3]\n self.tip_vec2 = np.matmul(self.T02_bishop, self.base)[0:3]\n # Frenet frames\n self.normal_vec_frenet1 = self.T01_frenet[0:3, 0]\n self.binormal_vec_frenet1 = self.T01_frenet[0:3, 1]\n self.tangent_vec_frenet1 = self.T01_frenet[0:3, 2]\n self.normal_vec_frenet2 = self.T02_frenet[0:3, 0]\n self.binormal_vec_frenet2 = self.T02_frenet[0:3, 1]\n self.tangent_vec_frenet2 = self.T02_frenet[0:3, 2]\n # Bishop frames\n self.normal_vec_bishop1 = self.T01_bishop[0:3, 0]\n self.binormal_vec_bishop1 = self.T01_bishop[0:3, 1]\n self.tangent_vec_bishop1 = self.T01_bishop[0:3, 2]\n self.normal_vec_bishop2 = self.T02_bishop[0:3, 0]\n self.binormal_vec_bishop2 = self.T02_bishop[0:3, 1]\n self.tangent_vec_bishop2 = self.T02_bishop[0:3, 2]", "def _compute_(self):\n dic = \"data/sim/{dn}/{rad}/\".format(dn=self.event.strftime(\"%Y.%m.%d.%H.%M\"), rad=self.rad)\n fbgc = \"data/sim/{dn}/{rad}/exp.bgc.bm({bm}).elv(<elv>).csv\".format(dn=self.event.strftime(\"%Y.%m.%d.%H.%M\"), \n rad=self.rad, bm=self.bmnum)\n fflare = \"data/sim/{dn}/{rad}/exp.flare.bm({bm}).elv(<elv>).csv\".format(dn=self.event.strftime(\"%Y.%m.%d.%H.%M\"),\n rad=self.rad, bm=self.bmnum)\n cmd = \"export DIR_MODELS_REF_DAT=/home/shibaji/Collaboration_NCAR/code_rt_sd/pharlap/pharlap_4.1.3/dat;\\\n cd pharlap;\\\n matlab -nodisplay -nodesktop -nosplash -nojvm -r \\\"UT=[{ut}];rad='{rad}';dic='{dic}';fbgc='{fbgc}';bm={bm};\\\n fflare='{fflare}';rt_1D_sim;exit;\\\"\".format(ut=self.event.strftime(\"%Y %m %d %H %S\"), rad=self.rad,\n dic=dic, bm=self.bmnum, fbgc=fbgc, fflare=fflare)\n os.system(cmd)\n return", "def __init__(self, date, latitude, longitude, timezone):\n self.name = \"Sunrise Sunset Calculator\"\n 
self.date = date\n self.latitude = latitude\n self.longitude = longitude\n self.timezone = timezone\n return", "def _calculate_secondary_vars(self):\n print(\"\\nSystem: Now calculating secondary variables based on data provided.\")\n self.data_length = self.data_year.shape[0]\n self.station_pressure = 101.3 * (((293 - (0.0065 * self.station_elev)) / 293) ** 5.26) # units kPa, EQ 3 ASCE\n\n # Calculate DOY from Y/M/D values\n self.data_doy = []\n for i in range(self.data_length):\n # Create list of string DOY values\n self.data_doy.append(dt.date(self.data_year[i], self.data_month[i], self.data_day[i]).strftime(\"%j\"))\n\n self.data_doy = np.array(list(map(int, self.data_doy))) # Converts list of string values into ints\n\n # Calculate tavg if it is not provided by dataset\n if self.column_df.tavg == -1:\n # Tavg not provided\n self.data_tavg = np.array((self.data_tmax + self.data_tmin) / 2.0)\n else:\n # Tavg is provided, no action needs to be taken\n pass\n\n # Figure out which humidity variables are provided and calculate Ea and TDew if needed\n (self.data_ea, self.data_tdew) = data_functions.\\\n calc_humidity_variables(self.data_tmax, self.data_tmin, self.data_tavg, self.data_ea, self.column_df.ea,\n self.data_tdew, self.column_df.tdew, self.data_rhmax, self.column_df.rhmax,\n self.data_rhmin, self.column_df.rhmin, self.data_rhavg, self.column_df.rhavg)\n\n # Calculates secondary temperature values and mean monthly counterparts\n (self.delta_t, self.mm_delta_t, self.k_not, self.mm_k_not, self.mm_tmin, self.mm_tdew) = data_functions.\\\n calc_temperature_variables(self.data_month, self.data_tmax, self.data_tmin, self.data_tdew)\n\n # Calculates rso and grass/alfalfa reference evapotranspiration from refet package\n np.warnings.filterwarnings('ignore', 'invalid value encountered') # catch invalid value warning for nans\n (self.rso, self.mm_rs, self.eto, self.etr, self.mm_eto, self.mm_etr) = data_functions.\\\n calc_rso_and_refet(self.station_lat, self.station_elev, self.ws_anemometer_height, self.data_doy,\n self.data_month, self.data_tmax, self.data_tmin, self.data_ea, self.data_ws,\n self.data_rs)\n np.warnings.resetwarnings() # reset warning filter to default\n\n #########################\n # Back up original data\n # Original data will be saved to output file\n # Values are also used to generate delta values of corrected data - original data\n self.original_df = self.data_df.copy(deep=True) # Create an unlinked copy of read-in values dataframe\n self.original_df['rso'] = self.rso\n self.original_df['etr'] = self.etr\n self.original_df['eto'] = self.eto\n\n # Create datetime variables that will be used by bokeh plot and correction functions\n self.dt_array = []\n for i in range(self.data_length):\n self.dt_array.append(dt.datetime(self.data_year[i], self.data_month[i], self.data_day[i]))\n self.dt_array = np.array(self.dt_array, dtype=np.datetime64)\n self.mm_dt_array = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])\n self.data_null = np.empty(self.data_length) * np.nan", "def calc_SC(locator, radiation_csv, metadata_csv, latitude, longitude, weather_path, building_name):\n\n t0 = time.clock()\n\n # weather data\n weather_data = epwreader.epw_reader(weather_path)\n print 'reading weather data done'\n\n # solar properties\n g, Sz, Az, ha, trr_mean, worst_sh, worst_Az = solar_equations.calc_sun_properties(latitude, longitude, weather_data,\n settings.date_start, settings.solar_window_solstice)\n print 'calculating solar properties done'\n\n # get properties of the panel to 
evaluate\n panel_properties = calc_properties_SC_db(locator.get_supply_systems_database(), settings.type_SCpanel)\n print 'gathering properties of Solar collector panel'\n\n # select sensor point with sufficient solar radiation\n max_yearly_radiation, min_yearly_production, sensors_rad_clean, sensors_metadata_clean = \\\n solar_equations.filter_low_potential(weather_data, radiation_csv, metadata_csv, settings.min_radiation,\n settings.panel_on_roof, settings.panel_on_wall)\n\n print 'filtering low potential sensor points done'\n\n # Calculate the heights of all buildings for length of vertical pipes\n height = gpd.read_file(locator.get_zone_geometry())['height_ag'].sum()\n\n if not sensors_metadata_clean.empty:\n # calculate optimal angle and tilt for panels\n sensors_metadata_cat = solar_equations.optimal_angle_and_tilt(sensors_metadata_clean, latitude,\n worst_sh, worst_Az, trr_mean, max_yearly_radiation,\n panel_properties)\n print 'calculating optimal tilt angle and separation done'\n\n # group the sensors with the same tilt, surface azimuth, and total radiation\n number_groups, hourlydata_groups, number_points, prop_observers = solar_equations.calc_groups(sensors_rad_clean,\n sensors_metadata_cat)\n\n print 'generating groups of sensor points done'\n\n #calculate heat production from solar collectors\n results, Final = SC_generation(hourlydata_groups, prop_observers, number_groups, weather_data, g, Sz, Az, ha,\n settings.T_in_SC, height, panel_properties, latitude)\n\n\n # save SC generation potential and metadata of the selected sensors\n Final.to_csv(locator.SC_results(building_name= building_name), index=True, float_format='%.2f')\n sensors_metadata_cat.to_csv(locator.SC_metadata_results(building_name= building_name), index=True, float_format='%.2f')\n\n print 'Building', building_name,'done - time elapsed:', (time.clock() - t0), ' seconds'\n\n return", "def calculate_useful_stuff(self):\n\n self.pos = self.pair[1].position - self.pair[0].position\n self.vel = self.pair[1].velocity - self.pair[0].velocity\n\n self.r = self.pos.length()\n self.v = self.vel.length()\n self.inv_r = 1.0 / self.r\n\n self.rvec = self.pos * self.inv_r\n self.rdot = (self.rvec * self.vel).sum() * self.rvec\n self.rdot_mag = self.rdot.length()\n\n self.vth = self.vel - self.rdot\n self.vth_mag = self.vth.length()\n self.vth_vec = self.vth / self.vth_mag", "def estimate_sunrise_sunset(self, date):\n\n if self.source_type != 'solar':\n raise ValueError(\"You can only estimate sunrise and sunset for \"\n \"solar sources.\")\n\n date = pd.Timestamp(date)\n historic_data = self.data\n # The range is 14 days ago to the end of yesterday\n start_date = date - datetime.timedelta(days=14)\n end_date = date - datetime.timedelta(hours=1)\n\n # We grab all hours where actual power is greater than 0\n relevant_data = historic_data[start_date:end_date]\n daylight_data = relevant_data[relevant_data['actuals'] > 0]\n\n # We do this to stop a warning from appearing, we know it's a copy\n daylight_data.is_copy = False\n daylight_data['hours'] = daylight_data.index.hour\n\n # Find the min and max hour for each day where we have positive\n # observed power generation.\n sunrises = daylight_data.groupby(daylight_data.index.date).min()['hours']\n sunsets = daylight_data.groupby(daylight_data.index.date).max()['hours']\n\n # We round in order to have an integer value for sunrise and sunset.\n average_sunrise = int(max(round(sunrises.mean()) - 1, 0))\n average_sunset = int(min(round(sunsets.mean()) + 1, 23))\n\n return 
average_sunrise, average_sunset", "def calibrate(self):\n\t\twl = BLi.getWavelength()\n\t\tif abs(self.stokes()) <= .5:\n\t\t\txxx=self.sign()*180/pi*asin( wl/(2*self.dspace)) - (self.thp())\n\t\t\tself.offset2(-xxx)\n\t\t\tyyy=self.tthp()-self.sign()*2*180/pi*asin(wl/(2*self.dspace))-self.offset5()\n\t\t\tself.offset4(yyy)\n\t\t\tself.offset9(self.dettrans())\n\t\telif abs(self.stokes()-90.) <= .5:\n\t\t\txxx=self.sign()*180/pi*asin( wl/(2*self.dspace)) - (self.thp())\n\t\t\tself.offset3(-xxx)\n\t\t\tyyy=self.tthp()-self.sign()*2*180/pi*asin(wl/(2*self.dspace))-self.offset5()\n\t\t\tself.offset8(yyy)\n\t\t\tself.offset10(self.dettrans())\n\t\telse:\n\t\t\tprint \"Can't calibrate at stokes=\",self.stokes()\n\t\treturn [self.sign(),self.offset2(), self.offset3(),self.offset4(),self.offset5(),self.offset8(),self.offset9(),self.offset10()]", "def main(self,fnames):\n\n\n \"\"\"\n Extract the data and calculate the means in a few different ways\n \"\"\"\n\n\n ReturnDict = dict()\n with open('DataAttrition.txt', 'w') as f:\n # f.write(\"nTotal,nNan, nUseableInital, nAttrition, nUsableFinal \\n \")\n for i in range(len(fnames)):\n InternalDict = dict()\n fname=fnames[i]\n print 'fname', fname\n output=self.readafile(os.path.join(fname))\n\n onameIndividualTime = fname.split('/')[-1][:-3]+'_IndividualTime.pdf'\n odir = '/'.join(fname.split('/')[0:-1])\n TimeStr = fname.split('/')[-1].split('_')[0]\n # outFile = os.path.join(odir,oname)\n # outFileContour = os.path.join(odir,onameContour)\n # outFilePolar = os.path.join(odir,onamePolar)\n\n # outfilefull = os.path.join(dpath,oname)\n # outfilefull2 = os.path.join(dpath, 'Median','%04d.png'%count)\n\n # print i\n # if i==0:\n Altitude=output['/Winds']['Altitude'];\n if Altitude.ndim == 2:\n InternalDict['MeanAltitude']=numpy.mean(Altitude,axis=1)\n elif Altitude.ndim == 1:\n InternalDict['MeanAltitude'] = Altitude\n\n\n\n InternalDict['ZonalWind'] = output['/Winds']['WindGeo'][:,:,0]\n InternalDict['MeridWind'] = output['/Winds']['WindGeo'][:,:,1]\n InternalDict['errZonalWind'] =output['/Winds']['errWindGeo'][:,:,0]\n InternalDict['errMeridWind'] =output['/Winds']['errWindGeo'][:,:,1]\n InternalDict['ZonalFlow'] = output['/VectorVels']['VestGmag'][:,:,0]\n InternalDict['MeridFlow'] = output['/VectorVels']['VestGmag'][:,:,1]\n InternalDict['errZonalFlow'] = output['/VectorVels']['errVestGmag'][:,:,0]\n InternalDict['errMeridFlow'] = output['/VectorVels']['errVestGmag'][:,:,1]\n InternalDict['time'] = output['/Time']['UnixTime'];\n InternalDict['mlt'] = output['/Time']['MLTDecHrs']\n InternalDict['slt'] = output['/Time']['LocalDecHrs']\n InternalDict['ZonalE'] = output['/ElectricFields']['Efield'][:,0]#.read()']\n InternalDict['MeridE'] = output['/ElectricFields']['Efield'][:,1]\n InternalDict['NeMean'] = output['/Ne']['MeanNeFitted']\n\n InternalDict['AE'] = output['/GeophysicalParameters']['AE']\n InternalDict['KP'] = output['/GeophysicalParameters']['KP']\n InternalDict['AP'] = output['/GeophysicalParameters']['AP']\n InternalDict['F107'] = output['/GeophysicalParameters']['F107']\n\n # InternalDict['ZonalFlowFregion'] = output['/Fregion']['VestGmag_300km'][:,0]\n # InternalDict['MeridFlowFregion'] = output['/Fregion']['VestGmag_300km'][:,1]\n\n # else:\n # InternalDict['ZonalWind'] = numpy.concatenate((InternalDict['ZonalWind'],output['/Winds']['WindGeo'][:,:,0]),axis=0)\n # InternalDict['MeridWind'] = numpy.concatenate((InternalDict['MeridWind'],output['/Winds']['WindGeo'][:,:,1]),axis=0)\n # InternalDict['ZonalFlow'] = 
numpy.concatenate((InternalDict['ZonalFlow'],output['/VectorVels']['VestGmag'][:,:,0]),axis=0)\n # InternalDict['MeridFlow'] = numpy.concatenate((InternalDict['MeridFlow'],output['/VectorVels']['VestGmag'][:,:,1]),axis=0)\n # InternalDict['errZonalWind'] = numpy.concatenate((InternalDict['errZonalWind'],output['/Winds']['errWindGeo'][:,:,0]),axis=0)\n # InternalDict['errMeridWind'] = numpy.concatenate((InternalDict['errMeridWind'],output['/Winds']['errWindGeo'][:,:,1]),axis=0)\n # InternalDict['errZonalFlow'] = numpy.concatenate((InternalDict['errZonalFlow'], output['/VectorVels']['errVestGmag'][:,:,0]), axis=0)\n # InternalDict['errMeridFlow'] = numpy.concatenate((InternalDict['errMeridFlow'], output['/VectorVels']['errVestGmag'][:,:,1]), axis=0)\n # InternalDict['time'] = numpy.concatenate((InternalDict['time'],output['/Time']['UnixTime']),axis=0)\n # InternalDict['mlt'] = numpy.concatenate((InternalDict['mlt'], output['/Time']['MLTDecHrs']),axis=0)\n # InternalDict['slt'] = numpy.concatenate((InternalDict['slt'], output['/Time']['LocalDecHrs']),axis=0)\n # InternalDict['ZonalE'] = numpy.concatenate((InternalDict['ZonalE'],output['/ElectricFields']['Efield'][:,0]), axis=0)#.read()']\n # InternalDict['MeridE'] = numpy.concatenate((InternalDict['MeridE'],output['/ElectricFields']['Efield'][:,1]), axis=0)\n # InternalDict['NeMean'] = numpy.concatenate((InternalDict['NeMean'],output['/Ne']['MeanNeFitted']), axis=0)\n # InternalDict['AE'] = numpy.concatenate((output['/GeophysicalParameters']['AE'],output['/GeophysicalParameters']['AE']),axis=0)\n # InternalDict['KP'] = numpy.concatenate((output['/GeophysicalParameters']['KP'], output['/GeophysicalParameters']['KP']), axis=0)\n # InternalDict['AP'] = numpy.concatenate((output['/GeophysicalParameters']['AP'], output['/GeophysicalParameters']['AE']), axis=0)\n # InternalDict['F107'] = numpy.concatenate((output['/GeophysicalParameters']['F107'], output['/GeophysicalParameters']['F107']), axis=0)\n\n # filter by velocity\n # Ibad=numpy.where( (InternalDict['ZonalWind'] > 500.0) | (InternalDict['ZonalWind'] < -500. ) | \\\n # (InternalDict['MeridWind'] > 500.0) | (InternalDict['MeridWind'] < -500.) | \\\n # (InternalDict['errZonalWind'] > 500.) | (InternalDict['errMeridWind'] > 500.) 
| \\\n # (InternalDict['NeMean'] < 1.e11) \\\n # )\n nTot = float(numpy.ravel(InternalDict['ZonalWind']).shape[0])\n print numpy.where(numpy.isfinite(numpy.ravel(InternalDict['ZonalWind'])) == False)[0]\n print 'nnan raw', numpy.where(numpy.isnan(numpy.ravel(InternalDict['ZonalWind']))==True)\n\n # note if you look at the raw output where outputs a tuple of two arrays\n # those two arrays combined will double count.\n\n nNan = float(numpy.where(numpy.isnan(numpy.ravel(InternalDict['ZonalWind'])) == True)[0].shape[0])\n qnan = numpy.where(numpy.isnan(numpy.ravel(InternalDict['ZonalWind'])) == True)\n print numpy.ravel(InternalDict['ZonalWind'])[qnan]\n\n qUse = numpy.where(numpy.isfinite(numpy.ravel(InternalDict['ZonalWind']))==True)\n print numpy.ravel(InternalDict['ZonalWind'])[qUse]\n\n nUsableInital = float(numpy.where(numpy.isfinite(numpy.ravel(InternalDict['ZonalWind']))==True)[0].shape[0])\n print 'Total starting Data,', numpy.ravel(InternalDict['ZonalWind']).shape[0], numpy.ravel(InternalDict['NeMean']).shape[0]\n # print 'Data initially Nans', numpy.ravel(numpy.where(numpy.isfinite(InternalDict['ZonalWind']) == False)).shape[0], numpy.ravel(numpy.where(numpy.isnan(InternalDict['ZonalWind']) == True)).shape[0]/nTot\n # print 'Usable Data', numpy.ravel(InternalDict['ZonalWind']).shape[0]-numpy.ravel(numpy.where(numpy.isnan(InternalDict['ZonalWind']) == True)).shape[0]\n print 'nUsableInital, nNan', nUsableInital, nNan\n nTotalCheck = 0.\n nUsableCheck = 0.\n f.write('%s \\n'%TimeStr)\n f.write('Initial Usable Data \\n')\n for i in range(InternalDict['ZonalWind'].shape[1]):\n nUsableInitalAlt = float(numpy.where(numpy.isfinite(numpy.ravel(InternalDict['ZonalWind'][:,i]))==True)[0].shape[0])\n print 'alt, initial usable data', InternalDict['MeanAltitude'][i],nUsableInitalAlt, InternalDict['ZonalWind'][:,i].shape\n f.write('alt, %0.1f, %0.1f \\n'%(InternalDict['MeanAltitude'][i],nUsableInitalAlt))\n nTotalCheck = InternalDict['ZonalWind'][:,i].shape[0]+nTotalCheck\n nUsableCheck = nUsableInitalAlt + nUsableCheck\n print 'initial nTotal check nUsable+nNan, nTot', nUsableInital+nNan, nTot\n print 'CHECK nUsableInital, nTotal', nUsableInital, nUsableCheck, nTot, nTotalCheck\n Ibad=numpy.where( (InternalDict['ZonalWind'] > self.config['DATADISCARD']['ZonalWindMax']) | \\\n (InternalDict['ZonalWind'] < self.config['DATADISCARD']['ZonalWindMin'] ) | \\\n (InternalDict['MeridWind'] > self.config['DATADISCARD']['ZonalWindMax']) | \\\n (InternalDict['MeridWind'] < self.config['DATADISCARD']['ZonalWindMin']) | \\\n (InternalDict['errZonalWind'] > self.config['DATADISCARD']['ErrorZonalWind']) | \\\n (InternalDict['errMeridWind'] > self.config['DATADISCARD']['ErrorMeridWind']) | \\\n (InternalDict['NeMean'] < self.config['DATADISCARD']['NeMean']) \\\n )\n\n\n\n # f.write(\"nTotal,nNan, nUseableInital, nAttrition, nUsableFinal \\n \")\n\n InternalDict['ZonalWind'][Ibad]=numpy.nan\n InternalDict['MeridWind'][Ibad]=numpy.nan\n InternalDict['errZonalWind'][Ibad]=numpy.nan\n InternalDict['errMeridWind'][Ibad]=numpy.nan\n InternalDict['ZonalFlow'][Ibad] = numpy.nan\n InternalDict['MeridFlow'][Ibad] = numpy.nan\n InternalDict['errZonalFlow'][Ibad] = numpy.nan\n InternalDict['errMeridFlow'][Ibad] = numpy.nan\n InternalDict['NeMean'][Ibad] = numpy.nan\n\n # print 'Data After Filtering Nans', numpy.ravel(numpy.where(numpy.isnan(InternalDict['ZonalWind']) == True)).shape[0]\n # print 'Number of finite data points', numpy.ravel(numpy.where(numpy.isfinite(InternalDict['ZonalWind']) == True)).shape[0]\n # 
nUsableFinal = float(numpy.ravel(numpy.where(numpy.isfinite(InternalDict['ZonalWind']) == True)).shape[0])\n nUsableFinal = float(numpy.where(numpy.isfinite(numpy.ravel(InternalDict['ZonalWind']))==True)[0].shape[0])\n nNanFinal = float(numpy.where(numpy.isnan(numpy.ravel(InternalDict['ZonalWind'])) == True)[0].shape[0])\n print 'Ibad size', numpy.ravel(Ibad[0]).shape[0]\n print 'nUsableFinal, nNanFinal,nTot check', nUsableFinal, nNanFinal, nUsableFinal+nNanFinal, numpy.ravel(InternalDict['ZonalWind']).shape[0]\n\n print 'total ending data number', numpy.ravel(InternalDict['ZonalWind']).shape[0]\n nTotalCheck = 0.\n nUsableCheck = 0.\n f.write('\\n Final Usable data \\n')\n for i in range(InternalDict['ZonalWind'].shape[1]):\n nUsableInitalAlt = float(numpy.where(numpy.isfinite(numpy.ravel(InternalDict['ZonalWind'][:,i]))==True)[0].shape[0])\n print 'alt, initial usable data', InternalDict['MeanAltitude'][i],nUsableInitalAlt, InternalDict['ZonalWind'][:,i].shape\n TotalCheck = InternalDict['ZonalWind'][:,i].shape[0]+nTotalCheck\n nUsableCheck = nUsableInitalAlt + nUsableCheck\n f.write('alt, %0.1f, %0.1f \\n'%(InternalDict['MeanAltitude'][i],nUsableInitalAlt))\n print 'initial nTotal check nUsable+nNan, nTot', nUsableInital+nNan, nTot\n print 'CHECK nUsableInital, nUsableCheck, nTot, nTotalCheck ', nUsableInital, nUsableCheck, nTot, nTotalCheck\n print '\\n \\n'\n\n # do not need to do Ibad for AE,KP,F107, etc.\n f.write( '-------------Hours-------------------- \\n')\n f.write('%s \\n'%TimeStr)\n # try to determine how much data in terms of time\n for i in range(InternalDict['ZonalWind'].shape[1]):\n qGoodData = numpy.where(numpy.isfinite(numpy.ravel(InternalDict['ZonalWind'][:,i]))==True)[0]\n # print 'test WindData', numpy.ravel(InternalDict['ZonalWind'][:,i])[qGoodData]\n print 'qgoodDataShape', qGoodData.shape\n tmpHours = numpy.sum(InternalDict['time'][qGoodData,1]-InternalDict['time'][qGoodData,0])/3600.\n totalTime = numpy.nansum(InternalDict['time'][:,1]-InternalDict['time'][:,0])/3600.\n # print 'alt, initial usable data', InternalDict['MeanAltitude'][i],nUsableInitalAlt, InternalDict['ZonalWind'][:,i].shape\n f.write('alt, %0.1f, %0.1f, %0.1f \\n'%(InternalDict['MeanAltitude'][i], tmpHours,totalTime))\n f.write('\\n ########################################## \\n ')\n # in case I need to check that the filtering is working\n # for i in range(ZonalWind.shape[0]):\n # print i, numpy.nanmax(ZonalWind[i,:]), numpy.nanmin(ZonalWind[i,:])\n\n\n MeanTime=numpy.nanmean(InternalDict['time'],axis=1)\n # filter out all times with nans\n qnan = numpy.where(numpy.isnan(MeanTime) == False)[0]\n\n # print 'MeanTime.shape', MeanTime.shape, mlt.shape, slt.shape\n\n if (MeanTime.shape[0] == InternalDict['mlt'].shape[0]) & (MeanTime.shape[0] == InternalDict['slt'].shape[0]):\n MeanTime = MeanTime[qnan]\n InternalDict['mlt'] = InternalDict['mlt'][qnan]\n InternalDict['slt'] = InternalDict['slt'][qnan]\n else:\n raise ValueError (\"Wrong dimensions on time arrays\")\n\n\n InternalDict['ZonalWind'] = InternalDict['ZonalWind'][qnan,:]\n InternalDict['MeridWind'] = InternalDict['MeridWind'][qnan,:]\n InternalDict['errZonalWind'] = InternalDict['errZonalWind'][qnan,:]\n InternalDict['errMeridWind'] = InternalDict['errMeridWind'][qnan,:]\n InternalDict['ZonalE'] = InternalDict['ZonalE'][qnan]\n InternalDict['MeridE'] = InternalDict['MeridE'][qnan]\n InternalDict['ZonalFlow'] = InternalDict['ZonalFlow'][qnan,:]\n InternalDict['MeridFlow'] = InternalDict['MeridFlow'][qnan,:]\n InternalDict['errZonalFlow'] = 
InternalDict['errZonalFlow'][qnan,:]\n InternalDict['errMeridFlow'] = InternalDict['errMeridFlow'][qnan,:]\n InternalDict['NeMean'] = InternalDict['NeMean'][qnan,:]\n\n # # added on 10/30/2018\n # InternalDict['ZonalFlowFregion'] = InternalDict['ZonalFlowFregion'][qnan]\n # InternalDict['MeridFlowFregion'] = InternalDict['MeridFlowFregion'][qnan]\n\n print 'AE', len(qnan), InternalDict['AE'].shape\n InternalDict['AE'] = InternalDict['AE'][qnan]\n InternalDict['KP'] = InternalDict['KP'][qnan]\n InternalDict['AP'] = InternalDict['AP'][qnan]\n InternalDict['F107'] = InternalDict['F107'][qnan]\n\n # have some sort of filtering\n # need to fiilter out wind estimates > 500 or 100 m/s\n\n # new function which will basically calculate the mean and then plot the data\n\n \"\"\"\n Setting up the time grid\n \"\"\"\n # dminute = self.config['TIME']['dMinutes']\n dhours = self.config['TIME']['TimeIntervalMinutes']/60.\n dt = self.config['TIME']['TimeIntervalLengthMinutes']/60.\n DecimalHoursTimeGrid =numpy.arange(0,24,dhours)\n DecimalTime = numpy.array([datetime.datetime.utcfromtimestamp(t) for t in MeanTime])\n DecimalHours = numpy.array([t.hour+t.minute/60.+t.second/3600. for t in DecimalTime])\n InternalDict['ut'] = DecimalHours\n\n\n\n\n # utTimeDict = CalculateMeanWindDict(DecimalHoursTimeGrid,dt,DecimalHours,MeanTime,\\\n # ZonalWind,MeridWind,errZonalWind, \\\n # errMeridWind,ZonalE, MeridE,MeanAltitude, \\\n # ZonalFlow,MeridFlow,errZonalFlow,errMeridFlow,NeMean)\n\n outDict = dict()\n outDict['ut'] = self.CalculateMeanWindDict(DecimalHoursTimeGrid,dt,InternalDict['ut'],InternalDict)\n outDict['slt'] = self.CalculateMeanWindDict(DecimalHoursTimeGrid,dt,InternalDict['slt'],InternalDict)\n outDict['mlt'] = self.CalculateMeanWindDict(DecimalHoursTimeGrid,dt,InternalDict['mlt'],InternalDict)\n\n ReturnDict[TimeStr] = outDict\n f.close()\n # mltTimeDict = self.CalculateMeanWindDict(DecimalHoursTimeGrid,dt,mlt,MeanTime,\\\n # ZonalWind,MeridWind,errZonalWind, \\\n # errMeridWind,ZonalE, MeridE,MeanAltitude,\\\n # ZonalFlow,MeridFlow,errZonalFlow,errMeridFlow,\\\n # NeMean, AE, AP,KP, F107)\n return ReturnDict" ]
[ "0.60909706", "0.6054457", "0.5922382", "0.56137407", "0.5576481", "0.5530526", "0.55205214", "0.5497912", "0.5490896", "0.5478395", "0.5460385", "0.5441609", "0.54134166", "0.5389697", "0.53693914", "0.53531724", "0.5340516", "0.53140557", "0.5308317", "0.52656955", "0.5256723", "0.52482164", "0.51912045", "0.5164172", "0.5157644", "0.51570296", "0.5145734", "0.5125793", "0.5124296", "0.512007", "0.5118314", "0.5113603", "0.51123595", "0.5110869", "0.5102913", "0.50900143", "0.50867975", "0.5085701", "0.50820196", "0.5072008", "0.50608766", "0.5057151", "0.50542593", "0.5047947", "0.5043184", "0.5036648", "0.5029314", "0.50222516", "0.5014494", "0.4998624", "0.4997353", "0.4996133", "0.49947256", "0.4987394", "0.4987126", "0.49856094", "0.49802554", "0.4976166", "0.49706423", "0.49696136", "0.4966013", "0.49620804", "0.49490616", "0.49473405", "0.4946342", "0.4933388", "0.49299854", "0.49291244", "0.4926772", "0.4921485", "0.49190614", "0.49175975", "0.4909705", "0.49078944", "0.49067125", "0.49038061", "0.4902621", "0.49009615", "0.4900754", "0.4894768", "0.4892504", "0.48903897", "0.48883164", "0.48867753", "0.48805517", "0.48792693", "0.4870382", "0.4862485", "0.48618397", "0.48594952", "0.48589146", "0.48573348", "0.4849585", "0.48456255", "0.48399806", "0.4837185", "0.48316634", "0.4831372", "0.48277476", "0.48231986" ]
0.56515837
3
Calculate dawn time in the UTC timezone.
def dawn_utc(self, date, latitude, longitude):
    julianday = self._julianday(date.day, date.month, date.year)

    if latitude > 89.8:
        latitude = 89.8

    if latitude < -89.8:
        latitude = -89.8

    t = self._jday_to_jcentury(julianday)
    eqtime = self._eq_of_time(t)
    solarDec = self._sun_declination(t)

    try:
        hourangle = self._hour_angle_sunrise(latitude, solarDec)
    except:
        raise AstralError('Sun remains below horizon on this day, at this location.')

    delta = longitude - degrees(hourangle)
    timeDiff = 4.0 * delta
    timeUTC = 720.0 + timeDiff - eqtime

    newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)
    eqtime = self._eq_of_time(newt)
    solarDec = self._sun_declination(newt)
    hourangle = self._hour_angle_dawn(latitude, solarDec, self._depression)
    delta = longitude - degrees(hourangle)
    timeDiff = 4 * delta
    timeUTC = 720 + timeDiff - eqtime

    timeUTC = timeUTC/60.0
    hour = int(timeUTC)
    minute = int((timeUTC - hour) * 60)
    second = int((((timeUTC - hour) * 60) - minute) * 60)

    if second > 59:
        second -= 60
        minute += 1
    elif second < 0:
        second += 60
        minute -= 1

    if minute > 59:
        minute -= 60
        hour += 1
    elif minute < 0:
        minute += 60
        hour -= 1

    if hour > 23:
        hour -= 24
        date += datetime.timedelta(days=1)
    elif hour < 0:
        hour += 24
        date -= datetime.timedelta(days=1)

    dawn = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)

    return dawn
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dawn(self, date=None, local=True):\n\n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n dawn = self.astral.dawn_utc(date, self.latitude, self.longitude)\n\n if local:\n return dawn.astimezone(self.tz) \n else:\n return dawn", "def get_unixtime(self):\n if not self.Complete():\n raise DateTimeError(\"get_unixtime requires complete timepoint\")\n zoffset = self.time.GetZoneOffset()\n if zoffset is None:\n raise DateTimeError(\"get_unixtime requires timezone\")\n elif zoffset == 0:\n zt = self\n else:\n zt = self.ShiftZone(zDirection=0)\n days = zt.date.GetAbsoluteDay() - EPOCH.date.GetAbsoluteDay()\n seconds = zt.time.GetTotalSeconds() - EPOCH.time.GetTotalSeconds()\n return 86400 * days + seconds", "def utcTime():\r\n return calendar.timegm(time.gmtime())", "def dusk_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dusk(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n dusk = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dusk", "def _clock_time(self):\n return self._shifted_time % (24*3600)", "def start_hour_utc(self) -> int:\n return pulumi.get(self, \"start_hour_utc\")", "def get_chime_time(self):\n actual_time = datetime(year=self.start_time.year, month=self.start_time.month, day=self.start_time.day,\n hour=self.start_time.hour, minute=0, second=0, microsecond=0)\n if self.start_time.minute > 30:\n actual_time = actual_time + timedelta(hours=1)\n return actual_time", "def get_utc_offset():\n timedelta = datetime.datetime.now() - datetime.datetime.utcnow()\n # XXX: `return -time.timezone`?\n return timedelta.total_seconds()", "def latest_synop_time()-> datetime:\n utc = datetime.utcnow()\n\n if utc.hour < 1:\n utc = utc - timedelta(days=1)\n utc = utc.replace(hour=18)\n elif utc.hour < 7:\n utc = utc.replace(hour=0)\n elif utc.hour < 13:\n utc = utc.replace(hour=6)\n elif utc.hour < 19:\n utc = utc.replace(hour=12)\n else:\n utc = utc.replace(hour=18)\n\n utc.replace(minute=0, second=0)\n return utc", "def _clock_day(self):\n return int(self._shifted_time / 86400)", "def _get_tz():\n return 'UTC'", "async def time(self) -> dt.time:\n now = await self.AD.sched.get_now()\n return now.astimezone(self.AD.tz).time()", "def uptime(self) -> 
timedelta:\n return timedelta(seconds=int(time() - self.start_time))", "def itow2utc(itow: int) -> datetime.time:\r\n\r\n utc = datetime(1980, 1, 6) + timedelta(seconds=(itow / 1000) - (35 - 19))\r\n return utc.time()", "def timezone():\n \n pass", "def schedule(self):\n\n crontab = self._crontab\n return datetime.now() + timedelta(\n seconds=math.ceil(\n crontab.next(default_utc=False)\n )\n )", "def Timer():\n ltime = time.localtime()\n h, m, s = ltime[3:6]\n return h * 3600.0 + m * 60.0 + s", "def get_time():\n\teastern = timezone('US/Eastern')\n\tnow = datetime.datetime.now(eastern).time()\n\treturn(now)", "def nowUTC():\n return datetime.datetime.now(pytz.utc)", "def calculate_time(start_time):\r\n return round(time() - start_time, 2)", "def getUtcSeconde(self) -> int:\n ...", "def run_hour(self) -> int:\n return self.timestamp.hour", "def timecalc():\n print(\"timecalc started. Ain't nobody got TIME for: \\n\")\n if len(sys.argv) == 2:\n print(\"single input argument, assuming this is a UTC epoch timestamp in ms\")\n dt = int(sys.argv[1])\n dt = datetime.datetime.utcfromtimestamp(dt / 1000.0)\n else:\n if \":\" in sys.argv[2]:\n dt = sys.argv[1] + \" \" + sys.argv[2]\n dt = datetime.datetime.strptime(dt, DATETIME_FORMAT)\n else:\n print(\"timecalc requires time in either UTC epoch time or datetime in {}\".format(DATETIME_FORMAT))\n raise ValueError('UTC datetime needs to be {}'.format(DATETIME_FORMAT))\n\n gpstime = utctoweekseconds(dt)\n towsec = gpstime[2] + (gpstime[3] / 1000000.0)\n\n print(\"UTC DATETIME: {} \\nGPS WEEK: {}, TOW: {}\".format(dt, gpstime[0], towsec))", "def get_time(self):\r\n\t\tactual_time = '{}, {} de {} del {} ({}:{} {} (UTC))'\r\n\t\tda_taim = actual_time.format(self.wday, self.day, self.mon,\r\n\t\t\t\t\t\t\t\t\t self.year, self.hour, self.min,\r\n\t\t\t\t\t\t\t\t\t self.day_or_night)\r\n\t\treturn da_taim", "def _time_ms(self, dt):\n if dt.tzinfo is None:\n dt = dt.replace(tzinfo=pytz.utc)\n return int((dt - self._EPOCH).total_seconds() * 1000)", "def get_epoch_time(utc_datetime=None):\n if not utc_datetime:\n utc_datetime = datetime.datetime.utcnow()\n return math.ceil((utc_datetime - EPOCH_START).total_seconds())", "def py2_earth_hours_left(start_date=BITE_CREATED_DT):\n\n td = (PY2_DEATH_DT - start_date)\n return round((td.days*24 + td.seconds/3600), 1)", "def uptime():\n seconds = timedelta(seconds=int(time.time() - start_time))\n d = datetime(1, 1, 1) + seconds\n return(\"%dD:%dH:%dM:%dS\" % (d.day-1, d.hour, d.minute, d.second))", "def get_time():\n # Use this one for production:\n now_time = pendulum.now(tz=pendulum.timezone(\"America/New_York\"))\n # Use this one for testing and modify as needed:\n # now_time = pendulum.datetime(2019, 7, 21, 20, 00, tz='America/New_York')\n\n return now_time", "def brasilia_day():\n return (dt.datetime.utcnow() + dt.timedelta(hours=-3)).replace(hour=0, minute=0, second=0, microsecond=0)", "def what_night_is_it():\n d = datetime.datetime.utcnow() - datetime.timedelta(7 / 24 + 0.5)\n tonight = int(d.strftime('%Y%m%d'))\n return tonight", "def start_hour_utc(self) -> Optional[int]:\n return pulumi.get(self, \"start_hour_utc\")", "def _local_time_offset():\n if time.localtime().tm_isdst and time.daylight:\n return -time.altzone\n else:\n return -time.timezone", "def copenhagen_time(*args):\n _ = args # to explicitly remove warning\n utc_dt = pytz.utc.localize(datetime.utcnow()) + timedelta(minutes=5, seconds=30)\n local_timezone = pytz.timezone(\"Europe/Copenhagen\")\n converted = utc_dt.astimezone(local_timezone)\n 
return converted.timetuple()", "def copenhagen_time(*args):\n _ = args # to explicitly remove warning\n utc_dt = pytz.utc.localize(datetime.utcnow()) + timedelta(minutes=5, seconds=30)\n local_timezone = pytz.timezone(\"Europe/Copenhagen\")\n converted = utc_dt.astimezone(local_timezone)\n return converted.timetuple()", "def localtime2utc(date):\n return date + (datetime.utcnow() - datetime.now())", "def get_tz_offset(self) -> float:\n return self.AD.tz.utcoffset(self.datetime()).total_seconds() / 60", "def get_current_india_time():\n india_offset = datetime.timedelta(hours=5, minutes=30)\n in_time = datetime.datetime.utcnow() + india_offset\n return in_time", "def duration(self):\n delta = self.occurrence.end - self.occurrence.start\n real_hours = delta.days * 24 + delta.seconds / (60.0 * 60.0)\n\n adjusted_hours = attendance_settings.HOUR_MULTIPLIER * real_hours\n\n return adjusted_hours", "def utctime(stamp):\n return stamp + utc_offset", "def now_utc() -> datetime:\n return datetime.now(timezone.utc)", "def uptime(start_time):\n return datetime.datetime.utcnow() - datetime.datetime.utcfromtimestamp(start_time)", "def expiration_time_utc(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"expiration_time_utc\")", "def _get_sleep_time(self, start_date, end_date):\n if start_date.minute == end_date.minute:\n return 60 - end_date.second - (1 - start_date.microsecond / 1000000)\n\n return 0", "def get_timezone():\n return dates.get_timezone(_get_tz())", "def get_utc_now():\n return datetime.datetime.utcnow().replace(tzinfo=pytz.timezone(\"UTC\"))", "def as_timezone(self, tz):\n offset = tz_to_offset(tz)\n if TIMEZONES.get(tz, {}).get('dst') and is_dst(self):\n offset += 1\n offset_secs = offset * 3600\n return DateTime(*time.gmtime(self.unixtime() + offset_secs))", "def solar_noon_utc(self, date, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n newt = self._jday_to_jcentury(julianday + 0.5 + longitude / 360.0)\n\n eqtime = self._eq_of_time(newt)\n timeUTC = 720.0 + (longitude * 4.0) - eqtime\n\n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n noon = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return noon", "def py2_earth_hours_left():\r\n left = PY2_RETIRED_DT - BITE_CREATED_DT\r\n left_earth_hours = round(left.total_seconds()/3600,2)\r\n\r\n return left_earth_hours", "def getUnixTime(pool=\"time.apple.com\"):\n time_offset = ntplib.NTPClient().request(pool).offset\n return float(time.time()+time_offset)", "def Date_calibration(DD, s, TZ):\r\n \r\n dif_hour_tz = dt.datetime.now(tz=pytz.timezone(str(TZ))).hour - dt.datetime.now(tz=pytz.utc).hour\r\n #If s = 1 = True\r\n if(s):\r\n #Change the datetime to UTC\r\n DD = DD - dt.timedelta(hours=dif_hour_tz)\r\n else:\r\n #Change the datetime to the original time\r\n DD = DD + dt.timedelta(hours=dif_hour_tz)\r\n \r\n return DD", "def timezone():\n\n return time.timezone", "def get_duration(self):\n return (self.stop_day - self.start_day) * (24 * 60) \\\n + (self.stop_hour - self.start_hour) * 60", "def Utc(self):\n return _EPOCH + 
datetime.timedelta(days=self.ut)", "def time_run(self):\n if self._time_run is None:\n self._time_run = datetime.now(timezone.utc)\n return self._time_run.isoformat()", "def observation_time(self):\n epoch = float(self._current_observation['observation_epoch'])\n return datetime.datetime.utcfromtimestamp(epoch)", "def utc_now():\n realtime = datetime.utcnow()\n realtime = pytz.utc.localize(realtime)\n return realtime", "def utctime(self) -> datetime:\n return datetime.utcfromtimestamp(float(self.ns_since_epoch) / 1e9)", "def test_time_to_commute_retrieved_from_google_api_in_posix_is_converted_to_utc(self):\n result = calculate_time_of_commute(\n origin_name='Gatwick Airport',\n destination_name='Kings Cross St Pancras',\n )\n assert type(result) == datetime\n assert result.tzinfo is None # Assert it is a naive datetime", "async def _timezone(self, ctx: commands.Context, tz: str = None):\n self.check_if_exist(ctx.guild)\n\n self.daily_guilds[str(ctx.guild.id)][\"tz\"] = tz\n self.daily_info.update(\"guilds\", self.daily_guilds)\n await ctx.reply(\"New daily timezone is {0}\".format(tz))", "def GetUtcTime(dt, tz):\n #Enriches the given time with the given timezone. For example 5 pm is enriched\n #to 5 pm EST, taking into account DST.\n local_time = tz.localize(dt)\n #We convert to utc\n utc_time = local_time.astimezone(pytz.utc)\n #We remove the timezone information ( = naive time)\n return utc_time.replace(tzinfo=None)", "def utcnow_ts():\r\n return calendar.timegm(utcnow().timetuple())", "def unit_day(self):\n return (self.time_base * 60.0) * 24.0", "def get_time(self) -> int:\n t = str(self.eval(\"pyb.RTC().datetime()\").encode(\"utf-8\"))[1:-1].split(\", \")\n return int(t[4]) * 3600 + int(t[5]) * 60 + int(t[6])", "def _unit_day(self):\n return (self.time_base * 60.0) * 24.0", "def localtime(stamp):\n return stamp - utc_offset", "def _getDailyPlayHours(self):\n if self.isSessionStartedThisDay():\n return self.__stats.dailyPlayHours[0] + (time_utils._g_instance.serverUTCTime - self.__sessionStartedAt)\n else:\n return self.__stats.dailyPlayHours[0] + time_utils._g_instance.serverRegionalTime % 86400", "def team_creation_time_utc(self) -> str:\n return pulumi.get(self, \"team_creation_time_utc\")", "def get_time_of_the_day(self, ts):\n h, m, s = ts.hour, ts.minute, ts.second\n # Convert the hours, minutes, and seconds to seconds: referenced to 0 AM\n t = int(h) * 3600 + int(m) * 60 + int(s)\n if t >= 0:\n return t\n else:\n return t + 24*3600", "def get_nightly_end_time():\n month = time.localtime().tm_mon\n if np.abs(month - 6) > 2:\n end_night = 8\n else:\n end_night = 7\n return end_night # local Tucson time the following morning", "async def get_now(self) -> dt.datetime:\n now = await self.AD.sched.get_now()\n return now.astimezone(self.AD.tz)", "def get_time(cls):\n now = rospy.Time.now()\n return now.secs + now.nsecs*(10**-9) # time in seconds", "def get_run_time(start, end):\n start_hour, start_min, start_sec = start.split(':')\n end_hour, end_min, end_sec = end.split(':')\n\n hour_diff = int(end_hour) - int(start_hour)\n min_diff = int(end_min) - int(start_min)\n sec_diff = int(end_sec) - int(start_sec)\n\n total_diff = hour_diff * 60 * 60 + min_diff * 60 + sec_diff\n\n return total_diff", "def createTime(days=1):\r\n return (int(round(time.time() * 1000))-(days*24*60*60*1000))", "def get_server_uptime(self):\n return time.time() - self.init_time", "def now_dt(tz='UTC'):\n if tz != 'UTC':\n raise NotImplementedError()\n return datetime.datetime.utcnow().replace(tzinfo = pytz.utc)", 
"def get_nightly_start_time():\n return 14 # 2PM local Tucson time", "def utcnow(cls):\n t = _time.time()\n return cls.utcfromtimestamp(t)", "def utcnow():\n return datetime.utcnow().replace(tzinfo=UTC)", "def stamp_time(utc):\n return utc.replace(hour=15, minute=30, second=0, microsecond=0)", "def _getUpTime(self):\n diff = (datetime.datetime.now() - self._startTime).__str__()\n return diff[:diff.find('.')]", "def time_zone():\n return timezone('Etc/GMT-10')", "def utc():\n return date_from_utc(dt.utcnow())", "def pending_time(self):\n now = datetime.datetime.utcnow().replace(tzinfo=utc)\n timediff = now - self.time_requested\n return timediff", "def utcnow() -> datetime.datetime:\n return datetime.datetime.utcnow().replace(tzinfo=pytz.UTC)", "def get_now_hour_utc(no_microseconds=True):\n if no_microseconds:\n return datetime.time.utcnow().replace(microsecond=0).time()\n else:\n return datetime.time.utcnow().time()", "def get_tz_offset_seconds() -> float:\n import time\n import datetime\n tval = time.time()\n utc_offset = (datetime.datetime.fromtimestamp(tval) -\n datetime.datetime.utcfromtimestamp(tval)).total_seconds()\n return utc_offset", "def _get_utcoffset(self, tzname):\n # SQL Server has no built-in support for tz database\n # see http://blogs.msdn.com/b/sqlprogrammability/archive/2008/03/18/using-time-zone-data-in-sql-server-2008.aspx\n zone = pytz.timezone(tzname)\n # no way to take DST into account at this point\n now = datetime.datetime.now()\n delta = zone.localize(now, is_dst=False).utcoffset()\n return delta.days * 86400 + delta.seconds", "def time_to_live_utc(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"time_to_live_utc\")", "def time_for_travel(self):\n return great_circle(self.pickupcoords, self.dropoffcoords).miles * 3600 / 25", "def time(self):\n return self.raw() % (60 * 24)", "def __calculate_time_cs(self, jiffies):\n\n return int((jiffies * 100.0) / self._jiffies_per_sec)", "def setup_datetime(self):\n \n current_date_time = datetime.now()\n timezone_diference = timedelta(hours=-3)\n return timezone(timezone_diference), current_date_time", "def tt_utc_diff(jd_ut):\n prev_offset = None\n for start, offset in TAI_UTC_DIFF:\n if jd_ut < start:\n if prev_offset is None:\n t = jd_ut - JD_AT_1_JAN_2000 / DAYS_PER_CENTURY\n return 64.184 + 59 * t - 51.2 * t ** 2 - 67.1 * t ** 3 - 16.4 * t ** 4\n else:\n break\n else:\n prev_offset = offset\n return prev_offset + 32.184", "def get_time_delta_in_hours(start, end):\n dhour = end.hour - start.hour\n dmin = end.minute - start.minute\n dsec = end.second - start.second\n dtime = timedelta(hours=dhour, minutes=dmin, seconds=dsec) # NOTE rounds to nearest second\n # print start, end, dtime\n return float(dtime.seconds) / (60*60)", "def GetTimeAndZone(self):\n return self.hour, self.minute, self.second, self.zDirection, self.zOffset", "def _getUTC(self, config = {} ):\n # Default implementation: get system local time\n return datetime.datetime.utcnow()", "def get_site_last_updated(cls, utc_now=None):\n if utc_now is None:\n utc_now = arrow.utcnow()\n\n today_update_hour = cls.stamp_time(utc_now)\n if today_update_hour > utc_now:\n update_time = today_update_hour.shift(days=-1)\n else:\n update_time = today_update_hour\n\n return update_time", "def timetz(self):\n return time(\n self.hour,\n self.minute,\n self.second,\n self.microsecond,\n self._tzinfo,\n fold=self.fold,\n )", "def calculate_total_minutes_now(self):\n total_seconds = (timezone.now() - self.login_time).total_seconds()\n return total_seconds" ]
[ "0.628965", "0.59754497", "0.58763033", "0.5851417", "0.5806555", "0.5735901", "0.56585526", "0.56538594", "0.55857354", "0.55760634", "0.5519843", "0.55160165", "0.55128175", "0.55116653", "0.53910935", "0.5379949", "0.536428", "0.5360612", "0.53601545", "0.5338035", "0.5333986", "0.5324582", "0.5288426", "0.527294", "0.52505535", "0.5246069", "0.52418363", "0.52414656", "0.52271163", "0.5214999", "0.52131325", "0.5209688", "0.52088684", "0.52062863", "0.52062863", "0.51951957", "0.51951486", "0.51909506", "0.51862437", "0.5146617", "0.51336724", "0.5125584", "0.5114995", "0.5112299", "0.51056844", "0.5104671", "0.50985885", "0.5095249", "0.5094172", "0.5092477", "0.5090137", "0.5077453", "0.50768644", "0.50738186", "0.5072691", "0.5072414", "0.50676584", "0.5063134", "0.506038", "0.5054606", "0.50488", "0.5044797", "0.50415635", "0.5035199", "0.5031329", "0.5020947", "0.50009114", "0.49904305", "0.4984998", "0.4984442", "0.4981947", "0.4981112", "0.4978319", "0.49723777", "0.4970616", "0.49687546", "0.4964726", "0.49642536", "0.49623314", "0.49605167", "0.49547213", "0.4951969", "0.4951615", "0.49464187", "0.4939844", "0.4938112", "0.4933412", "0.4926512", "0.49211293", "0.4916954", "0.49127492", "0.49122688", "0.4907522", "0.4904641", "0.49046203", "0.4900996", "0.48975736", "0.48969132", "0.4895524", "0.48913515" ]
0.76470286
0
Calculate sunrise time in the UTC timezone.
def sunrise_utc(self, date, latitude, longitude): julianday = self._julianday(date.day, date.month, date.year) t = self._jday_to_jcentury(julianday) eqtime = self._eq_of_time(t) solarDec = self._sun_declination(t) try: hourangle = self._hour_angle_sunrise(latitude, solarDec) except: raise AstralError('Sun remains below horizon on this day, at this location.') delta = longitude - degrees(hourangle) timeDiff = 4.0 * delta timeUTC = 720.0 + timeDiff - eqtime newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0) eqtime = self._eq_of_time(newt) solarDec = self._sun_declination(newt) hourangle = self._hour_angle_sunrise(latitude, solarDec) delta = longitude - degrees(hourangle) timeDiff = 4 * delta timeUTC = 720 + timeDiff - eqtime timeUTC = timeUTC/60.0 hour = int(timeUTC) minute = int((timeUTC - hour) * 60) second = int((((timeUTC - hour) * 60) - minute) * 60) if second > 59: second -= 60 minute += 1 elif second < 0: second += 60 minute -= 1 if minute > 59: minute -= 60 hour += 1 elif minute < 0: minute += 60 hour -= 1 if hour > 23: hour -= 24 date += datetime.timedelta(days=1) elif hour < 0: hour += 24 date -= datetime.timedelta(days=1) sunrise = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc) return sunrise
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_sun_rise_set_time(self, sun_time):\n if sun_time:\n return datetime.fromtimestamp(sun_time).strftime(self.time_format)\n return sun_time", "def alt_sunrise(cls, date):\n rise = cls.UJJAIN.dawn(date, angle(0, 47, 0))\n return 1/24 * 1/60 * iround(rise * 24 * 60)", "def sunrise(cls, date):\n return (date + Clock.days_from_hours(6) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n ((1577917828/1582237828 / 360) *\n (cls.ascensional_difference(date, cls.LOCATION) +\n (1/4 * cls.solar_sidereal_difference(date)))))", "def computeSunTime(self, latitude, longitude, startDate, endDate): \n self.sun = sun(lat=latitude, long=longitude)\n dateTime = datetime.datetime.combine(startDate, datetime.time(hour=8))\n while dateTime.date() <= endDate: \n daytimeStart, daytimeEnd = self.computeDaytimeStartEnd(dateTime)\n self.sunriseTimeDict[dateTime.date()] = daytimeStart\n self.sunsetTimeDict[dateTime.date()] = daytimeEnd\n dateTime += self.oneDayDelta", "def sun_set_rise_times(self, date=None):\n rstimes = (self.sunset(date=date),\n self.evening_twilight_12(date=date),\n self.evening_twilight_18(date=date),\n self.morning_twilight_18(date=date),\n self.morning_twilight_12(date=date),\n self.sunrise(date=date))\n return rstimes", "async def sunrise(self, aware=False, today=False, days_offset=0) -> dt.datetime:\n return await self.AD.sched.sunrise(aware, today=today, days_offset=days_offset)", "def computeDaytimeStartEnd(self, date):\n dayStartTime = datetime.datetime.combine(date.date(), datetime.time())\n #compute sunrise time for that date\n (h, m, s) = self.sun.sunrise(when=date)\n time_delta = datetime.timedelta(hours=h, minutes=m, seconds=s)\n sunrise_datetime = dayStartTime + time_delta\n #print(sunrise_datetime) \n #compute sunset time for that date \n (h, m, s) = self.sun.sunset(when=date)\n time_delta = datetime.timedelta(hours=h, minutes=m, seconds=s)\n sunset_datetime = dayStartTime + time_delta\n \n return (sunrise_datetime, sunset_datetime)", "def sunset_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n sunset = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return sunset", "def sunrise(self, date=None, local=True):\n\n if self.astral is None:\n self.astral = Astral()\n \n if date is None:\n date = 
datetime.date.today()\n\n sunrise = self.astral.sunrise_utc(date, self.latitude, self.longitude)\n\n if local:\n return sunrise.astimezone(self.tz) \n else:\n return sunrise", "def GetSunriseSunset(latitude_deg, longitude_deg, utc_datetime, timezone):\n\n # Day of the year\n day = solar.GetDayOfYear(utc_datetime)\n\n # Solar hour angle\n SHA = ((timezone)* 15.0 - longitude_deg)\n\n # Time adjustment\n TT = (279.134+0.985647*day)*math.pi/180\n\n # Time adjustment in hours\n time_adst = ((5.0323 - 100.976*math.sin(TT)+595.275*math.sin(2*TT)+\n 3.6858*math.sin(3*TT) - 12.47*math.sin(4*TT) - 430.847*math.cos(TT)+\n 12.5024*math.cos(2*TT) + 18.25*math.cos(3*TT))/3600)\n \n # Time of noon\n TON = (12 + (SHA/15.0) - time_adst)\n \n sunn = (math.pi/2-(23.45*math.pi/180)*math.tan(latitude_deg*math.pi/180)*\n math.cos(2*math.pi*day/365.25))*(180/(math.pi*15))\n\n # Sunrise_time in hours\n sunrise_time = (TON - sunn + time_adst)\n \n # Sunset_time in hours\n sunset_time = (TON + sunn - time_adst) \n\n sunrise_time_dt = date_with_decimal_hour(utc_datetime, sunrise_time) \n sunset_time_dt = date_with_decimal_hour(utc_datetime, sunset_time) \n\n return sunrise_time_dt, sunset_time_dt", "def estimate_sunrise_sunset(self, date):\n\n if self.source_type != 'solar':\n raise ValueError(\"You can only estimate sunrise and sunset for \"\n \"solar sources.\")\n\n date = pd.Timestamp(date)\n historic_data = self.data\n # The range is 14 days ago to the end of yesterday\n start_date = date - datetime.timedelta(days=14)\n end_date = date - datetime.timedelta(hours=1)\n\n # We grab all hours where actual power is greater than 0\n relevant_data = historic_data[start_date:end_date]\n daylight_data = relevant_data[relevant_data['actuals'] > 0]\n\n # We do this to stop a warning from appearing, we know it's a copy\n daylight_data.is_copy = False\n daylight_data['hours'] = daylight_data.index.hour\n\n # Find the min and max hour for each day where we have positive\n # observed power generation.\n sunrises = daylight_data.groupby(daylight_data.index.date).min()['hours']\n sunsets = daylight_data.groupby(daylight_data.index.date).max()['hours']\n\n # We round in order to have an integer value for sunrise and sunset.\n average_sunrise = int(max(round(sunrises.mean()) - 1, 0))\n average_sunset = int(min(round(sunsets.mean()) + 1, 23))\n\n return average_sunrise, average_sunset", "def sundial_time(cls, tee):\n date = Clock.fixed_from_moment(tee)\n time = mod(tee, 1)\n q = ifloor(4 * time)\n if q == 0:\n a = cls.sunset(date - 1)\n b = cls.sunrise(date)\n t = Clock.days_from_hours(-6)\n elif q == 3:\n a = cls.sunset(date)\n b = cls.sunrise(date + 1)\n t = Clock.days_from_hours(18)\n else:\n a = cls.sunrise(date)\n b = cls.sunset(date)\n t = Clock.days_from_hours(6)\n return a + (2 * (b - a) * (time - t))", "def sunrise(self, date=None):\n self.site.horizon = self.horizon\n self._set_site_date(date)\n r_date = self.site.next_rising(self.sun)\n r_date = self.date_to_local(r_date.datetime())\n return r_date", "def sun_utc(self, date, latitude, longitude):\n \n dawn = self.dawn_utc(date, latitude, longitude)\n sunrise = self.sunrise_utc(date, latitude, longitude)\n noon = self.solar_noon_utc(date, longitude)\n sunset = self.sunset_utc(date, latitude, longitude)\n dusk = self.dusk_utc(date, latitude, longitude)\n \n return {'dawn': dawn, 'sunrise': sunrise, 'noon': noon, 'sunset': sunset, 'dusk': dusk}", "def rahukaalam_utc(self, date, latitude, longitude):\n \n if date is None:\n date = datetime.date.today()\n\n try:\n sunrise = 
self.sunrise_utc(date, latitude, longitude)\n sunset = self.sunset_utc(date, latitude, longitude)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n \n octant_duration = (sunset - sunrise) / 8\n\n # Mo,Sa,Fr,We,Th,Tu,Su\n octant_index = [1,6,4,5,3,2,7]\n \n weekday = date.weekday()\n octant = octant_index[weekday]\n \n start = sunrise + (octant_duration * octant)\n end = start + octant_duration\n \n return {'start': start, 'end': end}", "def estimate_sunrise_sunset(self, date, verbose=True):\n if self.source_type != 'solar':\n raise ValueError(\"You can only estimate sunrise and sunset for \"\n \"solar sources.\")\n\n date = pd.Timestamp(date)\n\n if self.diurnal_pattern is None:\n if verbose:\n print(\"Warning: Source {} has no diurnal pattern, estimating \"\n \"sunrise and sunset using average of past data.\"\n .format(self.name), file=sys.stderr)\n return Source.estimate_sunrise_sunset(self, date)\n\n if verbose:\n print(\"{} {}: Using Diurnal Pattern to estimate sunrise and sunset\"\n .format(self.name, date.date()))\n\n diurnal_pattern = self.diurnal_pattern\n daily_pattern = diurnal_pattern[date:date+datetime.timedelta(hours=23)]\n\n sunrise, sunset = None, None\n\n # This will walk through finding first sun hour and first night hour\n for hour, pattern in enumerate(daily_pattern.values):\n if sunrise is None and pattern > 0:\n sunrise = hour\n\n # If sun has risen, and we have not found night and we reach a 0\n if sunrise is not None and sunset is None and pattern == 0:\n sunset = hour\n\n if sunrise is None and sunset is None:\n raise ValueError(\"No solar power was generated on {}\".format(date))\n\n return sunrise, sunset", "def dawn_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n \n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dawn(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n \n dawn = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dawn", "def _sunrise_sunset(self, seconds=None, milliseconds=None, hour=None, freq=None, temp_start=None, temp_end=None, setting=True):\n FUDGE_FACTOR = 0.86\n if hour==None:\n # Work out what the defaults should be\n ## MOVE IT INSIDE THE Override values.\n t0 = temp_start.split('K')[0]\n t1 = temp_end.split('K')[0]\n if t0 > t1:\n temp_step = -100\n x_start 
= 0\n x_step_amount = 1\n else:\n temp_step = 100\n x_start = 60\n x_step_amount = -1\n temp_0 = int(t0)\n temp_n = int(t1)\n # You can override these defaults if either temp_start or temp_end is set\n if temp_start:\n try:\n _exists = NAMED_COLOURS[temp_start.lower()]\n except (TypeError,ValueError): # Means the starting temp has NOT been provided, use default\n pass\n except KeyError:\n logging.warning(\"Sunrise/sunset: Your starting colour temperature '{}' is not a valid colour temperature\".format(temp_start))\n if temp_end:\n try:\n _exists = NAMED_COLOURS[temp_end.lower()]\n except (TypeError, ValueError): # Means the ending temp has NOT been provided, use default\n pass\n except KeyError:\n logging.warning(\"Sunrise/sunset: Your ending colour temperature '{}' is not a valid colour temperature\".format(temp_end))\n\n #Add in a fudge factor to cater for CPU doing other things:\n #Calculate our z scaling factor:\n target_time = self.clean_time_in_milliseconds(seconds, milliseconds, default_seconds=1, minimum_milliseconds=1000)\n z_factor = (target_time*FUDGE_FACTOR) / 2.564949357\n x_step = x_start\n #And run the loop\n t1 = time.time()\n check = True #We only check the current values on the first run\n for temp in xrange(temp_0,temp_n,temp_step):\n if self._sequence_stop_signal: #Bail if sequence should stop\n return None\n k = u\"%sk\" % temp\n self.fade(k, fade_time=((100+z_factor)/(65-x_step)), check=check) #ms, slows down as sunset progresses\n x_step += x_step_amount\n check=False\n t2 = time.time()\n logging.info(\"%ss, target=%ss\" % ((t2-t1),target_time/1000.0))\n else:\n temp_0=temp_start[0].split('K')[0]\n\t temp_n=temp_end[0].split('K')[0]\n if self.p_alarm != []:\n self.teardown_alarm()\n process_alarm=[]\n for tt in range(0,len(hour)):\n milliseconds=0\n proc_hour=hour[tt]\n\t\talarm_arg=(proc_hour,temp_0,temp_n,FUDGE_FACTOR,freq,seconds[tt],milliseconds)\n \n process_alarm.append(Process(target=self.schedule_alarm,args=alarm_arg))\n [pp.start() for pp in process_alarm] # Start processes in the background which contain the schedule of the alarm\n self.p_alarm=process_alarm", "def sunrise(self):\r\n try:\r\n return str(self.connect()['sys']['sunrise'])\r\n except:\r\n return '@weather_sunrise'", "def dusk_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dusk(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += 
datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n dusk = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dusk", "def sun(self, date=None, local=True):\n \n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n sun = self.astral.sun_utc(date, self.latitude, self.longitude)\n\n if local:\n for key, dt in sun.items():\n sun[key] = dt.astimezone(self.tz)\n\n return sun", "def mask_nighttime(lon, lat, date=date, mask_daytime=mask_daytime,\n ref_date=datetime.datetime(1899, 12, 31, 12),\n buffer_hours=buffer_hours, debug=False):\n # --- get lat and lon values from columns\n if debug:\n print((\"--- (s4-1) %s seconds ---\" % (time.time() - start_time)))\n # --- get sunrise and sunset for location\n o = ephem.Observer()\n # set lat (decimal?), lon (decimal?), and date (UTC)\n o.lat = str(lat)\n o.long = str(lon)\n o.date = date\n # planetary body\n s = ephem.Sun()\n if debug:\n print((\"--- (s4-2) %s seconds ---\" % (time.time() - start_time)))\n\n # Compute sun vs observer\n s.compute()\n if debug:\n print((\"--- (s4-3) %s seconds ---\" % (time.time() - start_time)))\n\n # Work out if day or night based on sunrises and sunsets\n mask_value = 0\n try:\n\n # get sunrise time and date\n next_rising = o.next_rising(s)\n next_setting = o.next_setting(s)\n\n # convert to datetime.datetime\n next_rising = add_days(ref_date, next_rising)\n next_setting = add_days(ref_date, next_setting)\n\n # did the sun last rise or set? (inc. any adjustments)\n sun_last_rose = False\n if next_setting < next_rising:\n sun_last_rose = True\n\n # Add buffer to rising/setting if provided with buffer_hours\n if buffer_hours != 0:\n\n # Calculate last rise\n previous_rising = o.previous_rising(s)\n # convert to datetime.datetime\n previous_rising = add_days(ref_date, previous_rising)\n # Calculate last setting\n previous_setting = o.previous_setting(s)\n # convert to datetime.datetime\n previous_setting = add_days(ref_date, previous_setting)\n\n # Calculate absolute difference\n time_from_rise = (date-previous_rising).total_seconds()\n time_till_set = (date-next_setting).total_seconds()\n time_from_set = (date-previous_setting).total_seconds()\n time_till_rise = (date-next_rising).total_seconds()\n\n # If absolutely difference less than buffer\n if abs(time_from_rise)/60./60. <= buffer_hours:\n mask_value = 1\n elif abs(time_till_set)/60./60. <= buffer_hours:\n mask_value = 1\n elif abs(time_from_set)/60./60. < buffer_hours:\n mask_value = 1\n elif abs(time_till_rise)/60./60. < buffer_hours:\n mask_value = 1\n\n # --- Check if daytime or nighttime and mask if condition met.\n if sun_last_rose:\n if mask_daytime:\n # ... and has not set yet, it must be daytime\n if (date < next_setting):\n mask_value = 1\n\n # if the sun last set... (mask nighttime is default)\n else:\n # if mask nighttime (aka not mask_daytime)\n if not mask_daytime:\n # ... 
and has not risen yet, it must be nighttime\n if (date < next_rising):\n mask_value = 1\n\n # Add gotcha for locations where sun is always up.\n except AlwaysUpError:\n if mask_daytime:\n mask_value = 1\n\n # Add gotcha for locations where sun is always down.\n except NeverUpError:\n if not mask_daytime:\n mask_value = 1\n\n except:\n print('FAIL')\n sys.exit()\n\n # Mask value in array\n return mask_value", "def start_hour_utc(self) -> int:\n return pulumi.get(self, \"start_hour_utc\")", "def sunset(self, seconds=None, milliseconds=None, temp_start=None, temp_end=None):\n return self.run_sequence(self._sunrise_sunset, seconds=seconds, milliseconds=milliseconds, temp_start=temp_start, temp_end=temp_end, setting=True)", "def sunset(cls, date):\n return (date + Clock.days_from_hours(18) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n (((1577917828/1582237828) / 360) *\n (- cls.ascensional_difference(date, cls.LOCATION) +\n (3/4 * cls.solar_sidereal_difference(date)))))", "def solar_noon_utc(self, date, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n newt = self._jday_to_jcentury(julianday + 0.5 + longitude / 360.0)\n\n eqtime = self._eq_of_time(newt)\n timeUTC = 720.0 + (longitude * 4.0) - eqtime\n\n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n noon = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return noon", "def _solar_time(date, lon, lat):\n # IDL is computing time_t as hours and fractional minutes\n time_t = ee.Date(date).get('hour').add(\n ee.Date(date).get('minute').divide(60))\n\n # This will return the hour floating point value\n # time_t = ee.Date(date).get('hour').add(ee.Date(date).getFraction('hour'))\n\n # CGM - DOY and hour could be images in order to make these expressions\n julian = _to_jd(date)\n\n # Sunrise time\n julian_ = time_t.divide(24.0).add(julian)\n j_cen = julian_.add(0.5 - 2451545.0).divide(36525.0)\n # CGM - Does the mod happen before or after the multiply\n lon_sun = j_cen.multiply(0.0003032).add(36000.76983) \\\n .multiply(j_cen).mod(360.0).add(280.46646).subtract(360)\n an_sun = j_cen.multiply(-0.0001537).add(35999.05029) \\\n .multiply(j_cen).add(357.52911)\n ecc = j_cen.multiply(0.0000001267).add(0.000042037) \\\n .multiply(j_cen).multiply(-1).add(0.016708634)\n ob_ecl = j_cen.multiply(-0.001813).add(0.00059) \\\n .multiply(j_cen).add(46.815) \\\n .multiply(j_cen).multiply(-1).add(21.448) \\\n .divide(60.0).add(26).divide(60).add(23)\n ob_corr = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).cos() \\\n .multiply(0.00256).add(ob_ecl)\n var_y = ob_corr.divide(2.0).multiply(deg2rad).tan().multiply(\n ob_corr.divide(2.0).multiply(deg2rad).tan())\n eq_t = (\n lon_sun.multiply(2.0).multiply(deg2rad).sin().multiply(var_y)\n .subtract(an_sun.multiply(deg2rad).sin().multiply(ecc).multiply(2.0))\n .add(an_sun.multiply(deg2rad).sin()\n .multiply(lon_sun.multiply(2.0).multiply(deg2rad).cos())\n .multiply(var_y).multiply(ecc).multiply(4.0))\n .subtract(lon_sun.multiply(4.0).multiply(deg2rad).sin()\n 
.multiply(var_y).multiply(var_y).multiply(0.5))\n .subtract(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(ecc).multiply(ecc).multiply(1.25))\n .multiply(4.0).multiply(rad2deg))\n sun_eq = (\n an_sun.multiply(deg2rad).sin().multiply(\n j_cen.multiply(0.000014).add(0.004817)\n .multiply(j_cen).multiply(-1).add(1.914602))\n .add(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(j_cen.multiply(-0.000101).add(0.019993)))\n .add(an_sun.multiply(3.0).multiply(deg2rad).sin().multiply(0.000289)))\n sun_true = sun_eq.add(lon_sun)\n sun_app = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).sin() \\\n .multiply(-0.00478).subtract(0.00569).add(sun_true)\n\n # CGM - Intentionally not converting back to degrees\n d = ob_corr.multiply(deg2rad).sin() \\\n .multiply(sun_app.multiply(deg2rad).sin()) \\\n .asin()\n\n # CGM - Functions below are lat/lon dependent and can be written as\n # ee.Image expressions\n # CGM - d is still in radians, not converting\n ha_t = lat.expression(\n 'acos((cos(90.833 * pi / 180) / (cos(lat) * cos(d))) - tan(lat) * tan(d))'\n ' * (180 / pi)',\n {'lat': lat, 'd': d, 'pi': math.pi})\n\n # print('\\n{:10s} {:.12f}'.format('julian_', julian_.getInfo()))\n # print('{:10s} {:.12f}'.format('time_t', time_t.getInfo()))\n # print('{:10s} {:.12f}'.format('j_cen', j_cen.getInfo()))\n # print('{:10s} {:.12f}'.format('lon_sun', lon_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('an_sun', an_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('ecc', ecc.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_ecl', ob_ecl.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_corr', ob_corr.getInfo()))\n # print('{:10s} {:.12f}'.format('var_y', var_y.getInfo()))\n # print('{:10s} {:.12f}'.format('eq_t', eq_t.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_eq', sun_eq.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_true', sun_true.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_app', sun_app.getInfo()))\n # print('{:10s} {:.12f}'.format('d', d.getInfo()))\n\n return d, eq_t, ha_t", "def utcTime():\r\n return calendar.timegm(time.gmtime())", "def equation_of_time(cls, date):\n offset = cls.sine(cls.mean_position(date, cls.ANOMALISTIC_YEAR))\n equation_sun = (offset * angle(57, 18, 0) * (14/360 - (abs(offset) / 1080)))\n return ((cls.daily_motion(date) / 360) * (equation_sun / 360) * cls.SIDEREAL_YEAR)", "def get_unixtime(self):\n if not self.Complete():\n raise DateTimeError(\"get_unixtime requires complete timepoint\")\n zoffset = self.time.GetZoneOffset()\n if zoffset is None:\n raise DateTimeError(\"get_unixtime requires timezone\")\n elif zoffset == 0:\n zt = self\n else:\n zt = self.ShiftZone(zDirection=0)\n days = zt.date.GetAbsoluteDay() - EPOCH.date.GetAbsoluteDay()\n seconds = zt.time.GetTotalSeconds() - EPOCH.time.GetTotalSeconds()\n return 86400 * days + seconds", "def start_hour_utc(self) -> Optional[int]:\n return pulumi.get(self, \"start_hour_utc\")", "def night_center(self, date=None):\n sunset = self.sunset(date=date)\n sunrise = self.sunrise(date=sunset)\n center = sunset + timedelta(0, (sunrise - sunset).total_seconds() / 2.0)\n center = self.date_to_local(center)\n return center", "def sunset(self, date=None, local=True):\n \n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n sunset = self.astral.sunset_utc(date, self.latitude, self.longitude)\n\n if local:\n return sunset.astimezone(self.tz) \n else:\n return sunset", "def sun_single_day(date):\r\n\r\n\tsun = l.sun(date=date, 
local=True)\r\n\tsunrise = sun['sunrise']\r\n\tsunset = sun['sunset']\r\n\tday_length = str(sunset-sunrise)\r\n\tsolar_noon = l.solar_noon(date=date, local=True)\r\n\tsolar_zenith = l.solar_elevation(solar_noon.replace(tzinfo=None))\r\n\r\n\treturn {'sunrise':sunrise, 'sunset': sunset, 'daylength': day_length, 'solar_noon': solar_noon, 'zenith': solar_zenith}", "def getUtcSeconde(self) -> int:\n ...", "def _get_tz():\n return 'UTC'", "def utc_now():\n realtime = datetime.utcnow()\n realtime = pytz.utc.localize(realtime)\n return realtime", "def run_at_sunrise(self, callback, *args, auto_constraints=False, **kwargs):\n if not auto_constraints:\n return super().run_at_sunrise(callback, **kwargs)\n\n return self._attach_constraints(super().run_at_sunrise, callback, **kwargs)", "def morning_twilight_6(self, date=None):\n self.site.horizon = self.horizon6\n self._set_site_date(date)\n r_date = self.site.next_rising(self.sun)\n r_date = self.date_to_local(r_date.datetime())\n return r_date", "async def run_at_sunrise(self, callback: Callable, **kwargs) -> str:\n name = self.name\n self.logger.debug(\"Registering run_at_sunrise with kwargs = %s for %s\", kwargs, name)\n handle = await self._schedule_sun(name, \"next_rising\", callback, **kwargs)\n return handle", "def tai_day_secs(self):\n days = self._days - _TAI_BASE\n if days < 0:\n raise ValueError('Date precedes TAI origin')\n seconds = self.seconds + self.leapseconds\n day_offset, seconds = divmod(seconds, SECONDS_PER_DAY)\n return days + day_offset, seconds", "def timezone():\n \n pass", "def time_to_live_utc(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"time_to_live_utc\")", "def moon_rise(self, date=None):\n self._set_site_date(date)\n moonrise = self.site.next_rising(self.moon)\n moonrise = self.date_to_local(moonrise.datetime())\n ## if moonrise < self.sunset():\n ## moonrise = None\n return moonrise", "def SunPosition(time):\n # Correct for light travel time from the Sun.\n # Otherwise season calculations (equinox, solstice) will all be early by about 8 minutes!\n adjusted_time = time.AddDays(-1.0 / C_AUDAY)\n earth2000 = _CalcEarth(adjusted_time)\n sun2000 = [-earth2000.x, -earth2000.y, -earth2000.z]\n\n # Convert to equatorial Cartesian coordinates of date.\n stemp = _precession(sun2000, adjusted_time, _PrecessDir.From2000)\n sun_ofdate = _nutation(stemp, adjusted_time, _PrecessDir.From2000)\n\n # Convert equatorial coordinates to ecliptic coordinates.\n true_obliq = math.radians(adjusted_time._etilt().tobl)\n return _RotateEquatorialToEcliptic(sun_ofdate, true_obliq, time)", "def __init__(self, date, latitude, longitude, timezone):\n self.name = \"Sunrise Sunset Calculator\"\n self.date = date\n self.latitude = latitude\n self.longitude = longitude\n self.timezone = timezone\n return", "def get_current_india_time():\n india_offset = datetime.timedelta(hours=5, minutes=30)\n in_time = datetime.datetime.utcnow() + india_offset\n return in_time", "def goto_sun(self, seconds_ahead = 0, blocking = True):\n assert self.is_initialized\n solar_ephemeris = self.devices['solar_ephemeris']\n tracking_mirror_positioner = self.controllers['tracking_mirror_positioner']\n #self.set_windings('on')\n #start tracking time\n t0 = time.time()\n #get current sun location\n jd_now, el_now, az_now = solar_ephemeris.update()\n #predict where sun will be at next control point\n jd_future, el_future, az_future = solar_ephemeris.predict(seconds_ahead, jd_now)\n #send start event\n info = OrderedDict()\n info['timestamp'] = t0\n 
info['seconds_ahead'] = seconds_ahead\n info['jd_now'] = jd_now\n info['az_now'] = az_now\n info['el_now'] = el_now\n info['jd_future'] = jd_future\n info['az_future'] = az_future\n info['el_future'] = el_future\n \n self._send_event(\"SOLAR_TRACKER_GOTO_SUN_STARTED\", info)\n if blocking:\n tracking_mirror_positioner.goto(az_target = az_future,\n el_target = el_future,\n blocking = blocking,\n )\n t1 = time.time()\n used_t = t1-t0\n #send end event\n info = OrderedDict()\n info['timestamp'] = t1\n info['az_pos'] = self.az_pos\n info['el_pos'] = self.el_pos\n info['used_time'] = used_t\n self._send_event(\"SOLAR_TRACKER_GOTO_SUN_COMPLETED\", info)\n return used_t\n else:\n tracking_mirror_positioner.goto(az_target = az_future,\n el_target = el_future,\n blocking = blocking,\n )", "def nowUTC():\n return datetime.datetime.now(pytz.utc)", "def calc_rise_time(self, contact, start_time):\n # The percentage from and to of the rise time calculations\n # for example 0.1 of the final value to 0.9 of it\n from_percent = 0.1\n to_percent = 0.9\n times = self.get_data_set(\"time\")\n assert times[0] <= start_time < times[-1]\n # The index to start looking from\n starting_index = find_index_of_closets_value(self.get_data_set(\"time\"), start_time)\n # Get relevant voltages\n voltages = self.get_data_set(\"%s InnerVoltage\" % contact)[starting_index:]\n final_voltage = voltages[-1]\n from_index = find_index_of_closets_value(voltages, from_percent * final_voltage)\n to_index = find_index_of_closets_value(voltages[from_index:], to_percent * final_voltage)\n rise_time = times[starting_index+from_index+to_index] - times[starting_index+from_index]\n return rise_time", "def Utc(self):\n return _EPOCH + datetime.timedelta(days=self.ut)", "def start_time_utc(self) -> Optional[str]:\n return pulumi.get(self, \"start_time_utc\")", "def ut1_utc(self):\n values = self._interpolate_table(\"ut1_utc\", leap_second_correction=True)\n values += self._corrections((\"ortho_eop\", iers.ortho_eop, 2, 1e-6), (\"utlibr\", iers.utlibr, 0, 1e-6))\n\n # low frequency tides\n if \"rg_zont2\" in self.models:\n correction_cache = self._correction_cache.setdefault(\"rg_zont2\", dict())\n # Julian centuries since J2000\n t_julian_centuries = (self.time.tt.jd - 2451545.0) / 36525\n\n if self.time.isscalar:\n mjd = self.time.tt.mjd\n if mjd not in correction_cache:\n correction_cache[mjd] = iers.rg_zont2(t_julian_centuries)[0]\n dut1_corr = correction_cache[mjd]\n else:\n dut1_corr = list()\n for t in self.time.tt:\n if t.mjd not in correction_cache:\n t_julian_centuries = (t.tt.jd - 2451545.0) / 36525\n correction_cache[t.mjd] = iers.rg_zont2(t_julian_centuries)[0]\n dut1_corr.append(correction_cache[t.mjd])\n\n values += dut1_corr\n return values", "def _clock_time(self):\n return self._shifted_time % (24*3600)", "def latest_synop_time()-> datetime:\n utc = datetime.utcnow()\n\n if utc.hour < 1:\n utc = utc - timedelta(days=1)\n utc = utc.replace(hour=18)\n elif utc.hour < 7:\n utc = utc.replace(hour=0)\n elif utc.hour < 13:\n utc = utc.replace(hour=6)\n elif utc.hour < 19:\n utc = utc.replace(hour=12)\n else:\n utc = utc.replace(hour=18)\n\n utc.replace(minute=0, second=0)\n return utc", "def output(self):\n if self.after_sunrise:\n return \"%02d:%02d:%02dR\" % self.time\n if self.after_sunset:\n return \"%02d:%02d:%02dT\" % self.time\n return \"%02d:%02d:%02d\" % self.time", "def utcnow(cls):\n t = _time.time()\n return cls.utcfromtimestamp(t)", "def FromNowUTC(cls):\n t = pytime.time()\n utcTime = pytime.gmtime(t)\n return 
cls.FromStructTime(utcTime).WithZone(zDirection=0)", "def _get_utcoffset(self, tzname):\n # SQL Server has no built-in support for tz database\n # see http://blogs.msdn.com/b/sqlprogrammability/archive/2008/03/18/using-time-zone-data-in-sql-server-2008.aspx\n zone = pytz.timezone(tzname)\n # no way to take DST into account at this point\n now = datetime.datetime.now()\n delta = zone.localize(now, is_dst=False).utcoffset()\n return delta.days * 86400 + delta.seconds", "def get_time():\n # Use this one for production:\n now_time = pendulum.now(tz=pendulum.timezone(\"America/New_York\"))\n # Use this one for testing and modify as needed:\n # now_time = pendulum.datetime(2019, 7, 21, 20, 00, tz='America/New_York')\n\n return now_time", "def timetz(self):\n return time(\n self.hour,\n self.minute,\n self.second,\n self.microsecond,\n self._tzinfo,\n fold=self.fold,\n )", "def set_time_by_timezone(df):\n df = set_city_time_by_timezone(df, 1078, 3)\n df = set_city_time_by_timezone(df, 22390, 4)\n df = set_city_time_by_timezone(df, 22430, 4)\n df = set_city_time_by_timezone(df, 22438, 5)\n return df", "def get_time_of_the_day(self, ts):\n h, m, s = ts.hour, ts.minute, ts.second\n # Convert the hours, minutes, and seconds to seconds: referenced to 0 AM\n t = int(h) * 3600 + int(m) * 60 + int(s)\n if t >= 0:\n return t\n else:\n return t + 24*3600", "def stamp_time(utc):\n return utc.replace(hour=15, minute=30, second=0, microsecond=0)", "def get_local(utc_time, tz):\n utc_tz = pytz.utc\n utc_now = datetime.utcnow().replace(tzinfo=utc_tz)\n utc_alarm = utc_now.replace(hour=utc_time.hour, minute=utc_time.minute)\n local_tz = pytz.timezone(tz)\n local_alarm = local_tz.normalize(utc_alarm)\n return local_alarm.time()", "def _clock_day(self):\n return int(self._shifted_time / 86400)", "def get_tz_offset_seconds() -> float:\n import time\n import datetime\n tval = time.time()\n utc_offset = (datetime.datetime.fromtimestamp(tval) -\n datetime.datetime.utcfromtimestamp(tval)).total_seconds()\n return utc_offset", "def get_utc(local_tm, tz):\n utc_tz = pytz.utc\n utc_now = datetime.utcnow().replace(tzinfo=utc_tz)\n local_tz = pytz.timezone(tz)\n local_now = local_tz.normalize(utc_now)\n local_alarm = local_now.replace(hour=local_tm.hour, minute=local_tm.minute)\n utc_alarm = utc_tz.normalize(local_alarm)\n return utc_alarm.time()", "def sun_on_date(location, date):\n\n sun_api_url = \"http://api.geonames.org/timezoneJSON?lat=\" + str(location[0]) + \\\n \"&lng=\" + str(location[1]) + \"&date=\" + date + \"&username=demo\"\n # use my name as username, if it does not work with demo\n sun_api_url_response = requests.get(sun_api_url)\n sun_data = sun_api_url_response.json()\n try:\n sunrise = sun_data[\"dates\"][0][\"sunrise\"][11:16]\n sunset = sun_data[\"dates\"][0][\"sunset\"][11:16]\n return sunrise, sunset\n except:\n print sun_data[\"status\"][\"message\"]\n print \"Change usarname to my name to make it work\"\n return exit()", "def utcnow_ts():\r\n return calendar.timegm(utcnow().timetuple())", "def utcoffset (self, dt):\n return self.__utcOffset_td", "def mean_earth_sun_distance(utc_datetime): \n\n return (1 - (0.0335 * math.sin(360 * ((solar.GetDayOfYear(utc_datetime)) - 94)) / (365)))", "def brasilia_day():\n return (dt.datetime.utcnow() + dt.timedelta(hours=-3)).replace(hour=0, minute=0, second=0, microsecond=0)", "def _local_timestamps(self) -> npt.NDArray[np.int64]:\n if self.tz is None or timezones.is_utc(self.tz):\n # Avoid the copy that would be made in tzconversion\n return self.asi8\n return 
tz_convert_from_utc(self.asi8, self.tz, reso=self._creso)", "def get_today_utc():\n return pytz.utc.localize(datetime.datetime.utcnow()).replace(\n hour=0, minute=0, second=0, microsecond=0\n )", "def get_today_utc():\n return pytz.utc.localize(datetime.datetime.utcnow()).replace(\n hour=0, minute=0, second=0, microsecond=0\n )", "def seconds_since_midnight(time):\n return time.hour * 3600 + time.minute * 60 + time.second", "def localtime2utc(date):\n return date + (datetime.utcnow() - datetime.now())", "def ut1_utc_rate(self):\n values = self._interpolate_table(\"ut1_utc\", leap_second_correction=True, derivative_order=1)\n # values += self._corrections((\"ortho_eop\", iers.ortho_eop, 2, 1e-6), (\"utlibr\", iers.utlibr, 0, 1e-6))\n\n # Low frequency tides\n # if \"rg_zont2\" in self.models:\n # correction_cache = self._correction_cache.setdefault(\"rg_zont2\", dict())\n # # Julian centuries since J2000\n # t_julian_centuries = (self.time.tt.jd - 2451545.0) / 36525\n #\n # if self.time.isscalar:\n # mjd = self.time.tt.mjd\n # if mjd not in correction_cache:\n # correction_cache[mjd] = iers.rg_zont2(t_julian_centuries)[0]\n # dut1_corr = correction_cache[mjd]\n # else:\n # dut1_corr = list()\n # for t in self.time.tt:\n # if t.mjd not in correction_cache:\n # t_julian_centuries = (t.tt.jd - 2451545.0) / 36525\n # correction_cache[t.mjd] = iers.rg_zont2(t_julian_centuries)[0]\n # dut1_corr.append(correction_cache[t.mjd])\n #\n # values += dut1_corr\n # return values\n return values", "def get_utc_offset():\n timedelta = datetime.datetime.now() - datetime.datetime.utcnow()\n # XXX: `return -time.timezone`?\n return timedelta.total_seconds()", "def generate_sunrise_sunset_chart(\n sunrise: datetime.datetime,\n sunset: datetime.datetime,\n current: datetime.datetime,\n):\n\n fig, ax = plt.subplots()\n\n # The bar should shart at sunrise and end at sunset\n # Black bar showing sunset\n ax.barh([1], [time_to_float(sunset)+0.75], color=\"black\")\n # First make an orange bar for the sunset\n ax.barh([1], [time_to_float(sunset)], color=\"orange\")\n # Then make a black bar for sunset\n ax.barh([1], [time_to_float(sunrise)], color=\"black\")\n \n\n # Vertical line to show current time\n ax.axvline(x=time_to_float(current), linewidth=1, color=\"black\", ls=\"--\")\n\n # x-axis labels should be the time\n ax.set_xticks(\n [time_to_float(sunrise), 12, time_to_float(sunset)],\n )\n ax.set_xticklabels(\n [format_time(sunrise), \"12:00\", format_time(sunset)],\n )\n\n # Don't make the graph too wide\n ax.set_xlim([time_to_float(sunrise)-0.75, time_to_float(sunset)+0.75])\n\n # No y-axis labels required\n ax.set_yticks([])\n\n plt.tight_layout()\n\n fig.set_size_inches(7, 3)\n fig.savefig(\"./assets/sun-chart.png\")", "def Date_calibration(DD, s, TZ):\r\n \r\n dif_hour_tz = dt.datetime.now(tz=pytz.timezone(str(TZ))).hour - dt.datetime.now(tz=pytz.utc).hour\r\n #If s = 1 = True\r\n if(s):\r\n #Change the datetime to UTC\r\n DD = DD - dt.timedelta(hours=dif_hour_tz)\r\n else:\r\n #Change the datetime to the original time\r\n DD = DD + dt.timedelta(hours=dif_hour_tz)\r\n \r\n return DD", "def sunlongitude(time):\n B0 = 36000.7695\n C0 = 280.4659\n # fmt: off\n A = np.array([19147e-4, 200e-4, 48e-4, 20e-4, 18e-4, 18e-4, \\\n 15e-4, 13e-4, 7e-4, 7e-4, 7e-4, 6e-4, \\\n 5e-4, 5e-4, 4e-4, 4e-4])\n B = np.array([35999.050, 71998.1, 1934, 32964, 19, \\\n 445267, 45038, 22519, 65929, 3035, \\\n 9038, 33718, 155, 2281, 29930, \\\n 31557])\n C = np.array([267.520, 265.1, 145, 158, 159, 208, \\\n 254., 352, 45, 110, 64, 
316, \\\n 118., 221, 48, 161])\n # fmt: on\n RAD = 0.0174532925199433\n A[0] = 1.9147 - 0.0048 * time\n tempb = (B * time + C) * RAD\n amp = A * np.cos(tempb)\n sunlon = np.sum(amp)\n sunlon = (sunlon + B0 * time + C0) * RAD\n return sunlon", "def utc(self):\n return self._utc", "def calculate_time(start_time):\r\n return round(time() - start_time, 2)", "def timezone():\n\n return time.timezone", "def fromutc(self, dt):\n if dt.tzinfo is None:\n return dt.replace(tzinfo=self)\n return super(UTC, self).fromutc(dt)", "def __init__(self, h=0, m=0, s=0, after_sunrise=False, after_sunset=False):\n assert (after_sunrise and after_sunset) == False, \\\n \"Must not specify both after_sunrise and after_sunset\"\n \n self.time = (h, m, s)\n self.after_sunrise = after_sunrise\n self.after_sunset = after_sunset", "def morning_twilight_18(self, date=None):\n self.site.horizon = self.horizon18\n self._set_site_date(date)\n r_date = self.site.next_rising(self.sun)\n r_date = self.date_to_local(r_date.datetime())\n return r_date", "def local_tz(self):\n return pytz.timezone(self.calendar.timezone)", "def schedule(self):\n\n crontab = self._crontab\n return datetime.now() + timedelta(\n seconds=math.ceil(\n crontab.next(default_utc=False)\n )\n )", "def get_solar_time(longitude_deg, min_date, hour_date, day_date):\n solar_time_min = hour_date * 60 + min_date + 4 * longitude_deg + get_equation_of_time(day_date)\n\n return solar_time_min/60", "def get_nightly_start_time():\n return 14 # 2PM local Tucson time", "def get_time():\n\teastern = timezone('US/Eastern')\n\tnow = datetime.datetime.now(eastern).time()\n\treturn(now)", "def itow2utc(itow: int) -> datetime.time:\r\n\r\n utc = datetime(1980, 1, 6) + timedelta(seconds=(itow / 1000) - (35 - 19))\r\n return utc.time()", "def get_tz_offset_s(time_dt):\n return s_from_dt(time_dt.replace(tzinfo=pytz.UTC)) - s_from_dt(time_dt)", "def utcnow():\n return datetime.utcnow().replace(tzinfo=UTC)", "def from_ios_time(self):\n try:\n dt_obj = (int(ios) / int(self.nano_2001)) + 978307200\n self.in_iostime = dt.utcfromtimestamp(dt_obj).strftime('%Y-%m-%d %H:%M:%S.%f')\n except Exception as e:\n if not args.log:\n pass\n else:\n logging.error(str(type(e)) + \",\" + str(e))\n self.in_iostime = False\n return self.in_iostime", "def tropical_longitude(cls, fixed_date):\n days = ifloor(fixed_date - OldHindu.EPOCH)\n precession = 27 - abs(54 - mod(27 + (108 * 600/1577917828 * days), 108))\n return mod(cls.solar_longitude(fixed_date) - precession, 360)", "def update(self, time):\n\n delta_J2000 = self.time - constant.J2000_DATE\n n_days_J2000 = delta_J2000.days + delta_J2000.seconds/86400\n\n mean_lon_sun = 280.460 + 0.9856474*n_days_J2000\n mean_lon_sun %= 360.0\n mean_lon_sun *= constant.DEG_TO_RAD\n\n mean_anomaly_sun = 357.528 + 0.9856003*n_days_J2000\n mean_anomaly_sun %= 360.0\n mean_anomaly_sun *= constant.DEG_TO_RAD\n\n ecliptic_lon_sun = ( mean_lon_sun/constant.DEG_TO_RAD +\n 1.915*math.sin(mean_anomaly_sun) +\n 0.020*math.sin(2.0*mean_anomaly_sun) )\n ecliptic_lon_sun *= constant.DEG_TO_RAD\n\n dist_earth_to_sun = (1.00014 -\n 0.01671*math.cos(mean_anomaly_sun) -\n 0.00014*math.cos(2.0*mean_anomaly_sun) )\n dist_earth_to_sun *= constant.AU_TO_KM\n\n obliquity_ecliptic = 23.439 - 0.0000004*n_days_J2000\n obliquity_ecliptic *= constant.DEG_TO_RAD\n\n x_J2000_sun = math.cos(ecliptic_lon_sun)\n y_J2000_sun = math.cos(obliquity_ecliptic)*math.sin(ecliptic_lon_sun)\n z_J2000_sun = math.sin(obliquity_ecliptic)*math.sin(ecliptic_lon_sun)\n\n self.direction = 
vt.Vector([x_J2000_sun, y_J2000_sun, z_J2000_sun])\n self.distance = dist_earth_to_sun\n self.time = time" ]
[ "0.74266624", "0.7356725", "0.72597265", "0.7091791", "0.70251125", "0.7006317", "0.6827473", "0.68216705", "0.68117666", "0.66464955", "0.65655184", "0.6516135", "0.6488186", "0.6406055", "0.63925016", "0.6312478", "0.62507564", "0.6104449", "0.60976666", "0.6094293", "0.60576445", "0.5848984", "0.5747845", "0.5713557", "0.5574055", "0.550444", "0.5495502", "0.5492533", "0.5469824", "0.5416343", "0.54148316", "0.53633285", "0.53485173", "0.53286326", "0.531234", "0.52661645", "0.52618164", "0.5257517", "0.52356875", "0.52206576", "0.5203264", "0.5201801", "0.5200753", "0.5169239", "0.51606613", "0.51572907", "0.51539415", "0.5144642", "0.5140593", "0.5139767", "0.51364374", "0.51080906", "0.5081212", "0.5075055", "0.50667566", "0.50633395", "0.5053496", "0.5048256", "0.50398546", "0.5037355", "0.5021812", "0.50200826", "0.5012411", "0.5006824", "0.50024277", "0.4988205", "0.4978857", "0.49710667", "0.49640548", "0.49634495", "0.49492335", "0.49424776", "0.4942374", "0.4929344", "0.49138615", "0.49138615", "0.49106166", "0.49074835", "0.49059168", "0.49023524", "0.4879494", "0.48689932", "0.4867761", "0.48631543", "0.4857712", "0.48562372", "0.4844124", "0.48441222", "0.48156112", "0.4809487", "0.48074025", "0.48064578", "0.48057652", "0.47966763", "0.47826484", "0.47763175", "0.47619134", "0.47587705", "0.47557813", "0.47545734" ]
0.79015124
0
Calculate solar noon time in the UTC timezone.
def solar_noon_utc(self, date, longitude): julianday = self._julianday(date.day, date.month, date.year) newt = self._jday_to_jcentury(julianday + 0.5 + longitude / 360.0) eqtime = self._eq_of_time(newt) timeUTC = 720.0 + (longitude * 4.0) - eqtime timeUTC = timeUTC/60.0 hour = int(timeUTC) minute = int((timeUTC - hour) * 60) second = int((((timeUTC - hour) * 60) - minute) * 60) if second > 59: second -= 60 minute += 1 elif second < 0: second += 60 minute -= 1 if minute > 59: minute -= 60 hour += 1 elif minute < 0: minute += 60 hour -= 1 if hour > 23: hour -= 24 date += datetime.timedelta(days=1) elif hour < 0: hour += 24 date -= datetime.timedelta(days=1) noon = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc) return noon
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solar_noon(self, date=None, local=True):\n \n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n noon = self.astral.solar_noon_utc(date, self.longitude)\n\n if local:\n return noon.astimezone(self.tz) \n else:\n return noon", "def solar_noon_utc(LonDegE):\n _timezone = array([-180, -172.5, -157.5, -142.5, -127.5, -112.5, -97.5, -82.5, -67.5, -52.5, -37.5, -22.5, -7.5, 7.5, 22.5, 37.5, 52.5, 67.5, 82.5, 97.5, 112.5, 127.5, 142.5, 157.5, 172.5, 180]).repeat(2, 0)[1:-1].reshape(-1, 2)\n for i, (low, high) in enumerate(_timezone):\n if LonDegE >= low:\n if LonDegE <= high:\n return 12 -(-12 + i)", "def _get_tz():\n return 'UTC'", "def timezone():\n \n pass", "def latest_synop_time()-> datetime:\n utc = datetime.utcnow()\n\n if utc.hour < 1:\n utc = utc - timedelta(days=1)\n utc = utc.replace(hour=18)\n elif utc.hour < 7:\n utc = utc.replace(hour=0)\n elif utc.hour < 13:\n utc = utc.replace(hour=6)\n elif utc.hour < 19:\n utc = utc.replace(hour=12)\n else:\n utc = utc.replace(hour=18)\n\n utc.replace(minute=0, second=0)\n return utc", "def _getUTC(self, config = {} ):\n # Default implementation: get system local time\n return datetime.datetime.utcnow()", "def _solar_time(date, lon, lat):\n # IDL is computing time_t as hours and fractional minutes\n time_t = ee.Date(date).get('hour').add(\n ee.Date(date).get('minute').divide(60))\n\n # This will return the hour floating point value\n # time_t = ee.Date(date).get('hour').add(ee.Date(date).getFraction('hour'))\n\n # CGM - DOY and hour could be images in order to make these expressions\n julian = _to_jd(date)\n\n # Sunrise time\n julian_ = time_t.divide(24.0).add(julian)\n j_cen = julian_.add(0.5 - 2451545.0).divide(36525.0)\n # CGM - Does the mod happen before or after the multiply\n lon_sun = j_cen.multiply(0.0003032).add(36000.76983) \\\n .multiply(j_cen).mod(360.0).add(280.46646).subtract(360)\n an_sun = j_cen.multiply(-0.0001537).add(35999.05029) \\\n .multiply(j_cen).add(357.52911)\n ecc = j_cen.multiply(0.0000001267).add(0.000042037) \\\n .multiply(j_cen).multiply(-1).add(0.016708634)\n ob_ecl = j_cen.multiply(-0.001813).add(0.00059) \\\n .multiply(j_cen).add(46.815) \\\n .multiply(j_cen).multiply(-1).add(21.448) \\\n .divide(60.0).add(26).divide(60).add(23)\n ob_corr = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).cos() \\\n .multiply(0.00256).add(ob_ecl)\n var_y = ob_corr.divide(2.0).multiply(deg2rad).tan().multiply(\n ob_corr.divide(2.0).multiply(deg2rad).tan())\n eq_t = (\n lon_sun.multiply(2.0).multiply(deg2rad).sin().multiply(var_y)\n .subtract(an_sun.multiply(deg2rad).sin().multiply(ecc).multiply(2.0))\n .add(an_sun.multiply(deg2rad).sin()\n .multiply(lon_sun.multiply(2.0).multiply(deg2rad).cos())\n .multiply(var_y).multiply(ecc).multiply(4.0))\n .subtract(lon_sun.multiply(4.0).multiply(deg2rad).sin()\n .multiply(var_y).multiply(var_y).multiply(0.5))\n .subtract(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(ecc).multiply(ecc).multiply(1.25))\n .multiply(4.0).multiply(rad2deg))\n sun_eq = (\n an_sun.multiply(deg2rad).sin().multiply(\n j_cen.multiply(0.000014).add(0.004817)\n .multiply(j_cen).multiply(-1).add(1.914602))\n .add(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(j_cen.multiply(-0.000101).add(0.019993)))\n .add(an_sun.multiply(3.0).multiply(deg2rad).sin().multiply(0.000289)))\n sun_true = sun_eq.add(lon_sun)\n sun_app = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).sin() \\\n 
.multiply(-0.00478).subtract(0.00569).add(sun_true)\n\n # CGM - Intentionally not converting back to degrees\n d = ob_corr.multiply(deg2rad).sin() \\\n .multiply(sun_app.multiply(deg2rad).sin()) \\\n .asin()\n\n # CGM - Functions below are lat/lon dependent and can be written as\n # ee.Image expressions\n # CGM - d is still in radians, not converting\n ha_t = lat.expression(\n 'acos((cos(90.833 * pi / 180) / (cos(lat) * cos(d))) - tan(lat) * tan(d))'\n ' * (180 / pi)',\n {'lat': lat, 'd': d, 'pi': math.pi})\n\n # print('\\n{:10s} {:.12f}'.format('julian_', julian_.getInfo()))\n # print('{:10s} {:.12f}'.format('time_t', time_t.getInfo()))\n # print('{:10s} {:.12f}'.format('j_cen', j_cen.getInfo()))\n # print('{:10s} {:.12f}'.format('lon_sun', lon_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('an_sun', an_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('ecc', ecc.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_ecl', ob_ecl.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_corr', ob_corr.getInfo()))\n # print('{:10s} {:.12f}'.format('var_y', var_y.getInfo()))\n # print('{:10s} {:.12f}'.format('eq_t', eq_t.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_eq', sun_eq.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_true', sun_true.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_app', sun_app.getInfo()))\n # print('{:10s} {:.12f}'.format('d', d.getInfo()))\n\n return d, eq_t, ha_t", "def dawn_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n \n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dawn(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n \n dawn = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dawn", "def utcTime():\r\n return calendar.timegm(time.gmtime())", "def nowUTC():\n return datetime.datetime.now(pytz.utc)", "def _local_time_offset():\n if time.localtime().tm_isdst and time.daylight:\n return -time.altzone\n else:\n return -time.timezone", "def __get_stock_time(stock_tz: timezone) -> datetime:\n return datetime.now().astimezone(stock_tz)", "def solar_noon_local(LonDegE):\n return 12.", "def utc_now():\n realtime = datetime.utcnow()\n realtime = pytz.utc.localize(realtime)\n return realtime", "def get_current_india_time():\n india_offset = datetime.timedelta(hours=5, minutes=30)\n in_time = datetime.datetime.utcnow() + 
india_offset\n return in_time", "def localize_time_utc(non_utc_time):\n return pytz.utc.localize(non_utc_time)", "def localize_time_utc(non_utc_time):\n return pytz.utc.localize(non_utc_time)", "def get_time():\n\teastern = timezone('US/Eastern')\n\tnow = datetime.datetime.now(eastern).time()\n\treturn(now)", "def test_time_to_commute_retrieved_from_google_api_in_posix_is_converted_to_utc(self):\n result = calculate_time_of_commute(\n origin_name='Gatwick Airport',\n destination_name='Kings Cross St Pancras',\n )\n assert type(result) == datetime\n assert result.tzinfo is None # Assert it is a naive datetime", "def utcnow():\n return datetime.utcnow().replace(tzinfo=UTC)", "def time_NEURON():\n recorded_time = h.Vector()\n recorded_time.record(h._ref_t)\n return recorded_time", "def get_utc_now():\n return datetime.datetime.utcnow().replace(tzinfo=pytz.timezone(\"UTC\"))", "def FromNowUTC(cls):\n t = pytime.time()\n utcTime = pytime.gmtime(t)\n return cls.FromStructTime(utcTime).WithZone(zDirection=0)", "def get_time():\n # Use this one for production:\n now_time = pendulum.now(tz=pendulum.timezone(\"America/New_York\"))\n # Use this one for testing and modify as needed:\n # now_time = pendulum.datetime(2019, 7, 21, 20, 00, tz='America/New_York')\n\n return now_time", "def timezone():\n\n return time.timezone", "def dusk_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dusk(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n dusk = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dusk", "def __correct_token_time(self, t_time=None):\n\n if t_time is None:\n t_time = time.time()\n\n if time.localtime(t_time).tm_isdst and time.daylight:\n return -time.altzone\n else:\n return -time.timezone", "def time_zone():\n return timezone('Etc/GMT-10')", "def local_tz(self):\n return pytz.timezone(self.calendar.timezone)", "def _local_timestamps(self) -> npt.NDArray[np.int64]:\n if self.tz is None or timezones.is_utc(self.tz):\n # Avoid the copy that would be made in tzconversion\n return self.asi8\n return tz_convert_from_utc(self.asi8, self.tz, reso=self._creso)", "def getUtcSeconde(self) -> int:\n ...", "def sunset_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, 
date.year)\n\n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n sunset = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return sunset", "def utcnow() -> datetime.datetime:\n return datetime.datetime.utcnow().replace(tzinfo=pytz.UTC)", "def get_now_hour_utc(no_microseconds=True):\n if no_microseconds:\n return datetime.time.utcnow().replace(microsecond=0).time()\n else:\n return datetime.time.utcnow().time()", "def ntp_to_system_time(date):\n return date - NTP_DELTA", "def timezoneNaive(self):\n try:\n return self._timezone_naive\n except AttributeError:\n return None", "def localTimeOffset(t=None):\n\tif t is None:\n\t\tt = time.time()\n\t\n\tif time.localtime(t).tm_isdst and time.daylight:\n\t\treturn -time.altzone\n\telse:\n\t\treturn -time.timezone", "def now(self):\n if 'timezone' in self._data:\n return pytz.utc.localize(datetime.datetime.utcnow()).astimezone(pytz.timezone(self._data['timezone']))\n else:\n return pytz.utc.localize(datetime.datetime.utcnow())", "def localtime2utc(date):\n return date + (datetime.utcnow() - datetime.now())", "def get_now_hour_utc(no_microseconds=True):\n if no_microseconds:\n return datetime.datetime.utcnow().replace(microsecond=0).time()\n else:\n return datetime.datetime.utcnow().time()", "def brasilia_day():\n return (dt.datetime.utcnow() + dt.timedelta(hours=-3)).replace(hour=0, minute=0, second=0, microsecond=0)", "def get_now_utc(no_microseconds=True):\n if no_microseconds:\n return pytz.utc.localize(datetime.datetime.utcnow()).replace(\n microsecond=0\n )\n else:\n return pytz.utc.localize(datetime.datetime.utcnow())", "def get_now_utc(no_microseconds=True):\n if no_microseconds:\n return pytz.utc.localize(datetime.datetime.utcnow()).replace(\n microsecond=0\n )\n else:\n return pytz.utc.localize(datetime.datetime.utcnow())", "def time_zone(self):\n\n\t\tg = geocoders.GoogleV3()\n\n\t\t#Gives the name of the timezone, ex: Africa/Luanda\n\t\ttimezone_name = str(g.timezone((self.latitude_value(), self.longitude_value())))\n\n\t\t#Returns the numeric value of the timezone, ex: +0100\n\t\treturn int(pytz.timezone(timezone_name).localize(datetime.datetime(2011,1,1)).strftime('%z'))/100", "def utcnow_ts():\r\n return calendar.timegm(utcnow().timetuple())", "def get_utc_offset():\n timedelta = datetime.datetime.now() - datetime.datetime.utcnow()\n # XXX: `return -time.timezone`?\n return timedelta.total_seconds()", "def 
get_ph_time(as_array=False):\n utc = timezone('UTC')\n phtz = timezone('Asia/Manila')\n now = utc.localize(datetime.utcnow())\n now = now.astimezone(phtz)\n if as_array:\n return [now.year, now.month, now.day, now.hour, now.minute, now.second]\n else:\n return datetime(now.year, now.month, now.day, now.hour, now.minute, now.second)", "def utc_now():\n return datetime.now(tz=timezone.utc)", "def get_time(self):\n return datetime.datetime.now(self.time_zone)", "def copenhagen_time(*args):\n _ = args # to explicitly remove warning\n utc_dt = pytz.utc.localize(datetime.utcnow()) + timedelta(minutes=5, seconds=30)\n local_timezone = pytz.timezone(\"Europe/Copenhagen\")\n converted = utc_dt.astimezone(local_timezone)\n return converted.timetuple()", "def copenhagen_time(*args):\n _ = args # to explicitly remove warning\n utc_dt = pytz.utc.localize(datetime.utcnow()) + timedelta(minutes=5, seconds=30)\n local_timezone = pytz.timezone(\"Europe/Copenhagen\")\n converted = utc_dt.astimezone(local_timezone)\n return converted.timetuple()", "def stamp_time(utc):\n return utc.replace(hour=15, minute=30, second=0, microsecond=0)", "def solar_azimuth(self, dateandtime, latitude, longitude):\n \n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n zone = -dateandtime.utcoffset().seconds / 3600.0\n utc_datetime = dateandtime.astimezone(pytz.utc)\n timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + (utc_datetime.second / 3600)\n\n JD = self._julianday(dateandtime.day, dateandtime.month, dateandtime.year)\n t = self._jday_to_jcentury(JD + timenow / 24.0)\n theta = self._sun_declination(t)\n Etime = self._eq_of_time(t)\n \n eqtime = Etime\n solarDec = theta # in degrees\n \n solarTimeFix = eqtime - (4.0 * longitude) + (60 * zone)\n trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + dateandtime.second / 60.0 + solarTimeFix\n # in minutes\n \n while trueSolarTime > 1440:\n trueSolarTime = trueSolarTime - 1440\n \n hourangle = trueSolarTime / 4.0 - 180.0\n # Thanks to Louis Schwarzmayr for the next line:\n if hourangle < -180:\n hourangle = hourangle + 360.0\n \n harad = radians(hourangle)\n \n csz = sin(radians(latitude)) * sin(radians(solarDec)) + \\\n cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad)\n \n if csz > 1.0:\n csz = 1.0\n elif csz < -1.0:\n csz = -1.0\n \n zenith = degrees(acos(csz))\n \n azDenom = (cos(radians(latitude)) * sin(radians(zenith)))\n \n if (abs(azDenom) > 0.001):\n azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))) / azDenom\n \n if abs(azRad) > 1.0:\n if azRad < 0:\n azRad = -1.0\n else:\n azRad = 1.0\n \n azimuth = 180.0 - degrees(acos(azRad))\n \n if hourangle > 0.0:\n azimuth = -azimuth\n else:\n if latitude > 0.0:\n azimuth = 180.0\n else:\n azimuth = 0#\n \n if azimuth < 0.0:\n azimuth = azimuth + 360.0\n \n return azimuth", "def utcnow(cls):\n t = _time.time()\n return cls.utcfromtimestamp(t)", "def _now():\n return datetime.now(timezone.utc).astimezone()", "def ensure_tucson_time():\n if 'TZ' not in os.environ.keys() or os.environ['TZ'] != 'US/Arizona':\n os.environ['TZ'] = 'US/Arizona'\n time.tzset()", "def now_utc() -> datetime:\n return datetime.now(timezone.utc)", "def time_zone_minus_8(time):\n return time + timedelta(hours=-8)", "def dawn(self, date=None, local=True):\n\n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n dawn = self.astral.dawn_utc(date, self.latitude, self.longitude)\n\n if local:\n return 
dawn.astimezone(self.tz) \n else:\n return dawn", "async def get_now(self) -> dt.datetime:\n now = await self.AD.sched.get_now()\n return now.astimezone(self.AD.tz)", "def GetUtcTime(dt, tz):\n #Enriches the given time with the given timezone. For example 5 pm is enriched\n #to 5 pm EST, taking into account DST.\n local_time = tz.localize(dt)\n #We convert to utc\n utc_time = local_time.astimezone(pytz.utc)\n #We remove the timezone information ( = naive time)\n return utc_time.replace(tzinfo=None)", "def make_naive_utc(date_time: datetime.datetime) -> datetime.datetime:\n utc_timezone = datetime.timezone(datetime.timedelta(seconds=0))\n return date_time.astimezone(utc_timezone).replace(tzinfo=None)", "def get_timezone():\n return dates.get_timezone(_get_tz())", "def get_today_utc():\n return pytz.utc.localize(datetime.datetime.utcnow()).replace(\n hour=0, minute=0, second=0, microsecond=0\n )", "def get_today_utc():\n return pytz.utc.localize(datetime.datetime.utcnow()).replace(\n hour=0, minute=0, second=0, microsecond=0\n )", "def static_now():\n return datetime.datetime(2000, 9, 4).replace(tzinfo=timezone.utc)", "def set_time_by_timezone(df):\n df = set_city_time_by_timezone(df, 1078, 3)\n df = set_city_time_by_timezone(df, 22390, 4)\n df = set_city_time_by_timezone(df, 22430, 4)\n df = set_city_time_by_timezone(df, 22438, 5)\n return df", "def test_utc_in_timez(monkeypatch):\n utcoffset8_local_time_in_naive_utc = (\n datetime.datetime(\n year=2020,\n month=1,\n day=1,\n hour=1,\n minute=23,\n second=45,\n tzinfo=datetime.timezone(datetime.timedelta(hours=8)),\n )\n .astimezone(datetime.timezone.utc)\n .replace(tzinfo=None)\n )\n\n class mock_datetime:\n @classmethod\n def utcnow(cls):\n return utcoffset8_local_time_in_naive_utc\n\n monkeypatch.setattr('datetime.datetime', mock_datetime)\n rfc3339_utc_time = str(cherrypy._cplogging.LazyRfc3339UtcTime())\n expected_time = '2019-12-31T17:23:45Z'\n assert rfc3339_utc_time == expected_time", "def get_timzone_offset(self, timezone):\n raise NotImplementedError", "def sunrise_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n sunrise = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return sunrise", "def get_time(self):\r\n\t\tactual_time = '{}, {} de {} del {} ({}:{} {} (UTC))'\r\n\t\tda_taim = 
actual_time.format(self.wday, self.day, self.mon,\r\n\t\t\t\t\t\t\t\t\t self.year, self.hour, self.min,\r\n\t\t\t\t\t\t\t\t\t self.day_or_night)\r\n\t\treturn da_taim", "def get_solar_time(longitude_deg, min_date, hour_date, day_date):\n solar_time_min = hour_date * 60 + min_date + 4 * longitude_deg + get_equation_of_time(day_date)\n\n return solar_time_min/60", "def setup_datetime(self):\n \n current_date_time = datetime.now()\n timezone_diference = timedelta(hours=-3)\n return timezone(timezone_diference), current_date_time", "def solar_elevation(self, dateandtime, latitude, longitude):\n \n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n\n zone = -dateandtime.utcoffset().seconds / 3600.0\n utc_datetime = dateandtime.astimezone(pytz.utc)\n timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + (utc_datetime.second / 3600)\n \n JD = self._julianday(dateandtime.day, dateandtime.month, dateandtime.year)\n t = self._jday_to_jcentury(JD + timenow / 24.0)\n theta = self._sun_declination(t)\n Etime = self._eq_of_time(t)\n \n eqtime = Etime\n solarDec = theta # in degrees\n \n solarTimeFix = eqtime - (4.0 * longitude) + (60 * zone)\n trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + dateandtime.second / 60.0 + solarTimeFix\n # in minutes\n \n while trueSolarTime > 1440:\n trueSolarTime = trueSolarTime - 1440\n \n hourangle = trueSolarTime / 4.0 - 180.0\n # Thanks to Louis Schwarzmayr for the next line:\n if hourangle < -180:\n hourangle = hourangle + 360.0\n \n harad = radians(hourangle)\n \n csz = sin(radians(latitude)) * sin(radians(solarDec)) + \\\n cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad)\n \n if csz > 1.0:\n csz = 1.0\n elif csz < -1.0:\n csz = -1.0\n \n zenith = degrees(acos(csz))\n \n azDenom = (cos(radians(latitude)) * sin(radians(zenith)))\n \n if (abs(azDenom) > 0.001):\n azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))) / azDenom\n \n if abs(azRad) > 1.0:\n if azRad < 0:\n azRad = -1.0\n else:\n azRad = 1.0\n \n azimuth = 180.0 - degrees(acos(azRad))\n \n if hourangle > 0.0:\n azimuth = -azimuth\n else:\n if latitude > 0.0:\n azimuth = 180.0\n else:\n azimuth = 0\n \n if azimuth < 0.0:\n azimuth = azimuth + 360.0\n \n exoatmElevation = 90.0 - zenith\n\n if exoatmElevation > 85.0:\n refractionCorrection = 0.0\n else:\n te = tan(radians(exoatmElevation))\n if exoatmElevation > 5.0:\n refractionCorrection = 58.1 / te - 0.07 / (te * te * te) + 0.000086 / (te * te * te * te * te)\n elif exoatmElevation > -0.575:\n step1 = (-12.79 + exoatmElevation * 0.711)\n step2 = (103.4 + exoatmElevation * (step1))\n step3 = (-518.2 + exoatmElevation * (step2))\n refractionCorrection = 1735.0 + exoatmElevation * (step3)\n else:\n refractionCorrection = -20.774 / te\n \n refractionCorrection = refractionCorrection / 3600.0\n \n solarzen = zenith - refractionCorrection\n \n solarelevation = 90.0 - solarzen\n \n return solarelevation", "def _get_datetime():\n pst_now = datetime.datetime.utcnow().astimezone(pytz.timezone(\"America/Los_Angeles\"))\n return pst_now.strftime(\"%a %b %e %H:%M %Z %G\")", "def may_the_timezone_be_with_it(self):\n self.timestamp = pytz.utc.localize(self.timestamp)", "def nancay():\n return coord.EarthLocation(lat=47.376511*u.deg, lon=2.1924002*u.deg)", "def utc2localtime(date):\n return date - (datetime.utcnow() - datetime.now())", "def datetime_utc_now() -> datetime:\n return datetime.now(timezone.utc)", "def tropical_longitude(cls, fixed_date):\n days = ifloor(fixed_date 
- OldHindu.EPOCH)\n precession = 27 - abs(54 - mod(27 + (108 * 600/1577917828 * days), 108))\n return mod(cls.solar_longitude(fixed_date) - precession, 360)", "def to_nztimezone(t):\n from dateutil import tz\n utctz = tz.gettz('UTC')\n nztz = tz.gettz('Pacific/Auckland')\n return [ti.replace(tzinfo=utctz).astimezone(nztz) for ti in pd.to_datetime(t)]", "def _now():\n return datetime.datetime.utcnow().replace(tzinfo=pytz.utc)", "def GetLocalTime(dt, tz):\n return dt.replace(tzinfo=pytz.utc).astimezone(tz)", "async def _timezone(self, ctx: commands.Context, tz: str = None):\n self.check_if_exist(ctx.guild)\n\n self.daily_guilds[str(ctx.guild.id)][\"tz\"] = tz\n self.daily_info.update(\"guilds\", self.daily_guilds)\n await ctx.reply(\"New daily timezone is {0}\".format(tz))", "def time_detected(self) -> datetime:\n return datetime.fromtimestamp(\n self.properties[DBUS_ATTR_TIME_DETECTED] * 10**-6\n ).astimezone(timezone.utc)", "def get_utc(local_tm, tz):\n utc_tz = pytz.utc\n utc_now = datetime.utcnow().replace(tzinfo=utc_tz)\n local_tz = pytz.timezone(tz)\n local_now = local_tz.normalize(utc_now)\n local_alarm = local_now.replace(hour=local_tm.hour, minute=local_tm.minute)\n utc_alarm = utc_tz.normalize(local_alarm)\n return utc_alarm.time()", "def utc(self):\n return self._utc", "def get_nicedate(self):\n if self.valid is None:\n return \"(unknown issuance time)\"\n localts = self.valid\n fmt = \"%b %-d, %H:%M UTC\"\n if self.tz is not None:\n localts = self.valid.astimezone(self.tz)\n # A bit of complexity as offices may not implement daylight saving\n if self.z.endswith(\"ST\") and localts.dst():\n localts -= datetime.timedelta(hours=1)\n fmt = \"%b %-d, %-I:%M %p \" + self.z\n return localts.strftime(fmt)", "def _convertTZ(self):\n tz = timezone.get_current_timezone()\n dtstart = self['DTSTART']\n dtend = self['DTEND']\n if dtstart.zone() == \"UTC\":\n dtstart.dt = dtstart.dt.astimezone(tz)\n if dtend.zone() == \"UTC\":\n dtend.dt = dtend.dt.astimezone(tz)", "def UTMZone(x,y):\n\n #take longitudinal coordinate and add 180, then divide by 6 and round up\n lon = int(np.ceil((x + 180)/6))\n \n #determine whether y is in the Northern or Southern Hemisphere\n if y > 0:\n code = 326\n else:\n code = 327\n \n #return epsg of the utm zone\n epsg = int(str(code)+str(lon))\n return epsg", "def expiration_time_utc(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"expiration_time_utc\")", "def ozone(self) -> float | None:\n return self._ozone", "def _get_tzinfo(zonelabel):\n return moment.tzinfo(zonelabel) if zonelabel else _get_global_tz()", "def hydrate_time(nanoseconds, tz=None):\n seconds, nanoseconds = map(int, divmod(nanoseconds, 1000000000))\n minutes, seconds = map(int, divmod(seconds, 60))\n hours, minutes = map(int, divmod(minutes, 60))\n seconds = (1000000000 * seconds + nanoseconds) / 1000000000\n t = Time(hours, minutes, seconds)\n if tz is None:\n return t\n tz_offset_minutes, tz_offset_seconds = divmod(tz, 60)\n zone = FixedOffset(tz_offset_minutes)\n return zone.localize(t)", "def now_datetime():\n return datetime.utcnow().replace(tzinfo=timezone)", "def localtime(stamp):\n return stamp - utc_offset", "def now_dt(tz='UTC'):\n if tz != 'UTC':\n raise NotImplementedError()\n return datetime.datetime.utcnow().replace(tzinfo = pytz.utc)", "def local_time(self) -> SmartNvmeLocalTime:\n return self._local_time", "def now():\n return datetime.datetime.now(pytz.utc)", "def make_tz_aware(local_dt):\n aware_dt = timezone('US/Eastern').localize(local_dt)\n return aware_dt" ]
[ "0.6777714", "0.6724988", "0.6343755", "0.626581", "0.62460506", "0.59971875", "0.59616363", "0.594989", "0.5928303", "0.5893481", "0.5877765", "0.58015263", "0.5749765", "0.5749219", "0.5719723", "0.571865", "0.571865", "0.56694263", "0.5617303", "0.5612241", "0.56063336", "0.5605388", "0.5579593", "0.557632", "0.55701196", "0.5569826", "0.55687845", "0.552343", "0.5520006", "0.5497911", "0.5480549", "0.5449046", "0.54426754", "0.5436455", "0.54344344", "0.5431395", "0.54311216", "0.5422084", "0.5409944", "0.54081833", "0.5406108", "0.53875184", "0.53875184", "0.5377974", "0.53773767", "0.53720003", "0.5371617", "0.5369661", "0.5365463", "0.5357892", "0.5357892", "0.5355817", "0.53552014", "0.53507984", "0.53384006", "0.5333984", "0.5327155", "0.53249615", "0.53229314", "0.5316496", "0.530493", "0.5287642", "0.5279526", "0.5272025", "0.5272025", "0.5267649", "0.5267604", "0.52595633", "0.52568203", "0.5248165", "0.52363664", "0.52086514", "0.5201948", "0.5197465", "0.51860505", "0.5172163", "0.5169536", "0.51557475", "0.5152739", "0.5147474", "0.51460123", "0.5144927", "0.51413625", "0.5139826", "0.513726", "0.5136384", "0.5134183", "0.5133495", "0.51301825", "0.5125767", "0.5119841", "0.51157665", "0.5110765", "0.5110574", "0.5110377", "0.510568", "0.5105155", "0.5104211", "0.5099736", "0.5086337" ]
0.74279106
0
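The positive document above finds solar noon by evaluating the equation of time, converting the longitude offset to minutes, and iterating once for accuracy. For orientation, a rough standalone sketch of the same idea — my own illustration, not taken from the astral library quoted in the record — can be written with the simple Whillier equation-of-time approximation and an east-positive longitude convention:

```python
import datetime
import math

def approx_solar_noon_utc(date, longitude_east_deg):
    """Rough solar noon in UTC; good to about a minute for most dates."""
    n = date.timetuple().tm_yday
    b = 2.0 * math.pi * (n - 81) / 364.0
    # Equation of time in minutes (simple approximation).
    eot = 9.87 * math.sin(2.0 * b) - 7.53 * math.cos(b) - 1.5 * math.sin(b)
    # 720 minutes after UTC midnight is 12:00; each degree of longitude
    # shifts local solar noon by 4 minutes of clock time.
    minutes_utc = 720.0 - 4.0 * longitude_east_deg - eot
    midnight = datetime.datetime.combine(date, datetime.time(0),
                                         tzinfo=datetime.timezone.utc)
    return midnight + datetime.timedelta(minutes=minutes_utc)

# Greenwich on the June solstice: about 12:01-12:02 UTC.
print(approx_solar_noon_utc(datetime.date(2023, 6, 21), 0.0))
```

Unlike the record's implementation, this sketch does no second iteration, so expect small differences; the sign of the longitude term also depends on whether longitudes are taken as east- or west-positive (the quoted snippet appears to use the opposite convention).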
Calculate sunset time in the UTC timezone.
def sunset_utc(self, date, latitude, longitude): julianday = self._julianday(date.day, date.month, date.year) t = self._jday_to_jcentury(julianday) eqtime = self._eq_of_time(t) solarDec = self._sun_declination(t) try: hourangle = self._hour_angle_sunset(latitude, solarDec) except: raise AstralError('Sun remains below horizon on this day, at this location.') delta = longitude - degrees(hourangle) timeDiff = 4.0 * delta timeUTC = 720.0 + timeDiff - eqtime newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0) eqtime = self._eq_of_time(newt) solarDec = self._sun_declination(newt) hourangle = self._hour_angle_sunset(latitude, solarDec) delta = longitude - degrees(hourangle) timeDiff = 4 * delta timeUTC = 720 + timeDiff - eqtime timeUTC = timeUTC/60.0 hour = int(timeUTC) minute = int((timeUTC - hour) * 60) second = int((((timeUTC - hour) * 60) - minute) * 60) if second > 59: second -= 60 minute += 1 elif second < 0: second += 60 minute -= 1 if minute > 59: minute -= 60 hour += 1 elif minute < 0: minute += 60 hour -= 1 if hour > 23: hour -= 24 date += datetime.timedelta(days=1) elif hour < 0: hour += 24 date -= datetime.timedelta(days=1) sunset = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc) return sunset
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sunset(self, date=None, local=True):\n \n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n sunset = self.astral.sunset_utc(date, self.latitude, self.longitude)\n\n if local:\n return sunset.astimezone(self.tz) \n else:\n return sunset", "def sunset(cls, date):\n return (date + Clock.days_from_hours(18) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n (((1577917828/1582237828) / 360) *\n (- cls.ascensional_difference(date, cls.LOCATION) +\n (3/4 * cls.solar_sidereal_difference(date)))))", "def utcTime():\r\n return calendar.timegm(time.gmtime())", "def sundial_time(cls, tee):\n date = Clock.fixed_from_moment(tee)\n time = mod(tee, 1)\n q = ifloor(4 * time)\n if q == 0:\n a = cls.sunset(date - 1)\n b = cls.sunrise(date)\n t = Clock.days_from_hours(-6)\n elif q == 3:\n a = cls.sunset(date)\n b = cls.sunrise(date + 1)\n t = Clock.days_from_hours(18)\n else:\n a = cls.sunrise(date)\n b = cls.sunset(date)\n t = Clock.days_from_hours(6)\n return a + (2 * (b - a) * (time - t))", "def GetSunriseSunset(latitude_deg, longitude_deg, utc_datetime, timezone):\n\n # Day of the year\n day = solar.GetDayOfYear(utc_datetime)\n\n # Solar hour angle\n SHA = ((timezone)* 15.0 - longitude_deg)\n\n # Time adjustment\n TT = (279.134+0.985647*day)*math.pi/180\n\n # Time adjustment in hours\n time_adst = ((5.0323 - 100.976*math.sin(TT)+595.275*math.sin(2*TT)+\n 3.6858*math.sin(3*TT) - 12.47*math.sin(4*TT) - 430.847*math.cos(TT)+\n 12.5024*math.cos(2*TT) + 18.25*math.cos(3*TT))/3600)\n \n # Time of noon\n TON = (12 + (SHA/15.0) - time_adst)\n \n sunn = (math.pi/2-(23.45*math.pi/180)*math.tan(latitude_deg*math.pi/180)*\n math.cos(2*math.pi*day/365.25))*(180/(math.pi*15))\n\n # Sunrise_time in hours\n sunrise_time = (TON - sunn + time_adst)\n \n # Sunset_time in hours\n sunset_time = (TON + sunn - time_adst) \n\n sunrise_time_dt = date_with_decimal_hour(utc_datetime, sunrise_time) \n sunset_time_dt = date_with_decimal_hour(utc_datetime, sunset_time) \n\n return sunrise_time_dt, sunset_time_dt", "def stamp_time(utc):\n return utc.replace(hour=15, minute=30, second=0, microsecond=0)", "def latest_synop_time()-> datetime:\n utc = datetime.utcnow()\n\n if utc.hour < 1:\n utc = utc - timedelta(days=1)\n utc = utc.replace(hour=18)\n elif utc.hour < 7:\n utc = utc.replace(hour=0)\n elif utc.hour < 13:\n utc = utc.replace(hour=6)\n elif utc.hour < 19:\n utc = utc.replace(hour=12)\n else:\n utc = utc.replace(hour=18)\n\n utc.replace(minute=0, second=0)\n return utc", "def utcnow(cls):\n t = _time.time()\n return cls.utcfromtimestamp(t)", "def set_utc(date_time):\n utc = datetime.timezone(datetime.timedelta(0))\n date_time = date_time.replace(tzinfo=utc)\n return date_time", "def getUtcSeconde(self) -> int:\n ...", "def _get_tz():\n return 'UTC'", "def fromutc(self, dt):\n if dt.tzinfo is None:\n return dt.replace(tzinfo=self)\n return super(UTC, self).fromutc(dt)", "def dusk_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + 
timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dusk(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n dusk = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dusk", "def start_hour_utc(self) -> int:\n return pulumi.get(self, \"start_hour_utc\")", "def FromNowUTC(cls):\n t = pytime.time()\n utcTime = pytime.gmtime(t)\n return cls.FromStructTime(utcTime).WithZone(zDirection=0)", "async def sunset(self, aware=False, today=False, days_offset=0) -> dt.datetime:\n return await self.AD.sched.sunset(aware, today=today, days_offset=days_offset)", "def _get_sun_rise_set_time(self, sun_time):\n if sun_time:\n return datetime.fromtimestamp(sun_time).strftime(self.time_format)\n return sun_time", "def get_utc(local_tm, tz):\n utc_tz = pytz.utc\n utc_now = datetime.utcnow().replace(tzinfo=utc_tz)\n local_tz = pytz.timezone(tz)\n local_now = local_tz.normalize(utc_now)\n local_alarm = local_now.replace(hour=local_tm.hour, minute=local_tm.minute)\n utc_alarm = utc_tz.normalize(local_alarm)\n return utc_alarm.time()", "def utcnow():\n return datetime.utcnow().replace(tzinfo=UTC)", "def utcnow():\r\n if utcnow.override_time:\r\n try:\r\n return utcnow.override_time.pop(0)\r\n except AttributeError:\r\n return utcnow.override_time\r\n return datetime.datetime.utcnow()", "def utcnow():\n if utcnow.override_time:\n try:\n return utcnow.override_time.pop(0)\n except AttributeError:\n return utcnow.override_time\n return datetime.datetime.utcnow()", "def utc(self):\n return self._utc", "def utcnow_ts():\r\n return calendar.timegm(utcnow().timetuple())", "def _utc_date(self):\n if self.date_stamp == '0':\n return '0'\n else:\n if '.' 
in self.date_stamp:\n t = datetime.datetime.strptime(self.date_stamp,\n '%Y%m%d%H%M%S.%f')\n else:\n t = datetime.datetime.strptime(self.date_stamp,\n '%Y%m%d%H%M%S')\n tdelta = datetime.timedelta(hours = int(self.tzone[1:3]),\n minutes = int(self.tzone[3:5]))\n \n if self.tzone[0] == '-':\n ut = t - tdelta\n return ut.strftime('%Y%m%d%H%M%S.%f')\n else:\n ut = t + tdelta\n return ut.strftime('%Y%m%d%H%M%S.%f')", "def expiration_time_utc(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"expiration_time_utc\")", "def ensure_utc_time(ts: datetime) -> datetime:\n if ts.tzinfo is None:\n return datetime(*ts.timetuple()[:6], tzinfo=UTC_TZ)\n elif str(ts.tzinfo) != str(UTC_TZ):\n return ts.astimezone(UTC_TZ)\n return ts", "def get_utc_offset():\n timedelta = datetime.datetime.now() - datetime.datetime.utcnow()\n # XXX: `return -time.timezone`?\n return timedelta.total_seconds()", "def get_unixtime(self):\n if not self.Complete():\n raise DateTimeError(\"get_unixtime requires complete timepoint\")\n zoffset = self.time.GetZoneOffset()\n if zoffset is None:\n raise DateTimeError(\"get_unixtime requires timezone\")\n elif zoffset == 0:\n zt = self\n else:\n zt = self.ShiftZone(zDirection=0)\n days = zt.date.GetAbsoluteDay() - EPOCH.date.GetAbsoluteDay()\n seconds = zt.time.GetTotalSeconds() - EPOCH.time.GetTotalSeconds()\n return 86400 * days + seconds", "def getutv(self):\n t = datetime.datetime.now()\n utc_seconds = (time.mktime(t.timetuple()))\n utc_seconds = int(utc_seconds * 1000)\n return str(utc_seconds)", "def clear_time_override():\r\n utcnow.override_time = None", "def time_to_live_utc(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"time_to_live_utc\")", "def get_tz_offset_s(time_dt):\n return s_from_dt(time_dt.replace(tzinfo=pytz.UTC)) - s_from_dt(time_dt)", "def round_utc_hour_up(dateString):\n date_object = datetime.strptime(dateString, \"%Y-%m-%d %H:%M:%S\")\n newHour = (date_object.hour + 1) % 24\n date_object = date_object.replace(hour=newHour)\n return date_object.strftime(\"%Y-%m-%d %H:00:00\")", "def sunset(self, seconds=None, milliseconds=None, temp_start=None, temp_end=None):\n return self.run_sequence(self._sunrise_sunset, seconds=seconds, milliseconds=milliseconds, temp_start=temp_start, temp_end=temp_end, setting=True)", "def start_hour_utc(self) -> Optional[int]:\n return pulumi.get(self, \"start_hour_utc\")", "def _get_time_utc(time_utc_str):\n dt = datetime.strptime(time_utc_str, TIME_FORMAT)\n return int(calendar.timegm(dt.utctimetuple()))", "def get_site_last_updated(cls, utc_now=None):\n if utc_now is None:\n utc_now = arrow.utcnow()\n\n today_update_hour = cls.stamp_time(utc_now)\n if today_update_hour > utc_now:\n update_time = today_update_hour.shift(days=-1)\n else:\n update_time = today_update_hour\n\n return update_time", "def nowUTC():\n return datetime.datetime.now(pytz.utc)", "def utcnow():\n utctime = datetime.datetime.utcnow()\n utcstamp = utctime.strftime(\"%Y-%m-%d %H:%M:%S\")\n return utcstamp", "def timetz(self):\n return time(\n self.hour,\n self.minute,\n self.second,\n self.microsecond,\n self._tzinfo,\n fold=self.fold,\n )", "def _getUTC(self, config = {} ):\n # Default implementation: get system local time\n return datetime.datetime.utcnow()", "def utcoffset (self, dt):\n return self.__utcOffset_td", "def get_now_hour_utc(no_microseconds=True):\n if no_microseconds:\n return datetime.time.utcnow().replace(microsecond=0).time()\n else:\n return datetime.time.utcnow().time()", "def 
get_now_hour_utc(no_microseconds=True):\n if no_microseconds:\n return datetime.datetime.utcnow().replace(microsecond=0).time()\n else:\n return datetime.datetime.utcnow().time()", "def ensure_tucson_time():\n if 'TZ' not in os.environ.keys() or os.environ['TZ'] != 'US/Arizona':\n os.environ['TZ'] = 'US/Arizona'\n time.tzset()", "def localtime2utc(date):\n return date + (datetime.utcnow() - datetime.now())", "def get_today_utc():\n return pytz.utc.localize(datetime.datetime.utcnow()).replace(\n hour=0, minute=0, second=0, microsecond=0\n )", "def get_today_utc():\n return pytz.utc.localize(datetime.datetime.utcnow()).replace(\n hour=0, minute=0, second=0, microsecond=0\n )", "def _get_utcoffset(self, tzname):\n # SQL Server has no built-in support for tz database\n # see http://blogs.msdn.com/b/sqlprogrammability/archive/2008/03/18/using-time-zone-data-in-sql-server-2008.aspx\n zone = pytz.timezone(tzname)\n # no way to take DST into account at this point\n now = datetime.datetime.now()\n delta = zone.localize(now, is_dst=False).utcoffset()\n return delta.days * 86400 + delta.seconds", "def expiration_time_utc(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"expiration_time_utc\")", "def expiration_time_utc(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"expiration_time_utc\")", "def utc_now():\n realtime = datetime.utcnow()\n realtime = pytz.utc.localize(realtime)\n return realtime", "def dawn_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n \n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dawn(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n \n dawn = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dawn", "def timezone():\n \n pass", "def _local_time_offset():\n if time.localtime().tm_isdst and time.daylight:\n return -time.altzone\n else:\n return -time.timezone", "def timestampfromutc(utc):\n return (utc - datetime(1970, 1, 1)).total_seconds()", "def sunset(self, date=None):\n self.site.horizon = self.horizon\n self._set_site_date(date)\n r_date = self.site.next_setting(self.sun)\n r_date = self.date_to_local(r_date.datetime())\n return r_date", "def utctime(stamp):\n return stamp + utc_offset", "def __correct_token_time(self, t_time=None):\n\n if t_time is None:\n t_time = time.time()\n\n if 
time.localtime(t_time).tm_isdst and time.daylight:\n return -time.altzone\n else:\n return -time.timezone", "def rahukaalam_utc(self, date, latitude, longitude):\n \n if date is None:\n date = datetime.date.today()\n\n try:\n sunrise = self.sunrise_utc(date, latitude, longitude)\n sunset = self.sunset_utc(date, latitude, longitude)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n \n octant_duration = (sunset - sunrise) / 8\n\n # Mo,Sa,Fr,We,Th,Tu,Su\n octant_index = [1,6,4,5,3,2,7]\n \n weekday = date.weekday()\n octant = octant_index[weekday]\n \n start = sunrise + (octant_duration * octant)\n end = start + octant_duration\n \n return {'start': start, 'end': end}", "def start_time_utc(self) -> Optional[str]:\n return pulumi.get(self, \"start_time_utc\")", "def _clock_time(self):\n return self._shifted_time % (24*3600)", "def get_utc_now():\n return datetime.datetime.utcnow().replace(tzinfo=pytz.timezone(\"UTC\"))", "def utcnow() -> datetime.datetime:\n return datetime.datetime.utcnow().replace(tzinfo=pytz.UTC)", "def may_the_timezone_be_with_it(self):\n self.timestamp = pytz.utc.localize(self.timestamp)", "def time_zone_minus_8(time):\n return time + timedelta(hours=-8)", "def fromutc(self, dt):\n if not isinstance(dt, real_datetime):\n raise TypeError(\"fromutc() requires a datetime argument\")\n if dt.tzinfo is not self:\n raise ValueError(\"dt.tzinfo is not self\")\n\n dtoff = dt.utcoffset()\n if dtoff is None:\n raise ValueError(\"fromutc() requires a non-None utcoffset() \" \"result\")\n\n # See the long comment block at the end of this file for an\n # explanation of this algorithm.\n dtdst = dt.dst()\n if dtdst is None:\n raise ValueError(\"fromutc() requires a non-None dst() result\")\n delta = dtoff - dtdst\n if delta:\n dt += delta\n dtdst = dt.dst()\n if dtdst is None:\n raise ValueError(\n \"fromutc(): dt.dst gave inconsistent \" \"results; cannot convert\"\n )\n return dt + dtdst", "def utc():\n return date_from_utc(dt.utcnow())", "def localize_time_utc(non_utc_time):\n return pytz.utc.localize(non_utc_time)", "def localize_time_utc(non_utc_time):\n return pytz.utc.localize(non_utc_time)", "def localtime(stamp):\n return stamp - utc_offset", "def sunrise_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n sunrise = datetime.datetime(date.year, 
date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return sunrise", "def stop_time(self):\n return self.start_time + timedelta(hours=self.duration)", "def computeDaytimeStartEnd(self, date):\n dayStartTime = datetime.datetime.combine(date.date(), datetime.time())\n #compute sunrise time for that date\n (h, m, s) = self.sun.sunrise(when=date)\n time_delta = datetime.timedelta(hours=h, minutes=m, seconds=s)\n sunrise_datetime = dayStartTime + time_delta\n #print(sunrise_datetime) \n #compute sunset time for that date \n (h, m, s) = self.sun.sunset(when=date)\n time_delta = datetime.timedelta(hours=h, minutes=m, seconds=s)\n sunset_datetime = dayStartTime + time_delta\n \n return (sunrise_datetime, sunset_datetime)", "def sunset(self):\r\n try:\r\n return str(self.connect()['sys']['sunset'])\r\n except:\r\n return '@weather_sunset'", "def get_local(utc_time, tz):\n utc_tz = pytz.utc\n utc_now = datetime.utcnow().replace(tzinfo=utc_tz)\n utc_alarm = utc_now.replace(hour=utc_time.hour, minute=utc_time.minute)\n local_tz = pytz.timezone(tz)\n local_alarm = local_tz.normalize(utc_alarm)\n return local_alarm.time()", "def itow2utc(itow: int) -> datetime.time:\r\n\r\n utc = datetime(1980, 1, 6) + timedelta(seconds=(itow / 1000) - (35 - 19))\r\n return utc.time()", "def utcoffset(self):\n if self._tzinfo is None:\n return None\n offset = self._tzinfo.utcoffset(None)\n _check_utc_offset(\"utcoffset\", offset)\n return offset", "def get_utc_offset_str():\n # Calculate the UTC time difference in seconds.\n\n timestamp = time.time()\n time_now = datetime.fromtimestamp(timestamp)\n time_utc = datetime.utcfromtimestamp(timestamp)\n utc_offset_secs = (time_now - time_utc).total_seconds()\n\n # Flag variable to hold if the current time is behind UTC.\n is_behind_utc = False\n\n # If the current time is behind UTC convert the offset\n # seconds to a positive value and set the flag variable.\n if utc_offset_secs < 0:\n is_behind_utc = True\n utc_offset_secs *= -1\n\n # Build a UTC offset string suitable for use in a timestamp.\n\n if is_behind_utc:\n pos_neg_prefix = \"-\"\n else:\n pos_neg_prefix = \"+\"\n\n utc_offset = time.gmtime(utc_offset_secs)\n utc_offset_fmt = time.strftime(\"%H\", utc_offset)\n utc_offset_str = pos_neg_prefix + utc_offset_fmt\n\n return utc_offset_str", "def set_utc(df, locale):\n return df.tz_localize('utc').tz_convert(None)", "def destroy_event_time(self) -> str:\n return pulumi.get(self, \"destroy_event_time\")", "def to_utc(dt):\n if dt.tzinfo is None:\n return dt.replace(tzinfo=pytz.utc)\n else:\n return dt.astimezone(pytz.utc)", "def set_time_by_timezone(df):\n df = set_city_time_by_timezone(df, 1078, 3)\n df = set_city_time_by_timezone(df, 22390, 4)\n df = set_city_time_by_timezone(df, 22430, 4)\n df = set_city_time_by_timezone(df, 22438, 5)\n return df", "def computeSunTime(self, latitude, longitude, startDate, endDate): \n self.sun = sun(lat=latitude, long=longitude)\n dateTime = datetime.datetime.combine(startDate, datetime.time(hour=8))\n while dateTime.date() <= endDate: \n daytimeStart, daytimeEnd = self.computeDaytimeStartEnd(dateTime)\n self.sunriseTimeDict[dateTime.date()] = daytimeStart\n self.sunsetTimeDict[dateTime.date()] = daytimeEnd\n dateTime += self.oneDayDelta", "def utcfromtimestamp(cls, t):\n return cls._fromtimestamp(t, True, None)", "def __get_stock_time(stock_tz: timezone) -> datetime:\n return datetime.now().astimezone(stock_tz)", "def Date_calibration(DD, s, TZ):\r\n \r\n dif_hour_tz = dt.datetime.now(tz=pytz.timezone(str(TZ))).hour - 
dt.datetime.now(tz=pytz.utc).hour\r\n #If s = 1 = True\r\n if(s):\r\n #Change the datetime to UTC\r\n DD = DD - dt.timedelta(hours=dif_hour_tz)\r\n else:\r\n #Change the datetime to the original time\r\n DD = DD + dt.timedelta(hours=dif_hour_tz)\r\n \r\n return DD", "def get_tz_offset_seconds() -> float:\n import time\n import datetime\n tval = time.time()\n utc_offset = (datetime.datetime.fromtimestamp(tval) -\n datetime.datetime.utcfromtimestamp(tval)).total_seconds()\n return utc_offset", "def local_to_utc(local_dt):\n local_dt = local_dt.replace(tzinfo=tz.tzlocal())\n return local_dt.astimezone(tz.tzlocal())", "def utc_now():\n return datetime.now(tz=timezone.utc)", "def db_datetime_utc():\n t = datetime.datetime.utcnow()\n return time.mktime(t.timetuple())", "def last_updated_time_utc(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"last_updated_time_utc\")", "def Utc(self):\n return _EPOCH + datetime.timedelta(days=self.ut)", "def setup_datetime(self):\n \n current_date_time = datetime.now()\n timezone_diference = timedelta(hours=-3)\n return timezone(timezone_diference), current_date_time", "def get_umeastime(self):\n return self.utime", "def utcoffset(self):\n if self._tzinfo is None:\n return None\n offset = self._tzinfo.utcoffset(self._realized_if_concrete_tzinfo())\n _check_utc_offset(\"utcoffset\", offset)\n return offset", "def localTimeOffset(t=None):\n\tif t is None:\n\t\tt = time.time()\n\t\n\tif time.localtime(t).tm_isdst and time.daylight:\n\t\treturn -time.altzone\n\telse:\n\t\treturn -time.timezone", "def local_to_utc(local_dt: datetime) -> datetime:\n if local_dt is None:\n return None\n utc_dt = local_dt.astimezone(tz.UTC)\n return utc_dt", "def expiration_time_if_not_activated_utc(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"expiration_time_if_not_activated_utc\")", "def last_updated_time_utc(self) -> Optional[str]:\n return pulumi.get(self, \"last_updated_time_utc\")" ]
[ "0.65699303", "0.64950114", "0.63881093", "0.61808205", "0.61730003", "0.6134258", "0.60923696", "0.60374415", "0.60219973", "0.5875478", "0.5835468", "0.5818931", "0.5818664", "0.58087045", "0.5799634", "0.57659054", "0.57374483", "0.5717565", "0.5715173", "0.5711675", "0.57083106", "0.57057077", "0.5691319", "0.5683911", "0.5680497", "0.5668587", "0.5656173", "0.5650784", "0.5623827", "0.5612923", "0.5612843", "0.5598927", "0.5575586", "0.5575301", "0.556423", "0.5556633", "0.55474424", "0.55413294", "0.5537956", "0.5529287", "0.55192554", "0.5512566", "0.5512239", "0.54908127", "0.54837173", "0.54599136", "0.5448895", "0.5448895", "0.5447133", "0.54423654", "0.54423654", "0.5437595", "0.5433903", "0.54205203", "0.53898156", "0.5389336", "0.5384323", "0.5380959", "0.5346833", "0.53445375", "0.5337364", "0.5334186", "0.53289944", "0.53214306", "0.5310137", "0.53092885", "0.5306011", "0.53044", "0.5283182", "0.5283182", "0.52760124", "0.5272256", "0.5262921", "0.5254945", "0.5241604", "0.52331656", "0.5222573", "0.52210385", "0.52179706", "0.5214227", "0.5210033", "0.5201476", "0.5199682", "0.5197579", "0.51967806", "0.51942784", "0.5193463", "0.5189223", "0.51758975", "0.5167813", "0.5166561", "0.5164667", "0.516192", "0.51591223", "0.5140795", "0.5138157", "0.5132222", "0.5131809", "0.5118471", "0.51183623" ]
0.74293166
0
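Sunset is solar noon plus half the day length: the hour angle at which the Sun reaches the horizon satisfies cos(hour angle) = -tan(latitude) * tan(declination). A compact, self-contained sketch under the same approximations as above (illustrative only; unlike the astral code in the record it ignores atmospheric refraction and the solar disc radius, so it lands a few minutes early):

```python
import datetime
import math

def approx_sunset_utc(date, latitude_deg, longitude_east_deg):
    """Rough sunset in UTC; raises ValueError during polar day or polar night."""
    n = date.timetuple().tm_yday
    b = 2.0 * math.pi * (n - 81) / 364.0
    eot = 9.87 * math.sin(2.0 * b) - 7.53 * math.cos(b) - 1.5 * math.sin(b)
    noon_minutes = 720.0 - 4.0 * longitude_east_deg - eot
    # Cooper's approximation for solar declination, in degrees.
    decl = 23.44 * math.sin(2.0 * math.pi * (284 + n) / 365.0)
    cos_ha = -math.tan(math.radians(latitude_deg)) * math.tan(math.radians(decl))
    ha_deg = math.degrees(math.acos(cos_ha))  # sunset hour angle in degrees
    midnight = datetime.datetime.combine(date, datetime.time(0),
                                         tzinfo=datetime.timezone.utc)
    # 4 minutes of clock time per degree of hour angle past solar noon.
    return midnight + datetime.timedelta(minutes=noon_minutes + 4.0 * ha_deg)

# London (51.5 N, 0.1 W) on the June solstice: roughly 20:15 UTC,
# a few minutes before the refraction-corrected value.
print(approx_sunset_utc(datetime.date(2023, 6, 21), 51.5, -0.1))
```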
Calculate dusk time in the UTC timezone.
def dusk_utc(self, date, latitude, longitude): julianday = self._julianday(date.day, date.month, date.year) if latitude > 89.8: latitude = 89.8 if latitude < -89.8: latitude = -89.8 t = self._jday_to_jcentury(julianday) eqtime = self._eq_of_time(t) solarDec = self._sun_declination(t) try: hourangle = self._hour_angle_sunset(latitude, solarDec) except: raise AstralError('Sun remains below horizon on this day, at this location.') delta = longitude - degrees(hourangle) timeDiff = 4.0 * delta timeUTC = 720.0 + timeDiff - eqtime newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0) eqtime = self._eq_of_time(newt) solarDec = self._sun_declination(newt) hourangle = self._hour_angle_dusk(latitude, solarDec, self._depression) delta = longitude - degrees(hourangle) timeDiff = 4 * delta timeUTC = 720 + timeDiff - eqtime timeUTC = timeUTC/60.0 hour = int(timeUTC) minute = int((timeUTC - hour) * 60) second = int((((timeUTC - hour) * 60) - minute) * 60) if second > 59: second -= 60 minute += 1 elif second < 0: second += 60 minute -= 1 if minute > 59: minute -= 60 hour += 1 elif minute < 0: minute += 60 hour -= 1 if hour > 23: hour -= 24 date += datetime.timedelta(days=1) elif hour < 0: hour += 24 date -= datetime.timedelta(days=1) dusk = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc) return dusk
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def utcTime():\r\n return calendar.timegm(time.gmtime())", "def _get_tz():\n return 'UTC'", "def get_time():\n\teastern = timezone('US/Eastern')\n\tnow = datetime.datetime.now(eastern).time()\n\treturn(now)", "def timezone():\n \n pass", "def nowUTC():\n return datetime.datetime.now(pytz.utc)", "def dawn_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n \n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dawn(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n \n dawn = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dawn", "def get_current_india_time():\n india_offset = datetime.timedelta(hours=5, minutes=30)\n in_time = datetime.datetime.utcnow() + india_offset\n return in_time", "def timezone():\n\n return time.timezone", "def get_time():\n # Use this one for production:\n now_time = pendulum.now(tz=pendulum.timezone(\"America/New_York\"))\n # Use this one for testing and modify as needed:\n # now_time = pendulum.datetime(2019, 7, 21, 20, 00, tz='America/New_York')\n\n return now_time", "def _getUTC(self, config = {} ):\n # Default implementation: get system local time\n return datetime.datetime.utcnow()", "def utc_now():\n realtime = datetime.utcnow()\n realtime = pytz.utc.localize(realtime)\n return realtime", "def localtime2utc(date):\n return date + (datetime.utcnow() - datetime.now())", "def get_utc_now():\n return datetime.datetime.utcnow().replace(tzinfo=pytz.timezone(\"UTC\"))", "def utcnow():\n return datetime.utcnow().replace(tzinfo=UTC)", "def _get_datetime():\n pst_now = datetime.datetime.utcnow().astimezone(pytz.timezone(\"America/Los_Angeles\"))\n return pst_now.strftime(\"%a %b %e %H:%M %Z %G\")", "def get_timezone():\n return dates.get_timezone(_get_tz())", "def now_utc() -> datetime:\n return datetime.now(timezone.utc)", "async def time(self) -> dt.time:\n now = await self.AD.sched.get_now()\n return now.astimezone(self.AD.tz).time()", "def utc_now():\n return datetime.now(tz=timezone.utc)", "def get_time(self):\r\n\t\tactual_time = '{}, {} de {} del {} ({}:{} {} (UTC))'\r\n\t\tda_taim = actual_time.format(self.wday, self.day, self.mon,\r\n\t\t\t\t\t\t\t\t\t self.year, self.hour, self.min,\r\n\t\t\t\t\t\t\t\t\t self.day_or_night)\r\n\t\treturn da_taim", "def getUtcSeconde(self) -> int:\n ...", "def get_utc_offset():\n timedelta = 
datetime.datetime.now() - datetime.datetime.utcnow()\n # XXX: `return -time.timezone`?\n return timedelta.total_seconds()", "def __get_stock_time(stock_tz: timezone) -> datetime:\n return datetime.now().astimezone(stock_tz)", "def _local_time_offset():\n if time.localtime().tm_isdst and time.daylight:\n return -time.altzone\n else:\n return -time.timezone", "def utcnow():\n utctime = datetime.datetime.utcnow()\n utcstamp = utctime.strftime(\"%Y-%m-%d %H:%M:%S\")\n return utcstamp", "def utcnow(cls):\n t = _time.time()\n return cls.utcfromtimestamp(t)", "def getutv(self):\n t = datetime.datetime.now()\n utc_seconds = (time.mktime(t.timetuple()))\n utc_seconds = int(utc_seconds * 1000)\n return str(utc_seconds)", "def utc():\n return date_from_utc(dt.utcnow())", "def time_zone():\n return timezone('Etc/GMT-10')", "def utcnow() -> datetime.datetime:\n return datetime.datetime.utcnow().replace(tzinfo=pytz.UTC)", "def now(self):\n if 'timezone' in self._data:\n return pytz.utc.localize(datetime.datetime.utcnow()).astimezone(pytz.timezone(self._data['timezone']))\n else:\n return pytz.utc.localize(datetime.datetime.utcnow())", "def test_time_to_commute_retrieved_from_google_api_in_posix_is_converted_to_utc(self):\n result = calculate_time_of_commute(\n origin_name='Gatwick Airport',\n destination_name='Kings Cross St Pancras',\n )\n assert type(result) == datetime\n assert result.tzinfo is None # Assert it is a naive datetime", "def utcnow_ts():\r\n return calendar.timegm(utcnow().timetuple())", "def dusk(self, date=None, local=True):\n \n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n dusk = self.astral.dusk_utc(date, self.latitude, self.longitude)\n\n if local:\n return dusk.astimezone(self.tz) \n else:\n return dusk", "def get_time(self):\n return datetime.datetime.now(self.time_zone)", "def utc(self):\n return self._utc", "def itow2utc(itow: int) -> datetime.time:\r\n\r\n utc = datetime(1980, 1, 6) + timedelta(seconds=(itow / 1000) - (35 - 19))\r\n return utc.time()", "def utc2localtime(date):\n return date - (datetime.utcnow() - datetime.now())", "def utctime(stamp):\n return stamp + utc_offset", "def now_dt(tz='UTC'):\n if tz != 'UTC':\n raise NotImplementedError()\n return datetime.datetime.utcnow().replace(tzinfo = pytz.utc)", "def datetime_utc_now() -> datetime:\n return datetime.now(timezone.utc)", "def static_now():\n return datetime.datetime(2000, 9, 4).replace(tzinfo=timezone.utc)", "def now_datetime():\n return datetime.utcnow().replace(tzinfo=timezone)", "def localTimeOffset(t=None):\n\tif t is None:\n\t\tt = time.time()\n\t\n\tif time.localtime(t).tm_isdst and time.daylight:\n\t\treturn -time.altzone\n\telse:\n\t\treturn -time.timezone", "def timetz(self):\n return time(\n self.hour,\n self.minute,\n self.second,\n self.microsecond,\n self._tzinfo,\n fold=self.fold,\n )", "def start_hour_utc(self) -> int:\n return pulumi.get(self, \"start_hour_utc\")", "def GetUtcTime(dt, tz):\n #Enriches the given time with the given timezone. 
For example 5 pm is enriched\n #to 5 pm EST, taking into account DST.\n local_time = tz.localize(dt)\n #We convert to utc\n utc_time = local_time.astimezone(pytz.utc)\n #We remove the timezone information ( = naive time)\n return utc_time.replace(tzinfo=None)", "def _now():\n return datetime.now(timezone.utc).astimezone()", "def now():\n return datetime.datetime.now(pytz.utc)", "def stamp_time(utc):\n return utc.replace(hour=15, minute=30, second=0, microsecond=0)", "def setup_datetime(self):\n \n current_date_time = datetime.now()\n timezone_diference = timedelta(hours=-3)\n return timezone(timezone_diference), current_date_time", "def latest_synop_time()-> datetime:\n utc = datetime.utcnow()\n\n if utc.hour < 1:\n utc = utc - timedelta(days=1)\n utc = utc.replace(hour=18)\n elif utc.hour < 7:\n utc = utc.replace(hour=0)\n elif utc.hour < 13:\n utc = utc.replace(hour=6)\n elif utc.hour < 19:\n utc = utc.replace(hour=12)\n else:\n utc = utc.replace(hour=18)\n\n utc.replace(minute=0, second=0)\n return utc", "def local_tz(self):\n return pytz.timezone(self.calendar.timezone)", "def db_datetime_utc():\n t = datetime.datetime.utcnow()\n return time.mktime(t.timetuple())", "def GetTimeAndZone(self):\n return self.hour, self.minute, self.second, self.zDirection, self.zOffset", "def get_time(self) -> int:\n t = str(self.eval(\"pyb.RTC().datetime()\").encode(\"utf-8\"))[1:-1].split(\", \")\n return int(t[4]) * 3600 + int(t[5]) * 60 + int(t[6])", "def Utc(self):\n return _EPOCH + datetime.timedelta(days=self.ut)", "def get_today_utc():\n return pytz.utc.localize(datetime.datetime.utcnow()).replace(\n hour=0, minute=0, second=0, microsecond=0\n )", "def get_today_utc():\n return pytz.utc.localize(datetime.datetime.utcnow()).replace(\n hour=0, minute=0, second=0, microsecond=0\n )", "def now(self):\n os.environ['TZ'] = conf.timezone\n time.tzset()\n return time.strftime(\"%B %d %Y %H:%M:%S IST\", time.localtime())", "def utcnow():\r\n if utcnow.override_time:\r\n try:\r\n return utcnow.override_time.pop(0)\r\n except AttributeError:\r\n return utcnow.override_time\r\n return datetime.datetime.utcnow()", "def now():\n return utcfromtimestamp(time.time())", "def _utc_date(self):\n if self.date_stamp == '0':\n return '0'\n else:\n if '.' 
in self.date_stamp:\n t = datetime.datetime.strptime(self.date_stamp,\n '%Y%m%d%H%M%S.%f')\n else:\n t = datetime.datetime.strptime(self.date_stamp,\n '%Y%m%d%H%M%S')\n tdelta = datetime.timedelta(hours = int(self.tzone[1:3]),\n minutes = int(self.tzone[3:5]))\n \n if self.tzone[0] == '-':\n ut = t - tdelta\n return ut.strftime('%Y%m%d%H%M%S.%f')\n else:\n ut = t + tdelta\n return ut.strftime('%Y%m%d%H%M%S.%f')", "def datetime_utcnow() -> datetime:\n return datetime.now(tz=pytz.timezone('UTC'))", "def test_get_current_time_is_in_utc() -> None:\n time_provider = TimeProvider()\n current_time = time_provider.get_current_time()\n\n assert current_time.as_datetime().timezone == UTC", "def utcnow():\n if utcnow.override_time:\n try:\n return utcnow.override_time.pop(0)\n except AttributeError:\n return utcnow.override_time\n return datetime.datetime.utcnow()", "def FromNowUTC(cls):\n t = pytime.time()\n utcTime = pytime.gmtime(t)\n return cls.FromStructTime(utcTime).WithZone(zDirection=0)", "async def get_now(self) -> dt.datetime:\n now = await self.AD.sched.get_now()\n return now.astimezone(self.AD.tz)", "def localtime(stamp):\n return stamp - utc_offset", "def local_datetime(dt):\n return dt - datetime.timedelta(seconds=utc_offset)", "def get_time() -> dict:\n prague = pytz.timezone('Europe/Prague')\n now = prague.localize(datetime.now())\n fmt = '%H:%M'\n au_tz = pytz.timezone('Australia/Sydney')\n sydney = now.astimezone(au_tz).strftime(fmt)\n lon_tz = pytz.timezone('Europe/London')\n london = now.astimezone(lon_tz).strftime(fmt)\n ny_tz = pytz.timezone('US/Eastern')\n ny = now.astimezone(ny_tz).strftime(fmt)\n sf_tz = pytz.timezone('US/Pacific')\n sf = now.astimezone(sf_tz).strftime(fmt)\n return {\"sydney\": sydney, \"london\": london, \"ny\": ny, \"sf\": sf}", "def _get_time(self):\n # get the current time in UTC (make sure we are timezone aware)\n now_utc = datetime.datetime.now(pytz.UTC)\n \n # convert to our local timezone\n timenow = now_utc.astimezone(self.timezone)\n \n # save the data to our data\n self.data['year'][0] = timenow.year\n self.data['month'][0] = timenow.month\n self.data['day'][0] = timenow.day\n self.data['hour'][0] = timenow.hour\n self.data['minute'][0] = timenow.minute\n self.data['second'][0] = timenow.second\n \n return", "def _clock_time(self):\n return self._shifted_time % (24*3600)", "def brasilia_day():\n return (dt.datetime.utcnow() + dt.timedelta(hours=-3)).replace(hour=0, minute=0, second=0, microsecond=0)", "def _now():\n return datetime.datetime.utcnow().replace(tzinfo=pytz.utc)", "def ntp_to_system_time(date):\n return date - NTP_DELTA", "def get_now_hour_utc(no_microseconds=True):\n if no_microseconds:\n return datetime.time.utcnow().replace(microsecond=0).time()\n else:\n return datetime.time.utcnow().time()", "def get_utc(local_tm, tz):\n utc_tz = pytz.utc\n utc_now = datetime.utcnow().replace(tzinfo=utc_tz)\n local_tz = pytz.timezone(tz)\n local_now = local_tz.normalize(utc_now)\n local_alarm = local_now.replace(hour=local_tm.hour, minute=local_tm.minute)\n utc_alarm = utc_tz.normalize(local_alarm)\n return utc_alarm.time()", "def time_to_live_utc(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"time_to_live_utc\")", "def get_unixtime(self):\n if not self.Complete():\n raise DateTimeError(\"get_unixtime requires complete timepoint\")\n zoffset = self.time.GetZoneOffset()\n if zoffset is None:\n raise DateTimeError(\"get_unixtime requires timezone\")\n elif zoffset == 0:\n zt = self\n else:\n zt = self.ShiftZone(zDirection=0)\n 
days = zt.date.GetAbsoluteDay() - EPOCH.date.GetAbsoluteDay()\n seconds = zt.time.GetTotalSeconds() - EPOCH.time.GetTotalSeconds()\n return 86400 * days + seconds", "def time_detected(self) -> datetime:\n return datetime.fromtimestamp(\n self.properties[DBUS_ATTR_TIME_DETECTED] * 10**-6\n ).astimezone(timezone.utc)", "def _get_now():\n return datetime.now(tz=timezone.utc)", "def currentTime():\n zone='America/Bogota'\n getDate = datetime.now(pytz.timezone(zone));\n #Format -> d/m/Y H:M:S\n return getDate", "def _getUpTime(self):\n diff = (datetime.datetime.now() - self._startTime).__str__()\n return diff[:diff.find('.')]", "async def time():\n utc_moment = datetime.utcnow()\n utc_moment = utc_moment.replace(tzinfo=pytz.utc)\n formatting = \"%Y-%m-%d %H:%M:%S\"\n timezone = \"Europe/Moscow\"\n timezone_dt = utc_moment.astimezone(pytz.timezone(timezone))\n dt_str = timezone_dt.strftime(formatting)\n storage.add_data(dt_str)\n return {\"Moscow datetime\": dt_str}", "def get_now_hour_utc(no_microseconds=True):\n if no_microseconds:\n return datetime.datetime.utcnow().replace(microsecond=0).time()\n else:\n return datetime.datetime.utcnow().time()", "def dawn(self, date=None, local=True):\n\n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n dawn = self.astral.dawn_utc(date, self.latitude, self.longitude)\n\n if local:\n return dawn.astimezone(self.tz) \n else:\n return dawn", "def utctime(self) -> datetime:\n return datetime.utcfromtimestamp(float(self.ns_since_epoch) / 1e9)", "def dest_time(self) -> float:\n return ntp_to_system_time(self.dest_timestamp)", "def get_uk_time(message):\n time_api = 'http://worldtimeapi.org/api/timezone/Europe/London.json'\n london_time = requests.get(time_api).json()\n\n return(\"The current time in London, England is {}\".format(\n london_time['datetime'][11:16]))", "def test_guest_timezone(self):\n self.check_guest_timezone()", "def test_guest_timezone(self):\n self.check_guest_timezone()", "def test_guest_timezone(self):\n self.check_guest_timezone()", "def test_guest_timezone(self):\n self.check_guest_timezone()", "def test_guest_timezone(self):\n self.check_guest_timezone()", "def test_guest_timezone(self):\n self.check_guest_timezone()", "def test_guest_timezone(self):\n self.check_guest_timezone()", "def test_guest_timezone(self):\n self.check_guest_timezone()", "def test_guest_timezone(self):\n self.check_guest_timezone()", "def test_guest_timezone(self):\n self.check_guest_timezone()" ]
[ "0.69508314", "0.6756243", "0.646933", "0.64476836", "0.64054435", "0.6378315", "0.6367498", "0.6344899", "0.630024", "0.6293508", "0.62492496", "0.62225604", "0.6172558", "0.615256", "0.6152383", "0.6115891", "0.61116207", "0.610577", "0.6104182", "0.6066876", "0.606603", "0.60462725", "0.6027401", "0.6025502", "0.5996779", "0.59947246", "0.5994004", "0.5986161", "0.5967244", "0.5965447", "0.5961886", "0.5961255", "0.5951371", "0.59473985", "0.59273434", "0.59154844", "0.58910614", "0.5888261", "0.5887904", "0.58828145", "0.58416617", "0.58213204", "0.581673", "0.5809171", "0.5807685", "0.580337", "0.57974595", "0.5792982", "0.5788415", "0.5775532", "0.57741", "0.57705754", "0.57703805", "0.5764446", "0.57640845", "0.5756957", "0.5744366", "0.5742444", "0.5742444", "0.5739253", "0.5733985", "0.57315683", "0.57290745", "0.5713496", "0.5709189", "0.5707781", "0.5701601", "0.56917566", "0.5686502", "0.5673671", "0.56724346", "0.567179", "0.5660506", "0.56523705", "0.5639962", "0.5639332", "0.5639044", "0.56362545", "0.56235826", "0.5618309", "0.561464", "0.5611775", "0.56088483", "0.5608531", "0.5598935", "0.5595502", "0.5590177", "0.5588818", "0.5588209", "0.55858177", "0.5584696", "0.5584696", "0.5584696", "0.5584696", "0.5584696", "0.5584696", "0.5584696", "0.5584696", "0.5584696", "0.5584696" ]
0.7123817
0
Calculate rahukaalam times in the UTC timezone.
def rahukaalam_utc(self, date, latitude, longitude):
    if date is None:
        date = datetime.date.today()

    try:
        sunrise = self.sunrise_utc(date, latitude, longitude)
        sunset = self.sunset_utc(date, latitude, longitude)
    except:
        raise AstralError('Sun remains below horizon on this day, at this location.')

    octant_duration = (sunset - sunrise) / 8

    # Mo,Sa,Fr,We,Th,Tu,Su
    octant_index = [1,6,4,5,3,2,7]

    weekday = date.weekday()
    octant = octant_index[weekday]

    start = sunrise + (octant_duration * octant)
    end = start + octant_duration

    return {'start': start, 'end': end}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rahukaalam(self, date=None, local=True):\n\n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n rahukaalam = self.astral.rahukaalam_utc(date, self.latitude, self.longitude)\n\n if local:\n for key, dt in rahukaalam.items():\n rahukaalam[key] = dt.astimezone(self.tz)\n \n return rahukaalam", "def timezone():\n \n pass", "def set_time_by_timezone(df):\n df = set_city_time_by_timezone(df, 1078, 3)\n df = set_city_time_by_timezone(df, 22390, 4)\n df = set_city_time_by_timezone(df, 22430, 4)\n df = set_city_time_by_timezone(df, 22438, 5)\n return df", "def get_ph_time(as_array=False):\n utc = timezone('UTC')\n phtz = timezone('Asia/Manila')\n now = utc.localize(datetime.utcnow())\n now = now.astimezone(phtz)\n if as_array:\n return [now.year, now.month, now.day, now.hour, now.minute, now.second]\n else:\n return datetime(now.year, now.month, now.day, now.hour, now.minute, now.second)", "def utcTime():\r\n return calendar.timegm(time.gmtime())", "def _get_tz():\n return 'UTC'", "def get_utc(local_tm, tz):\n utc_tz = pytz.utc\n utc_now = datetime.utcnow().replace(tzinfo=utc_tz)\n local_tz = pytz.timezone(tz)\n local_now = local_tz.normalize(utc_now)\n local_alarm = local_now.replace(hour=local_tm.hour, minute=local_tm.minute)\n utc_alarm = utc_tz.normalize(local_alarm)\n return utc_alarm.time()", "def hmstora(rah,ram,ras):\n\thrs = (float(rah)+(float(ram)/60)+(float(ras)/3600.0)) % 24\n\n\treturn 15*hrs", "def _local_timestamps(self) -> npt.NDArray[np.int64]:\n if self.tz is None or timezones.is_utc(self.tz):\n # Avoid the copy that would be made in tzconversion\n return self.asi8\n return tz_convert_from_utc(self.asi8, self.tz, reso=self._creso)", "def get_local(utc_time, tz):\n utc_tz = pytz.utc\n utc_now = datetime.utcnow().replace(tzinfo=utc_tz)\n utc_alarm = utc_now.replace(hour=utc_time.hour, minute=utc_time.minute)\n local_tz = pytz.timezone(tz)\n local_alarm = local_tz.normalize(utc_alarm)\n return local_alarm.time()", "def get_times(my_vars):\n base_time = my_vars['base_time'].getValue()\n try:\n times=my_vars['time']\n except KeyError:\n times = my_vars['time_offset']\n\n ts = []\n for time in times:\n temp = datetime.utcfromtimestamp(base_time+time)\n if (temp.minute == 0) :\n ts.append(temp)\n return ts", "def localtime2utc(date):\n return date + (datetime.utcnow() - datetime.now())", "def _get_time_utc(time_utc_str):\n dt = datetime.strptime(time_utc_str, TIME_FORMAT)\n return int(calendar.timegm(dt.utctimetuple()))", "def computeSunTime(self, latitude, longitude, startDate, endDate): \n self.sun = sun(lat=latitude, long=longitude)\n dateTime = datetime.datetime.combine(startDate, datetime.time(hour=8))\n while dateTime.date() <= endDate: \n daytimeStart, daytimeEnd = self.computeDaytimeStartEnd(dateTime)\n self.sunriseTimeDict[dateTime.date()] = daytimeStart\n self.sunsetTimeDict[dateTime.date()] = daytimeEnd\n dateTime += self.oneDayDelta", "def ut1_utc_rate(self):\n values = self._interpolate_table(\"ut1_utc\", leap_second_correction=True, derivative_order=1)\n # values += self._corrections((\"ortho_eop\", iers.ortho_eop, 2, 1e-6), (\"utlibr\", iers.utlibr, 0, 1e-6))\n\n # Low frequency tides\n # if \"rg_zont2\" in self.models:\n # correction_cache = self._correction_cache.setdefault(\"rg_zont2\", dict())\n # # Julian centuries since J2000\n # t_julian_centuries = (self.time.tt.jd - 2451545.0) / 36525\n #\n # if self.time.isscalar:\n # mjd = self.time.tt.mjd\n # if mjd not in correction_cache:\n # correction_cache[mjd] = 
iers.rg_zont2(t_julian_centuries)[0]\n # dut1_corr = correction_cache[mjd]\n # else:\n # dut1_corr = list()\n # for t in self.time.tt:\n # if t.mjd not in correction_cache:\n # t_julian_centuries = (t.tt.jd - 2451545.0) / 36525\n # correction_cache[t.mjd] = iers.rg_zont2(t_julian_centuries)[0]\n # dut1_corr.append(correction_cache[t.mjd])\n #\n # values += dut1_corr\n # return values\n return values", "def localize_time_utc(non_utc_time):\n return pytz.utc.localize(non_utc_time)", "def localize_time_utc(non_utc_time):\n return pytz.utc.localize(non_utc_time)", "def sunset_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n sunset = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return sunset", "def test_timezones(self):\n a_user = User.objects.create()\n user = VSBUser.objects.create(user=a_user)\n\n today_datetime = timezone.datetime.today()\n today_datetime = timezone.datetime(year=today_datetime.year, month=today_datetime.month, day=today_datetime.day)\n\n tomorrow_late_EST = timezone.make_aware(today_datetime + timezone.timedelta(hours=23), timezone=pytz.timezone('US/Eastern'))\n tomorrow_last_UTC = (tomorrow_late_EST + timezone.timedelta(minutes=10)).astimezone(pytz.utc)\n ETC_event = CalenderEvent.objects.create(user=user, time=tomorrow_late_EST)\n UTC_event = CalenderEvent.objects.create(user=user, time=tomorrow_last_UTC)\n\n received = util.bucket_calenderevents(user.calenderevent_set)\n\n self.assertEqual(received, [[ETC_event, UTC_event]], msg=\"CalenderEvents.timezones: Timezones failed to align.\")", "def ut1_utc(self):\n values = self._interpolate_table(\"ut1_utc\", leap_second_correction=True)\n values += self._corrections((\"ortho_eop\", iers.ortho_eop, 2, 1e-6), (\"utlibr\", iers.utlibr, 0, 1e-6))\n\n # low frequency tides\n if \"rg_zont2\" in self.models:\n correction_cache = self._correction_cache.setdefault(\"rg_zont2\", dict())\n # Julian centuries since J2000\n t_julian_centuries = (self.time.tt.jd - 2451545.0) / 36525\n\n if self.time.isscalar:\n mjd = self.time.tt.mjd\n if mjd not in correction_cache:\n correction_cache[mjd] = iers.rg_zont2(t_julian_centuries)[0]\n dut1_corr = correction_cache[mjd]\n else:\n dut1_corr = list()\n for t in self.time.tt:\n if t.mjd not in correction_cache:\n t_julian_centuries = (t.tt.jd - 2451545.0) / 
36525\n correction_cache[t.mjd] = iers.rg_zont2(t_julian_centuries)[0]\n dut1_corr.append(correction_cache[t.mjd])\n\n values += dut1_corr\n return values", "def tai_to_utc(tai, config, time_format=\"%Y/%j-%H:%M:%S\"):\n epoch = dt.datetime.strptime(config.config['runtime']['epoch'], \"%m/%d/%Y-%H:%M:%S\")\n try:\n utc = epoch + dt.timedelta(seconds=int(tai))\n except OverflowError:\n utc = epoch\n return utc.strftime(time_format)", "def ensure_tucson_time():\n if 'TZ' not in os.environ.keys() or os.environ['TZ'] != 'US/Arizona':\n os.environ['TZ'] = 'US/Arizona'\n time.tzset()", "def brasilia_time():\n brasilia_time = pd.Timestamp.now('UTC') - pd.Timedelta(hours=3)\n return brasilia_time", "def time_to_live_utc(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"time_to_live_utc\")", "def stamp_time(utc):\n return utc.replace(hour=15, minute=30, second=0, microsecond=0)", "def localize_time(self, apitime):\n return self.feedzone.localize(apitime).astimezone(self.localzone)", "def time_zone():\n return timezone('Etc/GMT-10')", "def round_trip_time(self):\n ...", "def timezone():\n\n return time.timezone", "def get_time(self):\r\n\t\tactual_time = '{}, {} de {} del {} ({}:{} {} (UTC))'\r\n\t\tda_taim = actual_time.format(self.wday, self.day, self.mon,\r\n\t\t\t\t\t\t\t\t\t self.year, self.hour, self.min,\r\n\t\t\t\t\t\t\t\t\t self.day_or_night)\r\n\t\treturn da_taim", "def getUnixTime(utc_time):\n\ttemp=time.strptime(utc_time, \"%a %b %d %H:%M:%S +0000 %Y\")\n\treturn calendar.timegm(temp)", "def within_schedule(utc, *timezones):\n utc_aware = utc.replace(tzinfo=pytz.utc)\n\n localized_times = []\n\n for tz in timezones:\n if tz not in TIMEZONES:\n raise ValueError('not a valid timezone')\n\n tz = pytz.timezone(tz)\n localized_times.append(utc_aware.astimezone(tz))\n\n return all(dt.hour in MEETING_HOURS for dt in localized_times)", "def itow2utc(itow: int) -> datetime.time:\r\n\r\n utc = datetime(1980, 1, 6) + timedelta(seconds=(itow / 1000) - (35 - 19))\r\n return utc.time()", "def local_to_utc(local: pd.Series, tz: Iterable, **kwargs: Any) -> pd.Series:\n return local.groupby(tz).transform(\n lambda x: x.dt.tz_localize(\n datetime.timezone(datetime.timedelta(hours=x.name))\n if isinstance(x.name, (int, float))\n else x.name,\n **kwargs,\n ).dt.tz_convert(None)\n )", "def get_time() -> dict:\n prague = pytz.timezone('Europe/Prague')\n now = prague.localize(datetime.now())\n fmt = '%H:%M'\n au_tz = pytz.timezone('Australia/Sydney')\n sydney = now.astimezone(au_tz).strftime(fmt)\n lon_tz = pytz.timezone('Europe/London')\n london = now.astimezone(lon_tz).strftime(fmt)\n ny_tz = pytz.timezone('US/Eastern')\n ny = now.astimezone(ny_tz).strftime(fmt)\n sf_tz = pytz.timezone('US/Pacific')\n sf = now.astimezone(sf_tz).strftime(fmt)\n return {\"sydney\": sydney, \"london\": london, \"ny\": ny, \"sf\": sf}", "def within_schedule(utc, *timezones):\n utc = pytz.utc.localize(utc)\n for timezone in timezones:\n if timezone not in TIMEZONES:\n raise ValueError\n city_local_time = utc.astimezone(pytz.timezone(timezone))\n if city_local_time.hour not in MEETING_HOURS:\n return False\n return True", "def localtime(stamp):\n return stamp - utc_offset", "def utc_now():\n realtime = datetime.utcnow()\n realtime = pytz.utc.localize(realtime)\n return realtime", "def test_time_to_commute_retrieved_from_google_api_in_posix_is_converted_to_utc(self):\n result = calculate_time_of_commute(\n origin_name='Gatwick Airport',\n destination_name='Kings Cross St Pancras',\n )\n assert type(result) == 
datetime\n assert result.tzinfo is None # Assert it is a naive datetime", "def resolved_at_to_utc(user_time, user_tz):\n if isinstance(user_tz, str):\n user_tz = dateutil.tz.gettz(user_tz)\n\n localized_time = user_time.replace(tzinfo=user_tz)\n return localized_time.to(\"UTC\").datetime", "def _local_time_offset():\n if time.localtime().tm_isdst and time.daylight:\n return -time.altzone\n else:\n return -time.timezone", "def Timezones():\n return sorted(list(PytzCache._zmap.values()))", "def utc2localtime(date):\n return date - (datetime.utcnow() - datetime.now())", "def time_zone(self):\n\n\t\tg = geocoders.GoogleV3()\n\n\t\t#Gives the name of the timezone, ex: Africa/Luanda\n\t\ttimezone_name = str(g.timezone((self.latitude_value(), self.longitude_value())))\n\n\t\t#Returns the numeric value of the timezone, ex: +0100\n\t\treturn int(pytz.timezone(timezone_name).localize(datetime.datetime(2011,1,1)).strftime('%z'))/100", "def within_schedule(utc, *timezones):\n\n try:\n tz = [pytz.timezone(_timezone) for _timezone in timezones]\n except: \n raise ValueError\n\n tz_hour = [tz.fromutc(utc).hour for tz in tz]\n return all(item in MEETING_HOURS for item in tz_hour)", "def calculateScheduleUR(self, updateRanges):\n schedule = self.blankSchedule()\n for uRange in updateRanges:\n urMin = (uRange['position'][0] * 24 + uRange['position'][1] - uRange['width'])\n urMax = (uRange['position'][0] * 24 + uRange['position'][1] + uRange['width'])\n for i in range(urMin, urMax + 1):\n day = i%(7*24)/24\n hour = i%(7*24) - day*24\n schedule[day][hour] = 1\n return schedule", "def __correct_token_time(self, t_time=None):\n\n if t_time is None:\n t_time = time.time()\n\n if time.localtime(t_time).tm_isdst and time.daylight:\n return -time.altzone\n else:\n return -time.timezone", "def convertToUsersTimeZone(self, timestamp):\n user = self.zport.dmd.ZenUsers.getUserSettings()\n if user.timezone:\n utc_dt = pytz.utc.localize(datetime.utcfromtimestamp(int(timestamp)))\n tz = pytz.timezone(user.timezone)\n tval = tz.normalize(utc_dt.astimezone(tz))\n return tval.strftime(convertJsTimeFormatToPy(user.dateFormat+\" \"+user.timeFormat))\n return isoDateTime(timestamp)", "def test_tzinfo(self):\n if tzset is None:\n raise SkipTest(\"Platform cannot change timezone; unable to verify offsets.\")\n\n def testForTimeZone(name, expectedOffsetDST, expectedOffsetSTD):\n setTZ(name)\n\n localDST = mktime((2006, 6, 30, 0, 0, 0, 4, 181, 1))\n localSTD = mktime((2007, 1, 31, 0, 0, 0, 2, 31, 0))\n\n tzDST = FixedOffsetTimeZone.fromLocalTimeStamp(localDST)\n tzSTD = FixedOffsetTimeZone.fromLocalTimeStamp(localSTD)\n\n self.assertEqual(tzDST.tzname(localDST), \"UTC{}\".format(expectedOffsetDST))\n self.assertEqual(tzSTD.tzname(localSTD), \"UTC{}\".format(expectedOffsetSTD))\n\n self.assertEqual(tzDST.dst(localDST), timedelta(0))\n self.assertEqual(tzSTD.dst(localSTD), timedelta(0))\n\n def timeDeltaFromOffset(offset):\n assert len(offset) == 5\n\n sign = offset[0]\n hours = int(offset[1:3])\n minutes = int(offset[3:5])\n\n if sign == \"-\":\n hours = -hours\n minutes = -minutes\n else:\n assert sign == \"+\"\n\n return timedelta(hours=hours, minutes=minutes)\n\n self.assertEqual(\n tzDST.utcoffset(localDST), timeDeltaFromOffset(expectedOffsetDST)\n )\n self.assertEqual(\n tzSTD.utcoffset(localSTD), timeDeltaFromOffset(expectedOffsetSTD)\n )\n\n addTZCleanup(self)\n\n # UTC\n testForTimeZone(\"UTC+00\", \"+0000\", \"+0000\")\n # West of UTC\n testForTimeZone(\"EST+05EDT,M4.1.0,M10.5.0\", \"-0400\", \"-0500\")\n # East of UTC\n 
testForTimeZone(\"CEST-01CEDT,M4.1.0,M10.5.0\", \"+0200\", \"+0100\")\n # No DST\n testForTimeZone(\"CST+06\", \"-0600\", \"-0600\")", "def unix_to_localtime(t, tz=\"US/Eastern\"):\n\n from datetime import datetime\n from pytz import timezone\n import pytz\n\n utc = pytz.utc\n tz = timezone(tz)\n\n timestamp = datetime.utcfromtimestamp(t)\n\n return(utc.localize(timestamp).astimezone(tz).strftime(\"%H:%M:%S\"))", "def utcnow_ts():\r\n return calendar.timegm(utcnow().timetuple())", "def get_timzone_offset(self, timezone):\n raise NotImplementedError", "def timetz(self):\n return time(\n self.hour,\n self.minute,\n self.second,\n self.microsecond,\n self._tzinfo,\n fold=self.fold,\n )", "def get_ruuvitag_scan_time(self):\n results = {}\n\n with psycopg.connect(create_db_conn_string(self._config['db'])) as conn:\n with conn.cursor() as cursor:\n for location in self._config['ruuvitag']['Location'].split(','):\n cursor.execute(\"\"\"SELECT recorded FROM ruuvitag_observations WHERE\n location = %s ORDER BY recorded DESC LIMIT 1\"\"\", (location,))\n\n result = cursor.fetchone()\n results[location] = result[0] if result else datetime.now()\n\n return results", "def _get_time(self):\n # get the current time in UTC (make sure we are timezone aware)\n now_utc = datetime.datetime.now(pytz.UTC)\n \n # convert to our local timezone\n timenow = now_utc.astimezone(self.timezone)\n \n # save the data to our data\n self.data['year'][0] = timenow.year\n self.data['month'][0] = timenow.month\n self.data['day'][0] = timenow.day\n self.data['hour'][0] = timenow.hour\n self.data['minute'][0] = timenow.minute\n self.data['second'][0] = timenow.second\n \n return", "def latest_synop_time()-> datetime:\n utc = datetime.utcnow()\n\n if utc.hour < 1:\n utc = utc - timedelta(days=1)\n utc = utc.replace(hour=18)\n elif utc.hour < 7:\n utc = utc.replace(hour=0)\n elif utc.hour < 13:\n utc = utc.replace(hour=6)\n elif utc.hour < 19:\n utc = utc.replace(hour=12)\n else:\n utc = utc.replace(hour=18)\n\n utc.replace(minute=0, second=0)\n return utc", "def _convertTZ(self):\n tz = timezone.get_current_timezone()\n dtstart = self['DTSTART']\n dtend = self['DTEND']\n if dtstart.zone() == \"UTC\":\n dtstart.dt = dtstart.dt.astimezone(tz)\n if dtend.zone() == \"UTC\":\n dtend.dt = dtend.dt.astimezone(tz)", "def localize(self, dt):\n\n #\n # TODO: implement various RRULE styles (at least common ones..)\n # possibly move rrule parsing into own classes because it's used by VEVENT as well\n # TODO: move get x-th day of month, first sunday, etc in separate functions\n\n logging.debug('localizing %s for timezone %s', (dt, self.tzid))\n\n cur_timezone = None\n cur_timestamp = None\n\n for t in self._times:\n dtstart = t['DTSTART']\n\n if 'RRULE' in t.keys():\n target_date = None\n vals = {}\n for k in t['RRULE'].split(';'):\n (key, value) = k.split('=')\n vals[key] = value\n\n if 'FREQ' in vals.keys():\n if vals['FREQ'] == 'YEARLY':\n month = int(vals['BYMONTH'])\n day = vals['BYDAY']\n\n if not day.isnumeric():\n wd = day[-2:]\n if day[:1] == \"-\":\n cnt = int(day[1:2])\n year = datetime.today().year\n month = (month + 1) % 12\n if month == 1:\n year += 1\n\n start_date = datetime(year, int(month), 1)\n\n day_num = start_date.weekday()\n day_num_target = VTIMEZONE._weekdays.index(wd)\n days_ago = (7 + day_num - day_num_target) % 7\n if days_ago == 0:\n days_ago = 7\n target_date = start_date - timedelta(days=days_ago + ((cnt-1)*7))\n\n else:\n cnt = int(day[:1])\n\n start_date = datetime(datetime.today().year, int(month), 1)\n\n 
day_num = start_date.weekday()\n day_num_target = VTIMEZONE._weekdays.index(wd)\n days_ago = (7 + day_num_target - day_num) % 7\n if days_ago == 0:\n days_ago = 7\n target_date = start_date + timedelta(days=days_ago + ((cnt-1)*7))\n\n if target_date is not None:\n if cur_timestamp is None:\n cur_timestamp = target_date\n cur_timezone = t\n else:\n if target_date.date() < dt.date():\n if cur_timestamp.date() > dt.date() or target_date.date() > cur_timestamp.date():\n cur_timestamp = target_date\n cur_timezone = t\n else:\n logging.error('RRULE not implemented yet, no localization possible (%s)' % t['RRULE'])\n\n logging.debug('decided on timezone offset: %s' % cur_timezone['TZOFFSETTO'])\n\n m = re.search(r'([+-])?(\\d\\d)(\\d\\d)', cur_timezone['TZOFFSETTO'])\n\n if m.group(1) == \"-\":\n dt -= timedelta(hours=int(m.group(2)), minutes=int(m.group(3)))\n else:\n dt += timedelta(hours=int(m.group(2)), minutes=int(m.group(3)))\n\n logging.debug('localized to %s' % dt)\n return dt", "def FromNowUTC(cls):\n t = pytime.time()\n utcTime = pytime.gmtime(t)\n return cls.FromStructTime(utcTime).WithZone(zDirection=0)", "def sundial_time(cls, tee):\n date = Clock.fixed_from_moment(tee)\n time = mod(tee, 1)\n q = ifloor(4 * time)\n if q == 0:\n a = cls.sunset(date - 1)\n b = cls.sunrise(date)\n t = Clock.days_from_hours(-6)\n elif q == 3:\n a = cls.sunset(date)\n b = cls.sunrise(date + 1)\n t = Clock.days_from_hours(18)\n else:\n a = cls.sunrise(date)\n b = cls.sunset(date)\n t = Clock.days_from_hours(6)\n return a + (2 * (b - a) * (time - t))", "def to_nztimezone(t):\n from dateutil import tz\n utctz = tz.gettz('UTC')\n nztz = tz.gettz('Pacific/Auckland')\n return [ti.replace(tzinfo=utctz).astimezone(nztz) for ti in pd.to_datetime(t)]", "def test_utc_in_timez(monkeypatch):\n utcoffset8_local_time_in_naive_utc = (\n datetime.datetime(\n year=2020,\n month=1,\n day=1,\n hour=1,\n minute=23,\n second=45,\n tzinfo=datetime.timezone(datetime.timedelta(hours=8)),\n )\n .astimezone(datetime.timezone.utc)\n .replace(tzinfo=None)\n )\n\n class mock_datetime:\n @classmethod\n def utcnow(cls):\n return utcoffset8_local_time_in_naive_utc\n\n monkeypatch.setattr('datetime.datetime', mock_datetime)\n rfc3339_utc_time = str(cherrypy._cplogging.LazyRfc3339UtcTime())\n expected_time = '2019-12-31T17:23:45Z'\n assert rfc3339_utc_time == expected_time", "async def time():\n utc_moment = datetime.utcnow()\n utc_moment = utc_moment.replace(tzinfo=pytz.utc)\n formatting = \"%Y-%m-%d %H:%M:%S\"\n timezone = \"Europe/Moscow\"\n timezone_dt = utc_moment.astimezone(pytz.timezone(timezone))\n dt_str = timezone_dt.strftime(formatting)\n storage.add_data(dt_str)\n return {\"Moscow datetime\": dt_str}", "def GetTimeAndZone(self):\n return self.hour, self.minute, self.second, self.zDirection, self.zOffset", "def dusk_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n 
hourangle = self._hour_angle_dusk(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n dusk = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dusk", "def test_idle_time_alt_time_zone(self):\n FakeDateTime.now = classmethod(lambda cls, tz: datetime(2021, 7, 3, 7, 0, 0, tzinfo=tz))\n # mock_datetime.now.return_value = datetime(2021, 7, 3, 7, 0, 0, tzinfo=MST)\n # mock_datetime.side_effect = lambda *args, **kw: datetime(*args, **kw)\n i = idle_time(tz='US/Pacific')\n self.assertEqual(i, -3600)\n FakeDateTime.now = classmethod(lambda cls, tz: datetime(2021, 7, 3, 11, 0, 0, tzinfo=tz))\n # mock_datetime.return_value = datetime(2021, 7, 3, 11, 0, 0, tzinfo=MST)\n i = idle_time(tz='US/Pacific')\n self.assertEqual(i, 3600)\n FakeDateTime.now = classmethod(lambda cls, tz: datetime(2021, 7, 3, 13, 0, 0, tzinfo=tz))\n # mock_datetime.return_value = datetime(2021, 7, 3, 13, 0, 0, tzinfo=MST)\n i = idle_time(tz='US/Pacific')\n self.assertEqual(i, -3600)", "def ayanamsha(tee):\n return Solar.solar_longitude(tee) - sidereal_solar_longitude(tee)", "def GetSunriseSunset(latitude_deg, longitude_deg, utc_datetime, timezone):\n\n # Day of the year\n day = solar.GetDayOfYear(utc_datetime)\n\n # Solar hour angle\n SHA = ((timezone)* 15.0 - longitude_deg)\n\n # Time adjustment\n TT = (279.134+0.985647*day)*math.pi/180\n\n # Time adjustment in hours\n time_adst = ((5.0323 - 100.976*math.sin(TT)+595.275*math.sin(2*TT)+\n 3.6858*math.sin(3*TT) - 12.47*math.sin(4*TT) - 430.847*math.cos(TT)+\n 12.5024*math.cos(2*TT) + 18.25*math.cos(3*TT))/3600)\n \n # Time of noon\n TON = (12 + (SHA/15.0) - time_adst)\n \n sunn = (math.pi/2-(23.45*math.pi/180)*math.tan(latitude_deg*math.pi/180)*\n math.cos(2*math.pi*day/365.25))*(180/(math.pi*15))\n\n # Sunrise_time in hours\n sunrise_time = (TON - sunn + time_adst)\n \n # Sunset_time in hours\n sunset_time = (TON + sunn - time_adst) \n\n sunrise_time_dt = date_with_decimal_hour(utc_datetime, sunrise_time) \n sunset_time_dt = date_with_decimal_hour(utc_datetime, sunset_time) \n\n return sunrise_time_dt, sunset_time_dt", "def time_zone_minus_8(time):\n return time + timedelta(hours=-8)", "def dawn_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n \n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dawn(latitude, solarDec, self._depression)\n 
delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n \n dawn = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dawn", "def right_time(user):\n timezone = user.timezone\n start_time = float(user.night_start.replace(':', '.'))\n end_time = float(user.night_end.replace(':', '.'))\n\n #Get user's timezon's local time\n local_time = utils.local_from_timezone(timezone)\n return not (local_time >= start_time or local_time < end_time)", "def _calc_times():\n app.logger.debug(\"Got a JSON request\")\n km = request.args.get('km', 999, type=float)\n distance = request.args.get('distance', type = int)\n begin_date = request.args.get('begin_date', type = str)\n begin_time = request.args.get('begin_time', type = str)\n dateAndTime = begin_date + \" \" + begin_time\n time = arrow.get(dateAndTime, 'YYYY-MM-DD HH:mm') \n \n open_time = acp_times.open_time(km, distance, time.isoformat())\n close_time = acp_times.close_time(km, distance, time.isoformat())\n result = {\"open\": open_time, \"close\": close_time}\n return flask.jsonify(result=result)", "def reverse_gmt(timezone: str):\n if \"+\" in timezone:\n timezone = timezone.replace(\"+\", \"-\")\n elif \"-\" in timezone:\n timezone = timezone.replace(\"-\", \"+\")\n\n return timezone", "def _getUTC(self, config = {} ):\n # Default implementation: get system local time\n return datetime.datetime.utcnow()", "def get_utc_offset():\n timedelta = datetime.datetime.now() - datetime.datetime.utcnow()\n # XXX: `return -time.timezone`?\n return timedelta.total_seconds()", "def associate_timezones_to_countries(self):\n\t\t\n\t\tresult = {}\n\t\twith open(\"/usr/share/zoneinfo/zone.tab\", \"r\") as f:\n\t\t\tfor line in f.readlines():\n\t\t\t\tif line[0] == \"#\": continue\n\t\t\t\t\n\t\t\t\tline = line.replace(\"\\n\",\"\").split(\"\\t\")\n\t\t\t\tif not line[0] in result: result[line[0]] = line[2]\n\t\t\n\t\treturn result", "def get_time():\n return {\n 'timestamp': datetime.now()+ timedelta(hours=-1)\n }", "def tz(self, tz):\n return timezones.maybe_get_tz('dateutil/' + tz)", "def get_time():\n\teastern = timezone('US/Eastern')\n\tnow = datetime.datetime.now(eastern).time()\n\treturn(now)", "def localToUTC(t, local_tz):\n t_local = local_tz.localize(t, is_dst=None)\n t_utc = t_local.astimezone(pytz.utc)\n return t_utc", "def nowUTC():\n return datetime.datetime.now(pytz.utc)", "def copenhagen_time(*args):\n _ = args # to explicitly remove warning\n utc_dt = pytz.utc.localize(datetime.utcnow()) + timedelta(minutes=5, seconds=30)\n local_timezone = pytz.timezone(\"Europe/Copenhagen\")\n converted = utc_dt.astimezone(local_timezone)\n return converted.timetuple()", "def copenhagen_time(*args):\n _ = args # to explicitly remove warning\n utc_dt = pytz.utc.localize(datetime.utcnow()) + timedelta(minutes=5, seconds=30)\n local_timezone = pytz.timezone(\"Europe/Copenhagen\")\n converted = utc_dt.astimezone(local_timezone)\n return converted.timetuple()", "def run_hourly_hygienist(self):\n 
self.ensure_timebox_trackers_accurate()\n self.copy_tasks_with_schedule_string()", "def get_uk_time(message):\n time_api = 'http://worldtimeapi.org/api/timezone/Europe/London.json'\n london_time = requests.get(time_api).json()\n\n return(\"The current time in London, England is {}\".format(\n london_time['datetime'][11:16]))", "def proxy_hours_minutes(self):\n\n td = self.convert_last_col_filtered()\n resultat = td.days * 24 + td.seconds // 3600, (td.seconds // 60) % 60\n # print('{} H {} M'.format(*resultat))\n print(resultat)\n return resultat", "async def test_process_timestamp_to_utc_isoformat() -> None:\n datetime_with_tzinfo = datetime(2016, 7, 9, 11, 0, 0, tzinfo=dt_util.UTC)\n datetime_without_tzinfo = datetime(2016, 7, 9, 11, 0, 0)\n est = dt_util.get_time_zone(\"US/Eastern\")\n datetime_est_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=est)\n est = dt_util.get_time_zone(\"US/Eastern\")\n datetime_est_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=est)\n nst = dt_util.get_time_zone(\"Canada/Newfoundland\")\n datetime_nst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=nst)\n hst = dt_util.get_time_zone(\"US/Hawaii\")\n datetime_hst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=hst)\n\n assert (\n process_timestamp_to_utc_isoformat(datetime_with_tzinfo)\n == \"2016-07-09T11:00:00+00:00\"\n )\n assert (\n process_timestamp_to_utc_isoformat(datetime_without_tzinfo)\n == \"2016-07-09T11:00:00+00:00\"\n )\n assert (\n process_timestamp_to_utc_isoformat(datetime_est_timezone)\n == \"2016-07-09T15:00:00+00:00\"\n )\n assert (\n process_timestamp_to_utc_isoformat(datetime_nst_timezone)\n == \"2016-07-09T13:30:00+00:00\"\n )\n assert (\n process_timestamp_to_utc_isoformat(datetime_hst_timezone)\n == \"2016-07-09T21:00:00+00:00\"\n )\n assert process_timestamp_to_utc_isoformat(None) is None", "def utcnow(cls):\n t = _time.time()\n return cls.utcfromtimestamp(t)", "def local_to_utc(local_dt):\n local_dt = local_dt.replace(tzinfo=tz.tzlocal())\n return local_dt.astimezone(tz.tzlocal())", "def _seconds2utc(self, seconds, reftime=None): # same as in rf package\n from collections import Iterable\n from obspy import UTCDateTime as UTC\n if isinstance(seconds, Iterable):\n return [_seconds2utc(self, s, reftime=reftime) for s in seconds]\n if isinstance(seconds, UTC) or reftime is None or seconds is None:\n return seconds\n if not isinstance(reftime, UTC):\n reftime = self.stats[reftime]\n return reftime + seconds", "def _get_utcoffset(self, tzname):\n # SQL Server has no built-in support for tz database\n # see http://blogs.msdn.com/b/sqlprogrammability/archive/2008/03/18/using-time-zone-data-in-sql-server-2008.aspx\n zone = pytz.timezone(tzname)\n # no way to take DST into account at this point\n now = datetime.datetime.now()\n delta = zone.localize(now, is_dst=False).utcoffset()\n return delta.days * 86400 + delta.seconds", "def get_times():\n server = datetime.datetime.now().astimezone()\n local = server.astimezone(dwx.dwx_tz())\n target = post_time_today(local)\n return {'server': server, 'local': local, 'target': target}", "def may_the_timezone_be_with_it(self):\n self.timestamp = pytz.utc.localize(self.timestamp)", "def test_get_current_time_is_in_utc() -> None:\n time_provider = TimeProvider()\n current_time = time_provider.get_current_time()\n\n assert current_time.as_datetime().timezone == UTC", "def compute_angams(self, compute_lagnams=True):\n\n # INITIALISE VARIABLES\n self.jd_sunrise = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.jd_sunset = [None] * 
jyotisha.panchangam.temporal.MAX_SZ\n self.jd_moonrise = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.jd_moonset = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.solar_month = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.solar_month_day = [None] * jyotisha.panchangam.temporal.MAX_SZ\n\n solar_month_sunrise = [None] * jyotisha.panchangam.temporal.MAX_SZ\n\n self.lunar_month = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.month_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.tithi_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.tithi_sunrise = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.nakshatram_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.nakshatram_sunrise = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.yogam_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.yogam_sunrise = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.karanam_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.rashi_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.lagna_data = [None] * jyotisha.panchangam.temporal.MAX_SZ\n\n self.weekday = [None] * jyotisha.panchangam.temporal.MAX_SZ\n self.kaalas = [dict() for _x in range(jyotisha.panchangam.temporal.MAX_SZ)]\n daily_panchaangas = [None] * jyotisha.panchangam.temporal.MAX_SZ\n\n self.fest_days = {}\n self.festivals = [[] for _x in range(jyotisha.panchangam.temporal.MAX_SZ)]\n\n # Computing solar month details for Dec 31\n # rather than Jan 1, since we have an always increment\n # solar_month_day at the start of the loop across every day in\n # year\n daily_panchangam_start = daily.Panchangam(city=self.city, julian_day=self.jd_start - 1,\n ayanamsha_id=self.ayanamsha_id)\n daily_panchangam_start.compute_solar_day()\n self.solar_month[1] = daily_panchangam_start.solar_month\n solar_month_day = daily_panchangam_start.solar_month_day\n\n if self.solar_month[1] != 9:\n logging.error(self.solar_month[1])\n raise (ValueError('Dec 31 does not appear to be Dhanurmasa!'))\n\n month_start_after_sunset = False\n\n #############################################################\n # Compute all parameters -- sun/moon latitude/longitude etc #\n #############################################################\n\n for d in range(jyotisha.panchangam.temporal.MAX_SZ):\n self.weekday[d] = (self.weekday_start + d - 1) % 7\n\n for d in range(-1, jyotisha.panchangam.temporal.MAX_DAYS_PER_YEAR + 2):\n [y, m, dt, t] = swe.revjul(self.jd_start + d - 1)\n\n # checking @ 6am local - can we do any better?\n local_time = tz(self.city.timezone).localize(datetime(y, m, dt, 6, 0, 0))\n # compute offset from UTC in hours\n tz_off = (datetime.utcoffset(local_time).days * 86400 +\n datetime.utcoffset(local_time).seconds) / 3600.0\n\n # What is the jd at 00:00 local time today?\n jd = self.jd_start - (tz_off / 24.0) + d - 1\n\n # TODO: Eventually, we are shifting to an array of daily panchangas. 
Reason: Better modularity.\n # The below block is temporary code to make the transition seamless.\n daily_panchaangas[d + 1] = daily.Panchangam(city=self.city, julian_day=jd + 1, ayanamsha_id=self.ayanamsha_id)\n daily_panchaangas[d + 1].compute_sun_moon_transitions()\n daily_panchaangas[d + 1].compute_solar_month()\n self.jd_sunrise[d + 1] = daily_panchaangas[d + 1].jd_sunrise\n self.jd_sunset[d + 1] = daily_panchaangas[d + 1].jd_sunset\n self.jd_moonrise[d + 1] = daily_panchaangas[d + 1].jd_moonrise\n self.jd_moonset[d + 1] = daily_panchaangas[d + 1].jd_moonset\n self.solar_month[d + 1] = daily_panchaangas[d + 1].solar_month_sunset\n\n solar_month_sunrise[d + 1] = daily_panchaangas[d + 1].solar_month_sunrise\n\n if (d <= 0):\n continue\n # This is just to initialise, since for a lot of calculations,\n # we require comparing with tomorrow's data. This computes the\n # data for day 0, -1.\n\n # Solar month calculations\n if month_start_after_sunset is True:\n solar_month_day = 0\n month_start_after_sunset = False\n\n solar_month_end_jd = None\n if self.solar_month[d] != self.solar_month[d + 1]:\n solar_month_day = solar_month_day + 1\n if self.solar_month[d] != solar_month_sunrise[d + 1]:\n month_start_after_sunset = True\n [_m, solar_month_end_jd] = jyotisha.panchangam.temporal.get_angam_data(\n self.jd_sunrise[d], self.jd_sunrise[d + 1], jyotisha.panchangam.temporal.SOLAR_MONTH,\n ayanamsha_id=self.ayanamsha_id)[0]\n elif solar_month_sunrise[d] != self.solar_month[d]:\n # sankrAnti!\n # sun moves into next rAshi before sunset\n solar_month_day = 1\n [_m, solar_month_end_jd] = jyotisha.panchangam.temporal.get_angam_data(\n self.jd_sunrise[d], self.jd_sunrise[d + 1], jyotisha.panchangam.temporal.SOLAR_MONTH,\n ayanamsha_id=self.ayanamsha_id)[0]\n else:\n solar_month_day = solar_month_day + 1\n solar_month_end_jd = None\n\n # if self.solar_month[d-1] != self.solar_month[d]:\n # # We have a sUrya sankrAnti between yest. 
and today's sunsets\n # solar_month_day = 1\n # if solar_month_sunrise[d] == self.solar_month[d]:\n # #the sankrAnti happened before today's sunrise\n # #so search for the end time between yesterday and\n # #today's sunrises\n # [_m, solar_month_end_jd] = helper_functions.get_angam_data(self.jd_sunrise[d-1],\n # self.jd_sunrise[d],SOLAR_MONTH)[0]\n # else:\n # #the sankrAnti happens after today's sunrise\n # #so search for the end time between today and\n # #tomorrow's sunrises\n # [_m, solar_month_end_jd] = helper_functions.get_angam_data(self.jd_sunrise[d],\n # self.jd_sunrise[d + 1],SOLAR_MONTH)[0]\n # #print ('-----',revjul(jd = solar_month_end_jd, tz_off = tz_off))\n # else:\n # solar_month_day += 1\n # solar_month_end_jd = None\n\n if solar_month_end_jd is None:\n solar_month_end_time = ''\n else:\n solar_month_end_time = '\\\\mbox{%s{\\\\tiny\\\\RIGHTarrow}\\\\textsf{%s}}' % (\n jyotisha.panchangam.temporal.NAMES['RASHI_NAMES'][self.script][_m], jyotisha.panchangam.temporal.Time(\n 24 * (solar_month_end_jd - jd)).toString(format=self.fmt))\n\n # logging.debug(jyotisha.panchangam.temporal.NAMES)\n\n self.month_data[d] = '\\\\sunmonth{%s}{%d}{%s}' % (\n jyotisha.panchangam.temporal.NAMES['RASHI_NAMES'][self.script][self.solar_month[d]],\n solar_month_day, solar_month_end_time)\n self.solar_month_day[d] = solar_month_day\n\n # KARADAYAN NOMBU -- easy to check here\n if solar_month_end_jd is not None: # month ends today\n if (self.solar_month[d] == 12 and solar_month_day == 1) or \\\n (self.solar_month[d] == 11 and solar_month_day != 1):\n self.fest_days['ta:kAraDaiyAn2 nOn2bu'] = [d]\n\n # Compute the various kaalas\n # Sunrise/sunset and related stuff (like rahu, yama)\n YAMAGANDA_OCTETS = [4, 3, 2, 1, 0, 6, 5]\n RAHUKALA_OCTETS = [7, 1, 6, 4, 5, 3, 2]\n GULIKAKALA_OCTETS = [6, 5, 4, 3, 2, 1, 0]\n\n self.kaalas[d] = {\n 'prAtaH sandhyA': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunset[d - 1], self.jd_sunrise[d], 14, 15),\n 'prAtaH sandhyA end': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 4, 15),\n 'prAtah': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 0, 5),\n 'saGgava': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 1, 5),\n 'madhyAhna': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 2, 5),\n 'mAdhyAhnika sandhyA': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 5, 15),\n 'mAdhyAhnika sandhyA end': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 13, 15),\n 'aparAhna': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 3, 5),\n 'sAyAhna': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 4, 5),\n 'sAyaM sandhyA': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d], 14, 15),\n 'sAyaM sandhyA end': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunset[d], self.jd_sunrise[d + 1], 1, 15),\n 'rAtri yAma 1': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunset[d], self.jd_sunrise[d + 1], 1, 4),\n 'zayana': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunset[d], self.jd_sunrise[d + 1], 3, 8),\n 'dinAnta': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunset[d], self.jd_sunrise[d + 1], 18.25, 30),\n 'rahu': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d],\n RAHUKALA_OCTETS[self.weekday[d]], 8),\n 'yama': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], 
self.jd_sunset[d],\n YAMAGANDA_OCTETS[self.weekday[d]], 8),\n 'gulika': jyotisha.panchangam.temporal.get_kaalas(self.jd_sunrise[d], self.jd_sunset[d],\n GULIKAKALA_OCTETS[self.weekday[d]], 8)\n }\n\n # Compute all the anga datas\n self.tithi_data[d] = jyotisha.panchangam.temporal.get_angam_data(self.jd_sunrise[d], self.jd_sunrise[d + 1],\n jyotisha.panchangam.temporal.TITHI,\n ayanamsha_id=self.ayanamsha_id)\n self.tithi_sunrise[d] = self.tithi_data[d][0][0]\n self.nakshatram_data[d] = jyotisha.panchangam.temporal.get_angam_data(self.jd_sunrise[d],\n self.jd_sunrise[d + 1],\n jyotisha.panchangam.temporal.NAKSHATRAM,\n ayanamsha_id=self.ayanamsha_id)\n self.nakshatram_sunrise[d] = self.nakshatram_data[d][0][0]\n self.yogam_data[d] = jyotisha.panchangam.temporal.get_angam_data(self.jd_sunrise[d], self.jd_sunrise[d + 1],\n jyotisha.panchangam.temporal.YOGAM,\n ayanamsha_id=self.ayanamsha_id)\n self.yogam_sunrise[d] = self.yogam_data[d][0][0]\n self.karanam_data[d] = jyotisha.panchangam.temporal.get_angam_data(self.jd_sunrise[d],\n self.jd_sunrise[d + 1],\n jyotisha.panchangam.temporal.KARANAM,\n ayanamsha_id=self.ayanamsha_id)\n self.rashi_data[d] = jyotisha.panchangam.temporal.get_angam_data(self.jd_sunrise[d], self.jd_sunrise[d + 1],\n jyotisha.panchangam.temporal.RASHI,\n ayanamsha_id=self.ayanamsha_id)\n if compute_lagnams:\n self.lagna_data[d] = get_lagna_data(self.jd_sunrise[d], self.city.latitude,\n self.city.longitude, tz_off, ayanamsha_id=self.ayanamsha_id)", "def __get_stock_time(stock_tz: timezone) -> datetime:\n return datetime.now().astimezone(stock_tz)", "def test_guest_timezone(self):\n self.check_guest_timezone()", "def test_guest_timezone(self):\n self.check_guest_timezone()", "def test_guest_timezone(self):\n self.check_guest_timezone()", "def test_guest_timezone(self):\n self.check_guest_timezone()" ]
[ "0.6330134", "0.61681116", "0.60523003", "0.59781736", "0.5969716", "0.56076247", "0.55986744", "0.55825686", "0.5552908", "0.5434768", "0.53342485", "0.53334755", "0.53267014", "0.5321076", "0.5287703", "0.5276938", "0.5276938", "0.5272016", "0.52586037", "0.5246418", "0.52275836", "0.52225786", "0.52136314", "0.5209741", "0.52077013", "0.5198716", "0.5163273", "0.51616657", "0.51586884", "0.5158453", "0.51512027", "0.5142669", "0.51169723", "0.51052845", "0.50996476", "0.5098327", "0.50796896", "0.50649196", "0.504963", "0.50436187", "0.5038717", "0.50359744", "0.5033157", "0.50229084", "0.5018634", "0.49898624", "0.49762917", "0.4965405", "0.49560064", "0.49539873", "0.49423018", "0.49421063", "0.49408117", "0.4937305", "0.49246782", "0.49153408", "0.49103737", "0.49007887", "0.4899279", "0.48973534", "0.48929992", "0.48888636", "0.48861507", "0.48782253", "0.48693788", "0.485896", "0.48571953", "0.48507127", "0.4848611", "0.4845633", "0.48295078", "0.48282698", "0.4817296", "0.4812584", "0.47991836", "0.47943088", "0.47911784", "0.47891185", "0.47874662", "0.47817743", "0.47816646", "0.47807413", "0.47807413", "0.47787333", "0.47776735", "0.47731006", "0.47726187", "0.47717458", "0.47712502", "0.4763728", "0.47629932", "0.4751979", "0.47511694", "0.47402582", "0.47351238", "0.47331348", "0.47308257", "0.47308257", "0.47308257", "0.47308257" ]
0.6230036
1
Calculate the azimuth of the sun in the UTC timezone.
def solar_azimuth(self, dateandtime, latitude, longitude):
    if latitude > 89.8:
        latitude = 89.8

    if latitude < -89.8:
        latitude = -89.8

    zone = -dateandtime.utcoffset().seconds / 3600.0
    utc_datetime = dateandtime.astimezone(pytz.utc)
    timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + (utc_datetime.second / 3600)

    JD = self._julianday(dateandtime.day, dateandtime.month, dateandtime.year)
    t = self._jday_to_jcentury(JD + timenow / 24.0)
    theta = self._sun_declination(t)
    Etime = self._eq_of_time(t)

    eqtime = Etime
    solarDec = theta  # in degrees

    solarTimeFix = eqtime - (4.0 * longitude) + (60 * zone)
    trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + dateandtime.second / 60.0 + solarTimeFix
    # in minutes

    while trueSolarTime > 1440:
        trueSolarTime = trueSolarTime - 1440

    hourangle = trueSolarTime / 4.0 - 180.0
    # Thanks to Louis Schwarzmayr for the next line:
    if hourangle < -180:
        hourangle = hourangle + 360.0

    harad = radians(hourangle)

    csz = sin(radians(latitude)) * sin(radians(solarDec)) + \
          cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad)

    if csz > 1.0:
        csz = 1.0
    elif csz < -1.0:
        csz = -1.0

    zenith = degrees(acos(csz))

    azDenom = (cos(radians(latitude)) * sin(radians(zenith)))

    if (abs(azDenom) > 0.001):
        azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))) / azDenom

        if abs(azRad) > 1.0:
            if azRad < 0:
                azRad = -1.0
            else:
                azRad = 1.0

        azimuth = 180.0 - degrees(acos(azRad))

        if hourangle > 0.0:
            azimuth = -azimuth
    else:
        if latitude > 0.0:
            azimuth = 180.0
        else:
            azimuth = 0

    if azimuth < 0.0:
        azimuth = azimuth + 360.0

    return azimuth
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solar_azimuth(self, dateandtime=None):\n\n if self.astral is None:\n self.astral = Astral()\n\n if dateandtime is None:\n dateandtime = datetime.datetime.now(tz=self.tz)\n \n return self.astral.solar_azimuth(dateandtime, self.latitude, self.longitude)", "def _sun_north_angle_to_z(frame):\n # Find the Sun center in HGS at the frame's observation time(s)\n sun_center_repr = SphericalRepresentation(0*u.deg, 0*u.deg, 0*u.km)\n # The representation is repeated for as many times as are in obstime prior to transformation\n sun_center = SkyCoord(sun_center_repr._apply('repeat', frame.obstime.size),\n frame=HGS, obstime=frame.obstime)\n\n # Find the Sun north in HGS at the frame's observation time(s)\n # Only a rough value of the solar radius is needed here because, after the cross product,\n # only the direction from the Sun center to the Sun north pole matters\n sun_north_repr = SphericalRepresentation(0*u.deg, 90*u.deg, 690000*u.km)\n # The representation is repeated for as many times as are in obstime prior to transformation\n sun_north = SkyCoord(sun_north_repr._apply('repeat', frame.obstime.size),\n frame=HGS, obstime=frame.obstime)\n\n # Find the Sun center and Sun north in the frame's coordinate system\n sky_normal = sun_center.transform_to(frame).data.to_cartesian()\n sun_north = sun_north.transform_to(frame).data.to_cartesian()\n\n # Use cross products to obtain the sky projections of the two vectors (rotated by 90 deg)\n sun_north_in_sky = sun_north.cross(sky_normal)\n z_in_sky = CartesianRepresentation(0, 0, 1).cross(sky_normal)\n\n # Normalize directional vectors\n sky_normal /= sky_normal.norm()\n sun_north_in_sky /= sun_north_in_sky.norm()\n z_in_sky /= z_in_sky.norm()\n\n # Calculate the signed angle between the two projected vectors\n cos_theta = sun_north_in_sky.dot(z_in_sky)\n sin_theta = sun_north_in_sky.cross(z_in_sky).dot(sky_normal)\n angle = np.arctan2(sin_theta, cos_theta).to('deg')\n\n # If there is only one time, this function's output should be scalar rather than array\n if angle.size == 1:\n angle = angle[0]\n\n return Angle(angle)", "def get_mean_sun_angles(self) -> (float, float):\n # Get MTD XML file\n root, _ = self.read_mtd()\n\n # Open zenith and azimuth angle\n zenith_angle = float(root.findtext(\".//SolarZenith\"))\n azimuth_angle = float(root.findtext(\".//SolarAzimuth\"))\n\n return azimuth_angle, zenith_angle", "def get_azimuth(self):\n self.degrees = self.azimuth_encoder.get_degrees()\n self.tele_azimuth = self.Calculations.convert_degrees(self.degrees)\n return self.tele_azimuth", "def imu_get_azimuth(self):\n return self.imu.get_azimuth()", "def sunrise_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - 
hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n sunrise = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return sunrise", "def get_azimuth(self, degrees=True):\n if degrees:\n return math.degrees(self.current_location.az)\n else:\n return self.current_location.az", "def azimuth(source):\n srcAzEl = subarrayControl.s.azel(source, 0.0);\n return srcAzEl[0];", "def set_azimuth(self):\n self.azimuth = self.Calculations.convert_to_azimuth( self.declination, self.right_ascension, self.Latitude, self.LHA)\n if self.azimuth < 0:\n self.azimuth = self.azimuth + 360.0\n return self.azimuth\n else:\n pass\n return self.azimuth\n print('azimuth set to', self.azimuth)", "def getAzimuthAngle(self):\n return self._azimuth", "def looks_azimuth(self) -> Optional[int]:\n return self._get_property(LOOKS_AZIMUTH_PROP, int)", "def sunrise(cls, date):\n return (date + Clock.days_from_hours(6) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n ((1577917828/1582237828 / 360) *\n (cls.ascensional_difference(date, cls.LOCATION) +\n (1/4 * cls.solar_sidereal_difference(date)))))", "def azimuth(poly):\n num = len(poly) - 1\n vec = unit_normal(poly[0], poly[1], poly[num])\n vec_azi = np.array([vec[0], vec[1], 0])\n vec_n = np.array([0, 1, 0])\n # update by Santosh\n # angle2vecs gives the smallest angle between the vectors\n # so for a west wall angle2vecs will give 90\n # the following 'if' statement will make sure 270 is returned\n x_vector = vec_azi[0]\n if x_vector < 0:\n return 360 - angle2vecs(vec_azi, vec_n)\n else:\n return angle2vecs(vec_azi, vec_n)", "def resolution_azimuth(self) -> Optional[float]:\n return self._get_property(RESOLUTION_AZIMUTH_PROP, float)", "def azimuth(self, right: GeoSpatialValue) -> ir.FloatingValue:\n return ops.GeoAzimuth(self, right).to_expr()", "def alt_sunrise(cls, date):\n rise = cls.UJJAIN.dawn(date, angle(0, 47, 0))\n return 1/24 * 1/60 * iround(rise * 24 * 60)", "def fun_azimuth(self):\n\n energy_kev = self.energy_kev.get()\n hkl = self.hkl_magnetic.get()\n hkl = hkl.replace(',', ' ') # remove commas\n hkl = hkl.replace('(', '').replace(')', '') # remove brackets\n hkl = hkl.replace('[', '').replace(']', '') # remove brackets\n hkl = np.fromstring(hkl, sep=' ')\n\n azi = self.azim_zero.get()\n azi = azi.replace(',', ' ') # remove commas\n azi = azi.replace('(', '').replace(')', '') # remove brackets\n azi = azi.replace('[', '').replace(']', '') # remove brackets\n azi = np.fromstring(azi, sep=' ')\n\n pol = self.polval.get()\n if pol == u'\\u03c3-\\u03c3':\n pol = 's-s'\n elif pol == u'\\u03c3-\\u03c0':\n pol = 's-p'\n elif pol == u'\\u03c0-\\u03c3':\n pol = 'p-s'\n else:\n pol = 'p-p'\n\n F0 = self.resF0.get()\n F1 = self.resF1.get()\n F2 = self.resF2.get()\n\n isres = self.isres.get()\n if isres:\n # Resonant scattering\n self.xtl.Plot.simulate_azimuth_resonant(\n hkl,\n energy_kev=energy_kev,\n azim_zero=azi,\n polarisation=pol,\n F0=F0, F1=F1, F2=F2)\n plt.show()\n else:\n # Non-Resonant scattering\n self.xtl.Plot.simulate_azimuth_nonresonant(\n hkl,\n energy_kev=energy_kev,\n azim_zero=azi,\n polarisation=pol)\n plt.show()", "def mean_earth_sun_distance(utc_datetime): \n\n 
return (1 - (0.0335 * math.sin(360 * ((solar.GetDayOfYear(utc_datetime)) - 94)) / (365)))", "def _altaz_rotation(self, jd):\n R_lon = rot_z(- self.longitude.radians - jd.gast * TAU / 24.0)\n return einsum('ij...,jk...,kl...->il...', self.R_lat, R_lon, jd.M)", "def omega_sun_snodgrass90(lat):\n return differential_rotation(lat, 14.71, -2.39, -1.78)", "def pixel_spacing_azimuth(self) -> Optional[float]:\n return self._get_property(PIXEL_SPACING_AZIMUTH_PROP, float)", "def sunset_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n sunset = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return sunset", "def rahukaalam_utc(self, date, latitude, longitude):\n \n if date is None:\n date = datetime.date.today()\n\n try:\n sunrise = self.sunrise_utc(date, latitude, longitude)\n sunset = self.sunset_utc(date, latitude, longitude)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n \n octant_duration = (sunset - sunrise) / 8\n\n # Mo,Sa,Fr,We,Th,Tu,Su\n octant_index = [1,6,4,5,3,2,7]\n \n weekday = date.weekday()\n octant = octant_index[weekday]\n \n start = sunrise + (octant_duration * octant)\n end = start + octant_duration\n \n return {'start': start, 'end': end}", "def sun(self, date=None, local=True):\n \n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n sun = self.astral.sun_utc(date, self.latitude, self.longitude)\n\n if local:\n for key, dt in sun.items():\n sun[key] = dt.astimezone(self.tz)\n\n return sun", "def azimuth(vv, v0, v1):\n with np.errstate(divide='ignore', invalid='ignore'):\n n0 = np.cross(v0, v1)\n n0 /= np.dual.norm(n0, axis=-1)[..., np.newaxis]\n nn = np.cross(v0, vv)\n nn /= np.dual.norm(nn, axis=-1)[..., np.newaxis]\n\n azi = np.arccos(np.sum(nn * n0, -1))\n if len(np.shape(azi)) > 0:\n azi[np.dot(vv, n0) < 0] *= -1\n # arbitrary angle where vv is (anti)parallel to v0\n azi[np.isnan(azi)] = 0\n elif np.isnan(azi):\n return 0\n elif np.dot(vv, v0) < 1 and azi > 0:\n azi *= -1\n\n return azi", "def GetSunriseSunset(latitude_deg, longitude_deg, utc_datetime, timezone):\n\n # Day of the year\n day = solar.GetDayOfYear(utc_datetime)\n\n # Solar hour angle\n SHA = ((timezone)* 15.0 - longitude_deg)\n\n # Time adjustment\n TT = 
(279.134+0.985647*day)*math.pi/180\n\n # Time adjustment in hours\n time_adst = ((5.0323 - 100.976*math.sin(TT)+595.275*math.sin(2*TT)+\n 3.6858*math.sin(3*TT) - 12.47*math.sin(4*TT) - 430.847*math.cos(TT)+\n 12.5024*math.cos(2*TT) + 18.25*math.cos(3*TT))/3600)\n \n # Time of noon\n TON = (12 + (SHA/15.0) - time_adst)\n \n sunn = (math.pi/2-(23.45*math.pi/180)*math.tan(latitude_deg*math.pi/180)*\n math.cos(2*math.pi*day/365.25))*(180/(math.pi*15))\n\n # Sunrise_time in hours\n sunrise_time = (TON - sunn + time_adst)\n \n # Sunset_time in hours\n sunset_time = (TON + sunn - time_adst) \n\n sunrise_time_dt = date_with_decimal_hour(utc_datetime, sunrise_time) \n sunset_time_dt = date_with_decimal_hour(utc_datetime, sunset_time) \n\n return sunrise_time_dt, sunset_time_dt", "def zodiac(cls, tee):\n return quotient(float(cls.solar_longitude(tee)), 30) + 1", "def test_az_za():\n Nside = 128\n obs = observatory.Observatory(latitude, longitude, fov=20, nside=Nside)\n center = [0, 0]\n lon, lat = [5, 0]\n ind0 = hp.ang2pix(Nside, lon, lat, lonlat=True)\n lon, lat = hp.pix2ang(Nside, ind0, lonlat=True)\n za, az, pix = obs.calc_azza(center, return_inds=True)\n ind = np.where(pix == ind0)\n # lon = longitude of the source, which is set to 5deg off zenith (hence, zenith angle)\n assert np.isclose(np.degrees(za[ind]), lon)\n assert np.isclose(np.degrees(az[ind]), 90.0)", "def _orientation(location, time='now'):\n obstime = parse_time(time)\n\n # Define the frame where its Z axis is aligned with local zenith\n local_frame = AltAz(obstime=obstime, location=location)\n\n return _sun_north_angle_to_z(local_frame)", "def calc_surface_azimuth(xdir, ydir, B):\n B = radians(B)\n teta_z = degrees(asin(xdir / sin(B)))\n # set the surface azimuth with on the sing convention (E,N)=(+,+)\n if xdir < 0:\n if ydir <0:\n surface_azimuth = 180 + teta_z # (xdir,ydir) = (-,-)\n else: surface_azimuth = 360 + teta_z # (xdir,ydir) = (-,+)\n elif ydir < 0:\n surface_azimuth = 180 + teta_z # (xdir,ydir) = (+,-)\n else: surface_azimuth = teta_z # (xdir,ydir) = (+,+)\n return surface_azimuth # degree", "def get_altaz(ra,dec,jd=None,lat = 37.9183, lon = -122.1067, alt = 304, equinox='J2000'):\n if jd: t = ap.time.Time(jd,format='jd')\n else: t = ap.time.Time(time.time(),format='unix')\n l = ap.coordinates.EarthLocation(lat=lat*u.deg,\n lon=lon*u.deg,height=alt*u.m)\n f = ap.coordinates.AltAz(obstime=t,location=l)\n c = ap.coordinates.SkyCoord(ra, dec, frame='fk5',unit='deg',equinox=equinox)\n altaz = c.transform_to(f)\n return altaz.alt.deg, altaz.az.deg", "def get_azimuth(self, p, az):\n az.value = self._get_azimuth(p, az.value)", "def ayanamsha(tee):\n return Solar.solar_longitude(tee) - sidereal_solar_longitude(tee)", "def sun_utc(self, date, latitude, longitude):\n \n dawn = self.dawn_utc(date, latitude, longitude)\n sunrise = self.sunrise_utc(date, latitude, longitude)\n noon = self.solar_noon_utc(date, longitude)\n sunset = self.sunset_utc(date, latitude, longitude)\n dusk = self.dusk_utc(date, latitude, longitude)\n \n return {'dawn': dawn, 'sunrise': sunrise, 'noon': noon, 'sunset': sunset, 'dusk': dusk}", "def leaf_azimuth(size=1, phyllotactic_angle=180, phyllotactic_deviation=15, plant_orientation=0, spiral=False):\n if size == 1:\n return plant_orientation\n if spiral:\n main = numpy.arange(0, size) * phyllotactic_angle\n else:\n it = cycle((0, phyllotactic_angle))\n main = numpy.array([it.next() for i in xrange(size)])\n azim = plant_orientation + main + (numpy.random.random(size) - 0.5) * 2 * phyllotactic_deviation\n 
azim = azim % 360\n return numpy.where(azim <= 180, azim, azim - 360)", "def era(self):\n # earth rotation angle using Universal Time\n J = self.MJD - 51544.5\n fraction = np.mod(J, self.turn)\n theta = np.mod(0.7790572732640 + 0.00273781191135448*J, self.turn)\n return self.turndeg*np.mod(theta + fraction, self.turn)", "def offset_to_altaz(xoff, yoff, azimuth, altitude):\n #Deal with situations where offset = 0?\n\n d = sqrt(xoff*xoff+yoff*yoff)\n pos = np.where(d==0)\n d=1e-12 * u.deg # add a very small offset to prevent math errors\n\n q = arctan(d.to(u.rad).value)\n\n sq = sin(q)\n xp1 = xoff * (sq/d)\n yp1 = yoff * (sq/d)\n zp1 = cos(q)\n\n cx = sin(altitude)\n sx = cos(altitude)\n\n xp0 = cx*xp1 - sx*zp1\n yp0 = yp1\n zp0 = sx*xp1 + cx*zp1\n\n obj_altitude = arcsin(zp0)\n obj_altitude[pos]=altitude\n obj_azimuth = arctan2(yp0,-xp0) + azimuth\n obj_azimuth[pos] = azimuth\n\n #if obj_azimuth.value < 0.:\n # obj_azimuth += 2.*pi\n #elif obj_azimuth.value >= (2.*pi ):\n # obj_azimuth -= 2.*pi\n\n return obj_altitude,obj_azimuth", "def altaz_to_radec(alt_az, pos=local_latlong,\n minute=minute, hour=hour, day=day,\n month=month, year=year, tz_offset=5):\n # Retrieve the coordinates and convert them to rads for some trig.\n lat, long = pos[0] * (np.pi/180), pos[1] * (np.pi/180)\n alt, az = alt_az[0] * (np.pi/180), alt_az[1] * (np.pi/180)\n\n gmst = localtime_to_gmst(minute=minute, hour=hour,\n day=day, month=month, year=year, tz_offset=5)\n\n sin_dec = np.sin(alt) * np.sin(lat) + np.cos(alt) * np.cos(lat) * np.cos(az)\n dec = np.arcsin(sin_dec)\n\n cosHA = (np.sin(alt) - np.sin(lat) * np.sin(dec))/(np.cos(lat) * np.cos(dec))\n HA = np.arccos(cosHA) * (180/np.pi)\n\n dec *= (180/np.pi)\n ra = gmst + HA + (long * 180/np.pi) if az < np.pi else gmst - HA + (long * 180/np.pi)\n\n ra_dec = (round(ra, 4), round(dec, 4))\n return ra_dec", "def utm_getZone(longitude):\n return (int(1+(longitude+180.0)/6.0))", "def azimuth(self, other, projected=True):\n x0, y0 = self.x, self.y\n if self.crs != other.crs:\n x1, y1 = other.get_vertex(self.crs)[:2]\n else:\n x1, y1 = other.x, other.y\n\n if (x0, y0) == (x1, y1):\n az = np.nan\n elif projected and not isinstance(self.crs, GeographicalCRS):\n az = 90.0 - math.atan2(y1-y0, x1-x0)*180.0/math.pi\n az = (az+180) % 360 - 180\n else:\n lon0, lat0 = self.crs.project(x0, y0, inverse=True)\n lon1, lat1 = self.crs.project(x1, y1, inverse=True)\n az, _, _ = self.crs.inverse(lon0, lat0, lon1, lat1)\n return az", "def AngleFromSun(body, time):\n if body == Body.Earth:\n raise EarthNotAllowedError()\n sv = GeoVector(Body.Sun, time, True)\n bv = GeoVector(body, time, True)\n return AngleBetween(sv, bv)", "def solar_noon_utc(self, date, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n newt = self._jday_to_jcentury(julianday + 0.5 + longitude / 360.0)\n\n eqtime = self._eq_of_time(newt)\n timeUTC = 720.0 + (longitude * 4.0) - eqtime\n\n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n noon = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return noon", "def 
calculate_orbiting_angle(orbiting_center, raft):\n\n # note the negative sign before the first component, the y component\n # it is to make the orbiting angle in a right-handed coordiante.\n angle = np.arctan2(-(raft[1] - orbiting_center[1]), (raft[0] - orbiting_center[0])) * 180 / np.pi\n\n return angle", "def dawn_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n \n try:\n hourangle = self._hour_angle_sunrise(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dawn(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n \n dawn = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dawn", "def test_az_za_astropy():\n\n Nside = 128\n\n altitude = 0.0\n loc = EarthLocation.from_geodetic(longitude, latitude, altitude)\n\n obs = observatory.Observatory(latitude, longitude, nside=Nside)\n\n t0 = Time(2458684.453187554, format=\"jd\")\n obs.set_fov(180)\n\n zen = AltAz(alt=Angle(\"90d\"), az=Angle(\"0d\"), obstime=t0, location=loc)\n\n zen_radec = zen.transform_to(ICRS())\n center = [zen_radec.ra.deg, zen_radec.dec.deg]\n northloc = EarthLocation.from_geodetic(lat=\"90.d\", lon=\"0d\", height=0.0)\n north_radec = AltAz(\n alt=\"90.0d\", az=\"0.0d\", obstime=t0, location=northloc\n ).transform_to(ICRS())\n yvec = np.array([north_radec.ra.deg, north_radec.dec.deg])\n za, az, inds = obs.calc_azza(center, yvec, return_inds=True)\n\n ra, dec = hp.pix2ang(Nside, inds, lonlat=True)\n\n altaz_astropy = ICRS(\n ra=Angle(ra, unit=\"deg\"), dec=Angle(dec, unit=\"deg\")\n ).transform_to(AltAz(obstime=t0, location=loc))\n\n za0 = altaz_astropy.zen.rad\n az0 = altaz_astropy.az.rad\n\n if environ.get(\"VIS\", False):\n hmap = np.zeros(12 * Nside ** 2) + hp.UNSEEN\n hmap[inds] = np.unwrap(az0 - az)\n import IPython\n\n IPython.embed()\n\n print(np.degrees(za0 - za))\n assert np.allclose(za0, za, atol=1e-4)\n assert np.allclose(\n np.unwrap(az0 - az), 0.0, atol=3e-4\n ) # About 1 arcmin precision. 
Worst is at the southern horizon.", "def sunlongitude(time):\n B0 = 36000.7695\n C0 = 280.4659\n # fmt: off\n A = np.array([19147e-4, 200e-4, 48e-4, 20e-4, 18e-4, 18e-4, \\\n 15e-4, 13e-4, 7e-4, 7e-4, 7e-4, 6e-4, \\\n 5e-4, 5e-4, 4e-4, 4e-4])\n B = np.array([35999.050, 71998.1, 1934, 32964, 19, \\\n 445267, 45038, 22519, 65929, 3035, \\\n 9038, 33718, 155, 2281, 29930, \\\n 31557])\n C = np.array([267.520, 265.1, 145, 158, 159, 208, \\\n 254., 352, 45, 110, 64, 316, \\\n 118., 221, 48, 161])\n # fmt: on\n RAD = 0.0174532925199433\n A[0] = 1.9147 - 0.0048 * time\n tempb = (B * time + C) * RAD\n amp = A * np.cos(tempb)\n sunlon = np.sum(amp)\n sunlon = (sunlon + B0 * time + C0) * RAD\n return sunlon", "def horiz_angle(time, data):\n\n # TODO What should 0deg be? Set it to inline w/ target? facing target?\n\n # direction of the sun. measured in degrees counted clockwise from north.\n azimuth = data[time]['azimuth']\n\n h_angle = (azimuth / 2 - 90)\n\n # returns answer between -180 and 180 degrees\n return round(((h_angle + 180) % 360) - 180, 4)", "def solar_elevation(self, dateandtime, latitude, longitude):\n \n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n\n zone = -dateandtime.utcoffset().seconds / 3600.0\n utc_datetime = dateandtime.astimezone(pytz.utc)\n timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + (utc_datetime.second / 3600)\n \n JD = self._julianday(dateandtime.day, dateandtime.month, dateandtime.year)\n t = self._jday_to_jcentury(JD + timenow / 24.0)\n theta = self._sun_declination(t)\n Etime = self._eq_of_time(t)\n \n eqtime = Etime\n solarDec = theta # in degrees\n \n solarTimeFix = eqtime - (4.0 * longitude) + (60 * zone)\n trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + dateandtime.second / 60.0 + solarTimeFix\n # in minutes\n \n while trueSolarTime > 1440:\n trueSolarTime = trueSolarTime - 1440\n \n hourangle = trueSolarTime / 4.0 - 180.0\n # Thanks to Louis Schwarzmayr for the next line:\n if hourangle < -180:\n hourangle = hourangle + 360.0\n \n harad = radians(hourangle)\n \n csz = sin(radians(latitude)) * sin(radians(solarDec)) + \\\n cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad)\n \n if csz > 1.0:\n csz = 1.0\n elif csz < -1.0:\n csz = -1.0\n \n zenith = degrees(acos(csz))\n \n azDenom = (cos(radians(latitude)) * sin(radians(zenith)))\n \n if (abs(azDenom) > 0.001):\n azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))) / azDenom\n \n if abs(azRad) > 1.0:\n if azRad < 0:\n azRad = -1.0\n else:\n azRad = 1.0\n \n azimuth = 180.0 - degrees(acos(azRad))\n \n if hourangle > 0.0:\n azimuth = -azimuth\n else:\n if latitude > 0.0:\n azimuth = 180.0\n else:\n azimuth = 0\n \n if azimuth < 0.0:\n azimuth = azimuth + 360.0\n \n exoatmElevation = 90.0 - zenith\n\n if exoatmElevation > 85.0:\n refractionCorrection = 0.0\n else:\n te = tan(radians(exoatmElevation))\n if exoatmElevation > 5.0:\n refractionCorrection = 58.1 / te - 0.07 / (te * te * te) + 0.000086 / (te * te * te * te * te)\n elif exoatmElevation > -0.575:\n step1 = (-12.79 + exoatmElevation * 0.711)\n step2 = (103.4 + exoatmElevation * (step1))\n step3 = (-518.2 + exoatmElevation * (step2))\n refractionCorrection = 1735.0 + exoatmElevation * (step3)\n else:\n refractionCorrection = -20.774 / te\n \n refractionCorrection = refractionCorrection / 3600.0\n \n solarzen = zenith - refractionCorrection\n \n solarelevation = 90.0 - solarzen\n \n return solarelevation", "def getAltAz(arr,header,time,location):\n\tsoln 
= wcs.WCS(header)\n\tcoords = cartesian([arange(arr.shape[1]),arange(arr.shape[0])])\n\tworld = soln.wcs_pix2world(coords,0)\n\tradec = SkyCoord(ra=world[:,0],dec=world[:,1],frame='icrs',unit='deg')\n\taltaz = radec.transform_to(AltAz(obstime=time,location=telescope))\n\treturn altaz.alt.deg,altaz.az.deg,coords[:,0],coords[:,1]", "def sunrise(self, date=None, local=True):\n\n if self.astral is None:\n self.astral = Astral()\n \n if date is None:\n date = datetime.date.today()\n\n sunrise = self.astral.sunrise_utc(date, self.latitude, self.longitude)\n\n if local:\n return sunrise.astimezone(self.tz) \n else:\n return sunrise", "def point_to_point_azimuth(point0, point1, out=None):\n azimuth_in_rads = point_to_point_angle(point0, point1, out=out)\n if out is None:\n return (np.pi * 0.5 - azimuth_in_rads) * 180.0 / np.pi\n else:\n np.subtract(np.pi * 0.5, azimuth_in_rads, out=out)\n return np.multiply(out, 180.0 / np.pi, out=out)", "def _solar_time(date, lon, lat):\n # IDL is computing time_t as hours and fractional minutes\n time_t = ee.Date(date).get('hour').add(\n ee.Date(date).get('minute').divide(60))\n\n # This will return the hour floating point value\n # time_t = ee.Date(date).get('hour').add(ee.Date(date).getFraction('hour'))\n\n # CGM - DOY and hour could be images in order to make these expressions\n julian = _to_jd(date)\n\n # Sunrise time\n julian_ = time_t.divide(24.0).add(julian)\n j_cen = julian_.add(0.5 - 2451545.0).divide(36525.0)\n # CGM - Does the mod happen before or after the multiply\n lon_sun = j_cen.multiply(0.0003032).add(36000.76983) \\\n .multiply(j_cen).mod(360.0).add(280.46646).subtract(360)\n an_sun = j_cen.multiply(-0.0001537).add(35999.05029) \\\n .multiply(j_cen).add(357.52911)\n ecc = j_cen.multiply(0.0000001267).add(0.000042037) \\\n .multiply(j_cen).multiply(-1).add(0.016708634)\n ob_ecl = j_cen.multiply(-0.001813).add(0.00059) \\\n .multiply(j_cen).add(46.815) \\\n .multiply(j_cen).multiply(-1).add(21.448) \\\n .divide(60.0).add(26).divide(60).add(23)\n ob_corr = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).cos() \\\n .multiply(0.00256).add(ob_ecl)\n var_y = ob_corr.divide(2.0).multiply(deg2rad).tan().multiply(\n ob_corr.divide(2.0).multiply(deg2rad).tan())\n eq_t = (\n lon_sun.multiply(2.0).multiply(deg2rad).sin().multiply(var_y)\n .subtract(an_sun.multiply(deg2rad).sin().multiply(ecc).multiply(2.0))\n .add(an_sun.multiply(deg2rad).sin()\n .multiply(lon_sun.multiply(2.0).multiply(deg2rad).cos())\n .multiply(var_y).multiply(ecc).multiply(4.0))\n .subtract(lon_sun.multiply(4.0).multiply(deg2rad).sin()\n .multiply(var_y).multiply(var_y).multiply(0.5))\n .subtract(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(ecc).multiply(ecc).multiply(1.25))\n .multiply(4.0).multiply(rad2deg))\n sun_eq = (\n an_sun.multiply(deg2rad).sin().multiply(\n j_cen.multiply(0.000014).add(0.004817)\n .multiply(j_cen).multiply(-1).add(1.914602))\n .add(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(j_cen.multiply(-0.000101).add(0.019993)))\n .add(an_sun.multiply(3.0).multiply(deg2rad).sin().multiply(0.000289)))\n sun_true = sun_eq.add(lon_sun)\n sun_app = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).sin() \\\n .multiply(-0.00478).subtract(0.00569).add(sun_true)\n\n # CGM - Intentionally not converting back to degrees\n d = ob_corr.multiply(deg2rad).sin() \\\n .multiply(sun_app.multiply(deg2rad).sin()) \\\n .asin()\n\n # CGM - Functions below are lat/lon dependent and can be written as\n # ee.Image expressions\n # CGM - d is still in 
radians, not converting\n ha_t = lat.expression(\n 'acos((cos(90.833 * pi / 180) / (cos(lat) * cos(d))) - tan(lat) * tan(d))'\n ' * (180 / pi)',\n {'lat': lat, 'd': d, 'pi': math.pi})\n\n # print('\\n{:10s} {:.12f}'.format('julian_', julian_.getInfo()))\n # print('{:10s} {:.12f}'.format('time_t', time_t.getInfo()))\n # print('{:10s} {:.12f}'.format('j_cen', j_cen.getInfo()))\n # print('{:10s} {:.12f}'.format('lon_sun', lon_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('an_sun', an_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('ecc', ecc.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_ecl', ob_ecl.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_corr', ob_corr.getInfo()))\n # print('{:10s} {:.12f}'.format('var_y', var_y.getInfo()))\n # print('{:10s} {:.12f}'.format('eq_t', eq_t.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_eq', sun_eq.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_true', sun_true.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_app', sun_app.getInfo()))\n # print('{:10s} {:.12f}'.format('d', d.getInfo()))\n\n return d, eq_t, ha_t", "def dusk_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianday)\n eqtime = self._eq_of_time(t)\n solarDec = self._sun_declination(t)\n\n try:\n hourangle = self._hour_angle_sunset(latitude, solarDec)\n except:\n raise AstralError('Sun remains below horizon on this day, at this location.')\n\n delta = longitude - degrees(hourangle)\n timeDiff = 4.0 * delta\n timeUTC = 720.0 + timeDiff - eqtime\n\n newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0)\n eqtime = self._eq_of_time(newt)\n solarDec = self._sun_declination(newt)\n hourangle = self._hour_angle_dusk(latitude, solarDec, self._depression)\n delta = longitude - degrees(hourangle)\n timeDiff = 4 * delta\n timeUTC = 720 + timeDiff - eqtime\n \n timeUTC = timeUTC/60.0\n hour = int(timeUTC)\n minute = int((timeUTC - hour) * 60)\n second = int((((timeUTC - hour) * 60) - minute) * 60)\n\n if second > 59:\n second -= 60\n minute += 1\n elif second < 0:\n second += 60\n minute -= 1\n\n if minute > 59:\n minute -= 60\n hour += 1\n elif minute < 0:\n minute += 60\n hour -= 1\n\n if hour > 23:\n hour -= 24\n date += datetime.timedelta(days=1)\n elif hour < 0:\n hour += 24\n date -= datetime.timedelta(days=1)\n\n dusk = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc)\n\n return dusk", "def platform_auto_calibrate_azimuth_servo(self):\n self._platform_auto_calibrate_check()\n self.platform.auto_calibrate_azimuth_servo()", "def azimuth_speed(self, degrees = True):\n return self.angularSpeed(self.future_location.az, self.old_location.az)", "def azimuthalAverage(image, center=None):\n # Calculate the indices from the image\n y, x = np.indices(image.shape)\n\n if not center:\n center = np.array([(x.max() - x.min()) / 2.0, (x.max() - x.min()) / 2.0])\n\n r = np.hypot(x - center[0], y - center[1])\n\n # Get sorted radii\n ind = np.argsort(r.flat)\n r_sorted = r.flat[ind]\n i_sorted = image.flat[ind]\n\n # Get the integer part of the radii (bin size = 1)\n r_int = r_sorted.astype(int)\n\n # Find all pixels that fall within each radial bin.\n deltar = r_int[1:] - r_int[:-1] # Assumes all radii represented\n rind = np.where(deltar)[0] # location of changed radius\n nr = rind[1:] - rind[:-1] # number of radius bin\n\n # Cumulative sum to figure out 
sums for each radius bin\n csim = np.cumsum(i_sorted, dtype=float)\n tbin = csim[rind[1:]] - csim[rind[:-1]]\n\n radial_prof = tbin / nr\n\n return radial_prof", "def azalt(ra, dec):\n\tx = rectanglize(ra, dec)\n\ty = np.dot(R_1, x)\n\tz = np.dot(R_2, y)\n\treturn sphericalize(z)", "def altaz_to_offset(obj_azimuth,obj_altitude,azimuth,altitude):\n\n daz = obj_azimuth - azimuth\n coa = cos(obj_altitude)\n\n xp0 = -cos(daz) * coa\n yp0 = sin(daz) * coa\n zp0 = sin(obj_altitude)\n\n cx = sin(altitude)\n sx = cos(altitude)\n\n xp1 = cx*xp0 + sx*zp0\n yp1 = yp0\n zp1 = -sx*xp0 + cx*zp0\n\n q = arccos(zp1)\n d = tan(q)\n alpha = arctan2(yp1,xp1)\n\n xoff = d * cos(alpha)\n yoff = d * sin(alpha)\n\n return xoff,yoff", "def calculate_yaw(pixel_x, center_x) -> float:\n yaw = math.degrees(math.atan((pixel_x - center_x) / H_FOCAL_LENGTH))\n return yaw", "def sidereal_zodiac(tee):\n return quotient(int(sidereal_solar_longitude(tee)), 30) + 1", "def sunpos_mag(t,lat,lon,elev,gm,temp=None,press=None,radians=True):\n #az_zen is a (...,5) dimension ndarray\n az_zen = sunpos(t,lat,lon,elev,temp,press,radians=radians)\n decl = declination(lat,lon,elev,t,gm,radians)\n az_zen[...,0] -= decl\n #subtract declination to go from true N to magnetic N\n return az_zen", "def solarelevation_function_clear(latitude_deg, longitude_deg, utc_datetime,temperature_celsius = 25,\n pressure_millibars = 1013.25, elevation = elevation_default):\n altitude = solar.GetAltitude(latitude_deg, longitude_deg,utc_datetime, elevation, temperature_celsius,pressure_millibars) \n return (0.038175 + (1.5458 * (math.sin(altitude))) + ((-0.59980) * (0.5 * (1 - math.cos(2 * (altitude))))))", "def equation_of_time(cls, date):\n offset = cls.sine(cls.mean_position(date, cls.ANOMALISTIC_YEAR))\n equation_sun = (offset * angle(57, 18, 0) * (14/360 - (abs(offset) / 1080)))\n return ((cls.daily_motion(date) / 360) * (equation_sun / 360) * cls.SIDEREAL_YEAR)", "def _get_target_azimuths(radar_in):\n sweep_start = radar_in.sweep_start_ray_index['data'][0]\n sweep_end = radar_in.sweep_end_ray_index['data'][0]\n target_azimuths = np.sort(\n radar_in.azimuth['data'][sweep_start:sweep_end+1])\n az_tol = np.median(target_azimuths[1:]-target_azimuths[:-1])\n\n return target_azimuths, az_tol", "def phi(self):\n return np.arctan2(np.sqrt(self.x**2 + self.y**2), self.z)", "def camera_rot_XYZEuler(azimuth, elevation, tilt):\n\n azimuth, elevation, tilt = float(azimuth), float(elevation), float(tilt)\n x, y, z = math.pi/2, 0, math.pi/2 # set camera at x axis facing towards object\n\n # twist\n # if tilt > 0:\n # y = tilt\n # else:\n # y = 360 + tilt\n\n # latitude\n x = x - elevation\n # longtitude\n z = z + azimuth\n\n return x, y, z", "def _P(time='now'):\n obstime = parse_time(time)\n\n # Define the frame where its Z axis is aligned with geocentric north\n geocentric = PrecessedGeocentric(equinox=obstime, obstime=obstime)\n\n return _sun_north_angle_to_z(geocentric)", "def get_tz_offset(self) -> float:\n return self.AD.tz.utcoffset(self.datetime()).total_seconds() / 60", "def set_azimuth(self, phi: \"float\") -> \"void\":\n return _beamforming_swig.phasedarray_sptr_set_azimuth(self, phi)", "def sunset(cls, date):\n return (date + Clock.days_from_hours(18) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n (((1577917828/1582237828) / 360) *\n (- cls.ascensional_difference(date, cls.LOCATION) +\n (3/4 * cls.solar_sidereal_difference(date)))))", "async def sunrise(self, aware=False, today=False, days_offset=0) -> 
dt.datetime:\n return await self.AD.sched.sunrise(aware, today=today, days_offset=days_offset)", "def equatorial_hour_angle(hour, location):\n equatorial_angle = (hour - location.timezone) * 2 * np.pi / 24 + (np.deg2rad(location.longitude))\n logging.getLogger(\"hour.angle.equ\").debug(\"For hour %d, equatorial angle %g\" % (hour, np.rad2deg(equatorial_angle)))\n return equatorial_angle", "def lambert_azimuthal(coordinate_triples, longitude_offset=pi/8,\n latitude_offset=pi/8):\n latitudes, longitudes = cartesian_to_geographical(coordinate_triples)\n k = np.sqrt(2/(1 + np.cos(latitudes - latitude_offset)\n *np.cos(longitudes - longitude_offset)))\n x_projected = (k*np.cos(latitudes - latitude_offset)\n *np.sin(longitudes - longitude_offset))\n y_projected = k*np.sin(latitudes - latitude_offset)\n return np.array([x_projected, y_projected])", "def set_azimuth(self, phi: \"float\") -> \"void\":\n return _beamforming_swig.phasedarray_set_azimuth(self, phi)", "def solar_angles(df, lat, lon, alt=0):\n\n jd = pd.Timestamp(df).to_julian_date()\n\n # offset (2451543.5)\n d_offset = pd.Timestamp('1999-12-31 00:00:00').to_julian_date()\n\n d = jd - d_offset\n\n\n # Keplerian elements for the sun (geocentric)\n w = 282.9404 + 4.70935E-5 * d # longitude of perihelion [degrees]\n a = 1.0 # mean distance [AU]\n e = 0.016709 - 1.151E-9 * d # eccentricity [-]\n M = np.mod(356.0470 + 0.9856002585 * d, 360.0) # mean anomaly [degrees]\n L = w + M # Sun's mean longitude [degrees]\n oblecl = 23.4393 - 3.563E-7 * d # Sun's obliquity of the eliptic [degrees]\n\n # Auxiliary angle [degrees]\n E = M + (180.0 / np.pi) * e * np.sin(np.deg2rad(M)) * (1.0 + e * np.cos(np.deg2rad(M)))\n\n # Rectangular coordinates in the plane of the ecliptic (x-axis toward perihelion)\n x = np.cos(np.deg2rad(E)) - e\n y = np.sin(np.deg2rad(E)) * np.sqrt(1 - (e ** 2))\n\n # Distance (r) and true anomaly (v)\n r = np.sqrt((x ** 2) + (y ** 2))\n v = np.rad2deg(np.arctan2(y, x))\n\n # Longitude of the sun\n lon_sun = v + w\n\n # Ecliptic rectangular coordinates\n xeclip = r * np.cos(np.deg2rad(lon_sun))\n yeclip = r * np.sin(np.deg2rad(lon_sun))\n zeclip = 0.0\n\n # Rotate coordinates to equatorial rectangular coordinates\n xequat = xeclip\n yequat = yeclip * np.cos(np.deg2rad(oblecl)) + zeclip * np.sin(np.deg2rad(oblecl))\n zequat = yeclip * np.sin(np.deg2rad(23.4406)) + zeclip * np.cos(np.deg2rad(oblecl))\n\n # Convert equatorial rectangular coordinates to right-ascension (RA) and declination\n r = np.sqrt(xequat ** 2 + yequat ** 2 + zequat ** 2) - (alt / 149598000.0)\n RA = np.rad2deg(np.arctan2(yequat, xequat))\n delta = np.rad2deg(np.arcsin(zequat / r))\n\n # Calculate local siderial time\n uth = df.hour + (df.minute / 60.0) + (df.second / 3600.0)\n gmst0 = np.mod(L + 180.0, 360.0) / 15.0\n sidtime = gmst0 + uth + (lon / 15.0)\n\n # Replace RA with hour-angle (HA)\n HA = sidtime * 15.0 - RA\n\n # Convert to rectangular coordinates\n x = np.cos(np.deg2rad(HA)) * np.cos(np.deg2rad(delta))\n y = np.sin(np.deg2rad(HA)) * np.cos(np.deg2rad(delta))\n z = np.sin(np.deg2rad(delta))\n\n # Rotate along an axis going East-West\n xhor = x * np.cos(np.deg2rad(90.0 - lat)) - z * np.sin(np.deg2rad(90.0 - lat))\n yhor = y\n zhor = x * np.sin(np.deg2rad(90.0 - lat)) + z * np.cos(np.deg2rad(90.0 - lat))\n\n # Find azimuthal and elevation angles\n azimuthal = np.rad2deg(np.arctan2(yhor, xhor)) + 180.0\n elevation = np.rad2deg(np.arcsin(zhor))\n\n zenith = 90.0 - elevation\n\n return np.column_stack((zenith, elevation, azimuthal))", "def __call__( self 
, theta ):\r\n offset = np.dot( z_rot( theta ) , [ self.radius , 0 , 0 ] )\r\n # print \"Offset:\" , offset\r\n return np.add( self.center , offset )", "def altaz(self, temperature_C=None, pressure_mbar='standard'):\n try:\n topos = self.observer.topos\n R = self.observer.altaz_rotation\n except AttributeError:\n raise ValueError('to compute an apparent position, you must'\n ' observe from a specific Earth location that'\n ' you specify using a Topos instance')\n\n # TODO: wobble\n\n position_au = einsum('ij...,j...->i...', R, self.position.au)\n r_au, alt, az = to_polar(position_au)\n\n if temperature_C is None:\n alt = Angle(radians=alt)\n else:\n if temperature_C == 'standard':\n temperature_C = 10.0\n if pressure_mbar == 'standard':\n pressure_mbar = 1010.0 * exp(-topos.elevation.m / 9.1e3)\n alt = refract(alt * RAD2DEG, temperature_C, pressure_mbar)\n alt = Angle(degrees=alt)\n\n return alt, Angle(radians=az), Distance(r_au)", "def rotated_equatorial_hour_angle(hour, location):\n equatorial_angle = equatorial_hour_angle(hour, location)\n equatorial_angle_from_solar_noon = equatorial_angle - np.pi\n # Angle currently is angle referenced from solar noon, positive (pm) towards the east.\n # Change to mathematical angle, anticlockwise from 0 in the east.\n return np.pi / 2 - equatorial_angle_from_solar_noon", "def azimuth_update(self):\n self.current_azimuth = self.azimuth_encoder.get_degrees()\n azimuth_error = self.azimuth - float(self.current_azimuth)\n # print('goal azimuth', self.azimuth, 'current azimuth', self.azimuth_encoder.get_degrees(), 'difference in azimuth', azimuth_error)\n if azimuth_error >0:\n # print('positive azimuth')\n self.azimuth_motor.set_direction(1)\n elif azimuth_error > 0:\n # print('negative azimuth')\n self.azimuth_motor.set_direction(0)\n azimuth_error = abs(azimuth_error)\n self.azimuth_error = azimuth_error\n if azimuth_error >= 0:\n self.azimuth_motor.set_speed(0)\n if azimuth_error >= 35:\n self.azimuth_motor.set_speed(1)\n if azimuth_error >= 40:\n self.azimuth_motor.set_speed(2)\n if azimuth_error >= 80:\n self.azimuth_motor.set_speed(3)\n if azimuth_error >= 160:\n self.azimuth_motor.set_speed(4)\n if azimuth_error >= 280:\n self.azimuth_motor.set_speed(5)\n self.azimuth_error = azimuth_error\n print('debug_azimuth', self.current_azimuth, self.azimuth_error, self.azimuth_motor.speed)\n return self.azimuth_error", "def azizen(self):\n # x0,y0 array pixel coordinates relative to cx,cy\n# ndy0,ndx0=img.shape\n ndy0=self.ndy0\n ndx0=self.ndx0\n x0,y0=np.meshgrid(np.linspace(0,ndx0-1,ndx0)-self.cx,np.linspace(0,ndy0-1,ndy0)-self.cy)\n r0=np.sqrt(x0**2+y0**2)/self.pr0 # fractional radial distance from 0,0\n# self.roi=np.s_[ystart:ystart+self.ny0,xstart:xstart+self.nx0]\n # why not model the zenith angle dependence with polynomial directly\n # rather than linear interpolation between roots.\n roots=np.zeros(51)\n rr=np.arange(51)/100.0\n for i,ref in enumerate(rr):\n roots[i]=np.real(np.roots([self.c3,0,self.c2,0,self.c1,-ref])[-1])\n theta0 = np.interp(r0/2,rr,roots)\n \n phi0 = np.arctan2(x0,y0) - self.rot ####phi (i.e., azimuth) is reckoned with -pi corresponding to north, increasing clockwise, NOTE: pysolar use sub-standard definition\n phi0 = phi0%(2*np.pi)\n\n #####correction for the tilt of the camera\n k=np.array((np.sin(self.azm),np.cos(self.azm),0))\n a=np.array([np.sin(theta0)*np.cos(phi0),np.sin(theta0)*np.sin(phi0),np.cos(theta0)]); \n a = np.transpose(a,[1,2,0])\n b=np.cos(self.beta)*a + np.sin(self.beta)*np.cross(k,a,axisb=2) \\\n + 
np.reshape(np.outer(np.dot(a,k),k),(self.ndy0,self.ndx0,3))*(1-np.cos(self.beta))\n theta0=np.arctan(np.sqrt(b[:,:,0]**2+b[:,:,1]**2)/b[:,:,2])\n phi0=np.arctan2(b[:,:,1],b[:,:,0])%(2*np.pi)\n# max_theta *= deg2rad \n# valid0 = (theta0<max_theta) & (theta0>0); \n# theta0[valid0]=np.nan;\n self.theta0,self.phi0=theta0,phi0", "def point_to_node_azimuth(self, point, node=None, out=None):\n return point_to_point_azimuth(point, self._get_coord_at_node(node), out=out)", "def skycoord(self):\n return SkyCoord(self['raj'], self['decj'], unit=(uu.hour, uu.degree))", "def ascensional_difference(cls, date, location):\n sin_delta = (1397/3438) * cls.sine(cls.tropical_longitude(date))\n phi = location.latitude\n diurnal_radius = cls.sine(90 + cls.arcsin(sin_delta))\n tan_phi = cls.sine(phi) / cls.sine(90 + phi)\n earth_sine = sin_delta * tan_phi\n return cls.arcsin(-earth_sine / diurnal_radius)", "def phi_up(self):\n return 0.5 * (self.phi + 10 * (self.phi / 30.0) ** 2) / 180.0 * np.pi", "def platform_set_azimuth(self, azimuth):\n self.platform.set_azimuth(azimuth)", "def get_utc_offset():\n timedelta = datetime.datetime.now() - datetime.datetime.utcnow()\n # XXX: `return -time.timezone`?\n return timedelta.total_seconds()", "def get_compass_dir_azimuth(azimuth, resolution='intercardinal', format='short'):\n if azimuth < 0:\n azimuth += 360\n if format not in ['short', 'long']:\n raise KeyError(f'Direction format {format} is not supported')\n if resolution not in ['cardinal', 'intercardinal', 'meteorological']:\n raise KeyError(f'Direction resolution {resolution} is not supported')\n if resolution == 'cardinal':\n angles = np.arange(0, 360 + 90, 90)\n if format == 'long':\n points = LONG_CARDINAL_POINTS\n else:\n points = SHORT_CARDINAL_POINTS\n elif resolution == 'intercardinal':\n angles = np.arange(0, 360 + 45, 45)\n if format == 'long':\n points = LONG_INTERCARDINAL_POINTS\n else:\n points = SHORT_INTERCARDINAL_POINTS\n elif resolution == 'meteorological':\n angles = np.arange(0, 360 + 22.5, 22.5)\n if format == 'long':\n points = LONG_METEOROLOGICAL_POINTS\n else:\n points = SHORT_METEOROLOGICAL_POINTS\n\n adiff = abs(azimuth - angles)\n i = adiff.argmin()\n return points[i]", "def calc_angle_of_incidence(g, lat, ha, tilt, teta_z):\n # surface normal vector\n n_E = sin(tilt)*sin(teta_z)\n n_N = sin(tilt)*cos(teta_z)\n n_Z = cos(tilt)\n # solar vector\n s_E = -cos(g)*sin(ha)\n s_N = sin(g)*cos(lat) - cos(g)*sin(lat)*cos(ha)\n s_Z = cos(g)*cos(lat)*cos(ha) + sin(g)*sin(lat)\n\n # angle of incidence\n teta_B = acos(n_E*s_E + n_N*s_N + n_Z*s_Z)\n return teta_B", "def set_sun_position(self, elevation, azimuth, intensity=None):\n\n if elevation < 0 or elevation >= 90:\n raise ValueError(\"The elevation cannot be negative or >= 90\")\n if azimuth < 0 or azimuth >= 360:\n raise ValueError(\"The azimuth cannot be negative or >= 360\")\n if intensity is not None:\n if intensity < 0:\n raise ValueError(\"The intensity cannot be negative\")\n\n theta, phi = f.convert_direction(elevation, azimuth)\n\n self.sun_elevation = theta\n self.sun_azimuth = phi\n if intensity is not None and self.sun_intensity is not None:\n print(\n \"The set sun intensity might not fit to the suns intensity \\\n at the set wavelengh\"\n )\n self.sun_intensity = np.zeros((self.stokes_dim))\n self.sun_intensity[0] = intensity\n elif intensity is not None and self.sun_intensity is None:\n self.sun_intensity = np.zeros((self.stokes_dim))\n self.sun_intensity[0] = intensity\n else:\n self.sun_intensity = 
f.sun_init_intensity(self.wavelength, self.stokes_dim)", "def parse_azimuth_elevation(filename):\n match = REGEX.match(filename)\n return int(match.group(1)), int(match.group(2))", "def longitudeArcSec(longitude):\n return math.cos(math.radians(longitude)) * 30.87", "def calcScatterAngleOld(R, PHI, THETA, sun_rotation):\n \n H_rot = atmo_utils.calcRotationMatrix(sun_rotation)\n\n X_ = R * np.sin(THETA) * np.cos(PHI)\n Y_ = R * np.sin(THETA) * np.sin(PHI)\n Z_ = R * np.cos(THETA)\n \n XYZ_dst = np.vstack((X_.ravel(), Y_.ravel(), Z_.ravel(), np.ones(R.size)))\n XYZ_src_ = np.dot(H_rot, XYZ_dst)\n \n Z_rotated = XYZ_src_[2, :]\n R_rotated = np.sqrt(np.sum(XYZ_src_[:3, :]**2, axis=0))\n \n angle = np.arccos(Z_rotated/(R_rotated+amitibo.eps(R_rotated)))\n \n return angle", "def latToTheta(lat):\n return (90.0 - lat) * (np.pi/180.0)", "def circumference(self, lat):\n return 2 * np.pi * self.rsphere * np.cos(np.deg2rad(lat))", "def _get_tz():\n return 'UTC'", "def UTMZone(x,y):\n\n #take longitudinal coordinate and add 180, then divide by 6 and round up\n lon = int(np.ceil((x + 180)/6))\n \n #determine whether y is in the Northern or Southern Hemisphere\n if y > 0:\n code = 326\n else:\n code = 327\n \n #return epsg of the utm zone\n epsg = int(str(code)+str(lon))\n return epsg", "def create_azimuthal_polarization(dim, rotation):\n theta_array = np.zeros((dim, dim))\n\n for i in range(np.size(theta_array, 0)):\n for j in range(np.size(theta_array, 1)):\n x = -dim / 2 + i\n y = -dim / 2 + j\n # perform roation\n th = math.pi*rotation/180.0\n x = np.cos(th)*x - np.sin(th)*y\n y = np.sin(th)*x + np.cos(th)*y\n\n rot = math.atan2(x, y) + math.pi/2\n # factor = (rot % (2*math.pi))\n theta_array[i][j] = (rot % (2 * math.pi))\n return theta_array", "def ecliptic_latlon(self):\n vector = _ECLIPJ2000.dot(self.position.au)\n d, lat, lon = to_polar(vector)\n return (Angle(radians=lat, signed=True),\n Angle(radians=lon),\n Distance(au=d))", "def SunPosition(time):\n # Correct for light travel time from the Sun.\n # Otherwise season calculations (equinox, solstice) will all be early by about 8 minutes!\n adjusted_time = time.AddDays(-1.0 / C_AUDAY)\n earth2000 = _CalcEarth(adjusted_time)\n sun2000 = [-earth2000.x, -earth2000.y, -earth2000.z]\n\n # Convert to equatorial Cartesian coordinates of date.\n stemp = _precession(sun2000, adjusted_time, _PrecessDir.From2000)\n sun_ofdate = _nutation(stemp, adjusted_time, _PrecessDir.From2000)\n\n # Convert equatorial coordinates to ecliptic coordinates.\n true_obliq = math.radians(adjusted_time._etilt().tobl)\n return _RotateEquatorialToEcliptic(sun_ofdate, true_obliq, time)", "def altAz2RADec(azim, elev, jd, lat, lon):\n\n azim = np.radians(azim)\n elev = np.radians(elev)\n lat = np.radians(lat)\n lon = np.radians(lon)\n \n # Calculate hour angle\n ha = np.arctan2(-np.sin(azim), np.tan(elev)*np.cos(lat) - np.cos(azim)*np.sin(lat))\n\n # Calculate Local Sidereal Time\n lst = np.radians(JD2LST(jd, np.degrees(lon))[0])\n \n # Calculate right ascension\n ra = (lst - ha)%(2*np.pi)\n\n # Calculate declination\n dec = np.arcsin(np.sin(lat)*np.sin(elev) + np.cos(lat)*np.cos(elev)*np.cos(azim))\n\n return np.degrees(ra), np.degrees(dec)" ]
[ "0.67131555", "0.6504118", "0.64523375", "0.6439935", "0.6436878", "0.641925", "0.6345609", "0.6120051", "0.6109093", "0.6081307", "0.6055947", "0.6041341", "0.6014488", "0.5996403", "0.5920246", "0.5873155", "0.586566", "0.5822988", "0.58073103", "0.5778373", "0.573385", "0.5699704", "0.5693492", "0.5680611", "0.5665804", "0.5636087", "0.56360525", "0.5617519", "0.56039816", "0.5575141", "0.55729055", "0.5555941", "0.5551726", "0.5538638", "0.5444007", "0.5441563", "0.54239976", "0.54038787", "0.5385983", "0.53776497", "0.5351896", "0.5345681", "0.53327554", "0.53119075", "0.5306916", "0.53063107", "0.5304964", "0.52989537", "0.52973205", "0.5269203", "0.52674097", "0.5255891", "0.5238143", "0.52338845", "0.52332264", "0.5225331", "0.5204165", "0.51946396", "0.5184347", "0.5168938", "0.51688516", "0.51636606", "0.5147262", "0.5142648", "0.5139429", "0.5135091", "0.51304454", "0.5126521", "0.51190555", "0.5101095", "0.50999033", "0.509258", "0.50913095", "0.5080048", "0.50740886", "0.5071777", "0.50673753", "0.5061879", "0.5056754", "0.50424385", "0.5039658", "0.5035344", "0.5022272", "0.5020511", "0.50148", "0.49974275", "0.4993507", "0.49931613", "0.4987637", "0.49837604", "0.4983726", "0.49734324", "0.49730176", "0.49608883", "0.49451151", "0.4934243", "0.4926216", "0.49202177", "0.49172428", "0.49026304" ]
0.6639568
1
Calculate the elevation of the sun.
def solar_elevation(self, dateandtime, latitude, longitude):
    if latitude > 89.8:
        latitude = 89.8

    if latitude < -89.8:
        latitude = -89.8

    zone = -dateandtime.utcoffset().seconds / 3600.0
    utc_datetime = dateandtime.astimezone(pytz.utc)
    timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + (utc_datetime.second / 3600)

    JD = self._julianday(dateandtime.day, dateandtime.month, dateandtime.year)
    t = self._jday_to_jcentury(JD + timenow / 24.0)
    theta = self._sun_declination(t)
    Etime = self._eq_of_time(t)

    eqtime = Etime
    solarDec = theta  # in degrees

    solarTimeFix = eqtime - (4.0 * longitude) + (60 * zone)
    trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + dateandtime.second / 60.0 + solarTimeFix
    # in minutes

    while trueSolarTime > 1440:
        trueSolarTime = trueSolarTime - 1440

    hourangle = trueSolarTime / 4.0 - 180.0
    # Thanks to Louis Schwarzmayr for the next line:
    if hourangle < -180:
        hourangle = hourangle + 360.0

    harad = radians(hourangle)

    csz = sin(radians(latitude)) * sin(radians(solarDec)) + \
          cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad)

    if csz > 1.0:
        csz = 1.0
    elif csz < -1.0:
        csz = -1.0

    zenith = degrees(acos(csz))

    azDenom = (cos(radians(latitude)) * sin(radians(zenith)))

    if (abs(azDenom) > 0.001):
        azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))) / azDenom

        if abs(azRad) > 1.0:
            if azRad < 0:
                azRad = -1.0
            else:
                azRad = 1.0

        azimuth = 180.0 - degrees(acos(azRad))

        if hourangle > 0.0:
            azimuth = -azimuth
    else:
        if latitude > 0.0:
            azimuth = 180.0
        else:
            azimuth = 0

    if azimuth < 0.0:
        azimuth = azimuth + 360.0

    exoatmElevation = 90.0 - zenith

    if exoatmElevation > 85.0:
        refractionCorrection = 0.0
    else:
        te = tan(radians(exoatmElevation))
        if exoatmElevation > 5.0:
            refractionCorrection = 58.1 / te - 0.07 / (te * te * te) + 0.000086 / (te * te * te * te * te)
        elif exoatmElevation > -0.575:
            step1 = (-12.79 + exoatmElevation * 0.711)
            step2 = (103.4 + exoatmElevation * (step1))
            step3 = (-518.2 + exoatmElevation * (step2))
            refractionCorrection = 1735.0 + exoatmElevation * (step3)
        else:
            refractionCorrection = -20.774 / te

    refractionCorrection = refractionCorrection / 3600.0

    solarzen = zenith - refractionCorrection

    solarelevation = 90.0 - solarzen

    return solarelevation
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def elevation(self):\n return self.altitude - self.heightAboveGround", "def elevation(x, y):\n file = os.path.abspath(\"..\") + \"\\Shape\\Shape.vrt\"\n layer = gdal.Open(file)\n gt = layer.GetGeoTransform()\n rasterx = int((x - gt[0]) / gt[1])\n rastery = int((y - gt[3]) / gt[5])\n print('elevation =', layer.GetRasterBand(1).ReadAsArray(rasterx, rastery, 1, 1)[0][0], 'm above sea level')", "def get_elevation(self):\n return self.elevation", "def get_mean_sun_angles(self) -> (float, float):\n # Get MTD XML file\n root, _ = self.read_mtd()\n\n # Open zenith and azimuth angle\n zenith_angle = float(root.findtext(\".//SolarZenith\"))\n azimuth_angle = float(root.findtext(\".//SolarAzimuth\"))\n\n return azimuth_angle, zenith_angle", "def get_surface_elevation(wind_lat, wind_lon):\n # Load the NetCDF file containing the geopotential of Europe.\n nc = Dataset(path_join(era5_data_dir, geopotential_file_name))\n \n # Read the variables from the netCDF file.\n geopot_lat = nc.variables['latitude'][:]\n geopot_lon = nc.variables['longitude'][:]\n \n \n # Check if wind and geopotential data use same grid.\n assert np.array_equal(geopot_lat, wind_lat) and np.array_equal(geopot_lon, wind_lon), \\\n \"Requested latitudes and/or longitudes do not correspond to those in the NetCDF file.\"\n\n geopot_z = nc.variables['z'][0, :, :]\n nc.close()\n\n surface_elevation = geopot_z/9.81\n print(\"Minimum and maximum elevation found are respectively {:.1f}m and {:.1f}m, removing those below zero.\"\n .format(np.amin(surface_elevation), np.amax(surface_elevation)))\n\n # Get rid of negative elevation values.\n for i, row in enumerate(surface_elevation):\n for j, val in enumerate(row):\n if val < 0.:\n surface_elevation[i, j] = 0.\n\n return surface_elevation", "def elevation(source):\n srcAzEl = subarrayControl.s.azel(source, 0.0);\n return srcAzEl[1];", "def elevation(self):\n return self.container['elevation']", "def imu_get_elevation(self):\n return self.imu.get_elevation()", "def elevation(self) -> typing.Union[None, int]:\n elevation = self.data[5]\n elevation = re.findall(r'THR (\\d+) FT', elevation)\n return int(elevation[0]) if elevation else None", "def elevation(self, rover):\n\t\tcurrent_tile = rover.planet.tiles[rover.y][rover.x]\n\t\t#current_tile is slope\n\t\tif current_tile.is_slope():\n\t\t\t#self is slope current_tile is slope\n\t\t\tif self.is_slope():\n\t\t\t\tif current_tile.high_elevation == self.low_elevation:\n\t\t\t\t\treturn \"/\"\n\t\t\t\tif current_tile.low_elevation == self.high_elevation:\n\t\t\t\t\treturn \"\\\\\"\n\t\t\t\tif self.high_elevation < current_tile.low_elevation:\n\t\t\t\t\treturn \"-\"\n\t\t\t\tif self.low_elevation > current_tile.high_elevation:\n\t\t\t\t\treturn \"+\"\n\t\t\t\tif self.low_elevation == current_tile.low_elevation\\\n\t\t\t\t\tand self.high_elevation == current_tile.high_elevation:\n\t\t\t\t\treturn \" \"\n\t\t\t#self is flat current_tile is slope\n\t\t\telse:\n\t\t\t\tif self.low_elevation > current_tile.high_elevation:\n\t\t\t\t\treturn \"+\"\n\t\t\t\tif self.low_elevation < current_tile.low_elevation:\n\t\t\t\t\treturn \"-\"\n\t\t\t\treturn \" \"\n\n\n\t\telse: #current_tile is flat\n\t\t\t#self is slope current_tile is flat\n\t\t\tif self.is_slope():\n\t\t\t\tif self.low_elevation == current_tile.low_elevation:\n\t\t\t\t\treturn \"/\"\n\t\t\t\tif self.high_elevation == current_tile.low_elevation:\n\t\t\t\t\treturn \"\\\\\"\n\t\t\t\tif self.low_elevation > current_tile.low_elevation:\n\t\t\t\t\treturn \"+\"\n\t\t\t\tif self.high_elevation < 
current_tile.low_elevation:\n\t\t\t\t\treturn \"-\"\n\t\t\t#self is flat current_tile is flat\n\t\t\telse:\n\t\t\t\tif self.low_elevation > current_tile.low_elevation:\n\t\t\t\t\treturn \"+\"\n\t\t\t\tif self.high_elevation < current_tile.low_elevation:\n\t\t\t\t\treturn \"-\"\n\t\t\treturn \" \"", "def elevation(latitude, longitude):\n elevation = maps.Elevation()\n request = {'locations': '%f,%f' % (latitude, longitude)}\n results, status = elevation.elevation(request)\n if results:\n # We are only interested in the actual elevation\n return results[0]['elevation']\n else:\n raise UnknownLocationError(_('The location could not be found by the elevation API.'))", "def solar_elevation(self, dateandtime=None):\n\n if self.astral is None:\n self.astral = Astral()\n\n if dateandtime is None:\n dateandtime = datetime.datetime.now(tz=self.tz)\n\n return self.astral.solar_elevation(dateandtime, self.latitude, self.longitude)", "def altitude(self):\r\n pressure = self.pressure # in Si units for hPascal\r\n return 44330 * (1.0 - math.pow(pressure / self.sea_level_pressure, 0.1903))", "def calibrateElevation(self,elevation):\n if len(self.values) == self.values.maxlen:\n self.elevcomp = self.value / ((1.0 - ((elevation + self.heightAboveGround) * 0.3048 / 44330.0)) ** 5.255)\n self.calibrated = True\n else:\n self.calibratedElevation = elevation", "def _elevation(self, node):\n return self.graph_provider.get_coords(node)['z']", "def alt_sunrise(cls, date):\n rise = cls.UJJAIN.dawn(date, angle(0, 47, 0))\n return 1/24 * 1/60 * iround(rise * 24 * 60)", "def get_radiation():\n sun_pos = get_sun_position()\n if sun_pos <= POSITION_MIN or sun_pos >= POSITION_MAX:\n return 0\n else:\n # Calculate a new delta.\n delta = random.randint(0, RADIATION_DELTA)\n if random.random() > 0.5:\n delta = -1 * delta\n # Calculate the radiation based on the sun position.\n new_radiation = round(-0.1279 * pow(sun_pos, 2) + 46.05 * sun_pos - 3100)\n # Apply the delta and return the value.\n return new_radiation + delta", "def altitude(self):\n if self.__altitude:\n return sum(self.__altitude) / len(self.__altitude)\n else:\n return -9999", "def get_temperature(elevation, sea_level):\n if elevation <= sea_level:\n return 0.8\n else:\n return (-1.0 / (1.0 - sea_level)) * (elevation - sea_level) + 1.0", "def elevation(self):\n\n\t\twidth = self.no_name_level[0]\n\t\theight = self.no_name_level[1]\n\t\ttile = self.no_name_level[2]\n\t\tx = self.no_name_level[3]\n\t\ty = self.no_name_level[4]\n\t\t\n\t\ttiles = []\n\t\tfor i in tile:\n\t\t\ti = i[:-1]\n\t\t\ttiles.append(i)\t\n\t\ttiles_arranged = [tiles[i:i + width] for i in range(0, len(tile), width)]\n\t\n\t\tplanet_co = []\n\t\t\n\t\tfor i in tiles_arranged:\n\t\t\t\n\t\t\tplanet = []\n\t\t\tfor n in i:\n\t\t\t\tn = n.split(',')\n\t\t\t\tif len(n) != 3:\n\t\t\t\t\ta = ['-']\n\t\t\t\t\tn += a\n\t\t\t\t\t\n\t\t\t\t\tplanet.append(n)\n\t\t\t\telse:\n\t\t\t\t\tplanet.append(n)\n\t\t\t\t\t\n\t\t\tplanet_co.append(planet)\n\t\t\t\n\t\n\t\tplanet_map = Planet(planet_co, width, height)\n\t\tcoordinates = Planet(planet_co, width, height)\n\t\tcoordinates = Planet.coordinates(coordinates)\n\t\tplanet_map = Planet.coordinates_dict(planet_map)#this is my map in dictionary format(coordinates : tile)\n\t\t\n\t\tfor y1 in coordinates:\n\t\t\tif coordinates.index(y1) == y:\n\t\t\t\ty_value = coordinates.index(y1)\n\t\t\t\tfor x1 in y1:\n\t\t\t\t\tif x1 == [x, y]:\n\t\t\t\t\t\tx_value = y1.index(x1)\n\t\trover_d = coordinates[y_value][x_value]\n\t\n\t\tx1 = x_value + 1\n\t\tx2 = x_value + 
2\n\t\ty1 = y_value + 1\n\t\ty2 = y_value + 2\n\t\n\t\tif x1 == len(coordinates[1]):\n\t\t\tx1 == 0\n\t\tif y1 == len(coordinates):\n\t\t\ty1 == 0\n\t\n\t\tif x2 > len(coordinates[1]):\n\t\t\tx2 = 1\n\t\tif y2 > len(coordinates[1]):\n\t\t\ty2 == 1\n\t\n\t\tfront2 = coordinates[y2][x_value]\n\t\tfront1 = coordinates[y1][x_value]\n\t\tback1 = coordinates[y_value-1][x_value]\n\t\tback2 = coordinates[y_value-2][x_value]\n\t\tright1 = coordinates[y_value][x1]\n\t\tright2 = coordinates[y_value][x2]\n\t\tleft1 = coordinates[y_value][x_value-1]\n\t\tleft2 = coordinates[y_value][x_value-2]\n\t\n\t\n\t\tfront1_right1 = coordinates[y1][x1]\n\t\tfront1_right2 = coordinates[y1][x2]\n\t\tfront2_right1 = coordinates[y2][x1]\n\t\tfront2_right2 = coordinates[y2][x2]\n\t\tfront1_left1 = coordinates[y1][x_value-1]\n\t\tfront1_left2 = coordinates[y1][x_value-2]\n\t\tfront2_left1 = coordinates[y2][x_value-1]\n\t\tfront2_left2 = coordinates[y2][x_value-2]\n\t\n\t\tback1_right1 = coordinates[y_value-1][x1]\n\t\tback1_right2 = coordinates[y_value-1][x2]\n\t\tback2_right1 = coordinates[y_value-2][x1]\n\t\tback2_right2 = coordinates[y_value-2][x2]\n\t\tback1_left1 = coordinates[y_value-1][x_value-1]\n\t\tback1_left2 = coordinates[y_value-1][x_value-2]\n\t\tback2_left1 = coordinates[y_value-2][x_value-1]\n\t\tback2_left2 = coordinates[y_value-2][x_value-2]\n\t\t\n\t\tco_f2r2 = planet_map[str(front2_right2)]\n\t\tco_f2r1 = planet_map[str(front2_right1)]\n\t\tco_f2 = planet_map[str(front2)]\n\t\tco_f2l1 = planet_map[str(front2_left1)]\n\t\tco_f2l2 = planet_map[str(front2_left2)]\n\t\tco_f1r2 = planet_map[str(front1_right2)]\n\t\tco_f1r1 = planet_map[str(front1_right1)]\n\t\tco_f1 = planet_map[str(front1)]\n\t\tco_f1l1 = planet_map[str(front1_left1)]\n\t\tco_f1l2 = planet_map[str(front1_left2)]\n\t\tco_r2 = planet_map[str(right2)]\n\t\tco_r1 = planet_map[str(right1)]\n\t\tco_rover = planet_map[str([x, y])]\n\t\tco_l1 = planet_map[str(left1)]\n\t\tco_l2 = planet_map[str(left2)]\n\t\tco_b1r2 = planet_map[str(back1_right2)]\n\t\tco_b1r1 = planet_map[str(back1_right1)]\n\t\tco_b1 = planet_map[str(back1)]\n\t\tco_b1l1 = planet_map[str(back1_left1)]\n\t\tco_b1l2 = planet_map[str(back1_left2)]\n\t\tco_b2r2 = planet_map[str(back2_right2)]\n\t\tco_b2r1 = planet_map[str(back2_right1)]\n\t\tco_b2 = planet_map[str(back2)]\n\t\tco_b2l1 = planet_map[str(back2_left1)]\n\t\tco_b2l2 = planet_map[str(back2_left2)]\n\t\n\t\tfirst_lineco = [co_f2l2, co_f2l1, co_f2, co_f2r1, co_f2r2]\n\t\tsecond_lineco = [co_f1l2, co_f1l1, co_f1, co_f1r1, co_f1r2]\n\t\tthird_lineco = [co_l2, co_l1, co_rover, co_r1, co_r2]\n\t\tfourth_lineco = [co_b1l2, co_b1l1, co_b1, co_b1r1, co_b1r2]\n\t\tfifth_lineco = [co_b2l2, co_b2l1, co_b2, co_b2r1, co_b2r2]\n\n\t\tfirst_line = ['|']\n\t\tsec_line = ['|']\n\t\tthird_line = ['|']\n\t\tfourth_line = ['|']\n\t\tfifth_line = ['|']\n\t\tfor i in first_lineco:\n\t\t\tif i[2] == '-' and co_rover[2] == '-':\n\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\tfirst_line.append(' |')\n\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\tfirst_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tfirst_line.append('+|')\n\t\t\tif i[2] == '-' and co_rover[2] != '-':\n\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\tfirst_line.append(' |')\n\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\tfirst_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tfirst_line.append(' |')\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) > int(co_rover[1]):\n\t\t\t\t\t\tfirst_line.append('+|')\n\t\t\tif i[2] != '-' 
and co_rover[2] == '-':\n\t\t\t\tif int(co_rover[1]) == int(i[2]):\n\t\t\t\t\tfirst_line.append('/|')\n\t\t\t\telif int(co_rover[1]) < int(i[2]):\n\t\t\t\t\tfirst_line.append(\"+|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tfirst_line.append(\"\\|\")\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\t\tfirst_line.append('-|')\n\t\t\tif i[2] != '-' and co_rover[2] != '-':\n\t\t\t\tif int(i[2]) == int(co_rover[2]):\n\t\t\t\t\tfirst_line.append(' |')\n\t\t\t\telif int(i[2]) < int(co_rover[2]):\n\t\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\t\tfirst_line.append(\"'\\'|\")\n\t\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\t\tfirst_line.append('-|')\n\t\t\t\telif int(i[2]) > int(co_rover[2]):\n\t\t\t\t\tif int(i[2]) == int(co_rover[1]):\n\t\t\t\t\t\tfirst_line.append(\"/|\")\n\t\t\t\t\telif int(i[2]) > int(co_rover[1]):\n\t\t\t\t\t\tfirst_line.append(\"+|\")\n\n\n\n\t\tfor i in second_lineco:\n\t\t\tif i[2] == '-' and co_rover[2] == '-':\n\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\tsec_line.append(' |')\n\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\tsec_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tsec_line.append('+|')\n\t\t\tif i[2] == '-' and co_rover[2] != '-':\n\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\tsec_line.append(' |')\n\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\tsec_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tsec_line.append(' |')\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) > int(co_rover[1]):\n\t\t\t\t\t\tsec_line.append('+|')\n\t\t\tif i[2] != '-' and co_rover[2] == '-':\n\t\t\t\tif int(co_rover[1]) == int(i[2]):\n\t\t\t\t\tsec_line.append('/|')\n\t\t\t\telif int(co_rover[1]) < int(i[2]):\n\t\t\t\t\tsec_line.append(\"+|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tsec_line.append(\"'\\'|\")\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\t\tsec_line.append('-|')\n\t\t\tif i[2] != '-' and co_rover[2] != '-':\n\t\t\t\tif int(i[2]) == int(co_rover[2]):\n\t\t\t\t\tsec_line.append(' |')\n\t\t\t\telif int(i[2]) < int(co_rover[2]):\n\t\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\t\tsec_line.append(\"'\\'|\")\n\t\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\t\tsec_line.append('-|')\n\t\t\t\telif int(i[2]) > int(co_rover[2]):\n\t\t\t\t\tif int(i[2]) == int(co_rover[1]):\n\t\t\t\t\t\tsec_line.append(\"/|\")\n\t\t\t\t\telif int(i[2]) > int(co_rover[1]):\n\t\t\t\t\t\tsec_line.append(\"+|\")\n\t\n\t\tfor i in third_lineco:\n\t\t\tif i[2] == '-' and co_rover[2] == '-':\n\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\tthird_line.append(' |')\n\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\tthird_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tthird_line.append('+|')\n\t\t\tif i[2] == '-' and co_rover[2] != '-':\n\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\tthird_line.append(' |')\n\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\tthird_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tthird_line.append(' |')\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) > int(co_rover[1]):\n\t\t\t\t\t\tthird_line.append('+|')\n\t\t\tif i[2] != '-' and co_rover[2] == '-':\n\t\t\t\tif int(co_rover[1]) == int(i[2]):\n\t\t\t\t\tthird_line.append('/|')\n\t\t\t\telif int(co_rover[1]) < int(i[2]):\n\t\t\t\t\tthird_line.append(\"+|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == 
int(co_rover[1]):\n\t\t\t\t\t\tthird_line.append(\"'\\'|\")\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\t\tthird_line.append('-|')\n\t\t\tif i[2] != '-' and co_rover[2] != '-':\n\t\t\t\tif int(i[2]) == int(co_rover[2]):\n\t\t\t\t\tthird_line.append(' |')\n\t\t\t\telif int(i[2]) < int(co_rover[2]):\n\t\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\t\tthird_line.append(\"'\\'|\")\n\t\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\t\tthird_line.append('-|')\n\t\t\t\telif int(i[2]) > int(co_rover[2]):\n\t\t\t\t\tif int(i[2]) == int(co_rover[1]):\n\t\t\t\t\t\tthird_line.append(\"/|\")\n\t\t\t\t\telif int(i[2]) > int(co_rover[1]):\n\t\t\t\t\t\tthird_line.append(\"+|\")\n\t\n\t\tfor i in fourth_lineco:\n\t\t\tif i[2] == '-' and co_rover[2] == '-':\n\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\tfourth_line.append(' |')\n\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\tfourth_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tfourth_line.append('+|')\n\t\t\tif i[2] == '-' and co_rover[2] != '-':\n\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\tfourth_line.append(' |')\n\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\tfourth_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tfourth_line.append(' |')\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) > int(co_rover[1]):\n\t\t\t\t\t\tfourth_line.append('+|')\n\t\t\tif i[2] != '-' and co_rover[2] == '-':\n\t\t\t\tif int(co_rover[1]) == int(i[2]):\n\t\t\t\t\tfourth_line.append('/|')\n\t\t\t\telif int(co_rover[1]) < int(i[2]):\n\t\t\t\t\tfourth_line.append(\"+|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tfourth_line.append(\"'\\'|\")\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\t\tfourth_line.append('-|')\n\t\t\tif i[2] != '-' and co_rover[2] != '-':\n\t\t\t\tif int(i[2]) == int(co_rover[2]):\n\t\t\t\t\tfourth_line.append(' |')\n\t\t\t\telif int(i[2]) < int(co_rover[2]):\n\t\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\t\tfourth_line.append(\"'\\'|\")\n\t\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\t\tfourth_line.append('-|')\n\t\t\t\telif int(i[2]) > int(co_rover[2]):\n\t\t\t\t\tif int(i[2]) == int(co_rover[1]):\n\t\t\t\t\t\tfourth_line.append(\"/|\")\n\t\t\t\t\telif int(i[2]) > int(co_rover[1]):\n\t\t\t\t\t\tfourth_line.append(\"+|\")\n\t\n\t\tfor i in fifth_lineco:\n\t\t\tif i[2] == '-' and co_rover[2] == '-':\n\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\tfifth_line.append(' |')\n\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\tfifth_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tfifth_line.append('+|')\n\t\t\tif i[2] == '-' and co_rover[2] != '-':\n\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\tfifth_line.append(' |')\n\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\tfifth_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tfifth_line.append(' |')\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) > int(co_rover[1]):\n\t\t\t\t\t\tfifth_line.append('+|')\n\t\t\tif i[2] != '-' and co_rover[2] == '-':\n\t\t\t\tif int(co_rover[1]) == int(i[2]):\n\t\t\t\t\tfifth_line.append('/|')\n\t\t\t\telif int(co_rover[1]) < int(i[2]):\n\t\t\t\t\tfifth_line.append(\"+|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tfifth_line.append(\"'\\'|\")\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\t\tfifth_line.append('-|')\n\t\t\tif i[2] != '-' and co_rover[2] != '-':\n\t\t\t\tif int(i[2]) == 
int(co_rover[2]):\n\t\t\t\t\tfifth_line.append(' |')\n\t\t\t\telif int(i[2]) < int(co_rover[2]):\n\t\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\t\tfifth_line.append(\"'\\'|\")\n\t\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\t\tfifth_line.append('-|')\n\t\t\t\telif int(i[2]) > int(co_rover[2]):\n\t\t\t\t\tif int(i[2]) == int(co_rover[1]):\n\t\t\t\t\t\tfifth_line.append(\"/|\")\n\t\t\t\t\telif int(i[2]) > int(co_rover[1]):\n\t\t\t\t\t\tfifth_line.append(\"+|\")\n\t\tthird_line2 = []\n\t\n\t\tfor n, i in enumerate(third_line):\n\t\t\tif n == 3:\n\t\t\t\ta = \"H|\"\n\t\t\t\t \n\t\t\t\tthird_line2.append(a)\n\t\t\telse:\n\t\t\t\tthird_line2.append(i)\n\t\tnumber1_line = \"\\n{}\\n{}\\n{}\\n{}\\n{}\\n\".format(\"\".join(fifth_line), \"\".join(fourth_line), \"\".join(third_line2),\"\".join(sec_line) , \"\".join(first_line))\n\t\t\n\t\treturn number1_line\n\n\n\n\n\t\tpass", "def solarelevation_function_overcast(latitude_deg, longitude_deg, utc_datetime,\n elevation = elevation_default, temperature_celsius = 25,\n pressure_millibars = 1013.25):\n altitude = solar.GetAltitude(latitude_deg, longitude_deg,utc_datetime, elevation, temperature_celsius,pressure_millibars)\n return ((-0.0067133) + (0.78600 * (math.sin(altitude)))) + (0.22401 * (0.5 * (1 - math.cos(2 * altitude))))", "def get_evaporation_latent_heat() -> float:\n theta = 28.0\n return 2500.8 - 2.3668 * theta", "def sky_temperature(self) -> float:\n\n return 0.0552 * (self.ambient_temperature**1.5)", "def topographic_altitude(self, lat_d, lon_d):\n return self.altitude(lat_d, lon_d)", "def topographic_altitude(self, lat_d, lon_d):\n return self.altitude(lat_d, lon_d)", "def altitude(self) -> int:\n return self._point.altitude", "def sunrise(cls, date):\n return (date + Clock.days_from_hours(6) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n ((1577917828/1582237828 / 360) *\n (cls.ascensional_difference(date, cls.LOCATION) +\n (1/4 * cls.solar_sidereal_difference(date)))))", "def set_sun_position(self, elevation, azimuth, intensity=None):\n\n if elevation < 0 or elevation >= 90:\n raise ValueError(\"The elevation cannot be negative or >= 90\")\n if azimuth < 0 or azimuth >= 360:\n raise ValueError(\"The azimuth cannot be negative or >= 360\")\n if intensity is not None:\n if intensity < 0:\n raise ValueError(\"The intensity cannot be negative\")\n\n theta, phi = f.convert_direction(elevation, azimuth)\n\n self.sun_elevation = theta\n self.sun_azimuth = phi\n if intensity is not None and self.sun_intensity is not None:\n print(\n \"The set sun intensity might not fit to the suns intensity \\\n at the set wavelengh\"\n )\n self.sun_intensity = np.zeros((self.stokes_dim))\n self.sun_intensity[0] = intensity\n elif intensity is not None and self.sun_intensity is None:\n self.sun_intensity = np.zeros((self.stokes_dim))\n self.sun_intensity[0] = intensity\n else:\n self.sun_intensity = f.sun_init_intensity(self.wavelength, self.stokes_dim)", "def night_center(self, date=None):\n sunset = self.sunset(date=date)\n sunrise = self.sunrise(date=sunset)\n center = sunset + timedelta(0, (sunrise - sunset).total_seconds() / 2.0)\n center = self.date_to_local(center)\n return center", "def sunlongitude(time):\n B0 = 36000.7695\n C0 = 280.4659\n # fmt: off\n A = np.array([19147e-4, 200e-4, 48e-4, 20e-4, 18e-4, 18e-4, \\\n 15e-4, 13e-4, 7e-4, 7e-4, 7e-4, 6e-4, \\\n 5e-4, 5e-4, 4e-4, 4e-4])\n B = np.array([35999.050, 71998.1, 1934, 32964, 19, \\\n 445267, 45038, 22519, 65929, 3035, \\\n 9038, 
33718, 155, 2281, 29930, \\\n 31557])\n C = np.array([267.520, 265.1, 145, 158, 159, 208, \\\n 254., 352, 45, 110, 64, 316, \\\n 118., 221, 48, 161])\n # fmt: on\n RAD = 0.0174532925199433\n A[0] = 1.9147 - 0.0048 * time\n tempb = (B * time + C) * RAD\n amp = A * np.cos(tempb)\n sunlon = np.sum(amp)\n sunlon = (sunlon + B0 * time + C0) * RAD\n return sunlon", "def mean_earth_sun_distance(utc_datetime): \n\n return (1 - (0.0335 * math.sin(360 * ((solar.GetDayOfYear(utc_datetime)) - 94)) / (365)))", "def get_raster_elevation(dataset, resample=None, **kwargs):\n extent = get_raster_extent(dataset)\n src_ds = wradlib.io.dem.get_srtm(extent, **kwargs)\n\n driver = gdal.GetDriverByName(\"MEM\")\n dst_ds = driver.CreateCopy(\"ds\", dataset)\n\n if resample is None:\n src_gt = src_ds.GetGeoTransform()\n dst_gt = dst_ds.GetGeoTransform()\n src_scale = min(abs(src_gt[1]), abs(src_gt[5]))\n dst_scale = min(abs(dst_gt[1]), abs(dst_gt[5]))\n ratio = dst_scale / src_scale\n\n resample = gdal.GRA_Bilinear\n if ratio > 2:\n resample = gdal.GRA_Average\n if ratio < 0.5:\n resample = gdal.GRA_NearestNeighbour\n\n gdal.ReprojectImage(\n src_ds, dst_ds, src_ds.GetProjection(), dst_ds.GetProjection(), resample\n )\n elevation = read_gdal_values(dst_ds)\n\n return elevation", "def view_elevation(self):\n if self.elevation_toggle:\n return rasterize(self.tri_mesh, aggregator=ds.mean('z'), precompute=True)\n else:\n return hv.Curve([])", "def set_altitude(self):\n self.altitude = self.Calculations.convert_to_altitude( self.declination, self.right_ascension, self.Latitude, self.LHA)\n print('altitude set to', self.altitude)\n if self.altitude < 0:\n self.altitude = self.altitude + 360.0\n return self.altitude \n else: \n pass\n return self.altitude", "def getSlantRangeElevation(self, groundRange, el):\r\n \r\n lat = self.ctrLat * pi / 180.0\r\n theta = el * pi / 180.0\r\n \r\n #figure out earth's radius at radar's lat ... non-spherical earth model\r\n e2 = self.eccen # First eccentricity squared - WGS-84 value = 0.00669437999013\r\n a = self.Requator # Equatorial radius - WGS-84 value = 6378137.0\r\n Rearth = a/sqrt(1-e2*(sin(lat))**2) # radius of curvature\r\n \r\n # Inverse of eq. 2.28b in Doviak and Zrnic 1993\r\n # Inverse of eq. 
2.28c in Doviak and Zrnic 1993\r\n\r\n Rprime = self.effectiveRadiusMultiplier * self.Requator\r\n\r\n s = array(groundRange, dtype='float64')\r\n\r\n h = Rprime * ( math.cos(theta) / math.cos( theta + s / Rprime) - 1)\r\n\r\n r = (Rprime + h) * math.sin(s / Rprime) / math.cos(theta);\r\n\r\n # Use law of cosines (Side-Angle-Side triangle theorem) with \r\n # R', R'+h as sides and s/R' as the angle to get slant range\r\n #r = sqrt(Rprime**2.0 + (Rprime+h)**2.0 - 2*(Rprime+h)*Rprime*cos(s/Rprime))\r\n # Will return NaN for r=0\r\n #el = arccos((Rprime+h) * sin(s/Rprime) / r) \r\n #el *= 180.0 / pi\r\n \r\n return r,h", "def unit_sun_r(sun_pos):\n return sun_pos / vector_magnitude(sun_pos[0], sun_pos[1], sun_pos[2])", "def sunpos_mag(t,lat,lon,elev,gm,temp=None,press=None,radians=True):\n #az_zen is a (...,5) dimension ndarray\n az_zen = sunpos(t,lat,lon,elev,temp,press,radians=radians)\n decl = declination(lat,lon,elev,t,gm,radians)\n az_zen[...,0] -= decl\n #subtract declination to go from true N to magnetic N\n return az_zen", "def _earth_distance(time='now'):\n return get_earth(time).radius", "def get_altitude(self):\n self.degrees = self.altitude_encoder.get_degrees()\n self.tele_altitude = self.Calculations.convert_degrees( self.degrees)\n return self.tele_altitude", "def sun(xs, ys, s, n):\n yellow = (255, 255, 0) # sun color\n\n circle(screen, yellow, (xs, ys), 30 * s) # sun body\n for k in range(n + 1): # sun rays on the upper side of the sun\n polygon(screen, yellow,\n [(xs + 45 * s * np.cos(np.pi / n * (k - 1 / 2)), ys - 45 * s * np.sin(np.pi / n * (k - 1 / 2))),\n (xs + 30 * s * np.cos(np.pi * (k - 1) / n), ys - 30 * s * np.sin(np.pi * (k - 1) / n)),\n (xs + 30 * s * np.cos(np.pi * k / n), ys - 30 * s * np.sin(np.pi * k / n))], 0)\n for k in range(n + 1): # sun rays on the lower side of the sun\n polygon(screen, yellow,\n [(xs + 45 * s * np.cos(np.pi / n * (k - 1 / 2)), ys + 45 * s * np.sin(np.pi / n * (k - 1 / 2))),\n (xs + 30 * s * np.cos(np.pi * (k - 1) / n), ys + 30 * s * np.sin(np.pi * (k - 1) / n)),\n (xs + 30 * s * np.cos(np.pi * k / n), ys + 30 * s * np.sin(np.pi * k / n))], 0)", "def temperature(altitude):\n if altitude <= 36152:\n t = 59-0.00356*altitude # deg F\n else:\n t = -70 # deg F\n t = t + 459.7 # R\n return t", "def query_elevation(self, xy_pos=None):\r\n query_pos = xy_pos or self.vehicleNP.get_pos()\r\n \"\"\"\r\n This method is accurate and may be useful for placing \r\n objects on the terrain surface.\r\n \"\"\"\r\n result = self.world.ray_test_closest(\r\n LPoint3(query_pos.x, query_pos.y, -10000),\r\n LPoint3(query_pos.x, query_pos.y, 10000))\r\n if result.has_hit():\r\n hit_pos = result.get_hit_pos()\r\n if not xy_pos:\r\n print(\"Bullet heightfield elevation at \"\r\n \"X {:.2f} | Y {:.2f} is {:.3f}\".format(\r\n hit_pos.x, hit_pos.y, hit_pos.z))\r\n else:\r\n hit_pos = None\r\n if not xy_pos:\r\n print(\"Could not query elevation at {}\".format(xy_pos))\r\n \r\n \"\"\"\r\n This method is less accurate than the one above.\r\n Under heavy ray-testing stress (ray tests are performed for all vehicle\r\n wheels, the above elevation query etc.) 
Bullet sometimes seems to be a\r\n little unreliable.\r\n \"\"\"\r\n texspace_pos = self.terrain.get_relative_point(render, query_pos)\r\n stm_pos = self.terrain_node.uv_to_world(\r\n LTexCoord(texspace_pos.x, texspace_pos.y))\r\n if not xy_pos:\r\n print(\"ShaderTerrainMesh elevation at \"\r\n \"X {:.2f} | Y {:.2f} is {:.3f}\".format(\r\n stm_pos.x, stm_pos.y, stm_pos.z))\r\n \r\n return hit_pos or stm_pos", "def calculate_sun_earth_distance(doy):\n #The eccentricity of the Earth's orbit is currently about 0.0167 (wiki)\n ec=0.0167\n d=1+ec*np.sin(2*np.pi*(doy-93.5)/365)\n return d", "def easting(self):\r\n x, y = self.lonlat2xy(self.longitude, self.latitude)\r\n return x", "def elevation(self, elevation):\n\n self.container['elevation'] = elevation", "def topographic_altitude(lat, lon):\n global __model\n type_output = type(lat)\n lat = prepare_input_array(lat)\n lon = prepare_input_array(lon)\n lon = np.mod(lon, 360)\n val = __model.topographic_altitude(lat, lon)\n val = np.maximum(val, 1e-7)\n return prepare_output_array(val, type_output) * u.km", "def llh(self):\n return Station._ellipsoid.geodetic(self.xyz())", "def approx_sun_position_ECI(MJD):\n import math\n JD = MJD + 2400000.5\n OplusW = 282.94\n T = (JD - 2451545.0) / 36525\n\n M = math.radians(357.5256 + 35999.049 * T)\n\n long = math.radians(OplusW + math.degrees(M) + 6892 / 3600 * math.sin(M) + 72 / 3600 * math.sin(2*M))\n r_mag = (149.619 - 2.499 * math.cos(M) - 0.021 * math.cos(2*M)) * 10**6\n\n epsilon = math.radians(23.43929111)\n r_vec = (r_mag * math.cos(long), r_mag * math.sin(long) * math.cos(epsilon), r_mag * math.sin(long) * math.sin(epsilon))\n\n return r_vec", "def sunrise(self):\r\n try:\r\n return str(self.connect()['sys']['sunrise'])\r\n except:\r\n return '@weather_sunrise'", "def solarelevation_function_clear(latitude_deg, longitude_deg, utc_datetime,temperature_celsius = 25,\n pressure_millibars = 1013.25, elevation = elevation_default):\n altitude = solar.GetAltitude(latitude_deg, longitude_deg,utc_datetime, elevation, temperature_celsius,pressure_millibars) \n return (0.038175 + (1.5458 * (math.sin(altitude))) + ((-0.59980) * (0.5 * (1 - math.cos(2 * (altitude))))))", "def ayanamsha(tee):\n return Solar.solar_longitude(tee) - sidereal_solar_longitude(tee)", "def calc_direct_beam_intensity(self, height):\n if height < 0:\n raise ValueError(\"The height cannot be negative\")\n\n idx, height = f.argclosest(height, self.height_array, return_value=True)\n angle, _ = f.convert_direction(self.sun_elevation, self.sun_azimuth)\n\n tau = np.zeros((self.stokes_dim, self.stokes_dim))\n for lvl in np.arange(len(self.height_array) - 1, idx - 1, -1):\n tau += (\n (\n self.absorption_coeff_field[lvl]\n + self.scattering_coeff_field[lvl] * self.use_scat\n )\n * self.swiping_height\n / np.cos(np.deg2rad(angle))\n )\n\n if self.stokes_dim == 1:\n I_dir = self.sun_intensity * np.exp(-tau)\n else:\n I_dir = expm(-tau) @ self.sun_intensity\n return I_dir", "def getAltitudeAngle(self):\n return self._altitude", "def value_at(elevation, tau, offset, kelvin, tsky):\n with warnings.catch_warnings():\n warnings.simplefilter('ignore', RuntimeWarning)\n eps = -(np.exp(-tau / np.sin(elevation)) - 1)\n t_obs = eps * tsky\n return offset + (t_obs * kelvin)", "def elevation_to_color_formula(number):\n color_intensity = abs(round(((number - lowest_point) / (highest_point - lowest_point)) * 255))\n return color_intensity", "def get_altitude(self, degrees=True):\n if degrees:\n return math.degrees(self.current_location.al)\n else:\n return 
self.current_location.al", "def ee_radius_diffraction(self, energy=FIRST_AIRY_ENCIRCLED):\n return _inverse_analytic_encircled_energy(self.fno, self.wavelength, energy)", "def get_elevation(Address):\n loc = get_location_gecode_address_str(Address)\n lat, lng = loc['Latitude']['Value'], loc['Longitude']['Value']\n loc['Elevation'] = {'Value': None}\n if lat is not None and lng is not None:\n elev = gmaps.elevation((lat, lng))\n loc['Elevation']['Value'] = elev[0]['elevation']\n return loc", "def calculate_dew_point(temp, hum):\n return temp - (100 - hum) / 5", "def get_air_density(self, altitude):\n\n altitude /= 1000 # convert to kilometers\n\n return self.get_air_density_from_model(altitude)", "def get_van_Der_Waals_radius(self):\n return self.van_Der_Waals_radius", "def power(self):\n return irradiance_on_plane(self.vnorm, self.h,\n self.date, self.lat) * self.s * self.eff", "def AngleFromSun(body, time):\n if body == Body.Earth:\n raise EarthNotAllowedError()\n sv = GeoVector(Body.Sun, time, True)\n bv = GeoVector(body, time, True)\n return AngleBetween(sv, bv)", "def radial6(self) -> float:\n return self.distortion_coefficients[2]", "def parse_azimuth_elevation(filename):\n match = REGEX.match(filename)\n return int(match.group(1)), int(match.group(2))", "def estimate_sunrise_sunset(self, date):\n\n if self.source_type != 'solar':\n raise ValueError(\"You can only estimate sunrise and sunset for \"\n \"solar sources.\")\n\n date = pd.Timestamp(date)\n historic_data = self.data\n # The range is 14 days ago to the end of yesterday\n start_date = date - datetime.timedelta(days=14)\n end_date = date - datetime.timedelta(hours=1)\n\n # We grab all hours where actual power is greater than 0\n relevant_data = historic_data[start_date:end_date]\n daylight_data = relevant_data[relevant_data['actuals'] > 0]\n\n # We do this to stop a warning from appearing, we know it's a copy\n daylight_data.is_copy = False\n daylight_data['hours'] = daylight_data.index.hour\n\n # Find the min and max hour for each day where we have positive\n # observed power generation.\n sunrises = daylight_data.groupby(daylight_data.index.date).min()['hours']\n sunsets = daylight_data.groupby(daylight_data.index.date).max()['hours']\n\n # We round in order to have an integer value for sunrise and sunset.\n average_sunrise = int(max(round(sunrises.mean()) - 1, 0))\n average_sunset = int(min(round(sunsets.mean()) + 1, 23))\n\n return average_sunrise, average_sunset", "def diff_of_errors(self):\n self.e_of_e = self.azimuth_error - self.altitude_error\n return self.e_of_e", "def calcEnergy(self):\n speed_light = constants.physical_constants[\"speed of light in vacuum\"][0]#m/sec by default\n if self.mass is None:\n raise CoordinateVector(\"The particle mass needs to be specified to calculate the energy.\")\n return speed_light*math.sqrt(self.p*self.p + (self.mass*speed_light)**2)", "def sun_single_day(date):\r\n\r\n\tsun = l.sun(date=date, local=True)\r\n\tsunrise = sun['sunrise']\r\n\tsunset = sun['sunset']\r\n\tday_length = str(sunset-sunrise)\r\n\tsolar_noon = l.solar_noon(date=date, local=True)\r\n\tsolar_zenith = l.solar_elevation(solar_noon.replace(tzinfo=None))\r\n\r\n\treturn {'sunrise':sunrise, 'sunset': sunset, 'daylength': day_length, 'solar_noon': solar_noon, 'zenith': solar_zenith}", "def sidereal_solar_longitude(tee):\n return mod(Solar.solar_longitude(tee) - Astro.precession(tee) + SIDEREAL_START, 360)", "def view_elevation(self):\n raise ChildProcessError('view elevation method not set')", "def 
sun_earth_test(stepper_type, dt):\n # numerical params\n T = 0\n\n # physical params\n R = common.M_S/common.M_E\n MS = np.array([R, 1])\n G = common.get_G(common.M_E, common.AU, common.YR)\n f = common.get_f(G, MS)\n period = np.sqrt(4 * np.pi**2 / (G * sum(MS)) * (1 + 1 / R)**3)\n\n T_F = 2 * period\n V_E = np.sqrt(G * R / (1 + 1/R))\n ys = np.array([\n -1 / R, 0, 0, -V_E / R,\n 1, 0, 0, V_E\n ], dtype=np.float64)\n earth_pos = [ys[4:6]]\n solver = stepper_type(f, T, ys, T_F, max_step=dt, G=G, Ms=MS,\n get_accel=common.get_accel, get_jerk=common.get_jerk\n )\n times = [T]\n while solver.status == 'running':\n solver.step()\n earth_pos.append(np.copy(solver.y[4:6]))\n times.append(solver.t)\n earth_arr = np.array(earth_pos)\n times_arr = np.array(times)\n exact_earth = np.array(list(zip(\n np.cos(2 * np.pi / period * times_arr),\n np.sin(2 * np.pi / period * times_arr)\n )))\n return np.sqrt(sum(common.l2_norm(earth_arr, exact_earth))**2)", "def era(self):\n # earth rotation angle using Universal Time\n J = self.MJD - 51544.5\n fraction = np.mod(J, self.turn)\n theta = np.mod(0.7790572732640 + 0.00273781191135448*J, self.turn)\n return self.turndeg*np.mod(theta + fraction, self.turn)", "def omega_sun_snodgrass90(lat):\n return differential_rotation(lat, 14.71, -2.39, -1.78)", "def get_location_offset_meters(self, dNorth, dEast, alt):\n earth_radius=6378137.0 #Radius of \"spherical\" earth\n #Coordinate offsets in radians\n dLat = dNorth/earth_radius\n dLon = dEast/(earth_radius*math.cos(math.pi*self.home.lat/180))\n\n #New position in decimal degrees\n newlat = self.home.lat + (dLat * 180/math.pi)\n newlon = self.home.lon + (dLon * 180/math.pi)\n return LocationGlobal(newlat, newlon,self.home.alt+alt)", "def pair_from_elev(elevation):\n # version converted from vb.net\n\n # return 101.3 * ((293. - 0.0065 * elevm) / 293.) 
** (9.8 / (0.0065 * 286.9)) # kPa ' standardized by ASCE 2005\n\n # version from from DRI\n\n # return 101.3 * np.power((293.0 - 0.0065 * elevation) / 293.0, 5.26)\n\n # version extended to better match vb.net version\n # 5.255114352 = 9.8 / (0.0065 * 286.9\n\n return 101.3 * np.power((293.0 - 0.0065 * elevation) / 293.0, 5.255114352)", "def native_dew_point(self) -> float | None:\n return self._dew_point", "def manhatam_distance(self) -> int:\n return abs(self.north) + abs(self.east)", "def apparent_magnitude(sat, topos, earth, sun, time):\n\n position = earth + sat\n observer = earth + topos\n barycentric_o = position.at(time).observe(observer)\n barycentric_s = position.at(time).observe(sun)\n phase_angle = barycentric_o.separation_from(barycentric_s).radians\n _, _, distance = barycentric_o.radec()\n term_1 = -1.3 # standard satellite intrinsic magnitude\n term_2 = +5.0 * np.log10(distance.km / 1000.)\n arg = np.sin(phase_angle) + (np.pi - phase_angle) * np.cos(phase_angle)\n term_3 = -2.5 * np.log10(arg)\n return term_1 + term_2 + term_3", "def _calculate_salinity(self):\n params = self.parameters.keys()\n if 'seawater_salinity' in params:\n return\n else:\n if 'water_specific_conductance' in params:\n T = 25.0\n cond = self.data['water_specific_conductance'].rescale(\n sq.mScm).magnitude\n elif 'water_electrical_conductivity' in params:\n current_unit = self.data['water_temperature'].units\n temp_celsius = self.data['water_temperature'].rescale(pq.degC)\n temp_celsius += self._temperature_offset(current_unit, pq.degC)\n T = temp_celsius.magnitude\n cond = self.data['water_electrical_conductivity'].rescale(\n sq.mScm).magnitude\n else:\n return\n\n if 'water_depth_non_vented' in params:\n P = self.data['water_depth_non_vented'].rescale(\n sq.dbar).magnitude + (pq.atm).rescale(sq.dbar).magnitude\n elif 'water_depth_vented' in params:\n P = self.data['water_depth_vented'].rescale(sq.dbar).magnitude\n else:\n P = (pq.atm).rescale(sq.dbar).magnitude\n\n R = cond / 42.914\n sal = seawater.salt(R, T, P)\n\n self.set_standard_unit('seawater_salinity', sq.psu)\n self.data['seawater_salinity'] = sal * sq.psu", "def day_length(day_of_year, lat):\n day_hours = np.deg2rad(lat)\n declination = 23.45 * np.sin(np.deg2rad(360.0 * (283.0 + day_of_year) / 365.0))\n const_day = -np.tan(day_hours) * np.tan(np.deg2rad(declination)) <= -1.0\n day_hours[const_day] = 24.0\n const_night = -np.tan(day_hours) * np.tan(np.deg2rad(declination)) >= 1.0\n day_hours[const_night] = 0.0\n day_night = ~((const_day) | (const_night))\n hour_angle = np.rad2deg(np.arccos(-np.tan(day_hours[day_night]) *\n np.tan(np.deg2rad(declination))))\n day_hours[day_night] = (hour_angle / 7.5)\n return day_hours", "def estimate_sunrise_sunset(self, date, verbose=True):\n if self.source_type != 'solar':\n raise ValueError(\"You can only estimate sunrise and sunset for \"\n \"solar sources.\")\n\n date = pd.Timestamp(date)\n\n if self.diurnal_pattern is None:\n if verbose:\n print(\"Warning: Source {} has no diurnal pattern, estimating \"\n \"sunrise and sunset using average of past data.\"\n .format(self.name), file=sys.stderr)\n return Source.estimate_sunrise_sunset(self, date)\n\n if verbose:\n print(\"{} {}: Using Diurnal Pattern to estimate sunrise and sunset\"\n .format(self.name, date.date()))\n\n diurnal_pattern = self.diurnal_pattern\n daily_pattern = diurnal_pattern[date:date+datetime.timedelta(hours=23)]\n\n sunrise, sunset = None, None\n\n # This will walk through finding first sun hour and first night hour\n for hour, 
pattern in enumerate(daily_pattern.values):\n if sunrise is None and pattern > 0:\n sunrise = hour\n\n # If sun has risen, and we have not found night and we reach a 0\n if sunrise is not None and sunset is None and pattern == 0:\n sunset = hour\n\n if sunrise is None and sunset is None:\n raise ValueError(\"No solar power was generated on {}\".format(date))\n\n return sunrise, sunset", "def pointing_dir_earth (self, time):\n\n return self.vect_from_lspe_to_earth (self.pointing_dir_lspe (time),\n time)", "def getDensity(h, R_w, R_sun): # k is a fitting constant\n\n R = np.sqrt(R_w**2+h**2)\n r = R/R_sun # units need to be in solar radii \n a = 77.1\n b = 31.4\n c = 0.954\n d = 8.30\n e = 0.550\n f = 4.63\n\n return (a*r**(-b) + c*r**(-d) + e*r**(-f))*10**8 #[cm-3]", "def get_azimuth(self):\n self.degrees = self.azimuth_encoder.get_degrees()\n self.tele_azimuth = self.Calculations.convert_degrees(self.degrees)\n return self.tele_azimuth", "def read_elevation(filepath):\n h = 83 #distance between elevation measures\n N = 1201\n theta = np.pi / 6\n elev_array = np.zeros((N, N))\n grad_array = np.zeros((N, N, 2))\n I_array = np.zeros((N, N))\n # Read the elevation data as described in Question 3, and store in the elvation array\n f = open(filepath, \"rb\")\n for i in range(N):\n for j in range(N):\n buf = f.read(2)\n val = struct.unpack(\">h\", buf)[0]\n elev_array[i][j] = val\n f.close()\n # Populate the gradient array\n for i in range(N):\n for j in range(N):\n #This if statements handle the border cases\n if j == 0:\n grad_array[i][j][0] = (elev_array[i][j+1] - elev_array[i][j]) / h\n elif j == N - 1:\n grad_array[i][j][0] = (elev_array[i][j] - elev_array[i][j-1]) / h\n else:\n grad_array[i][j][0] = (elev_array[i][j+1] - elev_array[i][j-1]) / (2 * h)\n \n if i == 0:\n grad_array[i][j][1] = (elev_array[i][j] - elev_array[i-1][j]) / h\n elif i == N - 1:\n grad_array[i][j][1] = (elev_array[i-1][j] - elev_array[i][j]) / h\n else:\n grad_array[i][j][1] = (elev_array[i-1][j] - elev_array[i+1][j]) / (2 * h)\n \n # Populate intensities\n for i in range(N):\n for j in range(N):\n denom = np.sqrt(grad_array[i][j][0] ** 2 + grad_array[i][j][1] ** 2 + 1)\n numer = np.cos(theta) * grad_array[i][j][0] + np.sin(theta) * grad_array[i][j][1]\n I_array[i][j] = -1 * numer / denom\n \n return elev_array, I_array", "def altitude(press, altimeter=29.92126):\n AS = altimeter*inHg2PA\n print(AS, press**(L*R/g/M))\n h = -(press**(L*R/g/M) - AS**(L*R/g/M))*T0/L/(P0**(L*R/g/M))\n return h/ft2m", "def get_earth(time='now'):\n earth = get_body_heliographic_stonyhurst('earth', time=time)\n\n # Explicitly set the longitude to 0\n earth = SkyCoord(0*u.deg, earth.lat, earth.radius, frame=earth)\n\n return earth", "def get_earth_radius(latitude):\n a = R_EARTH_MAX\n b = R_EARTH_MIN\n num = ((a ** 2 * np.cos(latitude)) ** 2 +\n (b ** 2 * np.sin(latitude)) ** 2)\n den = ((a * np.cos(latitude)) ** 2 +\n (b * np.sin(latitude)) ** 2)\n\n earth_radius = np.sqrt(num / den)\n\n return earth_radius", "def _compute_epera(self, units='erg'):\n if 'a' not in self:\n raise ValueError('Photons must have effective area data to permit the computation of fluxes.')\n\n energy = _const.h * _const.c / self['w']\n energy = energy.to(units).value\n epera = energy / self['a']\n return epera", "def calc_esat(tair):\n\n esat = 613.75 * np.exp(17.502 * tair / (240.97 + tair))\n\n return esat", "def getSlantRangeElevationHeight(self, groundRange, z):\r\n \r\n lat = self.ctrLat * pi / 180.0\r\n \r\n #figure out earth's radius at radar's lat ... 
non-spherical earth model\r\n e2 = self.eccen # First eccentricity squared - WGS-84 value = 0.00669437999013\r\n a = self.Requator # Equatorial radius - WGS-84 value = 6378137.0\r\n Rearth = a/sqrt(1-e2*(sin(lat))**2) # radius of curvature\r\n \r\n Rprime = self.effectiveRadiusMultiplier * self.Requator\r\n \r\n h = array(z - self.ctrAlt, dtype='float64')\r\n s = array(groundRange, dtype='float64')\r\n \r\n # Use law of cosines (Side-Angle-Side triangle theorem) with \r\n # R', R'+h as sides and s/R' as the angle to get slant range\r\n r = sqrt(Rprime**2.0 + (Rprime+h)**2.0 - 2*(Rprime+h)*Rprime*cos(s/Rprime))\r\n # Inverse of eq. 2.28c in Doviak and Zrnic 1993\r\n # Will return NaN for r=0\r\n el = arccos((Rprime+h) * sin(s/Rprime) / r) \r\n el *= 180.0 / pi\r\n \r\n return r, el", "def get_elevation_data(lat, lon):\n \n logging.info(\"Getting elevation data for the coordinate ({}, {}).\".format(lat, lon))\n \n # Initialising function variables\n grid_lat = None\n grid_lon = None\n coord = (lon, lat)\n config_data = get_config()[\"gis\"]\n elev_file_name = config_data[\"input_file_name\"]\n \n logging.info(\"Determining the appropriate tif file for the coordinate ({}, {}).\".format(lat, lon))\n \n # Determine location's latitude data from the image\n # grid. Valid values are 1 and 2.\n for key, value in config_data[\"latitude_condition\"].items():\n \n if value[\"min_lat\"] <= lat <= value[\"max_lat\"]:\n grid_lat = value[\"grid_lat\"]\n\n # Determine location's longitude data from the image\n # grid. Valid values are A, B, C and D.\n for key, value in config_data[\"longitude_condition\"].items():\n \n if value[\"min_lon\"] <= lon <= value[\"max_lon\"]:\n grid_lon = value[\"grid_lon\"]\n\n # Determine that there is a valid grid_lat and grid_lon data.\n if grid_lat is None or grid_lon is None:\n logging.error(\"Invalid coordinate ({}, {}). Please check the value!\".format(lat, lon))\n raise ValueError\n\n grid_id = \"\".join([grid_lon, grid_lat])\n file_name = elev_file_name.format(grid_id=grid_id)\n\n # Retrieve the elevation tif file path based on grid_id.\n elev_file_path = get_file_path(folder_name=\"data\"\n ,subdirectory=config_data[\"input_subdirectory\"]\n ,file_name=file_name)\n \n logging.info(\"Retrieving elevation data for the coordinate ({}, {}) is in {} file.\".format(lat, lon, file_name))\n\n # Retrieve the elevation data found in elev_file_path.\n with rio.open(elev_file_path) as file:\n elevs = file.sample((coord, coord))\n elev = next(elevs)[0]\n\n logging.info(\"Completed retrieving elevation data for the coordinate ({}, {}). 
Elevation value: {}.\".format(lat, lon, elev))\n \n return elev", "def EclipticLongitude(body, time):\n if body == Body.Sun:\n raise InvalidBodyError()\n hv = HelioVector(body, time)\n eclip = Ecliptic(hv)\n return eclip.elon", "def sun_utc(self, date, latitude, longitude):\n \n dawn = self.dawn_utc(date, latitude, longitude)\n sunrise = self.sunrise_utc(date, latitude, longitude)\n noon = self.solar_noon_utc(date, longitude)\n sunset = self.sunset_utc(date, latitude, longitude)\n dusk = self.dusk_utc(date, latitude, longitude)\n \n return {'dawn': dawn, 'sunrise': sunrise, 'noon': noon, 'sunset': sunset, 'dusk': dusk}", "def skycoord(self):\n return SkyCoord(self['raj'], self['decj'], unit=(uu.hour, uu.degree))", "def area(\n self):\n pi = numpy.pi\n area0 = 4.0 * pi / 8.0\n areadiv = 4.0 ** self.depth\n area = area0 / areadiv * (180.0 / pi) ** 2\n return area", "def checkSun(ontology_sun):\n elevation = ontology_sun.has_elevation[0] #gets the elevation value of the Sun in the ontology. \n azimuth = ontology_sun.has_azimuth[0] #gets the azimuth value of the Sun in the ontology. \n intensity = ontology_sun.has_intensity[0] #gets the intensity value of the Sun in the ontology.\n return xosc.Sun(intensity,azimuth,elevation)", "def _get_elevation_percentage(height: float, distance: float) -> float:\n return height / (distance * 1000)", "def zodiac(cls, tee):\n return quotient(float(cls.solar_longitude(tee)), 30) + 1" ]
[ "0.6999895", "0.668499", "0.6425622", "0.6422953", "0.6396147", "0.6377398", "0.63741803", "0.61598486", "0.61377054", "0.61298066", "0.5996655", "0.59889287", "0.5973628", "0.58740777", "0.58685416", "0.5855382", "0.58081555", "0.57917005", "0.5758174", "0.5704655", "0.5704499", "0.56727254", "0.5664666", "0.5622825", "0.5622825", "0.559338", "0.55840534", "0.5524518", "0.54620355", "0.5445601", "0.5443145", "0.5433019", "0.5425157", "0.54221636", "0.54167", "0.540986", "0.5382669", "0.5381177", "0.53753203", "0.536666", "0.53655034", "0.53641105", "0.5360866", "0.53532386", "0.53237885", "0.53154886", "0.5313347", "0.5309959", "0.53076583", "0.5295451", "0.5288084", "0.52868736", "0.5283697", "0.52220064", "0.52171826", "0.52164483", "0.5189046", "0.5184869", "0.51803565", "0.51765656", "0.5175791", "0.51530474", "0.51519656", "0.51220965", "0.51115716", "0.5109626", "0.510672", "0.51030654", "0.5090001", "0.5086677", "0.5081933", "0.50778204", "0.5066204", "0.5058146", "0.5057045", "0.50513434", "0.5034616", "0.50330985", "0.5032126", "0.5028417", "0.5028149", "0.50274986", "0.5019756", "0.5018332", "0.5014025", "0.5012499", "0.5012003", "0.5006484", "0.50004876", "0.49967515", "0.49751762", "0.49739608", "0.49733424", "0.4969244", "0.496485", "0.49598482", "0.49563324", "0.49515694", "0.4938481", "0.4936118" ]
0.5579604
27
Calculates the phase of the moon on the specified date.
def moon_phase(self, date):
    jd = self._julianday(date.day, date.month, date.year)
    DT = pow((jd - 2382148), 2) / (41048480*86400)
    T = (jd + DT - 2451545.0) / 36525
    T2 = pow(T,2)
    T3 = pow(T,3)
    D = 297.85 + (445267.1115*T) - (0.0016300*T2) + (T3/545868)
    D = radians(self._proper_angle(D))
    M = 357.53 + (35999.0503*T)
    M = radians(self._proper_angle(M))
    M1 = 134.96 + (477198.8676*T) + (0.0089970*T2) + (T3/69699)
    M1 = radians(self._proper_angle(M1))
    elong = degrees(D) + 6.29*sin(M1)
    elong -= 2.10*sin(M)
    elong += 1.27*sin(2*D - M1)
    elong += 0.66*sin(2*D)
    elong = self._proper_angle(elong)
    moon = int(floor(((elong + 6.43) / 360) * 28))
    if moon == 28:
        moon = 0

    return moon
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def moon_phase(self, date=None):\n self._set_site_date(date)\n self.moon.compute(self.site)\n return self.moon.moon_phase", "def equation_of_time(cls, date):\n offset = cls.sine(cls.mean_position(date, cls.ANOMALISTIC_YEAR))\n equation_sun = (offset * angle(57, 18, 0) * (14/360 - (abs(offset) / 1080)))\n return ((cls.daily_motion(date) / 360) * (equation_sun / 360) * cls.SIDEREAL_YEAR)", "def phase_to_day(phase):\n if phase < 0:\n phase += 2*np.pi\n return phase*(365./(2*np.pi))", "def phase_to_day(phase):\n if phase < 0:\n phase += 2*np.pi\n return phase*(365./(2*np.pi))", "def MoonPhase(time):\n return PairLongitude(Body.Moon, Body.Sun, time)", "def calc_phase(self, time):\n dur = self.get_duration()\n phase = time / dur\n\n if self.enable_loop():\n phase -= np.floor(phase)\n else:\n phase = np.clip(phase, 0.0, 1.0)\n\n return phase", "def calc_phase(p, t):\n\n return (t % p)/p", "def phase(self, hjd):\n # 2009-09-28 14:07 IJC: Implemented object-oriented version\n return getorbitalphase(self, hjd)", "def moon_phase(\n datetime_index,\n epsilon=1e-6,\n epoch=2444237.905,\n ecliptic_longitude_epoch=278.833540,\n ecliptic_longitude_perigee=282.596403,\n eccentricity=0.016718,\n moon_mean_longitude_epoch=64.975464,\n moon_mean_perigee_epoch=349.383063,\n):\n # set time to Noon if not otherwise given, as midnight is confusingly close to previous day\n if np.sum(datetime_index.hour) == 0:\n datetime_index = datetime_index + pd.Timedelta(hours=12)\n days = datetime_index.to_julian_date() - epoch\n\n # Mean anomaly of the Sun\n a = (360 / 365.2422) * days\n N = a - 360.0 * np.floor(a / 360.0)\n N = N + ecliptic_longitude_epoch - ecliptic_longitude_perigee\n # Convert from perigee coordinates to epoch 1980\n M = a - 360.0 * np.floor(N / 360.0)\n\n m = torad(M)\n e = m.copy()\n while 1:\n delta = e - eccentricity * np.sin(e) - m\n e = e - delta / (1.0 - eccentricity * np.cos(e))\n if abs(delta).max() <= epsilon:\n break\n\n Ec = sqrt((1 + eccentricity) / (1 - eccentricity)) * np.tan(e / 2.0)\n # True anomaly\n Ec = 2 * todeg(np.arctan(Ec))\n # Suns's geometric ecliptic longuitude\n a = Ec + ecliptic_longitude_perigee\n lambda_sun = a - 360.0 * np.floor(a / 360.0)\n\n # Calculation of the Moon's position\n\n # Moon's mean longitude\n a = 13.1763966 * days + moon_mean_longitude_epoch\n moon_longitude = a - 360.0 * np.floor(a / 360.0)\n\n # Moon's mean anomaly\n a = moon_longitude - 0.1114041 * days - moon_mean_perigee_epoch\n MM = a - 360.0 * np.floor(a / 360.0)\n\n # Moon's ascending node mean longitude\n # MN = fixangle(c.node_mean_longitude_epoch - 0.0529539 * day)\n\n evection = 1.2739 * np.sin(torad(2 * (moon_longitude - lambda_sun) - MM))\n\n # Annual equation\n annual_eq = 0.1858 * np.sin(torad(M))\n\n # Correction term\n A3 = 0.37 * np.sin(torad(M))\n\n MmP = MM + evection - annual_eq - A3\n\n # Correction for the equation of the centre\n mEc = 6.2886 * np.sin(torad(MmP))\n\n # Another correction term\n A4 = 0.214 * np.sin(torad(2 * MmP))\n\n # Corrected longitude\n lP = moon_longitude + evection + mEc - annual_eq + A4\n\n # Variation\n variation = 0.6583 * np.sin(torad(2 * (lP - lambda_sun)))\n\n # True longitude\n lPP = lP + variation\n\n # Calculation of the phase of the Moon\n\n # Age of the Moon, in degrees\n moon_age = lPP - lambda_sun\n\n # Phase of the Moon\n moon_phase = (1 - np.cos(torad(moon_age))) / 2.0\n return moon_phase\n # return pd.Series(moon_phase, index=datetime_index)", "def phases(self,dataset):\n start = '1984-1-1'\n if dataset == \"ISCCP_raw\":\n stop = 
'2007-12-31'\n else:\n stop = '2009-12-31'\n X = getattr(self,dataset)(time=(start,stop))\n R,P = sc.fast_annual_cycle(X)\n return MV.masked_where(np.isnan(P),P)", "def phaseEstimator(phases,omegas,T_s,k):\n length = phases.shape[0]\n pis = np.tile(2*np.pi,length)\n a = phases - T_s*k*omegas\n phaseShifts = np.mod(a,pis)\n b = phases-phaseShifts\n omega_hat = np.mod(b,pis)\n n = omega_hat/omegas\n estimatedTime = np.sum(n)/length\n \n estimatedPhase = phaseShifts + estimatedTime*omegas\n \n return estimatedPhase", "def phase(self):\r\n\r\n #XXX calcluate this from the standard output, instead of recalculating:\r\n\r\n tseries_length = self.input.data.shape[0]\r\n spectrum_length = self.spectrum.shape[-1]\r\n\r\n phase = np.zeros((tseries_length,\r\n tseries_length,\r\n spectrum_length))\r\n\r\n for i in range(tseries_length):\r\n for j in range(i, tseries_length):\r\n phase[i][j] = np.angle(\r\n self.spectrum[i][j])\r\n\r\n phase[j][i] = np.angle(\r\n self.spectrum[i][j].conjugate())\r\n return phase", "def phase(self):\n return np.arctan(np.sum(np.imag(self.values)) / np.sum(np.real(self.values)))", "def phase(self):\n\n self.theta = np.arctan(np.sqrt(self.P / (1 - self.P)))\n return self", "def phase_Saturn_2(alpha):\n phase = 10.**(-0.4*(- 3.7e-04*alpha +6.16e-04*alpha**2.))\n return phase", "def getPhase(phase):", "def jovian_year(cls, date):\n return amod(quotient(cls.hindu_day_count(date), cls.ARYA_JOVIAN_PERIOD / 12) + 27, 60)", "def phase_Earth(alpha):\n phase = 10.**(-0.4*(- 1.060e-3*alpha + 2.054e-4*alpha**2.))\n return phase", "def date_ym_value(date: dt.datetime) -> int:\n return (100 * date.year) + date.month", "def declination_degree(day_date, TY):\n\n return 23.45 * np.vectorize(sin)((2 * pi / (TY)) * (day_date - 81))", "def phase(state, i):\n particles = bin(state >> i + 1).count(\"1\")\n return 1 if particles % 2 == 0 else -1", "def timestep_from_date(self, this_date):\n this_timestep = this_date.year - self._date_at_timestep0.year\n return this_timestep", "def fringes_morlet_phase(m1,m2, quasi_pi=False):\n ### cross spectrum\n cross_spec = np.conj(m1.cwt)*m2.cwt\n phi = np.angle(cross_spec)\n if quasi_pi:\n phi = np.mod(phi + np.pi/2, 2*np.pi)\n weight = abs(m1.cwt)*abs(m2.cwt)\n phase = np.sum(phi*weight, axis=0)/np.sum(weight, axis=0)\n if quasi_pi:\n phase -= np.pi/2\n return phase", "def phase_offset(frq,start,base):\r\n \r\n if type(start)==datetime:\r\n dx = start - base\r\n dx = dx.total_seconds()\r\n else:\r\n dx = start -base\r\n \r\n return np.mod(dx*np.array(frq),2*np.pi)", "def compute_phase(self, dt, phase_speed):\n num_time_steps = int(self._traj_len / phase_speed)\n\n phase = Phase(dt=self._dt, phase_speed=phase_speed, time_steps=num_time_steps)\n\n return phase", "def getorbitalphase(planet, hjd, **kw):\n\n hjd = array(hjd).copy()\n if bool(planet.transit)==True:\n thiseph = planet.tt\n else:\n thiseph = planet.rveph(hjd.max())\n\n orbphase = ((hjd - thiseph) ) / planet.per\n orbphase -= int(orbphase.mean())\n\n return orbphase", "def yoga(date):\n return ifloor(mod((HinduSolarDate.longitude(date) + HinduLunarDate.longitude(date)) / angle(0, 800, 0), 27)) + 1", "def kuramoto_ODE_jac(self, t, y, arg):\n\n w, k = arg\n yt = y[:,None]\n dy = y-yt\n\n phase = [m*k[m-1]*np.cos(m*dy) for m in range(1,1+self.m_order)]\n phase = np.sum(phase, axis=0)\n\n for i in range(self.n_osc):\n phase[i,i] = -np.sum(phase[:,i])\n\n return phase", "def phase(self):\n pass", "def kuramoto_ODE(self, t, y, arg):\n w, k = arg\n yt = y[:,None]\n dy = y-yt\n\n phase = 
w.astype(self.dtype)\n if self.noise != None:\n n = self.noise().astype(self.dtype)\n dy += n\n \n for m, _k in enumerate(k):\n phase += np.sum(_k*np.sin((m+1)*dy),axis=1)\n \n return phase", "def seasonality(time, period, amplitude=1 , phase=0):\n season_time = ((time + phase) % period) / period\n return amplitude * seasonal_pattern(season_time)", "def phase(self):\n return self.__phase", "def m_phase(self):\n return self._m_phase", "def phase_Mercury(alpha):\n phase = 10.**(-0.4*(6.3280e-02*alpha - 1.6336e-03*alpha**2. + 3.3644e-05*alpha**3. - 3.4265e-07*alpha**4. + 1.6893e-09*alpha**5. - 3.0334e-12*alpha**6.))\n return phase", "def estimate_phase(img_ft, sim_frq, dxy):\n ny, nx = img_ft.shape\n fx = tools.get_fft_frqs(nx, dxy)\n fy = tools.get_fft_frqs(ny, dxy)\n\n phase = np.mod(np.angle(tools.get_peak_value(img_ft, fx, fy, sim_frq, 2)), 2*np.pi)\n\n return phase", "def seasonality(time, period, amplitude=1, phase=0):\n\tseason_time = ((time + phase) % period) / period\n\treturn amplitude * seasonal_pattern(season_time)", "def daily_motion(cls, date):\n mean_motion = 360 / cls.SIDEREAL_YEAR\n anomaly = cls.mean_position(date, cls.ANOMALISTIC_YEAR)\n epicycle = 14/360 - abs(cls.sine(anomaly)) / 1080\n entry = quotient(float(anomaly), angle(0, 225, 0))\n sine_table_step = cls.sine_table(entry + 1) - cls.sine_table(entry)\n factor = -3438/225 * sine_table_step * epicycle\n return mean_motion * (factor + 1)", "def phaseEstimator2(phases,omegas,T_s,k):\n \n \n length = phases.shape[0]\n pis = np.tile(2*np.pi,length)\n a = phases - k*omegas\n phaseShifts = np.mod(a,pis)\n\n averagedPhaseShift = np.sum(phaseShifts)/length\n \n estimatedPhase = np.mod(averagedPhaseShift + k*omegas,pis)\n #estimatedPhase = np.array([np.pi/2,np.pi/2,np.pi/2]) + k*omegas\n \n return estimatedPhase", "def GetPhase(self):\n ...", "def phases(self):\r\n\r\n phase = tsa.cache_to_phase(self.cache, self.ij)\r\n\r\n return phase", "def app_phase(data_pupil,data_phase,oversize=4):\n return phaseangle(app_complex(data_pupil,data_phase,oversize))", "def seasonality(time, period, amplitude=1, phase=0):\r\n season_time = ((time + phase) % period) / period\r\n return amplitude * seasonal_pattern(season_time)", "def seasonality(time, period, amplitude=1, phase=0):\r\n season_time = ((time + phase) % period) / period\r\n return amplitude * seasonal_pattern(season_time)", "def phase_Saturn_1(alpha,beta=0.):\n phase = 10.**(-0.4*(-1.825*np.sin(beta*np.pi/180.) + 0.026*alpha - 0.378*np.sin(beta*np.pi/180.)*np.exp(-2.25*alpha)))\n return phase", "def get_phase(self,):\n\n # for comparison\n initial_state = self._read('CPOW0')\n\n POW_step = 0.02197265\n POW = 0x00 | initial_state[0] << 8 | initial_state[1]\n phase = round(POW*POW_step, 2)\n\n print ('Latest phase set (i.e. 
currently in register):', phase)\n\n return self.phases", "def seasonality(time, period, amplitude=1, phase=0):\n season_time = ((time + phase) % period) / period\n return amplitude * seasonal_pattern(season_time)", "def seasonality(time, period, amplitude=1, phase=0):\n season_time = ((time + phase) % period) / period\n return amplitude * seasonal_pattern(season_time)", "def princarg(phase_in):\n phase = np.mod(phase_in + np.pi,-2*np.pi)+np.pi;\n return phase", "def day_angle(day):\n return 2*pi*( day - 1 )/365", "def fn(y, m, d):\n if m < 3: \n y -= 1\n m += 12\n return 365*y + y//4 + y//400 - y//100 + (153*m + 8)//5 + d", "def day_from_moment(cls, tee):\n return quotient(cls.lunar_phase(tee), 12) + 1", "def moon_phase_df(datetime_index, epoch=2444237.905):\n moon = pd.Series(moon_phase(datetime_index, epoch=epoch), index=datetime_index)\n full_moon = ((moon > moon.shift(1)) & (moon > moon.shift(-1))).astype(int)\n new_moon = ((moon < moon.shift(1)) & (moon < moon.shift(-1))).astype(int)\n # account for end (shift) being new_moon\n if new_moon.tail(29).sum() == 0:\n new_moon.iloc[-1] = 1\n if full_moon.tail(29).sum() == 0:\n full_moon.iloc[-1] = 1\n moon_df = pd.concat([moon, full_moon, new_moon], axis=1)\n moon_df.columns = ['phase', 'full_moon', 'new_moon']\n return moon_df", "def phase(self):\n return -self.attrs['RFphase']*2*np.pi", "def get_phase_end(self, start):\n if isinstance(self.phase, mafia.Night):\n return self.get_next_occurrence(start, self.night_end)\n elif isinstance(self.phase, mafia.Day):\n return self.get_next_occurrence(start, self.day_end)\n else:\n raise click.ClickException(\"Unknown phase: %s\", type(self.phase))", "def phase_at(self, location):\n r = self.path_length(location)\n return (self.k * r + self.phase_offset) % (2 * math.pi)", "def phase_Jupiter_1(alpha):\n phase = 10.**(-0.4*(- 3.7e-04*alpha + 6.16e-04*alpha**2.))\n return phase", "def phase_velocity(self):\n return 1/np.sqrt(self.mu*self.epsilon)", "def phase_modulation_state(state, phase):\n size = len(state)\n st_mod = np.zeros((size, size), dtype=complex)\n for p1 in range(size):\n for p2 in range(size):\n st_mod[p1, p2] = state[p1, p2] * np.exp(1j * p1 * phase)\n return st_mod", "def time_goal(self, date=None):\n\n target_date = date or datetime.date.today()\n sum = 0\n\n goals = Goal.query.filter_by(project=self.id).all()\n for goal in goals:\n # check if goal applies to target date\n if (2 ** target_date.weekday() & goal.days):\n sum += goal.time\n return sum", "def _period( self ):\r\n\treturn 2 * pi * sqrt( self.orbital_elements[0]**3 / self.mu_central_body )\r\n\t# http://en.wikipedia.org/wiki/Orbital_period#Calculation\r", "def current_season_phase():\n _update_week_number()\n return _cur_season_phase", "def sunset(cls, date):\n return (date + Clock.days_from_hours(18) + \n ((cls.UJJAIN.longitude - cls.LOCATION.longitude) / 360) -\n cls.equation_of_time(date) +\n (((1577917828/1582237828) / 360) *\n (- cls.ascensional_difference(date, cls.LOCATION) +\n (3/4 * cls.solar_sidereal_difference(date)))))", "def phase(self):\r\n return 0.2 * self.weights", "def unit_mo(self):\n return (((self.time_base * 60.0) * 24.0) * 365.0) / 12", "def lunar_phase(cls, tee):\n return mod(cls.lunar_longitude(tee) - cls.hindu_solar_longitude(tee), 360)", "def EOMONTH(start_date, months):\n return DATE(start_date.year, start_date.month + months + 1, 1) - datetime.timedelta(days=1)", "def jump_process(phi, \n dt, \n n, \n m):\n \n if n != m:\n phi_new = np.zeros(len(phi), dtype = complex) \n \n phi_new[n] = 
np.copy(phi[m]/abs(phi[m]))\n \n else:\n phi_new = np.zeros(len(phi), dtype = complex) \n \n phi_new[n] = np.copy(phi[m]/abs(phi[m]))*np.exp( 1.0j * np.pi * 0.5 )\n\n return phi_new", "def _create_phases(self):\n start_dates, end_dates = self._phase_range(self._change_dates)\n pop_list = [self.pop_dict[date] for date in start_dates]\n phase_series = PhaseSeries(\n self.dates[0], self.dates[-1], self.population, use_0th=self._use_0th\n )\n phase_itr = enumerate(zip(start_dates, end_dates, pop_list))\n for (i, (start_date, end_date, population)) in phase_itr:\n phase_series.add(\n start_date=start_date,\n end_date=end_date,\n population=population\n )\n return phase_series", "def Phase(self, *xpars):\n return np.angle(self.combineResult(*xpars))", "def phase_Mars_1(alpha):\n phase = 10.**(-0.4*(0.02267*alpha - 0.0001302*alpha**2.+ 0. + 0.))#L(λe) + L(LS)\n return phase", "def calculation_time_analysis():\n\tfrom . import spectra as sp\n\tp_dict = {'Bfield':700,'rb85frac':1,'Btheta':88*np.pi/180,'Bphi':0*np.pi/180,'lcell':75e-3,'T':84,'Dline':'D2','Elem':'Cs'}\n\tchiL,chiR,chiZ = sp.calc_chi([-3500],p_dict)\n\t\n\tfor angle in [0, np.pi/32, np.pi/16, np.pi/8, np.pi/4, np.pi/2]:\n\t\tprint(('Angle (degrees): ',angle*180/np.pi))\n\t\tRotMat, n1, n2 = solve_diel(chiL,chiR,chiZ,angle)", "def phase(self) -> Optional[str]:\n return pulumi.get(self, \"phase\")", "def phase_shift(annuli,annulus):\n delta_t = viscous_timescale(annuli[annulus+1]) - viscous_timescale(annuli[annulus])\n return int(delta_t)", "def year_cost_rule(_m, y):\r\n\r\n return sum(m.RHO[y, s] * m.SCEN[y, s] for s in m.S)", "def Phase(data):\r\n hil = signal.hilbert(data)\r\n return np.unwrap(np.arctan2(hil.imag, hil.real))", "def _set_phase(self):\n self.phase = np.sign(self._model())", "def phasesin14(param, x):\n # 2010-04-27 11:49 IJC: Created.\n # 2011-06-09 16:40 IJMC: Definition slightly changed to be a\n # multiplicative factor.\n cparam = array(param[3::], copy=True)\n cparam[0] = 1. / prod(1. + cparam[1::]) - 1.\n\n param[2] = param[2] % (2*pi)\n\n if len(x.shape)==1:\n was1d = True\n x = x.reshape(14, len(x)/14.)\n else:\n was1d = False\n\n ret = param[0] - abs(param[1]) *cos(2*pi*x +param[2])\n #print 'param[3::]>>',param[3::]\n #print 'x.shape>>',x.shape\n ret *= (1. + cparam.reshape(14,1))\n\n if was1d:\n ret = ret.ravel()\n\n return ret", "def phase(freqs, p0, p1, p2):\n x = utils.reduce_by_midpoint(freqs)\n phi = p0 + p1 * x + p2 * x ** 2\n return np.exp(1j * phi)", "def phase_Jupiter_2(alpha):\n # inds = np.where(alpha > 180.)[0]\n # alpha[inds] = [180.]*len(inds)\n # assert np.all((1.0 - 1.507*(alpha/180.) - 0.363*(alpha/180.)**2. - 0.062*(alpha/180.)**3.+ 2.809*(alpha/180.)**4. - 1.876*(alpha/180.)**5.) >= 0.), \"error in alpha input\"\n difference = phase_Jupiter_1(12.) - 10.**(-0.4*(- 2.5*np.log10(1.0 - 1.507*(12./180.) - 0.363*(12./180.)**2. - 0.062*(12./180.)**3.+ 2.809*(12./180.)**4. - 1.876*(12./180.)**5.)))\n phase = difference + 10.**(-0.4*(- 2.5*np.log10(1.0 - 1.507*(alpha/180.) - 0.363*(alpha/180.)**2. - 0.062*(alpha/180.)**3.+ 2.809*(alpha/180.)**4. 
- 1.876*(alpha/180.)**5.)))\n return phase", "def _unit_mo(self):\n return (((self.time_base * 60.0) * 24.0) * 365.0) / 12", "def phase_lifetime(r, freq=1):\n return np.tan(np.angle(r)) / (2 * np.pi * freq)", "def calculate(self):\n\n x_wamv = self.location[0]\n y_wamv = self.location[1]\n theta_wamv = self.heading\n\n x_wpt = self.waypoint_loc[0]\n y_wpt = self.waypoint_loc[1]\n\n dy = y_wpt - y_wamv\n dx = x_wpt - x_wamv\n\n # Could add in a 'round' if required.\n theta_wpt = math.degrees(math.atan2(dx,dy))\n\n self.heading_publisher.publish(theta_wpt)", "def phase(self):\n return 0.0 * self.__weights", "def subtract_months(P, reference):\n ref = phase_to_day(reference)\n phase = phase_to_day(P)\n \n\n if ref < phase:\n backward = phase - ref #Ref lags phase\n forward = -ref - (365 - phase) #Move into the next year\n \n else:\n forward = phase + 365 - ref\n backward = phase - ref\n\n\n return forward, backward", "def signal_phase(signal):\n pi2 = 2.0*np.pi\n\n # Get pro-phase\n prophase = np.mod(np.angle(scipy.signal.hilbert(signal)), pi2)\n\n # Transform a pro-phase to a real phase\n sort_idx = np.argsort(prophase) # Get a sorting index\n reverse_idx = np.argsort(sort_idx) # Get index reversing sorting\n tht = pi2 * np.arange(prophase.size)/(prophase.size) # Set up sorted real phase\n phase = tht[reverse_idx] # Reverse the sorting of it\n\n return phase", "def pow_to_phase(self, pow):\n return 2 * np.pi * self._ensure_number(pow) / 2**self.POW_WIDTH", "def phase(N, phi0=0, *, dtype=None):\n dtype = dtype or settings.core[\"default_dtype\"] or _data.Dense\n phim = phi0 + (2 * np.pi * np.arange(N)) / N # discrete phase angles\n n = np.arange(N)[:, np.newaxis]\n states = np.array([np.sqrt(kk) / np.sqrt(N) * np.exp(1j * n * kk)\n for kk in phim])\n ops = np.sum([np.outer(st, st.conj()) for st in states], axis=0)\n return Qobj(ops, dims=[[N], [N]], type='oper', copy=False).to(dtype)", "def phase_velocity(refractive_index):\n return cgs.c / refractive_index", "def phasor_from_lifetime(tau, freq=1):\n return 1 / (1 - 1j * 2 * np.pi * freq * tau)", "def phase_Venus_1(alpha):\n phase = 10.**(-0.4*(- 1.044e-03*alpha + 3.687e-04*alpha**2. - 2.814e-06*alpha**3. 
+ 8.938e-09*alpha**4.))\n return phase", "def _phase_detect(acc_z):\n acc_mag_sd = pd.Series(acc_z).rolling(100).std(center=True)\n min_sd = 1.5\n mov = np.where(acc_mag_sd >= min_sd)[0]\n phase = np.zeros(len(acc_z)).astype(int)\n phase[mov] = 1\n\n return phase", "def visitCalculated(self, date):\n raise NotImplementedError()", "def calculate(self, time):\n from numpy import sqrt\n\n gamma = self.gamma\n xr, r1, p1, u1 = self.vn[0]\n xl, r4, p4, u4 = self.vn[3]\n\n a4 = sqrt(gamma*p4/r4)\n a1 = sqrt(gamma*p1/r1)\n\n # calculate region 2.\n p41 = p4/p1\n p21 = self.strength(p4/p1, gamma, a1/a4)\n r21 = (1+(gamma+1)/(gamma-1)*p21) / ((gamma+1)/(gamma-1)+p21)\n r2 = r21*r1\n u2 = a1/gamma * (p21-1) * sqrt( \n 2*gamma/(gamma+1) / (p21 + (gamma-1)/(gamma+1)) )\n p2 = p21*p1\n\n # calculate shock speed.\n us = a1 * sqrt( (gamma+1)/(2*gamma)*(p21-1) + 1 )\n self.vn[1] = us*time, r2, p2, u2\n\n # caluculate region 3.\n p34 = p21/p41\n r34 = p34**(1/gamma)\n r3 = r34*r4\n p3 = p34*p4\n u3 = u2\n a3 = sqrt(gamma*p3/r3)\n self.vn[2] = u2*time, r3, p3, u3\n\n # calculate expansion wave.\n self.ve[:] = self.expwave(\n r4, p4, a4, u3, a3, gamma, time, self.nx)\n\n self.ve[0] += self.xshift\n self.vn[:,1:3] += self.xshift", "def extract_phase(eigvector, point_arr=[]):\n pa = point_arr\n if np.size(pa) == 0:\n pa = np.arange(len(evY))\n\n evX = eigvector[2 * pa]\n evY = eigvector[2 * pa + 1]\n phase = np.arctan2(evY.real, evX.real)\n # print 'evY[0] =', evY[0]\n # print 'evX[0] =', evX[0]\n # print 'phase[0] = ', phase[0]\n return phase", "def _solar_time(date, lon, lat):\n # IDL is computing time_t as hours and fractional minutes\n time_t = ee.Date(date).get('hour').add(\n ee.Date(date).get('minute').divide(60))\n\n # This will return the hour floating point value\n # time_t = ee.Date(date).get('hour').add(ee.Date(date).getFraction('hour'))\n\n # CGM - DOY and hour could be images in order to make these expressions\n julian = _to_jd(date)\n\n # Sunrise time\n julian_ = time_t.divide(24.0).add(julian)\n j_cen = julian_.add(0.5 - 2451545.0).divide(36525.0)\n # CGM - Does the mod happen before or after the multiply\n lon_sun = j_cen.multiply(0.0003032).add(36000.76983) \\\n .multiply(j_cen).mod(360.0).add(280.46646).subtract(360)\n an_sun = j_cen.multiply(-0.0001537).add(35999.05029) \\\n .multiply(j_cen).add(357.52911)\n ecc = j_cen.multiply(0.0000001267).add(0.000042037) \\\n .multiply(j_cen).multiply(-1).add(0.016708634)\n ob_ecl = j_cen.multiply(-0.001813).add(0.00059) \\\n .multiply(j_cen).add(46.815) \\\n .multiply(j_cen).multiply(-1).add(21.448) \\\n .divide(60.0).add(26).divide(60).add(23)\n ob_corr = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).cos() \\\n .multiply(0.00256).add(ob_ecl)\n var_y = ob_corr.divide(2.0).multiply(deg2rad).tan().multiply(\n ob_corr.divide(2.0).multiply(deg2rad).tan())\n eq_t = (\n lon_sun.multiply(2.0).multiply(deg2rad).sin().multiply(var_y)\n .subtract(an_sun.multiply(deg2rad).sin().multiply(ecc).multiply(2.0))\n .add(an_sun.multiply(deg2rad).sin()\n .multiply(lon_sun.multiply(2.0).multiply(deg2rad).cos())\n .multiply(var_y).multiply(ecc).multiply(4.0))\n .subtract(lon_sun.multiply(4.0).multiply(deg2rad).sin()\n .multiply(var_y).multiply(var_y).multiply(0.5))\n .subtract(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(ecc).multiply(ecc).multiply(1.25))\n .multiply(4.0).multiply(rad2deg))\n sun_eq = (\n an_sun.multiply(deg2rad).sin().multiply(\n j_cen.multiply(0.000014).add(0.004817)\n .multiply(j_cen).multiply(-1).add(1.914602))\n 
.add(an_sun.multiply(2.0).multiply(deg2rad).sin()\n .multiply(j_cen.multiply(-0.000101).add(0.019993)))\n .add(an_sun.multiply(3.0).multiply(deg2rad).sin().multiply(0.000289)))\n sun_true = sun_eq.add(lon_sun)\n sun_app = j_cen.multiply(-1934.136).add(125.04).multiply(deg2rad).sin() \\\n .multiply(-0.00478).subtract(0.00569).add(sun_true)\n\n # CGM - Intentionally not converting back to degrees\n d = ob_corr.multiply(deg2rad).sin() \\\n .multiply(sun_app.multiply(deg2rad).sin()) \\\n .asin()\n\n # CGM - Functions below are lat/lon dependent and can be written as\n # ee.Image expressions\n # CGM - d is still in radians, not converting\n ha_t = lat.expression(\n 'acos((cos(90.833 * pi / 180) / (cos(lat) * cos(d))) - tan(lat) * tan(d))'\n ' * (180 / pi)',\n {'lat': lat, 'd': d, 'pi': math.pi})\n\n # print('\\n{:10s} {:.12f}'.format('julian_', julian_.getInfo()))\n # print('{:10s} {:.12f}'.format('time_t', time_t.getInfo()))\n # print('{:10s} {:.12f}'.format('j_cen', j_cen.getInfo()))\n # print('{:10s} {:.12f}'.format('lon_sun', lon_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('an_sun', an_sun.getInfo()))\n # print('{:10s} {:.12f}'.format('ecc', ecc.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_ecl', ob_ecl.getInfo()))\n # print('{:10s} {:.12f}'.format('ob_corr', ob_corr.getInfo()))\n # print('{:10s} {:.12f}'.format('var_y', var_y.getInfo()))\n # print('{:10s} {:.12f}'.format('eq_t', eq_t.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_eq', sun_eq.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_true', sun_true.getInfo()))\n # print('{:10s} {:.12f}'.format('sun_app', sun_app.getInfo()))\n # print('{:10s} {:.12f}'.format('d', d.getInfo()))\n\n return d, eq_t, ha_t", "def year_demand_rule(_m, y):\r\n\r\n return sum(m.SCENARIO_DEMAND[y, s] for s in m.S)", "def phaseangle(complexr):\n return numpy.arctan2(complexr.imag,complexr.real)", "def phase_Saturn_3(alpha):\n difference = phase_Saturn_2(6.5) - 10.**(-0.4*(2.446e-4*6.5 + 2.672e-4*6.5**2. - 1.505e-6*6.5**3. + 4.767e-9*6.5**4.))\n phase = difference + 10.**(-0.4*(2.446e-4*alpha + 2.672e-4*alpha**2. - 1.505e-6*alpha**3. 
+ 4.767e-9*alpha**4.))\n return phase", "def phaseshift(self, dl=0, dm=0, im=[[0]], size=0):\n\n ang = lambda dl,dm,u,v,freq: (dl*n.outer(u,freq/self.freq_orig[0]) + dm*n.outer(v,freq/self.freq_orig[0])) # operates on single time of u,v\n\n if ((len(im) != 1) & (size != 0)):\n y,x = n.where(im == im.max())\n length = len(im)\n dl = (length/2 - x[0]) * 1./size\n dm = (y[0] - length/2) * 1./size\n print 'Shifting phase center to image peak: (dl,dm) = (%e,%e) = (%e,%e) arcsec' % (dl, dm, n.degrees(dl)*3600, n.degrees(dm)*3600)\n elif isinstance(dl,n.ndarray) and isinstance(dm,n.ndarray):\n if not len(dl) == self.nints:\n raise ValueError('dl is an array but its length (%d) does not match the number of integrations (%d)' % (len(dl),self.nints))\n \n elif ((dl != 0) | (dm != 0)):\n print 'Shifting phase center by given (dl,dm) = (%e,%e) = (%e,%e) arcsec' % (dl, dm, n.degrees(dl)*3600, n.degrees(dm)*3600)\n dl = dl * n.ones(self.nints)\n dm = dm * n.ones(self.nints)\n else:\n raise ValueError('Need to give either dl or dm, or im and size.')\n\n for i in xrange(self.nints):\n for pol in xrange(self.npol):\n self.data[i,:,:,pol] = self.data[i,:,:,pol] * n.exp(-2j*n.pi*ang(dl[i], dm[i], self.u[i], self.v[i], self.freq))\n \n self.l0 = self.l0 + dl\n self.m0 = self.m0 + dm\n self.dataph = (self.data.mean(axis=3).mean(axis=1)).real # multi-pol\n self.min = self.dataph.min()\n self.max = self.dataph.max()\n print 'New dataph min, max:'\n print self.min, self.max", "def declination_degree(utc_datetime, TY = TY_default ): \n return 23.45 * math.sin((2 * math.pi / (TY)) * ((solar.GetDayOfYear(utc_datetime)) - 81))" ]
[ "0.7753409", "0.64832145", "0.6266615", "0.6266615", "0.5898424", "0.5834775", "0.5758287", "0.5736949", "0.5586432", "0.5580027", "0.5443291", "0.5376677", "0.53231025", "0.53126574", "0.5306432", "0.5282793", "0.52764946", "0.5266146", "0.5248463", "0.5236514", "0.5230845", "0.5217797", "0.5201141", "0.51913184", "0.5187528", "0.5175733", "0.5162414", "0.5157162", "0.51302814", "0.5118767", "0.50955224", "0.5091113", "0.5079824", "0.50766855", "0.5076246", "0.50693244", "0.5050085", "0.50392103", "0.5021399", "0.50192606", "0.5005518", "0.50054216", "0.50054216", "0.49895272", "0.49787024", "0.49746782", "0.49746782", "0.49708807", "0.49496424", "0.49425992", "0.49404404", "0.48955145", "0.48936707", "0.48856086", "0.48706728", "0.48639208", "0.48611894", "0.48546797", "0.48532203", "0.48469573", "0.48440894", "0.48365918", "0.4823144", "0.4781241", "0.47807145", "0.4772859", "0.4759727", "0.47585806", "0.47459337", "0.47401047", "0.47359648", "0.47350016", "0.471281", "0.47109953", "0.47066233", "0.46888757", "0.46829402", "0.46824557", "0.46813133", "0.46803117", "0.46786833", "0.46735913", "0.46692356", "0.46671116", "0.46533212", "0.46467283", "0.46361575", "0.46341768", "0.46326092", "0.46247017", "0.4618024", "0.459414", "0.45936853", "0.45928487", "0.45913425", "0.45896572", "0.4588496", "0.4584579", "0.45823646", "0.45707947" ]
0.7782467
0
Reorder 'shape' according to the chosen data layout to optimize data distribution.
def _optimizeshape(shape):
    shape.sort()
    if ORDER == 'C':
        shape[:] = shape[::-1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unchanged_shape(input_shape):\n return input_shape", "def restore_backup_shape(self):\n\n self.shape = self.shape_backup", "def backup_shape(self):\n\n self.shape_backup = np.copy(self.shape)", "def set_shape(self, shape):\n self._shape = self._shape.merge_with(shape)", "def change_orientation(self):\n self.shape = self.shape.T", "def changeInputShape(self,shape):\n self.input_shape = shape", "def data_shape():\n return DATA_SHAPE", "def _init_nd_shape_and_axes_sorted(x, shape, axes):\n noaxes = axes is None\n shape, axes = _init_nd_shape_and_axes(x, shape, axes)\n\n if not noaxes:\n shape = shape[axes.argsort()]\n axes.sort()\n\n return shape, axes", "def _reorder(self, unordered_data: np.ndarray) -> np.ndarray:\n if unordered_data is None:\n return unordered_data\n\n if self._memory_allocation == ShotOrder.circuit_first:\n return unordered_data.T.flatten()\n else:\n return unordered_data.flatten()", "def reshape(data, shape, symmetric=False, layout='row', **kwargs):\n return Component(\n \"Reshape\",\n arguments={\n 'data': Component.of(data)\n },\n options={\n 'symmetric': symmetric,\n 'layout': layout,\n 'shape': shape\n },\n constraints=kwargs)", "def data_shapes(self):", "def _settle_shape(self, shape):\n if shape:\n for block in shape.blocks:\n self.array[block.row_position][block.column_position] = block\n self.remove_completed_lines()", "def _normalize_shape(shape):\n\n if isinstance(shape, (np.integer, int)):\n if shape < 1:\n raise ValueError(\"shape value must be greater than 0: %d\"\n % shape)\n shape = (shape,) # N is a shorthand for (N,)\n try:\n shape = tuple(shape)\n except TypeError:\n raise TypeError(\"shape must be an integer or sequence: %r\"\n % (shape,))\n\n # XXX Get from HDF5 library if possible.\n # HDF5 does not support ranks greater than 32\n if len(shape) > 32:\n raise ValueError(\n f\"shapes with rank > 32 are not supported: {shape!r}\")\n\n return tuple(SizeType(s) for s in shape)", "def expand_to_shape(data, shape, dtype=None, background=None):\n if dtype is None:\n dtype = data.dtype\n if shape==data.shape:\n return data.astype(dtype)\n if background is None:\n background = data.min()\n expanded_data = numpy.zeros(shape, dtype=dtype) + background\n slices = []\n rhs_slices = []\n for s1, s2 in zip (shape, data.shape):\n a, b = (s1-s2+1)//2, (s1+s2+1)//2\n c, d = 0, s2\n while a<0:\n a += 1\n b -= 1\n c += 1\n d -= 1\n slices.append(slice(a, b))\n rhs_slices.append(slice(c, d))\n try:\n expanded_data[tuple(slices)] = data[tuple (rhs_slices)]\n except ValueError:\n print data.shape, shape\n raise\n return expanded_data", "def processed_shape(self, shape):\n return shape", "def reshape(x, shape):\n return Reshape(shape)(x)", "def translate_shape(shape, x_shift, y_shift):", "def contract_to_shape(data, shape, dtype=None):\n if dtype is None:\n dtype = data.dtype\n if shape==data.shape:\n return data.astype(dtype)\n slices = []\n for s1, s2 in zip (data.shape, shape):\n slices.append(slice((s1-s2)//2, (s1+s2)//2))\n return data[tuple(slices)].astype(dtype)", "def _special_handle_reshape(cls, op, X, W):\n node_name = op.name + \":shape\"\n return [\n numpy_helper.from_array(np.array(op.shape, dtype=np.int64),\n node_name)\n ]", "def set_shape(self, shape):\n self.rows = shape[0]\n self.cols = shape[1]", "def _get_preprocess_shape(self, old_shape: Tuple[int, int], longest_edge: int):\n oldh, oldw = old_shape\n scale = longest_edge * 1.0 / max(oldh, oldw)\n newh, neww = oldh * scale, oldw * scale\n newh = int(newh + 0.5)\n neww = int(neww + 0.5)\n return 
(newh, neww)", "def reshape_output_shape_0(input_shape): \n shape_1 = input_shape[0]\n shape_2 = input_shape[1]\n shape_3 = input_shape[2]\n return(shape_1, shape_2, shape_3, 1)", "def reshape(tensor, newshape):\n raise NotImplementedError", "def _keras_update_shape(self, prep):\n\n # Run preprocessing on the training data\n X_transform = prep.fit_transform(self.X_train)\n\n # If the input shape has not been specified, it is simply the number of features in X_transform\n if 'input_shape' not in self.model.first_layer_kwargs:\n self.model.first_layer_kwargs['input_shape'] = tuple([X_transform.shape[1]])\n # Else update the input shape based on the number of features after preprocessing\n else:\n # Transform to a list to make the input_shape mutable\n self.model.first_layer_kwargs['input_shape'] = list(self.model.first_layer_kwargs['input_shape'])\n # Update the number of features based on X_transform\n if self.model.lags:\n self.model.first_layer_kwargs['input_shape'][-1] = X_transform.shape[1]//(self.model.lags + (1 if self.model.current_sample_as_input else 0))\n else:\n self.model.first_layer_kwargs['input_shape'][-1] = X_transform.shape[1]//np.prod(self.model.first_layer_kwargs['input_shape'][:-1])\n # Transform back to a tuple as required by Keras\n self.model.first_layer_kwargs['input_shape'] = tuple(self.model.first_layer_kwargs['input_shape'])\n \n # Ensure the Architecture has been updated\n self.model.architecture.iloc[0, 2]['input_shape'] = self.model.first_layer_kwargs['input_shape']\n \n # 2D, 3D and 4D data is valid. \n # e.g. The input_shape can be a tuple of (subsequences, timesteps, features), with subsequences and timesteps as optional.\n # A 4D shape may be valid for e.g. a ConvLSTM with (timesteps, rows, columns, features) \n if len(self.model.first_layer_kwargs['input_shape']) > 5:\n err = \"Unsupported input_shape: {}\".format(self.model.first_layer_kwargs['input_shape'])\n raise Exception(err)", "def reshape(self, *shape):\n return F.Reshape.apply(self, shape)", "def adjust_shape(placeholder, data):\n if not isinstance(data, np.ndarray) and not isinstance(data, list):\n return data\n if isinstance(data, list):\n data = np.array(data)\n\n placeholder_shape = [x or -1 for x in placeholder.shape.as_list()]\n\n assert _check_shape(placeholder_shape, data.shape), \\\n 'Shape of data {} is not compatible with shape of the placeholder {}'.format(data.shape, placeholder_shape)\n\n return np.reshape(data, placeholder_shape)", "def predataShape(self):\n self._predatashape['name']=self._name\n self._predatashape['location'] = self._location\n self._predatashape['origin'] = self._origin\n self._predatashape['width'] = self._width\n self._predatashape['height'] = self._height\n return self._predatashape", "def reorder_input_dimensions(self, train_x):\n\n source = [0, 1, 2, 3]\n destination = [3, 0, 2, 1]\n return np.moveaxis(train_x, source, destination)", "def resample_data_or_seg(data, new_shape, is_seg, axis=None, order=3, do_separate_z=False, order_z=0):\n assert len(data.shape) == 4, \"data must be (c, x, y, z)\"\n if is_seg:\n resize_fn = resize_segmentation\n kwargs = OrderedDict()\n else:\n resize_fn = resize\n kwargs = {'mode': 'edge', 'anti_aliasing': False}\n dtype_data = data.dtype\n shape = np.array(data[0].shape)\n new_shape = np.array(new_shape)\n if np.any(shape != new_shape):\n data = data.astype(float)\n if do_separate_z:\n print(\"separate z, order in z is\",\n order_z, \"order inplane is\", order)\n assert len(axis) == 1, \"only one anisotropic axis 
supported\"\n axis = axis[0]\n if axis == 0:\n new_shape_2d = new_shape[1:]\n elif axis == 1:\n new_shape_2d = new_shape[[0, 2]]\n else:\n new_shape_2d = new_shape[:-1]\n\n reshaped_final_data = []\n for c in range(data.shape[0]):\n reshaped_data = []\n for slice_id in range(shape[axis]):\n if axis == 0:\n reshaped_data.append(\n resize_fn(data[c, slice_id], new_shape_2d, order, **kwargs))\n elif axis == 1:\n reshaped_data.append(\n resize_fn(data[c, :, slice_id], new_shape_2d, order, **kwargs))\n else:\n reshaped_data.append(resize_fn(data[c, :, :, slice_id], new_shape_2d, order,\n **kwargs))\n reshaped_data = np.stack(reshaped_data, axis)\n if shape[axis] != new_shape[axis]:\n\n # The following few lines are blatantly copied and modified from sklearn's resize()\n rows, cols, dim = new_shape[0], new_shape[1], new_shape[2]\n orig_rows, orig_cols, orig_dim = reshaped_data.shape\n\n row_scale = float(orig_rows) / rows\n col_scale = float(orig_cols) / cols\n dim_scale = float(orig_dim) / dim\n\n map_rows, map_cols, map_dims = np.mgrid[:rows, :cols, :dim]\n map_rows = row_scale * (map_rows + 0.5) - 0.5\n map_cols = col_scale * (map_cols + 0.5) - 0.5\n map_dims = dim_scale * (map_dims + 0.5) - 0.5\n\n coord_map = np.array([map_rows, map_cols, map_dims])\n if not is_seg or order_z == 0:\n reshaped_final_data.append(map_coordinates(reshaped_data, coord_map, order=order_z,\n mode='nearest')[None])\n else:\n unique_labels = np.unique(reshaped_data)\n reshaped = np.zeros(new_shape, dtype=dtype_data)\n\n for i, cl in enumerate(unique_labels):\n reshaped_multihot = np.round(\n map_coordinates((reshaped_data == cl).astype(float), coord_map, order=order_z,\n mode='nearest'))\n reshaped[reshaped_multihot > 0.5] = cl\n reshaped_final_data.append(reshaped[None])\n else:\n reshaped_final_data.append(reshaped_data[None])\n reshaped_final_data = np.vstack(reshaped_final_data)\n else:\n print(\"no separate z, order\", order)\n reshaped = []\n for c in range(data.shape[0]):\n reshaped.append(\n resize_fn(data[c], new_shape, order, **kwargs)[None])\n reshaped_final_data = np.vstack(reshaped)\n return reshaped_final_data.astype(dtype_data)\n else:\n print(\"no resampling necessary\")\n return data", "def data_shape(self):\n raise NotImplementedError", "def data_shape(self):\n raise NotImplementedError", "def reshape(data):\n return K.reshape(x=data, shape=(K.shape(data)[0], 1, reshape_size))", "def _sorted_shape_qhape(tensor=None, shape=None, qhape=None):\n shape = tensor.shape if shape is None else shape\n qhape = tensor.qhape if qhape is None else qhape\n sorted_qhp = []\n sorted_shp = []\n for qim, dim in zip(qhape, shape):\n qim, dim = zip(*sorted(zip(qim, dim)))\n sorted_qhp.append(qim)\n sorted_shp.append(dim)\n return sorted_shp, sorted_qhp", "def upsampling3d(\n data,\n scale_d,\n scale_h,\n scale_w,\n layout=\"NCDHW\",\n method=\"nearest_neighbor\",\n coordinate_transformation_mode=\"half_pixel\",\n output_shape=None,\n):\n base_layout = layout[0:5]\n if base_layout == \"NCDHW\":\n if not output_shape: # static case\n scaled_d = data.shape[2] * scale_d\n scaled_h = data.shape[3] * scale_h\n scaled_w = data.shape[4] * scale_w\n resize_shape = (\n simplify(topi.cast(te.round(scaled_d), data.shape[2].dtype)),\n simplify(topi.cast(te.round(scaled_h), data.shape[3].dtype)),\n simplify(topi.cast(te.round(scaled_w), data.shape[4].dtype)),\n )\n else: # dynamic case -- don't need to scale; already done in shape func\n resize_shape = (\n simplify(topi.cast(te.round(output_shape[2]), data.shape[2].dtype)),\n 
simplify(topi.cast(te.round(output_shape[3]), data.shape[3].dtype)),\n simplify(topi.cast(te.round(output_shape[4]), data.shape[4].dtype)),\n )\n elif layout == \"NDHWC\":\n if not output_shape: # static case\n scaled_d = data.shape[1] * scale_d\n scaled_h = data.shape[2] * scale_h\n scaled_w = data.shape[3] * scale_w\n resize_shape = (\n simplify(topi.cast(te.round(scaled_d), data.shape[1].dtype)),\n simplify(topi.cast(te.round(scaled_h), data.shape[2].dtype)),\n simplify(topi.cast(te.round(scaled_w), data.shape[3].dtype)),\n )\n else: # dynamic case\n resize_shape = (\n simplify(topi.cast(te.round(output_shape[1]), data.shape[1].dtype)),\n simplify(topi.cast(te.round(output_shape[2]), data.shape[2].dtype)),\n simplify(topi.cast(te.round(output_shape[3]), data.shape[3].dtype)),\n )\n else:\n raise ValueError(f\"not support this layout {layout} yet\")\n if method[0:3] == \"tri\":\n method = method[3:]\n return topi.image.resize3d(\n data,\n [0.0] * 6,\n resize_shape,\n layout=layout,\n method=method,\n coordinate_transformation_mode=coordinate_transformation_mode,\n )", "def resize_ND_volume_to_given_shape(volume, out_shape, order = 3):\n shape0=volume.shape\n assert(len(shape0) == len(out_shape))\n scale = [(out_shape[i] + 0.0)/shape0[i] for i in range(len(shape0))]\n out_volume = ndimage.interpolation.zoom(volume, scale, order = order)\n return out_volume", "def reorder_after_dim_reduction(order):\n arr = sorted(range(len(order)), key=lambda x: order[x])\n return tuple(arr)", "def translate_shape(shape, x_shift, y_shift): \n new_shape = [] \n for i in range(int(len(shape)/2 )): \n x_new = shape[2*i] + x_shift \n y_new = shape[2*i+1] + y_shift \n new_shape.append(x_new) \n new_shape.append(y_new) \n return new_shape", "def _reduce_outshape(self, outshape):\n return tuple([index for index in outshape if index != 1])", "def _broadcast_shape(\n data, rank, world_size, num_parts, is_feat_data, feat_name\n):\n assert len(data.shape) in [\n 1,\n 2,\n ], f\"Data is expected to be 1-D or 2-D but got {data.shape}.\"\n data_shape = list(data.shape)\n\n if len(data_shape) == 1:\n data_shape.append(1)\n\n if is_feat_data:\n data_shape.append(DATA_TYPE_ID[data.dtype])\n\n data_shape = torch.tensor(data_shape, dtype=torch.int64)\n data_shape_output = [\n torch.zeros_like(data_shape) for _ in range(world_size)\n ]\n dist.all_gather(data_shape_output, data_shape)\n logging.debug(\n f\"[Rank: {rank} Received shapes from all ranks: {data_shape_output}\"\n )\n shapes = [x.numpy() for x in data_shape_output if x[0] != 0]\n shapes = np.vstack(shapes)\n\n if is_feat_data:\n logging.debug(\n f\"shapes: {shapes}, condition: {all(shapes[0,2] == s for s in shapes[:,2])}\"\n )\n assert all(\n shapes[0, 2] == s for s in shapes[:, 2]\n ), f\"dtypes for {feat_name} does not match on all ranks\"\n\n # compute tids here.\n type_counts = list(shapes[:, 0])\n tid_start = np.cumsum([0] + type_counts[:-1])\n tid_end = np.cumsum(type_counts)\n tid_ranges = list(zip(tid_start, tid_end))\n logging.debug(f\"starts -> {tid_start} ... 
end -> {tid_end}\")\n\n return tid_ranges", "def reshape(x, shape):\n if x.shape == shape:\n return chainer.as_variable(x)\n y, = Reshape(shape).apply((x,))\n return y", "def convert_reshape(g, op, block):\n\n input_shape = op.input(\"Shape\")\n input_shape_tensor = op.input(\"ShapeTensor\")\n data = g.get_node(op.input(\"X\")[0])\n if input_shape:\n new_shape = g.get_node(input_shape[0])\n elif input_shape_tensor:\n new_shape = []\n for shape_name in input_shape_tensor:\n shape = g.get_node(shape_name)\n if len(infer_shape(shape)) == 0:\n shape = _op.reshape(shape, [-1])\n new_shape.append(shape)\n new_shape = _op.concatenate(new_shape, axis=0)\n new_shape, infered = try_infer_value(new_shape, parameters=g.get_params())\n if infered:\n new_shape = new_shape.tolist()\n else:\n new_shape = op.attr(\"shape\")\n out = _op.reshape(data, new_shape)\n g.add_node(op.output(\"Out\")[0], out)", "def c_layout_from_shape(shape, dtype):\n dim = c_layout_from_shape(tail(shape), dtype)\n extent = head(shape)\n stride = dim.stride * dim.extent\n return Dimension(dim, extent, stride)", "def orient_shapes_hwd(data, slice_axis):\n if slice_axis == 0:\n return np.array(data)[[2, 1, 0]]\n elif slice_axis == 1:\n return np.array(data)[[2, 0, 1]]\n elif slice_axis == 2:\n return np.array(data)", "def reshape(self, new_shape):\n return self.__class__(pos=self.pos.reshape(new_shape),\n vel=self.vel.reshape(new_shape),\n frame=self.frame)", "def __init__(self, target_shape, **kwargs):\n super(Reshape, self).__init__(**kwargs)\n self.target_shape = nest.flatten(target_shape)", "def conver_testing_shape(args):\n testing_shape = [int(args.testing_shape), int(args.testing_shape)]\n return testing_shape", "def scale_shape(shape, x_amp, y_amp, x_offset, y_offset):\n x_list =[]\n y_list =[]\n new_shape = []\n # Split the list into separate x and y lists. 
\n for i in range(len(shape)/2):\n x_list.append(shape[2*i])\n y_list.append(shape[2*i + 1])\n\n # Amplify, add offsets and re-interleave the x and y components.\n for j in range(len(x_list)):\n x_list[j] = ( x_list[j] * x_amp ) + x_offset \n new_shape.append( x_list[j] )\n y_list[j] = ( y_list[j] * y_amp ) + y_offset \n new_shape.append( y_list[j] ) \n return new_shape", "def expand_shape(shape):\n expanded = [shape[0]]\n for i in range(1, len(shape)):\n next = [shape[i]] * shape[i-1]\n for j in range(1, i):\n next = [next] * shape[j]\n expanded.append(next)\n return expanded", "def reshape_output_shape(input_shape):\n shape_1 = input_shape[0]\n shape_2 = 384\n return(shape_1, shape_2)", "def amplify_2d_shape(shape, x_amplify, y_amplify):", "def rescale(self, new_shape):\n if not isinstance(new_shape, (list, tuple, np.ndarray)) or len(new_shape) != self._n_dim \\\n or not np.all([ isinstance(n, int) for n in new_shape ]):\n raise ValueError(\"new_shape must be a tuple with %d integers\" % self._n_dim)\n if np.any([ n < 4 for n in new_shape ]):\n raise ValueError(\"elements in new_shape must be at least 4\")\n \n if new_shape == self._grid_shape:\n pass\n elif len(new_shape) == 2:\n fn = RegularGridInterpolator((self._zaxis, self._xaxis), self._velocity_model)\n zaxis = np.linspace(self._zmin, self._zaxis[-1], new_shape[0])\n xaxis = np.linspace(self._xmin, self._xaxis[-1], new_shape[1])\n Z, X = np.meshgrid(zaxis, xaxis, indexing = \"ij\")\n cz, cx = [ new / old for new, old in zip(new_shape, self._grid_shape) ]\n self._velocity_model = fn([ [ z, x ] for z, x in zip(Z.ravel(), X.ravel()) ]).reshape(new_shape)\n self._grid_shape = new_shape\n self._grid_size = (self._grid_size[0] / cz, self._grid_size[1] / cx)\n self._zaxis = self._zmin + self._grid_size[0] * np.arange(self._grid_shape[0])\n self._xaxis = self._xmin + self._grid_size[1] * np.arange(self._grid_shape[1])\n elif len(new_shape) == 3:\n fn = RegularGridInterpolator((self._zaxis, self._xaxis, self._yaxis), self._velocity_model)\n zaxis = np.linspace(self._zmin, self._zaxis[-1], new_shape[0])\n xaxis = np.linspace(self._xmin, self._xaxis[-1], new_shape[1])\n yaxis = np.linspace(self._ymin, self._yaxis[-1], new_shape[2])\n Z, X, Y = np.meshgrid(zaxis, xaxis, yaxis, indexing = \"ij\")\n cz, cx, cy = [ new / old for new, old in zip(new_shape, self._grid_shape) ]\n self._velocity_model = fn([ [ z, x, y ] for z, x, y in zip(Z.ravel(), X.ravel(), Y.ravel()) ]).reshape(new_shape)\n self._grid_shape = new_shape\n self._grid_size = (self._grid_size[0] / cz, self._grid_size[1] / cx, self._grid_size[2] / cy)\n self._zaxis = self._zmin + self._grid_size[0] * np.arange(self._grid_shape[0])\n self._xaxis = self._xmin + self._grid_size[1] * np.arange(self._grid_shape[1])\n self._yaxis = self._ymin + self._grid_size[2] * np.arange(self._grid_shape[2])", "def __init__ (self, shape) :\r\n self._shape=[int(s) for s in shape]\r\n offset=[1]\r\n for i,incr in enumerate(self._shape[:-1]) :\r\n offset.append(offset[i]*incr)\r\n self._offset=offset", "def locate_shape(shape):", "def reshape(ring_buffer, shape):\n try:\n buffer = ring_buffer._data\n except AttributeError:\n buffer = ring_buffer\n\n new_shape = get_shape(shape)\n myshape = get_shape(buffer.shape)\n if new_shape[1] == 0:\n new_shape = (new_shape[0], 1) + new_shape[2:]\n\n if new_shape[0] == -1:\n try: # Only change the column shape\n buffer.shape = new_shape\n except ValueError: # Change the entire array shape\n rows = int(np.ceil(myshape[0]/new_shape[1]))\n new_shape = (rows, ) + 
new_shape[1:]\n buffer.resize(new_shape, refcheck=False)\n\n else:\n # Force proper sizing\n buffer.resize(new_shape, refcheck=False)\n\n # Clear the buffer if it did anything but grow in length\n # if not (new_shape[0] > myshape[0] and new_shape[1:] == myshape[1:]):\n try:\n ring_buffer.clear()\n except AttributeError:\n pass", "def _sort_ds(self):\n d = []\n for layer in self.structure:\n if (layer.type == 'Layer' or layer.type == 'Substrate'):\n d.append(layer.thickness)\n d.insert(0, self.structure[0].thickness)\n d.append(self.structure[-1].thickness)\n d = np.asarray(d)\n return d", "def clear_loaded_shapefiles(self):\n self.shapes = pd.DataFrame()", "def shape_for_keras(data):\n raise NotImplementedError", "async def infer_shape_reshape(track, v, shape):\n shp = await shape['value']\n if shp == ANYTHING:\n shp_t = await shape['type']\n shp = (ANYTHING,) * len(shp_t.elements)\n v_shp = await v['shape']\n if (all(s is not ANYTHING for s in shp) and\n all(s is not ANYTHING for s in v_shp) and\n prod(shp) != prod(v_shp)):\n raise MyiaShapeError(\"Cannot change the total number of elements \"\n \"in reshape\")\n return shp", "def order(self, type1):\n\t\tif self.shapes[1].type() == type1:\n\t\t\tself.shapes = (self.shapes[1], self.shapes[0])\n\t\treturn self", "def sample_shape(shape,nsamp=None):\r\n if nsamp == None:\r\n sample_shape = shape\r\n else:\r\n sample_shape = shape + (nsamp,)\r\n return sample_shape", "def set_pianoroll_shape(pianoroll, data_shape):\n pianoroll.set_shape(data_shape)\n return pianoroll", "def _obtain_input_shape(input_shape,\n default_size,\n min_size,\n data_format,\n include_top):\n if data_format == 'channels_first':\n default_shape = (3, default_size, default_size)\n else:\n default_shape = (default_size, default_size, 3)\n if include_top:\n if input_shape is not None:\n if input_shape != default_shape:\n raise ValueError('When setting`include_top=True`, '\n '`input_shape` should be ' + str(default_shape) + '.')\n input_shape = default_shape\n else:\n if data_format == 'channels_first':\n if input_shape is not None:\n if len(input_shape) != 3:\n raise ValueError('`input_shape` must be a tuple of three integers.')\n if input_shape[0] != 3:\n raise ValueError('The input must have 3 channels; got '\n '`input_shape=' + str(input_shape) + '`')\n if ((input_shape[1] is not None and input_shape[1] < min_size) or\n (input_shape[2] is not None and input_shape[2] < min_size)):\n raise ValueError('Input size must be at least ' +\n str(min_size) + 'x' + str(min_size) + ', got '\n '`input_shape=' + str(input_shape) + '`')\n else:\n input_shape = (3, None, None)\n else:\n if input_shape is not None:\n if len(input_shape) != 3:\n raise ValueError('`input_shape` must be a tuple of three integers.')\n if input_shape[-1] != 3:\n raise ValueError('The input must have 3 channels; got '\n '`input_shape=' + str(input_shape) + '`')\n if ((input_shape[0] is not None and input_shape[0] < min_size) or\n (input_shape[1] is not None and input_shape[1] < min_size)):\n raise ValueError('Input size must be at least ' +\n str(min_size) + 'x' + str(min_size) + ', got '\n '`input_shape=' + str(input_shape) + '`')\n else:\n input_shape = (None, None, 3)\n return input_shape", "def _change_shape(self,x,y,w,h):\n top = y \n left = x\n right = x + w\n bottom = y + h\n return top,right,bottom,left", "def compute_output_shape(self, input_shape):\r\n return input_shape", "def _deserialize(self, data):\n\n firstInd = 0\n deserialized_data = []\n for shp in self._data_shape_list:\n if len(shp) > 
1:\n shift = np.prod(shp)\n elif len(shp) == 0:\n shift = 1\n else:\n shift = shp[0]\n tmp_array = data[firstInd:firstInd+shift]\n tmp_array = tmp_array.reshape(shp)\n deserialized_data.append(tmp_array)\n firstInd += shift\n return deserialized_data", "def initialize(self, shape):\n \n # clear params and caches\n self._X_shape = None\n self._cols = None\n self._max_idx = None\n \n # return output shape\n return self.outshape(shape)", "def create_helper_reshape_node(input_name, output_name, shape, kwargs):\n shape_tensor_node, = create_helper_tensor_node(\n np.asarray(shape, dtype=np.int64), output_name + \"__shape\", kwargs\n )\n reshape_node = onnx.helper.make_node(\n \"Reshape\",\n inputs=[input_name, shape_tensor_node.name],\n outputs=[output_name],\n name=output_name\n )\n\n return [shape_tensor_node, reshape_node]", "def load_data(data_path, input_shape):\n # load the original data.\n orig_data = pickle.load(open(data_path, 'rb'), encoding='iso-8859-1')\n\n # Get the set of snr & modulations\n mode_snr = list(orig_data.keys())\n mods, snrs = [sorted(list(set(x[i] for x in mode_snr))) for i in [0, 1]]\n mods.remove('AM-SSB')\n mods.remove('WBFM')\n mods.remove('8PSK')\n mods.remove('BPSK')\n\n # Build the train set.\n samples = []\n labels = []\n samples_snr = []\n mod2cate = dict()\n cate2mod = dict()\n for cate in range(len(mods)):\n cate2mod[cate] = mods[cate]\n mod2cate[mods[cate]] = cate\n\n for snr in snrs:\n for mod in mods:\n samples.extend(orig_data[(mod, snr)])\n labels.extend(1000 * [mod2cate[mod]])\n samples_snr.extend(1000 * [snr])\n\n shape = [len(labels), height, width, 1]\n samples = np.array(samples).reshape(shape)\n samples_snr = np.array(samples_snr)\n labels = np.array(labels)\n return samples, labels, mod2cate, cate2mod, snrs, mods, samples_snr", "def set_shape(self):\n\t\tself.own_shape_matrix = np.array([[127., 0., 0., 0.],\n\t\t\t\t\t\t\t\t\t\t\t[ 0., 127., 0., 0.],\n\t\t\t\t\t\t\t\t\t\t\t[ 0., 0., 2420., 0.],\n\t\t\t\t\t\t\t\t\t\t\t[ 0., 0., 0., 1.]])\n\t\t\n\t\t\n\t\tpass", "def upsample(inputs, out_shape, data_format):\n if data_format == 'channels_first':\n inputs = tf.transpose(inputs, [0, 2, 3, 1])\n new_height = out_shape[3]\n new_width = out_shape[2]\n else:\n new_height = out_shape[2]\n new_width = out_shape[1]\n\n inputs = tf.image.resize_nearest_neighbor(inputs, (new_height, new_width))\n if data_format == 'channels_first':\n inputs = tf.transpose(inputs, [0, 3, 1, 2])\n return inputs", "def reshape(self, newshape):\n\n return ArrayCoordinates1d(self.coordinates.reshape(newshape), **self.properties)", "def reorder_examples(self):\n self.example_wise_shrink(Ordering, key=sort_key)", "def dataShape(self, role):\n return None", "def print_shape(self, data):\n print(data.shape)", "def _data_reshape(self, data):\n data_offset = [int(size / 2) for size in data.shape[1:]]\n data_diff = [int(size / 2) for size in self.shape]\n data_diff_min = data_diff\n data_diff_max = []\n for i, elem in enumerate(data_diff):\n if self.shape[i] % 2 == 0:\n data_diff_max.append(elem)\n else:\n data_diff_max.append(elem + 1)\n data = data[:, (data_offset[0] - data_diff_min[0]):(data_offset[0] + data_diff_max[0]),\n (data_offset[1] - data_diff_min[1]):(data_offset[1] + data_diff_max[1]),\n (data_offset[2] - data_diff_min[2]):(data_offset[2] + data_diff_max[2])]\n\n if data.shape[1] == 1:\n data = data.reshape(data.shape[0], data.shape[2], data.shape[3])\n return data", "def _split_shape_index(input_shape, axis):\n rank = len(input_shape)\n if isinstance(axis, int):\n axis = 
tuple([axis])\n reduction_indices = tuple([(i + rank) % rank for i in axis])\n other_indices = tuple(set(range(rank)) - set(reduction_indices))\n reduced_num = reduce(lambda x, y: x * y, [1] + [input_shape[i] for i in reduction_indices])\n other_num = reduce(lambda x, y: x * y, [1] + [input_shape[i] for i in other_indices])\n perm = reduction_indices + other_indices\n return tuple([reduced_num, other_num]), perm", "def _infer_shape(schema):\n for feature in schema.feature:\n # Currently we infer shape only for features with valency 1.\n if (feature.presence.min_fraction == 1 and\n feature.value_count.min == feature.value_count.max == 1):\n feature.shape.dim.add().size = 1", "def reshape(self, *shape):\n newTensor = super(MKLTensor, self).reshape(*shape)\n newTensor.set_mkl(self)\n return newTensor", "def reorderRelations(self, *order):\n order = self._reorder(order)\n if self._has(\"k\"):\n self._.k = tuple(self._.k[i] for i in order)\n if self._has(\"P\"):\n self._.P = Matrix(SR, [[r[j] for j in order] for r in self._.P])\n if self._has(\"Q\"):\n self._.Q = Matrix(SR, [self._.Q[i] for i in order])\n if self._has(\"p\"):\n self._.p.reorder(order)\n self._.triple = {tuple(order.index(i) for i in t):\n s.reorder(order, inplace=False)\n for t, s in self._.triple.items()}\n self._.triple_solution = {tuple(order.index(i) for i in t):\n {k: s.reorder(order, inplace=False)\n for k, s in d.items()}\n for t, d in self._.triple_solution.items()}\n self._.triple_solution_generator = \\\n {tuple(order.index(i) for i in t): g\n for t, g in self._.triple_solution_generator.items()}\n self._.quadruple = {tuple(order.index(i) for i in t):\n s.reorder(order, inplace=False)\n for t, s in self._.quadruple.items()}\n self._.subconstituents = [self._.subconstituents[i] for i in order]\n if self._has(\"pPolynomial_ordering\") and self._.pPolynomial_ordering:\n self._.pPolynomial_ordering = sorted(\n [tuple(order.index(i) for i in o)\n for o in self._.pPolynomial_ordering])", "def set_shape(self, connection_shape):\n self.shape = connection_shape", "def normalize_shape(shape: Union[int, Tuple[int, ...], None]) -> Tuple[int, ...]:\n\n if shape is None:\n raise TypeError(\"shape is None\")\n\n # handle 1D convenience form\n if isinstance(shape, numbers.Integral):\n shape = (int(shape),)\n\n # normalize\n shape = cast(Tuple[int, ...], shape)\n shape = tuple(int(s) for s in shape)\n return shape", "def reorderParameters(self, p, *order):\n self._.a = tuple(p[i, i, 1] for i in range(self._.d + 1))\n self._.b = tuple(p[i, i+1, 1] if i < self._.d else Integer(0)\n for i in range(self._.d + 1))\n self._.c = tuple(p[i, i-1, 1] if i > 0 else Integer(0)\n for i in range(self._.d + 1))\n if self._has(\"omega\"):\n self._.omega = Matrix(SR, [[r[i] for i in order]\n for r in self._.omega])\n if self._has(\"theta\"):\n del self._.theta\n if self._has(\"fsd\"):\n del self._.fsd", "def _reshape_like(mat: Tensor, shape: Tuple[int]) -> Tensor:\n return mat.reshape(-1, *shape)", "def add_reshape(self, input_name, shape, name=None, attr={}):\n return self._build_op('Reshape', [input_name, shape], name=name)", "def shape(self):", "def shape(self):", "def get_mds_shape(node):\n try: \n #This will fail if node.getShape doesn't exist or if shape is 0\n shape=list(node.getShape())\n assert len(shape)>0\n except (mds.mdsExceptions.MDSplusException,AssertionError):\n return []\n shape.reverse() #put in the python order\n return shape", "def reshape(x, shape):\n return float(x) if shape is None else jnp.reshape(x, shape)", "def 
slice_output_shape(input_shape):\n shape_1 = input_shape[0]\n shape_2 = 80\n shape_3 = input_shape[2]\n return (shape_1, shape_2, shape_3)", "def _prepare_image(self, image, initial_shape, gt_shape=None):\n image.landmarks['initial_shape'] = initial_shape\n image = image.rescale_to_reference_shape(\n self.reference_shape, group='initial_shape',\n interpolator=self.interpolator)\n\n if gt_shape:\n image.landmarks['gt_shape'] = initial_shape\n\n if self.n_levels > 1:\n if self.scaled_levels:\n pyramid = image.gaussian_pyramid(\n n_levels=self.n_levels, downscale=self.downscale)\n else:\n pyramid = image.smoothing_pyramid(\n n_levels=self.n_levels, downscale=self.downscale)\n images = [compute_features(i, self.feature_type)\n for i in pyramid]\n images.reverse()\n else:\n images = [compute_features(image, self.feature_type)]\n\n return images", "def _clone_layout_placeholders(self, slidelayout):\n latent_ph_types = (PH_TYPE_DT, PH_TYPE_SLDNUM, PH_TYPE_FTR)\n for sp in slidelayout.shapes:\n if not sp.is_placeholder:\n continue\n ph = Placeholder(sp)\n if ph.type in latent_ph_types:\n continue\n self.__clone_layout_placeholder(ph)", "def reset_shapes():\n if bpy.context.object.modeling_cloth:\n ob = bpy.context.object\n else: \n ob = extra_data['last_object']\n\n if ob.data.shape_keys == None:\n ob.shape_key_add('Basis') \n if 'modeling cloth source key' not in ob.data.shape_keys.key_blocks:\n ob.shape_key_add('modeling cloth source key') \n if 'modeling cloth key' not in ob.data.shape_keys.key_blocks:\n ob.shape_key_add('modeling cloth key') \n ob.data.shape_keys.key_blocks['modeling cloth key'].value=1\n \n keys = ob.data.shape_keys.key_blocks\n count = len(ob.data.vertices)\n co = np.zeros(count * 3, dtype=np.float32)\n keys['modeling cloth source key'].data.foreach_get('co', co)\n keys['modeling cloth key'].data.foreach_set('co', co)\n\n data[ob.name].vel *= 0\n \n ob.data.shape_keys.key_blocks['modeling cloth key'].mute = True\n ob.data.shape_keys.key_blocks['modeling cloth key'].mute = False", "def downsampleShape(self, numDesiredPoints):\n\n if len(self.x) > 2:\n t_current_x = np.linspace(0, 1, len(self.x))\n t_current_y = np.linspace(0, 1, len(self.y))\n t_desired_x = np.linspace(0, 1, numDesiredPoints)\n t_desired_y = np.linspace(0, 1, numDesiredPoints)\n f = interpolate.interp1d(t_current_x, self.x, kind='linear')\n self.x = f(t_desired_x).tolist()\n f = interpolate.interp1d(t_current_y, self.y, kind='linear')\n self.y = f(t_desired_y).tolist()\n\n self.len = numDesiredPoints", "async def infer_shape_distribute(track, v, shape):\n shp = await shape['value']\n if shp == ANYTHING:\n shp_t = await shape['type']\n shp = (ANYTHING,) * len(shp_t.elements)\n v_t = await v.get_shallow('type')\n if ismyiatype(v_t, Array):\n v_shp = await v['shape']\n delta = len(shp) - len(v_shp)\n if delta < 0:\n raise MyiaShapeError(\"Cannot distribute to smaller shape\")\n elif delta > 0:\n v_shp = (1,) * delta + v_shp\n for vs, s in zip(v_shp, shp):\n if vs != s and vs not in (1, ANYTHING) and s not in (1, ANYTHING):\n raise MyiaShapeError(\"Cannot change shape when distributing\")\n return shp", "def get_input_shape(self):\n\n fname = self.train_database[0]\n if self.mapfly:\n feature, _ = self.map_one_molecule(fname)\n else:\n feature, _ = self.load_one_molecule(fname)\n\n self.data_shape = feature.shape\n\n if self.pair_chain_feature:\n feature = self.make_feature_pair(\n feature, self.pair_chain_feature)\n\n if self.transform:\n feature = self.convert2d(feature, self.proj2D)\n\n self.input_shape = 
feature.shape", "def get_dshape(self):\n return dict(zip(self.axes_names, self.data.shape))", "def implement_shape(self, shape):\n if self.flag == 0:\n for coord in INDICES:\n self.kill(coord)\n for coord in shape:\n self.givebirth(coord)", "def _fix_unknown_dimension(self, input_shape, output_shape):\n output_shape = list(output_shape)\n msg = 'total size of new array must be unchanged'\n\n known, unknown = 1, None\n for index, dim in enumerate(output_shape):\n if dim < 0:\n if unknown is None:\n unknown = index\n else:\n raise ValueError('Can only specify one unknown dimension.')\n else:\n known *= dim\n\n original = np.prod(input_shape, dtype=int)\n if unknown is not None:\n if known == 0 or original % known != 0:\n raise ValueError(msg)\n output_shape[unknown] = original // known\n elif original != known:\n raise ValueError(msg)\n\n return tuple(output_shape)", "def reshape(a, shape=None, name=None):\n if K.is_sparse(a):\n reshape_op = tf.sparse.reshape\n else:\n reshape_op = tf.reshape\n\n return reshape_op(a, shape=shape, name=name)", "def turn(self):\n new_shape = []\n for shape in self.shape:\n new_shape.append([-shape[1], shape[0]])\n old_shape = self.shape[:]\n self.shape = new_shape\n if not self.board.is_valid_tetromino(self):\n self.shape = old_shape" ]
[ "0.601357", "0.6012796", "0.59265906", "0.59111005", "0.58404654", "0.5758127", "0.57051116", "0.5653655", "0.5619018", "0.5616662", "0.5612885", "0.56128573", "0.561095", "0.5574861", "0.5568789", "0.55197614", "0.5517362", "0.551245", "0.54615873", "0.54567415", "0.54471827", "0.54460835", "0.5418479", "0.5415293", "0.54097", "0.54044056", "0.54023105", "0.5379557", "0.53665763", "0.5350459", "0.5350459", "0.5345706", "0.5304022", "0.52894014", "0.5256573", "0.5256238", "0.5238607", "0.522887", "0.52157116", "0.52041864", "0.5188916", "0.51852745", "0.5178524", "0.51686287", "0.51597196", "0.5151521", "0.5147581", "0.5145696", "0.5143769", "0.51356494", "0.51204896", "0.5097919", "0.50882083", "0.50806385", "0.5074341", "0.5072704", "0.50676256", "0.50643945", "0.5053327", "0.5044302", "0.50401056", "0.50296706", "0.50264037", "0.5025748", "0.5025659", "0.5024467", "0.5020959", "0.50165504", "0.5010929", "0.5007865", "0.5007557", "0.50061655", "0.50029033", "0.5001012", "0.49782565", "0.49713978", "0.4965748", "0.49616718", "0.49555168", "0.4947258", "0.493528", "0.493071", "0.4926485", "0.49246547", "0.49208027", "0.49208027", "0.49200156", "0.49186155", "0.49128616", "0.49119186", "0.49102423", "0.4909795", "0.49078164", "0.4899376", "0.48983973", "0.4895271", "0.4883381", "0.4866429", "0.48636383", "0.48582757" ]
0.7083111
0
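A minimal runnable sketch of the `_optimizeshape` document above, assuming `ORDER` is a module-level layout flag ('C' or 'F') as the snippet implies; the flag value and the demo shape below are assumptions for illustration only, not part of the sampled source.

ORDER = 'C'  # assumed module-level layout flag: 'C' (row-major) or 'F' (column-major)

def _optimizeshape(shape):
    # Sort extents ascending, then reverse for row-major layout so the
    # largest extent comes first (i.e. varies slowest in memory).
    shape.sort()
    if ORDER == 'C':
        shape[:] = shape[::-1]

shape = [3, 128, 16]
_optimizeshape(shape)
print(shape)  # [128, 16, 3] when ORDER == 'C'; [3, 16, 128] otherwise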
returns the communicator used to build this topology
def parent(self): return self._mpis.comm
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_comm() -> Communication:\n return __default_comm", "def get_comm(self):\n return self.comm", "def comm(self):\n return self._comm", "def GetComm(self):\n return _hypre.HypreParMatrix_GetComm(self)", "def topology(self):\n return self._topology", "def object_communicator():\n comm = MPI.COMM_WORLD", "def get_torch_default_comm():\n try:\n comm = dist.distributed_c10d._get_default_group()\n return comm\n except Exception as _:\n pass\n try:\n comm = dist.distributed_c10d._default_pg\n if comm is not None:\n return comm\n except Exception as _:\n pass\n raise RuntimeError(\"Unsupported PyTorch version\")", "def GetComm(self):\n return _hypre.HypreParVector_GetComm(self)", "def comm_group(self):\n return self._gcomm", "def network(self):\n return self.__network", "def nc(self):\n return self._syi.node_creator", "def build_topology(self):\n# errstr = \"build_topology() is not implemented.\\n\"\n# errstr += textwrap.dedent(self.build_topology.__doc__)\n# raise NotImplementedError(errstr)\n pass # May be a 1-compartment neuron. No need to abstract. ", "def comm_port(self):\r\n return self._comm_port", "def get_network(self):\n return self._network", "def _get_communities(self):\n return self.__communities", "def network(self):\n return self._network", "def network(self):\n return self._network", "def network(self):\n return self._network", "def _get_send_community(self):\n return self.__send_community", "def protocol(self):\n self._recv_protocol()\n return self._protocol", "def get_topology_map(self):\n # Get a row for the list\n # make a list from the topology\n master_slaves = [self._get_row(row) for row in self.topology]\n return master_slaves[0]", "def get_network(self):\n return self.get_ip_network()[-1]", "def current_build_proto(self):\n return self._build_proto", "def community(self):\n return self._community", "def network_interface(self): \n return self._network_interface", "def get_graph_interface(self):\n if self._graph_interface is None:\n self._compute_graph_interface()\n return self._graph_interface", "def topology(self):\n return self._h5[TOPOLOGY][()]", "def get_connection(self):\n\n return self.REMOTE_CONNECTION", "def protocol(self) -> NetworkProtocol:\n if hasattr(self, \"_protocol\"):\n return self._protocol\n _args: list[Arg] = []\n _ctx = self._select(\"protocol\", _args)\n return _ctx.execute_sync(NetworkProtocol)", "def get_distribution_operator(self):\n return MPIDistributionIdentityOperator(self.comm)", "def topology_name(self):\n return self._topology_name", "def network(self) -> str:\n return pulumi.get(self, \"network\")", "def proto(self):\n return self.sock.proto", "def getComputer(this):\n comp_id = \"192.168.0.78\"\n return comp_id", "def communities(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:\n return pulumi.get(self, \"communities\")", "def protocol(self):\n return helpers.get_protocol()", "def topology(self) -> List[Topology]:\n return self._topology", "def get_synchronizer(self):\n return self.synchronizer", "def make_comms(self,comm):\n # For masters we let child_comm be the communicator used to message the node's \n # children, and parent_comm be that used to message the node's parents.\n\n parent_rank = 0\n\n # Case (1)\n if self.num_masters > 1:\n self.make_comms_many(comm)\n if self.is_master:\n parent_comm = self.comm_masters\n if self.comm_masters.Get_rank() == 0: # rank 0 is the super-master\n child_comm = self.comm_masters\n parent_rank = None\n else:\n child_comm = self.comm_block\n # Case (2)\n else:\n 
self.make_comm_single(comm)\n if self.is_master:\n parent_comm = self.comm_block\n child_comm = self.comm_block\n parent_rank = None\n\n # Process initialization\n from .MPIProcess import MPIWorker, MPIMaster\n if self.is_master:\n self.set_val_data()\n num_sync_workers = self.get_num_sync_workers(child_comm)\n self.process = MPIMaster( parent_comm, parent_rank=parent_rank, \n data=self.data, child_comm=child_comm, num_epochs=self.num_epochs,\n num_sync_workers=num_sync_workers, callbacks=self.callbacks )\n else:\n self.set_train_data()\n self.process = MPIWorker( parent_comm=self.comm_block, parent_rank=parent_rank, \n num_epochs=self.num_epochs, data=self.data, callbacks=self.callbacks )", "def device(self) -> torch.device:\n for param in self.parameters():\n return param.device\n return get_device(\"cpu\")", "def network(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"network\")", "def getProtocol(self, _):\r\n return self._protocol", "def __str__(self):\n\n return \"Network: {0}\".format(self.topology)", "def network_client(self):\n if not self.client:\n self.client = get_client_from_cli_profile(NetworkManagementClient)\n return self.client", "def protocol(self):\n return self._protocol", "def get_network(self):\n\n # Find which nodes are input and which are output. We may want to store\n # this info somewhere else (like in the genome)\n\n inputs = []\n outputs = []\n bias = []\n edges = []\n node_num = dict() #Map from node_id to zero index node number\n\n for i, node in enumerate(self.node_genes):\n # Create mapping\n node_num[node.node_id] = i\n\n # Store input and output node_numbers\n if node.node_type is INPUT:\n inputs.append(i)\n elif node.node_type is OUTPUT:\n outputs.append(i)\n elif node.node_type is BIAS:\n bias.append(i)\n\n # Create edge list.\n for link in self.link_genes:\n if link.enabled:\n edges.append((node_num[link.to_node.node_id],\n node_num[link.from_node.node_id], link.weight))\n\n\n # Build an adjacency matrix for the network\n n = len(node_num)\n adj_matrix = np.zeros((n, n))\n try:\n for e in edges:\n adj_matrix[e[:2]] = e[2]\n except:\n global GENOME\n GENOME = self\n print([node.node_id for node in self.node_genes])\n print()\n print('len(node_genes)', len(self.node_genes))\n print('edge', e)\n print('adj.shape', adj_matrix.shape)\n sys.exit()\n\n return Network(adj_matrix, inputs, outputs, bias)", "def get_device(self):\n return self.parent.get_device()", "def get_com(self):\n return self.com", "def developer(self):\n return self.proto.creator", "def protocol(self):\n return self._host[CONF_PROTOCOL]", "def socket(self):\n return self.__socket", "def device(self) -> torch.device:\n return self._device", "def getPeerToPeerNetwork(self):\r\n raise NotImplementedError()", "def createPort(self):\n return _libsbml.CompModelPlugin_createPort(self)", "def get_device(self):\n raise NotImplementedError()", "def protocol(self) -> MessageProtocol:\n return self._sender.protocol", "def device(self):\n return torch.cuda.current_device()", "def interface(self):\n return self.broker.interface(**{\"DeviceRouteID\": self.DeviceRouteID})", "def getProtocol(self, transport: Any) -> TCompactProtocol:\n return TCompactProtocol.TCompactProtocol(transport)", "def buildNetwork(self):\n\n # create the network node for our module\n self.networkNode = cmds.createNode(\"network\", name=self.modName)\n\n # create attributes\n self.addAttributes()\n\n return self.networkNode", "def telescope(self):\n return _coordsys.coordsys_telescope(self)", "def getPort(self, *args):\n 
return _libsbml.CompModelPlugin_getPort(self, *args)", "def device(self):\n return self.broker.device(**{\"VirtualNetworkMemberID\": self.VirtualNetworkMemberID})", "def masterPort(self):\r\n return self._masterPort", "def getConexion_police(self):\n\t\t\treturn self.policeConn", "def getProcessManager(self): \n \n return self.procmgr", "def get_chat(self) -> Optional[ChatNode]:\n triples = self.agent_memory.get_triples(pred_text=\"chat_effect_\", obj=self.memid)\n if triples:\n chat_id, _, _ = triples[0]\n return ChatNode(self.agent_memory, chat_id)\n else:\n return None", "def construct_mpi_topology(self, dico):\n period = [True]*self.dim\n\n if dico is None:\n comm = mpi.COMM_WORLD\n else:\n comm = dico.get('comm', mpi.COMM_WORLD)\n self.mpi_topo = MPI_topology(self.dim, period, comm)", "def createTopology(self):\n\n # find DAG root\n dagRoot = None\n for mote in self.motes:\n if mote.id == 0:\n mote.role_setDagRoot()\n dagRoot = mote\n assert dagRoot\n\n if self.settings.mobilityModel == 'RPGM':\n # put DAG root at center of area\n dagRoot.setLocation(x=SimEngine.SimEngine().targets[0][0],\n y=SimEngine.SimEngine().targets[0][1])\n else:\n # put DAG root at center of area\n dagRoot.setLocation(x=self.squareSide/2,\n y=self.squareSide/2)\n\n # reposition each mote until it is connected\n connectedMotes = [dagRoot]\n motes_shuffled = copy.copy(self.motes)\n random.shuffle(motes_shuffled) # shuffle them around\n\n # for mote in self.motes:\n for mote in motes_shuffled:\n stableNeighbors = []\n if mote in connectedMotes:\n continue\n\n connected = False\n while not connected:\n # pick a random location\n # mote.setLocation(x=self.squareSide*random.random(),\n # y=self.squareSide*random.random())\n #\n # mote.setLocation(\n # x=self.settings.squareSide * random.random(),\n # y=self.settings.squareSide * random.random()\n # )\n\n newX = None\n newY = None\n # if no topology is not given, build the topology yourself\n if SimEngine.SimEngine().ilp_topology is None:\n newX = self.settings.squareSide * random.random()\n newY = self.settings.squareSide * random.random()\n else:\n # if no topology is given, use that topology\n newX = SimEngine.SimEngine().ilp_topology[str(mote.id)]['x']\n newY = SimEngine.SimEngine().ilp_topology[str(mote.id)]['y']\n\n mote.setLocation(\n x=newX,\n y=newY\n )\n\n numStableNeighbors = 0\n stableNeighbors = []\n\n # tryAgain = False\n # for cm in connectedMotes:\n # rssi = self._computeRSSI(mote, cm)\n # if rssi > -110:\n # tryAgain = True\n\n # if not tryAgain:\n # count number of neighbors with sufficient RSSI\n for cm in connectedMotes:\n\n rssi = self._computeRSSI(mote, cm)\n mote.setRSSI(cm, rssi)\n cm.setRSSI(mote, rssi)\n\n # save the intial RSSI values for future use in the mobility models\n mote.initialRSSI[cm] = rssi\n cm.initialRSSI[mote] = rssi\n\n if self.settings.individualModulations == 1:\n if self.rssiToPdr(rssi, modulation=Modulation.Modulation().minimalCellModulation[SimSettings.SimSettings().modulationConfig]) > self.settings.stableNeighborPDR:\n # if rssi > Modulation.Modulation().modulationStableRSSI[Modulation.Modulation().minimalCellModulation[SimSettings.SimSettings().modulationConfig]]:\n # print rssi\n numStableNeighbors += 1\n stableNeighbors.append(cm.id)\n else:\n if rssi > self.STABLE_RSSI:\n # print rssi\n numStableNeighbors += 1\n\n # make sure it is connected to at least STABLE_NEIGHBORS motes\n # or connected to all the currently deployed motes when the number of deployed motes\n # are smaller than STABLE_NEIGHBORS\n if 
numStableNeighbors >= self.stable_neighbors or numStableNeighbors == len(connectedMotes):\n print 'For mote {0}, stable neighbors {1}'.format(mote.id, stableNeighbors)\n connected = True\n\n connectedMotes += [mote]\n\n # for each mote, compute PDR to each neighbors\n for mote in self.motes:\n for m in self.motes:\n if mote == m:\n continue\n\n # set the distance to all other motes\n distance = math.sqrt((m.x - mote.x) ** 2 + (m.y - mote.y) ** 2)\n m.set_distance(mote, distance)\n mote.set_distance(m, distance)\n # print 'mote %d to mote %d: %.4f' % (m.id, mote.id, distance)\n if self.settings.individualModulations == 1:\n rssi_value = mote.getRSSI(m)\n # for modulationTmp in Modulation.Modulation().modulations:\n # if self.settings.ilpfile is not None:\n # ## I am not going to set this as this should be set by the ILP\n # pass\n # else:\n # # if the rssi value is higher than the minimal signal value required for this neighbor, take that modulation\n # # and compute the PDR using that modulation\n # pass\n # # if rssi_value > Modulation.Modulation().modulationStableRSSI[modulationTmp]:\n # # pdr = self._computePDR(mote, m, modulation=modulationTmp)\n # # mote.setPDR(m, pdr)\n # # m.setPDR(mote, pdr)\n # # mote.setModulation(m, modulationTmp)\n # # m.setModulation(mote, modulationTmp)\n else:\n if mote.getRSSI(m) > mote.minRssi:\n pdr = self._computePDR(mote, m)\n mote.setPDR(m, pdr)\n m.setPDR(mote, pdr)", "def getPeer(self):\n return \"Peer:PID:\" + str(self.transport.pid)", "def topology(self):\n try:\n return self._topology\n except AttributeError:\n # If none exists, we need to create it\n pass\n # Cache the topology for easy returning later\n self._topology = topology = Topology()\n\n last_chain = None\n last_residue = None\n # Add each chain (separate 'system's) and residue\n for atom in self.atom_list:\n resid = '%d%s' % (atom.residue.idx, atom.residue.inscode)\n if atom.system != last_chain:\n chain = topology.addChain(atom.system)\n last_chain = atom.system\n last_residue = None\n if resid != last_residue:\n last_residue = resid\n residue = topology.addResidue(atom.residue.resname, chain, str(atom.residue.idx), atom.residue.inscode)\n if atom.type is not None and atom.type.atomic_number != 0:\n # This is the most reliable way of determining the element\n elem = element.Element.getByAtomicNumber(atom.type.atomic_number)\n else:\n # Figure it out from the mass\n elem = element.Element.getByMass(atom.mass)\n topology.addAtom(atom.name, elem, residue)\n\n # Add all of the bonds\n atoms = list(topology.atoms())\n # Assign atom indexes to make sure they're current\n self.atom_list.assign_indexes()\n for bond in self.bond_list:\n topology.addBond(atoms[bond.atom1.idx], atoms[bond.atom2.idx])\n\n # Add the periodic box if there is one\n if self.box_vectors is not None:\n topology.setUnitCellDimensions(self.boxLengths)\n\n return topology", "def main_device(self):\n return self._main_device", "def transportprotocol(self) :\n\t\ttry :\n\t\t\treturn self._transportprotocol\n\t\texcept Exception as e:\n\t\t\traise e", "def protocol(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> str:\n return self.__parameters.protocol", "def socket(self):\n if not hasattr(self, \"_socket\"):\n # create a new one\n self._socket = self.context.socket(zmq.REQ)\n if hasattr(zmq, \"RECONNECT_IVL_MAX\"):\n self._socket.setsockopt(zmq.RECONNECT_IVL_MAX, 5000)\n\n self._set_tcp_keepalive()\n if self.master.startswith(\"tcp://[\"):\n # Hint PF type if bracket 
enclosed IPv6 address\n if hasattr(zmq, \"IPV6\"):\n self._socket.setsockopt(zmq.IPV6, 1)\n elif hasattr(zmq, \"IPV4ONLY\"):\n self._socket.setsockopt(zmq.IPV4ONLY, 0)\n self._socket.linger = self.linger\n if self.id_:\n self._socket.setsockopt(zmq.IDENTITY, self.id_)\n self._socket.connect(self.master)\n return self._socket", "def init_comm(self, obj):\n comm = None\n if self.dynamic or self.renderer.widget_mode == 'live':\n comm = self.renderer.comms[self.renderer.mode][0](self)\n attach_streams(self, obj)\n return comm", "def _build_network(self):\n pass", "def kafka_get_connection(self):\n return self.kf_producer", "def network(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network\")", "def get_communities(self):\n return self._communities.values()", "def __get_zmq_pub(self):\n print(\"Publishing to tcp://127.0.0.1:%d channel: tweets\" % self.port)\n context = zmq.Context()\n socket = context.socket(zmq.PUB)\n socket.bind(\"tcp://127.0.0.1:%d\" % self.port)\n return socket", "def _get_protocol_type(self):\n return self.__protocol_type", "def protocol(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"protocol\")", "def coordinator(self):\n return self._coordinator", "def returnNetworkNode(self):\n\n networkNodes = cmds.ls(type=\"network\")\n for node in networkNodes:\n attrs = cmds.listAttr(node)\n if \"moduleName\" in attrs:\n if cmds.getAttr(node + \".moduleName\") == self.name:\n networkNode = node\n\n return networkNode", "def protocol(self) -> str:\n return __name__", "def socket(self):\n return self._socket", "def build_socket(self):\n sock = socket(AF_UNIX, SOCK_SEQPACKET)\n sock.connect(self.my_id)\n\n return sock", "def get_client_protocol(self, machine_id: str):\n peer = self.peer_registry.peers.get(machine_id)\n return peer.peer if peer is not None else None", "def network_plugin(self) -> Optional[pulumi.Input[Union[str, 'NetworkPlugin']]]:\n return pulumi.get(self, \"network_plugin\")", "def getNet(self):\n\t\treturn self.loader", "def rpc(self):\n return self.interface.supervisor", "def component_configuration():\n return ProtocolConfig(\"a_protocol\", \"an_author\", \"0.1.0\")", "def remote_container(self):\n return pn_connection_remote_container(self._impl)", "def device(self):\n return next(self.parameters()).device", "def device(self):\n return next(self.parameters()).device", "def device(self):\n return next(self.parameters()).device", "def device(self):\n return next(self.parameters()).device" ]
[ "0.72166127", "0.6937673", "0.66808903", "0.65805095", "0.65750676", "0.6487292", "0.62338114", "0.62185067", "0.5998818", "0.5994917", "0.59699655", "0.5910966", "0.5877223", "0.5876523", "0.58492416", "0.58371776", "0.58371776", "0.58371776", "0.58118683", "0.5713875", "0.5690591", "0.56720567", "0.5654453", "0.564234", "0.5592814", "0.5585134", "0.5558269", "0.555408", "0.55481493", "0.55341876", "0.5513632", "0.55031043", "0.5502711", "0.54827654", "0.5470818", "0.54550695", "0.54546356", "0.54514533", "0.540118", "0.5383014", "0.5380213", "0.53729814", "0.53728575", "0.5362061", "0.5323845", "0.53048867", "0.5294056", "0.5288523", "0.52826697", "0.5264198", "0.52624536", "0.522741", "0.52238095", "0.5213549", "0.5185357", "0.51835614", "0.5182384", "0.518028", "0.5173877", "0.51714534", "0.5161185", "0.5153327", "0.51513463", "0.5147594", "0.51425666", "0.51420105", "0.51385504", "0.5135215", "0.51191634", "0.5111839", "0.5111498", "0.5105396", "0.51016265", "0.51008976", "0.5095083", "0.50938606", "0.50924534", "0.5088551", "0.50865054", "0.50861603", "0.5084659", "0.5078536", "0.50754476", "0.5071308", "0.5071308", "0.50698704", "0.50456136", "0.504455", "0.5040594", "0.5030093", "0.5026416", "0.50199133", "0.5019446", "0.50015", "0.49968657", "0.49966192", "0.4994114", "0.4994114", "0.4994114", "0.4994114" ]
0.64308983
6
returns ghost layer width.
def ghosts(self):
        return self.mesh.discretization.ghosts
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_width(self):\n\t\treturn len(self._background) if self._background else 0", "def get_dimension_width(self):\n pass", "def width(self) -> int:\n return self._obj[self.x_dim].size", "def getWidth(self):\n return _libsbml.Dimensions_getWidth(self)", "def getWidth(self):\n return self._image.width()", "def get_width(self) -> int:\n return int(self._surface.get_width())", "def width(self):\n self._updateExtents()\n return self._mWidth", "def getWidth(self):\n return self.width", "def getWidth(self):\n return self.width", "def getWidth(self):\n return self._width", "def width(self) -> int:\n return self._image_data.width", "def getWidth(self) -> int:\n ...", "def width(self):\n return (self.scene.shape[2] - self.size) // self.size + 1", "def getWidth(self):\n return _tkCall(self.image.width)", "def get_dimension_length(self):\n pass", "def tileWidth(self):\n return self._tileWidth", "def get_width(self):\r\n return self._width", "def get_width(self):\r\n return self._width", "def get_width(self):\r\n return self._width", "def width(self):\n return _libsbml.Dimensions_width(self)", "def width(self):\n return self.figure.scene.get_size()[0]", "def getWidth(self):\n return DEFAULT_WIDTH", "def get_width(self):\n return self.__width", "def get_width(self):\n return self._width", "def get_width(self):\n return self._width", "def get_width(self):\n return self._width", "def get_width(self):\n return self._width", "def get_width ( self ):\n return self.width", "def get_width(self):\n return self.width", "def layer_size(self, layer_id): # -> int:\n ...", "def getWidth(self):\n return len(self._data[0])", "def width(self) -> int:\r\n return self.rect_uv.w", "def getWidth(self):\n area = self.getArea()\n length = self.getLength()\n return area / length", "def width(self) :\n return self.m_width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width(self):\n return self.__width", "def width (self):\n return self._w", "def width(self):\n return len(self.mine_map[0])", "def getWidth(self):\n return constants.DEFAULT_WIDTH", "def getWidth(self):\n return _libsbml.BoundingBox_getWidth(self)", "def dimension_count(self):\n return self._dimensionCount", "def width(self):\n return self.x.max() - self.x.min()", "def getdim(self):\n return round(self.w() / self.c)", "def width(self):\n return self.board.shape[1]", "def get_grid_width(self):\n # replace with your code\n return self._width", "def get_grid_width(self):\n # replace with your code\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def width(self):\n return self._width", "def dimension_size(self):\n return self._dim", "def w(self):\n return self.width", "def width(self):\n 
return self.get_delta_value(self.X_INDEX)", "def size(self):\n return self.width", "def size(self):\n return self.width", "def size(self):\n return self.width", "def size(self):\n return self.width", "def size(self):\n return self.width", "def size(self):\n return self.width", "def size(self):\n return self.width", "def get_dim(self, name):\n return len(self.root_group.dimensions[name])", "def getWidth(self):\n\t\tif (self.position==[]):\n\t\t\treturn 0\n\t\treturn abs(self.position[1][0]-self.position[0][0])", "def get_grid_width(self):\r\n # replace with your code\r\n return self._width", "def w(self):\r\n return self.size.x", "def get_size(self):\n return self._surf.get_size()", "def GetWidth(self):\r\n\r\n return self._width", "def GetWidth(self):\r\n\r\n return self._width", "def dimension(self):\r\n a = 0\r\n for x in self.faces():\r\n if (len(x) > a):\r\n a = len(x) \r\n return a-1", "def width(self) -> float:\n return self._width", "def numPixels(self):\n\t\treturn self.size", "def numPixels(self):\n\t\treturn self.size", "def get_grid_width(self):\n # replace with your code\n return self._grid_width", "def get_grid_width(self):\n # replace with your code\n return self._grid_width", "def width(self):\n # type: () -> float\n return self._width", "def getLength(self):\n return self.geometry.length", "def dimension(self):\n\t\treturn self.d", "def width(self):\n return self.maxx - self.minx", "def get_layer_size(self, layer_ind):\n assert(layer_ind < self.num_layers)\n return self._layer_sizes[layer_ind]", "def width(self):\n return self.i_node.distance(self.j_node)", "def get_width(self):\n width = np.size(self.img, 0)\n return width", "def zone_width(self):\n return self._zone_width", "def size(self):\n return (self.width)" ]
[ "0.74909455", "0.73224", "0.71234566", "0.7045834", "0.69717586", "0.6938515", "0.69376487", "0.693364", "0.693364", "0.69244164", "0.692004", "0.68859637", "0.6870889", "0.6870504", "0.6858993", "0.6841644", "0.68349195", "0.68349195", "0.68349195", "0.6833873", "0.68273956", "0.6762272", "0.67585415", "0.67568374", "0.67568374", "0.67568374", "0.67568374", "0.674077", "0.6720321", "0.67189044", "0.67145693", "0.6695148", "0.66685385", "0.6666819", "0.6666446", "0.6666446", "0.6666446", "0.6666446", "0.6666446", "0.6666446", "0.6666446", "0.6666446", "0.6666446", "0.6666446", "0.6666446", "0.6666446", "0.6665721", "0.6663701", "0.666361", "0.6654473", "0.66510254", "0.6644691", "0.66396534", "0.66325045", "0.6631628", "0.6631628", "0.6627472", "0.6627472", "0.6627472", "0.6627472", "0.6627472", "0.6627472", "0.6627472", "0.6627472", "0.6627472", "0.6627472", "0.6627472", "0.6627472", "0.6627472", "0.66271824", "0.66171646", "0.6601387", "0.6587966", "0.6587966", "0.6587966", "0.6587966", "0.6587966", "0.6587966", "0.6587966", "0.6583706", "0.6582437", "0.6580969", "0.6578992", "0.65776366", "0.65605956", "0.65605956", "0.6560368", "0.6545597", "0.65415007", "0.65415007", "0.6539639", "0.6539639", "0.65378845", "0.65332687", "0.65331274", "0.6530725", "0.6526173", "0.65156776", "0.65067047", "0.65025216", "0.65006715" ]
0.0
-1
returns id of the task that owns this topology
def task_id(self):
        return self._mpis.task_id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def task_id(self):\n return self._task_id", "def task_id(self):\n return self._task_id", "def task_id(self):\n return self._task_id", "def task_id(self):\n return self._task_id", "def taskid(self):\n raise NotImplementedError('Must be implemented by subclass.')", "def task_id(self) -> str:\n return self.get_from_redis(\"task_id\")", "def get_task_id(self):\n if self.task_id:\n return self.task_id\n return (f'{self.task_type}_{self.get_source_system().lower()}'\n f'.{self.get_source_subsystem().lower()}.{self.get_name().upper()}')", "def getTaskZoneId(self):\n return self.getZoneId()", "def _make_task_id(self, task):\n index = self._tasks.add(task)\n task_id = '{name}-{idx}'.format(name=task.name, idx=index)\n\n return task_id", "def dst_task_id(self):\n return struct.unpack('<H', self.pkt.payload[2:4])[0]", "def src_task_id(self):\n return struct.unpack('<H', self.pkt.payload[4:6])[0]", "def dag_id(self):\n if self.parallelize_task:\n return f'{self.job_id_extension.get_parallel(self.job_id)}'\n else:\n return f'{self.job_id_extension.get_preparation(self.job_id)}'", "def get_id(self):\n for id, thread in threading._active.items(): \n if thread is self: \n return id", "def get_id(self):\n if hasattr(self, \"_thread_id\"):\n return self._thread_id\n for id, thread in threading._active.items():\n if thread is self:\n return id", "def get_id(self):\n if hasattr(self, \"_thread_id\"):\n return self._thread_id\n for id, thread in threading._active.items():\n if thread is self:\n return id", "def get_task_id(self, position):\n task_id = self.stn.get_task_id(position)\n if task_id:\n return task_id\n else:\n raise TaskNotFound", "def get_task_uuid(self):\n\t\treturn call_sdk_function('PrlRunningTask_GetTaskUuid', self.handle)", "def thread_id(self):\n return self._thread_id", "def create_task_id():\n return str(int(round(time.time() * 10**9)))", "def pool_id ( self ):\n return self._pool_id", "def node_id(self) -> int:\r\n return self._node_id", "def unique_id(self) -> str:\n return str(self.coordinator.gios.station_id)", "def get_task_index(self):\n return self.task_index", "def unique_id(self):\n return self._id", "def unique_id(self):\n return self._id", "def get_id(self):\n\n\t\treturn self.__id", "def establish_id(self):\n if self.config.node_id is None:\n self.config.node_id = str(uuid4()).replace('-', '')\n return self.config.node_id", "def _get_id(self):\n return self.id", "def portlet_id(self):\n return id(self)", "def id(self):\n\t\treturn self.__id", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def identity(self):\n return self.id", "def get_threadbased_id(guarantee_uniq=False):\n\n return '{}:{}:{}:{}'.format(platform.node(), os.getpid(), str(threading.get_ident()),uuid.uuid4().hex if guarantee_uniq else '')", "def get_id(self):\n return self.__id", "def get_id(self):\n return self.__id", "def getid(self):\n return self.__id", "def get_parent_id():\n return getattr(threadlocal, \"parent_id\", None)", "def task(self) -> str:\n return self._task", "def topology_name(self):\n return self._topology_name", "def task(self):\n return self._task", "def task(self):\n return self._task", "def task(self):\n return self._task", "def task(self):\n return self._task", "def task(self):\n return self._task", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n 
return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def get_target(self):\n task = self.task.get_task(self.task_id)\n if 'name' in task:\n return str(task['name'])\n return str(task)", "def id(self):\n return self.__id", "def draw_task(self):\n task = self.taskList[0]\n #for agentObj in self.agents:\n # agentObj.set_current_task(task)\n self.agent.set_current_task(task)\n del self.taskList[0]\n return task", "def get_output_task(self, name='0'):\n port = self.get_output(name).other\n if port is None:\n return None\n return port.task", "def _get_current_task():\r\n return current_task", "def _get_current_task():\r\n return current_task", "def getRunningId(self):\n return( int(self.id.split('.')[2]) )", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id" ]
[ "0.75476515", "0.75476515", "0.75476515", "0.75476515", "0.7357828", "0.72557056", "0.7198645", "0.6637944", "0.6620676", "0.65653884", "0.6474", "0.6436455", "0.63189775", "0.6293882", "0.6293882", "0.62764555", "0.6194142", "0.61492884", "0.6114325", "0.611248", "0.60441566", "0.604313", "0.6041296", "0.59985995", "0.59985995", "0.5988955", "0.59788394", "0.5969808", "0.5941835", "0.59410745", "0.59353393", "0.59353393", "0.59353393", "0.59353393", "0.5932347", "0.59316975", "0.5930219", "0.5930219", "0.5929875", "0.5927883", "0.59134746", "0.5911602", "0.5897528", "0.5897528", "0.5897528", "0.5897528", "0.5897528", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854902", "0.5854396", "0.5852411", "0.5829994", "0.58296573", "0.58276826", "0.58276826", "0.58250964", "0.58201337", "0.58201337", "0.58201337", "0.58201337", "0.58201337", "0.58201337", "0.58201337", "0.58201337", "0.58201337", "0.58201337" ]
0.75019264
4
Defines a 'plane' (1D) topology for a given mesh resolution. This function is to be used when topo/discretization features come from an external routine (e.g. scales or fftw)
def plane_precomputed(cls, localres, global_start, cdir=None, **kwds):
        msg = 'parameter is not required for plane_precomputed'
        msg += ' topology construction.'
        assert 'dim' not in kwds, 'dim ' + msg
        assert 'shape ' not in kwds, 'shape ' + msg
        assert 'cutdir ' not in kwds, 'cutdir ' + msg
        # Local mesh :
        global_start = npw.asdimarray(global_start)
        localres = npw.asdimarray(localres)
        mesh = Mesh(kwds['domain'], kwds['discretization'],
                    localres, global_start)
        # MPI layout
        domain = kwds['domain']
        cutdir = npw.zeros(domain.dimension, dtype=npw.bool)
        if cdir is not None:
            cutdir[cdir] = True
        else:
            if ORDER == 'C':
                cutdir[0] = True
            else:
                cutdir[-1] = True
        return cls(mesh=mesh, cutdir=cutdir, **kwds)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def WritePlane(self):\n if not self.__train:\n print('ERROR: Must use Train before WritePlane')\n sys.exit(-1)\n if not self.__openPlaneO:\n print('ERROR: Must use OpenPlaneO before WritePlane')\n sys.exit(-1)\n\n # Defines angular dimensions\n self.__nc_RSoft_O.createDimension('type', self.__n_type)\n\n # Defines variables\n if self.__containsRadial:\n rad_plane_id = self.__nc_RSoft_O.createVariable(\\\n 'radial_plane', 'f4', \\\n ('type','radial_structure_functions'))\n rad_plane_id[:] = self.radial_plane\n if self.__containsAngular:\n ang_plane_id = self.__nc_RSoft_O.createVariable(\\\n 'angular_plane', 'f4', \\\n ('type','angular_structure_functions'))\n ang_plane_id[:] = self.angular_plane\n intercept_id_O = self.__nc_RSoft_O.createVariable(\\\n 'intercept', 'f4', ('type'))\n intercept_id_O[:] = self.intercept", "def polyPlane(*args, axis: Union[List[float, float, float], bool]=None, createUVs: Union[int,\n bool]=1, height: Union[float, bool]=1.0, subdivisionsHeight: Union[int, bool]=0,\n subdivisionsWidth: Union[int, bool]=10, subdivisionsX: Union[int, bool]=5,\n subdivisionsY: Union[int, bool]=5, texture: Union[int, bool]=1, width:\n Union[float, bool]=1.0, caching: bool=True, constructionHistory: bool=True, name:\n AnyStr=\"\", nodeState: Union[int, bool]=0, object: bool=True, q=True, query=True,\n e=True, edit=True, **kwargs)->Union[List[AnyStr], Any]:\n pass", "def _prepare_plane(self):\n verticies = [\n # main plane - note that the mainplane is scaled so the mat_plane\n # matrix will it transform to the correct coordinates\n -self.i_border[0]/self._scaling[0], self.i_border[1]/self._scaling[1],\n -self.i_border[0]/self._scaling[0], -(self.o_wh[1]-self.i_border[1])/self._scaling[1],\n (self.o_wh[0]-self.i_border[0])/self._scaling[0], -(self.o_wh[1]-self.i_border[1])/self._scaling[1],\n (self.o_wh[0]-self.i_border[0])/self._scaling[0], -(self.o_wh[1]-self.i_border[1])/self._scaling[1],\n (self.o_wh[0]-self.i_border[0])/self._scaling[0], self.i_border[1]/self._scaling[1],\n -self.i_border[0]/self._scaling[0], self.i_border[1]/self._scaling[1],\n\n # coord plane\n 0, 0,\n 0, -self.o_wh[1],\n self.o_wh[0], -self.o_wh[1],\n self.o_wh[0], -self.o_wh[1],\n self.o_wh[0], 0,\n 0, 0,\n\n # axes\n 0, -self.o_wh[1], self.o_wh[0], -self.o_wh[1], #x\n 0, 0, 0, -self.o_wh[1], #y\n ]\n\n colors = [\n 1.0, 1.0, 1.0, 1.0, # outer box XXX Remove outer box...\n 1.0, 1.0, 1.0, 1.0,\n 1.0, 1.0, 1.0, 1.0,\n 1.0, 1.0, 1.0, 1.0,\n 1.0, 1.0, 1.0, 1.0,\n 1.0, 1.0, 1.0, 1.0,\n .9, .9, .9, 9.0, # plot box\n .9, .9, .9, 9.0,\n .9, .9, .9, 9.0,\n .9, .9, .9, 9.0,\n .9, .9, .9, 9.0,\n .9, .9, .9, 9.0,\n 0.0, 0.0, 0.0, 1.0, #lines\n 0.0, 0.0, 0.0, 1.0,\n 0.0, 0.0, 0.0, 1.0,\n 0.0, 0.0, 0.0, 1.0,\n ]\n\n self._fonts = []\n for u in range(1, self._unit_count[0]+1):\n verticies.append(self._unit_w[0]*u)\n verticies.append(-self.o_wh[1]+0.02)\n verticies.append(self._unit_w[0]*u)\n verticies.append(-self.o_wh[1]-0.02)\n colors += [0.0, 0.0, 0.0, 1.0]\n colors += [0.0, 0.0, 0.0, 1.0]\n self._fonts.append([\n '{:.2f}'.format(u*(self.i_axis[0]/self._unit_count[0])-self.i_origin[0]),\n (self._unit_w[0]*u+self.i_border[0]-0.05)*self._scaling[0],\n (-self.o_wh[1]+(self.i_border[3])*0.5)\n ])\n for u in range(0, self._unit_count[1]):\n verticies.append(0.02)\n verticies.append(-self._unit_w[1]*u)\n verticies.append(-0.02)\n verticies.append(-self._unit_w[1]*u)\n colors += [0.0, 0.0, 0.0, 1.0]\n colors += [0.0, 0.0, 0.0, 1.0]\n self._fonts.append([\n 
'{:.2f}'.format(self.i_axis[1]-u*self.i_axis[1]/self._unit_count[1]-self.i_origin[1]),\n (0.025)*self._scaling[0],\n (-(self._unit_w[1])*u-self.i_border[1]+0.01)*self._scaling[1]\n ])\n\n self._draw_plane_indicies = (0, 12)\n self._draw_line_indicies = (12, 4+self._unit_count[0]*2+self._unit_count[1]*2)\n\n # convert data into valid data format\n verticies = numpy.array(verticies, dtype=numpy.float32)\n colors = numpy.array(colors, dtype=numpy.float32)\n\n self._plane_vao = util.VAO()\n self._plane_vbo = util.VBO(2)\n\n with self._plane_vao:\n # plane verticies\n with self._plane_vbo.get(0):\n glBufferData(GL_ARRAY_BUFFER, ArrayDatatype.arrayByteCount(verticies), verticies, GL_STATIC_DRAW)\n glVertexAttribPointer(self.plane_shader.attributeLocation('vertex_position'), 2, GL_FLOAT, GL_FALSE, 0, None)\n glEnableVertexAttribArray(0)\n\n # place vertex colors\n with self._plane_vbo.get(1):\n glBufferData(GL_ARRAY_BUFFER, ArrayDatatype.arrayByteCount(colors), colors, GL_STATIC_DRAW)\n glVertexAttribPointer(self.plane_shader.attributeLocation('vertex_color'), 4, GL_FLOAT, GL_FALSE, 0, None)\n glEnableVertexAttribArray(1)", "def vplane(self, fig=None):\n #TODO more general multi-axis layout...\n figsize = (9, 6.5) # good for letter paper\n if fig is None: fig = plt.figure(figsize=figsize)\n else: fig.set_size_inches(*figsize)\n axkw = dict(frameon = True)\n left, width = 0.075, 0.6\n bh = 0.11\n pad = 0.04\n depth_ax = fig.add_axes((left, 6*pad+4.5*bh, width, bh*2), **axkw)\n axkw.update(dict(sharex = depth_ax))\n pitch_ax = fig.add_axes((left, 5*pad+3.5*bh, width, bh), **axkw)\n buoyancy_ax = fig.add_axes((left, 4*pad+2.5*bh, width, bh), **axkw)\n mass_ax = fig.add_axes((left, 3*pad + 1.5*bh, width, bh), **axkw)\n control_surface_ax = fig.add_axes((left, 2*pad + bh/2, width, bh), **axkw)\n control_mode_ax = fig.add_axes((left, pad, width, bh/2), **axkw)\n # TODO adjust scale and coverage for each axes\n # TODO do this again now that middle labels are removed\n\n self.plot_timeseries('depth', '-', axes=depth_ax)\n self.plot_timeseries('platform_pitch_angle', axes=pitch_ax)\n self.plot_timeseries('platform_mass_position', axes=mass_ax)\n self.plot_timeseries('platform_buoyancy_position', axes=buoyancy_ax)\n self.plot_timeseries('platform_elevator_angle', axes=control_surface_ax)\n # TODO Include another panel with VerticalControl mode (iff present)\n\n # TODO only if engineering data is requested...\n ### add to depth axes ###\n depth_science = {\n 'Depth_Keller/depth': 'c-',\n 'CTD_NeilBrown/depth': 'k-',\n 'Depth_MSI_US300/depth': 'm-'}\n for k, v in depth_science.items():\n try: self.plot_timeseries(k, v, axes=depth_ax)\n except: print('no {0}'.format(k))\n\n depth_engineering = {\n 'VerticalControl/smoothDepthInternal': 'r-',\n 'VerticalControl/depthCmd': 'g-',\n 'VerticalControl/depthErrorInternal': 'g:'}\n for k, v in depth_engineering.items():\n try: self.plot_timeseries(k, v, axes=depth_ax)\n except: print('no {0}'.format(k))\n # TODO only if sw debug flag is set \n depth_rate_engineering = {\n 'VerticalControl/depthRateCmd': 'gray',\n 'VerticalControl/depth_rate': 'gray', # XXX why same color?\n }\n for k, v in depth_rate_engineering.items():\n try: \n self.plot_timeseries(k, vi, axes=depth_ax, \n convert=oalib.make_multiplier(100))\n except: print('no {0}'.format(k))\n ### add to pitch axes ###\n pitch_engineering = {\n 'AHRS_sp3003D/platform_pitch_angle': 'k-', \n 'DVL_micro/platform_pitch_angle': 'm-',\n 'AHRS_3DMGX3/platform_pitch_angle': 'c-',\n 'InternalSim/platform_pitch_angle': 
':r',\n }\n for k, v in pitch_engineering.items():\n try: self.plot_timeseries(k, v, axes=pitch_ax)\n except: print('no {0}'.format(k))\n ### add to mass axes ###\n mass_engineering = {\n 'VerticalControl/massPositionAction': 'g-', \n 'VerticalControl/massIntegralInternal': 'c-',\n 'MassServo/platform_mass_position': 'r-',\n #'VerticalControl/massPitchErrorInternal': ':r',\n }\n for k, v in mass_engineering.items():\n try: self.plot_timeseries(k, v, axes=mass_ax)\n except: print('no {0}'.format(k))\n ### add to buoyancy axes ###\n buoyancy_engineering = {\n 'VerticalControl/buoyancyAction': 'm-',\n 'BuoyancyServo/platform_buoyancy_position': 'b-',\n }\n for k, v in buoyancy_engineering.items():\n try: \n self.plot_timeseries(k, v,\n# convert=oalib.make_multiplier(-10), \n axes=buoyancy_ax)\n except: print('no {0}'.format(k))\n ### add to control surface axes ###\n control_surface_engineering = {\n 'VerticalControl/elevatorAngleAction': 'm-', \n 'VerticalControl/elevatorIntegralInternal': 'm:',\n 'ElevatorServo/platform_elevator_angle': 'c-',\n }\n for k, v in control_surface_engineering.items():\n try: \n self.plot_timeseries(k, v, convert = np.rad2deg, \n axes=control_surface_ax)\n except: print('no {0}'.format(k))\n \n\n # TODO only if supporting data is requested\n ### add other supporting data ###\n try: self.plot_timeseries('CTD_NeilBrown/depth', 'k-', axes=depth_ax)\n except: print('no CTD_NeilBrown/depth')\n try: self.plot_timeseries('Depth_MSI_US300', 'm-', axes=depth_ax)\n except: print('no Depth_MSI_US300')\n\n\n ### print additional information ###\n buoyancyNeutral = ('Config/Control/buoyancyNeutral',\n 'Config/Servo/buoyancyNeutral')\n for s in buoyancyNeutral:\n try:\n print('{0} = {1} {2}'.format(s, self[s+'/value'], self[s+'/units']))\n except:\n print('{0} not found'.format(s))\n \n# VertMd(0=N/A,1=Surf,2=Dep,3=DepRt,4=Pit0,5=Pit,6=PitRt,7=M&E,8=Flt),\n# VertHoldMd(0=N/A,1=Ms,2=El,3=Both)\n try:\n v, t = self.timeseries('VerticalControl/verticalMode')\n oalib.plot_date_blocks(t, v, axes=control_mode_ax, colormap=mpl.cm.jet)\n except: print('VerticalControl/verticalMode not found')\n\n depth_ax.invert_yaxis()\n for ax in fig.get_axes():\n ax.grid(True)\n try:\n ax.legend(loc='center left', bbox_to_anchor=(1, 0.5),\n fontsize='small')\n except:\n print('uncaught exception for legend...')\n for ax in fig.get_axes()[:-1]:\n plt.setp(ax.get_xticklabels(), visible=False)\n\n depth_ax.set_title(os.path.basename(self.filename))\n control_mode_ax.xaxis.set_major_formatter(mpl.dates.DateFormatter('%H:%M'))\n plt.setp(control_mode_ax.get_xticklabels(), rotation=30,\n fontsize='small')", "def create_surface_plane(align_to = None, axis = 'x', width = 0.5, freeze_tm = True):\n axis_dict = {'x': (1, 0, 0), 'y': (0, 1, 0), 'z': (0, 0, 1)}\n res = pm.nurbsPlane(\n axis = axis_dict[axis],\n width = width,\n degree = 1,\n constructionHistory = False\n )[0]\n if align_to is not None:\n transformation.align(res, align_to)\n if freeze_tm:\n transformation.freeze_transform(res)\n return res", "def test_create(self):\n f = azplugins.restrain.plane(group=hoomd.group.all(), point=(0,0,0), normal=(1,0,0), k=2.0)\n\n f.set_params(k=5.0)\n f.set_params(k=8)\n\n f.set_params(point=(0,0,1))\n f.set_params(point=[0,0,1])\n f.set_params(point=np.array([0,0,1]))\n\n f.set_params(normal=(0,0,1))\n f.set_params(normal=[0,0,1])\n f.set_params(normal=np.array([0,0,1]))\n\n f.set_params(point=(0,0,0), normal=(1,0,0), k=10.0)", "def build_topology(self):\n# errstr = \"build_topology() is not implemented.\\n\"\n# 
errstr += textwrap.dedent(self.build_topology.__doc__)\n# raise NotImplementedError(errstr)\n pass # May be a 1-compartment neuron. No need to abstract. ", "def create_plane(self):\n\n # First we calculate our point increment for both the x and y values\n inc_x = (self.xmax - self.xmin)/(self.xlen - 1)\n inc_y = (self.ymax - self.ymin)/(self.ylen - 1)\n\n # This for-loop will add every x-value with every y-value, saving the values column wise\n # i.e. (-10,-10), (-10,-9), (-10.-8),...,(-10,n) for n = our y-values.\n # store these combinations into a list, and add that to our plane. \n # The nested loop will then traverse again and will get the combinations for the next x-value.\n # The loop will continue until all x-values and y-value combinations are added to our plane.\n for y in range(0, self.ylen + 1):\n temp_list = []\n for x in range(0, self.xlen + 1):\n temp_list.append(self.f((self.xmin + x*inc_x) + (self.ymin + y*inc_y)*1j))\n self.plane.append(temp_list)", "def plane(self):\n return plane(self.N, self.o)", "def GetPlane(plane):\r\n pass", "def plane(*args, length: float=0.0, name: AnyStr=\"\", position: List[float, float, float]=None,\n rotation: List[float, float, float]=None, size: float=0.0, width: float=0.0,\n **kwargs)->AnyStr:\n pass", "def plane(self):\r\n from lsst.analysis import utils\r\n return utils.fitplane(self.points, self.z)", "def SetPlaneMode(self, *args):\n return _ShapeUpgrade.ShapeUpgrade_ShapeConvertToBezier_SetPlaneMode(self, *args)", "def build_topology(config):\n top_type = config['Metapopulation']['topology']\n assert top_type.lower() in ['moore', 'vonneumann', 'smallworld',\n 'complete', 'regular']\n\n if top_type.lower() == 'moore':\n return build_topology_moore(config)\n elif top_type.lower() == 'vonneumann':\n return build_topology_vonneumann(config)\n elif top_type.lower() == 'smallworld':\n return build_topology_smallworld(config)\n elif top_type.lower() == 'complete':\n return build_topology_complete(config)\n elif top_type.lower() == 'regular':\n return build_topology_regular(config)", "def create_plot_plane_2d(axis=(1.0, 1.0), origin=(0.0,0.0), size=(2.0,2.0)):\n ft = ImageFont.truetype (FONT_RESOURCES_DIR+\"/courier.ttf\", 12)\n gl_font = GlFont('', ft)\n gl_font.color = [0.0, 0, 0, 1.0]\n gl_plot = PlotPlane2d(gl_font)\n gl_plot.i_axis = axis\n gl_plot.i_origin = origin\n gl_plot.o_wh = size\n gl_plot.i_axis_units = (axis[0]/10, axis[1]/10)\n\n gl_plot.prepare()\n return gl_plot", "def nurbsPlane(*args, axis: Union[List[float, float, float], bool]=None, caching: bool=True,\n degree: Union[int, bool]=3, lengthRatio: Union[float, bool]=1.0, nodeState:\n Union[int, bool]=0, patchesU: Union[int, bool]=1, patchesV: Union[int, bool]=1,\n pivot: Union[List[float, float, float], bool]=None, width: Union[float,\n bool]=1.0, constructionHistory: bool=True, name: AnyStr=\"\", object: bool=True,\n polygon: int=0, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[List[AnyStr], Any]:\n pass", "def image_plane_mesh_grid_from(\r\n self,\r\n image_plane_data_grid: Grid2D,\r\n adapt_data: np.ndarray = None,\r\n settings=SettingsPixelization(),\r\n ):\r\n return None", "def SetPlaneMode(self, *args):\n return _ShapeUpgrade.ShapeUpgrade_ConvertSurfaceToBezierBasis_SetPlaneMode(self, *args)", "def p(self):\n return 'Plane'", "def GetPlaneMode(self, *args):\n return _ShapeUpgrade.ShapeUpgrade_ShapeConvertToBezier_GetPlaneMode(self, *args)", "def create_fixed_distance_to_plane_constraint():\n return FixedDistanceToPlaneConstraint()", "def __init__(self):\n 
Page.__init__(self, u\"Paraboloide Elíptico<br><br>F(x,y)=(x, y, x<sup>2</sup>/a<sup>2</sup> + y<sup>2</sup>/b<sup>2</sup>)\")\n\n z = 0.5\n par = RevolutionPlot3D(lambda r, t: r ** 2 + z, (0, 1), (0, 2 * pi))\n\n x, y, z2, u, v, cose, sen, t = createVars(['x', 'y', 'z', 'u', 'v', 'cos', 'sen', 't'])\n\n mesh1 = Plot3D(lambda x, y, h: h * (x ** 2 + y ** 2 + z - .01), (-1, 1), (-1, 1))\n mesh1.addEqn(x**2+y**2 - z2**2 == 1)\n mesh1.addFunction(lambda x, y, h: h * (x ** 2 + y ** 2 + z + .01))\n mesh1.setLinesVisible(True)\n mesh1.setMeshVisible(False)\n mesh1.setBoundingBox(zrange=(-1, 1.5))\n par.setAmbientColor(_1(145, 61, 74))\n par.setDiffuseColor(_1(145, 61, 74))\n par.setSpecularColor(_1(145, 61, 74))\n baseplane = BasePlane()\n baseplane.setHeight(0)\n baseplane.setRange((-2, 2, 7))\n\n self.addChild(par)\n self.addChild(mesh1)\n self.addChild(baseplane)", "def plane(self):\n return Plane(Point(0, self.evaluations.exposedWing.edges[2].point1.y, 0), Vector(0, 1, 0),\n hidden=True)", "def network_topology(voxels, clusters, primaries, edges, mode='sphere'):\n # Define the arrays of node positions (barycenter of voxels in the cluster)\n pos = np.array([voxels[c].cpu().numpy().mean(0) for c in clusters])\n\n # Define the node features (label, color)\n n = len(clusters)\n node_labels = ['%d (%0.1f, %0.1f, %0.1f)' % (i, pos[i,0], pos[i,1], pos[i,2]) for i in range(n)]\n \n node_colors = ['#ff7f0e' if i in primaries else '#1f77b4' for i in range(n)]\n\n # Define the nodes and their connections\n graph_data = []\n edge_vertices = []\n if mode == 'sphere':\n # Define the node size\n logn = np.array([np.log(len(c)) for c in clusters])\n node_sizes = np.interp(logn, (logn.min(), logn.max()), (5, 50))\n \n # Define the nodes as sphere of radius proportional to the log of the cluster voxel content\n graph_data.append(go.Scatter3d(x = pos[:,0], y = pos[:,1], z = pos[:,2],\n name = 'clusters',\n mode = 'markers',\n marker = dict(\n symbol = 'circle',\n size = node_sizes,\n color = node_colors,\n colorscale = 'Viridis',\n line = dict(color='rgb(50,50,50)', width=0.5)\n ),\n text = node_labels,\n hoverinfo = 'text'\n ))\n\n # Define the edges center to center\n edge_vertices = np.concatenate([[pos[i], pos[j], [None, None, None]] for i, j in zip(edges[0], edges[1])])\n\n elif mode == 'hull':\n # For each cluster, add the convex hull of all its voxels\n graph_data += [go.Mesh3d(alphahull =10.0,\n name = '',\n x = voxels[c][:,0],\n y = voxels[c][:,1],\n z = voxels[c][:,2],\n color = node_colors[i],\n opacity = 0.3,\n text = node_labels[i],\n hoverinfo = 'text'\n ) for i, c in enumerate(clusters)]\n\n # Define the edges closest pixel to closest pixel\n import scipy as sp\n edge_vertices = []\n for i, j in zip(edges[0], edges[1]):\n vi, vj = voxels[clusters[i]], voxels[clusters[j]]\n d12 = sp.spatial.distance.cdist(vi, vj, 'euclidean')\n i1, i2 = np.unravel_index(np.argmin(d12), d12.shape)\n edge_vertices.append([vi[i1].cpu().numpy(), vj[i2].cpu().numpy(), [None, None, None]])\n \n edge_vertices = np.concatenate(edge_vertices)\n \n else:\n raise ValueError\n \n # Initialize a graph that contains the edges\n graph_data.append(go.Scatter3d(x = edge_vertices[:,0], y = edge_vertices[:,1], z = edge_vertices[:,2],\n mode = 'lines',\n name = 'edges',\n line = dict(\n color = 'rgba(50, 50, 50, 0.5)',\n width = 1\n ),\n hoverinfo = 'none'\n ))\n\n # Return\n return graph_data", "def set_topology(self, mapsize = None, mapshape = 'planar', lattice = 'rect', mask = None, compname = None):\n self.mapshape = mapshape\n 
self.lattice = lattice\n \n #to set mask\n if mask == None: \n self.mask = np.ones([1,self.dim])\n else:\n self.mask = mask\n \n \n #to set map size\n if mapsize == None: \n tmp = int(round(np.sqrt(self.dlen)))\n self.nnodes = tmp\n self.mapsize = [int(3./5*self.nnodes), int(2./5*self.nnodes)]\n else:\n if len(mapsize)==2:\n if np.min(mapsize) == 1:\n self.mapsize = [1, np.max(mapsize)]\n else: \n self.mapsize = mapsize\n elif len(mapsize) == 1:\n #s = int (mapsize[0]/2)\n self.mapsize = [1 ,mapsize[0]]\n print 'input was considered as node numbers'\n print 'map size is [{0},{1}]'.format(s,s) \n self.nnodes = self.mapsize[0]*self.mapsize[1]\n \n # to set component names\n if compname == None: \n try:\n cc = list()\n for i in range(0,self.dim):\n cc.append ('Variable-'+ str(i+1))\n self.compname = np.asarray(cc)[np.newaxis,:]\n except:\n pass\n print 'no data yet: plesae first set trainign data to the SOM'\n else:\n try:\n dim = getattr(self,'dim')\n if len(compname) == dim:\n self.compname = np.asarray(compname)[np.newaxis,:]\n else:\n print 'compname should have the same size'\n except:\n pass\n print 'no data yet: plesae first set trainign data to the SOM'", "def get_plane(dset, xaxis, yaxis, slices, **kw):\n\n # Build quad meshes from sorted grids\n xgrid = dset.dims[xaxis][0][indices[xaxis]]\n ygrid = dset.dims[yaxis][0][indices[yaxis]]\n xorder = np.argsort(xgrid)\n yorder = np.argsort(ygrid)\n xmesh, ymesh = quad_mesh(xgrid[xorder], ygrid[yorder], **kw)\n\n # Select and arrange data\n data = dset[slices]\n if xi < yi:\n data = data.T\n data = data[yorder]\n data = data[:, xorder]\n\n return xmesh, ymesh, data", "def plot_mpr_topology(options, tags=None, cursor=None):\n options['cur_src'] = 'topo'\n options['prefix'] = \"mpr\"\n ################################################################################\n locs = options['locs']\n colors = options['color2'](pylab.linspace(0, 1, 101))\n ################################################################################\n circ_max = 5\n line_max = 10\n floor_factor = 2\n floor_skew = -0.25\n line_min = 1\n\n hosts = get_hosts(options)\n mprs = cursor.execute('''\n SELECT DISTINCT(host)\n FROM nhdp_mpr_selectors\n ''').fetchall()\n\n for q, (tag_key, tag_id, nhdp_hi, nhdp_ht, mpr_minpdr) in enumerate(tags):\n logging.info('tag_id=\\\"%s\\\" (%d/%d)', tag_id, q+1, len(tags))\n min_max_time = cursor.execute('''\n SELECT min(time), max(time)\n FROM nhdp_he\n WHERE tag=?\n ''',(tag_key,)).fetchone()\n fig2d = MyFig(options, rect=[0.1, 0.1, 0.8, 0.7], xlabel='x Coordinate', ylabel='y Coordinate')\n fig3d = MyFig(options, rect=[0.1, 0.1, 0.8, 0.7], xlabel='x Coordinate', ylabel='y Coordinate', ThreeD=True)\n if not q:\n fig3d_onlynodes = MyFig(options, xlabel='x Coordinate [m]', ylabel='y Coordinate [$m$]', ThreeD=True)\n\n min_x = min_y = min_z = numpy.infty\n max_x = max_y = max_z = 0\n\n # first draw the edges...\n for nr, (host) in enumerate(hosts):\n logging.info(' [%d/%d] drawing edges for host=%s', nr+1, len(hosts), host)\n try:\n host_xpos, host_ypos, host_zpos = locs[host]\n except KeyError:\n logging.warning('no position found for node %s', host)\n continue\n ################################################################################\n # host is the receiving router, i.e. in our case the MPR\n # src is the sending router, i.e. 
the MPR selector\n # We only want to draw an edge if it connects an MPR with its SELECTOR.\n #\n ################################################################################\n cursor.execute('''\n SELECT DISTINCT(src), pdr\n FROM eval_helloPDR AS pdr JOIN nhdp_mpr_selectors AS mpr\n ON pdr.host = mpr.host AND pdr.tx_if = mpr.mprselector\n WHERE pdr.tag_key=? AND pdr.host=? AND mpr.time BETWEEN ? AND ?\n ''', (tag_key, host, min_max_time[0], min_max_time[1]))\n for src, pdr in cursor.fetchall():\n try:\n src_xpos, src_ypos, src_zpos = locs[src]\n except KeyError:\n logging.warning('no position found for node %s', src)\n continue\n\n fig2d.ax.plot(\n [host_xpos+host_zpos*floor_skew*floor_factor, src_xpos+src_zpos*floor_skew*floor_factor],\n [host_ypos+host_zpos*floor_factor, src_ypos+src_zpos*floor_factor],\n linestyle='-', color=colors[pdr*100], linewidth=max(line_max*pdr, line_min), alpha=0.3)\n\n fig3d.ax.plot(\n [host_xpos, src_xpos],\n [host_ypos, src_ypos],\n [host_zpos, src_zpos],\n linestyle='-', color=colors[pdr*100], linewidth=max(line_max*pdr, line_min), alpha=0.3)\n\n # draw the nodes\n for host in hosts:\n logging.info(' [%d/%d] drawing node %s', nr+1, len(hosts), host)\n try:\n xpos, ypos, zpos = locs[host]\n except KeyError:\n logging.warning('no position found for node %s', host)\n continue\n\n max_x = max(xpos, max_x)\n max_y = max(ypos, max_y)\n min_x = min(xpos, min_x)\n min_y = min(ypos, min_y)\n max_z = max(zpos, max_z)\n min_z = max(zpos, min_z)\n if (host,) in mprs:\n fig3d.ax.plot([xpos], [ypos], [zpos], 'o', color='blue', ms=circ_max)\n fig2d.ax.plot(xpos+zpos*floor_skew*floor_factor,ypos+zpos*floor_factor,'o', color='blue', ms=circ_max)\n else:\n fig3d.ax.plot([xpos], [ypos], [zpos], 'o', color='black', ms=circ_max)\n fig2d.ax.plot(xpos+zpos*floor_skew*floor_factor,ypos+zpos*floor_factor,'o', color='black', ms=circ_max)\n if not q:\n color = 'black'\n if host.startswith('a6'):\n color = 'red'\n elif host.startswith('a3'):\n color = 'blue'\n elif host.startswith('a7'):\n color = 'orange'\n fig3d_onlynodes.ax.plot([xpos], [ypos], [zpos], 'o', color=color, ms=circ_max)\n fig2d.colorbar = fig2d.fig.add_axes([0.1, 0.875, 0.8, 0.025])\n fig3d.colorbar = fig3d.fig.add_axes([0.1, 0.875, 0.8, 0.025])\n drawBuildingContours(fig3d.ax, options)\n drawBuildingContours(fig3d_onlynodes.ax, options)\n\n alinspace = numpy.linspace(0, 1, 100)\n alinspace = numpy.vstack((alinspace, alinspace))\n for tax in [fig2d.colorbar, fig3d.colorbar]:\n tax.imshow(alinspace, aspect='auto', cmap=options['color2'])\n tax.set_xticks(pylab.linspace(0, 100, 5))\n tax.set_xticklabels(['$%.2f$' % l for l in pylab.linspace(0, 1, 5)], fontsize=0.8*options['fontsize'])\n tax.set_yticks([])\n tax.set_title('$PDR$', size=options['fontsize'])\n fig2d.ax.axis((min_x-10, max_x+10, min_y-10, max_y+10+max_z*floor_factor+10))\n fig2d.save('2d_topology_hi_%d_ht_%d_minpdr_%.2f' % (nhdp_hi, nhdp_ht, mpr_minpdr))\n fig3d.save('3d_topology_hi_%d_ht_%d_minpdr_%.2f' % (nhdp_hi, nhdp_ht, mpr_minpdr))\n if not q:\n fig3d_onlynodes.save('3d_topology_only_nodes_hi_%d_ht_%d_minpdr_%.2f' % (nhdp_hi, nhdp_ht, mpr_minpdr))", "def cal_topology_feature(self):\n self.NPL()\n self.topo_efficiency_cal()\n self.efficiency_cal()\n self.cluster_cal()\n self.topo_diameter()\n self.spatial_diameter()", "def ggpl_bone_structure(file_name):\n def planeStructure(beamSection, pillarSection, distancePillars, intersectHeights):\n fStructs = []\n \n \"\"\" createFinalStruct\n \n Questo metodo crea una pilastro senza travi\n \n @param 
startPoint: Il punto di intersezione del pilastro con la base del piano di lavoro\n @param height: L'altezza del pilastro\n @returns: Il pilastro\n \"\"\"\n def createFinalStruct(startPoint, height):\n pillar = CUBOID([pillarSection[0], pillarSection[1], height])\n return STRUCT([T(2)(startPoint), pillar])\n\n \"\"\" createFStruct\n \n Questo metodo crea la struttura portante con il pilastro e le travi di riferimento\n \n @param startPoint: Il punto di intersezione del pilastro con la base del piano di lavoro\n @param height: L'altezza del pilastro\n @param lengthBeam: La lunghezza delle travi\n @returns: Il pilastro\n \"\"\"\n def createFStruct(startPoint, height, lengthBeam):\n fStruct = []\n\n fStruct.append(createFinalStruct(startPoint, height))\n hPillar = 0\n for i in range(len(intersectHeights)):\n beam = CUBOID([beamSection[1], lengthBeam, beamSection[0]])\n hPillar += intersectHeights[i]\n fStruct.append(STRUCT([\n T(3)(hPillar),\n T(2)(pillarSection[1] + startPoint),\n beam\n ]))\n return STRUCT(fStruct)\n\n height = SUM(intersectHeights) + beamSection[1]\n\n startDistance = 0\n for iterator in range(len(distancePillars)):\n fStructs.append(createFStruct(startDistance, height, distancePillars[iterator]))\n\n startDistance = startDistance + pillarSection[1] + distancePillars[iterator]\n\n fStructs.append(createFinalStruct(startDistance, height))\n\n return STRUCT(fStructs)\n \n \"\"\" createTrasversalBeam\n \n Questa funzione crea le travi di collegamento tra una parete e l'altra\n \n @param beanSection: una tupla (bx,bz) che contiene la dimensione delle travi\n @param pillarSection: una tupla (px,py) che contiene la dimensione dei pilastri\n @param distancePillars: una lista di distanze relative tra un pilastro e l'altro\n @param intersectHeights: una lista di altezze relative che indicano la distranza tra una trave e l'altra sull'asse x\n @returns: la struttura base, che corrisponde ad una parete dell'edificio\n \"\"\"\n def createTrasversalBeam(distance, beamSection, pillarSection, distancePillars, intersectHeights):\n y = []\n for index in range(len(distancePillars)):\n y.append(pillarSection[1])\n y.append(- distancePillars[index])\n y.append(pillarSection[1])\n\n x = [\n - pillarSection[0],\n distance\n ]\n\n z = []\n for i in range(len(intersectHeights)):\n if i == 0:\n z.append(-intersectHeights[i])\n else:\n z.append(-intersectHeights[i] + beamSection[1])\n z.append(beamSection[1])\n\n return PROD([\n PROD([QUOTE(x), QUOTE(y)]),\n QUOTE(z)\n ])\n \n \"\"\" parseCSV\n \n Questo metodo prende in input un file_name che indica il file csv da parsare per avere le informazioni della struttura\n \n Argomenti:\n file_name (string): il file name del file csv\n \n Yields:\n transaction: il vettore di traslazione tra una parete e l'altra\n beanSection: una tupla (bx,bz) che contiene la dimensione delle travi\n pillarSection: una tupla (px,py) che contiene la dimensione dei pilastri\n distancePillars: una lista di distanze relative tra un pilastro e l'altro\n intersectHeights: una lista di altezze relative che indicano la distranza tra una trave e l'altra sull'asse x\n \"\"\"\n def parseCSV(file_name):\n odd = True\n transaction = None\n with open(file_name, 'rb') as csvfile:\n builderreader = csv.reader(csvfile)\n\n for row in builderreader:\n if odd:\n odd = False\n transaction = [float(row[0]), float(row[1]), float(row[2])]\n else:\n odd = True\n beamSection = [float(row[0]), float(row[1])]\n pillarSection = [float(row[2]), float(row[3])]\n\n distancePillars = []\n start_point = 
5\n finish_point = int(row[4]) + start_point\n for index in range(start_point, finish_point):\n distancePillars.append(int(row[index]))\n\n intersectHeights = []\n start_point = finish_point + 1\n finish_point = int(row[finish_point]) + start_point\n for index in range(start_point, finish_point):\n intersectHeights.append(int(row[index]))\n\n yield transaction, beamSection, pillarSection, distancePillars, intersectHeights\n\n frames = []\n for transaction, beamSection, pillarSection, distancePillars, intersectHeights in parseCSV(file_name):\n frames.append(T(2)(transaction[1]))\n\n if transaction[0] != 0:\n frames.append(\n createTrasversalBeam(\n transaction[0], beamSection, pillarSection, distancePillars,intersectHeights\n )\n )\n\n frames.append(T([1, 3])([transaction[0], transaction[2]]))\n frames.append(\n planeStructure(beamSection, pillarSection, distancePillars, intersectHeights)\n )\n\n return STRUCT(frames)", "def get_plane(\n self,\n pos=None,\n norm=None,\n plane=None,\n sx=None,\n sy=None,\n color=\"lightgray\",\n alpha=0.25,\n **kwargs,\n ):\n axes_pairs = dict(sagittal=(0, 1), horizontal=(2, 0), frontal=(2, 1))\n\n if pos is None:\n pos = self.root._mesh.centerOfMass()\n\n try:\n norm = norm or self.space.plane_normals[plane]\n except KeyError: # pragma: no cover\n raise ValueError( # pragma: no cover\n f\"Could not find normals for plane {plane}. Atlas space provides these normals: {self.space.plane_normals}\" # pragma: no cover\n )\n\n # Get plane width and height\n idx_pair = (\n axes_pairs[plane]\n if plane is not None\n else axes_pairs[\"horizontal\"]\n )\n\n bounds = self.root.bounds()\n root_bounds = [\n [bounds[0], bounds[1]],\n [bounds[2], bounds[3]],\n [bounds[4], bounds[5]],\n ]\n\n wh = [float(np.diff(root_bounds[i])) for i in idx_pair]\n if sx is None:\n sx = wh[0]\n if sy is None:\n sy = wh[1]\n\n # return plane\n return Actor(\n Plane(pos=pos, normal=norm, sx=sx, sy=sy, c=color, alpha=alpha),\n name=f\"Plane at {pos} norm: {norm}\",\n br_class=\"plane\",\n )", "def __init__(self, **params):\n\n # Initialize topo\n Topo.__init__(self, **params)\n\n # Host and link configuration\n hostConfiguration = {'cpu': cpuShare()}\n linkConfiguration = {'bw': args.bandwidth, 'delay': delay(), 'max_queue_len': args.input_queue_size }\n \n # Create the actual topology\n client = self.add_host( 'client0', **hostConfiguration )\n \n switch = self.add_switch('switch0' )\n \n self.add_link( switch, client, port1=0, port2=0, **linkConfiguration )\n \n for i in range(1,args.servers+1):\n server = self.add_host( 'server%d' % i, **hostConfiguration )\n self.add_link( switch, server, port1=i, port2=0, **linkConfiguration )", "def plane_2d(self, quantity, plane, pval, draw=False, fixed=None):\n self.log.info('Plotting plane')\n pval = int(pval)\n # x = np.arange(0, self.period, self.dx)\n # y = np.arange(0, self.period, self.dy)\n # z = np.arange(0, self.height + self.dz, self.dz)\n x = self.X\n y = self.Y\n z = self.Z\n # Get the scalar values\n freq = self.conf['Simulation']['params']['frequency']\n wvlgth = (consts.c / freq) * 1E9\n title = 'Frequency = {:.4E} Hz, Wavelength = {:.2f} nm'.format(\n freq, wvlgth)\n # Get the plane we wish to plot\n cs = self.get_plane(quantity, plane, pval)\n self.log.info('DATA SHAPE: %s' % str(cs.shape))\n show = self.conf['General']['show_plots']\n p = False\n sim_dir = os.path.expandvars(self.conf['General']['sim_dir'])\n if plane == 'yz' or plane == 'zy':\n labels = ('y [um]', 'z [um]', quantity, title)\n if self.conf['General']['save_plots']:\n 
p = os.path.join(sim_dir,\n '%s_plane_2d_yz_pval%s.png' % (quantity,\n str(pval)))\n self.heatmap2d(y, z, cs, labels, plane, pval,\n save_path=p, show=show, draw=draw, fixed=fixed)\n elif plane == 'xz' or plane == 'zx':\n labels = ('x [um]', 'z [um]', quantity, title)\n if self.conf['General']['save_plots']:\n p = os.path.join(sim_dir,\n '%s_plane_2d_xz_pval%s.png' % (quantity,\n str(pval)))\n self.heatmap2d(x, z, cs, labels, plane, pval,\n save_path=p, show=show, draw=draw, fixed=fixed)\n elif plane == 'xy' or plane == 'yx':\n labels = ('y [um]', 'x [um]', quantity, title)\n if self.conf['General']['save_plots']:\n p = os.path.join(sim_dir,\n '%s_plane_2d_xy_pval%s.png' % (quantity,\n str(pval)))\n self.heatmap2d(x, y, cs, labels, plane, pval,\n save_path=p, show=show, draw=draw, fixed=fixed)", "def polyAutoProjection(*args, layoutMethod: Union[int, bool]=0, pivot: Union[List[float, float,\n float], bool]=None, pivotX: Union[float, bool]=0.0, pivotY: Union[float,\n bool]=0.0, pivotZ: Union[float, bool]=0.0, rotate: Union[List[float,\n float, float], bool]=None, rotateX: Union[float, bool]=0.0, rotateY:\n Union[float, bool]=0.0, rotateZ: Union[float, bool]=0.0, scale:\n Union[List[float, float, float], bool]=None, scaleX: Union[float,\n bool]=1.0, scaleY: Union[float, bool]=1.0, scaleZ: Union[float,\n bool]=1.0, translate: Union[List[float, float, float], bool]=None,\n translateX: Union[float, bool]=0.0, translateY: Union[float, bool]=0.0,\n translateZ: Union[float, bool]=0.0, caching: bool=True,\n constructionHistory: bool=True, createNewMap: bool=True,\n insertBeforeDeformers: bool=True, layout: Union[int, bool]=0, name:\n AnyStr=\"\", nodeState: Union[int, bool]=0, optimize: Union[int, bool]=0,\n percentageSpace: Union[float, bool]=0.0, planes: Union[int, bool]=6,\n projectBothDirections: bool=True, scaleMode: Union[int, bool]=0,\n skipIntersect: bool=True, uvSetName: AnyStr=\"\", worldSpace: bool=True,\n q=True, query=True, e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def plot_plane(unit_normal, x_array, y_array, fore):\n # print'unit normal = ', unit_normal\n z = (((unit_normal[0] * (fore[0] - x_array)) + (unit_normal[1] * (fore[1] - y_array))) / unit_normal[2]) + fore[2]\n # print 'plane numbers\\n', z\n return z", "def _repr_(self):\n return \"Hyperbolic plane\"", "def addPlaneToScene(self, foot, x, y):\r\n #research\r\n profprint()\r\n scene = slicer.mrmlScene\r\n # Create model node\r\n model = slicer.vtkMRMLModelNode()\r\n model.SetScene(scene)\r\n model.SetName(scene.GenerateUniqueName(\".ObturatorPlane\"))\r\n\r\n planeSource = vtk.vtkPlaneSource()\r\n foot-=25*(x+y)\r\n #planeSource.SetOrigin(np.array(foot))\r\n planeSource.SetOrigin(list(foot))\r\n planeSource.SetPoint1(np.array(foot)+50*x)\r\n planeSource.SetPoint2(np.array(foot)+50*y)\r\n planeSource.Update()\r\n model.SetAndObservePolyData(planeSource.GetOutput())\r\n\r\n # Create display node\r\n modelDisplay = slicer.vtkMRMLModelDisplayNode()\r\n modelDisplay.SetColor(1,1,0) # yellow\r\n modelDisplay.SetBackfaceCulling(0)\r\n modelDisplay.SetScene(scene)\r\n scene.AddNode(modelDisplay)\r\n model.SetAndObserveDisplayNodeID(modelDisplay.GetID())\r\n\r\n # Add to scene\r\n scene.AddNode(model)\r\n # transform = slicer.vtkMRMLLinearTransformNode()\r\n # scene.AddNode(transform)\r\n # model.SetAndObserveTransformNodeID(transform.GetID())\r\n #\r\n # vTransform = vtk.vtkTransform()\r\n # vTransform.Scale(50,50,50)\r\n # #vTransform.RotateX(30)\r\n # 
transform.SetAndObserveMatrixTransformToParent(vTransform.GetMatrix())\r", "def show_plane(axis, plane, cmap=\"gray\", title=None):\n axis.imshow(plane, cmap=cmap)\n axis.set_xticks([])\n axis.set_yticks([])\n\n if title:\n axis.set_title(title)\n\n return None", "def _make_exclusion_plane(args):\n cls_dict = _load_subset(args.cls_file, args.regions)\n\n ex_plane = planeplt.CLsExclusionPlane(interpolation=args.interpolation)\n cls_list = []\n if args.band_region:\n for sp in cls_dict[args.band_region]:\n sch, lsp = sp['scharm_mass'], sp['lsp_mass']\n low, high = sp['exp_d1s'], sp['exp_u1s']\n cls_list.append( (sch, lsp, low, high))\n ex_plane.add_band(cls_list)\n\n # add middle band\n def get_tup(x):\n return (x['scharm_mass'], x['lsp_mass'], x['cls_exp'])\n for conf_name, cls_list in sorted(cls_dict.items()):\n cls_tup = [get_tup(x) for x in cls_list]\n style = '-k' if conf_name == args.band_region else None\n ex_plane.add_config(cls_tup,conf_name, style=style, )\n\n ex_plane.save(args.output_plot)", "def GetPlaneMode(self, *args):\n return _ShapeUpgrade.ShapeUpgrade_ConvertSurfaceToBezierBasis_GetPlaneMode(self, *args)", "def get_plane_of_points(\n self,\n normal_vector=\"z\",\n planar_coordinate=None,\n ):\n # Get results vectors\n if (normal_vector == \"z\"):\n x_flat = self.floris.grid.x_sorted_inertial_frame[0, 0].flatten()\n y_flat = self.floris.grid.y_sorted_inertial_frame[0, 0].flatten()\n z_flat = self.floris.grid.z_sorted_inertial_frame[0, 0].flatten()\n else:\n x_flat = self.floris.grid.x_sorted[0, 0].flatten()\n y_flat = self.floris.grid.y_sorted[0, 0].flatten()\n z_flat = self.floris.grid.z_sorted[0, 0].flatten()\n u_flat = self.floris.flow_field.u_sorted[0, 0].flatten()\n v_flat = self.floris.flow_field.v_sorted[0, 0].flatten()\n w_flat = self.floris.flow_field.w_sorted[0, 0].flatten()\n\n # Create a df of these\n if normal_vector == \"z\":\n df = pd.DataFrame(\n {\n \"x1\": x_flat,\n \"x2\": y_flat,\n \"x3\": z_flat,\n \"u\": u_flat,\n \"v\": v_flat,\n \"w\": w_flat,\n }\n )\n if normal_vector == \"x\":\n df = pd.DataFrame(\n {\n \"x1\": y_flat,\n \"x2\": z_flat,\n \"x3\": x_flat,\n \"u\": u_flat,\n \"v\": v_flat,\n \"w\": w_flat,\n }\n )\n if normal_vector == \"y\":\n df = pd.DataFrame(\n {\n \"x1\": x_flat,\n \"x2\": z_flat,\n \"x3\": y_flat,\n \"u\": u_flat,\n \"v\": v_flat,\n \"w\": w_flat,\n }\n )\n\n # Subset to plane\n # TODO: Seems sloppy as need more than one plane in the z-direction for GCH\n if planar_coordinate is not None:\n df = df[np.isclose(df.x3, planar_coordinate)] # , atol=0.1, rtol=0.0)]\n\n # Drop duplicates\n # TODO is this still needed now that we setup a grid for just this plane?\n df = df.drop_duplicates()\n\n # Sort values of df to make sure plotting is acceptable\n df = df.sort_values([\"x2\", \"x1\"]).reset_index(drop=True)\n\n return df", "def construct_mpi_topology(self, dico):\n period = [True]*self.dim\n\n if dico is None:\n comm = mpi.COMM_WORLD\n else:\n comm = dico.get('comm', mpi.COMM_WORLD)\n self.mpi_topo = MPI_topology(self.dim, period, comm)", "def get_transformable_plane(self, x_range = None, y_range = None):\n plane_config = dict(self.plane_config)\n shift_val = ORIGIN\n if x_range is not None:\n x_min, x_max = x_range\n plane_config[\"x_radius\"] = x_max - x_min\n shift_val += (x_max+x_min)*RIGHT/2.\n if y_range is not None:\n y_min, y_max = y_range\n plane_config[\"y_radius\"] = y_max - y_min\n shift_val += (y_max+y_min)*UP/2.\n plane = ComplexPlane(**plane_config)\n plane.shift(shift_val)\n if self.use_multicolored_plane:\n 
self.paint_plane(plane)\n return plane", "def plotWholeRoom(mesh):\r\n fig = plt.figure()\r\n ax = fig.gca(projection='3d')\r\n X = np.arange(0, mesh.xLength+mesh.meshsize, mesh.meshsize)\r\n Y = np.arange(0, mesh.yLength+mesh.meshsize, mesh.meshsize)\r\n X, Y = np.meshgrid(X,Y)\r\n numberOfXNodes = mesh.x_res#round(mesh.xLength/mesh.meshsize)+1\r\n numberOfYNodes = mesh.y_res#round(mesh.yLength/mesh.meshsize)+1\r\n Z = np.array([[mesh.grid[i,j].funcVal for i in range(numberOfYNodes)] for j in range(numberOfXNodes)])\r\n if mesh.y_res==2:\r\n print()\r\n surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.coolwarm,\r\n linewidth=0, antialiased=False)\r\n # add vmin=4, vmax=41, to define lower and upper value for the color-scheme\r\n # set limits for z-axis\r\n ax.set_zlim(np.amin(Z)-mesh.meshsize, np.amax(Z)+mesh.meshsize)\r\n # don't know what these two lines are for\r\n # x.zaxis.set_major_locator(LinearLocator(10))\r\n # ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))\r\n # don't know what these two lines are for\r\n fig.colorbar(surf, shrink=0.5, aspect=5)\r\n plt.show() \r\n return fig", "def plot_nhdp_hello_topology(options, tags=None, cursor=None):\n options['cur_src'] = 'topo'\n options['prefix'] = \"nhdp\"\n ################################################################################\n locs = options['locs']\n colors = options['color2'](pylab.linspace(0, 1, 101))\n ################################################################################\n circ_max = 5\n line_max = 10\n floor_factor = 2\n floor_skew = -0.25\n line_min = 1\n\n hosts = get_hosts(options)\n\n for q, (tag_key, tag_id, nhdp_hi, nhdp_ht, mpr_minpdr) in enumerate(tags):\n logging.info('tag_id=\\\"%s\\\" (%d/%d)', tag_id, q+1, len(tags))\n fig2d = MyFig(options, rect=[0.1, 0.1, 0.8, 0.7], xlabel='x Coordinate', ylabel='y Coordinate')\n fig3d = MyFig(options, rect=[0.1, 0.1, 0.8, 0.7], xlabel='x Coordinate', ylabel='y Coordinate', ThreeD=True)\n if not q:\n fig3d_onlynodes = MyFig(options, xlabel='x Coordinate [m]', ylabel='y Coordinate [$m$]', ThreeD=True)\n\n min_x = min_y = min_z = numpy.infty\n max_x = max_y = max_z = 0\n\n # draw the nodes\n for host in hosts:\n try:\n xpos, ypos, zpos = locs[host]\n except KeyError:\n logging.warning('no position found for node %s', host)\n continue\n\n max_x = max(xpos, max_x)\n max_y = max(ypos, max_y)\n min_x = min(xpos, min_x)\n min_y = min(ypos, min_y)\n max_z = max(zpos, max_z)\n min_z = max(zpos, min_z)\n\n fig2d.ax.plot(\n xpos+zpos*floor_skew*floor_factor,\n ypos+zpos*floor_factor,\n 'o', color='black', ms=circ_max)\n fig3d.ax.plot([xpos], [ypos], [zpos], 'o', color='black', ms=circ_max)\n if not q:\n color = 'black'\n if host.startswith('a6'):\n color = 'red'\n elif host.startswith('a3'):\n color = 'blue'\n elif host.startswith('a7'):\n color = 'orange'\n fig3d_onlynodes.ax.plot([xpos], [ypos], [zpos], 'o', color=color, ms=circ_max)\n fig2d.colorbar = fig2d.fig.add_axes([0.1, 0.875, 0.8, 0.025])\n fig3d.colorbar = fig3d.fig.add_axes([0.1, 0.875, 0.8, 0.025])\n drawBuildingContours(fig3d.ax, options)\n drawBuildingContours(fig3d_onlynodes.ax, options)\n\n alinspace = numpy.linspace(0, 1, 100)\n alinspace = numpy.vstack((alinspace, alinspace))\n for tax in [fig2d.colorbar, fig3d.colorbar]:\n tax.imshow(alinspace, aspect='auto', cmap=options['color2'])\n tax.set_xticks(pylab.linspace(0, 100, 5))\n tax.set_xticklabels(['$%.2f$' % l for l in pylab.linspace(0, 1, 5)], fontsize=0.8*options['fontsize'])\n tax.set_yticks([])\n tax.set_title('$PDR$', 
size=options['fontsize'])\n fig2d.ax.axis((min_x-10, max_x+10, min_y-10, max_y+10+max_z*floor_factor+10))\n fig2d.save('2d_%s' % (tag_id))\n fig3d.save('3d_%s' % (tag_id))\n if not q:\n fig3d_onlynodes.save('3d')", "def automatic_partitioning():\n def draw_labels(labels_vector):\n labs = {node: labels_vector[i] for i, node in enumerate(router.graph.nodes())}\n coord = {touple: list(touple) for touple in router.graph.nodes()}\n nx.draw_networkx(router.graph, coord, labels=labs)\n plt.show()\n\n file_path = PROJECT_PATH + \"/geographycal_data/adjacency_matrix/Howgrp.txt\"\n router = Router(adjacency_metrix=file_path)\n draw_labels(router.louvain_clustering(router.graph, weight='weight'))", "def plot_mpr_topology_per_mpr(options, tags=None, cursor=None):\n interval = 600\n options['cur_src'] = 'topo'\n options['prefix'] = \"mpr\"\n ################################################################################\n locs = options['locs']\n colors = options['color2'](pylab.linspace(0, 1, 101))\n ################################################################################\n circ_max = 5\n line_max = 10\n floor_factor = 2\n floor_skew = -0.25\n line_min = 1\n cur_time = cursor.execute('''\n SELECT min(time)\n FROM nhdp_mpr_selectors\n ''').fetchone()\n mprs = cursor.execute('''\n SELECT DISTINCT(host)\n FROM nhdp_mpr_selectors\n ''').fetchall()\n hosts = get_hosts(options)\n\n min_x = min_y = min_z = numpy.infty\n max_x = max_y = max_z = 0\n\n # first draw the edges...\n for i, (mpr,) in enumerate(mprs):\n logging.info('[%d/%d] plotting host=%s', i+1, len(mprs), mpr)\n fig2d = MyFig(options, rect=[0.1, 0.1, 0.8, 0.7], xlabel='x Coordinate', ylabel='y Coordinate')\n fig3d = MyFig(options, rect=[0.1, 0.1, 0.8, 0.7], xlabel='x Coordinate', ylabel='y Coordinate', ThreeD=True)\n try:\n host_xpos, host_ypos, host_zpos = locs[mpr]\n except KeyError:\n logging.warning('no position found for node %s', mpr)\n continue\n ################################################################################\n # host is the receiving router, i.e. in our case the MPR\n # src is the sending router, i.e. 
the MPR selector\n # We only want to draw an edge if it connects an MPR with its SELECTOR.\n #\n ################################################################################\n cursor.execute('''\n SELECT src, AVG(pdr)\n FROM eval_helloPDR AS pdr JOIN nhdp_mpr_selectors AS mpr\n ON pdr.host = mpr.host AND pdr.tx_if = mpr.mprselector\n WHERE pdr.host=?\n GROUP BY src\n ''', (mpr, ))\n\n for src, pdr in cursor.fetchall():\n try:\n src_xpos, src_ypos, src_zpos = locs[src]\n except KeyError:\n logging.warning('no position found for node %s', src)\n continue\n\n fig2d.ax.plot(\n [host_xpos+host_zpos*floor_skew*floor_factor, src_xpos+src_zpos*floor_skew*floor_factor],\n [host_ypos+host_zpos*floor_factor, src_ypos+src_zpos*floor_factor],\n linestyle='-', color=colors[pdr*100], linewidth=max(line_max*pdr, line_min), alpha=0.3)\n\n fig3d.ax.plot(\n [host_xpos, src_xpos],\n [host_ypos, src_ypos],\n [host_zpos, src_zpos],\n linestyle='-', color=colors[pdr*100], linewidth=max(line_max*pdr, line_min), alpha=0.3)\n\n # draw all other nodes\n for host in hosts:\n try:\n xpos, ypos, zpos = locs[host]\n except KeyError:\n logging.warning('no position found for node %s', host)\n continue\n\n max_x = max(xpos, max_x)\n max_y = max(ypos, max_y)\n min_x = min(xpos, min_x)\n min_y = min(ypos, min_y)\n max_z = max(zpos, max_z)\n min_z = max(zpos, min_z)\n if host == mpr:\n fig3d.ax.plot([xpos], [ypos], [zpos], 'o', color='blue', ms=circ_max)\n fig2d.ax.plot(xpos+zpos*floor_skew*floor_factor,ypos+zpos*floor_factor,'o', color='blue', ms=circ_max)\n else:\n fig3d.ax.plot([xpos], [ypos], [zpos], 'o', color='black', ms=circ_max)\n fig2d.ax.plot(xpos+zpos*floor_skew*floor_factor,ypos+zpos*floor_factor,'o', color='black', ms=circ_max)\n fig2d.colorbar = fig2d.fig.add_axes([0.1, 0.875, 0.8, 0.025])\n fig3d.colorbar = fig3d.fig.add_axes([0.1, 0.875, 0.8, 0.025])\n drawBuildingContours(fig3d.ax, options)\n alinspace = numpy.linspace(0, 1, 100)\n alinspace = numpy.vstack((alinspace, alinspace))\n for tax in [fig2d.colorbar, fig3d.colorbar]:\n tax.imshow(alinspace, aspect='auto', cmap=options['color2'])\n tax.set_xticks(pylab.linspace(0, 100, 5))\n tax.set_xticklabels(['$%.2f$' % l for l in pylab.linspace(0, 1, 5)], fontsize=0.8*options['fontsize'])\n tax.set_yticks([])\n tax.set_title('$PDR$', size=options['fontsize'])\n fig2d.ax.axis((min_x-10, max_x+10, min_y-10, max_y+10+max_z*floor_factor+10))\n fig2d.save('2d_topology_per_mpr_%s' % (mpr))\n fig3d.save('3d_topology_per_mpr_%s' % (mpr))", "def ProjectToPlane(self):\n\n self.__do_essential_memebers_exist__()\n if self.element_type != \"tri\":\n raise ValueError(\"Project to plane is only applicable to triangles\")\n\n imesh = deepcopy(self)\n coordinates = []\n connectivities = []\n for counter, elem in enumerate(imesh.elements):\n\n elementCoordinates = imesh.points[elem,:]\n\n A = elementCoordinates[0,:]\n B = elementCoordinates[1,:]\n C = elementCoordinates[2,:]\n\n X = (B - A); X /= np.linalg.norm(X)\n Z = np.cross(X, C - A); Z /= np.linalg.norm(Z)\n Y = np.cross(Z, X)\n\n # PROJECT THE TRIANGLE TO THIS BASES\n a = [0., 0.]\n b = [np.linalg.norm((B - A)), 0.]\n c = [(C - A).dot(X), (C - A).dot(Y)]\n\n coordinates.append(a)\n coordinates.append(b)\n coordinates.append(c)\n\n elementConnectivity = [3 * counter, 3 * counter + 1, 3 * counter + 2]\n connectivities.append(elementConnectivity)\n\n coordinates = np.array(coordinates)\n connectivities = np.array(connectivities)\n imesh.points = coordinates\n imesh.elements = connectivities\n imesh.nelem = 
imesh.elements.shape[0]\n imesh.nnode = imesh.points.shape[0]\n\n return imesh", "def topology(self, topo_file: str, *args: str):\n pass", "def plot_mpr_topology_per_node(options, tags=None, cursor=None):\n interval = 600\n options['cur_src'] = 'topo'\n options['prefix'] = \"mpr\"\n ################################################################################\n locs = options['locs']\n colors = options['color2'](pylab.linspace(0, 1, 101))\n colors_mprs = ['green', 'red', 'cyan', 'magenta', 'yellow', 'grey']\n #colors_mprs = ['#222222', '#444444', '#666666', '#888888', '#aaaaaa', '#cccccc']\n ################################################################################\n circ_max = 5\n line_max = 10\n floor_factor = 2\n floor_skew = -0.25\n line_min = 1\n wait_time = 30\n measure_time = wait_time + 20\n draw_non_mpr_neighbors = False\n\n hosts = get_hosts(options)\n\n min_x = min_y = min_z = numpy.infty\n max_x = max_y = max_z = 0\n\n for q, (tag_key, tag_id, nhdp_hi, nhdp_ht, mpr_minpdr) in enumerate(tags):\n # first draw the edges...\n for host in hosts:\n logging.info(\"Plotting tag_id: %s host: %s\" % (tag_id, host))\n tx_if, = cursor.execute('''\n SELECT tx_if\n FROM he\n WHERE host=?\n ''',(host,)).fetchone()\n fig2d = MyFig(options, rect=[0.1, 0.1, 0.8, 0.7], xlabel='x Coordinate', ylabel='y Coordinate')\n fig3d = MyFig(options, rect=[0.1, 0.1, 0.8, 0.7], xlabel='x Coordinate', ylabel='y Coordinate', ThreeD=True)\n try:\n host_xpos, host_ypos, host_zpos = locs[host]\n except KeyError:\n logging.warning('no position found for node %s', host)\n continue\n ################################################################################\n # src is the receiving router, i.e. in our case the MPR\n # host is the sending router, i.e. the MPR selector\n # We only want to draw an edge if it connects a host with its MPR.\n #\n ################################################################################\n\n neighbors = cursor.execute('''\n SELECT DISTINCT(host)\n FROM rh\n WHERE prev = ?\n ''', (tx_if,)).fetchall()\n min_time, = cursor.execute('''\n SELECT min(time)\n FROM nhdp_he\n WHERE tag = ?\n ''',(tag_key,)).fetchone()\n mprs = cursor.execute('''\n SELECT pdr.host, AVG(pdr)\n FROM eval_helloPDR AS pdr JOIN nhdp_mpr_selectors AS mpr\n ON pdr.host = mpr.host AND pdr.tx_if = mpr.mprselector\n WHERE pdr.tx_if = ? AND pdr.tag_key = ? AND mpr.time BETWEEN ? 
AND ?\n GROUP BY pdr.host\n ''', (tx_if, tag_key, min_time + wait_time, min_time + measure_time)).fetchall()\n logging.info(\"Host is %s...\" % host)\n mpr_list = []\n for mpr, pdr in mprs:\n mpr_list.append(mpr)\n try:\n src_xpos, src_ypos, src_zpos = locs[mpr]\n except KeyError:\n logging.warning('no position found for node %s', mpr)\n continue\n\n fig2d.ax.plot(\n [host_xpos+host_zpos*floor_skew*floor_factor, src_xpos+src_zpos*floor_skew*floor_factor],\n [host_ypos+host_zpos*floor_factor, src_ypos+src_zpos*floor_factor],\n linestyle='-', color=colors[pdr*100], linewidth=max(line_max*pdr, line_min), alpha=0.3, label=tag_id)\n\n fig3d.ax.plot(\n [host_xpos, src_xpos],\n [host_ypos, src_ypos],\n [host_zpos, src_zpos],\n linestyle='-', color=colors[pdr*100], linewidth=max(line_max*pdr, line_min), alpha=0.3, label=tag_id)\n if draw_non_mpr_neighbors == True:\n for _host, in neighbors:\n try:\n src_xpos, src_ypos, src_zpos = locs[_host]\n except KeyError:\n logging.warning('no position found for node %s', _host)\n continue\n\n fig2d.ax.plot(\n [host_xpos+host_zpos*floor_skew*floor_factor, src_xpos+src_zpos*floor_skew*floor_factor],\n [host_ypos+host_zpos*floor_factor, src_ypos+src_zpos*floor_factor],\n linestyle='-', color='black', linewidth=line_min, alpha=0.3)\n\n fig3d.ax.plot(\n [host_xpos, src_xpos],\n [host_ypos, src_ypos],\n [host_zpos, src_zpos],\n linestyle='-', color='black', linewidth=line_min, alpha=0.3)\n # draw nodes\n color_idx = 0;\n skip_list = []\n n2_list = []\n for _host in hosts:\n if _host in skip_list:\n continue\n try:\n xpos, ypos, zpos = locs[_host]\n except KeyError:\n logging.warning('no position found for node %s', _host)\n continue\n\n max_x = max(xpos, max_x)\n max_y = max(ypos, max_y)\n min_x = min(xpos, min_x)\n min_y = min(ypos, min_y)\n max_z = max(zpos, max_z)\n min_z = max(zpos, min_z)\n if _host == host:\n fig3d.ax.plot([xpos], [ypos], [zpos], 'o', color='blue', ms=circ_max*2)\n fig2d.ax.plot(xpos+zpos*floor_skew*floor_factor,ypos+zpos*floor_factor,'o', color='blue', ms=circ_max*2)\n elif _host in mpr_list:\n fig3d.ax.plot([xpos], [ypos], [zpos], 'o', color=colors_mprs[color_idx], ms=circ_max)\n fig2d.ax.plot(xpos+zpos*floor_skew*floor_factor,ypos+zpos*floor_factor,'o', color=colors_mprs[color_idx], ms=circ_max)\n mpr_tx_if, = cursor.execute('''\n SELECT tx_if\n FROM he\n WHERE host=? AND time BETWEEN ? AND ?\n ''',(_host, min_time + wait_time, min_time + measure_time)).fetchone()\n n2_ifs = list(pylab.flatten(cursor.execute('''\n SELECT DISTINCT(n2)\n FROM nhdp_mpr_n2\n WHERE host = ? AND time BETWEEN ? AND ?\n ''', (_host, min_time + wait_time, min_time + measure_time)).fetchall()))\n\n n2s = cursor.execute('''\n SELECT DISTINCT(host)\n FROM he\n WHERE tx_if IN (%s)\n ''' % ','.join('?'*len(n2_ifs)), n2_ifs).fetchall()\n #n2 = cursor.execute('''\n #SELECT DISTINCT(host)\n #FROM rh\n #WHERE prev = ? AND time BETWEEN ? 
AND ?\n #''', (mpr_tx_if, min_time + wait_time, min_time + measure_time)).fetchall()\n mpr_xpos, mpr_ypos, mpr_zpos = locs[_host]\n for __host, in n2s:\n __host = unicodedata.normalize('NFKD', __host).encode('ascii','ignore')\n if __host in n2_list:\n continue\n if __host in mpr_list or __host == host:\n continue\n n2_list.append(__host)\n skip_list.append(__host)\n try:\n _xpos, _ypos, _zpos = locs[__host]\n except KeyError:\n logging.warning('no position found for node %s', __host)\n continue\n max_x = max(_xpos, max_x)\n max_y = max(_ypos, max_y)\n min_x = min(_xpos, min_x)\n min_y = min(_ypos, min_y)\n max_z = max(_zpos, max_z)\n min_z = max(_zpos, min_z)\n fig3d.ax.plot([_xpos], [_ypos], [_zpos], 'o', color=colors_mprs[color_idx], ms=circ_max)\n fig2d.ax.plot(_xpos+_zpos*floor_skew*floor_factor,_ypos+_zpos*floor_factor,'o', color=colors_mprs[color_idx], ms=circ_max)\n\n n2_xpos, n2_ypos, n2_zpos = locs[__host]\n\n fig2d.ax.plot(\n [n2_xpos+n2_zpos*floor_skew*floor_factor, mpr_xpos+mpr_zpos*floor_skew*floor_factor],\n [n2_ypos+n2_zpos*floor_factor, mpr_ypos+mpr_zpos*floor_factor],\n linestyle='-', color=colors_mprs[color_idx], linewidth=line_min, alpha=0.3)\n\n fig3d.ax.plot(\n [n2_xpos, mpr_xpos],\n [n2_ypos, mpr_ypos],\n [n2_zpos, mpr_zpos],\n linestyle='-', color=colors_mprs[color_idx], linewidth=line_min, alpha=0.3)\n\n color_idx = color_idx + 1;\n if color_idx > 5:\n color_idx = 0\n else:\n fig3d.ax.plot([xpos], [ypos], [zpos], 'o', color='black', ms=circ_max)\n fig2d.ax.plot(xpos+zpos*floor_skew*floor_factor,ypos+zpos*floor_factor,'o', color='black', ms=circ_max)\n\n\n fig2d.colorbar = fig2d.fig.add_axes([0.1, 0.875, 0.8, 0.025])\n fig3d.colorbar = fig3d.fig.add_axes([0.1, 0.875, 0.8, 0.025])\n drawBuildingContours(fig3d.ax, options)\n alinspace = numpy.linspace(0, 1, 100)\n alinspace = numpy.vstack((alinspace, alinspace))\n for tax in [fig2d.colorbar, fig3d.colorbar]:\n tax.imshow(alinspace, aspect='auto', cmap=options['color2'])\n tax.set_xticks(pylab.linspace(0, 100, 5))\n tax.set_xticklabels(['$%.2f$' % l for l in pylab.linspace(0, 1, 5)], fontsize=0.8*options['fontsize'])\n tax.set_yticks([])\n tax.set_title('$PDR$', size=options['fontsize'])\n fig2d.ax.axis((min_x-10, max_x+10, min_y-10, max_y+10+max_z*floor_factor+10))\n logging.info(\"saving %s\" %(host))\n fig2d.save('2d_mpr_topology_%s' % (host))\n fig3d.save('3d_mpr_topology_%s' % (host))", "def plot_sag_plane(self, P0=None, sag_pl=None):\n if P0 is None: P0 = np.array([0,0,0])\n if sag_pl is None: sag_pl = self.sp\n norm, d = sag_pl[:3], sag_pl[3]\n import matplotlib.pyplot as plt\n from mpl_toolkits.mplot3d import Axes3D\n plt.ion()\n # create x,y\n xypts = 10\n xrng = 300\n yrng = 130\n xrng_mesh = np.linspace(P0[0], P0[0]-xrng, xypts)\n yrng_mesh = np.linspace(P0[1]-yrng/2., P0[1]+yrng, xypts)\n xx, yy = np.meshgrid(xrng_mesh, yrng_mesh)\n # calculate corresponding z\n zz = -1 * (norm[0] * xx + norm[1] * yy + d) / norm[2]\n # plot the surface\n self.fig = plt.figure()\n self.fig_ax = self.fig.add_subplot(111, projection='3d')\n self.fig_ax.plot_wireframe(xx, yy, zz, color='gray')\n #ax.quiver(P0[0], P0[1], norm[0], norm[1])\n self.fig_ax.set_xlabel('X')\n self.fig_ax.set_ylabel('Y')\n self.fig_ax.set_zlabel('Z')\n self.fig_ax.set_zlim(P0[2]-xrng, P0[2]+yrng)\n plt.show()", "def plane_generator():\n\n planarRadius = np.sqrt(N / (np.pi * pointDensity)) # <-- convert pointDensity into radius\n planarThreshold = planarRadius * np.sqrt(thresholdFrac)\n\n # distance function (law of cosines)\n def dist(rTheta1, rTheta2): 
# rThetai is a coordinate tuple: (r, theta)\n a, b = rTheta1[0], rTheta2[0]\n theta1, theta2 = rTheta1[1], rTheta2[1]\n return np.sqrt(a ** 2 + b ** 2 - 2 * a * b * np.cos(theta1 - theta2)) # <-- law of cosines\n\n # computes the adjacency matrices\n data_plane = []\n for r in range(num_graphs):\n\n # generates dictionary of positions for each node: node_pos = {node_i: (radius, theta)}\n node_pos = {}\n for i in range(N):\n rnd_angle = np.random.random() * 2 * np.pi\n rnd_radii = np.random.random() * planarRadius\n node_pos.update({i: (rnd_radii, rnd_angle)})\n\n Adj_Matrix = np.zeros((N, N))\n for i in range(N):\n for j in range(N):\n ij_dist = dist(node_pos[i], node_pos[j])\n if ij_dist < planarThreshold:\n Adj_Matrix[i, j] = 1 # nodes that are connected are assigned a 1 in the matrix\n\n data_plane.append(Adj_Matrix)\n\n return data_plane", "def resMode(mode): \n if mode==0:\n makeMesh(r0x, r0y)\n elif mode==1:\n makeMesh(r1x, r1y)\n elif (mode==2):\n makeMesh(r2x, r2y)", "def topo_plane_paramEval(self, param):\n # Create an empty numpy array with the same number as pixels as the real data.\n self.topo_plane_fit_data = np.zeros((self.y_res, self.x_res))\n for y in range(0, self.y_res): # Iterate over the y-axis pixels.\n for x in range(0, self.x_res): # Iterate over the x-axis pixels.\n self.topo_plane_fit_data[y, x] = param[0]*x + param[1]*y + param[2] # Generate plane value.\n return self.topo_plane_fit_data # Return entire array.", "def load_plane(image):\n pixels = image.getPrimaryPixels()\n return pixels.getPlane(0, 0, 0)", "def build_topology_moore(config):\n columns = config['MooreTopology']['width']\n rows = config['MooreTopology']['height']\n radius = config['MooreTopology']['radius']\n assert rows > 0 and columns > 0 and radius > 0\n\n periodic = config['MooreTopology']['periodic']\n\n top = nx.empty_graph()\n top.name = \"Moore Lattice: {r} rows, {c} columns, radius={rx}\".format(r=rows,\n c=columns,\n rx=radius)\n if periodic:\n top.name += ' with periodoc boundaries'\n\n top.add_nodes_from(list(range(rows * columns)))\n\n for node in top.nodes_iter():\n (myrow, mycol) = divmod(node, columns)\n top.node[node]['coords'] = (myrow, mycol)\n\n for r in range(myrow - radius, myrow + radius + 1):\n if periodic == False and (r < 0 or r >= rows):\n continue\n\n for c in range(mycol - radius, mycol + radius + 1):\n if periodic == False and (c < 0 or c >= columns):\n continue\n\n neighbor = (columns * (r % rows)) + (c % columns)\n\n if node != neighbor:\n top.add_edge(node, neighbor)\n\n return top", "def mesh_slicer(self, plane, opt):\n\n # get plane coefficients\n a = plane[0]\n b = plane[1]\n c = plane[2]\n\n # create vtk plane object\n VTKplane = vtk.vtkPlane()\n # for now we choose the center point as the point of rotation\n VTKplane.SetOrigin(self.mesh_poly.GetCenter())\n VTKplane.SetNormal(a, b, c)\n VTKplane.SetOrigin(self.epi_apex_node)\n\n # create cutter\n cutEdges = vtk.vtkCutter()\n cutEdges.SetInputData(self.mesh_poly)\n cutEdges.SetCutFunction(VTKplane)\n cutEdges.GenerateCutScalarsOn()\n cutEdges.SetValue(0, 0.5)\n\n # create renderer\n ren = vtk.vtkRenderer()\n ren.SetBackground(0.0, 0.0, 0.0)\n\n # create mapper\n mapper = vtk.vtkPolyDataMapper()\n mapper.SetInputConnection(cutEdges.GetOutputPort())\n\n # create actor\n actor = vtk.vtkActor()\n actor.SetMapper(mapper)\n actor.GetProperty().SetColor(0.0, 0.0, 1.0)\n actor.GetProperty().SetLineWidth(2)\n\n # display apex point\n apexA = include_points(list(self.epi_apex_node), 1, 15, (0, 0, 1))\n\n if (opt == 
'mesh'):\n meshMapper = vtk.vtkPolyDataMapper()\n meshMapper.SetInputData(self.mesh_poly)\n meshActor = vtk.vtkActor()\n meshActor.SetMapper(meshMapper)\n meshActor.GetProperty().SetColor(1.0, 0.0, 0.0)\n\n # generate renderer\n ren.AddActor(self.meshActor)\n ren.AddActor(actor)\n ren.AddActor(apexA)\n\n else:\n ren.AddActor(actor)\n ren.AddActor(apexA)\n\n # display\n vtk_show(ren)", "def build_planar_mesh(cellWidth, x, y, geom_points, geom_edges,\n out_filename='base_mesh.nc', logger=None):\n\n with LoggingContext(__name__, logger=logger) as logger:\n\n da = xarray.DataArray(cellWidth,\n dims=['y', 'x'],\n coords={'y': y, 'x': x},\n name='cellWidth')\n cw_filename = 'cellWidthVsXY.nc'\n da.to_netcdf(cw_filename)\n\n logger.info('Step 1. Generate mesh with JIGSAW')\n jigsaw_driver(cellWidth, x, y, on_sphere=False,\n geom_points=geom_points, geom_edges=geom_edges,\n logger=logger)\n\n logger.info('Step 2. Convert triangles from jigsaw format to netcdf')\n jigsaw_to_netcdf(msh_filename='mesh-MESH.msh',\n output_name='mesh_triangles.nc', on_sphere=False)\n\n logger.info('Step 3. Convert from triangles to MPAS mesh')\n args = ['MpasMeshConverter.x',\n 'mesh_triangles.nc',\n out_filename]\n check_call(args=args, logger=logger)", "def PlotAirplane():\n airplane = vtkInterface.PolyData(planefile)\n airplane.Plot()", "def fig4():\n # fmt: off\n tpm = np.array([\n [0, 0, 0],\n [0, 0, 1],\n [1, 0, 1],\n [1, 0, 0],\n [1, 0, 0],\n [1, 1, 1],\n [1, 0, 1],\n [1, 1, 0],\n ])\n cm = np.array([\n [0, 1, 1],\n [1, 0, 1],\n [1, 1, 0],\n ])\n # fmt: on\n return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])", "def createTopology(self):\n\n # find DAG root\n dagRoot = None\n for mote in self.motes:\n if mote.id == 0:\n mote.role_setDagRoot()\n dagRoot = mote\n assert dagRoot\n\n if self.settings.mobilityModel == 'RPGM':\n # put DAG root at center of area\n dagRoot.setLocation(x=SimEngine.SimEngine().targets[0][0],\n y=SimEngine.SimEngine().targets[0][1])\n else:\n # put DAG root at center of area\n dagRoot.setLocation(x=self.squareSide/2,\n y=self.squareSide/2)\n\n # reposition each mote until it is connected\n connectedMotes = [dagRoot]\n for mote in self.motes:\n if mote in connectedMotes:\n continue\n\n connected = False\n while not connected:\n # pick a random location\n mote.setLocation(x=self.squareSide*random.random(),\n y=self.squareSide*random.random())\n\n # if mote.id == 1:\n # mote.setLocation(\n # x=self.squareSide / 2.5 + 0.02,\n # y=self.squareSide / 2.5 + 0.3\n # )\n # elif mote.id == 2:\n # mote.setLocation(\n # x=self.squareSide / 2.5 + 0.3,\n # y=self.squareSide / 2.5 + 0.3\n # )\n #\n # elif mote.id == 3:\n # mote.setLocation(\n # x=self.squareSide / 2.5 + 0.1,\n # y=self.squareSide / 2.5 + 0.4\n # )\n\n # elif mote.id == 4:\n # mote.setLocation(\n # x=self.squareSide / 2.5 + 0.2,\n # y=self.squareSide / 2.5 + 0.65\n # )\n # else:\n\n mote.setLocation(\n x=self.settings.squareSide * random.random(),\n y=self.settings.squareSide * random.random()\n )\n\n numStableNeighbors = 0\n\n # count number of neighbors with sufficient RSSI\n for cm in connectedMotes:\n\n rssi = self._computeRSSI(mote, cm)\n mote.setRSSI(cm, rssi)\n cm.setRSSI(mote, rssi)\n\n # save the intial RSSI values for future use in the mobility models\n mote.initialRSSI[cm] = rssi\n cm.initialRSSI[mote] = rssi\n\n if rssi > self.STABLE_RSSI:\n numStableNeighbors += 1\n\n # make sure it is connected to at least stable_neighbors motes\n # or connected to all the currently deployed motes when the\n # number of deployed 
motes are smaller than stable_neighbors\n if (numStableNeighbors >= self.stable_neighbors or\n numStableNeighbors == len(connectedMotes)):\n print 'moteid %d, mote x %.4f, mote y %.4f: valid %s' % (mote.id, mote.x, mote.y , SimEngine.SimEngine().checkValidPosition(mote.x, mote.y, countSquare=True, placement=True))\n if self.settings.mobilityModel == 'RPGM' and SimEngine.SimEngine().checkValidPosition(mote.x, mote.y, countSquare=True, placement=True):\n connected = True\n elif self.settings.mobilityModel != 'RPGM':\n connected = True\n\n connectedMotes += [mote]\n\n # self.motes[3].setRSSI(self.motes[0], -96)\n # self.motes[0].setRSSI(self.motes[3], -96)\n\n # for each mote, compute PDR to each neighbors\n for mote in self.motes:\n for m in self.motes:\n if mote == m:\n continue\n if mote.getRSSI(m) > mote.minRssi:\n pdr = self._computePDR(mote, m)\n mote.setPDR(m, pdr)\n m.setPDR(mote, pdr)", "def __init__(self, initial_surfaces):\n\n nsurf = len(initial_surfaces)\n nvert, nedge, ngroup, \\\n surf_ptrs, edge_ptrs, \\\n surf_group, edge_group \\\n = self._compute_topology(initial_surfaces)\n\n self._num = {\n 'surf': len(initial_surfaces),\n 'vert': nvert,\n 'edge': nedge,\n 'group': ngroup,\n }\n\n self._topo = {\n 'surf_ptrs': surf_ptrs,\n 'edge_ptrs': edge_ptrs,\n 'surf_group': surf_group,\n 'edge_group': edge_group,\n }\n\n self._mult = {\n 'vert': numpy.zeros(nvert, int),\n 'edge': numpy.zeros(nedge, int),\n 'diff_vert': numpy.zeros(nvert, int),\n 'diff_edge': numpy.zeros(nedge, int),\n }\n\n self._bspline = {\n 'order': 4 * numpy.ones(ngroup, int),\n 'num_cp': 4 * numpy.ones(ngroup, int),\n 'num_pt': 10 * numpy.ones(ngroup, int),\n }\n\n self._surf_indices = {\n 'df': numpy.zeros((nsurf, 2), int, 'F'),\n 'cp': numpy.zeros((nsurf, 2), int, 'F'),\n 'pt': numpy.zeros((nsurf, 2), int, 'F'),\n }\n\n self._edge_indices = {\n 'df': numpy.zeros((nedge, 2), int, 'F'),\n 'cp': numpy.zeros((nedge, 2), int, 'F'),\n 'pt': numpy.zeros((nedge, 2), int, 'F'),\n }\n\n self._str_indices = {\n 'cp': numpy.zeros((nsurf, 2), int, 'F'),\n 'pt': numpy.zeros((nsurf, 2), int, 'F'),\n }\n\n self._vert_indices = numpy.zeros(nvert, int)\n\n self._size = {\n 'df_str': 0,\n 'df': 0,\n 'cp': 0,\n 'cp_str': 0,\n 'pt_str': 0,\n 'pt': 0,\n }\n\n self.diff = {\n 'surf': numpy.zeros((nsurf, 3, 3), bool, 'F'),\n 'edge': numpy.zeros((nedge, 2), bool, 'F'),\n }\n\n self.hidden = numpy.zeros(nsurf, bool)\n\n self.jac = {\n 'd(df)/d(df_str)': None,\n 'd(cp)/d(df)': None,\n 'd(cp_str)/d(cp)': None,\n 'd(pt_str)/d(cp_str)': None,\n 'd(pt)/d(pt_str)': None,\n }\n\n self.vec = {\n 'df_str': None,\n 'df': None,\n 'cp': None,\n 'cp_str': None,\n 'pt_str': None,\n 'pt': None,\n }", "def topology(self, topo_file: str, *args: str):\n self.scion_sh('topology', '-c', topo_file, '-d', *args)", "def plane_OLS(x,y,z, print_distances=False):\n A = np.array([x, y]).T\n B = z.T\n\n #solve Ax=B\n p = np.matmul(np.matmul(linalg.inv(np.matmul(A.T, A)), A.T), B.T) #uses left pseudo-inverse {(A^T * A)^-1 * A^T} due to system being overconstrained (A doesn't have a true inverse)\n params = [-float(p[0]), -float(p[1]), 1.0] #c=1.0 by default\n bottom = np.sqrt(params[0]*params[0] + params[1]*params[1] + params[2]*params[2])\n for i in range(len(params)): params[i] = params[i]/bottom\n print(\"# - Normalized best-fit plane parameters: {0}\".format(params))\n if print_distances:\n for i in range(len(x)):\n print(plane_dist(x[i], y[i], z[i], params))\n return params", "def __init__(self, constraints=[], infeasiblePoints=[], feasiblePoints=[], 
optimalPoint=None, costVector=None, zoom=1.0, frameTime=0.0): \n super(PacmanPlotLP, self).__init__(zoom, frameTime)\n\n xmin = 100000\n ymin = 100000\n xmax = -100000\n ymax = -100000\n\n for point in feasiblePoints:\n if point[0] < xmin:\n xmin = point[0]\n if point[0] > xmax:\n xmax = point[0]\n if point[1] < ymin:\n ymin = point[1]\n if point[1] > ymax:\n ymax = point[1]\n\n if len(feasiblePoints) == 0:\n for point in infeasiblePoints:\n if point[0] < xmin:\n xmin = point[0]\n if point[0] > xmax:\n xmax = point[0]\n if point[1] < ymin:\n ymin = point[1]\n if point[1] > ymax:\n ymax = point[1]\n\n xmin = int(math.floor(xmin)) - 3\n ymin = int(math.floor(ymin)) - 3\n xmax = int(math.ceil(xmax)) + 3\n ymax = int(math.ceil(ymax)) + 3\n width = xmax-xmin+1\n height = ymax-ymin+1\n\n# p = feasiblePoints[2]\n# print(\"p={}\".format(p))\n# print(\"feasible={}\".format(self.pointFeasible(p, constraints)))\n# g = self.cartesianToLayout(xmin, ymin, xmax, ymax, p)\n# print(\"g={}\".format(g))\n# gr = (int(round(g[0])), int(round(g[1])))\n# p2 = self.layoutToCartesian(xmin, ymin, xmax, ymax, gr)\n# print(\"p2={}\".format(p2))\n# print(\"p2 feasible={}\".format(self.pointFeasible(p2, constraints)))\n\n layoutLists = self.blankLayoutLists(width, height)\n\n self.addInfeasibleGhosts(layoutLists, constraints, xmin, ymin, xmax, ymax)\n\n layoutLists = self.changeBorderGhostsToWall(layoutLists)\n \n for point in infeasiblePoints:\n self.addCartesianPointToLayout(layoutLists, point, '.', xmin, ymin, xmax, ymax)\n\n for point in feasiblePoints:\n self.addCartesianPointToLayout(layoutLists, point, 'o', xmin, ymin, xmax, ymax)\n\n if optimalPoint is not None:\n self.addCartesianPointToLayout(layoutLists, optimalPoint, 'P', xmin, ymin, xmax, ymax)\n\n if graphicsUtils._canvas is not None:\n graphicsUtils.clear_screen()\n \n # Initialize GameStateData with blank board with axes \n self.width = width\n self.height = height\n\n self.zoom = min(30.0/self.width, 20.0/self.height)\n self.gridSize = graphicsDisplay.DEFAULT_GRID_SIZE * self.zoom\n\n maxNumGhosts = 10000\n layout = Layout(layoutLists)\n self.blankGameState = GameStateData()\n self.blankGameState.initialize(layout, maxNumGhosts)\n self.initialize(self.blankGameState)\n title = 'Pacman Plot LP'\n graphicsUtils.changeText(self.infoPane.scoreText, title)\n graphicsUtils.refresh()\n graphicsUtils.sleep(1)\n\n if costVector is not None:\n self.shadeCost(layoutLists, constraints, costVector, feasiblePoints, xmin, ymin, xmax, ymax)", "def generateTopology():\n switches = {}\n interfaces = {}\n links = {}\n return (switches,links)", "def __init__(self, n=1, cpu=.1, bw=10, delay=None,\n max_queue_size=None, **params):\n\n # Initialize topo\n Topo.__init__(self, **params)\n\n # Host and link configuration\n hconfig = {'cpu': cpu}\n lconfig = {'bw': bw, 'delay': delay,\n 'max_queue_size': max_queue_size }\n\n # Create the actual topology\n receiver = self.addHost('receiver')\n\n # Switch ports 1:uplink 2:hostlink 3:downlink\n uplink, hostlink, downlink = 1, 2, 3\n\n # The following template code creates a parking lot topology\n # TODO: Replace the template code to create a parking lot topology for any arbitrary N (>= 1)\n if n < 1: # network must have at least 1 host\n return -1\n\n s = [] # Python list of switches\n h = [] # Python list of hosts\n\n # dynamically add all hosts and switches to network backbone first\n for i in range(n):\n switch_name = 's%s' % (i+1)\n host_name = 'h%s' % (i+1)\n\n s.append( self.addSwitch(switch_name) ) # s[0] is switch1\n 
h.append( self.addHost(host_name) ) # h[0] is host1\n\n # Wire up clients\n self.addLink(h[i], s[i], port1=0, port2=hostlink, **lconfig)\n \n # link to previous switch\n if i > 0:\n self.addLink(s[i-1], s[i], port1=downlink, port2=uplink, **lconfig)\n\n \n # Wire up receiver to first switch\n self.addLink(receiver, s[0], port1=0, port2=uplink, **lconfig)\n\n '''\n # for N = 1\n # Begin: Template code\n s1 = self.addSwitch('s1')\n h1 = self.addHost('h1', **hconfig)\n\n # Wire up receiver\n self.addLink(receiver, s1, port1=0, port2=uplink, **lconfig)\n\n # Wire up clients\n self.addLink(h1, s1, port1=0, port2=hostlink, **lconfig)\n\n # Uncomment the next 8 lines to create a N = 3 parking lot topology\n s2 = self.addSwitch('s2')\n h2 = self.addHost('h2', **hconfig)\n self.addLink(s1, s2,\n port1=downlink, port2=uplink, **lconfig)\n self.addLink(h2, s2,\n port1=0, port2=hostlink, **lconfig)\n\n s3 = self.addSwitch('s3')\n h3 = self.addHost('h3', **hconfig)\n self.addLink(s2, s3,\n port1=downlink, port2=uplink, **lconfig)\n self.addLink(h3, s3,\n port1=0, port2=hostlink, **lconfig)\n \n # End: Template code\n '''", "def __init__(self, dx = 1., dy = 1., nx = 1, ny = 1,\n _RepresentationClass=_Grid2DRepresentation, _TopologyClass=_Mesh2DTopology):\n\n self.args = {\n 'dx': dx,\n 'dy': dy,\n 'nx': nx,\n 'ny': ny\n }\n\n self.nx = nx\n self.ny = ny\n\n self.numberOfHorizontalFaces = self.nx * (self.ny + 1)\n self.numberOfVerticalFaces = self.ny * (self.nx + 1)\n self.numberOfEachDiagonalFaces = self.nx * self.ny\n\n self.dx = PhysicalField(value = dx)\n scale = PhysicalField(value = 1, unit = self.dx.unit)\n self.dx /= scale\n\n self.dy = PhysicalField(value = dy)\n if self.dy.unit.isDimensionless():\n self.dy = dy\n else:\n self.dy /= scale\n\n self.numberOfCornerVertices = (self.nx + 1) * (self. 
ny + 1)\n self.numberOfCenterVertices = self.nx * self.ny\n self.numberOfTotalVertices = self.numberOfCornerVertices + self.numberOfCenterVertices\n\n self.offset = (0, 0)\n\n vertices = self._createVertices()\n faces = self._createFaces()\n\n cells = self._createCells()\n cells = numerix.sort(cells, axis=0)\n\n Mesh2D.__init__(self, vertices, faces, cells,\n _RepresentationClass=_RepresentationClass, _TopologyClass=_TopologyClass)\n\n self.scale = scale", "def fake_focalplane(\n samplerate=20,\n epsilon=0,\n net=1,\n fmin=0,\n alpha=1,\n fknee=0.05,\n fwhm=30,\n npix=7,\n fov=3.0\n):\n zaxis = np.array([0, 0, 1.0])\n \n pol_A = hex_pol_angles_qu(npix)\n pol_B = hex_pol_angles_qu(npix, offset=90.0)\n \n dets_A = hex_layout(npix, fov, \"\", \"\", pol_A)\n dets_B = hex_layout(npix, fov, \"\", \"\", pol_B)\n \n dets = dict()\n for p in range(npix):\n pstr = \"{:01d}\".format(p)\n for d, layout in zip([\"A\", \"B\"], [dets_A, dets_B]):\n props = dict()\n props[\"quat\"] = layout[pstr][\"quat\"]\n props[\"epsilon\"] = epsilon\n props[\"rate\"] = samplerate\n props[\"alpha\"] = alpha\n props[\"NET\"] = net\n props[\"fmin\"] = fmin\n props[\"fknee\"] = fknee\n props[\"fwhm_arcmin\"] = fwhm\n dname = \"{}{}\".format(pstr, d)\n dets[dname] = props\n return dets", "def Planes(pi1, pi2):\n\n if not isinstance(p11, Plane):\n pi1 = Plane(arg.getvector(pi1, 4))\n if not isinstance(p12, Plane):\n pi2 = Plane(arg.getvector(pi2, 4))\n \n w = np.cross(pi1.n, pi2.n)\n v = pi2.d * pi1.n - pi1.d * pi2.n\n return Plucker(np.r_[v, w])", "def topology(self, bonds_info, angles_info):\n return self.header() + self.defaults() +\\\n self.moleculetype() + self.atoms() +\\\n self.bonds(bonds_info) + self.angles(angles_info)", "def PlotAntsPlane():\n\n # load and shrink airplane\n airplane = vtkInterface.PolyData(planefile)\n airplane.points /= 10\n # pts = airplane.GetNumpyPoints() # gets pointer to array\n # pts /= 10 # shrink\n\n # rotate and translate ant so it is on the plane\n ant = vtkInterface.PolyData(antfile)\n ant.RotateX(90)\n ant.Translate([90, 60, 15])\n\n # Make a copy and add another ant\n ant_copy = ant.Copy()\n ant_copy.Translate([30, 0, -10])\n\n # Create plotting object\n plobj = vtkInterface.PlotClass()\n plobj.AddMesh(ant, 'r')\n plobj.AddMesh(ant_copy, 'b')\n\n # Add airplane mesh and make the color equal to the Y position\n plane_scalars = airplane.points[:, 1]\n plobj.AddMesh(airplane, scalars=plane_scalars, stitle='Plane Y\\nLocation')\n plobj.AddText('Ants and Plane Example')\n plobj.Plot()", "def two_plane_obj_points(grid_size, dx):\r\n objp_xy = np.zeros((grid_size[0]*grid_size[1], 3), np.float32)\r\n objp_yz = np.zeros((grid_size[1]*grid_size[2], 3), np.float32)\r\n objp_xy[:,:2] = np.mgrid[0:grid_size[0], 0:grid_size[1]].T.reshape(-1, 2)\r\n objp_yz[:,1:3] = np.mgrid[0:grid_size[1], 0:grid_size[2]].T.reshape(-1, 2)\r\n\r\n return objp_xy*dx, objp_yz*dx", "def build_topo(self):\n super(EBGPTopo, self).build()", "def patterning(mesh, operator):\n\n\n operators = {\n 'conway_dual': conway_dual,\n 'conway_join': conway_join,\n 'conway_ambo': conway_ambo,\n 'conway_kis': conway_kis,\n 'conway_needle': conway_needle,\n 'conway_zip': conway_zip,\n 'conway_truncate': conway_truncate,\n 'conway_ortho': conway_ortho,\n 'conway_expand': conway_expand,\n 'conway_gyro': conway_gyro,\n 'conway_snub': conway_snub,\n 'conway_meta': conway_meta,\n 'conway_bevel': conway_bevel\n }\n\n try:\n operators[operator](mesh)\n return mesh\n except:\n return mesh", "def plane_desc(self) -> str:\n return 
self.planes[0].join(' ') + self.planes[1].join(' ') + self.planes[2].join(' ')", "def create_partition(mesh,polygons,enforce_exact=False):", "def MeshMachine(main):\n\n # oDesign definition\n oDesign = main['ANSYS']['oDesign']\n\n # Data for the rotor mesh\n RotorName = main['ANSYS']['Rotor&Magnets']['Name'][0]\n RotorNumMaxElem = main['ANSYS']['Mesh']['Rotor']['NumMaxElem']\n RotorMaxLength = main['ANSYS']['Mesh']['Rotor']['MaxLength']\n\n # Data for the magnets mesh\n PMNames = main['ANSYS']['Rotor&Magnets']['PMNames']\n PMNumMaxElem = main['ANSYS']['Mesh']['Magnets']['NumMaxElem']\n PMMaxLength = main['ANSYS']['Mesh']['Magnets']['MaxLength']\n\n # Data for the Stator mesh\n StatorName = main['ANSYS']['Stator']['Name']\n StatorNormalDev = main['ANSYS']['Mesh']['Stator']['NormalDev']\n StatorAspectRatio = main['ANSYS']['Mesh']['Stator']['AspectRatio']\n\n # Data for the Stator mesh\n CoilNames = main['ANSYS']['Winding']['CoilNames']\n WindingNumMaxElem = main['ANSYS']['Mesh']['Winding']['NumMaxElem']\n WindingMaxLength = main['ANSYS']['Mesh']['Winding']['MaxLength']\n\n WindingName = []\n for phase in CoilNames:\n for direction in phase:\n WindingName += direction\n\n # Creating meshes\n oModule = oDesign.GetModule(\"MeshSetup\")\n\n # Rotor meshes\n oModule.AssignLengthOp(\n [\n \"NAME:Rotor\",\n \"RefineInside:=\", True,\n \"Enabled:=\", True,\n \"Objects:=\", [RotorName],\n \"RestrictElem:=\", False,\n \"NumMaxElem:=\", str(RotorNumMaxElem),\n \"RestrictLength:=\", True,\n \"MaxLength:=\", str(RotorMaxLength)+\"mm\"\n ]\n )\n # Magnet meshes\n oModule.AssignLengthOp(\n [\n \"NAME:Magnets\",\n \"RefineInside:=\", True,\n \"Enabled:=\", True,\n \"Objects:=\", PMNames,\n \"RestrictElem:=\", False,\n \"NumMaxElem:=\", str(PMNumMaxElem),\n \"RestrictLength:=\", True,\n \"MaxLength:=\", str(PMMaxLength)+\"mm\"\n ]\n )\n # Stator meshes\n oModule.AssignTrueSurfOp(\n [\n \"NAME:Stator\",\n \"Objects:=\", [StatorName],\n \"CurvedSurfaceApproxChoice:=\", \"ManualSettings\",\n \"SurfDevChoice:=\", 0,\n \"NormalDevChoice:=\", 2,\n \"NormalDev:=\", str(StatorNormalDev) + \"deg\",\n \"AspectRatioChoice:=\", 2,\n \"AspectRatio:=\", str(StatorAspectRatio)\n ]\n )\n\n # Coil meshes\n oModule.AssignLengthOp(\n [\n \"NAME:Coils\",\n \"RefineInside:=\"\t, True,\n \"Enabled:=\"\t\t, True,\n \"Objects:=\"\t\t, WindingName,\n \"RestrictElem:=\"\t, False,\n \"NumMaxElem:=\"\t\t, str(WindingNumMaxElem),\n \"RestrictLength:=\"\t, True,\n \"MaxLength:=\"\t\t, str(WindingMaxLength) +\"mm\"\n ]\n )\n\n return main", "def convert_topology(src_filename, set_backbone=True, in_place=False, split_dir=None):\n\n # Grab unit cell description (should be on first few lines:\n cryst = None\n with open(src_filename) as src:\n for line in src.readlines():\n if line.startswith('CRYST1'):\n cryst = line\n break\n\n # Read in source PDB (DEShaw original format)\n src_pdb = PdbStructure(open(src_filename))\n atoms = list(src_pdb.iter_atoms())\n topo = md.load(src_filename).top\n\n # Break into 4 segments\n segment_list = ['C1', 'C2', 'C3', 'C4']\n segment = {l:[] for l in segment_list}\n for i in atoms: \n segment[i.segment_id].append(i)\n\n # Set temperature factor (for gradual heating) \n if set_backbone:\n backbone = topo.select(\"backbone\")\n for i in range(0, len(segment['C1'])):\n if i in backbone:\n segment['C1'][i].location.temperature_factor = 1.0\n\n # Resort water segements and alias \"pseu\" to OM (tip4p forcefield)\n for wat in ['C2', 'C3']:\n segment[wat] = sorted(segment[wat], key = lambda i: i.residue_number)\n 
start_serial_num = min(segment[wat], key= lambda i: i.serial_number)\n for i in range(0, len(segment[wat])):\n newsn = i + start_serial_num.serial_number\n segment[wat][i].serial_number = newsn\n if segment[wat][i].get_name == 'pseu':\n segment[wat][i].set_name_with_spaces(' OM ')\n\n # FOR RE-RUNNING THE PSFGEN\n if split_dir is not None:\n for s in segment_list:\n with open(split_dir + '/%s.pdb' % s, 'w') as dest:\n for atom in segment[s]:\n _=dest.write(str(atom) + '\\n')\n\n # Writeout new file\n if in_place:\n dest = open(src_filename, 'w')\n if cryst is not None:\n dest.write(cryst)\n for s in segment_list:\n for atom in segment[s]:\n _=dest.write(str(atom) + '\\n')\n _=dest.write('END')\n dest.close()", "def build_topology_vonneumann(config):\n\n width = config['VonNeumannTopology']['width']\n height = config['VonNeumannTopology']['height']\n periodic = config['VonNeumannTopology']['periodic']\n assert width > 0 and height > 0\n\n graph = nx.grid_2d_graph(m=width, n=height, periodic=periodic)\n graph = nx.convert_node_labels_to_integers(graph)\n graph.name = \"VonNeumann Lattice: {r} rows, {c} columns\".format(r=height,\n c=width)\n\n if periodic:\n graph.name += ' with periodic boundaries'\n\n for node in graph.nodes_iter():\n graph.node[node]['coords'] = divmod(node, width)\n\n return graph", "def createTopology(self):\n\n # find DAG root\n dagRoot = None\n for mote in self.motes:\n if mote.id == 0:\n mote.role_setDagRoot()\n dagRoot = mote\n assert dagRoot\n\n if self.settings.mobilityModel == 'RPGM':\n # put DAG root at center of area\n dagRoot.setLocation(x=SimEngine.SimEngine().targets[0][0],\n y=SimEngine.SimEngine().targets[0][1])\n else:\n # put DAG root at center of area\n dagRoot.setLocation(x=self.squareSide/2,\n y=self.squareSide/2)\n\n # reposition each mote until it is connected\n connectedMotes = [dagRoot]\n motes_shuffled = copy.copy(self.motes)\n random.shuffle(motes_shuffled) # shuffle them around\n\n # for mote in self.motes:\n for mote in motes_shuffled:\n stableNeighbors = []\n if mote in connectedMotes:\n continue\n\n connected = False\n while not connected:\n # pick a random location\n # mote.setLocation(x=self.squareSide*random.random(),\n # y=self.squareSide*random.random())\n #\n # mote.setLocation(\n # x=self.settings.squareSide * random.random(),\n # y=self.settings.squareSide * random.random()\n # )\n\n newX = None\n newY = None\n # if no topology is not given, build the topology yourself\n if SimEngine.SimEngine().ilp_topology is None:\n newX = self.settings.squareSide * random.random()\n newY = self.settings.squareSide * random.random()\n else:\n # if no topology is given, use that topology\n newX = SimEngine.SimEngine().ilp_topology[str(mote.id)]['x']\n newY = SimEngine.SimEngine().ilp_topology[str(mote.id)]['y']\n\n mote.setLocation(\n x=newX,\n y=newY\n )\n\n numStableNeighbors = 0\n stableNeighbors = []\n\n # tryAgain = False\n # for cm in connectedMotes:\n # rssi = self._computeRSSI(mote, cm)\n # if rssi > -110:\n # tryAgain = True\n\n # if not tryAgain:\n # count number of neighbors with sufficient RSSI\n for cm in connectedMotes:\n\n rssi = self._computeRSSI(mote, cm)\n mote.setRSSI(cm, rssi)\n cm.setRSSI(mote, rssi)\n\n # save the intial RSSI values for future use in the mobility models\n mote.initialRSSI[cm] = rssi\n cm.initialRSSI[mote] = rssi\n\n if self.settings.individualModulations == 1:\n if self.rssiToPdr(rssi, modulation=Modulation.Modulation().minimalCellModulation[SimSettings.SimSettings().modulationConfig]) > 
self.settings.stableNeighborPDR:\n # if rssi > Modulation.Modulation().modulationStableRSSI[Modulation.Modulation().minimalCellModulation[SimSettings.SimSettings().modulationConfig]]:\n # print rssi\n numStableNeighbors += 1\n stableNeighbors.append(cm.id)\n else:\n if rssi > self.STABLE_RSSI:\n # print rssi\n numStableNeighbors += 1\n\n # make sure it is connected to at least STABLE_NEIGHBORS motes\n # or connected to all the currently deployed motes when the number of deployed motes\n # are smaller than STABLE_NEIGHBORS\n if numStableNeighbors >= self.stable_neighbors or numStableNeighbors == len(connectedMotes):\n print 'For mote {0}, stable neighbors {1}'.format(mote.id, stableNeighbors)\n connected = True\n\n connectedMotes += [mote]\n\n # for each mote, compute PDR to each neighbors\n for mote in self.motes:\n for m in self.motes:\n if mote == m:\n continue\n\n # set the distance to all other motes\n distance = math.sqrt((m.x - mote.x) ** 2 + (m.y - mote.y) ** 2)\n m.set_distance(mote, distance)\n mote.set_distance(m, distance)\n # print 'mote %d to mote %d: %.4f' % (m.id, mote.id, distance)\n if self.settings.individualModulations == 1:\n rssi_value = mote.getRSSI(m)\n # for modulationTmp in Modulation.Modulation().modulations:\n # if self.settings.ilpfile is not None:\n # ## I am not going to set this as this should be set by the ILP\n # pass\n # else:\n # # if the rssi value is higher than the minimal signal value required for this neighbor, take that modulation\n # # and compute the PDR using that modulation\n # pass\n # # if rssi_value > Modulation.Modulation().modulationStableRSSI[modulationTmp]:\n # # pdr = self._computePDR(mote, m, modulation=modulationTmp)\n # # mote.setPDR(m, pdr)\n # # m.setPDR(mote, pdr)\n # # mote.setModulation(m, modulationTmp)\n # # m.setModulation(mote, modulationTmp)\n else:\n if mote.getRSSI(m) > mote.minRssi:\n pdr = self._computePDR(mote, m)\n mote.setPDR(m, pdr)\n m.setPDR(mote, pdr)", "def surfaceRender(nodal_mesh, focus, ax=None):\n\t# If no axes were passed, generate new set of axes\n\tif not ax:\n\t\tfig = mplt.figure()\n\t\tax = fig.add_subplot(111, projection='3d')\n\n\t# Sort the mesh by first 3 columns\n\tnodal_mesh = nodal_mesh[nodal_mesh[:, 0].argsort()]\n\tnodal_mesh = nodal_mesh[nodal_mesh[:, 1].argsort(kind='mergesort')]\n\tnodal_mesh = nodal_mesh[nodal_mesh[:, 2].argsort(kind='mergesort')]\n\t\n\t# Set up number of divisions and calculate e for each division (as a ratio)\n\tnum_div = 20\n\te = [i/num_div for i in range(num_div + 1)]\n\t# Convert angular values from degrees to radians\n\trads = math.pi/180\n\tnodal_mesh[:, 1:3] *= rads\n\t# Store the shapes and sizes of the mesh values\n\tm = nodal_mesh.shape[0]\n\tsize_nodal_nu = np.where(nodal_mesh[:, 2] == 0)[0].size\n\tsize_nodal_phi = m/size_nodal_nu\n\t# Get the mu and theta values from the mesh\n\tnodal_nu = nodal_mesh[:size_nodal_nu, 1]\n\tnodal_phi = nodal_mesh[::size_nodal_nu, 2]\n\t# Convert apex node from prolate to cartesian, then plot with scatter\n\tif min(nodal_nu) == 0:\n\t\tx, y, z = mathhelper.prolate2cart(nodal_mesh[0, 0], nodal_mesh[0, 1], nodal_mesh[0, 2], focus)\n\t\tax.scatter(z, y, -x)\n\t\tstart_nu = 1\n\telse:\n\t\tstart_nu = 0\n\t# Plot circumferential element boundaries\n\tfor i in range(start_nu, size_nodal_nu):\n\t\tfor j in range(int(size_nodal_phi)):\n\t\t\t# Define nodal values for interpolation\n\t\t\tif j == size_nodal_phi-1:\n\t\t\t\tind0 = i\n\t\t\t\tp0 = 2*math.pi\n\t\t\telse:\n\t\t\t\tind0 = (j+1)*size_nodal_nu + i\n\t\t\t\tp0 = 
nodal_phi[j+1]\n\t\t\tind1 = (j)*size_nodal_nu + i\n\t\t\tp1 = nodal_phi[j]\n\t\t\t# Get mu and dM/dm1\n\t\t\tm0 = nodal_mesh[ind0, 0]\n\t\t\tdm0 = nodal_mesh[ind0, 3]\n\t\t\tm1 = nodal_mesh[ind1, 0]\n\t\t\tdm1 = nodal_mesh[ind1, 3]\n\t\t\t# Convert to cartesian\n\t\t\tn0x, n0y, n0z = mathhelper.prolate2cart(nodal_mesh[ind0, 0], nodal_mesh[ind0, 1], nodal_mesh[ind0, 2], focus)\n\t\t\t# Plot the node\n\t\t\tax.scatter(n0z, n0y, -n0x)\n\t\t\t# Plot the arc segments\n\t\t\tfor k in range(2, len(e)):\n\t\t\t\t# Determine starting point to use\n\t\t\t\tif k == 2:\n\t\t\t\t\tpt_x, pt_y, pt_z = n0x, n0y, n0z\n\t\t\t\telse:\n\t\t\t\t\tpt_x, pt_y, pt_z = x_here, y_here, z_here\n\t\t\t\t# Get lambda\n\t\t\t\thm0 = 1 - 3*(e[k]**2) + 2*(e[k]**3)\n\t\t\t\thdm0 = e[k]*(e[k] - 1)**2\n\t\t\t\thm1 = (e[k]**2)*(3 - 2*e[k])\n\t\t\t\thdm1 = (e[k]**2)*(e[k] - 1)\n\t\t\t\tm = hm0 * m0 + hdm0 * dm0 + hm1 * m1 + hdm1 * dm1\n\t\t\t\t# Get theta\n\t\t\t\tp_here = p0 - e[k]*(p0 - p1)\n\t\t\t\t# Convert to cartesian\n\t\t\t\tx_here, y_here, z_here = mathhelper.prolate2cart(m, nodal_nu[i], p_here, focus)\n\t\t\t\t# Create vectors\n\t\t\t\tx = np.append(pt_x, x_here)\n\t\t\t\ty = np.append(pt_y, y_here)\n\t\t\t\tz = np.append(pt_z, z_here)\n\t\t\t\t# Plot segments\n\t\t\t\tax.plot(z, y, -x, 'k-.')\n\t# Plot longitudinal element boundaries\n\tfor i in range(int(size_nodal_phi)):\n\t\tfor j in range(size_nodal_nu-1):\n\t\t\t# Define nodal values needeed for interpolation\n\t\t\tind0 = i*size_nodal_nu + j\n\t\t\tind1 = ind0 + 1\n\t\t\tn0 = nodal_nu[j]\n\t\t\tn1 = nodal_nu[j+1]\n\t\t\t# Get lambda and dL/de2\n\t\t\tm0 = nodal_mesh[ind0, 0]\n\t\t\tdm0 = nodal_mesh[ind0, 4]\n\t\t\tm1 = nodal_mesh[ind1, 0]\n\t\t\tdm1 = nodal_mesh[ind1, 4]\n\t\t\t# Convert nodal points to cartesian\n\t\t\tn0x, n0y, n0z = mathhelper.prolate2cart(nodal_mesh[ind0, 0], nodal_mesh[ind0, 1], nodal_mesh[ind0, 2], focus)\n\t\t\t# Plot arc\n\t\t\tfor k in range(2, len(e)):\n\t\t\t\t# Determine point to use\n\t\t\t\tif k == 2:\n\t\t\t\t\tpt_x, pt_y, pt_z = n0x, n0y, n0z\n\t\t\t\telse:\n\t\t\t\t\tpt_x, pt_y, pt_z = x_here, y_here, z_here\n\t\t\t\t# Get lambda\n\t\t\t\thm0 = 1 - 3*(e[k]**2) + 2*(e[k]**3)\n\t\t\t\thdm0 = e[k]*(e[k] - 1)**2\n\t\t\t\thm1 = (e[k]**2)*(3 - 2*e[k])\n\t\t\t\thdm1 = (e[k]**2)*(e[k] - 1)\n\t\t\t\tm = hm0 * m0 + hdm0 * dm0 + hm1 * m1 + hdm1 * dm1\n\t\t\t\t# Get nu\n\t\t\t\tn_here = n0 + e[k]*(n1-n0)\n\t\t\t\t# Convert to cartesian\n\t\t\t\tx_here, y_here, z_here = mathhelper.prolate2cart(m, n_here, nodal_phi[i], focus)\n\t\t\t\t# Append the vectors for plotting\n\t\t\t\tx = np.append(pt_x, x_here)\n\t\t\t\ty = np.append(pt_y, y_here)\n\t\t\t\tz = np.append(pt_z, z_here)\n\t\t\t\t# Plot the segment\n\t\t\t\tax.plot(z, y, -x, 'k-.')\n\t\t\t\t\n\treturn(ax)", "def planes_3d(self, quantity, xplane, yplane):\n xplane = int(xplane)\n yplane = int(yplane)\n # Get the scalar values\n # Get the data on the plane with a fixed x value. These means we'll\n # have changing (y, z) points\n xdata = self.get_plane(quantity, 'yz', xplane)\n # z first cuz we want y to be changing before z to correspond with the\n # way numpy flattens arrays. 
Note this means y points will be in the\n # 2nd column\n xplanepoints = np.array(list(itertools.product(self.Z, self.Y)))\n xdata = xdata.flatten()\n xplanexval = np.array(list(itertools.repeat(x[xplane], len(xdata))))\n xplanedata = np.zeros((xplanepoints.shape[0], 4))\n xplanedata[:, 0] = xplanexval\n xplanedata[:, 1] = xplanepoints[:, 1]\n xplanedata[:, 2] = xplanepoints[:, 0]\n xplanedata[:, 3] = xdata\n # Same procedure for fixed y plane\n ydata = self.get_plane(quantity, 'xz', yplane)\n yplanepoints = np.array(list(itertools.product(z, x)))\n ydata = ydata.flatten()\n yplaneyval = np.array(list(itertools.repeat(y[yplane], len(ydata))))\n yplanedata = np.zeros((yplanepoints.shape[0], 4))\n yplanedata[:, 0] = yplanepoints[:, 1]\n yplanedata[:, 1] = yplaneyval\n yplanedata[:, 2] = yplanepoints[:, 0]\n yplanedata[:, 3] = ydata\n labels = ('X [um]', 'Y [um]', 'Z [um]', quantity)\n # Now stack them vertically and plot!\n all_data = np.vstack((xplanedata, yplanedata))\n self.scatter3d(all_data[:, 0], all_data[:, 1], all_data[:, 2],\n all_data[:, 3], labels, 'planes_3d')", "def RoadRiverTab():\n\n # Main layout\n mainTab = cmds.columnLayout(adjustableColumn=True, columnAttach=('both', 20))\n \n ### Geo parameters\n cmds.separator(height=10, style=\"none\")\n cmds.text(label=\"Generate road and rivers:\", align=\"left\")\n RoadRiverTab.roadWidth = cmds.floatSliderGrp(label=\"Road Width\", field=True, value=1, min=.01, max=100)\n RoadRiverTab.roadQuality = cmds.intSliderGrp(label=\"Curve Quality\", field=True, value=20, min=2, max=100)\n\n ### Choose which king of geo\n cmds.separator(height=5, style=\"none\")\n cmds.rowLayout(numberOfColumns=3, adjustableColumn=2)\n cmds.button(label='Create Road', width=200, command=buildRoad)\n cmds.separator(style=\"none\")\n cmds.button(label=\"Create River\", width=200, command=buildRiver)\n\n cmds.setParent('..')\n cmds.setParent('..')\n\n return mainTab", "def create_spacecraft_geometry():\r\n\r\n bounds_lower = [3, 7, 33]\r\n funcs_lower = [0, lambda y: y ** 1.5, 0]\r\n\r\n bounds_upper = None\r\n funcs_upper = 100\r\n\r\n x_max = 10\r\n x_min = 0\r\n resolution = 200\r\n\r\n spacecraft = Geometry(x_max, x_min, resolution,\r\n bounds_upper, funcs_upper,\r\n bounds_lower, funcs_lower)\r\n\r\n return spacecraft", "def shapebuild_PlaneXOY(*args):\n return _ShapeBuild.shapebuild_PlaneXOY(*args)", "def __init__(self, spread, depth, bandwidth, delay, loss, fpga, fpga_bandwidth=None, fpga_delay=None,\n fpga_loss=None, poisson=None):\n logger = logging.getLogger(__name__)\n\n # Initialize topology #\n Topo.__init__(self)\n\n # Setup parameters\n fpga_bandwidth = bandwidth if fpga_bandwidth is None else fpga_bandwidth\n fpga_delay = delay if fpga_delay is None else halve_delay(fpga_delay)\n fpga_loss = loss * 2 if fpga_loss is None else fpga_loss\n\n if poisson:\n link_opts = dict(bw=bandwidth, delay=get_poisson_delay(delay), loss=loss, use_htb=True)\n fpga_link_opts = dict(bw=fpga_bandwidth, delay=get_poisson_delay(fpga_delay),\n loss=fpga_loss, use_htb=True)\n else:\n link_opts = dict(bw=bandwidth, delay=delay, loss=loss, use_htb=True)\n fpga_link_opts = dict(bw=fpga_bandwidth, delay=fpga_delay, loss=fpga_loss, use_htb=True)\n cloud_link_opts = dict(bw=1000, delay='0ms', loss=0, use_htb=True)\n\n # Add hosts and switches #\n\n # switch naming convention:\n # s[level][switch_number]\n\n switches = [[None for _ in range(spread ** (depth - 1))] for _ in range(depth - 1)]\n hosts = [None for _ in range(spread ** (depth - 1))]\n\n for i in range(depth):\n for j 
in range(spread ** i):\n if i == (depth - 1):\n hosts[j] = self.addHost('h' + str(j))\n else:\n sw_name = 's' + str(i) + str(j)\n switches[i][j] = self.addSwitch(sw_name)\n if fpga is not None and fpga == i:\n # Create host to serve as FPGA in switch\n # Will have one link to the relevant FPGA\n # The link will have the bandwidth and loss specified by the user, and half\n # the delay\n # These parameters are as if they were caused by the FPGA, rather than a\n # link\n # As a result, latency is halved since it will essentially be doubled by the\n # packet flowing in\n # and out of the host\n self.addHost('f{}'.format(j))\n self.addLink(sw_name, 'f{}'.format(j), **fpga_link_opts)\n\n # Add host to serve as cloud\n # Will have one high bandwidth, 0 latency link to root switch\n self.addHost('cloud')\n self.addLink(switches[0][0], 'cloud', **cloud_link_opts)\n\n # Add links #\n\n for i, row in enumerate(switches):\n for j, switch in enumerate(row):\n if switch is None:\n break\n if i == (depth - 2):\n for k in range(spread):\n # add a link between the current switch, and all hosts\n # directly beneath it.\n # (spread * j) + k will get all the appropriate hosts\n logger.debug(\"Adding standard link from switch[{}][{}] to \"\n \"host[{}]\".format(i, j, (spread * j) + k))\n self.addLink(switch, hosts[(spread * j) + k], **link_opts)\n\n else:\n for k in range(spread):\n # add a link between the current switch, and all\n # switches directly beneath it.\n # i + 1 refers to 1 level deeper in the tree, and\n # (spread * j) + k will get all the appropriate child\n # switches on that level.\n logger.debug(\"Adding standard link from switch[{}][{}] to \"\n \"switch[{}][{}]\".format(i, j, i + 1, (spread * j) + k))\n self.addLink(switch, switches[i + 1][(spread * j) + k], **link_opts)", "def set_topology(self, topo):\n self.topo = topo", "def __init__(self, mesh, out_dir='./results/', use_periodic=False):\n s = \"::: INITIALIZING 2D MODEL :::\"\n print_text(s, cls=self)\n \n Model.__init__(self, mesh, out_dir, use_periodic)", "def __init__(self, planes, cosmology):\r\n self.planes = planes\r\n self.plane_redshifts = [plane.redshift for plane in planes]\r\n self.cosmology = cosmology", "def parse(cls, vmf_file: VMF, tree: Keyvalues) -> 'Side':\n # planes = \"(x1 y1 z1) (x2 y2 z2) (x3 y3 z3)\"\n verts = tree[\"plane\", \"(0 0 0) (0 0 0) (0 0 0)\"][1:-1].split(\") (\")\n if len(verts) != 3:\n raise ValueError('Wrong number of solid planes in \"' +\n tree['plane', ''] +\n '\"')\n planes = [\n Vec.from_str(verts[0]),\n Vec.from_str(verts[1]),\n Vec.from_str(verts[2]),\n ]\n\n side: Side = cls(\n vmf_file,\n planes,\n tree.int('id', -1),\n tree.int('lightmapscale', 16),\n tree.int('smoothing_groups'),\n tree['material', ''],\n tree.float('rotation'),\n UVAxis.parse(tree['uaxis', '[0 1 0 0] 0.25']),\n UVAxis.parse(tree['vaxis', '[0 0 -1 0] 0.25']),\n )\n\n try:\n disp_tree = tree.find_key('dispinfo')\n except LookupError: # Not a displacement.\n return side\n\n # Deal with displacements.\n disp_power = disp_tree.int('power', 4)\n if disp_power in (0, 1, 2, 3, 4):\n side.disp_power = disp_power # type: ignore\n else:\n raise ValueError(f'Invalid displacement power {disp_power}!')\n side.disp_pos = disp_tree.vec('startposition')\n side.disp_elevation = disp_tree.float('elevation')\n disp_flag_ind = disp_tree.int('flags')\n if 0 <= disp_flag_ind <= 16:\n side.disp_flags = _DISP_FLAG_TO_COLL[disp_flag_ind]\n else:\n raise ValueError(f'Invalid displacement flags {disp_flag_ind} in side {side.id}!')\n if 
disp_tree.bool('subdiv'):\n side.disp_flags |= DispFlag.SUBDIV\n\n # This always has a key of '10', with 10 '-1's...\n vert_key = disp_tree.find_key('allowed_verts')\n allowed_vert = Array('i', map(int, vert_key['10'].split()))\n if len(allowed_vert) != 10:\n raise ValueError(\n f'Displacement allowed_verts in side {side.id} '\n f'must be 10 long!'\n )\n side.disp_allowed_vert = allowed_vert\n\n size = side.disp_size\n side._disp_verts = [\n DispVertex(x, y)\n for y in range(size)\n for x in range(size)\n ]\n # Parse all the rows..\n side._parse_disp_vecrow(disp_tree, 'normals', 'normal')\n side._parse_disp_vecrow(disp_tree, 'offsets', 'offset')\n side._parse_disp_vecrow(disp_tree, 'offset_normals', 'offset_norm')\n\n for y, row in side._iter_disp_row(disp_tree, 'alphas', size):\n try:\n for x, alpha in enumerate(row):\n side._disp_verts[y * size + x].alpha = float(alpha)\n except ValueError as exc:\n raise ValueError(\n f'Displacement array for alpha in side {side.id}, '\n f'row {y} had invalid number: {exc.args[0]}'\n ) from None\n\n for y, row in side._iter_disp_row(disp_tree, 'distances', size):\n try:\n for x, alpha in enumerate(row):\n side._disp_verts[y * size + x].distance = float(alpha)\n except ValueError as exc:\n raise ValueError(\n f'Displacement array for distances in side {side.id}, '\n f'row {y} had invalid number: {exc.args[0]}'\n ) from None\n\n # Not the same, 1 less row and column since it's per-quad.\n tri_tags_count = 2 ** disp_power\n for y, row in side._iter_disp_row(disp_tree, 'triangle_tags', 2 * tri_tags_count):\n try:\n for x in range(tri_tags_count):\n vert = side._disp_verts[y * size + x]\n vert.triangle_a = TriangleTag(int(row[2 * x]))\n vert.triangle_b = TriangleTag(int(row[2 * x + 1]))\n except ValueError as exc:\n raise ValueError(\n f'Displacement array for triangle tags in side {side.id}, '\n f'row {y} had invalid number: {exc.args[0]}'\n ) from None\n\n if 'multiblend' not in disp_tree:\n return side\n # Else: Parse multiblend too.\n # First initialise this list.\n for vert in side._disp_verts:\n vert.multi_colors = [Vec(1, 1, 1), Vec(1, 1, 1), Vec(1, 1, 1), Vec(1, 1, 1)]\n for i in range(4):\n side._parse_disp_vecrow(disp_tree, 'multiblend_color_' + str(i), i)\n\n for y, split in side._iter_disp_row(disp_tree, 'multiblend', 4 * size):\n try:\n for x in range(size):\n side._disp_verts[y * size + x].multi_blend = Vec4(\n float(split[4*x]),\n float(split[4*x + 1]),\n float(split[4*x + 2]),\n float(split[4*x + 3]),\n )\n except ValueError as exc:\n raise ValueError(\n f'Displacement array for multiblend in side {side.id}, '\n f'row {y} had invalid number: {exc.args[0]}'\n ) from None\n\n for y, split in side._iter_disp_row(disp_tree, 'alphablend', 4 * size):\n try:\n for x in range(size):\n side._disp_verts[y * size + x].multi_alpha = Vec4(\n float(split[4*x]),\n float(split[4*x + 1]),\n float(split[4*x + 2]),\n float(split[4*x + 3]),\n )\n except ValueError as exc:\n raise ValueError(\n f'Displacement array for multiblend in side {side.id}, '\n f'row {y} had invalid number: {exc.args[0]}'\n ) from None\n return side", "def __init__(self, n=1, delay='1ms', z=1, bw=1, cpu=.1, max_queue_size=None, **params):\n\n # Initialize topo\n Topo.__init__(self, **params)\n\n #TODO: Create your Dynamic Mininet Topology here!\n #NOTE: You MUST label switches as s1, s2, ... sz\n #NOTE: You MUST label hosts as h1-1, h1-2, ... 
hz-n \n #HINT: Use a loop to construct the topology in pieces.", "def is_intersection_plane_plane(plane1, plane2, epsilon=1e-6):\n # check for parallelity of planes\n if abs(dot_vectors(plane1[1], plane2[1])) > 1 - epsilon:\n return False\n return True", "def __getPlaneName(self):\n item = self._item()\n planeNormal = item.getNormal() if item is not None else None\n\n for name, normal in self._PLANES.items():\n if numpy.array_equal(planeNormal, normal):\n return name\n return '-'", "def make_fake_topology(density=.02, lower_left=ORIGIN, upper_right=Point2D(30, 30), max_z=None):\n styles = ['cone', 'pyramid']\n if not max_z:\n biggest_axis = max(upper_right.x - lower_left.x, upper_right.y - lower_left.y)\n max_z = round(biggest_axis / 1.2) # Just need a rule here. how about max height is half width?\n\n factory = TopologyFactory(lower_left, upper_right)\n number_of_seeds = round(factory.cell_count * density) # how many seeds depends on density and area\n\n # Produce roughly (due to rounding) the number of seeds. We will perform multiple passes,\n # generating random x,y,z values and adding them as peaks on the map. With each pass, the\n # range of z values tends to get smaller (though there is randomness).\n # Also, each pass has a random steepness value, which is essentially the step height if we\n # are walking up an Aztec pyramid\n seeds_per_pass = 5 # a decent-looking value\n passes = round(number_of_seeds // seeds_per_pass)\n\n for i in range(1, passes + 1):\n # get a z-range for this pass\n max_z_pass = round(max_z / i)\n min_z_pass = max_z_pass // 2\n\n seeds = factory._random_points_3d(seeds_per_pass, min_z_pass, max_z_pass)\n steepness = random.uniform(1, 4) # the resulting step height between adjacent cells\n factory._add_peaks_from_seed_points3d(seeds, random.choice(styles), steepness=steepness)\n return factory._tm", "def __init__(self, radius, dimensions, index=N_HPFS):\n _OpticalElement.__init__(self, 'itop mirror', index)\n self.add_boundary(SphericalSurface(\n [0., 0., radius - dimensions[2]], float(radius), name='-z',\n reflective=True))\n self.add_boundary(PlaneSurface(\n [0., 0., 1.], [0., 0., 0.], name='+z'))\n self.add_boundary(PlaneSurface(\n [0., 1., 0.], [0., dimensions[1]/2., 0.], name='+y'))\n self.add_boundary(PlaneSurface(\n [0., -1., 0.], [0., -dimensions[1]/2., 0.], name='-y'))\n self.add_boundary(PlaneSurface(\n [1., 0., 0.], [dimensions[0]/2., 0., 0.], name='+x'))\n self.add_boundary(PlaneSurface(\n [-1., 0., 0.], [-dimensions[0]/2., 0., 0.], name='-x'))", "def __init__(self, file_data, x0, x1, y0, y1, scan_dir=0):\n\n x0 = self.nm2pnt(x0, file_data)\n x1 = self.nm2pnt(x1, file_data)\n y0 = self.nm2pnt(y0, file_data, axis='y')\n y1 = self.nm2pnt(y1, file_data, axis='y')\n\n self.topo_info = file_data[scan_dir].info\n self.x_res = self.topo_info['xres']\n self.y_res = self.topo_info['yres']\n\n self.x_range = np.arange(0, self.x_res, 1)\n self.y_range = np.arange(0, self.y_res, 1)\n\n self.topo_data = file_data[scan_dir].data\n\n self.param_init = [1, 1, 1]\n\n self.topo_plane_lsq = leastsq(self.topo_plane_residuals, self.param_init,\n args=(self.topo_data, x0, x1, y0, y1))[0]\n self.topo_plane_fit = self.topo_plane_paramEval(self.topo_plane_lsq)\n self.topo_data_flattened = self.topo_data - self.topo_plane_fit\n self.topo_data_flattened = self.topo_data_flattened - np.amin(self.topo_data_flattened)\n\n self.get_data()", "def get_pipeline_topology(account_name: Optional[str] = None,\n pipeline_topology_name: Optional[str] = None,\n resource_group_name: 
Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetPipelineTopologyResult:\n __args__ = dict()\n __args__['accountName'] = account_name\n __args__['pipelineTopologyName'] = pipeline_topology_name\n __args__['resourceGroupName'] = resource_group_name\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('azure-native:videoanalyzer/v20211101preview:getPipelineTopology', __args__, opts=opts, typ=GetPipelineTopologyResult).value\n\n return AwaitableGetPipelineTopologyResult(\n description=pulumi.get(__ret__, 'description'),\n id=pulumi.get(__ret__, 'id'),\n kind=pulumi.get(__ret__, 'kind'),\n name=pulumi.get(__ret__, 'name'),\n parameters=pulumi.get(__ret__, 'parameters'),\n processors=pulumi.get(__ret__, 'processors'),\n sinks=pulumi.get(__ret__, 'sinks'),\n sku=pulumi.get(__ret__, 'sku'),\n sources=pulumi.get(__ret__, 'sources'),\n system_data=pulumi.get(__ret__, 'system_data'),\n type=pulumi.get(__ret__, 'type'))", "def polyPlanarProjection(*args, projectionHorizontalSweep: Union[float, bool]=0.0, seamCorrect:\n bool=True, caching: bool=True, constructionHistory: bool=True,\n createNewMap: bool=True, imageCenter: Union[List[float, float],\n bool]=None, imageCenterX: Union[float, bool]=0.0, imageCenterY:\n Union[float, bool]=0.0, imageScale: Union[List[float, float],\n bool]=None, imageScaleU: Union[float, bool]=0.0, imageScaleV:\n Union[float, bool]=0.0, insertBeforeDeformers: bool=True,\n keepImageRatio: bool=True, mapDirection: AnyStr=\"\", name: AnyStr=\"\",\n nodeState: Union[int, bool]=0, perInstance: bool=True,\n projectionCenter: Union[List[float, float, float], bool]=None,\n projectionCenterX: Union[float, bool]=0.0, projectionCenterY:\n Union[float, bool]=0.0, projectionCenterZ: Union[float, bool]=0.0,\n projectionHeight: Union[float, bool]=0.0, projectionScale:\n Union[List[float, float], bool]=None, rotate: Union[List[float, float,\n float], bool]=None, rotateX: Union[float, bool]=0.0, rotateY:\n Union[float, bool]=0.0, rotateZ: Union[float, bool]=0.0,\n rotationAngle: Union[float, bool]=0.0, smartFit: bool=True,\n worldSpace: bool=True, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[AnyStr, Any]:\n pass", "def xyplane(draw, r, x, shift = np.array([1000, 1000, 0, 0]), scale = 300):\n extent = 2.8\n pln = np.array(\n [\n [x,-extent,0],\n [x,extent,0],\n [x,extent,extent*2],\n [x,-extent,extent*2]\n ]\n )\n pln = np.dot(pln,np.transpose(r))\n pln = pln * scale + shift[:3]\n draw.polygon([(pln[0][0],pln[0][1]),(pln[1][0],pln[1][1]),(pln[2][0],pln[2][1]),(pln[3][0],pln[3][1])], (0,102,255,70))", "def plot_phase_plane(self , x_axis = 0 , y_axis = 1 ):\n\n pp = phaseanalysis.PhasePlot( self , x_axis , y_axis )\n \n pp.plot()" ]
[ "0.5991863", "0.5981517", "0.59631455", "0.5854901", "0.58076966", "0.5753462", "0.56212896", "0.5567714", "0.55646545", "0.55594105", "0.55340147", "0.551879", "0.54897666", "0.5471865", "0.5409033", "0.53919226", "0.5324614", "0.53135514", "0.5296597", "0.5288494", "0.52642614", "0.5252399", "0.52334726", "0.521043", "0.5176542", "0.5149132", "0.5137906", "0.5127393", "0.51065546", "0.5097846", "0.5090994", "0.5081946", "0.50530857", "0.5046951", "0.50436246", "0.5036304", "0.50169307", "0.50100595", "0.5008374", "0.49979436", "0.4992863", "0.49871722", "0.49718532", "0.49596983", "0.4947392", "0.4947156", "0.49399287", "0.49352446", "0.49317822", "0.4925205", "0.49207312", "0.49185026", "0.48913085", "0.4882188", "0.48687202", "0.48676476", "0.48610485", "0.48598954", "0.4855208", "0.48507613", "0.48436835", "0.4841052", "0.48317778", "0.48186287", "0.4815154", "0.48143542", "0.4808101", "0.48024732", "0.47922146", "0.4783738", "0.47804233", "0.4778059", "0.47512004", "0.4751039", "0.47466877", "0.4734245", "0.47161657", "0.47142956", "0.47028878", "0.46984896", "0.4694205", "0.46922997", "0.46880665", "0.46682236", "0.466177", "0.46607506", "0.46601704", "0.46575525", "0.4657336", "0.46494013", "0.46479908", "0.46439198", "0.4643234", "0.46400487", "0.46156704", "0.460865", "0.4603622", "0.4599509", "0.45985046", "0.45955977" ]
0.59658796
2
Comparison of two topologies. Two topos are equal if they have the same mesh, shape and domain.
def __eq__(self, other):
    if self.__class__ != other.__class__:
        return False
    return self.mesh == other.mesh and \
        npw.equal(self.shape, other.shape).all() and \
        self.domain == other.domain
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def have_same_topology(first_mesh, second_mesh):\n return attr_has_same_shape(first_mesh, second_mesh, \"v\") and attr_is_equal(\n first_mesh, second_mesh, \"f\"\n )", "def equivalent(kls, first, second):\n if first.empty() and second.empty():\n return True\n elif first.vertices.shape[0] != second.vertices.shape[0]:\n return False\n elif first.edges.shape[0] != second.edges.shape[0]:\n return False\n\n EPSILON = 1e-7\n\n vertex1, ct1 = np.unique(first.vertices, axis=0, return_counts=True)\n vertex2, ct2 = np.unique(second.vertices, axis=0, return_counts=True)\n \n vertex_match = np.all(np.abs(vertex1 - vertex2) < EPSILON)\n ct_match = np.all(ct1 == ct2)\n if not (vertex_match and ct_match):\n return False\n\n g1 = nx.Graph()\n g1.add_edges_from(first.edges)\n g2 = nx.Graph()\n g2.add_edges_from(second.edges)\n edges_match = nx.is_isomorphic(g1, g2)\n del g1 \n del g2\n\n if not edges_match:\n return False\n\n second_verts = {}\n for i, vert in enumerate(second.vertices):\n second_verts[tuple(vert)] = i\n \n attrs = [ attr['id'] for attr in first.extra_attributes ]\n for attr in attrs:\n buf1 = getattr(first, attr)\n buf2 = getattr(second, attr)\n if len(buf1) != len(buf2):\n return False\n\n for i in range(len(buf1)):\n i2 = second_verts[tuple(first.vertices[i])]\n if buf1[i] != buf2[i2]:\n return False\n\n return True", "def compare_topology(tree1, tree2):\n n2p1, n2p2 = ({node.name: node.parent.name\n for node in tree.traverse() if not node.is_root()}\n for tree in (tree1, tree2))\n return n2p1 == n2p2", "def _shape_compare(shape1, shape2):\n if len(shape1) != len(shape2):\n return False\n for s1, s2 in zip(shape1, shape2):\n if s1 != s2:\n return False\n return True", "def __eq__(self, other) -> bool:\n if not isinstance(other, self.__class__):\n return False\n\n if self.number_of_nodes() != other.number_of_nodes():\n return False\n if self.number_of_edges() != other.number_of_edges():\n return False\n\n if list(self.nodes) != list(other.nodes):\n return False\n\n # Compare node data.\n for i in self.nodes:\n # We may want to exclude the 'name' attribute from comparisons, assuming\n # it has no logical meaning.\n if self.nodes[i] != other.nodes[i]:\n return False\n\n if list(self.edges) != list(other.edges):\n return False\n\n for i, j in self.edges:\n # Compare edge data.\n if self.edges[i, j] != other.edges[i, j]:\n return False\n\n return True", "def compareGrids(grid1, grid2):\n if axis_utils.areAxesIdentical(grid1.getLatitude(),\n grid2.getLatitude(), check_id=False)==False:\n return False\n if axis_utils.areAxesIdentical(grid1.getLongitude(),\n grid2.getLongitude(), check_id=False)==False:\n return False\n return True", "def __eq__(self, other):\n return self.position.data == other.position.data and \\\n self.velocity.data == other.velocity.data", "def __eq__(self, other) -> bool:\n if other is None or not isinstance(other, Graph):\n name = other.name if other else None\n print(f'{name} is not a Graph object.')\n return False\n\n def match(op1: Operator, op2: Operator) -> bool:\n if not op1.equals(op2):\n print(f'{op1.name} is different.')\n return False\n\n # check input nodes and further\n for i1, i2 in zip(op1.input_ops.values(), op2.input_ops.values()):\n if not match(i1, i2):\n return False\n return True\n\n for o1, o2 in zip(self.get_outputs(), other.get_outputs()):\n if not match(o1, o2):\n return False\n return True", "def __eq__(self, other):\n return np.all(self.grid == other.grid) and np.all(self.pos == other.pos)", "def proj_is_same(p1, p2):\n if has_gdal:\n # this 
is more robust, but gdal is a pain\n s1 = osr.SpatialReference()\n s1.ImportFromProj4(p1.srs)\n s2 = osr.SpatialReference()\n s2.ImportFromProj4(p2.srs)\n return s1.IsSame(s2) == 1 # IsSame returns 1 or 0\n else:\n # at least we can try to sort it\n p1 = '+'.join(sorted(p1.srs.split('+')))\n p2 = '+'.join(sorted(p2.srs.split('+')))\n return p1 == p2", "def compare(self, other, enforce_mask=False, enforce_grid=False,\n enforce_area=False, enforce_aream=False, enforce_all=False):\n eps_mask = 1.0e-6\n eps_grid = 1.0e-2\n eps_area = 1.0e-1\n\n # Do a global gather to create a non-distributed attribute vector\n debugPrint( \"self.lgrid:\\n\",self.lgrid )\n debugPrint( \"other.lgrid:\\n\",other.lgrid )\n gGrid1 = attributevector.AttributeVector(self.ifields, self.rfields, self.lsize())\n gGrid1.initv(self.lgrid, self.lgrid.lsize())\n gGrid1.gather(self.lgrid, self.gsMap, comm.world_pe0, comm.component_pid, comm.local_comm) \n gGrid2 = attributevector.AttributeVector(other.ifields, other.rfields, other.lsize())\n gGrid2.initv( other.lgrid, other.lgrid.lsize() )\n gGrid2.gather(other.lgrid, self.gsMap,comm.world_pe0, comm.component_pid, comm.local_comm)\n\n # From here on, everything is done by the root pe\n if( comm.component_pid != comm.world_pe0 ):\n return\n\n # Compare size of domain\n npts1 = gGrid1.lsize()\n npts2 = gGrid2.lsize()\n npts = npts1\n\n if ( npts1 == npts2 ):\n debugPrint( \"the domain size is \",npts )\n else:\n debugPrint( \"domain size #1 = \", npts1 )\n debugPrint( \"domain size #2 = \", npts2 )\n debugPrint( \"ERROR: domain size mis-match\" )\n # call shr_sys_abort(subName // \"ERROR: domain size mis-match\")\n # Exceptions?\n\n # If there was no problem, continue:\n # Compare Domain masks:\n debugPrint(\"gData1:\\n\",gGrid1)\n debugPrint(\"gData2:\\n\",gGrid2)\n data1,data1_size = gGrid1.exportRAttr(\"mask\")#rcode)?\n data2,data2_size = gGrid2.exportRAttr(\"mask\")#rcode)?\n \n ndiff = 0\n debugPrint( \"npts:\",npts )\n debugPrint( \"length of data1:\",data1_size )\n for n in xrange(0,npts-1):\n if ( (( (abs(data1[n])) > eps_mask ) and (abs(data1[n]) < eps_mask )) or \n ( (( abs(data1[n])) < eps_mask ) and (( abs(data1[n])) > eps_mask) ) ):\n ndiff = ndiff + 1\n\n # Enforce consistency: \n # Nested function declaration\n def enforce_consistency(msg,exception=None):\n if (enforce_mask or enforce_all):\n if (ndiff > 0):\n debugPrint( msg )\n # Raise Exception\n \n enforce_consistency(\"ERROR: incompatible domain masks\")\n \n # Compute Maximum Latitude and Longitude Differences\n mask = data1\n ndiff = 0\n data1,data1_size = gGrid1.exportRAttr(\"lat\")#,rcode))\n data2,data2_size = gGrid2.exportRAttr(\"lat\")#,rcode))\n diff = 0\n max_diff = 0.0\n for n in xrange(npts):\n if( abs( mask[n] ) > eps_mask ):\n diff = abs( data1[n] - data2[n] )\n max_diff = max(max_diff, diff)\n if( diff > eps_grid ):\n ndiff = ndiff + 1\n debugPrint( \"Maximum latitude difference = \",max_diff )\n\n data1,data1_size = gGrid1.exportRAttr(\"lon\")#,rcode))\n data2,data2_size = gGrid2.exportRAttr(\"lon\")#,rcode))\n max_diff = 0.0\n\n for n in xrange(npts):\n if( abs( mask[n] ) > eps_mask ):\n x1 = data1[n]\n x2 = data2[n]\n if( x1 > x2 ): #make sure x1 < x2\n # swap(x1,x2)\n x1 = data2[n]\n x2 = data1[n]\n while( (x1+360.0) < (x2+180.0) ):#longitude is periodic\n x1 = x1 + 360.0\n diff = abs( x2 - x1 )\n max_diff = max(max_diff,diff)\n \n if (diff > eps_grid):\n ndiff = ndiff + 1\n debugPrint( \"Maximum longitude difference = \",max_diff )\n\n enforce_consistency(\"ERROR: incompatible domain 
grid coordinates!\")\n\n # Compare Area:\n data1,data1_size = gGrid1.exportRAttr( \"area\" )#, rcode )\n data2,data2_size = gGrid2.exportRAttr( \"area\" )#, rcode )\n\n ndiff = 0\n max_diff = 0.0\n\n for n in xrange(npts):\n if( abs( mask[n] ) > eps_mask ):\n if( data2[n] != 0.0 ):\n diff = abs( (data2[n] - data1[n]) / data2[n] )\n max_diff = max(max_diff,diff)\n if( diff > eps_area ):\n ndiff = ndiff + 1\n debugPrint( \"Maxium relative error of area (model) = \", max_diff )\n\n enforce_consistency(\"ERROR: icompatible domain area(model)\")\n\n # Compare aream\n data1,data1_size = gGrid1.exportRAttr(\"aream\")#,rcode))\n data2,data2_size = gGrid2.exportRAttr(\"aream\")#,rcode))\n\n ndiff = 0\n max_diff = 0.0\n for n in xrange(npts):\n if ( abs( mask[n] ) > eps_mask ):\n if( data2[n] != 0.0 ):\n diff = abs((data2[n] - data1[n])/data2[n])\n max_diff = max(max_diff,diff)\n if( diff > eps_area ):\n ndiff = ndiff + 1\n debugPrint( \"maximum relative error of area(map) = \",max_diff )\n\n enforce_consistency(\"ERROR: incompatible domain area (map)\")\n\n # Clean up, we're finished!\n return", "def assertNodesEqual(self, a, b):\n self.assertEqual((a.version, a.address, a.service, a.properties),\n (b.version, b.address, b.service, b.properties))", "def __eq__(self, other):\n return (type(self) == type(other) and\n (self.from_grid == other.from_grid) and\n (self.to_grid == other.to_grid))", "def __eq__(self, other) -> bool:\n if not isinstance(other, NilpotentOrbit):\n return False\n if self.my_type != other.my_type:\n return False\n if self.lie_rank != other.lie_rank:\n return False\n if self.decorator != other.decorator:\n return False\n return self.my_diagram == other.my_diagram", "def __eq__(self, other):\n return (type(self) == type(other) and\n self.from_grid == other.from_grid and\n self.to_grid == other.to_grid)", "def __eq__(self, other):\n return (type(self) == type(other) and\n self.from_grid == other.from_grid and\n self.to_grid == other.to_grid)", "def compare_graphs(self):\n\t\tpass", "def __eq__(self, other):\n if not isinstance(other, RackTopoWhereInput):\n return False\n\n return self.to_dict() == other.to_dict()", "def test_topology_handles_out_of_order_nodes(self, db_session, topology, second_topology, dbview):\n # Fill two networks simultaneously with newly created nodes\n self.nodes(topology, db_session, n=2)\n self.nodes(second_topology, db_session, n=2)\n self.nodes(topology, db_session, n=1)\n self.nodes(second_topology, db_session, n=1)\n\n # These topologies have non-contiguous node ids\n assert {node.id for node in topology.nodes()} == {1, 2, 5}\n assert {node.id for node in second_topology.nodes()} == {3, 4, 6}\n\n # Their edge graph is still internally consistent\n assertItemsEqual([\n set(neighbor.id for neighbor in node.neighbors())\n for node in topology.nodes()\n ],\n [{2, 5}, {1}, {1}]\n )\n\n assertItemsEqual([\n set(neighbor.id for neighbor in node.neighbors())\n for node in second_topology.nodes()\n ],\n [{4, 6}, {3}, {3}]\n )", "def __eq__(self, other):\n return self.master.abs2phy(pos=other)", "def test_point_relations(p1, p2):\n assert p1.left_of(p2) or p1.x >= p2.x\n assert p1.is_right_of(p2) or p1.x <= p2.x\n\n assert p1.left_of(p2) == p2.is_right_of(p1) or p1.x == p2.x\n assert not p1.left_of(p2) or not p1.is_right_of(p2)\n assert not p2.left_of(p1) or not p2.is_right_of(p1)", "def are_equal(self, sp1, sp2):\n return True", "def __eq__(self, other):\n return (type(self) == type(other) and\n self.n == other.n and self.m == other.m and\n self.from_grid == 
other.from_grid and\n self.to_grid == other.to_grid)", "def test_transposition(self):\n\n World.reset()\n\n\n def inside(x, y):\n centers_distance = tf.sqrt(tf.reduce_sum(tf.squared_difference(x[:, 0:2], y[:, 0:2]), axis=1) + 1e-6)\n return tf.cast((centers_distance + x[:, 2]) < y[:, 2], tf.float32)\n\n circles = tfl.Domain(label=\"Circles\", data=[[0., 0, 1], [0,0, 2], [0,0, 3]])\n inside = tfl.Predicate(label=\"inside\", domains=[\"Circles\", \"Circles\"], function=inside)\n tfl.setTNorm(id=tfl.SS, p=1)\n sess = tf.Session()\n\n\n # Constraint 1\n x = tfl.variable(circles, name=\"x\")\n y = tfl.variable(circles, name=\"y\")\n a = tfl.atom(inside, (x,y))\n b = tfl.atom(inside, (y,x))\n rule = tfl.and_n(a, b)\n\n assert np.greater(sess.run(rule), np.zeros(shape=[3,3,3])).all()\n assert len(World._predicates_cache)==1", "def compareNodes(x, y):\n return x.pathValue - y.pathValue", "def compare_plane_data(pd1, pd2):\n raise NotImplementedError", "def __eq__(self, other):\n\n # Attributes defining the instance\n ckeys = ['x0', 'y0', 'nx', 'ny', 'dx', 'dy', 'origin']\n\n a = dict((k, getattr(self.corner_grid, k)) for k in ckeys)\n b = dict((k, getattr(other.corner_grid, k)) for k in ckeys)\n p1 = self.corner_grid.proj\n p2 = other.corner_grid.proj\n return (a == b) and proj_is_same(p1, p2)", "def __eq__(self, other):\n return (self.vertices == other.vertices and self.weight == other.weight)", "def __eq__(self, other):\n return (type(self) == type(other) and\n self.from_grid == other.from_grid and\n self.to_grid == other.to_grid and\n self.m == other.m and\n self.n == other.n)", "def __eq__(self, other):\r\n\r\n if type(other) != type(self):\r\n return False\r\n if other.loss_list != self.loss_list:\r\n return False\r\n if other.meshsol_list != self.meshsol_list:\r\n return False\r\n if other.loss_index != self.loss_index:\r\n return False\r\n if other.logger_name != self.logger_name:\r\n return False\r\n if other.axes_dict != self.axes_dict:\r\n return False\r\n if other.Pstator != self.Pstator:\r\n return False\r\n if other.Protor != self.Protor:\r\n return False\r\n if other.Pmagnet != self.Pmagnet:\r\n return False\r\n if other.Pprox != self.Pprox:\r\n return False\r\n if other.Pjoule != self.Pjoule:\r\n return False\r\n if other.coeff_dict != self.coeff_dict:\r\n return False\r\n return True", "def __eq__(self, other):\n return self.master.phy2abs(pos=other)", "def __eq__(self, other):\r\n return abs(self.x - other.x) + abs(self.y - other.y) < Vertex.epsilon", "def are_equal(self, sp1, sp2):\n return", "def __ne__(self, other):\n if not isinstance(other, RackTopoWhereInput):\n return True\n\n return self.to_dict() != other.to_dict()", "def is_same_as(self, other) -> bool:\n return self.x == other.x and self.y == other.y", "def similar(g1, g2):\r\n return all(t1 == t2 for (t1, t2) in _squashed_graphs_triples(g1, g2))", "def test_1(self):\r\n r1, r2, r5 = MyVariable(1), MyVariable(2), MyVariable(5)\r\n o = MyOp.make_node(r1, r1)\r\n o2 = MyOp.make_node(o.outputs[0], r5)\r\n all = general_toposort(o2.outputs, prenode)\r\n assert all == [r5, r1, o, o.outputs[0], o2, o2.outputs[0]]", "def __eq__(self, other):\n\t\treturn self._coords == other._coords", "def __eq__(self, other):\n if isinstance(other, DirectedGraphEdge):\n return self.head_vertex == other.head_vertex and self.tail_vertex == other.tail_vertex\n return NotImplemented", "def test_eq(self, tdim, tdim2):\n assert tdim != tdim2\n assert tdim == copy.deepcopy(tdim)", "def __eq__(self, second):\r\n\t\treturn self.x == other.x and self.y 
== other.y", "def test_hash_equality(self):\n origin = np.random.randn(3)\n normal = np.random.randn(3)\n up_vector = np.random.randn(3)\n up_vector2 = np.random.randn(3)\n p1 = shapes_3d.CoordinatePlane(origin, normal, up_vector)\n p2 = shapes_3d.CoordinatePlane(origin, normal, up_vector)\n p3 = shapes_3d.CoordinatePlane(origin, normal, up_vector2)\n \n self.assertEqual(p1, p2)\n self.assertNotEqual(p1, p3)", "def __eq__(self, other):\n\n if not isinstance(other, Zone):\n return False\n if self.rdclass != other.rdclass or \\\n self.origin != other.origin or \\\n self.nodes != other.nodes:\n return False\n return True", "def _compare(self, boxlist1, boxlist2):\n\n ycenter1, xcenter1, _, _ = BoxList.get_center_coordinates_and_sizes(boxlist1)\n ycenter2, xcenter2, _, _ = BoxList.get_center_coordinates_and_sizes(boxlist2)\n\n centers1 = tf.transpose(tf.stack((ycenter1, xcenter1)))\n centers2 = tf.transpose(tf.stack((ycenter2, ycenter2)))\n\n centers_diff = tf.expand_dims(centers1, 1) - tf.expand_dims(centers2, 0)\n neg_l2_distance = -tf.norm(centers_diff, axis=2)\n return neg_l2_distance\n #return box_list_ops.iou(boxlist1, boxlist2)", "def is_connected(object_one, object_two):\n\n for vert_one in object_one.Vertexes:\n for vert_two in object_two.Vertexes:\n if (vert_one.X == vert_two.X) and (vert_one.y == vert_two.y):\n return True\n\n return False", "def __eq__(self, other):\n s = len(self)\n r = len(other)\n\n if s != r:\n raise(VetorError, \"Vetor dimensions are not equal\")\n\n # Two vectors are numericaly the same if the difference\n # between both of them are smaller than given precisao\n for i in range(s):\n if not comozero(self[i] - other[i]):\n return False\n\n return True", "def __eq__(self, other):\n # check equality of the nodesets\n return self.nodeset.__eq__(other.get_nodeset())", "def is_equal(self, other) -> bool:\n if isinstance(other, numbers.Number):\n return not self.num_variables and bool(self.offset == other)\n # todo: performance\n\n try:\n if callable(other.vartype):\n vartype_eq = all(self.vartype(v) == other.vartype(v) for v in self.variables)\n else:\n vartype_eq = all(self.vartype(v) == other.vartype for v in self.variables)\n\n return (vartype_eq\n and self.shape == other.shape # redundant, fast to check\n and self.offset == other.offset\n and self.linear == other.linear\n and self.adj == other.adj)\n except AttributeError:\n return False", "def __eq__(A, B):\n if not isinstance(A, type(B)):\n return NotImplemented\n return A.domain == B.domain and A.rep == B.rep", "def IsEqualOrder(self,other):\n return self.InferPolynomialDegree() == other.InferPolynomialDegree()", "def isomorphic(graph1, graph2):\r\n\r\n gd1 = _TripleCanonicalizer(graph1).to_hash()\r\n gd2 = _TripleCanonicalizer(graph2).to_hash()\r\n return gd1 == gd2", "def __eq__(self, other):\n if type(other) != type(self):\n return False\n # Check the properties inherited from Data\n if not super(DataND, self).__eq__(other):\n return False\n if other.axes != self.axes:\n return False\n if other.normalizations != self.normalizations:\n return False\n if other.FTparameters != self.FTparameters:\n return False\n if not array_equal(other.values, self.values):\n return False\n return True", "def __eq__(self, other):\n return self._coords == other._coords", "def __eq__(self, other):\n return self._coords == other._coords", "def __eq__(self, other):\n for ls, lo in zip(self.leaderboard_names, other.leaderboard_names):\n if ls != lo:\n return False\n for ls, lo in zip(self.leaderboard_groups, 
other.leaderboard_groups):\n if ls != lo:\n return False\n if self.top_left != other.top_left:\n return False\n if self.bottom_right != other.bottom_right:\n return False\n return True", "def compare_networks(model_1, model_2, pos=None, showfig=True, figsize=(15, 8), verbose=3):\n [scores, adjmat_diff] = network.compare_networks(model_1['adjmat'], model_2['adjmat'], pos=pos, showfig=showfig, width=figsize[0], height=figsize[1], verbose=verbose)\n return(scores, adjmat_diff)", "def is_equal (self, p):\n assert isinstance (p, Problem), \"Must be comparing two of same type\"\n if self.objects != p.objects:\n print(\"objects\")\n return False\n\n if self.init != p.init:\n #print \"init\"\n #print \"*self*\"\n #print self.init\n #print \"*p*\"\n #print p.init\n return False\n\n if self.goal != p.goal:\n print(\"goal\")\n return False\n\n if not all ([sa == pa for sa, pa in zip (self.actions, p.actions)]):\n print(\"actions\")\n return False\n\n if not all ([sp == pp for sp, pp in zip (self.predicates, p.predicates)]):\n print(\"predicates\")\n return False\n\n if self.types != p.types or self.parent_types != p.parent_types:\n print(\"types\")\n return False\n\n return True", "def is_equivalent(self, other):\n A = self.minimization().relabeled()\n [initial] = A.initial_states()\n address = {initial: ()}\n for v in A.digraph().breadth_first_search(initial.label()):\n state = A.state(v)\n state_address = address[state]\n for t in A.iter_transitions(state):\n if t.to_state not in address:\n address[t.to_state] = state_address + tuple(t.word_in)\n\n B = other.minimization().relabeled()\n labels = {B.process(path)[1].label(): state.label()\n for (state, path) in address.iteritems()}\n try:\n return A == B.relabeled(labels=labels)\n except KeyError:\n return False", "def __eq__(self, other):\n if self.get_dimensions() == other.get_dimensions():\n is_equal = (np.allclose(self.lon_arr, other.lon_arr) and\n np.allclose(self.lat_arr, other.lat_arr))\n else:\n is_equal = False\n return is_equal", "def _exact_compare(tree1, tree2):\n attrs = ['name', 'length', 'support']\n for n1, n2 in zip(tree1.postorder(), tree2.postorder()):\n for attr in attrs:\n if getattr(n1, attr, None) != getattr(n2, attr, None):\n return False\n return True", "def almost_equal(self, other, rtol=1e-05, atol=1e-08):\n\n # float attributes defining the instance\n fkeys = ['x0', 'y0', 'dx', 'dy']\n # unambiguous attributes\n ckeys = ['nx', 'ny', 'origin']\n\n ok = True\n for k in fkeys:\n ok = ok and np.isclose(getattr(self.corner_grid, k),\n getattr(other.corner_grid, k),\n rtol=rtol, atol=atol)\n for k in ckeys:\n _ok = getattr(self.corner_grid, k) == getattr(other.corner_grid, k)\n ok = ok and _ok\n p1 = self.corner_grid.proj\n p2 = other.corner_grid.proj\n return ok and proj_is_same(p1, p2)", "def equals(\n self, other, rtol=None, atol=None, verbose=None, ignore_type=False\n ):\n pp = super()._equals_preprocess(\n other, verbose=verbose, ignore_type=ignore_type\n )\n if pp is True or pp is False:\n return pp\n\n other = pp\n\n coords0 = self.coordinates()\n coords1 = other.coordinates()\n if len(coords0) != len(coords1):\n logger.info(\n f\"{self.__class__.__name__}: Different sized collections of \"\n f\"coordinates ({coords0}, {coords1})\"\n )\n\n return False\n\n if not self.coordinate_conversion.equals(\n other.coordinate_conversion,\n rtol=rtol,\n atol=atol,\n verbose=verbose,\n ignore_type=ignore_type,\n ):\n logger.info(\n f\"{self.__class__.__name__}: Different coordinate conversions\"\n )\n\n return False\n\n if not 
self.datum.equals(\n other.datum,\n rtol=rtol,\n atol=atol,\n verbose=verbose,\n ignore_type=ignore_type,\n ):\n logger.info(f\"{self.__class__.__name__}: Different datums\")\n\n return False\n\n # Still here? Then the two coordinate references are as equal\n # as can be ascertained in the absence of domains.\n return True", "def __eq__(self, other):\n if not isinstance(other, NodeProperties):\n return False\n\n return self.__dict__ == other.__dict__", "def verticesEqual(self, v1, v2, eps=1e-8):\n if abs(v1[0] - v2[0]) > eps:\n return False\n if abs(v1[1] - v2[1]) > eps:\n return False\n if abs(v1[2] - v2[2]) > eps:\n return False\n return True", "def __eq__(self, other):\n return self.abs2phy.__eq__(other)", "def __eq__(self, other):\n return self.abs2phy.__eq__(other)", "def _compare_attributes(self, first: Node, second: Node) -> bool:\n # If opsets of nodes are different, then nodes have different attributes.\n fst_opset = first.get_opset()\n snd_opset = second.get_opset()\n if fst_opset != snd_opset:\n return False\n\n if fst_opset not in ['opset1', 'opset4']:\n fst_name = first.soft_get('name', first.id)\n snd_name = second.soft_get('name', second.id)\n raise Error('Unsupported opset {} for nodes with names {} and {}'.format(fst_opset, fst_name, snd_name))\n\n if fst_opset == 'opset1':\n return self._compare_attributes_of_interpolate1(first, second)\n else:\n return self._compare_attributes_of_interpolate4(first, second)", "def __eq__(self, other):\n\n return (self.nodes[0].id == other.nodes[0].id) & \\\n (self.nodes[1].id == other.nodes[1].id) & \\\n (self.name == other.name)", "def are_the_same(node_before, node_after) -> bool:\n\n if node_before.algorithm != node_after.algorithm:\n return False\n elif not _is_output_name_same(node_before, node_after):\n return False\n else:\n for attr in interested_attrs:\n if _exists_attr(attr, node_before, node_after) == 1 or \\\n _exists_attr(attr, node_before, node_after) == 2:\n return False\n elif _exists_attr(attr, node_before, node_after) == 12 and \\\n node_before.attributes[attr] != node_after.attributes[attr]:\n return False\n return True", "def __eq__(self, other):\n return type(self) == type(other) and self.node is other.node", "def __eq__(self, other):\n return self.conn == other.conn and self.p1 == other.p1 and self.p2 == other.p2", "def check_topology(self, *args, **kwargs):\n return _TestA_swig.cleanslate_sptr_check_topology(self, *args, **kwargs)", "def __eq__(self, other):\n if isinstance(other, type(self)):\n same_edges = self._edges == other._edges\n same_weights = self._weights == other._weights\n return same_edges and same_weights\n else:\n return False", "def __eq__(self, other):\n if isinstance(other, DenseUnit):\n return (Counter(self.dimension) == Counter(other.dimension) and Counter(self.points) == Counter(\n other.points))\n return False", "def _match_identical_nodes(self):\n\n for job_name_b in self._topo_b_nodes:\n for job_name_a in self._unresolved_a_nodes:\n if self._is_node_identical(job_name_a, job_name_b):\n self._identical_nodes[job_name_b] = job_name_a\n self._unresolved_a_nodes.remove(job_name_a)\n self._unresolved_b_nodes.remove(job_name_b)\n break", "def __eq__(self, other):\n if isinstance(other, UnDirectedWeightedGraphEdge):\n if self.head_vertex != other.head_vertex:\n return False\n elif self.tail_vertex != other.tail_vertex:\n return False\n elif self.weight != other.weight:\n return False\n return True\n return NotImplemented", "def test_to_from_topology(self, molecule):\n topology = 
molecule.to_topology()\n molecule_copy = Molecule.from_topology(topology)\n assert molecule == molecule_copy", "def test_to_from_topology(self, molecule):\n topology = molecule.to_topology()\n molecule_copy = Molecule.from_topology(topology)\n assert molecule == molecule_copy", "def __eq__(self, other):\n if isinstance(other, DirectedWeightedGraphEdge):\n if self.head_vertex != other.head_vertex:\n return False\n elif self.tail_vertex != other.tail_vertex:\n return False\n elif self.weight != other.weight:\n return False\n return True\n return NotImplemented", "def __eq__(self, other):\n x_eq = self.x == other.x\n y_eq = self.y == other.y\n return x_eq and y_eq", "def __eq__(self, other):\n x_eq = self.x == other.x\n y_eq = self.y == other.y\n return x_eq and y_eq", "def __ne__(self, other):\n return np.all(self.grid != other.grid) or np.all(self.pos != other.pos)", "def __eq__(self, other):\n if not isinstance(other, DFM):\n return NotImplemented\n # Compare domains first because we do *not* want matrices with\n # different domains to be equal but e.g. a flint fmpz_mat and fmpq_mat\n # with the same entries will compare equal.\n return self.domain == other.domain and self.rep == other.rep", "def are_similar(first_coords: List[Tuple[int, int]], second_coords: List[Tuple[int, int]]) -> bool:\n # Step 1: Get angles of each triangle\n # Step 2: Compare grades of two triangles\n # Step 3: If two angles are equal then first triangle is similar to second triangle\n pass", "def assert_source_space_equal(src1, src2, msg=\"SourceSpace Dimension objects \"\n \"unequal\"):\n msg = \"%s:\" % msg\n assert_array_equal(src1.vertno[0], src2.vertno[0], \"%s unequal lh vertno \"\n \"(%r vs %r)\" % (msg, src1.vertno[0], src2.vertno[0]))\n assert_array_equal(src1.vertno[1], src2.vertno[1], \"%s unequal rh vertno \"\n \"(%r vs %r)\" % (msg, src1.vertno[1], src2.vertno[1]))\n assert_equal(src1.subject, src2.subject, \"%s unequal subject (%r vs %r\"\n \")\" % (msg, src1.subject, src2.subject))\n assert_equal(src1.src, src2.src, \"%s unequal names (%r vs %r\"\n \")\" % (msg, src1.src, src2.src))\n assert_equal(src1.subjects_dir, src2.subjects_dir, \"%s unequal names (%r \"\n \"vs %r)\" % (msg, src1.subjects_dir, src2.subjects_dir))", "def test_compare_old_to_new_method_to_create_trees(self):\n nodes = util.generate_sequence_of_points(2, 2)\n tree1 = kdtree.createNewTree(nodes)\n kdtree.visualize(tree1)\n \n sel_axis = (lambda axis: axis)\n tree2 = kdtree.createNewTree([[0.5, 0.5]],axis = 0, sel_axis= sel_axis)\n tree2.split2([0.25, 0.5], axis = 1)\n tree2.split2([0.75, 0.5], axis = 1)\n \n #left\n tree2.split2([0.25, 0.25], axis = 0, sel_axis = sel_axis)\n tree2.split2([0.25, 0.75], axis = 0, sel_axis = sel_axis)\n \n #right\n tree2.split2([0.75, 0.25], axis = 0, sel_axis = sel_axis)\n tree2.split2([0.75, 0.75], axis = 0, sel_axis = sel_axis)\n \n kdtree.visualize(tree2)\n \n for n in zip(kdtree.level_order(tree1), kdtree.level_order(tree2)):\n self.assertEqual(n[0].data, n[1].data, \"elements not equal\")\n \n if n[0].data is not None and n[1].data is not None:\n self.assertEqual(n[0].axis, n[1].axis, \"elements not equal\")", "def assertNodesEqual(self, first, second):\n def get_attrs(l):\n result = []\n for n in l:\n result.append((n.service, n.address, n.version, n.properties))\n return result\n self.assertEqual(get_attrs(first), get_attrs(second))", "def is_equal_to(self, another_labyrinth):\n \n if self.equals_list_nodes(self.list_empty_nodes, another_labyrinth.list_empty_nodes) and \\\n 
self.equals_list_nodes(self.list_wall_nodes, another_labyrinth.list_wall_nodes) and \\\n self.start_point.position_is_equal_to(another_labyrinth.start_point) and \\\n self.exit_point.position_is_equal_to(another_labyrinth.exit_point):\n return True\n \n else:\n return False", "def test_other_side_mesh(self):\n layered_volume = np.array(\n [\n [\n [0, 0, 0, 0, 0],\n [0, 0, 1, 1, 1],\n [0, 1, 1, 0, 0],\n [0, 1, 1, 1, 0],\n [0, 0, 0, 0, 0],\n ]\n ]\n )\n\n def quad(v1, v2, v3, v4):\n \"\"\"counterclockwise winding faces to make quad\"\"\"\n return [[v3, v2, v1], [v4, v3, v2]]\n\n top_mesh = trimesh.Trimesh(\n vertices=np.array(\n [\n [0, 1, 5],\n [1, 1, 5],\n [0, 1, 2],\n [1, 1, 2],\n [0, 3.5, 1.5],\n [1, 3.5, 1.5],\n ]\n ),\n faces=np.concatenate([quad(0, 1, 3, 2), quad(2, 3, 5, 4)], axis=0),\n )\n\n bot_mesh = trimesh.Trimesh(\n vertices=np.array([[0, 2, 5], [1, 2, 5], [0, 4, 2], [1, 4, 2]]),\n faces=quad(0, 1, 3, 2),\n )\n\n up = [0, -1, 0]\n dup = [0, -np.sqrt(0.5), -np.sqrt(0.5)]\n nanvec = [np.nan, np.nan, np.nan]\n vectors = np.array(\n [\n [\n [nanvec, nanvec, nanvec, nanvec, nanvec],\n [nanvec, nanvec, dup, up, up],\n [nanvec, dup, dup, nanvec, nanvec],\n [nanvec, dup, up, up, nanvec],\n [nanvec, nanvec, nanvec, nanvec, nanvec],\n ]\n ]\n )\n\n distances, something_wrong = tested.distances_from_voxels_to_meshes_wrt_dir(\n layered_volume, [top_mesh, bot_mesh], vectors\n )\n\n npt.assert_array_almost_equal(distances, get_expected_distances_to_meshes())\n assert not np.any(something_wrong)", "def __eq__(self, other):\n if isinstance(other, GraphEdge):\n return self.head_vertex == other.head_vertex and self.tail_vertex == other.tail_vertex\n return NotImplemented", "def check_form_match(\n cls,\n tensor1=None,\n tensor2=None,\n qhape1=None,\n shape1=None,\n dirs1=None,\n qhape2=None,\n shape2=None,\n dirs2=None,\n qodulus=None,\n ):\n if tensor1 is not None:\n qhape1 = tensor1.qhape\n shape1 = tensor1.shape\n dirs1 = tensor1.dirs\n if tensor2 is not None:\n qhape2 = tensor2.qhape\n shape2 = tensor2.shape\n dirs2 = tensor2.dirs\n if not (\n len(qhape1)\n == len(qhape2)\n == len(shape1)\n == len(shape2)\n == len(dirs1)\n == len(dirs2)\n ):\n return False\n # Loop over the indices of both tensors in tandem.\n for d1, qim1, dim1, d2, qim2, dim2 in zip(\n dirs1, qhape1, shape1, dirs2, qhape2, shape2\n ):\n # This is almost like compatible_indices, but for the missing minus\n # sign when building o_qim.\n qim2 = [d1 * d2 * q for q in qim2]\n if qodulus is not None:\n qim2 = [q % qodulus for q in qim2]\n qimdim1 = set(zip(qim1, dim1))\n qimdim2 = set(zip(qim2, dim2))\n if not qimdim1 == qimdim2:\n return False\n return True", "def test_basic_two_point_graph(self):\n self.assertEquals(\n Dijkstras().dijkstras(self.g1, 'a', 'b'), (1, ['a', 'b']))", "def __eq__(self, other):\r\n try:\r\n return self.gd == other.gd and self.boundariesrules == other.boundariesrules\r\n except AttributeError:\r\n return False", "def test_canonize_neighborhood_same_graph(nauty, ref_graph, ref_graph2):\n key = nauty.canonize_neighborhood(ref_graph, 2, 1)\n key2 = nauty.canonize_neighborhood(ref_graph2, 3, 1)\n assert key == key2", "def _vertices_are_equal(\n vertices1: List[np.ndarray], vertices2: List[np.ndarray]\n) -> bool:\n if len(vertices1) != len(vertices2):\n return False\n diff = vertices1 - vertices2\n if np.abs(np.max(diff)) < ways_are_equal_tolerance:\n return True\n return False", "def __eq__(self, other):\n # check equality of names since names are unique identifiers of nodes\n return 
self.name.__eq__(other.get_name())", "def __eq__(self, other):\n # check equality of names since names are unique identifiers of nodes\n return self.name.__eq__(other.get_name())", "def brepalgo_IsTopologicallyValid(*args):\n return _BRepAlgo.brepalgo_IsTopologicallyValid(*args)", "def __eq__(self, other):\n from numpy.linalg import norm\n from numpy import array\n\n # Upcast to an Atom\n othercomp = Atom(other)\n\n # Compare Symbols\n sym1 = self.sym\n sym2 = othercomp.sym\n if sym1 != sym2:\n return False\n\n # Compare position\n pos1 = array(self.get_position())\n pos2 = array(othercomp.get_position())\n\n return norm(pos1 - pos2) < 1e-3", "def __eq__(self, other):\n return self.x == other.x and self.y == other.y" ]
[ "0.7391109", "0.69211054", "0.66308445", "0.6267532", "0.6095765", "0.5980267", "0.5966569", "0.5945673", "0.59418166", "0.5940273", "0.59177405", "0.5912851", "0.59052336", "0.59029365", "0.5899751", "0.5899751", "0.589094", "0.587771", "0.58488256", "0.5842145", "0.5818348", "0.5813265", "0.5811964", "0.5805476", "0.5787111", "0.5774935", "0.577336", "0.5771975", "0.57696676", "0.57564896", "0.5743296", "0.5742526", "0.5733099", "0.57326764", "0.5713856", "0.57041633", "0.56934845", "0.56815714", "0.567927", "0.5678706", "0.56779504", "0.5671088", "0.56705916", "0.56583005", "0.5654814", "0.56537473", "0.5653324", "0.56402177", "0.5639907", "0.56384695", "0.5635804", "0.5629337", "0.562037", "0.562037", "0.56089336", "0.5605169", "0.5602651", "0.5594647", "0.5590881", "0.55801964", "0.5576192", "0.55750465", "0.5573356", "0.55681205", "0.5561084", "0.5561084", "0.5558202", "0.55570614", "0.5553649", "0.5552477", "0.5550244", "0.5537166", "0.55367064", "0.5533059", "0.5530732", "0.5528893", "0.5526104", "0.5526104", "0.5516893", "0.5513675", "0.5513675", "0.5507421", "0.55036205", "0.54999924", "0.54953986", "0.549401", "0.5493579", "0.54898095", "0.5483943", "0.5482341", "0.5477842", "0.54772615", "0.54676944", "0.5459728", "0.54583055", "0.54557234", "0.54557234", "0.54542905", "0.5453643", "0.5449166" ]
0.651133
3
Not equal operator. Seems to be required in addition to __eq__ to avoid 'cornercase' behaviors.
def __ne__(self, other):
    result = self.__eq__(other)
    if result is NotImplemented:
        return result
    return not result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __ne__(self, other):\n return not (self == other) # opposite of __eq__", "def __ne__(self, other):\n return not (self == other) # opposite of __eq__", "def __ne__(self, rhs):\n return not self.__eq__(rhs)", "def __ne__(self, other):\r\n return not self.__eq__(other)", "def __ne__(self, other):\r\n return not self.__eq__(other)", "def __ne__(self, other):\r\n return not self.__eq__(other)", "def __ne__(self, other):\r\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def ne (self, other):\n return not (self == other) # opposite of eq", "def __ne__(self, other):\n return not(self == other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self.__eq__(other)", "def __ne__(self, other):\n return not self == other", "def __ne__(self,other):\n return not (self == other)", "def __ne__(self, other):\n return not_equal(self, other)", "def __ne__(self, other):\n\t\treturn not self.__eq__(other)", "def __neq__(self, other): \n return not self == other", "def __ne__(self, other):\r\n\t\treturn not(self.__eq__(other))", "def __ne__(self, other):\r\n return not (self == other)", "def __ne__(self,other):\n return not self == other", "def __ne__(self, other):\n return not (self == other)", "def __ne__(self, other):\n return not (self == other)", "def __ne__(self, other):\n return not (self == other)", "def __ne__(self, other):\r\n return not self == other", "def __ne__(self, other):\r\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n\n return not self.__eq__(other)", "def __ne__(self, other):\n\n return not self.__eq__(other)", "def not_equal(lhs, rhs):\n return _make.not_equal(lhs, rhs)", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not 
self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other", "def __ne__(self, other):\n return not self == other" ]
[ "0.8394335", "0.8386683", "0.8160081", "0.81121725", "0.81121725", "0.81121725", "0.81121725", "0.8075682", "0.80685496", "0.80672383", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8032567", "0.8025684", "0.80254567", "0.80094445", "0.80091685", "0.8005091", "0.79911494", "0.7986639", "0.7986177", "0.7986177", "0.7986177", "0.79824907", "0.79824907", "0.7960055", "0.7960055", "0.79299897", "0.7900583", "0.7900583", "0.78992826", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294", "0.78978294" ]
0.0
-1
True if ghost layer length is not zero.
def has_ghosts(self): return not np.all(self.mesh.discretization.ghosts == 0)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def empty(self):\n return len(self.layers) == 0", "def is_empty(self):\n return ch.prod(ch.tensor(self.x.shape)).item() == 0", "def is_ghost(self):\n\t\treturn False", "def is_trivial(self):\n return self.dims == 0", "def is_empty(self) -> bool:\n return self.num_grna() == 0", "def _is_empty(self):\n if self.allocated_spaces == 0:\n return True\n else:\n return False", "def is_ghost(self):\n return self._is_ghost", "def is_empty(self):\n return self._connected and self._length == 1 and self._degree > 1", "def is_full_dimensional(self):\n\n return self.affine_dimension() == self.space_dimension()", "def _is_empty(shape):\n return F.shape_mul(shape) == 0", "def is_empty (self):\n return len(self.network) == 0", "def empty(self) -> bool:\n return self.sk1_len==0", "def empty(self):\n return self.numba_rtree._bounds_tree.shape[0] == 0", "def is_buffer_empty(self): \n if self.buffer.shape == (0, 5):\n return True\n else:\n return False", "def _is_empty(self):\n return self.size == 0", "def empty(self):\r\n return self.getSize() == 0", "def is_full(self):\n return self.remaining_space_in_hold() == 0", "def isEmpty(self):\n return len(self.mask) == 0", "def is_empty(self):\n if numpy.any(numpy.logical_not(self.shape)):\n return True\n if len(self.__m__) == 0:\n return True\n return False", "def is_empty(self):\n\t\treturn (self._size == 0)", "def is_empty(self):\n return self.fodder == 0", "def is_null(self):\n return self.length2 < pygonal.EPSILON2", "def is_empty(self):\n\t\treturn self._size == 0", "def is_empty(self):\n return self.size == 0", "def is_empty(self):\n return self.size == 0", "def is_empty(self):\n return self.size == 0", "def is_empty(self):\n return self.size == 0", "def is_empty(self):\n return self.size == 0", "def is_zero(self):\n return self._x == 0 and self._y == 0", "def is_empty(self):\n return len(self.__nodes) == 0", "def is_empty(self):\n return self._size == 0", "def isEmpty(self):\n return self._N == 0", "def is_Empty(self):\n return self.size == 0", "def is_empty(self):\r\n return self._size == 0", "def is_empty(self):\r\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return self._size == 0", "def is_empty(self):\n return len(self.container) == 0", "def is_empty(self):\n return self.__size == 0", "def is_full(self):\n return len(self.__occupied_slots__) >= self.__size__", "def is_empty(self):\r\n\r\n return self._size == 0", "def CheckGhost(self,G):\n\t\tif(P.x==G.x and P.y==G.y):\n\t\t\treturn 1\n\t\treturn 0", "def is_full(self):\n return len(self.walls) == 4", "def is_empty(self) -> bool:\n return self.size_bytes == 0", "def is_dicotic(G):\n return G.n == 0", "def empty(self):\n return self._size is 0", "def is_empty(self):\r\n if self.size == 0:\r\n return True\r\n return False", "def is_compact(self):\n return self.n_rays()==0 and 
self.n_lines()==0", "def _is_full(self):\n if self.allocated_spaces == self.capacity:\n return True\n elif self.allocated_spaces < self.capacity:\n return False", "def is_empty(self):\n return self.n==0", "def empty(self):\n return self.size == 0", "def empty(self):\n return self.size == 0", "def isZero(self):\n return self.count == 0", "def is_empty(self):\n if self._size == 0:\n return True\n return False", "def is_empty(self):\n return self.size == []", "def check_empty(self, coord):\n x, y, z = coord\n if self.perlin_3d(x, y, z) <= 0:\n return True\n else:\n return False", "def isEmpty(self):\n\t\tif len(self.chickWeight) == 0:\n\t\t\treturn True\n\t\telse:\n\t\t\treturn False", "def filled(self):\n return len(self) == self.length", "def isDimensionless(self):\n return _libsbml.Unit_isDimensionless(self)", "def is_empty(self) -> bool:\r\n return self.size == 0", "def is_empty(self):\n return self._sum() == 0", "def is_empty(self):\n return len(self.steps) == 0", "def has_zero_length_leaves(self):\n for l in self.leaves():\n if l.has_zero_length:\n return True\n return False", "def empty(self) -> bool:\n return self.data.get_size() == 0", "def isFull(self) -> bool:\n return self.size == self.maxlen", "def empty(self) -> bool:\n return len(self.input_stack) == 0 and len(self.output_stack) == 0", "def is_full(self):\n return len(self._data) == 1", "def is_full(self):\n return self.idx == self.len", "def is_empty(self):\n return self.__len__() == 0", "def isFull(self):\n if len(self._data) == self._length:\n return True\n else:\n return False", "def is_full(self):\n\n return self.count == len(self.array)", "def is_empty(self):\n return not self.size()", "def is_full(self):\n return self.top == self.size - 1", "def isEmpty(self):\n\n if hasattr(self, \"e0\"):\n return False\n else:\n if not self.e0.isEmpty():\n return False\n\n if hasattr(self, \"e1\"):\n return False\n else:\n if not self.e2.isEmpty():\n return False\n\n if hasattr(self, \"e2\"):\n return False\n else:\n if not self.e2.isEmpty():\n return False\n\n if hasattr(self, \"maximumHorizontalProjection\"):\n return False\n\n if hasattr(self, \"maximumVerticalProjection\"):\n return False\n\n if hasattr(self, \"equivalentHorizontalRadius\"):\n return False\n\n return True", "def isFull(self):\n return self.rear == self.size", "def is_empty(self):\n if self.length == 0:\n return True\n else:\n return False", "def not_empty(entry):\n gt_boxes = entry['boxes']\n return gt_boxes.shape[0] > 0", "def has_geom(self):\n return bool(self.give_geom())", "def is_empty( self ):\n \n return len(self.__deck) == 0", "def isEmpty(self):\n return self.__size == 0", "def is_empty(self) -> bool:\n if (self._pax_with_carry_on.is_empty()\n and self._pax_without_carry_on.is_empty()):\n\n # no pax in the plane\n return True\n\n return False", "def is_empty(self):\n return len(self.top) == 0", "def isEmpty(self):\n return self.size == 0", "def isEmpty(self):\n return self.size == 0", "def is_full(self) -> bool:\n return self._array[0].all()", "def _has_coordinates_and_gradient(self) -> bool:\n return self._coords is not None and self._coords.g is not None" ]
[ "0.7561698", "0.72112876", "0.7065583", "0.7046145", "0.7030016", "0.69214237", "0.6830907", "0.6792221", "0.67822015", "0.67809653", "0.67568576", "0.6733361", "0.6685363", "0.6656976", "0.6638194", "0.6637561", "0.6615059", "0.660577", "0.65900713", "0.6586594", "0.658374", "0.65624404", "0.6556189", "0.6555887", "0.6555887", "0.6555887", "0.6555887", "0.6555887", "0.6554751", "0.6553147", "0.65424526", "0.6529356", "0.6527895", "0.6522761", "0.6522761", "0.65139335", "0.65139335", "0.65139335", "0.65139335", "0.65139335", "0.65139335", "0.65139335", "0.65139335", "0.65139335", "0.65139335", "0.65139335", "0.65139335", "0.65139335", "0.65139335", "0.65139335", "0.65139335", "0.6506465", "0.6496786", "0.6494738", "0.6485843", "0.64812475", "0.6473082", "0.6467284", "0.6452752", "0.6443414", "0.6434815", "0.6417093", "0.6413712", "0.6413563", "0.64111716", "0.64111716", "0.6410123", "0.64054316", "0.6391819", "0.6389463", "0.638181", "0.6381135", "0.63801223", "0.6374314", "0.63723606", "0.6370839", "0.636772", "0.63646597", "0.6364129", "0.6361815", "0.63601375", "0.63595015", "0.6357582", "0.635688", "0.63555336", "0.63554543", "0.6349201", "0.6348019", "0.6343928", "0.6341595", "0.63391954", "0.63207173", "0.6313549", "0.6310862", "0.63082886", "0.63030326", "0.6302988", "0.6302988", "0.63005644", "0.6299385" ]
0.72264445
1
return the id of the present topology. This id is unique among all defined topologies.
def get_id(self): return self.__id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unique_id(self) -> str:\n return str(self.coordinator.gios.station_id)", "def topology_name(self):\n return self._topology_name", "def topology(self):\n return self._topology", "def portlet_id(self):\n return id(self)", "def establish_id(self):\n if self.config.node_id is None:\n self.config.node_id = str(uuid4()).replace('-', '')\n return self.config.node_id", "def id(self):\n return self.proto.id", "def id(self):\n if self.cloudserver:\n return self.cloudserver.id\n else:\n return None", "def idstring(self):\n return self.server.idstring(self)", "def unique_id(self) -> str | None:\n return f\"{self._station_id}_{self._fuel_type}\"", "def id(self):\n return id(self._component)", "def unique_id(self) -> str:\n return f\"{self._host}_{self._name}_{self._unique_id}\"", "def station_id(self) -> str:\n return self._station_id", "def unique_id(self):\n return '{}-{}-{}'.format(self.airly.latitude, self.airly.longitude,\n self.type)", "def unique_id(self):\n return self.heater.id + \"_switch\"", "def depmap_id(self) -> str:\n return self._depmap_id", "def unique_id(self) -> str:\n return f\"{self._device.mac}_{self._router.config_entry.entry_id}\"", "def unique_id(self):\n return '{}-{}-{}'.format(self._latitude, self._longitude, self._type)", "def topology(self):\n return self._h5[TOPOLOGY][()]", "def id(self) -> int:\n\t\t# pylint: disable=invalid-name\n\t\treturn self._oid", "def transit_router_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"transit_router_id\")", "def transit_router_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"transit_router_id\")", "def id(self): \n if self.cloudnet:\n return self.cloudnet.id\n else:\n return None", "def id(self) -> str:\n return self._pipeline_definition.get(\"id\")", "def dag_id(self):\n if self.parallelize_task:\n return f'{self.job_id_extension.get_parallel(self.job_id)}'\n else:\n return f'{self.job_id_extension.get_preparation(self.job_id)}'", "def unique_id(self) -> str | None:\n return self._config[CONF_ID]", "def get_id(self):\n\n\t\treturn self.__id", "def host_id(self) -> str:\n return pulumi.get(self, \"host_id\")", "def getid(self):\n if self.Id is None:\n return self.internalid\n else:\n return self.Id", "def getid(self):\n if self.Id is None:\n return self.internalid\n else:\n return self.Id", "def getid(self):\n if self.Id is None:\n return self.internalid\n else:\n return self.Id", "def getid(self):\n if self.Id is None:\n return self.internalid\n else:\n return self.Id", "def getid(self):\n if self.Id is None:\n return self.internalid\n else:\n return self.Id", "def getid(self):\n if self.Id is None:\n return self.internalid\n else:\n return self.Id", "def getId(self):\n return _libsbml.Port_getId(self)", "def unique_id(self):\n return self._id", "def unique_id(self):\n return self._id", "def id(self) -> Optional[int]:\n return self.__id", "def id(self) -> str:\n return self._id # type: ignore[return-value]", "def mesh_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"mesh_id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", 
"def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")" ]
[ "0.69869524", "0.66642356", "0.6493194", "0.6485879", "0.64025915", "0.63910806", "0.6356197", "0.62523925", "0.62455714", "0.62014794", "0.6190886", "0.61564124", "0.6125926", "0.60740584", "0.6072235", "0.60693944", "0.60588557", "0.6046706", "0.60322815", "0.60287", "0.60287", "0.60107666", "0.60096306", "0.600709", "0.59968144", "0.5994115", "0.59852785", "0.59701437", "0.59701437", "0.59701437", "0.59701437", "0.59701437", "0.59701437", "0.5958455", "0.59568506", "0.59568506", "0.5955803", "0.5944371", "0.5943218", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329", "0.5942329" ]
0.0
-1