code (string, lengths 13–1.2M) | order_type (string, 1 distinct value) | original_example (dict) | step_ids (list, lengths 1–5) |
---|---|---|---|
radius = int(input("enter the value for the radius of the cycle: "))
circumference = 2 * 3.14159 * radius
diameter = 2 * radius
area = 3.14159 * radius ** 2
print('circumference is ', circumference)
print('diameter is: ', diameter)
print('area is ', area)
|
normal
|
{
"blob_id": "ab5412a3d22bd53a592c93bad4870b06fd9f0720",
"index": 4080,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('circumference is ', circumference)\nprint('diameter is: ', diameter)\nprint('area is ', area)\n",
"step-3": "radius = int(input('enter the value for the radius of the cycle: '))\ncircumference = 2 * 3.14159 * radius\ndiameter = 2 * radius\narea = 3.14159 * radius ** 2\nprint('circumference is ', circumference)\nprint('diameter is: ', diameter)\nprint('area is ', area)\n",
"step-4": "radius = int(input(\"enter the value for the radius of the cycle: \"))\ncircumference = 2 * 3.14159 * radius\ndiameter = 2 * radius\narea = 3.14159 * radius ** 2\n\nprint('circumference is ', circumference)\nprint('diameter is: ', diameter)\nprint('area is ', area)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
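The row above shows the column layout in miniature: `code` holds a complete source file, `original_example` holds `step-1` through `step-5` as progressively less-masked reconstructions of that file (`<mask token>` marks elided spans; unused steps are `null`), and `step_ids` indexes the steps. Below is a minimal sketch of walking one row's steps, assuming the rows have already been parsed into Python dicts; the helper name `iter_steps` and the abbreviated row literal are illustrative, not part of the dataset.

```python
def iter_steps(example: dict):
    """Yield the non-null step variants in order (step-1 is the most masked)."""
    for i in range(1, 6):
        step = example.get(f"step-{i}")
        if step is not None:
            yield f"step-{i}", step

# Abbreviated from the circle-area row above:
example = {
    "step-1": "<mask token>\n",
    "step-2": "<mask token>\nprint('area is ', area)\n",
    "step-3": "area = 3.14159 * radius ** 2\nprint('area is ', area)\n",
    "step-4": None,
    "step-5": None,
}
for name, text in iter_steps(example):
    print(name, "->", repr(text))
```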
import argparse
import os
import shutil
import time, math
from collections import OrderedDict
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import torchvision.models as models
import numpy as np
import torch.utils.model_zoo as model_zoo
from torch.autograd.variable import Variable
from .Resnets import *
import torch.nn.functional as F
from torch.autograd import Variable
from efficientnet_pytorch import EfficientNet as efn
class ChannelAttention(nn.Module):
def __init__(self, in_planes, ratio=16):
super(ChannelAttention, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.max_pool = nn.AdaptiveMaxPool2d(1)
self.fc = nn.Sequential(nn.Conv2d(in_planes, in_planes // 16, 1, bias=False),
nn.ReLU(inplace=True),
nn.Conv2d(in_planes // 16, in_planes, 1, bias=False))
#self.fc = nn.Sequential(nn.Linear(in_planes, in_planes // 16, bias=False),
# nn.ReLU(inplace=True),
# nn.Linear(in_planes // 16, in_planes, bias=False))
self.sigmoid = nn.Sigmoid()
def forward(self, x):
#b, c, _, _ = x.size()
avg_out = self.fc(self.avg_pool(x))
max_out = self.fc(self.max_pool(x))
out = avg_out + max_out
return self.sigmoid(out)
class SpatialAttention(nn.Module):
def __init__(self, kernel_size=7):
super(SpatialAttention, self).__init__()
self.conv1 = nn.Conv2d(2, 1, kernel_size, padding=kernel_size//2, bias=False)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
avg_out = torch.mean(x, dim=1, keepdim=True)
max_out, _ = torch.max(x, dim=1, keepdim=True)
x = torch.cat([avg_out, max_out], dim=1)
x = self.conv1(x)
return self.sigmoid(x)
class PYRModule(nn.Module):
def __init__(self,inplanes,downsample=None):
super(PYRModule, self).__init__()
self.ca = ChannelAttention(inplanes)
self.features = nn.Sequential(
nn.AdaptiveAvgPool2d((1, 1)),
)
def forward(self, x):
#residual =x
x = self.ca(x) * x
#x += residual
x = self.features(x)
return x
class HPNet(nn.Module):
def __init__(self):
super(HPNet, self).__init__()
self.faceModel = efn.from_pretrained('efficientnet-b4')
self.planes_num=1792#2304#2048#1536#1408#1280#1792
self.cls_num=66
self.feature_1 = PYRModule(self.planes_num)
self.feature_2 = PYRModule(self.planes_num)
self.feature_3 = PYRModule(self.planes_num)
self.idx_tensor = torch.FloatTensor(torch.range(0,self.cls_num-1)*1).cuda()
self.fc_b_1 = nn.Sequential(
nn.Linear(self.planes_num, self.cls_num),
)
self.fc_b_2 = nn.Sequential(
nn.Linear(self.planes_num, self.cls_num),
)
self.fc_b_3 = nn.Sequential(
nn.Linear(self.planes_num, self.cls_num),
)
self.max_pool_1=nn.MaxPool1d(3)
self.max_pool_2=nn.MaxPool1d(3)
self.max_pool_3=nn.MaxPool1d(3)
self.softmax=nn.Softmax(dim=2).cuda()
self.sigmoid=nn.Sigmoid().cuda()
def forward(self, faces):
xFace = self.faceModel.extract_features(faces)
x_p = self.feature_1(xFace)
x_y = self.feature_2(xFace)
x_r = self.feature_3(xFace)
x_p = torch.flatten(x_p, 1)
x_y = torch.flatten(x_y, 1)
x_r = torch.flatten(x_r, 1)
x_p_feat=torch.unsqueeze(x_p,1)
x_y_feat=torch.unsqueeze(x_y,1)
x_r_feat=torch.unsqueeze(x_r,1)
x_feat=torch.cat([x_p_feat,x_y_feat,x_r_feat],1)
x_p_b=self.fc_b_1(x_p)
x_y_b=self.fc_b_2(x_y)
x_r_b=self.fc_b_3(x_r)
x_p_b=torch.unsqueeze(x_p_b,1)
x_y_b=torch.unsqueeze(x_y_b,1)
x_r_b=torch.unsqueeze(x_r_b,1)
x_p_b_mp=self.max_pool_1(x_p_b)
x_y_b_mp=self.max_pool_2(x_y_b)
x_r_b_mp=self.max_pool_3(x_r_b)
x_p_pre=self.softmax(x_p_b)
x_y_pre=self.softmax(x_y_b)
x_r_pre=self.softmax(x_r_b)
x_p=torch.sum(x_p_pre * self.idx_tensor, 2)
x_y=torch.sum(x_y_pre * self.idx_tensor, 2)
x_r=torch.sum(x_r_pre * self.idx_tensor, 2)
return torch.cat([x_p,x_y,x_r],1),torch.cat([x_p_b,x_y_b,x_r_b],1),torch.cat([x_p_b_mp,x_y_b_mp,x_r_b_mp],1),x_feat
|
normal
|
{
"blob_id": "c9de51ee5a9955f36ecd9f5d92813821fb68fb3d",
"index": 4308,
"step-1": "<mask token>\n\n\nclass SpatialAttention(nn.Module):\n\n def __init__(self, kernel_size=7):\n super(SpatialAttention, self).__init__()\n self.conv1 = nn.Conv2d(2, 1, kernel_size, padding=kernel_size // 2,\n bias=False)\n self.sigmoid = nn.Sigmoid()\n <mask token>\n\n\nclass PYRModule(nn.Module):\n\n def __init__(self, inplanes, downsample=None):\n super(PYRModule, self).__init__()\n self.ca = ChannelAttention(inplanes)\n self.features = nn.Sequential(nn.AdaptiveAvgPool2d((1, 1)))\n\n def forward(self, x):\n x = self.ca(x) * x\n x = self.features(x)\n return x\n\n\nclass HPNet(nn.Module):\n\n def __init__(self):\n super(HPNet, self).__init__()\n self.faceModel = efn.from_pretrained('efficientnet-b4')\n self.planes_num = 1792\n self.cls_num = 66\n self.feature_1 = PYRModule(self.planes_num)\n self.feature_2 = PYRModule(self.planes_num)\n self.feature_3 = PYRModule(self.planes_num)\n self.idx_tensor = torch.FloatTensor(torch.range(0, self.cls_num - 1\n ) * 1).cuda()\n self.fc_b_1 = nn.Sequential(nn.Linear(self.planes_num, self.cls_num))\n self.fc_b_2 = nn.Sequential(nn.Linear(self.planes_num, self.cls_num))\n self.fc_b_3 = nn.Sequential(nn.Linear(self.planes_num, self.cls_num))\n self.max_pool_1 = nn.MaxPool1d(3)\n self.max_pool_2 = nn.MaxPool1d(3)\n self.max_pool_3 = nn.MaxPool1d(3)\n self.softmax = nn.Softmax(dim=2).cuda()\n self.sigmoid = nn.Sigmoid().cuda()\n\n def forward(self, faces):\n xFace = self.faceModel.extract_features(faces)\n x_p = self.feature_1(xFace)\n x_y = self.feature_2(xFace)\n x_r = self.feature_3(xFace)\n x_p = torch.flatten(x_p, 1)\n x_y = torch.flatten(x_y, 1)\n x_r = torch.flatten(x_r, 1)\n x_p_feat = torch.unsqueeze(x_p, 1)\n x_y_feat = torch.unsqueeze(x_y, 1)\n x_r_feat = torch.unsqueeze(x_r, 1)\n x_feat = torch.cat([x_p_feat, x_y_feat, x_r_feat], 1)\n x_p_b = self.fc_b_1(x_p)\n x_y_b = self.fc_b_2(x_y)\n x_r_b = self.fc_b_3(x_r)\n x_p_b = torch.unsqueeze(x_p_b, 1)\n x_y_b = torch.unsqueeze(x_y_b, 1)\n x_r_b = torch.unsqueeze(x_r_b, 1)\n x_p_b_mp = self.max_pool_1(x_p_b)\n x_y_b_mp = self.max_pool_2(x_y_b)\n x_r_b_mp = self.max_pool_3(x_r_b)\n x_p_pre = self.softmax(x_p_b)\n x_y_pre = self.softmax(x_y_b)\n x_r_pre = self.softmax(x_r_b)\n x_p = torch.sum(x_p_pre * self.idx_tensor, 2)\n x_y = torch.sum(x_y_pre * self.idx_tensor, 2)\n x_r = torch.sum(x_r_pre * self.idx_tensor, 2)\n return torch.cat([x_p, x_y, x_r], 1), torch.cat([x_p_b, x_y_b,\n x_r_b], 1), torch.cat([x_p_b_mp, x_y_b_mp, x_r_b_mp], 1), x_feat\n",
"step-2": "<mask token>\n\n\nclass ChannelAttention(nn.Module):\n <mask token>\n <mask token>\n\n\nclass SpatialAttention(nn.Module):\n\n def __init__(self, kernel_size=7):\n super(SpatialAttention, self).__init__()\n self.conv1 = nn.Conv2d(2, 1, kernel_size, padding=kernel_size // 2,\n bias=False)\n self.sigmoid = nn.Sigmoid()\n\n def forward(self, x):\n avg_out = torch.mean(x, dim=1, keepdim=True)\n max_out, _ = torch.max(x, dim=1, keepdim=True)\n x = torch.cat([avg_out, max_out], dim=1)\n x = self.conv1(x)\n return self.sigmoid(x)\n\n\nclass PYRModule(nn.Module):\n\n def __init__(self, inplanes, downsample=None):\n super(PYRModule, self).__init__()\n self.ca = ChannelAttention(inplanes)\n self.features = nn.Sequential(nn.AdaptiveAvgPool2d((1, 1)))\n\n def forward(self, x):\n x = self.ca(x) * x\n x = self.features(x)\n return x\n\n\nclass HPNet(nn.Module):\n\n def __init__(self):\n super(HPNet, self).__init__()\n self.faceModel = efn.from_pretrained('efficientnet-b4')\n self.planes_num = 1792\n self.cls_num = 66\n self.feature_1 = PYRModule(self.planes_num)\n self.feature_2 = PYRModule(self.planes_num)\n self.feature_3 = PYRModule(self.planes_num)\n self.idx_tensor = torch.FloatTensor(torch.range(0, self.cls_num - 1\n ) * 1).cuda()\n self.fc_b_1 = nn.Sequential(nn.Linear(self.planes_num, self.cls_num))\n self.fc_b_2 = nn.Sequential(nn.Linear(self.planes_num, self.cls_num))\n self.fc_b_3 = nn.Sequential(nn.Linear(self.planes_num, self.cls_num))\n self.max_pool_1 = nn.MaxPool1d(3)\n self.max_pool_2 = nn.MaxPool1d(3)\n self.max_pool_3 = nn.MaxPool1d(3)\n self.softmax = nn.Softmax(dim=2).cuda()\n self.sigmoid = nn.Sigmoid().cuda()\n\n def forward(self, faces):\n xFace = self.faceModel.extract_features(faces)\n x_p = self.feature_1(xFace)\n x_y = self.feature_2(xFace)\n x_r = self.feature_3(xFace)\n x_p = torch.flatten(x_p, 1)\n x_y = torch.flatten(x_y, 1)\n x_r = torch.flatten(x_r, 1)\n x_p_feat = torch.unsqueeze(x_p, 1)\n x_y_feat = torch.unsqueeze(x_y, 1)\n x_r_feat = torch.unsqueeze(x_r, 1)\n x_feat = torch.cat([x_p_feat, x_y_feat, x_r_feat], 1)\n x_p_b = self.fc_b_1(x_p)\n x_y_b = self.fc_b_2(x_y)\n x_r_b = self.fc_b_3(x_r)\n x_p_b = torch.unsqueeze(x_p_b, 1)\n x_y_b = torch.unsqueeze(x_y_b, 1)\n x_r_b = torch.unsqueeze(x_r_b, 1)\n x_p_b_mp = self.max_pool_1(x_p_b)\n x_y_b_mp = self.max_pool_2(x_y_b)\n x_r_b_mp = self.max_pool_3(x_r_b)\n x_p_pre = self.softmax(x_p_b)\n x_y_pre = self.softmax(x_y_b)\n x_r_pre = self.softmax(x_r_b)\n x_p = torch.sum(x_p_pre * self.idx_tensor, 2)\n x_y = torch.sum(x_y_pre * self.idx_tensor, 2)\n x_r = torch.sum(x_r_pre * self.idx_tensor, 2)\n return torch.cat([x_p, x_y, x_r], 1), torch.cat([x_p_b, x_y_b,\n x_r_b], 1), torch.cat([x_p_b_mp, x_y_b_mp, x_r_b_mp], 1), x_feat\n",
"step-3": "<mask token>\n\n\nclass ChannelAttention(nn.Module):\n <mask token>\n\n def forward(self, x):\n avg_out = self.fc(self.avg_pool(x))\n max_out = self.fc(self.max_pool(x))\n out = avg_out + max_out\n return self.sigmoid(out)\n\n\nclass SpatialAttention(nn.Module):\n\n def __init__(self, kernel_size=7):\n super(SpatialAttention, self).__init__()\n self.conv1 = nn.Conv2d(2, 1, kernel_size, padding=kernel_size // 2,\n bias=False)\n self.sigmoid = nn.Sigmoid()\n\n def forward(self, x):\n avg_out = torch.mean(x, dim=1, keepdim=True)\n max_out, _ = torch.max(x, dim=1, keepdim=True)\n x = torch.cat([avg_out, max_out], dim=1)\n x = self.conv1(x)\n return self.sigmoid(x)\n\n\nclass PYRModule(nn.Module):\n\n def __init__(self, inplanes, downsample=None):\n super(PYRModule, self).__init__()\n self.ca = ChannelAttention(inplanes)\n self.features = nn.Sequential(nn.AdaptiveAvgPool2d((1, 1)))\n\n def forward(self, x):\n x = self.ca(x) * x\n x = self.features(x)\n return x\n\n\nclass HPNet(nn.Module):\n\n def __init__(self):\n super(HPNet, self).__init__()\n self.faceModel = efn.from_pretrained('efficientnet-b4')\n self.planes_num = 1792\n self.cls_num = 66\n self.feature_1 = PYRModule(self.planes_num)\n self.feature_2 = PYRModule(self.planes_num)\n self.feature_3 = PYRModule(self.planes_num)\n self.idx_tensor = torch.FloatTensor(torch.range(0, self.cls_num - 1\n ) * 1).cuda()\n self.fc_b_1 = nn.Sequential(nn.Linear(self.planes_num, self.cls_num))\n self.fc_b_2 = nn.Sequential(nn.Linear(self.planes_num, self.cls_num))\n self.fc_b_3 = nn.Sequential(nn.Linear(self.planes_num, self.cls_num))\n self.max_pool_1 = nn.MaxPool1d(3)\n self.max_pool_2 = nn.MaxPool1d(3)\n self.max_pool_3 = nn.MaxPool1d(3)\n self.softmax = nn.Softmax(dim=2).cuda()\n self.sigmoid = nn.Sigmoid().cuda()\n\n def forward(self, faces):\n xFace = self.faceModel.extract_features(faces)\n x_p = self.feature_1(xFace)\n x_y = self.feature_2(xFace)\n x_r = self.feature_3(xFace)\n x_p = torch.flatten(x_p, 1)\n x_y = torch.flatten(x_y, 1)\n x_r = torch.flatten(x_r, 1)\n x_p_feat = torch.unsqueeze(x_p, 1)\n x_y_feat = torch.unsqueeze(x_y, 1)\n x_r_feat = torch.unsqueeze(x_r, 1)\n x_feat = torch.cat([x_p_feat, x_y_feat, x_r_feat], 1)\n x_p_b = self.fc_b_1(x_p)\n x_y_b = self.fc_b_2(x_y)\n x_r_b = self.fc_b_3(x_r)\n x_p_b = torch.unsqueeze(x_p_b, 1)\n x_y_b = torch.unsqueeze(x_y_b, 1)\n x_r_b = torch.unsqueeze(x_r_b, 1)\n x_p_b_mp = self.max_pool_1(x_p_b)\n x_y_b_mp = self.max_pool_2(x_y_b)\n x_r_b_mp = self.max_pool_3(x_r_b)\n x_p_pre = self.softmax(x_p_b)\n x_y_pre = self.softmax(x_y_b)\n x_r_pre = self.softmax(x_r_b)\n x_p = torch.sum(x_p_pre * self.idx_tensor, 2)\n x_y = torch.sum(x_y_pre * self.idx_tensor, 2)\n x_r = torch.sum(x_r_pre * self.idx_tensor, 2)\n return torch.cat([x_p, x_y, x_r], 1), torch.cat([x_p_b, x_y_b,\n x_r_b], 1), torch.cat([x_p_b_mp, x_y_b_mp, x_r_b_mp], 1), x_feat\n",
"step-4": "import argparse\nimport os\nimport shutil\nimport time, math\nfrom collections import OrderedDict\nimport torch\nimport torch.nn as nn\nimport torch.nn.parallel\nimport torch.backends.cudnn as cudnn\nimport torch.optim\nimport torch.utils.data\nimport torchvision.transforms as transforms\nimport torchvision.datasets as datasets\nimport torchvision.models as models\nimport numpy as np\nimport torch.utils.model_zoo as model_zoo\nfrom torch.autograd.variable import Variable\nfrom .Resnets import *\nimport torch.nn.functional as F\nfrom torch.autograd import Variable\nfrom efficientnet_pytorch import EfficientNet as efn\n\n\nclass ChannelAttention(nn.Module):\n\n def __init__(self, in_planes, ratio=16):\n super(ChannelAttention, self).__init__()\n self.avg_pool = nn.AdaptiveAvgPool2d(1)\n self.max_pool = nn.AdaptiveMaxPool2d(1)\n self.fc = nn.Sequential(nn.Conv2d(in_planes, in_planes // 16, 1,\n bias=False), nn.ReLU(inplace=True), nn.Conv2d(in_planes // 16,\n in_planes, 1, bias=False))\n self.sigmoid = nn.Sigmoid()\n\n def forward(self, x):\n avg_out = self.fc(self.avg_pool(x))\n max_out = self.fc(self.max_pool(x))\n out = avg_out + max_out\n return self.sigmoid(out)\n\n\nclass SpatialAttention(nn.Module):\n\n def __init__(self, kernel_size=7):\n super(SpatialAttention, self).__init__()\n self.conv1 = nn.Conv2d(2, 1, kernel_size, padding=kernel_size // 2,\n bias=False)\n self.sigmoid = nn.Sigmoid()\n\n def forward(self, x):\n avg_out = torch.mean(x, dim=1, keepdim=True)\n max_out, _ = torch.max(x, dim=1, keepdim=True)\n x = torch.cat([avg_out, max_out], dim=1)\n x = self.conv1(x)\n return self.sigmoid(x)\n\n\nclass PYRModule(nn.Module):\n\n def __init__(self, inplanes, downsample=None):\n super(PYRModule, self).__init__()\n self.ca = ChannelAttention(inplanes)\n self.features = nn.Sequential(nn.AdaptiveAvgPool2d((1, 1)))\n\n def forward(self, x):\n x = self.ca(x) * x\n x = self.features(x)\n return x\n\n\nclass HPNet(nn.Module):\n\n def __init__(self):\n super(HPNet, self).__init__()\n self.faceModel = efn.from_pretrained('efficientnet-b4')\n self.planes_num = 1792\n self.cls_num = 66\n self.feature_1 = PYRModule(self.planes_num)\n self.feature_2 = PYRModule(self.planes_num)\n self.feature_3 = PYRModule(self.planes_num)\n self.idx_tensor = torch.FloatTensor(torch.range(0, self.cls_num - 1\n ) * 1).cuda()\n self.fc_b_1 = nn.Sequential(nn.Linear(self.planes_num, self.cls_num))\n self.fc_b_2 = nn.Sequential(nn.Linear(self.planes_num, self.cls_num))\n self.fc_b_3 = nn.Sequential(nn.Linear(self.planes_num, self.cls_num))\n self.max_pool_1 = nn.MaxPool1d(3)\n self.max_pool_2 = nn.MaxPool1d(3)\n self.max_pool_3 = nn.MaxPool1d(3)\n self.softmax = nn.Softmax(dim=2).cuda()\n self.sigmoid = nn.Sigmoid().cuda()\n\n def forward(self, faces):\n xFace = self.faceModel.extract_features(faces)\n x_p = self.feature_1(xFace)\n x_y = self.feature_2(xFace)\n x_r = self.feature_3(xFace)\n x_p = torch.flatten(x_p, 1)\n x_y = torch.flatten(x_y, 1)\n x_r = torch.flatten(x_r, 1)\n x_p_feat = torch.unsqueeze(x_p, 1)\n x_y_feat = torch.unsqueeze(x_y, 1)\n x_r_feat = torch.unsqueeze(x_r, 1)\n x_feat = torch.cat([x_p_feat, x_y_feat, x_r_feat], 1)\n x_p_b = self.fc_b_1(x_p)\n x_y_b = self.fc_b_2(x_y)\n x_r_b = self.fc_b_3(x_r)\n x_p_b = torch.unsqueeze(x_p_b, 1)\n x_y_b = torch.unsqueeze(x_y_b, 1)\n x_r_b = torch.unsqueeze(x_r_b, 1)\n x_p_b_mp = self.max_pool_1(x_p_b)\n x_y_b_mp = self.max_pool_2(x_y_b)\n x_r_b_mp = self.max_pool_3(x_r_b)\n x_p_pre = self.softmax(x_p_b)\n x_y_pre = self.softmax(x_y_b)\n x_r_pre = 
self.softmax(x_r_b)\n x_p = torch.sum(x_p_pre * self.idx_tensor, 2)\n x_y = torch.sum(x_y_pre * self.idx_tensor, 2)\n x_r = torch.sum(x_r_pre * self.idx_tensor, 2)\n return torch.cat([x_p, x_y, x_r], 1), torch.cat([x_p_b, x_y_b,\n x_r_b], 1), torch.cat([x_p_b_mp, x_y_b_mp, x_r_b_mp], 1), x_feat\n",
"step-5": "import argparse\nimport os\nimport shutil\nimport time, math\nfrom collections import OrderedDict\nimport torch\nimport torch.nn as nn\nimport torch.nn.parallel\nimport torch.backends.cudnn as cudnn\nimport torch.optim\nimport torch.utils.data\nimport torchvision.transforms as transforms\nimport torchvision.datasets as datasets\nimport torchvision.models as models\nimport numpy as np\nimport torch.utils.model_zoo as model_zoo\nfrom torch.autograd.variable import Variable\nfrom .Resnets import *\nimport torch.nn.functional as F\nfrom torch.autograd import Variable\nfrom efficientnet_pytorch import EfficientNet as efn\n\n\nclass ChannelAttention(nn.Module):\n def __init__(self, in_planes, ratio=16):\n super(ChannelAttention, self).__init__()\n self.avg_pool = nn.AdaptiveAvgPool2d(1)\n self.max_pool = nn.AdaptiveMaxPool2d(1)\n \n self.fc = nn.Sequential(nn.Conv2d(in_planes, in_planes // 16, 1, bias=False),\n nn.ReLU(inplace=True),\n nn.Conv2d(in_planes // 16, in_planes, 1, bias=False))\n \n #self.fc = nn.Sequential(nn.Linear(in_planes, in_planes // 16, bias=False),\n # nn.ReLU(inplace=True),\n # nn.Linear(in_planes // 16, in_planes, bias=False))\n self.sigmoid = nn.Sigmoid()\n\n def forward(self, x):\n #b, c, _, _ = x.size()\n avg_out = self.fc(self.avg_pool(x))\n max_out = self.fc(self.max_pool(x))\n out = avg_out + max_out\n return self.sigmoid(out)\n\nclass SpatialAttention(nn.Module):\n def __init__(self, kernel_size=7):\n super(SpatialAttention, self).__init__()\n\n self.conv1 = nn.Conv2d(2, 1, kernel_size, padding=kernel_size//2, bias=False)\n self.sigmoid = nn.Sigmoid()\n\n def forward(self, x):\n avg_out = torch.mean(x, dim=1, keepdim=True)\n max_out, _ = torch.max(x, dim=1, keepdim=True)\n x = torch.cat([avg_out, max_out], dim=1)\n x = self.conv1(x)\n return self.sigmoid(x)\n\n\nclass PYRModule(nn.Module):\n\n def __init__(self,inplanes,downsample=None):\n super(PYRModule, self).__init__()\n \n self.ca = ChannelAttention(inplanes)\n \n self.features = nn.Sequential(\n nn.AdaptiveAvgPool2d((1, 1)),\n )\n\n def forward(self, x):\n #residual =x\n x = self.ca(x) * x\n #x += residual\n x = self.features(x)\n return x\n\n\n\nclass HPNet(nn.Module):\n\n def __init__(self):\n super(HPNet, self).__init__()\n self.faceModel = efn.from_pretrained('efficientnet-b4')\n \n self.planes_num=1792#2304#2048#1536#1408#1280#1792\n self.cls_num=66\n \n self.feature_1 = PYRModule(self.planes_num)\n self.feature_2 = PYRModule(self.planes_num)\n self.feature_3 = PYRModule(self.planes_num)\n \n \n self.idx_tensor = torch.FloatTensor(torch.range(0,self.cls_num-1)*1).cuda()\n \n self.fc_b_1 = nn.Sequential(\n nn.Linear(self.planes_num, self.cls_num),\n )\n self.fc_b_2 = nn.Sequential(\n nn.Linear(self.planes_num, self.cls_num),\n )\n self.fc_b_3 = nn.Sequential(\n nn.Linear(self.planes_num, self.cls_num),\n )\n self.max_pool_1=nn.MaxPool1d(3)\n self.max_pool_2=nn.MaxPool1d(3)\n self.max_pool_3=nn.MaxPool1d(3)\n \n self.softmax=nn.Softmax(dim=2).cuda()\n self.sigmoid=nn.Sigmoid().cuda()\n \n \n def forward(self, faces):\n\n xFace = self.faceModel.extract_features(faces)\n \n \n x_p = self.feature_1(xFace)\n x_y = self.feature_2(xFace)\n x_r = self.feature_3(xFace)\n \n x_p = torch.flatten(x_p, 1)\n x_y = torch.flatten(x_y, 1)\n x_r = torch.flatten(x_r, 1)\n \n x_p_feat=torch.unsqueeze(x_p,1)\n x_y_feat=torch.unsqueeze(x_y,1)\n x_r_feat=torch.unsqueeze(x_r,1)\n \n x_feat=torch.cat([x_p_feat,x_y_feat,x_r_feat],1)\n \n x_p_b=self.fc_b_1(x_p)\n x_y_b=self.fc_b_2(x_y)\n x_r_b=self.fc_b_3(x_r)\n \n 
x_p_b=torch.unsqueeze(x_p_b,1)\n x_y_b=torch.unsqueeze(x_y_b,1)\n x_r_b=torch.unsqueeze(x_r_b,1)\n \n x_p_b_mp=self.max_pool_1(x_p_b)\n x_y_b_mp=self.max_pool_2(x_y_b)\n x_r_b_mp=self.max_pool_3(x_r_b)\n \n x_p_pre=self.softmax(x_p_b)\n x_y_pre=self.softmax(x_y_b)\n x_r_pre=self.softmax(x_r_b)\n \n x_p=torch.sum(x_p_pre * self.idx_tensor, 2) \n x_y=torch.sum(x_y_pre * self.idx_tensor, 2) \n x_r=torch.sum(x_r_pre * self.idx_tensor, 2)\n \n\n return torch.cat([x_p,x_y,x_r],1),torch.cat([x_p_b,x_y_b,x_r_b],1),torch.cat([x_p_b_mp,x_y_b_mp,x_r_b_mp],1),x_feat",
"step-ids": [
8,
10,
11,
13,
14
]
}
|
[
8,
10,
11,
13,
14
] |
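The `HPNet.forward` in the row above decodes each of the three 66-way bin distributions into a scalar by taking an expectation over bin indices, `torch.sum(softmax(logits) * idx_tensor, 2)`, i.e. a soft-argmax over classification bins. A self-contained sketch of just that decoding step, assuming CPU tensors and using `torch.arange` in place of the deprecated `torch.range` seen in the row:

```python
import torch

cls_num = 66                             # number of bins, as in the HPNet row
logits = torch.randn(2, 3, cls_num)      # (batch, heads, bins) dummy logits
idx_tensor = torch.arange(cls_num, dtype=torch.float32)  # bin indices 0..65

probs = torch.softmax(logits, dim=2)              # per-bin probabilities
expected_bin = torch.sum(probs * idx_tensor, 2)   # soft-argmax, shape (2, 3)
print(expected_bin)
```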
from flask import Blueprint, render_template
from bashtube import cache
singlevideos = Blueprint('singlevideos', __name__, template_folder='templates')
@singlevideos.route('/')
def index():
return render_template('singlevideos/single.html')
|
normal
|
{
"blob_id": "ee10bca1126b20378c4e9cea4d2dc7ed6a2044ab",
"index": 9187,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@singlevideos.route('/')\ndef index():\n return render_template('singlevideos/single.html')\n",
"step-3": "<mask token>\nsinglevideos = Blueprint('singlevideos', __name__, template_folder='templates')\n\n\n@singlevideos.route('/')\ndef index():\n return render_template('singlevideos/single.html')\n",
"step-4": "from flask import Blueprint, render_template\nfrom bashtube import cache\nsinglevideos = Blueprint('singlevideos', __name__, template_folder='templates')\n\n\n@singlevideos.route('/')\ndef index():\n return render_template('singlevideos/single.html')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import numpy as np
import tensorflow as tf
class LocNet:
def __init__(self, scope, buttom_layer):
self.scope = scope
with tf.variable_scope(scope) as scope:
self.build_graph(buttom_layer)
self.gt_loc = tf.placeholder(dtype=tf.float32, shape=(None,4),name='gt_loc')
def build_graph(self, buttom_layer):
self.variables = []
self.kernel_weights = []
pool = tf.nn.max_pool(buttom_layer,
ksize=[1, 2, 2, 1],
strides=[1, 2, 2, 1],
padding='SAME',
name='pool')
drop = tf.nn.dropout(pool, 0.3)
with tf.name_scope('fc1') as scope:
shape = int(np.prod(drop.get_shape()[1:]))
fc1w = tf.Variable(tf.truncated_normal([shape, 3000],
dtype=tf.float32,
stddev=1e-1), name='weights')
fc1b = tf.Variable(tf.constant(1.0, shape=[3000], dtype=tf.float32),
trainable=True, name='biases')
pool_flat = tf.reshape(drop, [-1, shape])
fc1l = tf.nn.bias_add(tf.matmul(pool_flat, fc1w), fc1b)
fc1 = tf.nn.relu(fc1l)
self.kernel_weights += [fc1w]
self.variables += [fc1w, fc1b]
with tf.name_scope('fc2') as scope:
fc2w = tf.Variable(tf.truncated_normal([3000, 4],
dtype=tf.float32,
stddev=1e-1), name='weights')
fc2b = tf.Variable(tf.constant(1.0, shape=[4], dtype=tf.float32),
trainable=True, name='biases')
self.logit = tf.nn.bias_add(tf.matmul(fc1, fc2w), fc2b)
self.kernel_weights += [fc2w]
self.variables += [fc2w, fc2b]
def loss(self):
with tf.name_scope(self.scope) as scope:
beta = tf.constant(0.05, name='beta')
loss_rms = tf.reduce_max(tf.squared_difference(self.gt_loc, self.logit))
loss_wd = [tf.reduce_mean(tf.square(w)) for w in self.kernel_weights]
loss_wd = beta * tf.add_n(loss_wd)
total_loss = loss_rms + loss_wd
return total_loss
|
normal
|
{
"blob_id": "dd4dc1c4a0dc47711d1d0512ef3f6b7908735766",
"index": 3149,
"step-1": "<mask token>\n\n\nclass LocNet:\n\n def __init__(self, scope, buttom_layer):\n self.scope = scope\n with tf.variable_scope(scope) as scope:\n self.build_graph(buttom_layer)\n self.gt_loc = tf.placeholder(dtype=tf.float32, shape=(None, 4),\n name='gt_loc')\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass LocNet:\n\n def __init__(self, scope, buttom_layer):\n self.scope = scope\n with tf.variable_scope(scope) as scope:\n self.build_graph(buttom_layer)\n self.gt_loc = tf.placeholder(dtype=tf.float32, shape=(None, 4),\n name='gt_loc')\n <mask token>\n\n def loss(self):\n with tf.name_scope(self.scope) as scope:\n beta = tf.constant(0.05, name='beta')\n loss_rms = tf.reduce_max(tf.squared_difference(self.gt_loc,\n self.logit))\n loss_wd = [tf.reduce_mean(tf.square(w)) for w in self.\n kernel_weights]\n loss_wd = beta * tf.add_n(loss_wd)\n total_loss = loss_rms + loss_wd\n return total_loss\n",
"step-3": "<mask token>\n\n\nclass LocNet:\n\n def __init__(self, scope, buttom_layer):\n self.scope = scope\n with tf.variable_scope(scope) as scope:\n self.build_graph(buttom_layer)\n self.gt_loc = tf.placeholder(dtype=tf.float32, shape=(None, 4),\n name='gt_loc')\n\n def build_graph(self, buttom_layer):\n self.variables = []\n self.kernel_weights = []\n pool = tf.nn.max_pool(buttom_layer, ksize=[1, 2, 2, 1], strides=[1,\n 2, 2, 1], padding='SAME', name='pool')\n drop = tf.nn.dropout(pool, 0.3)\n with tf.name_scope('fc1') as scope:\n shape = int(np.prod(drop.get_shape()[1:]))\n fc1w = tf.Variable(tf.truncated_normal([shape, 3000], dtype=tf.\n float32, stddev=0.1), name='weights')\n fc1b = tf.Variable(tf.constant(1.0, shape=[3000], dtype=tf.\n float32), trainable=True, name='biases')\n pool_flat = tf.reshape(drop, [-1, shape])\n fc1l = tf.nn.bias_add(tf.matmul(pool_flat, fc1w), fc1b)\n fc1 = tf.nn.relu(fc1l)\n self.kernel_weights += [fc1w]\n self.variables += [fc1w, fc1b]\n with tf.name_scope('fc2') as scope:\n fc2w = tf.Variable(tf.truncated_normal([3000, 4], dtype=tf.\n float32, stddev=0.1), name='weights')\n fc2b = tf.Variable(tf.constant(1.0, shape=[4], dtype=tf.float32\n ), trainable=True, name='biases')\n self.logit = tf.nn.bias_add(tf.matmul(fc1, fc2w), fc2b)\n self.kernel_weights += [fc2w]\n self.variables += [fc2w, fc2b]\n\n def loss(self):\n with tf.name_scope(self.scope) as scope:\n beta = tf.constant(0.05, name='beta')\n loss_rms = tf.reduce_max(tf.squared_difference(self.gt_loc,\n self.logit))\n loss_wd = [tf.reduce_mean(tf.square(w)) for w in self.\n kernel_weights]\n loss_wd = beta * tf.add_n(loss_wd)\n total_loss = loss_rms + loss_wd\n return total_loss\n",
"step-4": "import numpy as np\nimport tensorflow as tf\n\n\nclass LocNet:\n\n def __init__(self, scope, buttom_layer):\n self.scope = scope\n with tf.variable_scope(scope) as scope:\n self.build_graph(buttom_layer)\n self.gt_loc = tf.placeholder(dtype=tf.float32, shape=(None, 4),\n name='gt_loc')\n\n def build_graph(self, buttom_layer):\n self.variables = []\n self.kernel_weights = []\n pool = tf.nn.max_pool(buttom_layer, ksize=[1, 2, 2, 1], strides=[1,\n 2, 2, 1], padding='SAME', name='pool')\n drop = tf.nn.dropout(pool, 0.3)\n with tf.name_scope('fc1') as scope:\n shape = int(np.prod(drop.get_shape()[1:]))\n fc1w = tf.Variable(tf.truncated_normal([shape, 3000], dtype=tf.\n float32, stddev=0.1), name='weights')\n fc1b = tf.Variable(tf.constant(1.0, shape=[3000], dtype=tf.\n float32), trainable=True, name='biases')\n pool_flat = tf.reshape(drop, [-1, shape])\n fc1l = tf.nn.bias_add(tf.matmul(pool_flat, fc1w), fc1b)\n fc1 = tf.nn.relu(fc1l)\n self.kernel_weights += [fc1w]\n self.variables += [fc1w, fc1b]\n with tf.name_scope('fc2') as scope:\n fc2w = tf.Variable(tf.truncated_normal([3000, 4], dtype=tf.\n float32, stddev=0.1), name='weights')\n fc2b = tf.Variable(tf.constant(1.0, shape=[4], dtype=tf.float32\n ), trainable=True, name='biases')\n self.logit = tf.nn.bias_add(tf.matmul(fc1, fc2w), fc2b)\n self.kernel_weights += [fc2w]\n self.variables += [fc2w, fc2b]\n\n def loss(self):\n with tf.name_scope(self.scope) as scope:\n beta = tf.constant(0.05, name='beta')\n loss_rms = tf.reduce_max(tf.squared_difference(self.gt_loc,\n self.logit))\n loss_wd = [tf.reduce_mean(tf.square(w)) for w in self.\n kernel_weights]\n loss_wd = beta * tf.add_n(loss_wd)\n total_loss = loss_rms + loss_wd\n return total_loss\n",
"step-5": "\n\nimport numpy as np \nimport tensorflow as tf\n\n\nclass LocNet: \n def __init__(self, scope, buttom_layer):\n self.scope = scope \n with tf.variable_scope(scope) as scope:\n self.build_graph(buttom_layer)\n self.gt_loc = tf.placeholder(dtype=tf.float32, shape=(None,4),name='gt_loc')\n \n def build_graph(self, buttom_layer):\n self.variables = []\n self.kernel_weights = []\n pool = tf.nn.max_pool(buttom_layer,\n ksize=[1, 2, 2, 1],\n strides=[1, 2, 2, 1],\n padding='SAME',\n name='pool')\n \n drop = tf.nn.dropout(pool, 0.3)\n with tf.name_scope('fc1') as scope:\n shape = int(np.prod(drop.get_shape()[1:]))\n fc1w = tf.Variable(tf.truncated_normal([shape, 3000],\n dtype=tf.float32,\n stddev=1e-1), name='weights')\n fc1b = tf.Variable(tf.constant(1.0, shape=[3000], dtype=tf.float32),\n trainable=True, name='biases')\n pool_flat = tf.reshape(drop, [-1, shape])\n fc1l = tf.nn.bias_add(tf.matmul(pool_flat, fc1w), fc1b)\n fc1 = tf.nn.relu(fc1l)\n self.kernel_weights += [fc1w]\n self.variables += [fc1w, fc1b]\n \n\n with tf.name_scope('fc2') as scope:\n fc2w = tf.Variable(tf.truncated_normal([3000, 4],\n dtype=tf.float32,\n stddev=1e-1), name='weights')\n fc2b = tf.Variable(tf.constant(1.0, shape=[4], dtype=tf.float32),\n trainable=True, name='biases')\n self.logit = tf.nn.bias_add(tf.matmul(fc1, fc2w), fc2b)\n self.kernel_weights += [fc2w]\n self.variables += [fc2w, fc2b]\n \n def loss(self):\n with tf.name_scope(self.scope) as scope:\n beta = tf.constant(0.05, name='beta')\n loss_rms = tf.reduce_max(tf.squared_difference(self.gt_loc, self.logit))\n loss_wd = [tf.reduce_mean(tf.square(w)) for w in self.kernel_weights]\n loss_wd = beta * tf.add_n(loss_wd)\n total_loss = loss_rms + loss_wd\n return total_loss\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
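One detail worth flagging in the row above: despite its name, `loss_rms` is the maximum squared difference over the batch (`tf.reduce_max(tf.squared_difference(...))`), not a root-mean-square, and it is combined with an L2 penalty on the kernel weights. A plain NumPy illustration of the same arithmetic, with made-up values:

```python
import numpy as np

gt = np.array([[0.0, 1.0, 2.0, 3.0], [1.0, 1.0, 1.0, 1.0]])
pred = np.array([[0.5, 1.0, 2.0, 2.0], [1.0, 2.0, 1.0, 1.0]])
weights = [np.ones((3, 2)), 0.5 * np.ones((2, 4))]  # stand-ins for fc1w, fc2w
beta = 0.05

loss_max = np.max((gt - pred) ** 2)                     # 1.0, a max, not an RMS
loss_wd = beta * sum(np.mean(w ** 2) for w in weights)  # 0.05 * (1.0 + 0.25)
print(loss_max + loss_wd)                               # 1.0625
```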
from __future__ import unicode_literals
import frappe, json
def execute():
for ps in frappe.get_all('Property Setter', filters={'property': '_idx'},
fields = ['doc_type', 'value']):
custom_fields = frappe.get_all('Custom Field',
filters = {'dt': ps.doc_type}, fields=['name', 'fieldname'])
if custom_fields:
_idx = json.loads(ps.value)
for custom_field in custom_fields:
if custom_field.fieldname in _idx:
custom_field_idx = _idx.index(custom_field.fieldname)
if custom_field_idx == 0:
prev_fieldname = ""
else:
prev_fieldname = _idx[custom_field_idx - 1]
else:
prev_fieldname = _idx[-1]
custom_field_idx = len(_idx)
frappe.db.set_value('Custom Field', custom_field.name, 'insert_after', prev_fieldname)
frappe.db.set_value('Custom Field', custom_field.name, 'idx', custom_field_idx)
|
normal
|
{
"blob_id": "6f951815d0edafb08e7734d0e95e6564ab1be1f7",
"index": 2375,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef execute():\n for ps in frappe.get_all('Property Setter', filters={'property': '_idx'\n }, fields=['doc_type', 'value']):\n custom_fields = frappe.get_all('Custom Field', filters={'dt': ps.\n doc_type}, fields=['name', 'fieldname'])\n if custom_fields:\n _idx = json.loads(ps.value)\n for custom_field in custom_fields:\n if custom_field.fieldname in _idx:\n custom_field_idx = _idx.index(custom_field.fieldname)\n if custom_field_idx == 0:\n prev_fieldname = ''\n else:\n prev_fieldname = _idx[custom_field_idx - 1]\n else:\n prev_fieldname = _idx[-1]\n custom_field_idx = len(_idx)\n frappe.db.set_value('Custom Field', custom_field.name,\n 'insert_after', prev_fieldname)\n frappe.db.set_value('Custom Field', custom_field.name,\n 'idx', custom_field_idx)\n",
"step-3": "from __future__ import unicode_literals\nimport frappe, json\n\n\ndef execute():\n for ps in frappe.get_all('Property Setter', filters={'property': '_idx'\n }, fields=['doc_type', 'value']):\n custom_fields = frappe.get_all('Custom Field', filters={'dt': ps.\n doc_type}, fields=['name', 'fieldname'])\n if custom_fields:\n _idx = json.loads(ps.value)\n for custom_field in custom_fields:\n if custom_field.fieldname in _idx:\n custom_field_idx = _idx.index(custom_field.fieldname)\n if custom_field_idx == 0:\n prev_fieldname = ''\n else:\n prev_fieldname = _idx[custom_field_idx - 1]\n else:\n prev_fieldname = _idx[-1]\n custom_field_idx = len(_idx)\n frappe.db.set_value('Custom Field', custom_field.name,\n 'insert_after', prev_fieldname)\n frappe.db.set_value('Custom Field', custom_field.name,\n 'idx', custom_field_idx)\n",
"step-4": "from __future__ import unicode_literals\nimport frappe, json\n\ndef execute():\n\tfor ps in frappe.get_all('Property Setter', filters={'property': '_idx'},\n\t\tfields = ['doc_type', 'value']):\n\t\tcustom_fields = frappe.get_all('Custom Field',\n\t\t\tfilters = {'dt': ps.doc_type}, fields=['name', 'fieldname'])\n\n\t\tif custom_fields:\n\t\t\t_idx = json.loads(ps.value)\n\n\t\t\tfor custom_field in custom_fields:\n\t\t\t\tif custom_field.fieldname in _idx:\n\t\t\t\t\tcustom_field_idx = _idx.index(custom_field.fieldname)\n\t\t\t\t\tif custom_field_idx == 0:\n\t\t\t\t\t\tprev_fieldname = \"\"\n\n\t\t\t\t\telse:\n\t\t\t\t\t\tprev_fieldname = _idx[custom_field_idx - 1]\n\n\t\t\t\telse:\n\t\t\t\t\tprev_fieldname = _idx[-1]\n\t\t\t\t\tcustom_field_idx = len(_idx)\n\n\t\t\t\tfrappe.db.set_value('Custom Field', custom_field.name, 'insert_after', prev_fieldname)\n\t\t\t\tfrappe.db.set_value('Custom Field', custom_field.name, 'idx', custom_field_idx)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
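The `execute` patch in the row above derives each custom field's `insert_after` from its position in the saved `_idx` ordering: the preceding field in `_idx`, an empty string if the field comes first, or the last field (with `idx = len(_idx)`) if it is absent. A framework-free toy illustration of that index logic, with hypothetical field names:

```python
_idx = ["naming_series", "customer", "custom_priority", "posting_date"]

def placement_for(fieldname):
    """Return (insert_after, idx) the way the patch computes them."""
    if fieldname in _idx:
        pos = _idx.index(fieldname)
        return ("" if pos == 0 else _idx[pos - 1]), pos
    return _idx[-1], len(_idx)  # fields missing from _idx go to the end

print(placement_for("custom_priority"))  # ('customer', 2)
print(placement_for("custom_missing"))   # ('posting_date', 4)
```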
class tenDParameters:
def __init__(self,
b: float,
DM: float,
pm_l: float,
pm_b: float,
vrad: float,
sb: float,
spml: float,
spmb: float,
sdm: float,
vc: float):
self.b = b
self.DM = DM
# this is actually pm_l * cos b, apparently
self.pm_l = pm_l
self.pm_b = pm_b
self.vrad = vrad
self.sb = sb
self.spml = spml
self.spmb = spmb
self.sdm = sdm
self.vc = vc
|
normal
|
{
"blob_id": "82e7e22293551e061dcb295c52714c22df0ed0ce",
"index": 5678,
"step-1": "<mask token>\n",
"step-2": "class tenDParameters:\n <mask token>\n",
"step-3": "class tenDParameters:\n\n def __init__(self, b: float, DM: float, pm_l: float, pm_b: float, vrad:\n float, sb: float, spml: float, spmb: float, sdm: float, vc: float):\n self.b = b\n self.DM = DM\n self.pm_l = pm_l\n self.pm_b = pm_b\n self.vrad = vrad\n self.sb = sb\n self.spml = spml\n self.spmb = spmb\n self.sdm = sdm\n self.vc = vc\n",
"step-4": "class tenDParameters:\n def __init__(self,\n b: float,\n DM: float,\n pm_l: float,\n pm_b: float,\n vrad: float,\n sb: float,\n spml: float,\n spmb: float,\n sdm: float,\n vc: float):\n self.b = b\n self.DM = DM\n # this is actually pm_l * cos b, apparently\n self.pm_l = pm_l\n self.pm_b = pm_b\n self.vrad = vrad\n self.sb = sb\n self.spml = spml\n self.spmb = spmb\n self.sdm = sdm\n self.vc = vc",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Only executable files go here
#
# from ..src import package
# data_dict = package.pack()
# from ..src.plugins import * # interpreted once, everything loaded into memory
# from ..src import plugins # import this folder (package/module/library); by default its __init__ is loaded into memory
#
#
# plugins.pack()
from ..src.script import run
if __name__ == '__main__':
run()
|
normal
|
{
"blob_id": "4f870e0d86d9f9b8c620115a618ea32abc24c52d",
"index": 3008,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n run()\n",
"step-3": "from ..src.script import run\nif __name__ == '__main__':\n run()\n",
"step-4": "# 只放置可执行文件\n#\n# from ..src import package\n# data_dict = package.pack()\n\n# from ..src.plugins import * #解释一遍全放入内存\n# from ..src import plugins #导入这个文件夹(包,模块,类库),默认加载init文件到内存\n#\n#\n# plugins.pack()\n\n\nfrom ..src.script import run\n\nif __name__ == '__main__':\n run()\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from lessons.models import Lesson, Question, Response
from usermanage.models import SchoolClass
import json
@login_required
def lessons_overview(request):
if request.method == 'POST':
if request.user.is_staff:
school_class = SchoolClass.objects.get(id=request.POST['class_id'])
school_class.password = request.POST['class_pwd']
school_class.save()
if request.user.is_staff:
classes = request.user.teachers.select_related()
else:
classes = request.user.students.select_related()
return render(request, 'lessons_overview.html', {
'classes': classes,
})
@login_required
def lesson(request, id):
lesson = Lesson.objects.get(id=id)
if request.GET.get('grade_class'):
school_class = SchoolClass.objects.get(id=request.GET['grade_class'])
else:
school_class = None
return render(request, 'lesson.html', {
'lesson': lesson,
'school_class': school_class,
})
@staff_member_required
def new_lesson(request, id):
school_class = SchoolClass.objects.get(id=id)
if request.method == 'POST':
lesson = Lesson(
name=request.POST['lesson_name'],
school_class=school_class,
)
for title in request.POST.getlist('questions[]'):
question = Question(title=title)
question.save()
lesson.questions.add(question)
lesson.save()
return redirect('/')
return render(request, 'new_lesson.html', {
'school_class': school_class,
})
@staff_member_required
def grade_question(request, class_id, id):
question = Question.objects.get(id=id)
school_class = SchoolClass.objects.get(id=class_id)
students = school_class.students.all()
responses = Response.objects.filter(
answerer__in=students,
question=question
)
unanswered_students = []
for student in students:
try:
Response.objects.get(answerer=student, question=question)
except Response.DoesNotExist:
unanswered_students.append(student.get_full_name())
unanswered_students = ', '.join(unanswered_students) if unanswered_students else None
return render(request, 'question.html', {
'question': question,
'responses': responses,
'unanswered_students': unanswered_students,
})
def update_questions(questions, lesson_id):
questions = [q for q in questions if len(q) > 0]
lesson = Lesson.objects.get(id=lesson_id)
for question in lesson.questions.all():
question.title = questions.pop(0)
question.save()
if len(questions) > 0:
for title in questions:
new_question = Question(title=title)
new_question.save()
lesson.questions.add(new_question)
lesson.save()
@staff_member_required
def edit_lesson(request, id):
if request.method == 'POST':
if request.POST['action'] == 'update':
update_questions(request.POST.getlist('questions[]'), id)
return HttpResponse(status=200)
elif request.POST['action'] == 'delete':
Question.objects.get(id=request.POST['id']).delete()
return HttpResponse(status=200)
elif request.method == 'GET':
lesson = Lesson.objects.get(id=id)
return render(request, 'edit_lesson.html', {
'lesson': lesson,
})
@staff_member_required
def mark_response_seen(request):
response = Response.objects.get(id=request.POST['id'])
response.seen = True
response.save()
return HttpResponse(status=200)
@staff_member_required
def save_comment(request):
for id in request.POST.keys():
response = Response.objects.get(id=id)
response.seen = True # redundant
response.comment = request.POST[id]
response.save()
return HttpResponse(status=200)
@login_required
def save_responses(request):
responses = request.POST.items()
lesson = Lesson.objects.get(id=request.POST['lesson'])
responses.pop(responses.index(('lesson', request.POST['lesson'])))
new_response_ids = {}
for id in responses:
try:
response = Response.objects.get(id=id[0], answerer=request.user)
response.text = request.POST[id[0]]
response.save()
except ValueError:
if len(request.POST[id[0]]) > 0:
response = Response(
text=request.POST[id[0]],
answerer=request.user,
question=Question.objects.get(id=id[0][4:]),
lesson=lesson
)
response.save()
new_response_ids[id[0]] = str(response.id)
return HttpResponse(json.dumps(new_response_ids),
content_type='application/json')
|
normal
|
{
"blob_id": "ee417c5fff858d26ca60a78dffe4cff503a6f2b5",
"index": 6824,
"step-1": "<mask token>\n\n\n@login_required\ndef lessons_overview(request):\n if request.method == 'POST':\n if request.user.is_staff:\n school_class = SchoolClass.objects.get(id=request.POST['class_id'])\n school_class.password = request.POST['class_pwd']\n school_class.save()\n if request.user.is_staff:\n classes = request.user.teachers.select_related()\n else:\n classes = request.user.students.select_related()\n return render(request, 'lessons_overview.html', {'classes': classes})\n\n\n@login_required\ndef lesson(request, id):\n lesson = Lesson.objects.get(id=id)\n if request.GET.get('grade_class'):\n school_class = SchoolClass.objects.get(id=request.GET['grade_class'])\n else:\n school_class = None\n return render(request, 'lesson.html', {'lesson': lesson, 'school_class':\n school_class})\n\n\n<mask token>\n\n\n@staff_member_required\ndef grade_question(request, class_id, id):\n question = Question.objects.get(id=id)\n school_class = SchoolClass.objects.get(id=class_id)\n students = school_class.students.all()\n responses = Response.objects.filter(answerer__in=students, question=\n question)\n unanswered_students = []\n for student in students:\n try:\n Response.objects.get(answerer=student, question=question)\n except Response.DoesNotExist:\n unanswered_students.append(student.get_full_name())\n unanswered_students = ', '.join(unanswered_students\n ) if unanswered_students else None\n return render(request, 'question.html', {'question': question,\n 'responses': responses, 'unanswered_students': unanswered_students})\n\n\ndef update_questions(questions, lesson_id):\n questions = [q for q in questions if len(q) > 0]\n lesson = Lesson.objects.get(id=lesson_id)\n for question in lesson.questions.all():\n question.title = questions.pop(0)\n question.save()\n if len(questions) > 0:\n for title in questions:\n new_question = Question(title=title)\n new_question.save()\n lesson.questions.add(new_question)\n lesson.save()\n\n\n@staff_member_required\ndef edit_lesson(request, id):\n if request.method == 'POST':\n if request.POST['action'] == 'update':\n update_questions(request.POST.getlist('questions[]'), id)\n return HttpResponse(status=200)\n elif request.POST['action'] == 'delete':\n Question.objects.get(id=request.POST['id']).delete()\n return HttpResponse(status=200)\n elif request.method == 'GET':\n lesson = Lesson.objects.get(id=id)\n return render(request, 'edit_lesson.html', {'lesson': lesson})\n\n\n@staff_member_required\ndef mark_response_seen(request):\n response = Response.objects.get(id=request.POST['id'])\n response.seen = True\n response.save()\n return HttpResponse(status=200)\n\n\n<mask token>\n\n\n@login_required\ndef save_responses(request):\n responses = request.POST.items()\n lesson = Lesson.objects.get(id=request.POST['lesson'])\n responses.pop(responses.index(('lesson', request.POST['lesson'])))\n new_response_ids = {}\n for id in responses:\n try:\n response = Response.objects.get(id=id[0], answerer=request.user)\n response.text = request.POST[id[0]]\n response.save()\n except ValueError:\n if len(request.POST[id[0]]) > 0:\n response = Response(text=request.POST[id[0]], answerer=\n request.user, question=Question.objects.get(id=id[0][4:\n ]), lesson=lesson)\n response.save()\n new_response_ids[id[0]] = str(response.id)\n return HttpResponse(json.dumps(new_response_ids), content_type=\n 'application/json')\n",
"step-2": "<mask token>\n\n\n@login_required\ndef lessons_overview(request):\n if request.method == 'POST':\n if request.user.is_staff:\n school_class = SchoolClass.objects.get(id=request.POST['class_id'])\n school_class.password = request.POST['class_pwd']\n school_class.save()\n if request.user.is_staff:\n classes = request.user.teachers.select_related()\n else:\n classes = request.user.students.select_related()\n return render(request, 'lessons_overview.html', {'classes': classes})\n\n\n@login_required\ndef lesson(request, id):\n lesson = Lesson.objects.get(id=id)\n if request.GET.get('grade_class'):\n school_class = SchoolClass.objects.get(id=request.GET['grade_class'])\n else:\n school_class = None\n return render(request, 'lesson.html', {'lesson': lesson, 'school_class':\n school_class})\n\n\n@staff_member_required\ndef new_lesson(request, id):\n school_class = SchoolClass.objects.get(id=id)\n if request.method == 'POST':\n lesson = Lesson(name=request.POST['lesson_name'], school_class=\n school_class)\n for title in request.POST.getlist('questions[]'):\n question = Question(title=title)\n question.save()\n lesson.questions.add(question)\n lesson.save()\n return redirect('/')\n return render(request, 'new_lesson.html', {'school_class': school_class})\n\n\n@staff_member_required\ndef grade_question(request, class_id, id):\n question = Question.objects.get(id=id)\n school_class = SchoolClass.objects.get(id=class_id)\n students = school_class.students.all()\n responses = Response.objects.filter(answerer__in=students, question=\n question)\n unanswered_students = []\n for student in students:\n try:\n Response.objects.get(answerer=student, question=question)\n except Response.DoesNotExist:\n unanswered_students.append(student.get_full_name())\n unanswered_students = ', '.join(unanswered_students\n ) if unanswered_students else None\n return render(request, 'question.html', {'question': question,\n 'responses': responses, 'unanswered_students': unanswered_students})\n\n\ndef update_questions(questions, lesson_id):\n questions = [q for q in questions if len(q) > 0]\n lesson = Lesson.objects.get(id=lesson_id)\n for question in lesson.questions.all():\n question.title = questions.pop(0)\n question.save()\n if len(questions) > 0:\n for title in questions:\n new_question = Question(title=title)\n new_question.save()\n lesson.questions.add(new_question)\n lesson.save()\n\n\n@staff_member_required\ndef edit_lesson(request, id):\n if request.method == 'POST':\n if request.POST['action'] == 'update':\n update_questions(request.POST.getlist('questions[]'), id)\n return HttpResponse(status=200)\n elif request.POST['action'] == 'delete':\n Question.objects.get(id=request.POST['id']).delete()\n return HttpResponse(status=200)\n elif request.method == 'GET':\n lesson = Lesson.objects.get(id=id)\n return render(request, 'edit_lesson.html', {'lesson': lesson})\n\n\n@staff_member_required\ndef mark_response_seen(request):\n response = Response.objects.get(id=request.POST['id'])\n response.seen = True\n response.save()\n return HttpResponse(status=200)\n\n\n<mask token>\n\n\n@login_required\ndef save_responses(request):\n responses = request.POST.items()\n lesson = Lesson.objects.get(id=request.POST['lesson'])\n responses.pop(responses.index(('lesson', request.POST['lesson'])))\n new_response_ids = {}\n for id in responses:\n try:\n response = Response.objects.get(id=id[0], answerer=request.user)\n response.text = request.POST[id[0]]\n response.save()\n except ValueError:\n if len(request.POST[id[0]]) > 
0:\n response = Response(text=request.POST[id[0]], answerer=\n request.user, question=Question.objects.get(id=id[0][4:\n ]), lesson=lesson)\n response.save()\n new_response_ids[id[0]] = str(response.id)\n return HttpResponse(json.dumps(new_response_ids), content_type=\n 'application/json')\n",
"step-3": "<mask token>\n\n\n@login_required\ndef lessons_overview(request):\n if request.method == 'POST':\n if request.user.is_staff:\n school_class = SchoolClass.objects.get(id=request.POST['class_id'])\n school_class.password = request.POST['class_pwd']\n school_class.save()\n if request.user.is_staff:\n classes = request.user.teachers.select_related()\n else:\n classes = request.user.students.select_related()\n return render(request, 'lessons_overview.html', {'classes': classes})\n\n\n@login_required\ndef lesson(request, id):\n lesson = Lesson.objects.get(id=id)\n if request.GET.get('grade_class'):\n school_class = SchoolClass.objects.get(id=request.GET['grade_class'])\n else:\n school_class = None\n return render(request, 'lesson.html', {'lesson': lesson, 'school_class':\n school_class})\n\n\n@staff_member_required\ndef new_lesson(request, id):\n school_class = SchoolClass.objects.get(id=id)\n if request.method == 'POST':\n lesson = Lesson(name=request.POST['lesson_name'], school_class=\n school_class)\n for title in request.POST.getlist('questions[]'):\n question = Question(title=title)\n question.save()\n lesson.questions.add(question)\n lesson.save()\n return redirect('/')\n return render(request, 'new_lesson.html', {'school_class': school_class})\n\n\n@staff_member_required\ndef grade_question(request, class_id, id):\n question = Question.objects.get(id=id)\n school_class = SchoolClass.objects.get(id=class_id)\n students = school_class.students.all()\n responses = Response.objects.filter(answerer__in=students, question=\n question)\n unanswered_students = []\n for student in students:\n try:\n Response.objects.get(answerer=student, question=question)\n except Response.DoesNotExist:\n unanswered_students.append(student.get_full_name())\n unanswered_students = ', '.join(unanswered_students\n ) if unanswered_students else None\n return render(request, 'question.html', {'question': question,\n 'responses': responses, 'unanswered_students': unanswered_students})\n\n\ndef update_questions(questions, lesson_id):\n questions = [q for q in questions if len(q) > 0]\n lesson = Lesson.objects.get(id=lesson_id)\n for question in lesson.questions.all():\n question.title = questions.pop(0)\n question.save()\n if len(questions) > 0:\n for title in questions:\n new_question = Question(title=title)\n new_question.save()\n lesson.questions.add(new_question)\n lesson.save()\n\n\n@staff_member_required\ndef edit_lesson(request, id):\n if request.method == 'POST':\n if request.POST['action'] == 'update':\n update_questions(request.POST.getlist('questions[]'), id)\n return HttpResponse(status=200)\n elif request.POST['action'] == 'delete':\n Question.objects.get(id=request.POST['id']).delete()\n return HttpResponse(status=200)\n elif request.method == 'GET':\n lesson = Lesson.objects.get(id=id)\n return render(request, 'edit_lesson.html', {'lesson': lesson})\n\n\n@staff_member_required\ndef mark_response_seen(request):\n response = Response.objects.get(id=request.POST['id'])\n response.seen = True\n response.save()\n return HttpResponse(status=200)\n\n\n@staff_member_required\ndef save_comment(request):\n for id in request.POST.keys():\n response = Response.objects.get(id=id)\n response.seen = True\n response.comment = request.POST[id]\n response.save()\n return HttpResponse(status=200)\n\n\n@login_required\ndef save_responses(request):\n responses = request.POST.items()\n lesson = Lesson.objects.get(id=request.POST['lesson'])\n responses.pop(responses.index(('lesson', request.POST['lesson'])))\n 
new_response_ids = {}\n for id in responses:\n try:\n response = Response.objects.get(id=id[0], answerer=request.user)\n response.text = request.POST[id[0]]\n response.save()\n except ValueError:\n if len(request.POST[id[0]]) > 0:\n response = Response(text=request.POST[id[0]], answerer=\n request.user, question=Question.objects.get(id=id[0][4:\n ]), lesson=lesson)\n response.save()\n new_response_ids[id[0]] = str(response.id)\n return HttpResponse(json.dumps(new_response_ids), content_type=\n 'application/json')\n",
"step-4": "from django.shortcuts import render, redirect\nfrom django.http import HttpResponse\nfrom django.contrib.auth.decorators import login_required\nfrom django.contrib.admin.views.decorators import staff_member_required\nfrom lessons.models import Lesson, Question, Response\nfrom usermanage.models import SchoolClass\nimport json\n\n\n@login_required\ndef lessons_overview(request):\n if request.method == 'POST':\n if request.user.is_staff:\n school_class = SchoolClass.objects.get(id=request.POST['class_id'])\n school_class.password = request.POST['class_pwd']\n school_class.save()\n if request.user.is_staff:\n classes = request.user.teachers.select_related()\n else:\n classes = request.user.students.select_related()\n return render(request, 'lessons_overview.html', {'classes': classes})\n\n\n@login_required\ndef lesson(request, id):\n lesson = Lesson.objects.get(id=id)\n if request.GET.get('grade_class'):\n school_class = SchoolClass.objects.get(id=request.GET['grade_class'])\n else:\n school_class = None\n return render(request, 'lesson.html', {'lesson': lesson, 'school_class':\n school_class})\n\n\n@staff_member_required\ndef new_lesson(request, id):\n school_class = SchoolClass.objects.get(id=id)\n if request.method == 'POST':\n lesson = Lesson(name=request.POST['lesson_name'], school_class=\n school_class)\n for title in request.POST.getlist('questions[]'):\n question = Question(title=title)\n question.save()\n lesson.questions.add(question)\n lesson.save()\n return redirect('/')\n return render(request, 'new_lesson.html', {'school_class': school_class})\n\n\n@staff_member_required\ndef grade_question(request, class_id, id):\n question = Question.objects.get(id=id)\n school_class = SchoolClass.objects.get(id=class_id)\n students = school_class.students.all()\n responses = Response.objects.filter(answerer__in=students, question=\n question)\n unanswered_students = []\n for student in students:\n try:\n Response.objects.get(answerer=student, question=question)\n except Response.DoesNotExist:\n unanswered_students.append(student.get_full_name())\n unanswered_students = ', '.join(unanswered_students\n ) if unanswered_students else None\n return render(request, 'question.html', {'question': question,\n 'responses': responses, 'unanswered_students': unanswered_students})\n\n\ndef update_questions(questions, lesson_id):\n questions = [q for q in questions if len(q) > 0]\n lesson = Lesson.objects.get(id=lesson_id)\n for question in lesson.questions.all():\n question.title = questions.pop(0)\n question.save()\n if len(questions) > 0:\n for title in questions:\n new_question = Question(title=title)\n new_question.save()\n lesson.questions.add(new_question)\n lesson.save()\n\n\n@staff_member_required\ndef edit_lesson(request, id):\n if request.method == 'POST':\n if request.POST['action'] == 'update':\n update_questions(request.POST.getlist('questions[]'), id)\n return HttpResponse(status=200)\n elif request.POST['action'] == 'delete':\n Question.objects.get(id=request.POST['id']).delete()\n return HttpResponse(status=200)\n elif request.method == 'GET':\n lesson = Lesson.objects.get(id=id)\n return render(request, 'edit_lesson.html', {'lesson': lesson})\n\n\n@staff_member_required\ndef mark_response_seen(request):\n response = Response.objects.get(id=request.POST['id'])\n response.seen = True\n response.save()\n return HttpResponse(status=200)\n\n\n@staff_member_required\ndef save_comment(request):\n for id in request.POST.keys():\n response = Response.objects.get(id=id)\n response.seen = 
True\n response.comment = request.POST[id]\n response.save()\n return HttpResponse(status=200)\n\n\n@login_required\ndef save_responses(request):\n responses = request.POST.items()\n lesson = Lesson.objects.get(id=request.POST['lesson'])\n responses.pop(responses.index(('lesson', request.POST['lesson'])))\n new_response_ids = {}\n for id in responses:\n try:\n response = Response.objects.get(id=id[0], answerer=request.user)\n response.text = request.POST[id[0]]\n response.save()\n except ValueError:\n if len(request.POST[id[0]]) > 0:\n response = Response(text=request.POST[id[0]], answerer=\n request.user, question=Question.objects.get(id=id[0][4:\n ]), lesson=lesson)\n response.save()\n new_response_ids[id[0]] = str(response.id)\n return HttpResponse(json.dumps(new_response_ids), content_type=\n 'application/json')\n",
"step-5": "from django.shortcuts import render, redirect\nfrom django.http import HttpResponse\nfrom django.contrib.auth.decorators import login_required\nfrom django.contrib.admin.views.decorators import staff_member_required\nfrom lessons.models import Lesson, Question, Response\nfrom usermanage.models import SchoolClass\nimport json\n\n\n@login_required\ndef lessons_overview(request):\n if request.method == 'POST':\n if request.user.is_staff:\n school_class = SchoolClass.objects.get(id=request.POST['class_id'])\n school_class.password = request.POST['class_pwd']\n school_class.save()\n\n if request.user.is_staff:\n classes = request.user.teachers.select_related()\n else:\n classes = request.user.students.select_related()\n return render(request, 'lessons_overview.html', {\n 'classes': classes,\n })\n\n\n@login_required\ndef lesson(request, id):\n lesson = Lesson.objects.get(id=id)\n if request.GET.get('grade_class'):\n school_class = SchoolClass.objects.get(id=request.GET['grade_class'])\n else:\n school_class = None\n return render(request, 'lesson.html', {\n 'lesson': lesson,\n 'school_class': school_class,\n })\n\n\n@staff_member_required\ndef new_lesson(request, id):\n school_class = SchoolClass.objects.get(id=id)\n\n if request.method == 'POST':\n lesson = Lesson(\n name=request.POST['lesson_name'],\n school_class=school_class,\n )\n for title in request.POST.getlist('questions[]'):\n question = Question(title=title)\n question.save()\n lesson.questions.add(question)\n lesson.save()\n return redirect('/')\n\n return render(request, 'new_lesson.html', {\n 'school_class': school_class,\n })\n\n\n@staff_member_required\ndef grade_question(request, class_id, id):\n question = Question.objects.get(id=id)\n\n school_class = SchoolClass.objects.get(id=class_id)\n students = school_class.students.all()\n responses = Response.objects.filter(\n answerer__in=students,\n question=question\n )\n\n unanswered_students = []\n for student in students:\n try:\n Response.objects.get(answerer=student, question=question)\n except Response.DoesNotExist:\n unanswered_students.append(student.get_full_name())\n unanswered_students = ', '.join(unanswered_students) if unanswered_students else None\n\n return render(request, 'question.html', {\n 'question': question,\n 'responses': responses,\n 'unanswered_students': unanswered_students,\n })\n\n\ndef update_questions(questions, lesson_id):\n questions = [q for q in questions if len(q) > 0]\n lesson = Lesson.objects.get(id=lesson_id)\n for question in lesson.questions.all():\n question.title = questions.pop(0)\n question.save()\n if len(questions) > 0:\n for title in questions:\n new_question = Question(title=title)\n new_question.save()\n lesson.questions.add(new_question)\n lesson.save()\n\n\n@staff_member_required\ndef edit_lesson(request, id):\n if request.method == 'POST':\n if request.POST['action'] == 'update':\n update_questions(request.POST.getlist('questions[]'), id)\n return HttpResponse(status=200)\n elif request.POST['action'] == 'delete':\n Question.objects.get(id=request.POST['id']).delete()\n return HttpResponse(status=200)\n\n elif request.method == 'GET':\n lesson = Lesson.objects.get(id=id)\n return render(request, 'edit_lesson.html', {\n 'lesson': lesson,\n })\n\n\n@staff_member_required\ndef mark_response_seen(request):\n response = Response.objects.get(id=request.POST['id'])\n response.seen = True\n response.save()\n return HttpResponse(status=200)\n\n\n@staff_member_required\ndef save_comment(request):\n for id in 
request.POST.keys():\n response = Response.objects.get(id=id)\n response.seen = True # redundant\n response.comment = request.POST[id]\n response.save()\n return HttpResponse(status=200)\n\n\n@login_required\ndef save_responses(request):\n responses = request.POST.items()\n lesson = Lesson.objects.get(id=request.POST['lesson'])\n responses.pop(responses.index(('lesson', request.POST['lesson'])))\n new_response_ids = {}\n\n for id in responses:\n try:\n response = Response.objects.get(id=id[0], answerer=request.user)\n response.text = request.POST[id[0]]\n response.save()\n except ValueError:\n if len(request.POST[id[0]]) > 0:\n response = Response(\n text=request.POST[id[0]],\n answerer=request.user,\n question=Question.objects.get(id=id[0][4:]),\n lesson=lesson\n )\n response.save()\n new_response_ids[id[0]] = str(response.id)\n\n return HttpResponse(json.dumps(new_response_ids),\n content_type='application/json')\n",
"step-ids": [
7,
8,
9,
10,
11
]
}
|
[
7,
8,
9,
10,
11
] |
from django.urls import path
from . import views
app_name = 'orders'
urlpatterns = [
    path('checkout', views.order_checkout_view, name='orders-checkout'),
]
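# Usage sketch (assumption, not part of the original project): this module is
# normally wired into the project ROOT_URLCONF, e.g.
#     path('orders/', include('orders.urls')),
# after which app_name = 'orders' lets the route be reversed as
# 'orders:orders-checkout'.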
|
normal
|
{
"blob_id": "031f668fbf75b54ec874a59f53c60ceca53779cf",
"index": 8942,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'orders'\nurlpatterns = [path('checkout', views.order_checkout_view, name=\n 'orders-checkout')]\n",
"step-3": "from django.urls import path\nfrom . import views\napp_name = 'orders'\nurlpatterns = [path('checkout', views.order_checkout_view, name=\n 'orders-checkout')]\n",
"step-4": "from django.urls import path\n\nfrom . import views\n\napp_name = 'orders'\nurlpatterns = [\n path('checkout' , views.order_checkout_view , name='orders-checkout') ,\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""
Tests for the Transformer RNNCell.
"""
import pytest
import numpy as np
import tensorflow as tf
from .transformer import positional_encoding, transformer_layer
from .cell import (LimitedTransformerCell, UnlimitedTransformerCell,
inject_at_timestep, sequence_masks)
def test_inject_at_timestep():
with tf.Graph().as_default():
with tf.Session() as sess:
in_seq = tf.constant(np.array([
[
[1, 2, 3, 4],
[5, 6, 7, 8],
],
[
[9, 10, 11, 12],
[13, 14, 15, 16],
],
[
[17, 18, 19, 20],
[21, 22, 23, 24],
],
], dtype='float32'))
injection = tf.constant(np.array([
[-1, -2, -3, -4],
[-5, -6, -7, -8],
[-9, -10, -11, -12],
], dtype='float32'))
indices = np.array([0, 1, 0], dtype='int32')
injected = sess.run(inject_at_timestep(indices, in_seq, injection))
expected = np.array([
[
[-1, -2, -3, -4],
[5, 6, 7, 8],
],
[
[9, 10, 11, 12],
[-5, -6, -7, -8],
],
[
[-9, -10, -11, -12],
[21, 22, 23, 24],
],
], dtype='float32')
assert (injected == expected).all()
def test_sequence_masks():
with tf.Graph().as_default():
with tf.Session() as sess:
indices = tf.constant(np.array([3, 1, 2], dtype='int32'))
actual = sess.run(sequence_masks(indices, tf.constant(4, dtype=tf.int32), tf.float32))
expected = np.array([
[0, 0, 0, 0],
[0, 0, -np.inf, -np.inf],
[0, 0, 0, -np.inf],
], dtype='float32')
assert (actual == expected).all()
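# Reading of the expected values above (an inference, not stated in the source):
# sequence_masks appears to build additive attention masks, where positions past
# each sequence's current index get -inf so they receive zero weight after softmax.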
@pytest.mark.parametrize('cell_cls', [LimitedTransformerCell, UnlimitedTransformerCell])
@pytest.mark.parametrize('num_layers', [1, 2, 6])
def test_basic_equivalence(cell_cls, num_layers):
"""
Test that both transformer implementations produce the
same outputs when applied to a properly-sized
sequence.
"""
with tf.Graph().as_default():
with tf.Session() as sess:
pos_enc = positional_encoding(4, 6, dtype=tf.float64)
in_seq = tf.get_variable('in_seq',
shape=(3, 4, 6),
initializer=tf.truncated_normal_initializer(),
dtype=tf.float64)
cell = cell_cls(pos_enc, num_layers=num_layers, num_heads=2, hidden=24)
actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)
with tf.variable_scope('rnn', reuse=True):
with tf.variable_scope('transformer', reuse=True):
expected = in_seq + pos_enc
for _ in range(num_layers):
expected = transformer_layer(expected, num_heads=2, hidden=24)
sess.run(tf.global_variables_initializer())
actual, expected = sess.run((actual, expected))
assert not np.isnan(actual).any()
assert not np.isnan(expected).any()
assert actual.shape == expected.shape
assert np.allclose(actual, expected)
@pytest.mark.parametrize('cell_cls', [UnlimitedTransformerCell])
def test_past_horizon(cell_cls):
"""
Test the cell when the input sequence is longer than
the time horizon.
"""
with tf.Graph().as_default():
with tf.Session() as sess:
pos_enc = positional_encoding(4, 6, dtype=tf.float64)
in_seq = tf.get_variable('in_seq',
shape=(3, 5, 6),
initializer=tf.truncated_normal_initializer(),
dtype=tf.float64)
cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24)
actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)
def apply_regular(sequence):
with tf.variable_scope('rnn', reuse=True):
with tf.variable_scope('transformer', reuse=True):
expected = sequence + pos_enc
for _ in range(3):
expected = transformer_layer(expected, num_heads=2, hidden=24)
return expected
expected = tf.concat([apply_regular(in_seq[:, :-1]),
apply_regular(in_seq[:, 1:])[:, -1:]], axis=1)
sess.run(tf.global_variables_initializer())
actual, expected = sess.run((actual, expected))
assert not np.isnan(actual).any()
assert not np.isnan(expected).any()
assert actual.shape == expected.shape
assert np.allclose(actual, expected)
@pytest.mark.parametrize('cell_cls', [LimitedTransformerCell, UnlimitedTransformerCell])
def test_mismatched_starts(cell_cls):
"""
Test the cell when the states are split up and
recombined from different timesteps.
"""
with tf.Graph().as_default():
with tf.Session() as sess:
pos_enc = positional_encoding(5, 6, dtype=tf.float64)
in_seq = tf.get_variable('in_seq',
shape=(3, 5, 6),
initializer=tf.truncated_normal_initializer(),
dtype=tf.float64)
cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24)
_, states_1 = tf.nn.dynamic_rnn(cell, in_seq[:, :1], dtype=tf.float64)
_, states_2 = tf.nn.dynamic_rnn(cell, in_seq[:, :2], dtype=tf.float64)
_, states_3 = tf.nn.dynamic_rnn(cell, in_seq[:, :3], dtype=tf.float64)
new_states = tuple(tf.stack([s2[0], s3[1], s1[2]], axis=0)
for s1, s2, s3 in zip(states_1, states_2, states_3))
full_seq, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)
expected = tf.stack([full_seq[0, 2:4], full_seq[1, 3:5], full_seq[2, 1:3]], axis=0)
inputs = tf.stack([in_seq[0, 2:4], in_seq[1, 3:5], in_seq[2, 1:3]], axis=0)
actual, _ = tf.nn.dynamic_rnn(cell, inputs, initial_state=new_states)
sess.run(tf.global_variables_initializer())
actual, expected = sess.run((actual, expected))
assert not np.isnan(actual).any()
assert not np.isnan(expected).any()
assert actual.shape == expected.shape
assert np.allclose(actual, expected)
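# How these tests might be run (assumes pytest is installed and this module sits
# inside its package so the relative imports resolve; the file name below is a
# placeholder):
#     pytest -q <package>/<this test module>.py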
|
normal
|
{
"blob_id": "958f6e539f9f68892d77b6becc387581c6adfa16",
"index": 3366,
"step-1": "<mask token>\n\n\ndef test_inject_at_timestep():\n with tf.Graph().as_default():\n with tf.Session() as sess:\n in_seq = tf.constant(np.array([[[1, 2, 3, 4], [5, 6, 7, 8]], [[\n 9, 10, 11, 12], [13, 14, 15, 16]], [[17, 18, 19, 20], [21, \n 22, 23, 24]]], dtype='float32'))\n injection = tf.constant(np.array([[-1, -2, -3, -4], [-5, -6, -7,\n -8], [-9, -10, -11, -12]], dtype='float32'))\n indices = np.array([0, 1, 0], dtype='int32')\n injected = sess.run(inject_at_timestep(indices, in_seq, injection))\n expected = np.array([[[-1, -2, -3, -4], [5, 6, 7, 8]], [[9, 10,\n 11, 12], [-5, -6, -7, -8]], [[-9, -10, -11, -12], [21, 22, \n 23, 24]]], dtype='float32')\n assert (injected == expected).all()\n\n\ndef test_sequence_masks():\n with tf.Graph().as_default():\n with tf.Session() as sess:\n indices = tf.constant(np.array([3, 1, 2], dtype='int32'))\n actual = sess.run(sequence_masks(indices, tf.constant(4, dtype=\n tf.int32), tf.float32))\n expected = np.array([[0, 0, 0, 0], [0, 0, -np.inf, -np.inf], [0,\n 0, 0, -np.inf]], dtype='float32')\n assert (actual == expected).all()\n\n\n@pytest.mark.parametrize('cell_cls', [LimitedTransformerCell,\n UnlimitedTransformerCell])\n@pytest.mark.parametrize('num_layers', [1, 2, 6])\ndef test_basic_equivalence(cell_cls, num_layers):\n \"\"\"\n Test that both transformer implementations produce the\n same outputs when applied to a properly-sized\n sequence.\n \"\"\"\n with tf.Graph().as_default():\n with tf.Session() as sess:\n pos_enc = positional_encoding(4, 6, dtype=tf.float64)\n in_seq = tf.get_variable('in_seq', shape=(3, 4, 6), initializer\n =tf.truncated_normal_initializer(), dtype=tf.float64)\n cell = cell_cls(pos_enc, num_layers=num_layers, num_heads=2,\n hidden=24)\n actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n with tf.variable_scope('rnn', reuse=True):\n with tf.variable_scope('transformer', reuse=True):\n expected = in_seq + pos_enc\n for _ in range(num_layers):\n expected = transformer_layer(expected, num_heads=2,\n hidden=24)\n sess.run(tf.global_variables_initializer())\n actual, expected = sess.run((actual, expected))\n assert not np.isnan(actual).any()\n assert not np.isnan(expected).any()\n assert actual.shape == expected.shape\n assert np.allclose(actual, expected)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_inject_at_timestep():\n with tf.Graph().as_default():\n with tf.Session() as sess:\n in_seq = tf.constant(np.array([[[1, 2, 3, 4], [5, 6, 7, 8]], [[\n 9, 10, 11, 12], [13, 14, 15, 16]], [[17, 18, 19, 20], [21, \n 22, 23, 24]]], dtype='float32'))\n injection = tf.constant(np.array([[-1, -2, -3, -4], [-5, -6, -7,\n -8], [-9, -10, -11, -12]], dtype='float32'))\n indices = np.array([0, 1, 0], dtype='int32')\n injected = sess.run(inject_at_timestep(indices, in_seq, injection))\n expected = np.array([[[-1, -2, -3, -4], [5, 6, 7, 8]], [[9, 10,\n 11, 12], [-5, -6, -7, -8]], [[-9, -10, -11, -12], [21, 22, \n 23, 24]]], dtype='float32')\n assert (injected == expected).all()\n\n\ndef test_sequence_masks():\n with tf.Graph().as_default():\n with tf.Session() as sess:\n indices = tf.constant(np.array([3, 1, 2], dtype='int32'))\n actual = sess.run(sequence_masks(indices, tf.constant(4, dtype=\n tf.int32), tf.float32))\n expected = np.array([[0, 0, 0, 0], [0, 0, -np.inf, -np.inf], [0,\n 0, 0, -np.inf]], dtype='float32')\n assert (actual == expected).all()\n\n\n@pytest.mark.parametrize('cell_cls', [LimitedTransformerCell,\n UnlimitedTransformerCell])\n@pytest.mark.parametrize('num_layers', [1, 2, 6])\ndef test_basic_equivalence(cell_cls, num_layers):\n \"\"\"\n Test that both transformer implementations produce the\n same outputs when applied to a properly-sized\n sequence.\n \"\"\"\n with tf.Graph().as_default():\n with tf.Session() as sess:\n pos_enc = positional_encoding(4, 6, dtype=tf.float64)\n in_seq = tf.get_variable('in_seq', shape=(3, 4, 6), initializer\n =tf.truncated_normal_initializer(), dtype=tf.float64)\n cell = cell_cls(pos_enc, num_layers=num_layers, num_heads=2,\n hidden=24)\n actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n with tf.variable_scope('rnn', reuse=True):\n with tf.variable_scope('transformer', reuse=True):\n expected = in_seq + pos_enc\n for _ in range(num_layers):\n expected = transformer_layer(expected, num_heads=2,\n hidden=24)\n sess.run(tf.global_variables_initializer())\n actual, expected = sess.run((actual, expected))\n assert not np.isnan(actual).any()\n assert not np.isnan(expected).any()\n assert actual.shape == expected.shape\n assert np.allclose(actual, expected)\n\n\n@pytest.mark.parametrize('cell_cls', [UnlimitedTransformerCell])\ndef test_past_horizon(cell_cls):\n \"\"\"\n Test the cell when the input sequence is longer than\n the time horizon.\n \"\"\"\n with tf.Graph().as_default():\n with tf.Session() as sess:\n pos_enc = positional_encoding(4, 6, dtype=tf.float64)\n in_seq = tf.get_variable('in_seq', shape=(3, 5, 6), initializer\n =tf.truncated_normal_initializer(), dtype=tf.float64)\n cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24)\n actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n\n def apply_regular(sequence):\n with tf.variable_scope('rnn', reuse=True):\n with tf.variable_scope('transformer', reuse=True):\n expected = sequence + pos_enc\n for _ in range(3):\n expected = transformer_layer(expected,\n num_heads=2, hidden=24)\n return expected\n expected = tf.concat([apply_regular(in_seq[:, :-1]),\n apply_regular(in_seq[:, 1:])[:, -1:]], axis=1)\n sess.run(tf.global_variables_initializer())\n actual, expected = sess.run((actual, expected))\n assert not np.isnan(actual).any()\n assert not np.isnan(expected).any()\n assert actual.shape == expected.shape\n assert np.allclose(actual, expected)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef test_inject_at_timestep():\n with tf.Graph().as_default():\n with tf.Session() as sess:\n in_seq = tf.constant(np.array([[[1, 2, 3, 4], [5, 6, 7, 8]], [[\n 9, 10, 11, 12], [13, 14, 15, 16]], [[17, 18, 19, 20], [21, \n 22, 23, 24]]], dtype='float32'))\n injection = tf.constant(np.array([[-1, -2, -3, -4], [-5, -6, -7,\n -8], [-9, -10, -11, -12]], dtype='float32'))\n indices = np.array([0, 1, 0], dtype='int32')\n injected = sess.run(inject_at_timestep(indices, in_seq, injection))\n expected = np.array([[[-1, -2, -3, -4], [5, 6, 7, 8]], [[9, 10,\n 11, 12], [-5, -6, -7, -8]], [[-9, -10, -11, -12], [21, 22, \n 23, 24]]], dtype='float32')\n assert (injected == expected).all()\n\n\ndef test_sequence_masks():\n with tf.Graph().as_default():\n with tf.Session() as sess:\n indices = tf.constant(np.array([3, 1, 2], dtype='int32'))\n actual = sess.run(sequence_masks(indices, tf.constant(4, dtype=\n tf.int32), tf.float32))\n expected = np.array([[0, 0, 0, 0], [0, 0, -np.inf, -np.inf], [0,\n 0, 0, -np.inf]], dtype='float32')\n assert (actual == expected).all()\n\n\n@pytest.mark.parametrize('cell_cls', [LimitedTransformerCell,\n UnlimitedTransformerCell])\n@pytest.mark.parametrize('num_layers', [1, 2, 6])\ndef test_basic_equivalence(cell_cls, num_layers):\n \"\"\"\n Test that both transformer implementations produce the\n same outputs when applied to a properly-sized\n sequence.\n \"\"\"\n with tf.Graph().as_default():\n with tf.Session() as sess:\n pos_enc = positional_encoding(4, 6, dtype=tf.float64)\n in_seq = tf.get_variable('in_seq', shape=(3, 4, 6), initializer\n =tf.truncated_normal_initializer(), dtype=tf.float64)\n cell = cell_cls(pos_enc, num_layers=num_layers, num_heads=2,\n hidden=24)\n actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n with tf.variable_scope('rnn', reuse=True):\n with tf.variable_scope('transformer', reuse=True):\n expected = in_seq + pos_enc\n for _ in range(num_layers):\n expected = transformer_layer(expected, num_heads=2,\n hidden=24)\n sess.run(tf.global_variables_initializer())\n actual, expected = sess.run((actual, expected))\n assert not np.isnan(actual).any()\n assert not np.isnan(expected).any()\n assert actual.shape == expected.shape\n assert np.allclose(actual, expected)\n\n\n@pytest.mark.parametrize('cell_cls', [UnlimitedTransformerCell])\ndef test_past_horizon(cell_cls):\n \"\"\"\n Test the cell when the input sequence is longer than\n the time horizon.\n \"\"\"\n with tf.Graph().as_default():\n with tf.Session() as sess:\n pos_enc = positional_encoding(4, 6, dtype=tf.float64)\n in_seq = tf.get_variable('in_seq', shape=(3, 5, 6), initializer\n =tf.truncated_normal_initializer(), dtype=tf.float64)\n cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24)\n actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n\n def apply_regular(sequence):\n with tf.variable_scope('rnn', reuse=True):\n with tf.variable_scope('transformer', reuse=True):\n expected = sequence + pos_enc\n for _ in range(3):\n expected = transformer_layer(expected,\n num_heads=2, hidden=24)\n return expected\n expected = tf.concat([apply_regular(in_seq[:, :-1]),\n apply_regular(in_seq[:, 1:])[:, -1:]], axis=1)\n sess.run(tf.global_variables_initializer())\n actual, expected = sess.run((actual, expected))\n assert not np.isnan(actual).any()\n assert not np.isnan(expected).any()\n assert actual.shape == expected.shape\n assert np.allclose(actual, expected)\n\n\n@pytest.mark.parametrize('cell_cls', [LimitedTransformerCell,\n 
UnlimitedTransformerCell])\ndef test_mismatched_starts(cell_cls):\n \"\"\"\n Test the cell when the states are split up and\n recombined from different timesteps.\n \"\"\"\n with tf.Graph().as_default():\n with tf.Session() as sess:\n pos_enc = positional_encoding(5, 6, dtype=tf.float64)\n in_seq = tf.get_variable('in_seq', shape=(3, 5, 6), initializer\n =tf.truncated_normal_initializer(), dtype=tf.float64)\n cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24)\n _, states_1 = tf.nn.dynamic_rnn(cell, in_seq[:, :1], dtype=tf.\n float64)\n _, states_2 = tf.nn.dynamic_rnn(cell, in_seq[:, :2], dtype=tf.\n float64)\n _, states_3 = tf.nn.dynamic_rnn(cell, in_seq[:, :3], dtype=tf.\n float64)\n new_states = tuple(tf.stack([s2[0], s3[1], s1[2]], axis=0) for \n s1, s2, s3 in zip(states_1, states_2, states_3))\n full_seq, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n expected = tf.stack([full_seq[0, 2:4], full_seq[1, 3:5],\n full_seq[2, 1:3]], axis=0)\n inputs = tf.stack([in_seq[0, 2:4], in_seq[1, 3:5], in_seq[2, 1:\n 3]], axis=0)\n actual, _ = tf.nn.dynamic_rnn(cell, inputs, initial_state=\n new_states)\n sess.run(tf.global_variables_initializer())\n actual, expected = sess.run((actual, expected))\n assert not np.isnan(actual).any()\n assert not np.isnan(expected).any()\n assert actual.shape == expected.shape\n assert np.allclose(actual, expected)\n",
"step-4": "<mask token>\nimport pytest\nimport numpy as np\nimport tensorflow as tf\nfrom .transformer import positional_encoding, transformer_layer\nfrom .cell import LimitedTransformerCell, UnlimitedTransformerCell, inject_at_timestep, sequence_masks\n\n\ndef test_inject_at_timestep():\n with tf.Graph().as_default():\n with tf.Session() as sess:\n in_seq = tf.constant(np.array([[[1, 2, 3, 4], [5, 6, 7, 8]], [[\n 9, 10, 11, 12], [13, 14, 15, 16]], [[17, 18, 19, 20], [21, \n 22, 23, 24]]], dtype='float32'))\n injection = tf.constant(np.array([[-1, -2, -3, -4], [-5, -6, -7,\n -8], [-9, -10, -11, -12]], dtype='float32'))\n indices = np.array([0, 1, 0], dtype='int32')\n injected = sess.run(inject_at_timestep(indices, in_seq, injection))\n expected = np.array([[[-1, -2, -3, -4], [5, 6, 7, 8]], [[9, 10,\n 11, 12], [-5, -6, -7, -8]], [[-9, -10, -11, -12], [21, 22, \n 23, 24]]], dtype='float32')\n assert (injected == expected).all()\n\n\ndef test_sequence_masks():\n with tf.Graph().as_default():\n with tf.Session() as sess:\n indices = tf.constant(np.array([3, 1, 2], dtype='int32'))\n actual = sess.run(sequence_masks(indices, tf.constant(4, dtype=\n tf.int32), tf.float32))\n expected = np.array([[0, 0, 0, 0], [0, 0, -np.inf, -np.inf], [0,\n 0, 0, -np.inf]], dtype='float32')\n assert (actual == expected).all()\n\n\n@pytest.mark.parametrize('cell_cls', [LimitedTransformerCell,\n UnlimitedTransformerCell])\n@pytest.mark.parametrize('num_layers', [1, 2, 6])\ndef test_basic_equivalence(cell_cls, num_layers):\n \"\"\"\n Test that both transformer implementations produce the\n same outputs when applied to a properly-sized\n sequence.\n \"\"\"\n with tf.Graph().as_default():\n with tf.Session() as sess:\n pos_enc = positional_encoding(4, 6, dtype=tf.float64)\n in_seq = tf.get_variable('in_seq', shape=(3, 4, 6), initializer\n =tf.truncated_normal_initializer(), dtype=tf.float64)\n cell = cell_cls(pos_enc, num_layers=num_layers, num_heads=2,\n hidden=24)\n actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n with tf.variable_scope('rnn', reuse=True):\n with tf.variable_scope('transformer', reuse=True):\n expected = in_seq + pos_enc\n for _ in range(num_layers):\n expected = transformer_layer(expected, num_heads=2,\n hidden=24)\n sess.run(tf.global_variables_initializer())\n actual, expected = sess.run((actual, expected))\n assert not np.isnan(actual).any()\n assert not np.isnan(expected).any()\n assert actual.shape == expected.shape\n assert np.allclose(actual, expected)\n\n\n@pytest.mark.parametrize('cell_cls', [UnlimitedTransformerCell])\ndef test_past_horizon(cell_cls):\n \"\"\"\n Test the cell when the input sequence is longer than\n the time horizon.\n \"\"\"\n with tf.Graph().as_default():\n with tf.Session() as sess:\n pos_enc = positional_encoding(4, 6, dtype=tf.float64)\n in_seq = tf.get_variable('in_seq', shape=(3, 5, 6), initializer\n =tf.truncated_normal_initializer(), dtype=tf.float64)\n cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24)\n actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n\n def apply_regular(sequence):\n with tf.variable_scope('rnn', reuse=True):\n with tf.variable_scope('transformer', reuse=True):\n expected = sequence + pos_enc\n for _ in range(3):\n expected = transformer_layer(expected,\n num_heads=2, hidden=24)\n return expected\n expected = tf.concat([apply_regular(in_seq[:, :-1]),\n apply_regular(in_seq[:, 1:])[:, -1:]], axis=1)\n sess.run(tf.global_variables_initializer())\n actual, expected = sess.run((actual, expected))\n assert 
not np.isnan(actual).any()\n assert not np.isnan(expected).any()\n assert actual.shape == expected.shape\n assert np.allclose(actual, expected)\n\n\n@pytest.mark.parametrize('cell_cls', [LimitedTransformerCell,\n UnlimitedTransformerCell])\ndef test_mismatched_starts(cell_cls):\n \"\"\"\n Test the cell when the states are split up and\n recombined from different timesteps.\n \"\"\"\n with tf.Graph().as_default():\n with tf.Session() as sess:\n pos_enc = positional_encoding(5, 6, dtype=tf.float64)\n in_seq = tf.get_variable('in_seq', shape=(3, 5, 6), initializer\n =tf.truncated_normal_initializer(), dtype=tf.float64)\n cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24)\n _, states_1 = tf.nn.dynamic_rnn(cell, in_seq[:, :1], dtype=tf.\n float64)\n _, states_2 = tf.nn.dynamic_rnn(cell, in_seq[:, :2], dtype=tf.\n float64)\n _, states_3 = tf.nn.dynamic_rnn(cell, in_seq[:, :3], dtype=tf.\n float64)\n new_states = tuple(tf.stack([s2[0], s3[1], s1[2]], axis=0) for \n s1, s2, s3 in zip(states_1, states_2, states_3))\n full_seq, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n expected = tf.stack([full_seq[0, 2:4], full_seq[1, 3:5],\n full_seq[2, 1:3]], axis=0)\n inputs = tf.stack([in_seq[0, 2:4], in_seq[1, 3:5], in_seq[2, 1:\n 3]], axis=0)\n actual, _ = tf.nn.dynamic_rnn(cell, inputs, initial_state=\n new_states)\n sess.run(tf.global_variables_initializer())\n actual, expected = sess.run((actual, expected))\n assert not np.isnan(actual).any()\n assert not np.isnan(expected).any()\n assert actual.shape == expected.shape\n assert np.allclose(actual, expected)\n",
"step-5": "\"\"\"\nTests for the Transformer RNNCell.\n\"\"\"\n\nimport pytest\n\nimport numpy as np\nimport tensorflow as tf\n\nfrom .transformer import positional_encoding, transformer_layer\nfrom .cell import (LimitedTransformerCell, UnlimitedTransformerCell,\n inject_at_timestep, sequence_masks)\n\n\ndef test_inject_at_timestep():\n with tf.Graph().as_default():\n with tf.Session() as sess:\n in_seq = tf.constant(np.array([\n [\n [1, 2, 3, 4],\n [5, 6, 7, 8],\n ],\n [\n [9, 10, 11, 12],\n [13, 14, 15, 16],\n ],\n [\n [17, 18, 19, 20],\n [21, 22, 23, 24],\n ],\n ], dtype='float32'))\n injection = tf.constant(np.array([\n [-1, -2, -3, -4],\n [-5, -6, -7, -8],\n [-9, -10, -11, -12],\n ], dtype='float32'))\n\n indices = np.array([0, 1, 0], dtype='int32')\n injected = sess.run(inject_at_timestep(indices, in_seq, injection))\n\n expected = np.array([\n [\n [-1, -2, -3, -4],\n [5, 6, 7, 8],\n ],\n [\n [9, 10, 11, 12],\n [-5, -6, -7, -8],\n ],\n [\n [-9, -10, -11, -12],\n [21, 22, 23, 24],\n ],\n ], dtype='float32')\n assert (injected == expected).all()\n\n\ndef test_sequence_masks():\n with tf.Graph().as_default():\n with tf.Session() as sess:\n indices = tf.constant(np.array([3, 1, 2], dtype='int32'))\n actual = sess.run(sequence_masks(indices, tf.constant(4, dtype=tf.int32), tf.float32))\n expected = np.array([\n [0, 0, 0, 0],\n [0, 0, -np.inf, -np.inf],\n [0, 0, 0, -np.inf],\n ], dtype='float32')\n assert (actual == expected).all()\n\n\n@pytest.mark.parametrize('cell_cls', [LimitedTransformerCell, UnlimitedTransformerCell])\n@pytest.mark.parametrize('num_layers', [1, 2, 6])\ndef test_basic_equivalence(cell_cls, num_layers):\n \"\"\"\n Test that both transformer implementations produce the\n same outputs when applied to a properly-sized\n sequence.\n \"\"\"\n with tf.Graph().as_default():\n with tf.Session() as sess:\n pos_enc = positional_encoding(4, 6, dtype=tf.float64)\n in_seq = tf.get_variable('in_seq',\n shape=(3, 4, 6),\n initializer=tf.truncated_normal_initializer(),\n dtype=tf.float64)\n cell = cell_cls(pos_enc, num_layers=num_layers, num_heads=2, hidden=24)\n actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n with tf.variable_scope('rnn', reuse=True):\n with tf.variable_scope('transformer', reuse=True):\n expected = in_seq + pos_enc\n for _ in range(num_layers):\n expected = transformer_layer(expected, num_heads=2, hidden=24)\n sess.run(tf.global_variables_initializer())\n\n actual, expected = sess.run((actual, expected))\n\n assert not np.isnan(actual).any()\n assert not np.isnan(expected).any()\n assert actual.shape == expected.shape\n assert np.allclose(actual, expected)\n\n\n@pytest.mark.parametrize('cell_cls', [UnlimitedTransformerCell])\ndef test_past_horizon(cell_cls):\n \"\"\"\n Test the cell when the input sequence is longer than\n the time horizon.\n \"\"\"\n with tf.Graph().as_default():\n with tf.Session() as sess:\n pos_enc = positional_encoding(4, 6, dtype=tf.float64)\n in_seq = tf.get_variable('in_seq',\n shape=(3, 5, 6),\n initializer=tf.truncated_normal_initializer(),\n dtype=tf.float64)\n cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24)\n actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n\n def apply_regular(sequence):\n with tf.variable_scope('rnn', reuse=True):\n with tf.variable_scope('transformer', reuse=True):\n expected = sequence + pos_enc\n for _ in range(3):\n expected = transformer_layer(expected, num_heads=2, hidden=24)\n return expected\n expected = tf.concat([apply_regular(in_seq[:, :-1]),\n 
apply_regular(in_seq[:, 1:])[:, -1:]], axis=1)\n sess.run(tf.global_variables_initializer())\n\n actual, expected = sess.run((actual, expected))\n\n assert not np.isnan(actual).any()\n assert not np.isnan(expected).any()\n assert actual.shape == expected.shape\n assert np.allclose(actual, expected)\n\n\n@pytest.mark.parametrize('cell_cls', [LimitedTransformerCell, UnlimitedTransformerCell])\ndef test_mismatched_starts(cell_cls):\n \"\"\"\n Test the cell when the states are split up and\n recombined from different timesteps.\n \"\"\"\n with tf.Graph().as_default():\n with tf.Session() as sess:\n pos_enc = positional_encoding(5, 6, dtype=tf.float64)\n in_seq = tf.get_variable('in_seq',\n shape=(3, 5, 6),\n initializer=tf.truncated_normal_initializer(),\n dtype=tf.float64)\n cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24)\n _, states_1 = tf.nn.dynamic_rnn(cell, in_seq[:, :1], dtype=tf.float64)\n _, states_2 = tf.nn.dynamic_rnn(cell, in_seq[:, :2], dtype=tf.float64)\n _, states_3 = tf.nn.dynamic_rnn(cell, in_seq[:, :3], dtype=tf.float64)\n new_states = tuple(tf.stack([s2[0], s3[1], s1[2]], axis=0)\n for s1, s2, s3 in zip(states_1, states_2, states_3))\n\n full_seq, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64)\n expected = tf.stack([full_seq[0, 2:4], full_seq[1, 3:5], full_seq[2, 1:3]], axis=0)\n\n inputs = tf.stack([in_seq[0, 2:4], in_seq[1, 3:5], in_seq[2, 1:3]], axis=0)\n actual, _ = tf.nn.dynamic_rnn(cell, inputs, initial_state=new_states)\n\n sess.run(tf.global_variables_initializer())\n\n actual, expected = sess.run((actual, expected))\n\n assert not np.isnan(actual).any()\n assert not np.isnan(expected).any()\n assert actual.shape == expected.shape\n assert np.allclose(actual, expected)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
"""
WINRM Module to connect to windows host
"""
from winrm.protocol import Protocol
from lib import logger
class WINRM(object):
"""
WINRM Module to connect to windows host
"""
def __init__(self, host_ip, usr, pwd):
"""
        - **parameters**, **types**, **return** and **return types**::
            :param host_ip: IP address of the Windows host
            :param usr: username of the Windows host
            :param pwd: password of the Windows host
            :type host_ip: string
            :type usr: string
            :type pwd: string
"""
self.os_type = 'windows'
self.host_ip = host_ip
self.usr = usr
self.pwd = pwd
self.shell_id = None
self.host_win_ip = None
self.conn = None
def connect(self):
"""
Method to connect to a Windows machine.
"""
try:
self.host_win_ip = "http://" + self.host_ip + ":5985/wsman"
self.conn = Protocol(
endpoint=self.host_win_ip,
transport="ntlm",
username=self.usr,
password=self.pwd,
server_cert_validation="ignore")
logger.warn("Connecting Windows ...")
self.shell_id = self.conn.open_shell()
logger.warn(self.shell_id)
logger.warn('Connected to Windows.')
except Exception as error:
msg_exception_error = "Exception raised: %s " % error
            raise Exception(msg_exception_error)
def run_cmd(self, cmd):
"""
        Generic method that runs a command on the Windows machine and returns its output.
        - **parameters**, **types**, **return** and **return types**::
            :param cmd: command to be executed on the Windows machine.
            :return std_out, std_err, status_code: output, error message and status code.
            :rtype: tuple
"""
if 'shell_id' in dir(self):
#checking for the shell_id created in winrm object
command_id = self.conn.run_command(self.shell_id, cmd)
std_out, std_err, status_code = self.conn.get_command_output(
self.shell_id, command_id)
#runs the command and returns output,error,statuscode
return std_out, std_err, status_code
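# Usage sketch (hypothetical host and credentials; assumes WinRM is enabled on
# port 5985 of the target machine):
#     session = WINRM('192.168.0.10', 'Administrator', 'secret')
#     session.connect()
#     std_out, std_err, status_code = session.run_cmd('ipconfig /all')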
|
normal
|
{
"blob_id": "96ac9088650490a7da00c7a20f634b76e673ca2d",
"index": 1174,
"step-1": "<mask token>\n\n\nclass WINRM(object):\n <mask token>\n <mask token>\n\n def connect(self):\n \"\"\"\n Method to connect to a Windows machine.\n \"\"\"\n try:\n self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'\n self.conn = Protocol(endpoint=self.host_win_ip, transport=\n 'ntlm', username=self.usr, password=self.pwd,\n server_cert_validation='ignore')\n logger.warn('Connecting Windows ...')\n self.shell_id = self.conn.open_shell()\n logger.warn(self.shell_id)\n logger.warn('Connected to Windows.')\n except Exception as error:\n msg_exception_error = 'Exception raised: %s ' % error\n raise msg_exception_error\n\n def run_cmd(self, cmd):\n \"\"\"\n Generic Method for passing command and run it on windows machine and return output.\n - **parameters**, **types**, **return** and **return types**::\n :param cmd: Command to be executed on windows machine.\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\n :rtype stdout,stderr,status_code: tuple\n \"\"\"\n if 'shell_id' in dir(self):\n command_id = self.conn.run_command(self.shell_id, cmd)\n std_out, std_err, status_code = self.conn.get_command_output(self\n .shell_id, command_id)\n return std_out, std_err, status_code\n",
"step-2": "<mask token>\n\n\nclass WINRM(object):\n <mask token>\n\n def __init__(self, host_ip, usr, pwd):\n \"\"\"\n - **parameters**, **types**, **return** and **return types**::\n :param os_type : windows/linux\n :param host_ip: ip address of the Windows host\n :param usr: username of the Windows Host\n :param pwd: Password of the Windows Host\n :type os_type: string\n :type host_ip: string\n :type u_name: string\n :type pwd: string\n \"\"\"\n self.os_type = 'windows'\n self.host_ip = host_ip\n self.usr = usr\n self.pwd = pwd\n self.shell_id = None\n self.host_win_ip = None\n self.conn = None\n\n def connect(self):\n \"\"\"\n Method to connect to a Windows machine.\n \"\"\"\n try:\n self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'\n self.conn = Protocol(endpoint=self.host_win_ip, transport=\n 'ntlm', username=self.usr, password=self.pwd,\n server_cert_validation='ignore')\n logger.warn('Connecting Windows ...')\n self.shell_id = self.conn.open_shell()\n logger.warn(self.shell_id)\n logger.warn('Connected to Windows.')\n except Exception as error:\n msg_exception_error = 'Exception raised: %s ' % error\n raise msg_exception_error\n\n def run_cmd(self, cmd):\n \"\"\"\n Generic Method for passing command and run it on windows machine and return output.\n - **parameters**, **types**, **return** and **return types**::\n :param cmd: Command to be executed on windows machine.\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\n :rtype stdout,stderr,status_code: tuple\n \"\"\"\n if 'shell_id' in dir(self):\n command_id = self.conn.run_command(self.shell_id, cmd)\n std_out, std_err, status_code = self.conn.get_command_output(self\n .shell_id, command_id)\n return std_out, std_err, status_code\n",
"step-3": "<mask token>\n\n\nclass WINRM(object):\n \"\"\"\n WINRM Module to connect to windows host\n \"\"\"\n\n def __init__(self, host_ip, usr, pwd):\n \"\"\"\n - **parameters**, **types**, **return** and **return types**::\n :param os_type : windows/linux\n :param host_ip: ip address of the Windows host\n :param usr: username of the Windows Host\n :param pwd: Password of the Windows Host\n :type os_type: string\n :type host_ip: string\n :type u_name: string\n :type pwd: string\n \"\"\"\n self.os_type = 'windows'\n self.host_ip = host_ip\n self.usr = usr\n self.pwd = pwd\n self.shell_id = None\n self.host_win_ip = None\n self.conn = None\n\n def connect(self):\n \"\"\"\n Method to connect to a Windows machine.\n \"\"\"\n try:\n self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'\n self.conn = Protocol(endpoint=self.host_win_ip, transport=\n 'ntlm', username=self.usr, password=self.pwd,\n server_cert_validation='ignore')\n logger.warn('Connecting Windows ...')\n self.shell_id = self.conn.open_shell()\n logger.warn(self.shell_id)\n logger.warn('Connected to Windows.')\n except Exception as error:\n msg_exception_error = 'Exception raised: %s ' % error\n raise msg_exception_error\n\n def run_cmd(self, cmd):\n \"\"\"\n Generic Method for passing command and run it on windows machine and return output.\n - **parameters**, **types**, **return** and **return types**::\n :param cmd: Command to be executed on windows machine.\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\n :rtype stdout,stderr,status_code: tuple\n \"\"\"\n if 'shell_id' in dir(self):\n command_id = self.conn.run_command(self.shell_id, cmd)\n std_out, std_err, status_code = self.conn.get_command_output(self\n .shell_id, command_id)\n return std_out, std_err, status_code\n",
"step-4": "<mask token>\nfrom winrm.protocol import Protocol\nfrom lib import logger\n\n\nclass WINRM(object):\n \"\"\"\n WINRM Module to connect to windows host\n \"\"\"\n\n def __init__(self, host_ip, usr, pwd):\n \"\"\"\n - **parameters**, **types**, **return** and **return types**::\n :param os_type : windows/linux\n :param host_ip: ip address of the Windows host\n :param usr: username of the Windows Host\n :param pwd: Password of the Windows Host\n :type os_type: string\n :type host_ip: string\n :type u_name: string\n :type pwd: string\n \"\"\"\n self.os_type = 'windows'\n self.host_ip = host_ip\n self.usr = usr\n self.pwd = pwd\n self.shell_id = None\n self.host_win_ip = None\n self.conn = None\n\n def connect(self):\n \"\"\"\n Method to connect to a Windows machine.\n \"\"\"\n try:\n self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'\n self.conn = Protocol(endpoint=self.host_win_ip, transport=\n 'ntlm', username=self.usr, password=self.pwd,\n server_cert_validation='ignore')\n logger.warn('Connecting Windows ...')\n self.shell_id = self.conn.open_shell()\n logger.warn(self.shell_id)\n logger.warn('Connected to Windows.')\n except Exception as error:\n msg_exception_error = 'Exception raised: %s ' % error\n raise msg_exception_error\n\n def run_cmd(self, cmd):\n \"\"\"\n Generic Method for passing command and run it on windows machine and return output.\n - **parameters**, **types**, **return** and **return types**::\n :param cmd: Command to be executed on windows machine.\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\n :rtype stdout,stderr,status_code: tuple\n \"\"\"\n if 'shell_id' in dir(self):\n command_id = self.conn.run_command(self.shell_id, cmd)\n std_out, std_err, status_code = self.conn.get_command_output(self\n .shell_id, command_id)\n return std_out, std_err, status_code\n",
"step-5": "\"\"\"\r\nWINRM Module to connect to windows host\r\n\"\"\"\r\nfrom winrm.protocol import Protocol\r\nfrom lib import logger\r\n\r\n\r\nclass WINRM(object):\r\n \"\"\"\r\n WINRM Module to connect to windows host\r\n \"\"\"\r\n def __init__(self, host_ip, usr, pwd):\r\n \"\"\"\r\n - **parameters**, **types**, **return** and **return types**::\r\n :param os_type : windows/linux\r\n :param host_ip: ip address of the Windows host\r\n :param usr: username of the Windows Host\r\n :param pwd: Password of the Windows Host\r\n :type os_type: string\r\n :type host_ip: string\r\n :type u_name: string\r\n :type pwd: string\r\n \"\"\"\r\n self.os_type = 'windows'\r\n self.host_ip = host_ip\r\n self.usr = usr\r\n self.pwd = pwd\r\n self.shell_id = None\r\n self.host_win_ip = None\r\n self.conn = None\r\n\r\n def connect(self):\r\n \"\"\"\r\n Method to connect to a Windows machine.\r\n \"\"\"\r\n try:\r\n self.host_win_ip = \"http://\" + self.host_ip + \":5985/wsman\"\r\n self.conn = Protocol(\r\n endpoint=self.host_win_ip,\r\n transport=\"ntlm\",\r\n username=self.usr,\r\n password=self.pwd,\r\n server_cert_validation=\"ignore\")\r\n logger.warn(\"Connecting Windows ...\")\r\n self.shell_id = self.conn.open_shell()\r\n logger.warn(self.shell_id)\r\n logger.warn('Connected to Windows.')\r\n except Exception as error:\r\n msg_exception_error = \"Exception raised: %s \" % error\r\n raise(msg_exception_error)\r\n\r\n def run_cmd(self, cmd):\r\n \"\"\"\r\n Generic Method for passing command and run it on windows machine and return output.\r\n - **parameters**, **types**, **return** and **return types**::\r\n :param cmd: Command to be executed on windows machine.\r\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\r\n :rtype stdout,stderr,status_code: tuple\r\n \"\"\"\r\n if 'shell_id' in dir(self):\r\n #checking for the shell_id created in winrm object\r\n command_id = self.conn.run_command(self.shell_id, cmd)\r\n std_out, std_err, status_code = self.conn.get_command_output(\r\n self.shell_id, command_id)\r\n #runs the command and returns output,error,statuscode\r\n return std_out, std_err, status_code\r\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
class Solution(object):
def restoreIpAddresses(self, s):
"""
:type s: str
:rtype: List[str]
"""
def helper(sb, string, level):
if len(string) == 0:
if level == 4:
ans.append(sb[:-1])
return
            if level == 4:
                return
for i in range(3):
if i < len(string):
part = string[:i + 1]
if valid(part):
helper(sb + part + '.', string[i + 1:], level + 1)
def valid(num):
if len(num) > 1 and num[0] == '0':
return False
if 0 <= int(num) <= 255:
return True
else:
return False
ans = []
sb = ''
helper(sb, s, 0)
return ans
solution = Solution()
print(solution.restoreIpAddresses("010010"))
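# Expected output for "010010" (derived by tracing the recursion above):
# ['0.10.0.10', '0.100.1.0']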
|
normal
|
{
"blob_id": "ec4348c61cd1c9130543bb20f9ca199399e1caff",
"index": 226,
"step-1": "class Solution(object):\n def restoreIpAddresses(self, s):\n \"\"\"\n :type s: str\n :rtype: List[str]\n \"\"\"\n\n def helper(sb, string, level):\n if len(string) == 0:\n if level == 4:\n ans.append(sb[:-1])\n return\n if level == 4: return\n for i in range(3):\n if i < len(string):\n part = string[:i + 1]\n if valid(part):\n helper(sb + part + '.', string[i + 1:], level + 1)\n\n def valid(num):\n if len(num) > 1 and num[0] == '0':\n return False\n if 0 <= int(num) <= 255:\n return True\n else:\n return False\n\n ans = []\n sb = ''\n helper(sb, s, 0)\n return ans\n\nsolution = Solution()\nprint solution.restoreIpAddresses(\"010010\")",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#coding=utf-8
'''
Created on 04/09/2012
@author: Johnny
'''
from ckeditor.widgets import CKEditorWidget
from django.conf.urls import patterns, url
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.templatetags.static import static
import views
from portfolio.models import *
from custom_admin import custom_admin
from custom_admin.custom_model_admin import CustomModelAdmin
from django import forms
class CaracteristicaServicoAdmin(CustomModelAdmin):
list_display = ('descricao',)
search_fields = ['descricao']
exclude = ['slug']
class ServicoForm(forms.ModelForm):
descricao = forms.CharField(widget=CKEditorWidget())
class Meta:
model = Servico
class ServicosAdmin(CustomModelAdmin):
list_display = ('imagem_icone','titulo','intro',)
list_display_links = ('titulo','intro',)
search_fields = ['titulo','intro','descricao']
list_filter = ['caracteristicas']
exclude = ['slug']
form = ServicoForm
    def configuracoes_servicos_view(self, request):
import forms
from string import capitalize
from django.utils.encoding import force_unicode
from django.contrib.admin import helpers
model = self.model
opts = model._meta
        prepopulated_fields = {}
        add, change = True, False
if request.method == 'POST': # If the form has been submitted...
form = forms.ConfigServicoForm(request.POST,request.FILES) # A form bound to the POST data
if request.POST.has_key('_update'):
form.fields['imagem'].required = False
if form.is_valid(): # All validation rules pass
form.fields['imagem'].required = True
try:
texto = TextoPagina.objects.get(slug='texto_servico')
except:
texto = TextoPagina()
                if texto.texto is None or texto.texto != form.cleaned_data['texto']:
texto.texto = form.cleaned_data['texto']
if not request.POST.has_key('_update') or request.FILES.has_key('imagem'):
texto.imagem = request.FILES['imagem']
texto.slug = 'texto_servico'
texto.save()
form = forms.ConfigServicoForm()
form.initial['texto'] = texto.texto
form.initial['imagem'] = texto.imagem
change = True
add = False
else:
form = forms.ConfigServicoForm()
try:
texto = TextoPagina.objects.get(slug='texto_servico')
change = True
add = False
form.initial['texto'] = texto.texto
form.initial['imagem'] = texto.imagem
except:
pass
        adminForm = helpers.AdminForm(form, [('Texto da página de serviços', {'fields': ['imagem', 'texto']})], prepopulated_fields)
media = self.media + adminForm.media
return render_to_response('admin/config_form.html',
{
'add':add,
'change':change,
'title': 'Configurações',
'is_popup': "_popup" in request.REQUEST,
'show_delete': False,
'has_delete_permission':False,
'has_add_permission':True,
'has_change_permission':True,
'errors': form.errors,
'app_label': opts.app_label,
'current_app':capitalize(opts.app_label),
'all_app_list':self.admin_site.all_app_list(request),
'module_name': force_unicode(opts.verbose_name_plural),
'opts':opts,
'has_file_field':True,
'adminform':adminForm,
'save_as':False,
'media':media,
}
,context_instance=RequestContext(request))
def get_urls(self):
urls = super(ServicosAdmin, self).get_urls()
info = self.model._meta.app_label, self.model._meta.module_name
my_urls = patterns('',
url(r'^config/$', custom_admin.custom_site.admin_view(self.configuracoes_servicos_view),name='%s_%s_config' % info),
)
return my_urls + urls
@property
def media(self):
super_media = super(ServicosAdmin, self).media
js = [
'cufon-yui.js',
'TitilliumText.font.js',
'cufon-replace-ckeditor.js',
]
current_media = forms.Media(js=[static('js/%s' % url) for url in js])
media = super_media + current_media
return media
def get_model_perms(self, request):
permiss = super(ServicosAdmin, self).get_model_perms(request)
permiss['config'] = self.has_change_permission(request) and self.has_add_permission(request)
return permiss
class ClientesAdmin(CustomModelAdmin):
list_display = ('imagem_icone','descricao','site')
list_display_links = ('descricao',)
search_fields = ['site','descricao']
exclude = ['slug']
class TrabalhoForm(forms.ModelForm):
descricao = forms.CharField(widget=CKEditorWidget())
class Meta:
model = Trabalho
class TrabalhoAdmin(CustomModelAdmin):
list_display = ('titulo','descricao_pequena','servico','cliente')
search_fields = ['titulo']
list_filter = ['servico']
exclude = ['slug']
custom_admin.custom_site.register(Cliente, ClientesAdmin)
custom_admin.custom_site.register(CaracteristicaServico, CaracteristicaServicoAdmin)
custom_admin.custom_site.register(Servico, ServicosAdmin)
custom_admin.custom_site.register(Trabalho, TrabalhoAdmin)
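# Note (assumption about routing): with the extra pattern added in
# ServicosAdmin.get_urls, the configuration view is reachable at
# <custom admin root>/portfolio/servico/config/ under the URL name
# 'portfolio_servico_config'.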
|
normal
|
{
"blob_id": "caac9dfc7d52607c2af67ddc03a3a7bdae9911bb",
"index": 8204,
"step-1": "<mask token>\n\n\nclass ServicoForm(forms.ModelForm):\n <mask token>\n\n\n class Meta:\n model = Servico\n\n\nclass ServicosAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'titulo', 'intro'\n list_display_links = 'titulo', 'intro'\n search_fields = ['titulo', 'intro', 'descricao']\n list_filter = ['caracteristicas']\n exclude = ['slug']\n form = ServicoForm\n\n def configuracoes_servicos_view(self, request):\n import forms\n from string import capitalize\n from django.utils.encoding import force_unicode\n from django.contrib.admin import helpers\n model = self.model\n opts = model._meta\n prepopuled_fields = {}\n add, change = True, False\n if request.method == 'POST':\n form = forms.ConfigServicoForm(request.POST, request.FILES)\n if request.POST.has_key('_update'):\n form.fields['imagem'].required = False\n if form.is_valid():\n form.fields['imagem'].required = True\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n except:\n texto = TextoPagina()\n if texto.texto == None or texto.texto != form.cleaned_data[\n 'texto']:\n texto.texto = form.cleaned_data['texto']\n if not request.POST.has_key('_update'\n ) or request.FILES.has_key('imagem'):\n texto.imagem = request.FILES['imagem']\n texto.slug = 'texto_servico'\n texto.save()\n form = forms.ConfigServicoForm()\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n change = True\n add = False\n else:\n form = forms.ConfigServicoForm()\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n change = True\n add = False\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n except:\n pass\n adminForm = helpers.AdminForm(form, [('Texto da página de serviços',\n {'fields': ['imagem', 'texto']})], prepopuled_fields)\n media = self.media + adminForm.media\n return render_to_response('admin/config_form.html', {'add': add,\n 'change': change, 'title': 'Configurações', 'is_popup': \n '_popup' in request.REQUEST, 'show_delete': False,\n 'has_delete_permission': False, 'has_add_permission': True,\n 'has_change_permission': True, 'errors': form.errors,\n 'app_label': opts.app_label, 'current_app': capitalize(opts.\n app_label), 'all_app_list': self.admin_site.all_app_list(\n request), 'module_name': force_unicode(opts.verbose_name_plural\n ), 'opts': opts, 'has_file_field': True, 'adminform': adminForm,\n 'save_as': False, 'media': media}, context_instance=\n RequestContext(request))\n\n def get_urls(self):\n urls = super(ServicosAdmin, self).get_urls()\n info = self.model._meta.app_label, self.model._meta.module_name\n my_urls = patterns('', url('^config/$', custom_admin.custom_site.\n admin_view(self.configuracoes_servicos_view), name=\n '%s_%s_config' % info))\n return my_urls + urls\n\n @property\n def media(self):\n super_media = super(ServicosAdmin, self).media\n js = ['cufon-yui.js', 'TitilliumText.font.js',\n 'cufon-replace-ckeditor.js']\n current_media = forms.Media(js=[static('js/%s' % url) for url in js])\n media = super_media + current_media\n return media\n\n def get_model_perms(self, request):\n permiss = super(ServicosAdmin, self).get_model_perms(request)\n permiss['config'] = self.has_change_permission(request\n ) and self.has_add_permission(request)\n return permiss\n\n\nclass ClientesAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'descricao', 'site'\n list_display_links = 'descricao',\n search_fields = ['site', 'descricao']\n exclude = ['slug']\n\n\nclass TrabalhoForm(forms.Form):\n descricao = 
forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Trabalho\n\n\nclass TrabalhoAdmin(CustomModelAdmin):\n list_display = 'titulo', 'descricao_pequena', 'servico', 'cliente'\n search_fields = ['titulo']\n list_filter = ['servico']\n exclude = ['slug']\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ServicoForm(forms.ModelForm):\n descricao = forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Servico\n\n\nclass ServicosAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'titulo', 'intro'\n list_display_links = 'titulo', 'intro'\n search_fields = ['titulo', 'intro', 'descricao']\n list_filter = ['caracteristicas']\n exclude = ['slug']\n form = ServicoForm\n\n def configuracoes_servicos_view(self, request):\n import forms\n from string import capitalize\n from django.utils.encoding import force_unicode\n from django.contrib.admin import helpers\n model = self.model\n opts = model._meta\n prepopuled_fields = {}\n add, change = True, False\n if request.method == 'POST':\n form = forms.ConfigServicoForm(request.POST, request.FILES)\n if request.POST.has_key('_update'):\n form.fields['imagem'].required = False\n if form.is_valid():\n form.fields['imagem'].required = True\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n except:\n texto = TextoPagina()\n if texto.texto == None or texto.texto != form.cleaned_data[\n 'texto']:\n texto.texto = form.cleaned_data['texto']\n if not request.POST.has_key('_update'\n ) or request.FILES.has_key('imagem'):\n texto.imagem = request.FILES['imagem']\n texto.slug = 'texto_servico'\n texto.save()\n form = forms.ConfigServicoForm()\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n change = True\n add = False\n else:\n form = forms.ConfigServicoForm()\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n change = True\n add = False\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n except:\n pass\n adminForm = helpers.AdminForm(form, [('Texto da página de serviços',\n {'fields': ['imagem', 'texto']})], prepopuled_fields)\n media = self.media + adminForm.media\n return render_to_response('admin/config_form.html', {'add': add,\n 'change': change, 'title': 'Configurações', 'is_popup': \n '_popup' in request.REQUEST, 'show_delete': False,\n 'has_delete_permission': False, 'has_add_permission': True,\n 'has_change_permission': True, 'errors': form.errors,\n 'app_label': opts.app_label, 'current_app': capitalize(opts.\n app_label), 'all_app_list': self.admin_site.all_app_list(\n request), 'module_name': force_unicode(opts.verbose_name_plural\n ), 'opts': opts, 'has_file_field': True, 'adminform': adminForm,\n 'save_as': False, 'media': media}, context_instance=\n RequestContext(request))\n\n def get_urls(self):\n urls = super(ServicosAdmin, self).get_urls()\n info = self.model._meta.app_label, self.model._meta.module_name\n my_urls = patterns('', url('^config/$', custom_admin.custom_site.\n admin_view(self.configuracoes_servicos_view), name=\n '%s_%s_config' % info))\n return my_urls + urls\n\n @property\n def media(self):\n super_media = super(ServicosAdmin, self).media\n js = ['cufon-yui.js', 'TitilliumText.font.js',\n 'cufon-replace-ckeditor.js']\n current_media = forms.Media(js=[static('js/%s' % url) for url in js])\n media = super_media + current_media\n return media\n\n def get_model_perms(self, request):\n permiss = super(ServicosAdmin, self).get_model_perms(request)\n permiss['config'] = self.has_change_permission(request\n ) and self.has_add_permission(request)\n return permiss\n\n\nclass ClientesAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'descricao', 'site'\n list_display_links = 'descricao',\n search_fields = ['site', 'descricao']\n exclude = ['slug']\n\n\nclass TrabalhoForm(forms.Form):\n 
descricao = forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Trabalho\n\n\nclass TrabalhoAdmin(CustomModelAdmin):\n list_display = 'titulo', 'descricao_pequena', 'servico', 'cliente'\n search_fields = ['titulo']\n list_filter = ['servico']\n exclude = ['slug']\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass CaracteristicaServicoAdmin(CustomModelAdmin):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass ServicoForm(forms.ModelForm):\n descricao = forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Servico\n\n\nclass ServicosAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'titulo', 'intro'\n list_display_links = 'titulo', 'intro'\n search_fields = ['titulo', 'intro', 'descricao']\n list_filter = ['caracteristicas']\n exclude = ['slug']\n form = ServicoForm\n\n def configuracoes_servicos_view(self, request):\n import forms\n from string import capitalize\n from django.utils.encoding import force_unicode\n from django.contrib.admin import helpers\n model = self.model\n opts = model._meta\n prepopuled_fields = {}\n add, change = True, False\n if request.method == 'POST':\n form = forms.ConfigServicoForm(request.POST, request.FILES)\n if request.POST.has_key('_update'):\n form.fields['imagem'].required = False\n if form.is_valid():\n form.fields['imagem'].required = True\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n except:\n texto = TextoPagina()\n if texto.texto == None or texto.texto != form.cleaned_data[\n 'texto']:\n texto.texto = form.cleaned_data['texto']\n if not request.POST.has_key('_update'\n ) or request.FILES.has_key('imagem'):\n texto.imagem = request.FILES['imagem']\n texto.slug = 'texto_servico'\n texto.save()\n form = forms.ConfigServicoForm()\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n change = True\n add = False\n else:\n form = forms.ConfigServicoForm()\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n change = True\n add = False\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n except:\n pass\n adminForm = helpers.AdminForm(form, [('Texto da página de serviços',\n {'fields': ['imagem', 'texto']})], prepopuled_fields)\n media = self.media + adminForm.media\n return render_to_response('admin/config_form.html', {'add': add,\n 'change': change, 'title': 'Configurações', 'is_popup': \n '_popup' in request.REQUEST, 'show_delete': False,\n 'has_delete_permission': False, 'has_add_permission': True,\n 'has_change_permission': True, 'errors': form.errors,\n 'app_label': opts.app_label, 'current_app': capitalize(opts.\n app_label), 'all_app_list': self.admin_site.all_app_list(\n request), 'module_name': force_unicode(opts.verbose_name_plural\n ), 'opts': opts, 'has_file_field': True, 'adminform': adminForm,\n 'save_as': False, 'media': media}, context_instance=\n RequestContext(request))\n\n def get_urls(self):\n urls = super(ServicosAdmin, self).get_urls()\n info = self.model._meta.app_label, self.model._meta.module_name\n my_urls = patterns('', url('^config/$', custom_admin.custom_site.\n admin_view(self.configuracoes_servicos_view), name=\n '%s_%s_config' % info))\n return my_urls + urls\n\n @property\n def media(self):\n super_media = super(ServicosAdmin, self).media\n js = ['cufon-yui.js', 'TitilliumText.font.js',\n 'cufon-replace-ckeditor.js']\n current_media = forms.Media(js=[static('js/%s' % url) for url in js])\n media = super_media + current_media\n return media\n\n def get_model_perms(self, request):\n permiss = super(ServicosAdmin, self).get_model_perms(request)\n permiss['config'] = self.has_change_permission(request\n ) and self.has_add_permission(request)\n return permiss\n\n\nclass ClientesAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'descricao', 'site'\n list_display_links = 'descricao',\n 
search_fields = ['site', 'descricao']\n exclude = ['slug']\n\n\nclass TrabalhoForm(forms.Form):\n descricao = forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Trabalho\n\n\nclass TrabalhoAdmin(CustomModelAdmin):\n list_display = 'titulo', 'descricao_pequena', 'servico', 'cliente'\n search_fields = ['titulo']\n list_filter = ['servico']\n exclude = ['slug']\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass CaracteristicaServicoAdmin(CustomModelAdmin):\n list_display = 'descricao',\n search_fields = ['descricao']\n exclude = ['slug']\n\n\nclass ServicoForm(forms.ModelForm):\n descricao = forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Servico\n\n\nclass ServicosAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'titulo', 'intro'\n list_display_links = 'titulo', 'intro'\n search_fields = ['titulo', 'intro', 'descricao']\n list_filter = ['caracteristicas']\n exclude = ['slug']\n form = ServicoForm\n\n def configuracoes_servicos_view(self, request):\n import forms\n from string import capitalize\n from django.utils.encoding import force_unicode\n from django.contrib.admin import helpers\n model = self.model\n opts = model._meta\n prepopuled_fields = {}\n add, change = True, False\n if request.method == 'POST':\n form = forms.ConfigServicoForm(request.POST, request.FILES)\n if request.POST.has_key('_update'):\n form.fields['imagem'].required = False\n if form.is_valid():\n form.fields['imagem'].required = True\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n except:\n texto = TextoPagina()\n if texto.texto == None or texto.texto != form.cleaned_data[\n 'texto']:\n texto.texto = form.cleaned_data['texto']\n if not request.POST.has_key('_update'\n ) or request.FILES.has_key('imagem'):\n texto.imagem = request.FILES['imagem']\n texto.slug = 'texto_servico'\n texto.save()\n form = forms.ConfigServicoForm()\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n change = True\n add = False\n else:\n form = forms.ConfigServicoForm()\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n change = True\n add = False\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n except:\n pass\n adminForm = helpers.AdminForm(form, [('Texto da página de serviços',\n {'fields': ['imagem', 'texto']})], prepopuled_fields)\n media = self.media + adminForm.media\n return render_to_response('admin/config_form.html', {'add': add,\n 'change': change, 'title': 'Configurações', 'is_popup': \n '_popup' in request.REQUEST, 'show_delete': False,\n 'has_delete_permission': False, 'has_add_permission': True,\n 'has_change_permission': True, 'errors': form.errors,\n 'app_label': opts.app_label, 'current_app': capitalize(opts.\n app_label), 'all_app_list': self.admin_site.all_app_list(\n request), 'module_name': force_unicode(opts.verbose_name_plural\n ), 'opts': opts, 'has_file_field': True, 'adminform': adminForm,\n 'save_as': False, 'media': media}, context_instance=\n RequestContext(request))\n\n def get_urls(self):\n urls = super(ServicosAdmin, self).get_urls()\n info = self.model._meta.app_label, self.model._meta.module_name\n my_urls = patterns('', url('^config/$', custom_admin.custom_site.\n admin_view(self.configuracoes_servicos_view), name=\n '%s_%s_config' % info))\n return my_urls + urls\n\n @property\n def media(self):\n super_media = super(ServicosAdmin, self).media\n js = ['cufon-yui.js', 'TitilliumText.font.js',\n 'cufon-replace-ckeditor.js']\n current_media = forms.Media(js=[static('js/%s' % url) for url in js])\n media = super_media + current_media\n return media\n\n def get_model_perms(self, request):\n permiss = super(ServicosAdmin, self).get_model_perms(request)\n permiss['config'] = self.has_change_permission(request\n ) and self.has_add_permission(request)\n return permiss\n\n\nclass ClientesAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'descricao', 'site'\n 
list_display_links = 'descricao',\n search_fields = ['site', 'descricao']\n exclude = ['slug']\n\n\nclass TrabalhoForm(forms.Form):\n descricao = forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Trabalho\n\n\nclass TrabalhoAdmin(CustomModelAdmin):\n list_display = 'titulo', 'descricao_pequena', 'servico', 'cliente'\n search_fields = ['titulo']\n list_filter = ['servico']\n exclude = ['slug']\n\n\n<mask token>\n",
"step-5": "#coding=utf-8\n'''\nCreated on 04/09/2012\n\n@author: Johnny\n'''\nfrom ckeditor.widgets import CKEditorWidget\nfrom django.conf.urls import patterns, url\nfrom django.shortcuts import render_to_response\nfrom django.template import RequestContext\nfrom django.templatetags.static import static\nimport views\nfrom portfolio.models import *\nfrom custom_admin import custom_admin\nfrom custom_admin.custom_model_admin import CustomModelAdmin\nfrom django import forms\n\nclass CaracteristicaServicoAdmin(CustomModelAdmin):\n list_display = ('descricao',)\n search_fields = ['descricao']\n exclude = ['slug']\n\nclass ServicoForm(forms.ModelForm):\n descricao = forms.CharField(widget=CKEditorWidget())\n class Meta:\n model = Servico\n\nclass ServicosAdmin(CustomModelAdmin):\n list_display = ('imagem_icone','titulo','intro',)\n list_display_links = ('titulo','intro',)\n search_fields = ['titulo','intro','descricao']\n list_filter = ['caracteristicas']\n exclude = ['slug']\n form = ServicoForm\n\n\n def configuracoes_servicos_view(self,request):\n import forms\n from string import capitalize\n from django.utils.encoding import force_unicode\n from django.contrib.admin import helpers\n\n model = self.model\n opts = model._meta\n prepopuled_fields = {}\n\n add, change = True,False\n\n if request.method == 'POST': # If the form has been submitted...\n\n form = forms.ConfigServicoForm(request.POST,request.FILES) # A form bound to the POST data\n\n if request.POST.has_key('_update'):\n form.fields['imagem'].required = False\n\n if form.is_valid(): # All validation rules pass\n\n form.fields['imagem'].required = True\n\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n except:\n texto = TextoPagina()\n\n if texto.texto == None or texto.texto != form.cleaned_data['texto']:\n texto.texto = form.cleaned_data['texto']\n\n if not request.POST.has_key('_update') or request.FILES.has_key('imagem'):\n texto.imagem = request.FILES['imagem']\n\n\n\n texto.slug = 'texto_servico'\n texto.save()\n\n form = forms.ConfigServicoForm()\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n\n change = True\n add = False\n else:\n form = forms.ConfigServicoForm()\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n change = True\n add = False\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n except:\n pass\n\n adminForm = helpers.AdminForm(form,[('Texto da página de serviços',{'fields':['imagem','texto']})],prepopuled_fields)\n\n media = self.media + adminForm.media\n\n return render_to_response('admin/config_form.html',\n {\n 'add':add,\n 'change':change,\n 'title': 'Configurações',\n 'is_popup': \"_popup\" in request.REQUEST,\n 'show_delete': False,\n 'has_delete_permission':False,\n 'has_add_permission':True,\n 'has_change_permission':True,\n 'errors': form.errors,\n 'app_label': opts.app_label,\n 'current_app':capitalize(opts.app_label),\n 'all_app_list':self.admin_site.all_app_list(request),\n 'module_name': force_unicode(opts.verbose_name_plural),\n 'opts':opts,\n 'has_file_field':True,\n 'adminform':adminForm,\n 'save_as':False,\n 'media':media,\n }\n ,context_instance=RequestContext(request))\n\n def get_urls(self):\n urls = super(ServicosAdmin, self).get_urls()\n info = self.model._meta.app_label, self.model._meta.module_name\n my_urls = patterns('',\n url(r'^config/$', custom_admin.custom_site.admin_view(self.configuracoes_servicos_view),name='%s_%s_config' % info),\n )\n return my_urls + urls\n\n @property\n def 
media(self):\n super_media = super(ServicosAdmin, self).media\n\n js = [\n 'cufon-yui.js',\n 'TitilliumText.font.js',\n 'cufon-replace-ckeditor.js',\n ]\n\n current_media = forms.Media(js=[static('js/%s' % url) for url in js])\n\n media = super_media + current_media\n\n return media\n\n def get_model_perms(self, request):\n permiss = super(ServicosAdmin, self).get_model_perms(request)\n permiss['config'] = self.has_change_permission(request) and self.has_add_permission(request)\n return permiss\n\nclass ClientesAdmin(CustomModelAdmin):\n list_display = ('imagem_icone','descricao','site')\n list_display_links = ('descricao',)\n search_fields = ['site','descricao']\n exclude = ['slug']\n\n\nclass TrabalhoForm(forms.Form):\n descricao = forms.CharField(widget=CKEditorWidget())\n class Meta:\n model = Trabalho\n\nclass TrabalhoAdmin(CustomModelAdmin):\n list_display = ('titulo','descricao_pequena','servico','cliente')\n search_fields = ['titulo']\n list_filter = ['servico']\n exclude = ['slug']\n\n\ncustom_admin.custom_site.register(Cliente,ClientesAdmin)\ncustom_admin.custom_site.register(CaracteristicaServico,CaracteristicaServicoAdmin)\ncustom_admin.custom_site.register(Servico,ServicosAdmin)\ncustom_admin.custom_site.register(Trabalho,TrabalhoAdmin)\n",
"step-ids": [
13,
14,
15,
16,
19
]
}
|
[
13,
14,
15,
16,
19
] |
#!/usr/bin/env python
# Read all of stdin and print every http/https URL found, one per line.
import sys, re

print('\n'.join(re.findall(
    r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+',
    sys.stdin.read())))
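
# Assumed usage (the filename "extract_urls.py" is illustrative, not part of
# the original source):
#   $ echo "see https://example.com/a?b=1 and http://foo.bar" | python extract_urls.py
#   https://example.com/a?b=1
#   http://foo.bar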
|
normal
|
{
"blob_id": "4cefaa964251e77a05066af1f61f9fd2a4350d38",
"index": 7622,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('\\n'.join(re.findall(\n 'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\\\(\\\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'\n , sys.stdin.read())))\n",
"step-3": "import sys, re\nprint('\\n'.join(re.findall(\n 'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\\\(\\\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'\n , sys.stdin.read())))\n",
"step-4": "#!/usr/bin/env python\nimport sys,re\nprint('\\n'.join(re.findall(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+',sys.stdin.read())))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.db import models
class Building(models.Model):
Number = models.CharField(max_length=60)
Description = models.CharField(max_length=120)
OSMWAYID = models.DecimalField(decimal_places=0, max_digits=15) # the osm way id
    Lat = models.CharField(max_length=20) #lat/lon of the center
Lon = models.CharField(max_length=20) # lat/lon of the center of the building
class BuildingPoint(models.Model):
parent = models.ForeignKey('Building', null=False, blank=False, related_name='points')
OSMNODEID = models.DecimalField(decimal_places=0, max_digits=15) # the osm id
    Lat = models.CharField(max_length=20) #lat/lon of the center
Lon = models.CharField(max_length=20) # lat/lon of the center of the building
class Facinet(models.Model):
##
Building = models.ForeignKey('Building', null=False, blank=False, related_name='FacinetNodes')
location = models.IntegerField(unique=True, db_column='Location') #
name = models.TextField(db_column='Name') #
connectionstring = models.TextField(db_column='ConnectionString') #
tapidevice = models.TextField(db_column='TapiDevice', blank=True) #
synctime = models.CharField(max_length=3, db_column='SyncTime') #
online = models.CharField(max_length=3, db_column='Online') #
onlineall = models.CharField(max_length=3, db_column='OnlineAll') #
## location for display
Lat = models.CharField(max_length=20) #lat/lon of facinet collector
Lon = models.CharField(max_length=20) # lat/lon of facinet collector
class Logger(models.Model):
Facinet = models.ForeignKey('Facinet', null=False, blank=False, related_name='Loggers')
loggerindex = models.IntegerField(unique=True, db_column='LoggerIndex') #
name = models.TextField(db_column='Name') #
online = models.IntegerField(db_column='Online') #
## location for display
Lat = models.CharField(max_length=20) #lat/lon of the logger
Lon = models.CharField(max_length=20) # lat/lon of the logger
class LoggerMeasurement(models.Model):
Logger = models.ForeignKey('Logger', null=False, blank=False, related_name='Measurement')
timestamp = models.DateTimeField()
measurement = models.DecimalField(max_digits=12, decimal_places=4)
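
# Relationship sketch (hypothetical field values; assumes a configured Django
# project, which this models module does not provide by itself):
#   b = Building.objects.create(Number='B1', Description='Main hall',
#                               OSMWAYID=123456789, Lat='52.0', Lon='13.4')
#   f = Facinet.objects.create(Building=b, location=1, name='node-1',
#                              connectionstring='', tapidevice='',
#                              synctime='yes', online='yes', onlineall='yes',
#                              Lat=b.Lat, Lon=b.Lon)
#   # Logger rows then hang off a Facinet, and LoggerMeasurement off a Logger.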
|
normal
|
{
"blob_id": "02ddf213cd3f455f8d8fbde8621fc4788124d5a9",
"index": 3714,
"step-1": "<mask token>\n\n\nclass Logger(models.Model):\n Facinet = models.ForeignKey('Facinet', null=False, blank=False,\n related_name='Loggers')\n loggerindex = models.IntegerField(unique=True, db_column='LoggerIndex')\n name = models.TextField(db_column='Name')\n online = models.IntegerField(db_column='Online')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass LoggerMeasurement(models.Model):\n Logger = models.ForeignKey('Logger', null=False, blank=False,\n related_name='Measurement')\n timestamp = models.DateTimeField()\n measurement = models.DecimalField(max_digits=12, decimal_places=4)\n",
"step-2": "<mask token>\n\n\nclass Facinet(models.Model):\n Building = models.ForeignKey('Building', null=False, blank=False,\n related_name='FacinetNodes')\n location = models.IntegerField(unique=True, db_column='Location')\n name = models.TextField(db_column='Name')\n connectionstring = models.TextField(db_column='ConnectionString')\n tapidevice = models.TextField(db_column='TapiDevice', blank=True)\n synctime = models.CharField(max_length=3, db_column='SyncTime')\n online = models.CharField(max_length=3, db_column='Online')\n onlineall = models.CharField(max_length=3, db_column='OnlineAll')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass Logger(models.Model):\n Facinet = models.ForeignKey('Facinet', null=False, blank=False,\n related_name='Loggers')\n loggerindex = models.IntegerField(unique=True, db_column='LoggerIndex')\n name = models.TextField(db_column='Name')\n online = models.IntegerField(db_column='Online')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass LoggerMeasurement(models.Model):\n Logger = models.ForeignKey('Logger', null=False, blank=False,\n related_name='Measurement')\n timestamp = models.DateTimeField()\n measurement = models.DecimalField(max_digits=12, decimal_places=4)\n",
"step-3": "<mask token>\n\n\nclass BuildingPoint(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Facinet(models.Model):\n Building = models.ForeignKey('Building', null=False, blank=False,\n related_name='FacinetNodes')\n location = models.IntegerField(unique=True, db_column='Location')\n name = models.TextField(db_column='Name')\n connectionstring = models.TextField(db_column='ConnectionString')\n tapidevice = models.TextField(db_column='TapiDevice', blank=True)\n synctime = models.CharField(max_length=3, db_column='SyncTime')\n online = models.CharField(max_length=3, db_column='Online')\n onlineall = models.CharField(max_length=3, db_column='OnlineAll')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass Logger(models.Model):\n Facinet = models.ForeignKey('Facinet', null=False, blank=False,\n related_name='Loggers')\n loggerindex = models.IntegerField(unique=True, db_column='LoggerIndex')\n name = models.TextField(db_column='Name')\n online = models.IntegerField(db_column='Online')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass LoggerMeasurement(models.Model):\n Logger = models.ForeignKey('Logger', null=False, blank=False,\n related_name='Measurement')\n timestamp = models.DateTimeField()\n measurement = models.DecimalField(max_digits=12, decimal_places=4)\n",
"step-4": "from django.db import models\n\n\nclass Building(models.Model):\n Number = models.CharField(max_length=60)\n Description = models.CharField(max_length=120)\n OSMWAYID = models.DecimalField(decimal_places=0, max_digits=15)\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass BuildingPoint(models.Model):\n parent = models.ForeignKey('Building', null=False, blank=False,\n related_name='points')\n OSMNODEID = models.DecimalField(decimal_places=0, max_digits=15)\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass Facinet(models.Model):\n Building = models.ForeignKey('Building', null=False, blank=False,\n related_name='FacinetNodes')\n location = models.IntegerField(unique=True, db_column='Location')\n name = models.TextField(db_column='Name')\n connectionstring = models.TextField(db_column='ConnectionString')\n tapidevice = models.TextField(db_column='TapiDevice', blank=True)\n synctime = models.CharField(max_length=3, db_column='SyncTime')\n online = models.CharField(max_length=3, db_column='Online')\n onlineall = models.CharField(max_length=3, db_column='OnlineAll')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass Logger(models.Model):\n Facinet = models.ForeignKey('Facinet', null=False, blank=False,\n related_name='Loggers')\n loggerindex = models.IntegerField(unique=True, db_column='LoggerIndex')\n name = models.TextField(db_column='Name')\n online = models.IntegerField(db_column='Online')\n Lat = models.CharField(max_length=20)\n Lon = models.CharField(max_length=20)\n\n\nclass LoggerMeasurement(models.Model):\n Logger = models.ForeignKey('Logger', null=False, blank=False,\n related_name='Measurement')\n timestamp = models.DateTimeField()\n measurement = models.DecimalField(max_digits=12, decimal_places=4)\n",
"step-5": "from django.db import models\n\n\nclass Building(models.Model):\n Number = models.CharField(max_length=60)\n Description = models.CharField(max_length=120)\n OSMWAYID = models.DecimalField(decimal_places=0, max_digits=15) # the osm way id\n Lat = models.CharField(max_length=20) #lat/lon of then center\n Lon = models.CharField(max_length=20) # lat/lon of the center of the building\n\n\nclass BuildingPoint(models.Model):\n parent = models.ForeignKey('Building', null=False, blank=False, related_name='points')\n OSMNODEID = models.DecimalField(decimal_places=0, max_digits=15) # the osm id\n Lat = models.CharField(max_length=20) #lat/lon of then center\n Lon = models.CharField(max_length=20) # lat/lon of the center of the building\n\n\nclass Facinet(models.Model):\n ##\n Building = models.ForeignKey('Building', null=False, blank=False, related_name='FacinetNodes')\n location = models.IntegerField(unique=True, db_column='Location') # \n name = models.TextField(db_column='Name') # \n connectionstring = models.TextField(db_column='ConnectionString') # \n tapidevice = models.TextField(db_column='TapiDevice', blank=True) # \n synctime = models.CharField(max_length=3, db_column='SyncTime') # \n online = models.CharField(max_length=3, db_column='Online') # \n onlineall = models.CharField(max_length=3, db_column='OnlineAll') # \n ## location for display\n Lat = models.CharField(max_length=20) #lat/lon of facinet collector\n Lon = models.CharField(max_length=20) # lat/lon of facinet collector\n\n\nclass Logger(models.Model):\n Facinet = models.ForeignKey('Facinet', null=False, blank=False, related_name='Loggers')\n loggerindex = models.IntegerField(unique=True, db_column='LoggerIndex') # \n name = models.TextField(db_column='Name') # \n online = models.IntegerField(db_column='Online') # \n ## location for display\n Lat = models.CharField(max_length=20) #lat/lon of the logger\n Lon = models.CharField(max_length=20) # lat/lon of the logger\n\nclass LoggerMeasurement(models.Model):\n Logger = models.ForeignKey('Logger', null=False, blank=False, related_name='Measurement')\n timestamp = models.DateTimeField()\n measurement = models.DecimalField(max_digits=12, decimal_places=4)\n",
"step-ids": [
4,
6,
7,
11,
12
]
}
|
[
4,
6,
7,
11,
12
] |
# The purpose of this module is essentially to subclass the basic SWIG generated
# pynewton classes and add a bit of functionality to them (mostly callback related
# stuff). This could be done in the SWIG interface file, but it's easier to do it
# here since it makes adding python-specific extensions to newton easier.
import pynewton
try:
import OpenGL.GL as GL
import OpenGL.GLU as GLU
GLPresent = True
except ImportError:
GLPresent = False
def GetEulerAngle ( matrix ):
return pynewton.GetEulerAngle( matrix )
def SetEulerAngle ( angle ):
return pynewton.SetEulerAngle( angle )
#extensions to body
def NullApplyForceAndTorqueCallback( body ) :
pass
def NullTransformCallback( body, matrix ):
pass
def NullAutoactiveCallback( body, state ):
pass
def NullBodyDestructCallback( body ):
pass
class Body( pynewton.Body ):
def __init__( self, world, cg ):
self.ApplyForceAndTorqueCallback = None
self.TransformCallback = None
self.AutoactiveCallback = None
self.DestructorCallback = None
self.TreeCollisionCallback = None
pynewton.Body.__init__(self, world, cg )
world.RegisterBody( self )
		self.py_cg = cg
def SetApplyForceAndTorqueCallback( self, callback ):
self.ApplyForceAndTorqueCallback = callback
def SetAutoactiveCallback( self, callback ):
self.AutoactiveCallback = callback
def GetCollision( self ):
return self.py_cg
def OnApplyForceAndTorque(self):
if self.ApplyForceAndTorqueCallback != None:
self.ApplyForceAndTorqueCallback( self )
def OnAutoactive(self, state ):
if self.AutoactiveCallback != None:
self.AutoactiveCallback( self, state )
def OnTransform( self ):
matrix = self.GetMatrix()
if self.TransformCallback != None:
self.TransformCallback( self, matrix )
	def OnDestruct( self ):
		if self.DestructorCallback != None:
			self.DestructorCallback( self )
def OnTreeCollisionWith( self, body ):
if self.TreeCollisionCallback != None:
self.TreeCollisionCallback(body)
def Draw( self ):
m = self.GetMatrix()
		if not GLPresent: raise RuntimeError("OpenGL module not loaded, cannot draw")
GL.glPushMatrix()
GL.glMultMatrixf( m )
c = self.GetCollision()
c.draw()
GL.glPopMatrix()
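
# Minimal wiring sketch for the callback hooks above (the Sphere dimensions
# and the no-op callbacks are illustrative; pynewton's force-application API
# is not shown in this module, so the force callback body is left as a stub):
#
#   world = World()
#   shape = Sphere(world, 1.0, 1.0, 1.0)
#   body = Body(world, shape)
#   body.SetApplyForceAndTorqueCallback(lambda b: None)  # apply forces here
#   body.TransformCallback = lambda b, m: None           # react to movement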
class _materialCallback( object ):
def __init__(self, id1, id2, begin_function, process_function, end_function, userobject):
self.id1 = id1
self.id2 = id2
self.beginCallback = begin_function
self.processCallback = process_function
self.endCallback = end_function
self.userobject = userobject
#extensions to world
class World( pynewton.World ):
def __init__(self ):
self.bodyList = []
self.newtonBodyLookup = {}
self.materialCallbacks = {}
self.currentCallback = None
self.raycastUserData = None
self.raycastCallback = None
pynewton.World.__init__(self)
def RegisterBody( self, body ):
self.bodyList.append( body )
self.newtonBodyLookup[body.IDKey()] = body
def UnregisterBody( self, body ):
		self.bodyList.remove( body )
		del self.newtonBodyLookup[body.IDKey()]  # mirror the key used in RegisterBody
def NewtonBodyToBody( self, ptr ):
return self.newtonBodyLookup[int(ptr)]
def ForEachBodyDo( self, function ):
for body in self.bodyList:
function( body )
def RayCast( self, p0, p1, callback, userdata):
"""Casts a ray in the world defined by p0 and p1 and calls callback
with the body, normal, collision id, user data and intersection distance"""
self.raycastUserData = userdata
self.raycastCallback = callback
self.CppRayCast.__call__(p0[0], p0[1], p0[2], p1[0], p1[1], p1[2])
def RayCastCallback( self, body, nx, ny, nz, collisionID, intersectParam ):
#delegate this off to the user specified function
return self.raycastCallback( body, (nx, ny, nz), collisionID, self.raycastUserData, intersectParam )
def MaterialSetCollisionCallback( self, id1, id2, userdata=None, begin_func=None, process_func=None, end_func=None ):
self.materialCallbacks[(id1,id2)] = _materialCallback( id1, id2, begin_func, process_func, end_func, userdata)
self.RegisterMaterialCallbackBetween( id1, id2)
def GetMaterialCallback(self, material, body1, body2):
id1 = body1.MaterialGroupID()
id2 = body2.MaterialGroupID()
cb = self.materialCallbacks[(id1,id2)]
return cb
def MaterialBeginCollision( self, material, b1, b2 ):
body1 = self.newtonBodyLookup[int(b1)]
body2 = self.newtonBodyLookup[int(b2)]
self.currentCallback = self.GetMaterialCallback( material, body1, body2 )
if self.currentCallback.beginCallback:
self.currentCallback.beginCallback(material,
body1,
body2,
self.currentCallback.userobject )
def MaterialProcessCollision( self, material, contactHandle ):
if self.currentCallback.processCallback:
self.currentCallback.processCallback(material,
contactHandle,
self.currentCallback.userobject )
def MaterialEndCollision( self, material ):
if self.currentCallback.endCallback:
self.currentCallback.endCallback( material,
self.currentCallback.userobject )
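
# Sketch of the two callback contracts above, kept as comments because they
# need a live world; names like on_hit/on_begin are assumptions:
#
#   def on_hit(body, normal, collision_id, userdata, t):
#       return t  # RayCastCallback forwards exactly these five arguments;
#                 # returning t as the clip distance follows the usual Newton
#                 # convention (assumption, not stated in this module)
#
#   world.RayCast((0, 10, 0), (0, -10, 0), on_hit, None)
#
#   def on_begin(material, body1, body2, userobject):
#       pass  # invoked by MaterialBeginCollision for this (id1, id2) pair
#
#   world.MaterialSetCollisionCallback(id1, id2, begin_func=on_begin)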
#collision extensions
class CollisionGeometry( pynewton.CollisionGeometry ):
def draw(self):
		if not GLPresent: raise RuntimeError("OpenGL module could not be loaded")
class Sphere ( pynewton.Sphere ):
def __init__(self, world, w, h, d, offset_matrix=None):
pynewton.Sphere.__init__( self, world, w, h, d, offset_matrix )
self.width = w
self.height = h
self.depth = d
if GLPresent:
self.quad = GLU.gluNewQuadric()
def draw(self):
		if not GLPresent: raise RuntimeError("OpenGL module could not be loaded")
GL.glPushMatrix()
GL.glScalef( self.width, self.height, self.depth )
GL.glPolygonMode( GL.GL_FRONT_AND_BACK, GL.GL_LINE )
GLU.gluSphere( self.quad, 1.0, 12, 12 )
GL.glPolygonMode( GL.GL_FRONT_AND_BACK, GL.GL_FILL )
GL.glPopMatrix()
class Box ( pynewton.Box ):
pass
class Cone ( pynewton.Cone ):
pass
class Cylinder (pynewton.Cylinder):
pass
class ChamferCylinder (pynewton.ChamferCylinder):
pass
class ConvexHull (pynewton.ConvexHull):
pass
class ConvexHullModifier (pynewton.ConvexHullModifier):
pass
class NullCollider (pynewton.NullCollider ):
pass
class TreeCollision (pynewton.TreeCollision):
pass
class TreeCollisionUserCallback ( pynewton.TreeCollisionUserCallback ):
def __init__( self, func ):
self.callbackFunc = func
def OnCallback (self, bodyWithTreeCollision, body, vertices, vertexstrideInBytes, indexCount, indexArray):
if self.callbackFunc != None:
self.callbackFunc( bodyWithTreeCollision, body, vertices, vertexstrideInBytes, indexCount, indexArray)
pass
#material extensions
class Material ( pynewton.Material ):
pass
#joint extensions
class BallJoint ( pynewton.BallJoint ):
def __init__(self, *args, **kwargs ):
self.callback = None
return pynewton.BallJoint.__init__(self, *args, **kwargs )
	def OnCallback( self ):
		if self.callback != None:
			self.callback( )
class Hinge ( pynewton.Hinge ):
def __init__(self, *args, **kwargs ):
self.callback = None
		return pynewton.Hinge.__init__( self, *args, **kwargs )
	def OnCallback( self, desc ):
if self.callback != None:
return self.callback( desc )
return 0
class Slider ( pynewton.Slider ):
def __init__( self, *args, **kwargs ):
self.callback = None
return pynewton.Slider.__init__( self, *args, **kwargs )
# def OnCallback( desc ):
# if self.callback != None:
# return self.callback( desc )
# return 0
class Corkscrew ( pynewton.Corkscrew ):
def __init__(self, *args, **kwargs ):
self.callback = None
pynewton.Corkscrew.__init__(self, *args, **kwargs )
	def OnCallback( self, desc ):
if self.callback != None:
return self.callback( desc )
return 0
class UniversalJoint ( pynewton.UniversalJoint ):
def __init__(self, *args, **kwargs ):
self.callback = None
return pynewton.UniversalJoint.__init__( self, *args, **kwargs )
	def OnCallback( self, desc ):
if self.callback != None:
return self.callback( desc )
return 0
class UpVector ( pynewton.UpVector ):
def __init__(self, *args, **kwargs ):
self.callback = None
return pynewton.UpVector.__init__(self, *args, **kwargs )
	def OnCallback( self ):
if self.callback != None:
self.callback( )
class Tire ( pynewton.Tire ):
pass
class Vehicle ( pynewton.Vehicle ):
def __init__(self, *args, **kwargs ):
self.tires = []
self.UpdateTireCallback = None
return pynewton.Vehicle.__init__(self, *args, **kwargs )
def AddTire ( self, matrix, pin, mass, width, radius, suspensionShock, suspensionSpring, suspensionLength, userData, collisionID):
tire = pynewton.Vehicle.AddTire(self, matrix, pin, mass, width, radius, suspensionShock, suspensionSpring, suspensionLength, userData, collisionID)
		self.tires.append( tire )
return tire
def RemoveTire( self, tire ):
		self.tires.remove( tire )
		pynewton.Vehicle.RemoveTire( self, tire )
def OnCallback( self):
if self.UpdateTireCallback != None:
self.UpdateTireCallback(self)
#Heightmap
class HeightField ( pynewton.HeightField ):
pass
|
normal
|
{
"blob_id": "90d792fe18e589a0d74d36797b46c6ac1d7946be",
"index": 4303,
"step-1": "<mask token>\n\n\nclass ChamferCylinder(pynewton.ChamferCylinder):\n pass\n\n\nclass ConvexHull(pynewton.ConvexHull):\n pass\n\n\nclass ConvexHullModifier(pynewton.ConvexHullModifier):\n pass\n\n\nclass NullCollider(pynewton.NullCollider):\n pass\n\n\nclass TreeCollision(pynewton.TreeCollision):\n pass\n\n\nclass TreeCollisionUserCallback(pynewton.TreeCollisionUserCallback):\n\n def __init__(self, func):\n self.callbackFunc = func\n\n def OnCallback(self, bodyWithTreeCollision, body, vertices,\n vertexstrideInBytes, indexCount, indexArray):\n if self.callbackFunc != None:\n self.callbackFunc(bodyWithTreeCollision, body, vertices,\n vertexstrideInBytes, indexCount, indexArray)\n pass\n\n\nclass Material(pynewton.Material):\n pass\n\n\nclass BallJoint(pynewton.BallJoint):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.BallJoint.__init__(self, *args, **kwargs)\n\n def OnCallback():\n if self.callback != None:\n self.callback()\n pass\n\n\nclass Hinge(pynewton.Hinge):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.Hinge.__init__(*args, **kwargs)\n\n def OnCallback(desc):\n if self.callback != None:\n return self.callback(desc)\n return 0\n\n\nclass Slider(pynewton.Slider):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.Slider.__init__(self, *args, **kwargs)\n\n\nclass Corkscrew(pynewton.Corkscrew):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n pynewton.Corkscrew.__init__(self, *args, **kwargs)\n\n def OnCallback(desc):\n if self.callback != None:\n return self.callback(desc)\n return 0\n\n\nclass UniversalJoint(pynewton.UniversalJoint):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.UniversalJoint.__init__(self, *args, **kwargs)\n\n def OnCallback(desc):\n if self.callback != None:\n return self.callback(desc)\n return 0\n\n\nclass UpVector(pynewton.UpVector):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.UpVector.__init__(self, *args, **kwargs)\n\n def OnCallback():\n if self.callback != None:\n self.callback()\n\n\nclass Tire(pynewton.Tire):\n pass\n\n\nclass Vehicle(pynewton.Vehicle):\n\n def __init__(self, *args, **kwargs):\n self.tires = []\n self.UpdateTireCallback = None\n return pynewton.Vehicle.__init__(self, *args, **kwargs)\n\n def AddTire(self, matrix, pin, mass, width, radius, suspensionShock,\n suspensionSpring, suspensionLength, userData, collisionID):\n tire = pynewton.Vehicle.AddTire(self, matrix, pin, mass, width,\n radius, suspensionShock, suspensionSpring, suspensionLength,\n userData, collisionID)\n tires.append(tire)\n return tire\n\n def RemoveTire(self, tire):\n del tires[tires.index(tire)]\n tire = pynewton.Vehicle.RemoveTire(self, tire)\n\n def OnCallback(self):\n if self.UpdateTireCallback != None:\n self.UpdateTireCallback(self)\n\n\nclass HeightField(pynewton.HeightField):\n pass\n",
"step-2": "<mask token>\n\n\nclass World(pynewton.World):\n\n def __init__(self):\n self.bodyList = []\n self.newtonBodyLookup = {}\n self.materialCallbacks = {}\n self.currentCallback = None\n self.raycastUserData = None\n self.raycastCallback = None\n pynewton.World.__init__(self)\n\n def RegisterBody(self, body):\n self.bodyList.append(body)\n self.newtonBodyLookup[body.IDKey()] = body\n\n def UnregisterBody(self, body):\n self.bodyList.remove(bodyList.index(body))\n del self.newtonBodyLookup[body.m_body]\n <mask token>\n\n def ForEachBodyDo(self, function):\n for body in self.bodyList:\n function(body)\n <mask token>\n\n def RayCastCallback(self, body, nx, ny, nz, collisionID, intersectParam):\n return self.raycastCallback(body, (nx, ny, nz), collisionID, self.\n raycastUserData, intersectParam)\n\n def MaterialSetCollisionCallback(self, id1, id2, userdata=None,\n begin_func=None, process_func=None, end_func=None):\n self.materialCallbacks[id1, id2] = _materialCallback(id1, id2,\n begin_func, process_func, end_func, userdata)\n self.RegisterMaterialCallbackBetween(id1, id2)\n\n def GetMaterialCallback(self, material, body1, body2):\n id1 = body1.MaterialGroupID()\n id2 = body2.MaterialGroupID()\n cb = self.materialCallbacks[id1, id2]\n return cb\n\n def MaterialBeginCollision(self, material, b1, b2):\n body1 = self.newtonBodyLookup[int(b1)]\n body2 = self.newtonBodyLookup[int(b2)]\n self.currentCallback = self.GetMaterialCallback(material, body1, body2)\n if self.currentCallback.beginCallback:\n self.currentCallback.beginCallback(material, body1, body2, self\n .currentCallback.userobject)\n\n def MaterialProcessCollision(self, material, contactHandle):\n if self.currentCallback.processCallback:\n self.currentCallback.processCallback(material, contactHandle,\n self.currentCallback.userobject)\n\n def MaterialEndCollision(self, material):\n if self.currentCallback.endCallback:\n self.currentCallback.endCallback(material, self.currentCallback\n .userobject)\n\n\nclass CollisionGeometry(pynewton.CollisionGeometry):\n\n def draw(self):\n if not GlPresent:\n raise 'OpenGL module could not be loaded'\n\n\nclass Sphere(pynewton.Sphere):\n\n def __init__(self, world, w, h, d, offset_matrix=None):\n pynewton.Sphere.__init__(self, world, w, h, d, offset_matrix)\n self.width = w\n self.height = h\n self.depth = d\n if GLPresent:\n self.quad = GLU.gluNewQuadric()\n\n def draw(self):\n if not GLPresent:\n raise 'OpenGL module could not be loaded'\n GL.glPushMatrix()\n GL.glScalef(self.width, self.height, self.depth)\n GL.glPolygonMode(GL.GL_FRONT_AND_BACK, GL.GL_LINE)\n GLU.gluSphere(self.quad, 1.0, 12, 12)\n GL.glPolygonMode(GL.GL_FRONT_AND_BACK, GL.GL_FILL)\n GL.glPopMatrix()\n\n\nclass Box(pynewton.Box):\n pass\n\n\nclass Cone(pynewton.Cone):\n pass\n\n\nclass Cylinder(pynewton.Cylinder):\n pass\n\n\nclass ChamferCylinder(pynewton.ChamferCylinder):\n pass\n\n\nclass ConvexHull(pynewton.ConvexHull):\n pass\n\n\nclass ConvexHullModifier(pynewton.ConvexHullModifier):\n pass\n\n\nclass NullCollider(pynewton.NullCollider):\n pass\n\n\nclass TreeCollision(pynewton.TreeCollision):\n pass\n\n\nclass TreeCollisionUserCallback(pynewton.TreeCollisionUserCallback):\n\n def __init__(self, func):\n self.callbackFunc = func\n\n def OnCallback(self, bodyWithTreeCollision, body, vertices,\n vertexstrideInBytes, indexCount, indexArray):\n if self.callbackFunc != None:\n self.callbackFunc(bodyWithTreeCollision, body, vertices,\n vertexstrideInBytes, indexCount, indexArray)\n pass\n\n\nclass 
Material(pynewton.Material):\n pass\n\n\nclass BallJoint(pynewton.BallJoint):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.BallJoint.__init__(self, *args, **kwargs)\n\n def OnCallback():\n if self.callback != None:\n self.callback()\n pass\n\n\nclass Hinge(pynewton.Hinge):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.Hinge.__init__(*args, **kwargs)\n\n def OnCallback(desc):\n if self.callback != None:\n return self.callback(desc)\n return 0\n\n\nclass Slider(pynewton.Slider):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.Slider.__init__(self, *args, **kwargs)\n\n\nclass Corkscrew(pynewton.Corkscrew):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n pynewton.Corkscrew.__init__(self, *args, **kwargs)\n\n def OnCallback(desc):\n if self.callback != None:\n return self.callback(desc)\n return 0\n\n\nclass UniversalJoint(pynewton.UniversalJoint):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.UniversalJoint.__init__(self, *args, **kwargs)\n\n def OnCallback(desc):\n if self.callback != None:\n return self.callback(desc)\n return 0\n\n\nclass UpVector(pynewton.UpVector):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.UpVector.__init__(self, *args, **kwargs)\n\n def OnCallback():\n if self.callback != None:\n self.callback()\n\n\nclass Tire(pynewton.Tire):\n pass\n\n\nclass Vehicle(pynewton.Vehicle):\n\n def __init__(self, *args, **kwargs):\n self.tires = []\n self.UpdateTireCallback = None\n return pynewton.Vehicle.__init__(self, *args, **kwargs)\n\n def AddTire(self, matrix, pin, mass, width, radius, suspensionShock,\n suspensionSpring, suspensionLength, userData, collisionID):\n tire = pynewton.Vehicle.AddTire(self, matrix, pin, mass, width,\n radius, suspensionShock, suspensionSpring, suspensionLength,\n userData, collisionID)\n tires.append(tire)\n return tire\n\n def RemoveTire(self, tire):\n del tires[tires.index(tire)]\n tire = pynewton.Vehicle.RemoveTire(self, tire)\n\n def OnCallback(self):\n if self.UpdateTireCallback != None:\n self.UpdateTireCallback(self)\n\n\nclass HeightField(pynewton.HeightField):\n pass\n",
"step-3": "<mask token>\n\n\nclass Body(pynewton.Body):\n\n def __init__(self, world, cg):\n self.ApplyForceAndTorqueCallback = None\n self.TransformCallback = None\n self.AutoactiveCallback = None\n self.DestructorCallback = None\n self.TreeCollisionCallback = None\n pynewton.Body.__init__(self, world, cg)\n world.RegisterBody(self)\n self.py_cg = cg\n <mask token>\n\n def SetAutoactiveCallback(self, callback):\n self.AutoactiveCallback = callback\n\n def GetCollision(self):\n return self.py_cg\n\n def OnApplyForceAndTorque(self):\n if self.ApplyForceAndTorqueCallback != None:\n self.ApplyForceAndTorqueCallback(self)\n\n def OnAutoactive(self, state):\n if self.AutoactiveCallback != None:\n self.AutoactiveCallback(self, state)\n\n def OnTransform(self):\n matrix = self.GetMatrix()\n if self.TransformCallback != None:\n self.TransformCallback(self, matrix)\n\n def OnDestruct(self):\n if self.DestructorCallback != None:\n self.DestructorCallback(self, matrix)\n\n def OnTreeCollisionWith(self, body):\n if self.TreeCollisionCallback != None:\n self.TreeCollisionCallback(body)\n\n def Draw(self):\n m = self.GetMatrix()\n if not GLPresent:\n raise 'OpenGL module not loaded, cannot draw'\n GL.glPushMatrix()\n GL.glMultMatrixf(m)\n c = self.GetCollision()\n c.draw()\n GL.glPopMatrix()\n\n\nclass _materialCallback(object):\n\n def __init__(self, id1, id2, begin_function, process_function,\n end_function, userobject):\n self.id1 = id1\n self.id2 = id2\n self.beginCallback = begin_function\n self.processCallback = process_function\n self.endCallback = end_function\n self.userobject = userobject\n\n\nclass World(pynewton.World):\n\n def __init__(self):\n self.bodyList = []\n self.newtonBodyLookup = {}\n self.materialCallbacks = {}\n self.currentCallback = None\n self.raycastUserData = None\n self.raycastCallback = None\n pynewton.World.__init__(self)\n\n def RegisterBody(self, body):\n self.bodyList.append(body)\n self.newtonBodyLookup[body.IDKey()] = body\n\n def UnregisterBody(self, body):\n self.bodyList.remove(bodyList.index(body))\n del self.newtonBodyLookup[body.m_body]\n\n def NewtonBodyToBody(self, ptr):\n return self.newtonBodyLookup[int(ptr)]\n\n def ForEachBodyDo(self, function):\n for body in self.bodyList:\n function(body)\n\n def RayCast(self, p0, p1, callback, userdata):\n \"\"\"Casts a ray in the world defined by p0 and p1 and calls callback\n\t\twith the body, normal, collision id, user data and intersection distance\"\"\"\n self.raycastUserData = userdata\n self.raycastCallback = callback\n self.CppRayCast.__call__(p0[0], p0[1], p0[2], p1[0], p1[1], p1[2])\n\n def RayCastCallback(self, body, nx, ny, nz, collisionID, intersectParam):\n return self.raycastCallback(body, (nx, ny, nz), collisionID, self.\n raycastUserData, intersectParam)\n\n def MaterialSetCollisionCallback(self, id1, id2, userdata=None,\n begin_func=None, process_func=None, end_func=None):\n self.materialCallbacks[id1, id2] = _materialCallback(id1, id2,\n begin_func, process_func, end_func, userdata)\n self.RegisterMaterialCallbackBetween(id1, id2)\n\n def GetMaterialCallback(self, material, body1, body2):\n id1 = body1.MaterialGroupID()\n id2 = body2.MaterialGroupID()\n cb = self.materialCallbacks[id1, id2]\n return cb\n\n def MaterialBeginCollision(self, material, b1, b2):\n body1 = self.newtonBodyLookup[int(b1)]\n body2 = self.newtonBodyLookup[int(b2)]\n self.currentCallback = self.GetMaterialCallback(material, body1, body2)\n if self.currentCallback.beginCallback:\n self.currentCallback.beginCallback(material, 
body1, body2, self\n .currentCallback.userobject)\n\n def MaterialProcessCollision(self, material, contactHandle):\n if self.currentCallback.processCallback:\n self.currentCallback.processCallback(material, contactHandle,\n self.currentCallback.userobject)\n\n def MaterialEndCollision(self, material):\n if self.currentCallback.endCallback:\n self.currentCallback.endCallback(material, self.currentCallback\n .userobject)\n\n\nclass CollisionGeometry(pynewton.CollisionGeometry):\n\n def draw(self):\n if not GlPresent:\n raise 'OpenGL module could not be loaded'\n\n\nclass Sphere(pynewton.Sphere):\n\n def __init__(self, world, w, h, d, offset_matrix=None):\n pynewton.Sphere.__init__(self, world, w, h, d, offset_matrix)\n self.width = w\n self.height = h\n self.depth = d\n if GLPresent:\n self.quad = GLU.gluNewQuadric()\n\n def draw(self):\n if not GLPresent:\n raise 'OpenGL module could not be loaded'\n GL.glPushMatrix()\n GL.glScalef(self.width, self.height, self.depth)\n GL.glPolygonMode(GL.GL_FRONT_AND_BACK, GL.GL_LINE)\n GLU.gluSphere(self.quad, 1.0, 12, 12)\n GL.glPolygonMode(GL.GL_FRONT_AND_BACK, GL.GL_FILL)\n GL.glPopMatrix()\n\n\nclass Box(pynewton.Box):\n pass\n\n\nclass Cone(pynewton.Cone):\n pass\n\n\nclass Cylinder(pynewton.Cylinder):\n pass\n\n\nclass ChamferCylinder(pynewton.ChamferCylinder):\n pass\n\n\nclass ConvexHull(pynewton.ConvexHull):\n pass\n\n\nclass ConvexHullModifier(pynewton.ConvexHullModifier):\n pass\n\n\nclass NullCollider(pynewton.NullCollider):\n pass\n\n\nclass TreeCollision(pynewton.TreeCollision):\n pass\n\n\nclass TreeCollisionUserCallback(pynewton.TreeCollisionUserCallback):\n\n def __init__(self, func):\n self.callbackFunc = func\n\n def OnCallback(self, bodyWithTreeCollision, body, vertices,\n vertexstrideInBytes, indexCount, indexArray):\n if self.callbackFunc != None:\n self.callbackFunc(bodyWithTreeCollision, body, vertices,\n vertexstrideInBytes, indexCount, indexArray)\n pass\n\n\nclass Material(pynewton.Material):\n pass\n\n\nclass BallJoint(pynewton.BallJoint):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.BallJoint.__init__(self, *args, **kwargs)\n\n def OnCallback():\n if self.callback != None:\n self.callback()\n pass\n\n\nclass Hinge(pynewton.Hinge):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.Hinge.__init__(*args, **kwargs)\n\n def OnCallback(desc):\n if self.callback != None:\n return self.callback(desc)\n return 0\n\n\nclass Slider(pynewton.Slider):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.Slider.__init__(self, *args, **kwargs)\n\n\nclass Corkscrew(pynewton.Corkscrew):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n pynewton.Corkscrew.__init__(self, *args, **kwargs)\n\n def OnCallback(desc):\n if self.callback != None:\n return self.callback(desc)\n return 0\n\n\nclass UniversalJoint(pynewton.UniversalJoint):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.UniversalJoint.__init__(self, *args, **kwargs)\n\n def OnCallback(desc):\n if self.callback != None:\n return self.callback(desc)\n return 0\n\n\nclass UpVector(pynewton.UpVector):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.UpVector.__init__(self, *args, **kwargs)\n\n def OnCallback():\n if self.callback != None:\n self.callback()\n\n\nclass Tire(pynewton.Tire):\n pass\n\n\nclass Vehicle(pynewton.Vehicle):\n\n def __init__(self, *args, **kwargs):\n self.tires = []\n 
self.UpdateTireCallback = None\n return pynewton.Vehicle.__init__(self, *args, **kwargs)\n\n def AddTire(self, matrix, pin, mass, width, radius, suspensionShock,\n suspensionSpring, suspensionLength, userData, collisionID):\n tire = pynewton.Vehicle.AddTire(self, matrix, pin, mass, width,\n radius, suspensionShock, suspensionSpring, suspensionLength,\n userData, collisionID)\n tires.append(tire)\n return tire\n\n def RemoveTire(self, tire):\n del tires[tires.index(tire)]\n tire = pynewton.Vehicle.RemoveTire(self, tire)\n\n def OnCallback(self):\n if self.UpdateTireCallback != None:\n self.UpdateTireCallback(self)\n\n\nclass HeightField(pynewton.HeightField):\n pass\n",
"step-4": "<mask token>\n\n\ndef GetEulerAngle(matrix):\n return pynewton.GetEulerAngle(matrix)\n\n\n<mask token>\n\n\nclass Body(pynewton.Body):\n\n def __init__(self, world, cg):\n self.ApplyForceAndTorqueCallback = None\n self.TransformCallback = None\n self.AutoactiveCallback = None\n self.DestructorCallback = None\n self.TreeCollisionCallback = None\n pynewton.Body.__init__(self, world, cg)\n world.RegisterBody(self)\n self.py_cg = cg\n\n def SetApplyForceAndTorqueCallback(self, callback):\n self.ApplyForceAndTorqueCallback = callback\n\n def SetAutoactiveCallback(self, callback):\n self.AutoactiveCallback = callback\n\n def GetCollision(self):\n return self.py_cg\n\n def OnApplyForceAndTorque(self):\n if self.ApplyForceAndTorqueCallback != None:\n self.ApplyForceAndTorqueCallback(self)\n\n def OnAutoactive(self, state):\n if self.AutoactiveCallback != None:\n self.AutoactiveCallback(self, state)\n\n def OnTransform(self):\n matrix = self.GetMatrix()\n if self.TransformCallback != None:\n self.TransformCallback(self, matrix)\n\n def OnDestruct(self):\n if self.DestructorCallback != None:\n self.DestructorCallback(self, matrix)\n\n def OnTreeCollisionWith(self, body):\n if self.TreeCollisionCallback != None:\n self.TreeCollisionCallback(body)\n\n def Draw(self):\n m = self.GetMatrix()\n if not GLPresent:\n raise 'OpenGL module not loaded, cannot draw'\n GL.glPushMatrix()\n GL.glMultMatrixf(m)\n c = self.GetCollision()\n c.draw()\n GL.glPopMatrix()\n\n\nclass _materialCallback(object):\n\n def __init__(self, id1, id2, begin_function, process_function,\n end_function, userobject):\n self.id1 = id1\n self.id2 = id2\n self.beginCallback = begin_function\n self.processCallback = process_function\n self.endCallback = end_function\n self.userobject = userobject\n\n\nclass World(pynewton.World):\n\n def __init__(self):\n self.bodyList = []\n self.newtonBodyLookup = {}\n self.materialCallbacks = {}\n self.currentCallback = None\n self.raycastUserData = None\n self.raycastCallback = None\n pynewton.World.__init__(self)\n\n def RegisterBody(self, body):\n self.bodyList.append(body)\n self.newtonBodyLookup[body.IDKey()] = body\n\n def UnregisterBody(self, body):\n self.bodyList.remove(bodyList.index(body))\n del self.newtonBodyLookup[body.m_body]\n\n def NewtonBodyToBody(self, ptr):\n return self.newtonBodyLookup[int(ptr)]\n\n def ForEachBodyDo(self, function):\n for body in self.bodyList:\n function(body)\n\n def RayCast(self, p0, p1, callback, userdata):\n \"\"\"Casts a ray in the world defined by p0 and p1 and calls callback\n\t\twith the body, normal, collision id, user data and intersection distance\"\"\"\n self.raycastUserData = userdata\n self.raycastCallback = callback\n self.CppRayCast.__call__(p0[0], p0[1], p0[2], p1[0], p1[1], p1[2])\n\n def RayCastCallback(self, body, nx, ny, nz, collisionID, intersectParam):\n return self.raycastCallback(body, (nx, ny, nz), collisionID, self.\n raycastUserData, intersectParam)\n\n def MaterialSetCollisionCallback(self, id1, id2, userdata=None,\n begin_func=None, process_func=None, end_func=None):\n self.materialCallbacks[id1, id2] = _materialCallback(id1, id2,\n begin_func, process_func, end_func, userdata)\n self.RegisterMaterialCallbackBetween(id1, id2)\n\n def GetMaterialCallback(self, material, body1, body2):\n id1 = body1.MaterialGroupID()\n id2 = body2.MaterialGroupID()\n cb = self.materialCallbacks[id1, id2]\n return cb\n\n def MaterialBeginCollision(self, material, b1, b2):\n body1 = self.newtonBodyLookup[int(b1)]\n body2 = 
self.newtonBodyLookup[int(b2)]\n self.currentCallback = self.GetMaterialCallback(material, body1, body2)\n if self.currentCallback.beginCallback:\n self.currentCallback.beginCallback(material, body1, body2, self\n .currentCallback.userobject)\n\n def MaterialProcessCollision(self, material, contactHandle):\n if self.currentCallback.processCallback:\n self.currentCallback.processCallback(material, contactHandle,\n self.currentCallback.userobject)\n\n def MaterialEndCollision(self, material):\n if self.currentCallback.endCallback:\n self.currentCallback.endCallback(material, self.currentCallback\n .userobject)\n\n\nclass CollisionGeometry(pynewton.CollisionGeometry):\n\n def draw(self):\n if not GlPresent:\n raise 'OpenGL module could not be loaded'\n\n\nclass Sphere(pynewton.Sphere):\n\n def __init__(self, world, w, h, d, offset_matrix=None):\n pynewton.Sphere.__init__(self, world, w, h, d, offset_matrix)\n self.width = w\n self.height = h\n self.depth = d\n if GLPresent:\n self.quad = GLU.gluNewQuadric()\n\n def draw(self):\n if not GLPresent:\n raise 'OpenGL module could not be loaded'\n GL.glPushMatrix()\n GL.glScalef(self.width, self.height, self.depth)\n GL.glPolygonMode(GL.GL_FRONT_AND_BACK, GL.GL_LINE)\n GLU.gluSphere(self.quad, 1.0, 12, 12)\n GL.glPolygonMode(GL.GL_FRONT_AND_BACK, GL.GL_FILL)\n GL.glPopMatrix()\n\n\nclass Box(pynewton.Box):\n pass\n\n\nclass Cone(pynewton.Cone):\n pass\n\n\nclass Cylinder(pynewton.Cylinder):\n pass\n\n\nclass ChamferCylinder(pynewton.ChamferCylinder):\n pass\n\n\nclass ConvexHull(pynewton.ConvexHull):\n pass\n\n\nclass ConvexHullModifier(pynewton.ConvexHullModifier):\n pass\n\n\nclass NullCollider(pynewton.NullCollider):\n pass\n\n\nclass TreeCollision(pynewton.TreeCollision):\n pass\n\n\nclass TreeCollisionUserCallback(pynewton.TreeCollisionUserCallback):\n\n def __init__(self, func):\n self.callbackFunc = func\n\n def OnCallback(self, bodyWithTreeCollision, body, vertices,\n vertexstrideInBytes, indexCount, indexArray):\n if self.callbackFunc != None:\n self.callbackFunc(bodyWithTreeCollision, body, vertices,\n vertexstrideInBytes, indexCount, indexArray)\n pass\n\n\nclass Material(pynewton.Material):\n pass\n\n\nclass BallJoint(pynewton.BallJoint):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.BallJoint.__init__(self, *args, **kwargs)\n\n def OnCallback():\n if self.callback != None:\n self.callback()\n pass\n\n\nclass Hinge(pynewton.Hinge):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.Hinge.__init__(*args, **kwargs)\n\n def OnCallback(desc):\n if self.callback != None:\n return self.callback(desc)\n return 0\n\n\nclass Slider(pynewton.Slider):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.Slider.__init__(self, *args, **kwargs)\n\n\nclass Corkscrew(pynewton.Corkscrew):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n pynewton.Corkscrew.__init__(self, *args, **kwargs)\n\n def OnCallback(desc):\n if self.callback != None:\n return self.callback(desc)\n return 0\n\n\nclass UniversalJoint(pynewton.UniversalJoint):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.UniversalJoint.__init__(self, *args, **kwargs)\n\n def OnCallback(desc):\n if self.callback != None:\n return self.callback(desc)\n return 0\n\n\nclass UpVector(pynewton.UpVector):\n\n def __init__(self, *args, **kwargs):\n self.callback = None\n return pynewton.UpVector.__init__(self, *args, **kwargs)\n\n def OnCallback():\n if 
self.callback != None:\n self.callback()\n\n\nclass Tire(pynewton.Tire):\n pass\n\n\nclass Vehicle(pynewton.Vehicle):\n\n def __init__(self, *args, **kwargs):\n self.tires = []\n self.UpdateTireCallback = None\n return pynewton.Vehicle.__init__(self, *args, **kwargs)\n\n def AddTire(self, matrix, pin, mass, width, radius, suspensionShock,\n suspensionSpring, suspensionLength, userData, collisionID):\n tire = pynewton.Vehicle.AddTire(self, matrix, pin, mass, width,\n radius, suspensionShock, suspensionSpring, suspensionLength,\n userData, collisionID)\n tires.append(tire)\n return tire\n\n def RemoveTire(self, tire):\n del tires[tires.index(tire)]\n tire = pynewton.Vehicle.RemoveTire(self, tire)\n\n def OnCallback(self):\n if self.UpdateTireCallback != None:\n self.UpdateTireCallback(self)\n\n\nclass HeightField(pynewton.HeightField):\n pass\n",
"step-5": "# The purpose of this module is essentially to subclass the basic SWIG generated\n# pynewton classes and add a bit of functionality to them (mostly callback related\n# stuff). This could be done in the SWIG interface file, but it's easier to do it\n# here since it makes adding python-specific extensions to newton easier.\nimport pynewton\n\ntry:\n\timport OpenGL.GL as GL\n\timport OpenGL.GLU as GLU\n\tGLPresent = True\nexcept:\n\tGLPresent = False\n\n\ndef GetEulerAngle ( matrix ):\n\treturn pynewton.GetEulerAngle( matrix )\n\ndef SetEulerAngle ( angle ):\n\treturn pynewton.SetEulerAngle( angle )\n\n#extensions to body\ndef NullApplyForceAndTorqueCallback( body ) :\n\tpass\n\ndef NullTransformCallback( body, matrix ):\n\tpass\n\ndef NullAutoactiveCallback( body, state ):\n\tpass\n\ndef NullBodyDestructCallback( body ):\n\tpass\n\nclass Body( pynewton.Body ):\n\tdef __init__( self, world, cg ):\n\t\tself.ApplyForceAndTorqueCallback = None\n\t\tself.TransformCallback = None\n\t\tself.AutoactiveCallback = None\n\t\tself.DestructorCallback = None\n\t\tself.TreeCollisionCallback = None\n\n\t\tpynewton.Body.__init__(self, world, cg )\n\t\tworld.RegisterBody( self )\n\t\tself.py_cg = cg;\n\n\tdef SetApplyForceAndTorqueCallback( self, callback ):\n\t\tself.ApplyForceAndTorqueCallback = callback\n\n\tdef SetAutoactiveCallback( self, callback ):\n\t\tself.AutoactiveCallback = callback\n\n\tdef GetCollision( self ):\n\t\treturn self.py_cg\n\n\tdef OnApplyForceAndTorque(self):\n\t\tif self.ApplyForceAndTorqueCallback != None:\n\t\t\tself.ApplyForceAndTorqueCallback( self )\n\n\tdef OnAutoactive(self, state ):\n\t\tif self.AutoactiveCallback != None:\n\t\t\tself.AutoactiveCallback( self, state )\n\n\tdef OnTransform( self ):\n\t\tmatrix = self.GetMatrix()\n\t\tif self.TransformCallback != None:\n\t\t\tself.TransformCallback( self, matrix )\n\n\tdef OnDestruct( self ):\n\t\tif self.DestructorCallback != None:\n\t\t\tself.DestructorCallback( self, matrix )\n\n\tdef OnTreeCollisionWith( self, body ):\n\t\tif self.TreeCollisionCallback != None:\n\t\t\tself.TreeCollisionCallback(body)\n\n\tdef Draw( self ):\n\t\tm = self.GetMatrix()\n\t\tif not GLPresent: raise \"OpenGL module not loaded, cannot draw\"\n\t\tGL.glPushMatrix()\n\t\tGL.glMultMatrixf( m )\n\t\tc = self.GetCollision()\n\t\tc.draw()\n\t\tGL.glPopMatrix()\n\n\n\nclass _materialCallback( object ):\n\tdef __init__(self, id1, id2, begin_function, process_function, end_function, userobject):\n\t\tself.id1 = id1\n\t\tself.id2 = id2\n\t\tself.beginCallback = begin_function\n\t\tself.processCallback = process_function\n\t\tself.endCallback = end_function\n\t\tself.userobject = userobject\n\n\n#extensions to world\nclass World( pynewton.World ):\n\tdef __init__(self ):\n\t\tself.bodyList = []\n\t\tself.newtonBodyLookup = {}\n\t\tself.materialCallbacks = {}\n\t\tself.currentCallback = None\n\t\tself.raycastUserData = None\n\t\tself.raycastCallback = None\n\t\tpynewton.World.__init__(self)\n\n\tdef RegisterBody( self, body ):\n\t\tself.bodyList.append( body )\n\t\tself.newtonBodyLookup[body.IDKey()] = body\n\n\tdef UnregisterBody( self, body ):\n\t\tself.bodyList.remove( bodyList.index(body) )\n\t\tdel self.newtonBodyLookup[body.m_body]\n\n\tdef NewtonBodyToBody( self, ptr ):\n\t\treturn self.newtonBodyLookup[int(ptr)]\n\n\tdef ForEachBodyDo( self, function ):\n\t\tfor body in self.bodyList:\n\t\t\tfunction( body )\n\t\n\tdef RayCast( self, p0, p1, callback, userdata):\n\t\t\"\"\"Casts a ray in the world defined by p0 and p1 and calls 
callback\n\t\twith the body, normal, collision id, user data and intersection distance\"\"\"\n\t\tself.raycastUserData = userdata\n\t\tself.raycastCallback = callback\n\t\tself.CppRayCast.__call__(p0[0], p0[1], p0[2], p1[0], p1[1], p1[2])\n\t\n\tdef RayCastCallback( self, body, nx, ny, nz, collisionID, intersectParam ):\n\t\t#delegate this off to the user specified function\n\t\treturn self.raycastCallback( body, (nx, ny, nz), collisionID, self.raycastUserData, intersectParam )\n\n\tdef MaterialSetCollisionCallback( self, id1, id2, userdata=None, begin_func=None, process_func=None, end_func=None ):\n\t\tself.materialCallbacks[(id1,id2)] = _materialCallback( id1, id2, begin_func, process_func, end_func, userdata)\n\t\tself.RegisterMaterialCallbackBetween( id1, id2)\n\n\tdef GetMaterialCallback(self, material, body1, body2):\n\t\tid1 = body1.MaterialGroupID()\n\t\tid2 = body2.MaterialGroupID()\n\t\tcb = self.materialCallbacks[(id1,id2)]\n\t\treturn cb\n\n\n\tdef MaterialBeginCollision( self, material, b1, b2 ):\n\t\tbody1 = self.newtonBodyLookup[int(b1)]\n\t\tbody2 = self.newtonBodyLookup[int(b2)]\n\t\tself.currentCallback = self.GetMaterialCallback( material, body1, body2 )\n\t\tif self.currentCallback.beginCallback:\n\t\t\tself.currentCallback.beginCallback(material,\n\t\t\t\t\t\t\t\t\t\t\t body1,\n\t\t\t\t\t\t\t\t\t\t\t body2,\n\t\t\t\t\t\t\t\t\t\t\t self.currentCallback.userobject )\n\n\tdef MaterialProcessCollision( self, material, contactHandle ):\n\t\tif self.currentCallback.processCallback:\n\t\t\tself.currentCallback.processCallback(material,\n\t\t\t\t\t\t\t\t\t\t\t\t contactHandle,\n\t\t\t\t\t\t\t\t\t\t\t\t self.currentCallback.userobject )\n\n\tdef MaterialEndCollision( self, material ):\n\t\tif self.currentCallback.endCallback:\n\t\t\tself.currentCallback.endCallback( material,\n\t\t\t\t\t\t\t\t\t\t\t self.currentCallback.userobject )\n\n#collision extensions\nclass CollisionGeometry( pynewton.CollisionGeometry ):\n\tdef draw(self):\n\t\tif not GlPresent: raise \"OpenGL module could not be loaded\"\n\nclass Sphere ( pynewton.Sphere ):\n\tdef __init__(self, world, w, h, d, offset_matrix=None):\n\t\tpynewton.Sphere.__init__( self, world, w, h, d, offset_matrix )\n\t\tself.width = w\n\t\tself.height = h\n\t\tself.depth = d\n\t\tif GLPresent:\n\t\t\tself.quad = GLU.gluNewQuadric()\n\n\tdef draw(self):\n\t\tif not GLPresent: raise \"OpenGL module could not be loaded\"\n\t\tGL.glPushMatrix()\n\t\tGL.glScalef( self.width, self.height, self.depth )\n\t\tGL.glPolygonMode( GL.GL_FRONT_AND_BACK, GL.GL_LINE )\n\t\tGLU.gluSphere( self.quad, 1.0, 12, 12 )\n\t\tGL.glPolygonMode( GL.GL_FRONT_AND_BACK, GL.GL_FILL )\n\t\tGL.glPopMatrix()\n\n\n\nclass Box ( pynewton.Box ):\n\tpass\n\nclass Cone ( pynewton.Cone ):\n\tpass\n\nclass Cylinder (pynewton.Cylinder):\n\tpass\n\nclass ChamferCylinder (pynewton.ChamferCylinder):\n\tpass\n\nclass ConvexHull (pynewton.ConvexHull):\n\tpass\n\nclass ConvexHullModifier (pynewton.ConvexHullModifier):\n\tpass\n\nclass NullCollider (pynewton.NullCollider ):\n\tpass\n\nclass TreeCollision (pynewton.TreeCollision):\n\tpass\n\nclass TreeCollisionUserCallback ( pynewton.TreeCollisionUserCallback ):\n\tdef __init__( self, func ):\n\t\tself.callbackFunc = func\n\n\tdef OnCallback (self, bodyWithTreeCollision, body, vertices, vertexstrideInBytes, indexCount, indexArray):\n\t\tif self.callbackFunc != None:\n\t\t\tself.callbackFunc( bodyWithTreeCollision, body, vertices, vertexstrideInBytes, indexCount, indexArray)\n\t\tpass\n\n#material extensions\nclass Material ( 
pynewton.Material ):\n\tpass\n\n\n#joint extensions\nclass BallJoint ( pynewton.BallJoint ):\n\tdef __init__(self, *args, **kwargs ):\n\t\tself.callback = None\n\t\treturn pynewton.BallJoint.__init__(self, *args, **kwargs )\n\n\tdef OnCallback():\n\t\tif self.callback != None:\n\t\t\tself.callback( )\n\t\tpass\n\nclass Hinge ( pynewton.Hinge ):\n\tdef __init__(self, *args, **kwargs ):\n\t\tself.callback = None\n\t\treturn pynewton.Hinge.__init__( *args, **kwargs )\n\n\tdef OnCallback( desc ):\n\t\tif self.callback != None:\n\t\t\treturn self.callback( desc )\n\t\treturn 0\n\nclass Slider ( pynewton.Slider ):\n\tdef __init__( self, *args, **kwargs ):\n\t\tself.callback = None\n\t\treturn pynewton.Slider.__init__( self, *args, **kwargs )\n\n#\tdef OnCallback( desc ):\n#\t\tif self.callback != None:\n#\t\t\treturn self.callback( desc )\n#\t\treturn 0\n\nclass Corkscrew ( pynewton.Corkscrew ):\n\tdef __init__(self, *args, **kwargs ):\n\t\tself.callback = None\n\t\tpynewton.Corkscrew.__init__(self, *args, **kwargs )\n\n\tdef OnCallback( desc ):\n\t\tif self.callback != None:\n\t\t\treturn self.callback( desc )\n\t\treturn 0\n\nclass UniversalJoint ( pynewton.UniversalJoint ):\n\tdef __init__(self, *args, **kwargs ):\n\t\tself.callback = None\n\t\treturn pynewton.UniversalJoint.__init__( self, *args, **kwargs )\n\n\tdef OnCallback( desc ):\n\t\tif self.callback != None:\n\t\t\treturn self.callback( desc )\n\t\treturn 0\n\nclass UpVector ( pynewton.UpVector ):\n\tdef __init__(self, *args, **kwargs ):\n\t\tself.callback = None\n\t\treturn pynewton.UpVector.__init__(self, *args, **kwargs )\n\n\tdef OnCallback():\n\t\tif self.callback != None:\n\t\t\tself.callback( )\n\n\nclass Tire ( pynewton.Tire ):\n\tpass\n\nclass Vehicle ( pynewton.Vehicle ):\n\n\tdef __init__(self, *args, **kwargs ):\n\t\tself.tires = []\n\t\tself.UpdateTireCallback = None\n\t\treturn pynewton.Vehicle.__init__(self, *args, **kwargs )\n\n\tdef AddTire ( self, matrix, pin, mass, width, radius, suspensionShock, suspensionSpring, suspensionLength, userData, collisionID):\n\t\ttire = pynewton.Vehicle.AddTire(self, matrix, pin, mass, width, radius, suspensionShock, suspensionSpring, suspensionLength, userData, collisionID)\n\t\ttires.append( tire )\n\t\treturn tire\n\n\tdef RemoveTire( self, tire ):\n\t\tdel tires[tires.index(tire)]\n\t\ttire = pynewton.Vehicle.RemoveTire( self, tire )\n\n\tdef OnCallback( self):\n\t\tif self.UpdateTireCallback != None:\n\t\t\tself.UpdateTireCallback(self)\n\n#Heightmap\nclass HeightField ( pynewton.HeightField ):\n\tpass\n",
"step-ids": [
33,
52,
66,
68,
76
]
}
|
[
33,
52,
66,
68,
76
] |
data = {
'title': 'Dva leteca (gostimo na 2)',
'song': [
'x - - - - - x - - - - -',
'- x - - - x - - - x - -',
'- - x - x - - - x - x -',
'- - - x - - - x - - - x'
],
'bpm': 120,
'timeSignature': '4/4'
}
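# In the grid above each row is presumably one bell's part: 'x' marks a strike
# and '-' a rest, twelve subdivisions per row at 120 bpm in 4/4 time.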
from prog import BellMusicCreator
exportFile = __file__.replace('.py', '') + '.xml'
# BellMusicCreator().show(data)
BellMusicCreator().write(data, fp=exportFile)
|
normal
|
{
"blob_id": "957fb1bd34d13b86334da47ac9446e30afd01678",
"index": 5477,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nBellMusicCreator().write(data, fp=exportFile)\n",
"step-3": "data = {'title': 'Dva leteca (gostimo na 2)', 'song': [\n 'x - - - - - x - - - - -', '- x - - - x - - - x - -',\n '- - x - x - - - x - x -', '- - - x - - - x - - - x'], 'bpm': 120,\n 'timeSignature': '4/4'}\n<mask token>\nexportFile = __file__.replace('.py', '') + '.xml'\nBellMusicCreator().write(data, fp=exportFile)\n",
"step-4": "data = {'title': 'Dva leteca (gostimo na 2)', 'song': [\n 'x - - - - - x - - - - -', '- x - - - x - - - x - -',\n '- - x - x - - - x - x -', '- - - x - - - x - - - x'], 'bpm': 120,\n 'timeSignature': '4/4'}\nfrom prog import BellMusicCreator\nexportFile = __file__.replace('.py', '') + '.xml'\nBellMusicCreator().write(data, fp=exportFile)\n",
"step-5": "data = {\n 'title': 'Dva leteca (gostimo na 2)',\n 'song': [\n 'x - - - - - x - - - - -',\n '- x - - - x - - - x - -',\n '- - x - x - - - x - x -',\n '- - - x - - - x - - - x'\n ],\n 'bpm': 120,\n 'timeSignature': '4/4'\n}\n\nfrom prog import BellMusicCreator\n\nexportFile = __file__.replace('.py', '') + '.xml'\n# BellMusicCreator().show(data)\nBellMusicCreator().write(data, fp=exportFile)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/bin/python3
# Implement a stack with push, pop, inc(e, k) operations
# inc (e,k) - Add k to each of bottom e elements
import sys
class Stack(object):
def __init__(self):
self.arr = []
def push(self, val):
self.arr.append(val)
def pop(self):
if len(self.arr):
return self.arr.pop()
def inc(self, e, k):
count = min(len(self.arr), e)
for i in range(count):
self.arr[i] += k
def peek(self):
if len(self.arr):
return self.arr[-1]
else:
return 'EMPTY'
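
# Note: inc() costs O(e) per call. A lazy variant that keeps one pending
# offset per prefix and resolves it on pop() would make every operation O(1);
# the direct loop above is kept for clarity.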
def superStack(operations):
s = Stack()
for o in operations:
op = o.split(' ')
if op[0] == 'push':
s.push(int(op[1]))
print(s.peek())
elif op[0] == 'pop':
s.pop()
print(s.peek())
elif op[0] == 'inc':
s.inc(int(op[1]), int(op[2]))
print(s.peek())
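
# Example: for operations ["push 4", "push 5", "inc 2 1", "pop"] the printed
# tops are 4, 5, 6 (both elements incremented by 1) and finally 5.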
if __name__ == "__main__":
operations_cnt = 0
operations_cnt = int(input())
operations_i = 0
operations = []
while operations_i < operations_cnt:
try:
operations_item = str(input())
except:
operations_item = None
operations.append(operations_item)
operations_i += 1
    superStack(operations)
|
normal
|
{
"blob_id": "5ed439a2a7cfb9c941c40ea0c5eba2851a0f2855",
"index": 24,
"step-1": "<mask token>\n\n\nclass Stack(object):\n\n def __init__(self):\n self.arr = []\n\n def push(self, val):\n self.arr.append(val)\n\n def pop(self):\n if len(self.arr):\n return self.arr.pop()\n\n def inc(self, e, k):\n count = min(len(self.arr), e)\n for i in range(count):\n self.arr[i] += k\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Stack(object):\n\n def __init__(self):\n self.arr = []\n\n def push(self, val):\n self.arr.append(val)\n\n def pop(self):\n if len(self.arr):\n return self.arr.pop()\n\n def inc(self, e, k):\n count = min(len(self.arr), e)\n for i in range(count):\n self.arr[i] += k\n\n def peek(self):\n if len(self.arr):\n return self.arr[-1]\n else:\n return 'EMPTY'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Stack(object):\n\n def __init__(self):\n self.arr = []\n\n def push(self, val):\n self.arr.append(val)\n\n def pop(self):\n if len(self.arr):\n return self.arr.pop()\n\n def inc(self, e, k):\n count = min(len(self.arr), e)\n for i in range(count):\n self.arr[i] += k\n\n def peek(self):\n if len(self.arr):\n return self.arr[-1]\n else:\n return 'EMPTY'\n\n\ndef superStack(operations):\n s = Stack()\n for o in operations:\n op = o.split(' ')\n if op[0] == 'push':\n s.push(int(op[1]))\n print(s.peek())\n elif op[0] == 'pop':\n s.pop()\n print(s.peek())\n elif op[0] == 'inc':\n s.inc(int(op[1]), int(op[2]))\n print(s.peek())\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Stack(object):\n\n def __init__(self):\n self.arr = []\n\n def push(self, val):\n self.arr.append(val)\n\n def pop(self):\n if len(self.arr):\n return self.arr.pop()\n\n def inc(self, e, k):\n count = min(len(self.arr), e)\n for i in range(count):\n self.arr[i] += k\n\n def peek(self):\n if len(self.arr):\n return self.arr[-1]\n else:\n return 'EMPTY'\n\n\ndef superStack(operations):\n s = Stack()\n for o in operations:\n op = o.split(' ')\n if op[0] == 'push':\n s.push(int(op[1]))\n print(s.peek())\n elif op[0] == 'pop':\n s.pop()\n print(s.peek())\n elif op[0] == 'inc':\n s.inc(int(op[1]), int(op[2]))\n print(s.peek())\n\n\nif __name__ == '__main__':\n operations_cnt = 0\n operations_cnt = int(input())\n operations_i = 0\n operations = []\n while operations_i < operations_cnt:\n try:\n operations_item = str(input())\n except:\n operations_item = None\n operations.append(operations_item)\n operations_i += 1\n res = superStack(operations)\n",
"step-5": "#!/bin/python3\n\n# Implement a stack with push, pop, inc(e, k) operations\n# inc (e,k) - Add k to each of bottom e elements\nimport sys\n\nclass Stack(object):\n def __init__(self):\n self.arr = []\n\n def push(self, val):\n self.arr.append(val)\n\n def pop(self):\n if len(self.arr):\n return self.arr.pop()\n\n def inc(self, e, k):\n count = min(len(self.arr), e)\n for i in range(count):\n self.arr[i] += k\n\n def peek(self):\n if len(self.arr):\n return self.arr[-1]\n else:\n return 'EMPTY'\n\ndef superStack(operations):\n s = Stack()\n for o in operations:\n op = o.split(' ')\n if op[0] == 'push':\n s.push(int(op[1]))\n print(s.peek())\n elif op[0] == 'pop':\n s.pop()\n print(s.peek())\n elif op[0] == 'inc':\n s.inc(int(op[1]), int(op[2]))\n print(s.peek())\n \n\nif __name__ == \"__main__\":\n operations_cnt = 0\n operations_cnt = int(input())\n operations_i = 0\n operations = []\n while operations_i < operations_cnt:\n try:\n operations_item = str(input())\n except:\n operations_item = None\n operations.append(operations_item)\n operations_i += 1\n\n\n res = superStack(operations);\n \n\n",
"step-ids": [
5,
6,
7,
8,
10
]
}
|
[
5,
6,
7,
8,
10
] |
from bs4 import BeautifulSoup
import os, re, json
import pandas as pd
from urllib import request
from openpyxl import load_workbook
from bilibili.append_xlsx import append_df_to_excel
# collect every avid, title and url on the page
def parse_html(content):
    # parse the HTML document with BeautifulSoup
    soup = BeautifulSoup(content, 'html.parser')  # explicit parser avoids bs4's GuessedAtParserWarning
    # find the target anchor tags
tag_list = soup.find_all("a", attrs={'title': True, 'href': True, "class": "title"})
# tag_list = soup.find_all("span", attrs={'class': 'type avid'})
for tag in tag_list:
# print(tag.get("title"), tag.get("href"))
        # get the tag content; the href is sliced to drop its leading '//'
title = tag.get("title")
href = tag.get("href")[2:]
avid = re.search("av([0-9]*)", href).group(0)
base_dict[avid] = [avid, title, href]
return base_dict.keys()
# list the file paths under a directory
def read_path(path):
path_set = set()
dir_path = os.listdir(path)
for item in dir_path:
child = os.path.join('%s/%s' % (path, item))
path_set.add(child)
return path_set
# keep only the HTML files
def filter(path_set):
filterable = []
    pattern = re.compile(r'.*\.(html|htm)$', re.I)  # the original class [html|htm]+ matched stray letters, not real extensions
for path in path_set:
m = pattern.match(path)
if m:
filterable.append(m.group(0).strip())
return filterable
# read the content of a file
def read_html(path):
    # a context manager closes the handle; encoding the path to bytes (as the
    # original did) is unnecessary when opening with an explicit text encoding
    with open(path.strip(), 'r', encoding='utf-8') as f:
        return f.read()
# write the collected data to an Excel workbook (xlsx)
def storeCSV(filename=r'/Users/robbin/Desktop/bilibili/bilibili.xlsx'):
df_base = pd.DataFrame.from_dict(base_dict, orient="index")
df_base.columns = ['avid', 'title', 'href']
df_tags = pd.DataFrame.from_dict(tags_dict, orient="index")
df_tags.columns = ['tags']
df_info = pd.DataFrame.from_dict(info_dict, orient='index')
df_info.columns = ['like', 'his_rank', 'view', 'now_rank', 'coin', 'reply', 'aid', 'no_reprint', 'favorite', 'danmaku', 'copyright', 'share']
df = df_base.join([df_tags, df_info])
append_df_to_excel(filename, df, index=False)
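
# append_df_to_excel (from bilibili.append_xlsx) is presumably the usual
# openpyxl-based helper that appends df below any existing rows, so repeated
# storeCSV() calls accumulate one sheet across all parsed pages.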
# query the API with an avid to fetch the video's statistics
def query_info(avid):
stat_url = "https://api.bilibili.com/x/web-interface/archive/stat?aid="
id = avid[2:]
url = stat_url + id
response = request.urlopen(url)
return response.read().decode("utf-8")
# query the API with an avid to fetch the video's tags
def query_tags(avid):
stat_url = "https://api.bilibili.com/x/tag/archive/tags?aid="
id = avid[2:]
url = stat_url + id
response = request.urlopen(url)
return response.read().decode("utf-8")
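
# Both endpoints return JSON with a top-level "data" field; the main block
# below reads only that field and skips entries where the API sends no data.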
if __name__ == '__main__':
print("now read folder...")
path_set = read_path("/Users/robbin/Desktop/bilibili")
print("parse file path finshed...")
filterable = filter(path_set)
for path in filterable:
base_dict = {}
tags_dict = {}
info_dict = {}
print("now parse the file:", path)
content = read_html(path)
avid_list = parse_html(content)
for avid in avid_list:
print("Proccessing:", avid)
tags_json = query_tags(avid)
tags_obj = json.loads(tags_json)
tags_row_list = tags_obj.get("data")
if tags_row_list:
# print(data)
tag_list = []
for item in tags_row_list:
tag_name = item.get("tag_name")
tag_list.append(tag_name)
tag = ",".join(tag_list)
tags_dict[avid] = tag
info_json = query_info(avid)
info_obj = json.loads(info_json)
info_row_dict = info_obj.get("data")
if info_row_dict:
info_dict[avid] = list(info_row_dict.values())
print("Start to writing ", path, " to xls")
storeCSV()
print("End of writing ", path, " to xls")
|
normal
|
{
"blob_id": "a63718ba5f23d6f180bdafcb12b337465d6fa052",
"index": 4734,
"step-1": "<mask token>\n\n\ndef read_path(path):\n path_set = set()\n dir_path = os.listdir(path)\n for item in dir_path:\n child = os.path.join('%s/%s' % (path, item))\n path_set.add(child)\n return path_set\n\n\ndef filter(path_set):\n filterable = []\n pattern = re.compile('.*\\\\.[html|htm]+', re.I)\n for path in path_set:\n m = pattern.match(path)\n if m:\n filterable.append(m.group(0).strip())\n return filterable\n\n\ndef read_html(path):\n file = open(path.encode('utf-8').strip(), 'r', encoding='utf-8')\n content = file.read()\n return content\n\n\n<mask token>\n\n\ndef query_info(avid):\n stat_url = 'https://api.bilibili.com/x/web-interface/archive/stat?aid='\n id = avid[2:]\n url = stat_url + id\n response = request.urlopen(url)\n return response.read().decode('utf-8')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef parse_html(content):\n arr = []\n soup = BeautifulSoup(content)\n tag_list = soup.find_all('a', attrs={'title': True, 'href': True,\n 'class': 'title'})\n for tag in tag_list:\n title = tag.get('title')\n href = tag.get('href')[2:]\n avid = re.search('av([0-9]*)', href).group(0)\n base_dict[avid] = [avid, title, href]\n return base_dict.keys()\n\n\ndef read_path(path):\n path_set = set()\n dir_path = os.listdir(path)\n for item in dir_path:\n child = os.path.join('%s/%s' % (path, item))\n path_set.add(child)\n return path_set\n\n\ndef filter(path_set):\n filterable = []\n pattern = re.compile('.*\\\\.[html|htm]+', re.I)\n for path in path_set:\n m = pattern.match(path)\n if m:\n filterable.append(m.group(0).strip())\n return filterable\n\n\ndef read_html(path):\n file = open(path.encode('utf-8').strip(), 'r', encoding='utf-8')\n content = file.read()\n return content\n\n\n<mask token>\n\n\ndef query_info(avid):\n stat_url = 'https://api.bilibili.com/x/web-interface/archive/stat?aid='\n id = avid[2:]\n url = stat_url + id\n response = request.urlopen(url)\n return response.read().decode('utf-8')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef parse_html(content):\n arr = []\n soup = BeautifulSoup(content)\n tag_list = soup.find_all('a', attrs={'title': True, 'href': True,\n 'class': 'title'})\n for tag in tag_list:\n title = tag.get('title')\n href = tag.get('href')[2:]\n avid = re.search('av([0-9]*)', href).group(0)\n base_dict[avid] = [avid, title, href]\n return base_dict.keys()\n\n\ndef read_path(path):\n path_set = set()\n dir_path = os.listdir(path)\n for item in dir_path:\n child = os.path.join('%s/%s' % (path, item))\n path_set.add(child)\n return path_set\n\n\ndef filter(path_set):\n filterable = []\n pattern = re.compile('.*\\\\.[html|htm]+', re.I)\n for path in path_set:\n m = pattern.match(path)\n if m:\n filterable.append(m.group(0).strip())\n return filterable\n\n\ndef read_html(path):\n file = open(path.encode('utf-8').strip(), 'r', encoding='utf-8')\n content = file.read()\n return content\n\n\ndef storeCSV(filename='/Users/robbin/Desktop/bilibili/bilibili.xlsx'):\n df_base = pd.DataFrame.from_dict(base_dict, orient='index')\n df_base.columns = ['avid', 'title', 'href']\n df_tags = pd.DataFrame.from_dict(tags_dict, orient='index')\n df_tags.columns = ['tags']\n df_info = pd.DataFrame.from_dict(info_dict, orient='index')\n df_info.columns = ['like', 'his_rank', 'view', 'now_rank', 'coin',\n 'reply', 'aid', 'no_reprint', 'favorite', 'danmaku', 'copyright',\n 'share']\n df = df_base.join([df_tags, df_info])\n append_df_to_excel(filename, df, index=False)\n\n\ndef query_info(avid):\n stat_url = 'https://api.bilibili.com/x/web-interface/archive/stat?aid='\n id = avid[2:]\n url = stat_url + id\n response = request.urlopen(url)\n return response.read().decode('utf-8')\n\n\ndef query_tags(avid):\n stat_url = 'https://api.bilibili.com/x/tag/archive/tags?aid='\n id = avid[2:]\n url = stat_url + id\n response = request.urlopen(url)\n return response.read().decode('utf-8')\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef parse_html(content):\n arr = []\n soup = BeautifulSoup(content)\n tag_list = soup.find_all('a', attrs={'title': True, 'href': True,\n 'class': 'title'})\n for tag in tag_list:\n title = tag.get('title')\n href = tag.get('href')[2:]\n avid = re.search('av([0-9]*)', href).group(0)\n base_dict[avid] = [avid, title, href]\n return base_dict.keys()\n\n\ndef read_path(path):\n path_set = set()\n dir_path = os.listdir(path)\n for item in dir_path:\n child = os.path.join('%s/%s' % (path, item))\n path_set.add(child)\n return path_set\n\n\ndef filter(path_set):\n filterable = []\n pattern = re.compile('.*\\\\.[html|htm]+', re.I)\n for path in path_set:\n m = pattern.match(path)\n if m:\n filterable.append(m.group(0).strip())\n return filterable\n\n\ndef read_html(path):\n file = open(path.encode('utf-8').strip(), 'r', encoding='utf-8')\n content = file.read()\n return content\n\n\ndef storeCSV(filename='/Users/robbin/Desktop/bilibili/bilibili.xlsx'):\n df_base = pd.DataFrame.from_dict(base_dict, orient='index')\n df_base.columns = ['avid', 'title', 'href']\n df_tags = pd.DataFrame.from_dict(tags_dict, orient='index')\n df_tags.columns = ['tags']\n df_info = pd.DataFrame.from_dict(info_dict, orient='index')\n df_info.columns = ['like', 'his_rank', 'view', 'now_rank', 'coin',\n 'reply', 'aid', 'no_reprint', 'favorite', 'danmaku', 'copyright',\n 'share']\n df = df_base.join([df_tags, df_info])\n append_df_to_excel(filename, df, index=False)\n\n\ndef query_info(avid):\n stat_url = 'https://api.bilibili.com/x/web-interface/archive/stat?aid='\n id = avid[2:]\n url = stat_url + id\n response = request.urlopen(url)\n return response.read().decode('utf-8')\n\n\ndef query_tags(avid):\n stat_url = 'https://api.bilibili.com/x/tag/archive/tags?aid='\n id = avid[2:]\n url = stat_url + id\n response = request.urlopen(url)\n return response.read().decode('utf-8')\n\n\nif __name__ == '__main__':\n print('now read folder...')\n path_set = read_path('/Users/robbin/Desktop/bilibili')\n print('parse file path finshed...')\n filterable = filter(path_set)\n for path in filterable:\n base_dict = {}\n tags_dict = {}\n info_dict = {}\n print('now parse the file:', path)\n content = read_html(path)\n avid_list = parse_html(content)\n for avid in avid_list:\n print('Proccessing:', avid)\n tags_json = query_tags(avid)\n tags_obj = json.loads(tags_json)\n tags_row_list = tags_obj.get('data')\n if tags_row_list:\n tag_list = []\n for item in tags_row_list:\n tag_name = item.get('tag_name')\n tag_list.append(tag_name)\n tag = ','.join(tag_list)\n tags_dict[avid] = tag\n info_json = query_info(avid)\n info_obj = json.loads(info_json)\n info_row_dict = info_obj.get('data')\n if info_row_dict:\n info_dict[avid] = list(info_row_dict.values())\n print('Start to writing ', path, ' to xls')\n storeCSV()\n print('End of writing ', path, ' to xls')\n",
"step-5": "from bs4 import BeautifulSoup\nimport os, re, json\nimport pandas as pd\nfrom urllib import request\nfrom openpyxl import load_workbook\nfrom bilibili.append_xlsx import append_df_to_excel\n\n\n# 获取页面的所有的avid, title, url\ndef parse_html(content):\n arr = []\n # 使用beautifulsoup解析html文档\n soup = BeautifulSoup(content)\n # 获取指定标签\n tag_list = soup.find_all(\"a\", attrs={'title': True, 'href': True, \"class\": \"title\"})\n # tag_list = soup.find_all(\"span\", attrs={'class': 'type avid'})\n for tag in tag_list:\n # print(tag.get(\"title\"), tag.get(\"href\"))\n # 获取标签内容,并去除首尾空格\n title = tag.get(\"title\")\n href = tag.get(\"href\")[2:]\n avid = re.search(\"av([0-9]*)\", href).group(0)\n base_dict[avid] = [avid, title, href]\n return base_dict.keys()\n\n# 读取路径文件名\ndef read_path(path):\n path_set = set()\n dir_path = os.listdir(path)\n for item in dir_path:\n child = os.path.join('%s/%s' % (path, item))\n path_set.add(child)\n return path_set\n\n# 提取html文件\ndef filter(path_set):\n filterable = []\n pattern = re.compile(r'.*\\.[html|htm]+', re.I)\n for path in path_set:\n m = pattern.match(path)\n if m:\n filterable.append(m.group(0).strip())\n return filterable\n\n# 读取文件内容\ndef read_html(path):\n file = open(path.encode('utf-8').strip(), 'r', encoding=\"utf-8\")\n content = file.read()\n return content\n\n# 写入csv\ndef storeCSV(filename=r'/Users/robbin/Desktop/bilibili/bilibili.xlsx'):\n df_base = pd.DataFrame.from_dict(base_dict, orient=\"index\")\n df_base.columns = ['avid', 'title', 'href']\n df_tags = pd.DataFrame.from_dict(tags_dict, orient=\"index\")\n df_tags.columns = ['tags']\n df_info = pd.DataFrame.from_dict(info_dict, orient='index')\n df_info.columns = ['like', 'his_rank', 'view', 'now_rank', 'coin', 'reply', 'aid', 'no_reprint', 'favorite', 'danmaku', 'copyright', 'share']\n df = df_base.join([df_tags, df_info])\n append_df_to_excel(filename, df, index=False)\n\n# 根据avid请求api获得视频信息\ndef query_info(avid):\n stat_url = \"https://api.bilibili.com/x/web-interface/archive/stat?aid=\"\n id = avid[2:]\n url = stat_url + id\n response = request.urlopen(url)\n return response.read().decode(\"utf-8\")\n\n# 根据avid请求api获得视频标签\ndef query_tags(avid):\n stat_url = \"https://api.bilibili.com/x/tag/archive/tags?aid=\"\n id = avid[2:]\n url = stat_url + id\n response = request.urlopen(url)\n return response.read().decode(\"utf-8\")\n\nif __name__ == '__main__':\n print(\"now read folder...\")\n path_set = read_path(\"/Users/robbin/Desktop/bilibili\")\n print(\"parse file path finshed...\")\n filterable = filter(path_set)\n\n for path in filterable:\n base_dict = {}\n tags_dict = {}\n info_dict = {}\n print(\"now parse the file:\", path)\n content = read_html(path)\n avid_list = parse_html(content)\n\n for avid in avid_list:\n print(\"Proccessing:\", avid)\n tags_json = query_tags(avid)\n tags_obj = json.loads(tags_json)\n tags_row_list = tags_obj.get(\"data\")\n if tags_row_list:\n # print(data)\n tag_list = []\n for item in tags_row_list:\n tag_name = item.get(\"tag_name\")\n tag_list.append(tag_name)\n tag = \",\".join(tag_list)\n tags_dict[avid] = tag\n\n info_json = query_info(avid)\n info_obj = json.loads(info_json)\n info_row_dict = info_obj.get(\"data\")\n if info_row_dict:\n info_dict[avid] = list(info_row_dict.values())\n print(\"Start to writing \", path, \" to xls\")\n storeCSV()\n print(\"End of writing \", path, \" to xls\")\n",
"step-ids": [
4,
5,
7,
8,
10
]
}
|
[
4,
5,
7,
8,
10
] |
"""
This class runs the RL Training
"""
from __future__ import division
import logging
import numpy as np
from data.data_provider import DataProvider
from episode.episode import Episode
from tracker import TrainingTracker
from tqdm import tqdm
class RLTrainer(object):
"""
Creates RL training object
"""
def __init__(self, config_, grid_search=False):
"""
Constructor
:param config_:
:param grid_search:
:return:
"""
self.config = config_
self.grid_search = grid_search
self.logger = logging.getLogger("cuda_logger")
self.expt_name = self.config['RL_parameters']['experiment']
self.objective = self.config['RL_parameters']['objective']
self.city_states_filename = self.config['RL_parameters']['city_states_filename']
# Create training tracker
self.training_tracker = TrainingTracker(self.config)
def run(self):
"""
Creates and runs training episode
:param:
:return:
"""
data_provider = DataProvider(self.config)
hex_attr_df = data_provider.read_hex_bin_attributes()
hex_distance_df = data_provider.read_hex_bin_distances()
city_states = data_provider.read_city_states(self.city_states_filename)
neighborhood = data_provider.read_neighborhood_data()
popular_bins = data_provider.read_popular_hex_bins()
num_episodes = self.config['RL_parameters']['num_episodes']
ind_episodes = self.config['RL_parameters']['ind_episodes']
exp_decay_multiplier = self.config['RL_parameters']['exp_decay_multiplier']
q_ind = None
r_table = None
xi_matrix = None
        best_episode = None
        best_model = {}
        current_best = -1000000  # best objective seen so far; must persist across episodes
progress_bar = tqdm(xrange(num_episodes))
for episode_id in progress_bar:
progress_bar.set_description("Episode: {}".format(episode_id))
# Create episode
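            # exploration decays exponentially with episode_id:
            # factor = e^(-episode_id * exp_decay_multiplier / ind_episodes),
            # starting at 1 and approaching 0 as training progresses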
ind_exploration_factor = np.e ** (-1 * episode_id * exp_decay_multiplier / ind_episodes)
episode = Episode(self.config,
episode_id,
ind_exploration_factor,
hex_attr_df,
hex_distance_df,
city_states,
neighborhood,
popular_bins,
q_ind,
r_table,
xi_matrix)
# Run episode
tables = episode.run()
q_ind = tables['q_ind']
r_table = tables['r_table']
xi_matrix = tables['xi_matrix']
episode_tracker = tables['episode_tracker']
            # Uncomment for logging when running a batch job; keep it commented during
            # experiments, since per-episode logging produces unmanageably large output.
# self.logger.info("""
# Expt: {} Episode: {} Earnings: {}
# Pax rides: {} Relocation rides: {} Unmet demand: {}
# """.format(self.expt_name, episode_id,
# episode_tracker.gross_earnings,
# episode_tracker.successful_waits,
# episode_tracker.relocation_rides,
# episode_tracker.unmet_demand))
# self.logger.info("----------------------------------")
self.training_tracker.update_RL_tracker(
episode_id, episode_tracker.gross_earnings,
episode_tracker.successful_waits, episode_tracker.unsuccessful_waits,
episode_tracker.unmet_demand, episode_tracker.relocation_rides,
episode_tracker.DET, episode_tracker.DPRT, episode_tracker.DWT,
episode_tracker.DRT, episode_tracker.DCT)
# Keep track of the best episode
if self.objective == 'revenue':
if episode_tracker.gross_earnings >= current_best:
best_episode = episode_tracker
current_best = best_episode.gross_earnings
else: # self.objective == 'pickups':
if episode_tracker.successful_waits >= current_best:
best_episode = episode_tracker
current_best = episode_tracker.successful_waits
# Keep track of the best model
best_model['ind_exploration_factor'] = ind_exploration_factor
best_model['config'] = self.config
best_model['q_ind'] = q_ind
best_model['r_table'] = r_table
best_model['xi_matrix'] = xi_matrix
best_model['training_tracker'] = self.training_tracker
# After finishing training
self.logger.info("Expt: {} Earnings: {} Met Demand: {} Unmet Demand: {}".format(self.expt_name,
best_episode.gross_earnings,
best_episode.successful_waits,
best_episode.unmet_demand))
return best_episode, best_model, self.training_tracker
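

# A minimal driver sketch (hypothetical config values; only the
# 'RL_parameters' keys actually read above are shown):
#
#   config = {'RL_parameters': {'experiment': 'expt1', 'objective': 'revenue',
#                               'city_states_filename': 'city_states.dill',
#                               'num_episodes': 100, 'ind_episodes': 50,
#                               'exp_decay_multiplier': 1.0}}
#   trainer = RLTrainer(config)
#   best_episode, best_model, tracker = trainer.run()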
|
normal
|
{
"blob_id": "7c004cb0c9eefa5e88f5085fb3b2878db98d2b20",
"index": 3200,
"step-1": "<mask token>\n\n\nclass RLTrainer(object):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass RLTrainer(object):\n <mask token>\n\n def __init__(self, config_, grid_search=False):\n \"\"\"\n Constructor\n :param config_:\n :param grid_search:\n :return:\n \"\"\"\n self.config = config_\n self.grid_search = grid_search\n self.logger = logging.getLogger('cuda_logger')\n self.expt_name = self.config['RL_parameters']['experiment']\n self.objective = self.config['RL_parameters']['objective']\n self.city_states_filename = self.config['RL_parameters'][\n 'city_states_filename']\n self.training_tracker = TrainingTracker(self.config)\n\n def run(self):\n \"\"\"\n Creates and runs training episode\n :param:\n :return:\n \"\"\"\n data_provider = DataProvider(self.config)\n hex_attr_df = data_provider.read_hex_bin_attributes()\n hex_distance_df = data_provider.read_hex_bin_distances()\n city_states = data_provider.read_city_states(self.city_states_filename)\n neighborhood = data_provider.read_neighborhood_data()\n popular_bins = data_provider.read_popular_hex_bins()\n num_episodes = self.config['RL_parameters']['num_episodes']\n ind_episodes = self.config['RL_parameters']['ind_episodes']\n exp_decay_multiplier = self.config['RL_parameters'][\n 'exp_decay_multiplier']\n q_ind = None\n r_table = None\n xi_matrix = None\n best_episode = None\n best_model = {}\n progress_bar = tqdm(xrange(num_episodes))\n for episode_id in progress_bar:\n progress_bar.set_description('Episode: {}'.format(episode_id))\n current_best = -1000000\n ind_exploration_factor = np.e ** (-1 * episode_id *\n exp_decay_multiplier / ind_episodes)\n episode = Episode(self.config, episode_id,\n ind_exploration_factor, hex_attr_df, hex_distance_df,\n city_states, neighborhood, popular_bins, q_ind, r_table,\n xi_matrix)\n tables = episode.run()\n q_ind = tables['q_ind']\n r_table = tables['r_table']\n xi_matrix = tables['xi_matrix']\n episode_tracker = tables['episode_tracker']\n self.training_tracker.update_RL_tracker(episode_id,\n episode_tracker.gross_earnings, episode_tracker.\n successful_waits, episode_tracker.unsuccessful_waits,\n episode_tracker.unmet_demand, episode_tracker.\n relocation_rides, episode_tracker.DET, episode_tracker.DPRT,\n episode_tracker.DWT, episode_tracker.DRT, episode_tracker.DCT)\n if self.objective == 'revenue':\n if episode_tracker.gross_earnings >= current_best:\n best_episode = episode_tracker\n current_best = best_episode.gross_earnings\n elif episode_tracker.successful_waits >= current_best:\n best_episode = episode_tracker\n current_best = episode_tracker.successful_waits\n best_model['ind_exploration_factor'] = ind_exploration_factor\n best_model['config'] = self.config\n best_model['q_ind'] = q_ind\n best_model['r_table'] = r_table\n best_model['xi_matrix'] = xi_matrix\n best_model['training_tracker'] = self.training_tracker\n self.logger.info(\n 'Expt: {} Earnings: {} Met Demand: {} Unmet Demand: {}'.format(\n self.expt_name, best_episode.gross_earnings, best_episode.\n successful_waits, best_episode.unmet_demand))\n return best_episode, best_model, self.training_tracker\n",
"step-3": "<mask token>\n\n\nclass RLTrainer(object):\n \"\"\"\n Creates RL training object\n \"\"\"\n\n def __init__(self, config_, grid_search=False):\n \"\"\"\n Constructor\n :param config_:\n :param grid_search:\n :return:\n \"\"\"\n self.config = config_\n self.grid_search = grid_search\n self.logger = logging.getLogger('cuda_logger')\n self.expt_name = self.config['RL_parameters']['experiment']\n self.objective = self.config['RL_parameters']['objective']\n self.city_states_filename = self.config['RL_parameters'][\n 'city_states_filename']\n self.training_tracker = TrainingTracker(self.config)\n\n def run(self):\n \"\"\"\n Creates and runs training episode\n :param:\n :return:\n \"\"\"\n data_provider = DataProvider(self.config)\n hex_attr_df = data_provider.read_hex_bin_attributes()\n hex_distance_df = data_provider.read_hex_bin_distances()\n city_states = data_provider.read_city_states(self.city_states_filename)\n neighborhood = data_provider.read_neighborhood_data()\n popular_bins = data_provider.read_popular_hex_bins()\n num_episodes = self.config['RL_parameters']['num_episodes']\n ind_episodes = self.config['RL_parameters']['ind_episodes']\n exp_decay_multiplier = self.config['RL_parameters'][\n 'exp_decay_multiplier']\n q_ind = None\n r_table = None\n xi_matrix = None\n best_episode = None\n best_model = {}\n progress_bar = tqdm(xrange(num_episodes))\n for episode_id in progress_bar:\n progress_bar.set_description('Episode: {}'.format(episode_id))\n current_best = -1000000\n ind_exploration_factor = np.e ** (-1 * episode_id *\n exp_decay_multiplier / ind_episodes)\n episode = Episode(self.config, episode_id,\n ind_exploration_factor, hex_attr_df, hex_distance_df,\n city_states, neighborhood, popular_bins, q_ind, r_table,\n xi_matrix)\n tables = episode.run()\n q_ind = tables['q_ind']\n r_table = tables['r_table']\n xi_matrix = tables['xi_matrix']\n episode_tracker = tables['episode_tracker']\n self.training_tracker.update_RL_tracker(episode_id,\n episode_tracker.gross_earnings, episode_tracker.\n successful_waits, episode_tracker.unsuccessful_waits,\n episode_tracker.unmet_demand, episode_tracker.\n relocation_rides, episode_tracker.DET, episode_tracker.DPRT,\n episode_tracker.DWT, episode_tracker.DRT, episode_tracker.DCT)\n if self.objective == 'revenue':\n if episode_tracker.gross_earnings >= current_best:\n best_episode = episode_tracker\n current_best = best_episode.gross_earnings\n elif episode_tracker.successful_waits >= current_best:\n best_episode = episode_tracker\n current_best = episode_tracker.successful_waits\n best_model['ind_exploration_factor'] = ind_exploration_factor\n best_model['config'] = self.config\n best_model['q_ind'] = q_ind\n best_model['r_table'] = r_table\n best_model['xi_matrix'] = xi_matrix\n best_model['training_tracker'] = self.training_tracker\n self.logger.info(\n 'Expt: {} Earnings: {} Met Demand: {} Unmet Demand: {}'.format(\n self.expt_name, best_episode.gross_earnings, best_episode.\n successful_waits, best_episode.unmet_demand))\n return best_episode, best_model, self.training_tracker\n",
"step-4": "<mask token>\nfrom __future__ import division\nimport logging\nimport numpy as np\nfrom data.data_provider import DataProvider\nfrom episode.episode import Episode\nfrom tracker import TrainingTracker\nfrom tqdm import tqdm\n\n\nclass RLTrainer(object):\n \"\"\"\n Creates RL training object\n \"\"\"\n\n def __init__(self, config_, grid_search=False):\n \"\"\"\n Constructor\n :param config_:\n :param grid_search:\n :return:\n \"\"\"\n self.config = config_\n self.grid_search = grid_search\n self.logger = logging.getLogger('cuda_logger')\n self.expt_name = self.config['RL_parameters']['experiment']\n self.objective = self.config['RL_parameters']['objective']\n self.city_states_filename = self.config['RL_parameters'][\n 'city_states_filename']\n self.training_tracker = TrainingTracker(self.config)\n\n def run(self):\n \"\"\"\n Creates and runs training episode\n :param:\n :return:\n \"\"\"\n data_provider = DataProvider(self.config)\n hex_attr_df = data_provider.read_hex_bin_attributes()\n hex_distance_df = data_provider.read_hex_bin_distances()\n city_states = data_provider.read_city_states(self.city_states_filename)\n neighborhood = data_provider.read_neighborhood_data()\n popular_bins = data_provider.read_popular_hex_bins()\n num_episodes = self.config['RL_parameters']['num_episodes']\n ind_episodes = self.config['RL_parameters']['ind_episodes']\n exp_decay_multiplier = self.config['RL_parameters'][\n 'exp_decay_multiplier']\n q_ind = None\n r_table = None\n xi_matrix = None\n best_episode = None\n best_model = {}\n progress_bar = tqdm(xrange(num_episodes))\n for episode_id in progress_bar:\n progress_bar.set_description('Episode: {}'.format(episode_id))\n current_best = -1000000\n ind_exploration_factor = np.e ** (-1 * episode_id *\n exp_decay_multiplier / ind_episodes)\n episode = Episode(self.config, episode_id,\n ind_exploration_factor, hex_attr_df, hex_distance_df,\n city_states, neighborhood, popular_bins, q_ind, r_table,\n xi_matrix)\n tables = episode.run()\n q_ind = tables['q_ind']\n r_table = tables['r_table']\n xi_matrix = tables['xi_matrix']\n episode_tracker = tables['episode_tracker']\n self.training_tracker.update_RL_tracker(episode_id,\n episode_tracker.gross_earnings, episode_tracker.\n successful_waits, episode_tracker.unsuccessful_waits,\n episode_tracker.unmet_demand, episode_tracker.\n relocation_rides, episode_tracker.DET, episode_tracker.DPRT,\n episode_tracker.DWT, episode_tracker.DRT, episode_tracker.DCT)\n if self.objective == 'revenue':\n if episode_tracker.gross_earnings >= current_best:\n best_episode = episode_tracker\n current_best = best_episode.gross_earnings\n elif episode_tracker.successful_waits >= current_best:\n best_episode = episode_tracker\n current_best = episode_tracker.successful_waits\n best_model['ind_exploration_factor'] = ind_exploration_factor\n best_model['config'] = self.config\n best_model['q_ind'] = q_ind\n best_model['r_table'] = r_table\n best_model['xi_matrix'] = xi_matrix\n best_model['training_tracker'] = self.training_tracker\n self.logger.info(\n 'Expt: {} Earnings: {} Met Demand: {} Unmet Demand: {}'.format(\n self.expt_name, best_episode.gross_earnings, best_episode.\n successful_waits, best_episode.unmet_demand))\n return best_episode, best_model, self.training_tracker\n",
"step-5": "\"\"\"\nThis class runs the RL Training\n\"\"\"\n\nfrom __future__ import division\nimport logging\nimport numpy as np\nfrom data.data_provider import DataProvider\nfrom episode.episode import Episode\nfrom tracker import TrainingTracker\nfrom tqdm import tqdm\n\n\nclass RLTrainer(object):\n \"\"\"\n Creates RL training object\n \"\"\"\n\n def __init__(self, config_, grid_search=False):\n \"\"\"\n Constructor\n :param config_:\n :param grid_search:\n :return:\n \"\"\"\n self.config = config_\n self.grid_search = grid_search\n self.logger = logging.getLogger(\"cuda_logger\")\n self.expt_name = self.config['RL_parameters']['experiment']\n self.objective = self.config['RL_parameters']['objective']\n self.city_states_filename = self.config['RL_parameters']['city_states_filename']\n\n # Create training tracker\n self.training_tracker = TrainingTracker(self.config)\n\n def run(self):\n \"\"\"\n Creates and runs training episode\n :param:\n :return:\n \"\"\"\n data_provider = DataProvider(self.config)\n hex_attr_df = data_provider.read_hex_bin_attributes()\n hex_distance_df = data_provider.read_hex_bin_distances()\n city_states = data_provider.read_city_states(self.city_states_filename)\n neighborhood = data_provider.read_neighborhood_data()\n popular_bins = data_provider.read_popular_hex_bins()\n num_episodes = self.config['RL_parameters']['num_episodes']\n ind_episodes = self.config['RL_parameters']['ind_episodes']\n exp_decay_multiplier = self.config['RL_parameters']['exp_decay_multiplier']\n\n q_ind = None\n r_table = None\n xi_matrix = None\n\n best_episode = None\n best_model = {}\n\n progress_bar = tqdm(xrange(num_episodes))\n for episode_id in progress_bar:\n progress_bar.set_description(\"Episode: {}\".format(episode_id))\n current_best = -1000000\n\n # Create episode\n ind_exploration_factor = np.e ** (-1 * episode_id * exp_decay_multiplier / ind_episodes)\n\n episode = Episode(self.config,\n episode_id,\n ind_exploration_factor,\n hex_attr_df,\n hex_distance_df,\n city_states,\n neighborhood,\n popular_bins,\n q_ind,\n r_table,\n xi_matrix)\n\n # Run episode\n tables = episode.run()\n q_ind = tables['q_ind']\n r_table = tables['r_table']\n xi_matrix = tables['xi_matrix']\n episode_tracker = tables['episode_tracker']\n\n # Uncomment for logging if running a job, comment during experiments\n # otherwise it leads to insanely huge logging output which is useless\n\n # self.logger.info(\"\"\"\n # Expt: {} Episode: {} Earnings: {}\n # Pax rides: {} Relocation rides: {} Unmet demand: {}\n # \"\"\".format(self.expt_name, episode_id,\n # episode_tracker.gross_earnings,\n # episode_tracker.successful_waits,\n # episode_tracker.relocation_rides,\n # episode_tracker.unmet_demand))\n # self.logger.info(\"----------------------------------\")\n\n self.training_tracker.update_RL_tracker(\n episode_id, episode_tracker.gross_earnings,\n episode_tracker.successful_waits, episode_tracker.unsuccessful_waits,\n episode_tracker.unmet_demand, episode_tracker.relocation_rides,\n episode_tracker.DET, episode_tracker.DPRT, episode_tracker.DWT,\n episode_tracker.DRT, episode_tracker.DCT)\n\n # Keep track of the best episode\n if self.objective == 'revenue':\n if episode_tracker.gross_earnings >= current_best:\n best_episode = episode_tracker\n current_best = best_episode.gross_earnings\n else: # self.objective == 'pickups':\n if episode_tracker.successful_waits >= current_best:\n best_episode = episode_tracker\n current_best = episode_tracker.successful_waits\n\n # Keep track of the best model\n 
best_model['ind_exploration_factor'] = ind_exploration_factor\n best_model['config'] = self.config\n best_model['q_ind'] = q_ind\n best_model['r_table'] = r_table\n best_model['xi_matrix'] = xi_matrix\n best_model['training_tracker'] = self.training_tracker\n\n # After finishing training\n self.logger.info(\"Expt: {} Earnings: {} Met Demand: {} Unmet Demand: {}\".format(self.expt_name,\n best_episode.gross_earnings,\n best_episode.successful_waits,\n best_episode.unmet_demand))\n return best_episode, best_model, self.training_tracker\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
"""
The :mod:`sklearn.experimental` module provides importable modules that enable
the use of experimental features or estimators.
The features and estimators that are experimental aren't subject to
deprecation cycles. Use them at your own risks!
"""
|
normal
|
{
"blob_id": "d3952306679d5a4dc6765a7afa19ce671ff4c0b4",
"index": 8501,
"step-1": "<mask token>\n",
"step-2": "\"\"\"\nThe :mod:`sklearn.experimental` module provides importable modules that enable\nthe use of experimental features or estimators.\n\nThe features and estimators that are experimental aren't subject to\ndeprecation cycles. Use them at your own risks!\n\"\"\"\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import pickle
import saveClass as sc
import libcell as lb
import numpy as np
import struct
import os
# def save_Ldend(Ldends, bfname):
# # create a binary file
# bfname='Dend_length.bin'
# binfile = file(bfname, 'wb')
# # and write out two integers with the row and column dimension
# header = struct.pack('2I', Ldends.shape[0], Ldends.shape[1])
# binfile.write(header)
# # then loop over columns and write each
# for i in range(Ldends.shape[1]):
# ddata = struct.pack('%id' % Ldends.shape[0], *Ldends[:,i])
# binfile.write(ddata)
# binfile.close()
def save_ave_replay(aveData, nIter, nStart, bfname):
vd = np.zeros((nIter, 4, nStart))
for i_trial in range(nIter):
vv = aveData[i_trial]
for i_dendrite in range(4):
vvv = vv[i_dendrite]
mv = np.reshape(vvv, (nStart, 1501))
vd[i_trial, i_dendrite, :] = np.mean(mv[:,550:1000], 1)
mvd = np.mean(vd, 0)
# print (bfname)
# create a binary file
    binfile = open(bfname, 'wb')  # open() instead of Python 2's file() so this also runs on Python 3
# and write out two integers with the row and column dimension
header = struct.pack('2I', mvd.shape[0], mvd.shape[1])
binfile.write(header)
# then loop over columns and write each
for i in range(mvd.shape[1]):
ddata = struct.pack('%id' % mvd.shape[0], *mvd[:,i])
binfile.write(ddata)
binfile.close()
def save_ave_place(aveData, nIter, bfname):
vd = np.zeros((nIter, 4, 20))
for i_trial in range(nIter):
vv = aveData[i_trial]
for i_dendrite in range(4):
vvv = vv[i_dendrite]
mv = np.reshape(vvv[0:50000], (20, 2500))
vd[i_trial, i_dendrite, :] = np.mean(mv, 1)
mvd = np.mean(vd, 0)
print (bfname)
# create a binary file
    binfile = open(bfname, 'wb')  # open() instead of Python 2's file() so this also runs on Python 3
# and write out two integers with the row and column dimension
header = struct.pack('2I', mvd.shape[0], mvd.shape[1])
binfile.write(header)
# then loop over columns and write each
for i in range(mvd.shape[1]):
ddata = struct.pack('%id' % mvd.shape[0], *mvd[:,i])
binfile.write(ddata)
binfile.close()
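
# Both save_ave_* helpers emit the same binary layout: a header of two
# unsigned ints (rows, cols) followed by the matrix written column by column
# as float64 -- easy to read back with numpy.fromfile or from R.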
def save_sim(data, out_binary=False, out_vdend=False, out_pickle=False, outdir='data', dt_save=1):
if not os.path.exists(outdir):
os.makedirs(outdir)
modelData = sc.emptyObject()
lb.props(modelData)
if (data.stimType=='DStim'):
filename = 'T' + str(data.TSTOP) + '_dend' + str(data.iclampLoc[2]) + '_N' + str(len(data.iRange)) + '_I' + str(data.iRange[0]) + '_dI' + str(data.iRange[1]-data.iRange[0])
elif (data.stimType=='SStim'):
filename = 'T' + str(data.TSTOP) + '_soma_N' + str(len(data.iRange)) + '_I' + str(data.iRange[0]) + '_dI' + str(data.iRange[1]-data.iRange[0])
else :
filename = 'T' + str(data.TSTOP) + '_Ne' + str(data.Ensyn)+'_gA'+str(round(data.Agmax,2)) + '_tauA' + str(data.Atau2)
if (data.NMDA):
filename = filename + '_gN'+str(round(data.Ngmax,2))
if (data.GABA):
filename = filename + '_Ni'+str(data.Insyn) + '_gG'+str(round(data.Igmax, 2))
if (data.GABA_B):
filename = filename + '_gB'+str(round(data.Bgmax, 2))
if (data.modulateNa):
filename = filename + '_noDendNa'
if (data.stimType == 'nIter'):
filename = filename + '_tInt' + str(data.tInterval) + 'ms_' + data.locBias + '_' + data.direction
    if (data.stimType == 'place') or (data.stimType == 'poisson') or (data.stimType == 'replay'):
filename = filename + "_Er" + str(data.Erate) + '_Ir'+str(data.Irate) + '_' + data.placeType + '_rep' + str(data.nIter)
filename = filename + '_stimseed' + str(data.stimseed)
if (data.modulateK == True):
filename = filename + '_K0'
if (data.modulateK_local == True):
filename = filename + '_KL0'
if (data.modulateK_parents == True):
filename = filename + '_KP0'
if (data.modulateRmRa == True):
filename = filename + '_RmRa'
if (data.modulateRmRaSeg == True):
filename = filename + '_RmRaSeg'
if (data.randomW == True):
filename = filename + '_randW'
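
    # e.g. a place-field run might yield a name like
    # 'T10000_Ne2000_gA0.6_tauA1_Er5_Ir20_random_rep16_stimseed1'
    # (illustrative values only)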
if out_pickle:
dataList = [data, modelData]
fname = './'+outdir+'/'+filename+'.pkl'
f = open(fname, 'wb')
pickle.dump(dataList, f)
f.close()
if out_binary:
#---------------------------------------------
# WRITE the response in a binary file to read it with R
mat = np.array(data.vdata)
L = mat.shape[1]
dt_ratio = int(round(dt_save / data.dt))
mat = mat[:,0:L:dt_ratio]
np.save("./"+outdir+"/vdata_"+filename+".npy", mat)
#bfname = './'+outdir+'/vdata_'+filename+'.bin'
#print (bfname)
# create a binary file
#binfile = file(bfname, 'wb')
# and write out two integers with the row and column dimension
#header = struct.pack('2I', mat.shape[0], mat.shape[1])
#binfile.write(header)
# then loop over columns and write each
#for i in range(mat.shape[1]):
#ddata = struct.pack('%id' % mat.shape[0], *mat[:,i])
#binfile.write(ddata)
#binfile.close()
if out_vdend:
# # WRITE the dendritic response
nRep = len(data.vDdata)
mat = np.array(data.vDdata[0])
for i in range(1, nRep):
mat = np.hstack((mat, data.vDdata[i]))
L = mat.shape[1]
dt_ratio = int(round(dt_save / data.dt))
mat = mat[:,0:L:dt_ratio]
np.save("./"+outdir+"/vDdata_"+filename+".npy", mat)
# bfname = './'+outdir+'/vDdata_'+filename+'.bin'
# # create a binary file
# binfile = file(bfname, 'wb')
# # and write out two integers with the row and column dimension
# header = struct.pack('2I', mat.shape[0], mat.shape[1])
# binfile.write(header)
# # then loop over columns and write each
# for i in range(mat.shape[1]):
# ddata = struct.pack('%id' % mat.shape[0], *mat[:,i])
# binfile.write(ddata)
# binfile.close()
# # ---------------------------------------------
# # WRITE the location of the synapses
    # save the synapse locations; inhibitory locations exist only when GABA is
    # enabled, so that save is guarded to avoid a NameError when data.GABA is False
    Elocs = np.array(data.Elocs)
    if (data.GABA) :
        Ilocs = np.array(data.Ilocs)
        #Ilocs[:,1] = 1 + Ilocs[:,1] # code that these are inhibitory synapses
        Locs = np.row_stack((Elocs, Ilocs))
    else :
        Locs = np.array(data.Elocs)

    #bfname = './'+outdir+'/synlocs_'+filename+'.npy'
    #print (bfname)
    np.save("./"+outdir+"/Elocs_"+filename+".npy", Elocs)
    if (data.GABA) :
        np.save("./"+outdir+"/Ilocs_"+filename+".npy", Ilocs)
# # create a binary file
# binfile = file(bfname, 'wb')
# # and write out two integers with the row and column dimension
# header = struct.pack('2I', Locs.shape[0], Locs.shape[1])
# binfile.write(header)
# # then loop over columns and write each
# for i in range(Locs.shape[1]):
# ddata = struct.pack('%id' % Locs.shape[0], *Locs[:,i])
# binfile.write(ddata)
# binfile.close()
# #---------------------------------------------
# Write the input spike train
if (len(data.stim)>0):
stim = data.stim
#bfname = './'+outdir+'/stim_'+filename+'.bin'
np.save("./"+outdir+"/stim_"+filename+".npy", stim)
# create a binary file
#binfile = file(bfname, 'wb')
# and write out two integers with the row and column dimension
#header = struct.pack('2I', stim.shape[0], stim.shape[1])
#binfile.write(header)
# then loop over columns and write each
#for i in range(stim.shape[1]):
#ddata = struct.pack('%id' % stim.shape[0], *stim[:,i])
#binfile.write(ddata)
#binfile.close()
|
normal
|
{
"blob_id": "6eb8172e7e26ad6ec9cb0d30c5a0613ce79296e6",
"index": 8421,
"step-1": "<mask token>\n\n\ndef save_ave_replay(aveData, nIter, nStart, bfname):\n vd = np.zeros((nIter, 4, nStart))\n for i_trial in range(nIter):\n vv = aveData[i_trial]\n for i_dendrite in range(4):\n vvv = vv[i_dendrite]\n mv = np.reshape(vvv, (nStart, 1501))\n vd[i_trial, i_dendrite, :] = np.mean(mv[:, 550:1000], 1)\n mvd = np.mean(vd, 0)\n binfile = file(bfname, 'wb')\n header = struct.pack('2I', mvd.shape[0], mvd.shape[1])\n binfile.write(header)\n for i in range(mvd.shape[1]):\n ddata = struct.pack('%id' % mvd.shape[0], *mvd[:, i])\n binfile.write(ddata)\n binfile.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef save_ave_replay(aveData, nIter, nStart, bfname):\n vd = np.zeros((nIter, 4, nStart))\n for i_trial in range(nIter):\n vv = aveData[i_trial]\n for i_dendrite in range(4):\n vvv = vv[i_dendrite]\n mv = np.reshape(vvv, (nStart, 1501))\n vd[i_trial, i_dendrite, :] = np.mean(mv[:, 550:1000], 1)\n mvd = np.mean(vd, 0)\n binfile = file(bfname, 'wb')\n header = struct.pack('2I', mvd.shape[0], mvd.shape[1])\n binfile.write(header)\n for i in range(mvd.shape[1]):\n ddata = struct.pack('%id' % mvd.shape[0], *mvd[:, i])\n binfile.write(ddata)\n binfile.close()\n\n\ndef save_ave_place(aveData, nIter, bfname):\n vd = np.zeros((nIter, 4, 20))\n for i_trial in range(nIter):\n vv = aveData[i_trial]\n for i_dendrite in range(4):\n vvv = vv[i_dendrite]\n mv = np.reshape(vvv[0:50000], (20, 2500))\n vd[i_trial, i_dendrite, :] = np.mean(mv, 1)\n mvd = np.mean(vd, 0)\n print(bfname)\n binfile = file(bfname, 'wb')\n header = struct.pack('2I', mvd.shape[0], mvd.shape[1])\n binfile.write(header)\n for i in range(mvd.shape[1]):\n ddata = struct.pack('%id' % mvd.shape[0], *mvd[:, i])\n binfile.write(ddata)\n binfile.close()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef save_ave_replay(aveData, nIter, nStart, bfname):\n vd = np.zeros((nIter, 4, nStart))\n for i_trial in range(nIter):\n vv = aveData[i_trial]\n for i_dendrite in range(4):\n vvv = vv[i_dendrite]\n mv = np.reshape(vvv, (nStart, 1501))\n vd[i_trial, i_dendrite, :] = np.mean(mv[:, 550:1000], 1)\n mvd = np.mean(vd, 0)\n binfile = file(bfname, 'wb')\n header = struct.pack('2I', mvd.shape[0], mvd.shape[1])\n binfile.write(header)\n for i in range(mvd.shape[1]):\n ddata = struct.pack('%id' % mvd.shape[0], *mvd[:, i])\n binfile.write(ddata)\n binfile.close()\n\n\ndef save_ave_place(aveData, nIter, bfname):\n vd = np.zeros((nIter, 4, 20))\n for i_trial in range(nIter):\n vv = aveData[i_trial]\n for i_dendrite in range(4):\n vvv = vv[i_dendrite]\n mv = np.reshape(vvv[0:50000], (20, 2500))\n vd[i_trial, i_dendrite, :] = np.mean(mv, 1)\n mvd = np.mean(vd, 0)\n print(bfname)\n binfile = file(bfname, 'wb')\n header = struct.pack('2I', mvd.shape[0], mvd.shape[1])\n binfile.write(header)\n for i in range(mvd.shape[1]):\n ddata = struct.pack('%id' % mvd.shape[0], *mvd[:, i])\n binfile.write(ddata)\n binfile.close()\n\n\ndef save_sim(data, out_binary=False, out_vdend=False, out_pickle=False,\n outdir='data', dt_save=1):\n if not os.path.exists(outdir):\n os.makedirs(outdir)\n modelData = sc.emptyObject()\n lb.props(modelData)\n if data.stimType == 'DStim':\n filename = 'T' + str(data.TSTOP) + '_dend' + str(data.iclampLoc[2]\n ) + '_N' + str(len(data.iRange)) + '_I' + str(data.iRange[0]\n ) + '_dI' + str(data.iRange[1] - data.iRange[0])\n elif data.stimType == 'SStim':\n filename = 'T' + str(data.TSTOP) + '_soma_N' + str(len(data.iRange)\n ) + '_I' + str(data.iRange[0]) + '_dI' + str(data.iRange[1] -\n data.iRange[0])\n else:\n filename = 'T' + str(data.TSTOP) + '_Ne' + str(data.Ensyn\n ) + '_gA' + str(round(data.Agmax, 2)) + '_tauA' + str(data.Atau2)\n if data.NMDA:\n filename = filename + '_gN' + str(round(data.Ngmax, 2))\n if data.GABA:\n filename = filename + '_Ni' + str(data.Insyn) + '_gG' + str(round\n (data.Igmax, 2))\n if data.GABA_B:\n filename = filename + '_gB' + str(round(data.Bgmax, 2))\n if data.modulateNa:\n filename = filename + '_noDendNa'\n if data.stimType == 'nIter':\n filename = filename + '_tInt' + str(data.tInterval\n ) + 'ms_' + data.locBias + '_' + data.direction\n if (data.stimType == 'place') + (data.stimType == 'poisson') + (data\n .stimType == 'replay'):\n filename = filename + '_Er' + str(data.Erate) + '_Ir' + str(data\n .Irate) + '_' + data.placeType + '_rep' + str(data.nIter)\n filename = filename + '_stimseed' + str(data.stimseed)\n if data.modulateK == True:\n filename = filename + '_K0'\n if data.modulateK_local == True:\n filename = filename + '_KL0'\n if data.modulateK_parents == True:\n filename = filename + '_KP0'\n if data.modulateRmRa == True:\n filename = filename + '_RmRa'\n if data.modulateRmRaSeg == True:\n filename = filename + '_RmRaSeg'\n if data.randomW == True:\n filename = filename + '_randW'\n if out_pickle:\n dataList = [data, modelData]\n fname = './' + outdir + '/' + filename + '.pkl'\n f = open(fname, 'wb')\n pickle.dump(dataList, f)\n f.close()\n if out_binary:\n mat = np.array(data.vdata)\n L = mat.shape[1]\n dt_ratio = int(round(dt_save / data.dt))\n mat = mat[:, 0:L:dt_ratio]\n np.save('./' + outdir + '/vdata_' + filename + '.npy', mat)\n if out_vdend:\n nRep = len(data.vDdata)\n mat = np.array(data.vDdata[0])\n for i in range(1, nRep):\n mat = np.hstack((mat, data.vDdata[i]))\n L = mat.shape[1]\n dt_ratio = 
int(round(dt_save / data.dt))\n mat = mat[:, 0:L:dt_ratio]\n np.save('./' + outdir + '/vDdata_' + filename + '.npy', mat)\n if data.GABA:\n Ilocs = np.array(data.Ilocs)\n Elocs = np.array(data.Elocs)\n Locs = np.row_stack((Elocs, Ilocs))\n else:\n Locs = np.array(data.Elocs)\n np.save('./' + outdir + '/Elocs_' + filename + '.npy', Elocs)\n np.save('./' + outdir + '/Ilocs_' + filename + '.npy', Ilocs)\n if len(data.stim) > 0:\n stim = data.stim\n np.save('./' + outdir + '/stim_' + filename + '.npy', stim)\n",
"step-4": "import pickle\nimport saveClass as sc\nimport libcell as lb\nimport numpy as np\nimport struct\nimport os\n\n\ndef save_ave_replay(aveData, nIter, nStart, bfname):\n vd = np.zeros((nIter, 4, nStart))\n for i_trial in range(nIter):\n vv = aveData[i_trial]\n for i_dendrite in range(4):\n vvv = vv[i_dendrite]\n mv = np.reshape(vvv, (nStart, 1501))\n vd[i_trial, i_dendrite, :] = np.mean(mv[:, 550:1000], 1)\n mvd = np.mean(vd, 0)\n binfile = file(bfname, 'wb')\n header = struct.pack('2I', mvd.shape[0], mvd.shape[1])\n binfile.write(header)\n for i in range(mvd.shape[1]):\n ddata = struct.pack('%id' % mvd.shape[0], *mvd[:, i])\n binfile.write(ddata)\n binfile.close()\n\n\ndef save_ave_place(aveData, nIter, bfname):\n vd = np.zeros((nIter, 4, 20))\n for i_trial in range(nIter):\n vv = aveData[i_trial]\n for i_dendrite in range(4):\n vvv = vv[i_dendrite]\n mv = np.reshape(vvv[0:50000], (20, 2500))\n vd[i_trial, i_dendrite, :] = np.mean(mv, 1)\n mvd = np.mean(vd, 0)\n print(bfname)\n binfile = file(bfname, 'wb')\n header = struct.pack('2I', mvd.shape[0], mvd.shape[1])\n binfile.write(header)\n for i in range(mvd.shape[1]):\n ddata = struct.pack('%id' % mvd.shape[0], *mvd[:, i])\n binfile.write(ddata)\n binfile.close()\n\n\ndef save_sim(data, out_binary=False, out_vdend=False, out_pickle=False,\n outdir='data', dt_save=1):\n if not os.path.exists(outdir):\n os.makedirs(outdir)\n modelData = sc.emptyObject()\n lb.props(modelData)\n if data.stimType == 'DStim':\n filename = 'T' + str(data.TSTOP) + '_dend' + str(data.iclampLoc[2]\n ) + '_N' + str(len(data.iRange)) + '_I' + str(data.iRange[0]\n ) + '_dI' + str(data.iRange[1] - data.iRange[0])\n elif data.stimType == 'SStim':\n filename = 'T' + str(data.TSTOP) + '_soma_N' + str(len(data.iRange)\n ) + '_I' + str(data.iRange[0]) + '_dI' + str(data.iRange[1] -\n data.iRange[0])\n else:\n filename = 'T' + str(data.TSTOP) + '_Ne' + str(data.Ensyn\n ) + '_gA' + str(round(data.Agmax, 2)) + '_tauA' + str(data.Atau2)\n if data.NMDA:\n filename = filename + '_gN' + str(round(data.Ngmax, 2))\n if data.GABA:\n filename = filename + '_Ni' + str(data.Insyn) + '_gG' + str(round\n (data.Igmax, 2))\n if data.GABA_B:\n filename = filename + '_gB' + str(round(data.Bgmax, 2))\n if data.modulateNa:\n filename = filename + '_noDendNa'\n if data.stimType == 'nIter':\n filename = filename + '_tInt' + str(data.tInterval\n ) + 'ms_' + data.locBias + '_' + data.direction\n if (data.stimType == 'place') + (data.stimType == 'poisson') + (data\n .stimType == 'replay'):\n filename = filename + '_Er' + str(data.Erate) + '_Ir' + str(data\n .Irate) + '_' + data.placeType + '_rep' + str(data.nIter)\n filename = filename + '_stimseed' + str(data.stimseed)\n if data.modulateK == True:\n filename = filename + '_K0'\n if data.modulateK_local == True:\n filename = filename + '_KL0'\n if data.modulateK_parents == True:\n filename = filename + '_KP0'\n if data.modulateRmRa == True:\n filename = filename + '_RmRa'\n if data.modulateRmRaSeg == True:\n filename = filename + '_RmRaSeg'\n if data.randomW == True:\n filename = filename + '_randW'\n if out_pickle:\n dataList = [data, modelData]\n fname = './' + outdir + '/' + filename + '.pkl'\n f = open(fname, 'wb')\n pickle.dump(dataList, f)\n f.close()\n if out_binary:\n mat = np.array(data.vdata)\n L = mat.shape[1]\n dt_ratio = int(round(dt_save / data.dt))\n mat = mat[:, 0:L:dt_ratio]\n np.save('./' + outdir + '/vdata_' + filename + '.npy', mat)\n if out_vdend:\n nRep = len(data.vDdata)\n mat = np.array(data.vDdata[0])\n for i in 
range(1, nRep):\n mat = np.hstack((mat, data.vDdata[i]))\n L = mat.shape[1]\n dt_ratio = int(round(dt_save / data.dt))\n mat = mat[:, 0:L:dt_ratio]\n np.save('./' + outdir + '/vDdata_' + filename + '.npy', mat)\n if data.GABA:\n Ilocs = np.array(data.Ilocs)\n Elocs = np.array(data.Elocs)\n Locs = np.row_stack((Elocs, Ilocs))\n else:\n Locs = np.array(data.Elocs)\n np.save('./' + outdir + '/Elocs_' + filename + '.npy', Elocs)\n np.save('./' + outdir + '/Ilocs_' + filename + '.npy', Ilocs)\n if len(data.stim) > 0:\n stim = data.stim\n np.save('./' + outdir + '/stim_' + filename + '.npy', stim)\n",
"step-5": "import pickle\nimport saveClass as sc\nimport libcell as lb\nimport numpy as np\nimport struct\nimport os\n\n# def save_Ldend(Ldends, bfname):\n# # create a binary file\n# bfname='Dend_length.bin'\n# binfile = file(bfname, 'wb')\n# # and write out two integers with the row and column dimension\n# header = struct.pack('2I', Ldends.shape[0], Ldends.shape[1])\n# binfile.write(header)\n# # then loop over columns and write each\n# for i in range(Ldends.shape[1]):\n# ddata = struct.pack('%id' % Ldends.shape[0], *Ldends[:,i])\n# binfile.write(ddata)\n# binfile.close()\n\ndef save_ave_replay(aveData, nIter, nStart, bfname):\n vd = np.zeros((nIter, 4, nStart))\n\n for i_trial in range(nIter):\n vv = aveData[i_trial]\n for i_dendrite in range(4):\n vvv = vv[i_dendrite]\n mv = np.reshape(vvv, (nStart, 1501))\n vd[i_trial, i_dendrite, :] = np.mean(mv[:,550:1000], 1)\n\n mvd = np.mean(vd, 0)\n\n # print (bfname)\n\n # create a binary file\n binfile = file(bfname, 'wb')\n # and write out two integers with the row and column dimension\n header = struct.pack('2I', mvd.shape[0], mvd.shape[1])\n binfile.write(header)\n # then loop over columns and write each\n for i in range(mvd.shape[1]):\n ddata = struct.pack('%id' % mvd.shape[0], *mvd[:,i])\n binfile.write(ddata)\n binfile.close()\n\ndef save_ave_place(aveData, nIter, bfname):\n vd = np.zeros((nIter, 4, 20))\n\n for i_trial in range(nIter):\n vv = aveData[i_trial]\n for i_dendrite in range(4):\n vvv = vv[i_dendrite]\n mv = np.reshape(vvv[0:50000], (20, 2500))\n vd[i_trial, i_dendrite, :] = np.mean(mv, 1)\n\n mvd = np.mean(vd, 0)\n\n print (bfname)\n\n # create a binary file\n binfile = file(bfname, 'wb')\n # and write out two integers with the row and column dimension\n header = struct.pack('2I', mvd.shape[0], mvd.shape[1])\n binfile.write(header)\n # then loop over columns and write each\n for i in range(mvd.shape[1]):\n ddata = struct.pack('%id' % mvd.shape[0], *mvd[:,i])\n binfile.write(ddata)\n binfile.close()\n\n\ndef save_sim(data, out_binary=False, out_vdend=False, out_pickle=False, outdir='data', dt_save=1):\n if not os.path.exists(outdir):\n os.makedirs(outdir)\n\n modelData = sc.emptyObject()\n lb.props(modelData)\n\n if (data.stimType=='DStim'):\n filename = 'T' + str(data.TSTOP) + '_dend' + str(data.iclampLoc[2]) + '_N' + str(len(data.iRange)) + '_I' + str(data.iRange[0]) + '_dI' + str(data.iRange[1]-data.iRange[0]) \n elif (data.stimType=='SStim'):\n filename = 'T' + str(data.TSTOP) + '_soma_N' + str(len(data.iRange)) + '_I' + str(data.iRange[0]) + '_dI' + str(data.iRange[1]-data.iRange[0]) \n\n else :\n filename = 'T' + str(data.TSTOP) + '_Ne' + str(data.Ensyn)+'_gA'+str(round(data.Agmax,2)) + '_tauA' + str(data.Atau2)\n if (data.NMDA):\n filename = filename + '_gN'+str(round(data.Ngmax,2))\n if (data.GABA):\n filename = filename + '_Ni'+str(data.Insyn) + '_gG'+str(round(data.Igmax, 2))\n if (data.GABA_B):\n filename = filename + '_gB'+str(round(data.Bgmax, 2))\n\n if (data.modulateNa):\n filename = filename + '_noDendNa'\n\n if (data.stimType == 'nIter'):\n filename = filename + '_tInt' + str(data.tInterval) + 'ms_' + data.locBias + '_' + data.direction\n \n if ((data.stimType == 'place') + (data.stimType == 'poisson') + (data.stimType == 'replay')):\n filename = filename + \"_Er\" + str(data.Erate) + '_Ir'+str(data.Irate) + '_' + data.placeType + '_rep' + str(data.nIter)\n filename = filename + '_stimseed' + str(data.stimseed)\n\n if (data.modulateK == True):\n filename = filename + '_K0'\n if (data.modulateK_local == True):\n 
filename = filename + '_KL0'\n if (data.modulateK_parents == True):\n filename = filename + '_KP0'\n\n if (data.modulateRmRa == True):\n filename = filename + '_RmRa'\n if (data.modulateRmRaSeg == True):\n filename = filename + '_RmRaSeg'\n if (data.randomW == True):\n filename = filename + '_randW'\n\n if out_pickle:\n dataList = [data, modelData]\n fname = './'+outdir+'/'+filename+'.pkl'\n f = open(fname, 'wb')\n pickle.dump(dataList, f)\n f.close()\n\n\n if out_binary:\n #---------------------------------------------\n # WRITE the response in a binary file to read it with R\n mat = np.array(data.vdata)\n L = mat.shape[1]\n dt_ratio = int(round(dt_save / data.dt))\n mat = mat[:,0:L:dt_ratio]\n\n np.save(\"./\"+outdir+\"/vdata_\"+filename+\".npy\", mat)\n\n #bfname = './'+outdir+'/vdata_'+filename+'.bin'\n #print (bfname)\n # create a binary file\n #binfile = file(bfname, 'wb')\n # and write out two integers with the row and column dimension\n #header = struct.pack('2I', mat.shape[0], mat.shape[1])\n #binfile.write(header)\n # then loop over columns and write each\n #for i in range(mat.shape[1]):\n #ddata = struct.pack('%id' % mat.shape[0], *mat[:,i])\n #binfile.write(ddata)\n #binfile.close()\n\n if out_vdend:\n # # WRITE the dendritic response\n nRep = len(data.vDdata)\n mat = np.array(data.vDdata[0])\n for i in range(1, nRep):\n mat = np.hstack((mat, data.vDdata[i]))\n\n L = mat.shape[1]\n dt_ratio = int(round(dt_save / data.dt))\n mat = mat[:,0:L:dt_ratio]\n \n np.save(\"./\"+outdir+\"/vDdata_\"+filename+\".npy\", mat)\n \n # bfname = './'+outdir+'/vDdata_'+filename+'.bin'\n # # create a binary file\n # binfile = file(bfname, 'wb')\n # # and write out two integers with the row and column dimension\n # header = struct.pack('2I', mat.shape[0], mat.shape[1])\n # binfile.write(header)\n # # then loop over columns and write each\n # for i in range(mat.shape[1]):\n # ddata = struct.pack('%id' % mat.shape[0], *mat[:,i])\n # binfile.write(ddata)\n # binfile.close()\n \n\n # # ---------------------------------------------\n # # WRITE the location of the synapses \n if (data.GABA) :\n Ilocs = np.array(data.Ilocs) \n #Ilocs[:,1] = 1 + Ilocs[:,1] # code that these are inhibitory synapses\n Elocs = np.array(data.Elocs)\n Locs = np.row_stack((Elocs, Ilocs))\n else :\n Locs = np.array(data.Elocs)\n\n #bfname = './'+outdir+'/synlocs_'+filename+'.npy'\n #print (bfname)\n np.save(\"./\"+outdir+\"/Elocs_\"+filename+\".npy\", Elocs)\n np.save(\"./\"+outdir+\"/Ilocs_\"+filename+\".npy\", Ilocs)\n # # create a binary file\n # binfile = file(bfname, 'wb')\n # # and write out two integers with the row and column dimension\n # header = struct.pack('2I', Locs.shape[0], Locs.shape[1])\n # binfile.write(header)\n # # then loop over columns and write each\n # for i in range(Locs.shape[1]):\n # ddata = struct.pack('%id' % Locs.shape[0], *Locs[:,i])\n # binfile.write(ddata)\n # binfile.close()\n\n # #---------------------------------------------\n # Write the input spike train\n if (len(data.stim)>0):\n stim = data.stim\n #bfname = './'+outdir+'/stim_'+filename+'.bin'\n np.save(\"./\"+outdir+\"/stim_\"+filename+\".npy\", stim)\n\n # create a binary file\n #binfile = file(bfname, 'wb')\n # and write out two integers with the row and column dimension\n #header = struct.pack('2I', stim.shape[0], stim.shape[1])\n #binfile.write(header)\n # then loop over columns and write each\n #for i in range(stim.shape[1]):\n #ddata = struct.pack('%id' % stim.shape[0], *stim[:,i])\n #binfile.write(ddata)\n #binfile.close()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# Binary search for an index holding x in the sorted list arr — any
# occurrence, not necessarily the first; the caller expands outwards from it.
def findFirst(arr, l, h, x):
    if l > h:
        return -1
    mid = (l + h) // 2
    if arr[mid] == x:
        return mid
    elif arr[mid] > x:
        return findFirst(arr, l, mid - 1, x)
    return findFirst(arr, mid + 1, h, x)

# Return [first, last] index of x in arr, or [-1, -1] if x is absent.
# The linear scans below make the worst case O(n) when x is heavily repeated.
def indexes(arr, x):
    n = len(arr)
    ind = findFirst(arr, 0, n - 1, x)
    if ind == -1:
        return [-1, -1]
    l = u = ind
    for i in range(ind + 1, n):  # walk right to the last occurrence
        if arr[i] == x:
            u = i
        else:
            break
    for i in range(ind - 1, -1, -1):  # walk left to the first occurrence
        if arr[i] == x:
            l = i
        else:
            break
    return [l, u]
print(indexes([1, 2, 5, 5, 5, 5, 5, 12, 45, 67], 5))
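# Added for comparison (a hedged alternative, not part of the original
# snippet): the same first/last-occurrence query can be answered fully in
# O(log n) with the standard library's bisect module.
from bisect import bisect_left, bisect_right

def indexes_bisect(arr, x):
    lo = bisect_left(arr, x)  # first index with arr[lo] >= x
    if lo == len(arr) or arr[lo] != x:
        return [-1, -1]
    return [lo, bisect_right(arr, x) - 1]  # bisect_right is one past the last x

print(indexes_bisect([1, 2, 5, 5, 5, 5, 5, 12, 45, 67], 5))  # -> [2, 6]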
|
normal
|
{
"blob_id": "b4783540224902b10088edbd038d6d664934a237",
"index": 4893,
"step-1": "<mask token>\n",
"step-2": "def findFirst(arr, l, h, x):\n if l > h:\n return -1\n mid = (l + h) // 2\n if arr[mid] == x:\n return mid\n elif arr[mid] > x:\n return findFirst(arr, l, mid - 1, x)\n return findFirst(arr, mid + 1, h, x)\n\n\n<mask token>\n",
"step-3": "def findFirst(arr, l, h, x):\n if l > h:\n return -1\n mid = (l + h) // 2\n if arr[mid] == x:\n return mid\n elif arr[mid] > x:\n return findFirst(arr, l, mid - 1, x)\n return findFirst(arr, mid + 1, h, x)\n\n\ndef indexes(arr, x):\n n = len(arr)\n ind = findFirst(arr, 0, n - 1, x)\n if ind == -1:\n return [-1, -1]\n l = u = ind\n for i in range(ind + 1, n):\n if arr[i] == x:\n u = i\n else:\n break\n for i in range(ind - 1, -1, -1):\n if arr[i] == x:\n l = i\n else:\n break\n return [l, u]\n\n\n<mask token>\n",
"step-4": "def findFirst(arr, l, h, x):\n if l > h:\n return -1\n mid = (l + h) // 2\n if arr[mid] == x:\n return mid\n elif arr[mid] > x:\n return findFirst(arr, l, mid - 1, x)\n return findFirst(arr, mid + 1, h, x)\n\n\ndef indexes(arr, x):\n n = len(arr)\n ind = findFirst(arr, 0, n - 1, x)\n if ind == -1:\n return [-1, -1]\n l = u = ind\n for i in range(ind + 1, n):\n if arr[i] == x:\n u = i\n else:\n break\n for i in range(ind - 1, -1, -1):\n if arr[i] == x:\n l = i\n else:\n break\n return [l, u]\n\n\nprint(indexes([1, 2, 5, 5, 5, 5, 5, 12, 45, 67], 5))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import os
_basedir = os.path.abspath(os.path.dirname(__file__))
DEBUG = True
SECRET_KEY = '06A52C5B30EC2960310B45E4E0FF21C5D6C86C47D91FE19FA5934EFF445276A0'
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'app.db')
SQLALCHEMY_ECHO = True
DATABASE_CONNECT_OPTIONS = {}
THREADS_PER_PAGE = 8
CSRF_ENABLED = True
CSRF_SESSION_KEY = '8C371D8166DA8A9F770DAB562878BDD8704F079BB735D607CE8E2C507D55359A'
# NOTE: '%s/images' is a template — the application is expected to substitute
# its base path (e.g. UPLOAD_FOLDER % app.root_path) before use.
UPLOAD_FOLDER = '%s/images'
ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg'}
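# Hedged usage sketch (added; not part of the original config). A Flask app
# would typically load this module by import name — `config` here is an
# assumption about how the file is saved:
#
#   from flask import Flask
#   app = Flask(__name__)
#   app.config.from_object('config')
#   app.config['UPLOAD_FOLDER'] = app.config['UPLOAD_FOLDER'] % app.root_path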
|
normal
|
{
"blob_id": "6ee71cf61ae6a79ec0cd06f1ddc7dc614a76c7b9",
"index": 6547,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n_basedir = os.path.abspath(os.path.dirname(__file__))\nDEBUG = True\nSECRET_KEY = '06A52C5B30EC2960310B45E4E0FF21C5D6C86C47D91FE19FA5934EFF445276A0'\nSQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'app.db')\nSQLALCHEMY_ECHO = True\nDATABASE_CONNECT_OPTIONS = {}\nTHREADS_PER_PAGE = 8\nCSRF_ENABLED = True\nCSRF_SESSION_KEY = (\n '8C371D8166DA8A9F770DAB562878BDD8704F079BB735D607CE8E2C507D55359A')\nUPLOAD_FOLDER = '%s/images'\nALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg'])\n",
"step-3": "import os\n_basedir = os.path.abspath(os.path.dirname(__file__))\nDEBUG = True\nSECRET_KEY = '06A52C5B30EC2960310B45E4E0FF21C5D6C86C47D91FE19FA5934EFF445276A0'\nSQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'app.db')\nSQLALCHEMY_ECHO = True\nDATABASE_CONNECT_OPTIONS = {}\nTHREADS_PER_PAGE = 8\nCSRF_ENABLED = True\nCSRF_SESSION_KEY = (\n '8C371D8166DA8A9F770DAB562878BDD8704F079BB735D607CE8E2C507D55359A')\nUPLOAD_FOLDER = '%s/images'\nALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg'])\n",
"step-4": "import os\n_basedir = os.path.abspath(os.path.dirname(__file__))\n\nDEBUG = True\n\nSECRET_KEY = '06A52C5B30EC2960310B45E4E0FF21C5D6C86C47D91FE19FA5934EFF445276A0'\n\nSQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'app.db')\nSQLALCHEMY_ECHO = True\nDATABASE_CONNECT_OPTIONS = {}\n\nTHREADS_PER_PAGE = 8\n\nCSRF_ENABLED = True\nCSRF_SESSION_KEY = '8C371D8166DA8A9F770DAB562878BDD8704F079BB735D607CE8E2C507D55359A'\n\nUPLOAD_FOLDER = '%s/images'\nALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg'])\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
_registry = []
def registry(name):
_registry.append(name)
def registry_names():
return iter(_registry)
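# Minimal usage example (added for illustration):
#
#   registry('users')
#   registry('orders')
#   print(list(registry_names()))  # -> ['users', 'orders']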
|
normal
|
{
"blob_id": "51642dbb210600f9ca4e035fb884fbdda030fd04",
"index": 1491,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef registry_names():\n return iter(_registry)\n",
"step-3": "<mask token>\n\n\ndef registry(name):\n _registry.append(name)\n\n\ndef registry_names():\n return iter(_registry)\n",
"step-4": "_registry = []\n\n\ndef registry(name):\n _registry.append(name)\n\n\ndef registry_names():\n return iter(_registry)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import uuid
from datetime import timedelta
from fastapi import APIRouter, Depends, HTTPException, Form, Body
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
from sqlalchemy.orm import Session
# dependency
from configs.config_sqlalchemy import get_db
# schema
from schema import store_schema
# NOTE: the login route below also calls authenticate_user and
# create_access_token and reads settings.ACCESS_TOKEN_EXPIRE_MINUTES; none of
# these are defined in this snippet and are assumed to come from app-local
# auth/config modules.
# define the url the client will use to access the token
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="auth/login")
# router object
router = APIRouter(
prefix="/auth",
tags=["AUTHORIZATION AND AUTHENTICATION"],
responses={
200:{'description':'Ok'},
201:{'description':'created'},
400: {"description": "Bad Request"},
404: {"description": "Not found"}
}
)
# register a new account
@router.post("/account/register",
summary='register to create a new store',
response_model=store_schema.Store,
status_code=201
)
async def account_register(
StoreName: str = Body(...),
OwnerFirstName: str = Body(...),
OwnerLastName: str = Body(...),
OwnerEmail: str = Body(...),
):
    # Stub: registration logic is not implemented yet; returning None will
    # fail response_model validation once a real Store payload is expected.
    return
# account login
@router.post('/login',
summary='login to get access token',
status_code=200
)
async def login(form_data: OAuth2PasswordRequestForm = Depends(), db:Session=Depends(get_db)):
user = authenticate_user(email=form_data.username, password=form_data.password, db=db)
if not user:
raise HTTPException(
status_code=401,
detail="Incorrect username or password",
headers={"WWW-Authenticate": "Bearer"},
)
access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
access_token = create_access_token(
data={"sub": str(user.id)}, expires_delta=access_token_expires
)
return {"access_token": access_token, "token_type": "bearer", "user":user}
|
normal
|
{
"blob_id": "64bbf2e3b961a6e0b5d7e551278bb21990df2ed9",
"index": 5526,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@router.post('/account/register', summary='register to create a new store',\n response_model=store_schema.Store, status_code=201)\nasync def account_register(StoreName: str=Body(...), OwnerFirstName: str=\n Body(...), OwnerLastName: str=Body(...), OwnerEmail: str=Body(...)):\n return\n\n\n@router.post('/login', summary='login to get access token', status_code=200)\nasync def login(form_data: OAuth2PasswordRequestForm=Depends(), db: Session\n =Depends(get_db)):\n user = authenticate_user(email=form_data.username, password=form_data.\n password, db=db)\n if not user:\n raise HTTPException(status_code=401, detail=\n 'Incorrect username or password', headers={'WWW-Authenticate':\n 'Bearer'})\n access_token_expires = timedelta(minutes=settings.\n ACCESS_TOKEN_EXPIRE_MINUTES)\n access_token = create_access_token(data={'sub': str(user.id)},\n expires_delta=access_token_expires)\n return {'access_token': access_token, 'token_type': 'bearer', 'user': user}\n",
"step-3": "<mask token>\noauth2_scheme = OAuth2PasswordBearer(tokenUrl='auth/login')\nrouter = APIRouter(prefix='/auth', tags=['AUTHORIZATION AND AUTHENTICATION'\n ], responses={(200): {'description': 'Ok'}, (201): {'description':\n 'created'}, (400): {'description': 'Bad Request'}, (404): {\n 'description': 'Not found'}})\n\n\n@router.post('/account/register', summary='register to create a new store',\n response_model=store_schema.Store, status_code=201)\nasync def account_register(StoreName: str=Body(...), OwnerFirstName: str=\n Body(...), OwnerLastName: str=Body(...), OwnerEmail: str=Body(...)):\n return\n\n\n@router.post('/login', summary='login to get access token', status_code=200)\nasync def login(form_data: OAuth2PasswordRequestForm=Depends(), db: Session\n =Depends(get_db)):\n user = authenticate_user(email=form_data.username, password=form_data.\n password, db=db)\n if not user:\n raise HTTPException(status_code=401, detail=\n 'Incorrect username or password', headers={'WWW-Authenticate':\n 'Bearer'})\n access_token_expires = timedelta(minutes=settings.\n ACCESS_TOKEN_EXPIRE_MINUTES)\n access_token = create_access_token(data={'sub': str(user.id)},\n expires_delta=access_token_expires)\n return {'access_token': access_token, 'token_type': 'bearer', 'user': user}\n",
"step-4": "import uuid\nfrom fastapi import APIRouter, Depends, HTTPException, Form, Body\nfrom fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm\nfrom sqlalchemy.orm import Session\nfrom configs.config_sqlalchemy import get_db\nfrom schema import store_schema\noauth2_scheme = OAuth2PasswordBearer(tokenUrl='auth/login')\nrouter = APIRouter(prefix='/auth', tags=['AUTHORIZATION AND AUTHENTICATION'\n ], responses={(200): {'description': 'Ok'}, (201): {'description':\n 'created'}, (400): {'description': 'Bad Request'}, (404): {\n 'description': 'Not found'}})\n\n\n@router.post('/account/register', summary='register to create a new store',\n response_model=store_schema.Store, status_code=201)\nasync def account_register(StoreName: str=Body(...), OwnerFirstName: str=\n Body(...), OwnerLastName: str=Body(...), OwnerEmail: str=Body(...)):\n return\n\n\n@router.post('/login', summary='login to get access token', status_code=200)\nasync def login(form_data: OAuth2PasswordRequestForm=Depends(), db: Session\n =Depends(get_db)):\n user = authenticate_user(email=form_data.username, password=form_data.\n password, db=db)\n if not user:\n raise HTTPException(status_code=401, detail=\n 'Incorrect username or password', headers={'WWW-Authenticate':\n 'Bearer'})\n access_token_expires = timedelta(minutes=settings.\n ACCESS_TOKEN_EXPIRE_MINUTES)\n access_token = create_access_token(data={'sub': str(user.id)},\n expires_delta=access_token_expires)\n return {'access_token': access_token, 'token_type': 'bearer', 'user': user}\n",
"step-5": "import uuid\n\nfrom fastapi import APIRouter, Depends, HTTPException, Form, Body\nfrom fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm\nfrom sqlalchemy.orm import Session\n\n# dependency\nfrom configs.config_sqlalchemy import get_db\n# schema\nfrom schema import store_schema \n\n\n\n# define the url the client will use to access the token\noauth2_scheme = OAuth2PasswordBearer(tokenUrl=\"auth/login\")\n\n# router object\nrouter = APIRouter(\n prefix=\"/auth\",\n tags=[\"AUTHORIZATION AND AUTHENTICATION\"],\n responses={\n 200:{'description':'Ok'},\n 201:{'description':'created'},\n 400: {\"description\": \"Bad Request\"},\n 404: {\"description\": \"Not found\"}\n } \n)\n\n# register a new account\n@router.post(\"/account/register\",\nsummary='register to create a new store',\nresponse_model=store_schema.Store,\nstatus_code=201\n)\nasync def account_register(\n StoreName: str = Body(...),\n OwnerFirstName: str = Body(...),\n OwnerLastName: str = Body(...),\n OwnerEmail: str = Body(...),\n):\n return\n \n# account login\n@router.post('/login',\nsummary='login to get access token',\nstatus_code=200\n)\nasync def login(form_data: OAuth2PasswordRequestForm = Depends(), db:Session=Depends(get_db)):\n user = authenticate_user(email=form_data.username, password=form_data.password, db=db)\n if not user:\n raise HTTPException(\n status_code=401,\n detail=\"Incorrect username or password\",\n headers={\"WWW-Authenticate\": \"Bearer\"},\n )\n access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)\n access_token = create_access_token(\n data={\"sub\": str(user.id)}, expires_delta=access_token_expires\n )\n return {\"access_token\": access_token, \"token_type\": \"bearer\", \"user\":user}\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import torch
from training import PointNetTrain, PointAugmentTrain, Model
#from PointAugment.Augment.config import opts
from data_utils.dataloader import DataLoaderClass
from mpl_toolkits import mplot3d
import matplotlib.pyplot as plt
import numpy as np
import yaml
def visualize_batch(pointclouds, pred_labels, labels, categories):
batch_size = len(pointclouds)
fig = plt.figure(figsize=(8, batch_size / 2))
ncols = 5
    nrows = max(1, (batch_size + ncols - 1) // ncols)  # ceil, so every sample gets a subplot
for idx, pc in enumerate(pointclouds):
label = categories[int(labels[idx].item())]
pred = categories[int(pred_labels[idx])]
colour = 'g' if label == pred else 'r'
pc = pc.cpu().numpy()
ax = fig.add_subplot(nrows, ncols, idx + 1, projection='3d')
ax.scatter(pc[:, 0], pc[:, 1], pc[:, 2], c=colour, s=2)
ax.axis('off')
ax.set_title('GT: {0}\nPred: {1}'.format(label, pred))
plt.show()
if __name__ == '__main__':
with open("config.yaml", "r") as yamlfile:
config = yaml.load(yamlfile, Loader=yaml.FullLoader)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# PointNet
training_instance_2 = PointNetTrain(config['MODEL']['POINTNET'], device)
modelnet10_dataloader = DataLoaderClass(config['DATA']['MODELNET10'], config['MODEL']['POINTNET']['TRAINING'])
#training_instance_2.train(modelnet10_dataloader.trainloader, modelnet10_dataloader.validloader, adv = False)
training_instance_2.test(modelnet10_dataloader.validloader)
# Point Augment
#training_instance_1 = PointAugmentTrain(config['MODEL']['POINT_AUGMENT'], device)
#modelnet10_dataloader = DataLoaderClass(config['DATA']['MODELNET10'], config['MODEL']['POINTNET']['TRAINING'])
#training_instance_1.train(modelnet10_dataloader.trainloader, modelnet10_dataloader.validloader, adv = False)
#training_instance_1.test(modelnet10_dataloader.validloader)
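# Hedged sketch (added): how visualize_batch — defined above but never
# called — might be wired in. Every attribute name here (.model, .classes)
# is an assumption about the training/dataloader classes, not their real API:
#
#   categories = modelnet10_dataloader.classes
#   points, labels = next(iter(modelnet10_dataloader.validloader))
#   with torch.no_grad():
#       preds = training_instance_2.model(points.to(device)).argmax(dim=1).cpu()
#   visualize_batch(points, preds, labels, categories)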
|
normal
|
{
"blob_id": "0ced42c8bfaad32fc2b397326150e6c7bc5cedab",
"index": 4991,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef visualize_batch(pointclouds, pred_labels, labels, categories):\n batch_size = len(pointclouds)\n fig = plt.figure(figsize=(8, batch_size / 2))\n ncols = 5\n nrows = max(1, batch_size // 5)\n for idx, pc in enumerate(pointclouds):\n label = categories[int(labels[idx].item())]\n pred = categories[int(pred_labels[idx])]\n colour = 'g' if label == pred else 'r'\n pc = pc.cpu().numpy()\n ax = fig.add_subplot(nrows, ncols, idx + 1, projection='3d')\n ax.scatter(pc[:, 0], pc[:, 1], pc[:, 2], c=colour, s=2)\n ax.axis('off')\n ax.set_title('GT: {0}\\nPred: {1}'.format(label, pred))\n plt.show()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef visualize_batch(pointclouds, pred_labels, labels, categories):\n batch_size = len(pointclouds)\n fig = plt.figure(figsize=(8, batch_size / 2))\n ncols = 5\n nrows = max(1, batch_size // 5)\n for idx, pc in enumerate(pointclouds):\n label = categories[int(labels[idx].item())]\n pred = categories[int(pred_labels[idx])]\n colour = 'g' if label == pred else 'r'\n pc = pc.cpu().numpy()\n ax = fig.add_subplot(nrows, ncols, idx + 1, projection='3d')\n ax.scatter(pc[:, 0], pc[:, 1], pc[:, 2], c=colour, s=2)\n ax.axis('off')\n ax.set_title('GT: {0}\\nPred: {1}'.format(label, pred))\n plt.show()\n\n\nif __name__ == '__main__':\n with open('config.yaml', 'r') as yamlfile:\n config = yaml.load(yamlfile, Loader=yaml.FullLoader)\n device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n training_instance_2 = PointNetTrain(config['MODEL']['POINTNET'], device)\n modelnet10_dataloader = DataLoaderClass(config['DATA']['MODELNET10'],\n config['MODEL']['POINTNET']['TRAINING'])\n training_instance_2.test(modelnet10_dataloader.validloader)\n",
"step-4": "import torch\nfrom training import PointNetTrain, PointAugmentTrain, Model\nfrom data_utils.dataloader import DataLoaderClass\nfrom mpl_toolkits import mplot3d\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport yaml\n\n\ndef visualize_batch(pointclouds, pred_labels, labels, categories):\n batch_size = len(pointclouds)\n fig = plt.figure(figsize=(8, batch_size / 2))\n ncols = 5\n nrows = max(1, batch_size // 5)\n for idx, pc in enumerate(pointclouds):\n label = categories[int(labels[idx].item())]\n pred = categories[int(pred_labels[idx])]\n colour = 'g' if label == pred else 'r'\n pc = pc.cpu().numpy()\n ax = fig.add_subplot(nrows, ncols, idx + 1, projection='3d')\n ax.scatter(pc[:, 0], pc[:, 1], pc[:, 2], c=colour, s=2)\n ax.axis('off')\n ax.set_title('GT: {0}\\nPred: {1}'.format(label, pred))\n plt.show()\n\n\nif __name__ == '__main__':\n with open('config.yaml', 'r') as yamlfile:\n config = yaml.load(yamlfile, Loader=yaml.FullLoader)\n device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n training_instance_2 = PointNetTrain(config['MODEL']['POINTNET'], device)\n modelnet10_dataloader = DataLoaderClass(config['DATA']['MODELNET10'],\n config['MODEL']['POINTNET']['TRAINING'])\n training_instance_2.test(modelnet10_dataloader.validloader)\n",
"step-5": "import torch\nfrom training import PointNetTrain, PointAugmentTrain, Model\n#from PointAugment.Augment.config import opts\nfrom data_utils.dataloader import DataLoaderClass\nfrom mpl_toolkits import mplot3d\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport yaml\n\ndef visualize_batch(pointclouds, pred_labels, labels, categories):\n batch_size = len(pointclouds)\n fig = plt.figure(figsize=(8, batch_size / 2))\n\n ncols = 5\n nrows = max(1, batch_size // 5)\n for idx, pc in enumerate(pointclouds):\n label = categories[int(labels[idx].item())]\n pred = categories[int(pred_labels[idx])]\n colour = 'g' if label == pred else 'r'\n pc = pc.cpu().numpy()\n ax = fig.add_subplot(nrows, ncols, idx + 1, projection='3d')\n ax.scatter(pc[:, 0], pc[:, 1], pc[:, 2], c=colour, s=2)\n ax.axis('off')\n ax.set_title('GT: {0}\\nPred: {1}'.format(label, pred))\n\n plt.show()\n\n\nif __name__ == '__main__':\n with open(\"config.yaml\", \"r\") as yamlfile:\n config = yaml.load(yamlfile, Loader=yaml.FullLoader)\n\n device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n\n # PointNet\n training_instance_2 = PointNetTrain(config['MODEL']['POINTNET'], device)\n modelnet10_dataloader = DataLoaderClass(config['DATA']['MODELNET10'], config['MODEL']['POINTNET']['TRAINING']) \n #training_instance_2.train(modelnet10_dataloader.trainloader, modelnet10_dataloader.validloader, adv = False)\n training_instance_2.test(modelnet10_dataloader.validloader)\n\n # Point Augment\n #training_instance_1 = PointAugmentTrain(config['MODEL']['POINT_AUGMENT'], device)\n #modelnet10_dataloader = DataLoaderClass(config['DATA']['MODELNET10'], config['MODEL']['POINTNET']['TRAINING']) \n #training_instance_1.train(modelnet10_dataloader.trainloader, modelnet10_dataloader.validloader, adv = False)\n #training_instance_1.test(modelnet10_dataloader.validloader)\n\n\n \n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
import click
import csv
import sqlite3
from sqlite3.dbapi2 import Connection
import requests
import mimetypes
from urllib.parse import urljoin, urlparse
from lxml.html.soupparser import fromstring
from lxml import etree
from lxml.etree import tostring
from analysis import lmdict, tone_count_with_negation_check
from parser import parse_text
@click.command()
@click.option('-s','--batch-size', 'batch_size', default=50)
def analyze(batch_size):
db = db_connect()
db_ensure_init(db)
cmd = db.execute("SELECT id, url FROM reports WHERE is_analyzed = 0")
for batch in iter(lambda: cmd.fetchmany(batch_size), []):
to_update = list()
for r in batch:
print("Analyzing: " + r[1])
response = requests.get(r[1])
text = parse_text(response.text)
print(text[0:400] + '\n[CLIPPED]')
# perform text analysis
result = tone_count_with_negation_check(lmdict, text)
has_positive_sentiment = result[1] > result[2]
# TODO: FIXME
# Here you should pass in all the variables that you want to store in the database
# Refer to "db_update" method in what order params should be passed
to_update.append((
True,
has_positive_sentiment,
result[0],
result[1],
result[2],
" ".join(result[3]),
" ".join(result[4]),
r[0]))
db_update(db, to_update)
@click.command()
@click.argument('start', nargs=1)
@click.argument('end', nargs=1)
@click.option('-s','--batch-size', 'batch_size', default=50)
def fetch_report_urls(start, end, batch_size):
"""Fetches and stores the 10-K report URLs"""
db = db_connect()
db_ensure_init(db)
with open('log.csv', 'w', newline='') as log:
logwriter = csv.writer(log)
cmd = db.execute("""
SELECT ix.id, ix.conm, ix.type, ix.cik, ix.date, ix.path
FROM "index" ix
LEFT JOIN reports r ON ix.id = r.index_id
WHERE ix.type = '10-K' AND r.id IS NULL AND
CAST(strftime('%Y', DATE(ix.date)) as INT) >= {start} AND
CAST(strftime('%Y', DATE(ix.date)) as INT) <= {end}
ORDER BY ix.date DESC
""".format(start=start, end=end))
for batch in iter(lambda: cmd.fetchmany(batch_size), []):
to_insert = list()
for r in batch:
# print(r)
log_row = r
response = requests.get(r[5])
href = parse_href(response.content)
url = fix_url(href, r[5])
print(url)
filetype = mimetypes.guess_type(url)[0]
print(filetype)
filename = os.path.basename(urlparse(url).path)
print(filename)
to_insert.append((r[0], r[1], r[2], r[3], r[4], url, filetype, filename))
logwriter.writerow(log_row)
db_insert(db, to_insert)
def parse_href(html_content):
# print(html_content)
root = to_doc(html_content)
# f = open("debug_idx.html", "wb")
# f.write(tostring(root, pretty_print=True))
# f.close()
elements = root.xpath('(//div[@id="formDiv"]//table//tr[2]/td[3]/a)')
if len(elements) == 0:
raise Exception("Unable to parse URL from index page")
href = elements[0].get('href')
return href
def fix_url(href, base_url):
# if the url links to an interactive iXBRL adjust the URL to link to the normal html
# eg. https://www.sec.gov/ix?doc=/Archives/edgar/data/1018840/000101884020000094/anf-20201031.htm
# -> https://www.sec.gov/Archives/edgar/data/1018840/000101884020000094/anf-20201031.htm
path = href.replace('ix?doc=/', '')
# a relative url needs to be joined with the base url
url = urljoin(base_url, path)
return url
def to_doc(content):
# Try to parse as XML/XHTML and fallback to soupparser
try:
doc = etree.fromstring(content)
except:
doc = fromstring(content)
return doc
def db_connect():
db = sqlite3.connect('edgar_htm_idx.sqlite3')
return db
def db_insert(db: Connection, records):
c = db.cursor()
c.executemany("INSERT INTO reports(index_id, conm, type, cik, date, url, filetype, filename) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", records)
db.commit()
def db_update(db: Connection, records):
c = db.cursor()
c.executemany("""
UPDATE reports SET
is_analyzed = ?,
has_positive_sentiment = ?,
word_count = ?,
pos_count = ?,
neg_count = ?,
pos_words = ?,
neg_words = ?
where id = ?""", records)
db.commit()
def db_ensure_init(db: Connection):
cur = db.cursor()
# TODO: FIXME add any new columns you want to store in the database
cur.execute("""CREATE TABLE IF NOT EXISTS "reports" (
"id" INTEGER NOT NULL,
"index_id" INTEGER UNIQUE,
"conm" TEXT,
"type" TEXT,
"cik" TEXT,
"date" TEXT,
"url" TEXT,
"filetype" TEXT,
"filename" TEXT,
"is_analyzed" INTEGER DEFAULT 0,
"has_positive_sentiment" INTEGER,
"word_count" INTEGER,
"pos_count" INTEGER,
"neg_count" INTEGER,
"pos_words" TEXT,
"neg_words" TEXT,
PRIMARY KEY("id" AUTOINCREMENT)
FOREIGN KEY (index_id) REFERENCES "index"(id)
);""")
@click.group()
def cli():
pass
cli.add_command(fetch_report_urls)
cli.add_command(analyze)
if __name__ == '__main__':
cli()
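# Example invocations (added for illustration; click derives the dashed
# command names from the function names, and "edgar.py" is a placeholder
# for however this file is saved):
#
#   python edgar.py fetch-report-urls 2019 2020 --batch-size 25
#   python edgar.py analyze -s 25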
|
normal
|
{
"blob_id": "88e4e6647d4720d1c99f3e3438100790903921b5",
"index": 9163,
"step-1": "<mask token>\n\n\n@click.command()\n@click.option('-s', '--batch-size', 'batch_size', default=50)\ndef analyze(batch_size):\n db = db_connect()\n db_ensure_init(db)\n cmd = db.execute('SELECT id, url FROM reports WHERE is_analyzed = 0')\n for batch in iter(lambda : cmd.fetchmany(batch_size), []):\n to_update = list()\n for r in batch:\n print('Analyzing: ' + r[1])\n response = requests.get(r[1])\n text = parse_text(response.text)\n print(text[0:400] + '\\n[CLIPPED]')\n result = tone_count_with_negation_check(lmdict, text)\n has_positive_sentiment = result[1] > result[2]\n to_update.append((True, has_positive_sentiment, result[0],\n result[1], result[2], ' '.join(result[3]), ' '.join(result[\n 4]), r[0]))\n db_update(db, to_update)\n\n\n<mask token>\n\n\ndef fix_url(href, base_url):\n path = href.replace('ix?doc=/', '')\n url = urljoin(base_url, path)\n return url\n\n\n<mask token>\n\n\n@click.group()\ndef cli():\n pass\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@click.command()\n@click.option('-s', '--batch-size', 'batch_size', default=50)\ndef analyze(batch_size):\n db = db_connect()\n db_ensure_init(db)\n cmd = db.execute('SELECT id, url FROM reports WHERE is_analyzed = 0')\n for batch in iter(lambda : cmd.fetchmany(batch_size), []):\n to_update = list()\n for r in batch:\n print('Analyzing: ' + r[1])\n response = requests.get(r[1])\n text = parse_text(response.text)\n print(text[0:400] + '\\n[CLIPPED]')\n result = tone_count_with_negation_check(lmdict, text)\n has_positive_sentiment = result[1] > result[2]\n to_update.append((True, has_positive_sentiment, result[0],\n result[1], result[2], ' '.join(result[3]), ' '.join(result[\n 4]), r[0]))\n db_update(db, to_update)\n\n\n@click.command()\n@click.argument('start', nargs=1)\n@click.argument('end', nargs=1)\n@click.option('-s', '--batch-size', 'batch_size', default=50)\ndef fetch_report_urls(start, end, batch_size):\n \"\"\"Fetches and stores the 10-K report URLs\"\"\"\n db = db_connect()\n db_ensure_init(db)\n with open('log.csv', 'w', newline='') as log:\n logwriter = csv.writer(log)\n cmd = db.execute(\n \"\"\"\n SELECT ix.id, ix.conm, ix.type, ix.cik, ix.date, ix.path\n FROM \"index\" ix\n LEFT JOIN reports r ON ix.id = r.index_id\n WHERE ix.type = '10-K' AND r.id IS NULL AND\n CAST(strftime('%Y', DATE(ix.date)) as INT) >= {start} AND\n CAST(strftime('%Y', DATE(ix.date)) as INT) <= {end}\n ORDER BY ix.date DESC\n \"\"\"\n .format(start=start, end=end))\n for batch in iter(lambda : cmd.fetchmany(batch_size), []):\n to_insert = list()\n for r in batch:\n log_row = r\n response = requests.get(r[5])\n href = parse_href(response.content)\n url = fix_url(href, r[5])\n print(url)\n filetype = mimetypes.guess_type(url)[0]\n print(filetype)\n filename = os.path.basename(urlparse(url).path)\n print(filename)\n to_insert.append((r[0], r[1], r[2], r[3], r[4], url,\n filetype, filename))\n logwriter.writerow(log_row)\n db_insert(db, to_insert)\n\n\n<mask token>\n\n\ndef fix_url(href, base_url):\n path = href.replace('ix?doc=/', '')\n url = urljoin(base_url, path)\n return url\n\n\n<mask token>\n\n\ndef db_connect():\n db = sqlite3.connect('edgar_htm_idx.sqlite3')\n return db\n\n\n<mask token>\n\n\ndef db_update(db: Connection, records):\n c = db.cursor()\n c.executemany(\n \"\"\"\n UPDATE reports SET\n is_analyzed = ?,\n has_positive_sentiment = ?,\n word_count = ?,\n pos_count = ?,\n neg_count = ?,\n pos_words = ?,\n neg_words = ?\n where id = ?\"\"\"\n , records)\n db.commit()\n\n\n<mask token>\n\n\n@click.group()\ndef cli():\n pass\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@click.command()\n@click.option('-s', '--batch-size', 'batch_size', default=50)\ndef analyze(batch_size):\n db = db_connect()\n db_ensure_init(db)\n cmd = db.execute('SELECT id, url FROM reports WHERE is_analyzed = 0')\n for batch in iter(lambda : cmd.fetchmany(batch_size), []):\n to_update = list()\n for r in batch:\n print('Analyzing: ' + r[1])\n response = requests.get(r[1])\n text = parse_text(response.text)\n print(text[0:400] + '\\n[CLIPPED]')\n result = tone_count_with_negation_check(lmdict, text)\n has_positive_sentiment = result[1] > result[2]\n to_update.append((True, has_positive_sentiment, result[0],\n result[1], result[2], ' '.join(result[3]), ' '.join(result[\n 4]), r[0]))\n db_update(db, to_update)\n\n\n@click.command()\n@click.argument('start', nargs=1)\n@click.argument('end', nargs=1)\n@click.option('-s', '--batch-size', 'batch_size', default=50)\ndef fetch_report_urls(start, end, batch_size):\n \"\"\"Fetches and stores the 10-K report URLs\"\"\"\n db = db_connect()\n db_ensure_init(db)\n with open('log.csv', 'w', newline='') as log:\n logwriter = csv.writer(log)\n cmd = db.execute(\n \"\"\"\n SELECT ix.id, ix.conm, ix.type, ix.cik, ix.date, ix.path\n FROM \"index\" ix\n LEFT JOIN reports r ON ix.id = r.index_id\n WHERE ix.type = '10-K' AND r.id IS NULL AND\n CAST(strftime('%Y', DATE(ix.date)) as INT) >= {start} AND\n CAST(strftime('%Y', DATE(ix.date)) as INT) <= {end}\n ORDER BY ix.date DESC\n \"\"\"\n .format(start=start, end=end))\n for batch in iter(lambda : cmd.fetchmany(batch_size), []):\n to_insert = list()\n for r in batch:\n log_row = r\n response = requests.get(r[5])\n href = parse_href(response.content)\n url = fix_url(href, r[5])\n print(url)\n filetype = mimetypes.guess_type(url)[0]\n print(filetype)\n filename = os.path.basename(urlparse(url).path)\n print(filename)\n to_insert.append((r[0], r[1], r[2], r[3], r[4], url,\n filetype, filename))\n logwriter.writerow(log_row)\n db_insert(db, to_insert)\n\n\ndef parse_href(html_content):\n root = to_doc(html_content)\n elements = root.xpath('(//div[@id=\"formDiv\"]//table//tr[2]/td[3]/a)')\n if len(elements) == 0:\n raise Exception('Unable to parse URL from index page')\n href = elements[0].get('href')\n return href\n\n\ndef fix_url(href, base_url):\n path = href.replace('ix?doc=/', '')\n url = urljoin(base_url, path)\n return url\n\n\ndef to_doc(content):\n try:\n doc = etree.fromstring(content)\n except:\n doc = fromstring(content)\n return doc\n\n\ndef db_connect():\n db = sqlite3.connect('edgar_htm_idx.sqlite3')\n return db\n\n\ndef db_insert(db: Connection, records):\n c = db.cursor()\n c.executemany(\n 'INSERT INTO reports(index_id, conm, type, cik, date, url, filetype, filename) VALUES (?, ?, ?, ?, ?, ?, ?, ?)'\n , records)\n db.commit()\n\n\ndef db_update(db: Connection, records):\n c = db.cursor()\n c.executemany(\n \"\"\"\n UPDATE reports SET\n is_analyzed = ?,\n has_positive_sentiment = ?,\n word_count = ?,\n pos_count = ?,\n neg_count = ?,\n pos_words = ?,\n neg_words = ?\n where id = ?\"\"\"\n , records)\n db.commit()\n\n\n<mask token>\n\n\n@click.group()\ndef cli():\n pass\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\n@click.command()\n@click.option('-s', '--batch-size', 'batch_size', default=50)\ndef analyze(batch_size):\n db = db_connect()\n db_ensure_init(db)\n cmd = db.execute('SELECT id, url FROM reports WHERE is_analyzed = 0')\n for batch in iter(lambda : cmd.fetchmany(batch_size), []):\n to_update = list()\n for r in batch:\n print('Analyzing: ' + r[1])\n response = requests.get(r[1])\n text = parse_text(response.text)\n print(text[0:400] + '\\n[CLIPPED]')\n result = tone_count_with_negation_check(lmdict, text)\n has_positive_sentiment = result[1] > result[2]\n to_update.append((True, has_positive_sentiment, result[0],\n result[1], result[2], ' '.join(result[3]), ' '.join(result[\n 4]), r[0]))\n db_update(db, to_update)\n\n\n@click.command()\n@click.argument('start', nargs=1)\n@click.argument('end', nargs=1)\n@click.option('-s', '--batch-size', 'batch_size', default=50)\ndef fetch_report_urls(start, end, batch_size):\n \"\"\"Fetches and stores the 10-K report URLs\"\"\"\n db = db_connect()\n db_ensure_init(db)\n with open('log.csv', 'w', newline='') as log:\n logwriter = csv.writer(log)\n cmd = db.execute(\n \"\"\"\n SELECT ix.id, ix.conm, ix.type, ix.cik, ix.date, ix.path\n FROM \"index\" ix\n LEFT JOIN reports r ON ix.id = r.index_id\n WHERE ix.type = '10-K' AND r.id IS NULL AND\n CAST(strftime('%Y', DATE(ix.date)) as INT) >= {start} AND\n CAST(strftime('%Y', DATE(ix.date)) as INT) <= {end}\n ORDER BY ix.date DESC\n \"\"\"\n .format(start=start, end=end))\n for batch in iter(lambda : cmd.fetchmany(batch_size), []):\n to_insert = list()\n for r in batch:\n log_row = r\n response = requests.get(r[5])\n href = parse_href(response.content)\n url = fix_url(href, r[5])\n print(url)\n filetype = mimetypes.guess_type(url)[0]\n print(filetype)\n filename = os.path.basename(urlparse(url).path)\n print(filename)\n to_insert.append((r[0], r[1], r[2], r[3], r[4], url,\n filetype, filename))\n logwriter.writerow(log_row)\n db_insert(db, to_insert)\n\n\ndef parse_href(html_content):\n root = to_doc(html_content)\n elements = root.xpath('(//div[@id=\"formDiv\"]//table//tr[2]/td[3]/a)')\n if len(elements) == 0:\n raise Exception('Unable to parse URL from index page')\n href = elements[0].get('href')\n return href\n\n\ndef fix_url(href, base_url):\n path = href.replace('ix?doc=/', '')\n url = urljoin(base_url, path)\n return url\n\n\ndef to_doc(content):\n try:\n doc = etree.fromstring(content)\n except:\n doc = fromstring(content)\n return doc\n\n\ndef db_connect():\n db = sqlite3.connect('edgar_htm_idx.sqlite3')\n return db\n\n\ndef db_insert(db: Connection, records):\n c = db.cursor()\n c.executemany(\n 'INSERT INTO reports(index_id, conm, type, cik, date, url, filetype, filename) VALUES (?, ?, ?, ?, ?, ?, ?, ?)'\n , records)\n db.commit()\n\n\ndef db_update(db: Connection, records):\n c = db.cursor()\n c.executemany(\n \"\"\"\n UPDATE reports SET\n is_analyzed = ?,\n has_positive_sentiment = ?,\n word_count = ?,\n pos_count = ?,\n neg_count = ?,\n pos_words = ?,\n neg_words = ?\n where id = ?\"\"\"\n , records)\n db.commit()\n\n\ndef db_ensure_init(db: Connection):\n cur = db.cursor()\n cur.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS \"reports\" (\n \"id\"\tINTEGER NOT NULL,\n \"index_id\" INTEGER UNIQUE,\n \"conm\" TEXT,\n \"type\" TEXT,\n \"cik\" TEXT,\n \"date\" TEXT,\n \"url\"\tTEXT,\n \"filetype\"\tTEXT,\n \"filename\"\tTEXT,\n \"is_analyzed\"\tINTEGER DEFAULT 0,\n \"has_positive_sentiment\" INTEGER,\n \"word_count\" INTEGER,\n \"pos_count\" INTEGER,\n \"neg_count\" 
INTEGER,\n \"pos_words\" TEXT,\n \"neg_words\" TEXT,\n PRIMARY KEY(\"id\" AUTOINCREMENT)\n FOREIGN KEY (index_id) REFERENCES \"index\"(id)\n );\"\"\"\n )\n\n\n@click.group()\ndef cli():\n pass\n\n\n<mask token>\n",
"step-5": "import os\nimport click\nimport csv\nimport sqlite3\nfrom sqlite3.dbapi2 import Connection\nimport requests\nimport mimetypes\nfrom urllib.parse import urljoin, urlparse\nfrom lxml.html.soupparser import fromstring\nfrom lxml import etree\nfrom lxml.etree import tostring\nfrom analysis import lmdict, tone_count_with_negation_check\nfrom parser import parse_text\n\n@click.command()\n@click.option('-s','--batch-size', 'batch_size', default=50)\ndef analyze(batch_size):\n db = db_connect()\n db_ensure_init(db)\n\n cmd = db.execute(\"SELECT id, url FROM reports WHERE is_analyzed = 0\")\n for batch in iter(lambda: cmd.fetchmany(batch_size), []):\n to_update = list()\n for r in batch:\n print(\"Analyzing: \" + r[1])\n response = requests.get(r[1])\n\n text = parse_text(response.text)\n print(text[0:400] + '\\n[CLIPPED]')\n\n # perform text analysis\n result = tone_count_with_negation_check(lmdict, text)\n\n has_positive_sentiment = result[1] > result[2]\n\n # TODO: FIXME\n # Here you should pass in all the variables that you want to store in the database\n # Refer to \"db_update\" method in what order params should be passed\n to_update.append((\n True,\n has_positive_sentiment,\n result[0],\n result[1],\n result[2],\n \" \".join(result[3]),\n \" \".join(result[4]),\n r[0]))\n\n db_update(db, to_update)\n\n\n@click.command()\n@click.argument('start', nargs=1)\n@click.argument('end', nargs=1)\n@click.option('-s','--batch-size', 'batch_size', default=50)\ndef fetch_report_urls(start, end, batch_size):\n \"\"\"Fetches and stores the 10-K report URLs\"\"\"\n db = db_connect()\n db_ensure_init(db)\n\n with open('log.csv', 'w', newline='') as log:\n logwriter = csv.writer(log)\n\n cmd = db.execute(\"\"\"\n SELECT ix.id, ix.conm, ix.type, ix.cik, ix.date, ix.path\n FROM \"index\" ix\n LEFT JOIN reports r ON ix.id = r.index_id\n WHERE ix.type = '10-K' AND r.id IS NULL AND\n CAST(strftime('%Y', DATE(ix.date)) as INT) >= {start} AND\n CAST(strftime('%Y', DATE(ix.date)) as INT) <= {end}\n ORDER BY ix.date DESC\n \"\"\".format(start=start, end=end))\n\n for batch in iter(lambda: cmd.fetchmany(batch_size), []):\n to_insert = list()\n for r in batch:\n # print(r)\n log_row = r\n\n response = requests.get(r[5])\n href = parse_href(response.content)\n url = fix_url(href, r[5])\n print(url)\n\n filetype = mimetypes.guess_type(url)[0]\n print(filetype)\n\n filename = os.path.basename(urlparse(url).path)\n print(filename)\n\n to_insert.append((r[0], r[1], r[2], r[3], r[4], url, filetype, filename))\n\n logwriter.writerow(log_row)\n\n db_insert(db, to_insert)\n\ndef parse_href(html_content):\n # print(html_content)\n root = to_doc(html_content)\n # f = open(\"debug_idx.html\", \"wb\")\n # f.write(tostring(root, pretty_print=True))\n # f.close()\n elements = root.xpath('(//div[@id=\"formDiv\"]//table//tr[2]/td[3]/a)')\n\n if len(elements) == 0:\n raise Exception(\"Unable to parse URL from index page\")\n\n href = elements[0].get('href')\n return href\n\ndef fix_url(href, base_url):\n # if the url links to an interactive iXBRL adjust the URL to link to the normal html\n # eg. 
https://www.sec.gov/ix?doc=/Archives/edgar/data/1018840/000101884020000094/anf-20201031.htm\n # -> https://www.sec.gov/Archives/edgar/data/1018840/000101884020000094/anf-20201031.htm\n path = href.replace('ix?doc=/', '')\n # a relative url needs to be joined with the base url\n url = urljoin(base_url, path)\n return url\n\ndef to_doc(content):\n # Try to parse as XML/XHTML and fallback to soupparser\n try:\n doc = etree.fromstring(content)\n except:\n doc = fromstring(content)\n\n return doc\n\ndef db_connect():\n db = sqlite3.connect('edgar_htm_idx.sqlite3')\n return db\n\ndef db_insert(db: Connection, records):\n c = db.cursor()\n c.executemany(\"INSERT INTO reports(index_id, conm, type, cik, date, url, filetype, filename) VALUES (?, ?, ?, ?, ?, ?, ?, ?)\", records)\n db.commit()\n\ndef db_update(db: Connection, records):\n c = db.cursor()\n c.executemany(\"\"\"\n UPDATE reports SET\n is_analyzed = ?,\n has_positive_sentiment = ?,\n word_count = ?,\n pos_count = ?,\n neg_count = ?,\n pos_words = ?,\n neg_words = ?\n where id = ?\"\"\", records)\n db.commit()\n\ndef db_ensure_init(db: Connection):\n cur = db.cursor()\n # TODO: FIXME add any new columns you want to store in the database\n cur.execute(\"\"\"CREATE TABLE IF NOT EXISTS \"reports\" (\n \"id\"\tINTEGER NOT NULL,\n \"index_id\" INTEGER UNIQUE,\n \"conm\" TEXT,\n \"type\" TEXT,\n \"cik\" TEXT,\n \"date\" TEXT,\n \"url\"\tTEXT,\n \"filetype\"\tTEXT,\n \"filename\"\tTEXT,\n \"is_analyzed\"\tINTEGER DEFAULT 0,\n \"has_positive_sentiment\" INTEGER,\n \"word_count\" INTEGER,\n \"pos_count\" INTEGER,\n \"neg_count\" INTEGER,\n \"pos_words\" TEXT,\n \"neg_words\" TEXT,\n PRIMARY KEY(\"id\" AUTOINCREMENT)\n FOREIGN KEY (index_id) REFERENCES \"index\"(id)\n );\"\"\")\n\n\n@click.group()\ndef cli():\n pass\n\ncli.add_command(fetch_report_urls)\ncli.add_command(analyze)\n\nif __name__ == '__main__':\n cli()\n",
"step-ids": [
3,
6,
9,
10,
13
]
}
|
[
3,
6,
9,
10,
13
] |
#!/usr/bin/env python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
import numpy as np
import tensorflow as tf
from tensorflow.contrib.factorization import WALSMatrixFactorization
tf.logging.set_verbosity(tf.logging.INFO)
from tensorflow.python.lib.io import file_io
def read_dataset(mode, args):
def decode_example(protos, vocab_size):
features = {
"key": tf.FixedLenFeature(shape = [1], dtype = tf.int64),
"indices": tf.VarLenFeature(dtype = tf.int64),
"values": tf.VarLenFeature(dtype = tf.float32)}
parsed_features = tf.parse_single_example(serialized = protos, features = features)
values = tf.sparse_merge(sp_ids = parsed_features["indices"], sp_values = parsed_features["values"], vocab_size = vocab_size)
# Save key to remap after batching
# This is a temporary workaround to assign correct row numbers in each batch.
# You can ignore details of this part and remap_keys().
key = parsed_features["key"]
decoded_sparse_tensor = tf.SparseTensor(indices = tf.concat(values = [values.indices, [key]], axis = 0),
values = tf.concat(values = [values.values, [0.0]], axis = 0),
dense_shape = values.dense_shape)
return decoded_sparse_tensor
def remap_keys(sparse_tensor):
# Current indices of our SparseTensor that we need to fix
bad_indices = sparse_tensor.indices # shape = (current_batch_size * (number_of_items/users[i] + 1), 2)
# Current values of our SparseTensor that we need to fix
bad_values = sparse_tensor.values # shape = (current_batch_size * (number_of_items/users[i] + 1),)
# Since batch is ordered, the last value for a batch index is the user
# Find where the batch index chages to extract the user rows
# 1 where user, else 0
user_mask = tf.concat(values = [bad_indices[1:,0] - bad_indices[:-1,0], tf.constant(value = [1], dtype = tf.int64)], axis = 0) # shape = (current_batch_size * (number_of_items/users[i] + 1), 2)
# Mask out the user rows from the values
good_values = tf.boolean_mask(tensor = bad_values, mask = tf.equal(x = user_mask, y = 0)) # shape = (current_batch_size * number_of_items/users[i],)
item_indices = tf.boolean_mask(tensor = bad_indices, mask = tf.equal(x = user_mask, y = 0)) # shape = (current_batch_size * number_of_items/users[i],)
user_indices = tf.boolean_mask(tensor = bad_indices, mask = tf.equal(x = user_mask, y = 1))[:, 1] # shape = (current_batch_size,)
good_user_indices = tf.gather(params = user_indices, indices = item_indices[:,0]) # shape = (current_batch_size * number_of_items/users[i],)
# User and item indices are rank 1, need to make rank 1 to concat
good_user_indices_expanded = tf.expand_dims(input = good_user_indices, axis = -1) # shape = (current_batch_size * number_of_items/users[i], 1)
good_item_indices_expanded = tf.expand_dims(input = item_indices[:, 1], axis = -1) # shape = (current_batch_size * number_of_items/users[i], 1)
good_indices = tf.concat(values = [good_user_indices_expanded, good_item_indices_expanded], axis = 1) # shape = (current_batch_size * number_of_items/users[i], 2)
remapped_sparse_tensor = tf.SparseTensor(indices = good_indices, values = good_values, dense_shape = sparse_tensor.dense_shape)
return remapped_sparse_tensor
def parse_tfrecords(filename, vocab_size):
if mode == tf.estimator.ModeKeys.TRAIN:
num_epochs = None # indefinitely
else:
num_epochs = 1 # end-of-input after this
files = tf.gfile.Glob(filename = os.path.join(args["input_path"], filename))
# Create dataset from file list
dataset = tf.data.TFRecordDataset(files)
dataset = dataset.map(map_func = lambda x: decode_example(x, vocab_size))
dataset = dataset.repeat(count = num_epochs)
dataset = dataset.batch(batch_size = args["batch_size"])
dataset = dataset.map(map_func = lambda x: remap_keys(x))
return dataset.make_one_shot_iterator().get_next()
def _input_fn():
features = {
WALSMatrixFactorization.INPUT_ROWS: parse_tfrecords("items_for_user", args["nitems"]),
WALSMatrixFactorization.INPUT_COLS: parse_tfrecords("users_for_item", args["nusers"]),
WALSMatrixFactorization.PROJECT_ROW: tf.constant(True)
}
return features, None
    return _input_fn
def find_top_k(user, item_factors, k):
all_items = tf.matmul(a = tf.expand_dims(input = user, axis = 0), b = tf.transpose(a = item_factors))
topk = tf.nn.top_k(input = all_items, k = k)
return tf.cast(x = topk.indices, dtype = tf.int64)
def batch_predict(args):
import numpy as np
with tf.Session() as sess:
estimator = tf.contrib.factorization.WALSMatrixFactorization(
num_rows = args["nusers"],
num_cols = args["nitems"],
embedding_dimension = args["n_embeds"],
model_dir = args["output_dir"])
# This is how you would get the row factors for out-of-vocab user data
# row_factors = list(estimator.get_projections(input_fn=read_dataset(tf.estimator.ModeKeys.EVAL, args)))
# user_factors = tf.convert_to_tensor(np.array(row_factors))
# But for in-vocab data, the row factors are already in the checkpoint
user_factors = tf.convert_to_tensor(value = estimator.get_row_factors()[0]) # (nusers, nembeds)
# In either case, we have to assume catalog doesn"t change, so col_factors are read in
item_factors = tf.convert_to_tensor(value = estimator.get_col_factors()[0])# (nitems, nembeds)
# For each user, find the top K items
topk = tf.squeeze(input = tf.map_fn(fn = lambda user: find_top_k(user, item_factors, args["topk"]), elems = user_factors, dtype = tf.int64))
with file_io.FileIO(os.path.join(args["output_dir"], "batch_pred.txt"), mode = 'w') as f:
for best_items_for_user in topk.eval():
f.write(",".join(str(x) for x in best_items_for_user) + '\n')
def train_and_evaluate(args):
train_steps = int(0.5 + (1.0 * args["num_epochs"] * args["nusers"]) / args["batch_size"])
steps_in_epoch = int(0.5 + args["nusers"] / args["batch_size"])
print("Will train for {} steps, evaluating once every {} steps".format(train_steps, steps_in_epoch))
def experiment_fn(output_dir):
return tf.contrib.learn.Experiment(
tf.contrib.factorization.WALSMatrixFactorization(
num_rows = args["nusers"],
num_cols = args["nitems"],
embedding_dimension = args["n_embeds"],
model_dir = args["output_dir"]),
train_input_fn = read_dataset(tf.estimator.ModeKeys.TRAIN, args),
eval_input_fn = read_dataset(tf.estimator.ModeKeys.EVAL, args),
train_steps = train_steps,
eval_steps = 1,
min_eval_frequency = steps_in_epoch
)
from tensorflow.contrib.learn.python.learn import learn_runner
learn_runner.run(experiment_fn = experiment_fn, output_dir = args["output_dir"])
batch_predict(args)
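# Hedged usage sketch (added): train_and_evaluate expects a flat dict of
# hyperparameters. The keys below are exactly the ones looked up in the code
# above; the values are placeholders, not recommendations:
#
#   args = {
#       'input_path': 'gs://my-bucket/wals/data',   # holds items_for_user / users_for_item TFRecords
#       'output_dir': 'gs://my-bucket/wals/model',
#       'nusers': 100000, 'nitems': 5000,
#       'n_embeds': 10, 'num_epochs': 10,
#       'batch_size': 512, 'topk': 5,
#   }
#   train_and_evaluate(args)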
|
normal
|
{
"blob_id": "fb9ae5b3cdeac0c254669e214779ad43a02bff6d",
"index": 4596,
"step-1": "<mask token>\n\n\ndef read_dataset(mode, args):\n\n def decode_example(protos, vocab_size):\n features = {'key': tf.FixedLenFeature(shape=[1], dtype=tf.int64),\n 'indices': tf.VarLenFeature(dtype=tf.int64), 'values': tf.\n VarLenFeature(dtype=tf.float32)}\n parsed_features = tf.parse_single_example(serialized=protos,\n features=features)\n values = tf.sparse_merge(sp_ids=parsed_features['indices'],\n sp_values=parsed_features['values'], vocab_size=vocab_size)\n key = parsed_features['key']\n decoded_sparse_tensor = tf.SparseTensor(indices=tf.concat(values=[\n values.indices, [key]], axis=0), values=tf.concat(values=[\n values.values, [0.0]], axis=0), dense_shape=values.dense_shape)\n return decoded_sparse_tensor\n\n def remap_keys(sparse_tensor):\n bad_indices = sparse_tensor.indices\n bad_values = sparse_tensor.values\n user_mask = tf.concat(values=[bad_indices[1:, 0] - bad_indices[:-1,\n 0], tf.constant(value=[1], dtype=tf.int64)], axis=0)\n good_values = tf.boolean_mask(tensor=bad_values, mask=tf.equal(x=\n user_mask, y=0))\n item_indices = tf.boolean_mask(tensor=bad_indices, mask=tf.equal(x=\n user_mask, y=0))\n user_indices = tf.boolean_mask(tensor=bad_indices, mask=tf.equal(x=\n user_mask, y=1))[:, 1]\n good_user_indices = tf.gather(params=user_indices, indices=\n item_indices[:, 0])\n good_user_indices_expanded = tf.expand_dims(input=good_user_indices,\n axis=-1)\n good_item_indices_expanded = tf.expand_dims(input=item_indices[:, 1\n ], axis=-1)\n good_indices = tf.concat(values=[good_user_indices_expanded,\n good_item_indices_expanded], axis=1)\n remapped_sparse_tensor = tf.SparseTensor(indices=good_indices,\n values=good_values, dense_shape=sparse_tensor.dense_shape)\n return remapped_sparse_tensor\n\n def parse_tfrecords(filename, vocab_size):\n if mode == tf.estimator.ModeKeys.TRAIN:\n num_epochs = None\n else:\n num_epochs = 1\n files = tf.gfile.Glob(filename=os.path.join(args['input_path'],\n filename))\n dataset = tf.data.TFRecordDataset(files)\n dataset = dataset.map(map_func=lambda x: decode_example(x, vocab_size))\n dataset = dataset.repeat(count=num_epochs)\n dataset = dataset.batch(batch_size=args['batch_size'])\n dataset = dataset.map(map_func=lambda x: remap_keys(x))\n return dataset.make_one_shot_iterator().get_next()\n\n def _input_fn():\n features = {WALSMatrixFactorization.INPUT_ROWS: parse_tfrecords(\n 'items_for_user', args['nitems']), WALSMatrixFactorization.\n INPUT_COLS: parse_tfrecords('users_for_item', args['nusers']),\n WALSMatrixFactorization.PROJECT_ROW: tf.constant(True)}\n return features, None\n return _input_fn\n\n def input_cols():\n return parse_tfrecords('users_for_item', args['nusers'])\n return _input_fn\n\n\ndef find_top_k(user, item_factors, k):\n all_items = tf.matmul(a=tf.expand_dims(input=user, axis=0), b=tf.\n transpose(a=item_factors))\n topk = tf.nn.top_k(input=all_items, k=k)\n return tf.cast(x=topk.indices, dtype=tf.int64)\n\n\n<mask token>\n\n\ndef train_and_evaluate(args):\n train_steps = int(0.5 + 1.0 * args['num_epochs'] * args['nusers'] /\n args['batch_size'])\n steps_in_epoch = int(0.5 + args['nusers'] / args['batch_size'])\n print('Will train for {} steps, evaluating once every {} steps'.format(\n train_steps, steps_in_epoch))\n\n def experiment_fn(output_dir):\n return tf.contrib.learn.Experiment(tf.contrib.factorization.\n WALSMatrixFactorization(num_rows=args['nusers'], num_cols=args[\n 'nitems'], embedding_dimension=args['n_embeds'], model_dir=args\n ['output_dir']), train_input_fn=read_dataset(tf.estimator.\n 
ModeKeys.TRAIN, args), eval_input_fn=read_dataset(tf.estimator.\n ModeKeys.EVAL, args), train_steps=train_steps, eval_steps=1,\n min_eval_frequency=steps_in_epoch)\n from tensorflow.contrib.learn.python.learn import learn_runner\n learn_runner.run(experiment_fn=experiment_fn, output_dir=args['output_dir']\n )\n batch_predict(args)\n",
"step-2": "<mask token>\n\n\ndef read_dataset(mode, args):\n\n def decode_example(protos, vocab_size):\n features = {'key': tf.FixedLenFeature(shape=[1], dtype=tf.int64),\n 'indices': tf.VarLenFeature(dtype=tf.int64), 'values': tf.\n VarLenFeature(dtype=tf.float32)}\n parsed_features = tf.parse_single_example(serialized=protos,\n features=features)\n values = tf.sparse_merge(sp_ids=parsed_features['indices'],\n sp_values=parsed_features['values'], vocab_size=vocab_size)\n key = parsed_features['key']\n decoded_sparse_tensor = tf.SparseTensor(indices=tf.concat(values=[\n values.indices, [key]], axis=0), values=tf.concat(values=[\n values.values, [0.0]], axis=0), dense_shape=values.dense_shape)\n return decoded_sparse_tensor\n\n def remap_keys(sparse_tensor):\n bad_indices = sparse_tensor.indices\n bad_values = sparse_tensor.values\n user_mask = tf.concat(values=[bad_indices[1:, 0] - bad_indices[:-1,\n 0], tf.constant(value=[1], dtype=tf.int64)], axis=0)\n good_values = tf.boolean_mask(tensor=bad_values, mask=tf.equal(x=\n user_mask, y=0))\n item_indices = tf.boolean_mask(tensor=bad_indices, mask=tf.equal(x=\n user_mask, y=0))\n user_indices = tf.boolean_mask(tensor=bad_indices, mask=tf.equal(x=\n user_mask, y=1))[:, 1]\n good_user_indices = tf.gather(params=user_indices, indices=\n item_indices[:, 0])\n good_user_indices_expanded = tf.expand_dims(input=good_user_indices,\n axis=-1)\n good_item_indices_expanded = tf.expand_dims(input=item_indices[:, 1\n ], axis=-1)\n good_indices = tf.concat(values=[good_user_indices_expanded,\n good_item_indices_expanded], axis=1)\n remapped_sparse_tensor = tf.SparseTensor(indices=good_indices,\n values=good_values, dense_shape=sparse_tensor.dense_shape)\n return remapped_sparse_tensor\n\n def parse_tfrecords(filename, vocab_size):\n if mode == tf.estimator.ModeKeys.TRAIN:\n num_epochs = None\n else:\n num_epochs = 1\n files = tf.gfile.Glob(filename=os.path.join(args['input_path'],\n filename))\n dataset = tf.data.TFRecordDataset(files)\n dataset = dataset.map(map_func=lambda x: decode_example(x, vocab_size))\n dataset = dataset.repeat(count=num_epochs)\n dataset = dataset.batch(batch_size=args['batch_size'])\n dataset = dataset.map(map_func=lambda x: remap_keys(x))\n return dataset.make_one_shot_iterator().get_next()\n\n def _input_fn():\n features = {WALSMatrixFactorization.INPUT_ROWS: parse_tfrecords(\n 'items_for_user', args['nitems']), WALSMatrixFactorization.\n INPUT_COLS: parse_tfrecords('users_for_item', args['nusers']),\n WALSMatrixFactorization.PROJECT_ROW: tf.constant(True)}\n return features, None\n return _input_fn\n\n def input_cols():\n return parse_tfrecords('users_for_item', args['nusers'])\n return _input_fn\n\n\ndef find_top_k(user, item_factors, k):\n all_items = tf.matmul(a=tf.expand_dims(input=user, axis=0), b=tf.\n transpose(a=item_factors))\n topk = tf.nn.top_k(input=all_items, k=k)\n return tf.cast(x=topk.indices, dtype=tf.int64)\n\n\ndef batch_predict(args):\n import numpy as np\n with tf.Session() as sess:\n estimator = tf.contrib.factorization.WALSMatrixFactorization(num_rows\n =args['nusers'], num_cols=args['nitems'], embedding_dimension=\n args['n_embeds'], model_dir=args['output_dir'])\n user_factors = tf.convert_to_tensor(value=estimator.get_row_factors\n ()[0])\n item_factors = tf.convert_to_tensor(value=estimator.get_col_factors\n ()[0])\n topk = tf.squeeze(input=tf.map_fn(fn=lambda user: find_top_k(user,\n item_factors, args['topk']), elems=user_factors, dtype=tf.int64))\n with 
file_io.FileIO(os.path.join(args['output_dir'],\n 'batch_pred.txt'), mode='w') as f:\n for best_items_for_user in topk.eval():\n f.write(','.join(str(x) for x in best_items_for_user) + '\\n')\n\n\ndef train_and_evaluate(args):\n train_steps = int(0.5 + 1.0 * args['num_epochs'] * args['nusers'] /\n args['batch_size'])\n steps_in_epoch = int(0.5 + args['nusers'] / args['batch_size'])\n print('Will train for {} steps, evaluating once every {} steps'.format(\n train_steps, steps_in_epoch))\n\n def experiment_fn(output_dir):\n return tf.contrib.learn.Experiment(tf.contrib.factorization.\n WALSMatrixFactorization(num_rows=args['nusers'], num_cols=args[\n 'nitems'], embedding_dimension=args['n_embeds'], model_dir=args\n ['output_dir']), train_input_fn=read_dataset(tf.estimator.\n ModeKeys.TRAIN, args), eval_input_fn=read_dataset(tf.estimator.\n ModeKeys.EVAL, args), train_steps=train_steps, eval_steps=1,\n min_eval_frequency=steps_in_epoch)\n from tensorflow.contrib.learn.python.learn import learn_runner\n learn_runner.run(experiment_fn=experiment_fn, output_dir=args['output_dir']\n )\n batch_predict(args)\n",
"step-3": "<mask token>\ntf.logging.set_verbosity(tf.logging.INFO)\n<mask token>\n\n\ndef read_dataset(mode, args):\n\n def decode_example(protos, vocab_size):\n features = {'key': tf.FixedLenFeature(shape=[1], dtype=tf.int64),\n 'indices': tf.VarLenFeature(dtype=tf.int64), 'values': tf.\n VarLenFeature(dtype=tf.float32)}\n parsed_features = tf.parse_single_example(serialized=protos,\n features=features)\n values = tf.sparse_merge(sp_ids=parsed_features['indices'],\n sp_values=parsed_features['values'], vocab_size=vocab_size)\n key = parsed_features['key']\n decoded_sparse_tensor = tf.SparseTensor(indices=tf.concat(values=[\n values.indices, [key]], axis=0), values=tf.concat(values=[\n values.values, [0.0]], axis=0), dense_shape=values.dense_shape)\n return decoded_sparse_tensor\n\n def remap_keys(sparse_tensor):\n bad_indices = sparse_tensor.indices\n bad_values = sparse_tensor.values\n user_mask = tf.concat(values=[bad_indices[1:, 0] - bad_indices[:-1,\n 0], tf.constant(value=[1], dtype=tf.int64)], axis=0)\n good_values = tf.boolean_mask(tensor=bad_values, mask=tf.equal(x=\n user_mask, y=0))\n item_indices = tf.boolean_mask(tensor=bad_indices, mask=tf.equal(x=\n user_mask, y=0))\n user_indices = tf.boolean_mask(tensor=bad_indices, mask=tf.equal(x=\n user_mask, y=1))[:, 1]\n good_user_indices = tf.gather(params=user_indices, indices=\n item_indices[:, 0])\n good_user_indices_expanded = tf.expand_dims(input=good_user_indices,\n axis=-1)\n good_item_indices_expanded = tf.expand_dims(input=item_indices[:, 1\n ], axis=-1)\n good_indices = tf.concat(values=[good_user_indices_expanded,\n good_item_indices_expanded], axis=1)\n remapped_sparse_tensor = tf.SparseTensor(indices=good_indices,\n values=good_values, dense_shape=sparse_tensor.dense_shape)\n return remapped_sparse_tensor\n\n def parse_tfrecords(filename, vocab_size):\n if mode == tf.estimator.ModeKeys.TRAIN:\n num_epochs = None\n else:\n num_epochs = 1\n files = tf.gfile.Glob(filename=os.path.join(args['input_path'],\n filename))\n dataset = tf.data.TFRecordDataset(files)\n dataset = dataset.map(map_func=lambda x: decode_example(x, vocab_size))\n dataset = dataset.repeat(count=num_epochs)\n dataset = dataset.batch(batch_size=args['batch_size'])\n dataset = dataset.map(map_func=lambda x: remap_keys(x))\n return dataset.make_one_shot_iterator().get_next()\n\n def _input_fn():\n features = {WALSMatrixFactorization.INPUT_ROWS: parse_tfrecords(\n 'items_for_user', args['nitems']), WALSMatrixFactorization.\n INPUT_COLS: parse_tfrecords('users_for_item', args['nusers']),\n WALSMatrixFactorization.PROJECT_ROW: tf.constant(True)}\n return features, None\n return _input_fn\n\n def input_cols():\n return parse_tfrecords('users_for_item', args['nusers'])\n return _input_fn\n\n\ndef find_top_k(user, item_factors, k):\n all_items = tf.matmul(a=tf.expand_dims(input=user, axis=0), b=tf.\n transpose(a=item_factors))\n topk = tf.nn.top_k(input=all_items, k=k)\n return tf.cast(x=topk.indices, dtype=tf.int64)\n\n\ndef batch_predict(args):\n import numpy as np\n with tf.Session() as sess:\n estimator = tf.contrib.factorization.WALSMatrixFactorization(num_rows\n =args['nusers'], num_cols=args['nitems'], embedding_dimension=\n args['n_embeds'], model_dir=args['output_dir'])\n user_factors = tf.convert_to_tensor(value=estimator.get_row_factors\n ()[0])\n item_factors = tf.convert_to_tensor(value=estimator.get_col_factors\n ()[0])\n topk = tf.squeeze(input=tf.map_fn(fn=lambda user: find_top_k(user,\n item_factors, args['topk']), elems=user_factors, 
dtype=tf.int64))\n with file_io.FileIO(os.path.join(args['output_dir'],\n 'batch_pred.txt'), mode='w') as f:\n for best_items_for_user in topk.eval():\n f.write(','.join(str(x) for x in best_items_for_user) + '\\n')\n\n\ndef train_and_evaluate(args):\n train_steps = int(0.5 + 1.0 * args['num_epochs'] * args['nusers'] /\n args['batch_size'])\n steps_in_epoch = int(0.5 + args['nusers'] / args['batch_size'])\n print('Will train for {} steps, evaluating once every {} steps'.format(\n train_steps, steps_in_epoch))\n\n def experiment_fn(output_dir):\n return tf.contrib.learn.Experiment(tf.contrib.factorization.\n WALSMatrixFactorization(num_rows=args['nusers'], num_cols=args[\n 'nitems'], embedding_dimension=args['n_embeds'], model_dir=args\n ['output_dir']), train_input_fn=read_dataset(tf.estimator.\n ModeKeys.TRAIN, args), eval_input_fn=read_dataset(tf.estimator.\n ModeKeys.EVAL, args), train_steps=train_steps, eval_steps=1,\n min_eval_frequency=steps_in_epoch)\n from tensorflow.contrib.learn.python.learn import learn_runner\n learn_runner.run(experiment_fn=experiment_fn, output_dir=args['output_dir']\n )\n batch_predict(args)\n",
"step-4": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nimport os\nimport shutil\nimport numpy as np\nimport tensorflow as tf\nfrom tensorflow.contrib.factorization import WALSMatrixFactorization\ntf.logging.set_verbosity(tf.logging.INFO)\nimport os\nimport tensorflow as tf\nfrom tensorflow.python.lib.io import file_io\nfrom tensorflow.contrib.factorization import WALSMatrixFactorization\nimport os\nimport tensorflow as tf\nfrom tensorflow.python.lib.io import file_io\nfrom tensorflow.contrib.factorization import WALSMatrixFactorization\n\n\ndef read_dataset(mode, args):\n\n def decode_example(protos, vocab_size):\n features = {'key': tf.FixedLenFeature(shape=[1], dtype=tf.int64),\n 'indices': tf.VarLenFeature(dtype=tf.int64), 'values': tf.\n VarLenFeature(dtype=tf.float32)}\n parsed_features = tf.parse_single_example(serialized=protos,\n features=features)\n values = tf.sparse_merge(sp_ids=parsed_features['indices'],\n sp_values=parsed_features['values'], vocab_size=vocab_size)\n key = parsed_features['key']\n decoded_sparse_tensor = tf.SparseTensor(indices=tf.concat(values=[\n values.indices, [key]], axis=0), values=tf.concat(values=[\n values.values, [0.0]], axis=0), dense_shape=values.dense_shape)\n return decoded_sparse_tensor\n\n def remap_keys(sparse_tensor):\n bad_indices = sparse_tensor.indices\n bad_values = sparse_tensor.values\n user_mask = tf.concat(values=[bad_indices[1:, 0] - bad_indices[:-1,\n 0], tf.constant(value=[1], dtype=tf.int64)], axis=0)\n good_values = tf.boolean_mask(tensor=bad_values, mask=tf.equal(x=\n user_mask, y=0))\n item_indices = tf.boolean_mask(tensor=bad_indices, mask=tf.equal(x=\n user_mask, y=0))\n user_indices = tf.boolean_mask(tensor=bad_indices, mask=tf.equal(x=\n user_mask, y=1))[:, 1]\n good_user_indices = tf.gather(params=user_indices, indices=\n item_indices[:, 0])\n good_user_indices_expanded = tf.expand_dims(input=good_user_indices,\n axis=-1)\n good_item_indices_expanded = tf.expand_dims(input=item_indices[:, 1\n ], axis=-1)\n good_indices = tf.concat(values=[good_user_indices_expanded,\n good_item_indices_expanded], axis=1)\n remapped_sparse_tensor = tf.SparseTensor(indices=good_indices,\n values=good_values, dense_shape=sparse_tensor.dense_shape)\n return remapped_sparse_tensor\n\n def parse_tfrecords(filename, vocab_size):\n if mode == tf.estimator.ModeKeys.TRAIN:\n num_epochs = None\n else:\n num_epochs = 1\n files = tf.gfile.Glob(filename=os.path.join(args['input_path'],\n filename))\n dataset = tf.data.TFRecordDataset(files)\n dataset = dataset.map(map_func=lambda x: decode_example(x, vocab_size))\n dataset = dataset.repeat(count=num_epochs)\n dataset = dataset.batch(batch_size=args['batch_size'])\n dataset = dataset.map(map_func=lambda x: remap_keys(x))\n return dataset.make_one_shot_iterator().get_next()\n\n def _input_fn():\n features = {WALSMatrixFactorization.INPUT_ROWS: parse_tfrecords(\n 'items_for_user', args['nitems']), WALSMatrixFactorization.\n INPUT_COLS: parse_tfrecords('users_for_item', args['nusers']),\n WALSMatrixFactorization.PROJECT_ROW: tf.constant(True)}\n return features, None\n return _input_fn\n\n def input_cols():\n return parse_tfrecords('users_for_item', args['nusers'])\n return _input_fn\n\n\ndef find_top_k(user, item_factors, k):\n all_items = tf.matmul(a=tf.expand_dims(input=user, axis=0), b=tf.\n transpose(a=item_factors))\n topk = tf.nn.top_k(input=all_items, k=k)\n return tf.cast(x=topk.indices, dtype=tf.int64)\n\n\ndef 
batch_predict(args):\n import numpy as np\n with tf.Session() as sess:\n estimator = tf.contrib.factorization.WALSMatrixFactorization(num_rows\n =args['nusers'], num_cols=args['nitems'], embedding_dimension=\n args['n_embeds'], model_dir=args['output_dir'])\n user_factors = tf.convert_to_tensor(value=estimator.get_row_factors\n ()[0])\n item_factors = tf.convert_to_tensor(value=estimator.get_col_factors\n ()[0])\n topk = tf.squeeze(input=tf.map_fn(fn=lambda user: find_top_k(user,\n item_factors, args['topk']), elems=user_factors, dtype=tf.int64))\n with file_io.FileIO(os.path.join(args['output_dir'],\n 'batch_pred.txt'), mode='w') as f:\n for best_items_for_user in topk.eval():\n f.write(','.join(str(x) for x in best_items_for_user) + '\\n')\n\n\ndef train_and_evaluate(args):\n train_steps = int(0.5 + 1.0 * args['num_epochs'] * args['nusers'] /\n args['batch_size'])\n steps_in_epoch = int(0.5 + args['nusers'] / args['batch_size'])\n print('Will train for {} steps, evaluating once every {} steps'.format(\n train_steps, steps_in_epoch))\n\n def experiment_fn(output_dir):\n return tf.contrib.learn.Experiment(tf.contrib.factorization.\n WALSMatrixFactorization(num_rows=args['nusers'], num_cols=args[\n 'nitems'], embedding_dimension=args['n_embeds'], model_dir=args\n ['output_dir']), train_input_fn=read_dataset(tf.estimator.\n ModeKeys.TRAIN, args), eval_input_fn=read_dataset(tf.estimator.\n ModeKeys.EVAL, args), train_steps=train_steps, eval_steps=1,\n min_eval_frequency=steps_in_epoch)\n from tensorflow.contrib.learn.python.learn import learn_runner\n learn_runner.run(experiment_fn=experiment_fn, output_dir=args['output_dir']\n )\n batch_predict(args)\n",
"step-5": "#!/usr/bin/env python\n\n# Copyright 2017 Google Inc. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport os\nimport shutil\nimport numpy as np\nimport tensorflow as tf\nfrom tensorflow.contrib.factorization import WALSMatrixFactorization\n\ntf.logging.set_verbosity(tf.logging.INFO)\n\nimport os\nimport tensorflow as tf\nfrom tensorflow.python.lib.io import file_io\nfrom tensorflow.contrib.factorization import WALSMatrixFactorization\n \nimport os\nimport tensorflow as tf\nfrom tensorflow.python.lib.io import file_io\nfrom tensorflow.contrib.factorization import WALSMatrixFactorization\n \ndef read_dataset(mode, args):\n def decode_example(protos, vocab_size):\n features = {\n \"key\": tf.FixedLenFeature(shape = [1], dtype = tf.int64),\n \"indices\": tf.VarLenFeature(dtype = tf.int64),\n \"values\": tf.VarLenFeature(dtype = tf.float32)}\n parsed_features = tf.parse_single_example(serialized = protos, features = features)\n values = tf.sparse_merge(sp_ids = parsed_features[\"indices\"], sp_values = parsed_features[\"values\"], vocab_size = vocab_size)\n # Save key to remap after batching\n # This is a temporary workaround to assign correct row numbers in each batch.\n # You can ignore details of this part and remap_keys().\n key = parsed_features[\"key\"]\n decoded_sparse_tensor = tf.SparseTensor(indices = tf.concat(values = [values.indices, [key]], axis = 0), \n values = tf.concat(values = [values.values, [0.0]], axis = 0), \n dense_shape = values.dense_shape)\n return decoded_sparse_tensor\n \n \n def remap_keys(sparse_tensor):\n # Current indices of our SparseTensor that we need to fix\n bad_indices = sparse_tensor.indices # shape = (current_batch_size * (number_of_items/users[i] + 1), 2)\n # Current values of our SparseTensor that we need to fix\n bad_values = sparse_tensor.values # shape = (current_batch_size * (number_of_items/users[i] + 1),)\n\n # Since batch is ordered, the last value for a batch index is the user\n # Find where the batch index chages to extract the user rows\n # 1 where user, else 0\n user_mask = tf.concat(values = [bad_indices[1:,0] - bad_indices[:-1,0], tf.constant(value = [1], dtype = tf.int64)], axis = 0) # shape = (current_batch_size * (number_of_items/users[i] + 1), 2)\n\n # Mask out the user rows from the values\n good_values = tf.boolean_mask(tensor = bad_values, mask = tf.equal(x = user_mask, y = 0)) # shape = (current_batch_size * number_of_items/users[i],)\n item_indices = tf.boolean_mask(tensor = bad_indices, mask = tf.equal(x = user_mask, y = 0)) # shape = (current_batch_size * number_of_items/users[i],)\n user_indices = tf.boolean_mask(tensor = bad_indices, mask = tf.equal(x = user_mask, y = 1))[:, 1] # shape = (current_batch_size,)\n\n good_user_indices = tf.gather(params = user_indices, indices = item_indices[:,0]) # shape = (current_batch_size * number_of_items/users[i],)\n\n # User and item indices 
are rank 1, need to make rank 1 to concat\n good_user_indices_expanded = tf.expand_dims(input = good_user_indices, axis = -1) # shape = (current_batch_size * number_of_items/users[i], 1)\n good_item_indices_expanded = tf.expand_dims(input = item_indices[:, 1], axis = -1) # shape = (current_batch_size * number_of_items/users[i], 1)\n good_indices = tf.concat(values = [good_user_indices_expanded, good_item_indices_expanded], axis = 1) # shape = (current_batch_size * number_of_items/users[i], 2)\n\n remapped_sparse_tensor = tf.SparseTensor(indices = good_indices, values = good_values, dense_shape = sparse_tensor.dense_shape)\n return remapped_sparse_tensor\n\n \n def parse_tfrecords(filename, vocab_size):\n if mode == tf.estimator.ModeKeys.TRAIN:\n num_epochs = None # indefinitely\n else:\n num_epochs = 1 # end-of-input after this\n\n files = tf.gfile.Glob(filename = os.path.join(args[\"input_path\"], filename))\n\n # Create dataset from file list\n dataset = tf.data.TFRecordDataset(files)\n dataset = dataset.map(map_func = lambda x: decode_example(x, vocab_size))\n dataset = dataset.repeat(count = num_epochs)\n dataset = dataset.batch(batch_size = args[\"batch_size\"])\n dataset = dataset.map(map_func = lambda x: remap_keys(x))\n return dataset.make_one_shot_iterator().get_next()\n \n def _input_fn():\n features = {\n WALSMatrixFactorization.INPUT_ROWS: parse_tfrecords(\"items_for_user\", args[\"nitems\"]),\n WALSMatrixFactorization.INPUT_COLS: parse_tfrecords(\"users_for_item\", args[\"nusers\"]),\n WALSMatrixFactorization.PROJECT_ROW: tf.constant(True)\n }\n return features, None\n\n return _input_fn\n \n def input_cols():\n return parse_tfrecords('users_for_item', args['nusers'])\n \n return _input_fn\n\ndef find_top_k(user, item_factors, k):\n all_items = tf.matmul(a = tf.expand_dims(input = user, axis = 0), b = tf.transpose(a = item_factors))\n topk = tf.nn.top_k(input = all_items, k = k)\n return tf.cast(x = topk.indices, dtype = tf.int64)\n \ndef batch_predict(args):\n import numpy as np\n with tf.Session() as sess:\n estimator = tf.contrib.factorization.WALSMatrixFactorization(\n num_rows = args[\"nusers\"], \n num_cols = args[\"nitems\"],\n embedding_dimension = args[\"n_embeds\"],\n model_dir = args[\"output_dir\"])\n \n # This is how you would get the row factors for out-of-vocab user data\n # row_factors = list(estimator.get_projections(input_fn=read_dataset(tf.estimator.ModeKeys.EVAL, args)))\n # user_factors = tf.convert_to_tensor(np.array(row_factors))\n\n # But for in-vocab data, the row factors are already in the checkpoint\n user_factors = tf.convert_to_tensor(value = estimator.get_row_factors()[0]) # (nusers, nembeds)\n # In either case, we have to assume catalog doesn\"t change, so col_factors are read in\n item_factors = tf.convert_to_tensor(value = estimator.get_col_factors()[0])# (nitems, nembeds)\n\n # For each user, find the top K items\n topk = tf.squeeze(input = tf.map_fn(fn = lambda user: find_top_k(user, item_factors, args[\"topk\"]), elems = user_factors, dtype = tf.int64))\n with file_io.FileIO(os.path.join(args[\"output_dir\"], \"batch_pred.txt\"), mode = 'w') as f:\n for best_items_for_user in topk.eval():\n f.write(\",\".join(str(x) for x in best_items_for_user) + '\\n')\n\ndef train_and_evaluate(args):\n train_steps = int(0.5 + (1.0 * args[\"num_epochs\"] * args[\"nusers\"]) / args[\"batch_size\"])\n steps_in_epoch = int(0.5 + args[\"nusers\"] / args[\"batch_size\"])\n print(\"Will train for {} steps, evaluating once every {} steps\".format(train_steps, 
steps_in_epoch))\n def experiment_fn(output_dir):\n return tf.contrib.learn.Experiment(\n tf.contrib.factorization.WALSMatrixFactorization(\n num_rows = args[\"nusers\"], \n num_cols = args[\"nitems\"],\n embedding_dimension = args[\"n_embeds\"],\n model_dir = args[\"output_dir\"]),\n train_input_fn = read_dataset(tf.estimator.ModeKeys.TRAIN, args),\n eval_input_fn = read_dataset(tf.estimator.ModeKeys.EVAL, args),\n train_steps = train_steps,\n eval_steps = 1,\n min_eval_frequency = steps_in_epoch\n )\n\n from tensorflow.contrib.learn.python.learn import learn_runner\n learn_runner.run(experiment_fn = experiment_fn, output_dir = args[\"output_dir\"])\n \n batch_predict(args)",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from rasa_core.actions.action import Action
from rasa_core.events import SlotSet
from rasa_core.dispatcher import Button, Element, Dispatcher
import json
import pickle
class ActionWeather(Action):
def name(self):
return 'action_doctor'
def run(self, dispatcher, tracker, domain):
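        # Maps the 'department' slot to that department's doctors, utters
        # them as buttons, and stores the chosen doctor in the 'doctor' slot.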
        loc = tracker.get_slot('department')
        # Defaults so an unrecognized department cannot raise a NameError
        # when `buttons`/`response` are used after the chain below.
        response = ""
        buttons = []
if loc == 'algology':
#response = "Prof. Dr. Öznur Öken"
buttons = [
Button(title="Prof. Dr. Öznur Öken", payload="/Dr1")
]
elif loc == 'brain and neurosurgery':
#response = "1- Doç. Dr. Gülşah Bademci\n2- Doç. Dr. Suat CANBAY"
buttons = [
Button(title="Doç. Dr. Gülşah Bademci", payload="/btn1"),
Button(title="Doç. Dr. Suat CANBAY", payload="/btn2")
]
elif loc == 'child hematology':
#response = "Prof. Dr. Hatice Emel Özyürek"
buttons = [
Button(title="Prof. Dr. Hatice Emel Özyürek", payload="/btn1")
]
elif loc == 'child nephrology':
#response = "Prof. Dr. Süleyman Kalman"
buttons = [
Button(title="Prof. Dr. Süleyman Kalman", payload="/btn1")
]
elif loc == 'child health and illness':
#response = "1- Prof. Dr. Musa Kazım Çağlar\n2- Prof. Dr. Süleyman Kalman\n3- Prof. Dr. Hatice Emel Özyürek\n4- Yar. Doç. Dr. Pakize Elif Alkış\n5- Uzm. Dr. Mustafa Yücel Kızıltan\n6- Uzm. Dr. Gökalp Başbozkurt\n7- Uzm. Dr. Hafsa Uçur\n8- Uzm. Dr. Hüsniye Altan\n 9- Uzm. Dr. Sarkhan Elbayıyev\n 10- Uzm. Dr. Shahın Guliyev"
buttons = [
Button(title="Prof. Dr. Musa Kazım Çağlar", payload="/btn1"),
Button(title="Prof. Dr. Süleyman Kalman", payload="/btn2"),
Button(title="Prof. Dr. Hatice Emel Özyürek", payload="/btn3"),
Button(title="Yar. Doç. Dr. Pakize Elif Alkışn", payload="/btn4"),
Button(title="Uzm. Dr. Mustafa Yücel Kızıltan", payload="/btn5"),
Button(title="Uzm. Dr. Gökalp Başbozkurt", payload="/btn6"),
Button(title="Uzm. Dr. Hafsa Uçur", payload="/btn7"),
Button(title="Uzm. Dr. Hüsniye Altan", payload="/btn8"),
Button(title="Uzm. Dr. Sarkhan Elbayıyev", payload="/btn9"),
Button(title="Uzm. Dr. Shahın Guliyev", payload="/btn10")
]
elif loc == 'dermatology':
#response = "1- Uzm. Dr. Aylin Gözübüyükoğulları\n2- Uzm. Dr. Yeşim Akpınar Kara"
buttons = [
Button(title="Uzm. Dr. Aylin Gözübüyükoğulları", payload="/Dr1"),
Button(title="Uzm. Dr. Yeşim Akpınar Kara", payload="/Dr2")
]
elif loc == 'diet policlinic':
#response = "1- Uzm. Dyt. Gaye Başkurt\n2- Dyt. Deniz Özdemir\n3- Dyt. Halime Besler"
buttons = [
Button(title="Uzm. Dyt. Gaye Başkurt", payload="/Dr1"),
Button(title="Dyt. Deniz Özdemir", payload="/Dr2"),
Button(title="Dyt. Halime Besler", payload="/Dr3")
]
elif loc == 'endocrinology':
#response = "Prof. Dr. Serdar Güler"
buttons = [
Button(title="Prof. Dr. Serdar Güler", payload="/Dr1")
]
elif loc == 'infectious diseases':
#response = "Uzm. Dr. Mine Işık Arıgün"
buttons = [
Button(title="Uzm. Dr. Mine Işık Arıgün", payload="/Dr1")
]
elif loc == 'physical therapy and rehabilitation':
#response = "1- Prof. Dr. Öznur Öken\n2- Uzm. Dr. Beril Özturan"
buttons = [
Button(title="Prof. Dr. Öznur Öken", payload="/Dr1"),
Button(title="Uzm. Dr. Beril Özturan", payload="/Dr2")
]
elif loc == 'gastroenterology':
#response = "1- Doç. Dr. Reskan Altun\n2- Doç. Dr. Yasemin Özderin Özin"
buttons = [
Button(title="Doç. Dr. Reskan Altun", payload="/Dr1"),
Button(title="Doç. Dr. Yasemin Özderin Özin", payload="/Dr2")
]
elif loc == 'general surgery':
#response = "1- Prof. Dr. Mehmet Mahir Özmen\n2- Yar. Doç. Dr. Cem Emir Güldoğan\n3- Yar. Doç. Dr. Emre Gündoğdu"
buttons = [
Button(title="Prof. Dr. Mehmet Mahir Özmen", payload="/Dr1"),
Button(title="Yar. Doç. Dr. Cem Emir Güldoğan", payload="/Dr2"),
Button(title="Yar. Doç. Dr. Emre Gündoğdu", payload="/Dr3")
]
elif loc == 'chest diseases':
#response = "Prof. Dr. Uğur Gönüllü"
buttons = [
Button(title="Prof. Dr. Uğur Gönüllü", payload="/Dr1")
]
elif loc == 'eye diseases':
#response = "Op. Dr. Samim Özdeş"
buttons = [
Button(title="Op. Dr. Samim Özdeş", payload="/Dr1")
]
elif loc == 'hematology policlinic':
#response = "Prof. Dr. Oral Nevruz"
buttons = [
Button(title="Prof. Dr. Oral Nevruz", payload="/Dr1")
]
elif loc == 'internal diseases':
#response = "1- Doç. Dr. Beril Akman\n2- Uzm. Dr. Sercan Cansaran\n3- Uzm. Dr. Sevgi Karabuğa\n4- Yar. Doç. Dr. Gökhan Celbek"
buttons = [
Button(title="Doç. Dr. Beril Akman", payload="/Dr1"),
Button(title="Uzm. Dr. Sercan Cansaran", payload="/Dr2"),
Button(title="Uzm. Dr. Sevgi Karabuğa", payload="/Dr3"),
Button(title="Yar. Doç. Dr. Gökhan Celbek", payload="/Dr4")
]
elif loc == 'gynecology and Obstetrics':
#response = "1- Yar. Doç. Dr. Müberra Namlı Kalem\n2- Yar. Doç. Dr. Coşkun Şimşir\n3- Prof. Dr. Ali Ergün\n4- Doç. Dr. Korhan Kahraman\n5- Doç. Dr. Turgut Var\n6- Doç. Dr. Türkan Örnek Gülpınar\n7- Op. Dr. Aslı Yücetürk\n8- Op. Dr. Ebru Yüce\n9- Prof. Dr. Timur Gürgan"
buttons = [
Button(title="Yar. Doç. Dr. Müberra Namlı Kalem", payload="/Dr1"),
Button(title="Yar. Doç. Dr. Coşkun Şimşir", payload="/Dr2"),
Button(title="Prof. Dr. Ali Ergün", payload="/Dr3"),
Button(title="Doç. Dr. Korhan Kahraman", payload="/Dr4"),
Button(title="Doç. Dr. Turgut Var", payload="/Dr5"),
Button(title="Doç. Dr. Türkan Örnek Gülpınar", payload="/Dr6"),
Button(title="Op. Dr. Aslı Yücetürk", payload="/Dr7"),
Button(title="Op. Dr. Ebru Yüce", payload="/Dr8"),
Button(title="Prof. Dr. Timur Gürgan", payload="/Dr9")
]
elif loc == 'cardiac surgery':
#response = "1- Prof. Dr. Erol Şener\n2- Yar. Doç. Dr. Emre Boysan\n2- Yar. Doç. Renda Cırcı"
buttons = [
Button(title="Prof. Dr. Erol Şener", payload="/Dr1"),
Button(title="Yar. Doç. Dr. Emre Boysan", payload="/Dr2"),
Button(title="Yar. Doç. Renda Cırcı", payload="/Dr3")
]
elif loc == 'cardiology':
#response = "1- Prof. Dr. Erdoğan İlkay\n2- Doç. Dr. Alper Canbay\n3- Uzm. Dr. Çiğdem Koca Tarı\n4- Uzm. Dr. Erol Kalender"
buttons = [
Button(title="Prof. Dr. Erdoğan İlkay", payload="/Dr1"),
Button(title="Doç. Dr. Alper Canbay", payload="/Dr2"),
Button(title="Uzm. Dr. Çiğdem Koca Tarı", payload="/Dr3"),
Button(title="Uzm. Dr. Erol Kalender", payload="/Dr4")
]
elif loc == 'ENT diseases':
#response = "1- Prof. Dr. Ali Altuntaş\n2- Prof. Dr. Serdar Karahatay\n3- Yar. Doç Dr. Canset Aydın"
buttons = [
Button(title="Prof. Dr. Ali Altuntaş", payload="/Dr1"),
Button(title="Prof. Dr. Serdar Karahatay", payload="/Dr2"),
Button(title="Yar. Doç Dr. Canset Aydın", payload="/Dr3")
]
elif loc == 'nephrology':
#response = "Doç. Dr. Beril Akman"
buttons = [
Button(title="Doç. Dr. Beril Akman", payload="/Dr1")
]
elif loc == 'neurology':
#response = "1- Prof. Dr. Mehmet Zülküf Önal\n2- Yar. Doç. Dr. Akçay Övünç Ozon"
buttons = [
Button(title="Prof. Dr. Mehmet Zülküf Önal", payload="/Dr1"),
Button(title="Yar. Doç. Dr. Akçay Övünç Ozon", payload="/Dr2")
]
elif loc == 'orthopedics and traumatology':
#response = "1- Yar. Doç. Dr. Uğur Gönç\n2- Op. Dr. Mesut Atabek\n3- Prof. Dr. levent Çelebi"
buttons = [
Button(title="Yar. Doç. Dr. Uğur Gönç", payload="/Dr1"),
Button(title="Op. Dr. Mesut Atabek", payload="/Dr2"),
Button(title="Prof. Dr. levent Çelebi", payload="/Dr3")
]
elif loc == 'plastic surgery':
#response = "1- Op. Dr. Ergin Işık\n2- Op. Dr. Serdar Düzgün"
buttons = [
Button(title="Op. Dr. Ergin Işık", payload="/Dr1"),
Button(title="Op. Dr. Serdar Düzgün", payload="/Dr2")
]
elif loc == 'psychiatry':
#response = "Prof. Dr. Ali Bozkurt"
buttons = [
Button(title="Prof. Dr. Ali Bozkurt", payload="/Dr1")
]
elif loc == 'psychologist':
#response = "Psk. Ezgi Kılınç"
buttons = [
Button(title="Psk. Ezgi Kılınç", payload="/Dr1")
]
elif loc == 'rheumatology':
#response = "Doç. Dr. Orhan Küçükşahin"
buttons = [
Button(title="Doç. Dr. Orhan Küçükşahin", payload="/Dr1")
]
elif loc == 'medical oncology':
#response = ["Prof. Dr. Fikret Arpacı", "Doç. Dr. Gökhan Erdem"]
buttons = [
Button(title="Prof. Dr. Fikret Arpacı", payload="/Dr1"),
Button(title="Doç. Dr. Gökhan Erdem", payload="/Dr2")
]
        elif loc == 'urology':
            # No doctor is currently available; the Turkish notice
            # ("Müsait doktor bulunmamaktadır..." = "No doctor is available...")
            # is passed through the 'doctor' slot instead of a button list.
            response = "Müsait doktor bulunmamaktadır..."

        dispatcher.utter_button_message("my message", buttons)
        return [SlotSet('doctor', response)]
|
normal
|
{
"blob_id": "f87d08f3bb6faa237cce8379de3aaaa3270a4a34",
"index": 3854,
"step-1": "<mask token>\n\n\nclass ActionWeather(Action):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ActionWeather(Action):\n <mask token>\n\n def run(self, dispatcher, tracker, domain):\n loc = tracker.get_slot('department')\n if loc == 'algology':\n buttons = [Button(title='Prof. Dr. Öznur Öken', payload='/Dr1')]\n elif loc == 'brain and neurosurgery':\n buttons = [Button(title='Doç. Dr. Gülşah Bademci', payload=\n '/btn1'), Button(title='Doç. Dr. Suat CANBAY', payload='/btn2')\n ]\n elif loc == 'child hematology':\n buttons = [Button(title='Prof. Dr. Hatice Emel Özyürek',\n payload='/btn1')]\n elif loc == 'child nephrology':\n buttons = [Button(title='Prof. Dr. Süleyman Kalman', payload=\n '/btn1')]\n elif loc == 'child health and illness':\n buttons = [Button(title='Prof. Dr. Musa Kazım Çağlar', payload=\n '/btn1'), Button(title='Prof. Dr. Süleyman Kalman', payload\n ='/btn2'), Button(title='Prof. Dr. Hatice Emel Özyürek',\n payload='/btn3'), Button(title=\n 'Yar. Doç. Dr. Pakize Elif Alkışn', payload='/btn4'),\n Button(title='Uzm. Dr. Mustafa Yücel Kızıltan', payload=\n '/btn5'), Button(title='Uzm. Dr. Gökalp Başbozkurt',\n payload='/btn6'), Button(title='Uzm. Dr. Hafsa Uçur',\n payload='/btn7'), Button(title='Uzm. Dr. Hüsniye Altan',\n payload='/btn8'), Button(title='Uzm. Dr. Sarkhan Elbayıyev',\n payload='/btn9'), Button(title='Uzm. Dr. Shahın Guliyev',\n payload='/btn10')]\n elif loc == 'dermatology':\n buttons = [Button(title='Uzm. Dr. Aylin Gözübüyükoğulları',\n payload='/Dr1'), Button(title='Uzm. Dr. Yeşim Akpınar Kara',\n payload='/Dr2')]\n elif loc == 'diet policlinic':\n buttons = [Button(title='Uzm. Dyt. Gaye Başkurt', payload=\n '/Dr1'), Button(title='Dyt. Deniz Özdemir', payload='/Dr2'),\n Button(title='Dyt. Halime Besler', payload='/Dr3')]\n elif loc == 'endocrinology':\n buttons = [Button(title='Prof. Dr. Serdar Güler', payload='/Dr1')]\n elif loc == 'infectious diseases':\n buttons = [Button(title='Uzm. Dr. Mine Işık Arıgün', payload=\n '/Dr1')]\n elif loc == 'physical therapy and rehabilitation':\n buttons = [Button(title='Prof. Dr. Öznur Öken', payload='/Dr1'),\n Button(title='Uzm. Dr. Beril Özturan', payload='/Dr2')]\n elif loc == 'gastroenterology':\n buttons = [Button(title='Doç. Dr. Reskan Altun', payload='/Dr1'\n ), Button(title='Doç. Dr. Yasemin Özderin Özin', payload=\n '/Dr2')]\n elif loc == 'general surgery':\n buttons = [Button(title='Prof. Dr. Mehmet Mahir Özmen', payload\n ='/Dr1'), Button(title='Yar. Doç. Dr. Cem Emir Güldoğan',\n payload='/Dr2'), Button(title='Yar. Doç. Dr. Emre Gündoğdu',\n payload='/Dr3')]\n elif loc == 'chest diseases':\n buttons = [Button(title='Prof. Dr. Uğur Gönüllü', payload='/Dr1')]\n elif loc == 'eye diseases':\n buttons = [Button(title='Op. Dr. Samim Özdeş', payload='/Dr1')]\n elif loc == 'hematology policlinic':\n buttons = [Button(title='Prof. Dr. Oral Nevruz', payload='/Dr1')]\n elif loc == 'internal diseases':\n buttons = [Button(title='Doç. Dr. Beril Akman', payload='/Dr1'),\n Button(title='Uzm. Dr. Sercan Cansaran', payload='/Dr2'),\n Button(title='Uzm. Dr. Sevgi Karabuğa', payload='/Dr3'),\n Button(title='Yar. Doç. Dr. Gökhan Celbek', payload='/Dr4')]\n elif loc == 'gynecology and Obstetrics':\n buttons = [Button(title='Yar. Doç. Dr. Müberra Namlı Kalem',\n payload='/Dr1'), Button(title='Yar. Doç. Dr. Coşkun Şimşir',\n payload='/Dr2'), Button(title='Prof. Dr. Ali Ergün',\n payload='/Dr3'), Button(title='Doç. Dr. Korhan Kahraman',\n payload='/Dr4'), Button(title='Doç. Dr. Turgut Var',\n payload='/Dr5'), Button(title=\n 'Doç. Dr. 
Türkan Örnek Gülpınar', payload='/Dr6'), Button(\n title='Op. Dr. Aslı Yücetürk', payload='/Dr7'), Button(\n title='Op. Dr. Ebru Yüce', payload='/Dr8'), Button(title=\n 'Prof. Dr. Timur Gürgan', payload='/Dr9')]\n elif loc == 'cardiac surgery':\n buttons = [Button(title='Prof. Dr. Erol Şener', payload='/Dr1'),\n Button(title='Yar. Doç. Dr. Emre Boysan', payload='/Dr2'),\n Button(title='Yar. Doç. Renda Cırcı', payload='/Dr3')]\n elif loc == 'cardiology':\n buttons = [Button(title='Prof. Dr. Erdoğan İlkay', payload=\n '/Dr1'), Button(title='Doç. Dr. Alper Canbay', payload=\n '/Dr2'), Button(title='Uzm. Dr. Çiğdem Koca Tarı', payload=\n '/Dr3'), Button(title='Uzm. Dr. Erol Kalender', payload='/Dr4')\n ]\n elif loc == 'ENT diseases':\n buttons = [Button(title='Prof. Dr. Ali Altuntaş', payload=\n '/Dr1'), Button(title='Prof. Dr. Serdar Karahatay', payload\n ='/Dr2'), Button(title='Yar. Doç Dr. Canset Aydın', payload\n ='/Dr3')]\n elif loc == 'nephrology':\n buttons = [Button(title='Doç. Dr. Beril Akman', payload='/Dr1')]\n elif loc == 'neurology':\n buttons = [Button(title='Prof. Dr. Mehmet Zülküf Önal', payload\n ='/Dr1'), Button(title='Yar. Doç. Dr. Akçay Övünç Ozon',\n payload='/Dr2')]\n elif loc == 'orthopedics and traumatology':\n buttons = [Button(title='Yar. Doç. Dr. Uğur Gönç', payload=\n '/Dr1'), Button(title='Op. Dr. Mesut Atabek', payload=\n '/Dr2'), Button(title='Prof. Dr. levent Çelebi', payload=\n '/Dr3')]\n elif loc == 'plastic surgery':\n buttons = [Button(title='Op. Dr. Ergin Işık', payload='/Dr1'),\n Button(title='Op. Dr. Serdar Düzgün', payload='/Dr2')]\n elif loc == 'psychiatry':\n buttons = [Button(title='Prof. Dr. Ali Bozkurt', payload='/Dr1')]\n elif loc == 'psychologist':\n buttons = [Button(title='Psk. Ezgi Kılınç', payload='/Dr1')]\n elif loc == 'rheumatology':\n buttons = [Button(title='Doç. Dr. Orhan Küçükşahin', payload=\n '/Dr1')]\n elif loc == 'medical oncology':\n buttons = [Button(title='Prof. Dr. Fikret Arpacı', payload=\n '/Dr1'), Button(title='Doç. Dr. Gökhan Erdem', payload='/Dr2')]\n elif loc == 'urology':\n response = 'Müsait doktor bulunmamaktadır...'\n response = ''\n dispatcher.utter_button_message('my message', buttons)\n return [SlotSet('doctor', response)]\n",
"step-3": "<mask token>\n\n\nclass ActionWeather(Action):\n\n def name(self):\n return 'action_doctor'\n\n def run(self, dispatcher, tracker, domain):\n loc = tracker.get_slot('department')\n if loc == 'algology':\n buttons = [Button(title='Prof. Dr. Öznur Öken', payload='/Dr1')]\n elif loc == 'brain and neurosurgery':\n buttons = [Button(title='Doç. Dr. Gülşah Bademci', payload=\n '/btn1'), Button(title='Doç. Dr. Suat CANBAY', payload='/btn2')\n ]\n elif loc == 'child hematology':\n buttons = [Button(title='Prof. Dr. Hatice Emel Özyürek',\n payload='/btn1')]\n elif loc == 'child nephrology':\n buttons = [Button(title='Prof. Dr. Süleyman Kalman', payload=\n '/btn1')]\n elif loc == 'child health and illness':\n buttons = [Button(title='Prof. Dr. Musa Kazım Çağlar', payload=\n '/btn1'), Button(title='Prof. Dr. Süleyman Kalman', payload\n ='/btn2'), Button(title='Prof. Dr. Hatice Emel Özyürek',\n payload='/btn3'), Button(title=\n 'Yar. Doç. Dr. Pakize Elif Alkışn', payload='/btn4'),\n Button(title='Uzm. Dr. Mustafa Yücel Kızıltan', payload=\n '/btn5'), Button(title='Uzm. Dr. Gökalp Başbozkurt',\n payload='/btn6'), Button(title='Uzm. Dr. Hafsa Uçur',\n payload='/btn7'), Button(title='Uzm. Dr. Hüsniye Altan',\n payload='/btn8'), Button(title='Uzm. Dr. Sarkhan Elbayıyev',\n payload='/btn9'), Button(title='Uzm. Dr. Shahın Guliyev',\n payload='/btn10')]\n elif loc == 'dermatology':\n buttons = [Button(title='Uzm. Dr. Aylin Gözübüyükoğulları',\n payload='/Dr1'), Button(title='Uzm. Dr. Yeşim Akpınar Kara',\n payload='/Dr2')]\n elif loc == 'diet policlinic':\n buttons = [Button(title='Uzm. Dyt. Gaye Başkurt', payload=\n '/Dr1'), Button(title='Dyt. Deniz Özdemir', payload='/Dr2'),\n Button(title='Dyt. Halime Besler', payload='/Dr3')]\n elif loc == 'endocrinology':\n buttons = [Button(title='Prof. Dr. Serdar Güler', payload='/Dr1')]\n elif loc == 'infectious diseases':\n buttons = [Button(title='Uzm. Dr. Mine Işık Arıgün', payload=\n '/Dr1')]\n elif loc == 'physical therapy and rehabilitation':\n buttons = [Button(title='Prof. Dr. Öznur Öken', payload='/Dr1'),\n Button(title='Uzm. Dr. Beril Özturan', payload='/Dr2')]\n elif loc == 'gastroenterology':\n buttons = [Button(title='Doç. Dr. Reskan Altun', payload='/Dr1'\n ), Button(title='Doç. Dr. Yasemin Özderin Özin', payload=\n '/Dr2')]\n elif loc == 'general surgery':\n buttons = [Button(title='Prof. Dr. Mehmet Mahir Özmen', payload\n ='/Dr1'), Button(title='Yar. Doç. Dr. Cem Emir Güldoğan',\n payload='/Dr2'), Button(title='Yar. Doç. Dr. Emre Gündoğdu',\n payload='/Dr3')]\n elif loc == 'chest diseases':\n buttons = [Button(title='Prof. Dr. Uğur Gönüllü', payload='/Dr1')]\n elif loc == 'eye diseases':\n buttons = [Button(title='Op. Dr. Samim Özdeş', payload='/Dr1')]\n elif loc == 'hematology policlinic':\n buttons = [Button(title='Prof. Dr. Oral Nevruz', payload='/Dr1')]\n elif loc == 'internal diseases':\n buttons = [Button(title='Doç. Dr. Beril Akman', payload='/Dr1'),\n Button(title='Uzm. Dr. Sercan Cansaran', payload='/Dr2'),\n Button(title='Uzm. Dr. Sevgi Karabuğa', payload='/Dr3'),\n Button(title='Yar. Doç. Dr. Gökhan Celbek', payload='/Dr4')]\n elif loc == 'gynecology and Obstetrics':\n buttons = [Button(title='Yar. Doç. Dr. Müberra Namlı Kalem',\n payload='/Dr1'), Button(title='Yar. Doç. Dr. Coşkun Şimşir',\n payload='/Dr2'), Button(title='Prof. Dr. Ali Ergün',\n payload='/Dr3'), Button(title='Doç. Dr. Korhan Kahraman',\n payload='/Dr4'), Button(title='Doç. Dr. Turgut Var',\n payload='/Dr5'), Button(title=\n 'Doç. Dr. 
Türkan Örnek Gülpınar', payload='/Dr6'), Button(\n title='Op. Dr. Aslı Yücetürk', payload='/Dr7'), Button(\n title='Op. Dr. Ebru Yüce', payload='/Dr8'), Button(title=\n 'Prof. Dr. Timur Gürgan', payload='/Dr9')]\n elif loc == 'cardiac surgery':\n buttons = [Button(title='Prof. Dr. Erol Şener', payload='/Dr1'),\n Button(title='Yar. Doç. Dr. Emre Boysan', payload='/Dr2'),\n Button(title='Yar. Doç. Renda Cırcı', payload='/Dr3')]\n elif loc == 'cardiology':\n buttons = [Button(title='Prof. Dr. Erdoğan İlkay', payload=\n '/Dr1'), Button(title='Doç. Dr. Alper Canbay', payload=\n '/Dr2'), Button(title='Uzm. Dr. Çiğdem Koca Tarı', payload=\n '/Dr3'), Button(title='Uzm. Dr. Erol Kalender', payload='/Dr4')\n ]\n elif loc == 'ENT diseases':\n buttons = [Button(title='Prof. Dr. Ali Altuntaş', payload=\n '/Dr1'), Button(title='Prof. Dr. Serdar Karahatay', payload\n ='/Dr2'), Button(title='Yar. Doç Dr. Canset Aydın', payload\n ='/Dr3')]\n elif loc == 'nephrology':\n buttons = [Button(title='Doç. Dr. Beril Akman', payload='/Dr1')]\n elif loc == 'neurology':\n buttons = [Button(title='Prof. Dr. Mehmet Zülküf Önal', payload\n ='/Dr1'), Button(title='Yar. Doç. Dr. Akçay Övünç Ozon',\n payload='/Dr2')]\n elif loc == 'orthopedics and traumatology':\n buttons = [Button(title='Yar. Doç. Dr. Uğur Gönç', payload=\n '/Dr1'), Button(title='Op. Dr. Mesut Atabek', payload=\n '/Dr2'), Button(title='Prof. Dr. levent Çelebi', payload=\n '/Dr3')]\n elif loc == 'plastic surgery':\n buttons = [Button(title='Op. Dr. Ergin Işık', payload='/Dr1'),\n Button(title='Op. Dr. Serdar Düzgün', payload='/Dr2')]\n elif loc == 'psychiatry':\n buttons = [Button(title='Prof. Dr. Ali Bozkurt', payload='/Dr1')]\n elif loc == 'psychologist':\n buttons = [Button(title='Psk. Ezgi Kılınç', payload='/Dr1')]\n elif loc == 'rheumatology':\n buttons = [Button(title='Doç. Dr. Orhan Küçükşahin', payload=\n '/Dr1')]\n elif loc == 'medical oncology':\n buttons = [Button(title='Prof. Dr. Fikret Arpacı', payload=\n '/Dr1'), Button(title='Doç. Dr. Gökhan Erdem', payload='/Dr2')]\n elif loc == 'urology':\n response = 'Müsait doktor bulunmamaktadır...'\n response = ''\n dispatcher.utter_button_message('my message', buttons)\n return [SlotSet('doctor', response)]\n",
"step-4": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import unicode_literals\nfrom rasa_core.actions.action import Action\nfrom rasa_core.events import SlotSet\nfrom rasa_core.dispatcher import Button, Element, Dispatcher\nimport json\nimport pickle\n\n\nclass ActionWeather(Action):\n\n def name(self):\n return 'action_doctor'\n\n def run(self, dispatcher, tracker, domain):\n loc = tracker.get_slot('department')\n if loc == 'algology':\n buttons = [Button(title='Prof. Dr. Öznur Öken', payload='/Dr1')]\n elif loc == 'brain and neurosurgery':\n buttons = [Button(title='Doç. Dr. Gülşah Bademci', payload=\n '/btn1'), Button(title='Doç. Dr. Suat CANBAY', payload='/btn2')\n ]\n elif loc == 'child hematology':\n buttons = [Button(title='Prof. Dr. Hatice Emel Özyürek',\n payload='/btn1')]\n elif loc == 'child nephrology':\n buttons = [Button(title='Prof. Dr. Süleyman Kalman', payload=\n '/btn1')]\n elif loc == 'child health and illness':\n buttons = [Button(title='Prof. Dr. Musa Kazım Çağlar', payload=\n '/btn1'), Button(title='Prof. Dr. Süleyman Kalman', payload\n ='/btn2'), Button(title='Prof. Dr. Hatice Emel Özyürek',\n payload='/btn3'), Button(title=\n 'Yar. Doç. Dr. Pakize Elif Alkışn', payload='/btn4'),\n Button(title='Uzm. Dr. Mustafa Yücel Kızıltan', payload=\n '/btn5'), Button(title='Uzm. Dr. Gökalp Başbozkurt',\n payload='/btn6'), Button(title='Uzm. Dr. Hafsa Uçur',\n payload='/btn7'), Button(title='Uzm. Dr. Hüsniye Altan',\n payload='/btn8'), Button(title='Uzm. Dr. Sarkhan Elbayıyev',\n payload='/btn9'), Button(title='Uzm. Dr. Shahın Guliyev',\n payload='/btn10')]\n elif loc == 'dermatology':\n buttons = [Button(title='Uzm. Dr. Aylin Gözübüyükoğulları',\n payload='/Dr1'), Button(title='Uzm. Dr. Yeşim Akpınar Kara',\n payload='/Dr2')]\n elif loc == 'diet policlinic':\n buttons = [Button(title='Uzm. Dyt. Gaye Başkurt', payload=\n '/Dr1'), Button(title='Dyt. Deniz Özdemir', payload='/Dr2'),\n Button(title='Dyt. Halime Besler', payload='/Dr3')]\n elif loc == 'endocrinology':\n buttons = [Button(title='Prof. Dr. Serdar Güler', payload='/Dr1')]\n elif loc == 'infectious diseases':\n buttons = [Button(title='Uzm. Dr. Mine Işık Arıgün', payload=\n '/Dr1')]\n elif loc == 'physical therapy and rehabilitation':\n buttons = [Button(title='Prof. Dr. Öznur Öken', payload='/Dr1'),\n Button(title='Uzm. Dr. Beril Özturan', payload='/Dr2')]\n elif loc == 'gastroenterology':\n buttons = [Button(title='Doç. Dr. Reskan Altun', payload='/Dr1'\n ), Button(title='Doç. Dr. Yasemin Özderin Özin', payload=\n '/Dr2')]\n elif loc == 'general surgery':\n buttons = [Button(title='Prof. Dr. Mehmet Mahir Özmen', payload\n ='/Dr1'), Button(title='Yar. Doç. Dr. Cem Emir Güldoğan',\n payload='/Dr2'), Button(title='Yar. Doç. Dr. Emre Gündoğdu',\n payload='/Dr3')]\n elif loc == 'chest diseases':\n buttons = [Button(title='Prof. Dr. Uğur Gönüllü', payload='/Dr1')]\n elif loc == 'eye diseases':\n buttons = [Button(title='Op. Dr. Samim Özdeş', payload='/Dr1')]\n elif loc == 'hematology policlinic':\n buttons = [Button(title='Prof. Dr. Oral Nevruz', payload='/Dr1')]\n elif loc == 'internal diseases':\n buttons = [Button(title='Doç. Dr. Beril Akman', payload='/Dr1'),\n Button(title='Uzm. Dr. Sercan Cansaran', payload='/Dr2'),\n Button(title='Uzm. Dr. Sevgi Karabuğa', payload='/Dr3'),\n Button(title='Yar. Doç. Dr. Gökhan Celbek', payload='/Dr4')]\n elif loc == 'gynecology and Obstetrics':\n buttons = [Button(title='Yar. Doç. Dr. 
Müberra Namlı Kalem',\n payload='/Dr1'), Button(title='Yar. Doç. Dr. Coşkun Şimşir',\n payload='/Dr2'), Button(title='Prof. Dr. Ali Ergün',\n payload='/Dr3'), Button(title='Doç. Dr. Korhan Kahraman',\n payload='/Dr4'), Button(title='Doç. Dr. Turgut Var',\n payload='/Dr5'), Button(title=\n 'Doç. Dr. Türkan Örnek Gülpınar', payload='/Dr6'), Button(\n title='Op. Dr. Aslı Yücetürk', payload='/Dr7'), Button(\n title='Op. Dr. Ebru Yüce', payload='/Dr8'), Button(title=\n 'Prof. Dr. Timur Gürgan', payload='/Dr9')]\n elif loc == 'cardiac surgery':\n buttons = [Button(title='Prof. Dr. Erol Şener', payload='/Dr1'),\n Button(title='Yar. Doç. Dr. Emre Boysan', payload='/Dr2'),\n Button(title='Yar. Doç. Renda Cırcı', payload='/Dr3')]\n elif loc == 'cardiology':\n buttons = [Button(title='Prof. Dr. Erdoğan İlkay', payload=\n '/Dr1'), Button(title='Doç. Dr. Alper Canbay', payload=\n '/Dr2'), Button(title='Uzm. Dr. Çiğdem Koca Tarı', payload=\n '/Dr3'), Button(title='Uzm. Dr. Erol Kalender', payload='/Dr4')\n ]\n elif loc == 'ENT diseases':\n buttons = [Button(title='Prof. Dr. Ali Altuntaş', payload=\n '/Dr1'), Button(title='Prof. Dr. Serdar Karahatay', payload\n ='/Dr2'), Button(title='Yar. Doç Dr. Canset Aydın', payload\n ='/Dr3')]\n elif loc == 'nephrology':\n buttons = [Button(title='Doç. Dr. Beril Akman', payload='/Dr1')]\n elif loc == 'neurology':\n buttons = [Button(title='Prof. Dr. Mehmet Zülküf Önal', payload\n ='/Dr1'), Button(title='Yar. Doç. Dr. Akçay Övünç Ozon',\n payload='/Dr2')]\n elif loc == 'orthopedics and traumatology':\n buttons = [Button(title='Yar. Doç. Dr. Uğur Gönç', payload=\n '/Dr1'), Button(title='Op. Dr. Mesut Atabek', payload=\n '/Dr2'), Button(title='Prof. Dr. levent Çelebi', payload=\n '/Dr3')]\n elif loc == 'plastic surgery':\n buttons = [Button(title='Op. Dr. Ergin Işık', payload='/Dr1'),\n Button(title='Op. Dr. Serdar Düzgün', payload='/Dr2')]\n elif loc == 'psychiatry':\n buttons = [Button(title='Prof. Dr. Ali Bozkurt', payload='/Dr1')]\n elif loc == 'psychologist':\n buttons = [Button(title='Psk. Ezgi Kılınç', payload='/Dr1')]\n elif loc == 'rheumatology':\n buttons = [Button(title='Doç. Dr. Orhan Küçükşahin', payload=\n '/Dr1')]\n elif loc == 'medical oncology':\n buttons = [Button(title='Prof. Dr. Fikret Arpacı', payload=\n '/Dr1'), Button(title='Doç. Dr. Gökhan Erdem', payload='/Dr2')]\n elif loc == 'urology':\n response = 'Müsait doktor bulunmamaktadır...'\n response = ''\n dispatcher.utter_button_message('my message', buttons)\n return [SlotSet('doctor', response)]\n",
"step-5": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import unicode_literals\n\nfrom rasa_core.actions.action import Action\nfrom rasa_core.events import SlotSet\nfrom rasa_core.dispatcher import Button, Element, Dispatcher\nimport json\nimport pickle\n\nclass ActionWeather(Action):\n def name(self):\n return 'action_doctor'\n\n def run(self, dispatcher, tracker, domain):\n\n loc = tracker.get_slot('department')\n #response = tracker.current_slot_values()\n # response = '#' + json.dumps(aaa) + '#'\n\n if loc == 'algology':\n #response = \"Prof. Dr. Öznur Öken\"\n buttons = [\n Button(title=\"Prof. Dr. Öznur Öken\", payload=\"/Dr1\")\n ]\n\n elif loc == 'brain and neurosurgery':\n #response = \"1- Doç. Dr. Gülşah Bademci\\n2- Doç. Dr. Suat CANBAY\"\n buttons = [\n Button(title=\"Doç. Dr. Gülşah Bademci\", payload=\"/btn1\"),\n Button(title=\"Doç. Dr. Suat CANBAY\", payload=\"/btn2\")\n ]\n\n elif loc == 'child hematology':\n #response = \"Prof. Dr. Hatice Emel Özyürek\"\n buttons = [\n Button(title=\"Prof. Dr. Hatice Emel Özyürek\", payload=\"/btn1\")\n ]\n\n elif loc == 'child nephrology':\n #response = \"Prof. Dr. Süleyman Kalman\"\n buttons = [\n Button(title=\"Prof. Dr. Süleyman Kalman\", payload=\"/btn1\")\n ]\n\n elif loc == 'child health and illness':\n #response = \"1- Prof. Dr. Musa Kazım Çağlar\\n2- Prof. Dr. Süleyman Kalman\\n3- Prof. Dr. Hatice Emel Özyürek\\n4- Yar. Doç. Dr. Pakize Elif Alkış\\n5- Uzm. Dr. Mustafa Yücel Kızıltan\\n6- Uzm. Dr. Gökalp Başbozkurt\\n7- Uzm. Dr. Hafsa Uçur\\n8- Uzm. Dr. Hüsniye Altan\\n 9- Uzm. Dr. Sarkhan Elbayıyev\\n 10- Uzm. Dr. Shahın Guliyev\"\n buttons = [\n Button(title=\"Prof. Dr. Musa Kazım Çağlar\", payload=\"/btn1\"),\n Button(title=\"Prof. Dr. Süleyman Kalman\", payload=\"/btn2\"),\n Button(title=\"Prof. Dr. Hatice Emel Özyürek\", payload=\"/btn3\"),\n Button(title=\"Yar. Doç. Dr. Pakize Elif Alkışn\", payload=\"/btn4\"),\n Button(title=\"Uzm. Dr. Mustafa Yücel Kızıltan\", payload=\"/btn5\"),\n Button(title=\"Uzm. Dr. Gökalp Başbozkurt\", payload=\"/btn6\"),\n Button(title=\"Uzm. Dr. Hafsa Uçur\", payload=\"/btn7\"),\n Button(title=\"Uzm. Dr. Hüsniye Altan\", payload=\"/btn8\"),\n Button(title=\"Uzm. Dr. Sarkhan Elbayıyev\", payload=\"/btn9\"),\n Button(title=\"Uzm. Dr. Shahın Guliyev\", payload=\"/btn10\")\n ]\n elif loc == 'dermatology':\n #response = \"1- Uzm. Dr. Aylin Gözübüyükoğulları\\n2- Uzm. Dr. Yeşim Akpınar Kara\"\n buttons = [\n Button(title=\"Uzm. Dr. Aylin Gözübüyükoğulları\", payload=\"/Dr1\"),\n Button(title=\"Uzm. Dr. Yeşim Akpınar Kara\", payload=\"/Dr2\")\n ]\n elif loc == 'diet policlinic':\n #response = \"1- Uzm. Dyt. Gaye Başkurt\\n2- Dyt. Deniz Özdemir\\n3- Dyt. Halime Besler\"\n buttons = [\n Button(title=\"Uzm. Dyt. Gaye Başkurt\", payload=\"/Dr1\"),\n Button(title=\"Dyt. Deniz Özdemir\", payload=\"/Dr2\"),\n Button(title=\"Dyt. Halime Besler\", payload=\"/Dr3\")\n ]\n\n elif loc == 'endocrinology':\n #response = \"Prof. Dr. Serdar Güler\"\n buttons = [\n Button(title=\"Prof. Dr. Serdar Güler\", payload=\"/Dr1\")\n ]\n\n elif loc == 'infectious diseases':\n #response = \"Uzm. Dr. Mine Işık Arıgün\"\n buttons = [\n Button(title=\"Uzm. Dr. Mine Işık Arıgün\", payload=\"/Dr1\")\n ]\n\n elif loc == 'physical therapy and rehabilitation':\n #response = \"1- Prof. Dr. Öznur Öken\\n2- Uzm. Dr. Beril Özturan\"\n buttons = [\n Button(title=\"Prof. Dr. Öznur Öken\", payload=\"/Dr1\"),\n Button(title=\"Uzm. Dr. 
Beril Özturan\", payload=\"/Dr2\")\n ]\n\n elif loc == 'gastroenterology':\n #response = \"1- Doç. Dr. Reskan Altun\\n2- Doç. Dr. Yasemin Özderin Özin\"\n buttons = [\n Button(title=\"Doç. Dr. Reskan Altun\", payload=\"/Dr1\"),\n Button(title=\"Doç. Dr. Yasemin Özderin Özin\", payload=\"/Dr2\")\n ]\n\n elif loc == 'general surgery':\n #response = \"1- Prof. Dr. Mehmet Mahir Özmen\\n2- Yar. Doç. Dr. Cem Emir Güldoğan\\n3- Yar. Doç. Dr. Emre Gündoğdu\"\n buttons = [\n Button(title=\"Prof. Dr. Mehmet Mahir Özmen\", payload=\"/Dr1\"),\n Button(title=\"Yar. Doç. Dr. Cem Emir Güldoğan\", payload=\"/Dr2\"),\n Button(title=\"Yar. Doç. Dr. Emre Gündoğdu\", payload=\"/Dr3\")\n ]\n\n elif loc == 'chest diseases':\n #response = \"Prof. Dr. Uğur Gönüllü\"\n buttons = [\n Button(title=\"Prof. Dr. Uğur Gönüllü\", payload=\"/Dr1\")\n ]\n\n\n elif loc == 'eye diseases':\n #response = \"Op. Dr. Samim Özdeş\"\n buttons = [\n Button(title=\"Op. Dr. Samim Özdeş\", payload=\"/Dr1\")\n ]\n\n elif loc == 'hematology policlinic':\n #response = \"Prof. Dr. Oral Nevruz\"\n buttons = [\n Button(title=\"Prof. Dr. Oral Nevruz\", payload=\"/Dr1\")\n ]\n\n elif loc == 'internal diseases':\n #response = \"1- Doç. Dr. Beril Akman\\n2- Uzm. Dr. Sercan Cansaran\\n3- Uzm. Dr. Sevgi Karabuğa\\n4- Yar. Doç. Dr. Gökhan Celbek\"\n buttons = [\n Button(title=\"Doç. Dr. Beril Akman\", payload=\"/Dr1\"),\n Button(title=\"Uzm. Dr. Sercan Cansaran\", payload=\"/Dr2\"),\n Button(title=\"Uzm. Dr. Sevgi Karabuğa\", payload=\"/Dr3\"),\n Button(title=\"Yar. Doç. Dr. Gökhan Celbek\", payload=\"/Dr4\")\n ]\n\n elif loc == 'gynecology and Obstetrics':\n #response = \"1- Yar. Doç. Dr. Müberra Namlı Kalem\\n2- Yar. Doç. Dr. Coşkun Şimşir\\n3- Prof. Dr. Ali Ergün\\n4- Doç. Dr. Korhan Kahraman\\n5- Doç. Dr. Turgut Var\\n6- Doç. Dr. Türkan Örnek Gülpınar\\n7- Op. Dr. Aslı Yücetürk\\n8- Op. Dr. Ebru Yüce\\n9- Prof. Dr. Timur Gürgan\"\n buttons = [\n Button(title=\"Yar. Doç. Dr. Müberra Namlı Kalem\", payload=\"/Dr1\"),\n Button(title=\"Yar. Doç. Dr. Coşkun Şimşir\", payload=\"/Dr2\"),\n Button(title=\"Prof. Dr. Ali Ergün\", payload=\"/Dr3\"),\n Button(title=\"Doç. Dr. Korhan Kahraman\", payload=\"/Dr4\"),\n Button(title=\"Doç. Dr. Turgut Var\", payload=\"/Dr5\"),\n Button(title=\"Doç. Dr. Türkan Örnek Gülpınar\", payload=\"/Dr6\"),\n Button(title=\"Op. Dr. Aslı Yücetürk\", payload=\"/Dr7\"),\n Button(title=\"Op. Dr. Ebru Yüce\", payload=\"/Dr8\"),\n Button(title=\"Prof. Dr. Timur Gürgan\", payload=\"/Dr9\")\n ]\n\n elif loc == 'cardiac surgery':\n #response = \"1- Prof. Dr. Erol Şener\\n2- Yar. Doç. Dr. Emre Boysan\\n2- Yar. Doç. Renda Cırcı\"\n buttons = [\n Button(title=\"Prof. Dr. Erol Şener\", payload=\"/Dr1\"),\n Button(title=\"Yar. Doç. Dr. Emre Boysan\", payload=\"/Dr2\"),\n Button(title=\"Yar. Doç. Renda Cırcı\", payload=\"/Dr3\")\n ]\n\n elif loc == 'cardiology':\n #response = \"1- Prof. Dr. Erdoğan İlkay\\n2- Doç. Dr. Alper Canbay\\n3- Uzm. Dr. Çiğdem Koca Tarı\\n4- Uzm. Dr. Erol Kalender\"\n buttons = [\n Button(title=\"Prof. Dr. Erdoğan İlkay\", payload=\"/Dr1\"),\n Button(title=\"Doç. Dr. Alper Canbay\", payload=\"/Dr2\"),\n Button(title=\"Uzm. Dr. Çiğdem Koca Tarı\", payload=\"/Dr3\"),\n Button(title=\"Uzm. Dr. Erol Kalender\", payload=\"/Dr4\")\n ]\n\n elif loc == 'ENT diseases':\n #response = \"1- Prof. Dr. Ali Altuntaş\\n2- Prof. Dr. Serdar Karahatay\\n3- Yar. Doç Dr. Canset Aydın\"\n buttons = [\n Button(title=\"Prof. Dr. Ali Altuntaş\", payload=\"/Dr1\"),\n Button(title=\"Prof. Dr. 
Serdar Karahatay\", payload=\"/Dr2\"),\n Button(title=\"Yar. Doç Dr. Canset Aydın\", payload=\"/Dr3\")\n ]\n\n elif loc == 'nephrology':\n #response = \"Doç. Dr. Beril Akman\"\n buttons = [\n Button(title=\"Doç. Dr. Beril Akman\", payload=\"/Dr1\")\n ]\n\n elif loc == 'neurology':\n #response = \"1- Prof. Dr. Mehmet Zülküf Önal\\n2- Yar. Doç. Dr. Akçay Övünç Ozon\"\n buttons = [\n Button(title=\"Prof. Dr. Mehmet Zülküf Önal\", payload=\"/Dr1\"),\n Button(title=\"Yar. Doç. Dr. Akçay Övünç Ozon\", payload=\"/Dr2\")\n ]\n\n elif loc == 'orthopedics and traumatology':\n #response = \"1- Yar. Doç. Dr. Uğur Gönç\\n2- Op. Dr. Mesut Atabek\\n3- Prof. Dr. levent Çelebi\"\n buttons = [\n Button(title=\"Yar. Doç. Dr. Uğur Gönç\", payload=\"/Dr1\"),\n Button(title=\"Op. Dr. Mesut Atabek\", payload=\"/Dr2\"),\n Button(title=\"Prof. Dr. levent Çelebi\", payload=\"/Dr3\")\n\n ]\n\n elif loc == 'plastic surgery':\n #response = \"1- Op. Dr. Ergin Işık\\n2- Op. Dr. Serdar Düzgün\"\n buttons = [\n Button(title=\"Op. Dr. Ergin Işık\", payload=\"/Dr1\"),\n Button(title=\"Op. Dr. Serdar Düzgün\", payload=\"/Dr2\")\n\n ]\n\n elif loc == 'psychiatry':\n #response = \"Prof. Dr. Ali Bozkurt\"\n buttons = [\n Button(title=\"Prof. Dr. Ali Bozkurt\", payload=\"/Dr1\")\n\n ]\n\n elif loc == 'psychologist':\n #response = \"Psk. Ezgi Kılınç\"\n buttons = [\n Button(title=\"Psk. Ezgi Kılınç\", payload=\"/Dr1\")\n\n ]\n\n elif loc == 'rheumatology':\n #response = \"Doç. Dr. Orhan Küçükşahin\"\n buttons = [\n Button(title=\"Doç. Dr. Orhan Küçükşahin\", payload=\"/Dr1\")\n\n ]\n\n\n elif loc == 'medical oncology':\n #response = [\"Prof. Dr. Fikret Arpacı\", \"Doç. Dr. Gökhan Erdem\"]\n buttons = [\n Button(title=\"Prof. Dr. Fikret Arpacı\", payload=\"/Dr1\"),\n Button(title=\"Doç. Dr. Gökhan Erdem\", payload=\"/Dr2\")\n\n ]\n\n elif loc == 'urology':\n response = \"Müsait doktor bulunmamaktadır...\"\n\n #response = \"abc\\n\\nasd\"\n\n response=\"\"\n # buttons = [\n # Button(title=\"Btn1\", payload=\"/btn1\"),\n # Button(title=\"Btn2\", payload=\"/btn2\")\n # ]\n dispatcher.utter_button_message(\"my message\", buttons)\n return [SlotSet('doctor', response)]\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-02-24 11:30
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
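    # Creates Employee and Item, two Employee subclasses (Customer,
    # Supplier), and finally a nullable Item -> Supplier foreign key.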
operations = [
migrations.CreateModel(
name='Employee',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(blank=True, max_length=30, null=True)),
('last_name', models.CharField(blank=True, max_length=30, null=True)),
('gender', models.CharField(blank=True, max_length=10, null=True)),
('email', models.EmailField(blank=True, max_length=255, null=True)),
('phone_number', models.CharField(blank=True, max_length=20, null=True)),
('address', models.TextField(blank=True, max_length=255, null=True)),
('city', models.CharField(blank=True, max_length=50, null=True)),
('state', models.CharField(blank=True, max_length=50, null=True)),
('post_code', models.CharField(blank=True, max_length=10, null=True)),
('comment', models.TextField(blank=True, max_length=255, null=True)),
],
),
migrations.CreateModel(
name='Item',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('barcode', models.CharField(blank=True, max_length=100, null=True)),
('item_name', models.CharField(blank=True, max_length=100, null=True)),
('catagory', models.CharField(blank=True, max_length=100, null=True)),
('wholesale_price', models.FloatField(blank=True, null=True)),
('retail_price', models.FloatField(blank=True, null=True)),
('tax', models.FloatField(blank=True, null=True)),
('quantity_stock', models.IntegerField(blank=True, null=True)),
('receiving_quantity', models.IntegerField(blank=True, null=True)),
('description', models.TextField(blank=True, max_length=1000, null=True)),
('image', models.ImageField(blank=True, default='no-img.jpg', null=True, upload_to='item/')),
('item_has_serial_number', models.BooleanField(default=False)),
('reorder_level', models.CharField(blank=True, max_length=10, null=True)),
],
),
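        # Customer and Supplier use Django multi-table inheritance:
        # employee_ptr is the implicit OneToOne parent link to Employee.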
migrations.CreateModel(
name='Customer',
fields=[
('employee_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='account.Employee')),
],
bases=('account.employee',),
),
migrations.CreateModel(
name='Supplier',
fields=[
('employee_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='account.Employee')),
('company_name', models.CharField(blank=True, max_length=100, null=True)),
],
bases=('account.employee',),
),
migrations.AddField(
model_name='item',
name='supplier',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='account.Supplier'),
),
]
|
normal
|
{
"blob_id": "56157aaf3f98abc58572b45111becb91cb93f328",
"index": 2926,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Employee', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('first_name', models.CharField(blank=\n True, max_length=30, null=True)), ('last_name', models.CharField(\n blank=True, max_length=30, null=True)), ('gender', models.CharField\n (blank=True, max_length=10, null=True)), ('email', models.\n EmailField(blank=True, max_length=255, null=True)), ('phone_number',\n models.CharField(blank=True, max_length=20, null=True)), ('address',\n models.TextField(blank=True, max_length=255, null=True)), ('city',\n models.CharField(blank=True, max_length=50, null=True)), ('state',\n models.CharField(blank=True, max_length=50, null=True)), (\n 'post_code', models.CharField(blank=True, max_length=10, null=True)\n ), ('comment', models.TextField(blank=True, max_length=255, null=\n True))]), migrations.CreateModel(name='Item', fields=[('id', models\n .AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('barcode', models.CharField(blank=True,\n max_length=100, null=True)), ('item_name', models.CharField(blank=\n True, max_length=100, null=True)), ('catagory', models.CharField(\n blank=True, max_length=100, null=True)), ('wholesale_price', models\n .FloatField(blank=True, null=True)), ('retail_price', models.\n FloatField(blank=True, null=True)), ('tax', models.FloatField(blank\n =True, null=True)), ('quantity_stock', models.IntegerField(blank=\n True, null=True)), ('receiving_quantity', models.IntegerField(blank\n =True, null=True)), ('description', models.TextField(blank=True,\n max_length=1000, null=True)), ('image', models.ImageField(blank=\n True, default='no-img.jpg', null=True, upload_to='item/')), (\n 'item_has_serial_number', models.BooleanField(default=False)), (\n 'reorder_level', models.CharField(blank=True, max_length=10, null=\n True))]), migrations.CreateModel(name='Customer', fields=[(\n 'employee_ptr', models.OneToOneField(auto_created=True, on_delete=\n django.db.models.deletion.CASCADE, parent_link=True, primary_key=\n True, serialize=False, to='account.Employee'))], bases=(\n 'account.employee',)), migrations.CreateModel(name='Supplier',\n fields=[('employee_ptr', models.OneToOneField(auto_created=True,\n on_delete=django.db.models.deletion.CASCADE, parent_link=True,\n primary_key=True, serialize=False, to='account.Employee')), (\n 'company_name', models.CharField(blank=True, max_length=100, null=\n True))], bases=('account.employee',)), migrations.AddField(\n model_name='item', name='supplier', field=models.ForeignKey(blank=\n True, null=True, on_delete=django.db.models.deletion.CASCADE, to=\n 'account.Supplier'))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Employee', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('first_name', models.CharField(blank=\n True, max_length=30, null=True)), ('last_name', models.CharField(\n blank=True, max_length=30, null=True)), ('gender', models.CharField\n (blank=True, max_length=10, null=True)), ('email', models.\n EmailField(blank=True, max_length=255, null=True)), ('phone_number',\n models.CharField(blank=True, max_length=20, null=True)), ('address',\n models.TextField(blank=True, max_length=255, null=True)), ('city',\n models.CharField(blank=True, max_length=50, null=True)), ('state',\n models.CharField(blank=True, max_length=50, null=True)), (\n 'post_code', models.CharField(blank=True, max_length=10, null=True)\n ), ('comment', models.TextField(blank=True, max_length=255, null=\n True))]), migrations.CreateModel(name='Item', fields=[('id', models\n .AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('barcode', models.CharField(blank=True,\n max_length=100, null=True)), ('item_name', models.CharField(blank=\n True, max_length=100, null=True)), ('catagory', models.CharField(\n blank=True, max_length=100, null=True)), ('wholesale_price', models\n .FloatField(blank=True, null=True)), ('retail_price', models.\n FloatField(blank=True, null=True)), ('tax', models.FloatField(blank\n =True, null=True)), ('quantity_stock', models.IntegerField(blank=\n True, null=True)), ('receiving_quantity', models.IntegerField(blank\n =True, null=True)), ('description', models.TextField(blank=True,\n max_length=1000, null=True)), ('image', models.ImageField(blank=\n True, default='no-img.jpg', null=True, upload_to='item/')), (\n 'item_has_serial_number', models.BooleanField(default=False)), (\n 'reorder_level', models.CharField(blank=True, max_length=10, null=\n True))]), migrations.CreateModel(name='Customer', fields=[(\n 'employee_ptr', models.OneToOneField(auto_created=True, on_delete=\n django.db.models.deletion.CASCADE, parent_link=True, primary_key=\n True, serialize=False, to='account.Employee'))], bases=(\n 'account.employee',)), migrations.CreateModel(name='Supplier',\n fields=[('employee_ptr', models.OneToOneField(auto_created=True,\n on_delete=django.db.models.deletion.CASCADE, parent_link=True,\n primary_key=True, serialize=False, to='account.Employee')), (\n 'company_name', models.CharField(blank=True, max_length=100, null=\n True))], bases=('account.employee',)), migrations.AddField(\n model_name='item', name='supplier', field=models.ForeignKey(blank=\n True, null=True, on_delete=django.db.models.deletion.CASCADE, to=\n 'account.Supplier'))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11 on 2018-02-24 11:30\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Employee',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('first_name', models.CharField(blank=True, max_length=30, null=True)),\n ('last_name', models.CharField(blank=True, max_length=30, null=True)),\n ('gender', models.CharField(blank=True, max_length=10, null=True)),\n ('email', models.EmailField(blank=True, max_length=255, null=True)),\n ('phone_number', models.CharField(blank=True, max_length=20, null=True)),\n ('address', models.TextField(blank=True, max_length=255, null=True)),\n ('city', models.CharField(blank=True, max_length=50, null=True)),\n ('state', models.CharField(blank=True, max_length=50, null=True)),\n ('post_code', models.CharField(blank=True, max_length=10, null=True)),\n ('comment', models.TextField(blank=True, max_length=255, null=True)),\n ],\n ),\n migrations.CreateModel(\n name='Item',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('barcode', models.CharField(blank=True, max_length=100, null=True)),\n ('item_name', models.CharField(blank=True, max_length=100, null=True)),\n ('catagory', models.CharField(blank=True, max_length=100, null=True)),\n ('wholesale_price', models.FloatField(blank=True, null=True)),\n ('retail_price', models.FloatField(blank=True, null=True)),\n ('tax', models.FloatField(blank=True, null=True)),\n ('quantity_stock', models.IntegerField(blank=True, null=True)),\n ('receiving_quantity', models.IntegerField(blank=True, null=True)),\n ('description', models.TextField(blank=True, max_length=1000, null=True)),\n ('image', models.ImageField(blank=True, default='no-img.jpg', null=True, upload_to='item/')),\n ('item_has_serial_number', models.BooleanField(default=False)),\n ('reorder_level', models.CharField(blank=True, max_length=10, null=True)),\n ],\n ),\n migrations.CreateModel(\n name='Customer',\n fields=[\n ('employee_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='account.Employee')),\n ],\n bases=('account.employee',),\n ),\n migrations.CreateModel(\n name='Supplier',\n fields=[\n ('employee_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='account.Employee')),\n ('company_name', models.CharField(blank=True, max_length=100, null=True)),\n ],\n bases=('account.employee',),\n ),\n migrations.AddField(\n model_name='item',\n name='supplier',\n field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='account.Supplier'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python3
import matplotlib
from matplotlib.colors import to_hex
from matplotlib import cm
import matplotlib.pyplot as plt
import numpy as np
import itertools as it
from pathlib import Path
import subprocess
from tqdm import tqdm
from koala import plotting as pl
from koala import phase_diagrams as pd
from koala import pointsets, voronization, flux_finder, graph_color
from koala import example_graphs as eg
import functools
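# Fold a list of iterables with XOR: keeps the elements that occur in an odd number of the sets.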
def multi_set_symmetric_difference(sets):
return list(functools.reduce(lambda a,b: a^b, [set(s) for s in sets]))
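# One flood-fill step over plaquettes: the current set plus every adjacent plaquette.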
def flood_iteration_plaquettes(l, plaquettes):
return set(plaquettes) | set(it.chain.from_iterable(l.plaquettes[p].adjacent_plaquettes for p in plaquettes))
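# One flood-fill step over vertices: the current set plus every vertex one edge away.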
def flood_iteration_vertices(l, vertices):
return set(vertices) | set(it.chain.from_iterable(i for v in set(vertices) for i in l.edges.indices[l.vertices.adjacent_edges[v]]))
# imports just for this plot
column_width = 3.375
w = 3.375
black_line_widths = 1.5
matplotlib.rcParams.update({'font.size': 13, 'text.usetex': True, 'font.family': 'serif', 'font.serif': ['Computer Modern']})
matplotlib.rcParams.update({"axes.linewidth": black_line_widths})
line_colors = [to_hex(a) for a in cm.inferno([0.25, 0.5, 0.75])]
rng = np.random.default_rng(seed = 10)
l, coloring, ujk = eg.make_amorphous(8, rng = rng)
# l, coloring, ujk = eg.make_honeycomb(8)
plaquettes = [40,]
vertices = [78,]
subprocess.run(["mkdir", "-p", "./animation"])
for n in tqdm(range(15)):
fig, axes = plt.subplots(nrows=1, ncols=2)
fig.set_size_inches(2 * w, 2/2 * w)
for a in axes: a.set(xticks = [], yticks = [])
# pl.plot_vertex_indices(l, ax = ax)
# pl.plot_edge_indices(l, ax = ax)
# pl.plot_plaquette_indices(l, ax = ax)
if n > 0:
vertices = flood_iteration_vertices(l, vertices)
plaquettes = flood_iteration_plaquettes(l, plaquettes)
ax = axes[0]
multi_edges = multi_set_symmetric_difference([l.vertices.adjacent_edges[v] for v in vertices])
if multi_edges: pl.plot_dual(l, ax = ax, color_scheme = line_colors[1:], subset = multi_edges)
pl.plot_edges(l, ax = ax, color = 'k', subset = multi_edges)
pl.plot_vertices(l, ax = ax, subset = list(vertices), s = 5)
pl.plot_edges(l, ax = ax, alpha = 0.1)
pl.plot_dual(l, ax = ax, color_scheme = line_colors[1:], alpha = 0.1)
ax.set(xticks = [], yticks = [])
ax = axes[1]
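    # Right panel: request flux -1 on each flooded plaquette, then solve for a bond sector ujk that realises it.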
plaquette_boolean = np.array([i in plaquettes for i in range(l.n_plaquettes)])
fluxes = 1 - 2*plaquette_boolean
ujk = flux_finder.find_flux_sector(l, fluxes, ujk)
fluxes = flux_finder.fluxes_from_bonds(l, ujk)
pl.plot_edges(l, ax = ax, alpha = 0.1)
pl.plot_dual(l, ax = ax, color_scheme = line_colors[1:], alpha = 0.1)
pl.plot_edges(l, ax = ax, subset = (ujk == -1))
if len(plaquettes) > 1: pl.plot_dual(l, ax = ax, color_scheme = line_colors[1:], subset = (ujk == -1), )
    pl.plot_plaquettes(l, subset = fluxes == -1, ax = ax, color_scheme = ["orange", "white"], alpha = 0.5)
ax.set(xticks = [], yticks = [])
fig.tight_layout()
if n == 3:
fig.savefig(f'./{Path.cwd().name}.svg', transparent = True)
fig.savefig(f'./{Path.cwd().name}.pdf')
fig.savefig(f"animation/iteration_{n:03}.svg")
plt.close(fig)
subprocess.run(["magick", "animation/*.svg", f'./{Path.cwd().name}.gif'])
subprocess.run(["convert", "-delay", "100", f'./{Path.cwd().name}.gif', f'./{Path.cwd().name}.gif'])
subprocess.run(["rm", "-r", "./animation"])
|
normal
|
{
"blob_id": "d429f03c0f0c241166d6c0a5a45dc1101bcaec16",
"index": 5878,
"step-1": "<mask token>\n\n\ndef multi_set_symmetric_difference(sets):\n return list(functools.reduce(lambda a, b: a ^ b, [set(s) for s in sets]))\n\n\ndef flood_iteration_plaquettes(l, plaquettes):\n return set(plaquettes) | set(it.chain.from_iterable(l.plaquettes[p].\n adjacent_plaquettes for p in plaquettes))\n\n\ndef flood_iteration_vertices(l, vertices):\n return set(vertices) | set(it.chain.from_iterable(i for v in set(\n vertices) for i in l.edges.indices[l.vertices.adjacent_edges[v]]))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef multi_set_symmetric_difference(sets):\n return list(functools.reduce(lambda a, b: a ^ b, [set(s) for s in sets]))\n\n\ndef flood_iteration_plaquettes(l, plaquettes):\n return set(plaquettes) | set(it.chain.from_iterable(l.plaquettes[p].\n adjacent_plaquettes for p in plaquettes))\n\n\ndef flood_iteration_vertices(l, vertices):\n return set(vertices) | set(it.chain.from_iterable(i for v in set(\n vertices) for i in l.edges.indices[l.vertices.adjacent_edges[v]]))\n\n\n<mask token>\nmatplotlib.rcParams.update({'font.size': 13, 'text.usetex': True,\n 'font.family': 'serif', 'font.serif': ['Computer Modern']})\nmatplotlib.rcParams.update({'axes.linewidth': black_line_widths})\n<mask token>\nsubprocess.run(['mkdir', '-p', './animation'])\nfor n in tqdm(range(15)):\n fig, axes = plt.subplots(nrows=1, ncols=2)\n fig.set_size_inches(2 * w, 2 / 2 * w)\n for a in axes:\n a.set(xticks=[], yticks=[])\n if n > 0:\n vertices = flood_iteration_vertices(l, vertices)\n plaquettes = flood_iteration_plaquettes(l, plaquettes)\n ax = axes[0]\n multi_edges = multi_set_symmetric_difference([l.vertices.adjacent_edges\n [v] for v in vertices])\n if multi_edges:\n pl.plot_dual(l, ax=ax, color_scheme=line_colors[1:], subset=multi_edges\n )\n pl.plot_edges(l, ax=ax, color='k', subset=multi_edges)\n pl.plot_vertices(l, ax=ax, subset=list(vertices), s=5)\n pl.plot_edges(l, ax=ax, alpha=0.1)\n pl.plot_dual(l, ax=ax, color_scheme=line_colors[1:], alpha=0.1)\n ax.set(xticks=[], yticks=[])\n ax = axes[1]\n plaquette_boolean = np.array([(i in plaquettes) for i in range(l.\n n_plaquettes)])\n fluxes = 1 - 2 * plaquette_boolean\n ujk = flux_finder.find_flux_sector(l, fluxes, ujk)\n fluxes = flux_finder.fluxes_from_bonds(l, ujk)\n pl.plot_edges(l, ax=ax, alpha=0.1)\n pl.plot_dual(l, ax=ax, color_scheme=line_colors[1:], alpha=0.1)\n pl.plot_edges(l, ax=ax, subset=ujk == -1)\n if len(plaquettes) > 1:\n pl.plot_dual(l, ax=ax, color_scheme=line_colors[1:], subset=ujk == -1)\n pl.plot_plaquettes(l, subset=fluxes == -1, ax=ax, color_scheme=[\n 'orange', 'white'], alpha=0.5)\n ax.set(xticks=[], yticks=[])\n fig.tight_layout()\n if n == 3:\n fig.savefig(f'./{Path.cwd().name}.svg', transparent=True)\n fig.savefig(f'./{Path.cwd().name}.pdf')\n fig.savefig(f'animation/iteration_{n:03}.svg')\n plt.close(fig)\nsubprocess.run(['magick', 'animation/*.svg', f'./{Path.cwd().name}.gif'])\nsubprocess.run(['convert', '-delay', '100', f'./{Path.cwd().name}.gif',\n f'./{Path.cwd().name}.gif'])\nsubprocess.run(['rm', '-r', './animation'])\n",
"step-3": "<mask token>\n\n\ndef multi_set_symmetric_difference(sets):\n return list(functools.reduce(lambda a, b: a ^ b, [set(s) for s in sets]))\n\n\ndef flood_iteration_plaquettes(l, plaquettes):\n return set(plaquettes) | set(it.chain.from_iterable(l.plaquettes[p].\n adjacent_plaquettes for p in plaquettes))\n\n\ndef flood_iteration_vertices(l, vertices):\n return set(vertices) | set(it.chain.from_iterable(i for v in set(\n vertices) for i in l.edges.indices[l.vertices.adjacent_edges[v]]))\n\n\ncolumn_width = 3.375\nw = 3.375\nblack_line_widths = 1.5\nmatplotlib.rcParams.update({'font.size': 13, 'text.usetex': True,\n 'font.family': 'serif', 'font.serif': ['Computer Modern']})\nmatplotlib.rcParams.update({'axes.linewidth': black_line_widths})\nline_colors = [to_hex(a) for a in cm.inferno([0.25, 0.5, 0.75])]\nrng = np.random.default_rng(seed=10)\nl, coloring, ujk = eg.make_amorphous(8, rng=rng)\nplaquettes = [40]\nvertices = [78]\nsubprocess.run(['mkdir', '-p', './animation'])\nfor n in tqdm(range(15)):\n fig, axes = plt.subplots(nrows=1, ncols=2)\n fig.set_size_inches(2 * w, 2 / 2 * w)\n for a in axes:\n a.set(xticks=[], yticks=[])\n if n > 0:\n vertices = flood_iteration_vertices(l, vertices)\n plaquettes = flood_iteration_plaquettes(l, plaquettes)\n ax = axes[0]\n multi_edges = multi_set_symmetric_difference([l.vertices.adjacent_edges\n [v] for v in vertices])\n if multi_edges:\n pl.plot_dual(l, ax=ax, color_scheme=line_colors[1:], subset=multi_edges\n )\n pl.plot_edges(l, ax=ax, color='k', subset=multi_edges)\n pl.plot_vertices(l, ax=ax, subset=list(vertices), s=5)\n pl.plot_edges(l, ax=ax, alpha=0.1)\n pl.plot_dual(l, ax=ax, color_scheme=line_colors[1:], alpha=0.1)\n ax.set(xticks=[], yticks=[])\n ax = axes[1]\n plaquette_boolean = np.array([(i in plaquettes) for i in range(l.\n n_plaquettes)])\n fluxes = 1 - 2 * plaquette_boolean\n ujk = flux_finder.find_flux_sector(l, fluxes, ujk)\n fluxes = flux_finder.fluxes_from_bonds(l, ujk)\n pl.plot_edges(l, ax=ax, alpha=0.1)\n pl.plot_dual(l, ax=ax, color_scheme=line_colors[1:], alpha=0.1)\n pl.plot_edges(l, ax=ax, subset=ujk == -1)\n if len(plaquettes) > 1:\n pl.plot_dual(l, ax=ax, color_scheme=line_colors[1:], subset=ujk == -1)\n pl.plot_plaquettes(l, subset=fluxes == -1, ax=ax, color_scheme=[\n 'orange', 'white'], alpha=0.5)\n ax.set(xticks=[], yticks=[])\n fig.tight_layout()\n if n == 3:\n fig.savefig(f'./{Path.cwd().name}.svg', transparent=True)\n fig.savefig(f'./{Path.cwd().name}.pdf')\n fig.savefig(f'animation/iteration_{n:03}.svg')\n plt.close(fig)\nsubprocess.run(['magick', 'animation/*.svg', f'./{Path.cwd().name}.gif'])\nsubprocess.run(['convert', '-delay', '100', f'./{Path.cwd().name}.gif',\n f'./{Path.cwd().name}.gif'])\nsubprocess.run(['rm', '-r', './animation'])\n",
"step-4": "import matplotlib\nfrom matplotlib.colors import to_hex\nfrom matplotlib import cm\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport itertools as it\nfrom pathlib import Path\nimport subprocess\nfrom tqdm import tqdm\nfrom koala import plotting as pl\nfrom koala import phase_diagrams as pd\nfrom koala import pointsets, voronization, flux_finder, graph_color\nfrom koala import example_graphs as eg\nimport functools\n\n\ndef multi_set_symmetric_difference(sets):\n return list(functools.reduce(lambda a, b: a ^ b, [set(s) for s in sets]))\n\n\ndef flood_iteration_plaquettes(l, plaquettes):\n return set(plaquettes) | set(it.chain.from_iterable(l.plaquettes[p].\n adjacent_plaquettes for p in plaquettes))\n\n\ndef flood_iteration_vertices(l, vertices):\n return set(vertices) | set(it.chain.from_iterable(i for v in set(\n vertices) for i in l.edges.indices[l.vertices.adjacent_edges[v]]))\n\n\ncolumn_width = 3.375\nw = 3.375\nblack_line_widths = 1.5\nmatplotlib.rcParams.update({'font.size': 13, 'text.usetex': True,\n 'font.family': 'serif', 'font.serif': ['Computer Modern']})\nmatplotlib.rcParams.update({'axes.linewidth': black_line_widths})\nline_colors = [to_hex(a) for a in cm.inferno([0.25, 0.5, 0.75])]\nrng = np.random.default_rng(seed=10)\nl, coloring, ujk = eg.make_amorphous(8, rng=rng)\nplaquettes = [40]\nvertices = [78]\nsubprocess.run(['mkdir', '-p', './animation'])\nfor n in tqdm(range(15)):\n fig, axes = plt.subplots(nrows=1, ncols=2)\n fig.set_size_inches(2 * w, 2 / 2 * w)\n for a in axes:\n a.set(xticks=[], yticks=[])\n if n > 0:\n vertices = flood_iteration_vertices(l, vertices)\n plaquettes = flood_iteration_plaquettes(l, plaquettes)\n ax = axes[0]\n multi_edges = multi_set_symmetric_difference([l.vertices.adjacent_edges\n [v] for v in vertices])\n if multi_edges:\n pl.plot_dual(l, ax=ax, color_scheme=line_colors[1:], subset=multi_edges\n )\n pl.plot_edges(l, ax=ax, color='k', subset=multi_edges)\n pl.plot_vertices(l, ax=ax, subset=list(vertices), s=5)\n pl.plot_edges(l, ax=ax, alpha=0.1)\n pl.plot_dual(l, ax=ax, color_scheme=line_colors[1:], alpha=0.1)\n ax.set(xticks=[], yticks=[])\n ax = axes[1]\n plaquette_boolean = np.array([(i in plaquettes) for i in range(l.\n n_plaquettes)])\n fluxes = 1 - 2 * plaquette_boolean\n ujk = flux_finder.find_flux_sector(l, fluxes, ujk)\n fluxes = flux_finder.fluxes_from_bonds(l, ujk)\n pl.plot_edges(l, ax=ax, alpha=0.1)\n pl.plot_dual(l, ax=ax, color_scheme=line_colors[1:], alpha=0.1)\n pl.plot_edges(l, ax=ax, subset=ujk == -1)\n if len(plaquettes) > 1:\n pl.plot_dual(l, ax=ax, color_scheme=line_colors[1:], subset=ujk == -1)\n pl.plot_plaquettes(l, subset=fluxes == -1, ax=ax, color_scheme=[\n 'orange', 'white'], alpha=0.5)\n ax.set(xticks=[], yticks=[])\n fig.tight_layout()\n if n == 3:\n fig.savefig(f'./{Path.cwd().name}.svg', transparent=True)\n fig.savefig(f'./{Path.cwd().name}.pdf')\n fig.savefig(f'animation/iteration_{n:03}.svg')\n plt.close(fig)\nsubprocess.run(['magick', 'animation/*.svg', f'./{Path.cwd().name}.gif'])\nsubprocess.run(['convert', '-delay', '100', f'./{Path.cwd().name}.gif',\n f'./{Path.cwd().name}.gif'])\nsubprocess.run(['rm', '-r', './animation'])\n",
"step-5": "#!/usr/bin/env python3\n\nimport matplotlib\nfrom matplotlib.colors import to_hex\nfrom matplotlib import cm\n\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport itertools as it\nfrom pathlib import Path\nimport subprocess\nfrom tqdm import tqdm\n\nfrom koala import plotting as pl\nfrom koala import phase_diagrams as pd\nfrom koala import pointsets, voronization, flux_finder, graph_color\nfrom koala import example_graphs as eg\n\nimport functools\n\ndef multi_set_symmetric_difference(sets):\n return list(functools.reduce(lambda a,b: a^b, [set(s) for s in sets]))\n\ndef flood_iteration_plaquettes(l, plaquettes):\n return set(plaquettes) | set(it.chain.from_iterable(l.plaquettes[p].adjacent_plaquettes for p in plaquettes))\n\ndef flood_iteration_vertices(l, vertices):\n return set(vertices) | set(it.chain.from_iterable(i for v in set(vertices) for i in l.edges.indices[l.vertices.adjacent_edges[v]]))\n\n\n# imports just for this plot\n\ncolumn_width = 3.375\nw = 3.375\nblack_line_widths = 1.5\n\nmatplotlib.rcParams.update({'font.size': 13, 'text.usetex': True, 'font.family': 'serif', 'font.serif': ['Computer Modern']})\nmatplotlib.rcParams.update({\"axes.linewidth\": black_line_widths})\n\nline_colors = [to_hex(a) for a in cm.inferno([0.25, 0.5, 0.75])]\n\nrng = np.random.default_rng(seed = 10)\nl, coloring, ujk = eg.make_amorphous(8, rng = rng)\n# l, coloring, ujk = eg.make_honeycomb(8)\n\nplaquettes = [40,]\nvertices = [78,]\n\nsubprocess.run([\"mkdir\", \"-p\", \"./animation\"])\n\nfor n in tqdm(range(15)):\n fig, axes = plt.subplots(nrows=1, ncols=2)\n fig.set_size_inches(2 * w, 2/2 * w)\n for a in axes: a.set(xticks = [], yticks = [])\n\n # pl.plot_vertex_indices(l, ax = ax)\n # pl.plot_edge_indices(l, ax = ax)\n # pl.plot_plaquette_indices(l, ax = ax)\n \n if n > 0:\n vertices = flood_iteration_vertices(l, vertices)\n plaquettes = flood_iteration_plaquettes(l, plaquettes)\n \n ax = axes[0]\n \n multi_edges = multi_set_symmetric_difference([l.vertices.adjacent_edges[v] for v in vertices])\n \n if multi_edges: pl.plot_dual(l, ax = ax, color_scheme = line_colors[1:], subset = multi_edges)\n pl.plot_edges(l, ax = ax, color = 'k', subset = multi_edges)\n pl.plot_vertices(l, ax = ax, subset = list(vertices), s = 5)\n\n pl.plot_edges(l, ax = ax, alpha = 0.1)\n pl.plot_dual(l, ax = ax, color_scheme = line_colors[1:], alpha = 0.1)\n\n ax.set(xticks = [], yticks = [])\n \n ax = axes[1]\n\n plaquette_boolean = np.array([i in plaquettes for i in range(l.n_plaquettes)])\n\n fluxes = 1 - 2*plaquette_boolean\n ujk = flux_finder.find_flux_sector(l, fluxes, ujk)\n fluxes = flux_finder.fluxes_from_bonds(l, ujk)\n\n pl.plot_edges(l, ax = ax, alpha = 0.1)\n pl.plot_dual(l, ax = ax, color_scheme = line_colors[1:], alpha = 0.1)\n \n pl.plot_edges(l, ax = ax, subset = (ujk == -1))\n if len(plaquettes) > 1: pl.plot_dual(l, ax = ax, color_scheme = line_colors[1:], subset = (ujk == -1), )\n pl.plot_plaquettes(l, subset = fluxes == -1, ax = ax, color_scheme = [\"orange\", \"white\"], alpha = 0.5);\n ax.set(xticks = [], yticks = [])\n \n fig.tight_layout()\n if n == 3: \n fig.savefig(f'./{Path.cwd().name}.svg', transparent = True)\n fig.savefig(f'./{Path.cwd().name}.pdf')\n fig.savefig(f\"animation/iteration_{n:03}.svg\")\n plt.close(fig)\n\nsubprocess.run([\"magick\", \"animation/*.svg\", f'./{Path.cwd().name}.gif'])\nsubprocess.run([\"convert\", \"-delay\", \"100\", f'./{Path.cwd().name}.gif', f'./{Path.cwd().name}.gif'])\nsubprocess.run([\"rm\", \"-r\", \"./animation\"])",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
class Restaurant():
"""A restaurant model."""
def __init__(self, restaurant_name, cuisine_type):
"""Initialize name and type."""
self.name = restaurant_name
self.type = cuisine_type
def describe_restaurant(self):
"""Prints restaurant information."""
print("The restaurant's name is " + self.name.title())
print("The cuisine type is " + self.type.title())
def open_restaurant(self):
"""Message indicating the restaurant is open."""
print("The restaurant is now open!")
# my_restaurant = Restaurant('Juan Pho Yu', 'pho')
# print(my_restaurant.name)
# print(my_restaurant.type)
# my_restaurant.describe_restaurant()
# my_restaurant.open_restaurant()
|
normal
|
{
"blob_id": "4ecf976a7d655efb5af427083ec1943cae6fe56d",
"index": 3672,
"step-1": "class Restaurant:\n <mask token>\n <mask token>\n <mask token>\n\n def open_restaurant(self):\n \"\"\"Message indicating the restaurant is open.\"\"\"\n print('The restaurant is now open!')\n",
"step-2": "class Restaurant:\n <mask token>\n\n def __init__(self, restaurant_name, cuisine_type):\n \"\"\"Initialize name and type.\"\"\"\n self.name = restaurant_name\n self.type = cuisine_type\n <mask token>\n\n def open_restaurant(self):\n \"\"\"Message indicating the restaurant is open.\"\"\"\n print('The restaurant is now open!')\n",
"step-3": "class Restaurant:\n <mask token>\n\n def __init__(self, restaurant_name, cuisine_type):\n \"\"\"Initialize name and type.\"\"\"\n self.name = restaurant_name\n self.type = cuisine_type\n\n def describe_restaurant(self):\n \"\"\"Prints restaurant information.\"\"\"\n print(\"The restaurant's name is \" + self.name.title())\n print('The cuisine type is ' + self.type.title())\n\n def open_restaurant(self):\n \"\"\"Message indicating the restaurant is open.\"\"\"\n print('The restaurant is now open!')\n",
"step-4": "class Restaurant:\n \"\"\"A restaurant model.\"\"\"\n\n def __init__(self, restaurant_name, cuisine_type):\n \"\"\"Initialize name and type.\"\"\"\n self.name = restaurant_name\n self.type = cuisine_type\n\n def describe_restaurant(self):\n \"\"\"Prints restaurant information.\"\"\"\n print(\"The restaurant's name is \" + self.name.title())\n print('The cuisine type is ' + self.type.title())\n\n def open_restaurant(self):\n \"\"\"Message indicating the restaurant is open.\"\"\"\n print('The restaurant is now open!')\n",
"step-5": "class Restaurant():\n\t\"\"\"A restaurant model.\"\"\"\n\n\tdef __init__(self, restaurant_name, cuisine_type):\n\t\t\"\"\"Initialize name and type.\"\"\"\n\t\tself.name = restaurant_name\n\t\tself.type = cuisine_type\n\n\n\tdef describe_restaurant(self):\n\t\t\"\"\"Prints restaurant information.\"\"\"\n\t\tprint(\"The restaurant's name is \" + self.name.title())\n\t\tprint(\"The cuisine type is \" + self.type.title())\n\n\n\tdef open_restaurant(self):\n\t\t\"\"\"Message indicating the restaurant is open.\"\"\"\n\t\tprint(\"The restaurant is now open!\")\n\n\n# my_restaurant = Restaurant('Juan Pho Yu', 'pho')\n\n# print(my_restaurant.name)\n# print(my_restaurant.type)\n\n# my_restaurant.describe_restaurant()\n# my_restaurant.open_restaurant()",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from parser import read_expression_line, read_expression_lines, read_assignment_line, read_import_line, Import
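# Smoke tests for the parser module; written for Python 2 (print statements).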
def test_expression():
lines = ['a % b']
expression, left = read_expression_lines(lines)
assert expression is not None and len(left) == 0, left
print "test_expression 0: {} {}".format(expression, left)
lines = ['[a+b]']
expression, left = read_expression_lines(lines)
assert expression is not None
print "{} {}".format(expression, left)
lines = [
'get_name({',
'"first":"mike",',
'"last":"yu"',
'}):'
]
    expression, left = read_expression_lines(lines)
assert expression is not None
print "{} {}".format(expression, left)
lines = [
'[a[0]*b[1]]',
]
expression, left = read_expression_lines(lines)
assert expression is not None
print "{} {}".format(expression, left)
lines = [
'[a[0]*b[1] - c[2]*d[3],'
'e]',
]
expression, left = read_expression_lines(lines)
assert expression is not None
print "{} {}".format(expression, left)
lines = [
'(vector[i] * vector[i])'
]
expression, left = read_expression_lines(lines)
assert expression is not None
print "{} {}".format(expression, left)
lines = [
#'if value >= 0 && value < lengths[axis]:'
'value >= 0 && value < lengths[axis]'
#'value >= 0 && value < lengths[axis]'
#'value < 0'
]
expression, left = read_expression_lines(lines)
print "test_expression {} {}".format(expression, left)
assert expression is not None and len(left) == 0
lines = [
'assert(matrix == [[1,2,3],[4,5,6]])'
]
expression, left = read_expression_lines(lines)
print "test_expression assert {} {}".format(expression, left)
assert expression is not None and len(left) == 0
def test_assignment():
print "Testing assignments"
expression = read_assignment_line('a = 5')
assert expression is not None
print "{}".format(expression)
line = 'text = null'
expression = read_assignment_line(line)
assert expression is not None
print "test assignment 0: {}".format(expression)
expression = read_assignment_line('sum += 5')
assert expression is not None
print "{}".format(expression)
expression = read_assignment_line('some[axis] += value')
assert expression is not None
print "{}".format(expression)
expression = read_assignment_line('sum_indices = [indices[0], indices[1], indices[2]]')
assert expression is not None
print "{}".format(expression)
text = 'faces[0][0] = true'
expression = read_assignment_line(text)
assert expression is not None
print "{}\n {}".format(text, expression)
text = 'face.arm = true'
expression = read_assignment_line(text)
assert expression is not None
print "test asignment {}\n {}".format(text, expression)
text = '(a, b, c) = bob()'
expression = read_assignment_line(text)
assert expression is not None
print "test asignment 2 {}\n {}".format(text, expression)
text = 'c = bob(a - 6)'
assignment, tokens = read_assignment_line(text)
assert assignment is not None and len(tokens) == 0
print "test asignment 3 {}\n {}\n {}".format(text, assignment, tokens)
def test_parser():
expression, left = read_import_line("from shared import translate")
assert expression is not None
assert isinstance(expression, Import)
print "test_parser: {}".format(expression)
expression, left = read_import_line("from shared import (translate, bob)")
assert expression is not None
assert isinstance(expression, Import)
print "test_parser 2 : {}".format(expression)
lines = ['"john"']
expression, left = read_expression_line(lines[0])
assert expression is not None
lines = ['a + b']
expression, left = read_expression_line(lines[0])
assert expression is not None
lines = ['0']
expression, left = read_expression_line(lines[0])
assert expression is not None
lines = ['length(c)']
expression, left = read_expression_line(lines[0])
assert expression is not None
lines = ['length(c)[0][1][2]']
expression, left = read_expression_line(lines[0])
assert expression is not None
lines = ['(length(c))[0][1][2]']
expression, left = read_expression_line(lines[0])
assert expression is not None
print "test parser: {}".format(expression)
assert expression is not None
lines = ['d[0]']
expression, left = read_expression_line(lines[0])
assert expression is not None
lines = ['[e, f]']
expression, left = read_expression_line(lines[0])
assert expression is not None
lines = ['[g, str(h)]']
expression, left = read_expression_line(lines[0])
assert expression is not None
print "starting dict test 1"
lines = ['{"name":"mike"}']
expression, left = read_expression_line(lines[0])
assert expression is not None
lines = ['{"first":"alex", "last":"oh"}']
expression, left = read_expression_line(lines[0])
assert expression is not None
line = '((position[0] - middleX)/middleX)*width'
expression, left = read_expression_line(line)
assert expression is not None
line = 'keyboard.key_state.bob'
expression, left = read_expression_line(line)
assert expression is not None
print "test parser 3: {}".format(expression)
line = 'mouse.button[2]'
expression, left = read_expression_line(line)
assert expression is not None
print "test parser 4: {}".format(expression)
line = '{ "position": [0,0,0], "bob": "dole", "nice": "brother" }'
expression, left = read_expression_line(line)
assert expression is not None
print "test parser 5: {}".format(expression)
line = 'file_read(join([state.things_dir, "/", state.thing_name]), text)'
expression, left = read_expression_line(line)
assert expression is not None
print "test parser 6: {}".format(expression)
if __name__ == '__main__':
test_parser()
test_expression()
test_assignment()
|
normal
|
{
"blob_id": "657866affd653a99eb7d9a9a82b2f7d6503ec21a",
"index": 2468,
"step-1": "from parser import read_expression_line, read_expression_lines, read_assignment_line, read_import_line, Import\n\ndef test_expression():\n lines = ['a % b']\n expression, left = read_expression_lines(lines)\n assert expression is not None and len(left) == 0, left\n print \"test_expression 0: {} {}\".format(expression, left)\n lines = ['[a+b]']\n expression, left = read_expression_lines(lines)\n assert expression is not None\n print \"{} {}\".format(expression, left)\n\n lines = [\n 'get_name({',\n '\"first\":\"mike\",',\n '\"last\":\"yu\"',\n '}):'\n ]\n expression, leftt = read_expression_lines(lines)\n assert expression is not None\n print \"{} {}\".format(expression, left)\n\n lines = [\n '[a[0]*b[1]]',\n ]\n expression, left = read_expression_lines(lines)\n assert expression is not None\n print \"{} {}\".format(expression, left)\n\n lines = [\n '[a[0]*b[1] - c[2]*d[3],'\n 'e]',\n ]\n expression, left = read_expression_lines(lines)\n assert expression is not None\n print \"{} {}\".format(expression, left)\n\n lines = [\n '(vector[i] * vector[i])'\n ]\n expression, left = read_expression_lines(lines)\n assert expression is not None\n print \"{} {}\".format(expression, left)\n lines = [\n #'if value >= 0 && value < lengths[axis]:'\n 'value >= 0 && value < lengths[axis]'\n #'value >= 0 && value < lengths[axis]'\n #'value < 0'\n ]\n expression, left = read_expression_lines(lines)\n print \"test_expression {} {}\".format(expression, left)\n assert expression is not None and len(left) == 0\n\n lines = [\n 'assert(matrix == [[1,2,3],[4,5,6]])'\n ]\n expression, left = read_expression_lines(lines)\n print \"test_expression assert {} {}\".format(expression, left)\n assert expression is not None and len(left) == 0\n\ndef test_assignment():\n print \"Testing assignments\"\n expression = read_assignment_line('a = 5')\n assert expression is not None\n print \"{}\".format(expression)\n\n line = 'text = null'\n expression = read_assignment_line(line)\n assert expression is not None\n print \"test assignment 0: {}\".format(expression)\n\n expression = read_assignment_line('sum += 5')\n assert expression is not None\n print \"{}\".format(expression)\n\n expression = read_assignment_line('some[axis] += value')\n assert expression is not None\n print \"{}\".format(expression)\n\n expression = read_assignment_line('sum_indices = [indices[0], indices[1], indices[2]]')\n assert expression is not None\n print \"{}\".format(expression)\n text = 'faces[0][0] = true'\n expression = read_assignment_line(text)\n assert expression is not None\n print \"{}\\n {}\".format(text, expression)\n text = 'face.arm = true'\n expression = read_assignment_line(text)\n assert expression is not None\n print \"test asignment {}\\n {}\".format(text, expression)\n text = '(a, b, c) = bob()'\n expression = read_assignment_line(text)\n assert expression is not None\n print \"test asignment 2 {}\\n {}\".format(text, expression)\n text = 'c = bob(a - 6)'\n assignment, tokens = read_assignment_line(text)\n assert assignment is not None and len(tokens) == 0\n print \"test asignment 3 {}\\n {}\\n {}\".format(text, assignment, tokens)\n\ndef test_parser():\n expression, left = read_import_line(\"from shared import translate\")\n assert expression is not None\n assert isinstance(expression, Import)\n print \"test_parser: {}\".format(expression)\n\n expression, left = read_import_line(\"from shared import (translate, bob)\")\n assert expression is not None\n assert isinstance(expression, Import)\n print \"test_parser 2 : 
{}\".format(expression)\n\n lines = ['\"john\"']\n expression, left = read_expression_line(lines[0])\n assert expression is not None\n lines = ['a + b']\n expression, left = read_expression_line(lines[0])\n assert expression is not None\n lines = ['0']\n expression, left = read_expression_line(lines[0])\n assert expression is not None\n lines = ['length(c)']\n expression, left = read_expression_line(lines[0])\n assert expression is not None\n lines = ['length(c)[0][1][2]']\n expression, left = read_expression_line(lines[0])\n assert expression is not None\n lines = ['(length(c))[0][1][2]']\n expression, left = read_expression_line(lines[0])\n assert expression is not None\n print \"test parser: {}\".format(expression)\n assert expression is not None\n lines = ['d[0]']\n expression, left = read_expression_line(lines[0])\n assert expression is not None\n lines = ['[e, f]']\n expression, left = read_expression_line(lines[0])\n assert expression is not None\n lines = ['[g, str(h)]']\n expression, left = read_expression_line(lines[0])\n assert expression is not None\n print \"starting dict test 1\"\n lines = ['{\"name\":\"mike\"}']\n expression, left = read_expression_line(lines[0])\n assert expression is not None\n lines = ['{\"first\":\"alex\", \"last\":\"oh\"}']\n expression, left = read_expression_line(lines[0])\n assert expression is not None\n line = '((position[0] - middleX)/middleX)*width'\n expression, left = read_expression_line(line)\n assert expression is not None\n line = 'keyboard.key_state.bob'\n expression, left = read_expression_line(line)\n assert expression is not None\n print \"test parser 3: {}\".format(expression)\n\n line = 'mouse.button[2]'\n expression, left = read_expression_line(line)\n assert expression is not None\n print \"test parser 4: {}\".format(expression)\n\n line = '{ \"position\": [0,0,0], \"bob\": \"dole\", \"nice\": \"brother\" }'\n expression, left = read_expression_line(line)\n assert expression is not None\n print \"test parser 5: {}\".format(expression)\n\n line = 'file_read(join([state.things_dir, \"/\", state.thing_name]), text)'\n expression, left = read_expression_line(line)\n assert expression is not None\n print \"test parser 6: {}\".format(expression)\n\n\nif __name__ == '__main__':\n test_parser()\n test_expression()\n test_assignment()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import reddit
import tts
import sys
import praw
import os
#TODO: CENSOR CURSE WORDS,tag images that have curse words in them. strip punctuation from comment replies mp3
#TODO: pay for ads :thinking: buy views?
#TODO: sort by top upvotes
#todo: remove the formatting stuff
#todo: redo ducking
#todo quick script to get high upvote replies
#todo: remove hyperlinks
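# Usage: python <this script> <post_id> <num_posts> (the Reddit post ID and how many top comments to voice).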
POST_ID = sys.argv[1]
NUM_POSTS = int(sys.argv[2])
reddit_object = praw.Reddit(
client_id="aAhfCgWHCGOylw",
client_secret="FLrVvWquolZc4cnKaEhULqzfUYsxQQ",
user_agent='reddit_to_vid')
print(f"NOW PROCESSING POST ID: {POST_ID}")
comments_from_post,post_title = reddit.get_top_comments_from_id(reddit_object,POST_ID,NUM_POSTS)
tts.comment_to_mp3(post_title,'./quota.txt','titles',0,randomize=True)
n = 1
for comment in comments_from_post:
tts.comment_to_mp3(comment,"./quota.txt",POST_ID,n,randomize=True)
n+=1
tts.comment_to_mp3("Oh, you made it to the end? You're a ducking beast! Lets make a deal: Hit like and subscribe and I will provide more humanoid content. Goodbye!","./quota.txt",'duck',1,randomize=True)
|
normal
|
{
"blob_id": "fd57e13269ca00ed5eb05e00bd7999c041141187",
"index": 4256,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(f'NOW PROCESSING POST ID: {POST_ID}')\n<mask token>\ntts.comment_to_mp3(post_title, './quota.txt', 'titles', 0, randomize=True)\n<mask token>\nfor comment in comments_from_post:\n tts.comment_to_mp3(comment, './quota.txt', POST_ID, n, randomize=True)\n n += 1\ntts.comment_to_mp3(\n \"Oh, you made it to the end? You're a ducking beast! Lets make a deal: Hit like and subscribe and I will provide more humanoid content. Goodbye!\"\n , './quota.txt', 'duck', 1, randomize=True)\n",
"step-3": "<mask token>\nPOST_ID = sys.argv[1]\nNUM_POSTS = int(sys.argv[2])\nreddit_object = praw.Reddit(client_id='aAhfCgWHCGOylw', client_secret=\n 'FLrVvWquolZc4cnKaEhULqzfUYsxQQ', user_agent='reddit_to_vid')\nprint(f'NOW PROCESSING POST ID: {POST_ID}')\ncomments_from_post, post_title = reddit.get_top_comments_from_id(reddit_object,\n POST_ID, NUM_POSTS)\ntts.comment_to_mp3(post_title, './quota.txt', 'titles', 0, randomize=True)\nn = 1\nfor comment in comments_from_post:\n tts.comment_to_mp3(comment, './quota.txt', POST_ID, n, randomize=True)\n n += 1\ntts.comment_to_mp3(\n \"Oh, you made it to the end? You're a ducking beast! Lets make a deal: Hit like and subscribe and I will provide more humanoid content. Goodbye!\"\n , './quota.txt', 'duck', 1, randomize=True)\n",
"step-4": "import reddit\nimport tts\nimport sys\nimport praw\nimport os\nPOST_ID = sys.argv[1]\nNUM_POSTS = int(sys.argv[2])\nreddit_object = praw.Reddit(client_id='aAhfCgWHCGOylw', client_secret=\n 'FLrVvWquolZc4cnKaEhULqzfUYsxQQ', user_agent='reddit_to_vid')\nprint(f'NOW PROCESSING POST ID: {POST_ID}')\ncomments_from_post, post_title = reddit.get_top_comments_from_id(reddit_object,\n POST_ID, NUM_POSTS)\ntts.comment_to_mp3(post_title, './quota.txt', 'titles', 0, randomize=True)\nn = 1\nfor comment in comments_from_post:\n tts.comment_to_mp3(comment, './quota.txt', POST_ID, n, randomize=True)\n n += 1\ntts.comment_to_mp3(\n \"Oh, you made it to the end? You're a ducking beast! Lets make a deal: Hit like and subscribe and I will provide more humanoid content. Goodbye!\"\n , './quota.txt', 'duck', 1, randomize=True)\n",
"step-5": "import reddit\r\nimport tts\r\nimport sys\r\nimport praw\r\nimport os\r\n\r\n#TODO: CENSOR CURSE WORDS,tag images that have curse words in them. strip punctuation from comment replies mp3\r\n#TODO: pay for ads :thinking: buy views?\r\n#TODO: sort by top upvotes\r\n#todo: remove the formatting stuff\r\n#todo: redo ducking\r\n#todo quick script to get high upvote replies\r\n#todo: remove hyperlinks\r\n\r\nPOST_ID = sys.argv[1]\r\nNUM_POSTS = int(sys.argv[2])\r\n\r\nreddit_object = praw.Reddit(\r\n client_id=\"aAhfCgWHCGOylw\",\r\n client_secret=\"FLrVvWquolZc4cnKaEhULqzfUYsxQQ\",\r\n user_agent='reddit_to_vid')\r\n\r\n\r\nprint(f\"NOW PROCESSING POST ID: {POST_ID}\")\r\ncomments_from_post,post_title = reddit.get_top_comments_from_id(reddit_object,POST_ID,NUM_POSTS)\r\ntts.comment_to_mp3(post_title,'./quota.txt','titles',0,randomize=True)\r\nn = 1\r\nfor comment in comments_from_post:\r\n tts.comment_to_mp3(comment,\"./quota.txt\",POST_ID,n,randomize=True)\r\n n+=1\r\ntts.comment_to_mp3(\"Oh, you made it to the end? You're a ducking beast! Lets make a deal: Hit like and subscribe and I will provide more humanoid content. Goodbye!\",\"./quota.txt\",'duck',1,randomize=True)\r\n\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 11 11:11:32 2017
@author: lindseykitchell
"""
import pandas as pd
import numpy as np
from scipy.stats.stats import pearsonr
import matplotlib.pylab as plt
import glob
import os
pwd = os.getcwd()
df_dict = {}
subj_list = []
for file in glob.glob(pwd + "/*spectrum.json"):
subj_name = os.path.basename(file)[0:6]
subj_list.append(subj_name)
    df_dict[subj_name] = pd.read_json(file)
all_tracts = list(df_dict[subj_list[0]])[:-1]
fig = plt.figure(figsize=(18,18))
all_corrs = []
fig_num = 1
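# One heatmap panel per tract: the subject-by-subject Pearson correlation of that tract's profile.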
for tract in all_tracts:
corr = np.zeros([len(subj_list), len(subj_list)])
for num in range(len(subj_list)):
for num2 in range(len(subj_list)):
corrval, pval = pearsonr(df_dict[subj_list[num]][tract], df_dict[subj_list[num2]][tract])
corr[num, num2] = corrval
all_corrs.append(corr)
ax = fig.add_subplot(5,4,fig_num)
ax.set_aspect('equal')
ax.set_title(tract)
im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.cm.viridis, aspect='equal')
#ocean hot
fig_num += 1
cax = fig.add_axes([0.9, 0.1, 0.03, 0.8])
plt.colorbar(im, cax)
plt.savefig('alltractcorrelations.png', bbox_inches='tight')
plt.show()
|
normal
|
{
"blob_id": "f78f8f560b7eb70232658be762e2058535a68122",
"index": 9086,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor file in glob.glob(pwd + '/*spectrum.json'):\n subj_name = os.path.basename(file)[0:6]\n subj_list.append(subj_name)\n df_dict[os.path.basename(file)[0:6]] = pd.read_json(file)\n<mask token>\nfor tract in all_tracts:\n corr = np.zeros([len(subj_list), len(subj_list)])\n for num in range(len(subj_list)):\n for num2 in range(len(subj_list)):\n corrval, pval = pearsonr(df_dict[subj_list[num]][tract],\n df_dict[subj_list[num2]][tract])\n corr[num, num2] = corrval\n all_corrs.append(corr)\n ax = fig.add_subplot(5, 4, fig_num)\n ax.set_aspect('equal')\n ax.set_title(tract)\n im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.\n cm.viridis, aspect='equal')\n fig_num += 1\n<mask token>\nplt.colorbar(im, cax)\nplt.savefig('alltractcorrelations.png', bbox_inches='tight')\nplt.show()\n",
"step-3": "<mask token>\npwd = os.getcwd()\ndf_dict = {}\nsubj_list = []\nfor file in glob.glob(pwd + '/*spectrum.json'):\n subj_name = os.path.basename(file)[0:6]\n subj_list.append(subj_name)\n df_dict[os.path.basename(file)[0:6]] = pd.read_json(file)\nall_tracts = list(df_dict[subj_list[0]])[:-1]\nfig = plt.figure(figsize=(18, 18))\nall_corrs = []\nfig_num = 1\nfor tract in all_tracts:\n corr = np.zeros([len(subj_list), len(subj_list)])\n for num in range(len(subj_list)):\n for num2 in range(len(subj_list)):\n corrval, pval = pearsonr(df_dict[subj_list[num]][tract],\n df_dict[subj_list[num2]][tract])\n corr[num, num2] = corrval\n all_corrs.append(corr)\n ax = fig.add_subplot(5, 4, fig_num)\n ax.set_aspect('equal')\n ax.set_title(tract)\n im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.\n cm.viridis, aspect='equal')\n fig_num += 1\ncax = fig.add_axes([0.9, 0.1, 0.03, 0.8])\nplt.colorbar(im, cax)\nplt.savefig('alltractcorrelations.png', bbox_inches='tight')\nplt.show()\n",
"step-4": "<mask token>\nimport pandas as pd\nimport numpy as np\nfrom scipy.stats.stats import pearsonr\nimport matplotlib.pylab as plt\nimport glob\nimport os\npwd = os.getcwd()\ndf_dict = {}\nsubj_list = []\nfor file in glob.glob(pwd + '/*spectrum.json'):\n subj_name = os.path.basename(file)[0:6]\n subj_list.append(subj_name)\n df_dict[os.path.basename(file)[0:6]] = pd.read_json(file)\nall_tracts = list(df_dict[subj_list[0]])[:-1]\nfig = plt.figure(figsize=(18, 18))\nall_corrs = []\nfig_num = 1\nfor tract in all_tracts:\n corr = np.zeros([len(subj_list), len(subj_list)])\n for num in range(len(subj_list)):\n for num2 in range(len(subj_list)):\n corrval, pval = pearsonr(df_dict[subj_list[num]][tract],\n df_dict[subj_list[num2]][tract])\n corr[num, num2] = corrval\n all_corrs.append(corr)\n ax = fig.add_subplot(5, 4, fig_num)\n ax.set_aspect('equal')\n ax.set_title(tract)\n im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.\n cm.viridis, aspect='equal')\n fig_num += 1\ncax = fig.add_axes([0.9, 0.1, 0.03, 0.8])\nplt.colorbar(im, cax)\nplt.savefig('alltractcorrelations.png', bbox_inches='tight')\nplt.show()\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Jul 11 11:11:32 2017\n\n@author: lindseykitchell\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\nfrom scipy.stats.stats import pearsonr\nimport matplotlib.pylab as plt\nimport glob\nimport os\n\npwd = os.getcwd()\n\ndf_dict = {}\nsubj_list = []\nfor file in glob.glob(pwd + \"/*spectrum.json\"):\n subj_name = os.path.basename(file)[0:6]\n subj_list.append(subj_name)\n df_dict[os.path.basename(file)[0:6]] = pd.read_json(file)\n \nall_tracts = list(df_dict[subj_list[0]])[:-1] \n \n \n \nfig = plt.figure(figsize=(18,18))\nall_corrs = []\nfig_num = 1\nfor tract in all_tracts:\n corr = np.zeros([len(subj_list), len(subj_list)])\n for num in range(len(subj_list)):\n for num2 in range(len(subj_list)):\n corrval, pval = pearsonr(df_dict[subj_list[num]][tract], df_dict[subj_list[num2]][tract])\n corr[num, num2] = corrval\n all_corrs.append(corr)\n ax = fig.add_subplot(5,4,fig_num)\n ax.set_aspect('equal')\n ax.set_title(tract)\n im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.cm.viridis, aspect='equal')\n #ocean hot \n fig_num += 1\ncax = fig.add_axes([0.9, 0.1, 0.03, 0.8])\nplt.colorbar(im, cax)\nplt.savefig('alltractcorrelations.png', bbox_inches='tight')\nplt.show()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# PySNMP SMI module. Autogenerated from smidump -f python DOCS-IETF-QOS-MIB
# by libsmi2pysnmp-0.1.3 at Thu May 22 11:57:36 2014,
# Python version sys.version_info(major=2, minor=7, micro=2, releaselevel='final', serial=0)
# Imports
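# Note: mibBuilder is not imported here; the pysnmp MIB loader injects it into
# this module's namespace when the compiled MIB is loaded.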
( Integer, ObjectIdentifier, OctetString, ) = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
( NamedValues, ) = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
( ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ) = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint")
( DscpOrAny, ) = mibBuilder.importSymbols("DIFFSERV-DSCP-TC", "DscpOrAny")
( InterfaceIndex, ifIndex, ) = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex", "ifIndex")
( InetAddress, InetAddressType, InetPortNumber, ) = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType", "InetPortNumber")
( SnmpAdminString, ) = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
( ModuleCompliance, ObjectGroup, ) = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup")
( Bits, Counter32, Counter64, Integer32, Integer32, ModuleIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Unsigned32, mib_2, ) = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "Counter32", "Counter64", "Integer32", "Integer32", "ModuleIdentity", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Unsigned32", "mib-2")
( MacAddress, RowStatus, StorageType, TextualConvention, TimeStamp, TruthValue, ) = mibBuilder.importSymbols("SNMPv2-TC", "MacAddress", "RowStatus", "StorageType", "TextualConvention", "TimeStamp", "TruthValue")
# Types
class DocsIetfQosBitRate(TextualConvention, Unsigned32):
displayHint = "d"
class DocsIetfQosRfMacIfDirection(Integer):
subtypeSpec = Integer.subtypeSpec+SingleValueConstraint(2,1,)
namedValues = NamedValues(("downstream", 1), ("upstream", 2), )
class DocsIetfQosSchedulingType(Integer):
subtypeSpec = Integer.subtypeSpec+SingleValueConstraint(3,1,5,6,2,4,)
namedValues = NamedValues(("undefined", 1), ("bestEffort", 2), ("nonRealTimePollingService", 3), ("realTimePollingService", 4), ("unsolictedGrantServiceWithAD", 5), ("unsolictedGrantService", 6), )
# Objects
docsIetfQosMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 127)).setRevisions(("2006-01-23 00:00",))
if mibBuilder.loadTexts: docsIetfQosMIB.setOrganization("IETF IP over Cable Data Network (IPCDN)\nWorking Group")
if mibBuilder.loadTexts: docsIetfQosMIB.setContactInfo("\nCo-Author: Michael Patrick\nPostal: Motorola BCS\n 111 Locke Drive\n Marlborough, MA 01752-7214\n U.S.A.\nPhone: +1 508 786 7563\nE-mail: michael.patrick@motorola.com\n\nCo-Author: William Murwin\nPostal: Motorola BCS\n 111 Locke Drive\n Marlborough, MA 01752-7214\n U.S.A.\nPhone: +1 508 786 7594\nE-mail: w.murwin@motorola.com\n\nIETF IPCDN Working Group\nGeneral Discussion: ipcdn@ietf.org\nSubscribe: http://www.ietf.org/mailman/listinfo/ipcdn\nArchive: ftp://ftp.ietf.org/ietf-mail-archive/ipcdn\nCo-chairs: Richard Woundy, Richard_Woundy@cable.comcast.com\n Jean-Francois Mule, jfm@cablelabs.com")
if mibBuilder.loadTexts: docsIetfQosMIB.setDescription("This is the management information for\nQuality Of Service (QOS) for DOCSIS 1.1 and 2.0.\n\n\n\nCopyright (C) The Internet Society (2006). This version of\nthis MIB module is part of RFC 4323; see the RFC itself for\nfull legal notices.")
docsIetfQosNotifications = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 0))
docsIetfQosMIBObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 1))
docsIetfQosPktClassTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 1))
if mibBuilder.loadTexts: docsIetfQosPktClassTable.setDescription("This table describes the packet classification\nconfigured on the CM or CMTS.\nThe model is that a packet either received\nas input from an interface or transmitted\nfor output on an interface may be compared\nagainst an ordered list of rules pertaining to\nthe packet contents. Each rule is a row of this\ntable. A matching rule provides a Service Flow\nID to which the packet is classified.\nAll rules need to match for a packet to match\na classifier.\n\nThe objects in this row correspond to a set of\nClassifier Encoding parameters in a DOCSIS\nMAC management message. The\ndocsIetfQosPktClassBitMap indicates which\nparticular parameters were present in the\nclassifier as signaled in the DOCSIS message.\nIf the referenced parameter was not present\nin the signaled DOCSIS 1.1 and 2.0 Classifier, the\ncorresponding object in this row reports a\nvalue as specified in the DESCRIPTION section.")
docsIetfQosPktClassEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 1, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosPktClassId"))
if mibBuilder.loadTexts: docsIetfQosPktClassEntry.setDescription("An entry in this table provides a single packet\nclassifier rule. The index ifIndex is an ifType\nof docsCableMaclayer(127).")
docsIetfQosPktClassId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosPktClassId.setDescription("Index assigned to packet classifier entry by\nthe CMTS, which is unique per Service Flow.")
docsIetfQosPktClassDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 2), DocsIetfQosRfMacIfDirection()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDirection.setDescription("Indicates the direction to which the classifier\nis applied.")
docsIetfQosPktClassPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassPriority.setDescription("The value specifies the order of evaluation\nof the classifiers.\n\nThe higher the value, the higher the priority.\nThe value of 0 is used as default in\nprovisioned Service Flows Classifiers.\nThe default value of 64 is used for dynamic\nService Flow Classifiers.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the default\nvalue as defined above.")
docsIetfQosPktClassIpTosLow = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpTosLow.setDescription("The low value of a range of TOS byte values.\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.\n\nThe IP TOS octet, as originally defined in RFC 791,\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). This object is defined as an 8-bit\noctet as per the DOCSIS Specification\nfor packet classification.")
docsIetfQosPktClassIpTosHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpTosHigh.setDescription("The 8-bit high value of a range of TOS byte\nvalues.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the\nvalue of 0.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). This object is defined as an 8-bit\noctet as defined by the DOCSIS Specification\nfor packet classification.")
docsIetfQosPktClassIpTosMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpTosMask.setDescription("The mask value is bitwise ANDed with the TOS byte\nin an IP packet, and this value is used for\nrange checking of TosLow and TosHigh.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). This object is defined as an 8-bit\noctet per the DOCSIS Specification for packet\nclassification.")
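# Illustrative sketch of the TOS range check described above: the packet's
# TOS byte is ANDed with docsIetfQosPktClassIpTosMask and the result is
# compared against the inclusive TosLow..TosHigh range. All arguments are
# assumed to be 8-bit integers.
def _tos_matches(tos_byte, tos_mask, tos_low, tos_high):
    masked = tos_byte & tos_mask
    return tos_low <= masked <= tos_high
# Example: _tos_matches(0xB8, 0xFC, 0xB8, 0xB8) -> True
# (a mask of 0xFC compares only the 6 DSCP bits, ignoring the ECN bits)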
docsIetfQosPktClassIpProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 258))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpProtocol.setDescription("This object indicates the value of the IP\nProtocol field required for IP packets to match\nthis rule.\n\nThe value 256 matches traffic with any IP Protocol\nvalue. The value 257 by convention matches both TCP\nand UDP.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 258.")
docsIetfQosPktClassInetAddressType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 8), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetAddressType.setDescription("The type of the Internet address for\ndocsIetfQosPktClassInetSourceAddr,\ndocsIetfQosPktClassInetSourceMask,\ndocsIetfQosPktClassInetDestAddr, and\ndocsIetfQosPktClassInetDestMask.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\nipv4(1).")
docsIetfQosPktClassInetSourceAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 9), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetSourceAddr.setDescription("This object specifies the value of the IP\nSource Address required for packets to match\nthis rule.\n\nAn IP packet matches the rule when the packet\nIP Source Address bitwise ANDed with the\ndocsIetfQosPktClassInetSourceMask value equals the\ndocsIetfQosPktClassInetSourceAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'00000000'H.")
docsIetfQosPktClassInetSourceMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 10), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetSourceMask.setDescription("This object specifies which bits of a packet's\nIP Source Address are compared to match\nthis rule.\n\nAn IP packet matches the rule when the packet\nsource address bitwise ANDed with the\ndocsIetfQosPktClassInetSourceMask value equals the\ndocsIetfQosPktClassInetSourceAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'FFFFFFFF'H.")
docsIetfQosPktClassInetDestAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 11), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetDestAddr.setDescription("This object specifies the value of the IP\nDestination Address required for packets to match\nthis rule.\n\nAn IP packet matches the rule when the packet\nIP Destination Address bitwise ANDed with the\ndocsIetfQosPktClassInetDestMask value\nequals the docsIetfQosPktClassInetDestAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'00000000'H.")
docsIetfQosPktClassInetDestMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 12), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetDestMask.setDescription("This object specifies which bits of a packet's\nIP Destination Address are compared to\nmatch this rule.\n\nAn IP packet matches the rule when the packet\ndestination address bitwise ANDed with the\ndocsIetfQosPktClassInetDestMask value equals the\ndocsIetfQosPktClassInetDestAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'FFFFFFFF'H.")
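# Illustrative sketch of the address match rule shared by the four
# Inet Source/Dest Addr/Mask columns above: a packet address matches when
# (packet_addr AND mask) equals the rule address. Addresses are assumed to
# be bytes objects of equal length (4 octets for ipv4, 16 for ipv6).
def _inet_matches(pkt_addr, rule_addr, rule_mask):
    return all((p & m) == a for p, a, m in zip(pkt_addr, rule_addr, rule_mask))
# Example: _inet_matches(bytes([192, 0, 2, 7]), bytes([192, 0, 2, 0]),
#                        bytes([255, 255, 255, 0])) -> True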
docsIetfQosPktClassSourcePortStart = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 13), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassSourcePortStart.setDescription("This object specifies the low-end inclusive\nrange of TCP/UDP source port numbers to which\na packet is compared. This object is irrelevant\nfor non-TCP/UDP IP packets.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.")
docsIetfQosPktClassSourcePortEnd = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 14), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassSourcePortEnd.setDescription("This object specifies the high-end inclusive\nrange of TCP/UDP source port numbers to which\na packet is compared. This object is irrelevant\nfor non-TCP/UDP IP packets.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n65535.")
docsIetfQosPktClassDestPortStart = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 15), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestPortStart.setDescription("This object specifies the low-end inclusive\nrange of TCP/UDP destination port numbers to\nwhich a packet is compared.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.")
docsIetfQosPktClassDestPortEnd = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 16), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestPortEnd.setDescription("This object specifies the high-end inclusive\nrange of TCP/UDP destination port numbers to which\na packet is compared.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n65535.")
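# Illustrative sketch of the inclusive port-range checks defined by the
# four Source/DestPortStart/End columns above. The defaults (0 and 65535)
# make an unsignaled range match any TCP/UDP port.
def _port_ranges_match(src_port, dst_port, src_start=0, src_end=65535,
                       dst_start=0, dst_end=65535):
    return (src_start <= src_port <= src_end
            and dst_start <= dst_port <= dst_end)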
docsIetfQosPktClassDestMacAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 17), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestMacAddr.setDescription("An Ethernet packet matches an entry when its\ndestination MAC address bitwise ANDed with\ndocsIetfQosPktClassDestMacMask equals the value of\ndocsIetfQosPktClassDestMacAddr.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'000000000000'H.")
docsIetfQosPktClassDestMacMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 18), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestMacMask.setDescription("An Ethernet packet matches an entry when its\ndestination MAC address bitwise ANDed with\ndocsIetfQosPktClassDestMacMask equals the value of\ndocsIetfQosPktClassDestMacAddr.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'000000000000'H.")
docsIetfQosPktClassSourceMacAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 19), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassSourceMacAddr.setDescription("An Ethernet packet matches this entry when its\nsource MAC address equals the value of\nthis object.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'FFFFFFFFFFFF'H.")
docsIetfQosPktClassEnetProtocolType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 20), Integer().subtype(subtypeSpec=SingleValueConstraint(2,0,1,4,3,)).subtype(namedValues=NamedValues(("none", 0), ("ethertype", 1), ("dsap", 2), ("mac", 3), ("all", 4), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassEnetProtocolType.setDescription("This object indicates the format of the layer 3\nprotocol ID in the Ethernet packet. A value of\nnone(0) means that the rule does not use the\nlayer 3 protocol type as a matching criterion.\n\nA value of ethertype(1) means that the rule\napplies only to frames that contain an\nEtherType value. Ethertype values are contained\nin packets using the Dec-Intel-Xerox (DIX)\nencapsulation or the RFC1042 Sub-Network Access\nProtocol (SNAP) encapsulation formats.\n\nA value of dsap(2) means that the rule applies\nonly to frames using the IEEE802.3\nencapsulation format with a Destination Service\nAccess Point (DSAP) other\nthan 0xAA (which is reserved for SNAP).\n\nA value of mac(3) means that the rule applies\nonly to MAC management messages.\n\nA value of all(4) means that the rule matches\nall Ethernet packets.\n\nIf the Ethernet frame contains an 802.1P/Q Tag\nheader (i.e., EtherType 0x8100), this object\napplies to the embedded EtherType field within\nthe 802.1P/Q header.\n\nIf the referenced parameter is not present in a\nclassifier, this object reports the value of 0.")
docsIetfQosPktClassEnetProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassEnetProtocol.setDescription("If docsIetfQosPktClassEnetProtocolType is none(0),\nthis object is ignored when considering whether\na packet matches the current rule.\n\nIf docsIetfQosPktClassEnetProtocolType is ethertype(1),\nthis object gives the 16-bit value of the\nEtherType that the packet must match in order to\nmatch the rule.\n\nIf docsIetfQosPktClassEnetProtocolType is dsap(2),\nthe lower 8 bits of this object's value must match\nthe DSAP byte of the packet in order to match the\nrule.\n\nIf docsIetfQosPktClassEnetProtocolType is mac(3),\nthe lower 8 bits of this object's value represent a\nlower bound (inclusive) of MAC management message\ntype codes matched, and the upper 8 bits represent\nthe upper bound (inclusive) of matched MAC message\ntype codes. Certain message type codes are\nexcluded from matching, as specified in the\nreference.\n\nIf the Ethernet frame contains an 802.1P/Q Tag\nheader (i.e., EtherType 0x8100), this object applies\nto the embedded EtherType field within the 802.1P/Q\nheader.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.")
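# Illustrative sketch of the 802.1P/Q rule noted in both descriptions
# above: when the outer EtherType is 0x8100, the comparison applies to the
# embedded EtherType 4 bytes further into the frame. `frame` is assumed to
# be a raw Ethernet frame as bytes.
def _effective_ethertype(frame):
    etype = int.from_bytes(frame[12:14], "big")  # after the two MAC addresses
    if etype == 0x8100:  # 802.1P/Q tag present: skip TPID (2) + TCI (2)
        etype = int.from_bytes(frame[16:18], "big")
    return etype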
docsIetfQosPktClassUserPriLow = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassUserPriLow.setDescription("This object applies only to Ethernet frames\nusing the 802.1P/Q tag header (indicated with\nEtherType 0x8100). Such frames include a 16-bit\nTag that contains a 3-bit Priority field and\na 12-bit VLAN number.\n\nTagged Ethernet packets must have a 3-bit\nPriority field within the range of\ndocsIetfQosPktClassUserPriLow to\ndocsIetfQosPktClassUserPriHigh in order to match this\nrule.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.")
docsIetfQosPktClassUserPriHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassUserPriHigh.setDescription("This object applies only to Ethernet frames\nusing the 802.1P/Q tag header (indicated with\nEtherType 0x8100). Such frames include a 16-bit\nTag that contains a 3-bit Priority field and\na 12-bit VLAN number.\n\nTagged Ethernet packets must have a 3-bit\nPriority field within the range of\ndocsIetfQosPktClassUserPriLow to\ndocsIetfQosPktClassUserPriHigh in order to match this\nrule.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 7.")
docsIetfQosPktClassVlanId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassVlanId.setDescription("This object applies only to Ethernet frames\nusing the 802.1P/Q tag header.\n\nTagged packets must have a VLAN Identifier that\nmatches the value in order to match the rule.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.")
docsIetfQosPktClassStateActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 25), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassStateActive.setDescription("This object indicates whether or not the classifier\nis enabled to classify packets to a Service Flow.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas true(1).")
docsIetfQosPktClassPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 26), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassPkts.setDescription("This object counts the number of packets that have\nbeen classified using this entry. This\nincludes all packets delivered to a Service Flow\nmaximum rate policing function, whether or not that\nfunction drops the packets.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosPktClassBitMap = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 27), Bits().subtype(namedValues=NamedValues(("rulePriority", 0), ("activationState", 1), ("destPortStart", 10), ("destPortEnd", 11), ("destMac", 12), ("sourceMac", 13), ("ethertype", 14), ("userPri", 15), ("vlanId", 16), ("ipTos", 2), ("ipProtocol", 3), ("ipSourceAddr", 4), ("ipSourceMask", 5), ("ipDestAddr", 6), ("ipDestMask", 7), ("sourcePortStart", 8), ("sourcePortEnd", 9), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassBitMap.setDescription("This object indicates which parameter encodings\nwere actually present in the DOCSIS packet\nclassifier encoding signaled in the DOCSIS message\nthat created or modified the classifier. Note that\nDynamic Service Change messages have replace\nsemantics, so that all non-default parameters must\nbe present whether the classifier is being created\nor changed.\n\nA bit of this object is set to 1 if the parameter\nindicated by the comment was present in the\nclassifier encoding, and to 0 otherwise.\n\nNote that BITS are encoded most significant bit\nfirst, so that if, for example, bits 6 and 7 are\nset, this object is encoded as the octet string\n'030000'H.")
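# Illustrative sketch of the MSB-first BITS encoding noted above: bit i
# lands in bit (7 - i % 8) of octet i // 8. With the 17 named positions of
# this object, setting bits 6 and 7 reproduces the '030000'H example.
def _encode_bits(bit_positions, n_bits):
    octets = bytearray((n_bits + 7) // 8)
    for i in bit_positions:
        octets[i // 8] |= 0x80 >> (i % 8)
    return bytes(octets)
# Example: _encode_bits({6, 7}, 17).hex() -> '030000'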
docsIetfQosParamSetTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 2))
if mibBuilder.loadTexts: docsIetfQosParamSetTable.setDescription("This table describes the set of DOCSIS 1.1 and 2.0\nQOS parameters defined in a managed device.\n\nThe ifIndex index specifies a DOCSIS MAC Domain.\nThe docsIetfQosServiceFlowId index specifies a\nparticular Service Flow.\nThe docsIetfQosParamSetType index indicates whether\nthe active, admitted, or provisioned QOS Parameter\nSet is being described by the row.\n\nOnly the QOS Parameter Sets of DOCSIS 1.1 and 2.0\nService Flows are represented in this table.\n\nDOCSIS 1.0 QOS service profiles are not\nrepresented in this table.\n\nEach row corresponds to a DOCSIS QOS Parameter Set\nas signaled via DOCSIS MAC management messages.\nEach object in the row corresponds to one or\npart of one DOCSIS 1.1 Service Flow Encoding.\nThe docsIetfQosParamSetBitMap object in the row\nindicates which particular parameters were signaled\nin the original registration or dynamic service\nrequest message that created the QOS Parameter Set.\n\nIn many cases, even if a QOS Parameter Set parameter\nwas not signaled, the DOCSIS specification calls\nfor a default value to be used. That default value\nis reported as the value of the corresponding object\nin this row.\n\nMany objects are not applicable, depending on\nthe Service Flow direction or upstream scheduling\ntype. The object value reported in this case\nis specified in the DESCRIPTION clause.")
docsIetfQosParamSetEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 2, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosParamSetType"))
if mibBuilder.loadTexts: docsIetfQosParamSetEntry.setDescription("A unique set of QOS parameters.")
docsIetfQosParamSetServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 1), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetServiceClassName.setDescription("Refers to the Service Class Name from which the\nparameter set values were derived.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is a zero-length string.")
docsIetfQosParamSetPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetPriority.setDescription("The relative priority of a Service Flow.\nHigher numbers indicate higher priority.\nThis priority should only be used to differentiate\nService Flows with identical parameter sets.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter is\nnot applicable, the reported value is 0.")
docsIetfQosParamSetMaxTrafficRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 3), DocsIetfQosBitRate()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxTrafficRate.setDescription("Maximum sustained traffic rate allowed for this\nService Flow in bits/sec. Must count all MAC frame\ndata PDUs from the bytes following the MAC header\nHCS to the end of the CRC. The number of bytes\nforwarded is limited during any time interval.\nThe value 0 means no maximum traffic rate is\nenforced. This object applies to both upstream and\ndownstream Service Flows.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter is\nnot applicable, it is reported as 0.")
docsIetfQosParamSetMaxTrafficBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxTrafficBurst.setDescription("Specifies the token bucket size in bytes\nfor this parameter set. The value is calculated\nfrom the byte following the MAC header HCS to\nthe end of the CRC. This object is applied in\nconjunction with docsIetfQosParamSetMaxTrafficRate\nto calculate maximum sustained traffic rate.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object for scheduling types\nbestEffort(2), nonRealTimePollingService(3),\nand realTimePollingService(4) is 3044.\n\nIf this parameter is not applicable, it is reported\nas 0.")
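# Illustrative sketch of how the two objects above combine under a token
# bucket model: over any interval of t seconds, the bytes forwarded on the
# Service Flow may not exceed the bucket depth plus the sustained-rate
# allowance. This is an interpretation for illustration, not normative.
def _max_bytes_in_interval(rate_bps, burst_bytes, t_seconds):
    return burst_bytes + (rate_bps * t_seconds) / 8.0
# Example: _max_bytes_in_interval(1000000, 3044, 1.0) -> 128044.0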
docsIetfQosParamSetMinReservedRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 5), DocsIetfQosBitRate()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMinReservedRate.setDescription("Specifies the guaranteed minimum rate in\nbits/sec for this parameter set. The value is\ncalculated from the byte following the MAC\nheader HCS to the end of the CRC. The default\nvalue of 0 means that no bandwidth is reserved.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter\nis not applicable, it is reported as 0.")
docsIetfQosParamSetMinReservedPkt = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMinReservedPkt.setDescription("Specifies an assumed minimum packet size in\nbytes for which the\ndocsIetfQosParamSetMinReservedRate will be\nprovided. The value is calculated from the byte\nfollowing the MAC header HCS to the end of the\nCRC.\n\nIf the referenced parameter is omitted from a\nDOCSIS QOS parameter set, the default value is\nCMTS implementation dependent. In this case, the\nCMTS reports the default value it is using, and the\nCM reports a value of 0. If the referenced\nparameter is not applicable to the direction or\nscheduling type of the Service Flow, both CMTS and\nCM report this object's value as 0.")
docsIetfQosParamSetActiveTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetActiveTimeout.setDescription("Specifies the maximum duration in seconds that\nresources remain unused on an active service\nflow before the CMTS signals that both the active and\nadmitted parameter sets are null. The default\nvalue of 0 signifies an infinite amount of time.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0.")
docsIetfQosParamSetAdmittedTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(200)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetAdmittedTimeout.setDescription("Specifies the maximum duration in seconds that\nresources remain in admitted state before\nresources must be released.\n\nThe value of 0 signifies an infinite amount\nof time.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the\ndefault value of this object is 200.")
docsIetfQosParamSetMaxConcatBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxConcatBurst.setDescription("Specifies the maximum concatenated burst in\nbytes that an upstream Service Flow is allowed.\nThe value is calculated from the FC byte of the\nConcatenation MAC Header to the last CRC byte\nof the last concatenated MAC frame, inclusive.\nThe value of 0 specifies no maximum burst.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object for scheduling types\nbestEffort(2), nonRealTimePollingService(3), and\nrealTimePollingService(4) is 1522. If the parameter\nis not applicable, this object's value is reported\nas 0.")
docsIetfQosParamSetSchedulingType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 10), DocsIetfQosSchedulingType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetSchedulingType.setDescription("Specifies the upstream scheduling service used for\nupstream Service Flow.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set of an\nupstream Service Flow, the default value of this\nobject is bestEffort(2). For QOS parameter sets of\ndownstream Service Flows, this object's value is\nreported as undefined(1).")
docsIetfQosParamSetNomPollInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 11), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetNomPollInterval.setDescription("Specifies the nominal interval in microseconds\nbetween successive unicast request\nopportunities on an upstream Service Flow.\n\nThis object applies only to upstream Service Flows\nwith DocsIetfQosSchedulingType of value\nnonRealTimePollingService(3),\nrealTimePollingService(4), and\nunsolictedGrantServiceWithAD(5). The parameter is\nmandatory for realTimePollingService(4). If the\nparameter is omitted with\nnonRealTimePollingService(3), the CMTS uses an\nimplementation-dependent value. If the parameter\nis omitted with unsolictedGrantServiceWithAD(5),\nthe CMTS uses as a default value the value of the\nNominal Grant Interval parameter. In all cases,\nthe CMTS reports the value it is using when the\nparameter is applicable. The CM reports the\nsignaled parameter value if it was signaled,\nand 0 otherwise.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetTolPollJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 12), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetTolPollJitter.setDescription("Specifies the maximum amount of time in\nmicroseconds that the unicast request interval\nmay be delayed from the nominal periodic\nschedule on an upstream Service Flow.\n\nThis parameter is applicable only to upstream\nService Flows with a DocsIetfQosSchedulingType of\nrealTimePollingService(4) or\nunsolictedGrantServiceWithAD(5).\n\nIf the referenced parameter is applicable but not\npresent in the corresponding DOCSIS QOS Parameter\nSet, the CMTS uses an implementation-dependent\nvalue and reports the value it is using.\nThe CM reports a value of 0 in this case.\n\nIf the parameter is not applicable to the\ndirection or upstream scheduling type of the\nService Flow, both CMTS and CM report this\nobject's value as 0.")
docsIetfQosParamSetUnsolicitGrantSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetUnsolicitGrantSize.setDescription("Specifies the unsolicited grant size in bytes.\nThe grant size includes the entire MAC frame\ndata PDU from the Frame Control byte to the end\nof the MAC frame.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report\nthe signaled value of the parameter in this\ncase.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetNomGrantInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 14), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetNomGrantInterval.setDescription("Specifies the nominal interval in microseconds\nbetween successive data grant opportunities\non an upstream Service Flow.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report the\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetTolGrantJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 15), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetTolGrantJitter.setDescription("Specifies the maximum amount of time in\nmicroseconds that the transmission opportunities\nmay be delayed from the nominal periodic schedule.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report the\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetGrantsPerInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetGrantsPerInterval.setDescription("Specifies the number of data grants per Nominal\nGrant Interval\n(docsIetfQosParamSetNomGrantInterval).\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report the\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetTosAndMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 17), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetTosAndMask.setDescription("Specifies the AND mask for the IP TOS byte for\noverwriting the IP packet's TOS value. The IP packet\nTOS byte is bitwise ANDed with\ndocsIetfQosParamSetTosAndMask, and the result is\nbitwise ORed with docsIetfQosParamSetTosOrMask, and\nthat result is written to the IP packet TOS byte.\nA value of 'FF'H for docsIetfQosParamSetTosAndMask\nand a value of '00'H for\ndocsIetfQosParamSetTosOrMask means that the IP\nPacket TOS byte is not overwritten.\n\nThis combination is reported if the referenced\nparameter is not present in a QOS Parameter Set.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of docsIetfQosParamSetTosAndMask\nand docsIetfQosParamSetTosOrMask that would result\nin the modification of the ECN bits.\n\nIn particular, operators should not use values of\ndocsIetfQosParamSetTosAndMask that have either of\nthe least-significant two bits set to 0. Similarly,\noperators should not use values of\ndocsIetfQosParamSetTosOrMask that have either of\nthe least-significant two bits set to 1.\n\nEven though this object is only enforced by the\nCable Modem Termination System (CMTS),\nCable Modems MUST report the value as signaled in\nthe referenced parameter.")
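# Illustrative sketch of the overwrite rule just described, with an ECN
# guard mirroring the SHOULD-level advice above. The TOS byte and both
# masks are assumed to be 8-bit integers.
def _rewrite_tos(tos_byte, and_mask, or_mask):
    return (tos_byte & and_mask) | or_mask
def _masks_preserve_ecn(and_mask, or_mask):
    return (and_mask & 0x03) == 0x03 and (or_mask & 0x03) == 0x00
# Example: _rewrite_tos(0x2E, 0xFF, 0x00) -> 0x2E (the no-overwrite case)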
docsIetfQosParamSetTosOrMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 18), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetTosOrMask.setDescription("Specifies the OR mask for the IP TOS byte.\n\nSee the description of docsIetfQosParamSetTosAndMask\nfor further details.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of docsIetfQosParamSetTosAndMask\nand docsIetfQosParamSetTosOrMask that would result\nin the modification of the ECN bits.")
docsIetfQosParamSetMaxLatency = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 19), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxLatency.setDescription("Specifies the maximum latency between the\nreception of a packet by the CMTS on its NSI\nand the forwarding of the packet to the RF\ninterface. A value of 0 signifies no maximum\nlatency is enforced. This object only applies to\ndownstream Service Flows.\n\nIf the referenced parameter is not present in the\ncorresponding downstream DOCSIS QOS Parameter Set,\nthe default value is 0. This parameter is\nnot applicable to upstream DOCSIS QOS Parameter\nSets, and its value is reported as 0 in this case.")
docsIetfQosParamSetType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 20), Integer().subtype(subtypeSpec=SingleValueConstraint(1,3,2,)).subtype(namedValues=NamedValues(("active", 1), ("admitted", 2), ("provisioned", 3), ))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosParamSetType.setDescription("Defines the type of the QOS parameter set defined\nby this row. active(1) indicates the Active QOS\nparameter set, describing the service currently\nbeing provided by the DOCSIS MAC domain to the\nService Flow. admitted(2) indicates the Admitted\nQOS Parameter Set, describing services reserved by\nthe DOCSIS MAC domain for use by the service\nflow. provisioned(3) describes the QOS Parameter\nSet defined in the DOCSIS CM Configuration file for\nthe Service Flow.")
docsIetfQosParamSetRequestPolicyOct = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 21), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetRequestPolicyOct.setDescription("Specifies which transmit interval opportunities\nthe CM omits for upstream transmission requests and\npacket transmissions. This object takes its\ndefault value for downstream Service Flows.\n\nUnless otherwise indicated, a bit value of 1 means\nthat a CM must not use that opportunity for\nupstream transmission.\n\nIf bit 0 is the least significant bit of the\nleast significant (4th) octet, and the bit number\nincreases with significance, the bit definitions\nare as follows:\n\nbroadcastReqOpp(0):\n all CMs broadcast request opportunities\n\npriorityReqMulticastReq(1):\n priority request multicast request\n opportunities\n\nreqDataForReq(2):\n request/data opportunities for requests\n\nreqDataForData(3):\n request/data opportunities for data\n\npiggybackReqWithData(4):\n piggyback requests with data\n\nconcatenateData(5):\n concatenate data\n\nfragmentData(6):\n fragment data\n\nsuppresspayloadheaders(7):\n suppress payload headers\n\ndropPktsExceedUGSize(8):\n A value of 1 means that the Service Flow must\n drop packets that do not fit in the Unsolicited\n Grant size.\n\nIf the referenced parameter is not present in\na QOS Parameter Set, the value of this object is\nreported as '00000000'H.")
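# Illustrative sketch of decoding the 4-octet Request/Transmission Policy
# value above, using the bit numbering from the DESCRIPTION (bit 0 is the
# least significant bit of the least significant octet).
_REQUEST_POLICY_BITS = ("broadcastReqOpp", "priorityReqMulticastReq",
                        "reqDataForReq", "reqDataForData",
                        "piggybackReqWithData", "concatenateData",
                        "fragmentData", "suppresspayloadheaders",
                        "dropPktsExceedUGSize")
def _decode_request_policy(octets):
    value = int.from_bytes(octets, "big")
    return [name for bit, name in enumerate(_REQUEST_POLICY_BITS)
            if value & (1 << bit)]
# Example: _decode_request_policy(bytes.fromhex("00000030"))
# -> ['piggybackReqWithData', 'concatenateData']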
docsIetfQosParamSetBitMap = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 22), Bits().subtype(namedValues=NamedValues(("trafficPriority", 0), ("maxTrafficRate", 1), ("nomPollInterval", 10), ("tolPollJitter", 11), ("unsolicitGrantSize", 12), ("nomGrantInterval", 13), ("tolGrantJitter", 14), ("grantsPerInterval", 15), ("tosOverwrite", 16), ("maxLatency", 17), ("maxTrafficBurst", 2), ("minReservedRate", 3), ("minReservedPkt", 4), ("activeTimeout", 5), ("admittedTimeout", 6), ("maxConcatBurst", 7), ("schedulingType", 8), ("requestPolicy", 9), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetBitMap.setDescription("This object indicates the set of QOS Parameter\nSet parameters actually signaled in the\nDOCSIS registration or dynamic service request\nmessage that created or modified the QOS Parameter\nSet. A bit is set to 1 when the parameter described\nby the indicated reference section is present\nin the original request.\n\nNote that when Service Class names are expanded,\nthe registration or dynamic response message may\ncontain parameters as expanded by the CMTS based\non a stored service class. These expanded\nparameters are not indicated by a 1 bit in this\nobject.\n\nNote that even though some QOS Parameter Set\nparameters may not be signaled in a message\n(so that the parameter's bit in this object is 0),\nthe DOCSIS specification requires that default\nvalues be used. These default values are reported\nas the corresponding object's value in the row.\n\nNote that BITS objects are encoded most\nsignificant bit first. For example, if bits\n1 and 16 are set, the value of this object\nis the octet string '400080'H.")
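# Illustrative counterpart to the '400080'H example above: decode an
# MSB-first BITS octet string back to its set bit positions.
def _decode_bits(octets):
    return {i * 8 + j for i, octet in enumerate(octets)
            for j in range(8) if octet & (0x80 >> j)}
# Example: sorted(_decode_bits(bytes.fromhex("400080"))) -> [1, 16]
# i.e., maxTrafficRate(1) and tosOverwrite(16) were signaled.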
docsIetfQosServiceFlowTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 3))
if mibBuilder.loadTexts: docsIetfQosServiceFlowTable.setDescription("This table describes the set of DOCSIS-QOS\nService Flows in a managed device.")
docsIetfQosServiceFlowEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 3, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"))
if mibBuilder.loadTexts: docsIetfQosServiceFlowEntry.setDescription("Describes a Service Flow.\nAn entry in the table exists for each\nService Flow ID. The ifIndex is an\nifType of docsCableMaclayer(127).")
docsIetfQosServiceFlowId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosServiceFlowId.setDescription("An index assigned to a Service Flow by CMTS.")
docsIetfQosServiceFlowSID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 16383))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowSID.setDescription("Service Identifier (SID) assigned to an\nadmitted or active Service Flow. This object\nreports a value of 0 if a Service ID is not\nassociated with the Service Flow. Only active\nor admitted upstream Service Flows will have a\nService ID (SID).")
docsIetfQosServiceFlowDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 3), DocsIetfQosRfMacIfDirection()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowDirection.setDescription("The direction of the Service Flow.")
docsIetfQosServiceFlowPrimary = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 4), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPrimary.setDescription("Reflects whether the Service Flow is the primary\nor a secondary Service Flow.\n\nA primary Service Flow is the default Service Flow\nfor otherwise unclassified traffic and all MAC\nmessages.")
docsIetfQosServiceFlowStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 4))
if mibBuilder.loadTexts: docsIetfQosServiceFlowStatsTable.setDescription("This table describes statistics associated with the\nService Flows in a managed device.")
docsIetfQosServiceFlowStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 4, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"))
if mibBuilder.loadTexts: docsIetfQosServiceFlowStatsEntry.setDescription("Describes a set of Service Flow statistics.\nAn entry in the table exists for each\nService Flow ID. The ifIndex is an\nifType of docsCableMaclayer(127).")
docsIetfQosServiceFlowPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPkts.setDescription("For outgoing Service Flows, this object counts the\nnumber of Packet Data PDUs forwarded to this\nService Flow. For incoming upstream CMTS service\nflows, this object counts the number of Packet\nData PDUs actually received on the Service Flow\nidentified by the SID for which the packet was\nscheduled. CMs not classifying downstream packets\nmay report this object's value as 0 for downstream\nService Flows. This object does not count\nMAC-specific management messages.\n\nParticularly for UGS flows, packets sent on the\nprimary Service Flow in violation of the UGS grant\nsize should be counted only by the instance of this\nobject that is associated with the primary service\nflow.\n\nUnclassified upstream user data packets (i.e., non-\nMAC-management) forwarded to the primary upstream\nService Flow should be counted by the instance of\nthis object that is associated with the primary\nservice flow.\n\nThis object does include packets counted by\ndocsIetfQosServiceFlowPolicedDelayPkts, but does not\ninclude packets counted by\ndocsIetfQosServiceFlowPolicedDropPkts\nand docsIetfQosServiceFlowPHSUnknowns.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
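# Hedged usage sketch: walking this counter column with pysnmp's
# high-level API (pysnmp 4.x assumed). The agent address and community
# string are placeholders, not part of this MIB module.
def _walk_service_flow_pkts(host, community="public"):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, nextCmd)
    for err_ind, err_stat, _err_idx, var_binds in nextCmd(
            SnmpEngine(), CommunityData(community),
            UdpTransportTarget((host, 161)), ContextData(),
            ObjectType(ObjectIdentity("DOCS-IETF-QOS-MIB",
                                      "docsIetfQosServiceFlowPkts")),
            lexicographicMode=False):
        if err_ind or err_stat:
            break
        for name, value in var_binds:
            print(name.prettyPrint(), value.prettyPrint())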
docsIetfQosServiceFlowOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowOctets.setDescription("The number of octets from the byte after the MAC\nheader HCS to the end of the CRC for all packets\ncounted in the docsIetfQosServiceFlowPkts object for\nthis row. Note that this counts the octets after\npayload header suppression and before payload\nheader expansion have been applied.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowTimeCreated = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 3), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowTimeCreated.setDescription("The value of sysUpTime when the service flow\nwas created.")
docsIetfQosServiceFlowTimeActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowTimeActive.setDescription("The number of seconds that the service flow\nhas been active.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowPHSUnknowns = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPHSUnknowns.setDescription("For incoming upstream CMTS service flows, this\nobject counts the number of packets received\nwith an unknown payload header suppression index.\nThe service flow is identified by the SID for which\nthe packet was scheduled.\n\nOn a CM, only this object's instance for the primary\ndownstream service flow counts packets received with\nan unknown payload header suppression index. All\nother downstream service flows on the CM report this\nobject's value as 0.\n\nAll outgoing service flows report this object's\nvalue as 0.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowPolicedDropPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPolicedDropPkts.setDescription("For outgoing service flows, this object counts the\nnumber of Packet Data PDUs classified to this\nservice flow dropped due to:\n (1) implementation-dependent excessive delay\n while enforcing the Maximum Sustained\n Traffic Rate; or\n (2) UGS packets dropped due to exceeding the\n Unsolicited Grant Size with a\n Request/Transmission policy that requires\n such packets to be dropped.\n\nClassified packets dropped due to other reasons\nmust be counted in ifOutDiscards for the interface\nof this service flow. This object reports 0 for\nincoming service flows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowPolicedDelayPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPolicedDelayPkts.setDescription("This object counts only outgoing packets delayed in\norder to maintain the Maximum Sustained Traffic\nRate. This object will always report a value of 0\nfor UGS flows because the Maximum Sustained Traffic\nRate does not apply. This object is 0 for incoming\nservice flows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosUpstreamStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 5))
if mibBuilder.loadTexts: docsIetfQosUpstreamStatsTable.setDescription("This table describes statistics associated with\nupstream service flows. All counted frames must\nbe received without a Frame Check Sequence (FCS)\nerror.")
docsIetfQosUpstreamStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 5, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosSID"))
if mibBuilder.loadTexts: docsIetfQosUpstreamStatsEntry.setDescription("Describes a set of upstream service flow\nstatistics. An entry in the table exists for each\nupstream Service Flow in a managed device.\nThe ifIndex is an ifType of\ndocsCableMaclayer(127).")
docsIetfQosSID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 16383))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosSID.setDescription("Identifies a service ID for an admitted or active\nupstream service flow.")
docsIetfQosUpstreamFragments = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosUpstreamFragments.setDescription("The number of fragmentation headers received on an\nupstream service flow, regardless of whether\nthe fragment was correctly reassembled into a\nvalid packet.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosUpstreamFragDiscards = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosUpstreamFragDiscards.setDescription("The number of upstream fragments discarded and not\nassembled into a valid upstream packet.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosUpstreamConcatBursts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosUpstreamConcatBursts.setDescription("The number of concatenation headers received on an\nupstream service flow.\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicServiceStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 6))
if mibBuilder.loadTexts: docsIetfQosDynamicServiceStatsTable.setDescription("This table describes statistics associated with the\nDynamic Service Flows in a managed device.")
docsIetfQosDynamicServiceStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 6, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosIfDirection"))
if mibBuilder.loadTexts: docsIetfQosDynamicServiceStatsEntry.setDescription("Describes a set of dynamic service flow statistics.\nTwo entries exist for each DOCSIS MAC layer\ninterface for the upstream and downstream\ndirection. On the CMTS, the downstream direction\nrow indicates messages transmitted or transactions\noriginated by the CMTS. The upstream direction row\nindicates messages received or transactions\noriginated by the CM. On the CM, the downstream\ndirection row indicates messages received or\ntransactions originated by the CMTS. The upstream\ndirection row indicates messages transmitted by\nthe CM or transactions originated by the CM.\nThe ifIndex is an ifType of\ndocsCableMaclayer(127).")
docsIetfQosIfDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 1), DocsIetfQosRfMacIfDirection()).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosIfDirection.setDescription("The direction of interface.")
docsIetfQosDSAReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSAReqs.setDescription("The number of Dynamic Service Addition Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSARsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSARsps.setDescription("The number of Dynamic Service Addition Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSAAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSAAcks.setDescription("The number of Dynamic Service Addition\nAcknowledgements, including retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSCReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSCReqs.setDescription("The number of Dynamic Service Change Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSCRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSCRsps.setDescription("The number of Dynamic Service Change Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSCAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSCAcks.setDescription("The number of Dynamic Service Change\nAcknowledgements, including retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSDReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSDReqs.setDescription("The number of Dynamic Service Delete Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSDRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSDRsps.setDescription("The number of Dynamic Service Delete Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicAdds = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicAdds.setDescription("The number of successful Dynamic Service Addition\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicAddFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicAddFails.setDescription("The number of failed Dynamic Service Addition\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicChanges = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicChanges.setDescription("The number of successful Dynamic Service Change\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicChangeFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicChangeFails.setDescription("The number of failed Dynamic Service Change\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicDeletes = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicDeletes.setDescription("The number of successful Dynamic Service Delete\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicDeleteFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicDeleteFails.setDescription("The number of failed Dynamic Service Delete\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDCCReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCReqs.setDescription("The number of Dynamic Channel Change Request\nmessages traversing an interface. This count\nis nonzero only on downstream direction rows.\nThis count should include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex\nthat indexes this object.")
docsIetfQosDCCRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCRsps.setDescription("The number of Dynamic Channel Change Response\nmessages traversing an interface. This count is\nnonzero only on upstream direction rows. This count\nshould include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDCCAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCAcks.setDescription("The number of Dynamic Channel Change Acknowledgement\nmessages traversing an interface. This count\nis nonzero only on downstream direction rows.\nThis count should include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDCCs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCs.setDescription("The number of successful Dynamic Channel Change\ntransactions. This count is nonzero only on\ndownstream direction rows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDCCFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCFails.setDescription("The number of failed Dynamic Channel Change\ntransactions. This count is nonzero only on\ndownstream direction rows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowLogTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 7))
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogTable.setDescription("This table contains a log of the disconnected\nService Flows in a managed device.")
docsIetfQosServiceFlowLogEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 7, 1)).setIndexNames((0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogIndex"))
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogEntry.setDescription("The information regarding a single disconnected\nservice flow.")
docsIetfQosServiceFlowLogIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogIndex.setDescription("Unique index for a logged service flow.")
docsIetfQosServiceFlowLogIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 2), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogIfIndex.setDescription("The ifIndex of ifType docsCableMaclayer(127)\non the CMTS where the service flow was present.")
docsIetfQosServiceFlowLogSFID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogSFID.setDescription("The index assigned to the service flow by the CMTS.")
docsIetfQosServiceFlowLogCmMac = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 4), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogCmMac.setDescription("The MAC address for the cable modem associated with\nthe service flow.")
docsIetfQosServiceFlowLogPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogPkts.setDescription("The number of packets counted on this service flow\nafter payload header suppression.")
docsIetfQosServiceFlowLogOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogOctets.setDescription("The number of octets counted on this service flow\nafter payload header suppression.")
docsIetfQosServiceFlowLogTimeDeleted = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 7), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogTimeDeleted.setDescription("The value of sysUpTime when the service flow\nwas deleted.")
docsIetfQosServiceFlowLogTimeCreated = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 8), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogTimeCreated.setDescription("The value of sysUpTime when the service flow\nwas created.")
docsIetfQosServiceFlowLogTimeActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogTimeActive.setDescription("The total time that the service flow was active.")
docsIetfQosServiceFlowLogDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 10), DocsIetfQosRfMacIfDirection()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogDirection.setDescription("The value of docsIetfQosServiceFlowDirection\nfor the service flow.")
docsIetfQosServiceFlowLogPrimary = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 11), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogPrimary.setDescription("The value of docsIetfQosServiceFlowPrimary for the\nservice flow.")
docsIetfQosServiceFlowLogServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 12), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogServiceClassName.setDescription("The value of docsIetfQosParamSetServiceClassName for\nthe provisioned QOS Parameter Set of the\nservice flow.")
docsIetfQosServiceFlowLogPolicedDropPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogPolicedDropPkts.setDescription("The final value of\ndocsIetfQosServiceFlowPolicedDropPkts for the\nservice flow.")
docsIetfQosServiceFlowLogPolicedDelayPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogPolicedDelayPkts.setDescription("The final value of\ndocsIetfQosServiceFlowPolicedDelayPkts for the\nservice flow.")
docsIetfQosServiceFlowLogControl = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 15), Integer().subtype(subtypeSpec=SingleValueConstraint(1,6,)).subtype(namedValues=NamedValues(("active", 1), ("destroy", 6), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogControl.setDescription("Setting this object to the value destroy(6) removes\nthis entry from the table.\n\nReading this object returns the value active(1).")
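# Usage sketch (illustrative, not part of the generated MIB module): a
# manager removes a logged service flow by writing destroy(6) to
# docsIetfQosServiceFlowLogControl via the pysnmp high-level API. The
# agent address, community string, and log index below are assumptions
# for demonstration; the compiled DOCS-IETF-QOS-MIB must be on the MIB
# search path for the symbolic name to resolve.
def _exampleDestroyServiceFlowLogEntry(host='cmts.example.net', logIndex=1):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer, setCmd)
    errorIndication, errorStatus, errorIndex, varBinds = next(setCmd(
        SnmpEngine(), CommunityData('private'),
        UdpTransportTarget((host, 161)), ContextData(),
        ObjectType(ObjectIdentity('DOCS-IETF-QOS-MIB',
                                  'docsIetfQosServiceFlowLogControl',
                                  logIndex),
                   Integer(6))))  # destroy(6) removes this row from the log
    return errorIndication or errorStatus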
docsIetfQosServiceClassTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 8))
if mibBuilder.loadTexts: docsIetfQosServiceClassTable.setDescription("This table describes the set of DOCSIS-QOS\nService Classes in a CMTS.")
docsIetfQosServiceClassEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 8, 1)).setIndexNames((0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassName"))
if mibBuilder.loadTexts: docsIetfQosServiceClassEntry.setDescription("A provisioned service class on a CMTS.\nEach entry defines a template for certain\nDOCSIS QOS Parameter Set values. When a CM\ncreates or modifies an Admitted QOS Parameter Set\nfor a Service Flow, it may reference a Service Class\nName instead of providing explicit QOS Parameter\nSet values. In this case, the CMTS populates\nthe QOS Parameter Set with the applicable\ncorresponding values from the named Service Class.\nSubsequent changes to a Service Class row do not\naffect the QOS Parameter Set values of any service\nflows already admitted.\n\nA service class template applies to only\na single direction, as indicated in the\ndocsIetfQosServiceClassDirection object.")
docsIetfQosServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosServiceClassName.setDescription("Service Class Name. DOCSIS specifies that the\nmaximum size is 16 ASCII characters including\na terminating zero. The terminating zero is not\nrepresented in this SnmpAdminString syntax object.")
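# Illustrative helper (an assumption, not part of the generated MIB
# module): check that a Service Class Name respects the DOCSIS limit of
# 16 ASCII characters including the terminating zero, i.e., at most 15
# characters in this SnmpAdminString representation.
def _exampleValidServiceClassName(name):
    return 1 <= len(name) <= 15 and all(ord(c) < 128 for c in name)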
docsIetfQosServiceClassStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassStatus.setDescription("Used to create or delete rows in this table.\nThere is no restriction on the ability to change\nvalues in this row while the row is active.\nInactive rows need not be timed out.")
docsIetfQosServiceClassPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7)).clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPriority.setDescription("Template for docsIetfQosParamSetPriority.")
docsIetfQosServiceClassMaxTrafficRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 4), DocsIetfQosBitRate().clone('0')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMaxTrafficRate.setDescription("Template for docsIetfQosParamSetMaxTrafficRate.")
docsIetfQosServiceClassMaxTrafficBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 5), Unsigned32().clone(3044)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMaxTrafficBurst.setDescription("Template for docsIetfQosParamSetMaxTrafficBurst.")
docsIetfQosServiceClassMinReservedRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 6), DocsIetfQosBitRate().clone('0')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMinReservedRate.setDescription("Template for docsIetfQosParamSetMinReservedRate.")
docsIetfQosServiceClassMinReservedPkt = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMinReservedPkt.setDescription("Template for docsIetfQosParamSetMinReservedPkt.")
docsIetfQosServiceClassMaxConcatBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(1522)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMaxConcatBurst.setDescription("Template for docsIetfQosParamSetMaxConcatBurst.")
docsIetfQosServiceClassNomPollInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 9), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassNomPollInterval.setDescription("Template for docsIetfQosParamSetNomPollInterval.")
docsIetfQosServiceClassTolPollJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 10), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassTolPollJitter.setDescription("Template for docsIetfQosParamSetTolPollJitter.")
docsIetfQosServiceClassUnsolicitGrantSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassUnsolicitGrantSize.setDescription("Template for docsIetfQosParamSetUnsolicitGrantSize.")
docsIetfQosServiceClassNomGrantInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 12), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassNomGrantInterval.setDescription("Template for docsIetfQosParamSetNomGrantInterval.")
docsIetfQosServiceClassTolGrantJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 13), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassTolGrantJitter.setDescription("Template for docsIetfQosParamSetTolGrantJitter.")
docsIetfQosServiceClassGrantsPerInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127)).clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassGrantsPerInterval.setDescription("Template for docsIetfQosParamSetGrantsPerInterval.")
docsIetfQosServiceClassMaxLatency = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 15), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMaxLatency.setDescription("Template for docsIetfQosParamSetClassMaxLatency.")
docsIetfQosServiceClassActiveTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassActiveTimeout.setDescription("Template for docsIetfQosParamSetActiveTimeout.")
docsIetfQosServiceClassAdmittedTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(200)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassAdmittedTimeout.setDescription("Template for docsIetfQosParamSetAdmittedTimeout.")
docsIetfQosServiceClassSchedulingType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 18), DocsIetfQosSchedulingType().clone('bestEffort')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassSchedulingType.setDescription("Template for docsIetfQosParamSetSchedulingType.")
docsIetfQosServiceClassRequestPolicy = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4).clone(hexValue='00000000')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassRequestPolicy.setDescription("Template for docsIetfQosParamSetRequestPolicyOct.")
docsIetfQosServiceClassTosAndMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 20), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceClassTosAndMask.setDescription("Template for docsIetfQosParamSetTosAndMask.\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of\ndocsIetfQosServiceClassTosAndMask and\ndocsIetfQosServiceClassTosOrMask that would result\nin the modification of the ECN bits.\n\n\n\nIn particular, operators should not use values of\ndocsIetfQosServiceClassTosAndMask that have either\nof the least-significant two bits set to 0.\nSimilarly, operators should not use values of\ndocsIetfQosServiceClassTosOrMask that have either\nof the least-significant two bits set to 1.")
docsIetfQosServiceClassTosOrMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 21), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceClassTosOrMask.setDescription("Template for docsIetfQosParamSetTosOrMask.\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of\ndocsIetfQosServiceClassTosAndMask and\ndocsIetfQosServiceClassTosOrMask that would result\nin the modification of the ECN bits.\n\nIn particular, operators should not use values of\ndocsIetfQosServiceClassTosAndMask that have either\nof the least-significant two bits set to 0.\nSimilarly, operators should not use values of\ndocsIetfQosServiceClassTosOrMask that have either\nof the least-significant two bits set to 1.")
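# Illustrative helper (an assumption, not part of the generated MIB
# module): verify that a TosAndMask/TosOrMask pair preserves the 2-bit
# ECN field, per the guidance in the two descriptions above. The masks
# are single octets; the rewritten TOS byte is
# (old_tos & andMask) | orMask, so the ECN bits survive only when both
# AND-mask ECN bits are 1 and both OR-mask ECN bits are 0.
def _exampleEcnSafeTosMasks(andMask, orMask):
    return (andMask & 0x03) == 0x03 and (orMask & 0x03) == 0x00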
docsIetfQosServiceClassDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 22), DocsIetfQosRfMacIfDirection().clone('upstream')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassDirection.setDescription("Specifies whether the service class template\napplies to upstream or downstream service flows.")
docsIetfQosServiceClassStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 23), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassStorageType.setDescription("This object defines whether this row is kept in\nvolatile storage and lost upon reboot or whether\nit is backed up by non-volatile or permanent\nstorage. 'permanent' entries need not allow\nwritable access to any object.")
docsIetfQosServiceClassDSCPOverwrite = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 24), DscpOrAny().clone('-1')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassDSCPOverwrite.setDescription("This object allows the overwrite of the DSCP\nfield per RFC 3260.\n\nIf this object is -1, then the corresponding entry's\ndocsIetfQosServiceClassTosAndMask value MUST be\n'FF'H and docsIetfQosServiceClassTosOrMask MUST be\n'00'H. Otherwise, this object is in the range of\n0..63, and the corresponding entry's\ndocsIetfQosServiceClassTosAndMask value MUST be\n'03'H and the docsIetfQosServiceClassTosOrMask MUST\nbe this object's value shifted left by two bit\npositions.")
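# Illustrative helper (an assumption, not part of the generated MIB
# module): derive the TosAndMask/TosOrMask pair that the description
# above requires for a given docsIetfQosServiceClassDSCPOverwrite value.
def _exampleDscpOverwriteMasks(dscp):
    if dscp == -1:
        return 0xFF, 0x00              # no overwrite: AND 'FF'H, OR '00'H
    if not 0 <= dscp <= 63:
        raise ValueError('DSCP overwrite must be -1 or 0..63')
    return 0x03, dscp << 2             # AND '03'H, OR = DSCP shifted left 2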
docsIetfQosServiceClassPolicyTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 9))
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyTable.setDescription("This table describes the set of DOCSIS-QOS\nService Class Policies.\n\nThis table is an adjunct to the\n\n\n\ndocsDevFilterPolicy table. Entries in the\ndocsDevFilterPolicy table can point to\nspecific rows in this table.\n\nThis table permits mapping a packet to a service\nclass name of an active service flow so long as\na classifier does not exist at a higher\npriority.")
docsIetfQosServiceClassPolicyEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 9, 1)).setIndexNames((0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyIndex"))
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyEntry.setDescription("A service class name policy entry.")
docsIetfQosServiceClassPolicyIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyIndex.setDescription("Index value to identify an entry in\nthis table uniquely.")
docsIetfQosServiceClassPolicyName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 2), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyName.setDescription("Service Class Name to identify the name of the\nservice class flow to which the packet should be\ndirected.")
docsIetfQosServiceClassPolicyRulePriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyRulePriority.setDescription("Service Class Policy rule priority for the\nentry.")
docsIetfQosServiceClassPolicyStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyStatus.setDescription("Used to create or delete rows in this table.\nThis object should not be deleted if it is\nreferenced by an entry in docsDevFilterPolicy.\nThe reference should be deleted first.\nThere is no restriction on the ability\nto change values in this row while the row is\nactive. Inactive rows need not be timed out.")
docsIetfQosServiceClassPolicyStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 5), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyStorageType.setDescription("This object defines whether this row is kept in\nvolatile storage and lost upon reboot or whether\nit is backed up by non-volatile or permanent\nstorage. 'permanent' entries need not allow\nwritable access to any object.")
docsIetfQosPHSTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 10))
if mibBuilder.loadTexts: docsIetfQosPHSTable.setDescription("This table describes the set of payload header\nsuppression entries.")
docsIetfQosPHSEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 10, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosPktClassId"))
if mibBuilder.loadTexts: docsIetfQosPHSEntry.setDescription("A payload header suppression entry.\n\nThe ifIndex is an ifType of docsCableMaclayer(127).\nThe index docsIetfQosServiceFlowId selects one\nservice flow from the cable MAC layer interface.\nThe docsIetfQosPktClassId index matches an\nindex of the docsIetfQosPktClassTable.")
docsIetfQosPHSField = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSField.setDescription("Payload header suppression field defines the\nbytes of the header that must be\nsuppressed/restored by the sending/receiving\ndevice.\n\nThe number of octets in this object should be\nthe same as the value of docsIetfQosPHSSize.")
docsIetfQosPHSMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSMask.setDescription("Payload header suppression mask defines the\nbit mask that is used in combination with the\ndocsIetfQosPHSField. It defines which bytes in\nthe header must be suppressed/restored by the\nsending or receiving device.\n\nEach bit of this bit mask corresponds to a byte\nin the docsIetfQosPHSField, with the least\n\n\n\nsignificant bit corresponding to the first byte\nof the docsIetfQosPHSField.\n\nEach bit of the bit mask specifies whether\nthe corresponding byte should be suppressed\nin the packet. A bit value of '1' indicates that\nthe byte should be suppressed by the sending\ndevice and restored by the receiving device.\nA bit value of '0' indicates that\nthe byte should not be suppressed by the sending\ndevice or restored by the receiving device.\n\nIf the bit mask does not contain a bit for each\nbyte in the docsIetfQosPHSField, then the bit mask\nis extended with bit values of '1' to be the\nnecessary length.")
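# Illustrative helper (an assumption, not part of the generated MIB
# module): list the byte positions of docsIetfQosPHSField that a
# docsIetfQosPHSMask marks for suppression, following the mapping above.
# This sketch assumes Python 3 bytes objects and that the
# least-significant bit of the first mask octet maps to the first field
# byte; mask bits beyond the end of the mask default to '1'.
def _examplePhsSuppressedBytes(phsField, phsMask):
    suppressed = []
    for i in range(len(phsField)):
        octet = phsMask[i // 8] if i // 8 < len(phsMask) else 0xFF
        if (octet >> (i % 8)) & 1:     # '1' means suppress/restore this byte
            suppressed.append(i)
    return suppressed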
docsIetfQosPHSSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSSize.setDescription("Payload header suppression size specifies the\nnumber of bytes in the header to be suppressed\nand restored.\n\nThe value of this object must match the number\nof bytes in the docsIetfQosPHSField.")
docsIetfQosPHSVerify = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 4), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSVerify.setDescription("Payload header suppression verification value. If\n'true', the sender must verify docsIetfQosPHSField\nis the same as what is contained in the packet\nto be suppressed.")
docsIetfQosPHSIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSIndex.setDescription("Payload header suppression index uniquely\n\n\n\nreferences the PHS rule for a given service flow.")
docsIetfQosCmtsMacToSrvFlowTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 11))
if mibBuilder.loadTexts: docsIetfQosCmtsMacToSrvFlowTable.setDescription("This table provides for referencing the service\nflows associated with a particular cable modem.\nThis allows indexing into other docsIetfQos\ntables that are indexed by docsIetfQosServiceFlowId\nand ifIndex.")
docsIetfQosCmtsMacToSrvFlowEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 11, 1)).setIndexNames((0, "DOCS-IETF-QOS-MIB", "docsIetfQosCmtsCmMac"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosCmtsServiceFlowId"))
if mibBuilder.loadTexts: docsIetfQosCmtsMacToSrvFlowEntry.setDescription("An entry is created by CMTS for each service flow\nconnected to this CMTS.")
docsIetfQosCmtsCmMac = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 1), MacAddress()).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosCmtsCmMac.setDescription("The MAC address for the referenced CM.")
docsIetfQosCmtsServiceFlowId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosCmtsServiceFlowId.setDescription("An index assigned to a service flow by CMTS.")
docsIetfQosCmtsIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 3), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosCmtsIfIndex.setDescription("The ifIndex of ifType docsCableMacLayer(127)\non the CMTS that is connected to the Cable Modem.")
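# Usage sketch (illustrative, not part of the generated MIB module): walk
# docsIetfQosCmtsIfIndex to enumerate the service flows per cable modem;
# each returned OID carries the docsIetfQosCmtsCmMac and
# docsIetfQosCmtsServiceFlowId index values, which can then be used to
# index the other docsIetfQos tables. Agent address and community string
# are assumptions for demonstration.
def _exampleWalkCmServiceFlows(host='cmts.example.net'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              nextCmd)
    for errInd, errStat, errIdx, varBinds in nextCmd(
            SnmpEngine(), CommunityData('public'),
            UdpTransportTarget((host, 161)), ContextData(),
            ObjectType(ObjectIdentity('DOCS-IETF-QOS-MIB',
                                      'docsIetfQosCmtsIfIndex')),
            lexicographicMode=False):  # stop at the end of this column
        if errInd or errStat:
            break
        for oid, ifIndex in varBinds:
            yield oid, ifIndex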
docsIetfQosConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2))
docsIetfQosGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2, 1))
docsIetfQosCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2, 2))
# Augmentations
# Groups
docsIetfQosBaseGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 1)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassUserPriLow"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassSourcePortStart"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassEnetProtocol"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassIpTosMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetDestAddr"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowTimeActive"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowTimeCreated"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassStateActive"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSAReqs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSCAcks"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetDestMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCFails"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDestPortStart"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetSourceMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSDRsps"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCReqs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassPriority"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSVerify"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSIndex"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSARsps"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassEnetProtocolType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassIpTosLow"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetSourceAddr"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSField"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSCReqs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicChangeFails"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSDReqs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDestPortEnd"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicAdds"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassVlanId"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicDeleteFails"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicDeletes"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassIpProtocol"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowSID"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPHSUnknowns"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPrimary"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSSize"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassSourcePortEnd"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSAAcks"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowOctets"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCRsps"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassUserPriHigh"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowDirection"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSCRsps"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPolicedDelayPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPolicedDropPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassIpTosHigh"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassSourceMacAddr"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDestMacMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDirection"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDestMacAddr"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassBitMap"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicAddFails"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCAcks"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetAddressType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicChanges"), ) )
if mibBuilder.loadTexts: docsIetfQosBaseGroup.setDescription("Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems.")
docsIetfQosParamSetGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 2)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMaxConcatBurst"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetGrantsPerInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMaxTrafficRate"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetActiveTimeout"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMinReservedPkt"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetPriority"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetRequestPolicyOct"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetServiceClassName"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetTosOrMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMinReservedRate"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMaxTrafficBurst"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetBitMap"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetSchedulingType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetTolPollJitter"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetTosAndMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMaxLatency"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetTolGrantJitter"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetNomPollInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetNomGrantInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetAdmittedTimeout"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetUnsolicitGrantSize"), ) )
if mibBuilder.loadTexts: docsIetfQosParamSetGroup.setDescription("Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems for QOS Parameter Sets.")
docsIetfQosCmtsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 3)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogSFID"), ("DOCS-IETF-QOS-MIB", "docsIetfQosUpstreamFragDiscards"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogPolicedDropPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogControl"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogTimeCreated"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogOctets"), ("DOCS-IETF-QOS-MIB", "docsIetfQosUpstreamConcatBursts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogCmMac"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogPrimary"), ("DOCS-IETF-QOS-MIB", "docsIetfQosCmtsIfIndex"), ("DOCS-IETF-QOS-MIB", "docsIetfQosUpstreamFragments"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogTimeActive"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogIfIndex"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogDirection"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogPolicedDelayPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogServiceClassName"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogTimeDeleted"), ) )
if mibBuilder.loadTexts: docsIetfQosCmtsGroup.setDescription("Group of objects implemented only in the CMTS.")
docsIetfQosSrvClassPolicyGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 4)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyStorageType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyName"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyRulePriority"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyStatus"), ) )
if mibBuilder.loadTexts: docsIetfQosSrvClassPolicyGroup.setDescription("Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems when supporting policy-based\nservice flows.")
docsIetfQosServiceClassGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 5)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassSchedulingType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassNomGrantInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassTolGrantJitter"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassDSCPOverwrite"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassGrantsPerInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassDirection"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMaxTrafficBurst"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPriority"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMaxTrafficRate"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassStorageType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassTolPollJitter"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassTosOrMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassStatus"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMaxConcatBurst"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassTosAndMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassUnsolicitGrantSize"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassNomPollInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassRequestPolicy"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMinReservedRate"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassActiveTimeout"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMinReservedPkt"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassAdmittedTimeout"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMaxLatency"), ) )
if mibBuilder.loadTexts: docsIetfQosServiceClassGroup.setDescription("Group of objects implemented only in Cable Modem\nTermination Systems when supporting expansion of Service\nClass Names in a QOS Parameter Set.")
# Compliances
docsIetfQosCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 127, 2, 2, 1)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosCmtsGroup"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassGroup"), ("DOCS-IETF-QOS-MIB", "docsIetfQosSrvClassPolicyGroup"), ("DOCS-IETF-QOS-MIB", "docsIetfQosBaseGroup"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetGroup"), ) )
if mibBuilder.loadTexts: docsIetfQosCompliance.setDescription("The compliance statement for MCNS Cable Modems and\nCable Modem Termination Systems that implement DOCSIS\nService Flows.")
# Exports
# Module identity
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", PYSNMP_MODULE_ID=docsIetfQosMIB)
# Types
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", DocsIetfQosBitRate=DocsIetfQosBitRate, DocsIetfQosRfMacIfDirection=DocsIetfQosRfMacIfDirection, DocsIetfQosSchedulingType=DocsIetfQosSchedulingType)
# Objects
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", docsIetfQosMIB=docsIetfQosMIB, docsIetfQosNotifications=docsIetfQosNotifications, docsIetfQosMIBObjects=docsIetfQosMIBObjects, docsIetfQosPktClassTable=docsIetfQosPktClassTable, docsIetfQosPktClassEntry=docsIetfQosPktClassEntry, docsIetfQosPktClassId=docsIetfQosPktClassId, docsIetfQosPktClassDirection=docsIetfQosPktClassDirection, docsIetfQosPktClassPriority=docsIetfQosPktClassPriority, docsIetfQosPktClassIpTosLow=docsIetfQosPktClassIpTosLow, docsIetfQosPktClassIpTosHigh=docsIetfQosPktClassIpTosHigh, docsIetfQosPktClassIpTosMask=docsIetfQosPktClassIpTosMask, docsIetfQosPktClassIpProtocol=docsIetfQosPktClassIpProtocol, docsIetfQosPktClassInetAddressType=docsIetfQosPktClassInetAddressType, docsIetfQosPktClassInetSourceAddr=docsIetfQosPktClassInetSourceAddr, docsIetfQosPktClassInetSourceMask=docsIetfQosPktClassInetSourceMask, docsIetfQosPktClassInetDestAddr=docsIetfQosPktClassInetDestAddr, docsIetfQosPktClassInetDestMask=docsIetfQosPktClassInetDestMask, docsIetfQosPktClassSourcePortStart=docsIetfQosPktClassSourcePortStart, docsIetfQosPktClassSourcePortEnd=docsIetfQosPktClassSourcePortEnd, docsIetfQosPktClassDestPortStart=docsIetfQosPktClassDestPortStart, docsIetfQosPktClassDestPortEnd=docsIetfQosPktClassDestPortEnd, docsIetfQosPktClassDestMacAddr=docsIetfQosPktClassDestMacAddr, docsIetfQosPktClassDestMacMask=docsIetfQosPktClassDestMacMask, docsIetfQosPktClassSourceMacAddr=docsIetfQosPktClassSourceMacAddr, docsIetfQosPktClassEnetProtocolType=docsIetfQosPktClassEnetProtocolType, docsIetfQosPktClassEnetProtocol=docsIetfQosPktClassEnetProtocol, docsIetfQosPktClassUserPriLow=docsIetfQosPktClassUserPriLow, docsIetfQosPktClassUserPriHigh=docsIetfQosPktClassUserPriHigh, docsIetfQosPktClassVlanId=docsIetfQosPktClassVlanId, docsIetfQosPktClassStateActive=docsIetfQosPktClassStateActive, docsIetfQosPktClassPkts=docsIetfQosPktClassPkts, docsIetfQosPktClassBitMap=docsIetfQosPktClassBitMap, docsIetfQosParamSetTable=docsIetfQosParamSetTable, docsIetfQosParamSetEntry=docsIetfQosParamSetEntry, docsIetfQosParamSetServiceClassName=docsIetfQosParamSetServiceClassName, docsIetfQosParamSetPriority=docsIetfQosParamSetPriority, docsIetfQosParamSetMaxTrafficRate=docsIetfQosParamSetMaxTrafficRate, docsIetfQosParamSetMaxTrafficBurst=docsIetfQosParamSetMaxTrafficBurst, docsIetfQosParamSetMinReservedRate=docsIetfQosParamSetMinReservedRate, docsIetfQosParamSetMinReservedPkt=docsIetfQosParamSetMinReservedPkt, docsIetfQosParamSetActiveTimeout=docsIetfQosParamSetActiveTimeout, docsIetfQosParamSetAdmittedTimeout=docsIetfQosParamSetAdmittedTimeout, docsIetfQosParamSetMaxConcatBurst=docsIetfQosParamSetMaxConcatBurst, docsIetfQosParamSetSchedulingType=docsIetfQosParamSetSchedulingType, docsIetfQosParamSetNomPollInterval=docsIetfQosParamSetNomPollInterval, docsIetfQosParamSetTolPollJitter=docsIetfQosParamSetTolPollJitter, docsIetfQosParamSetUnsolicitGrantSize=docsIetfQosParamSetUnsolicitGrantSize, docsIetfQosParamSetNomGrantInterval=docsIetfQosParamSetNomGrantInterval, docsIetfQosParamSetTolGrantJitter=docsIetfQosParamSetTolGrantJitter, docsIetfQosParamSetGrantsPerInterval=docsIetfQosParamSetGrantsPerInterval, docsIetfQosParamSetTosAndMask=docsIetfQosParamSetTosAndMask, docsIetfQosParamSetTosOrMask=docsIetfQosParamSetTosOrMask, docsIetfQosParamSetMaxLatency=docsIetfQosParamSetMaxLatency, docsIetfQosParamSetType=docsIetfQosParamSetType, docsIetfQosParamSetRequestPolicyOct=docsIetfQosParamSetRequestPolicyOct, docsIetfQosParamSetBitMap=docsIetfQosParamSetBitMap, 
docsIetfQosServiceFlowTable=docsIetfQosServiceFlowTable, docsIetfQosServiceFlowEntry=docsIetfQosServiceFlowEntry, docsIetfQosServiceFlowId=docsIetfQosServiceFlowId, docsIetfQosServiceFlowSID=docsIetfQosServiceFlowSID, docsIetfQosServiceFlowDirection=docsIetfQosServiceFlowDirection, docsIetfQosServiceFlowPrimary=docsIetfQosServiceFlowPrimary, docsIetfQosServiceFlowStatsTable=docsIetfQosServiceFlowStatsTable, docsIetfQosServiceFlowStatsEntry=docsIetfQosServiceFlowStatsEntry, docsIetfQosServiceFlowPkts=docsIetfQosServiceFlowPkts, docsIetfQosServiceFlowOctets=docsIetfQosServiceFlowOctets, docsIetfQosServiceFlowTimeCreated=docsIetfQosServiceFlowTimeCreated, docsIetfQosServiceFlowTimeActive=docsIetfQosServiceFlowTimeActive, docsIetfQosServiceFlowPHSUnknowns=docsIetfQosServiceFlowPHSUnknowns, docsIetfQosServiceFlowPolicedDropPkts=docsIetfQosServiceFlowPolicedDropPkts, docsIetfQosServiceFlowPolicedDelayPkts=docsIetfQosServiceFlowPolicedDelayPkts, docsIetfQosUpstreamStatsTable=docsIetfQosUpstreamStatsTable, docsIetfQosUpstreamStatsEntry=docsIetfQosUpstreamStatsEntry, docsIetfQosSID=docsIetfQosSID, docsIetfQosUpstreamFragments=docsIetfQosUpstreamFragments, docsIetfQosUpstreamFragDiscards=docsIetfQosUpstreamFragDiscards, docsIetfQosUpstreamConcatBursts=docsIetfQosUpstreamConcatBursts, docsIetfQosDynamicServiceStatsTable=docsIetfQosDynamicServiceStatsTable, docsIetfQosDynamicServiceStatsEntry=docsIetfQosDynamicServiceStatsEntry, docsIetfQosIfDirection=docsIetfQosIfDirection, docsIetfQosDSAReqs=docsIetfQosDSAReqs, docsIetfQosDSARsps=docsIetfQosDSARsps, docsIetfQosDSAAcks=docsIetfQosDSAAcks, docsIetfQosDSCReqs=docsIetfQosDSCReqs, docsIetfQosDSCRsps=docsIetfQosDSCRsps, docsIetfQosDSCAcks=docsIetfQosDSCAcks, docsIetfQosDSDReqs=docsIetfQosDSDReqs, docsIetfQosDSDRsps=docsIetfQosDSDRsps, docsIetfQosDynamicAdds=docsIetfQosDynamicAdds, docsIetfQosDynamicAddFails=docsIetfQosDynamicAddFails, docsIetfQosDynamicChanges=docsIetfQosDynamicChanges, docsIetfQosDynamicChangeFails=docsIetfQosDynamicChangeFails, docsIetfQosDynamicDeletes=docsIetfQosDynamicDeletes, docsIetfQosDynamicDeleteFails=docsIetfQosDynamicDeleteFails, docsIetfQosDCCReqs=docsIetfQosDCCReqs, docsIetfQosDCCRsps=docsIetfQosDCCRsps, docsIetfQosDCCAcks=docsIetfQosDCCAcks, docsIetfQosDCCs=docsIetfQosDCCs, docsIetfQosDCCFails=docsIetfQosDCCFails, docsIetfQosServiceFlowLogTable=docsIetfQosServiceFlowLogTable, docsIetfQosServiceFlowLogEntry=docsIetfQosServiceFlowLogEntry, docsIetfQosServiceFlowLogIndex=docsIetfQosServiceFlowLogIndex, docsIetfQosServiceFlowLogIfIndex=docsIetfQosServiceFlowLogIfIndex, docsIetfQosServiceFlowLogSFID=docsIetfQosServiceFlowLogSFID, docsIetfQosServiceFlowLogCmMac=docsIetfQosServiceFlowLogCmMac, docsIetfQosServiceFlowLogPkts=docsIetfQosServiceFlowLogPkts, docsIetfQosServiceFlowLogOctets=docsIetfQosServiceFlowLogOctets, docsIetfQosServiceFlowLogTimeDeleted=docsIetfQosServiceFlowLogTimeDeleted, docsIetfQosServiceFlowLogTimeCreated=docsIetfQosServiceFlowLogTimeCreated, docsIetfQosServiceFlowLogTimeActive=docsIetfQosServiceFlowLogTimeActive, docsIetfQosServiceFlowLogDirection=docsIetfQosServiceFlowLogDirection, docsIetfQosServiceFlowLogPrimary=docsIetfQosServiceFlowLogPrimary, docsIetfQosServiceFlowLogServiceClassName=docsIetfQosServiceFlowLogServiceClassName, docsIetfQosServiceFlowLogPolicedDropPkts=docsIetfQosServiceFlowLogPolicedDropPkts, docsIetfQosServiceFlowLogPolicedDelayPkts=docsIetfQosServiceFlowLogPolicedDelayPkts, docsIetfQosServiceFlowLogControl=docsIetfQosServiceFlowLogControl, 
docsIetfQosServiceClassTable=docsIetfQosServiceClassTable, docsIetfQosServiceClassEntry=docsIetfQosServiceClassEntry, docsIetfQosServiceClassName=docsIetfQosServiceClassName, docsIetfQosServiceClassStatus=docsIetfQosServiceClassStatus, docsIetfQosServiceClassPriority=docsIetfQosServiceClassPriority, docsIetfQosServiceClassMaxTrafficRate=docsIetfQosServiceClassMaxTrafficRate, docsIetfQosServiceClassMaxTrafficBurst=docsIetfQosServiceClassMaxTrafficBurst, docsIetfQosServiceClassMinReservedRate=docsIetfQosServiceClassMinReservedRate, docsIetfQosServiceClassMinReservedPkt=docsIetfQosServiceClassMinReservedPkt, docsIetfQosServiceClassMaxConcatBurst=docsIetfQosServiceClassMaxConcatBurst)
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", docsIetfQosServiceClassNomPollInterval=docsIetfQosServiceClassNomPollInterval, docsIetfQosServiceClassTolPollJitter=docsIetfQosServiceClassTolPollJitter, docsIetfQosServiceClassUnsolicitGrantSize=docsIetfQosServiceClassUnsolicitGrantSize, docsIetfQosServiceClassNomGrantInterval=docsIetfQosServiceClassNomGrantInterval, docsIetfQosServiceClassTolGrantJitter=docsIetfQosServiceClassTolGrantJitter, docsIetfQosServiceClassGrantsPerInterval=docsIetfQosServiceClassGrantsPerInterval, docsIetfQosServiceClassMaxLatency=docsIetfQosServiceClassMaxLatency, docsIetfQosServiceClassActiveTimeout=docsIetfQosServiceClassActiveTimeout, docsIetfQosServiceClassAdmittedTimeout=docsIetfQosServiceClassAdmittedTimeout, docsIetfQosServiceClassSchedulingType=docsIetfQosServiceClassSchedulingType, docsIetfQosServiceClassRequestPolicy=docsIetfQosServiceClassRequestPolicy, docsIetfQosServiceClassTosAndMask=docsIetfQosServiceClassTosAndMask, docsIetfQosServiceClassTosOrMask=docsIetfQosServiceClassTosOrMask, docsIetfQosServiceClassDirection=docsIetfQosServiceClassDirection, docsIetfQosServiceClassStorageType=docsIetfQosServiceClassStorageType, docsIetfQosServiceClassDSCPOverwrite=docsIetfQosServiceClassDSCPOverwrite, docsIetfQosServiceClassPolicyTable=docsIetfQosServiceClassPolicyTable, docsIetfQosServiceClassPolicyEntry=docsIetfQosServiceClassPolicyEntry, docsIetfQosServiceClassPolicyIndex=docsIetfQosServiceClassPolicyIndex, docsIetfQosServiceClassPolicyName=docsIetfQosServiceClassPolicyName, docsIetfQosServiceClassPolicyRulePriority=docsIetfQosServiceClassPolicyRulePriority, docsIetfQosServiceClassPolicyStatus=docsIetfQosServiceClassPolicyStatus, docsIetfQosServiceClassPolicyStorageType=docsIetfQosServiceClassPolicyStorageType, docsIetfQosPHSTable=docsIetfQosPHSTable, docsIetfQosPHSEntry=docsIetfQosPHSEntry, docsIetfQosPHSField=docsIetfQosPHSField, docsIetfQosPHSMask=docsIetfQosPHSMask, docsIetfQosPHSSize=docsIetfQosPHSSize, docsIetfQosPHSVerify=docsIetfQosPHSVerify, docsIetfQosPHSIndex=docsIetfQosPHSIndex, docsIetfQosCmtsMacToSrvFlowTable=docsIetfQosCmtsMacToSrvFlowTable, docsIetfQosCmtsMacToSrvFlowEntry=docsIetfQosCmtsMacToSrvFlowEntry, docsIetfQosCmtsCmMac=docsIetfQosCmtsCmMac, docsIetfQosCmtsServiceFlowId=docsIetfQosCmtsServiceFlowId, docsIetfQosCmtsIfIndex=docsIetfQosCmtsIfIndex, docsIetfQosConformance=docsIetfQosConformance, docsIetfQosGroups=docsIetfQosGroups, docsIetfQosCompliances=docsIetfQosCompliances)
# Groups
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", docsIetfQosBaseGroup=docsIetfQosBaseGroup, docsIetfQosParamSetGroup=docsIetfQosParamSetGroup, docsIetfQosCmtsGroup=docsIetfQosCmtsGroup, docsIetfQosSrvClassPolicyGroup=docsIetfQosSrvClassPolicyGroup, docsIetfQosServiceClassGroup=docsIetfQosServiceClassGroup)
# Compliances
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", docsIetfQosCompliance=docsIetfQosCompliance)
The value is calculated from the byte\nfollowing the MAC header HCS to the end of the\nCRC.\n\nIf the referenced parameter is omitted from a\nDOCSIS QOS parameter set, the default value is\nCMTS implementation dependent. In this case, the\nCMTS reports the default value it is using, and the\nCM reports a value of 0. If the referenced\nparameter is not applicable to the direction or\nscheduling type of the Service Flow, both CMTS and\nCM report this object's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetActiveTimeout.setDescription(\n \"\"\"Specifies the maximum duration in seconds that\nresources remain unused on an active service\nflow before CMTS signals that both active and\nadmitted parameters set are null. The default\nvalue of 0 signifies an infinite amount of time.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetAdmittedTimeout.setDescription(\n \"\"\"Specifies the maximum duration in seconds that\nresources remain in admitted state before\nresources must be released.\n\nThe value of 0 signifies an infinite amount\nof time.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the\ndefault value of this object is 200.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetMaxConcatBurst.setDescription(\n \"\"\"Specifies the maximum concatenated burst in\nbytes that an upstream Service Flow is allowed.\nThe value is calculated from the FC byte of the\nConcatenation MAC Header to the last CRC byte in\nof the last concatenated MAC frame, inclusive.\nThe value of 0 specifies no maximum burst.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object for scheduling types\nbestEffort(2), nonRealTimePollingService(3), and\n\n\n\nrealTimePollingService(4) is 1522. If the parameter\nis not applicable, this object's value is reported\nas 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetSchedulingType.setDescription(\n \"\"\"Specifies the upstream scheduling service used for\nupstream Service Flow.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set of an\nupstream Service Flow, the default value of this\nobject is bestEffort(2). For QOS parameter sets of\ndownstream Service Flows, this object's value is\nreported as undefined(1).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetNomPollInterval.setDescription(\n \"\"\"Specifies the nominal interval in microseconds\nbetween successive unicast request\nopportunities on an upstream Service Flow.\n\nThis object applies only to upstream Service Flows\nwith DocsIetfQosSchedulingType of value\nnonRealTimePollingService(3),\nrealTimePollingService(4), and\nunsolictedGrantServiceWithAD(5). The parameter is\nmandatory for realTimePollingService(4). If the\nparameter is omitted with\nnonRealTimePollingService(3), the CMTS uses an\nimplementation-dependent value. If the parameter\nis omitted with unsolictedGrantServiceWithAD(5),\nthe CMTS uses as a default value the value of the\nNominal Grant Interval parameter. In all cases,\nthe CMTS reports the value it is using when the\nparameter is applicable. 
The CM reports the\nsignaled parameter value if it was signaled,\nand 0 otherwise.\n\n\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetTolPollJitter.setDescription(\n \"\"\"Specifies the maximum amount of time in\nmicroseconds that the unicast request interval\nmay be delayed from the nominal periodic\nschedule on an upstream Service Flow.\n\nThis parameter is applicable only to upstream\nService Flows with a DocsIetfQosSchedulingType of\nrealTimePollingService(4) or\nunsolictedGrantServiceWithAD(5).\n\nIf the referenced parameter is applicable but not\npresent in the corresponding DOCSIS QOS Parameter\nSet, the CMTS uses an implementation-dependent\nvalue and reports the value it is using.\nThe CM reports a value of 0 in this case.\n\nIf the parameter is not applicable to the\ndirection or upstream scheduling type of the\nService Flow, both CMTS and CM report this\nobject's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetUnsolicitGrantSize.setDescription(\n \"\"\"Specifies the unsolicited grant size in bytes.\nThe grant size includes the entire MAC frame\ndata PDU from the Frame Control byte to the end\nof the MAC frame.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\n\n\n\nwhen applicable. Both CMTS and CM report\nthe signaled value of the parameter in this\ncase.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetNomGrantInterval.setDescription(\n \"\"\"Specifies the nominal interval in microseconds\nbetween successive data grant opportunities\non an upstream Service Flow.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report the\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetTolGrantJitter.setDescription(\n \"\"\"Specifies the maximum amount of time in\nmicroseconds that the transmission opportunities\nmay be delayed from the nominal periodic schedule.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. 
Both CMTS and CM report the\n\n\n\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetGrantsPerInterval.setDescription(\n \"\"\"Specifies the number of data grants per Nominal\nGrant Interval\n(docsIetfQosParamSetNomGrantInterval).\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report the\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetTosAndMask.setDescription(\n \"\"\"Specifies the AND mask for the IP TOS byte for\noverwriting IP packet's TOS value. The IP packet\nTOS byte is bitwise ANDed with\ndocsIetfQosParamSetTosAndMask, and the result is\nbitwise ORed with docsIetfQosParamSetTosORMask and\nthe result is written to the IP packet TOS byte.\nA value of 'FF'H for docsIetfQosParamSetTosAndMask\nand a value of '00'H for\ndocsIetfQosParamSetTosOrMask means that the IP\nPacket TOS byte is not overwritten.\n\nThis combination is reported if the referenced\nparameter is not present in a QOS Parameter Set.\n\n\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of docsIetfQosParamSetTosAndMask\nand docsIetfQosParamSetTosORMask that would result\nin the modification of the ECN bits.\n\nIn particular, operators should not use values of\ndocsIetfQosParamSetTosAndMask that have either of\nthe least-significant two bits set to 0. Similarly,\noperators should not use values of\ndocsIetfQosParamSetTosORMask that have either of\nthe least-significant two bits set to 1.\n\nEven though this object is only enforced by the\nCable Modem Termination System (CMTS),\nCable Modems MUST report the value as signaled in\nthe referenced parameter.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetTosOrMask.setDescription(\n \"\"\"Specifies the OR mask for the IP TOS byte.\n\nSee the description of docsIetfQosParamSetTosAndMask\nfor further details.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of docsIetfQosParamSetTosAndMask\nand docsIetfQosParamSetTosORMask that would result\nin the modification of the ECN bits.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetMaxLatency.setDescription(\n \"\"\"Specifies the maximum latency between the\nreception of a packet by the CMTS on its NSI\nand the forwarding of the packet to the RF\ninterface. A value of 0 signifies no maximum\nlatency is enforced. This object only applies to\ndownstream Service Flows.\n\nIf the referenced parameter is not present in the\ncorresponding downstream DOCSIS QOS Parameter Set,\nthe default value is 0. 
This parameter is\nnot applicable to upstream DOCSIS QOS Parameter\nSets, and its value is reported as 0 in this case.\"\"\"\n    )\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosParamSetType.setDescription(\n        \"\"\"Defines the type of the QOS parameter set defined\nby this row. active(1) indicates the Active QOS\nparameter set, describing the service currently\nbeing provided by the DOCSIS MAC domain to the\nService Flow. admitted(2) indicates the Admitted\nQOS Parameter Set, describing services reserved by\nthe DOCSIS MAC domain for use by the service\nflow. provisioned(3) describes the QOS Parameter\nSet defined in the DOCSIS CM Configuration file for\nthe Service Flow.\"\"\"\n    )\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosParamSetRequestPolicyOct.setDescription(\n        \"\"\"Specifies which transmit interval opportunities\nthe CM omits for upstream transmission requests and\npacket transmissions. This object takes its\ndefault value for downstream Service Flows.\n\nUnless otherwise indicated, a bit value of 1 means\nthat a CM must not use that opportunity for\nupstream transmission.\n\nIf bit 0 is the least significant bit of the\nleast significant (4th) octet, and if bit number\nis increased with significance, the bit definitions\nare defined as follows:\n\nbroadcastReqOpp(0):\n all CMs broadcast request opportunities\n\npriorityReqMulticastReq(1):\n priority request multicast request\n opportunities\n\nreqDataForReq(2):\n request/data opportunities for requests\n\nreqDataForData(3):\n request/data opportunities for data\n\npiggybackReqWithData(4):\n piggyback requests with data\n\nconcatenateData(5):\n concatenate data\n\nfragmentData(6):\n fragment data\n\nsuppresspayloadheaders(7):\n suppress payload headers\n\n\n\n\ndropPktsExceedUGSize(8):\n A value of 1 means that the Service Flow must\n drop packets that do not fit in the Unsolicited\n Grant size.\n\nIf the referenced parameter is not present in\na QOS Parameter Set, the value of this object is\nreported as '00000000'H.\"\"\"\n    )\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosParamSetBitMap.setDescription(\n        \"\"\"This object indicates the set of QOS Parameter\nSet parameters actually signaled in the\nDOCSIS registration or dynamic service request\nmessage that created or modified the QOS Parameter\nSet. A bit is set to 1 when the parameter described\nby the indicated reference section is present\nin the original request.\n\nNote that when Service Class names are expanded,\nthe registration or dynamic response message may\ncontain parameters as expanded by the CMTS based\n\n\n\non a stored service class. These expanded\nparameters are not indicated by a 1 bit in this\nobject.\n\nNote that even though some QOS Parameter Set\nparameters may not be signaled in a message\n(so that the parameter's bit in this object is 0),\nthe DOCSIS specification requires that default\nvalues be used. These default values are reported\nas the corresponding object's value in the row.\n\nNote that BITS objects are encoded most\nsignificant bit first. For example, if bits\n1 and 16 are set, the value of this object\nis the octet string '400080'H.\"\"\"\n    )\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceFlowTable.setDescription(\n        \"\"\"This table describes the set of DOCSIS-QOS\nService Flows in a managed device.\"\"\"\n    )\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceFlowEntry.setDescription(\n        \"\"\"Describes a Service Flow.\nAn entry in the table exists for each\nService Flow ID. 
The ifIndex is an\nifType of docsCableMaclayer(127).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowId.setDescription(\n 'An index assigned to a Service Flow by CMTS.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowSID.setDescription(\n \"\"\"Service Identifier (SID) assigned to an\nadmitted or active Service Flow. This object\nreports a value of 0 if a Service ID is not\nassociated with the Service Flow. Only active\nor admitted upstream Service Flows will have a\nService ID (SID).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowDirection.setDescription(\n 'The direction of the Service Flow.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowPrimary.setDescription(\n \"\"\"Object reflects whether Service Flow is the primary\nor a secondary Service Flow.\n\nA primary Service Flow is the default Service Flow\nfor otherwise unclassified traffic and all MAC\nmessages.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowStatsTable.setDescription(\n \"\"\"This table describes statistics associated with the\nService Flows in a managed device.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowStatsEntry.setDescription(\n \"\"\"Describes a set of Service Flow statistics.\nAn entry in the table exists for each\nService Flow ID. The ifIndex is an\nifType of docsCableMaclayer(127).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowPkts.setDescription(\n \"\"\"For outgoing Service Flows, this object counts the\nnumber of Packet Data PDUs forwarded to this\nService Flow. For incoming upstream CMTS service\nflows, this object counts the number of Packet\nData PDUs actually received on the Service Flow\nidentified by the SID for which the packet was\nscheduled. CMs not classifying downstream packets\nmay report this object's value as 0 for downstream\nService Flows. This object does not count\nMAC-specific management messages.\n\nParticularly for UGS flows, packets sent on the\nprimary Service Flow in violation of the UGS grant\nsize should be counted only by the instance of this\nobject that is associated with the primary service\n\n\n\nflow.\n\nUnclassified upstream user data packets (i.e., non-\nMAC-management) forwarded to the primary upstream\nService Flow should be counted by the instance of\nthis object that is associated with the primary\nservice flow.\n\nThis object does include packets counted by\ndocsIetfQosServiceFlowPolicedDelayPkts, but does not\ninclude packets counted by\ndocsIetfQosServiceFlowPolicedDropPkts\nand docsIetfQosServiceFlowPHSUnknowns.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowOctets.setDescription(\n \"\"\"The number of octets from the byte after the MAC\nheader HCS to the end of the CRC for all packets\ncounted in the docsIetfQosServiceFlowPkts object for\nthis row. 
Note that this counts the octets after\npayload header suppression and before payload\nheader expansion have been applied.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowTimeCreated.setDescription(\n \"\"\"The value of sysUpTime when the service flow\nwas created.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowTimeActive.setDescription(\n \"\"\"The number of seconds that the service flow\nhas been active.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowPHSUnknowns.setDescription(\n \"\"\"For incoming upstream CMTS service flows, this\nobject counts the number of packets received\nwith an unknown payload header suppression index.\nThe service flow is identified by the SID for which\nthe packet was scheduled.\n\nOn a CM, only this object's instance for the primary\ndownstream service flow counts packets received with\nan unknown payload header suppression index. All\nother downstream service flows on CM report this\nobjects value as 0.\n\nAll outgoing service flows report this object's\nvalue as 0.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowPolicedDropPkts.setDescription(\n \"\"\"For outgoing service flows, this object counts the\nnumber of Packet Data PDUs classified to this\nservice flow dropped due to:\n (1) implementation-dependent excessive delay\n while enforcing the Maximum Sustained\n Traffic Rate; or\n (2) UGS packets dropped due to exceeding the\n Unsolicited Grant Size with a\n Request/Transmission policy that requires\n such packets to be dropped.\n\nClassified packets dropped due to other reasons\n\n\n\nmust be counted in ifOutDiscards for the interface\nof this service flow. This object reports 0 for\nincoming service flows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowPolicedDelayPkts.setDescription(\n \"\"\"This object counts only outgoing packets delayed in\norder to maintain the Maximum Sustained Traffic\nRate. This object will always report a value of 0\nfor UGS flows because the Maximum Sustained Traffic\nRate does not apply. This object is 0 for incoming\nservice flows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosUpstreamStatsTable.setDescription(\n \"\"\"This table describes statistics associated with\nupstream service flows. All counted frames must\nbe received without a Frame Check Sequence (FCS)\nerror.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosUpstreamStatsEntry.setDescription(\n \"\"\"Describes a set of upstream service flow\nstatistics. 
An entry in the table exists for each\nupstream Service Flow in a managed device.\nThe ifIndex is an ifType of\ndocsCableMaclayer(127).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosSID.setDescription(\n \"\"\"Identifies a service ID for an admitted or active\nupstream service flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosUpstreamFragments.setDescription(\n \"\"\"The number of fragmentation headers received on an\nupstream service flow, regardless of whether\nthe fragment was correctly reassembled into a\nvalid packet.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosUpstreamFragDiscards.setDescription(\n \"\"\"The number of upstream fragments discarded and not\nassembled into a valid upstream packet.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosUpstreamConcatBursts.setDescription(\n \"\"\"The number of concatenation headers received on an\nupstream service flow.\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicServiceStatsTable.setDescription(\n \"\"\"This table describes statistics associated with the\nDynamic Service Flows in a managed device.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicServiceStatsEntry.setDescription(\n \"\"\"Describes a set of dynamic service flow statistics.\nTwo entries exist for each DOCSIS MAC layer\ninterface for the upstream and downstream\ndirection. On the CMTS, the downstream direction\nrow indicates messages transmitted or transactions\noriginated by the CMTS. The upstream direction row\nindicates messages received or transaction\noriginated by the CM. On the CM, the downstream\ndirection row indicates messages received or\ntransactions originated by the CMTS. 
The upstream\ndirection row indicates messages transmitted by\nthe CM or transactions originated by the CM.\nThe ifIndex is an ifType of\ndocsCableMaclayer(127).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosIfDirection.setDescription('The direction of interface.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSAReqs.setDescription(\n \"\"\"The number of Dynamic Service Addition Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSARsps.setDescription(\n \"\"\"The number of Dynamic Service Addition Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\n\n\n\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSAAcks.setDescription(\n \"\"\"The number of Dynamic Service Addition\nAcknowledgements, including retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSCReqs.setDescription(\n \"\"\"The number of Dynamic Service Change Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSCRsps.setDescription(\n \"\"\"The number of Dynamic Service Change Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSCAcks.setDescription(\n \"\"\"The number of Dynamic Service Change\nAcknowledgements, including retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\n\n\n\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSDReqs.setDescription(\n \"\"\"The number of Dynamic Service Delete Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSDRsps.setDescription(\n \"\"\"The number of Dynamic Service Delete Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicAdds.setDescription(\n \"\"\"The number of successful Dynamic Service Addition\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicAddFails.setDescription(\n \"\"\"The number of failed Dynamic Service Addition\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\n\n\n\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicChanges.setDescription(\n \"\"\"The number of successful Dynamic Service Change\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicChangeFails.setDescription(\n \"\"\"The number of failed 
Dynamic Service Change\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicDeletes.setDescription(\n \"\"\"The number of successful Dynamic Service Delete\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicDeleteFails.setDescription(\n \"\"\"The number of failed Dynamic Service Delete\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\n\n\n\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDCCReqs.setDescription(\n \"\"\"The number of Dynamic Channel Change Request\nmessages traversing an interface. This count\nis nonzero only on downstream direction rows.\nThis count should include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex\nthat indexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDCCRsps.setDescription(\n \"\"\"The number of Dynamic Channel Change Response\nmessages traversing an interface. This count is\nnonzero only on upstream direction rows. This count\nshould include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDCCAcks.setDescription(\n \"\"\"The number of Dynamic Channel Change Acknowledgement\nmessages traversing an interface. This count\nis nonzero only on downstream direction rows.\nThis count should include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDCCs.setDescription(\n \"\"\"The number of successful Dynamic Channel Change\ntransactions. This count is nonzero only on\ndownstream direction rows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDCCFails.setDescription(\n \"\"\"The number of failed Dynamic Channel Change\ntransactions. 
This count is nonzero only on\ndownstream direction rows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogTable.setDescription(\n \"\"\"This table contains a log of the disconnected\nService Flows in a managed device.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogEntry.setDescription(\n \"\"\"The information regarding a single disconnected\nservice flow.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogIndex.setDescription(\n 'Unique index for a logged service flow.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogIfIndex.setDescription(\n \"\"\"The ifIndex of ifType docsCableMaclayer(127)\non the CMTS where the service flow was present.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogSFID.setDescription(\n 'The index assigned to the service flow by the CMTS.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogCmMac.setDescription(\n \"\"\"The MAC address for the cable modem associated with\nthe service flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogPkts.setDescription(\n \"\"\"The number of packets counted on this service flow\nafter payload header suppression.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogOctets.setDescription(\n \"\"\"The number of octets counted on this service flow\nafter payload header suppression.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogTimeDeleted.setDescription(\n \"\"\"The value of sysUpTime when the service flow\nwas deleted.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogTimeCreated.setDescription(\n \"\"\"The value of sysUpTime when the service flow\nwas created.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogTimeActive.setDescription(\n 'The total time that the service flow was active.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogDirection.setDescription(\n \"\"\"The value of docsIetfQosServiceFlowDirection\nfor the service flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogPrimary.setDescription(\n \"\"\"The value of docsIetfQosServiceFlowPrimary for the\nservice flow.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogServiceClassName.setDescription(\n \"\"\"The value of docsIetfQosParamSetServiceClassName for\nthe provisioned QOS Parameter Set of the\nservice flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogPolicedDropPkts.setDescription(\n \"\"\"The final value of\ndocsIetfQosServiceFlowPolicedDropPkts for the\nservice flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogPolicedDelayPkts.setDescription(\n \"\"\"The final value of\ndocsIetfQosServiceFlowPolicedDelayPkts for the\nservice flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogControl.setDescription(\n \"\"\"Setting this object to the value destroy(6) removes\nthis entry from the table.\n\nReading this object returns the value active(1).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassTable.setDescription(\n \"\"\"This table describes the set of DOCSIS-QOS\nService Classes in a CMTS.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n 
docsIetfQosServiceClassEntry.setDescription(\n        \"\"\"A provisioned service class on a CMTS.\nEach entry defines a template for certain\nDOCSIS QOS Parameter Set values. When a CM\ncreates or modifies an Admitted QOS Parameter Set\nfor a Service Flow, it may reference a Service Class\nName instead of providing explicit QOS Parameter\nSet values. In this case, the CMTS populates\nthe QOS Parameter Set with the applicable\ncorresponding values from the named Service Class.\nSubsequent changes to a Service Class row do not\naffect the QOS Parameter Set values of any service\nflows already admitted.\n\nA service class template applies to only\na single direction, as indicated in the\ndocsIetfQosServiceClassDirection object.\"\"\"\n    )\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassName.setDescription(\n        \"\"\"Service Class Name. DOCSIS specifies that the\nmaximum size is 16 ASCII characters including\na terminating zero. The terminating zero is not\nrepresented in this SnmpAdminString syntax object.\"\"\"\n    )\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassStatus.setDescription(\n        \"\"\"Used to create or delete rows in this table.\nThere is no restriction on the ability to change\nvalues in this row while the row is active.\nInactive rows need not be timed out.\"\"\"\n    )\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassPriority.setDescription(\n        'Template for docsIetfQosParamSetPriority.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassMaxTrafficRate.setDescription(\n        'Template for docsIetfQosParamSetMaxTrafficRate.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassMaxTrafficBurst.setDescription(\n        'Template for docsIetfQosParamSetMaxTrafficBurst.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassMinReservedRate.setDescription(\n        'Template for docsIetfQosParamSetMinReservedRate.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassMinReservedPkt.setDescription(\n        'Template for docsIetfQosParamSetMinReservedPkt.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassMaxConcatBurst.setDescription(\n        'Template for docsIetfQosParamSetMaxConcatBurst.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassNomPollInterval.setDescription(\n        'Template for docsIetfQosParamSetNomPollInterval.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassTolPollJitter.setDescription(\n        'Template for docsIetfQosParamSetTolPollJitter.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassUnsolicitGrantSize.setDescription(\n        'Template for docsIetfQosParamSetUnsolicitGrantSize.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassNomGrantInterval.setDescription(\n        'Template for docsIetfQosParamSetNomGrantInterval.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassTolGrantJitter.setDescription(\n        'Template for docsIetfQosParamSetTolGrantJitter.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassGrantsPerInterval.setDescription(\n        'Template for docsIetfQosParamSetGrantsPerInterval.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassMaxLatency.setDescription(\n        'Template for docsIetfQosParamSetMaxLatency.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassActiveTimeout.setDescription(\n        'Template for docsIetfQosParamSetActiveTimeout.')\n<mask token>\nif mibBuilder.loadTexts:\n    docsIetfQosServiceClassAdmittedTimeout.setDescription(\n        'Template for 
docsIetfQosParamSetAdmittedTimeout.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassSchedulingType.setDescription(\n 'Template for docsIetfQosParamSetSchedulingType.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassRequestPolicy.setDescription(\n 'Template for docsIetfQosParamSetRequestPolicyOct.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassTosAndMask.setDescription(\n \"\"\"Template for docsIetfQosParamSetTosAndMask.\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of\ndocsIetfQosServiceClassTosAndMask and\ndocsIetfQosServiceClassTosOrMask that would result\nin the modification of the ECN bits.\n\n\n\nIn particular, operators should not use values of\ndocsIetfQosServiceClassTosAndMask that have either\nof the least-significant two bits set to 0.\nSimilarly,operators should not use values of\ndocsIetfQosServiceClassTosOrMask that have either\nof the least-significant two bits set to 1.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassTosOrMask.setDescription(\n \"\"\"Template for docsIetfQosParamSetTosOrMask.\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of\ndocsIetfQosServiceClassTosAndMask and\ndocsIetfQosServiceClassTosOrMask that would result\nin the modification of the ECN bits.\n\nIn particular, operators should not use values of\ndocsIetfQosServiceClassTosAndMask that have either\nof the least-significant two bits set to 0.\nSimilarly, operators should not use values of\ndocsIetfQosServiceClassTosOrMask that have either\nof the least-significant two bits set to 1.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassDirection.setDescription(\n \"\"\"Specifies whether the service class template\napplies to upstream or downstream service flows.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassStorageType.setDescription(\n \"\"\"This object defines whether this row is kept in\nvolatile storage and lost upon reboot or whether\nit is backed up by non-volatile or permanent\nstorage. 'permanent' entries need not allow\nwritable access to any object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassDSCPOverwrite.setDescription(\n \"\"\"This object allows the overwrite of the DSCP\nfield per RFC 3260.\n\nIf this object is -1, then the corresponding entry's\ndocsIetfQosServiceClassTosAndMask value MUST be\n'FF'H and docsIetfQosServiceClassTosOrMask MUST be\n'00'H. Otherwise, this object is in the range of\n0..63, and the corresponding entry's\ndocsIetfQosServiceClassTosAndMask value MUST be\n'03'H and the docsIetfQosServiceClassTosOrMask MUST\nbe this object's value shifted left by two bit\npositions.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyTable.setDescription(\n \"\"\"This table describes the set of DOCSIS-QOS\nService Class Policies.\n\nThis table is an adjunct to the\n\n\n\ndocsDevFilterPolicy table. 
Entries in the\ndocsDevFilterPolicy table can point to\nspecific rows in this table.\n\nThis table permits mapping a packet to a service\nclass name of an active service flow so long as\na classifier does not exist at a higher\npriority.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyEntry.setDescription(\n 'A service class name policy entry.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyIndex.setDescription(\n \"\"\"Index value to identify an entry in\nthis table uniquely.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyName.setDescription(\n \"\"\"Service Class Name to identify the name of the\nservice class flow to which the packet should be\ndirected.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyRulePriority.setDescription(\n \"\"\"Service Class Policy rule priority for the\nentry.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyStatus.setDescription(\n \"\"\"Used to create or delete rows in this table.\nThis object should not be deleted if it is\nreferenced by an entry in docsDevFilterPolicy.\nThe reference should be deleted first.\nThere is no restriction on the ability\nto change values in this row while the row is\nactive. Inactive rows need not be timed out.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyStorageType.setDescription(\n \"\"\"This object defines whether this row is kept in\nvolatile storage and lost upon reboot or whether\nit is backed up by non-volatile or permanent\nstorage. 'permanent' entries need not allow\nwritable access to any object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSTable.setDescription(\n \"\"\"This table describes the set of payload header\nsuppression entries.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSEntry.setDescription(\n \"\"\"A payload header suppression entry.\n\nThe ifIndex is an ifType of docsCableMaclayer(127).\nThe index docsIetfQosServiceFlowId selects one\nservice flow from the cable MAC layer interface.\nThe docsIetfQosPktClassId index matches an\nindex of the docsIetfQosPktClassTable.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSField.setDescription(\n \"\"\"Payload header suppression field defines the\nbytes of the header that must be\nsuppressed/restored by the sending/receiving\ndevice.\n\nThe number of octets in this object should be\nthe same as the value of docsIetfQosPHSSize.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSMask.setDescription(\n \"\"\"Payload header suppression mask defines the\nbit mask that is used in combination with the\ndocsIetfQosPHSField. It defines which bytes in\nthe header must be suppressed/restored by the\nsending or receiving device.\n\nEach bit of this bit mask corresponds to a byte\nin the docsIetfQosPHSField, with the least\n\n\n\nsignificant bit corresponding to the first byte\nof the docsIetfQosPHSField.\n\nEach bit of the bit mask specifies whether\nthe corresponding byte should be suppressed\nin the packet. 
A bit value of '1' indicates that\nthe byte should be suppressed by the sending\ndevice and restored by the receiving device.\nA bit value of '0' indicates that\nthe byte should not be suppressed by the sending\ndevice or restored by the receiving device.\n\nIf the bit mask does not contain a bit for each\nbyte in the docsIetfQosPHSField, then the bit mask\nis extended with bit values of '1' to be the\nnecessary length.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSSize.setDescription(\n \"\"\"Payload header suppression size specifies the\nnumber of bytes in the header to be suppressed\nand restored.\n\nThe value of this object must match the number\nof bytes in the docsIetfQosPHSField.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSVerify.setDescription(\n \"\"\"Payload header suppression verification value. If\n'true', the sender must verify docsIetfQosPHSField\nis the same as what is contained in the packet\nto be suppressed.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSIndex.setDescription(\n \"\"\"Payload header suppression index uniquely\n\n\n\nreferences the PHS rule for a given service flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCmtsMacToSrvFlowTable.setDescription(\n \"\"\"This table provides for referencing the service\nflows associated with a particular cable modem.\nThis allows indexing into other docsIetfQos\ntables that are indexed by docsIetfQosServiceFlowId\nand ifIndex.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCmtsMacToSrvFlowEntry.setDescription(\n \"\"\"An entry is created by CMTS for each service flow\nconnected to this CMTS.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCmtsCmMac.setDescription(\n 'The MAC address for the referenced CM.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCmtsServiceFlowId.setDescription(\n 'An index assigned to a service flow by CMTS.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCmtsIfIndex.setDescription(\n \"\"\"The ifIndex of ifType docsCableMacLayer(127)\non the CMTS that is connected to the Cable Modem.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosBaseGroup.setDescription(\n \"\"\"Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetGroup.setDescription(\n \"\"\"Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems for QOS Parameter Sets.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCmtsGroup.setDescription(\n 'Group of objects implemented only in the CMTS.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosSrvClassPolicyGroup.setDescription(\n \"\"\"Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems when supporting policy-based\nservice flows.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassGroup.setDescription(\n \"\"\"Group of objects implemented only in Cable Modem\nTermination Systems when supporting expansion of Service\nClass Names in a QOS Parameter Set\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCompliance.setDescription(\n \"\"\"The compliance statement for MCNS Cable Modems and\nCable Modem Termination Systems that implement DOCSIS\nService Flows.\"\"\"\n )\nmibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', PYSNMP_MODULE_ID=docsIetfQosMIB)\nmibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', DocsIetfQosBitRate=\n DocsIetfQosBitRate, 
DocsIetfQosRfMacIfDirection=\n DocsIetfQosRfMacIfDirection, DocsIetfQosSchedulingType=\n DocsIetfQosSchedulingType)\nmibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', docsIetfQosMIB=docsIetfQosMIB,\n docsIetfQosNotifications=docsIetfQosNotifications,\n docsIetfQosMIBObjects=docsIetfQosMIBObjects, docsIetfQosPktClassTable=\n docsIetfQosPktClassTable, docsIetfQosPktClassEntry=\n docsIetfQosPktClassEntry, docsIetfQosPktClassId=docsIetfQosPktClassId,\n docsIetfQosPktClassDirection=docsIetfQosPktClassDirection,\n docsIetfQosPktClassPriority=docsIetfQosPktClassPriority,\n docsIetfQosPktClassIpTosLow=docsIetfQosPktClassIpTosLow,\n docsIetfQosPktClassIpTosHigh=docsIetfQosPktClassIpTosHigh,\n docsIetfQosPktClassIpTosMask=docsIetfQosPktClassIpTosMask,\n docsIetfQosPktClassIpProtocol=docsIetfQosPktClassIpProtocol,\n docsIetfQosPktClassInetAddressType=docsIetfQosPktClassInetAddressType,\n docsIetfQosPktClassInetSourceAddr=docsIetfQosPktClassInetSourceAddr,\n docsIetfQosPktClassInetSourceMask=docsIetfQosPktClassInetSourceMask,\n docsIetfQosPktClassInetDestAddr=docsIetfQosPktClassInetDestAddr,\n docsIetfQosPktClassInetDestMask=docsIetfQosPktClassInetDestMask,\n docsIetfQosPktClassSourcePortStart=docsIetfQosPktClassSourcePortStart,\n docsIetfQosPktClassSourcePortEnd=docsIetfQosPktClassSourcePortEnd,\n docsIetfQosPktClassDestPortStart=docsIetfQosPktClassDestPortStart,\n docsIetfQosPktClassDestPortEnd=docsIetfQosPktClassDestPortEnd,\n docsIetfQosPktClassDestMacAddr=docsIetfQosPktClassDestMacAddr,\n docsIetfQosPktClassDestMacMask=docsIetfQosPktClassDestMacMask,\n docsIetfQosPktClassSourceMacAddr=docsIetfQosPktClassSourceMacAddr,\n docsIetfQosPktClassEnetProtocolType=docsIetfQosPktClassEnetProtocolType,\n docsIetfQosPktClassEnetProtocol=docsIetfQosPktClassEnetProtocol,\n docsIetfQosPktClassUserPriLow=docsIetfQosPktClassUserPriLow,\n docsIetfQosPktClassUserPriHigh=docsIetfQosPktClassUserPriHigh,\n docsIetfQosPktClassVlanId=docsIetfQosPktClassVlanId,\n docsIetfQosPktClassStateActive=docsIetfQosPktClassStateActive,\n docsIetfQosPktClassPkts=docsIetfQosPktClassPkts,\n docsIetfQosPktClassBitMap=docsIetfQosPktClassBitMap,\n docsIetfQosParamSetTable=docsIetfQosParamSetTable,\n docsIetfQosParamSetEntry=docsIetfQosParamSetEntry,\n docsIetfQosParamSetServiceClassName=docsIetfQosParamSetServiceClassName,\n docsIetfQosParamSetPriority=docsIetfQosParamSetPriority,\n docsIetfQosParamSetMaxTrafficRate=docsIetfQosParamSetMaxTrafficRate,\n docsIetfQosParamSetMaxTrafficBurst=docsIetfQosParamSetMaxTrafficBurst,\n docsIetfQosParamSetMinReservedRate=docsIetfQosParamSetMinReservedRate,\n docsIetfQosParamSetMinReservedPkt=docsIetfQosParamSetMinReservedPkt,\n docsIetfQosParamSetActiveTimeout=docsIetfQosParamSetActiveTimeout,\n docsIetfQosParamSetAdmittedTimeout=docsIetfQosParamSetAdmittedTimeout,\n docsIetfQosParamSetMaxConcatBurst=docsIetfQosParamSetMaxConcatBurst,\n docsIetfQosParamSetSchedulingType=docsIetfQosParamSetSchedulingType,\n docsIetfQosParamSetNomPollInterval=docsIetfQosParamSetNomPollInterval,\n docsIetfQosParamSetTolPollJitter=docsIetfQosParamSetTolPollJitter,\n docsIetfQosParamSetUnsolicitGrantSize=\n docsIetfQosParamSetUnsolicitGrantSize,\n docsIetfQosParamSetNomGrantInterval=docsIetfQosParamSetNomGrantInterval,\n docsIetfQosParamSetTolGrantJitter=docsIetfQosParamSetTolGrantJitter,\n docsIetfQosParamSetGrantsPerInterval=\n docsIetfQosParamSetGrantsPerInterval, docsIetfQosParamSetTosAndMask=\n docsIetfQosParamSetTosAndMask, docsIetfQosParamSetTosOrMask=\n docsIetfQosParamSetTosOrMask, docsIetfQosParamSetMaxLatency=\n 
docsIetfQosParamSetMaxLatency, docsIetfQosParamSetType=\n docsIetfQosParamSetType, docsIetfQosParamSetRequestPolicyOct=\n docsIetfQosParamSetRequestPolicyOct, docsIetfQosParamSetBitMap=\n docsIetfQosParamSetBitMap, docsIetfQosServiceFlowTable=\n docsIetfQosServiceFlowTable, docsIetfQosServiceFlowEntry=\n docsIetfQosServiceFlowEntry, docsIetfQosServiceFlowId=\n docsIetfQosServiceFlowId, docsIetfQosServiceFlowSID=\n docsIetfQosServiceFlowSID, docsIetfQosServiceFlowDirection=\n docsIetfQosServiceFlowDirection, docsIetfQosServiceFlowPrimary=\n docsIetfQosServiceFlowPrimary, docsIetfQosServiceFlowStatsTable=\n docsIetfQosServiceFlowStatsTable, docsIetfQosServiceFlowStatsEntry=\n docsIetfQosServiceFlowStatsEntry, docsIetfQosServiceFlowPkts=\n docsIetfQosServiceFlowPkts, docsIetfQosServiceFlowOctets=\n docsIetfQosServiceFlowOctets, docsIetfQosServiceFlowTimeCreated=\n docsIetfQosServiceFlowTimeCreated, docsIetfQosServiceFlowTimeActive=\n docsIetfQosServiceFlowTimeActive, docsIetfQosServiceFlowPHSUnknowns=\n docsIetfQosServiceFlowPHSUnknowns,\n docsIetfQosServiceFlowPolicedDropPkts=\n docsIetfQosServiceFlowPolicedDropPkts,\n docsIetfQosServiceFlowPolicedDelayPkts=\n docsIetfQosServiceFlowPolicedDelayPkts, docsIetfQosUpstreamStatsTable=\n docsIetfQosUpstreamStatsTable, docsIetfQosUpstreamStatsEntry=\n docsIetfQosUpstreamStatsEntry, docsIetfQosSID=docsIetfQosSID,\n docsIetfQosUpstreamFragments=docsIetfQosUpstreamFragments,\n docsIetfQosUpstreamFragDiscards=docsIetfQosUpstreamFragDiscards,\n docsIetfQosUpstreamConcatBursts=docsIetfQosUpstreamConcatBursts,\n docsIetfQosDynamicServiceStatsTable=docsIetfQosDynamicServiceStatsTable,\n docsIetfQosDynamicServiceStatsEntry=docsIetfQosDynamicServiceStatsEntry,\n docsIetfQosIfDirection=docsIetfQosIfDirection, docsIetfQosDSAReqs=\n docsIetfQosDSAReqs, docsIetfQosDSARsps=docsIetfQosDSARsps,\n docsIetfQosDSAAcks=docsIetfQosDSAAcks, docsIetfQosDSCReqs=\n docsIetfQosDSCReqs, docsIetfQosDSCRsps=docsIetfQosDSCRsps,\n docsIetfQosDSCAcks=docsIetfQosDSCAcks, docsIetfQosDSDReqs=\n docsIetfQosDSDReqs, docsIetfQosDSDRsps=docsIetfQosDSDRsps,\n docsIetfQosDynamicAdds=docsIetfQosDynamicAdds,\n docsIetfQosDynamicAddFails=docsIetfQosDynamicAddFails,\n docsIetfQosDynamicChanges=docsIetfQosDynamicChanges,\n docsIetfQosDynamicChangeFails=docsIetfQosDynamicChangeFails,\n docsIetfQosDynamicDeletes=docsIetfQosDynamicDeletes,\n docsIetfQosDynamicDeleteFails=docsIetfQosDynamicDeleteFails,\n docsIetfQosDCCReqs=docsIetfQosDCCReqs, docsIetfQosDCCRsps=\n docsIetfQosDCCRsps, docsIetfQosDCCAcks=docsIetfQosDCCAcks,\n docsIetfQosDCCs=docsIetfQosDCCs, docsIetfQosDCCFails=\n docsIetfQosDCCFails, docsIetfQosServiceFlowLogTable=\n docsIetfQosServiceFlowLogTable, docsIetfQosServiceFlowLogEntry=\n docsIetfQosServiceFlowLogEntry, docsIetfQosServiceFlowLogIndex=\n docsIetfQosServiceFlowLogIndex, docsIetfQosServiceFlowLogIfIndex=\n docsIetfQosServiceFlowLogIfIndex, docsIetfQosServiceFlowLogSFID=\n docsIetfQosServiceFlowLogSFID, docsIetfQosServiceFlowLogCmMac=\n docsIetfQosServiceFlowLogCmMac, docsIetfQosServiceFlowLogPkts=\n docsIetfQosServiceFlowLogPkts, docsIetfQosServiceFlowLogOctets=\n docsIetfQosServiceFlowLogOctets, docsIetfQosServiceFlowLogTimeDeleted=\n docsIetfQosServiceFlowLogTimeDeleted,\n docsIetfQosServiceFlowLogTimeCreated=\n docsIetfQosServiceFlowLogTimeCreated,\n docsIetfQosServiceFlowLogTimeActive=docsIetfQosServiceFlowLogTimeActive,\n docsIetfQosServiceFlowLogDirection=docsIetfQosServiceFlowLogDirection,\n docsIetfQosServiceFlowLogPrimary=docsIetfQosServiceFlowLogPrimary,\n 
docsIetfQosServiceFlowLogServiceClassName=\n docsIetfQosServiceFlowLogServiceClassName,\n docsIetfQosServiceFlowLogPolicedDropPkts=\n docsIetfQosServiceFlowLogPolicedDropPkts,\n docsIetfQosServiceFlowLogPolicedDelayPkts=\n docsIetfQosServiceFlowLogPolicedDelayPkts,\n docsIetfQosServiceFlowLogControl=docsIetfQosServiceFlowLogControl,\n docsIetfQosServiceClassTable=docsIetfQosServiceClassTable,\n docsIetfQosServiceClassEntry=docsIetfQosServiceClassEntry,\n docsIetfQosServiceClassName=docsIetfQosServiceClassName,\n docsIetfQosServiceClassStatus=docsIetfQosServiceClassStatus,\n docsIetfQosServiceClassPriority=docsIetfQosServiceClassPriority,\n docsIetfQosServiceClassMaxTrafficRate=\n docsIetfQosServiceClassMaxTrafficRate,\n docsIetfQosServiceClassMaxTrafficBurst=\n docsIetfQosServiceClassMaxTrafficBurst,\n docsIetfQosServiceClassMinReservedRate=\n docsIetfQosServiceClassMinReservedRate,\n docsIetfQosServiceClassMinReservedPkt=\n docsIetfQosServiceClassMinReservedPkt,\n docsIetfQosServiceClassMaxConcatBurst=docsIetfQosServiceClassMaxConcatBurst\n )\nmibBuilder.exportSymbols('DOCS-IETF-QOS-MIB',\n docsIetfQosServiceClassNomPollInterval=\n docsIetfQosServiceClassNomPollInterval,\n docsIetfQosServiceClassTolPollJitter=\n docsIetfQosServiceClassTolPollJitter,\n docsIetfQosServiceClassUnsolicitGrantSize=\n docsIetfQosServiceClassUnsolicitGrantSize,\n docsIetfQosServiceClassNomGrantInterval=\n docsIetfQosServiceClassNomGrantInterval,\n docsIetfQosServiceClassTolGrantJitter=\n docsIetfQosServiceClassTolGrantJitter,\n docsIetfQosServiceClassGrantsPerInterval=\n docsIetfQosServiceClassGrantsPerInterval,\n docsIetfQosServiceClassMaxLatency=docsIetfQosServiceClassMaxLatency,\n docsIetfQosServiceClassActiveTimeout=\n docsIetfQosServiceClassActiveTimeout,\n docsIetfQosServiceClassAdmittedTimeout=\n docsIetfQosServiceClassAdmittedTimeout,\n docsIetfQosServiceClassSchedulingType=\n docsIetfQosServiceClassSchedulingType,\n docsIetfQosServiceClassRequestPolicy=\n docsIetfQosServiceClassRequestPolicy, docsIetfQosServiceClassTosAndMask\n =docsIetfQosServiceClassTosAndMask, docsIetfQosServiceClassTosOrMask=\n docsIetfQosServiceClassTosOrMask, docsIetfQosServiceClassDirection=\n docsIetfQosServiceClassDirection, docsIetfQosServiceClassStorageType=\n docsIetfQosServiceClassStorageType,\n docsIetfQosServiceClassDSCPOverwrite=\n docsIetfQosServiceClassDSCPOverwrite,\n docsIetfQosServiceClassPolicyTable=docsIetfQosServiceClassPolicyTable,\n docsIetfQosServiceClassPolicyEntry=docsIetfQosServiceClassPolicyEntry,\n docsIetfQosServiceClassPolicyIndex=docsIetfQosServiceClassPolicyIndex,\n docsIetfQosServiceClassPolicyName=docsIetfQosServiceClassPolicyName,\n docsIetfQosServiceClassPolicyRulePriority=\n docsIetfQosServiceClassPolicyRulePriority,\n docsIetfQosServiceClassPolicyStatus=docsIetfQosServiceClassPolicyStatus,\n docsIetfQosServiceClassPolicyStorageType=\n docsIetfQosServiceClassPolicyStorageType, docsIetfQosPHSTable=\n docsIetfQosPHSTable, docsIetfQosPHSEntry=docsIetfQosPHSEntry,\n docsIetfQosPHSField=docsIetfQosPHSField, docsIetfQosPHSMask=\n docsIetfQosPHSMask, docsIetfQosPHSSize=docsIetfQosPHSSize,\n docsIetfQosPHSVerify=docsIetfQosPHSVerify, docsIetfQosPHSIndex=\n docsIetfQosPHSIndex, docsIetfQosCmtsMacToSrvFlowTable=\n docsIetfQosCmtsMacToSrvFlowTable, docsIetfQosCmtsMacToSrvFlowEntry=\n docsIetfQosCmtsMacToSrvFlowEntry, docsIetfQosCmtsCmMac=\n docsIetfQosCmtsCmMac, docsIetfQosCmtsServiceFlowId=\n docsIetfQosCmtsServiceFlowId, docsIetfQosCmtsIfIndex=\n docsIetfQosCmtsIfIndex, 
docsIetfQosConformance=docsIetfQosConformance,\n docsIetfQosGroups=docsIetfQosGroups, docsIetfQosCompliances=\n docsIetfQosCompliances)\nmibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', docsIetfQosBaseGroup=\n docsIetfQosBaseGroup, docsIetfQosParamSetGroup=docsIetfQosParamSetGroup,\n docsIetfQosCmtsGroup=docsIetfQosCmtsGroup,\n docsIetfQosSrvClassPolicyGroup=docsIetfQosSrvClassPolicyGroup,\n docsIetfQosServiceClassGroup=docsIetfQosServiceClassGroup)\nmibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', docsIetfQosCompliance=\n docsIetfQosCompliance)\n",
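# --- Illustrative aside (not part of the generated module above) ---
# A minimal sketch of how an autogenerated PySNMP module like this one is
# typically consumed. It assumes the file DOCS-IETF-QOS-MIB.py sits in the
# current directory; the path and the addMibSources() call are assumptions,
# not taken from this record.
from pysnmp.smi import builder

mib_builder = builder.MibBuilder()
mib_builder.addMibSources(builder.DirMibSource('.'))  # assumed module location
mib_builder.loadModules('DOCS-IETF-QOS-MIB')

# importSymbols returns the managed-object instances exported via
# mibBuilder.exportSymbols() above.
(qos_mib,) = mib_builder.importSymbols('DOCS-IETF-QOS-MIB', 'docsIetfQosMIB')
print(qos_mib.getName())  # OID tuple: (1, 3, 6, 1, 2, 1, 127)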
"step-5": "# PySNMP SMI module. Autogenerated from smidump -f python DOCS-IETF-QOS-MIB\n# by libsmi2pysnmp-0.1.3 at Thu May 22 11:57:36 2014,\n# Python version sys.version_info(major=2, minor=7, micro=2, releaselevel='final', serial=0)\n\n# Imports\n\n( Integer, ObjectIdentifier, OctetString, ) = mibBuilder.importSymbols(\"ASN1\", \"Integer\", \"ObjectIdentifier\", \"OctetString\")\n( NamedValues, ) = mibBuilder.importSymbols(\"ASN1-ENUMERATION\", \"NamedValues\")\n( ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ) = mibBuilder.importSymbols(\"ASN1-REFINEMENT\", \"ConstraintsIntersection\", \"ConstraintsUnion\", \"SingleValueConstraint\", \"ValueRangeConstraint\", \"ValueSizeConstraint\")\n( DscpOrAny, ) = mibBuilder.importSymbols(\"DIFFSERV-DSCP-TC\", \"DscpOrAny\")\n( InterfaceIndex, ifIndex, ) = mibBuilder.importSymbols(\"IF-MIB\", \"InterfaceIndex\", \"ifIndex\")\n( InetAddress, InetAddressType, InetPortNumber, ) = mibBuilder.importSymbols(\"INET-ADDRESS-MIB\", \"InetAddress\", \"InetAddressType\", \"InetPortNumber\")\n( SnmpAdminString, ) = mibBuilder.importSymbols(\"SNMP-FRAMEWORK-MIB\", \"SnmpAdminString\")\n( ModuleCompliance, ObjectGroup, ) = mibBuilder.importSymbols(\"SNMPv2-CONF\", \"ModuleCompliance\", \"ObjectGroup\")\n( Bits, Counter32, Counter64, Integer32, Integer32, ModuleIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Unsigned32, mib_2, ) = mibBuilder.importSymbols(\"SNMPv2-SMI\", \"Bits\", \"Counter32\", \"Counter64\", \"Integer32\", \"Integer32\", \"ModuleIdentity\", \"MibIdentifier\", \"MibScalar\", \"MibTable\", \"MibTableRow\", \"MibTableColumn\", \"TimeTicks\", \"Unsigned32\", \"mib-2\")\n( MacAddress, RowStatus, StorageType, TextualConvention, TimeStamp, TruthValue, ) = mibBuilder.importSymbols(\"SNMPv2-TC\", \"MacAddress\", \"RowStatus\", \"StorageType\", \"TextualConvention\", \"TimeStamp\", \"TruthValue\")\n\n# Types\n\nclass DocsIetfQosBitRate(TextualConvention, Unsigned32):\n displayHint = \"d\"\n \nclass DocsIetfQosRfMacIfDirection(Integer):\n subtypeSpec = Integer.subtypeSpec+SingleValueConstraint(2,1,)\n namedValues = NamedValues((\"downstream\", 1), (\"upstream\", 2), )\n \nclass DocsIetfQosSchedulingType(Integer):\n subtypeSpec = Integer.subtypeSpec+SingleValueConstraint(3,1,5,6,2,4,)\n namedValues = NamedValues((\"undefined\", 1), (\"bestEffort\", 2), (\"nonRealTimePollingService\", 3), (\"realTimePollingService\", 4), (\"unsolictedGrantServiceWithAD\", 5), (\"unsolictedGrantService\", 6), )\n \n\n# Objects\n\ndocsIetfQosMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 127)).setRevisions((\"2006-01-23 00:00\",))\nif mibBuilder.loadTexts: docsIetfQosMIB.setOrganization(\"IETF IP over Cable Data Network (IPCDN)\\nWorking Group\")\nif mibBuilder.loadTexts: docsIetfQosMIB.setContactInfo(\"\\nCo-Author: Michael Patrick\\nPostal: Motorola BCS\\n 111 Locke Drive\\n Marlborough, MA 01752-7214\\n U.S.A.\\nPhone: +1 508 786 7563\\nE-mail: michael.patrick@motorola.com\\n\\nCo-Author: William Murwin\\nPostal: Motorola BCS\\n 111 Locke Drive\\n Marlborough, MA 01752-7214\\n U.S.A.\\nPhone: +1 508 786 7594\\nE-mail: w.murwin@motorola.com\\n\\nIETF IPCDN Working Group\\nGeneral Discussion: ipcdn@ietf.org\\nSubscribe: http://www.ietf.org/mailman/listinfo/ipcdn\\nArchive: ftp://ftp.ietf.org/ietf-mail-archive/ipcdn\\nCo-chairs: Richard Woundy, Richard_Woundy@cable.comcast.com\\n Jean-Francois Mule, jfm@cablelabs.com\")\nif mibBuilder.loadTexts: docsIetfQosMIB.setDescription(\"This 
is the management information for\\nQuality Of Service (QOS) for DOCSIS 1.1 and 2.0.\\n\\n\\n\\nCopyright (C) The Internet Society (2006). This version of\\nthis MIB module is part of RFC 4323; see the RFC itself for\\nfull legal notices.\")\ndocsIetfQosNotifications = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 0))\ndocsIetfQosMIBObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 1))\ndocsIetfQosPktClassTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 1))\nif mibBuilder.loadTexts: docsIetfQosPktClassTable.setDescription(\"This table describes the packet classification\\nconfigured on the CM or CMTS.\\nThe model is that a packet either received\\nas input from an interface or transmitted\\nfor output on an interface may be compared\\nagainst an ordered list of rules pertaining to\\nthe packet contents. Each rule is a row of this\\ntable. A matching rule provides a Service Flow\\nID to which the packet is classified.\\nAll rules need to match for a packet to match\\na classifier.\\n\\nThe objects in this row correspond to a set of\\nClassifier Encoding parameters in a DOCSIS\\nMAC management message. The\\ndocsIetfQosPktClassBitMap indicates which\\nparticular parameters were present in the\\nclassifier as signaled in the DOCSIS message.\\nIf the referenced parameter was not present\\nin the signaled DOCSIS 1.1 and 2.0 Classifier, the\\ncorresponding object in this row reports a\\nvalue as specified in the DESCRIPTION section.\")\ndocsIetfQosPktClassEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 1, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowId\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassId\"))\nif mibBuilder.loadTexts: docsIetfQosPktClassEntry.setDescription(\"An entry in this table provides a single packet\\nclassifier rule. 
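# --- Illustrative aside (hypothetical helper, not defined by the MIB) ---
# The table description above models classification as an ordered rule
# list: every criterion present in a rule must match, and a matching rule
# yields a Service Flow ID. A pure-Python sketch of that matching model:
def classify(packet, rules):
    # rules: list of (service_flow_id, {field: predicate}) ordered by
    # descending docsIetfQosPktClassPriority; all predicates must hold.
    for sf_id, criteria in rules:
        if all(pred(packet.get(field)) for field, pred in criteria.items()):
            return sf_id
    return None  # unclassified traffic falls to the primary Service Flow

rules = [(101, {'ip_protocol': lambda p: p == 17})]  # hypothetical: match UDP
print(classify({'ip_protocol': 17, 'dst_port': 5060}, rules))  # -> 101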
The index ifIndex is an ifType\\nof docsCableMaclayer(127).\")\ndocsIetfQosPktClassId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosPktClassId.setDescription(\"Index assigned to packet classifier entry by\\nthe CMTS, which is unique per Service Flow.\")\ndocsIetfQosPktClassDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 2), DocsIetfQosRfMacIfDirection()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassDirection.setDescription(\"Indicates the direction to which the classifier\\nis applied.\")\ndocsIetfQosPktClassPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassPriority.setDescription(\"The value specifies the order of evaluation\\nof the classifiers.\\n\\nThe higher the value, the higher the priority.\\nThe value of 0 is used as default in\\nprovisioned Service Flows Classifiers.\\nThe default value of 64 is used for dynamic\\nService Flow Classifiers.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the default\\nvalue as defined above.\")\ndocsIetfQosPktClassIpTosLow = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassIpTosLow.setDescription(\"The low value of a range of TOS byte values.\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value\\nof 0.\\n\\nThe IP TOS octet, as originally defined in RFC 791,\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). This object is defined as an 8-bit\\noctet as per the DOCSIS Specification\\nfor packet classification.\")\ndocsIetfQosPktClassIpTosHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassIpTosHigh.setDescription(\"The 8-bit high value of a range of TOS byte\\nvalues.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the\\nvalue of 0.\\n\\nThe IP TOS octet as originally defined in RFC 791\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). This object is defined as an 8-bit\\noctet as defined by the DOCSIS Specification\\nfor packet classification.\")\ndocsIetfQosPktClassIpTosMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassIpTosMask.setDescription(\"The mask value is bitwise ANDed with TOS byte\\nin an IP packet, and this value is used for\\nrange checking of TosLow and TosHigh.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value\\nof 0.\\n\\nThe IP TOS octet as originally defined in RFC 791\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). 
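# --- Illustrative aside (hypothetical values) ---
# docsIetfQosPktClassIpTosLow/High above bound a range of TOS byte values;
# docsIetfQosPktClassIpTosMask (defined just below) supplies the mask that
# is ANDed with the packet's TOS byte before the range check:
def tos_matches(tos, tos_low, tos_high, tos_mask):
    return tos_low <= (tos & tos_mask) <= tos_high

# e.g. match DSCP values 8..10 while ignoring the two ECN bits:
print(tos_matches(0x23, tos_low=0x20, tos_high=0x28, tos_mask=0xFC))  # True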
This object is defined as an 8-bit\\noctet per the DOCSIS Specification for packet\\nclassification.\")\ndocsIetfQosPktClassIpProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 258))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassIpProtocol.setDescription(\"This object indicates the value of the IP\\nProtocol field required for IP packets to match\\nthis rule.\\n\\n\\n\\n\\nThe value 256 matches traffic with any IP Protocol\\nvalue. The value 257 by convention matches both TCP\\nand UDP.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value\\nof 258.\")\ndocsIetfQosPktClassInetAddressType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 8), InetAddressType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassInetAddressType.setDescription(\"The type of the Internet address for\\ndocsIetfQosPktClassInetSourceAddr,\\ndocsIetfQosPktClassInetSourceMask,\\ndocsIetfQosPktClassInetDestAddr, and\\ndocsIetfQosPktClassInetDestMask.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\nipv4(1).\")\ndocsIetfQosPktClassInetSourceAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 9), InetAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassInetSourceAddr.setDescription(\"This object specifies the value of the IP\\nSource Address required for packets to match\\nthis rule.\\n\\nAn IP packet matches the rule when the packet\\nIP Source Address bitwise ANDed with the\\ndocsIetfQosPktClassInetSourceMask value equals the\\ndocsIetfQosPktClassInetSourceAddr value.\\n\\nThe address type of this object is specified by\\ndocsIetfQosPktClassInetAddressType.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'00000000'H.\")\ndocsIetfQosPktClassInetSourceMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 10), InetAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassInetSourceMask.setDescription(\"This object specifies which bits of a packet's\\nIP Source Address are compared to match\\nthis rule.\\n\\nAn IP packet matches the rule when the packet\\nsource address bitwise ANDed with the\\ndocsIetfQosPktClassInetSourceMask value equals the\\ndocsIetfQosIpPktClassInetSourceAddr value.\\n\\nThe address type of this object is specified by\\ndocsIetfQosPktClassInetAddressType.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'FFFFFFFF'H.\")\ndocsIetfQosPktClassInetDestAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 11), InetAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassInetDestAddr.setDescription(\"This object specifies the value of the IP\\nDestination Address required for packets to match\\nthis rule.\\n\\nAn IP packet matches the rule when the packet\\nIP Destination Address bitwise ANDed with the\\ndocsIetfQosPktClassInetDestMask value\\nequals the docsIetfQosPktClassInetDestAddr value.\\n\\nThe address type of this object is specified by\\ndocsIetfQosPktClassInetAddressType.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'00000000'H.\")\ndocsIetfQosPktClassInetDestMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 12), InetAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassInetDestMask.setDescription(\"This 
object specifies which bits of a packet's\\nIP Destination Address are compared to\\nmatch this rule.\\n\\nAn IP packet matches the rule when the packet\\ndestination address bitwise ANDed with the\\ndocsIetfQosPktClassInetDestMask value equals the\\ndocsIetfQosIpPktClassInetDestAddr value.\\n\\nThe address type of this object is specified by\\ndocsIetfQosPktClassInetAddressType.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'FFFFFFFF'H.\")\ndocsIetfQosPktClassSourcePortStart = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 13), InetPortNumber()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassSourcePortStart.setDescription(\"This object specifies the low-end inclusive\\nrange of TCP/UDP source port numbers to which\\na packet is compared. This object is irrelevant\\nfor non-TCP/UDP IP packets.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value\\nof 0.\")\ndocsIetfQosPktClassSourcePortEnd = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 14), InetPortNumber()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassSourcePortEnd.setDescription(\"This object specifies the high-end inclusive\\nrange of TCP/UDP source port numbers to which\\na packet is compared. This object is irrelevant\\nfor non-TCP/UDP IP packets.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n65535.\")\ndocsIetfQosPktClassDestPortStart = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 15), InetPortNumber()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassDestPortStart.setDescription(\"This object specifies the low-end inclusive\\nrange of TCP/UDP destination port numbers to\\nwhich a packet is compared.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value\\nof 0.\")\ndocsIetfQosPktClassDestPortEnd = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 16), InetPortNumber()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassDestPortEnd.setDescription(\"This object specifies the high-end inclusive\\nrange of TCP/UDP destination port numbers to which\\na packet is compared.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n65535.\")\ndocsIetfQosPktClassDestMacAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 17), MacAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassDestMacAddr.setDescription(\"An Ethernet packet matches an entry when its\\ndestination MAC address bitwise ANDed with\\ndocsIetfQosPktClassDestMacMask equals the value of\\ndocsIetfQosPktClassDestMacAddr.\\n\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'000000000000'H.\")\ndocsIetfQosPktClassDestMacMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 18), MacAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassDestMacMask.setDescription(\"An Ethernet packet matches an entry when its\\ndestination MAC address bitwise ANDed with\\ndocsIetfQosPktClassDestMacMask equals the value of\\ndocsIetfQosPktClassDestMacAddr.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'000000000000'H.\")\ndocsIetfQosPktClassSourceMacAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 19), MacAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: 
docsIetfQosPktClassSourceMacAddr.setDescription(\"An Ethernet packet matches this entry when its\\nsource MAC address equals the value of\\nthis object.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'FFFFFFFFFFFF'H.\")\ndocsIetfQosPktClassEnetProtocolType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 20), Integer().subtype(subtypeSpec=SingleValueConstraint(2,0,1,4,3,)).subtype(namedValues=NamedValues((\"none\", 0), (\"ethertype\", 1), (\"dsap\", 2), (\"mac\", 3), (\"all\", 4), ))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassEnetProtocolType.setDescription(\"This object indicates the format of the layer 3\\nprotocol ID in the Ethernet packet. A value of\\nnone(0) means that the rule does not use the\\nlayer 3 protocol type as a matching criteria.\\n\\nA value of ethertype(1) means that the rule\\napplies only to frames that contain an\\nEtherType value. Ethertype values are contained\\nin packets using the Dec-Intel-Xerox (DIX)\\nencapsulation or the RFC1042 Sub-Network Access\\nProtocol (SNAP) encapsulation formats.\\n\\nA value of dsap(2) means that the rule applies\\n\\n\\n\\nonly to frames using the IEEE802.3\\nencapsulation format with a Destination Service\\nAccess Point (DSAP) other\\nthan 0xAA (which is reserved for SNAP).\\n\\nA value of mac(3) means that the rule applies\\nonly to MAC management messages for MAC management\\nmessages.\\n\\nA value of all(4) means that the rule matches\\nall Ethernet packets.\\n\\nIf the Ethernet frame contains an 802.1P/Q Tag\\nheader (i.e., EtherType 0x8100), this object\\napplies to the embedded EtherType field within\\nthe 802.1P/Q header.\\n\\nIf the referenced parameter is not present in a\\nclassifier, this object reports the value of 0.\")\ndocsIetfQosPktClassEnetProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassEnetProtocol.setDescription(\"If docsIetfQosEthPktClassProtocolType is none(0),\\nthis object is ignored when considering whether\\na packet matches the current rule.\\n\\nIf dosQosPktClassEnetProtocolType is ethertype(1),\\nthis object gives the 16-bit value of the\\nEtherType that the packet must match in order to\\nmatch the rule.\\n\\nIf docsIetfQosPktClassEnetProtocolType is dsap(2),\\nthe lower 8 bits of this object's value must match\\nthe DSAP byte of the packet in order to match the\\nrule.\\n\\nIf docsIetfQosPktClassEnetProtocolType is mac(3),\\nthe lower 8 bits of this object's value represent a\\nlower bound (inclusive) of MAC management message\\ntype codes matched, and the upper 8 bits represent\\nthe upper bound (inclusive) of matched MAC message\\ntype codes. 
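# --- Illustrative aside ---
# Per the description above, when docsIetfQosPktClassEnetProtocolType is
# mac(3) the 16-bit docsIetfQosPktClassEnetProtocol value packs an inclusive
# range of MAC management message type codes: the lower 8 bits hold the
# lower bound and the upper 8 bits hold the upper bound.
def mac_type_range(enet_protocol):
    return enet_protocol & 0xFF, (enet_protocol >> 8) & 0xFF

low, high = mac_type_range(0x1005)  # hypothetical encoding
print(low, high)  # 5 16  -> matches MAC message types 5..16 inclusive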
Certain message type codes are\\nexcluded from matching, as specified in the\\nreference.\\n\\n\\n\\nIf the Ethernet frame contains an 802.1P/Q Tag\\nheader (i.e., EtherType 0x8100), this object applies\\nto the embedded EtherType field within the 802.1P/Q\\nheader.\\n\\nIf the referenced parameter is not present in the\\nclassifier, the value of this object is reported\\nas 0.\")\ndocsIetfQosPktClassUserPriLow = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassUserPriLow.setDescription(\"This object applies only to Ethernet frames\\nusing the 802.1P/Q tag header (indicated with\\nEtherType 0x8100). Such frames include a 16-bit\\nTag that contains a 3-bit Priority field and\\na 12-bit VLAN number.\\n\\nTagged Ethernet packets must have a 3-bit\\nPriority field within the range of\\ndocsIetfQosPktClassPriLow to\\ndocsIetfQosPktClassPriHigh in order to match this\\nrule.\\n\\nIf the referenced parameter is not present in the\\nclassifier, the value of this object is reported\\nas 0.\")\ndocsIetfQosPktClassUserPriHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassUserPriHigh.setDescription(\"This object applies only to Ethernet frames\\nusing the 802.1P/Qtag header (indicated with\\nEtherType 0x8100). Such frames include a 16-bit\\nTag that contains a 3-bit Priority field and\\na 12-bit VLAN number.\\n\\nTagged Ethernet packets must have a 3-bit\\nPriority field within the range of\\ndocsIetfQosPktClassPriLow to\\ndocsIetfQosPktClassPriHigh in order to match this\\nrule.\\n\\n\\n\\nIf the referenced parameter is not present in the\\nclassifier, the value of this object is reported\\nas 7.\")\ndocsIetfQosPktClassVlanId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassVlanId.setDescription(\"This object applies only to Ethernet frames\\nusing the 802.1P/Q tag header.\\n\\nTagged packets must have a VLAN Identifier that\\nmatches the value in order to match the rule.\\n\\nIf the referenced parameter is not present in the\\nclassifier, the value of this object is reported\\nas 0.\")\ndocsIetfQosPktClassStateActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 25), TruthValue()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassStateActive.setDescription(\"This object indicates whether or not the classifier\\nis enabled to classify packets to a Service Flow.\\n\\nIf the referenced parameter is not present in the\\nclassifier, the value of this object is reported\\nas true(1).\")\ndocsIetfQosPktClassPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 26), Counter64()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassPkts.setDescription(\"This object counts the number of packets that have\\nbeen classified using this entry. 
This\\nincludes all packets delivered to a Service Flow\\nmaximum rate policing function, whether or not that\\nfunction drops the packets.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosPktClassBitMap = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 27), Bits().subtype(namedValues=NamedValues((\"rulePriority\", 0), (\"activationState\", 1), (\"destPortStart\", 10), (\"destPortEnd\", 11), (\"destMac\", 12), (\"sourceMac\", 13), (\"ethertype\", 14), (\"userPri\", 15), (\"vlanId\", 16), (\"ipTos\", 2), (\"ipProtocol\", 3), (\"ipSourceAddr\", 4), (\"ipSourceMask\", 5), (\"ipDestAddr\", 6), (\"ipDestMask\", 7), (\"sourcePortStart\", 8), (\"sourcePortEnd\", 9), ))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassBitMap.setDescription(\"This object indicates which parameter encodings\\nwere actually present in the DOCSIS packet\\nclassifier encoding signaled in the DOCSIS message\\nthat created or modified the classifier. Note that\\nDynamic Service Change messages have replace\\nsemantics, so that all non-default parameters must\\nbe present whether the classifier is being created\\nor changed.\\n\\nA bit of this object is set to 1 if the parameter\\nindicated by the comment was present in the\\nclassifier encoding, and to 0 otherwise.\\n\\nNote that BITS are encoded most significant bit\\nfirst, so that if, for example, bits 6 and 7 are\\nset, this object is encoded as the octet string\\n'030000'H.\")\ndocsIetfQosParamSetTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 2))\nif mibBuilder.loadTexts: docsIetfQosParamSetTable.setDescription(\"This table describes the set of DOCSIS 1.1 and 2.0\\nQOS parameters defined in a managed device.\\n\\nThe ifIndex index specifies a DOCSIS MAC Domain.\\nThe docsIetfQosServiceFlowId index specifies a\\nparticular Service Flow.\\nThe docsIetfQosParamSetType index indicates whether\\nthe active, admitted, or provisioned QOS Parameter\\nSet is being described by the row.\\n\\nOnly the QOS Parameter Sets of DOCSIS 1.1 and 2.0\\nService Flows are represented in this table.\\n\\nDOCSIS 1.0 QOS service profiles are not\\nrepresented in this table.\\n\\nEach row corresponds to a DOCSIS QOS Parameter Set\\nas signaled via DOCSIS MAC management messages.\\nEach object in the row corresponds to one or\\npart of one DOCSIS 1.1 Service Flow Encoding.\\nThe docsIetfQosParamSetBitMap object in the row\\nindicates which particular parameters were signaled\\nin the original registration or dynamic service\\nrequest message that created the QOS Parameter Set.\\n\\nIn many cases, even if a QOS Parameter Set parameter\\nwas not signaled, the DOCSIS specification calls\\nfor a default value to be used. That default value\\nis reported as the value of the corresponding object\\nin this row.\\n\\nMany objects are not applicable, depending on\\nthe Service Flow direction or upstream scheduling\\ntype. 
The object value reported in this case\\nis specified in the DESCRIPTION clause.\")\ndocsIetfQosParamSetEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 2, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowId\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetType\"))\nif mibBuilder.loadTexts: docsIetfQosParamSetEntry.setDescription(\"A unique set of QOS parameters.\")\ndocsIetfQosParamSetServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 1), SnmpAdminString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetServiceClassName.setDescription(\"Refers to the Service Class Name from which the\\nparameter set values were derived.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object is a zero-length string.\")\ndocsIetfQosParamSetPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetPriority.setDescription(\"The relative priority of a Service Flow.\\nHigher numbers indicate higher priority.\\nThis priority should only be used to differentiate\\n\\n\\n\\nService Flow from identical parameter sets.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object is 0. If the parameter is\\nnot applicable, the reported value is 0.\")\ndocsIetfQosParamSetMaxTrafficRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 3), DocsIetfQosBitRate()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetMaxTrafficRate.setDescription(\"Maximum sustained traffic rate allowed for this\\nService Flow in bits/sec. Must count all MAC frame\\ndata PDU from the bytes following the MAC header\\nHCS to the end of the CRC. The number of bytes\\nforwarded is limited during any time interval.\\nThe value 0 means no maximum traffic rate is\\nenforced. This object applies to both upstream and\\ndownstream Service Flows.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object is 0. If the parameter is\\nnot applicable, it is reported as 0.\")\ndocsIetfQosParamSetMaxTrafficBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 4), Unsigned32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetMaxTrafficBurst.setDescription(\"Specifies the token bucket size in bytes\\nfor this parameter set. The value is calculated\\nfrom the byte following the MAC header HCS to\\nthe end of the CRC. This object is applied in\\nconjunction with docsIetfQosParamSetMaxTrafficRate\\nto calculate maximum sustained traffic rate.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object for scheduling types\\nbestEffort (2), nonRealTimePollingService(3),\\nand realTimePollingService(4) is 3044.\\n\\nIf this parameter is not applicable, it is reported\\nas 0.\")\ndocsIetfQosParamSetMinReservedRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 5), DocsIetfQosBitRate()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetMinReservedRate.setDescription(\"Specifies the guaranteed minimum rate in\\nbits/sec for this parameter set. The value is\\ncalculated from the byte following the MAC\\nheader HCS to the end of the CRC. 
The default\\nvalue of 0 means that no bandwidth is reserved.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object is 0. If the parameter\\nis not applicable, it is reported as 0.\")\ndocsIetfQosParamSetMinReservedPkt = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetMinReservedPkt.setDescription(\"Specifies an assumed minimum packet size in\\nbytes for which the\\ndocsIetfQosParamSetMinReservedRate will be\\nprovided. The value is calculated from the byte\\nfollowing the MAC header HCS to the end of the\\nCRC.\\n\\nIf the referenced parameter is omitted from a\\nDOCSIS QOS parameter set, the default value is\\nCMTS implementation dependent. In this case, the\\nCMTS reports the default value it is using, and the\\nCM reports a value of 0. If the referenced\\nparameter is not applicable to the direction or\\nscheduling type of the Service Flow, both CMTS and\\nCM report this object's value as 0.\")\ndocsIetfQosParamSetActiveTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetActiveTimeout.setDescription(\"Specifies the maximum duration in seconds that\\nresources remain unused on an active service\\nflow before CMTS signals that both active and\\nadmitted parameters set are null. The default\\nvalue of 0 signifies an infinite amount of time.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object is 0.\")\ndocsIetfQosParamSetAdmittedTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(200)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetAdmittedTimeout.setDescription(\"Specifies the maximum duration in seconds that\\nresources remain in admitted state before\\nresources must be released.\\n\\nThe value of 0 signifies an infinite amount\\nof time.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the\\ndefault value of this object is 200.\")\ndocsIetfQosParamSetMaxConcatBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetMaxConcatBurst.setDescription(\"Specifies the maximum concatenated burst in\\nbytes that an upstream Service Flow is allowed.\\nThe value is calculated from the FC byte of the\\nConcatenation MAC Header to the last CRC byte in\\nof the last concatenated MAC frame, inclusive.\\nThe value of 0 specifies no maximum burst.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object for scheduling types\\nbestEffort(2), nonRealTimePollingService(3), and\\n\\n\\n\\nrealTimePollingService(4) is 1522. 
If the parameter\\nis not applicable, this object's value is reported\\nas 0.\")\ndocsIetfQosParamSetSchedulingType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 10), DocsIetfQosSchedulingType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetSchedulingType.setDescription(\"Specifies the upstream scheduling service used for\\nupstream Service Flow.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set of an\\nupstream Service Flow, the default value of this\\nobject is bestEffort(2). For QOS parameter sets of\\ndownstream Service Flows, this object's value is\\nreported as undefined(1).\")\ndocsIetfQosParamSetNomPollInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 11), Unsigned32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetNomPollInterval.setDescription(\"Specifies the nominal interval in microseconds\\nbetween successive unicast request\\nopportunities on an upstream Service Flow.\\n\\nThis object applies only to upstream Service Flows\\nwith DocsIetfQosSchedulingType of value\\nnonRealTimePollingService(3),\\nrealTimePollingService(4), and\\nunsolictedGrantServiceWithAD(5). The parameter is\\nmandatory for realTimePollingService(4). If the\\nparameter is omitted with\\nnonRealTimePollingService(3), the CMTS uses an\\nimplementation-dependent value. If the parameter\\nis omitted with unsolictedGrantServiceWithAD(5),\\nthe CMTS uses as a default value the value of the\\nNominal Grant Interval parameter. In all cases,\\nthe CMTS reports the value it is using when the\\nparameter is applicable. The CM reports the\\nsignaled parameter value if it was signaled,\\nand 0 otherwise.\\n\\n\\n\\nIf the referenced parameter is not applicable to\\nthe direction or scheduling type of the\\ncorresponding DOCSIS QOS Parameter Set, both\\nCMTS and CM report this object's value as 0.\")\ndocsIetfQosParamSetTolPollJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 12), Unsigned32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetTolPollJitter.setDescription(\"Specifies the maximum amount of time in\\nmicroseconds that the unicast request interval\\nmay be delayed from the nominal periodic\\nschedule on an upstream Service Flow.\\n\\nThis parameter is applicable only to upstream\\nService Flows with a DocsIetfQosSchedulingType of\\nrealTimePollingService(4) or\\nunsolictedGrantServiceWithAD(5).\\n\\nIf the referenced parameter is applicable but not\\npresent in the corresponding DOCSIS QOS Parameter\\nSet, the CMTS uses an implementation-dependent\\nvalue and reports the value it is using.\\nThe CM reports a value of 0 in this case.\\n\\nIf the parameter is not applicable to the\\ndirection or upstream scheduling type of the\\nService Flow, both CMTS and CM report this\\nobject's value as 0.\")\ndocsIetfQosParamSetUnsolicitGrantSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetUnsolicitGrantSize.setDescription(\"Specifies the unsolicited grant size in bytes.\\nThe grant size includes the entire MAC frame\\ndata PDU from the Frame Control byte to the end\\nof the MAC frame.\\n\\nThe referenced parameter is applicable only\\nfor upstream flows with a DocsIetfQosSchedulingType\\nof unsolicitedGrantServicewithAD(5) or\\nunsolicitedGrantService(6), and it is mandatory\\n\\n\\n\\nwhen applicable. 
Both CMTS and CM report\\nthe signaled value of the parameter in this\\ncase.\\n\\nIf the referenced parameter is not applicable to\\nthe direction or scheduling type of the\\ncorresponding DOCSIS QOS Parameter Set, both\\nCMTS and CM report this object's value as 0.\")\ndocsIetfQosParamSetNomGrantInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 14), Unsigned32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetNomGrantInterval.setDescription(\"Specifies the nominal interval in microseconds\\nbetween successive data grant opportunities\\non an upstream Service Flow.\\n\\nThe referenced parameter is applicable only\\nfor upstream flows with a DocsIetfQosSchedulingType\\nof unsolicitedGrantServicewithAD(5) or\\nunsolicitedGrantService(6), and it is mandatory\\nwhen applicable. Both CMTS and CM report the\\nsignaled value of the parameter in this case.\\n\\nIf the referenced parameter is not applicable to\\nthe direction or scheduling type of the\\ncorresponding DOCSIS QOS Parameter Set, both\\nCMTS and CM report this object's value as 0.\")\ndocsIetfQosParamSetTolGrantJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 15), Unsigned32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetTolGrantJitter.setDescription(\"Specifies the maximum amount of time in\\nmicroseconds that the transmission opportunities\\nmay be delayed from the nominal periodic schedule.\\n\\nThe referenced parameter is applicable only\\nfor upstream flows with a DocsIetfQosSchedulingType\\nof unsolicitedGrantServicewithAD(5) or\\nunsolicitedGrantService(6), and it is mandatory\\nwhen applicable. Both CMTS and CM report the\\n\\n\\n\\nsignaled value of the parameter in this case.\\n\\nIf the referenced parameter is not applicable to\\nthe direction or scheduling type of the\\ncorresponding DOCSIS QOS Parameter Set, both\\nCMTS and CM report this object's value as 0.\")\ndocsIetfQosParamSetGrantsPerInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetGrantsPerInterval.setDescription(\"Specifies the number of data grants per Nominal\\nGrant Interval\\n(docsIetfQosParamSetNomGrantInterval).\\n\\nThe referenced parameter is applicable only\\nfor upstream flows with a DocsIetfQosSchedulingType\\nof unsolicitedGrantServicewithAD(5) or\\nunsolicitedGrantService(6), and it is mandatory\\nwhen applicable. Both CMTS and CM report the\\nsignaled value of the parameter in this case.\\n\\nIf the referenced parameter is not applicable to\\nthe direction or scheduling type of the\\ncorresponding DOCSIS QOS Parameter Set, both\\nCMTS and CM report this object's value as 0.\")\ndocsIetfQosParamSetTosAndMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 17), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetTosAndMask.setDescription(\"Specifies the AND mask for the IP TOS byte for\\noverwriting IP packet's TOS value. 
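# --- Illustrative aside ---
# The AND/OR mask pair described above rewrites the TOS byte as
#     new_tos = (tos & TosAndMask) | TosOrMask
# with AndMask 'FF'H / OrMask '00'H as the documented no-op. Preserving the
# two ECN bits means the AND mask must keep them and the OR mask must not
# set them:
def overwrite_tos(tos, and_mask, or_mask):
    return (tos & and_mask) | or_mask

# hypothetical masks: force DSCP EF (46) while leaving the ECN bits alone
print(hex(overwrite_tos(0x2A, and_mask=0x03, or_mask=0xB8)))  # 0xba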
The IP packet\\nTOS byte is bitwise ANDed with\\ndocsIetfQosParamSetTosAndMask, and the result is\\nbitwise ORed with docsIetfQosParamSetTosORMask and\\nthe result is written to the IP packet TOS byte.\\nA value of 'FF'H for docsIetfQosParamSetTosAndMask\\nand a value of '00'H for\\ndocsIetfQosParamSetTosOrMask means that the IP\\nPacket TOS byte is not overwritten.\\n\\nThis combination is reported if the referenced\\nparameter is not present in a QOS Parameter Set.\\n\\n\\n\\nThe IP TOS octet as originally defined in RFC 791\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). Network operators SHOULD avoid\\nspecifying values of docsIetfQosParamSetTosAndMask\\nand docsIetfQosParamSetTosORMask that would result\\nin the modification of the ECN bits.\\n\\nIn particular, operators should not use values of\\ndocsIetfQosParamSetTosAndMask that have either of\\nthe least-significant two bits set to 0. Similarly,\\noperators should not use values of\\ndocsIetfQosParamSetTosORMask that have either of\\nthe least-significant two bits set to 1.\\n\\nEven though this object is only enforced by the\\nCable Modem Termination System (CMTS),\\nCable Modems MUST report the value as signaled in\\nthe referenced parameter.\")\ndocsIetfQosParamSetTosOrMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 18), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetTosOrMask.setDescription(\"Specifies the OR mask for the IP TOS byte.\\n\\nSee the description of docsIetfQosParamSetTosAndMask\\nfor further details.\\n\\nThe IP TOS octet as originally defined in RFC 791\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). Network operators SHOULD avoid\\nspecifying values of docsIetfQosParamSetTosAndMask\\nand docsIetfQosParamSetTosORMask that would result\\nin the modification of the ECN bits.\")\ndocsIetfQosParamSetMaxLatency = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 19), Unsigned32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetMaxLatency.setDescription(\"Specifies the maximum latency between the\\nreception of a packet by the CMTS on its NSI\\nand the forwarding of the packet to the RF\\ninterface. A value of 0 signifies no maximum\\nlatency is enforced. This object only applies to\\ndownstream Service Flows.\\n\\nIf the referenced parameter is not present in the\\ncorresponding downstream DOCSIS QOS Parameter Set,\\nthe default value is 0. This parameter is\\nnot applicable to upstream DOCSIS QOS Parameter\\nSets, and its value is reported as 0 in this case.\")\ndocsIetfQosParamSetType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 20), Integer().subtype(subtypeSpec=SingleValueConstraint(1,3,2,)).subtype(namedValues=NamedValues((\"active\", 1), (\"admitted\", 2), (\"provisioned\", 3), ))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosParamSetType.setDescription(\"Defines the type of the QOS parameter set defined\\nby this row. active(1) indicates the Active QOS\\nparameter set, describing the service currently\\nbeing provided by the DOCSIS MAC domain to the\\nService Flow. admitted(2) indicates the Admitted\\nQOS Parameter Set, describing services reserved by\\nthe DOCSIS MAC domain for use by the service\\nflow. 
provisioned (3) describes the QOS Parameter\\nSet defined in the DOCSIS CM Configuration file for\\nthe Service Flow.\")\ndocsIetfQosParamSetRequestPolicyOct = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 21), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetRequestPolicyOct.setDescription(\"Specifies which transmit interval opportunities\\nthe CM omits for upstream transmission requests and\\npacket transmissions. This object takes its\\ndefault value for downstream Service Flows.\\n\\nUnless otherwise indicated, a bit value of 1 means\\nthat a CM must not use that opportunity for\\nupstream transmission.\\n\\nIf bit 0 is the least significant bit of the\\nleast significant (4th) octet, and if bit number\\nis increased with significance, the bit definitions\\nare defined as follows:\\n\\nbroadcastReqOpp(0):\\n all CMs broadcast request opportunities\\n\\npriorityReqMulticastReq(1):\\n priority request multicast request\\n opportunities\\n\\nreqDataForReq(2):\\n request/data opportunities for requests\\n\\nreqDataForData(3):\\n request/data opportunities for data\\n\\npiggybackReqWithData(4):\\n piggyback requests with data\\n\\nconcatenateData(5):\\n concatenate data\\n\\nfragmentData(6):\\n fragment data\\n\\nsuppresspayloadheaders(7):\\n suppress payload headers\\n\\n\\n\\n\\ndropPktsExceedUGSize(8):\\n A value of 1 means that the Service Flow must\\n drop packets that do not fit in the Unsolicited\\n Grant size.\\n\\nIf the referenced parameter is not present in\\na QOS Parameter Set, the value of this object is\\nreported as '00000000'H.\")\ndocsIetfQosParamSetBitMap = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 22), Bits().subtype(namedValues=NamedValues((\"trafficPriority\", 0), (\"maxTrafficRate\", 1), (\"nomPollInterval\", 10), (\"tolPollJitter\", 11), (\"unsolicitGrantSize\", 12), (\"nomGrantInterval\", 13), (\"tolGrantJitter\", 14), (\"grantsPerInterval\", 15), (\"tosOverwrite\", 16), (\"maxLatency\", 17), (\"maxTrafficBurst\", 2), (\"minReservedRate\", 3), (\"minReservedPkt\", 4), (\"activeTimeout\", 5), (\"admittedTimeout\", 6), (\"maxConcatBurst\", 7), (\"schedulingType\", 8), (\"requestPolicy\", 9), ))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetBitMap.setDescription(\"This object indicates the set of QOS Parameter\\nSet parameters actually signaled in the\\nDOCSIS registration or dynamic service request\\nmessage that created or modified the QOS Parameter\\nSet. A bit is set to 1 when the parameter described\\nby the indicated reference section is present\\nin the original request.\\n\\nNote that when Service Class names are expanded,\\nthe registration or dynamic response message may\\ncontain parameters as expanded by the CMTS based\\n\\n\\n\\non a stored service class. These expanded\\nparameters are not indicated by a 1 bit in this\\nobject.\\n\\nNote that even though some QOS Parameter Set\\nparameters may not be signaled in a message\\n(so that the paramater's bit in this object is 0),\\nthe DOCSIS specification requires that default\\nvalues be used. These default values are reported\\nas the corresponding object's value in the row.\\n\\nNote that BITS objects are encoded most\\nsignificant bit first. 
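# --- Illustrative aside ---
# BITS values such as docsIetfQosParamSetBitMap are encoded most significant
# bit first: bit 0 is the MSB of octet 0, bit 8 the MSB of octet 1, and so
# on. A sketch that reproduces the '400080'H example given in the next
# sentence (bits 1 and 16 set):
def encode_bits(set_bits):
    octets = bytearray(max(set_bits) // 8 + 1)
    for b in set_bits:
        octets[b // 8] |= 0x80 >> (b % 8)
    return bytes(octets)

# (a real BITS encoder pads to cover every named bit, e.g. '03'H -> '030000'H)
print(encode_bits({1, 16}).hex())  # 400080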
For example, if bits\\n1 and 16 are set, the value of this object\\nis the octet string '400080'H.\")\ndocsIetfQosServiceFlowTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 3))\nif mibBuilder.loadTexts: docsIetfQosServiceFlowTable.setDescription(\"This table describes the set of DOCSIS-QOS\\nService Flows in a managed device.\")\ndocsIetfQosServiceFlowEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 3, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowId\"))\nif mibBuilder.loadTexts: docsIetfQosServiceFlowEntry.setDescription(\"Describes a Service Flow.\\nAn entry in the table exists for each\\nService Flow ID. The ifIndex is an\\nifType of docsCableMaclayer(127).\")\ndocsIetfQosServiceFlowId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowId.setDescription(\"An index assigned to a Service Flow by CMTS.\")\ndocsIetfQosServiceFlowSID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 16383))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowSID.setDescription(\"Service Identifier (SID) assigned to an\\nadmitted or active Service Flow. This object\\nreports a value of 0 if a Service ID is not\\nassociated with the Service Flow. Only active\\nor admitted upstream Service Flows will have a\\nService ID (SID).\")\ndocsIetfQosServiceFlowDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 3), DocsIetfQosRfMacIfDirection()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowDirection.setDescription(\"The direction of the Service Flow.\")\ndocsIetfQosServiceFlowPrimary = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 4), TruthValue()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowPrimary.setDescription(\"Object reflects whether Service Flow is the primary\\nor a secondary Service Flow.\\n\\nA primary Service Flow is the default Service Flow\\nfor otherwise unclassified traffic and all MAC\\nmessages.\")\ndocsIetfQosServiceFlowStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 4))\nif mibBuilder.loadTexts: docsIetfQosServiceFlowStatsTable.setDescription(\"This table describes statistics associated with the\\nService Flows in a managed device.\")\ndocsIetfQosServiceFlowStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 4, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowId\"))\nif mibBuilder.loadTexts: docsIetfQosServiceFlowStatsEntry.setDescription(\"Describes a set of Service Flow statistics.\\nAn entry in the table exists for each\\nService Flow ID. The ifIndex is an\\nifType of docsCableMaclayer(127).\")\ndocsIetfQosServiceFlowPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 1), Counter64()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowPkts.setDescription(\"For outgoing Service Flows, this object counts the\\nnumber of Packet Data PDUs forwarded to this\\nService Flow. For incoming upstream CMTS service\\nflows, this object counts the number of Packet\\nData PDUs actually received on the Service Flow\\nidentified by the SID for which the packet was\\nscheduled. CMs not classifying downstream packets\\nmay report this object's value as 0 for downstream\\nService Flows. 
This object does not count\\nMAC-specific management messages.\\n\\nParticularly for UGS flows, packets sent on the\\nprimary Service Flow in violation of the UGS grant\\nsize should be counted only by the instance of this\\nobject that is associated with the primary service\\n\\n\\n\\nflow.\\n\\nUnclassified upstream user data packets (i.e., non-\\nMAC-management) forwarded to the primary upstream\\nService Flow should be counted by the instance of\\nthis object that is associated with the primary\\nservice flow.\\n\\nThis object does include packets counted by\\ndocsIetfQosServiceFlowPolicedDelayPkts, but does not\\ninclude packets counted by\\ndocsIetfQosServiceFlowPolicedDropPkts\\nand docsIetfQosServiceFlowPHSUnknowns.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosServiceFlowOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 2), Counter64()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowOctets.setDescription(\"The number of octets from the byte after the MAC\\nheader HCS to the end of the CRC for all packets\\ncounted in the docsIetfQosServiceFlowPkts object for\\nthis row. Note that this counts the octets after\\npayload header suppression and before payload\\nheader expansion have been applied.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosServiceFlowTimeCreated = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 3), TimeStamp()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowTimeCreated.setDescription(\"The value of sysUpTime when the service flow\\nwas created.\")\ndocsIetfQosServiceFlowTimeActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 4), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowTimeActive.setDescription(\"The number of seconds that the service flow\\nhas been active.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosServiceFlowPHSUnknowns = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 5), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowPHSUnknowns.setDescription(\"For incoming upstream CMTS service flows, this\\nobject counts the number of packets received\\nwith an unknown payload header suppression index.\\nThe service flow is identified by the SID for which\\nthe packet was scheduled.\\n\\nOn a CM, only this object's instance for the primary\\ndownstream service flow counts packets received with\\nan unknown payload header suppression index. 
All\\nother downstream service flows on CM report this\\nobjects value as 0.\\n\\nAll outgoing service flows report this object's\\nvalue as 0.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosServiceFlowPolicedDropPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 6), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowPolicedDropPkts.setDescription(\"For outgoing service flows, this object counts the\\nnumber of Packet Data PDUs classified to this\\nservice flow dropped due to:\\n (1) implementation-dependent excessive delay\\n while enforcing the Maximum Sustained\\n Traffic Rate; or\\n (2) UGS packets dropped due to exceeding the\\n Unsolicited Grant Size with a\\n Request/Transmission policy that requires\\n such packets to be dropped.\\n\\nClassified packets dropped due to other reasons\\n\\n\\n\\nmust be counted in ifOutDiscards for the interface\\nof this service flow. This object reports 0 for\\nincoming service flows.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosServiceFlowPolicedDelayPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 7), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowPolicedDelayPkts.setDescription(\"This object counts only outgoing packets delayed in\\norder to maintain the Maximum Sustained Traffic\\nRate. This object will always report a value of 0\\nfor UGS flows because the Maximum Sustained Traffic\\nRate does not apply. This object is 0 for incoming\\nservice flows.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosUpstreamStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 5))\nif mibBuilder.loadTexts: docsIetfQosUpstreamStatsTable.setDescription(\"This table describes statistics associated with\\nupstream service flows. All counted frames must\\nbe received without a Frame Check Sequence (FCS)\\nerror.\")\ndocsIetfQosUpstreamStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 5, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosSID\"))\nif mibBuilder.loadTexts: docsIetfQosUpstreamStatsEntry.setDescription(\"Describes a set of upstream service flow\\nstatistics. 
An entry in the table exists for each\\nupstream Service Flow in a managed device.\\nThe ifIndex is an ifType of\\ndocsCableMaclayer(127).\")\ndocsIetfQosSID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 16383))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosSID.setDescription(\"Identifies a service ID for an admitted or active\\nupstream service flow.\")\ndocsIetfQosUpstreamFragments = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosUpstreamFragments.setDescription(\"The number of fragmentation headers received on an\\nupstream service flow, regardless of whether\\nthe fragment was correctly reassembled into a\\nvalid packet.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosUpstreamFragDiscards = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 3), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosUpstreamFragDiscards.setDescription(\"The number of upstream fragments discarded and not\\nassembled into a valid upstream packet.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosUpstreamConcatBursts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 4), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosUpstreamConcatBursts.setDescription(\"The number of concatenation headers received on an\\nupstream service flow.\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDynamicServiceStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 6))\nif mibBuilder.loadTexts: docsIetfQosDynamicServiceStatsTable.setDescription(\"This table describes statistics associated with the\\nDynamic Service Flows in a managed device.\")\ndocsIetfQosDynamicServiceStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 6, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosIfDirection\"))\nif mibBuilder.loadTexts: docsIetfQosDynamicServiceStatsEntry.setDescription(\"Describes a set of dynamic service flow statistics.\\nTwo entries exist for each DOCSIS MAC layer\\ninterface for the upstream and downstream\\ndirection. On the CMTS, the downstream direction\\nrow indicates messages transmitted or transactions\\noriginated by the CMTS. The upstream direction row\\nindicates messages received or transaction\\noriginated by the CM. On the CM, the downstream\\ndirection row indicates messages received or\\ntransactions originated by the CMTS. 
The upstream\\ndirection row indicates messages transmitted by\\nthe CM or transactions originated by the CM.\\nThe ifIndex is an ifType of\\ndocsCableMaclayer(127).\")\ndocsIetfQosIfDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 1), DocsIetfQosRfMacIfDirection()).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosIfDirection.setDescription(\"The direction of interface.\")\ndocsIetfQosDSAReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSAReqs.setDescription(\"The number of Dynamic Service Addition Requests,\\nincluding retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDSARsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 3), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSARsps.setDescription(\"The number of Dynamic Service Addition Responses,\\nincluding retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\n\\n\\n\\nindexes this object.\")\ndocsIetfQosDSAAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 4), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSAAcks.setDescription(\"The number of Dynamic Service Addition\\nAcknowledgements, including retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDSCReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 5), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSCReqs.setDescription(\"The number of Dynamic Service Change Requests,\\nincluding retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDSCRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 6), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSCRsps.setDescription(\"The number of Dynamic Service Change Responses,\\nincluding retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDSCAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 7), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSCAcks.setDescription(\"The number of Dynamic Service Change\\nAcknowledgements, including retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\n\\n\\n\\nindexes this object.\")\ndocsIetfQosDSDReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 8), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSDReqs.setDescription(\"The number of Dynamic Service Delete Requests,\\nincluding retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDSDRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 9), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSDRsps.setDescription(\"The number of Dynamic Service Delete Responses,\\nincluding retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDynamicAdds = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 10), Counter32()).setMaxAccess(\"readonly\")\nif 
mibBuilder.loadTexts: docsIetfQosDynamicAdds.setDescription(\"The number of successful Dynamic Service Addition\\ntransactions.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDynamicAddFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 11), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDynamicAddFails.setDescription(\"The number of failed Dynamic Service Addition\\ntransactions.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\n\\n\\n\\nindexes this object.\")\ndocsIetfQosDynamicChanges = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 12), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDynamicChanges.setDescription(\"The number of successful Dynamic Service Change\\ntransactions.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDynamicChangeFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 13), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDynamicChangeFails.setDescription(\"The number of failed Dynamic Service Change\\ntransactions.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDynamicDeletes = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 14), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDynamicDeletes.setDescription(\"The number of successful Dynamic Service Delete\\ntransactions.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDynamicDeleteFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 15), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDynamicDeleteFails.setDescription(\"The number of failed Dynamic Service Delete\\ntransactions.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\n\\n\\n\\nindexes this object.\")\ndocsIetfQosDCCReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 16), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDCCReqs.setDescription(\"The number of Dynamic Channel Change Request\\nmessages traversing an interface. This count\\nis nonzero only on downstream direction rows.\\nThis count should include the number of retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex\\nthat indexes this object.\")\ndocsIetfQosDCCRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 17), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDCCRsps.setDescription(\"The number of Dynamic Channel Change Response\\nmessages traversing an interface. This count is\\nnonzero only on upstream direction rows. This count\\nshould include the number of retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDCCAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 18), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDCCAcks.setDescription(\"The number of Dynamic Channel Change Acknowledgement\\nmessages traversing an interface. 
This count\\nis nonzero only on downstream direction rows.\\nThis count should include the number of retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDCCs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 19), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDCCs.setDescription(\"The number of successful Dynamic Channel Change\\ntransactions. This count is nonzero only on\\ndownstream direction rows.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDCCFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 20), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDCCFails.setDescription(\"The number of failed Dynamic Channel Change\\ntransactions. This count is nonzero only on\\ndownstream direction rows.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosServiceFlowLogTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 7))\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogTable.setDescription(\"This table contains a log of the disconnected\\nService Flows in a managed device.\")\ndocsIetfQosServiceFlowLogEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 7, 1)).setIndexNames((0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogIndex\"))\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogEntry.setDescription(\"The information regarding a single disconnected\\nservice flow.\")\ndocsIetfQosServiceFlowLogIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogIndex.setDescription(\"Unique index for a logged service flow.\")\ndocsIetfQosServiceFlowLogIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 2), InterfaceIndex()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogIfIndex.setDescription(\"The ifIndex of ifType docsCableMaclayer(127)\\non the CMTS where the service flow was present.\")\ndocsIetfQosServiceFlowLogSFID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogSFID.setDescription(\"The index assigned to the service flow by the CMTS.\")\ndocsIetfQosServiceFlowLogCmMac = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 4), MacAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogCmMac.setDescription(\"The MAC address for the cable modem associated with\\nthe service flow.\")\ndocsIetfQosServiceFlowLogPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 5), Counter64()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogPkts.setDescription(\"The number of packets counted on this service flow\\nafter payload header suppression.\")\ndocsIetfQosServiceFlowLogOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 6), Counter64()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogOctets.setDescription(\"The number of octets counted on this service flow\\nafter payload header suppression.\")\ndocsIetfQosServiceFlowLogTimeDeleted = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 7), TimeStamp()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: 
docsIetfQosServiceFlowLogTimeDeleted.setDescription(\"The value of sysUpTime when the service flow\\nwas deleted.\")\ndocsIetfQosServiceFlowLogTimeCreated = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 8), TimeStamp()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogTimeCreated.setDescription(\"The value of sysUpTime when the service flow\\nwas created.\")\ndocsIetfQosServiceFlowLogTimeActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 9), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogTimeActive.setDescription(\"The total time that the service flow was active.\")\ndocsIetfQosServiceFlowLogDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 10), DocsIetfQosRfMacIfDirection()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogDirection.setDescription(\"The value of docsIetfQosServiceFlowDirection\\nfor the service flow.\")\ndocsIetfQosServiceFlowLogPrimary = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 11), TruthValue()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogPrimary.setDescription(\"The value of docsIetfQosServiceFlowPrimary for the\\nservice flow.\")\ndocsIetfQosServiceFlowLogServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 12), SnmpAdminString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogServiceClassName.setDescription(\"The value of docsIetfQosParamSetServiceClassName for\\nthe provisioned QOS Parameter Set of the\\nservice flow.\")\ndocsIetfQosServiceFlowLogPolicedDropPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 13), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogPolicedDropPkts.setDescription(\"The final value of\\ndocsIetfQosServiceFlowPolicedDropPkts for the\\nservice flow.\")\ndocsIetfQosServiceFlowLogPolicedDelayPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 14), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogPolicedDelayPkts.setDescription(\"The final value of\\ndocsIetfQosServiceFlowPolicedDelayPkts for the\\nservice flow.\")\ndocsIetfQosServiceFlowLogControl = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 15), Integer().subtype(subtypeSpec=SingleValueConstraint(1,6,)).subtype(namedValues=NamedValues((\"active\", 1), (\"destroy\", 6), ))).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogControl.setDescription(\"Setting this object to the value destroy(6) removes\\nthis entry from the table.\\n\\nReading this object returns the value active(1).\")\ndocsIetfQosServiceClassTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 8))\nif mibBuilder.loadTexts: docsIetfQosServiceClassTable.setDescription(\"This table describes the set of DOCSIS-QOS\\nService Classes in a CMTS.\")\ndocsIetfQosServiceClassEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 8, 1)).setIndexNames((0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassName\"))\nif mibBuilder.loadTexts: docsIetfQosServiceClassEntry.setDescription(\"A provisioned service class on a CMTS.\\nEach entry defines a template for certain\\nDOCSIS QOS Parameter Set values. When a CM\\ncreates or modifies an Admitted QOS Parameter Set\\nfor a Service Flow, it may reference a Service Class\\nName instead of providing explicit QOS Parameter\\nSet values. 
In this case, the CMTS populates\\nthe QOS Parameter Set with the applicable\\ncorresponding values from the named Service Class.\\nSubsequent changes to a Service Class row do not\\naffect the QOS Parameter Set values of any service\\nflows already admitted.\\n\\nA service class template applies to only\\na single direction, as indicated in the\\ndocsIetfQosServiceClassDirection object.\")\ndocsIetfQosServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassName.setDescription(\"Service Class Name. DOCSIS specifies that the\\nmaximum size is 16 ASCII characters including\\na terminating zero. The terminating zero is not\\nrepresented in this SnmpAdminString syntax object.\")\ndocsIetfQosServiceClassStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 2), RowStatus()).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassStatus.setDescription(\"Used to create or delete rows in this table.\\nThere is no restriction on the ability to change\\nvalues in this row while the row is active.\\nInactive rows need not be timed out.\")\ndocsIetfQosServiceClassPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7)).clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassPriority.setDescription(\"Template for docsIetfQosParamSetPriority.\")\ndocsIetfQosServiceClassMaxTrafficRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 4), DocsIetfQosBitRate().clone('0')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassMaxTrafficRate.setDescription(\"Template for docsIetfQosParamSetMaxTrafficRate.\")\ndocsIetfQosServiceClassMaxTrafficBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 5), Unsigned32().clone(3044)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassMaxTrafficBurst.setDescription(\"Template for docsIetfQosParamSetMaxTrafficBurst.\")\ndocsIetfQosServiceClassMinReservedRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 6), DocsIetfQosBitRate().clone('0')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassMinReservedRate.setDescription(\"Template for docsIetfQosParamSEtMinReservedRate.\")\ndocsIetfQosServiceClassMinReservedPkt = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassMinReservedPkt.setDescription(\"Template for docsIetfQosParamSetMinReservedPkt.\")\ndocsIetfQosServiceClassMaxConcatBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(1522)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassMaxConcatBurst.setDescription(\"Template for docsIetfQosParamSetMaxConcatBurst.\")\ndocsIetfQosServiceClassNomPollInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 9), Unsigned32().clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassNomPollInterval.setDescription(\"Template for docsIetfQosParamSetNomPollInterval.\")\ndocsIetfQosServiceClassTolPollJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 10), Unsigned32().clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: 
docsIetfQosServiceClassTolPollJitter.setDescription(\"Template for docsIetfQosParamSetTolPollJitter.\")\ndocsIetfQosServiceClassUnsolicitGrantSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassUnsolicitGrantSize.setDescription(\"Template for docsIetfQosParamSetUnsolicitGrantSize.\")\ndocsIetfQosServiceClassNomGrantInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 12), Unsigned32().clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassNomGrantInterval.setDescription(\"Template for docsIetfQosParamSetNomGrantInterval.\")\ndocsIetfQosServiceClassTolGrantJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 13), Unsigned32().clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassTolGrantJitter.setDescription(\"Template for docsIetfQosParamSetTolGrantJitter.\")\ndocsIetfQosServiceClassGrantsPerInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127)).clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassGrantsPerInterval.setDescription(\"Template for docsIetfQosParamSetGrantsPerInterval.\")\ndocsIetfQosServiceClassMaxLatency = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 15), Unsigned32().clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassMaxLatency.setDescription(\"Template for docsIetfQosParamSetClassMaxLatency.\")\ndocsIetfQosServiceClassActiveTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassActiveTimeout.setDescription(\"Template for docsIetfQosParamSetActiveTimeout.\")\ndocsIetfQosServiceClassAdmittedTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(200)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassAdmittedTimeout.setDescription(\"Template for docsIetfQosParamSetAdmittedTimeout.\")\ndocsIetfQosServiceClassSchedulingType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 18), DocsIetfQosSchedulingType().clone('bestEffort')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassSchedulingType.setDescription(\"Template for docsIetfQosParamSetSchedulingType.\")\ndocsIetfQosServiceClassRequestPolicy = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4).clone(hexValue='00000000')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassRequestPolicy.setDescription(\"Template for docsIetfQosParamSetRequestPolicyOct.\")\ndocsIetfQosServiceClassTosAndMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 20), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassTosAndMask.setDescription(\"Template for docsIetfQosParamSetTosAndMask.\\nThe IP TOS octet as originally defined in RFC 791\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). 
Network operators SHOULD avoid\\nspecifying values of\\ndocsIetfQosServiceClassTosAndMask and\\ndocsIetfQosServiceClassTosOrMask that would result\\nin the modification of the ECN bits.\\n\\n\\n\\nIn particular, operators should not use values of\\ndocsIetfQosServiceClassTosAndMask that have either\\nof the least-significant two bits set to 0.\\nSimilarly,operators should not use values of\\ndocsIetfQosServiceClassTosOrMask that have either\\nof the least-significant two bits set to 1.\")\ndocsIetfQosServiceClassTosOrMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 21), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassTosOrMask.setDescription(\"Template for docsIetfQosParamSetTosOrMask.\\nThe IP TOS octet as originally defined in RFC 791\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). Network operators SHOULD avoid\\nspecifying values of\\ndocsIetfQosServiceClassTosAndMask and\\ndocsIetfQosServiceClassTosOrMask that would result\\nin the modification of the ECN bits.\\n\\nIn particular, operators should not use values of\\ndocsIetfQosServiceClassTosAndMask that have either\\nof the least-significant two bits set to 0.\\nSimilarly, operators should not use values of\\ndocsIetfQosServiceClassTosOrMask that have either\\nof the least-significant two bits set to 1.\")\ndocsIetfQosServiceClassDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 22), DocsIetfQosRfMacIfDirection().clone('upstream')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassDirection.setDescription(\"Specifies whether the service class template\\napplies to upstream or downstream service flows.\")\ndocsIetfQosServiceClassStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 23), StorageType().clone('nonVolatile')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassStorageType.setDescription(\"This object defines whether this row is kept in\\nvolatile storage and lost upon reboot or whether\\nit is backed up by non-volatile or permanent\\nstorage. 'permanent' entries need not allow\\nwritable access to any object.\")\ndocsIetfQosServiceClassDSCPOverwrite = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 24), DscpOrAny().clone('-1')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassDSCPOverwrite.setDescription(\"This object allows the overwrite of the DSCP\\nfield per RFC 3260.\\n\\nIf this object is -1, then the corresponding entry's\\ndocsIetfQosServiceClassTosAndMask value MUST be\\n'FF'H and docsIetfQosServiceClassTosOrMask MUST be\\n'00'H. Otherwise, this object is in the range of\\n0..63, and the corresponding entry's\\ndocsIetfQosServiceClassTosAndMask value MUST be\\n'03'H and the docsIetfQosServiceClassTosOrMask MUST\\nbe this object's value shifted left by two bit\\npositions.\")\ndocsIetfQosServiceClassPolicyTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 9))\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyTable.setDescription(\"This table describes the set of DOCSIS-QOS\\nService Class Policies.\\n\\nThis table is an adjunct to the\\n\\n\\n\\ndocsDevFilterPolicy table. 
Entries in the\\ndocsDevFilterPolicy table can point to\\nspecific rows in this table.\\n\\nThis table permits mapping a packet to a service\\nclass name of an active service flow so long as\\na classifier does not exist at a higher\\npriority.\")\ndocsIetfQosServiceClassPolicyEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 9, 1)).setIndexNames((0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassPolicyIndex\"))\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyEntry.setDescription(\"A service class name policy entry.\")\ndocsIetfQosServiceClassPolicyIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyIndex.setDescription(\"Index value to identify an entry in\\nthis table uniquely.\")\ndocsIetfQosServiceClassPolicyName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 2), SnmpAdminString()).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyName.setDescription(\"Service Class Name to identify the name of the\\nservice class flow to which the packet should be\\ndirected.\")\ndocsIetfQosServiceClassPolicyRulePriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyRulePriority.setDescription(\"Service Class Policy rule priority for the\\nentry.\")\ndocsIetfQosServiceClassPolicyStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 4), RowStatus()).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyStatus.setDescription(\"Used to create or delete rows in this table.\\nThis object should not be deleted if it is\\nreferenced by an entry in docsDevFilterPolicy.\\nThe reference should be deleted first.\\nThere is no restriction on the ability\\nto change values in this row while the row is\\nactive. Inactive rows need not be timed out.\")\ndocsIetfQosServiceClassPolicyStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 5), StorageType().clone('nonVolatile')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyStorageType.setDescription(\"This object defines whether this row is kept in\\nvolatile storage and lost upon reboot or whether\\nit is backed up by non-volatile or permanent\\nstorage. 
'permanent' entries need not allow\\nwritable access to any object.\")\ndocsIetfQosPHSTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 10))\nif mibBuilder.loadTexts: docsIetfQosPHSTable.setDescription(\"This table describes the set of payload header\\nsuppression entries.\")\ndocsIetfQosPHSEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 10, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowId\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassId\"))\nif mibBuilder.loadTexts: docsIetfQosPHSEntry.setDescription(\"A payload header suppression entry.\\n\\nThe ifIndex is an ifType of docsCableMaclayer(127).\\nThe index docsIetfQosServiceFlowId selects one\\nservice flow from the cable MAC layer interface.\\nThe docsIetfQosPktClassId index matches an\\nindex of the docsIetfQosPktClassTable.\")\ndocsIetfQosPHSField = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPHSField.setDescription(\"Payload header suppression field defines the\\nbytes of the header that must be\\nsuppressed/restored by the sending/receiving\\ndevice.\\n\\nThe number of octets in this object should be\\nthe same as the value of docsIetfQosPHSSize.\")\ndocsIetfQosPHSMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPHSMask.setDescription(\"Payload header suppression mask defines the\\nbit mask that is used in combination with the\\ndocsIetfQosPHSField. It defines which bytes in\\nthe header must be suppressed/restored by the\\nsending or receiving device.\\n\\nEach bit of this bit mask corresponds to a byte\\nin the docsIetfQosPHSField, with the least\\n\\n\\n\\nsignificant bit corresponding to the first byte\\nof the docsIetfQosPHSField.\\n\\nEach bit of the bit mask specifies whether\\nthe corresponding byte should be suppressed\\nin the packet. A bit value of '1' indicates that\\nthe byte should be suppressed by the sending\\ndevice and restored by the receiving device.\\nA bit value of '0' indicates that\\nthe byte should not be suppressed by the sending\\ndevice or restored by the receiving device.\\n\\nIf the bit mask does not contain a bit for each\\nbyte in the docsIetfQosPHSField, then the bit mask\\nis extended with bit values of '1' to be the\\nnecessary length.\")\ndocsIetfQosPHSSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPHSSize.setDescription(\"Payload header suppression size specifies the\\nnumber of bytes in the header to be suppressed\\nand restored.\\n\\nThe value of this object must match the number\\nof bytes in the docsIetfQosPHSField.\")\ndocsIetfQosPHSVerify = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 4), TruthValue()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPHSVerify.setDescription(\"Payload header suppression verification value. 
If\\n'true', the sender must verify docsIetfQosPHSField\\nis the same as what is contained in the packet\\nto be suppressed.\")\ndocsIetfQosPHSIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPHSIndex.setDescription(\"Payload header suppression index uniquely\\n\\n\\n\\nreferences the PHS rule for a given service flow.\")\ndocsIetfQosCmtsMacToSrvFlowTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 11))\nif mibBuilder.loadTexts: docsIetfQosCmtsMacToSrvFlowTable.setDescription(\"This table provides for referencing the service\\nflows associated with a particular cable modem.\\nThis allows indexing into other docsIetfQos\\ntables that are indexed by docsIetfQosServiceFlowId\\nand ifIndex.\")\ndocsIetfQosCmtsMacToSrvFlowEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 11, 1)).setIndexNames((0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosCmtsCmMac\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosCmtsServiceFlowId\"))\nif mibBuilder.loadTexts: docsIetfQosCmtsMacToSrvFlowEntry.setDescription(\"An entry is created by CMTS for each service flow\\nconnected to this CMTS.\")\ndocsIetfQosCmtsCmMac = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 1), MacAddress()).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosCmtsCmMac.setDescription(\"The MAC address for the referenced CM.\")\ndocsIetfQosCmtsServiceFlowId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosCmtsServiceFlowId.setDescription(\"An index assigned to a service flow by CMTS.\")\ndocsIetfQosCmtsIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 3), InterfaceIndex()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosCmtsIfIndex.setDescription(\"The ifIndex of ifType docsCableMacLayer(127)\\non the CMTS that is connected to the Cable Modem.\")\ndocsIetfQosConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2))\ndocsIetfQosGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2, 1))\ndocsIetfQosCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2, 2))\n\n# Augmentions\n\n# Groups\n\ndocsIetfQosBaseGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 1)).setObjects(*((\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassUserPriLow\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassSourcePortStart\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassEnetProtocol\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassIpTosMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassInetDestAddr\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowTimeActive\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowTimeCreated\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassStateActive\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSAReqs\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSCAcks\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassInetDestMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDCCFails\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassDestPortStart\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassInetSourceMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSDRsps\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDCCReqs\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDCCs\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassPriority\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPHSMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPHSVerify\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPHSIndex\"), (\"DOCS-IETF-QOS-MIB\", 
\"docsIetfQosDSARsps\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassEnetProtocolType\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassIpTosLow\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassInetSourceAddr\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPHSField\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSCReqs\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDynamicChangeFails\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSDReqs\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassDestPortEnd\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDynamicAdds\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassVlanId\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDynamicDeleteFails\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDynamicDeletes\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassIpProtocol\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowSID\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowPHSUnknowns\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowPrimary\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPHSSize\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassSourcePortEnd\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSAAcks\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowOctets\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDCCRsps\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassUserPriHigh\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowDirection\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSCRsps\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowPolicedDelayPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowPolicedDropPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassIpTosHigh\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassSourceMacAddr\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassDestMacMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassDirection\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassDestMacAddr\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassBitMap\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDynamicAddFails\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDCCAcks\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassInetAddressType\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDynamicChanges\"), ) )\nif mibBuilder.loadTexts: docsIetfQosBaseGroup.setDescription(\"Group of objects implemented in both Cable Modems and\\nCable Modem Termination Systems.\")\ndocsIetfQosParamSetGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 2)).setObjects(*((\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetMaxConcatBurst\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetGrantsPerInterval\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetMaxTrafficRate\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetActiveTimeout\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetMinReservedPkt\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetPriority\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetRequestPolicyOct\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetServiceClassName\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetTosOrMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetMinReservedRate\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetMaxTrafficBurst\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetBitMap\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetSchedulingType\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetTolPollJitter\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetTosAndMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetMaxLatency\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetTolGrantJitter\"), 
(\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetNomPollInterval\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetNomGrantInterval\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetAdmittedTimeout\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetUnsolicitGrantSize\"), ) )\nif mibBuilder.loadTexts: docsIetfQosParamSetGroup.setDescription(\"Group of objects implemented in both Cable Modems and\\nCable Modem Termination Systems for QOS Parameter Sets.\")\ndocsIetfQosCmtsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 3)).setObjects(*((\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogSFID\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosUpstreamFragDiscards\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogPolicedDropPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogControl\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogTimeCreated\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogOctets\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosUpstreamConcatBursts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogCmMac\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogPrimary\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosCmtsIfIndex\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosUpstreamFragments\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogTimeActive\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogIfIndex\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogDirection\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogPolicedDelayPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogServiceClassName\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogTimeDeleted\"), ) )\nif mibBuilder.loadTexts: docsIetfQosCmtsGroup.setDescription(\"Group of objects implemented only in the CMTS.\")\ndocsIetfQosSrvClassPolicyGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 4)).setObjects(*((\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassPolicyStorageType\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassPolicyName\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassPolicyRulePriority\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassPolicyStatus\"), ) )\nif mibBuilder.loadTexts: docsIetfQosSrvClassPolicyGroup.setDescription(\"Group of objects implemented in both Cable Modems and\\nCable Modem Termination Systems when supporting policy-based\\nservice flows.\")\ndocsIetfQosServiceClassGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 5)).setObjects(*((\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassSchedulingType\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassNomGrantInterval\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassTolGrantJitter\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassDSCPOverwrite\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassGrantsPerInterval\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassDirection\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassMaxTrafficBurst\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassPriority\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassMaxTrafficRate\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassStorageType\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassTolPollJitter\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassTosOrMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassStatus\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassMaxConcatBurst\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassTosAndMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassUnsolicitGrantSize\"), 
(\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassNomPollInterval\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassRequestPolicy\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassMinReservedRate\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassActiveTimeout\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassMinReservedPkt\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassAdmittedTimeout\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassMaxLatency\"), ) )\nif mibBuilder.loadTexts: docsIetfQosServiceClassGroup.setDescription(\"Group of objects implemented only in Cable Modem\\nTermination Systems when supporting expansion of Service\\nClass Names in a QOS Parameter Set\")\n\n# Compliances\n\ndocsIetfQosCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 127, 2, 2, 1)).setObjects(*((\"DOCS-IETF-QOS-MIB\", \"docsIetfQosCmtsGroup\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassGroup\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosSrvClassPolicyGroup\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosBaseGroup\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetGroup\"), ) )\nif mibBuilder.loadTexts: docsIetfQosCompliance.setDescription(\"The compliance statement for MCNS Cable Modems and\\nCable Modem Termination Systems that implement DOCSIS\\nService Flows.\")\n\n# Exports\n\n# Module identity\nmibBuilder.exportSymbols(\"DOCS-IETF-QOS-MIB\", PYSNMP_MODULE_ID=docsIetfQosMIB)\n\n# Types\nmibBuilder.exportSymbols(\"DOCS-IETF-QOS-MIB\", DocsIetfQosBitRate=DocsIetfQosBitRate, DocsIetfQosRfMacIfDirection=DocsIetfQosRfMacIfDirection, DocsIetfQosSchedulingType=DocsIetfQosSchedulingType)\n\n# Objects\nmibBuilder.exportSymbols(\"DOCS-IETF-QOS-MIB\", docsIetfQosMIB=docsIetfQosMIB, docsIetfQosNotifications=docsIetfQosNotifications, docsIetfQosMIBObjects=docsIetfQosMIBObjects, docsIetfQosPktClassTable=docsIetfQosPktClassTable, docsIetfQosPktClassEntry=docsIetfQosPktClassEntry, docsIetfQosPktClassId=docsIetfQosPktClassId, docsIetfQosPktClassDirection=docsIetfQosPktClassDirection, docsIetfQosPktClassPriority=docsIetfQosPktClassPriority, docsIetfQosPktClassIpTosLow=docsIetfQosPktClassIpTosLow, docsIetfQosPktClassIpTosHigh=docsIetfQosPktClassIpTosHigh, docsIetfQosPktClassIpTosMask=docsIetfQosPktClassIpTosMask, docsIetfQosPktClassIpProtocol=docsIetfQosPktClassIpProtocol, docsIetfQosPktClassInetAddressType=docsIetfQosPktClassInetAddressType, docsIetfQosPktClassInetSourceAddr=docsIetfQosPktClassInetSourceAddr, docsIetfQosPktClassInetSourceMask=docsIetfQosPktClassInetSourceMask, docsIetfQosPktClassInetDestAddr=docsIetfQosPktClassInetDestAddr, docsIetfQosPktClassInetDestMask=docsIetfQosPktClassInetDestMask, docsIetfQosPktClassSourcePortStart=docsIetfQosPktClassSourcePortStart, docsIetfQosPktClassSourcePortEnd=docsIetfQosPktClassSourcePortEnd, docsIetfQosPktClassDestPortStart=docsIetfQosPktClassDestPortStart, docsIetfQosPktClassDestPortEnd=docsIetfQosPktClassDestPortEnd, docsIetfQosPktClassDestMacAddr=docsIetfQosPktClassDestMacAddr, docsIetfQosPktClassDestMacMask=docsIetfQosPktClassDestMacMask, docsIetfQosPktClassSourceMacAddr=docsIetfQosPktClassSourceMacAddr, docsIetfQosPktClassEnetProtocolType=docsIetfQosPktClassEnetProtocolType, docsIetfQosPktClassEnetProtocol=docsIetfQosPktClassEnetProtocol, docsIetfQosPktClassUserPriLow=docsIetfQosPktClassUserPriLow, docsIetfQosPktClassUserPriHigh=docsIetfQosPktClassUserPriHigh, docsIetfQosPktClassVlanId=docsIetfQosPktClassVlanId, docsIetfQosPktClassStateActive=docsIetfQosPktClassStateActive, docsIetfQosPktClassPkts=docsIetfQosPktClassPkts, 
docsIetfQosPktClassBitMap=docsIetfQosPktClassBitMap, docsIetfQosParamSetTable=docsIetfQosParamSetTable, docsIetfQosParamSetEntry=docsIetfQosParamSetEntry, docsIetfQosParamSetServiceClassName=docsIetfQosParamSetServiceClassName, docsIetfQosParamSetPriority=docsIetfQosParamSetPriority, docsIetfQosParamSetMaxTrafficRate=docsIetfQosParamSetMaxTrafficRate, docsIetfQosParamSetMaxTrafficBurst=docsIetfQosParamSetMaxTrafficBurst, docsIetfQosParamSetMinReservedRate=docsIetfQosParamSetMinReservedRate, docsIetfQosParamSetMinReservedPkt=docsIetfQosParamSetMinReservedPkt, docsIetfQosParamSetActiveTimeout=docsIetfQosParamSetActiveTimeout, docsIetfQosParamSetAdmittedTimeout=docsIetfQosParamSetAdmittedTimeout, docsIetfQosParamSetMaxConcatBurst=docsIetfQosParamSetMaxConcatBurst, docsIetfQosParamSetSchedulingType=docsIetfQosParamSetSchedulingType, docsIetfQosParamSetNomPollInterval=docsIetfQosParamSetNomPollInterval, docsIetfQosParamSetTolPollJitter=docsIetfQosParamSetTolPollJitter, docsIetfQosParamSetUnsolicitGrantSize=docsIetfQosParamSetUnsolicitGrantSize, docsIetfQosParamSetNomGrantInterval=docsIetfQosParamSetNomGrantInterval, docsIetfQosParamSetTolGrantJitter=docsIetfQosParamSetTolGrantJitter, docsIetfQosParamSetGrantsPerInterval=docsIetfQosParamSetGrantsPerInterval, docsIetfQosParamSetTosAndMask=docsIetfQosParamSetTosAndMask, docsIetfQosParamSetTosOrMask=docsIetfQosParamSetTosOrMask, docsIetfQosParamSetMaxLatency=docsIetfQosParamSetMaxLatency, docsIetfQosParamSetType=docsIetfQosParamSetType, docsIetfQosParamSetRequestPolicyOct=docsIetfQosParamSetRequestPolicyOct, docsIetfQosParamSetBitMap=docsIetfQosParamSetBitMap, docsIetfQosServiceFlowTable=docsIetfQosServiceFlowTable, docsIetfQosServiceFlowEntry=docsIetfQosServiceFlowEntry, docsIetfQosServiceFlowId=docsIetfQosServiceFlowId, docsIetfQosServiceFlowSID=docsIetfQosServiceFlowSID, docsIetfQosServiceFlowDirection=docsIetfQosServiceFlowDirection, docsIetfQosServiceFlowPrimary=docsIetfQosServiceFlowPrimary, docsIetfQosServiceFlowStatsTable=docsIetfQosServiceFlowStatsTable, docsIetfQosServiceFlowStatsEntry=docsIetfQosServiceFlowStatsEntry, docsIetfQosServiceFlowPkts=docsIetfQosServiceFlowPkts, docsIetfQosServiceFlowOctets=docsIetfQosServiceFlowOctets, docsIetfQosServiceFlowTimeCreated=docsIetfQosServiceFlowTimeCreated, docsIetfQosServiceFlowTimeActive=docsIetfQosServiceFlowTimeActive, docsIetfQosServiceFlowPHSUnknowns=docsIetfQosServiceFlowPHSUnknowns, docsIetfQosServiceFlowPolicedDropPkts=docsIetfQosServiceFlowPolicedDropPkts, docsIetfQosServiceFlowPolicedDelayPkts=docsIetfQosServiceFlowPolicedDelayPkts, docsIetfQosUpstreamStatsTable=docsIetfQosUpstreamStatsTable, docsIetfQosUpstreamStatsEntry=docsIetfQosUpstreamStatsEntry, docsIetfQosSID=docsIetfQosSID, docsIetfQosUpstreamFragments=docsIetfQosUpstreamFragments, docsIetfQosUpstreamFragDiscards=docsIetfQosUpstreamFragDiscards, docsIetfQosUpstreamConcatBursts=docsIetfQosUpstreamConcatBursts, docsIetfQosDynamicServiceStatsTable=docsIetfQosDynamicServiceStatsTable, docsIetfQosDynamicServiceStatsEntry=docsIetfQosDynamicServiceStatsEntry, docsIetfQosIfDirection=docsIetfQosIfDirection, docsIetfQosDSAReqs=docsIetfQosDSAReqs, docsIetfQosDSARsps=docsIetfQosDSARsps, docsIetfQosDSAAcks=docsIetfQosDSAAcks, docsIetfQosDSCReqs=docsIetfQosDSCReqs, docsIetfQosDSCRsps=docsIetfQosDSCRsps, docsIetfQosDSCAcks=docsIetfQosDSCAcks, docsIetfQosDSDReqs=docsIetfQosDSDReqs, docsIetfQosDSDRsps=docsIetfQosDSDRsps, docsIetfQosDynamicAdds=docsIetfQosDynamicAdds, docsIetfQosDynamicAddFails=docsIetfQosDynamicAddFails, 
docsIetfQosDynamicChanges=docsIetfQosDynamicChanges, docsIetfQosDynamicChangeFails=docsIetfQosDynamicChangeFails, docsIetfQosDynamicDeletes=docsIetfQosDynamicDeletes, docsIetfQosDynamicDeleteFails=docsIetfQosDynamicDeleteFails, docsIetfQosDCCReqs=docsIetfQosDCCReqs, docsIetfQosDCCRsps=docsIetfQosDCCRsps, docsIetfQosDCCAcks=docsIetfQosDCCAcks, docsIetfQosDCCs=docsIetfQosDCCs, docsIetfQosDCCFails=docsIetfQosDCCFails, docsIetfQosServiceFlowLogTable=docsIetfQosServiceFlowLogTable, docsIetfQosServiceFlowLogEntry=docsIetfQosServiceFlowLogEntry, docsIetfQosServiceFlowLogIndex=docsIetfQosServiceFlowLogIndex, docsIetfQosServiceFlowLogIfIndex=docsIetfQosServiceFlowLogIfIndex, docsIetfQosServiceFlowLogSFID=docsIetfQosServiceFlowLogSFID, docsIetfQosServiceFlowLogCmMac=docsIetfQosServiceFlowLogCmMac, docsIetfQosServiceFlowLogPkts=docsIetfQosServiceFlowLogPkts, docsIetfQosServiceFlowLogOctets=docsIetfQosServiceFlowLogOctets, docsIetfQosServiceFlowLogTimeDeleted=docsIetfQosServiceFlowLogTimeDeleted, docsIetfQosServiceFlowLogTimeCreated=docsIetfQosServiceFlowLogTimeCreated, docsIetfQosServiceFlowLogTimeActive=docsIetfQosServiceFlowLogTimeActive, docsIetfQosServiceFlowLogDirection=docsIetfQosServiceFlowLogDirection, docsIetfQosServiceFlowLogPrimary=docsIetfQosServiceFlowLogPrimary, docsIetfQosServiceFlowLogServiceClassName=docsIetfQosServiceFlowLogServiceClassName, docsIetfQosServiceFlowLogPolicedDropPkts=docsIetfQosServiceFlowLogPolicedDropPkts, docsIetfQosServiceFlowLogPolicedDelayPkts=docsIetfQosServiceFlowLogPolicedDelayPkts, docsIetfQosServiceFlowLogControl=docsIetfQosServiceFlowLogControl, docsIetfQosServiceClassTable=docsIetfQosServiceClassTable, docsIetfQosServiceClassEntry=docsIetfQosServiceClassEntry, docsIetfQosServiceClassName=docsIetfQosServiceClassName, docsIetfQosServiceClassStatus=docsIetfQosServiceClassStatus, docsIetfQosServiceClassPriority=docsIetfQosServiceClassPriority, docsIetfQosServiceClassMaxTrafficRate=docsIetfQosServiceClassMaxTrafficRate, docsIetfQosServiceClassMaxTrafficBurst=docsIetfQosServiceClassMaxTrafficBurst, docsIetfQosServiceClassMinReservedRate=docsIetfQosServiceClassMinReservedRate, docsIetfQosServiceClassMinReservedPkt=docsIetfQosServiceClassMinReservedPkt, docsIetfQosServiceClassMaxConcatBurst=docsIetfQosServiceClassMaxConcatBurst)\nmibBuilder.exportSymbols(\"DOCS-IETF-QOS-MIB\", docsIetfQosServiceClassNomPollInterval=docsIetfQosServiceClassNomPollInterval, docsIetfQosServiceClassTolPollJitter=docsIetfQosServiceClassTolPollJitter, docsIetfQosServiceClassUnsolicitGrantSize=docsIetfQosServiceClassUnsolicitGrantSize, docsIetfQosServiceClassNomGrantInterval=docsIetfQosServiceClassNomGrantInterval, docsIetfQosServiceClassTolGrantJitter=docsIetfQosServiceClassTolGrantJitter, docsIetfQosServiceClassGrantsPerInterval=docsIetfQosServiceClassGrantsPerInterval, docsIetfQosServiceClassMaxLatency=docsIetfQosServiceClassMaxLatency, docsIetfQosServiceClassActiveTimeout=docsIetfQosServiceClassActiveTimeout, docsIetfQosServiceClassAdmittedTimeout=docsIetfQosServiceClassAdmittedTimeout, docsIetfQosServiceClassSchedulingType=docsIetfQosServiceClassSchedulingType, docsIetfQosServiceClassRequestPolicy=docsIetfQosServiceClassRequestPolicy, docsIetfQosServiceClassTosAndMask=docsIetfQosServiceClassTosAndMask, docsIetfQosServiceClassTosOrMask=docsIetfQosServiceClassTosOrMask, docsIetfQosServiceClassDirection=docsIetfQosServiceClassDirection, docsIetfQosServiceClassStorageType=docsIetfQosServiceClassStorageType, docsIetfQosServiceClassDSCPOverwrite=docsIetfQosServiceClassDSCPOverwrite, 
docsIetfQosServiceClassPolicyTable=docsIetfQosServiceClassPolicyTable, docsIetfQosServiceClassPolicyEntry=docsIetfQosServiceClassPolicyEntry, docsIetfQosServiceClassPolicyIndex=docsIetfQosServiceClassPolicyIndex, docsIetfQosServiceClassPolicyName=docsIetfQosServiceClassPolicyName, docsIetfQosServiceClassPolicyRulePriority=docsIetfQosServiceClassPolicyRulePriority, docsIetfQosServiceClassPolicyStatus=docsIetfQosServiceClassPolicyStatus, docsIetfQosServiceClassPolicyStorageType=docsIetfQosServiceClassPolicyStorageType, docsIetfQosPHSTable=docsIetfQosPHSTable, docsIetfQosPHSEntry=docsIetfQosPHSEntry, docsIetfQosPHSField=docsIetfQosPHSField, docsIetfQosPHSMask=docsIetfQosPHSMask, docsIetfQosPHSSize=docsIetfQosPHSSize, docsIetfQosPHSVerify=docsIetfQosPHSVerify, docsIetfQosPHSIndex=docsIetfQosPHSIndex, docsIetfQosCmtsMacToSrvFlowTable=docsIetfQosCmtsMacToSrvFlowTable, docsIetfQosCmtsMacToSrvFlowEntry=docsIetfQosCmtsMacToSrvFlowEntry, docsIetfQosCmtsCmMac=docsIetfQosCmtsCmMac, docsIetfQosCmtsServiceFlowId=docsIetfQosCmtsServiceFlowId, docsIetfQosCmtsIfIndex=docsIetfQosCmtsIfIndex, docsIetfQosConformance=docsIetfQosConformance, docsIetfQosGroups=docsIetfQosGroups, docsIetfQosCompliances=docsIetfQosCompliances)\n\n# Groups\nmibBuilder.exportSymbols(\"DOCS-IETF-QOS-MIB\", docsIetfQosBaseGroup=docsIetfQosBaseGroup, docsIetfQosParamSetGroup=docsIetfQosParamSetGroup, docsIetfQosCmtsGroup=docsIetfQosCmtsGroup, docsIetfQosSrvClassPolicyGroup=docsIetfQosSrvClassPolicyGroup, docsIetfQosServiceClassGroup=docsIetfQosServiceClassGroup)\n\n# Compliances\nmibBuilder.exportSymbols(\"DOCS-IETF-QOS-MIB\", docsIetfQosCompliance=docsIetfQosCompliance)\n",
"step-ids": [
4,
5,
6,
7,
9
]
}
|
[
4,
5,
6,
7,
9
] |
# -*- coding: utf-8 -*-
# some XML helpers
from xml.dom.minidom import Document
class XMLReport:
    """Small helper around xml.dom.minidom for building XML reports."""

    def __init__(self, name):
        self.doc = Document()
        # The root element; add() attaches new elements here by default.
        self.main_node = self.add(name, node=self.doc)

    def add(self, name, node=None):
        # Create a child element under `node` (the root when not given).
        if node is None:
            node = self.main_node
        elem = self.doc.createElement(name)
        node.appendChild(elem)
        return elem

    def text(self, text, node):
        # Append a text node to an existing element.
        node.appendChild(self.doc.createTextNode(text))

    def set_node_info(self, node, typ):
        # Record a type's id (as hex) and name as attributes on the node.
        node.setAttribute("type-id", hex(typ.id))
        node.setAttribute("name", typ.get_name())

    def __str__(self):
        return self.doc.toprettyxml(indent="  ")
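
# Minimal usage sketch (an illustrative addition, not part of the original
# module). DummyType is a stand-in for whatever object set_node_info expects:
# anything exposing an `id` attribute and a `get_name()` method.
if __name__ == "__main__":
    class DummyType:
        id = 0x2A

        def get_name(self):
            return "example"

    report = XMLReport("report")
    item = report.add("item")
    report.text("hello", item)
    report.set_node_info(item, DummyType())
    print(report)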
|
normal
|
{
"blob_id": "146487738006ce3efb5bd35c425835a1fd8e0145",
"index": 9490,
"step-1": "# -*- coding: utf-8 -*-\n#some xml helpers\nfrom xml.dom.minidom import Document\n\nclass XMLReport:\n def __init__(self, name):\n\tself.doc = Document()\n\tself.main_node = self.add(name, node=self.doc)\n \n def add(self, name, node=None):\n\tif node is None: node = self.main_node\n\telem = self.doc.createElement(name)\n\tnode.appendChild(elem)\n\treturn elem\n \n def text(self, text, node):\n\tnode.appendChild(self.doc.createTextNode(text))\n \n def set_node_info(self, node, typ):\n\tnode.setAttribute(\"type-id\", hex(typ.id))\n\tnode.setAttribute(\"name\", typ.get_name())\n\n def __str__(self):\n\treturn self.doc.toprettyxml(indent=\" \")",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from django.shortcuts import render
from PIL import Image
from django.views.decorators import csrf
import numpy as np
import re
import sys
import os
from .utils import *
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
import base64
sys.path.append(os.path.abspath("./models"))
OUTPUT = os.path.join(os.path.dirname(__file__), 'output.png')
from io import BytesIO
def getI420FromBase64(codec):
base64_data = re.sub('^data:image/.+;base64,', '', codec)
byte_data = base64.b64decode(base64_data)
image_data = BytesIO(byte_data)
img = Image.open(image_data)
img.save(OUTPUT)
def convertImage(imgData):
getI420FromBase64(imgData)
@csrf_exempt
def predict(request):
    # Decode the posted base64 image and write it to OUTPUT.
    imgData = request.POST.get('img')
    convertImage(imgData)
    # Preprocess: grayscale, resize to the model's 32x32 input shape.
    x = Image.open(OUTPUT)
    x = x.convert('L')
    x = x.resize((32, 32))
    x.save(OUTPUT)
    x = np.array(x)
    x = x.reshape(1, 32, 32, 1)
    # Run the model returned by init() and respond with the argmax class.
    model, graph = init()
    out = model.predict(x)
    response = np.array(np.argmax(out, axis=1))
    return JsonResponse({"output": str(response[0])})
def index(request):
return render(request, 'index.html', { "imagestr" : "static/hindi_characters/1.png"})
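

# Hypothetical client sketch for exercising the predict view above; the
# "/predict/" route, the local dev-server URL, and the third-party
# `requests` package are assumptions, not part of this module:
def send_image(path, url="http://127.0.0.1:8000/predict/"):
    import requests  # pip install requests
    with open(path, "rb") as f:
        payload = "data:image/png;base64," + base64.b64encode(f.read()).decode()
    return requests.post(url, data={"img": payload}).json()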
|
normal
|
{
"blob_id": "b84b3206e87176feee2c39fc0866ada994c9ac7a",
"index": 8655,
"step-1": "<mask token>\n\n\ndef convertImage(imgData):\n getI420FromBase64(imgData)\n\n\n<mask token>\n",
"step-2": "<mask token>\nsys.path.append(os.path.abspath('./models'))\n<mask token>\n\n\ndef getI420FromBase64(codec):\n base64_data = re.sub('^data:image/.+;base64,', '', codec)\n byte_data = base64.b64decode(base64_data)\n image_data = BytesIO(byte_data)\n img = Image.open(image_data)\n img.save(OUTPUT)\n\n\ndef convertImage(imgData):\n getI420FromBase64(imgData)\n\n\n@csrf_exempt\ndef predict(request):\n imgData = request.POST.get('img')\n convertImage(imgData)\n x = Image.open(OUTPUT)\n x = x.convert('L')\n x = x.resize((32, 32))\n x.save(OUTPUT)\n x = np.array(x)\n x = x.reshape(1, 32, 32, 1)\n model, graph = init()\n out = model.predict(x)\n response = np.array(np.argmax(out, axis=1))\n return JsonResponse({'output': str(response[0])})\n\n\ndef index(request):\n return render(request, 'index.html', {'imagestr':\n 'static/hindi_characters/1.png'})\n",
"step-3": "<mask token>\nsys.path.append(os.path.abspath('./models'))\nOUTPUT = os.path.join(os.path.dirname(__file__), 'output.png')\n<mask token>\n\n\ndef getI420FromBase64(codec):\n base64_data = re.sub('^data:image/.+;base64,', '', codec)\n byte_data = base64.b64decode(base64_data)\n image_data = BytesIO(byte_data)\n img = Image.open(image_data)\n img.save(OUTPUT)\n\n\ndef convertImage(imgData):\n getI420FromBase64(imgData)\n\n\n@csrf_exempt\ndef predict(request):\n imgData = request.POST.get('img')\n convertImage(imgData)\n x = Image.open(OUTPUT)\n x = x.convert('L')\n x = x.resize((32, 32))\n x.save(OUTPUT)\n x = np.array(x)\n x = x.reshape(1, 32, 32, 1)\n model, graph = init()\n out = model.predict(x)\n response = np.array(np.argmax(out, axis=1))\n return JsonResponse({'output': str(response[0])})\n\n\ndef index(request):\n return render(request, 'index.html', {'imagestr':\n 'static/hindi_characters/1.png'})\n",
"step-4": "from django.shortcuts import render\nfrom PIL import Image\nfrom django.views.decorators import csrf\nimport numpy as np\nimport re\nimport sys\nimport os\nfrom .utils import *\nfrom django.http import JsonResponse\nfrom django.views.decorators.csrf import csrf_exempt\nimport base64\nsys.path.append(os.path.abspath('./models'))\nOUTPUT = os.path.join(os.path.dirname(__file__), 'output.png')\nfrom PIL import Image\nfrom io import BytesIO\n\n\ndef getI420FromBase64(codec):\n base64_data = re.sub('^data:image/.+;base64,', '', codec)\n byte_data = base64.b64decode(base64_data)\n image_data = BytesIO(byte_data)\n img = Image.open(image_data)\n img.save(OUTPUT)\n\n\ndef convertImage(imgData):\n getI420FromBase64(imgData)\n\n\n@csrf_exempt\ndef predict(request):\n imgData = request.POST.get('img')\n convertImage(imgData)\n x = Image.open(OUTPUT)\n x = x.convert('L')\n x = x.resize((32, 32))\n x.save(OUTPUT)\n x = np.array(x)\n x = x.reshape(1, 32, 32, 1)\n model, graph = init()\n out = model.predict(x)\n response = np.array(np.argmax(out, axis=1))\n return JsonResponse({'output': str(response[0])})\n\n\ndef index(request):\n return render(request, 'index.html', {'imagestr':\n 'static/hindi_characters/1.png'})\n",
"step-5": "from django.shortcuts import render\nfrom PIL import Image\nfrom django.views.decorators import csrf\nimport numpy as np\nimport re\nimport sys\nimport os\nfrom .utils import *\nfrom django.http import JsonResponse\nfrom django.views.decorators.csrf import csrf_exempt\nimport base64\nsys.path.append(os.path.abspath(\"./models\"))\nOUTPUT = os.path.join(os.path.dirname(__file__), 'output.png')\nfrom PIL import Image\nfrom io import BytesIO\ndef getI420FromBase64(codec):\n base64_data = re.sub('^data:image/.+;base64,', '', codec)\n byte_data = base64.b64decode(base64_data)\n image_data = BytesIO(byte_data)\n img = Image.open(image_data)\n img.save(OUTPUT)\n\n\ndef convertImage(imgData):\n getI420FromBase64(imgData)\n\n@csrf_exempt\ndef predict(request):\n imgData = request.POST.get('img')\n convertImage(imgData)\n x = Image.open(OUTPUT)\n x = x.convert('L')\n x = x.resize((32,32))\n x.save(OUTPUT)\n x = np.array(x)\n x = x.reshape(1,32,32,1)\n model, graph = init()\n out = model.predict(x)\n response = np.array(np.argmax(out, axis=1))\n return JsonResponse({\"output\": str(response[0]) })\n\n\ndef index(request):\n return render(request, 'index.html', { \"imagestr\" : \"static/hindi_characters/1.png\"})\n",
"step-ids": [
1,
5,
6,
7,
8
]
}
|
[
1,
5,
6,
7,
8
] |
def calc_fib(n):
    # Bottom-up Fibonacci: fib_lis[i] caches fib(i), so each value is
    # computed exactly once and the whole run is O(n).
    fib_lis = dict()
    for i in range(n + 1):
        if i <= 1:
            fib_lis[i] = i
        else:
            fib_lis[i] = fib_lis[i - 2] + fib_lis[i - 1]
    return fib_lis[n]
n = int(input())
print(calc_fib(n))
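# Quick sanity check (not part of the original exercise): the sequence
# runs 0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, so calc_fib(10) == 55.
assert calc_fib(10) == 55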
|
normal
|
{
"blob_id": "426b711571d3b5c4f8c7b0bad3a613951902e60b",
"index": 4129,
"step-1": "<mask token>\n",
"step-2": "def calc_fib(n):\n fib_lis = dict()\n for i in range(n + 1):\n if i <= 1:\n fib_lis[i] = i\n else:\n fib_lis[i] = fib_lis[i - 2] + fib_lis[i - 1]\n return fib_lis[n]\n\n\n<mask token>\n",
"step-3": "def calc_fib(n):\n fib_lis = dict()\n for i in range(n + 1):\n if i <= 1:\n fib_lis[i] = i\n else:\n fib_lis[i] = fib_lis[i - 2] + fib_lis[i - 1]\n return fib_lis[n]\n\n\n<mask token>\nprint(calc_fib(n))\n",
"step-4": "def calc_fib(n):\n fib_lis = dict()\n for i in range(n + 1):\n if i <= 1:\n fib_lis[i] = i\n else:\n fib_lis[i] = fib_lis[i - 2] + fib_lis[i - 1]\n return fib_lis[n]\n\n\nn = int(input())\nprint(calc_fib(n))\n",
"step-5": "def calc_fib(n):\n fib_lis = dict()\n for i in range(n+1):\n if (i <= 1):\n fib_lis[i] = i\n else:\n fib_lis[i] = fib_lis[i-2] + fib_lis[i-1]\n return fib_lis[n]\nn = int(input())\nprint(calc_fib(n))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Core Packages
import difflib
import tkinter as tk
from tkinter import *
from tkinter import ttk
from tkinter.scrolledtext import *
import tkinter.filedialog
import PyPDF2
from tkinter import filedialog
import torch
import json
from transformers import T5Tokenizer, T5ForConditionalGeneration, T5Config
# NLP Pkgs
from spacy_summarization import text_summarizer
from gensim.summarization import summarize
from nltk_summarization import nltk_summarizer
# Web Scraping Pkg
from bs4 import BeautifulSoup
from urllib.request import urlopen
# Structure and Layout
window = Tk()
window.title("Summaryzer GUI")
window.geometry("700x400")
window.config(background='black')
style = ttk.Style(window)
style.configure('lefttab.TNotebook', tabposition='wn', )
# TAB LAYOUT
tab_control = ttk.Notebook(window, style='lefttab.TNotebook')
tab2 = ttk.Frame(tab_control)
tab3 = ttk.Frame(tab_control)
# ADD TABS TO NOTEBOOK
tab_control.add(tab3, text=f'{"Extractive":^20s}')
tab_control.add(tab2, text=f'{"Abstractive":^20s}')
label1 = Label(tab3, text='Extractive Summarize', padx=5, pady=5)
label1.grid(column=1, row=0)
label2 = Label(tab2, text='Abstractive Summarize', padx=5, pady=5)
label2.grid(column=0, row=0)
tab_control.pack(expand=1, fill='both')
def get_summary():
    model = T5ForConditionalGeneration.from_pretrained('t5-small')
    tokenizer = T5Tokenizer.from_pretrained('t5-small')
    device = torch.device('cpu')
    text = str(url_display1.get('1.0', tk.END))
    preprocess_text = text.strip().replace("\n", "")
    t5_prepared_Text = "summarize: " + preprocess_text
    tokenized_text = tokenizer.encode(t5_prepared_Text, return_tensors="pt").to(device)

    summary_ids = model.generate(tokenized_text,
                                 num_beams=4,
                                 no_repeat_ngram_size=2,
                                 min_length=30,
                                 max_length=100,
                                 early_stopping=True)

    output = tokenizer.decode(summary_ids[0], skip_special_tokens=True)

    # Character-level similarity between input and summary, in percent,
    # plus the rough length-based "precision" score this GUI reports.
    similarity = difflib.SequenceMatcher(None, text, output, False).ratio() * 100
    edited = len(text) - len(output)
    precision = (len(text) + len(output) + edited) / 2 / 100

    result = "\n\nSummarized text:\n{}\nPrecision = {} similarity = {}".format(
        output, precision, similarity)
    tab2_display_text.insert(tk.END, result)
def open_pdf():
open_file = filedialog.askopenfilename(
initialdir="C:/gui/",
title="Open PDF File",
filetypes=(
("PDF Files", "*.pdf"),
("All Files", ".")))
if open_file:
pdf_file = PyPDF2.PdfFileReader(open_file)
page = pdf_file.getPage(0)
page_stuff = page.extractText()
io = page_stuff.split()
url_display.insert(3.0, io)
def open_pdf1():
open_file = filedialog.askopenfilename(
initialdir="C:/gui/",
title="Open PDF File",
filetypes=(
("PDF Files", "*.pdf"),
("All Files", ".")))
if open_file:
pdf_file = PyPDF2.PdfFileReader(open_file)
page = pdf_file.getPage(0)
page_stuff = page.extractText()
io = page_stuff.split()
url_display1.insert(3.0, io)
def clear_display_result():
tab3_display_text.delete('1.0', END)
# Clear For URL
def clear_url_entry():
url_entry.delete(0, END)
# Open File to Read and Process
def openfiles():
    file1 = tkinter.filedialog.askopenfilename(filetypes=(("Text Files", "*.txt"), ("All files", "*")))
    read_text = open(file1).read()
    url_display.insert(tk.END, read_text)
def get_text():
raw_text = str(url_entry.get())
page = urlopen(raw_text)
soup = BeautifulSoup(page)
fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))
url_display.insert(tk.END, fetched_text)
def get_url_summary():
raw_text = url_display.get('1.0', tk.END)
final_text = text_summarizer(raw_text)
result = '\nSummary:{}'.format(final_text)
tab3_display_text.insert(tk.END, result)
def use_spacy():
    raw_text = url_display.get('1.0', tk.END)
    final_text = text_summarizer(raw_text)
    print(final_text)
    similarity = difflib.SequenceMatcher(None, raw_text, final_text, False).ratio() * 100
    # Length-based "precision" proxy: summary length relative to the input.
    precision = (len(raw_text) + len(final_text)) / len(raw_text) / 100
    result = '\nSpacy Summary:{}\n Precision = {} similarity = {}'.format(
        final_text, precision, similarity)
    tab3_display_text.insert(tk.END, result)


def use_nltk():
    raw_text = url_display.get('1.0', tk.END)
    final_text = nltk_summarizer(raw_text)
    print(final_text)
    similarity = difflib.SequenceMatcher(None, raw_text, final_text, False).ratio() * 100
    precision = (len(raw_text) + len(final_text)) / len(raw_text) / 100
    result = '\nNLTK Summary:{}\n Precision = {} similarity = {}'.format(
        final_text, precision, similarity)
    tab3_display_text.insert(tk.END, result)


def use_gensim():
    raw_text = url_display.get('1.0', tk.END)
    final_text = summarize(raw_text)
    print(final_text)
    similarity = difflib.SequenceMatcher(None, raw_text, final_text, False).ratio() * 100
    precision = (len(raw_text) + len(final_text)) / len(raw_text) / 100
    result = '\nGensim Summary:{}\n Precision = {} similarity = {}'.format(
        final_text, precision, similarity)
    tab3_display_text.insert(tk.END, result)
# URL TAB
l1 = Label(tab3, text="Enter URL To Summarize")
l1.grid(row=1, column=0)
raw_entry = StringVar()
url_entry = Entry(tab3, textvariable=raw_entry, width=50)
url_entry.grid(row=1, column=1)
# BUTTONS
button1 = Button(tab3, text="Reset", command=clear_url_entry, width=12, bg='#03A9F4', fg='#fff')
button1.grid(row=4, column=0, padx=10, pady=10)
button2 = Button(tab3, text="Get Text", command=get_text, width=12, bg='#03A9F4', fg='#fff')
button2.grid(row=4, column=1, padx=10, pady=10)
button3 = Button(tab3, text="Open File", width=12, command=openfiles, bg='#c5cae9')
button3.grid(row=5, column=0, padx=10, pady=10)
button4 = Button(tab3, text="Open PDF", width=12, command=open_pdf, bg='#c5cae9')
button4.grid(row=5, column=1, padx=10, pady=10)
button5 = Button(tab3, text="SpaCy", command=use_spacy, width=12, bg='red', fg='#fff')
button5.grid(row=8, column=0, padx=10, pady=10)
button6 = Button(tab3, text="Clear Result", command=clear_display_result, width=12, bg='#03A9F4', fg='#fff')
button6.grid(row=9, column=1, padx=10, pady=10)
button7 = Button(tab3, text="NLTK", command=use_nltk, width=12, bg='#03A9F4', fg='#fff')
button7.grid(row=8, column=1, padx=10, pady=10)
button8 = Button(tab3, text="Gensim", command=use_gensim, width=12, bg='#03A9F4', fg='#fff')
button8.grid(row=9, column=0, padx=10, pady=10)
# Display Screen For Result
url_display = ScrolledText(tab3, height=10)
url_display.grid(row=7, column=0, columnspan=3, padx=5, pady=5)
tab3_display_text = ScrolledText(tab3, height=10)
tab3_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)
l1 = Label(tab2, text="Enter URL To Summarize")
l1.grid(row=1, column=0)
raw_entry1 = StringVar()
url_entry1 = Entry(tab2, textvariable=raw_entry1, width=50)
url_entry1.grid(row=1, column=1)
# BUTTONS
button9 = Button(tab2, text="Reset", command=clear_url_entry, width=12, bg='#03A9F4', fg='#fff')
button9.grid(row=4, column=0, padx=10, pady=10)
button10 = Button(tab2, text="Get Text", command=get_text, width=12, bg='#03A9F4', fg='#fff')
button10.grid(row=4, column=1, padx=10, pady=10)
button11 = Button(tab2, text="Open File", width=12, command=openfiles, bg='#c5cae9')
button11.grid(row=5, column=0, padx=10, pady=10)
button12 = Button(tab2, text="Open PDF", width=12, command=open_pdf1, bg='#c5cae9')
button12.grid(row=5, column=1, padx=10, pady=10)
button13 = Button(tab2, text="Clear Result", command=clear_display_result, width=12, bg='#03A9F4', fg='#fff')
button13.grid(row=9, column=1, padx=10, pady=10)
button14 = Button(tab2, text="Abstract", command=get_summary, width=12, bg='#03A9F4', fg='#fff')
button14.grid(row=9, column=0, padx=10, pady=10)
url_display1 = ScrolledText(tab2, height=10)
url_display1.grid(row=7, column=0, columnspan=3, padx=5, pady=5)
tab2_display_text = ScrolledText(tab2, height=10)
tab2_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)
window.mainloop()
|
normal
|
{
"blob_id": "e3dece36ba3e5b3df763e7119c485f6ed2155098",
"index": 795,
"step-1": "<mask token>\n\n\ndef get_summary():\n model = T5ForConditionalGeneration.from_pretrained('t5-small')\n tokenizer = T5Tokenizer.from_pretrained('t5-small')\n device = torch.device('cpu')\n text = str(url_display1.get('1.0', tk.END))\n preprocess_text = text.strip().replace('\\n', '')\n t5_prepared_Text = 'summarize: ' + preprocess_text\n tokenized_text = tokenizer.encode(t5_prepared_Text, return_tensors='pt'\n ).to(device)\n summary_ids = model.generate(tokenized_text, num_beams=4,\n no_repeat_ngram_size=2, min_length=30, max_length=100,\n early_stopping=True)\n output = tokenizer.decode(summary_ids[0], skip_special_tokens=True)\n Str1 = text\n str2 = output\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n edited = len(text) - len(output)\n Precision = (len(text) + len(output) + edited) / 2\n Precisioncalc = Precision / 100\n result = ('\\n\\nSummarized text: \\n', output\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab2_display_text.insert(tk.END, result)\n\n\ndef open_pdf():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display.insert(3.0, io)\n\n\ndef open_pdf1():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display1.insert(3.0, io)\n\n\n<mask token>\n\n\ndef clear_url_entry():\n url_entry.delete(0, END)\n\n\n<mask token>\n\n\ndef get_text():\n raw_text = str(url_entry.get())\n page = urlopen(raw_text)\n soup = BeautifulSoup(page)\n fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))\n url_display.insert(tk.END, fetched_text)\n\n\ndef get_url_summary():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n result = '\\nSummary:{}'.format(final_text)\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_spacy():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = text_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nSpacy Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_nltk():\n raw_text = url_display.get('1.0', tk.END)\n final_text = nltk_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = nltk_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nNLTK Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_summary():\n model = T5ForConditionalGeneration.from_pretrained('t5-small')\n tokenizer = T5Tokenizer.from_pretrained('t5-small')\n device = torch.device('cpu')\n text = str(url_display1.get('1.0', tk.END))\n preprocess_text = text.strip().replace('\\n', '')\n t5_prepared_Text = 'summarize: ' + preprocess_text\n tokenized_text = tokenizer.encode(t5_prepared_Text, return_tensors='pt'\n ).to(device)\n summary_ids = model.generate(tokenized_text, num_beams=4,\n no_repeat_ngram_size=2, min_length=30, max_length=100,\n early_stopping=True)\n output = tokenizer.decode(summary_ids[0], skip_special_tokens=True)\n Str1 = text\n str2 = output\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n edited = len(text) - len(output)\n Precision = (len(text) + len(output) + edited) / 2\n Precisioncalc = Precision / 100\n result = ('\\n\\nSummarized text: \\n', output\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab2_display_text.insert(tk.END, result)\n\n\ndef open_pdf():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display.insert(3.0, io)\n\n\ndef open_pdf1():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display1.insert(3.0, io)\n\n\ndef clear_display_result():\n tab3_display_text.delete('1.0', END)\n\n\ndef clear_url_entry():\n url_entry.delete(0, END)\n\n\n<mask token>\n\n\ndef get_text():\n raw_text = str(url_entry.get())\n page = urlopen(raw_text)\n soup = BeautifulSoup(page)\n fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))\n url_display.insert(tk.END, fetched_text)\n\n\ndef get_url_summary():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n result = '\\nSummary:{}'.format(final_text)\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_spacy():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = text_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nSpacy Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_nltk():\n raw_text = url_display.get('1.0', tk.END)\n final_text = nltk_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = nltk_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nNLTK Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\n<mask token>\n",
"step-3": "<mask token>\nwindow = Tk()\nwindow.title('Summaryzer GUI')\nwindow.geometry('700x400')\nwindow.config(background='black')\nstyle = ttk.Style(window)\nstyle.configure('lefttab.TNotebook', tabposition='wn')\ntab_control = ttk.Notebook(window, style='lefttab.TNotebook')\ntab2 = ttk.Frame(tab_control)\ntab3 = ttk.Frame(tab_control)\ntab_control.add(tab3, text=f\"{'Extractive':^20s}\")\ntab_control.add(tab2, text=f\"{'Abstractive':^20s}\")\nlabel1 = Label(tab3, text='Extractive Summrize', padx=5, pady=5)\nlabel1.grid(column=1, row=0)\nlabel2 = Label(tab2, text='Abstractive Summrize', padx=5, pady=5)\nlabel2.grid(column=0, row=0)\ntab_control.pack(expand=1, fill='both')\n\n\ndef get_summary():\n model = T5ForConditionalGeneration.from_pretrained('t5-small')\n tokenizer = T5Tokenizer.from_pretrained('t5-small')\n device = torch.device('cpu')\n text = str(url_display1.get('1.0', tk.END))\n preprocess_text = text.strip().replace('\\n', '')\n t5_prepared_Text = 'summarize: ' + preprocess_text\n tokenized_text = tokenizer.encode(t5_prepared_Text, return_tensors='pt'\n ).to(device)\n summary_ids = model.generate(tokenized_text, num_beams=4,\n no_repeat_ngram_size=2, min_length=30, max_length=100,\n early_stopping=True)\n output = tokenizer.decode(summary_ids[0], skip_special_tokens=True)\n Str1 = text\n str2 = output\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n edited = len(text) - len(output)\n Precision = (len(text) + len(output) + edited) / 2\n Precisioncalc = Precision / 100\n result = ('\\n\\nSummarized text: \\n', output\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab2_display_text.insert(tk.END, result)\n\n\ndef open_pdf():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display.insert(3.0, io)\n\n\ndef open_pdf1():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display1.insert(3.0, io)\n\n\ndef clear_display_result():\n tab3_display_text.delete('1.0', END)\n\n\ndef clear_url_entry():\n url_entry.delete(0, END)\n\n\ndef openfiles():\n file1 = tkinter.filedialog.askopenfilename(filetypes=(('Text Files',\n '.txt'), ('All files', '*')))\n read_text = open(file1).read()\n url_display.insert(tk.END, read_text)\n\n\ndef get_text():\n raw_text = str(url_entry.get())\n page = urlopen(raw_text)\n soup = BeautifulSoup(page)\n fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))\n url_display.insert(tk.END, fetched_text)\n\n\ndef get_url_summary():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n result = '\\nSummary:{}'.format(final_text)\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_spacy():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = text_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nSpacy 
Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_nltk():\n raw_text = url_display.get('1.0', tk.END)\n final_text = nltk_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = nltk_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nNLTK Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_gensim():\n raw_text = url_display.get('1.0', tk.END)\n final_text = summarize(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = summarize(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nGensim Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\nl1 = Label(tab3, text='Enter URL To Summarize')\nl1.grid(row=1, column=0)\nraw_entry = StringVar()\nurl_entry = Entry(tab3, textvariable=raw_entry, width=50)\nurl_entry.grid(row=1, column=1)\nbutton1 = Button(tab3, text='Reset', command=clear_url_entry, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton1.grid(row=4, column=0, padx=10, pady=10)\nbutton2 = Button(tab3, text='Get Text', command=get_text, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton2.grid(row=4, column=1, padx=10, pady=10)\nbutton3 = Button(tab3, text='Open File', width=12, command=openfiles, bg=\n '#c5cae9')\nbutton3.grid(row=5, column=0, padx=10, pady=10)\nbutton4 = Button(tab3, text='Open PDF', width=12, command=open_pdf, bg=\n '#c5cae9')\nbutton4.grid(row=5, column=1, padx=10, pady=10)\nbutton5 = Button(tab3, text='SpaCy', command=use_spacy, width=12, bg='red',\n fg='#fff')\nbutton5.grid(row=8, column=0, padx=10, pady=10)\nbutton6 = Button(tab3, text='Clear Result', command=clear_display_result,\n width=12, bg='#03A9F4', fg='#fff')\nbutton6.grid(row=9, column=1, padx=10, pady=10)\nbutton7 = Button(tab3, text='NLTK', command=use_nltk, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton7.grid(row=8, column=1, padx=10, pady=10)\nbutton8 = Button(tab3, text='Gensim', command=use_gensim, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton8.grid(row=9, column=0, padx=10, pady=10)\nurl_display = ScrolledText(tab3, height=10)\nurl_display.grid(row=7, column=0, columnspan=3, padx=5, pady=5)\ntab3_display_text = ScrolledText(tab3, height=10)\ntab3_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)\nl1 = Label(tab2, text='Enter URL To Summarize')\nl1.grid(row=1, column=0)\nraw_entry1 = StringVar()\nurl_entry1 = Entry(tab2, textvariable=raw_entry, width=50)\nurl_entry1.grid(row=1, column=1)\nbutton9 = Button(tab2, text='Reset', command=clear_url_entry, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton9.grid(row=4, column=0, padx=10, pady=10)\nbutton10 = Button(tab2, text='Get Text', command=get_text, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton10.grid(row=4, column=1, padx=10, pady=10)\nbutton11 = Button(tab2, text='Open File', width=12, command=openfiles, bg=\n '#c5cae9')\nbutton11.grid(row=5, column=0, padx=10, pady=10)\nbutton12 = Button(tab2, text='Open PDF', width=12, command=open_pdf1, bg=\n '#c5cae9')\nbutton12.grid(row=5, column=1, padx=10, pady=10)\nbutton13 = Button(tab2, 
text='Clear Result', command=clear_display_result,\n width=12, bg='#03A9F4', fg='#fff')\nbutton13.grid(row=9, column=1, padx=10, pady=10)\nbutton14 = Button(tab2, text='Abstract', command=get_summary, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton14.grid(row=9, column=0, padx=10, pady=10)\nurl_display1 = ScrolledText(tab2, height=10)\nurl_display1.grid(row=7, column=0, columnspan=3, padx=5, pady=5)\ntab2_display_text = ScrolledText(tab2, height=10)\ntab2_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)\nwindow.mainloop()\n",
"step-4": "import difflib\nimport tkinter as tk\nfrom tkinter import *\nfrom tkinter import ttk\nfrom tkinter.scrolledtext import *\nimport tkinter.filedialog\nimport PyPDF2\nfrom tkinter import filedialog\nimport torch\nimport json\nfrom transformers import T5Tokenizer, T5ForConditionalGeneration, T5Config\nfrom spacy_summarization import text_summarizer\nfrom gensim.summarization import summarize\nfrom nltk_summarization import nltk_summarizer\nfrom bs4 import BeautifulSoup\nfrom urllib.request import urlopen\nwindow = Tk()\nwindow.title('Summaryzer GUI')\nwindow.geometry('700x400')\nwindow.config(background='black')\nstyle = ttk.Style(window)\nstyle.configure('lefttab.TNotebook', tabposition='wn')\ntab_control = ttk.Notebook(window, style='lefttab.TNotebook')\ntab2 = ttk.Frame(tab_control)\ntab3 = ttk.Frame(tab_control)\ntab_control.add(tab3, text=f\"{'Extractive':^20s}\")\ntab_control.add(tab2, text=f\"{'Abstractive':^20s}\")\nlabel1 = Label(tab3, text='Extractive Summrize', padx=5, pady=5)\nlabel1.grid(column=1, row=0)\nlabel2 = Label(tab2, text='Abstractive Summrize', padx=5, pady=5)\nlabel2.grid(column=0, row=0)\ntab_control.pack(expand=1, fill='both')\n\n\ndef get_summary():\n model = T5ForConditionalGeneration.from_pretrained('t5-small')\n tokenizer = T5Tokenizer.from_pretrained('t5-small')\n device = torch.device('cpu')\n text = str(url_display1.get('1.0', tk.END))\n preprocess_text = text.strip().replace('\\n', '')\n t5_prepared_Text = 'summarize: ' + preprocess_text\n tokenized_text = tokenizer.encode(t5_prepared_Text, return_tensors='pt'\n ).to(device)\n summary_ids = model.generate(tokenized_text, num_beams=4,\n no_repeat_ngram_size=2, min_length=30, max_length=100,\n early_stopping=True)\n output = tokenizer.decode(summary_ids[0], skip_special_tokens=True)\n Str1 = text\n str2 = output\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n edited = len(text) - len(output)\n Precision = (len(text) + len(output) + edited) / 2\n Precisioncalc = Precision / 100\n result = ('\\n\\nSummarized text: \\n', output\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab2_display_text.insert(tk.END, result)\n\n\ndef open_pdf():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display.insert(3.0, io)\n\n\ndef open_pdf1():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display1.insert(3.0, io)\n\n\ndef clear_display_result():\n tab3_display_text.delete('1.0', END)\n\n\ndef clear_url_entry():\n url_entry.delete(0, END)\n\n\ndef openfiles():\n file1 = tkinter.filedialog.askopenfilename(filetypes=(('Text Files',\n '.txt'), ('All files', '*')))\n read_text = open(file1).read()\n url_display.insert(tk.END, read_text)\n\n\ndef get_text():\n raw_text = str(url_entry.get())\n page = urlopen(raw_text)\n soup = BeautifulSoup(page)\n fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))\n url_display.insert(tk.END, fetched_text)\n\n\ndef get_url_summary():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n result = 
'\\nSummary:{}'.format(final_text)\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_spacy():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = text_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nSpacy Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_nltk():\n raw_text = url_display.get('1.0', tk.END)\n final_text = nltk_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = nltk_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nNLTK Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_gensim():\n raw_text = url_display.get('1.0', tk.END)\n final_text = summarize(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = summarize(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nGensim Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\nl1 = Label(tab3, text='Enter URL To Summarize')\nl1.grid(row=1, column=0)\nraw_entry = StringVar()\nurl_entry = Entry(tab3, textvariable=raw_entry, width=50)\nurl_entry.grid(row=1, column=1)\nbutton1 = Button(tab3, text='Reset', command=clear_url_entry, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton1.grid(row=4, column=0, padx=10, pady=10)\nbutton2 = Button(tab3, text='Get Text', command=get_text, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton2.grid(row=4, column=1, padx=10, pady=10)\nbutton3 = Button(tab3, text='Open File', width=12, command=openfiles, bg=\n '#c5cae9')\nbutton3.grid(row=5, column=0, padx=10, pady=10)\nbutton4 = Button(tab3, text='Open PDF', width=12, command=open_pdf, bg=\n '#c5cae9')\nbutton4.grid(row=5, column=1, padx=10, pady=10)\nbutton5 = Button(tab3, text='SpaCy', command=use_spacy, width=12, bg='red',\n fg='#fff')\nbutton5.grid(row=8, column=0, padx=10, pady=10)\nbutton6 = Button(tab3, text='Clear Result', command=clear_display_result,\n width=12, bg='#03A9F4', fg='#fff')\nbutton6.grid(row=9, column=1, padx=10, pady=10)\nbutton7 = Button(tab3, text='NLTK', command=use_nltk, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton7.grid(row=8, column=1, padx=10, pady=10)\nbutton8 = Button(tab3, text='Gensim', command=use_gensim, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton8.grid(row=9, column=0, padx=10, pady=10)\nurl_display = ScrolledText(tab3, height=10)\nurl_display.grid(row=7, column=0, columnspan=3, padx=5, pady=5)\ntab3_display_text = ScrolledText(tab3, height=10)\ntab3_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)\nl1 = Label(tab2, text='Enter URL To Summarize')\nl1.grid(row=1, column=0)\nraw_entry1 = StringVar()\nurl_entry1 = Entry(tab2, textvariable=raw_entry, width=50)\nurl_entry1.grid(row=1, column=1)\nbutton9 = Button(tab2, text='Reset', command=clear_url_entry, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton9.grid(row=4, column=0, 
padx=10, pady=10)\nbutton10 = Button(tab2, text='Get Text', command=get_text, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton10.grid(row=4, column=1, padx=10, pady=10)\nbutton11 = Button(tab2, text='Open File', width=12, command=openfiles, bg=\n '#c5cae9')\nbutton11.grid(row=5, column=0, padx=10, pady=10)\nbutton12 = Button(tab2, text='Open PDF', width=12, command=open_pdf1, bg=\n '#c5cae9')\nbutton12.grid(row=5, column=1, padx=10, pady=10)\nbutton13 = Button(tab2, text='Clear Result', command=clear_display_result,\n width=12, bg='#03A9F4', fg='#fff')\nbutton13.grid(row=9, column=1, padx=10, pady=10)\nbutton14 = Button(tab2, text='Abstract', command=get_summary, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton14.grid(row=9, column=0, padx=10, pady=10)\nurl_display1 = ScrolledText(tab2, height=10)\nurl_display1.grid(row=7, column=0, columnspan=3, padx=5, pady=5)\ntab2_display_text = ScrolledText(tab2, height=10)\ntab2_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)\nwindow.mainloop()\n",
"step-5": "# Core Packages\r\nimport difflib\r\nimport tkinter as tk\r\nfrom tkinter import *\r\nfrom tkinter import ttk\r\nfrom tkinter.scrolledtext import *\r\nimport tkinter.filedialog\r\nimport PyPDF2\r\nfrom tkinter import filedialog\r\nimport torch\r\nimport json\r\nfrom transformers import T5Tokenizer, T5ForConditionalGeneration, T5Config\r\n\r\n# NLP Pkgs\r\nfrom spacy_summarization import text_summarizer\r\nfrom gensim.summarization import summarize\r\nfrom nltk_summarization import nltk_summarizer\r\n\r\n# Web Scraping Pkg\r\nfrom bs4 import BeautifulSoup\r\nfrom urllib.request import urlopen\r\n\r\n# Structure and Layout\r\nwindow = Tk()\r\nwindow.title(\"Summaryzer GUI\")\r\nwindow.geometry(\"700x400\")\r\nwindow.config(background='black')\r\n\r\nstyle = ttk.Style(window)\r\nstyle.configure('lefttab.TNotebook', tabposition='wn', )\r\n\r\n# TAB LAYOUT\r\ntab_control = ttk.Notebook(window, style='lefttab.TNotebook')\r\n\r\ntab2 = ttk.Frame(tab_control)\r\ntab3 = ttk.Frame(tab_control)\r\n\r\n# ADD TABS TO NOTEBOOK\r\ntab_control.add(tab3, text=f'{\"Extractive\":^20s}')\r\ntab_control.add(tab2, text=f'{\"Abstractive\":^20s}')\r\n\r\nlabel1 = Label(tab3, text='Extractive Summrize', padx=5, pady=5)\r\nlabel1.grid(column=1, row=0)\r\n\r\n\r\nlabel2 = Label(tab2, text='Abstractive Summrize',padx=5, pady=5)\r\nlabel2.grid(column=0, row=0)\r\n\r\ntab_control.pack(expand=1, fill='both')\r\n\r\ndef get_summary():\r\n model = T5ForConditionalGeneration.from_pretrained ('t5-small')\r\n tokenizer = T5Tokenizer.from_pretrained ('t5-small')\r\n device = torch.device ('cpu')\r\n text = str(url_display1.get('1.0', tk.END))\r\n preprocess_text = text.strip ().replace (\"\\n\", \"\")\r\n t5_prepared_Text = \"summarize: \" + preprocess_text\r\n tokenized_text = tokenizer.encode (t5_prepared_Text, return_tensors=\"pt\").to (device)\r\n\r\n summary_ids = model.generate (tokenized_text,\r\n num_beams=4,\r\n no_repeat_ngram_size=2,\r\n min_length=30,\r\n max_length=100,\r\n early_stopping=True)\r\n\r\n output = tokenizer.decode (summary_ids[0], skip_special_tokens=True)\r\n\r\n Str1 = text\r\n str2 = output\r\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\r\n\r\n edited = len(text)-len(output)\r\n Precision = (len(text)+len(output)+edited)/2\r\n Precisioncalc = Precision / 100\r\n\r\n result =(\"\\n\\nSummarized text: \\n\", output),\" Precision = \" , Precisioncalc , \" similarity = \" , printt\r\n\r\n tab2_display_text.insert(tk.END, result)\r\n\r\ndef open_pdf():\r\n open_file = filedialog.askopenfilename(\r\n initialdir=\"C:/gui/\",\r\n title=\"Open PDF File\",\r\n filetypes=(\r\n (\"PDF Files\", \"*.pdf\"),\r\n (\"All Files\", \".\")))\r\n\r\n if open_file:\r\n pdf_file = PyPDF2.PdfFileReader(open_file)\r\n page = pdf_file.getPage(0)\r\n page_stuff = page.extractText()\r\n io = page_stuff.split()\r\n url_display.insert(3.0, io)\r\n\r\n\r\ndef open_pdf1():\r\n open_file = filedialog.askopenfilename(\r\n initialdir=\"C:/gui/\",\r\n title=\"Open PDF File\",\r\n filetypes=(\r\n (\"PDF Files\", \"*.pdf\"),\r\n (\"All Files\", \".\")))\r\n\r\n if open_file:\r\n pdf_file = PyPDF2.PdfFileReader(open_file)\r\n page = pdf_file.getPage(0)\r\n page_stuff = page.extractText()\r\n io = page_stuff.split()\r\n url_display1.insert(3.0, io)\r\n\r\n\r\ndef clear_display_result():\r\n tab3_display_text.delete('1.0', END)\r\n\r\n# Clear For URL\r\ndef clear_url_entry():\r\n url_entry.delete(0, END)\r\n\r\n\r\n# Open File to Read and Process\r\ndef openfiles():\r\n file1 = 
tkinter.filedialog.askopenfilename(filetypes=((\"Text Files\", \".txt\"), (\"All files\", \"*\")))\r\n read_text = open(file1).read()\r\n url_display.insert(tk.END, read_text)\r\n\r\n\r\ndef get_text():\r\n raw_text = str(url_entry.get())\r\n page = urlopen(raw_text)\r\n soup = BeautifulSoup(page)\r\n fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))\r\n url_display.insert(tk.END, fetched_text)\r\n\r\n\r\ndef get_url_summary():\r\n raw_text = url_display.get('1.0', tk.END)\r\n final_text = text_summarizer(raw_text)\r\n result = '\\nSummary:{}'.format(final_text)\r\n tab3_display_text.insert(tk.END, result)\r\n\r\n\r\ndef use_spacy ():\r\n\r\n raw_text = url_display.get('1.0', tk.END)\r\n final_text = text_summarizer(raw_text)\r\n print(final_text)\r\n\r\n Str1 = raw_text\r\n str2 = text_summarizer(raw_text)\r\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\r\n\r\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\r\n Precisioncalc = Precision / 100\r\n result = '\\nSpacy Summary:{}\\n'.format(final_text),\" Precision = \" , Precisioncalc , \" similarity = \" , printt\r\n tab3_display_text.insert(tk.END, result)\r\n\r\n\r\ndef use_nltk():\r\n raw_text = url_display.get ('1.0', tk.END)\r\n final_text = nltk_summarizer (raw_text)\r\n print (final_text)\r\n\r\n Str1 = raw_text\r\n str2 = nltk_summarizer(raw_text)\r\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\r\n\r\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\r\n Precisioncalc = Precision / 100\r\n result = '\\nNLTK Summary:{}\\n'.format(final_text),\" Precision = \" , Precisioncalc , \" similarity = \" , printt\r\n tab3_display_text.insert(tk.END, result)\r\n\r\ndef use_gensim():\r\n raw_text = url_display.get ('1.0', tk.END)\r\n final_text = summarize(raw_text)\r\n print (final_text)\r\n Str1 = raw_text\r\n str2 = summarize(raw_text)\r\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\r\n\r\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\r\n Precisioncalc = Precision / 100\r\n result ='\\nGensim Summary:{}\\n'.format(final_text),\" Precision = \" , Precisioncalc , \" similarity = \" , printt\r\n tab3_display_text.insert(tk.END, result)\r\n\r\n\r\n# URL TAB\r\nl1 = Label(tab3, text=\"Enter URL To Summarize\")\r\nl1.grid(row=1, column=0)\r\n\r\nraw_entry = StringVar()\r\nurl_entry = Entry(tab3, textvariable=raw_entry, width=50)\r\nurl_entry.grid(row=1, column=1)\r\n\r\n# BUTTONS\r\nbutton1 = Button(tab3, text=\"Reset\", command=clear_url_entry, width=12, bg='#03A9F4', fg='#fff')\r\nbutton1.grid(row=4, column=0, padx=10, pady=10)\r\n\r\nbutton2 = Button(tab3, text=\"Get Text\", command=get_text, width=12, bg='#03A9F4', fg='#fff')\r\nbutton2.grid(row=4, column=1, padx=10, pady=10)\r\n\r\nbutton3 = Button(tab3, text=\"Open File\", width=12, command=openfiles, bg='#c5cae9')\r\nbutton3.grid(row=5, column=0, padx=10, pady=10)\r\n\r\nbutton4 = Button(tab3, text=\"Open PDF\", width=12, command=open_pdf, bg='#c5cae9')\r\nbutton4.grid(row=5, column=1, padx=10, pady=10)\r\n\r\nbutton5 = Button(tab3, text=\"SpaCy\", command=use_spacy, width=12, bg='red', fg='#fff')\r\nbutton5.grid(row=8, column=0, padx=10, pady=10)\r\n\r\nbutton6 = Button(tab3, text=\"Clear Result\", command=clear_display_result, width=12, bg='#03A9F4', fg='#fff')\r\nbutton6.grid(row=9, column=1, padx=10, pady=10)\r\n\r\nbutton7 = Button(tab3, text=\"NLTK\", command=use_nltk, width=12, bg='#03A9F4', 
fg='#fff')\r\nbutton7.grid(row=8, column=1, padx=10, pady=10)\r\n\r\nbutton8 = Button(tab3, text=\"Gensim\", command=use_gensim, width=12, bg='#03A9F4', fg='#fff')\r\nbutton8.grid(row=9, column=0, padx=10, pady=10)\r\n# Display Screen For Result\r\nurl_display = ScrolledText(tab3, height=10)\r\nurl_display.grid(row=7, column=0, columnspan=3, padx=5, pady=5)\r\n\r\ntab3_display_text = ScrolledText(tab3, height=10)\r\ntab3_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)\r\n\r\n\r\n\r\nl1 = Label(tab2, text=\"Enter URL To Summarize\")\r\nl1.grid(row=1, column=0)\r\n\r\nraw_entry1 = StringVar()\r\nurl_entry1 = Entry(tab2, textvariable=raw_entry, width=50)\r\nurl_entry1.grid(row=1, column=1)\r\n\r\n# BUTTONS\r\n\r\nbutton9 = Button(tab2, text=\"Reset\", command=clear_url_entry, width=12, bg='#03A9F4', fg='#fff')\r\nbutton9.grid(row=4, column=0, padx=10, pady=10)\r\n\r\nbutton10 = Button(tab2, text=\"Get Text\", command=get_text, width=12, bg='#03A9F4', fg='#fff')\r\nbutton10.grid(row=4, column=1, padx=10, pady=10)\r\n\r\nbutton11 = Button(tab2, text=\"Open File\", width=12, command=openfiles, bg='#c5cae9')\r\nbutton11.grid(row=5, column=0, padx=10, pady=10)\r\n\r\nbutton12 = Button(tab2, text=\"Open PDF\", width=12, command=open_pdf1, bg='#c5cae9')\r\nbutton12.grid(row=5, column=1, padx=10, pady=10)\r\n\r\nbutton13 = Button(tab2, text=\"Clear Result\", command=clear_display_result, width=12, bg='#03A9F4', fg='#fff')\r\nbutton13.grid(row=9, column=1, padx=10, pady=10)\r\n\r\nbutton14 = Button(tab2, text=\"Abstract\", command=get_summary, width=12, bg='#03A9F4', fg='#fff')\r\nbutton14.grid(row=9, column=0, padx=10, pady=10)\r\n\r\nurl_display1 = ScrolledText(tab2, height=10)\r\nurl_display1.grid(row=7, column=0, columnspan=3, padx=5, pady=5)\r\n\r\ntab2_display_text = ScrolledText(tab2, height=10)\r\ntab2_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)\r\n\r\nwindow.mainloop()\r\n\r\n",
"step-ids": [
8,
9,
13,
14,
15
]
}
|
[
8,
9,
13,
14,
15
] |
"""
File: ex17_map_reduce.py
Author: TonyDeep
Date: 2020-07-21
"""
from functools import reduce
print('#1 map')
a_list = [2, 18, 9, 22, 17, 24, 8, 12, 27]
map_data = map(lambda x: x * 2 + 1, a_list)
new_list = list(map_data)
print(new_list)
print('\n#2 reduce')
b_list = [1, 2, 3, 4, 5]
reduce_data = reduce(lambda x, y: x + y, b_list)
print(reduce_data)
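print('\n#3 reduce with an initializer')
# Extra sketch (not in the original file): reduce accepts an optional
# initial value, which is returned unchanged when the iterable is empty.
print(reduce(lambda x, y: x + y, [], 100))  # -> 100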
|
normal
|
{
"blob_id": "8e3b26826752b6b3482e8a29b9b58f5025c7ef58",
"index": 4758,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('#1 map')\n<mask token>\nprint(new_list)\nprint('\\n#2 reduce')\n<mask token>\nprint(reduce_data)\n",
"step-3": "<mask token>\nprint('#1 map')\na_list = [2, 18, 9, 22, 17, 24, 8, 12, 27]\nmap_data = map(lambda x: x * 2 + 1, a_list)\nnew_list = list(map_data)\nprint(new_list)\nprint('\\n#2 reduce')\nb_list = [1, 2, 3, 4, 5]\nreduce_data = reduce(lambda x, y: x + y, b_list)\nprint(reduce_data)\n",
"step-4": "<mask token>\nfrom functools import reduce\nprint('#1 map')\na_list = [2, 18, 9, 22, 17, 24, 8, 12, 27]\nmap_data = map(lambda x: x * 2 + 1, a_list)\nnew_list = list(map_data)\nprint(new_list)\nprint('\\n#2 reduce')\nb_list = [1, 2, 3, 4, 5]\nreduce_data = reduce(lambda x, y: x + y, b_list)\nprint(reduce_data)\n",
"step-5": "\"\"\"\nFile: ex17_map_reduce.py\nAuthor: TonyDeep\nDate: 2020-07-21\n\"\"\"\n\nfrom functools import reduce\n\nprint('#1 map')\na_list = [2, 18, 9, 22, 17, 24, 8, 12, 27]\nmap_data = map(lambda x: x * 2 + 1, a_list)\nnew_list = list(map_data)\nprint(new_list)\n\nprint('\\n#2 reduce')\nb_list = [1, 2, 3, 4, 5]\nreduce_data = reduce(lambda x, y: x + y, b_list)\nprint(reduce_data)\n\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import sys
from PySide6.QtCore import *
from PySide6.QtWidgets import *
from PySide6.QtGui import *
from simple_drawing_window import *
class simple_drawing_window1( simple_drawing_window):
def __init__(self):
super().__init__()
    def paintEvent(self, e):
        p = QPainter()
        p.begin(self)
        """
        p.setPen(QColor(0,0,0))
        p.setBrush(QColor(0,127,0))
        p.drawPolygon(
            [QPoint(70,100), QPoint(100,110),
            QPoint(130, 100), QPoint(100,150),]
        )
        """
        # Draw an orange rectangle as a four-point polygon.
        p.setPen(QColor(255, 127, 0))
        p.setBrush(QColor(255, 127, 0))
        p.drawPolygon(
            [QPoint(50, 100), QPoint(200, 100), QPoint(200, 400), QPoint(50, 400)]
        )
        # Scale self.rabbit (a pixmap presumably loaded by the base class)
        # into a 200x200 region.
        p.drawPixmap(QRect(400, 150, 200, 200), self.rabbit)
        p.end()
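

# Hypothetical runner sketch (assumes simple_drawing_window subclasses
# QWidget, as the paintEvent above implies):
if __name__ == "__main__":
    app = QApplication(sys.argv)
    win = simple_drawing_window1()
    win.show()
    sys.exit(app.exec())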
|
normal
|
{
"blob_id": "6fc43919f521234d0dc9e167bb72f014e9c0bf17",
"index": 2102,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass simple_drawing_window1(simple_drawing_window):\n <mask token>\n\n def paintEvent(self, e):\n p = QPainter()\n p.begin(self)\n \"\"\"\n\t\tp.setPen(QColor(0,0,0))\n\t\tp.setBrush(QColor(0,127,0))\n\t\tp.drawPolygon(\n\t\t\t[QPoint(70,100), QPoint(100,110), \n\t\t\tQPoint(130, 100), QPoint(100,150),]\n\t\t)\n\t\t\"\"\"\n p.setPen(QColor(255, 127, 0))\n p.setBrush(QColor(255, 127, 0))\n p.drawPolygon([QPoint(50, 100), QPoint(200, 100), QPoint(200, 400),\n QPoint(50, 400)])\n p.drawPixmap(QRect(400, 150, 200, 200), self.rabbit)\n p.end()\n",
"step-3": "<mask token>\n\n\nclass simple_drawing_window1(simple_drawing_window):\n\n def __init__(self):\n super().__init__()\n\n def paintEvent(self, e):\n p = QPainter()\n p.begin(self)\n \"\"\"\n\t\tp.setPen(QColor(0,0,0))\n\t\tp.setBrush(QColor(0,127,0))\n\t\tp.drawPolygon(\n\t\t\t[QPoint(70,100), QPoint(100,110), \n\t\t\tQPoint(130, 100), QPoint(100,150),]\n\t\t)\n\t\t\"\"\"\n p.setPen(QColor(255, 127, 0))\n p.setBrush(QColor(255, 127, 0))\n p.drawPolygon([QPoint(50, 100), QPoint(200, 100), QPoint(200, 400),\n QPoint(50, 400)])\n p.drawPixmap(QRect(400, 150, 200, 200), self.rabbit)\n p.end()\n",
"step-4": "import sys\nfrom PySide6.QtCore import *\nfrom PySide6.QtWidgets import *\nfrom PySide6.QtGui import *\nfrom simple_drawing_window import *\n\n\nclass simple_drawing_window1(simple_drawing_window):\n\n def __init__(self):\n super().__init__()\n\n def paintEvent(self, e):\n p = QPainter()\n p.begin(self)\n \"\"\"\n\t\tp.setPen(QColor(0,0,0))\n\t\tp.setBrush(QColor(0,127,0))\n\t\tp.drawPolygon(\n\t\t\t[QPoint(70,100), QPoint(100,110), \n\t\t\tQPoint(130, 100), QPoint(100,150),]\n\t\t)\n\t\t\"\"\"\n p.setPen(QColor(255, 127, 0))\n p.setBrush(QColor(255, 127, 0))\n p.drawPolygon([QPoint(50, 100), QPoint(200, 100), QPoint(200, 400),\n QPoint(50, 400)])\n p.drawPixmap(QRect(400, 150, 200, 200), self.rabbit)\n p.end()\n",
"step-5": "\nimport sys\nfrom PySide6.QtCore import *\nfrom PySide6.QtWidgets import *\nfrom PySide6.QtGui import *\nfrom simple_drawing_window import *\n\nclass simple_drawing_window1( simple_drawing_window):\n\tdef __init__(self):\n\t\tsuper().__init__()\n \n\tdef paintEvent(self, e):\n\t\tp = QPainter()\n\t\tp.begin(self)\n\t\t\"\"\"\n\t\tp.setPen(QColor(0,0,0))\n\t\tp.setBrush(QColor(0,127,0))\n\t\tp.drawPolygon(\n\t\t\t[QPoint(70,100), QPoint(100,110), \n\t\t\tQPoint(130, 100), QPoint(100,150),]\n\t\t)\n\t\t\"\"\"\n\n\t\tp.setPen(QColor(255,127,0))\n\t\tp.setBrush(QColor(255,127,0))\n \n\t\t\n \n\t\tp.drawPolygon(\n\t\t\t[QPoint(50,100), QPoint(200,100),QPoint(200,400), QPoint(50,400),]\n\t\t)\n\t\t\n\t\tp.drawPixmap(QRect(400,150,200,200), self.rabbit)\n \n\t\tp.end()\n\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
# © MNELAB developers
#
# License: BSD (3-clause)
from .dependencies import have
from .syntax import PythonHighlighter
from .utils import count_locations, image_path, interface_style, natural_sort
|
normal
|
{
"blob_id": "837534ebc953dae966154921709398ab2b2e0b33",
"index": 578,
"step-1": "<mask token>\n",
"step-2": "from .dependencies import have\nfrom .syntax import PythonHighlighter\nfrom .utils import count_locations, image_path, interface_style, natural_sort\n",
"step-3": "# © MNELAB developers\n#\n# License: BSD (3-clause)\n\nfrom .dependencies import have\nfrom .syntax import PythonHighlighter\nfrom .utils import count_locations, image_path, interface_style, natural_sort\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class FlaskConfig(object):
SECRET_KEY = os.environ.get('FLASK_SECRET_KEY') or 'TuLAsWbcoKr5YhDE'
BOOTSTRAP_SERVE_LOCAL = os.environ.get('FLASK_BOOTSTRAP_SERVE_LOCAL') or True
APPLICATION_ROOT = os.environ.get('FLASK_APPLICATION_ROOT') or ''
# SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
# 'sqlite:///' + os.path.join(basedir, 'app.db')
# SQLALCHEMY_TRACK_MODIFICATIONS = False
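
# Minimal usage sketch (not part of this module): load the settings into
# a Flask app via from_object. Assumes Flask is installed.
if __name__ == '__main__':
    from flask import Flask
    app = Flask(__name__)
    app.config.from_object(FlaskConfig)
    print(repr(app.config['APPLICATION_ROOT']))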
|
normal
|
{
"blob_id": "a0349abb3a56ff4bc1700dbf0fa5a1fc2e3453ce",
"index": 6469,
"step-1": "<mask token>\n\n\nclass FlaskConfig(object):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass FlaskConfig(object):\n SECRET_KEY = os.environ.get('FLASK_SECRET_KEY') or 'TuLAsWbcoKr5YhDE'\n BOOTSTRAP_SERVE_LOCAL = os.environ.get('FLASK_BOOTSTRAP_SERVE_LOCAL'\n ) or True\n APPLICATION_ROOT = os.environ.get('FLASK_APPLICATION_ROOT') or ''\n",
"step-3": "<mask token>\nbasedir = os.path.abspath(os.path.dirname(__file__))\n\n\nclass FlaskConfig(object):\n SECRET_KEY = os.environ.get('FLASK_SECRET_KEY') or 'TuLAsWbcoKr5YhDE'\n BOOTSTRAP_SERVE_LOCAL = os.environ.get('FLASK_BOOTSTRAP_SERVE_LOCAL'\n ) or True\n APPLICATION_ROOT = os.environ.get('FLASK_APPLICATION_ROOT') or ''\n",
"step-4": "import os\nbasedir = os.path.abspath(os.path.dirname(__file__))\n\n\nclass FlaskConfig(object):\n SECRET_KEY = os.environ.get('FLASK_SECRET_KEY') or 'TuLAsWbcoKr5YhDE'\n BOOTSTRAP_SERVE_LOCAL = os.environ.get('FLASK_BOOTSTRAP_SERVE_LOCAL'\n ) or True\n APPLICATION_ROOT = os.environ.get('FLASK_APPLICATION_ROOT') or ''\n",
"step-5": "import os\n\nbasedir = os.path.abspath(os.path.dirname(__file__))\n\nclass FlaskConfig(object):\n SECRET_KEY = os.environ.get('FLASK_SECRET_KEY') or 'TuLAsWbcoKr5YhDE'\n BOOTSTRAP_SERVE_LOCAL = os.environ.get('FLASK_BOOTSTRAP_SERVE_LOCAL') or True\n APPLICATION_ROOT = os.environ.get('FLASK_APPLICATION_ROOT') or ''\n # SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \\\n # 'sqlite:///' + os.path.join(basedir, 'app.db')\n # SQLALCHEMY_TRACK_MODIFICATIONS = False\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import cv2
import skvideo.io
import numpy as np
import pandas as pd
from tqdm import tqdm
from harp import fdops
from word2number import w2n
from harp.vid import VidReader
class RegPropData:
"""
Processes region proposal data.
"""
_df = None
props = None
"""Dictionary containing region proposal data properties """
def __init__(self, csv_path):
"""
Initialize a region proposal data instance.
Parameters
----------
csv_path : str
Path to csv file containing proposal information.
Note
----
        It is assumed that the directory containing the proposals
        csv file also contains a `properties_session.csv` file
        describing the current session.
"""
# Checking files
fdops.check_if_file_exists(csv_path)
# loading proposal data as a data frame
self._df = pd.read_csv(csv_path)
# Dictionary containing proposal properties
self.props = self._get_properties(csv_path)
def _get_properties(self, csv_path):
"""
Creates a dictionary containing properties of proposal
data.
Parameters
----------
csv_path : str
Path to csv file containing proposal information
"""
props = {}
# File properties
loc, fname, ext = fdops.get_loc_name_ext(csv_path)
props['loc'] = loc
props['name'] = fname
props['ext'] = ext
# Video properties
props['W'] = self._df['W'].unique().item()
props['H'] = self._df['H'].unique().item()
props['FPS'] = self._df['FPS'].unique().item()
props['dur'] = self._df['dur'].unique().item()
props['vname'] = self._get_video_name(fname)
# Proposal properties
props['num_props'] = self._get_num_proposals()
return props
def write_proposals_to_video(self, vdir, frms_per_sec=1.0):
""" Writes proposals to video.
Parameters
----------
vdir : str
Directory where we can find video.
        frms_per_sec : float, default 1
            Frames sampled per second of video; a value of 0.5 keeps one
            frame every `FPS * (1 / frms_per_sec)` frames (60 frames at
            30 FPS).
"""
# Input video
vid_name = self.props['vname']
vfpath = fdops.get_files_with_kws(vdir, [vid_name, ".mp4"])
if len(vfpath) > 1:
raise Exception(f"More than one video found\n\t{vfpath}")
vin = VidReader(vfpath[0])
# Output video
ovid_path = f"{self.props['loc']}/{self.props['name']}.mp4"
vw = skvideo.io.FFmpegWriter(
ovid_path,
outputdict={'-vcodec': 'libx264','-r':'30'}
)
# Calculate frame numbers(POC) that we will use.
f0_start = 0 # starting frame poc
f0_end = vin.props['num_frames'] - 1 # ending frame poc
f0_skip = vin.props['frame_rate']*(1/frms_per_sec)
f0s = list(range(f0_start, f0_end, int(f0_skip)))
# Loop over each frame number and draw proposal regions
# over them
for f0 in tqdm(f0s):
frm = vin.get_frame(f0, c='bgr')
# Get proposals for frame f0
props = self._get_proposals_for_frame(f0)
            # Proposals loop
for p in props:
if len(p) > 0:
w0, h0, w, h = p
frame = cv2.rectangle(
frm, (w0, h0), (w0+w, h0+h), (0, 256, 0), 1
)
# Write frame to output
vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))
vw.close()
vin.release()
import sys; sys.exit()
def _get_proposals_for_frame(self, fn):
"""
Returns a list of proposal regions
Parameters
----------
fn : int
Frame number
"""
# Get dataframe that contains f0. It should have only one row
tdf = self._df.copy() # lower bound
tdf['f1'] = (tdf['f0'] # creating column
+ tdf['f'] - 1) # with last frame
df = tdf[fn >= tdf['f0']]
df = df[fn <= df['f1']]
if len(df) == 0:
return []
if len(df) > 1:
pdb.set_trace()
raise Exception("USER_ERROR: proposals csv is fishy\n"
f"{df}")
# Proposal string to numpy array
prop_list = df['props'].item().split(":")
# Loop over bounding box list and create a numpy array
if len(prop_list) > 0:
props = []
for p in prop_list:
coords = p.split("-")
if len(coords) == 4:
props += [[int(x) for x in coords]]
return props
def _get_video_name(self, fname):
""" Returns video name by parsing csv file name
Parameters
----------
fname : str
Name of csv file having proposals
"""
csv_name_split = fname.split("_")
thirty_fps_loc = csv_name_split.index("30fps")
video_name = "_".join(csv_name_split[0:thirty_fps_loc+1])
return video_name
def _get_num_proposals(self):
""" Returns number of proposals.
"""
total_props = self._df['nprops'].sum()
return total_props
|
normal
|
{
"blob_id": "b10badc172be119be5b2ab8ccc32cc95a0ed1e7a",
"index": 2680,
"step-1": "<mask token>\n\n\nclass RegPropData:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, csv_path):\n \"\"\"\n Initialize a region proposal data instance.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information.\n\n Note\n ----\n It is assumed that the directory containing the proposals\n csv file has `properties_session.cv` file. This file should\n contain information about current session.\n \"\"\"\n fdops.check_if_file_exists(csv_path)\n self._df = pd.read_csv(csv_path)\n self.props = self._get_properties(csv_path)\n\n def _get_properties(self, csv_path):\n \"\"\"\n Creates a dictionary containing properties of proposal\n data.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information\n \"\"\"\n props = {}\n loc, fname, ext = fdops.get_loc_name_ext(csv_path)\n props['loc'] = loc\n props['name'] = fname\n props['ext'] = ext\n props['W'] = self._df['W'].unique().item()\n props['H'] = self._df['H'].unique().item()\n props['FPS'] = self._df['FPS'].unique().item()\n props['dur'] = self._df['dur'].unique().item()\n props['vname'] = self._get_video_name(fname)\n props['num_props'] = self._get_num_proposals()\n return props\n\n def write_proposals_to_video(self, vdir, frms_per_sec=1.0):\n \"\"\" Writes proposals to video.\n\n Parameters\n ----------\n vdir : str\n Directory where we can find video.\n frms_per_sec : float, default 1\n A value of 0.5 means that we will skip\n `FPS x 1/(frms_per_sec) = 60` frames\n \"\"\"\n vid_name = self.props['vname']\n vfpath = fdops.get_files_with_kws(vdir, [vid_name, '.mp4'])\n if len(vfpath) > 1:\n raise Exception(f'More than one video found\\n\\t{vfpath}')\n vin = VidReader(vfpath[0])\n ovid_path = f\"{self.props['loc']}/{self.props['name']}.mp4\"\n vw = skvideo.io.FFmpegWriter(ovid_path, outputdict={'-vcodec':\n 'libx264', '-r': '30'})\n f0_start = 0\n f0_end = vin.props['num_frames'] - 1\n f0_skip = vin.props['frame_rate'] * (1 / frms_per_sec)\n f0s = list(range(f0_start, f0_end, int(f0_skip)))\n for f0 in tqdm(f0s):\n frm = vin.get_frame(f0, c='bgr')\n props = self._get_proposals_for_frame(f0)\n for p in props:\n if len(p) > 0:\n w0, h0, w, h = p\n frame = cv2.rectangle(frm, (w0, h0), (w0 + w, h0 + h),\n (0, 256, 0), 1)\n vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))\n vw.close()\n vin.release()\n import sys\n sys.exit()\n <mask token>\n\n def _get_video_name(self, fname):\n \"\"\" Returns video name by parsing csv file name\n\n Parameters\n ----------\n fname : str\n Name of csv file having proposals\n \"\"\"\n csv_name_split = fname.split('_')\n thirty_fps_loc = csv_name_split.index('30fps')\n video_name = '_'.join(csv_name_split[0:thirty_fps_loc + 1])\n return video_name\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass RegPropData:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, csv_path):\n \"\"\"\n Initialize a region proposal data instance.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information.\n\n Note\n ----\n It is assumed that the directory containing the proposals\n csv file has `properties_session.cv` file. This file should\n contain information about current session.\n \"\"\"\n fdops.check_if_file_exists(csv_path)\n self._df = pd.read_csv(csv_path)\n self.props = self._get_properties(csv_path)\n\n def _get_properties(self, csv_path):\n \"\"\"\n Creates a dictionary containing properties of proposal\n data.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information\n \"\"\"\n props = {}\n loc, fname, ext = fdops.get_loc_name_ext(csv_path)\n props['loc'] = loc\n props['name'] = fname\n props['ext'] = ext\n props['W'] = self._df['W'].unique().item()\n props['H'] = self._df['H'].unique().item()\n props['FPS'] = self._df['FPS'].unique().item()\n props['dur'] = self._df['dur'].unique().item()\n props['vname'] = self._get_video_name(fname)\n props['num_props'] = self._get_num_proposals()\n return props\n\n def write_proposals_to_video(self, vdir, frms_per_sec=1.0):\n \"\"\" Writes proposals to video.\n\n Parameters\n ----------\n vdir : str\n Directory where we can find video.\n frms_per_sec : float, default 1\n A value of 0.5 means that we will skip\n `FPS x 1/(frms_per_sec) = 60` frames\n \"\"\"\n vid_name = self.props['vname']\n vfpath = fdops.get_files_with_kws(vdir, [vid_name, '.mp4'])\n if len(vfpath) > 1:\n raise Exception(f'More than one video found\\n\\t{vfpath}')\n vin = VidReader(vfpath[0])\n ovid_path = f\"{self.props['loc']}/{self.props['name']}.mp4\"\n vw = skvideo.io.FFmpegWriter(ovid_path, outputdict={'-vcodec':\n 'libx264', '-r': '30'})\n f0_start = 0\n f0_end = vin.props['num_frames'] - 1\n f0_skip = vin.props['frame_rate'] * (1 / frms_per_sec)\n f0s = list(range(f0_start, f0_end, int(f0_skip)))\n for f0 in tqdm(f0s):\n frm = vin.get_frame(f0, c='bgr')\n props = self._get_proposals_for_frame(f0)\n for p in props:\n if len(p) > 0:\n w0, h0, w, h = p\n frame = cv2.rectangle(frm, (w0, h0), (w0 + w, h0 + h),\n (0, 256, 0), 1)\n vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))\n vw.close()\n vin.release()\n import sys\n sys.exit()\n\n def _get_proposals_for_frame(self, fn):\n \"\"\"\n Returns a list of proposal regions\n\n Parameters\n ----------\n fn : int\n Frame number\n \"\"\"\n tdf = self._df.copy()\n tdf['f1'] = tdf['f0'] + tdf['f'] - 1\n df = tdf[fn >= tdf['f0']]\n df = df[fn <= df['f1']]\n if len(df) == 0:\n return []\n if len(df) > 1:\n pdb.set_trace()\n raise Exception(f'USER_ERROR: proposals csv is fishy\\n{df}')\n prop_list = df['props'].item().split(':')\n if len(prop_list) > 0:\n props = []\n for p in prop_list:\n coords = p.split('-')\n if len(coords) == 4:\n props += [[int(x) for x in coords]]\n return props\n\n def _get_video_name(self, fname):\n \"\"\" Returns video name by parsing csv file name\n\n Parameters\n ----------\n fname : str\n Name of csv file having proposals\n \"\"\"\n csv_name_split = fname.split('_')\n thirty_fps_loc = csv_name_split.index('30fps')\n video_name = '_'.join(csv_name_split[0:thirty_fps_loc + 1])\n return video_name\n\n def _get_num_proposals(self):\n \"\"\" Returns number of proposals.\n \"\"\"\n total_props = self._df['nprops'].sum()\n return total_props\n",
"step-3": "<mask token>\n\n\nclass RegPropData:\n <mask token>\n _df = None\n props = None\n <mask token>\n\n def __init__(self, csv_path):\n \"\"\"\n Initialize a region proposal data instance.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information.\n\n Note\n ----\n It is assumed that the directory containing the proposals\n csv file has `properties_session.cv` file. This file should\n contain information about current session.\n \"\"\"\n fdops.check_if_file_exists(csv_path)\n self._df = pd.read_csv(csv_path)\n self.props = self._get_properties(csv_path)\n\n def _get_properties(self, csv_path):\n \"\"\"\n Creates a dictionary containing properties of proposal\n data.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information\n \"\"\"\n props = {}\n loc, fname, ext = fdops.get_loc_name_ext(csv_path)\n props['loc'] = loc\n props['name'] = fname\n props['ext'] = ext\n props['W'] = self._df['W'].unique().item()\n props['H'] = self._df['H'].unique().item()\n props['FPS'] = self._df['FPS'].unique().item()\n props['dur'] = self._df['dur'].unique().item()\n props['vname'] = self._get_video_name(fname)\n props['num_props'] = self._get_num_proposals()\n return props\n\n def write_proposals_to_video(self, vdir, frms_per_sec=1.0):\n \"\"\" Writes proposals to video.\n\n Parameters\n ----------\n vdir : str\n Directory where we can find video.\n frms_per_sec : float, default 1\n A value of 0.5 means that we will skip\n `FPS x 1/(frms_per_sec) = 60` frames\n \"\"\"\n vid_name = self.props['vname']\n vfpath = fdops.get_files_with_kws(vdir, [vid_name, '.mp4'])\n if len(vfpath) > 1:\n raise Exception(f'More than one video found\\n\\t{vfpath}')\n vin = VidReader(vfpath[0])\n ovid_path = f\"{self.props['loc']}/{self.props['name']}.mp4\"\n vw = skvideo.io.FFmpegWriter(ovid_path, outputdict={'-vcodec':\n 'libx264', '-r': '30'})\n f0_start = 0\n f0_end = vin.props['num_frames'] - 1\n f0_skip = vin.props['frame_rate'] * (1 / frms_per_sec)\n f0s = list(range(f0_start, f0_end, int(f0_skip)))\n for f0 in tqdm(f0s):\n frm = vin.get_frame(f0, c='bgr')\n props = self._get_proposals_for_frame(f0)\n for p in props:\n if len(p) > 0:\n w0, h0, w, h = p\n frame = cv2.rectangle(frm, (w0, h0), (w0 + w, h0 + h),\n (0, 256, 0), 1)\n vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))\n vw.close()\n vin.release()\n import sys\n sys.exit()\n\n def _get_proposals_for_frame(self, fn):\n \"\"\"\n Returns a list of proposal regions\n\n Parameters\n ----------\n fn : int\n Frame number\n \"\"\"\n tdf = self._df.copy()\n tdf['f1'] = tdf['f0'] + tdf['f'] - 1\n df = tdf[fn >= tdf['f0']]\n df = df[fn <= df['f1']]\n if len(df) == 0:\n return []\n if len(df) > 1:\n pdb.set_trace()\n raise Exception(f'USER_ERROR: proposals csv is fishy\\n{df}')\n prop_list = df['props'].item().split(':')\n if len(prop_list) > 0:\n props = []\n for p in prop_list:\n coords = p.split('-')\n if len(coords) == 4:\n props += [[int(x) for x in coords]]\n return props\n\n def _get_video_name(self, fname):\n \"\"\" Returns video name by parsing csv file name\n\n Parameters\n ----------\n fname : str\n Name of csv file having proposals\n \"\"\"\n csv_name_split = fname.split('_')\n thirty_fps_loc = csv_name_split.index('30fps')\n video_name = '_'.join(csv_name_split[0:thirty_fps_loc + 1])\n return video_name\n\n def _get_num_proposals(self):\n \"\"\" Returns number of proposals.\n \"\"\"\n total_props = self._df['nprops'].sum()\n return total_props\n",
"step-4": "<mask token>\n\n\nclass RegPropData:\n \"\"\"\n Processes region proposal data.\n \"\"\"\n _df = None\n props = None\n \"\"\"Dictionary containing region proposal data properties \"\"\"\n\n def __init__(self, csv_path):\n \"\"\"\n Initialize a region proposal data instance.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information.\n\n Note\n ----\n It is assumed that the directory containing the proposals\n csv file has `properties_session.cv` file. This file should\n contain information about current session.\n \"\"\"\n fdops.check_if_file_exists(csv_path)\n self._df = pd.read_csv(csv_path)\n self.props = self._get_properties(csv_path)\n\n def _get_properties(self, csv_path):\n \"\"\"\n Creates a dictionary containing properties of proposal\n data.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information\n \"\"\"\n props = {}\n loc, fname, ext = fdops.get_loc_name_ext(csv_path)\n props['loc'] = loc\n props['name'] = fname\n props['ext'] = ext\n props['W'] = self._df['W'].unique().item()\n props['H'] = self._df['H'].unique().item()\n props['FPS'] = self._df['FPS'].unique().item()\n props['dur'] = self._df['dur'].unique().item()\n props['vname'] = self._get_video_name(fname)\n props['num_props'] = self._get_num_proposals()\n return props\n\n def write_proposals_to_video(self, vdir, frms_per_sec=1.0):\n \"\"\" Writes proposals to video.\n\n Parameters\n ----------\n vdir : str\n Directory where we can find video.\n frms_per_sec : float, default 1\n A value of 0.5 means that we will skip\n `FPS x 1/(frms_per_sec) = 60` frames\n \"\"\"\n vid_name = self.props['vname']\n vfpath = fdops.get_files_with_kws(vdir, [vid_name, '.mp4'])\n if len(vfpath) > 1:\n raise Exception(f'More than one video found\\n\\t{vfpath}')\n vin = VidReader(vfpath[0])\n ovid_path = f\"{self.props['loc']}/{self.props['name']}.mp4\"\n vw = skvideo.io.FFmpegWriter(ovid_path, outputdict={'-vcodec':\n 'libx264', '-r': '30'})\n f0_start = 0\n f0_end = vin.props['num_frames'] - 1\n f0_skip = vin.props['frame_rate'] * (1 / frms_per_sec)\n f0s = list(range(f0_start, f0_end, int(f0_skip)))\n for f0 in tqdm(f0s):\n frm = vin.get_frame(f0, c='bgr')\n props = self._get_proposals_for_frame(f0)\n for p in props:\n if len(p) > 0:\n w0, h0, w, h = p\n frame = cv2.rectangle(frm, (w0, h0), (w0 + w, h0 + h),\n (0, 256, 0), 1)\n vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))\n vw.close()\n vin.release()\n import sys\n sys.exit()\n\n def _get_proposals_for_frame(self, fn):\n \"\"\"\n Returns a list of proposal regions\n\n Parameters\n ----------\n fn : int\n Frame number\n \"\"\"\n tdf = self._df.copy()\n tdf['f1'] = tdf['f0'] + tdf['f'] - 1\n df = tdf[fn >= tdf['f0']]\n df = df[fn <= df['f1']]\n if len(df) == 0:\n return []\n if len(df) > 1:\n pdb.set_trace()\n raise Exception(f'USER_ERROR: proposals csv is fishy\\n{df}')\n prop_list = df['props'].item().split(':')\n if len(prop_list) > 0:\n props = []\n for p in prop_list:\n coords = p.split('-')\n if len(coords) == 4:\n props += [[int(x) for x in coords]]\n return props\n\n def _get_video_name(self, fname):\n \"\"\" Returns video name by parsing csv file name\n\n Parameters\n ----------\n fname : str\n Name of csv file having proposals\n \"\"\"\n csv_name_split = fname.split('_')\n thirty_fps_loc = csv_name_split.index('30fps')\n video_name = '_'.join(csv_name_split[0:thirty_fps_loc + 1])\n return video_name\n\n def _get_num_proposals(self):\n \"\"\" Returns number of proposals.\n \"\"\"\n 
total_props = self._df['nprops'].sum()\n return total_props\n",
"step-5": "import cv2\nimport pdb\nimport skvideo\nimport numpy as np\nimport pandas as pd\nfrom tqdm import tqdm\nfrom harp import fdops\nfrom word2number import w2n\nfrom harp.vid import VidReader\n\nclass RegPropData:\n \"\"\"\n Processes region proposal data.\n \"\"\"\n\n _df = None\n\n props = None\n \"\"\"Dictionary containing region proposal data properties \"\"\"\n\n def __init__(self, csv_path):\n \"\"\"\n Initialize a region proposal data instance.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information.\n\n Note\n ----\n It is assumed that the directory containing the proposals\n csv file has `properties_session.cv` file. This file should\n contain information about current session.\n \"\"\"\n # Checking files\n fdops.check_if_file_exists(csv_path)\n\n # loading proposal data as a data frame\n self._df = pd.read_csv(csv_path)\n\n # Dictionary containing proposal properties\n self.props = self._get_properties(csv_path)\n\n def _get_properties(self, csv_path):\n \"\"\"\n Creates a dictionary containing properties of proposal\n data.\n\n Parameters\n ----------\n csv_path : str\n Path to csv file containing proposal information\n \"\"\"\n props = {}\n\n # File properties\n loc, fname, ext = fdops.get_loc_name_ext(csv_path)\n props['loc'] = loc\n props['name'] = fname\n props['ext'] = ext\n\n # Video properties\n props['W'] = self._df['W'].unique().item()\n props['H'] = self._df['H'].unique().item()\n props['FPS'] = self._df['FPS'].unique().item()\n props['dur'] = self._df['dur'].unique().item()\n props['vname'] = self._get_video_name(fname)\n\n # Proposal properties\n props['num_props'] = self._get_num_proposals()\n\n return props\n\n def write_proposals_to_video(self, vdir, frms_per_sec=1.0):\n \"\"\" Writes proposals to video.\n\n Parameters\n ----------\n vdir : str\n Directory where we can find video.\n frms_per_sec : float, default 1\n A value of 0.5 means that we will skip\n `FPS x 1/(frms_per_sec) = 60` frames\n \"\"\"\n # Input video\n vid_name = self.props['vname']\n vfpath = fdops.get_files_with_kws(vdir, [vid_name, \".mp4\"])\n if len(vfpath) > 1:\n raise Exception(f\"More than one video found\\n\\t{vfpath}\")\n vin = VidReader(vfpath[0])\n\n # Output video\n ovid_path = f\"{self.props['loc']}/{self.props['name']}.mp4\"\n vw = skvideo.io.FFmpegWriter(\n ovid_path,\n outputdict={'-vcodec': 'libx264','-r':'30'}\n )\n\n # Calculate frame numbers(POC) that we will use.\n f0_start = 0 # starting frame poc\n f0_end = vin.props['num_frames'] - 1 # ending frame poc\n f0_skip = vin.props['frame_rate']*(1/frms_per_sec)\n f0s = list(range(f0_start, f0_end, int(f0_skip)))\n\n # Loop over each frame number and draw proposal regions\n # over them\n for f0 in tqdm(f0s):\n frm = vin.get_frame(f0, c='bgr')\n\n # Get proposals for frame f0\n props = self._get_proposals_for_frame(f0)\n\n # Proposals looop\n for p in props:\n if len(p) > 0:\n w0, h0, w, h = p\n frame = cv2.rectangle(\n frm, (w0, h0), (w0+w, h0+h), (0, 256, 0), 1\n )\n # Write frame to output\n vw.writeFrame(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))\n\n vw.close()\n vin.release()\n import sys; sys.exit()\n\n def _get_proposals_for_frame(self, fn):\n \"\"\"\n Returns a list of proposal regions\n\n Parameters\n ----------\n fn : int\n Frame number\n \"\"\"\n\n # Get dataframe that contains f0. 
It should have only one row\n tdf = self._df.copy() # lower bound\n tdf['f1'] = (tdf['f0'] # creating column\n + tdf['f'] - 1) # with last frame\n df = tdf[fn >= tdf['f0']]\n df = df[fn <= df['f1']]\n if len(df) == 0:\n return []\n if len(df) > 1:\n pdb.set_trace()\n raise Exception(\"USER_ERROR: proposals csv is fishy\\n\"\n f\"{df}\")\n\n # Proposal string to numpy array\n prop_list = df['props'].item().split(\":\")\n\n # Loop over bounding box list and create a numpy array\n if len(prop_list) > 0:\n props = []\n for p in prop_list:\n coords = p.split(\"-\")\n if len(coords) == 4:\n props += [[int(x) for x in coords]]\n return props\n\n def _get_video_name(self, fname):\n \"\"\" Returns video name by parsing csv file name\n\n Parameters\n ----------\n fname : str\n Name of csv file having proposals\n \"\"\"\n csv_name_split = fname.split(\"_\")\n thirty_fps_loc = csv_name_split.index(\"30fps\")\n video_name = \"_\".join(csv_name_split[0:thirty_fps_loc+1])\n return video_name\n\n def _get_num_proposals(self):\n \"\"\" Returns number of proposals.\n \"\"\"\n total_props = self._df['nprops'].sum()\n return total_props\n",
"step-ids": [
5,
7,
8,
9,
11
]
}
|
[
5,
7,
8,
9,
11
] |
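A hedged usage sketch for the RegPropData record above; the file paths and module name are illustrative, and the harp package it imports must be on the path. Two quirks worth noting in the class as written: write_proposals_to_video ends with import sys; sys.exit(), which looks like a debugging leftover and terminates the interpreter after one video, and frame is only assigned inside the proposals loop, so a frame with no proposals reuses the previous drawing (or raises NameError on the very first frame).

from regpropdata import RegPropData  # module name is an assumption

# The csv name must contain the token "30fps", since _get_video_name()
# reconstructs the video name from everything up to and including it.
rpd = RegPropData('session01/cam1_30fps_props.csv')  # illustrative path
print(rpd.props['vname'], rpd.props['num_props'])

# Proposals are stored per row as "w0-h0-w-h:w0-h0-w-h:..." strings;
# _get_proposals_for_frame() splits on ':' then '-' to recover the boxes.
boxes = rpd._get_proposals_for_frame(120)

# Overlay one annotated frame per second onto the matching video in videos/
rpd.write_proposals_to_video('videos/', frms_per_sec=1.0)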
""" Interfaces to Juju API ModelManager """
from conjureup import juju
@juju.requires_login
def list_models(user='user-admin'):
""" Lists Juju Models
Arguments:
user: Name of user to list models for.
Returns:
Dictionary of known Juju Models (default: user-admin)
"""
models = juju.CLIENT.ModelManager(request="ListModels",
params={'Tag': user})
return models['UserModels']
@juju.requires_login
def model_info(model):
""" Returns information on select model
Arguments:
model: name of model to inspect
Returns:
Dictionary of model attributes
"""
return juju.CLIENT.Client(request="ModelInfo",
params={"Name": model})
@juju.requires_login
def model_status():
""" Returns the FullStatus output of a model
Returns:
Dictionary of model status
"""
return juju.CLIENT.Client(request="FullStatus")
|
normal
|
{
"blob_id": "11045cffc6d47902be7236e1d684422317f2c5f9",
"index": 1444,
"step-1": "<mask token>\n\n\n@juju.requires_login\ndef list_models(user='user-admin'):\n \"\"\" Lists Juju Models\n\n Arguments:\n user: Name of user to list models for.\n\n Returns:\n Dictionary of known Juju Models (default: user-admin)\n \"\"\"\n models = juju.CLIENT.ModelManager(request='ListModels', params={'Tag':\n user})\n return models['UserModels']\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@juju.requires_login\ndef list_models(user='user-admin'):\n \"\"\" Lists Juju Models\n\n Arguments:\n user: Name of user to list models for.\n\n Returns:\n Dictionary of known Juju Models (default: user-admin)\n \"\"\"\n models = juju.CLIENT.ModelManager(request='ListModels', params={'Tag':\n user})\n return models['UserModels']\n\n\n@juju.requires_login\ndef model_info(model):\n \"\"\" Returns information on select model\n\n Arguments:\n model: name of model to inspect\n\n Returns:\n Dictionary of model attributes\n \"\"\"\n return juju.CLIENT.Client(request='ModelInfo', params={'Name': model})\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@juju.requires_login\ndef list_models(user='user-admin'):\n \"\"\" Lists Juju Models\n\n Arguments:\n user: Name of user to list models for.\n\n Returns:\n Dictionary of known Juju Models (default: user-admin)\n \"\"\"\n models = juju.CLIENT.ModelManager(request='ListModels', params={'Tag':\n user})\n return models['UserModels']\n\n\n@juju.requires_login\ndef model_info(model):\n \"\"\" Returns information on select model\n\n Arguments:\n model: name of model to inspect\n\n Returns:\n Dictionary of model attributes\n \"\"\"\n return juju.CLIENT.Client(request='ModelInfo', params={'Name': model})\n\n\n@juju.requires_login\ndef model_status():\n \"\"\" Returns the FullStatus output of a model\n\n Returns:\n Dictionary of model status\n \"\"\"\n return juju.CLIENT.Client(request='FullStatus')\n",
"step-4": "<mask token>\nfrom conjureup import juju\n\n\n@juju.requires_login\ndef list_models(user='user-admin'):\n \"\"\" Lists Juju Models\n\n Arguments:\n user: Name of user to list models for.\n\n Returns:\n Dictionary of known Juju Models (default: user-admin)\n \"\"\"\n models = juju.CLIENT.ModelManager(request='ListModels', params={'Tag':\n user})\n return models['UserModels']\n\n\n@juju.requires_login\ndef model_info(model):\n \"\"\" Returns information on select model\n\n Arguments:\n model: name of model to inspect\n\n Returns:\n Dictionary of model attributes\n \"\"\"\n return juju.CLIENT.Client(request='ModelInfo', params={'Name': model})\n\n\n@juju.requires_login\ndef model_status():\n \"\"\" Returns the FullStatus output of a model\n\n Returns:\n Dictionary of model status\n \"\"\"\n return juju.CLIENT.Client(request='FullStatus')\n",
"step-5": "\"\"\" Interfaces to Juju API ModelManager \"\"\"\n\nfrom conjureup import juju\n\n\n@juju.requires_login\ndef list_models(user='user-admin'):\n \"\"\" Lists Juju Models\n\n Arguments:\n user: Name of user to list models for.\n\n Returns:\n Dictionary of known Juju Models (default: user-admin)\n \"\"\"\n models = juju.CLIENT.ModelManager(request=\"ListModels\",\n params={'Tag': user})\n return models['UserModels']\n\n\n@juju.requires_login\ndef model_info(model):\n \"\"\" Returns information on select model\n\n Arguments:\n model: name of model to inspect\n\n Returns:\n Dictionary of model attributes\n \"\"\"\n return juju.CLIENT.Client(request=\"ModelInfo\",\n params={\"Name\": model})\n\n\n@juju.requires_login\ndef model_status():\n \"\"\" Returns the FullStatus output of a model\n\n Returns:\n Dictionary of model status\n \"\"\"\n return juju.CLIENT.Client(request=\"FullStatus\")\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
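A short usage sketch for the ModelManager helpers above. The @juju.requires_login decorator implies a connected juju.CLIENT session must already exist; how that session is established is outside this record, and the modelmanager module name and 'default' model name are assumptions:

import modelmanager  # the file above; module name is an assumption

for model in modelmanager.list_models(user='user-admin'):
    print(model)  # each entry comes from the 'UserModels' list

print(modelmanager.model_info('default'))  # 'default' is a placeholder name
print(modelmanager.model_status())         # FullStatus of the current model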
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2021/2/18 22:27
# @Author : name
# @File : 01.requests第一血.py
import requests
if __name__ == "__main__":
    # step1: specify the URL
    url = r'https://www.sogou.com/'
    # step2: send the request
    response = requests.get(url = url)
    # step3: get the response data; .text returns the response body as a string
    page_text = response.text
    print(page_text)
    # step4: persist the data to disk
    with open('./sogou.html', 'w', encoding='utf-8') as fp:
        fp.write(page_text)
    print('Crawling finished!')
|
normal
|
{
"blob_id": "7ae6ed8797d6ee02effd04750e243c5a59840177",
"index": 8444,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n url = 'https://www.sogou.com/'\n reponse = requests.get(url=url)\n page_text = reponse.text\n print(page_text)\n with open('./sogou.html', 'w', encoding='utf-8') as fp:\n fp.write(page_text)\n print('爬取数据结束!')\n",
"step-3": "import requests\nif __name__ == '__main__':\n url = 'https://www.sogou.com/'\n reponse = requests.get(url=url)\n page_text = reponse.text\n print(page_text)\n with open('./sogou.html', 'w', encoding='utf-8') as fp:\n fp.write(page_text)\n print('爬取数据结束!')\n",
"step-4": "#!/usr/bin/env python\r\n# -*- coding: utf-8 -*-\r\n# @Time : 2021/2/18 22:27\r\n# @Author : name\r\n# @File : 01.requests第一血.py\r\n\r\nimport requests\r\n\r\nif __name__ == \"__main__\":\r\n # step1:指定url\r\n url = r'https://www.sogou.com/'\r\n # step2:发起请求\r\n reponse = requests.get(url = url)\r\n # setp3:获取响应数据 text返回的是字符串形式的响应数据\r\n page_text = reponse.text\r\n print(page_text)\r\n # step4:持久化存储\r\n with open('./sogou.html', 'w', encoding='utf-8') as fp:\r\n fp.write(page_text)\r\n print('爬取数据结束!')\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
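A slightly hardened variant of the record above (a sketch, not from the source): it adds a timeout, fails on non-2xx statuses, and normalizes the encoding before saving.

import requests

if __name__ == '__main__':
    url = 'https://www.sogou.com/'
    response = requests.get(url=url, timeout=10)
    response.raise_for_status()                     # raise on 4xx/5xx responses
    response.encoding = response.apparent_encoding  # guard against mojibake
    with open('./sogou.html', 'w', encoding='utf-8') as fp:
        fp.write(response.text)
    print('Crawling finished!')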
from card import Card;
from deck import Deck;
import people;
import chip;
import sys;
import time;
def display_instructions() :
print('\nInstructions: The objective of this game is to obtain a hand of cards whose value is as close to 21 ');
print('as possible without going over. The numbered cards have the value of their number, face cards have ');
print('a value of 10 each, and the ace can either be counted as 1 or 11 (player\'s choice)\n');
print('Each round of the game begins with each player placing a bet. Then, the dealer passes out two cards to ');
print('each player (up to 7 players) and to the dealer. The player\'s cards will be face up while one of the ');
print('dealer\'s cards will be face down. Then, each player will choose to either hit, stand, split, or double down: \n');
print(' Hit: when a player \'hits,\' he or she is dealt another card. A player can hit as many ');
print(' times as wanted, up until the player busts (goes over 21). \n');
print(' Stand: To \'stand\' means to stay with the current cards. \n');
print(' Split: A player can \'split\' only when the first two cards of his or her hand are the ');
print(' same. When this occurs, the player makes two separate piles, one with each ');
print(' identical card, and places a bet identical to the initial bet for the second ');
print(' pile. Then, the player can hit or stand with each pile as in a normal round.\n');
print(' Double Down: When a player chooses to \'double down\', he or she can increase the current bet ');
print(' by 100% in exchange for agreeing to stand after being dealt one more card.\n');
input('Ready to play? Hit any key to continue: ');
print();
def get_num_players() :
num = input('How many people will be playing (up to 7)? Enter a number: ');
while not num.isdigit() or int(num) < 1 or int(num) > 7:
num = input('Please enter a number from 1 to 7: ');
print('\nGreat! Now decide amongst yourselves the order you all will be playing in (who will be Player 1 through 7).\n');
time.sleep(1);
return int(num);
def create_players(num) :
players_list = [];
for i in range(num) :
name = input(f'Player {i+1}, what is your name? ');
while name == '':
name = input('Please enter your name: ');
players_list.append(people.Player(name, 1000));
print('\nAll players will begin the game with the same amount of $1,000 dollars.\n');
return players_list;
def deal(dealer, players) :
for player in players[:-1] :
if not player.check_broke() : dealer.deal_card(player);
dealer.deal_card(players[-1]); # dealer deals card to dealer, too
def place_bets(players) :
print('Now, each of you must place your bets.\n');
bets = [];
for player in players[:-1] : # doesn't reach dealer
if not player.check_broke() :
bet = input(f'Bet for {player.name}: ');
while not bet.isdigit() or int(bet) > player.money :
msg = 'Please enter a whole number: ';
if bet.isdigit() :
msg = 'You don\'t have enough money! Enter a different value: ';
bet = input(msg);
player.bet = int(bet);
print();
def view_hands(players) :
print('Here are the hands for each player: \n');
for p in players :
if isinstance(p, people.Dealer) :
print(f'{p.name}: [{p.hand[0][0]}, ?]', end='');
print();
else :
if not p.check_broke() :
print(f'{p.name}: {p.hand}', end='');
if p.check_blackjack() :
print(f' ==> BLACKJACK!!! -- {p.name} wins ${p.bet}!');
else : print();
print();
def do_decision(player, dealer, hand_index=0) :
choices_dict = {'s':stand, 'h':hit, 'p':split, 'd':double_down};
valid_choice = False;
while not valid_choice :
choice = input(f'{player.name}, what do you want to do (s: stand, h: hit, p: split, d: double down): ');
while choice.lower() not in choices_dict.keys() :
choice = input('Please enter either \'s\', \'h\', \'p\', or \'d\', corresponding to your choice: ');
valid_choice = choices_dict.get(choice)(player, dealer, hand_index);
def cycle_decisions(players) :
dealer = players[-1];
for p in players :
if isinstance(p, people.Dealer) :
print(f'{p.name} will hit until reaching a hand of at least \'hard\' 17 (without an ace counting for 11).');
sys.stdout.flush();
time.sleep(0.8);
if not check_status(p) and not p.check_hard_17() : hit(p, dealer);
sys.stdout.flush();
time.sleep(0.5);
disp_str_slow('\nEnd-of-Round Earnings: \n', 0.05);
if p.check_bust() :
for i in players[:-1] :
if not i.check_broke() :
sys.stdout.flush();
time.sleep(0.5);
print(' ', end='');
for j in range(0,len(i.hand)) : # this is to loop through each hand for a player (player would have multiple hands after splitting)
if not i.check_bust(j) :
print(f'{i.name} wins ${i.bet}! ', end='');
i.money += i.bet;
else :
print(f'{i.name} loses ${i.bet}! ', end='');
i.money -= i.bet;
i.chips = chip.convert_to_chips(i.money);
if i.check_broke() :
print(f'Sorry {i.name}, but you\'re out of money and can no longer play in this game');
else :
print(f'Current Balance: ${i.money} (Chips: {i.chips})');
else :
for i in players[:-1] :
if not i.check_broke() :
sys.stdout.flush();
time.sleep(0.5);
print(' ', end='');
for j in range(0,len(i.hand)) :
if not i.check_bust(j) :
if i.hand_value(j) > p.hand_value() :
print(f'{i.name} wins ${i.bet}! ', end='');
i.money += i.bet;
elif i.hand_value(j) < p.hand_value() :
print(f'{i.name} loses ${i.bet}! ', end='');
i.money -= i.bet;
else :
print(f'{i.name} tied with the {p.name}! No change. ', end='');
else :
print(f'{i.name} loses ${i.bet}! ', end='');
i.money -= i.bet;
i.chips = chip.convert_to_chips(i.money);
if i.check_broke() :
print(f'Sorry {i.name}, but you\'re out of money and can no longer play in this game');
else :
print(f'Current Balance: ${i.money} (Chips: {i.chips})');
sys.stdout.flush();
time.sleep(0.5);
else :
if not p.check_blackjack() and not p.check_broke() :
do_decision(p, dealer);
def stand(player, dealer, hand_index=0) :
print(f'{player.name} stands.\n');
return True;
def hit(player, dealer, hand_index=0) :
dealer.deal_card(player, hand_index);
done = check_status(player, hand_index);
if isinstance(player, people.Dealer) :
while not player.check_hard_17() and not done:
time.sleep(0.5);
dealer.deal_card(player, hand_index);
done = check_status(player, hand_index);
else :
choice = '';
if not done :
choice = input('Do you want to hit again (\'y\' or \'n\')? ').lower();
while choice != 'y' and choice != 'n' :
choice = input('Enter either \'y\' or \'n\': ');
while choice == 'y' and not done:
dealer.deal_card(player, hand_index);
done = check_status(player, hand_index);
if not done :
choice = input('Do you want to hit again (\'y\' or \'n\')? ').lower();
while choice != 'y' and choice != 'n' :
choice = input('Enter either \'y\' or \'n\': ');
if not done : print();
return True;
def split(player, dealer, hand_index=0) :
if player.hand[hand_index][0] != player.hand[hand_index][1] :
print('You can\'t split on that hand! You need two identical cards to split. Choose again.');
return False;
elif player.bet*2 > player.money :
print(f'You don\'t have enough money to split with your current bet (${player.bet} * 2 = ${player.bet*2})! Choose again.');
return False;
hands = [[player.hand[hand_index][0]], [player.hand[hand_index][1]]];
player.hand = hands;
print('Now you will play each hand separately: \n');
for i in range(0,2) :
print(f'For Hand #{i+1}: ');
do_decision(player, dealer, i);
return True;
def double_down(player, dealer, hand_index=0) :
if player.bet*2 > player.money :
print(f'You don\'t have enough money to do that (${player.bet} * 2 = ${player.bet*2})! Choose again.');
return False;
elif player.did_double_down :
print('You can double down only once! Choose a different option.');
return False;
player.bet *= 2;
player.did_double_down = True;
print(f'Bet increased to ${player.bet}!.');
do_decision(player, dealer, hand_index);
return True;
def check_status(player, hand_index=0) :
done = False;
hand_string = '[';
for card in player.hand[hand_index][:-1] :
hand_string += card.__str__() + ', ';
print(f'Current Hand: {hand_string}', end='');
sys.stdout.flush();
time.sleep(0.5);
disp_str_slow(f'{player.hand[hand_index][-1].__str__()}]', 0.05);
time.sleep(0.5);
if player.check_blackjack(hand_index) :
disp_str_slow(' ==> BLACKJACK!!! ', 0.05);
if not isinstance(player, people.Dealer) :
disp_str_slow(f'-- {player.name} wins ${player.bet}!', 0.05);
print('\n\n', end='');
done = True;
sys.stdout.flush();
time.sleep(0.5);
elif player.check_bust(hand_index) :
disp_str_slow(' ==> BUST! ', 0.05);
if not isinstance(player, people.Dealer) :
disp_str_slow(f'-- {player.name} loses ${player.bet}!', 0.05);
print('\n\n', end='');
done = True;
sys.stdout.flush();
time.sleep(0.5);
else :
print();
return done;
def play_again(players) :
print();
all_broke = True;
for i in players :
if not i.check_broke() : all_broke = False;
if not all_broke :
choice = input('Do you all want to play another round? Enter \'y\' or \'n\': ').lower();
while choice != 'y' and choice != 'n' :
choice = input('Enter either \'y\' or \'n\': ');
print();
return choice;
else :
print();
return 'n';
def reset(players) :
dealer = players[-1];
for player in players :
dealer.retrieve_cards(player);
player.bet = 0;
def display_accounts(players) :
for player in players[:-1] :
change = player.money - player.initial_money;
word = 'gain';
if change < 0 :
word = 'loss';
print(f' {player.name}: ${player.money} (Chips: {player.chips}), net {word} of ${abs(change)}\n');
sys.stdout.flush();
time.sleep(0.5);
def disp_str_slow(phrase, t) :
for i in phrase :
print(i, end='');
sys.stdout.flush();
time.sleep(t);
def print_players(players) :
for player in players :
print(player);
def main() :
display_instructions();
num_players = get_num_players();
players = create_players(num_players);
dealer = people.Dealer(Deck(6));
players.append(dealer);
replay_choice = 'y';
while replay_choice == 'y' :
reset(players);
place_bets(players);
for i in range(0,2) :
deal(dealer, players);
view_hands(players);
cycle_decisions(players);
replay_choice = play_again(players);
print('------------------------------------------------------------------------------------------------\n');
disp_str_slow('FINAL PLAYER ACCOUNTS\n\n', 0.05);
sys.stdout.flush();
time.sleep(0.5)
display_accounts(players);
sys.stdout.flush();
time.sleep(0.2)
print('------------------------------------------------------------------------------------------------\n');
print('Goodbye!');
if __name__ == '__main__' :
main();
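# --- Editor's sketch, not part of the original record -----------------------
# The Card, Deck, people, and chip modules above are external to this file.
# A minimal Deck stub consistent with the single call Deck(6) seen in main()
# (six shuffled 52-card packs; the draw() accessor is an assumption about how
# people.Dealer consumes the deck):
#
# import random;
# class Deck:
#     def __init__(self, num_packs):
#         self.cards = [Card(rank, suit) for _ in range(num_packs)
#                       for rank in range(1, 14)
#                       for suit in ('hearts', 'diamonds', 'clubs', 'spades')];
#         random.shuffle(self.cards);
#     def draw(self):
#         return self.cards.pop();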
|
normal
|
{
"blob_id": "a7050ebd545c4169b481672aed140af610aea997",
"index": 4879,
"step-1": "<mask token>\n\n\ndef create_players(num):\n players_list = []\n for i in range(num):\n name = input(f'Player {i + 1}, what is your name? ')\n while name == '':\n name = input('Please enter your name: ')\n players_list.append(people.Player(name, 1000))\n print(\n '\\nAll players will begin the game with the same amount of $1,000 dollars.\\n'\n )\n return players_list\n\n\n<mask token>\n\n\ndef view_hands(players):\n print('Here are the hands for each player: \\n')\n for p in players:\n if isinstance(p, people.Dealer):\n print(f'{p.name}: [{p.hand[0][0]}, ?]', end='')\n print()\n elif not p.check_broke():\n print(f'{p.name}: {p.hand}', end='')\n if p.check_blackjack():\n print(f' ==> BLACKJACK!!! -- {p.name} wins ${p.bet}!')\n else:\n print()\n print()\n\n\ndef do_decision(player, dealer, hand_index=0):\n choices_dict = {'s': stand, 'h': hit, 'p': split, 'd': double_down}\n valid_choice = False\n while not valid_choice:\n choice = input(\n f'{player.name}, what do you want to do (s: stand, h: hit, p: split, d: double down): '\n )\n while choice.lower() not in choices_dict.keys():\n choice = input(\n \"Please enter either 's', 'h', 'p', or 'd', corresponding to your choice: \"\n )\n valid_choice = choices_dict.get(choice)(player, dealer, hand_index)\n\n\n<mask token>\n\n\ndef split(player, dealer, hand_index=0):\n if player.hand[hand_index][0] != player.hand[hand_index][1]:\n print(\n \"You can't split on that hand! You need two identical cards to split. Choose again.\"\n )\n return False\n elif player.bet * 2 > player.money:\n print(\n f\"You don't have enough money to split with your current bet (${player.bet} * 2 = ${player.bet * 2})! Choose again.\"\n )\n return False\n hands = [[player.hand[hand_index][0]], [player.hand[hand_index][1]]]\n player.hand = hands\n print('Now you will play each hand separately: \\n')\n for i in range(0, 2):\n print(f'For Hand #{i + 1}: ')\n do_decision(player, dealer, i)\n return True\n\n\n<mask token>\n\n\ndef reset(players):\n dealer = players[-1]\n for player in players:\n dealer.retrieve_cards(player)\n player.bet = 0\n\n\ndef display_accounts(players):\n for player in players[:-1]:\n change = player.money - player.initial_money\n word = 'gain'\n if change < 0:\n word = 'loss'\n print(\n f\"\"\" {player.name}: ${player.money} (Chips: {player.chips}), net {word} of ${abs(change)}\n\"\"\"\n )\n sys.stdout.flush()\n time.sleep(0.5)\n\n\ndef disp_str_slow(phrase, t):\n for i in phrase:\n print(i, end='')\n sys.stdout.flush()\n time.sleep(t)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef display_instructions():\n print(\n \"\"\"\nInstructions: The objective of this game is to obtain a hand of cards whose value is as close to 21 \"\"\"\n )\n print(\n 'as possible without going over. The numbered cards have the value of their number, face cards have '\n )\n print(\n \"a value of 10 each, and the ace can either be counted as 1 or 11 (player's choice)\\n\"\n )\n print(\n 'Each round of the game begins with each player placing a bet. Then, the dealer passes out two cards to '\n )\n print(\n \"each player (up to 7 players) and to the dealer. The player's cards will be face up while one of the \"\n )\n print(\n \"\"\"dealer's cards will be face down. Then, each player will choose to either hit, stand, split, or double down: \n\"\"\"\n )\n print(\n \" Hit: when a player 'hits,' he or she is dealt another card. A player can hit as many \"\n )\n print(\n ' times as wanted, up until the player busts (goes over 21). \\n'\n )\n print(\n \" Stand: To 'stand' means to stay with the current cards. \\n\"\n )\n print(\n \" Split: A player can 'split' only when the first two cards of his or her hand are the \"\n )\n print(\n ' same. When this occurs, the player makes two separate piles, one with each '\n )\n print(\n ' identical card, and places a bet identical to the initial bet for the second '\n )\n print(\n \"\"\" pile. Then, the player can hit or stand with each pile as in a normal round.\n\"\"\"\n )\n print(\n \" Double Down: When a player chooses to 'double down', he or she can increase the current bet \"\n )\n print(\n \"\"\" by 100% in exchange for agreeing to stand after being dealt one more card.\n\"\"\"\n )\n input('Ready to play? Hit any key to continue: ')\n print()\n\n\ndef get_num_players():\n num = input('How many people will be playing (up to 7)? Enter a number: ')\n while not num.isdigit() or int(num) < 1 or int(num) > 7:\n num = input('Please enter a number from 1 to 7: ')\n print(\n \"\"\"\nGreat! Now decide amongst yourselves the order you all will be playing in (who will be Player 1 through 7).\n\"\"\"\n )\n time.sleep(1)\n return int(num)\n\n\ndef create_players(num):\n players_list = []\n for i in range(num):\n name = input(f'Player {i + 1}, what is your name? ')\n while name == '':\n name = input('Please enter your name: ')\n players_list.append(people.Player(name, 1000))\n print(\n '\\nAll players will begin the game with the same amount of $1,000 dollars.\\n'\n )\n return players_list\n\n\ndef deal(dealer, players):\n for player in players[:-1]:\n if not player.check_broke():\n dealer.deal_card(player)\n dealer.deal_card(players[-1])\n\n\ndef place_bets(players):\n print('Now, each of you must place your bets.\\n')\n bets = []\n for player in players[:-1]:\n if not player.check_broke():\n bet = input(f'Bet for {player.name}: ')\n while not bet.isdigit() or int(bet) > player.money:\n msg = 'Please enter a whole number: '\n if bet.isdigit():\n msg = (\n \"You don't have enough money! Enter a different value: \"\n )\n bet = input(msg)\n player.bet = int(bet)\n print()\n\n\ndef view_hands(players):\n print('Here are the hands for each player: \\n')\n for p in players:\n if isinstance(p, people.Dealer):\n print(f'{p.name}: [{p.hand[0][0]}, ?]', end='')\n print()\n elif not p.check_broke():\n print(f'{p.name}: {p.hand}', end='')\n if p.check_blackjack():\n print(f' ==> BLACKJACK!!! 
-- {p.name} wins ${p.bet}!')\n else:\n print()\n print()\n\n\ndef do_decision(player, dealer, hand_index=0):\n choices_dict = {'s': stand, 'h': hit, 'p': split, 'd': double_down}\n valid_choice = False\n while not valid_choice:\n choice = input(\n f'{player.name}, what do you want to do (s: stand, h: hit, p: split, d: double down): '\n )\n while choice.lower() not in choices_dict.keys():\n choice = input(\n \"Please enter either 's', 'h', 'p', or 'd', corresponding to your choice: \"\n )\n valid_choice = choices_dict.get(choice)(player, dealer, hand_index)\n\n\ndef cycle_decisions(players):\n dealer = players[-1]\n for p in players:\n if isinstance(p, people.Dealer):\n print(\n f\"{p.name} will hit until reaching a hand of at least 'hard' 17 (without an ace counting for 11).\"\n )\n sys.stdout.flush()\n time.sleep(0.8)\n if not check_status(p) and not p.check_hard_17():\n hit(p, dealer)\n sys.stdout.flush()\n time.sleep(0.5)\n disp_str_slow('\\nEnd-of-Round Earnings: \\n', 0.05)\n if p.check_bust():\n for i in players[:-1]:\n if not i.check_broke():\n sys.stdout.flush()\n time.sleep(0.5)\n print(' ', end='')\n for j in range(0, len(i.hand)):\n if not i.check_bust(j):\n print(f'{i.name} wins ${i.bet}! ', end='')\n i.money += i.bet\n else:\n print(f'{i.name} loses ${i.bet}! ', end='')\n i.money -= i.bet\n i.chips = chip.convert_to_chips(i.money)\n if i.check_broke():\n print(\n f\"Sorry {i.name}, but you're out of money and can no longer play in this game\"\n )\n else:\n print(\n f'Current Balance: ${i.money} (Chips: {i.chips})'\n )\n else:\n for i in players[:-1]:\n if not i.check_broke():\n sys.stdout.flush()\n time.sleep(0.5)\n print(' ', end='')\n for j in range(0, len(i.hand)):\n if not i.check_bust(j):\n if i.hand_value(j) > p.hand_value():\n print(f'{i.name} wins ${i.bet}! ', end='')\n i.money += i.bet\n elif i.hand_value(j) < p.hand_value():\n print(f'{i.name} loses ${i.bet}! ', end='')\n i.money -= i.bet\n else:\n print(\n f'{i.name} tied with the {p.name}! No change. '\n , end='')\n else:\n print(f'{i.name} loses ${i.bet}! ', end='')\n i.money -= i.bet\n i.chips = chip.convert_to_chips(i.money)\n if i.check_broke():\n print(\n f\"Sorry {i.name}, but you're out of money and can no longer play in this game\"\n )\n else:\n print(\n f'Current Balance: ${i.money} (Chips: {i.chips})'\n )\n sys.stdout.flush()\n time.sleep(0.5)\n elif not p.check_blackjack() and not p.check_broke():\n do_decision(p, dealer)\n\n\ndef stand(player, dealer, hand_index=0):\n print(f'{player.name} stands.\\n')\n return True\n\n\ndef hit(player, dealer, hand_index=0):\n dealer.deal_card(player, hand_index)\n done = check_status(player, hand_index)\n if isinstance(player, people.Dealer):\n while not player.check_hard_17() and not done:\n time.sleep(0.5)\n dealer.deal_card(player, hand_index)\n done = check_status(player, hand_index)\n else:\n choice = ''\n if not done:\n choice = input(\"Do you want to hit again ('y' or 'n')? \").lower()\n while choice != 'y' and choice != 'n':\n choice = input(\"Enter either 'y' or 'n': \")\n while choice == 'y' and not done:\n dealer.deal_card(player, hand_index)\n done = check_status(player, hand_index)\n if not done:\n choice = input(\"Do you want to hit again ('y' or 'n')? \"\n ).lower()\n while choice != 'y' and choice != 'n':\n choice = input(\"Enter either 'y' or 'n': \")\n if not done:\n print()\n return True\n\n\ndef split(player, dealer, hand_index=0):\n if player.hand[hand_index][0] != player.hand[hand_index][1]:\n print(\n \"You can't split on that hand! 
You need two identical cards to split. Choose again.\"\n )\n return False\n elif player.bet * 2 > player.money:\n print(\n f\"You don't have enough money to split with your current bet (${player.bet} * 2 = ${player.bet * 2})! Choose again.\"\n )\n return False\n hands = [[player.hand[hand_index][0]], [player.hand[hand_index][1]]]\n player.hand = hands\n print('Now you will play each hand separately: \\n')\n for i in range(0, 2):\n print(f'For Hand #{i + 1}: ')\n do_decision(player, dealer, i)\n return True\n\n\ndef double_down(player, dealer, hand_index=0):\n if player.bet * 2 > player.money:\n print(\n f\"You don't have enough money to do that (${player.bet} * 2 = ${player.bet * 2})! Choose again.\"\n )\n return False\n elif player.did_double_down:\n print('You can double down only once! Choose a different option.')\n return False\n player.bet *= 2\n player.did_double_down = True\n print(f'Bet increased to ${player.bet}!.')\n do_decision(player, dealer, hand_index)\n return True\n\n\ndef check_status(player, hand_index=0):\n done = False\n hand_string = '['\n for card in player.hand[hand_index][:-1]:\n hand_string += card.__str__() + ', '\n print(f'Current Hand: {hand_string}', end='')\n sys.stdout.flush()\n time.sleep(0.5)\n disp_str_slow(f'{player.hand[hand_index][-1].__str__()}]', 0.05)\n time.sleep(0.5)\n if player.check_blackjack(hand_index):\n disp_str_slow(' ==> BLACKJACK!!! ', 0.05)\n if not isinstance(player, people.Dealer):\n disp_str_slow(f'-- {player.name} wins ${player.bet}!', 0.05)\n print('\\n\\n', end='')\n done = True\n sys.stdout.flush()\n time.sleep(0.5)\n elif player.check_bust(hand_index):\n disp_str_slow(' ==> BUST! ', 0.05)\n if not isinstance(player, people.Dealer):\n disp_str_slow(f'-- {player.name} loses ${player.bet}!', 0.05)\n print('\\n\\n', end='')\n done = True\n sys.stdout.flush()\n time.sleep(0.5)\n else:\n print()\n return done\n\n\ndef play_again(players):\n print()\n all_broke = True\n for i in players:\n if not i.check_broke():\n all_broke = False\n if not all_broke:\n choice = input(\n \"Do you all want to play another round? 
Enter 'y' or 'n': \").lower(\n )\n while choice != 'y' and choice != 'n':\n choice = input(\"Enter either 'y' or 'n': \")\n print()\n return choice\n else:\n print()\n return 'n'\n\n\ndef reset(players):\n dealer = players[-1]\n for player in players:\n dealer.retrieve_cards(player)\n player.bet = 0\n\n\ndef display_accounts(players):\n for player in players[:-1]:\n change = player.money - player.initial_money\n word = 'gain'\n if change < 0:\n word = 'loss'\n print(\n f\"\"\" {player.name}: ${player.money} (Chips: {player.chips}), net {word} of ${abs(change)}\n\"\"\"\n )\n sys.stdout.flush()\n time.sleep(0.5)\n\n\ndef disp_str_slow(phrase, t):\n for i in phrase:\n print(i, end='')\n sys.stdout.flush()\n time.sleep(t)\n\n\ndef print_players(players):\n for player in players:\n print(player)\n\n\ndef main():\n display_instructions()\n num_players = get_num_players()\n players = create_players(num_players)\n dealer = people.Dealer(Deck(6))\n players.append(dealer)\n replay_choice = 'y'\n while replay_choice == 'y':\n reset(players)\n place_bets(players)\n for i in range(0, 2):\n deal(dealer, players)\n view_hands(players)\n cycle_decisions(players)\n replay_choice = play_again(players)\n print(\n \"\"\"------------------------------------------------------------------------------------------------\n\"\"\"\n )\n disp_str_slow('FINAL PLAYER ACCOUNTS\\n\\n', 0.05)\n sys.stdout.flush()\n time.sleep(0.5)\n display_accounts(players)\n sys.stdout.flush()\n time.sleep(0.2)\n print(\n \"\"\"------------------------------------------------------------------------------------------------\n\"\"\"\n )\n print('Goodbye!')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef display_instructions():\n print(\n \"\"\"\nInstructions: The objective of this game is to obtain a hand of cards whose value is as close to 21 \"\"\"\n )\n print(\n 'as possible without going over. The numbered cards have the value of their number, face cards have '\n )\n print(\n \"a value of 10 each, and the ace can either be counted as 1 or 11 (player's choice)\\n\"\n )\n print(\n 'Each round of the game begins with each player placing a bet. Then, the dealer passes out two cards to '\n )\n print(\n \"each player (up to 7 players) and to the dealer. The player's cards will be face up while one of the \"\n )\n print(\n \"\"\"dealer's cards will be face down. Then, each player will choose to either hit, stand, split, or double down: \n\"\"\"\n )\n print(\n \" Hit: when a player 'hits,' he or she is dealt another card. A player can hit as many \"\n )\n print(\n ' times as wanted, up until the player busts (goes over 21). \\n'\n )\n print(\n \" Stand: To 'stand' means to stay with the current cards. \\n\"\n )\n print(\n \" Split: A player can 'split' only when the first two cards of his or her hand are the \"\n )\n print(\n ' same. When this occurs, the player makes two separate piles, one with each '\n )\n print(\n ' identical card, and places a bet identical to the initial bet for the second '\n )\n print(\n \"\"\" pile. Then, the player can hit or stand with each pile as in a normal round.\n\"\"\"\n )\n print(\n \" Double Down: When a player chooses to 'double down', he or she can increase the current bet \"\n )\n print(\n \"\"\" by 100% in exchange for agreeing to stand after being dealt one more card.\n\"\"\"\n )\n input('Ready to play? Hit any key to continue: ')\n print()\n\n\ndef get_num_players():\n num = input('How many people will be playing (up to 7)? Enter a number: ')\n while not num.isdigit() or int(num) < 1 or int(num) > 7:\n num = input('Please enter a number from 1 to 7: ')\n print(\n \"\"\"\nGreat! Now decide amongst yourselves the order you all will be playing in (who will be Player 1 through 7).\n\"\"\"\n )\n time.sleep(1)\n return int(num)\n\n\ndef create_players(num):\n players_list = []\n for i in range(num):\n name = input(f'Player {i + 1}, what is your name? ')\n while name == '':\n name = input('Please enter your name: ')\n players_list.append(people.Player(name, 1000))\n print(\n '\\nAll players will begin the game with the same amount of $1,000 dollars.\\n'\n )\n return players_list\n\n\ndef deal(dealer, players):\n for player in players[:-1]:\n if not player.check_broke():\n dealer.deal_card(player)\n dealer.deal_card(players[-1])\n\n\ndef place_bets(players):\n print('Now, each of you must place your bets.\\n')\n bets = []\n for player in players[:-1]:\n if not player.check_broke():\n bet = input(f'Bet for {player.name}: ')\n while not bet.isdigit() or int(bet) > player.money:\n msg = 'Please enter a whole number: '\n if bet.isdigit():\n msg = (\n \"You don't have enough money! Enter a different value: \"\n )\n bet = input(msg)\n player.bet = int(bet)\n print()\n\n\ndef view_hands(players):\n print('Here are the hands for each player: \\n')\n for p in players:\n if isinstance(p, people.Dealer):\n print(f'{p.name}: [{p.hand[0][0]}, ?]', end='')\n print()\n elif not p.check_broke():\n print(f'{p.name}: {p.hand}', end='')\n if p.check_blackjack():\n print(f' ==> BLACKJACK!!! 
-- {p.name} wins ${p.bet}!')\n else:\n print()\n print()\n\n\ndef do_decision(player, dealer, hand_index=0):\n choices_dict = {'s': stand, 'h': hit, 'p': split, 'd': double_down}\n valid_choice = False\n while not valid_choice:\n choice = input(\n f'{player.name}, what do you want to do (s: stand, h: hit, p: split, d: double down): '\n )\n while choice.lower() not in choices_dict.keys():\n choice = input(\n \"Please enter either 's', 'h', 'p', or 'd', corresponding to your choice: \"\n )\n valid_choice = choices_dict.get(choice)(player, dealer, hand_index)\n\n\ndef cycle_decisions(players):\n dealer = players[-1]\n for p in players:\n if isinstance(p, people.Dealer):\n print(\n f\"{p.name} will hit until reaching a hand of at least 'hard' 17 (without an ace counting for 11).\"\n )\n sys.stdout.flush()\n time.sleep(0.8)\n if not check_status(p) and not p.check_hard_17():\n hit(p, dealer)\n sys.stdout.flush()\n time.sleep(0.5)\n disp_str_slow('\\nEnd-of-Round Earnings: \\n', 0.05)\n if p.check_bust():\n for i in players[:-1]:\n if not i.check_broke():\n sys.stdout.flush()\n time.sleep(0.5)\n print(' ', end='')\n for j in range(0, len(i.hand)):\n if not i.check_bust(j):\n print(f'{i.name} wins ${i.bet}! ', end='')\n i.money += i.bet\n else:\n print(f'{i.name} loses ${i.bet}! ', end='')\n i.money -= i.bet\n i.chips = chip.convert_to_chips(i.money)\n if i.check_broke():\n print(\n f\"Sorry {i.name}, but you're out of money and can no longer play in this game\"\n )\n else:\n print(\n f'Current Balance: ${i.money} (Chips: {i.chips})'\n )\n else:\n for i in players[:-1]:\n if not i.check_broke():\n sys.stdout.flush()\n time.sleep(0.5)\n print(' ', end='')\n for j in range(0, len(i.hand)):\n if not i.check_bust(j):\n if i.hand_value(j) > p.hand_value():\n print(f'{i.name} wins ${i.bet}! ', end='')\n i.money += i.bet\n elif i.hand_value(j) < p.hand_value():\n print(f'{i.name} loses ${i.bet}! ', end='')\n i.money -= i.bet\n else:\n print(\n f'{i.name} tied with the {p.name}! No change. '\n , end='')\n else:\n print(f'{i.name} loses ${i.bet}! ', end='')\n i.money -= i.bet\n i.chips = chip.convert_to_chips(i.money)\n if i.check_broke():\n print(\n f\"Sorry {i.name}, but you're out of money and can no longer play in this game\"\n )\n else:\n print(\n f'Current Balance: ${i.money} (Chips: {i.chips})'\n )\n sys.stdout.flush()\n time.sleep(0.5)\n elif not p.check_blackjack() and not p.check_broke():\n do_decision(p, dealer)\n\n\ndef stand(player, dealer, hand_index=0):\n print(f'{player.name} stands.\\n')\n return True\n\n\ndef hit(player, dealer, hand_index=0):\n dealer.deal_card(player, hand_index)\n done = check_status(player, hand_index)\n if isinstance(player, people.Dealer):\n while not player.check_hard_17() and not done:\n time.sleep(0.5)\n dealer.deal_card(player, hand_index)\n done = check_status(player, hand_index)\n else:\n choice = ''\n if not done:\n choice = input(\"Do you want to hit again ('y' or 'n')? \").lower()\n while choice != 'y' and choice != 'n':\n choice = input(\"Enter either 'y' or 'n': \")\n while choice == 'y' and not done:\n dealer.deal_card(player, hand_index)\n done = check_status(player, hand_index)\n if not done:\n choice = input(\"Do you want to hit again ('y' or 'n')? \"\n ).lower()\n while choice != 'y' and choice != 'n':\n choice = input(\"Enter either 'y' or 'n': \")\n if not done:\n print()\n return True\n\n\ndef split(player, dealer, hand_index=0):\n if player.hand[hand_index][0] != player.hand[hand_index][1]:\n print(\n \"You can't split on that hand! 
You need two identical cards to split. Choose again.\"\n )\n return False\n elif player.bet * 2 > player.money:\n print(\n f\"You don't have enough money to split with your current bet (${player.bet} * 2 = ${player.bet * 2})! Choose again.\"\n )\n return False\n hands = [[player.hand[hand_index][0]], [player.hand[hand_index][1]]]\n player.hand = hands\n print('Now you will play each hand separately: \\n')\n for i in range(0, 2):\n print(f'For Hand #{i + 1}: ')\n do_decision(player, dealer, i)\n return True\n\n\ndef double_down(player, dealer, hand_index=0):\n if player.bet * 2 > player.money:\n print(\n f\"You don't have enough money to do that (${player.bet} * 2 = ${player.bet * 2})! Choose again.\"\n )\n return False\n elif player.did_double_down:\n print('You can double down only once! Choose a different option.')\n return False\n player.bet *= 2\n player.did_double_down = True\n print(f'Bet increased to ${player.bet}!.')\n do_decision(player, dealer, hand_index)\n return True\n\n\ndef check_status(player, hand_index=0):\n done = False\n hand_string = '['\n for card in player.hand[hand_index][:-1]:\n hand_string += card.__str__() + ', '\n print(f'Current Hand: {hand_string}', end='')\n sys.stdout.flush()\n time.sleep(0.5)\n disp_str_slow(f'{player.hand[hand_index][-1].__str__()}]', 0.05)\n time.sleep(0.5)\n if player.check_blackjack(hand_index):\n disp_str_slow(' ==> BLACKJACK!!! ', 0.05)\n if not isinstance(player, people.Dealer):\n disp_str_slow(f'-- {player.name} wins ${player.bet}!', 0.05)\n print('\\n\\n', end='')\n done = True\n sys.stdout.flush()\n time.sleep(0.5)\n elif player.check_bust(hand_index):\n disp_str_slow(' ==> BUST! ', 0.05)\n if not isinstance(player, people.Dealer):\n disp_str_slow(f'-- {player.name} loses ${player.bet}!', 0.05)\n print('\\n\\n', end='')\n done = True\n sys.stdout.flush()\n time.sleep(0.5)\n else:\n print()\n return done\n\n\ndef play_again(players):\n print()\n all_broke = True\n for i in players:\n if not i.check_broke():\n all_broke = False\n if not all_broke:\n choice = input(\n \"Do you all want to play another round? 
Enter 'y' or 'n': \").lower(\n )\n while choice != 'y' and choice != 'n':\n choice = input(\"Enter either 'y' or 'n': \")\n print()\n return choice\n else:\n print()\n return 'n'\n\n\ndef reset(players):\n dealer = players[-1]\n for player in players:\n dealer.retrieve_cards(player)\n player.bet = 0\n\n\ndef display_accounts(players):\n for player in players[:-1]:\n change = player.money - player.initial_money\n word = 'gain'\n if change < 0:\n word = 'loss'\n print(\n f\"\"\" {player.name}: ${player.money} (Chips: {player.chips}), net {word} of ${abs(change)}\n\"\"\"\n )\n sys.stdout.flush()\n time.sleep(0.5)\n\n\ndef disp_str_slow(phrase, t):\n for i in phrase:\n print(i, end='')\n sys.stdout.flush()\n time.sleep(t)\n\n\ndef print_players(players):\n for player in players:\n print(player)\n\n\ndef main():\n display_instructions()\n num_players = get_num_players()\n players = create_players(num_players)\n dealer = people.Dealer(Deck(6))\n players.append(dealer)\n replay_choice = 'y'\n while replay_choice == 'y':\n reset(players)\n place_bets(players)\n for i in range(0, 2):\n deal(dealer, players)\n view_hands(players)\n cycle_decisions(players)\n replay_choice = play_again(players)\n print(\n \"\"\"------------------------------------------------------------------------------------------------\n\"\"\"\n )\n disp_str_slow('FINAL PLAYER ACCOUNTS\\n\\n', 0.05)\n sys.stdout.flush()\n time.sleep(0.5)\n display_accounts(players)\n sys.stdout.flush()\n time.sleep(0.2)\n print(\n \"\"\"------------------------------------------------------------------------------------------------\n\"\"\"\n )\n print('Goodbye!')\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from card import Card\nfrom deck import Deck\nimport people\nimport chip\nimport sys\nimport time\n\n\ndef display_instructions():\n print(\n \"\"\"\nInstructions: The objective of this game is to obtain a hand of cards whose value is as close to 21 \"\"\"\n )\n print(\n 'as possible without going over. The numbered cards have the value of their number, face cards have '\n )\n print(\n \"a value of 10 each, and the ace can either be counted as 1 or 11 (player's choice)\\n\"\n )\n print(\n 'Each round of the game begins with each player placing a bet. Then, the dealer passes out two cards to '\n )\n print(\n \"each player (up to 7 players) and to the dealer. The player's cards will be face up while one of the \"\n )\n print(\n \"\"\"dealer's cards will be face down. Then, each player will choose to either hit, stand, split, or double down: \n\"\"\"\n )\n print(\n \" Hit: when a player 'hits,' he or she is dealt another card. A player can hit as many \"\n )\n print(\n ' times as wanted, up until the player busts (goes over 21). \\n'\n )\n print(\n \" Stand: To 'stand' means to stay with the current cards. \\n\"\n )\n print(\n \" Split: A player can 'split' only when the first two cards of his or her hand are the \"\n )\n print(\n ' same. When this occurs, the player makes two separate piles, one with each '\n )\n print(\n ' identical card, and places a bet identical to the initial bet for the second '\n )\n print(\n \"\"\" pile. Then, the player can hit or stand with each pile as in a normal round.\n\"\"\"\n )\n print(\n \" Double Down: When a player chooses to 'double down', he or she can increase the current bet \"\n )\n print(\n \"\"\" by 100% in exchange for agreeing to stand after being dealt one more card.\n\"\"\"\n )\n input('Ready to play? Hit any key to continue: ')\n print()\n\n\ndef get_num_players():\n num = input('How many people will be playing (up to 7)? Enter a number: ')\n while not num.isdigit() or int(num) < 1 or int(num) > 7:\n num = input('Please enter a number from 1 to 7: ')\n print(\n \"\"\"\nGreat! Now decide amongst yourselves the order you all will be playing in (who will be Player 1 through 7).\n\"\"\"\n )\n time.sleep(1)\n return int(num)\n\n\ndef create_players(num):\n players_list = []\n for i in range(num):\n name = input(f'Player {i + 1}, what is your name? ')\n while name == '':\n name = input('Please enter your name: ')\n players_list.append(people.Player(name, 1000))\n print(\n '\\nAll players will begin the game with the same amount of $1,000 dollars.\\n'\n )\n return players_list\n\n\ndef deal(dealer, players):\n for player in players[:-1]:\n if not player.check_broke():\n dealer.deal_card(player)\n dealer.deal_card(players[-1])\n\n\ndef place_bets(players):\n print('Now, each of you must place your bets.\\n')\n bets = []\n for player in players[:-1]:\n if not player.check_broke():\n bet = input(f'Bet for {player.name}: ')\n while not bet.isdigit() or int(bet) > player.money:\n msg = 'Please enter a whole number: '\n if bet.isdigit():\n msg = (\n \"You don't have enough money! Enter a different value: \"\n )\n bet = input(msg)\n player.bet = int(bet)\n print()\n\n\ndef view_hands(players):\n print('Here are the hands for each player: \\n')\n for p in players:\n if isinstance(p, people.Dealer):\n print(f'{p.name}: [{p.hand[0][0]}, ?]', end='')\n print()\n elif not p.check_broke():\n print(f'{p.name}: {p.hand}', end='')\n if p.check_blackjack():\n print(f' ==> BLACKJACK!!! 
-- {p.name} wins ${p.bet}!')\n else:\n print()\n print()\n\n\ndef do_decision(player, dealer, hand_index=0):\n choices_dict = {'s': stand, 'h': hit, 'p': split, 'd': double_down}\n valid_choice = False\n while not valid_choice:\n choice = input(\n f'{player.name}, what do you want to do (s: stand, h: hit, p: split, d: double down): '\n )\n while choice.lower() not in choices_dict.keys():\n choice = input(\n \"Please enter either 's', 'h', 'p', or 'd', corresponding to your choice: \"\n )\n valid_choice = choices_dict.get(choice)(player, dealer, hand_index)\n\n\ndef cycle_decisions(players):\n dealer = players[-1]\n for p in players:\n if isinstance(p, people.Dealer):\n print(\n f\"{p.name} will hit until reaching a hand of at least 'hard' 17 (without an ace counting for 11).\"\n )\n sys.stdout.flush()\n time.sleep(0.8)\n if not check_status(p) and not p.check_hard_17():\n hit(p, dealer)\n sys.stdout.flush()\n time.sleep(0.5)\n disp_str_slow('\\nEnd-of-Round Earnings: \\n', 0.05)\n if p.check_bust():\n for i in players[:-1]:\n if not i.check_broke():\n sys.stdout.flush()\n time.sleep(0.5)\n print(' ', end='')\n for j in range(0, len(i.hand)):\n if not i.check_bust(j):\n print(f'{i.name} wins ${i.bet}! ', end='')\n i.money += i.bet\n else:\n print(f'{i.name} loses ${i.bet}! ', end='')\n i.money -= i.bet\n i.chips = chip.convert_to_chips(i.money)\n if i.check_broke():\n print(\n f\"Sorry {i.name}, but you're out of money and can no longer play in this game\"\n )\n else:\n print(\n f'Current Balance: ${i.money} (Chips: {i.chips})'\n )\n else:\n for i in players[:-1]:\n if not i.check_broke():\n sys.stdout.flush()\n time.sleep(0.5)\n print(' ', end='')\n for j in range(0, len(i.hand)):\n if not i.check_bust(j):\n if i.hand_value(j) > p.hand_value():\n print(f'{i.name} wins ${i.bet}! ', end='')\n i.money += i.bet\n elif i.hand_value(j) < p.hand_value():\n print(f'{i.name} loses ${i.bet}! ', end='')\n i.money -= i.bet\n else:\n print(\n f'{i.name} tied with the {p.name}! No change. '\n , end='')\n else:\n print(f'{i.name} loses ${i.bet}! ', end='')\n i.money -= i.bet\n i.chips = chip.convert_to_chips(i.money)\n if i.check_broke():\n print(\n f\"Sorry {i.name}, but you're out of money and can no longer play in this game\"\n )\n else:\n print(\n f'Current Balance: ${i.money} (Chips: {i.chips})'\n )\n sys.stdout.flush()\n time.sleep(0.5)\n elif not p.check_blackjack() and not p.check_broke():\n do_decision(p, dealer)\n\n\ndef stand(player, dealer, hand_index=0):\n print(f'{player.name} stands.\\n')\n return True\n\n\ndef hit(player, dealer, hand_index=0):\n dealer.deal_card(player, hand_index)\n done = check_status(player, hand_index)\n if isinstance(player, people.Dealer):\n while not player.check_hard_17() and not done:\n time.sleep(0.5)\n dealer.deal_card(player, hand_index)\n done = check_status(player, hand_index)\n else:\n choice = ''\n if not done:\n choice = input(\"Do you want to hit again ('y' or 'n')? \").lower()\n while choice != 'y' and choice != 'n':\n choice = input(\"Enter either 'y' or 'n': \")\n while choice == 'y' and not done:\n dealer.deal_card(player, hand_index)\n done = check_status(player, hand_index)\n if not done:\n choice = input(\"Do you want to hit again ('y' or 'n')? \"\n ).lower()\n while choice != 'y' and choice != 'n':\n choice = input(\"Enter either 'y' or 'n': \")\n if not done:\n print()\n return True\n\n\ndef split(player, dealer, hand_index=0):\n if player.hand[hand_index][0] != player.hand[hand_index][1]:\n print(\n \"You can't split on that hand! 
You need two identical cards to split. Choose again.\"\n )\n return False\n elif player.bet * 2 > player.money:\n print(\n f\"You don't have enough money to split with your current bet (${player.bet} * 2 = ${player.bet * 2})! Choose again.\"\n )\n return False\n hands = [[player.hand[hand_index][0]], [player.hand[hand_index][1]]]\n player.hand = hands\n print('Now you will play each hand separately: \\n')\n for i in range(0, 2):\n print(f'For Hand #{i + 1}: ')\n do_decision(player, dealer, i)\n return True\n\n\ndef double_down(player, dealer, hand_index=0):\n if player.bet * 2 > player.money:\n print(\n f\"You don't have enough money to do that (${player.bet} * 2 = ${player.bet * 2})! Choose again.\"\n )\n return False\n elif player.did_double_down:\n print('You can double down only once! Choose a different option.')\n return False\n player.bet *= 2\n player.did_double_down = True\n print(f'Bet increased to ${player.bet}!.')\n do_decision(player, dealer, hand_index)\n return True\n\n\ndef check_status(player, hand_index=0):\n done = False\n hand_string = '['\n for card in player.hand[hand_index][:-1]:\n hand_string += card.__str__() + ', '\n print(f'Current Hand: {hand_string}', end='')\n sys.stdout.flush()\n time.sleep(0.5)\n disp_str_slow(f'{player.hand[hand_index][-1].__str__()}]', 0.05)\n time.sleep(0.5)\n if player.check_blackjack(hand_index):\n disp_str_slow(' ==> BLACKJACK!!! ', 0.05)\n if not isinstance(player, people.Dealer):\n disp_str_slow(f'-- {player.name} wins ${player.bet}!', 0.05)\n print('\\n\\n', end='')\n done = True\n sys.stdout.flush()\n time.sleep(0.5)\n elif player.check_bust(hand_index):\n disp_str_slow(' ==> BUST! ', 0.05)\n if not isinstance(player, people.Dealer):\n disp_str_slow(f'-- {player.name} loses ${player.bet}!', 0.05)\n print('\\n\\n', end='')\n done = True\n sys.stdout.flush()\n time.sleep(0.5)\n else:\n print()\n return done\n\n\ndef play_again(players):\n print()\n all_broke = True\n for i in players:\n if not i.check_broke():\n all_broke = False\n if not all_broke:\n choice = input(\n \"Do you all want to play another round? 
Enter 'y' or 'n': \").lower(\n )\n while choice != 'y' and choice != 'n':\n choice = input(\"Enter either 'y' or 'n': \")\n print()\n return choice\n else:\n print()\n return 'n'\n\n\ndef reset(players):\n dealer = players[-1]\n for player in players:\n dealer.retrieve_cards(player)\n player.bet = 0\n\n\ndef display_accounts(players):\n for player in players[:-1]:\n change = player.money - player.initial_money\n word = 'gain'\n if change < 0:\n word = 'loss'\n print(\n f\"\"\" {player.name}: ${player.money} (Chips: {player.chips}), net {word} of ${abs(change)}\n\"\"\"\n )\n sys.stdout.flush()\n time.sleep(0.5)\n\n\ndef disp_str_slow(phrase, t):\n for i in phrase:\n print(i, end='')\n sys.stdout.flush()\n time.sleep(t)\n\n\ndef print_players(players):\n for player in players:\n print(player)\n\n\ndef main():\n display_instructions()\n num_players = get_num_players()\n players = create_players(num_players)\n dealer = people.Dealer(Deck(6))\n players.append(dealer)\n replay_choice = 'y'\n while replay_choice == 'y':\n reset(players)\n place_bets(players)\n for i in range(0, 2):\n deal(dealer, players)\n view_hands(players)\n cycle_decisions(players)\n replay_choice = play_again(players)\n print(\n \"\"\"------------------------------------------------------------------------------------------------\n\"\"\"\n )\n disp_str_slow('FINAL PLAYER ACCOUNTS\\n\\n', 0.05)\n sys.stdout.flush()\n time.sleep(0.5)\n display_accounts(players)\n sys.stdout.flush()\n time.sleep(0.2)\n print(\n \"\"\"------------------------------------------------------------------------------------------------\n\"\"\"\n )\n print('Goodbye!')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from card import Card;\r\nfrom deck import Deck;\r\nimport people;\r\nimport chip;\r\nimport sys;\r\nimport time;\r\n\r\ndef display_instructions() :\r\n print('\\nInstructions: The objective of this game is to obtain a hand of cards whose value is as close to 21 ');\r\n print('as possible without going over. The numbered cards have the value of their number, face cards have ');\r\n print('a value of 10 each, and the ace can either be counted as 1 or 11 (player\\'s choice)\\n');\r\n print('Each round of the game begins with each player placing a bet. Then, the dealer passes out two cards to ');\r\n print('each player (up to 7 players) and to the dealer. The player\\'s cards will be face up while one of the ');\r\n print('dealer\\'s cards will be face down. Then, each player will choose to either hit, stand, split, or double down: \\n');\r\n print(' Hit: when a player \\'hits,\\' he or she is dealt another card. A player can hit as many ');\r\n print(' times as wanted, up until the player busts (goes over 21). \\n');\r\n print(' Stand: To \\'stand\\' means to stay with the current cards. \\n');\r\n print(' Split: A player can \\'split\\' only when the first two cards of his or her hand are the ');\r\n print(' same. When this occurs, the player makes two separate piles, one with each ');\r\n print(' identical card, and places a bet identical to the initial bet for the second ');\r\n print(' pile. Then, the player can hit or stand with each pile as in a normal round.\\n');\r\n print(' Double Down: When a player chooses to \\'double down\\', he or she can increase the current bet ');\r\n print(' by 100% in exchange for agreeing to stand after being dealt one more card.\\n');\r\n input('Ready to play? Hit any key to continue: ');\r\n print();\r\n \r\ndef get_num_players() :\r\n num = input('How many people will be playing (up to 7)? Enter a number: ');\r\n while not num.isdigit() or int(num) < 1 or int(num) > 7:\r\n num = input('Please enter a number from 1 to 7: ');\r\n print('\\nGreat! Now decide amongst yourselves the order you all will be playing in (who will be Player 1 through 7).\\n');\r\n time.sleep(1);\r\n return int(num);\r\n \r\ndef create_players(num) :\r\n players_list = [];\r\n for i in range(num) :\r\n name = input(f'Player {i+1}, what is your name? ');\r\n while name == '':\r\n name = input('Please enter your name: ');\r\n players_list.append(people.Player(name, 1000));\r\n print('\\nAll players will begin the game with the same amount of $1,000 dollars.\\n');\r\n return players_list;\r\n \r\ndef deal(dealer, players) :\r\n for player in players[:-1] : \r\n if not player.check_broke() : dealer.deal_card(player);\r\n dealer.deal_card(players[-1]); # dealer deals card to dealer, too\r\n \r\ndef place_bets(players) :\r\n print('Now, each of you must place your bets.\\n');\r\n bets = [];\r\n for player in players[:-1] : # doesn't reach dealer\r\n if not player.check_broke() :\r\n bet = input(f'Bet for {player.name}: ');\r\n while not bet.isdigit() or int(bet) > player.money :\r\n msg = 'Please enter a whole number: ';\r\n if bet.isdigit() :\r\n msg = 'You don\\'t have enough money! 
Enter a different value: ';\r\n bet = input(msg);\r\n player.bet = int(bet);\r\n print(); \r\n \r\ndef view_hands(players) :\r\n print('Here are the hands for each player: \\n');\r\n for p in players :\r\n if isinstance(p, people.Dealer) :\r\n print(f'{p.name}: [{p.hand[0][0]}, ?]', end='');\r\n print();\r\n else :\r\n if not p.check_broke() :\r\n print(f'{p.name}: {p.hand}', end='');\r\n if p.check_blackjack() :\r\n print(f' ==> BLACKJACK!!! -- {p.name} wins ${p.bet}!');\r\n else : print();\r\n print();\r\n \r\ndef do_decision(player, dealer, hand_index=0) :\r\n choices_dict = {'s':stand, 'h':hit, 'p':split, 'd':double_down};\r\n valid_choice = False;\r\n while not valid_choice :\r\n choice = input(f'{player.name}, what do you want to do (s: stand, h: hit, p: split, d: double down): ');\r\n while choice.lower() not in choices_dict.keys() :\r\n choice = input('Please enter either \\'s\\', \\'h\\', \\'p\\', or \\'d\\', corresponding to your choice: ');\r\n valid_choice = choices_dict.get(choice)(player, dealer, hand_index);\r\n \r\ndef cycle_decisions(players) :\r\n dealer = players[-1];\r\n for p in players :\r\n if isinstance(p, people.Dealer) :\r\n print(f'{p.name} will hit until reaching a hand of at least \\'hard\\' 17 (without an ace counting for 11).');\r\n sys.stdout.flush();\r\n time.sleep(0.8);\r\n if not check_status(p) and not p.check_hard_17() : hit(p, dealer);\r\n sys.stdout.flush();\r\n time.sleep(0.5);\r\n disp_str_slow('\\nEnd-of-Round Earnings: \\n', 0.05);\r\n if p.check_bust() :\r\n for i in players[:-1] :\r\n if not i.check_broke() :\r\n sys.stdout.flush();\r\n time.sleep(0.5);\r\n print(' ', end='');\r\n for j in range(0,len(i.hand)) : # this is to loop through each hand for a player (player would have multiple hands after splitting)\r\n if not i.check_bust(j) :\r\n print(f'{i.name} wins ${i.bet}! ', end='');\r\n i.money += i.bet;\r\n else :\r\n print(f'{i.name} loses ${i.bet}! ', end='');\r\n i.money -= i.bet;\r\n i.chips = chip.convert_to_chips(i.money);\r\n if i.check_broke() :\r\n print(f'Sorry {i.name}, but you\\'re out of money and can no longer play in this game');\r\n else :\r\n print(f'Current Balance: ${i.money} (Chips: {i.chips})');\r\n else :\r\n for i in players[:-1] :\r\n if not i.check_broke() :\r\n sys.stdout.flush();\r\n time.sleep(0.5);\r\n print(' ', end='');\r\n for j in range(0,len(i.hand)) :\r\n if not i.check_bust(j) :\r\n if i.hand_value(j) > p.hand_value() :\r\n print(f'{i.name} wins ${i.bet}! ', end='');\r\n i.money += i.bet;\r\n elif i.hand_value(j) < p.hand_value() :\r\n print(f'{i.name} loses ${i.bet}! ', end='');\r\n i.money -= i.bet;\r\n else :\r\n print(f'{i.name} tied with the {p.name}! No change. ', end='');\r\n else :\r\n print(f'{i.name} loses ${i.bet}! 
', end='');\r\n i.money -= i.bet;\r\n i.chips = chip.convert_to_chips(i.money);\r\n if i.check_broke() :\r\n print(f'Sorry {i.name}, but you\\'re out of money and can no longer play in this game');\r\n else :\r\n print(f'Current Balance: ${i.money} (Chips: {i.chips})');\r\n sys.stdout.flush();\r\n time.sleep(0.5);\r\n else :\r\n if not p.check_blackjack() and not p.check_broke() :\r\n do_decision(p, dealer);\r\n \r\ndef stand(player, dealer, hand_index=0) :\r\n print(f'{player.name} stands.\\n');\r\n return True;\r\n \r\ndef hit(player, dealer, hand_index=0) :\r\n dealer.deal_card(player, hand_index);\r\n done = check_status(player, hand_index);\r\n if isinstance(player, people.Dealer) :\r\n while not player.check_hard_17() and not done:\r\n time.sleep(0.5);\r\n dealer.deal_card(player, hand_index);\r\n done = check_status(player, hand_index);\r\n else :\r\n \r\n choice = '';\r\n if not done :\r\n choice = input('Do you want to hit again (\\'y\\' or \\'n\\')? ').lower();\r\n while choice != 'y' and choice != 'n' :\r\n choice = input('Enter either \\'y\\' or \\'n\\': ');\r\n while choice == 'y' and not done:\r\n dealer.deal_card(player, hand_index);\r\n done = check_status(player, hand_index);\r\n if not done :\r\n choice = input('Do you want to hit again (\\'y\\' or \\'n\\')? ').lower();\r\n while choice != 'y' and choice != 'n' :\r\n choice = input('Enter either \\'y\\' or \\'n\\': ');\r\n if not done : print();\r\n return True;\r\n \r\ndef split(player, dealer, hand_index=0) :\r\n if player.hand[hand_index][0] != player.hand[hand_index][1] :\r\n print('You can\\'t split on that hand! You need two identical cards to split. Choose again.');\r\n return False;\r\n elif player.bet*2 > player.money :\r\n print(f'You don\\'t have enough money to split with your current bet (${player.bet} * 2 = ${player.bet*2})! Choose again.');\r\n return False;\r\n hands = [[player.hand[hand_index][0]], [player.hand[hand_index][1]]];\r\n player.hand = hands;\r\n print('Now you will play each hand separately: \\n');\r\n for i in range(0,2) :\r\n print(f'For Hand #{i+1}: ');\r\n do_decision(player, dealer, i); \r\n return True;\r\n \r\n \r\ndef double_down(player, dealer, hand_index=0) :\r\n if player.bet*2 > player.money :\r\n print(f'You don\\'t have enough money to do that (${player.bet} * 2 = ${player.bet*2})! Choose again.');\r\n return False;\r\n elif player.did_double_down :\r\n print('You can double down only once! Choose a different option.');\r\n return False;\r\n player.bet *= 2;\r\n player.did_double_down = True;\r\n print(f'Bet increased to ${player.bet}!.');\r\n do_decision(player, dealer, hand_index);\r\n return True;\r\n \r\ndef check_status(player, hand_index=0) :\r\n done = False;\r\n hand_string = '[';\r\n for card in player.hand[hand_index][:-1] :\r\n hand_string += card.__str__() + ', ';\r\n print(f'Current Hand: {hand_string}', end='');\r\n sys.stdout.flush();\r\n time.sleep(0.5);\r\n disp_str_slow(f'{player.hand[hand_index][-1].__str__()}]', 0.05);\r\n time.sleep(0.5);\r\n if player.check_blackjack(hand_index) :\r\n disp_str_slow(' ==> BLACKJACK!!! ', 0.05);\r\n if not isinstance(player, people.Dealer) : \r\n disp_str_slow(f'-- {player.name} wins ${player.bet}!', 0.05);\r\n print('\\n\\n', end='');\r\n done = True;\r\n sys.stdout.flush();\r\n time.sleep(0.5);\r\n elif player.check_bust(hand_index) :\r\n disp_str_slow(' ==> BUST! 
', 0.05);\r\n if not isinstance(player, people.Dealer) : \r\n disp_str_slow(f'-- {player.name} loses ${player.bet}!', 0.05);\r\n print('\\n\\n', end='');\r\n done = True;\r\n sys.stdout.flush();\r\n time.sleep(0.5);\r\n else :\r\n print();\r\n return done;\r\n \r\ndef play_again(players) :\r\n print();\r\n all_broke = True;\r\n for i in players :\r\n if not i.check_broke() : all_broke = False;\r\n if not all_broke :\r\n choice = input('Do you all want to play another round? Enter \\'y\\' or \\'n\\': ').lower();\r\n while choice != 'y' and choice != 'n' :\r\n choice = input('Enter either \\'y\\' or \\'n\\': ');\r\n print();\r\n return choice;\r\n else :\r\n print();\r\n return 'n';\r\n \r\ndef reset(players) :\r\n dealer = players[-1];\r\n for player in players : \r\n dealer.retrieve_cards(player);\r\n player.bet = 0;\r\n \r\ndef display_accounts(players) :\r\n for player in players[:-1] :\r\n change = player.money - player.initial_money;\r\n word = 'gain';\r\n if change < 0 : \r\n word = 'loss';\r\n print(f' {player.name}: ${player.money} (Chips: {player.chips}), net {word} of ${abs(change)}\\n');\r\n sys.stdout.flush();\r\n time.sleep(0.5);\r\n \r\ndef disp_str_slow(phrase, t) :\r\n for i in phrase :\r\n print(i, end='');\r\n sys.stdout.flush();\r\n time.sleep(t);\r\n\r\ndef print_players(players) :\r\n for player in players :\r\n print(player);\r\n\r\ndef main() :\r\n display_instructions();\r\n num_players = get_num_players();\r\n players = create_players(num_players);\r\n dealer = people.Dealer(Deck(6));\r\n players.append(dealer);\r\n \r\n replay_choice = 'y';\r\n while replay_choice == 'y' :\r\n reset(players);\r\n place_bets(players);\r\n for i in range(0,2) :\r\n deal(dealer, players);\r\n view_hands(players); \r\n cycle_decisions(players);\r\n replay_choice = play_again(players); \r\n \r\n print('------------------------------------------------------------------------------------------------\\n');\r\n disp_str_slow('FINAL PLAYER ACCOUNTS\\n\\n', 0.05);\r\n sys.stdout.flush();\r\n time.sleep(0.5)\r\n display_accounts(players);\r\n sys.stdout.flush(); \r\n time.sleep(0.2)\r\n print('------------------------------------------------------------------------------------------------\\n');\r\n print('Goodbye!');\r\n \r\nif __name__ == '__main__' :\r\n main();",
"step-ids": [
7,
19,
20,
21,
22
]
}
|
[
7,
19,
20,
21,
22
] |
from template.db import Database
from template.query import Query
import os
'''
READ ME!!
    Before using this demo, be sure that TAIL_CONST is set to a value high enough
    to guarantee that all updates are contained within the same block.
config.py -> TAIL_CONST = 4
This program is meant to run sequentially through all parts starting with an empty ECS165
directory.
'''
db = Database()
db.open("ECS165")
print(db)
g_table = db.get_table('Grades')
q = Query(g_table)
print("Merge Start")
q.table.merge(0)
print("Merge End")
db.close()
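
# A minimal sketch of the config assumption from the docstring above; only the
# TAIL_CONST name and value come from the text, and the flat-constant layout of
# template/config.py is an assumption.
#
#   # template/config.py
#   TAIL_CONST = 4  # high enough that all of this demo's updates fit in one block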
|
normal
|
{
"blob_id": "8f5b7711d913c7375d6816dd94731f1ce5ca1a62",
"index": 8289,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndb.open('ECS165')\nprint(db)\n<mask token>\nprint('Merge Start')\nq.table.merge(0)\nprint('Merge End')\ndb.close()\n",
"step-3": "<mask token>\ndb = Database()\ndb.open('ECS165')\nprint(db)\ng_table = db.get_table('Grades')\nq = Query(g_table)\nprint('Merge Start')\nq.table.merge(0)\nprint('Merge End')\ndb.close()\n",
"step-4": "from template.db import Database\nfrom template.query import Query\nimport os\n<mask token>\ndb = Database()\ndb.open('ECS165')\nprint(db)\ng_table = db.get_table('Grades')\nq = Query(g_table)\nprint('Merge Start')\nq.table.merge(0)\nprint('Merge End')\ndb.close()\n",
"step-5": "from template.db import Database\r\nfrom template.query import Query\r\nimport os\r\n\r\n'''\r\nREAD ME!!\r\n Before using this demo, be sure that the Tail_Const is set to a value high enough\r\n to guaranteed that all updates are contained within the same block.\r\n config.py -> TAIL_CONST = 4\r\n\r\n This program is meant to run sequentially through all parts starting with an empty ECS165\r\n directory.\r\n'''\r\ndb = Database()\r\ndb.open(\"ECS165\")\r\nprint(db)\r\ng_table = db.get_table('Grades')\r\nq = Query(g_table)\r\n\r\nprint(\"Merge Start\")\r\nq.table.merge(0)\r\nprint(\"Merge End\")\r\n\r\ndb.close()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class SmartChineseAnalyzer:
    def __init__(self, stopwords=None):
        self.stopwords = stopwords

    def create_components(self, filename):
        tokenizer = SentenceTokenize(filename)
        result = WordTokenFilter(tokenizer)
        result = PorterStemFilter(result)
        if self.stopwords:
            result = StopFilter(result, self.stopwords)
        return TokenStreamComponents(tokenizer, result)
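
# Hypothetical usage sketch (assumes the tokenizer/filter classes above are
# importable from the surrounding project; the stopword set and filename are
# made up for illustration):
#
#   analyzer = SmartChineseAnalyzer(stopwords={"的", "了"})
#   components = analyzer.create_components("corpus.txt")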
|
normal
|
{
"blob_id": "e486e0ab91a8f5671435f5bbcf5340a62a970d3a",
"index": 8670,
"step-1": "<mask token>\n",
"step-2": "class SmartChineseAnalyzer:\n <mask token>\n <mask token>\n",
"step-3": "class SmartChineseAnalyzer:\n <mask token>\n\n def create_components(self, filename):\n if self.stopwords:\n result = StopFilter(result, self.stopwords)\n return TokenStreamComponents(tokenizer, result)\n",
"step-4": "class SmartChineseAnalyzer:\n\n def __init__(self):\n pass\n\n def create_components(self, filename):\n if self.stopwords:\n result = StopFilter(result, self.stopwords)\n return TokenStreamComponents(tokenizer, result)\n",
"step-5": "class SmartChineseAnalyzer:\n def __init__(self):\n pass\n\n def create_components(self, filename):\n #tokenizer = SentenceTokenize(filename)\n #result = WordTokenFilter(tokenizer)\n #result = PorterStemFilter(result)\n \n if self.stopwords:\n result = StopFilter(result, self.stopwords)\n return TokenStreamComponents(tokenizer, result)\n\n\n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
"""
Add requests application (adding and managing add-requests)
"""
from flask import Blueprint
__author__ = 'Xomak'
add_requests = Blueprint('addrequests', __name__, template_folder='templates')
from . import routes
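
# Hedged sketch of registering this blueprint on an application; the app
# factory and url_prefix below are assumptions, only `add_requests` itself
# comes from this module.
#
#   from flask import Flask
#
#   def create_app():
#       app = Flask(__name__)
#       app.register_blueprint(add_requests, url_prefix='/add-requests')
#       return app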
|
normal
|
{
"blob_id": "d39965c3070ec25230b4d6977ff949b3db070ab6",
"index": 7399,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__author__ = 'Xomak'\nadd_requests = Blueprint('addrequests', __name__, template_folder='templates')\n<mask token>\n",
"step-3": "<mask token>\nfrom flask import Blueprint\n__author__ = 'Xomak'\nadd_requests = Blueprint('addrequests', __name__, template_folder='templates')\nfrom . import routes\n",
"step-4": "\"\"\"\nAdd requests application (adding and managing add-requests)\n\"\"\"\n\nfrom flask import Blueprint\n\n__author__ = 'Xomak'\n\nadd_requests = Blueprint('addrequests', __name__, template_folder='templates', )\n\nfrom . import routes",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.db import models
# Create your models here.
class Airlines(models.Model):
    flight_number = models.CharField(max_length=8, unique=True)
    airlines_id = models.CharField(max_length=10)
    source = models.CharField(max_length=20)
    destination = models.CharField(max_length=20)
    departure = models.TimeField()
    arrival = models.TimeField()
    base_price = models.DecimalField(decimal_places=2, max_digits=10)

    def __str__(self):
        return self.flight_number


class Users(models.Model):
    user_id = models.CharField(max_length=16)
    email = models.EmailField(max_length=50, unique=True)
    password = models.CharField(max_length=20)
    phone_number = models.IntegerField()
    gender = models.CharField(max_length=10)

    def __str__(self):
        return self.email


class Bookings(models.Model):
    booking_id = models.AutoField(primary_key=True)
    email = models.ForeignKey(Users, on_delete=models.CASCADE)
    flight_num = models.ForeignKey(Airlines, on_delete=models.CASCADE, default='00000', editable=True)
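
# Hedged usage sketch of how these models relate at query time (Django ORM;
# the literal values are illustrative, not from the original):
#
#   flight = Airlines.objects.get(flight_number='AI202')
#   user = Users.objects.get(email='[email protected]')
#   booking = Bookings.objects.create(email=user, flight_num=flight)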
|
normal
|
{
"blob_id": "e57b30a7a1cf987918abfb3cb7d612bdead2ddcd",
"index": 406,
"step-1": "<mask token>\n\n\nclass Bookings(models.Model):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Airlines(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Users(models.Model):\n user_id = models.CharField(max_length=16)\n email = models.EmailField(max_length=50, unique=True)\n password = models.CharField(max_length=20)\n phone_number = models.IntegerField()\n gender = models.CharField(max_length=10)\n\n def __str__(self):\n return self.email\n\n\nclass Bookings(models.Model):\n booking_id = models.AutoField(primary_key=True)\n email = models.ForeignKey(Users, on_delete=models.CASCADE)\n flight_num = models.ForeignKey(Airlines, on_delete=models.CASCADE,\n default='00000', editable=True)\n",
"step-3": "<mask token>\n\n\nclass Airlines(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.flight_number\n\n\nclass Users(models.Model):\n user_id = models.CharField(max_length=16)\n email = models.EmailField(max_length=50, unique=True)\n password = models.CharField(max_length=20)\n phone_number = models.IntegerField()\n gender = models.CharField(max_length=10)\n\n def __str__(self):\n return self.email\n\n\nclass Bookings(models.Model):\n booking_id = models.AutoField(primary_key=True)\n email = models.ForeignKey(Users, on_delete=models.CASCADE)\n flight_num = models.ForeignKey(Airlines, on_delete=models.CASCADE,\n default='00000', editable=True)\n",
"step-4": "from django.db import models\n\n\nclass Airlines(models.Model):\n flight_number = models.CharField(max_length=8, unique=True)\n airlines_id = models.CharField(max_length=10)\n source = models.CharField(max_length=20)\n destination = models.CharField(max_length=20)\n departure = models.TimeField()\n arrival = models.TimeField()\n base_price = models.DecimalField(decimal_places=2, max_digits=10)\n\n def __str__(self):\n return self.flight_number\n\n\nclass Users(models.Model):\n user_id = models.CharField(max_length=16)\n email = models.EmailField(max_length=50, unique=True)\n password = models.CharField(max_length=20)\n phone_number = models.IntegerField()\n gender = models.CharField(max_length=10)\n\n def __str__(self):\n return self.email\n\n\nclass Bookings(models.Model):\n booking_id = models.AutoField(primary_key=True)\n email = models.ForeignKey(Users, on_delete=models.CASCADE)\n flight_num = models.ForeignKey(Airlines, on_delete=models.CASCADE,\n default='00000', editable=True)\n",
"step-5": "from django.db import models\n\n# Create your models here.\nclass Airlines(models.Model):\n\tflight_number=models.CharField(max_length=8,unique=True)\n\tairlines_id=models.CharField(max_length=10)\n\tsource=models.CharField(max_length=20)\n\tdestination=models.CharField(max_length=20)\n\tdeparture=models.TimeField()\n\tarrival=models.TimeField()\n\tbase_price=models.DecimalField(decimal_places=2,max_digits=10)\n\n\tdef __str__(self):\n\t\treturn self.flight_number\n\nclass Users(models.Model):\n\tuser_id=models.CharField(max_length=16)\n\temail=models.EmailField(max_length=50,unique=True)\n\tpassword=models.CharField(max_length=20)\n\tphone_number=models.IntegerField()\n\tgender=models.CharField(max_length=10)\n\tdef __str__(self):\n\t\treturn self.email\n\nclass Bookings(models.Model):\n\tbooking_id=models.AutoField(primary_key=True)\n\temail=models.ForeignKey(Users,on_delete=models.CASCADE)\n\tflight_num=models.ForeignKey(Airlines,on_delete=models.CASCADE,default='00000',editable=True)\n\n",
"step-ids": [
1,
6,
7,
9,
10
]
}
|
[
1,
6,
7,
9,
10
] |
import os.path
class State:
def __init__(self):
self.states=[]
self.actions=[]
class Candidate:
def __init__(self,height,lines,holes,bump,fit):
self.heightWeight = height
self.linesWeight = lines
self.holesWeight = holes
self.bumpinessWeight = bump
self.fitness = fit
def __str__(self):
return "%f , %f , %f , %f, %f " % (self.heightWeight, self.linesWeight, self.holesWeight, self.bumpinessWeight, self.fitness)
if __name__ == "__main__":
    s = Candidate(0.0, 0.0, 0.0, 0.0, 0.0)
    with open("gen4.txt", "a") as f:
        f.write(str(s) + "\n")
    print(s)
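
# For context, a hedged sketch of how such candidates are typically ranked in a
# genetic-algorithm loop; the selection step below is an assumption, nothing in
# the original specifies it.
def select_top(candidates, k):
    # Rank candidates by fitness, best first, and keep the top k.
    return sorted(candidates, key=lambda c: c.fitness, reverse=True)[:k]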
|
normal
|
{
"blob_id": "94100d0253ee82513fe024b2826e6182f852db48",
"index": 2349,
"step-1": "import os.path\nclass State:\n\n\n def __init__(self):\n self.states=[]\n self.actions=[]\n\n\n\nclass Candidate:\n\n def __init__(self,height,lines,holes,bump,fit):\n\n self.heightWeight = height\n self.linesWeight = lines\n self.holesWeight = holes\n self.bumpinessWeight = bump\n self.fitness = fit\n\n def __str__(self):\n return \"%f , %f , %f , %f, %f \" % (self.heightWeight, self.linesWeight, self.holesWeight, self.bumpinessWeight, self.fitness)\n\nif __name__==\"__main__\":\n s = Candidate(None,None,None,None,None)\n file = open(\"gen4.txt\", \"a\")\n\n print naming_file(2)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from tdm.lib.device import DddDevice, DeviceAction, DeviceWHQuery, Validity
class CallJohnDevice(DddDevice):
    class MakeCall(DeviceAction):
        def perform(self, select_contact, select_number):
            contact = self.device.CONTACTS.get(select_contact)
            #Resolve the requested number type for this contact; the original
            #consulted CONTACTS here, which maps names to ids, not numbers.
            number = (self.device.PHONE_NUMBERS.get(select_contact) or {}).get(select_number)
            return True
class contact_lookup(DeviceWHQuery):
def perform(self, select_contact, select_number):
#print("Looking up {}".format(select_contact))
number = self.device.PHONE_NUMBERS.get(select_contact).get(select_number)
#print(number)
return [number]
    class PhoneNumberAvailable(Validity):
        def is_valid(self, select_contact):
            #A contact is valid only if it has a stored set of phone numbers
            return self.device.PHONE_NUMBERS.get(select_contact) is not None
JOHN = "contact_john"
LISA = "contact_lisa"
MARY = "contact_mary"
ANDY = "contact_andy"
MOBILE = "mobile"
WORK = "work"
HOME = "home"
PHONE_NUMBERS = {
JOHN: {
MOBILE: "0701234567",
WORK: "0736582934",
HOME: "031122363"
},
LISA: {
MOBILE: "0709876543",
WORK: "0763559230",
HOME: "031749205"
},
MARY: {
MOBILE: "0706574839",
WORK: "0784736475",
HOME: "031847528"
},
ANDY: None
}
CONTACTS = {
"John": JOHN,
"Lisa": LISA,
"Mary": MARY,
"Andy": ANDY,
}
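
# Quick standalone check of the lookup tables above (plain Python, outside the
# TDM framework; the example values come directly from the dicts):
#
#   contact_id = CallJohnDevice.CONTACTS["Lisa"]               # "contact_lisa"
#   number = CallJohnDevice.PHONE_NUMBERS[contact_id]["work"]  # "0763559230"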
|
normal
|
{
"blob_id": "1dd235ecfe577b508d0777e8c70026114aeb154f",
"index": 6648,
"step-1": "from tdm.lib.device import DddDevice, DeviceAction, DeviceWHQuery, Validity\r\n\r\n\r\nclass CallJohnDevice(DddDevice):\r\n\r\n class MakeCall(DeviceAction):\r\n def perform(self, select_contact, select_number):\r\n contact = self.device.CONTACTS.get(select_contact)\r\n number_type = self.device.CONTACTS.get(select_number)\r\n return True\r\n\r\n class contact_lookup(DeviceWHQuery):\r\n def perform(self, select_contact, select_number):\r\n #print(\"Looking up {}\".format(select_contact))\r\n number = self.device.PHONE_NUMBERS.get(select_contact).get(select_number)\r\n #print(number)\r\n\t return [number]\r\n\r\n class PhoneNumberAvailable(Validity):\r\n def is_valid(self, select_contact):\r\n #print(self.device.CONTACTS.values())\r\n if self.device.PHONE_NUMBERS.get(select_contact) == None:\r\n #print(\"{} is not in contacts\".format(select_contact))\r\n\t\treturn False\r\n else:\r\n #print(\"{} is in contacts\".format(select_contact))\r\n return True\r\n\r\n JOHN = \"contact_john\"\r\n LISA = \"contact_lisa\"\r\n MARY = \"contact_mary\"\r\n ANDY = \"contact_andy\"\r\n\r\n MOBILE = \"mobile\"\r\n WORK = \"work\"\r\n HOME = \"home\"\r\n\r\n PHONE_NUMBERS = {\r\n JOHN: {\r\n MOBILE: \"0701234567\",\r\n WORK: \"0736582934\",\r\n HOME: \"031122363\"\r\n },\r\n LISA: {\r\n MOBILE: \"0709876543\",\r\n WORK: \"0763559230\",\r\n HOME: \"031749205\"\r\n },\r\n MARY: {\r\n MOBILE: \"0706574839\",\r\n WORK: \"0784736475\",\r\n HOME: \"031847528\"\r\n },\r\n ANDY: None\r\n }\r\n\r\n CONTACTS = {\r\n \"John\": JOHN,\r\n \"Lisa\": LISA,\r\n \"Mary\": MARY,\r\n \"Andy\": ANDY,\r\n }\r\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#Author: Abeer Rafiq
#Modified: 11/23/2019 3:00pm
#Importing Packages
import socket, sys, time, json, sqlite3
import RPi.GPIO as GPIO
from datetime import datetime, date
#Creating a global server class
class GlobalServer:
#The constructor
def __init__(self, port, room_ip_addrs,
app_ip_addrs):
#Setting port
self.__port = int(port)
#Setting socket to receive
self.__soc_recv = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
recv_address = ('', self.__port)
self.__soc_recv.bind(recv_address)
#Setting socket/addresses to send to the room rpi and app
self.__soc_send = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.__room_addrs = (room_ip_addrs, self.__port)
self.__app_addrs = (app_ip_addrs, self.__port)
#Setting up led blinking
self.__receiveLED = 14
self.__sendLED = 15
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(self.__receiveLED, GPIO.OUT)
GPIO.setup(self.__sendLED, GPIO.OUT)
        #Setting up string for acknowledgements (valid JSON, since receivers parse it with json.loads)
        self.__ackstr = '{"opcode": "0"}'
#Setting database connections
dbpath = '/home/pi/Documents/Team_Project/dataBases/plantNursery_DB.db'
self.__dbconnect = sqlite3.connect(dbpath);
self.__dbconnect.row_factory = sqlite3.Row;
self.__cursor = self.__dbconnect.cursor()
#Setting up default threshold variables
self.__defaultThresholdValue = 80
self.__defaultLessGreaterThan = "<"
self.__lightThreshold = self.__defaultThresholdValue
self.__lightLessGreaterThan = self.__defaultLessGreaterThan
self.__soilMoistureThreshold = self.__defaultThresholdValue
self.__soilMoistureLessGreaterThan = self.__defaultLessGreaterThan
self.__roomHumidityThreshold = self.__defaultThresholdValue
self.__roomHumidityLessGreaterThan = self.__defaultLessGreaterThan
self.__roomTemperatureThreshold = self.__defaultThresholdValue
self.__roomTemperatureLessGreaterThan = self.__defaultLessGreaterThan
self.__currentLight = 0
self.__currentSoilMoisture = 0
self.__currentWaterDistance = 0
self.__currentRoomHumidity = 0
self.__currentRoomTemperature = 0
self.__waterPumpDuration = 2
#Setting timeout/end time values
self.__ack_timeout = 1
self.__ack_endTime = 4
print("\nGlobal Server Initialized")
#To blink a pin once
def blink(self, pin):
GPIO.output(pin,GPIO.HIGH)
time.sleep(1)
GPIO.output(pin,GPIO.LOW)
return
#Receives/returns buffer and sends ack
def receive(self):
#Receiving
print("\nWaiting to receive on port %d ... " % self.__port)
buf, address = self.__soc_recv.recvfrom(self.__port)
if(len(buf) > 0):
#Blink receive Led
self.blink(self.__receiveLED)
print ("Received %s bytes from '%s': %s " % (len(buf), address[0], buf))
#Sending ack
self.__soc_send.sendto(self.__ackstr, (address[0], self.__port))
#Blink send Led
self.blink(self.__sendLED)
print ("Sent %s to %s" % (self.__ackstr, (address[0], self.__port)))
#Give time for the ack sent to be acknowledged
time.sleep(self.__ack_endTime)
return buf
else:
return False
#To insert data into the database
def insertDBData(self, mySQL):
#Try inserting data to database table
try:
#Insert data
self.__cursor.execute(mySQL)
self.__dbconnect.commit();
        except sqlite3.Error as e:
#If error, exit program
print ('\nDatabase Error %s:' % e.args[0])
self.__soc_recv.shutdown(1)
self.__soc_send.shutdown(1)
self.__cursor.close()
sys.exit(1)
return
#To add default threshold entries into the db
def setDefaultThresholds(self, potID):
potID = str(potID)
tdate = str(date.today())
ttime = str(datetime.now().strftime("%H:%M:%S"))
#Insert default thresholds into db
mySQL = "INSERT INTO userThresholds VALUES ('" + potID + "', 'light', '" + \
str(self.__defaultThresholdValue) + "', '" + self.__defaultLessGreaterThan + \
"', '" + tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
mySQL = "INSERT INTO userThresholds VALUES ('" + potID + "', 'soilMoisture', '" + \
str(self.__defaultThresholdValue) + "', '" + self.__defaultLessGreaterThan + \
"', '" + tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
mySQL = "INSERT INTO userThresholds VALUES ('" + potID + "', 'roomTemperature', '" + \
str(self.__defaultThresholdValue) + "', '" + self.__defaultLessGreaterThan + \
"', '" + tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
mySQL = "INSERT INTO userThresholds VALUES ('" + potID + "', 'roomHumidity', '" + \
str(self.__defaultThresholdValue) + "', '" + self.__defaultLessGreaterThan + \
"', '" + tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
print("\nSet Default Thresholds")
return
#To add user requested threshold entries into the db
def updateUserThresholdsTable(self, threshold):
potID = str(threshold.get("potID"))
lessGreaterThan = str(threshold.get("lessGreaterThan"))
thresholdValue = float(str(threshold.get("thresholdValue")))
sensorType = str(threshold.get("sensorType"))
tdate = str(date.today())
ttime = str(datetime.now().strftime("%H:%M:%S"))
#Insert thresholds into db
mySQL = "INSERT INTO userThresholds VALUES ('" + potID + "', '" + sensorType + "', '" + str(thresholdValue) + \
"', '" + lessGreaterThan + "', '" + str(tdate) + "', '" + str(ttime) + "')"
self.insertDBData(mySQL)
#Reassign global server's instance threshold variables
if sensorType == "light":
self.__lightThreshold = thresholdValue
self.__lightLessGreaterThan = lessGreaterThan
elif sensorType == "soilMoisture":
self.__soilMoistureThreshold = thresholdValue
self.__soilMoistureLessGreaterThan = lessGreaterThan
        elif sensorType == "roomTemperature":
            self.__roomTemperatureThreshold = thresholdValue
            self.__roomTemperatureLessGreaterThan = lessGreaterThan
        elif sensorType == "roomHumidity":
            self.__roomHumidityThreshold = thresholdValue
            self.__roomHumidityLessGreaterThan = lessGreaterThan
print("\nSet User Requested Thresholds")
return
#To update user data in userPlantsTable
def updateUserPlantsTable(self, userInfo):
potID = str(userInfo.get('potID'))
roomID = str(userInfo.get('roomID'))
ownerID = str(userInfo.get('ownerID'))
#Inserting user data into db
mySQL = "INSERT INTO userPlants VALUES ('" + potID + "', '" + roomID + "', '" + ownerID + "')"
self.insertDBData(mySQL)
print("\nUpdated User Data")
return
#To update notes in userNotesTable
def updateUserNotesTable(self, userNotes):
potID = str(userNotes.get('potID'))
notes = str(userNotes.get('notes'))
tdate = str(date.today())
ttime = str(datetime.now().strftime("%H:%M:%S"))
#Inserting notes into db
mySQL = "INSERT INTO userNotes VALUES ('" + potID + "', '" + notes + "', '" + tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
print("\nUpdated Notes Data")
return
#To update pot data in db
    def updatePotTable(self, sensorInfo, tdate, ttime):
potID = sensorInfo.get('potID')
self.__currentWaterDistance = sensorInfo.get('waterDistance')
self.__currentLight = sensorInfo.get('light')
self.__currentSoilMoisture = sensorInfo.get('soilMoisture')
#Inserting pot data into db
mySQL = "INSERT INTO potData VALUES ('" + str(potID) + "', '" + str(self.__currentLight)+ "', '" + \
str(self.__currentSoilMoisture) + "', '" + str(self.__currentWaterDistance) + "', '" + \
tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
print("\nUpdated Pot Data")
return
#To update room data in db
    def updateRoomTable(self, sensorInfo, tdate, ttime):
self.__currentRoomTemperature = round(sensorInfo.get('temperature'), 2)
self.__currentRoomHumidity = round(sensorInfo.get('humidity'), 2)
roomID = sensorInfo.get('roomID')
#Inserting room data into db
mySQL = "insert into roomData values ('" + str(roomID) + "', '" + str(self.__currentRoomTemperature) + \
"', '" + str(self.__currentRoomHumidity) + "' , '" + tdate + "', '" + ttime + "')"
self.insertDBData(mySQL)
print("\nUpdated Room Data")
return
#To compare current sensor data to threshold values
def checkUserThresholds(self):
        #Notification JSON strings (sent to the app when a threshold is met)
lightNotfn = '{"opcode" : "D", "sensorArray" : "1, 0, 0, 0, 0, 0, 0, 0, 0, 0"}'
roomHumidityNotfn = '{"opcode" : "D", "sensorArray" : "0, 1, 0, 0, 0, 0, 0, 0, 0, 0"}'
roomTemperatureNotfn = '{"opcode" : "D", "sensorArray" : "0, 0, 1, 0, 0, 0, 0, 0, 0, 0"}'
soilMoistureNotfn = '{"opcode" : "D", "sensorArray" : "0, 0, 0, 1, 0, 0, 0, 0, 0, 0"}'
#Tuples of sensor data to easily neatly
light = (self.__currentLight, self.__lightThreshold, self.__lightLessGreaterThan, lightNotfn)
soilMoisture = (self.__currentSoilMoisture, self.__soilMoistureThreshold, \
self.__soilMoistureLessGreaterThan, soilMoistureNotfn, self.__waterPumpDuration)
roomHumidity = (self.__currentRoomHumidity, self.__roomHumidityThreshold, \
self.__roomHumidityLessGreaterThan, roomHumidityNotfn)
roomTemperature = (self.__currentRoomTemperature, self.__roomTemperatureThreshold, \
self.__roomTemperatureLessGreaterThan, roomTemperatureNotfn)
#Combined tuples for sensors
sensorArr = [light, roomHumidity, roomTemperature, soilMoisture]
#For each sensor compare current sensor value with threshold value
        for sensor in sensorArr:
            if sensor[2] == ">":
                if sensor[0] > sensor[1]:
                    #Threshold is met, notify user
                    self.notifyApp(sensor[3])
                    if len(sensor) == 5:
                        #Soil moisture's threshold is met: start water pump, notify user
                        startPumpStr = '{"opcode" : "4", "pumpDuration" : "' + str(sensor[4]) + '"}'
                        self.startWaterPump(startPumpStr)
                        self.notifyApp(startPumpStr)
            elif sensor[2] == "<":
                if sensor[0] < sensor[1]:
                    #Threshold is met, notify user
                    self.notifyApp(sensor[3])
                    if len(sensor) == 5:
                        #Soil moisture's threshold is met: start water pump, notify user
                        startPumpStr = '{"opcode" : "4", "pumpDuration" : "' + str(sensor[4]) + '"}'
                        self.startWaterPump(startPumpStr)
                        self.notifyApp(startPumpStr)
        print("\nThresholds Compared")
return
#Send room rpi msg to start water pump
def startWaterPump(self, startPump):
if (self.send_Room_Msg(startPump) == False):
#If no ack received, send msg again
print("\nStart Water Pump sent again to server")
self.startWaterPump(startPump)
return
#To send msgs to the room and wait for ack
def send_Room_Msg(self, message):
self.__soc_send.sendto(message, self.__room_addrs)
#Blink send LED
self.blink(self.__sendLED)
print("\Message sent to Room: " + message)
#Should be receiving an ack so timeout if no ack received
        self.__soc_recv.settimeout(self.__ack_timeout)
startTime = time.time()
endTime = self.__ack_endTime
while (True):
#If less than a endTime amount of time
if time.time() < (startTime + endTime):
try:
#Try Receving otherwise timeout and retry
print("Waiting for Acknowledgement . . .")
                    buf, address = self.__soc_recv.recvfrom(self.__port)
except socket.timeout:
print("Receiving is Timed Out")
#Restart while loop (Retry)
continue
try:
#If buf is received, try to load it
buf = json.loads(buf)
if not len(buf):
#No ack received, retry
continue
else:
if (buf.get("opcode") == "0"):
#Ack recevied!
print("Acknowledgement Received")
return True
else:
#No ack received, retry
continue
except (ValueError, KeyError, TypeError):
#Ack not received, try again
continue
else:
#Failed to receive ack within a endTime amount of time
return False
return
    #To send notification msgs to the app
def notifyApp(self, message):
if (self.send_App_Msg(message) == False):
#If no ack received, send msg again
print("\nNotification sent again to server")
self.notifyApp(message)
return
#To send msgs to the app and wait for ack
def send_App_Msg(self, message):
self.__soc_send.sendto(message, self.__app_addrs)
#Blink send LED
self.blink(self.__sendLED)
print("\nNotifcation sent to App: " + message)
#Should be receiving an ack so timeout if no ack received
        self.__soc_recv.settimeout(self.__ack_timeout)
startTime = time.time()
endTime = self.__ack_endTime
while (True):
#If less than a endTime amount of time
if time.time() < (startTime + endTime):
try:
#Try Receving otherwise timeout and retry
print("Waiting for Acknowledgement . . .")
                    buf, address = self.__soc_recv.recvfrom(self.__port)
except socket.timeout:
print("Receiving is Timed Out")
#Restart while loop (Retry)
continue
try:
#If buf is received, try to load it
buf = json.loads(buf)
if not len(buf):
#No ack received, retry
continue
else:
if (buf.get("opcode") == "0"):
#Ack recevied!
print("Acknowledgement Received")
return True
else:
#No ack received, retry
continue
except (ValueError, KeyError, TypeError):
#Ack not received, try again
continue
else:
#Failed to receive ack within a endTime amount of time
return False
return
#To get requested stats from the db
def get_stats(self, rowNumbers, sensors):
#Try retrieving data from the database
try:
#Retrieve Data
sensors = sensors.replace('"',"").replace("'","").replace('[',"").replace(']',"")
mysql = """SELECT """ + sensors + """, tdate, ttime FROM (
SELECT * FROM userPlants a
INNER JOIN potData b
ON a.potID = b.potID
INNER JOIN roomData c
ON a.roomID = c.roomID AND b.tdate = c.tdate AND b.ttime = c.ttime
ORDER BY c.tdate DESC, c.ttime DESC LIMIT """ + str(rowNumbers) + """)"""
myresult = self.__cursor.execute(mysql).fetchall()
        except sqlite3.Error as e:
            #If error, exit program
            print('\nDatabase Error %s:' % e.args[0])
            sys.exit(1)
#Convert data into json format
stats = json.dumps( [dict(i) for i in myresult] )
print("\nData Retreived from DB")
return stats
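    # Example of the JSON shape returned by get_stats above (illustrative
    # values only; the columns depend on the `sensors` argument):
    #   [{"light": 55, "soilMoisture": 40, "tdate": "2019-11-23", "ttime": "15:00:00"}]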
#To send the stats with the corresponding opcode
def send_stats(self, rowNumbers, sensors):
        if rowNumbers == '0':
            #0 means to send app just one most recent row of data (opcode E)
            oneRow = self.get_stats(1, sensors)
            stats = '{"opcode" : "E", "statsArray" : "' + str(oneRow) + '"}'
        else:
            #Otherwise send multiple recent rows of data (opcode 6)
            manyRows = self.get_stats(rowNumbers, sensors)
            stats = '{"opcode" : "6", "statsArray" : "' + str(manyRows) + '"}'
        #Send stats to App
        #If ack received return
        if (self.send_App_Msg(stats) == True):
            print("\nStats sent to app")
        else:
            #If no ack received, try sending again
            print("\nStats sent again to app (notify again)")
            self.send_stats(rowNumbers, sensors)
return
#Main function which receives json data and invokes methods based on opcode received
def main():
#Create GlobalServer object (port, room_ip_addrs, app_ip_addrs)
globalServer = GlobalServer(1000, '192.168.1.47',
'192.168.137.102')
while True:
message = globalServer.receive()
if (message == False):
#If length of buffer is <1
continue
else:
message = json.loads(message)
#User wants to update notes table
if (message.get('opcode') == "1"):
globalServer.updateUserNotesTable(message)
#User wants to add a pot with a room and owner
if (message.get('opcode') == "2"):
globalServer.updateUserPlantsTable(message)
#Set default thresholds for that potID
globalServer.setDefaultThresholds(message.get("potID"))
#If user wants to set thresholds to requested ones
if (message.get('opcode') == "3"):
globalServer.updateUserThresholdsTable(message)
#If user wants to view stats
if (message.get('opcode') == "5"):
rowNumbers = message.get("rowNumbers")
sensors = message.get("sensorType")
globalServer.send_stats(rowNumbers, sensors)
#If an error has occured in the room rpi or arduino
if (message.get('opcode') == "D"):
globalServer.notifyApp(str(message))
#If room rpi sent all sensory data, update tables, compare values to thresholds as well
if (message.get('opcode') == "9"):
tdate = str(date.today())
ttime = str(datetime.now().strftime("%H:%M:%S"))
globalServer.updateRoomTable(message, tdate, ttime)
globalServer.updatePotTable(message, tdate, ttime)
globalServer.checkUserThresholds()
    #Note: the loop above never exits, and the sockets and cursor belong to the
    #GlobalServer instance, so no module-level cleanup is done here
    return
if __name__ == "__main__":
main()
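
# For reference, hedged examples of the JSON messages this server expects,
# reconstructed from the opcode handling above (field names come from the
# handlers; the values are illustrative):
#
#   opcode '3' (set a user threshold; fields from updateUserThresholdsTable):
#     {"opcode": "3", "potID": "1", "sensorType": "light",
#      "thresholdValue": "60", "lessGreaterThan": "<"}
#   opcode '5' (request stats; fields from main's opcode-5 branch):
#     {"opcode": "5", "rowNumbers": "10", "sensorType": "light, soilMoisture"}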
|
normal
|
{
"blob_id": "7ce679d5b889493f278de6deca6ec6bdb7acd3f5",
"index": 910,
"step-1": "#Author: Abeer Rafiq\n#Modified: 11/23/2019 3:00pm\n\n#Importing Packages\nimport socket, sys, time, json, sqlite3\nimport RPi.GPIO as GPIO\nfrom datetime import datetime, date\n\n#Creating a global server class\nclass GlobalServer:\n #The constructor\n def __init__(self, port, room_ip_addrs,\n app_ip_addrs):\n #Setting port\n self.__port = int(port)\n #Setting socket to receive\n self.__soc_recv = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n recv_address = ('', self.__port)\n self.__soc_recv.bind(recv_address)\n #Setting socket/addresses to send to the room rpi and app\n self.__soc_send = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n self.__room_addrs = (room_ip_addrs, self.__port)\n self.__app_addrs = (app_ip_addrs, self.__port)\n #Setting up led blinking\n self.__receiveLED = 14\n self.__sendLED = 15\n GPIO.setmode(GPIO.BCM)\n GPIO.setwarnings(False)\n GPIO.setup(self.__receiveLED, GPIO.OUT)\n GPIO.setup(self.__sendLED, GPIO.OUT)\n #Setting up string for acknowldegements\n self.__ackstr = \"{'opcode':'0'}\"\n #Setting database connections\n dbpath = '/home/pi/Documents/Team_Project/dataBases/plantNursery_DB.db'\n self.__dbconnect = sqlite3.connect(dbpath); \n self.__dbconnect.row_factory = sqlite3.Row;\n self.__cursor = self.__dbconnect.cursor() \n #Setting up default threshold variables\n self.__defaultThresholdValue = 80\n self.__defaultLessGreaterThan = \"<\"\n self.__lightThreshold = self.__defaultThresholdValue\n self.__lightLessGreaterThan = self.__defaultLessGreaterThan\n self.__soilMoistureThreshold = self.__defaultThresholdValue\n self.__soilMoistureLessGreaterThan = self.__defaultLessGreaterThan\n self.__roomHumidityThreshold = self.__defaultThresholdValue\n self.__roomHumidityLessGreaterThan = self.__defaultLessGreaterThan\n self.__roomTemperatureThreshold = self.__defaultThresholdValue\n self.__roomTemperatureLessGreaterThan = self.__defaultLessGreaterThan\n self.__currentLight = 0\n self.__currentSoilMoisture = 0\n self.__currentWaterDistance = 0\n self.__currentRoomHumidity = 0\n self.__currentRoomTemperature = 0\n self.__waterPumpDuration = 2\n #Setting timeout/end time values\n self.__ack_timeout = 1\n self.__ack_endTime = 4\n print(\"\\nGlobal Server Initialized\")\n \n #To blink a pin once\n def blink(self, pin):\n GPIO.output(pin,GPIO.HIGH)\n time.sleep(1)\n GPIO.output(pin,GPIO.LOW)\n return\n \n #Receives/returns buffer and sends ack \n def receive(self):\n #Receiving\n print(\"\\nWaiting to receive on port %d ... 
\" % self.__port)\n buf, address = self.__soc_recv.recvfrom(self.__port)\n if(len(buf) > 0):\n #Blink receive Led\n self.blink(self.__receiveLED)\n print (\"Received %s bytes from '%s': %s \" % (len(buf), address[0], buf))\n #Sending ack\n self.__soc_send.sendto(self.__ackstr, (address[0], self.__port))\n #Blink send Led\n self.blink(self.__sendLED)\n print (\"Sent %s to %s\" % (self.__ackstr, (address[0], self.__port)))\n #Give time for the ack sent to be acknowledged\n time.sleep(self.__ack_endTime)\n return buf\n else:\n return False\n \n #To insert data into the database\n def insertDBData(self, mySQL):\n #Try inserting data to database table\n try:\n #Insert data\n self.__cursor.execute(mySQL)\n self.__dbconnect.commit();\n except sqlite3.Error, e:\n #If error, exit program \n print ('\\nDatabase Error %s:' % e.args[0])\n self.__soc_recv.shutdown(1)\n self.__soc_send.shutdown(1)\n self.__cursor.close()\n sys.exit(1)\n return\n \n #To add default threshold entries into the db\n def setDefaultThresholds(self, potID):\n potID = str(potID)\n tdate = str(date.today())\n ttime = str(datetime.now().strftime(\"%H:%M:%S\"))\n #Insert default thresholds into db\n mySQL = \"INSERT INTO userThresholds VALUES ('\" + potID + \"', 'light', '\" + \\\n str(self.__defaultThresholdValue) + \"', '\" + self.__defaultLessGreaterThan + \\\n \"', '\" + tdate + \"', '\" + ttime + \"')\" \n self.insertDBData(mySQL)\n mySQL = \"INSERT INTO userThresholds VALUES ('\" + potID + \"', 'soilMoisture', '\" + \\\n str(self.__defaultThresholdValue) + \"', '\" + self.__defaultLessGreaterThan + \\\n \"', '\" + tdate + \"', '\" + ttime + \"')\" \n self.insertDBData(mySQL)\n mySQL = \"INSERT INTO userThresholds VALUES ('\" + potID + \"', 'roomTemperature', '\" + \\\n str(self.__defaultThresholdValue) + \"', '\" + self.__defaultLessGreaterThan + \\\n \"', '\" + tdate + \"', '\" + ttime + \"')\" \n self.insertDBData(mySQL)\n mySQL = \"INSERT INTO userThresholds VALUES ('\" + potID + \"', 'roomHumidity', '\" + \\\n str(self.__defaultThresholdValue) + \"', '\" + self.__defaultLessGreaterThan + \\\n \"', '\" + tdate + \"', '\" + ttime + \"')\" \n self.insertDBData(mySQL)\n print(\"\\nSet Default Thresholds\")\n return\n \n #To add user requested threshold entries into the db\n def updateUserThresholdsTable(self, threshold):\n potID = str(threshold.get(\"potID\"))\n lessGreaterThan = str(threshold.get(\"lessGreaterThan\"))\n thresholdValue = float(str(threshold.get(\"thresholdValue\")))\n sensorType = str(threshold.get(\"sensorType\"))\n tdate = str(date.today())\n ttime = str(datetime.now().strftime(\"%H:%M:%S\"))\n #Insert thresholds into db\n mySQL = \"INSERT INTO userThresholds VALUES ('\" + potID + \"', '\" + sensorType + \"', '\" + str(thresholdValue) + \\\n \"', '\" + lessGreaterThan + \"', '\" + str(tdate) + \"', '\" + str(ttime) + \"')\" \n self.insertDBData(mySQL)\n #Reassign global server's instance threshold variables\n if sensorType == \"light\":\n self.__lightThreshold = thresholdValue\n self.__lightLessGreaterThan = lessGreaterThan\n elif sensorType == \"soilMoisture\":\n self.__soilMoistureThreshold = thresholdValue \n self.__soilMoistureLessGreaterThan = lessGreaterThan\n elif sensorType == \"roomTemperature\":\n self.__roomHumidityThreshold = thresholdValue\n self.__roomHumidityLessGreaterThan = lessGreaterThan\n elif sensorType == \"roomHumidity\":\n self.__roomTemperatureThreshold = thresholdValue\n self.__roomTemperatureLessGreaterThan = lessGreaterThan\n print(\"\\nSet User Requested Thresholds\")\n 
return\n\n #To update user data in userPlantsTable\n def updateUserPlantsTable(self, userInfo):\n potID = str(userInfo.get('potID'))\n roomID = str(userInfo.get('roomID'))\n ownerID = str(userInfo.get('ownerID'))\n #Inserting user data into db\n mySQL = \"INSERT INTO userPlants VALUES ('\" + potID + \"', '\" + roomID + \"', '\" + ownerID + \"')\" \n self.insertDBData(mySQL)\n print(\"\\nUpdated User Data\")\n return\n \n #To update notes in userNotesTable\n def updateUserNotesTable(self, userNotes):\n potID = str(userNotes.get('potID'))\n notes = str(userNotes.get('notes'))\n tdate = str(date.today())\n ttime = str(datetime.now().strftime(\"%H:%M:%S\"))\n #Inserting notes into db\n mySQL = \"INSERT INTO userNotes VALUES ('\" + potID + \"', '\" + notes + \"', '\" + tdate + \"', '\" + ttime + \"')\"\n self.insertDBData(mySQL)\n print(\"\\nUpdated Notes Data\")\n return\n \n #To update pot data in db\n def updatePotTable(self, sensorInfo, tdate, time):\n potID = sensorInfo.get('potID')\n self.__currentWaterDistance = sensorInfo.get('waterDistance')\n self.__currentLight = sensorInfo.get('light')\n self.__currentSoilMoisture = sensorInfo.get('soilMoisture')\n #Inserting pot data into db\n mySQL = \"INSERT INTO potData VALUES ('\" + str(potID) + \"', '\" + str(self.__currentLight)+ \"', '\" + \\\n str(self.__currentSoilMoisture) + \"', '\" + str(self.__currentWaterDistance) + \"', '\" + \\\n tdate + \"', '\" + ttime + \"')\" \n self.insertDBData(mySQL)\n print(\"\\nUpdated Pot Data\")\n return\n \n #To update room data in db\n def updateRoomTable(self, sensorInfo,tdate, time):\n self.__currentRoomTemperature = round(sensorInfo.get('temperature'), 2)\n self.__currentRoomHumidity = round(sensorInfo.get('humidity'), 2)\n roomID = sensorInfo.get('roomID')\n #Inserting room data into db\n mySQL = \"insert into roomData values ('\" + str(roomID) + \"', '\" + str(self.__currentRoomTemperature) + \\\n \"', '\" + str(self.__currentRoomHumidity) + \"' , '\" + tdate + \"', '\" + ttime + \"')\" \n self.insertDBData(mySQL)\n print(\"\\nUpdated Room Data\")\n return\n\n\n #To compare current sensor data to threshold values\n def checkUserThresholds(self):\n #Notification json #Should be receiving an ack so timeout if no ack receivedstrings\n lightNotfn = '{\"opcode\" : \"D\", \"sensorArray\" : \"1, 0, 0, 0, 0, 0, 0, 0, 0, 0\"}' \n roomHumidityNotfn = '{\"opcode\" : \"D\", \"sensorArray\" : \"0, 1, 0, 0, 0, 0, 0, 0, 0, 0\"}'\n roomTemperatureNotfn = '{\"opcode\" : \"D\", \"sensorArray\" : \"0, 0, 1, 0, 0, 0, 0, 0, 0, 0\"}'\n soilMoistureNotfn = '{\"opcode\" : \"D\", \"sensorArray\" : \"0, 0, 0, 1, 0, 0, 0, 0, 0, 0\"}'\n #Tuples of sensor data to easily neatly\n light = (self.__currentLight, self.__lightThreshold, self.__lightLessGreaterThan, lightNotfn)\n soilMoisture = (self.__currentSoilMoisture, self.__soilMoistureThreshold, \\\n self.__soilMoistureLessGreaterThan, soilMoistureNotfn, self.__waterPumpDuration)\n roomHumidity = (self.__currentRoomHumidity, self.__roomHumidityThreshold, \\\n self.__roomHumidityLessGreaterThan, roomHumidityNotfn)\n roomTemperature = (self.__currentRoomTemperature, self.__roomTemperatureThreshold, \\\n self.__roomTemperatureLessGreaterThan, roomTemperatureNotfn)\n #Combined tuples for sensors\n sensorArr = [light, roomHumidity, roomTemperature, soilMoisture]\n #For each sensor compare current sensor value with threshold value\n for sensor in sensorArr:\n if sensor[2] == \">\":\n if sensor[0] > sensor[1]:\n #Threshold is met, notify user\n notifyApp(sensor[3])\n if(len(sensor) 
== 4):\n #Soil moisture's threshold is met, then start water pump, notify user\n startPumpStr = '{\"opcode\" : \"4\", \"pumpDuration\" : \"' + str(sensor[4]) + '\"}'\n startWaterPump(startPumpStr) \n notifyApp(startPumpStr) \n elif sensor[2] == \"<\":\n if sensor[0] < sensor[1]:\n #Threshold is met, notify user\n notifyApp(sensor[3])\n if(length(sensor) == 4):\n #Soil moisture's threshold is met, then start water pump, notify user\n startPumpStr = '{\"opcode\" : \"4\", \"pumpDuration\" : \"' + str(sensor[4]) + '\"}'\n startWaterPump(startPumpStr) \n notifyApp(startPumpStr) \n print(\"\\Thresholds Compared\")\n return\n \n #Send room rpi msg to start water pump\n def startWaterPump(self, startPump):\n if (self.send_Room_Msg(startPump) == False):\n #If no ack received, send msg again\n print(\"\\nStart Water Pump sent again to server\")\n self.startWaterPump(startPump)\n return\n \n #To send msgs to the room and wait for ack\n def send_Room_Msg(self, message):\n self.__soc_send.sendto(message, self.__room_addrs)\n #Blink send LED\n self.blink(self.__sendLED)\n print(\"\\Message sent to Room: \" + message)\n #Should be receiving an ack so timeout if no ack received\n soc_recv.settimeout(self.__ack_timeout)\n startTime = time.time()\n endTime = self.__ack_endTime\n while (True):\n #If less than a endTime amount of time\n if time.time() < (startTime + endTime):\n try:\n #Try Receving otherwise timeout and retry\n print(\"Waiting for Acknowledgement . . .\")\n buf, address = soc_recv.recvfrom(self.__port)\n except socket.timeout:\n print(\"Receiving is Timed Out\")\n #Restart while loop (Retry)\n continue\n try:\n #If buf is received, try to load it\n buf = json.loads(buf)\n if not len(buf):\n #No ack received, retry\n continue\n else:\n if (buf.get(\"opcode\") == \"0\"):\n #Ack recevied!\n print(\"Acknowledgement Received\")\n return True\n else:\n #No ack received, retry\n continue\n except (ValueError, KeyError, TypeError):\n #Ack not received, try again\n continue\n else:\n #Failed to receive ack within a endTime amount of time\n return False\n return\n \n #To notifcations msgs to the app\n def notifyApp(self, message):\n if (self.send_App_Msg(message) == False):\n #If no ack received, send msg again\n print(\"\\nNotification sent again to server\")\n self.notifyApp(message)\n return\n \n #To send msgs to the app and wait for ack\n def send_App_Msg(self, message):\n self.__soc_send.sendto(message, self.__app_addrs)\n #Blink send LED\n self.blink(self.__sendLED)\n print(\"\\nNotifcation sent to App: \" + message)\n #Should be receiving an ack so timeout if no ack received\n soc_recv.settimeout(self.__ack_timeout)\n startTime = time.time()\n endTime = self.__ack_endTime\n while (True):\n #If less than a endTime amount of time\n if time.time() < (startTime + endTime):\n try:\n #Try Receving otherwise timeout and retry\n print(\"Waiting for Acknowledgement . . 
.\")\n buf, address = soc_recv.recvfrom(self.__port)\n except socket.timeout:\n print(\"Receiving is Timed Out\")\n #Restart while loop (Retry)\n continue\n try:\n #If buf is received, try to load it\n buf = json.loads(buf)\n if not len(buf):\n #No ack received, retry\n continue\n else:\n if (buf.get(\"opcode\") == \"0\"):\n #Ack recevied!\n print(\"Acknowledgement Received\")\n return True\n else:\n #No ack received, retry\n continue\n except (ValueError, KeyError, TypeError):\n #Ack not received, try again\n continue\n else:\n #Failed to receive ack within a endTime amount of time\n return False\n return\n \n #To get requested stats from the db\n def get_stats(self, rowNumbers, sensors):\n #Try retrieving data from the database\n try:\n #Retrieve Data\n sensors = sensors.replace('\"',\"\").replace(\"'\",\"\").replace('[',\"\").replace(']',\"\")\n mysql = \"\"\"SELECT \"\"\" + sensors + \"\"\", tdate, ttime FROM (\n SELECT * FROM userPlants a\n INNER JOIN potData b\n ON a.potID = b.potID \n INNER JOIN roomData c \n ON a.roomID = c.roomID AND b.tdate = c.tdate AND b.ttime = c.ttime\n ORDER BY c.tdate DESC, c.ttime DESC LIMIT \"\"\" + str(rowNumbers) + \"\"\")\"\"\"\n myresult = self.__cursor.execute(mysql).fetchall()\n except sqlite3.Error, e:\n #If error, exit program \n print '\\nDatabase Error %s:' % e.args[0]\n sys.exit(1)\n #Convert data into json format\n stats = json.dumps( [dict(i) for i in myresult] )\n print(\"\\nData Retreived from DB\")\n return stats\n \n #To send the stats with the corresponding opcode\n def send_stats(self, rowNumbers, sensors):\n if rowNumbers == '0':\n #0 means to send app just one most recent row of data (opcode E)\n oneRow = globalServer.get_stats(1, sensors)\n stats = '{\"opcode\" : \"E\", \"statsArray\" : \"' + str(oneRow) + '\"}'\n else:\n #Otherwise send mutiple recent rows of data (opcode 6)\n manyRows = globalServer.get_stats(rowNumbers, sensors)\n stats = '{\"opcode\" : \"6\", \"statsArray\" : \"' + str(manyRows) + '\"}'\n #Send stats to App\n #If ack received return\n if (self.send_notifyApp(error) == True):\n print(\"\\nStats sent to app\")\n else:\n #If no ack received, try sending again\n print(\"\\nStats sent again to app (notify again)\")\n self.send_stats(rowNumbers, sensors)\n return\n\n#Main function which receives json data and invokes methods based on opcode received\ndef main():\n #Create GlobalServer object (port, room_ip_addrs, app_ip_addrs)\n globalServer = GlobalServer(1000, '192.168.1.47',\n '192.168.137.102')\n while True:\n message = globalServer.receive()\n if (message == False):\n #If length of buffer is <1\n continue\n else:\n message = json.loads(message)\n #User wants to update notes table\n if (message.get('opcode') == \"1\"):\n globalServer.updateUserNotesTable(message)\n #User wants to add a pot with a room and owner\n if (message.get('opcode') == \"2\"): \n globalServer.updateUserPlantsTable(message)\n #Set default thresholds for that potID\n globalServer.setDefaultThresholds(message.get(\"potID\"))\n #If user wants to set thresholds to requested ones\n if (message.get('opcode') == \"3\"): \n globalServer.updateUserThresholdsTable(message)\n #If user wants to view stats\n if (message.get('opcode') == \"5\"):\n rowNumbers = message.get(\"rowNumbers\")\n sensors = message.get(\"sensorType\")\n globalServer.send_stats(rowNumbers, sensors)\n #If an error has occured in the room rpi or arduino\n if (message.get('opcode') == \"D\"): \n globalServer.notifyApp(str(message))\n #If room rpi sent all sensory data, update tables, 
compare values to thresholds as well\n if (message.get('opcode') == \"9\"): \n tdate = str(date.today())\n ttime = str(datetime.now().strftime(\"%H:%M:%S\"))\n globalServer.updateRoomTable(message, tdate, ttime)\n globalServer.updatePotTable(message, tdate, ttime) \n globalServer.checkUserThresholds() \n self.__soc_recv.shutdown(1)\n self.__soc_send.shutdown(1)\n self.__cursor.close()\n return\n \nif __name__== \"__main__\":\n main()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import SCons.Util
import SCons.Action
import xml.dom.minidom, re, os.path
################################################################################
# DocBook pseudobuilder
# TODO: Only generate the output formats that are known
################################################################################
def generate(env) :
def remove_doctype(target, source, env) :
f = open(str(target[0]))
output = []
for line in f.readlines() :
output.append(re.sub("^<!DOCTYPE .*", "", line))
f.close()
        f = open(str(target[0]), 'w')  # text mode: the buffered lines are str, not bytes
for line in output :
f.write(line)
f.close()
def buildDocBook(env, source) :
db_env = env.Clone()
db_env["XMLCATALOGS"] = [db_env["DOCBOOK_XML"]]
# PDF generation
fo = db_env.XSLT(os.path.splitext(source)[0] + ".fo", source,
XSLTSTYLESHEET = db_env["DOCBOOK_XSL_FO"])
pdf = db_env.FO(fo)
# HTML generation
db_env.XSLT(os.path.splitext(source)[0] + ".html", source,
XSLTSTYLESHEET = db_env["DOCBOOK_XSL_HTML"])
# WordPress generation
wp_params = [("wordpress.dir", env.get("DOCBOOK_WP_DIR", "../../wordpress"))]
wp_pdf_url = env.get("DOCBOOK_WP_PDF_URL", pdf[0].name)
if len(wp_pdf_url) > 0 :
wp_params.append(("pdf.url", wp_pdf_url))
wp_params.append(("pdf.icon", env.get("DOCBOOK_WP_PDF_ICON", "/icons/pdf.png")))
wp = db_env.XSLT(os.path.splitext(source)[0] + ".wp.php", source,
XSLTSTYLESHEET = db_env["DOCBOOK_XSL_WP"],
XSLTPARAMS = wp_params + env.get("XSLTPARAMS", []))
db_env.AddPostAction(wp, SCons.Action.Action(remove_doctype, cmdstr = "$FIXCOMSTR"))
env.AddMethod(buildDocBook, "DocBook")
def exists(env) :
return True
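
# For context, a hypothetical SConstruct sketch of how this tool might be
# wired into a build. The tool file location, catalog path, and stylesheet
# names below are placeholder assumptions, not values confirmed by this
# builder; it also assumes XSLT and FO builders are registered elsewhere
# in the environment.
#
#   env = Environment(tools=['default', 'docbook'])
#   env['DOCBOOK_XML'] = '/usr/share/xml/docbook/catalog.xml'   # assumed path
#   env['DOCBOOK_XSL_FO'] = 'stylesheets/fo.xsl'                # assumed
#   env['DOCBOOK_XSL_HTML'] = 'stylesheets/html.xsl'            # assumed
#   env['DOCBOOK_XSL_WP'] = 'stylesheets/wp.xsl'                # assumed
#   env.DocBook('manual.xml')  # yields manual.fo/.pdf, manual.html, manual.wp.php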
|
normal
|
{
"blob_id": "cae49da8dd436fc51b472c4a88703d8bc6c79bda",
"index": 427,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef generate(env):\n\n def remove_doctype(target, source, env):\n f = open(str(target[0]))\n output = []\n for line in f.readlines():\n output.append(re.sub('^<!DOCTYPE .*', '', line))\n f.close()\n f = open(str(target[0]), 'wb')\n for line in output:\n f.write(line)\n f.close()\n\n def buildDocBook(env, source):\n db_env = env.Clone()\n db_env['XMLCATALOGS'] = [db_env['DOCBOOK_XML']]\n fo = db_env.XSLT(os.path.splitext(source)[0] + '.fo', source,\n XSLTSTYLESHEET=db_env['DOCBOOK_XSL_FO'])\n pdf = db_env.FO(fo)\n db_env.XSLT(os.path.splitext(source)[0] + '.html', source,\n XSLTSTYLESHEET=db_env['DOCBOOK_XSL_HTML'])\n wp_params = [('wordpress.dir', env.get('DOCBOOK_WP_DIR',\n '../../wordpress'))]\n wp_pdf_url = env.get('DOCBOOK_WP_PDF_URL', pdf[0].name)\n if len(wp_pdf_url) > 0:\n wp_params.append(('pdf.url', wp_pdf_url))\n wp_params.append(('pdf.icon', env.get('DOCBOOK_WP_PDF_ICON',\n '/icons/pdf.png')))\n wp = db_env.XSLT(os.path.splitext(source)[0] + '.wp.php', source,\n XSLTSTYLESHEET=db_env['DOCBOOK_XSL_WP'], XSLTPARAMS=wp_params +\n env.get('XSLTPARAMS', []))\n db_env.AddPostAction(wp, SCons.Action.Action(remove_doctype, cmdstr\n ='$FIXCOMSTR'))\n env.AddMethod(buildDocBook, 'DocBook')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef generate(env):\n\n def remove_doctype(target, source, env):\n f = open(str(target[0]))\n output = []\n for line in f.readlines():\n output.append(re.sub('^<!DOCTYPE .*', '', line))\n f.close()\n f = open(str(target[0]), 'wb')\n for line in output:\n f.write(line)\n f.close()\n\n def buildDocBook(env, source):\n db_env = env.Clone()\n db_env['XMLCATALOGS'] = [db_env['DOCBOOK_XML']]\n fo = db_env.XSLT(os.path.splitext(source)[0] + '.fo', source,\n XSLTSTYLESHEET=db_env['DOCBOOK_XSL_FO'])\n pdf = db_env.FO(fo)\n db_env.XSLT(os.path.splitext(source)[0] + '.html', source,\n XSLTSTYLESHEET=db_env['DOCBOOK_XSL_HTML'])\n wp_params = [('wordpress.dir', env.get('DOCBOOK_WP_DIR',\n '../../wordpress'))]\n wp_pdf_url = env.get('DOCBOOK_WP_PDF_URL', pdf[0].name)\n if len(wp_pdf_url) > 0:\n wp_params.append(('pdf.url', wp_pdf_url))\n wp_params.append(('pdf.icon', env.get('DOCBOOK_WP_PDF_ICON',\n '/icons/pdf.png')))\n wp = db_env.XSLT(os.path.splitext(source)[0] + '.wp.php', source,\n XSLTSTYLESHEET=db_env['DOCBOOK_XSL_WP'], XSLTPARAMS=wp_params +\n env.get('XSLTPARAMS', []))\n db_env.AddPostAction(wp, SCons.Action.Action(remove_doctype, cmdstr\n ='$FIXCOMSTR'))\n env.AddMethod(buildDocBook, 'DocBook')\n\n\ndef exists(env):\n return True\n",
"step-4": "import SCons.Util\nimport xml.dom.minidom, re, os.path\n\n\ndef generate(env):\n\n def remove_doctype(target, source, env):\n f = open(str(target[0]))\n output = []\n for line in f.readlines():\n output.append(re.sub('^<!DOCTYPE .*', '', line))\n f.close()\n f = open(str(target[0]), 'wb')\n for line in output:\n f.write(line)\n f.close()\n\n def buildDocBook(env, source):\n db_env = env.Clone()\n db_env['XMLCATALOGS'] = [db_env['DOCBOOK_XML']]\n fo = db_env.XSLT(os.path.splitext(source)[0] + '.fo', source,\n XSLTSTYLESHEET=db_env['DOCBOOK_XSL_FO'])\n pdf = db_env.FO(fo)\n db_env.XSLT(os.path.splitext(source)[0] + '.html', source,\n XSLTSTYLESHEET=db_env['DOCBOOK_XSL_HTML'])\n wp_params = [('wordpress.dir', env.get('DOCBOOK_WP_DIR',\n '../../wordpress'))]\n wp_pdf_url = env.get('DOCBOOK_WP_PDF_URL', pdf[0].name)\n if len(wp_pdf_url) > 0:\n wp_params.append(('pdf.url', wp_pdf_url))\n wp_params.append(('pdf.icon', env.get('DOCBOOK_WP_PDF_ICON',\n '/icons/pdf.png')))\n wp = db_env.XSLT(os.path.splitext(source)[0] + '.wp.php', source,\n XSLTSTYLESHEET=db_env['DOCBOOK_XSL_WP'], XSLTPARAMS=wp_params +\n env.get('XSLTPARAMS', []))\n db_env.AddPostAction(wp, SCons.Action.Action(remove_doctype, cmdstr\n ='$FIXCOMSTR'))\n env.AddMethod(buildDocBook, 'DocBook')\n\n\ndef exists(env):\n return True\n",
"step-5": "import SCons.Util\nimport xml.dom.minidom, re, os.path\n\n################################################################################\n# DocBook pseudobuilder\n# TODO: Only generate the output formats that are known\n################################################################################\n\ndef generate(env) :\n def remove_doctype(target, source, env) :\n f = open(str(target[0]))\n output = []\n for line in f.readlines() :\n output.append(re.sub(\"^<!DOCTYPE .*\", \"\", line))\n f.close()\n f = open(str(target[0]), 'wb')\n for line in output :\n f.write(line)\n f.close()\n\n def buildDocBook(env, source) :\n db_env = env.Clone()\n db_env[\"XMLCATALOGS\"] = [db_env[\"DOCBOOK_XML\"]]\n\n # PDF generation\n fo = db_env.XSLT(os.path.splitext(source)[0] + \".fo\", source, \n XSLTSTYLESHEET = db_env[\"DOCBOOK_XSL_FO\"])\n pdf = db_env.FO(fo)\n\n # HTML generation\n db_env.XSLT(os.path.splitext(source)[0] + \".html\", source, \n XSLTSTYLESHEET = db_env[\"DOCBOOK_XSL_HTML\"])\n\n # WordPress generation\n wp_params = [(\"wordpress.dir\", env.get(\"DOCBOOK_WP_DIR\", \"../../wordpress\"))]\n wp_pdf_url = env.get(\"DOCBOOK_WP_PDF_URL\", pdf[0].name)\n if len(wp_pdf_url) > 0 :\n wp_params.append((\"pdf.url\", wp_pdf_url))\n wp_params.append((\"pdf.icon\", env.get(\"DOCBOOK_WP_PDF_ICON\", \"/icons/pdf.png\")))\n wp = db_env.XSLT(os.path.splitext(source)[0] + \".wp.php\", source, \n XSLTSTYLESHEET = db_env[\"DOCBOOK_XSL_WP\"],\n XSLTPARAMS = wp_params + env.get(\"XSLTPARAMS\", []))\n db_env.AddPostAction(wp, SCons.Action.Action(remove_doctype, cmdstr = \"$FIXCOMSTR\"))\n\n env.AddMethod(buildDocBook, \"DocBook\")\n \ndef exists(env) :\n return True\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
#pylint: skip-file
"""
HostApi.py
Copyright 2016 Cisco Systems
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
import urllib.request, urllib.parse, urllib.error
from .models import *
class HostApi(object):
def __init__(self, apiClient):
self.apiClient = apiClient
def getHosts(self, **kwargs):
"""Retrieve hosts
Args:
limit, str: limit (required)
offset, str: offset (required)
sortBy, str: sortBy (required)
order, str: order (required)
hostName, list[str]: hostName (required)
hostMac, list[str]: hostMac (required)
hostType, list[str]: hostType (required)
connectedInterfaceName, list[str]: connectedInterfaceName (required)
hostIp, list[str]: hostIp (required)
connectedDeviceIp, list[str]: connectedDeviceIp (required)
scope, str: Authorization Scope for RBAC (required)
Returns: HostListResult
"""
allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName', 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp', 'connectedDeviceIp', 'scope']
params = locals()
for (key, val) in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method getHosts" % key)
params[key] = val
del params['kwargs']
resourcePath = '/host'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if ('limit' in params):
queryParams['limit'] = self.apiClient.toPathValue(params['limit'])
if ('offset' in params):
queryParams['offset'] = self.apiClient.toPathValue(params['offset'])
if ('sortBy' in params):
queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy'])
if ('order' in params):
queryParams['order'] = self.apiClient.toPathValue(params['order'])
if ('hostName' in params):
queryParams['hostName'] = self.apiClient.toPathValue(params['hostName'])
if ('hostMac' in params):
queryParams['hostMac'] = self.apiClient.toPathValue(params['hostMac'])
if ('hostType' in params):
queryParams['hostType'] = self.apiClient.toPathValue(params['hostType'])
if ('connectedInterfaceName' in params):
queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(params['connectedInterfaceName'])
if ('hostIp' in params):
queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp'])
if ('connectedDeviceIp' in params):
queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(params['connectedDeviceIp'])
if ('scope' in params):
headerParams['scope'] = params['scope']
postData = (formParams if formParams else bodyParam)
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'HostListResult')
return responseObject
def getHostCount(self, **kwargs):
"""Gives total number of hosts
Args:
scope, str: Authorization Scope for RBAC (required)
Returns: CountResult
"""
allParams = ['scope']
params = locals()
for (key, val) in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method getHostCount" % key)
params[key] = val
del params['kwargs']
resourcePath = '/host/count'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if ('scope' in params):
headerParams['scope'] = params['scope']
postData = (formParams if formParams else bodyParam)
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'CountResult')
return responseObject
def getHostById(self, **kwargs):
"""Retrieves host based on id
Args:
id, str: Host Id (required)
scope, str: Authorization Scope for RBAC (required)
Returns: HostResult
"""
allParams = ['id', 'scope']
params = locals()
for (key, val) in list(params['kwargs'].items()):
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method getHostById" % key)
params[key] = val
del params['kwargs']
resourcePath = '/host/{id}'
resourcePath = resourcePath.replace('{format}', 'json')
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = {}
bodyParam = None
headerParams['Accept'] = 'application/json'
headerParams['Content-Type'] = 'application/json'
if ('scope' in params):
headerParams['scope'] = params['scope']
if ('id' in params):
replacement = str(self.apiClient.toPathValue(params['id']))
replacement = urllib.parse.quote(replacement)
resourcePath = resourcePath.replace('{' + 'id' + '}',
replacement)
postData = (formParams if formParams else bodyParam)
response = self.apiClient.callAPI(resourcePath, method, queryParams,
postData, headerParams, files=files)
if not response:
return None
responseObject = self.apiClient.deserialize(response, 'HostResult')
return responseObject
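
# A minimal usage sketch (an illustrative addition, not part of this generated
# module). The 'api_client' argument must provide the callAPI/toPathValue/
# deserialize interface that HostApi relies on above; its construction is
# assumed to happen elsewhere.
def print_host_summary(api_client, scope='ALL'):
    host_api = HostApi(api_client)
    # Total number of known hosts
    count = host_api.getHostCount(scope=scope)
    # First ten hosts, sorted by host name
    hosts = host_api.getHosts(scope=scope, limit='10', offset='1',
                              sortBy='hostName', order='asc')
    return count, hosts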
|
normal
|
{
"blob_id": "4243c863827f1378c364171ca7d8fdabd42be22f",
"index": 3625,
"step-1": "<mask token>\n\n\nclass HostApi(object):\n <mask token>\n <mask token>\n <mask token>\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n allParams = ['id', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostById\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n if 'id' in params:\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n",
"step-2": "<mask token>\n\n\nclass HostApi(object):\n\n def __init__(self, apiClient):\n self.apiClient = apiClient\n\n def getHosts(self, **kwargs):\n \"\"\"Retrieve hosts\n\n Args:\n\n limit, str: limit (required)\n\n\n offset, str: offset (required)\n\n\n sortBy, str: sortBy (required)\n\n\n order, str: order (required)\n\n\n hostName, list[str]: hostName (required)\n\n\n hostMac, list[str]: hostMac (required)\n\n\n hostType, list[str]: hostType (required)\n\n\n connectedInterfaceName, list[str]: connectedInterfaceName (required)\n\n\n hostIp, list[str]: hostIp (required)\n\n\n connectedDeviceIp, list[str]: connectedDeviceIp (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostListResult\n \"\"\"\n allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName',\n 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp',\n 'connectedDeviceIp', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHosts\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'limit' in params:\n queryParams['limit'] = self.apiClient.toPathValue(params['limit'])\n if 'offset' in params:\n queryParams['offset'] = self.apiClient.toPathValue(params['offset']\n )\n if 'sortBy' in params:\n queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy']\n )\n if 'order' in params:\n queryParams['order'] = self.apiClient.toPathValue(params['order'])\n if 'hostName' in params:\n queryParams['hostName'] = self.apiClient.toPathValue(params[\n 'hostName'])\n if 'hostMac' in params:\n queryParams['hostMac'] = self.apiClient.toPathValue(params[\n 'hostMac'])\n if 'hostType' in params:\n queryParams['hostType'] = self.apiClient.toPathValue(params[\n 'hostType'])\n if 'connectedInterfaceName' in params:\n queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(\n params['connectedInterfaceName'])\n if 'hostIp' in params:\n queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp']\n )\n if 'connectedDeviceIp' in params:\n queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(\n params['connectedDeviceIp'])\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostListResult')\n return responseObject\n <mask token>\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n allParams = ['id', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostById\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n 
headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n if 'id' in params:\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n",
"step-3": "<mask token>\n\n\nclass HostApi(object):\n\n def __init__(self, apiClient):\n self.apiClient = apiClient\n\n def getHosts(self, **kwargs):\n \"\"\"Retrieve hosts\n\n Args:\n\n limit, str: limit (required)\n\n\n offset, str: offset (required)\n\n\n sortBy, str: sortBy (required)\n\n\n order, str: order (required)\n\n\n hostName, list[str]: hostName (required)\n\n\n hostMac, list[str]: hostMac (required)\n\n\n hostType, list[str]: hostType (required)\n\n\n connectedInterfaceName, list[str]: connectedInterfaceName (required)\n\n\n hostIp, list[str]: hostIp (required)\n\n\n connectedDeviceIp, list[str]: connectedDeviceIp (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostListResult\n \"\"\"\n allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName',\n 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp',\n 'connectedDeviceIp', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHosts\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'limit' in params:\n queryParams['limit'] = self.apiClient.toPathValue(params['limit'])\n if 'offset' in params:\n queryParams['offset'] = self.apiClient.toPathValue(params['offset']\n )\n if 'sortBy' in params:\n queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy']\n )\n if 'order' in params:\n queryParams['order'] = self.apiClient.toPathValue(params['order'])\n if 'hostName' in params:\n queryParams['hostName'] = self.apiClient.toPathValue(params[\n 'hostName'])\n if 'hostMac' in params:\n queryParams['hostMac'] = self.apiClient.toPathValue(params[\n 'hostMac'])\n if 'hostType' in params:\n queryParams['hostType'] = self.apiClient.toPathValue(params[\n 'hostType'])\n if 'connectedInterfaceName' in params:\n queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(\n params['connectedInterfaceName'])\n if 'hostIp' in params:\n queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp']\n )\n if 'connectedDeviceIp' in params:\n queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(\n params['connectedDeviceIp'])\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostListResult')\n return responseObject\n\n def getHostCount(self, **kwargs):\n \"\"\"Gives total number of hosts\n\n Args:\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: CountResult\n \"\"\"\n allParams = ['scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostCount\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/count'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n 
headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'CountResult')\n return responseObject\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n allParams = ['id', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostById\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n if 'id' in params:\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n",
"step-4": "<mask token>\nimport sys\nimport os\nimport urllib.request, urllib.parse, urllib.error\nfrom .models import *\n\n\nclass HostApi(object):\n\n def __init__(self, apiClient):\n self.apiClient = apiClient\n\n def getHosts(self, **kwargs):\n \"\"\"Retrieve hosts\n\n Args:\n\n limit, str: limit (required)\n\n\n offset, str: offset (required)\n\n\n sortBy, str: sortBy (required)\n\n\n order, str: order (required)\n\n\n hostName, list[str]: hostName (required)\n\n\n hostMac, list[str]: hostMac (required)\n\n\n hostType, list[str]: hostType (required)\n\n\n connectedInterfaceName, list[str]: connectedInterfaceName (required)\n\n\n hostIp, list[str]: hostIp (required)\n\n\n connectedDeviceIp, list[str]: connectedDeviceIp (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostListResult\n \"\"\"\n allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName',\n 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp',\n 'connectedDeviceIp', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHosts\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'limit' in params:\n queryParams['limit'] = self.apiClient.toPathValue(params['limit'])\n if 'offset' in params:\n queryParams['offset'] = self.apiClient.toPathValue(params['offset']\n )\n if 'sortBy' in params:\n queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy']\n )\n if 'order' in params:\n queryParams['order'] = self.apiClient.toPathValue(params['order'])\n if 'hostName' in params:\n queryParams['hostName'] = self.apiClient.toPathValue(params[\n 'hostName'])\n if 'hostMac' in params:\n queryParams['hostMac'] = self.apiClient.toPathValue(params[\n 'hostMac'])\n if 'hostType' in params:\n queryParams['hostType'] = self.apiClient.toPathValue(params[\n 'hostType'])\n if 'connectedInterfaceName' in params:\n queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(\n params['connectedInterfaceName'])\n if 'hostIp' in params:\n queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp']\n )\n if 'connectedDeviceIp' in params:\n queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(\n params['connectedDeviceIp'])\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostListResult')\n return responseObject\n\n def getHostCount(self, **kwargs):\n \"\"\"Gives total number of hosts\n\n Args:\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: CountResult\n \"\"\"\n allParams = ['scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostCount\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/count'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = 
{}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'CountResult')\n return responseObject\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n allParams = ['id', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostById\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n if 'id' in params:\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n",
"step-5": "#!/usr/bin/env python\n#pylint: skip-file\n\"\"\"\nHostApi.py\n Copyright 2016 Cisco Systems\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n\n\"\"\"\nimport sys\nimport os\nimport urllib.request, urllib.parse, urllib.error\n\nfrom .models import *\n\n\nclass HostApi(object):\n\n def __init__(self, apiClient):\n self.apiClient = apiClient\n\n\n\n def getHosts(self, **kwargs):\n \"\"\"Retrieve hosts\n\n Args:\n\n limit, str: limit (required)\n\n\n offset, str: offset (required)\n\n\n sortBy, str: sortBy (required)\n\n\n order, str: order (required)\n\n\n hostName, list[str]: hostName (required)\n\n\n hostMac, list[str]: hostMac (required)\n\n\n hostType, list[str]: hostType (required)\n\n\n connectedInterfaceName, list[str]: connectedInterfaceName (required)\n\n\n hostIp, list[str]: hostIp (required)\n\n\n connectedDeviceIp, list[str]: connectedDeviceIp (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostListResult\n \"\"\"\n\n allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName', 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp', 'connectedDeviceIp', 'scope']\n\n params = locals()\n for (key, val) in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\"Got an unexpected keyword argument '%s' to method getHosts\" % key)\n params[key] = val\n del params['kwargs']\n\n resourcePath = '/host'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n\n\n if ('limit' in params):\n queryParams['limit'] = self.apiClient.toPathValue(params['limit'])\n\n if ('offset' in params):\n queryParams['offset'] = self.apiClient.toPathValue(params['offset'])\n\n if ('sortBy' in params):\n queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy'])\n\n if ('order' in params):\n queryParams['order'] = self.apiClient.toPathValue(params['order'])\n\n if ('hostName' in params):\n queryParams['hostName'] = self.apiClient.toPathValue(params['hostName'])\n\n if ('hostMac' in params):\n queryParams['hostMac'] = self.apiClient.toPathValue(params['hostMac'])\n\n if ('hostType' in params):\n queryParams['hostType'] = self.apiClient.toPathValue(params['hostType'])\n\n if ('connectedInterfaceName' in params):\n queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(params['connectedInterfaceName'])\n\n if ('hostIp' in params):\n queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp'])\n\n if ('connectedDeviceIp' in params):\n queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(params['connectedDeviceIp'])\n\n\n\n if ('scope' in params):\n headerParams['scope'] = params['scope']\n\n\n\n\n\n\n\n\n postData = (formParams if formParams else bodyParam)\n\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n\n\n if not response:\n return 
None\n\n responseObject = self.apiClient.deserialize(response, 'HostListResult')\n return responseObject\n\n\n\n\n def getHostCount(self, **kwargs):\n \"\"\"Gives total number of hosts\n\n Args:\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: CountResult\n \"\"\"\n\n allParams = ['scope']\n\n params = locals()\n for (key, val) in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\"Got an unexpected keyword argument '%s' to method getHostCount\" % key)\n params[key] = val\n del params['kwargs']\n\n resourcePath = '/host/count'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n\n\n\n\n if ('scope' in params):\n headerParams['scope'] = params['scope']\n\n\n\n\n\n\n\n\n postData = (formParams if formParams else bodyParam)\n\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n\n\n if not response:\n return None\n\n responseObject = self.apiClient.deserialize(response, 'CountResult')\n return responseObject\n\n\n\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n\n allParams = ['id', 'scope']\n\n params = locals()\n for (key, val) in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\"Got an unexpected keyword argument '%s' to method getHostById\" % key)\n params[key] = val\n del params['kwargs']\n\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n\n\n\n\n if ('scope' in params):\n headerParams['scope'] = params['scope']\n\n\n\n if ('id' in params):\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}',\n replacement)\n\n\n\n\n\n\n postData = (formParams if formParams else bodyParam)\n\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n\n\n if not response:\n return None\n\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n\n\n\n\n\n\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
x = 5
# Manual spacing with a repeated-space string vs. a format spec:
print(x, " " * 3, "5")
# Right-align x in a field 20 characters wide
print("{:20d}".format(x))
|
normal
|
{
"blob_id": "88542a18d98a215f58333f5dd2bf5c4b0d37f32f",
"index": 5539,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(x, ' ' * 3, '5')\nprint('{:20d}'.format(x))\n",
"step-3": "x = 5\nprint(x, ' ' * 3, '5')\nprint('{:20d}'.format(x))\n",
"step-4": "x = 5\nprint(x , \" \"*3 , \"5\")\nprint(\"{:20d}\".format(x))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import sys
import time
def initialize(x: int) -> list:
    # Load initialization data from the file named in sys.argv[x];
    # this can take a long time for large inputs.
    data = []
    start_time = int(round(time.time()))
    c = 0
    file1 = sys.argv[x]
    with open(file1) as datafile:
        for line in datafile:
            c += 1
            if c % 100 == 0:
                # Progress indicator: one dot per 100 lines read
                print(".", sep='', end='', flush=True)
            data.append([int(l) for l in line.split()])
    rows = len(data)
    cols = len(data[0])
    # print(data)
    # print("rows=", rows, " cols=", cols)
    print("time taken:", int(round(time.time())) - start_time, "seconds")
    return data
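
# A small usage sketch (an assumed entry point, not in the original): invoke as
#   python script.py matrix.txt
# where matrix.txt holds whitespace-separated integers, one row per line.
if __name__ == "__main__":
    data = initialize(1)  # sys.argv[1] names the input file
    print("loaded", len(data), "rows x", len(data[0]), "cols")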
|
normal
|
{
"blob_id": "91f3aae4e74f371cadaf10385510bc1c80063f55",
"index": 7765,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef initialize(x: object) ->object:\n data = []\n starttimeinmillis = int(round(time.time()))\n c = 0\n file1 = sys.argv[x]\n with open(file1) as datafile:\n for line in datafile:\n c += 1\n if c % 100 == 0:\n print('.', sep='', end='', flush=True)\n data.append([int(l) for l in line.split()])\n rows = len(data)\n cols = len(data[0])\n print('time took:', int(round(time.time())) - starttimeinmillis, 'seconds')\n return data\n",
"step-3": "import sys\nimport time\n\n\ndef initialize(x: object) ->object:\n data = []\n starttimeinmillis = int(round(time.time()))\n c = 0\n file1 = sys.argv[x]\n with open(file1) as datafile:\n for line in datafile:\n c += 1\n if c % 100 == 0:\n print('.', sep='', end='', flush=True)\n data.append([int(l) for l in line.split()])\n rows = len(data)\n cols = len(data[0])\n print('time took:', int(round(time.time())) - starttimeinmillis, 'seconds')\n return data\n",
"step-4": "import sys\nimport time\ndef initialize(x: object) -> object:\n # Create initialization data and take a lot of time\n\n data = []\n starttimeinmillis = int(round(time.time()))\n\n c =0\n file1 = sys.argv[x]\n with open(file1) as datafile:\n for line in datafile:\n c+=1\n if(c%100==0):\n print(\".\",sep='', end='',flush=True)\n data.append([int(l) for l in line.split()])\n\n rows = len(data)\n cols = len(data[0])\n # print(data)\n\n #print(\"rows=\", rows, \" cols=\", cols)\n print(\"time took:\",int(round(time.time()))-starttimeinmillis,\"seconds\")\n return data\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import numpy as np
import matplotlib.pyplot as plt
##########################################
# line plot
##########################################
# If the x values are omitted, the x-axis defaults to 0, 1, 2, 3
"""
plt.plot([1, 4, 9, 16])
plt.show()
"""
# Specifying both x and y values
"""
plt.plot([10, 20, 30, 40], [1, 4, 9, 16])
plt.show()
"""
# Style specification
# Given as color, marker, line style, in that order
# Colors : blue(b), green(g), red(r), cyan(c), magenta(m), yellow(y), black(k), white(w)
# Markers : point(.), pixel(,), circle(o), triangle_down(v), triangle_up(^),
#        triangle_left(<), triangle_right(>), tri_down(1), tri_up(2), tri_left(3),
#        tri_right(4), square(s), pentagon(p), star(*), hexagon1(h),
#        hexagon2(H), plus(+), x marker(x), diamond(D), thin_diamond(d)
# Line styles : solid line(-), dashed line(--), dash-dot line(-.), dotted(:)
"""
plt.plot([1,4,9,16], 'bs:')
plt.show()
"""
# Other style options
# See http://matplotlib.org/1.5.1/api/lines_api.html#matplotlib.lines.Line2D
# color(c) : line color
# linewidth(lw) : line width
# linestyle(ls) : line style
# marker : marker type
# markersize(ms) : marker size
# markeredgecolor(mec) : marker edge color
# markeredgewidth(mew) : marker edge width
# markerfacecolor(mfc) : marker face (fill) color
"""
plt.plot([1,4,9,16], c="b", lw=5, ls="--", marker="o", ms=15, mec="g", mew=5,
mfc="r")
plt.show()
"""
# Setting the plot range
# Specify minimum and maximum values with xlim and ylim
"""
plt.plot([1,4,9,16], c="b", lw=5, ls="--", marker="o", ms=15, mec="g", mew=5,
mfc="r")
plt.xlim(-10, 10)
plt.ylim(-10, 30)
plt.show()
"""
# Tick settings
# Tick : a marked position along an axis of a plot or chart
# Tick label : the number or text written at a tick
# Set tick labels with xticks and yticks
# Tick label strings may contain LaTeX math expressions between $ signs
"""
X = np.linspace(-np.pi, np.pi, 256)
C = np.cos(X)
plt.plot(X, C)
plt.xticks([-np.pi, -np.pi/2, 0, np.pi/2, np.pi])
plt.yticks([-1, 0, +1])
plt.xticks([-np.pi, -np.pi/2, 0, np.pi/2, np.pi], [r'$-\pi$', r'$-\pi/2$',
'0', r'$+\pi/2$', r'$+\pi$'])
plt.yticks([-1, 0, +1], ["Low", "Zero", "High"])
plt.grid(False) # turn off the grid
plt.show()
"""
# Drawing several lines at once
# Just pass multiple x, y, style triples
"""
t = np.arange(0., 5., 0.2)
plt.plot(t, t, 'r--', t, 0.5*t**2, 'bs:', t, 0.2*t**3, 'g^-')
plt.show()
"""
# Applying multiple plot commands to one figure : hold
# hold(True) : start overlaying
# hold(False) : stop overlaying
# (hold was deprecated in matplotlib 2.0 and removed in 3.0; overlaying is now the default)
"""
plt.plot([1,4,9,16], c="b", lw=5, ls="--", marker="o", ms=15, mec="g", mew=5,
mfc="r")
plt.hold(True)
plt.plot([9,16,4,1], c="k", lw=3, ls=":", marker="s", ms=10, mec="m", mew=5,
mfc="c")
plt.hold(False)
plt.show()
"""
# Legend
# Add a legend with the legend command
# The loc argument sets the legend position
# loc : best(0), upper right(1), upper left(2), lower left(3),
#       lower right(4), right(5), center left(6), center right(7),
#       lower center(8), upper center(9), center(10)
"""
X = np.linspace(-np.pi, np.pi, 256)
C, S = np.cos(X), np.sin(X)
plt.plot(X, C, label="cosine")
plt.hold(True)
plt.plot(X, S, label="sine")
plt.legend(loc=5)
plt.show()
"""
# x-axis label, y-axis label, and title
# Set with xlabel, ylabel, and title
"""
X = np.linspace(-np.pi, np.pi, 256)
C, S = np.cos(X), np.sin(X)
plt.plot(X, C, label="cosine")
plt.xlabel("time")
plt.ylabel("amplitude")
plt.title("Cosine Plot")
plt.show()
"""
# Annotations
# Use the annotate command to place notes, including arrows, inside the figure
"""
X = np.linspace(-np.pi, np.pi, 256)
S = np.sin(X)
plt.plot(X, S, label="sine")
plt.scatter([0], [0], color="r", linewidth=10)
plt.annotate(r'$(0,0)$', xy=(0, 0), xycoords='data', xytext=(-50, 50),
textcoords='offset points', fontsize=16,
arrowprops=dict(arrowstyle="->", linewidth=3, color="g"))
plt.show()
"""
# The structure is Figure [ Axes [ Axis ] ]
# Figure : used to open multiple windows or to set the figure size;
#          plot creates a Figure automatically, so explicit creation
#          is usually unnecessary
#          use the gcf command to get the current Figure object
"""
f1 = plt.figure(figsize=(100,2))
plt.plot(np.random.randn(100))
plt.show()
"""
"""
f1 = plt.figure(1)
plt.plot([1,2,3,4], 'ro:')
f2= plt.gcf()
print(f1, id(f1))
print(f2, id(f2))
plt.show()
"""
# Axes and Subplot
# When several plots are arranged in one window (Figure), each plot
# belongs to an object called an Axes
# The subplot command creates Axes objects; plot creates one automatically
# subplot lays the Axes objects out in a grid
# Think of the Figure as a matrix and each Axes as one of its elements:
# two plots stacked vertically form a 2x1 matrix
# subplot takes 3 arguments: the first 2 define the grid, the third picks the position
"""
x1 = np.linspace(0.0, 5.0)
x2 = np.linspace(0.0, 2.0)
y1 = np.cos(2 * np.pi * x1) * np.exp(-x1)
y2 = np.cos(2 * np.pi * x2)
ax1 = plt.subplot(2, 1, 1)
plt.plot(x1, y1, 'yo-')
plt.title('A tale of 2 subplots')
plt.ylabel('Dampled oscillation')
print(ax1)
ax2 = plt.subplot(2, 1, 2)
plt.plot(x2, y2, 'r.-')
plt.xlabel('time (s)')
plt.ylabel('Undamped')
print(ax2)
plt.show()
"""
# The subplot arguments (2,2,1) can be abbreviated as 221
"""
plt.subplot(221); plt.plot([1,2]); plt.title(1)
plt.subplot(222); plt.plot([1,2]); plt.title(2)
plt.subplot(223); plt.plot([1,2]); plt.title(3)
plt.subplot(224); plt.plot([1,2]); plt.title(4)
plt.tight_layout()
plt.show()
"""
# xkcd style
X = np.linspace(-3, 3, 4096)
C = np.cos(X)
with plt.xkcd():
plt.title('XKCD style plot!!!')
plt.plot(X, C, label="cosine")
t = 2 * np.pi / 3
plt.scatter(t, np.cos(t), 50, color='blue')
plt.annotate(r'0.5 Here', xy=(t, np.cos(t)), xycoords='data', xytext=(-90,
-50), textcoords='offset points', fontsize=16,
arrowprops=dict(arrowstyle="->", linewidth=3, color="g"))
plt.show()
|
normal
|
{
"blob_id": "89ffb2da456d2edf15fde8adc01615a277c6caa1",
"index": 8522,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith plt.xkcd():\n plt.title('XKCD style plot!!!')\n plt.plot(X, C, label='cosine')\n t = 2 * np.pi / 3\n plt.scatter(t, np.cos(t), 50, color='blue')\n plt.annotate('0.5 Here', xy=(t, np.cos(t)), xycoords='data', xytext=(-\n 90, -50), textcoords='offset points', fontsize=16, arrowprops=dict(\n arrowstyle='->', linewidth=3, color='g'))\nplt.show()\n",
"step-3": "<mask token>\nX = np.linspace(-3, 3, 4096)\nC = np.cos(X)\nwith plt.xkcd():\n plt.title('XKCD style plot!!!')\n plt.plot(X, C, label='cosine')\n t = 2 * np.pi / 3\n plt.scatter(t, np.cos(t), 50, color='blue')\n plt.annotate('0.5 Here', xy=(t, np.cos(t)), xycoords='data', xytext=(-\n 90, -50), textcoords='offset points', fontsize=16, arrowprops=dict(\n arrowstyle='->', linewidth=3, color='g'))\nplt.show()\n",
"step-4": "import numpy as np\nimport matplotlib.pyplot as plt\n<mask token>\nX = np.linspace(-3, 3, 4096)\nC = np.cos(X)\nwith plt.xkcd():\n plt.title('XKCD style plot!!!')\n plt.plot(X, C, label='cosine')\n t = 2 * np.pi / 3\n plt.scatter(t, np.cos(t), 50, color='blue')\n plt.annotate('0.5 Here', xy=(t, np.cos(t)), xycoords='data', xytext=(-\n 90, -50), textcoords='offset points', fontsize=16, arrowprops=dict(\n arrowstyle='->', linewidth=3, color='g'))\nplt.show()\n",
"step-5": "import numpy as np\nimport matplotlib.pyplot as plt\n\n##########################################\n# line plot\n#########################################\n\n# x축 생략시 x축은 0, 1, 2, 3이 됨\n\"\"\"\nplt.plot([1, 4, 9, 16])\nplt.show()\n\"\"\"\n\n\n# x축과 y축 지정\n\"\"\"\nplt.plot([10, 20, 30, 40], [1, 4, 9, 16])\nplt.show()\n\"\"\"\n\n# 스타일지정\n# 색깔, 마커, 선 순서로 지정함\n# 색깔 : blue(b), green(g), red(r), cyan(c), magenta(m), yellow(y), block(k), white(w)\n# 마커 : point(.), pixel(,), circle(o), triangle_down(v), triangle_up(^),\n# triangle_left(<), triangle_right(>), tri_down(1), tri_up(2), tri_left(3),\n# tri_right(4), square(s), pentagon(p), star(*), hexagon1(h),\n# hexagon2(H), plus(+), x marker(x), diamond(D), thin_diamond(d)\n# 선 : solid line(-), dashed line(--), dash-dot line(-.), dotted(:)\n\"\"\"\nplt.plot([1,4,9,16], 'bs:')\nplt.show()\n\"\"\"\n\n\n# 기타스타일\n# http://matplotlib.org/1.5.1/api/lines_api.html#matplotlib.lines.Line2D 참고\n# color(c) : 선색깔\n# linewidth(lw) : 선굵기\n# linestyle(ls) : 선스타일\n# marker : 마커종류\n# markersize(ms) : 마커크기\n# markeredgecolor(mec) : 마커 선 색깔\n# markeredgewidth(mew) : 마커 선 굵기\n# markerfacecolor(mfc) : 마커 내부 색깔\n\"\"\"\nplt.plot([1,4,9,16], c=\"b\", lw=5, ls=\"--\", marker=\"o\", ms=15, mec=\"g\", mew=5,\n mfc=\"r\")\nplt.show()\n\"\"\"\n\n\n# 그림 범위지정\n# xlim, ylim에서 최소, 최대값 지정\n\"\"\"\nplt.plot([1,4,9,16], c=\"b\", lw=5, ls=\"--\", marker=\"o\", ms=15, mec=\"g\", mew=5,\n mfc=\"r\")\nplt.xlim(-10, 10)\nplt.ylim(-10, 30)\nplt.show()\n\"\"\"\n\n# 틱 설정\n# 틱 : 플롯이나 차트에서 축상의 위치 표시 지점\n# 틱라벨 : 틱 위에 써진 숫자 혹은 글자\n# xticks, yticks로 틱라벨 지정\n# 틱 라벨 문자열에는 $$사이에 LaTeX 수학 문자식 넣을수 있다\n\"\"\"\nX = np.linspace(-np.pi, np.pi, 256)\nC = np.cos(X)\nplt.plot(X, C)\nplt.xticks([-np.pi, -np.pi/2, 0, np.pi/2, np.pi])\nplt.yticks([-1, 0, +1])\nplt.xticks([-np.pi, -np.pi/2, 0, np.pi/2, np.pi], [r'$-\\pi$', r'$-\\pi/2$',\n'0', r'$+\\pi/2$', r'$+\\pi$'])\nplt.yticks([-1, 0, +1], [\"Low\", \"Zero\", \"High\"])\nplt.grid(False) # grid없애기\nplt.show()\n\"\"\"\n\n\n# 여러개 선 그리기\n# x, y, 스타일을 여러개 지정하면 됨\n\"\"\"\nt = np.arange(0., 5., 0.2)\nplt.plot(t, t, 'r--', t, 0.5*t**2, 'bs:', t, 0.2*t**3, 'g^-')\nplt.show()\n\"\"\"\n\n\n# 하나의 그림에 복수의 plot명령 적용 : 홀드\n# hold(True) : 겹치기 시작\n# hold(False) : 겹치기 종료\n\"\"\"\nplt.plot([1,4,9,16], c=\"b\", lw=5, ls=\"--\", marker=\"o\", ms=15, mec=\"g\", mew=5,\n mfc=\"r\")\nplt.hold(True)\nplt.plot([9,16,4,1], c=\"k\", lw=3, ls=\":\", marker=\"s\", ms=10, mec=\"m\", mew=5,\n mfc=\"c\")\nplt.hold(False)\nplt.show()\n\"\"\"\n\n\n# 범례\n# legent명령으로 범례 추가\n# loc인수로 범례의 위치 지정\n# loc : best(0), upper right(1), upper left(2), lower left(3),\n# lower right(4), right(5), center left(6), center right(7)\n# lower center(8), upper center(9), center(10)\n\"\"\"\nX = np.linspace(-np.pi, np.pi, 256)\nC, S = np.cos(X), np.sin(X)\nplt.plot(X, C, label=\"cosine\")\nplt.hold(True)\nplt.plot(X, S, label=\"sine\")\nplt.legend(loc=5)\nplt.show()\n\"\"\"\n\n# x축, y축 라벨, 타이틀\n# xlabel, ylabel, title로 지정\n\"\"\"\nX = np.linspace(-np.pi, np.pi, 256)\nC, S = np.cos(X), np.sin(X)\nplt.plot(X, C, label=\"cosine\")\nplt.xlabel(\"time\")\nplt.ylabel(\"amplitude\")\nplt.title(\"Cosine Plot\")\nplt.show()\n\"\"\"\n\n# 부가설명\n# annotate명령을 사용하여 그림내에 화살표를 포함한 부가 설명 넣을수 있음\n\"\"\"\nX = np.linspace(-np.pi, np.pi, 256)\nS = np.sin(X)\nplt.plot(X, S, label=\"sine\")\nplt.scatter([0], [0], color=\"r\", linewidth=10)\nplt.annotate(r'$(0,0)$', xy=(0, 0), xycoords='data', xytext=(-50, 50),\n textcoords='offset points', fontsize=16,\n arrowprops=dict(arrowstyle=\"->\", linewidth=3, 
color=\"g\"))\nplt.show()\n\"\"\"\n\n\n# Figure [ Axes [ Axis] ] 의 구조이다\n# Figure : 여러개의 윈도우를 띄우거나, 그림의 크기 지정시 사용\n# plot사용시 자동으로 Figure를 생성하므로 명시적으로 생성할 필요는\n# 없음\n# figure객체를 얻으려면 gcf 명령 사용\n\"\"\"\nf1 = plt.figure(figsize=(100,2))\nplt.plot(np.random.randn(100))\nplt.show()\n\"\"\"\n\"\"\"\nf1 = plt.figure(1)\nplt.plot([1,2,3,4], 'ro:')\nf2= plt.gcf()\nprint(f1, id(f1))\nprint(f2, id(f2))\nplt.show()\n\"\"\"\n\n\n# Axes와 Subplot\n# 하나의 윈도우(Figure)안에 여러개의 플롯을 배열하는 경우 각각의 플롯은\n# Axes라고 불리는 객체에 속함\n# subplot 명령으로 Axes객체를 생성, plot명령 사용시 자동으로 Axes를 생성함\n# subplot은 그리드 형태의 Axes객체들을 생성\n# Figure가 행렬(matrix)이고 Axes가 행렬의 원소라고 생각하면 됨.\n# 위와 아래 두개의 플롯이 있는 경우 2X1행렬\n# subplot은 3개의 인수를 가지고 처음 2개가 행렬 정의, 세번째가 위치 지정\n\"\"\"\nx1 = np.linspace(0.0, 5.0)\nx2 = np.linspace(0.0, 2.0)\ny1 = np.cos(2 * np.pi * x1) * np.exp(-x1)\ny2 = np.cos(2 * np.pi * x2)\n\nax1 = plt.subplot(2, 1, 1)\nplt.plot(x1, y1, 'yo-')\nplt.title('A tale of 2 subplots')\nplt.ylabel('Dampled oscillation')\nprint(ax1)\n\nax2 = plt.subplot(2, 1, 2)\nplt.plot(x2, y2, 'r.-')\nplt.xlabel('time (s)')\nplt.ylabel('Undamped')\nprint(ax2)\n\nplt.show()\n\"\"\"\n\n# subplot의 인수는 (2,2,1)를 줄여서 221로 표시 가능\n\"\"\"\nplt.subplot(221); plt.plot([1,2]); plt.title(1)\nplt.subplot(222); plt.plot([1,2]); plt.title(2)\nplt.subplot(223); plt.plot([1,2]); plt.title(3)\nplt.subplot(224); plt.plot([1,2]); plt.title(4)\nplt.tight_layout()\nplt.show()\n\"\"\"\n\n\n# xkcd 스타일\nX = np.linspace(-3, 3, 4096)\nC = np.cos(X)\n\nwith plt.xkcd():\n plt.title('XKCD style plot!!!')\n plt.plot(X, C, label=\"cosine\")\n t = 2 * np.pi / 3\n plt.scatter(t, np.cos(t), 50, color='blue')\n plt.annotate(r'0.5 Here', xy=(t, np.cos(t)), xycoords='data', xytext=(-90,\n -50), textcoords='offset points', fontsize=16,\n arrowprops=dict(arrowstyle=\"->\", linewidth=3, color=\"g\"))\nplt.show()\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python3
__author__ = "yang.dd"
"""
example 090
"""
# list
# create a new list
testList = [10086, "中国移动", [1, 2, 3, 4]]
# get the length of the list
print("list len: ", len(testList))
# slicing
print("切片(slice):", testList[1:])
# append
print("追加一个元素")  # prints "append an element"
testList.append("i'm new here!")
print("list len: ", len(testList))
print("last item :", testList[-1])
print("pop: ", testList.pop())
print("list len: ", len(testList))
print(testList)
matrix = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
print(matrix)
print(matrix[1])
col2 = [x[1] for x in matrix]
print(col2)
col2even = [x[1] for x in matrix if x[1] % 2 == 0]
print(col2even)
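# a small extension (not in the original snippet): a nested comprehension can
# also transpose the matrix
transposed = [[row[i] for row in matrix] for i in range(len(matrix[0]))]
print(transposed)  # [[1, 4, 7], [2, 5, 8], [3, 6, 9]]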
|
normal
|
{
"blob_id": "4f19eed272c12be137df92bfd3c72e978408c974",
"index": 3216,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('list len: ', len(testList))\nprint('切片(slice):', testList[1:])\nprint('追加一个元素')\ntestList.append(\"i'm new here!\")\nprint('list len: ', len(testList))\nprint('last item :', testList[-1])\nprint('pop: ', testList.pop())\nprint('list len: ', len(testList))\nprint(testList)\n<mask token>\nprint(matrix)\nprint(matrix[1])\n<mask token>\nprint(col2)\n<mask token>\nprint(col2even)\n",
"step-3": "__author__ = 'yang.dd'\n<mask token>\ntestList = [10086, '中国移动', [1, 2, 3, 4]]\nprint('list len: ', len(testList))\nprint('切片(slice):', testList[1:])\nprint('追加一个元素')\ntestList.append(\"i'm new here!\")\nprint('list len: ', len(testList))\nprint('last item :', testList[-1])\nprint('pop: ', testList.pop())\nprint('list len: ', len(testList))\nprint(testList)\nmatrix = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]\nprint(matrix)\nprint(matrix[1])\ncol2 = [x[1] for x in matrix]\nprint(col2)\ncol2even = [x[1] for x in matrix if x[1] % 2 == 0]\nprint(col2even)\n",
"step-4": "#!/usr/bin/python3\r\n\r\n__author__ = \"yang.dd\"\r\n\r\n\"\"\"\r\n example 090\r\n\"\"\"\r\n# list\r\n# 新建list\r\ntestList = [10086, \"中国移动\", [1, 2, 3, 4]]\r\n\r\n# 访问列表长度\r\nprint(\"list len: \", len(testList))\r\n\r\n# 切片\r\nprint(\"切片(slice):\", testList[1:])\r\n\r\n# 追加\r\n\r\nprint(\"追加一个元素\")\r\ntestList.append(\"i'm new here!\");\r\n\r\nprint(\"list len: \", len(testList))\r\nprint(\"last item :\", testList[-1])\r\n\r\nprint(\"pop: \", testList.pop())\r\nprint(\"list len: \", len(testList))\r\nprint(testList)\r\n\r\nmatrix = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]\r\nprint(matrix)\r\n\r\nprint(matrix[1])\r\n\r\ncol2 = [x[1] for x in matrix]\r\nprint(col2)\r\n\r\ncol2even = [x[1] for x in matrix if x[1] % 2 == 0]\r\nprint(col2even)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import os
from sqlalchemy import Column, ForeignKey, Integer, String, Float, Boolean, DateTime
from sqlalchemy import and_, or_
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker, scoped_session, load_only
from sqlalchemy.pool import NullPool
from datetime import datetime
Base = declarative_base()
days = ['M','T','W','T','F', 'S', 'S']
# using sqlalchemy declare a class for each table in our database.
class Station(Base):
"""this one is for storing information about each station."""
__tablename__ = "station"
number = Column(Integer, primary_key=True, autoincrement=False)
contract_name = Column(String(250), nullable=False)
name = Column(String(250), nullable=False)
address = Column(String(250), nullable=False)
position_lat = Column(Float, nullable=False)
position_long = Column(Float, nullable=False)
banking = Column(Boolean, nullable=True)
bonus = Column(Boolean, nullable=True)
station_usage = relationship("UsageData", lazy="dynamic")
@property
def last_updated(self):
"""this method is used in the scraper to return the last updated station.
this lets us pull only updated data."""
try:
return max(self.station_usage, key=lambda x: x.last_update).dt_last_update
except ValueError:
return datetime.fromtimestamp(0)
@classmethod
def get_current_station_info(cls, dbsession):
"""as the method name suggests this returns the up to date station information."""
sub = dbsession.query(UsageData.station_id, func.max(UsageData.id).label('max_update')).group_by(
UsageData.station_id).subquery()
return dbsession.query(
UsageData.last_update,
UsageData.available_bike_stands, UsageData.available_bikes).join(sub, and_(
sub.c.max_update == UsageData.id)).all()
class UsageData(Base):
"""holds data about bicycle usage for every station."""
__tablename__ = "bike_usage"
id = Column(Integer, primary_key=True)
station_id = Column(Integer, ForeignKey('station.number'))
status = Column(Boolean, nullable=False)
bike_stands = Column(Integer, nullable=False)
available_bike_stands = Column(Integer, nullable=False)
available_bikes = Column(Integer, nullable=False)
last_update = Column(DateTime, nullable=False)
@property
def dt_last_update(self):
"""return when was the last update. Once again this is used in the scraper to determine newly updated data."""
return self.last_update
@dt_last_update.setter
def dt_last_update(self, val):
"""creates a datetime object which is added to the database with an update from the dublinbikes api.
        once again used by the scraper. essentially this adds the time at which the update was entered."""
self.last_update = datetime.fromtimestamp(int(val)/1000)
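        # e.g. val = '1611500000000' (epoch milliseconds from the API) maps to a
        # datetime in late January 2021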
@classmethod
def get_bikes_for_weekday(cls, dbsession, weekday, station_id):
"""returns a list of bikes for a provided weekday and station.
averaged per hour so 24 results."""
station = [("Time", "Available Bikes", "Available Stands")]
station_data = dbsession.query(func.hour(cls.last_update),
func.avg(cls.available_bikes),
func.avg(cls.available_bike_stands)) \
.filter(cls.station_id == station_id,
func.weekday(cls.last_update) == weekday) \
.group_by(func.hour(cls.last_update)) \
.all()
# this section parses the query return into a readable list.
# from docs:extend() appends the contents of seq to list.
if station_data:
station.extend([(a, float(b), float(c)) for a, b, c in station_data])
else:
station.extend([(0,0,0)])
return station
@classmethod
def get_bikes_for_wetday(cls, dbsession, wetdate, station_id):
"""very similar to get_bikes_for_weekday but not the same: date specified is wetdate not weekday.
returns a list of bikes for a provided datetime object (wetdate) and station."""
# averaged per hour so 24 results.
station = [("Time", "Available Bikes", "Available Stands")]
station_data = dbsession.query(
func.hour(cls.last_update),
func.avg(cls.available_bikes),
func.avg(cls.available_bike_stands))\
.filter(cls.station_id == station_id,
func.date(cls.last_update) == wetdate.date())\
.group_by(func.hour(cls.last_update)).all()
# this section parses the query return into a readable list.
# from docs:extend() appends the contents of seq to list.
if station_data:
station.extend([(a, float(b), float(c)) for a, b, c in station_data])
else:
station.extend([(0,0,0)])
return station
@classmethod
def get_bikes_for_week(cls, dbsession, station_id):
"""as method name describes.
similar to methods above but averaged over week."""
station = [("Day", "Available Bikes")]
station_data = dbsession.query(func.weekday(cls.last_update),
func.avg(cls.available_bikes)) \
.filter(cls.station_id == station_id) \
.group_by(func.weekday(cls.last_update)) \
.all()
# this section parses the query return into a readable list.
# from docs:extend() appends the contents of seq to list.
if station_data:
station.extend([(days[a], float(b)) for a, b in station_data])
else:
station.extend([(0,0)])
return station
class Weather(Base):
"""holds data scraped from the open weather API."""
__tablename__ = "weather"
id = Column(Integer, nullable=False, primary_key=True, autoincrement=True)
coord_lon = Column(Float)
coord_lat = Column(Float)
weather_id = Column(Integer)
weather_main = Column(String(45))
weather_description = Column(String(45))
weather_icon = Column(String(10))
base = Column(String(45))
main_temp = Column(Integer)
main_pressure = Column(Integer)
main_humidity = Column(Integer)
main_temp_min = Column(Integer)
main_temp_max = Column(Integer)
visibility = Column(Integer)
wind_speed = Column(Float)
wind_deg = Column(Integer)
clouds_all = Column(Integer)
dt = Column(DateTime)
sys_type = Column(Integer)
sys_id = Column(Integer)
sys_message = Column(Float)
sys_country = Column(String(2))
sys_sunrise = Column(DateTime)
sys_sunset = Column(DateTime)
city_id = Column(Integer)
city_name = Column(String(6))
cod = Column(Integer)
@classmethod
def findWetWeatherDays(self, dbsession, today):
"""finds days where there was wet weather."""
wetDays = dbsession.query(self.dt).filter(or_(self.weather_description == "light rain", self.weather_description == "moderate rain")).all()
# if one of those days is today return it.
# else just return a wet day.
        for i in range(len(wetDays)):
            if today == wetDays[i][0].weekday():
                return wetDays[i][0]
        # no wet day matches today's weekday, so fall back to the first one found
        return wetDays[0][0]
# path to DB
connection_string='mysql+mysqldb://{username}:{password}@{host}:3306/dublinbikesdata'.format(username=os.environ['DatabaseUser'],
password=os.environ['DatabasePassword'],
host=os.environ['DatabaseServer'])
engine = create_engine(connection_string, poolclass=NullPool)
# create the session using sqlalchemy.
db_session = scoped_session(sessionmaker(bind=engine, autocommit=False, autoflush=False))
if __name__=="__main__":
"""Below is used for testing if the database is working by running this file directly.
not used in the actual app."""
station_id = 42
static_info = db_session.query(Station.number,
Station.name,
Station.address,
Station.position_lat,
Station.position_long).all()
dynamic_info = Station.get_current_station_info(db_session)
static_fields = ['number', 'name', 'address', 'position_lat', 'position_long']
dynamic_fields = ['last_update', 'available_bike_stands', 'available_bikes']
json_data = [dict(zip(static_fields + dynamic_fields, static + dynamic))
for static, dynamic in
zip(static_info, dynamic_info)]
print(json_data)
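
    # a quick smoke test of the aggregate queries (hypothetical inputs: station 42
    # as above, Monday == weekday 0); assumes the database is reachable
    print(UsageData.get_bikes_for_weekday(db_session, 0, station_id))
    print(UsageData.get_bikes_for_week(db_session, station_id))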
|
normal
|
{
"blob_id": "6db0adf25a7cc38c8965c07cc80bde0d82c75d56",
"index": 3955,
"step-1": "<mask token>\n\n\nclass UsageData(Base):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def dt_last_update(self):\n \"\"\"return when was the last update. Once again this is used in the scraper to determine newly updated data.\"\"\"\n return self.last_update\n <mask token>\n\n @classmethod\n def get_bikes_for_weekday(cls, dbsession, weekday, station_id):\n \"\"\"returns a list of bikes for a provided weekday and station.\n averaged per hour so 24 results.\"\"\"\n station = [('Time', 'Available Bikes', 'Available Stands')]\n station_data = dbsession.query(func.hour(cls.last_update), func.avg\n (cls.available_bikes), func.avg(cls.available_bike_stands)).filter(\n cls.station_id == station_id, func.weekday(cls.last_update) ==\n weekday).group_by(func.hour(cls.last_update)).all()\n if station_data:\n station.extend([(a, float(b), float(c)) for a, b, c in\n station_data])\n else:\n station.extend([(0, 0, 0)])\n return station\n\n @classmethod\n def get_bikes_for_wetday(cls, dbsession, wetdate, station_id):\n \"\"\"very similar to get_bikes_for_weekday but not the same: date specified is wetdate not weekday.\n returns a list of bikes for a provided datetime object (wetdate) and station.\"\"\"\n station = [('Time', 'Available Bikes', 'Available Stands')]\n station_data = dbsession.query(func.hour(cls.last_update), func.avg\n (cls.available_bikes), func.avg(cls.available_bike_stands)).filter(\n cls.station_id == station_id, func.date(cls.last_update) ==\n wetdate.date()).group_by(func.hour(cls.last_update)).all()\n if station_data:\n station.extend([(a, float(b), float(c)) for a, b, c in\n station_data])\n else:\n station.extend([(0, 0, 0)])\n return station\n\n @classmethod\n def get_bikes_for_week(cls, dbsession, station_id):\n \"\"\"as method name describes.\n similar to methods above but averaged over week.\"\"\"\n station = [('Day', 'Available Bikes')]\n station_data = dbsession.query(func.weekday(cls.last_update), func.\n avg(cls.available_bikes)).filter(cls.station_id == station_id\n ).group_by(func.weekday(cls.last_update)).all()\n if station_data:\n station.extend([(days[a], float(b)) for a, b in station_data])\n else:\n station.extend([(0, 0)])\n return station\n\n\nclass Weather(Base):\n \"\"\"holds data scraped from the open weather API.\"\"\"\n __tablename__ = 'weather'\n id = Column(Integer, nullable=False, primary_key=True, autoincrement=True)\n coord_lon = Column(Float)\n coord_lat = Column(Float)\n weather_id = Column(Integer)\n weather_main = Column(String(45))\n weather_description = Column(String(45))\n weather_icon = Column(String(10))\n base = Column(String(45))\n main_temp = Column(Integer)\n main_pressure = Column(Integer)\n main_humidity = Column(Integer)\n main_temp_min = Column(Integer)\n main_temp_max = Column(Integer)\n visibility = Column(Integer)\n wind_speed = Column(Float)\n wind_deg = Column(Integer)\n clouds_all = Column(Integer)\n dt = Column(DateTime)\n sys_type = Column(Integer)\n sys_id = Column(Integer)\n sys_message = Column(Float)\n sys_country = Column(String(2))\n sys_sunrise = Column(DateTime)\n sys_sunset = Column(DateTime)\n city_id = Column(Integer)\n city_name = Column(String(6))\n cod = Column(Integer)\n\n @classmethod\n def findWetWeatherDays(self, dbsession, today):\n \"\"\"finds days where there was wet weather.\"\"\"\n wetDays = dbsession.query(self.dt).filter(or_(self.\n weather_description == 'light rain', 
self.weather_description ==\n 'moderate rain')).all()\n for i in range(len(wetDays)):\n if today == wetDays[i][0].weekday():\n return wetDays[i][0]\n else:\n return wetDays[0][0]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass UsageData(Base):\n <mask token>\n __tablename__ = 'bike_usage'\n id = Column(Integer, primary_key=True)\n station_id = Column(Integer, ForeignKey('station.number'))\n status = Column(Boolean, nullable=False)\n bike_stands = Column(Integer, nullable=False)\n available_bike_stands = Column(Integer, nullable=False)\n available_bikes = Column(Integer, nullable=False)\n last_update = Column(DateTime, nullable=False)\n\n @property\n def dt_last_update(self):\n \"\"\"return when was the last update. Once again this is used in the scraper to determine newly updated data.\"\"\"\n return self.last_update\n\n @dt_last_update.setter\n def dt_last_update(self, val):\n \"\"\"creates a datetime object which is added to the database with an update from the dublinbikes api.\n once again used by the scraper. essentially the adds the time at which the update was entered.\"\"\"\n self.last_update = datetime.fromtimestamp(int(val) / 1000)\n\n @classmethod\n def get_bikes_for_weekday(cls, dbsession, weekday, station_id):\n \"\"\"returns a list of bikes for a provided weekday and station.\n averaged per hour so 24 results.\"\"\"\n station = [('Time', 'Available Bikes', 'Available Stands')]\n station_data = dbsession.query(func.hour(cls.last_update), func.avg\n (cls.available_bikes), func.avg(cls.available_bike_stands)).filter(\n cls.station_id == station_id, func.weekday(cls.last_update) ==\n weekday).group_by(func.hour(cls.last_update)).all()\n if station_data:\n station.extend([(a, float(b), float(c)) for a, b, c in\n station_data])\n else:\n station.extend([(0, 0, 0)])\n return station\n\n @classmethod\n def get_bikes_for_wetday(cls, dbsession, wetdate, station_id):\n \"\"\"very similar to get_bikes_for_weekday but not the same: date specified is wetdate not weekday.\n returns a list of bikes for a provided datetime object (wetdate) and station.\"\"\"\n station = [('Time', 'Available Bikes', 'Available Stands')]\n station_data = dbsession.query(func.hour(cls.last_update), func.avg\n (cls.available_bikes), func.avg(cls.available_bike_stands)).filter(\n cls.station_id == station_id, func.date(cls.last_update) ==\n wetdate.date()).group_by(func.hour(cls.last_update)).all()\n if station_data:\n station.extend([(a, float(b), float(c)) for a, b, c in\n station_data])\n else:\n station.extend([(0, 0, 0)])\n return station\n\n @classmethod\n def get_bikes_for_week(cls, dbsession, station_id):\n \"\"\"as method name describes.\n similar to methods above but averaged over week.\"\"\"\n station = [('Day', 'Available Bikes')]\n station_data = dbsession.query(func.weekday(cls.last_update), func.\n avg(cls.available_bikes)).filter(cls.station_id == station_id\n ).group_by(func.weekday(cls.last_update)).all()\n if station_data:\n station.extend([(days[a], float(b)) for a, b in station_data])\n else:\n station.extend([(0, 0)])\n return station\n\n\nclass Weather(Base):\n \"\"\"holds data scraped from the open weather API.\"\"\"\n __tablename__ = 'weather'\n id = Column(Integer, nullable=False, primary_key=True, autoincrement=True)\n coord_lon = Column(Float)\n coord_lat = Column(Float)\n weather_id = Column(Integer)\n weather_main = Column(String(45))\n weather_description = Column(String(45))\n weather_icon = Column(String(10))\n base = Column(String(45))\n main_temp = Column(Integer)\n main_pressure = Column(Integer)\n main_humidity = Column(Integer)\n main_temp_min = Column(Integer)\n main_temp_max = Column(Integer)\n visibility = Column(Integer)\n wind_speed = Column(Float)\n wind_deg = 
Column(Integer)\n clouds_all = Column(Integer)\n dt = Column(DateTime)\n sys_type = Column(Integer)\n sys_id = Column(Integer)\n sys_message = Column(Float)\n sys_country = Column(String(2))\n sys_sunrise = Column(DateTime)\n sys_sunset = Column(DateTime)\n city_id = Column(Integer)\n city_name = Column(String(6))\n cod = Column(Integer)\n\n @classmethod\n def findWetWeatherDays(self, dbsession, today):\n \"\"\"finds days where there was wet weather.\"\"\"\n wetDays = dbsession.query(self.dt).filter(or_(self.\n weather_description == 'light rain', self.weather_description ==\n 'moderate rain')).all()\n for i in range(len(wetDays)):\n if today == wetDays[i][0].weekday():\n return wetDays[i][0]\n else:\n return wetDays[0][0]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass UsageData(Base):\n \"\"\"holds data about bicycle usage for every station.\"\"\"\n __tablename__ = 'bike_usage'\n id = Column(Integer, primary_key=True)\n station_id = Column(Integer, ForeignKey('station.number'))\n status = Column(Boolean, nullable=False)\n bike_stands = Column(Integer, nullable=False)\n available_bike_stands = Column(Integer, nullable=False)\n available_bikes = Column(Integer, nullable=False)\n last_update = Column(DateTime, nullable=False)\n\n @property\n def dt_last_update(self):\n \"\"\"return when was the last update. Once again this is used in the scraper to determine newly updated data.\"\"\"\n return self.last_update\n\n @dt_last_update.setter\n def dt_last_update(self, val):\n \"\"\"creates a datetime object which is added to the database with an update from the dublinbikes api.\n once again used by the scraper. essentially the adds the time at which the update was entered.\"\"\"\n self.last_update = datetime.fromtimestamp(int(val) / 1000)\n\n @classmethod\n def get_bikes_for_weekday(cls, dbsession, weekday, station_id):\n \"\"\"returns a list of bikes for a provided weekday and station.\n averaged per hour so 24 results.\"\"\"\n station = [('Time', 'Available Bikes', 'Available Stands')]\n station_data = dbsession.query(func.hour(cls.last_update), func.avg\n (cls.available_bikes), func.avg(cls.available_bike_stands)).filter(\n cls.station_id == station_id, func.weekday(cls.last_update) ==\n weekday).group_by(func.hour(cls.last_update)).all()\n if station_data:\n station.extend([(a, float(b), float(c)) for a, b, c in\n station_data])\n else:\n station.extend([(0, 0, 0)])\n return station\n\n @classmethod\n def get_bikes_for_wetday(cls, dbsession, wetdate, station_id):\n \"\"\"very similar to get_bikes_for_weekday but not the same: date specified is wetdate not weekday.\n returns a list of bikes for a provided datetime object (wetdate) and station.\"\"\"\n station = [('Time', 'Available Bikes', 'Available Stands')]\n station_data = dbsession.query(func.hour(cls.last_update), func.avg\n (cls.available_bikes), func.avg(cls.available_bike_stands)).filter(\n cls.station_id == station_id, func.date(cls.last_update) ==\n wetdate.date()).group_by(func.hour(cls.last_update)).all()\n if station_data:\n station.extend([(a, float(b), float(c)) for a, b, c in\n station_data])\n else:\n station.extend([(0, 0, 0)])\n return station\n\n @classmethod\n def get_bikes_for_week(cls, dbsession, station_id):\n \"\"\"as method name describes.\n similar to methods above but averaged over week.\"\"\"\n station = [('Day', 'Available Bikes')]\n station_data = dbsession.query(func.weekday(cls.last_update), func.\n avg(cls.available_bikes)).filter(cls.station_id == station_id\n ).group_by(func.weekday(cls.last_update)).all()\n if station_data:\n station.extend([(days[a], float(b)) for a, b in station_data])\n else:\n station.extend([(0, 0)])\n return station\n\n\nclass Weather(Base):\n \"\"\"holds data scraped from the open weather API.\"\"\"\n __tablename__ = 'weather'\n id = Column(Integer, nullable=False, primary_key=True, autoincrement=True)\n coord_lon = Column(Float)\n coord_lat = Column(Float)\n weather_id = Column(Integer)\n weather_main = Column(String(45))\n weather_description = Column(String(45))\n weather_icon = Column(String(10))\n base = Column(String(45))\n main_temp = Column(Integer)\n main_pressure = Column(Integer)\n main_humidity = Column(Integer)\n main_temp_min = Column(Integer)\n main_temp_max = Column(Integer)\n visibility = 
Column(Integer)\n wind_speed = Column(Float)\n wind_deg = Column(Integer)\n clouds_all = Column(Integer)\n dt = Column(DateTime)\n sys_type = Column(Integer)\n sys_id = Column(Integer)\n sys_message = Column(Float)\n sys_country = Column(String(2))\n sys_sunrise = Column(DateTime)\n sys_sunset = Column(DateTime)\n city_id = Column(Integer)\n city_name = Column(String(6))\n cod = Column(Integer)\n\n @classmethod\n def findWetWeatherDays(self, dbsession, today):\n \"\"\"finds days where there was wet weather.\"\"\"\n wetDays = dbsession.query(self.dt).filter(or_(self.\n weather_description == 'light rain', self.weather_description ==\n 'moderate rain')).all()\n for i in range(len(wetDays)):\n if today == wetDays[i][0].weekday():\n return wetDays[i][0]\n else:\n return wetDays[0][0]\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Station(Base):\n <mask token>\n __tablename__ = 'station'\n number = Column(Integer, primary_key=True, autoincrement=False)\n contract_name = Column(String(250), nullable=False)\n name = Column(String(250), nullable=False)\n address = Column(String(250), nullable=False)\n position_lat = Column(Float, nullable=False)\n position_long = Column(Float, nullable=False)\n banking = Column(Boolean, nullable=True)\n bonus = Column(Boolean, nullable=True)\n station_usage = relationship('UsageData', lazy='dynamic')\n\n @property\n def last_updated(self):\n \"\"\"this method is used in the scraper to return the last updated station.\n this lets us pull only updated data.\"\"\"\n try:\n return max(self.station_usage, key=lambda x: x.last_update\n ).dt_last_update\n except ValueError:\n return datetime.fromtimestamp(0)\n\n @classmethod\n def get_current_station_info(cls, dbsession):\n \"\"\"as the method name suggests this returns the up to date station information.\"\"\"\n sub = dbsession.query(UsageData.station_id, func.max(UsageData.id).\n label('max_update')).group_by(UsageData.station_id).subquery()\n return dbsession.query(UsageData.last_update, UsageData.\n available_bike_stands, UsageData.available_bikes).join(sub,\n and_(sub.c.max_update == UsageData.id)).all()\n\n\nclass UsageData(Base):\n \"\"\"holds data about bicycle usage for every station.\"\"\"\n __tablename__ = 'bike_usage'\n id = Column(Integer, primary_key=True)\n station_id = Column(Integer, ForeignKey('station.number'))\n status = Column(Boolean, nullable=False)\n bike_stands = Column(Integer, nullable=False)\n available_bike_stands = Column(Integer, nullable=False)\n available_bikes = Column(Integer, nullable=False)\n last_update = Column(DateTime, nullable=False)\n\n @property\n def dt_last_update(self):\n \"\"\"return when was the last update. Once again this is used in the scraper to determine newly updated data.\"\"\"\n return self.last_update\n\n @dt_last_update.setter\n def dt_last_update(self, val):\n \"\"\"creates a datetime object which is added to the database with an update from the dublinbikes api.\n once again used by the scraper. 
essentially the adds the time at which the update was entered.\"\"\"\n self.last_update = datetime.fromtimestamp(int(val) / 1000)\n\n @classmethod\n def get_bikes_for_weekday(cls, dbsession, weekday, station_id):\n \"\"\"returns a list of bikes for a provided weekday and station.\n averaged per hour so 24 results.\"\"\"\n station = [('Time', 'Available Bikes', 'Available Stands')]\n station_data = dbsession.query(func.hour(cls.last_update), func.avg\n (cls.available_bikes), func.avg(cls.available_bike_stands)).filter(\n cls.station_id == station_id, func.weekday(cls.last_update) ==\n weekday).group_by(func.hour(cls.last_update)).all()\n if station_data:\n station.extend([(a, float(b), float(c)) for a, b, c in\n station_data])\n else:\n station.extend([(0, 0, 0)])\n return station\n\n @classmethod\n def get_bikes_for_wetday(cls, dbsession, wetdate, station_id):\n \"\"\"very similar to get_bikes_for_weekday but not the same: date specified is wetdate not weekday.\n returns a list of bikes for a provided datetime object (wetdate) and station.\"\"\"\n station = [('Time', 'Available Bikes', 'Available Stands')]\n station_data = dbsession.query(func.hour(cls.last_update), func.avg\n (cls.available_bikes), func.avg(cls.available_bike_stands)).filter(\n cls.station_id == station_id, func.date(cls.last_update) ==\n wetdate.date()).group_by(func.hour(cls.last_update)).all()\n if station_data:\n station.extend([(a, float(b), float(c)) for a, b, c in\n station_data])\n else:\n station.extend([(0, 0, 0)])\n return station\n\n @classmethod\n def get_bikes_for_week(cls, dbsession, station_id):\n \"\"\"as method name describes.\n similar to methods above but averaged over week.\"\"\"\n station = [('Day', 'Available Bikes')]\n station_data = dbsession.query(func.weekday(cls.last_update), func.\n avg(cls.available_bikes)).filter(cls.station_id == station_id\n ).group_by(func.weekday(cls.last_update)).all()\n if station_data:\n station.extend([(days[a], float(b)) for a, b in station_data])\n else:\n station.extend([(0, 0)])\n return station\n\n\nclass Weather(Base):\n \"\"\"holds data scraped from the open weather API.\"\"\"\n __tablename__ = 'weather'\n id = Column(Integer, nullable=False, primary_key=True, autoincrement=True)\n coord_lon = Column(Float)\n coord_lat = Column(Float)\n weather_id = Column(Integer)\n weather_main = Column(String(45))\n weather_description = Column(String(45))\n weather_icon = Column(String(10))\n base = Column(String(45))\n main_temp = Column(Integer)\n main_pressure = Column(Integer)\n main_humidity = Column(Integer)\n main_temp_min = Column(Integer)\n main_temp_max = Column(Integer)\n visibility = Column(Integer)\n wind_speed = Column(Float)\n wind_deg = Column(Integer)\n clouds_all = Column(Integer)\n dt = Column(DateTime)\n sys_type = Column(Integer)\n sys_id = Column(Integer)\n sys_message = Column(Float)\n sys_country = Column(String(2))\n sys_sunrise = Column(DateTime)\n sys_sunset = Column(DateTime)\n city_id = Column(Integer)\n city_name = Column(String(6))\n cod = Column(Integer)\n\n @classmethod\n def findWetWeatherDays(self, dbsession, today):\n \"\"\"finds days where there was wet weather.\"\"\"\n wetDays = dbsession.query(self.dt).filter(or_(self.\n weather_description == 'light rain', self.weather_description ==\n 'moderate rain')).all()\n for i in range(len(wetDays)):\n if today == wetDays[i][0].weekday():\n return wetDays[i][0]\n else:\n return wetDays[0][0]\n\n\n<mask token>\n",
"step-5": "import os\n\nfrom sqlalchemy import Column, ForeignKey, Integer, String, Float, Boolean, DateTime\nfrom sqlalchemy import and_, or_\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy.orm import relationship\nfrom sqlalchemy import create_engine, func\nfrom sqlalchemy.orm import sessionmaker, scoped_session, load_only\nfrom sqlalchemy.pool import NullPool\nfrom datetime import datetime\n\nBase = declarative_base()\ndays = ['M','T','W','T','F', 'S', 'S']\n\n# using sqlalchemy declare a class for each table in our database.\nclass Station(Base):\n \"\"\"this one is for storing information about each station.\"\"\"\n __tablename__ = \"station\"\n number = Column(Integer, primary_key=True, autoincrement=False)\n contract_name = Column(String(250), nullable=False)\n name = Column(String(250), nullable=False)\n address = Column(String(250), nullable=False)\n position_lat = Column(Float, nullable=False)\n position_long = Column(Float, nullable=False)\n banking = Column(Boolean, nullable=True)\n bonus = Column(Boolean, nullable=True)\n station_usage = relationship(\"UsageData\", lazy=\"dynamic\")\n\n\n @property\n def last_updated(self):\n \"\"\"this method is used in the scraper to return the last updated station.\n this lets us pull only updated data.\"\"\"\n try:\n return max(self.station_usage, key=lambda x: x.last_update).dt_last_update\n except ValueError:\n return datetime.fromtimestamp(0)\n\n @classmethod\n def get_current_station_info(cls, dbsession):\n \"\"\"as the method name suggests this returns the up to date station information.\"\"\"\n sub = dbsession.query(UsageData.station_id, func.max(UsageData.id).label('max_update')).group_by(\n UsageData.station_id).subquery()\n return dbsession.query(\n UsageData.last_update,\n UsageData.available_bike_stands, UsageData.available_bikes).join(sub, and_(\n sub.c.max_update == UsageData.id)).all()\n\n\nclass UsageData(Base):\n \"\"\"holds data about bicycle usage for every station.\"\"\"\n __tablename__ = \"bike_usage\"\n id = Column(Integer, primary_key=True)\n station_id = Column(Integer, ForeignKey('station.number'))\n status = Column(Boolean, nullable=False)\n bike_stands = Column(Integer, nullable=False)\n available_bike_stands = Column(Integer, nullable=False)\n available_bikes = Column(Integer, nullable=False)\n last_update = Column(DateTime, nullable=False)\n\n\n @property\n def dt_last_update(self):\n \"\"\"return when was the last update. Once again this is used in the scraper to determine newly updated data.\"\"\"\n return self.last_update\n\n\n @dt_last_update.setter\n def dt_last_update(self, val):\n \"\"\"creates a datetime object which is added to the database with an update from the dublinbikes api.\n once again used by the scraper. 
essentially the adds the time at which the update was entered.\"\"\"\n self.last_update = datetime.fromtimestamp(int(val)/1000)\n\n @classmethod\n def get_bikes_for_weekday(cls, dbsession, weekday, station_id):\n \"\"\"returns a list of bikes for a provided weekday and station.\n averaged per hour so 24 results.\"\"\"\n station = [(\"Time\", \"Available Bikes\", \"Available Stands\")]\n\n station_data = dbsession.query(func.hour(cls.last_update),\n func.avg(cls.available_bikes),\n func.avg(cls.available_bike_stands)) \\\n .filter(cls.station_id == station_id,\n func.weekday(cls.last_update) == weekday) \\\n .group_by(func.hour(cls.last_update)) \\\n .all()\n\n # this section parses the query return into a readable list.\n # from docs:extend() appends the contents of seq to list.\n if station_data:\n station.extend([(a, float(b), float(c)) for a, b, c in station_data])\n else:\n station.extend([(0,0,0)])\n return station\n\n @classmethod\n def get_bikes_for_wetday(cls, dbsession, wetdate, station_id):\n \"\"\"very similar to get_bikes_for_weekday but not the same: date specified is wetdate not weekday.\n returns a list of bikes for a provided datetime object (wetdate) and station.\"\"\"\n # averaged per hour so 24 results.\n station = [(\"Time\", \"Available Bikes\", \"Available Stands\")]\n station_data = dbsession.query(\n func.hour(cls.last_update),\n func.avg(cls.available_bikes),\n func.avg(cls.available_bike_stands))\\\n .filter(cls.station_id == station_id,\n func.date(cls.last_update) == wetdate.date())\\\n .group_by(func.hour(cls.last_update)).all()\n\n # this section parses the query return into a readable list.\n # from docs:extend() appends the contents of seq to list.\n if station_data:\n station.extend([(a, float(b), float(c)) for a, b, c in station_data])\n else:\n station.extend([(0,0,0)])\n return station\n\n\n @classmethod\n def get_bikes_for_week(cls, dbsession, station_id):\n \"\"\"as method name describes.\n similar to methods above but averaged over week.\"\"\"\n station = [(\"Day\", \"Available Bikes\")]\n station_data = dbsession.query(func.weekday(cls.last_update),\n func.avg(cls.available_bikes)) \\\n .filter(cls.station_id == station_id) \\\n .group_by(func.weekday(cls.last_update)) \\\n .all()\n\n # this section parses the query return into a readable list.\n # from docs:extend() appends the contents of seq to list.\n if station_data:\n station.extend([(days[a], float(b)) for a, b in station_data])\n else:\n station.extend([(0,0)])\n\n return station\n\n\nclass Weather(Base):\n \"\"\"holds data scraped from the open weather API.\"\"\"\n __tablename__ = \"weather\"\n id = Column(Integer, nullable=False, primary_key=True, autoincrement=True)\n coord_lon = Column(Float)\n coord_lat = Column(Float)\n weather_id = Column(Integer)\n weather_main = Column(String(45))\n weather_description = Column(String(45))\n weather_icon = Column(String(10))\n base = Column(String(45))\n main_temp = Column(Integer)\n main_pressure = Column(Integer)\n main_humidity = Column(Integer)\n main_temp_min = Column(Integer)\n main_temp_max = Column(Integer)\n visibility = Column(Integer)\n wind_speed = Column(Float)\n wind_deg = Column(Integer)\n clouds_all = Column(Integer)\n dt = Column(DateTime)\n sys_type = Column(Integer)\n sys_id = Column(Integer)\n sys_message = Column(Float)\n sys_country = Column(String(2))\n sys_sunrise = Column(DateTime)\n sys_sunset = Column(DateTime)\n city_id = Column(Integer)\n city_name = Column(String(6))\n cod = Column(Integer)\n\n @classmethod\n def 
findWetWeatherDays(self, dbsession, today):\n \"\"\"finds days where there was wet weather.\"\"\"\n wetDays = dbsession.query(self.dt).filter(or_(self.weather_description == \"light rain\", self.weather_description == \"moderate rain\")).all()\n # if one of those days is today return it.\n # else just return a wet day.\n for i in range(len(wetDays)):\n if today == wetDays[i][0].weekday():\n return wetDays[i][0]\n else:\n return wetDays[0][0]\n\n\n# path to DB\nconnection_string='mysql+mysqldb://{username}:{password}@{host}:3306/dublinbikesdata'.format(username=os.environ['DatabaseUser'],\n password=os.environ['DatabasePassword'],\n host=os.environ['DatabaseServer'])\nengine = create_engine(connection_string, poolclass=NullPool)\n\n# create the session using sqlalchemy.\ndb_session = scoped_session(sessionmaker(bind=engine, autocommit=False, autoflush=False))\n\n\nif __name__==\"__main__\":\n \"\"\"Below is used for testing if the database is working by running this file directly.\n not used in the actual app.\"\"\"\n station_id = 42\n\n static_info = db_session.query(Station.number,\n Station.name,\n Station.address,\n Station.position_lat,\n Station.position_long).all()\n dynamic_info = Station.get_current_station_info(db_session)\n static_fields = ['number', 'name', 'address', 'position_lat', 'position_long']\n dynamic_fields = ['last_update', 'available_bike_stands', 'available_bikes']\n\n json_data = [dict(zip(static_fields + dynamic_fields, static + dynamic))\n for static, dynamic in\n zip(static_info, dynamic_info)]\n print(json_data)\n",
"step-ids": [
9,
11,
12,
16,
21
]
}
|
[
9,
11,
12,
16,
21
] |
# SPDX-FileCopyrightText: 2013 The glucometerutils Authors
#
# SPDX-License-Identifier: Unlicense
|
normal
|
{
"blob_id": "39ffb85fb10882041c2c9a81d796e7ff9df7d930",
"index": 8551,
"step-1": "# SPDX-FileCopyrightText: 2013 The glucometerutils Authors\n#\n# SPDX-License-Identifier: Unlicense\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
1
]
}
|
[
1
] |
import sys
def input(_type=str):
return _type(sys.stdin.readline().strip())
def main():
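    # binary-search the value range for the D-th smallest element among the K
    # arithmetic progressions; each rule (A, B, C) generates A, A+C, ... up to B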
N, K, D = map(int, input().split())
rules = [tuple(map(int, input().split())) for _ in range(K)]
minv, maxv = min([r[0] for r in rules]), max([r[1] for r in rules])
while minv + 1 < maxv:
midv = (minv + maxv)//2
cnt, max_in = 0, 0
for A, B, C in rules:
if midv < A:
continue
n = (min(midv, B)-A)//C
max_in = max(A + n * C, max_in)
cnt += n + 1
# print(minv, midv, maxv, max_in, cnt)
if cnt >= D:
maxv = max_in
else:
minv = midv + 1
if minv < maxv:
cnt, max_in = 0, 0
for A, B, C in rules:
if minv < A:
continue
max_in = max(A + (min(minv, B)-A)//C * C, max_in)
cnt += (min(minv, B) - A)//C + 1
if cnt >= D:
maxv = max_in
print(maxv)
main()
# 10 20 30 40 50
# 30 60 90
# 20 45 70
# 70 95
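# hypothetical invocation (reads N K D, then K rules, from stdin); with the two
# rules (10, 50, 10) and (30, 90, 30) the merged values are
# 10 20 30 30 40 50 60 90, so D = 7 should print 60:
#   printf '100 2 7\n10 50 10\n30 90 30\n' | python this_script.py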
|
normal
|
{
"blob_id": "f0b98a3d6015d57a49e315ac984cac1cccf0b382",
"index": 6084,
"step-1": "<mask token>\n\n\ndef main():\n N, K, D = map(int, input().split())\n rules = [tuple(map(int, input().split())) for _ in range(K)]\n minv, maxv = min([r[0] for r in rules]), max([r[1] for r in rules])\n while minv + 1 < maxv:\n midv = (minv + maxv) // 2\n cnt, max_in = 0, 0\n for A, B, C in rules:\n if midv < A:\n continue\n n = (min(midv, B) - A) // C\n max_in = max(A + n * C, max_in)\n cnt += n + 1\n if cnt >= D:\n maxv = max_in\n else:\n minv = midv + 1\n if minv < maxv:\n cnt, max_in = 0, 0\n for A, B, C in rules:\n if minv < A:\n continue\n max_in = max(A + (min(minv, B) - A) // C * C, max_in)\n cnt += (min(minv, B) - A) // C + 1\n if cnt >= D:\n maxv = max_in\n print(maxv)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef input(_type=str):\n return _type(sys.stdin.readline().strip())\n\n\ndef main():\n N, K, D = map(int, input().split())\n rules = [tuple(map(int, input().split())) for _ in range(K)]\n minv, maxv = min([r[0] for r in rules]), max([r[1] for r in rules])\n while minv + 1 < maxv:\n midv = (minv + maxv) // 2\n cnt, max_in = 0, 0\n for A, B, C in rules:\n if midv < A:\n continue\n n = (min(midv, B) - A) // C\n max_in = max(A + n * C, max_in)\n cnt += n + 1\n if cnt >= D:\n maxv = max_in\n else:\n minv = midv + 1\n if minv < maxv:\n cnt, max_in = 0, 0\n for A, B, C in rules:\n if minv < A:\n continue\n max_in = max(A + (min(minv, B) - A) // C * C, max_in)\n cnt += (min(minv, B) - A) // C + 1\n if cnt >= D:\n maxv = max_in\n print(maxv)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef input(_type=str):\n return _type(sys.stdin.readline().strip())\n\n\ndef main():\n N, K, D = map(int, input().split())\n rules = [tuple(map(int, input().split())) for _ in range(K)]\n minv, maxv = min([r[0] for r in rules]), max([r[1] for r in rules])\n while minv + 1 < maxv:\n midv = (minv + maxv) // 2\n cnt, max_in = 0, 0\n for A, B, C in rules:\n if midv < A:\n continue\n n = (min(midv, B) - A) // C\n max_in = max(A + n * C, max_in)\n cnt += n + 1\n if cnt >= D:\n maxv = max_in\n else:\n minv = midv + 1\n if minv < maxv:\n cnt, max_in = 0, 0\n for A, B, C in rules:\n if minv < A:\n continue\n max_in = max(A + (min(minv, B) - A) // C * C, max_in)\n cnt += (min(minv, B) - A) // C + 1\n if cnt >= D:\n maxv = max_in\n print(maxv)\n\n\nmain()\n",
"step-4": "import sys\n\n\ndef input(_type=str):\n return _type(sys.stdin.readline().strip())\n\n\ndef main():\n N, K, D = map(int, input().split())\n rules = [tuple(map(int, input().split())) for _ in range(K)]\n minv, maxv = min([r[0] for r in rules]), max([r[1] for r in rules])\n while minv + 1 < maxv:\n midv = (minv + maxv) // 2\n cnt, max_in = 0, 0\n for A, B, C in rules:\n if midv < A:\n continue\n n = (min(midv, B) - A) // C\n max_in = max(A + n * C, max_in)\n cnt += n + 1\n if cnt >= D:\n maxv = max_in\n else:\n minv = midv + 1\n if minv < maxv:\n cnt, max_in = 0, 0\n for A, B, C in rules:\n if minv < A:\n continue\n max_in = max(A + (min(minv, B) - A) // C * C, max_in)\n cnt += (min(minv, B) - A) // C + 1\n if cnt >= D:\n maxv = max_in\n print(maxv)\n\n\nmain()\n",
"step-5": "import sys\ndef input(_type=str):\n\treturn _type(sys.stdin.readline().strip())\n\ndef main():\n\tN, K, D = map(int, input().split())\n\trules = [tuple(map(int, input().split())) for _ in range(K)]\n\tminv, maxv = min([r[0] for r in rules]), max([r[1] for r in rules])\n\twhile minv + 1 < maxv:\n\t\tmidv = (minv + maxv)//2 \n\t\tcnt, max_in = 0, 0\n\t\tfor A, B, C in rules:\n\t\t\tif midv < A:\n\t\t\t\tcontinue\n\t\t\tn = (min(midv, B)-A)//C\n\t\t\tmax_in = max(A + n * C, max_in)\n\t\t\tcnt += n + 1\n\t\t# print(minv, midv, maxv, max_in, cnt)\n\t\tif cnt >= D:\n\t\t\tmaxv = max_in\n\t\telse:\n\t\t\tminv = midv + 1\n\n\tif minv < maxv:\n\t\tcnt, max_in = 0, 0\n\t\tfor A, B, C in rules:\n\t\t\tif minv < A:\n\t\t\t\tcontinue\n\t\t\tmax_in = max(A + (min(minv, B)-A)//C * C, max_in)\n\t\t\tcnt += (min(minv, B) - A)//C + 1\n\t\tif cnt >= D:\n\t\t\tmaxv = max_in\n\tprint(maxv)\n\nmain()\n\n# 10 20 30 40 50\n# 30 60 90\n# 20 45 70\n# 70 95",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 24 20:59:36 2021
@author: Abeg
"""
#factorial using recursion
"""def factorial(n):
if n==0 or n==1:
return 1
elif n==2:
return n
else:
return n*factorial(n-1)
n=int(input("enter the no"))
print(factorial(n))"""
#fibonancci using recursion
"""def fiborecursively(n):
if n<=1:
return n
else:
return(fiborecursively(n-1) + fiborecursively(n-2))
for i in range(0,10):
print(fiborecursively(i))"""
#reverse a string with recursion
def reverse(string):
if len(string) == 0:
return
temp = string[0]
reverse(string[1:])
print(temp,end="")
string = input()
reverse(string)
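# for reference, plain slicing reverses a string without recursion:
# print(string[::-1])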
|
normal
|
{
"blob_id": "d1ee33ce6fb071aae800b0597a09e7039a209ec8",
"index": 2574,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef reverse(string):\n if len(string) == 0:\n return\n temp = string[0]\n reverse(string[1:])\n print(temp, end='')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef reverse(string):\n if len(string) == 0:\n return\n temp = string[0]\n reverse(string[1:])\n print(temp, end='')\n\n\n<mask token>\nreverse(string)\n",
"step-4": "<mask token>\n\n\ndef reverse(string):\n if len(string) == 0:\n return\n temp = string[0]\n reverse(string[1:])\n print(temp, end='')\n\n\nstring = input()\nreverse(string)\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed Mar 24 20:59:36 2021\r\n\r\n@author: Abeg\r\n\"\"\"\r\n#factorial using recursion\r\n\"\"\"def factorial(n):\r\n if n==0 or n==1:\r\n return 1\r\n elif n==2:\r\n return n \r\n else:\r\n return n*factorial(n-1)\r\nn=int(input(\"enter the no\"))\r\nprint(factorial(n))\"\"\"\r\n#fibonancci using recursion\r\n\"\"\"def fiborecursively(n):\r\n if n<=1:\r\n return n\r\n else:\r\n return(fiborecursively(n-1) + fiborecursively(n-2))\r\nfor i in range(0,10):\r\n print(fiborecursively(i))\"\"\"\r\n \r\n#reverse a string with recursion\r\ndef reverse(string): \r\n if len(string) == 0: \r\n return\r\n temp = string[0] \r\n reverse(string[1:]) \r\n print(temp,end=\"\")\r\nstring=(input())\r\nreverse(string)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# coding: UTF-8
import os
import sys
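
# note: this script uses Python 2 print-statement syntax (see below); run it
# with a Python 2 interpreter, as the bare /usr/bin/python shebang suggests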
if len(sys.argv) == 3:
fname = sys.argv[1]
out_dir = sys.argv[2]
else:
print "usage: vcf_spliter <input file> <output dir>"
exit()
count = 0
if not os.path.exists(out_dir):
os.makedirs(out_dir)
with open(fname, 'r') as f:
for l in f:
if l.strip() == "BEGIN:VCARD":
count += 1
fw = open(os.path.join(out_dir, str(count)+'.vcf'), 'w')
fw.write(l)
elif l.strip() == "END:VCARD":
fw.write(l)
fw.close()
else:
fw.write(l)
|
normal
|
{
"blob_id": "f410a77d4041514383110d9fd16f896178924d59",
"index": 8871,
"step-1": "# coding: UTF-8\n\nimport os \nimport sys\n\nif len(sys.argv) == 3:\n fname = sys.argv[1]\n out_dir = sys.argv[2]\nelse:\n print \"usage: vcf_spliter <input file> <output dir>\"\n exit()\n\ncount = 0\nif not os.path.exists(out_dir):\n os.makedirs(out_dir)\n\nwith open(fname, 'r') as f:\n for l in f:\n if l.strip() == \"BEGIN:VCARD\":\n count += 1\n fw = open(os.path.join(out_dir, str(count)+'.vcf'), 'w')\n fw.write(l)\n elif l.strip() == \"END:VCARD\":\n fw.write(l)\n fw.close()\n else:\n fw.write(l)",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
count=0
def merge(a, b):
global count
c = []
h = j = 0
while j < len(a) and h < len(b):
if a[j] <= b[h]:
c.append(a[j])
j += 1
else:
count+=(len(a[j:]))
c.append(b[h])
h += 1
if j == len(a):
for i in b[h:]:
c.append(i)
else:
for i in a[j:]:
c.append(i)
# count += h+1
return c
def merge_sort(lists):
if len(lists) <= 1:
return lists
middle = len(lists)//2
left = merge_sort(lists[:middle])
right = merge_sort(lists[middle:])
return merge(left, right)
if __name__ == '__main__':
    a = [7, 6, 5, 9, 10, 11]
    print(merge_sort(a))
    print(count)  # number of inversions counted during the merges
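    # optional sanity check (O(n^2), small inputs only): count inversions by
    # brute force and compare with the merge-based count above
    brute = sum(1 for x in range(len(a)) for y in range(x + 1, len(a)) if a[x] > a[y])
    print(brute)  # 3 for this input, matching count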
|
normal
|
{
"blob_id": "cf3b66a635c6549553af738f263b035217e75a7a",
"index": 903,
"step-1": "<mask token>\n\n\ndef merge_sort(lists):\n if len(lists) <= 1:\n return lists\n middle = len(lists) // 2\n left = merge_sort(lists[:middle])\n right = merge_sort(lists[middle:])\n return merge(left, right)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef merge(a, b):\n global count\n c = []\n h = j = 0\n while j < len(a) and h < len(b):\n if a[j] <= b[h]:\n c.append(a[j])\n j += 1\n else:\n count += len(a[j:])\n c.append(b[h])\n h += 1\n if j == len(a):\n for i in b[h:]:\n c.append(i)\n else:\n for i in a[j:]:\n c.append(i)\n return c\n\n\ndef merge_sort(lists):\n if len(lists) <= 1:\n return lists\n middle = len(lists) // 2\n left = merge_sort(lists[:middle])\n right = merge_sort(lists[middle:])\n return merge(left, right)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef merge(a, b):\n global count\n c = []\n h = j = 0\n while j < len(a) and h < len(b):\n if a[j] <= b[h]:\n c.append(a[j])\n j += 1\n else:\n count += len(a[j:])\n c.append(b[h])\n h += 1\n if j == len(a):\n for i in b[h:]:\n c.append(i)\n else:\n for i in a[j:]:\n c.append(i)\n return c\n\n\ndef merge_sort(lists):\n if len(lists) <= 1:\n return lists\n middle = len(lists) // 2\n left = merge_sort(lists[:middle])\n right = merge_sort(lists[middle:])\n return merge(left, right)\n\n\nif __name__ == '__main__':\n a = [7, 6, 5, 9, 10, 11]\n print(merge_sort(a))\n print(count)\n hash(i)\n",
"step-4": "count = 0\n\n\ndef merge(a, b):\n global count\n c = []\n h = j = 0\n while j < len(a) and h < len(b):\n if a[j] <= b[h]:\n c.append(a[j])\n j += 1\n else:\n count += len(a[j:])\n c.append(b[h])\n h += 1\n if j == len(a):\n for i in b[h:]:\n c.append(i)\n else:\n for i in a[j:]:\n c.append(i)\n return c\n\n\ndef merge_sort(lists):\n if len(lists) <= 1:\n return lists\n middle = len(lists) // 2\n left = merge_sort(lists[:middle])\n right = merge_sort(lists[middle:])\n return merge(left, right)\n\n\nif __name__ == '__main__':\n a = [7, 6, 5, 9, 10, 11]\n print(merge_sort(a))\n print(count)\n hash(i)\n",
"step-5": "count=0\ndef merge(a, b):\n global count\n c = []\n h = j = 0\n while j < len(a) and h < len(b):\n if a[j] <= b[h]:\n c.append(a[j])\n j += 1\n else:\n count+=(len(a[j:]))\n c.append(b[h])\n h += 1\n\n if j == len(a):\n for i in b[h:]:\n c.append(i)\n else:\n for i in a[j:]:\n c.append(i)\n # count += h+1\n\n return c\n\ndef merge_sort(lists):\n if len(lists) <= 1:\n return lists\n middle = len(lists)//2\n left = merge_sort(lists[:middle])\n right = merge_sort(lists[middle:])\n return merge(left, right)\n\n\nif __name__ == '__main__':\n a = [7, 6, 5,9, 10, 11]\n print(merge_sort(a))\n print(count)\n hash(i)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from math import sqrt
from numpy import concatenate
from matplotlib import pyplot
from pandas import read_csv
from pandas import DataFrame
from pandas import concat
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import LabelEncoder
from sklearn.metrics import mean_squared_error
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import LSTM
from tensorflow.keras.models import load_model
import matplotlib.pyplot as plt
import numpy as np
def plot(actual, prediction):
    plt.figure(figsize=(16, 6))
    plt.plot(actual, label='Actual', color='b', linewidth=3)
    plt.plot(prediction, label='Prediction', color='y')
    print("Plotting")
    plt.legend()
    plt.show()
timesteps = 2
params = 5
samples = 500000
# load dataset
dataset = read_csv('merged.csv', header=0, usecols = ['time', 'src', 'dst', 'length', 'protocol', 'people'])
values = dataset.values
encoder = LabelEncoder()
values[:,5] = encoder.fit_transform(values[:,5])
values = values.astype('float32')
# normalize features
scaler = MinMaxScaler(feature_range=(0, 1))
scaled = scaler.fit_transform(values)
labels = scaled.copy()
scaled = np.delete(scaled, 5, axis=1)  # features only: drop the 'people' column
labels = np.delete(labels, [0, 1, 2, 3, 4], axis=1)  # labels only: keep the 'people' column
labels = scaler.fit_transform(labels)
labels = labels[:samples // timesteps]  # integer division: slice indices must be ints
scaled = scaled[:samples]
reframed = np.reshape(scaled, (samples, params))
values = np.reshape(reframed, (samples // timesteps, timesteps, -1))
size = len(values) // timesteps
sizeL = len(labels) // timesteps
test_X = values[:size]
test_y = labels[:sizeL]
model = load_model("test50.h5")
#predicts
yhat = model.predict(test_X)
plot(test_y, yhat)
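# note: test_y and yhat are in the scaler's 0-1 range; to report predictions in
# original units, something like scaler.inverse_transform(yhat) could be applied
# (the scaler was last fitted on the single 'people' column above)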
|
normal
|
{
"blob_id": "11984027baf6d4c97b2976e4ac49a0e8ec62f893",
"index": 8709,
"step-1": "from math import sqrt\nfrom numpy import concatenate\nfrom matplotlib import pyplot\nfrom pandas import read_csv\nfrom pandas import DataFrame\nfrom pandas import concat\nfrom sklearn.preprocessing import MinMaxScaler\nfrom sklearn.preprocessing import LabelEncoder\nfrom sklearn.metrics import mean_squared_error\nfrom tensorflow.keras.models import Sequential\nfrom tensorflow.keras.layers import Dense\nfrom tensorflow.keras.layers import LSTM\nfrom tensorflow.keras.models import load_model\nimport matplotlib.pyplot as plt\nimport numpy as np\n\ndef plot(actual, prediction):\n\tplt.figure(figsize=(16,6))\n\tplt.plot(actual, label='Actual',color='b',linewidth=3)\n\tplt.plot((prediction), label='Prediction',color='y') \n\tprint(\"Plotting\")\n plt.legend()\n plt.show()\n\ntimesteps = 2\nparams = 5\nsamples = 500000\n\n# load dataset\ndataset = read_csv('merged.csv', header=0, usecols = ['time', 'src', 'dst', 'length', 'protocol', 'people'])\nvalues = dataset.values\n\nencoder = LabelEncoder()\nvalues[:,5] = encoder.fit_transform(values[:,5])\n\nvalues = values.astype('float32')\n\n# normalize features\nscaler = MinMaxScaler(feature_range=(0, 1))\nscaled = scaler.fit_transform(values)\n\nlabels = scaled.copy()\nscaled = np.delete(scaled, 5, axis=1)\nlabels = np.delete(labels, 0, axis =1)\nlabels = np.delete(labels, 0, axis =1)\nlabels = np.delete(labels, 0, axis =1)\nlabels = np.delete(labels, 0, axis =1)\nlabels = np.delete(labels, 0, axis =1)\nlabels = scaler.fit_transform(labels)\n\nlabels = labels[:(samples/timesteps)]\n\nscaled = scaled[:samples]\nreframed = np.reshape(scaled,(samples, params))\nvalues = np.reshape(reframed,((samples/timesteps), timesteps,-1))\n\nsize = ((len(values))/timesteps)\nsizeL = ((len(labels))/timesteps)\n\ntest_X = values[:size]\ntest_y = labels[:sizeL]\n\nmodel = load_model(\"test50.h5\")\n\n#predicts\nyhat = model.predict(test_X)\nplot(test_y, yhat)\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from bs4 import BeautifulSoup
import urllib.request
import re
import math
url_header = "http://srh.bankofchina.com/search/whpj/search.jsp?erectDate=2016-01-25¬hing=2016-02-25&pjname=1314"
Webpage = urllib.request.urlopen(url_header).read()
Webpage=Webpage.decode('UTF-8')
# soup = BeautifulSoup(Webpage)
print (Webpage)
a=re.findall(r'var m_nRecordCount = (\d+)',str(Webpage))
print(a)
# page_count=soup.find('script')
# print(page_count)
total_page=math.ceil(int(a[0])/20)
print(total_page)
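# with the record count in hand, individual result pages could be fetched in a
# loop; the page parameter name here is a guess and depends on the site:
# for page in range(1, total_page + 1):
#     page_url = url_header + '&page=' + str(page)
#     page_html = urllib.request.urlopen(page_url).read().decode('UTF-8')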
|
normal
|
{
"blob_id": "62a86bd33755510f0d71f4920e63be1a3ce8c563",
"index": 6304,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(Webpage)\n<mask token>\nprint(a)\n<mask token>\nprint(total_page)\n",
"step-3": "<mask token>\nurl_header = (\n 'http://srh.bankofchina.com/search/whpj/search.jsp?erectDate=2016-01-25¬hing=2016-02-25&pjname=1314'\n )\nWebpage = urllib.request.urlopen(url_header).read()\nWebpage = Webpage.decode('UTF-8')\nprint(Webpage)\na = re.findall('var m_nRecordCount = (\\\\d+)', str(Webpage))\nprint(a)\ntotal_page = math.ceil(int(a[0]) / 20)\nprint(total_page)\n",
"step-4": "from bs4 import BeautifulSoup\nimport urllib.request\nimport re\nimport math\nurl_header = (\n 'http://srh.bankofchina.com/search/whpj/search.jsp?erectDate=2016-01-25¬hing=2016-02-25&pjname=1314'\n )\nWebpage = urllib.request.urlopen(url_header).read()\nWebpage = Webpage.decode('UTF-8')\nprint(Webpage)\na = re.findall('var m_nRecordCount = (\\\\d+)', str(Webpage))\nprint(a)\ntotal_page = math.ceil(int(a[0]) / 20)\nprint(total_page)\n",
"step-5": "from bs4 import BeautifulSoup\nimport urllib.request\nimport re\nimport math\n\nurl_header = \"http://srh.bankofchina.com/search/whpj/search.jsp?erectDate=2016-01-25¬hing=2016-02-25&pjname=1314\"\nWebpage = urllib.request.urlopen(url_header).read()\nWebpage=Webpage.decode('UTF-8')\n# soup = BeautifulSoup(Webpage)\nprint (Webpage)\na=re.findall(r'var m_nRecordCount = (\\d+)',str(Webpage))\nprint(a)\n# page_count=soup.find('script')\n# print(page_count)\ntotal_page=math.ceil(int(a[0])/20)\nprint(total_page)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python
import pprint
import requests
import string
import subprocess
#Create three files
f_arptable = open( 'arptable', 'w+' )
f_maclist = open( 'maclist', 'w+' )
f_maclookup = open( 'maclookup', 'w+' )
#Give write permissions the three files
subprocess.call([ 'chmod','+w','maclist' ])
subprocess.call([ 'chmod','+w','arptable' ])
subprocess.call([ 'chmod','+w','maclookup' ])
#cols = subprocess.Popen(["arp","-a"],stdout=f)
#Run an arp -a command and write the output to the arptable file,
#waiting for it to finish so the file is complete before we read it
subprocess.call(['arp', '-a'], stdout=f_arptable)
#Pull the company name from the mac lookup service and save the value
#in the variable devmon (one lookup per MAC address)
maclookup_url = 'http://macvendors.co/api/%s'

#Pull the IP and MAC from the arptable file and put them in the
#maclist file along with the value from devmon
for line in open('arptable'):
    if line.startswith('?'):
        ips = line.split()[1]
        macs = line.split()[3]
        # look up the vendor for this MAC; assumes the API returns JSON
        # shaped like {"result": {"company": "..."}} (macvendors.co format)
        req = requests.get(maclookup_url % macs)
        devmon = str(req.json().get('result', {}).get('company', 'unknown'))
        f_maclist.write('\nIP Address: ' + ips + '\nMAC: ' + macs +
            '\nDevice Manufacturer: ' + devmon + '\n' )

f_maclist.flush()
subprocess.Popen(['cat','maclist'])
#print("Phase 1 complete")
#with open('maclist') as fp:
# for line in fp:
# #line.getline(1)
# #mac_field = line.split(':')
# print('line'+"\n")
|
normal
|
{
"blob_id": "d566104b00ffd5f08c564ed554e0d71279a93047",
"index": 6394,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsubprocess.call(['chmod', '+w', 'maclist'])\nsubprocess.call(['chmod', '+w', 'arptable'])\nsubprocess.call(['chmod', '+w', 'maclookup'])\nsubprocess.Popen(['arp', '-a'], stdout=f_arptable)\n<mask token>\nfor line in open('arptable'):\n if line.startswith('?'):\n ips = line.split()[1]\n macs = line.split()[3]\n f_maclist.write('\\nIP Address: ' + ips + '\\nMAC: ' + macs +\n '\\nDevice Manufacturer: ' + devmon + '\\n')\nsubprocess.Popen(['cat', 'maclist'])\n",
"step-3": "<mask token>\nf_arptable = open('arptable', 'w+')\nf_maclist = open('maclist', 'w+')\nf_maclookup = open('maclookup', 'w+')\nsubprocess.call(['chmod', '+w', 'maclist'])\nsubprocess.call(['chmod', '+w', 'arptable'])\nsubprocess.call(['chmod', '+w', 'maclookup'])\nsubprocess.Popen(['arp', '-a'], stdout=f_arptable)\nmaclookup_url = 'http://macvendors.co/api%s'\nreq = requests.get(maclookup_url % 'macs')\nreq_result = pprint.pprint(req.json())\nfor line in open('arptable'):\n if line.startswith('?'):\n ips = line.split()[1]\n macs = line.split()[3]\n f_maclist.write('\\nIP Address: ' + ips + '\\nMAC: ' + macs +\n '\\nDevice Manufacturer: ' + devmon + '\\n')\nsubprocess.Popen(['cat', 'maclist'])\n",
"step-4": "import pprint\nimport requests\nimport string\nimport subprocess\nf_arptable = open('arptable', 'w+')\nf_maclist = open('maclist', 'w+')\nf_maclookup = open('maclookup', 'w+')\nsubprocess.call(['chmod', '+w', 'maclist'])\nsubprocess.call(['chmod', '+w', 'arptable'])\nsubprocess.call(['chmod', '+w', 'maclookup'])\nsubprocess.Popen(['arp', '-a'], stdout=f_arptable)\nmaclookup_url = 'http://macvendors.co/api%s'\nreq = requests.get(maclookup_url % 'macs')\nreq_result = pprint.pprint(req.json())\nfor line in open('arptable'):\n if line.startswith('?'):\n ips = line.split()[1]\n macs = line.split()[3]\n f_maclist.write('\\nIP Address: ' + ips + '\\nMAC: ' + macs +\n '\\nDevice Manufacturer: ' + devmon + '\\n')\nsubprocess.Popen(['cat', 'maclist'])\n",
"step-5": "#!/usr/bin/python\n\nimport pprint\nimport requests\nimport string \nimport subprocess\n\n#Create three files\nf_arptable = open( 'arptable', 'w+' )\nf_maclist = open( 'maclist', 'w+' )\nf_maclookup = open( 'maclookup', 'w+' )\n\n#Give write permissions the three files\nsubprocess.call([ 'chmod','+w','maclist' ])\nsubprocess.call([ 'chmod','+w','arptable' ])\nsubprocess.call([ 'chmod','+w','maclookup' ])\n\n#cols = subprocess.Popen([\"arp\",\"-a\"],stdout=f)\n\n#Run an arp -a command and write the output to the arptable file\nsubprocess.Popen(['arp','-a'],stdout=f_arptable)\n\n#Pull the company name from the maclookup and save the value\n#in the variable devmon\nmaclookup_url = 'http://macvendors.co/api%s'\nreq = requests.get( maclookup_url % 'macs' )\nreq_result = pprint.pprint(req.json())\n\n#Pull the IP and MAC from the arptable file and put them in the\n#maclist file along with the value from devmon\nfor line in open('arptable'):\n if line.startswith('?'):\n ips = line.split()[1]\n macs = line.split()[3]\t\n f_maclist.write('\\nIP Address: ' + ips + '\\nMAC: ' + macs +\n '\\nDevice Manufacturer: ' + devmon + '\\n' )\n\nsubprocess.Popen(['cat','maclist'])\n\n#print(\"Phase 1 complete\")\n\n#with open('maclist') as fp:\n# for line in fp:\n# #line.getline(1)\n# #mac_field = line.split(':')\n# print('line'+\"\\n\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python
#
# Script written by Legoktm, 2011
# Released into the Public Domain on November, 16, 2011
# This product comes with no warranty of any sort.
# Enjoy!
#
from commands import getoutput
def notify(string, program=False):
if not program:
command = 'growlnotify Python -m "%s"' %string
else:
command = 'growlnotify "%s" -m "%s"' %(program, string)
getoutput(command)
def notifyold(string):
#THIS IS THE OLD METHOD. YOU SHOULD ONLY USE THIS IF YOU DO NOT HAVE growlnotify INSTALLED.
print"""]9;%s
""" %string
|
normal
|
{
"blob_id": "4318c99b3de9bb9c44eed57525c9ccbe82a17276",
"index": 5946,
"step-1": "#!/usr/bin/python\n#\n# Script written by Legoktm, 2011\n# Released into the Public Domain on November, 16, 2011\n# This product comes with no warranty of any sort.\n# Enjoy!\n#\nfrom commands import getoutput\ndef notify(string, program=False):\n\tif not program:\n\t\tcommand = 'growlnotify Python -m \"%s\"' %string\n\telse:\n\t\tcommand = 'growlnotify \"%s\" -m \"%s\"' %(program, string)\n\tgetoutput(command)\n\ndef notifyold(string):\n\t#THIS IS THE OLD METHOD. YOU SHOULD ONLY USE THIS IF YOU DO NOT HAVE growlnotify INSTALLED.\n\tprint\"\"\"\u001b]9;%s\u0007\n\"\"\" %string\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import io
import os
from flask import Flask
from werkzeug.datastructures import FileStorage
import pytest
PNG_FILE = os.path.join(os.path.dirname(__file__), 'flask.png')
JPG_FILE = os.path.join(os.path.dirname(__file__), 'flask.jpg')
class TestConfig:
TESTING = True
MONGODB_DB = 'flask-fs-test'
MONGODB_HOST = 'localhost'
MONGODB_PORT = 27017
class TestFlask(Flask):
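    # Convenience wrapper: push config overrides, then register the given storages on the app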
def configure(self, *storages, **configs):
import flask_file_system as fs
for key, value in configs.items():
self.config[key] = value
fs.init_app(self, *storages)
@pytest.fixture
def app():
app = TestFlask('flaskfs-tests')
app.config.from_object(TestConfig)
yield app
@pytest.fixture
def binfile():
return PNG_FILE
@pytest.fixture
def pngfile():
return PNG_FILE
@pytest.fixture
def jpgfile():
return JPG_FILE
class Utils:
def filestorage(self, filename, content, content_type=None):
return FileStorage(
self.file(content),
filename,
content_type=content_type
)
def file(self, content):
if isinstance(content, bytes):
return io.BytesIO(content)
elif isinstance(content, str):
return io.BytesIO(content.encode('utf-8'))
else:
return content
@pytest.fixture
def utils(faker):
return Utils()
@pytest.fixture
def mock_backend(app, mocker):
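    # Point the app at the mock backend and patch its class so tests can assert on calls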
app.config['FS_BACKEND'] = 'mock'
mock = mocker.patch('flask_file_system.backends.mock.MockBackend')
yield mock
|
normal
|
{
"blob_id": "dfc412acc9b69f50396680db1b9f6feafe162996",
"index": 5571,
"step-1": "<mask token>\n\n\nclass TestConfig:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass TestFlask(Flask):\n\n def configure(self, *storages, **configs):\n import flask_file_system as fs\n for key, value in configs.items():\n self.config[key] = value\n fs.init_app(self, *storages)\n\n\n<mask token>\n\n\nclass Utils:\n\n def filestorage(self, filename, content, content_type=None):\n return FileStorage(self.file(content), filename, content_type=\n content_type)\n\n def file(self, content):\n if isinstance(content, bytes):\n return io.BytesIO(content)\n elif isinstance(content, str):\n return io.BytesIO(content.encode('utf-8'))\n else:\n return content\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestConfig:\n TESTING = True\n MONGODB_DB = 'flask-fs-test'\n MONGODB_HOST = 'localhost'\n MONGODB_PORT = 27017\n\n\nclass TestFlask(Flask):\n\n def configure(self, *storages, **configs):\n import flask_file_system as fs\n for key, value in configs.items():\n self.config[key] = value\n fs.init_app(self, *storages)\n\n\n@pytest.fixture\ndef app():\n app = TestFlask('flaskfs-tests')\n app.config.from_object(TestConfig)\n yield app\n\n\n<mask token>\n\n\n@pytest.fixture\ndef jpgfile():\n return JPG_FILE\n\n\nclass Utils:\n\n def filestorage(self, filename, content, content_type=None):\n return FileStorage(self.file(content), filename, content_type=\n content_type)\n\n def file(self, content):\n if isinstance(content, bytes):\n return io.BytesIO(content)\n elif isinstance(content, str):\n return io.BytesIO(content.encode('utf-8'))\n else:\n return content\n\n\n<mask token>\n\n\n@pytest.fixture\ndef mock_backend(app, mocker):\n app.config['FS_BACKEND'] = 'mock'\n mock = mocker.patch('flask_file_system.backends.mock.MockBackend')\n yield mock\n",
"step-3": "<mask token>\n\n\nclass TestConfig:\n TESTING = True\n MONGODB_DB = 'flask-fs-test'\n MONGODB_HOST = 'localhost'\n MONGODB_PORT = 27017\n\n\nclass TestFlask(Flask):\n\n def configure(self, *storages, **configs):\n import flask_file_system as fs\n for key, value in configs.items():\n self.config[key] = value\n fs.init_app(self, *storages)\n\n\n@pytest.fixture\ndef app():\n app = TestFlask('flaskfs-tests')\n app.config.from_object(TestConfig)\n yield app\n\n\n<mask token>\n\n\n@pytest.fixture\ndef pngfile():\n return PNG_FILE\n\n\n@pytest.fixture\ndef jpgfile():\n return JPG_FILE\n\n\nclass Utils:\n\n def filestorage(self, filename, content, content_type=None):\n return FileStorage(self.file(content), filename, content_type=\n content_type)\n\n def file(self, content):\n if isinstance(content, bytes):\n return io.BytesIO(content)\n elif isinstance(content, str):\n return io.BytesIO(content.encode('utf-8'))\n else:\n return content\n\n\n<mask token>\n\n\n@pytest.fixture\ndef mock_backend(app, mocker):\n app.config['FS_BACKEND'] = 'mock'\n mock = mocker.patch('flask_file_system.backends.mock.MockBackend')\n yield mock\n",
"step-4": "<mask token>\n\n\nclass TestConfig:\n TESTING = True\n MONGODB_DB = 'flask-fs-test'\n MONGODB_HOST = 'localhost'\n MONGODB_PORT = 27017\n\n\nclass TestFlask(Flask):\n\n def configure(self, *storages, **configs):\n import flask_file_system as fs\n for key, value in configs.items():\n self.config[key] = value\n fs.init_app(self, *storages)\n\n\n@pytest.fixture\ndef app():\n app = TestFlask('flaskfs-tests')\n app.config.from_object(TestConfig)\n yield app\n\n\n@pytest.fixture\ndef binfile():\n return PNG_FILE\n\n\n@pytest.fixture\ndef pngfile():\n return PNG_FILE\n\n\n@pytest.fixture\ndef jpgfile():\n return JPG_FILE\n\n\nclass Utils:\n\n def filestorage(self, filename, content, content_type=None):\n return FileStorage(self.file(content), filename, content_type=\n content_type)\n\n def file(self, content):\n if isinstance(content, bytes):\n return io.BytesIO(content)\n elif isinstance(content, str):\n return io.BytesIO(content.encode('utf-8'))\n else:\n return content\n\n\n@pytest.fixture\ndef utils(faker):\n return Utils()\n\n\n@pytest.fixture\ndef mock_backend(app, mocker):\n app.config['FS_BACKEND'] = 'mock'\n mock = mocker.patch('flask_file_system.backends.mock.MockBackend')\n yield mock\n",
"step-5": "import io\nimport os\n\nfrom flask import Flask\nfrom werkzeug.datastructures import FileStorage\n\nimport pytest\n\nPNG_FILE = os.path.join(os.path.dirname(__file__), 'flask.png')\nJPG_FILE = os.path.join(os.path.dirname(__file__), 'flask.jpg')\n\n\nclass TestConfig:\n TESTING = True\n MONGODB_DB = 'flask-fs-test'\n MONGODB_HOST = 'localhost'\n MONGODB_PORT = 27017\n\n\nclass TestFlask(Flask):\n def configure(self, *storages, **configs):\n import flask_file_system as fs\n for key, value in configs.items():\n self.config[key] = value\n fs.init_app(self, *storages)\n\n\n@pytest.fixture\ndef app():\n app = TestFlask('flaskfs-tests')\n app.config.from_object(TestConfig)\n yield app\n\n\n@pytest.fixture\ndef binfile():\n return PNG_FILE\n\n\n@pytest.fixture\ndef pngfile():\n return PNG_FILE\n\n\n@pytest.fixture\ndef jpgfile():\n return JPG_FILE\n\n\nclass Utils:\n def filestorage(self, filename, content, content_type=None):\n return FileStorage(\n self.file(content),\n filename,\n content_type=content_type\n )\n\n def file(self, content):\n if isinstance(content, bytes):\n return io.BytesIO(content)\n elif isinstance(content, str):\n return io.BytesIO(content.encode('utf-8'))\n else:\n return content\n\n\n@pytest.fixture\ndef utils(faker):\n return Utils()\n\n\n@pytest.fixture\ndef mock_backend(app, mocker):\n app.config['FS_BACKEND'] = 'mock'\n mock = mocker.patch('flask_file_system.backends.mock.MockBackend')\n yield mock\n",
"step-ids": [
6,
10,
11,
13,
16
]
}
|
[
6,
10,
11,
13,
16
] |
class Figure:
area = 0
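    # Returning None from __new__ keeps Figure itself non-instantiable:
    # Figure() evaluates to None, while subclasses get real instances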
def __new__(cls, *args):
if cls is Figure:
return None
return object.__new__(cls)
def add_area(self, other):
if isinstance(other, Figure):
return self.area + other.area
else:
raise ValueError("Should pass Figure as parameter")
|
normal
|
{
"blob_id": "ceab21e41adf171e99e6c3c8541c418d82db6168",
"index": 3272,
"step-1": "class Figure:\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "class Figure:\n <mask token>\n\n def __new__(cls, *args):\n if cls is Figure:\n return None\n return object.__new__(cls)\n <mask token>\n",
"step-3": "class Figure:\n <mask token>\n\n def __new__(cls, *args):\n if cls is Figure:\n return None\n return object.__new__(cls)\n\n def add_area(self, other):\n if isinstance(other, Figure):\n return self.area + other.area\n else:\n raise ValueError('Should pass Figure as parameter')\n",
"step-4": "class Figure:\n area = 0\n\n def __new__(cls, *args):\n if cls is Figure:\n return None\n return object.__new__(cls)\n\n def add_area(self, other):\n if isinstance(other, Figure):\n return self.area + other.area\n else:\n raise ValueError('Should pass Figure as parameter')\n",
"step-5": "class Figure:\n area = 0\n\n def __new__(cls, *args):\n if cls is Figure:\n return None\n return object.__new__(cls)\n\n def add_area(self, other):\n if isinstance(other, Figure):\n return self.area + other.area\n else:\n raise ValueError(\"Should pass Figure as parameter\")\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
# caixinjun
import argparse
from sklearn import metrics
import datetime
import jieba
from sklearn.feature_extraction.text import TfidfVectorizer
import pickle
from sklearn import svm
import os
import warnings
warnings.filterwarnings('ignore')
def get_data(train_file):
target = []
data = []
with open(train_file, 'r', encoding='utf-8') as f:
for line in f.readlines():
line = line.strip().split("\t")
if len(line) == 1:
continue
target.append(int(line[0]))
data.append(line[1])
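    # Segment the Chinese text with jieba, then join tokens with spaces so TfidfVectorizer can split them back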
data = list(map(jieba.lcut, data))
data = [" ".join(d) for d in data]
return data, target
def train(cls, data, target, model_path):
cls = cls.fit(data, target)
with open(model_path, 'wb') as f:
pickle.dump(cls, f)
def trans(data, matrix_path, stopword_path):
with open(stopword_path, 'r', encoding='utf-8') as fs:
        stop_words = [line.strip() for line in fs.readlines()]
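    # token_pattern (?u)\b\w+\b keeps single-character tokens, which scikit-learn's default pattern (\w\w+) would drop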
tfidf = TfidfVectorizer(token_pattern=r"(?u)\b\w+\b", stop_words=stop_words)
features = tfidf.fit_transform(data)
with open(matrix_path, 'wb') as f:
pickle.dump(tfidf, f)
return features
def load_models(matrix_path, model_path):
tfidf, cls = None, None
if os.path.isfile(model_path):
with open(model_path, 'rb') as f:
cls = pickle.load(f)
if os.path.isfile(matrix_path):
with open(matrix_path, 'rb') as f:
tfidf = pickle.load(f)
return tfidf, cls
def test(matrix_path, model_path, data_path, outdir):
curr_time = datetime.datetime.now()
time_str = curr_time.strftime("%Y-%m-%d %H-%M-%S")
out_path = outdir + '/%s/' % time_str
out_file = os.path.join(out_path, "results.txt")
if not os.path.exists(out_path):
os.makedirs(out_path)
data, target = get_data(data_path)
tfidf, cls = load_models(matrix_path, model_path)
    if tfidf is None or cls is None:
print("cannot load models........")
return
feature = tfidf.transform(data)
predicted = cls.predict(feature)
acc = metrics.accuracy_score(target, predicted)
pre = metrics.precision_score(target, predicted)
recall = metrics.recall_score(target, predicted)
f1 = metrics.f1_score(target, predicted)
fpr, tpr, thresholds = metrics.roc_curve(target, predicted)
auc = metrics.auc(fpr, tpr)
print("accuracy_score: ", acc)
print("precision_score: ", pre)
print("recall_score: ", recall)
print("f1_score: ", f1)
print("auc: ", auc)
with open(out_file, 'w', encoding='utf-8') as f:
for label in predicted:
f.write(str(label) + '\n')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--train', type=str, default='./data/train.txt', help='training data')
parser.add_argument('--test', type=str, default='./data/test.txt', help='test data')
parser.add_argument('--stopwords', type=str, default='./data/hit_stopwords.txt', help='stop words')
parser.add_argument('--model', type=str, default='./model/svm_model.pkl', help='classification model')
parser.add_argument('--matrix', type=str, default='./model/tfidf.pkl', help='tfidf model')
parser.add_argument('--outpath', type=str, default='./results/', help='out path')
args = parser.parse_args()
print("data processing.......")
data, target = get_data(args.train)
print("transform data.......")
features = trans(data, args.matrix, args.stopwords)
print("training model.......")
cls = svm.LinearSVC()
train(cls, features, target, args.model)
print("test.......")
test(args.matrix, args.model, args.test, args.outpath)
|
normal
|
{
"blob_id": "199872ea459a9dba9975c6531034bdbc1e77f1db",
"index": 5875,
"step-1": "<mask token>\n\n\ndef train(cls, data, target, model_path):\n cls = cls.fit(data, target)\n with open(model_path, 'wb') as f:\n pickle.dump(cls, f)\n\n\n<mask token>\n\n\ndef load_models(matrix_path, model_path):\n tfidf, cls = None, None\n if os.path.isfile(model_path):\n with open(model_path, 'rb') as f:\n cls = pickle.load(f)\n if os.path.isfile(matrix_path):\n with open(matrix_path, 'rb') as f:\n tfidf = pickle.load(f)\n return tfidf, cls\n\n\ndef test(matrix_path, model_path, data_path, outdir):\n curr_time = datetime.datetime.now()\n time_str = curr_time.strftime('%Y-%m-%d %H-%M-%S')\n out_path = outdir + '/%s/' % time_str\n out_file = os.path.join(out_path, 'results.txt')\n if not os.path.exists(out_path):\n os.makedirs(out_path)\n data, target = get_data(data_path)\n tfidf, cls = load_models(matrix_path, model_path)\n if tfidf == None or cls == None:\n print('cannot load models........')\n return\n feature = tfidf.transform(data)\n predicted = cls.predict(feature)\n acc = metrics.accuracy_score(target, predicted)\n pre = metrics.precision_score(target, predicted)\n recall = metrics.recall_score(target, predicted)\n f1 = metrics.f1_score(target, predicted)\n fpr, tpr, thresholds = metrics.roc_curve(target, predicted)\n auc = metrics.auc(fpr, tpr)\n print('accuracy_score: ', acc)\n print('precision_score: ', pre)\n print('recall_score: ', recall)\n print('f1_score: ', f1)\n print('auc: ', auc)\n with open(out_file, 'w', encoding='utf-8') as f:\n for label in predicted:\n f.write(str(label) + '\\n')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef train(cls, data, target, model_path):\n cls = cls.fit(data, target)\n with open(model_path, 'wb') as f:\n pickle.dump(cls, f)\n\n\ndef trans(data, matrix_path, stopword_path):\n with open(stopword_path, 'r', encoding='utf-8') as fs:\n stop_words = [line.strip() for line in fs.readline()]\n tfidf = TfidfVectorizer(token_pattern='(?u)\\\\b\\\\w+\\\\b', stop_words=\n stop_words)\n features = tfidf.fit_transform(data)\n with open(matrix_path, 'wb') as f:\n pickle.dump(tfidf, f)\n return features\n\n\ndef load_models(matrix_path, model_path):\n tfidf, cls = None, None\n if os.path.isfile(model_path):\n with open(model_path, 'rb') as f:\n cls = pickle.load(f)\n if os.path.isfile(matrix_path):\n with open(matrix_path, 'rb') as f:\n tfidf = pickle.load(f)\n return tfidf, cls\n\n\ndef test(matrix_path, model_path, data_path, outdir):\n curr_time = datetime.datetime.now()\n time_str = curr_time.strftime('%Y-%m-%d %H-%M-%S')\n out_path = outdir + '/%s/' % time_str\n out_file = os.path.join(out_path, 'results.txt')\n if not os.path.exists(out_path):\n os.makedirs(out_path)\n data, target = get_data(data_path)\n tfidf, cls = load_models(matrix_path, model_path)\n if tfidf == None or cls == None:\n print('cannot load models........')\n return\n feature = tfidf.transform(data)\n predicted = cls.predict(feature)\n acc = metrics.accuracy_score(target, predicted)\n pre = metrics.precision_score(target, predicted)\n recall = metrics.recall_score(target, predicted)\n f1 = metrics.f1_score(target, predicted)\n fpr, tpr, thresholds = metrics.roc_curve(target, predicted)\n auc = metrics.auc(fpr, tpr)\n print('accuracy_score: ', acc)\n print('precision_score: ', pre)\n print('recall_score: ', recall)\n print('f1_score: ', f1)\n print('auc: ', auc)\n with open(out_file, 'w', encoding='utf-8') as f:\n for label in predicted:\n f.write(str(label) + '\\n')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_data(train_file):\n target = []\n data = []\n with open(train_file, 'r', encoding='utf-8') as f:\n for line in f.readlines():\n line = line.strip().split('\\t')\n if len(line) == 1:\n continue\n target.append(int(line[0]))\n data.append(line[1])\n data = list(map(jieba.lcut, data))\n data = [' '.join(d) for d in data]\n return data, target\n\n\ndef train(cls, data, target, model_path):\n cls = cls.fit(data, target)\n with open(model_path, 'wb') as f:\n pickle.dump(cls, f)\n\n\ndef trans(data, matrix_path, stopword_path):\n with open(stopword_path, 'r', encoding='utf-8') as fs:\n stop_words = [line.strip() for line in fs.readline()]\n tfidf = TfidfVectorizer(token_pattern='(?u)\\\\b\\\\w+\\\\b', stop_words=\n stop_words)\n features = tfidf.fit_transform(data)\n with open(matrix_path, 'wb') as f:\n pickle.dump(tfidf, f)\n return features\n\n\ndef load_models(matrix_path, model_path):\n tfidf, cls = None, None\n if os.path.isfile(model_path):\n with open(model_path, 'rb') as f:\n cls = pickle.load(f)\n if os.path.isfile(matrix_path):\n with open(matrix_path, 'rb') as f:\n tfidf = pickle.load(f)\n return tfidf, cls\n\n\ndef test(matrix_path, model_path, data_path, outdir):\n curr_time = datetime.datetime.now()\n time_str = curr_time.strftime('%Y-%m-%d %H-%M-%S')\n out_path = outdir + '/%s/' % time_str\n out_file = os.path.join(out_path, 'results.txt')\n if not os.path.exists(out_path):\n os.makedirs(out_path)\n data, target = get_data(data_path)\n tfidf, cls = load_models(matrix_path, model_path)\n if tfidf == None or cls == None:\n print('cannot load models........')\n return\n feature = tfidf.transform(data)\n predicted = cls.predict(feature)\n acc = metrics.accuracy_score(target, predicted)\n pre = metrics.precision_score(target, predicted)\n recall = metrics.recall_score(target, predicted)\n f1 = metrics.f1_score(target, predicted)\n fpr, tpr, thresholds = metrics.roc_curve(target, predicted)\n auc = metrics.auc(fpr, tpr)\n print('accuracy_score: ', acc)\n print('precision_score: ', pre)\n print('recall_score: ', recall)\n print('f1_score: ', f1)\n print('auc: ', auc)\n with open(out_file, 'w', encoding='utf-8') as f:\n for label in predicted:\n f.write(str(label) + '\\n')\n\n\n<mask token>\n",
"step-4": "<mask token>\nwarnings.filterwarnings('ignore')\n\n\ndef get_data(train_file):\n target = []\n data = []\n with open(train_file, 'r', encoding='utf-8') as f:\n for line in f.readlines():\n line = line.strip().split('\\t')\n if len(line) == 1:\n continue\n target.append(int(line[0]))\n data.append(line[1])\n data = list(map(jieba.lcut, data))\n data = [' '.join(d) for d in data]\n return data, target\n\n\ndef train(cls, data, target, model_path):\n cls = cls.fit(data, target)\n with open(model_path, 'wb') as f:\n pickle.dump(cls, f)\n\n\ndef trans(data, matrix_path, stopword_path):\n with open(stopword_path, 'r', encoding='utf-8') as fs:\n stop_words = [line.strip() for line in fs.readline()]\n tfidf = TfidfVectorizer(token_pattern='(?u)\\\\b\\\\w+\\\\b', stop_words=\n stop_words)\n features = tfidf.fit_transform(data)\n with open(matrix_path, 'wb') as f:\n pickle.dump(tfidf, f)\n return features\n\n\ndef load_models(matrix_path, model_path):\n tfidf, cls = None, None\n if os.path.isfile(model_path):\n with open(model_path, 'rb') as f:\n cls = pickle.load(f)\n if os.path.isfile(matrix_path):\n with open(matrix_path, 'rb') as f:\n tfidf = pickle.load(f)\n return tfidf, cls\n\n\ndef test(matrix_path, model_path, data_path, outdir):\n curr_time = datetime.datetime.now()\n time_str = curr_time.strftime('%Y-%m-%d %H-%M-%S')\n out_path = outdir + '/%s/' % time_str\n out_file = os.path.join(out_path, 'results.txt')\n if not os.path.exists(out_path):\n os.makedirs(out_path)\n data, target = get_data(data_path)\n tfidf, cls = load_models(matrix_path, model_path)\n if tfidf == None or cls == None:\n print('cannot load models........')\n return\n feature = tfidf.transform(data)\n predicted = cls.predict(feature)\n acc = metrics.accuracy_score(target, predicted)\n pre = metrics.precision_score(target, predicted)\n recall = metrics.recall_score(target, predicted)\n f1 = metrics.f1_score(target, predicted)\n fpr, tpr, thresholds = metrics.roc_curve(target, predicted)\n auc = metrics.auc(fpr, tpr)\n print('accuracy_score: ', acc)\n print('precision_score: ', pre)\n print('recall_score: ', recall)\n print('f1_score: ', f1)\n print('auc: ', auc)\n with open(out_file, 'w', encoding='utf-8') as f:\n for label in predicted:\n f.write(str(label) + '\\n')\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('--train', type=str, default='./data/train.txt',\n help='training data')\n parser.add_argument('--test', type=str, default='./data/test.txt', help\n ='test data')\n parser.add_argument('--stopwords', type=str, default=\n './data/hit_stopwords.txt', help='stop words')\n parser.add_argument('--model', type=str, default=\n './model/svm_model.pkl', help='classification model')\n parser.add_argument('--matrix', type=str, default='./model/tfidf.pkl',\n help='tfidf model')\n parser.add_argument('--outpath', type=str, default='./results/', help=\n 'out path')\n args = parser.parse_args()\n print('data processing.......')\n data, target = get_data(args.train)\n print('transform data.......')\n features = trans(data, args.matrix, args.stopwords)\n print('training model.......')\n cls = svm.LinearSVC()\n train(cls, features, target, args.model)\n print('test.......')\n test(args.matrix, args.model, args.test, args.outpath)\n",
"step-5": "# -*- coding: utf-8 -*-\r\n# caixinjun\r\n\r\nimport argparse\r\nfrom sklearn import metrics\r\nimport datetime\r\nimport jieba\r\nfrom sklearn.feature_extraction.text import TfidfVectorizer\r\nimport pickle\r\nfrom sklearn import svm\r\nimport os\r\nimport warnings\r\nwarnings.filterwarnings('ignore')\r\n\r\n\r\ndef get_data(train_file):\r\n target = []\r\n data = []\r\n with open(train_file, 'r', encoding='utf-8') as f:\r\n for line in f.readlines():\r\n line = line.strip().split(\"\\t\")\r\n if len(line) == 1:\r\n continue\r\n target.append(int(line[0]))\r\n data.append(line[1])\r\n data = list(map(jieba.lcut, data))\r\n data = [\" \".join(d) for d in data]\r\n return data, target\r\n\r\n\r\ndef train(cls, data, target, model_path):\r\n cls = cls.fit(data, target)\r\n with open(model_path, 'wb') as f:\r\n pickle.dump(cls, f)\r\n\r\ndef trans(data, matrix_path, stopword_path):\r\n with open(stopword_path, 'r', encoding='utf-8') as fs:\r\n stop_words = [line.strip() for line in fs.readline()]\r\n tfidf = TfidfVectorizer(token_pattern=r\"(?u)\\b\\w+\\b\", stop_words=stop_words)\r\n features = tfidf.fit_transform(data)\r\n with open(matrix_path, 'wb') as f:\r\n pickle.dump(tfidf, f)\r\n return features\r\n\r\n\r\ndef load_models(matrix_path, model_path):\r\n tfidf, cls = None, None\r\n if os.path.isfile(model_path):\r\n with open(model_path, 'rb') as f:\r\n cls = pickle.load(f)\r\n if os.path.isfile(matrix_path):\r\n with open(matrix_path, 'rb') as f:\r\n tfidf = pickle.load(f)\r\n return tfidf, cls\r\n\r\ndef test(matrix_path, model_path, data_path, outdir):\r\n\r\n curr_time = datetime.datetime.now()\r\n time_str = curr_time.strftime(\"%Y-%m-%d %H-%M-%S\")\r\n out_path = outdir + '/%s/' % time_str\r\n out_file = os.path.join(out_path, \"results.txt\")\r\n if not os.path.exists(out_path):\r\n os.makedirs(out_path)\r\n data, target = get_data(data_path)\r\n tfidf, cls = load_models(matrix_path, model_path)\r\n if tfidf==None or cls==None:\r\n print(\"cannot load models........\")\r\n return\r\n\r\n feature = tfidf.transform(data)\r\n predicted = cls.predict(feature)\r\n\r\n acc = metrics.accuracy_score(target, predicted)\r\n pre = metrics.precision_score(target, predicted)\r\n recall = metrics.recall_score(target, predicted)\r\n f1 = metrics.f1_score(target, predicted)\r\n fpr, tpr, thresholds = metrics.roc_curve(target, predicted)\r\n auc = metrics.auc(fpr, tpr)\r\n\r\n print(\"accuracy_score: \", acc)\r\n print(\"precision_score: \", pre)\r\n print(\"recall_score: \", recall)\r\n print(\"f1_score: \", f1)\r\n print(\"auc: \", auc)\r\n\r\n with open(out_file, 'w', encoding='utf-8') as f:\r\n for label in predicted:\r\n f.write(str(label) + '\\n')\r\n\r\n\r\nif __name__ == '__main__':\r\n parser = argparse.ArgumentParser()\r\n parser.add_argument('--train', type=str, default='./data/train.txt', help='training data')\r\n parser.add_argument('--test', type=str, default='./data/test.txt', help='test data')\r\n parser.add_argument('--stopwords', type=str, default='./data/hit_stopwords.txt', help='stop words')\r\n parser.add_argument('--model', type=str, default='./model/svm_model.pkl', help='classification model')\r\n parser.add_argument('--matrix', type=str, default='./model/tfidf.pkl', help='tfidf model')\r\n parser.add_argument('--outpath', type=str, default='./results/', help='out path')\r\n args = parser.parse_args()\r\n\r\n print(\"data processing.......\")\r\n data, target = get_data(args.train)\r\n\r\n print(\"transform data.......\")\r\n features = trans(data, args.matrix, 
args.stopwords)\r\n\r\n print(\"training model.......\")\r\n cls = svm.LinearSVC()\r\n train(cls, features, target, args.model)\r\n\r\n print(\"test.......\")\r\n test(args.matrix, args.model, args.test, args.outpath)\r\n\r\n",
"step-ids": [
3,
4,
5,
6,
8
]
}
|
[
3,
4,
5,
6,
8
] |
TheBeatles = ['John', 'Paul', 'George', 'Ringo']
Wings = ['Paul']
for Beatle in TheBeatles:
if Beatle in Wings:
continue
		print(Beatle)
|
normal
|
{
"blob_id": "9a54ff8e7e8d6d46860cb6173f03c52655b30f43",
"index": 6449,
"step-1": "TheBeatles = ['John', 'Paul', 'George', 'Ringo']\nWings = ['Paul']\n\nfor Beatle in TheBeatles:\n\t\tif Beatle in Wings:\n\t\t\t\tcontinue\n\t\tprint Beatle\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# Import smtplib for the actual sending function
import smtplib
# Import the email modules we'll need
from email.message import EmailMessage
# Open the plain text file whose name is in textfile for reading.
with open("testfile.txt") as fp:
# Create a text/plain message
msg = EmailMessage()
msg.set_content("test")
me = "njordan@kohanakai.com"
you = ['john.pelletier@ymail.com', 'jdp2766@gmail.com']
msg['Subject'] = 'The tester email'
msg['From'] = me
msg['To'] = ', '.join(you)
# Send the message via our own SMTP server.
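# (smtp-relay.gmail.com:25 assumes a Google Workspace SMTP relay configured to accept mail from this host)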
s = smtplib.SMTP('smtp-relay.gmail.com', 25)
s.send_message(msg)
s.quit()
|
normal
|
{
"blob_id": "9feb24da78113310509664fa9efcf5f399be5335",
"index": 5914,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('testfile.txt') as fp:\n msg = EmailMessage()\n msg.set_content('test')\n<mask token>\ns.send_message(msg)\ns.quit()\n",
"step-3": "<mask token>\nwith open('testfile.txt') as fp:\n msg = EmailMessage()\n msg.set_content('test')\nme = 'njordan@kohanakai.com'\nyou = ['john.pelletier@ymail.com', 'jdp2766@gmail.com']\nmsg['Subject'] = 'The tester email'\nmsg['From'] = me\nmsg['To'] = you\ns = smtplib.SMTP('smtp-relay.gmail.com', 25)\ns.send_message(msg)\ns.quit()\n",
"step-4": "import smtplib\nfrom email.message import EmailMessage\nwith open('testfile.txt') as fp:\n msg = EmailMessage()\n msg.set_content('test')\nme = 'njordan@kohanakai.com'\nyou = ['john.pelletier@ymail.com', 'jdp2766@gmail.com']\nmsg['Subject'] = 'The tester email'\nmsg['From'] = me\nmsg['To'] = you\ns = smtplib.SMTP('smtp-relay.gmail.com', 25)\ns.send_message(msg)\ns.quit()\n",
"step-5": "# Import smtplib for the actual sending function\nimport smtplib\n\n# Import the email modules we'll need\nfrom email.message import EmailMessage\n\n# Open the plain text file whose name is in textfile for reading.\nwith open(\"testfile.txt\") as fp:\n # Create a text/plain message\n msg = EmailMessage()\n msg.set_content(\"test\")\n\nme = \"njordan@kohanakai.com\"\nyou = ['john.pelletier@ymail.com', 'jdp2766@gmail.com']\nmsg['Subject'] = 'The tester email'\nmsg['From'] = me\nmsg['To'] = you\n\n# Send the message via our own SMTP server.\ns = smtplib.SMTP('smtp-relay.gmail.com', 25)\ns.send_message(msg)\ns.quit()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# The project is based on Tensorflow's Text Generation with RNN tutorial
# Copyright Petros Demetrakopoulos 2020
import tensorflow as tf
import numpy as np
import os
import time
from random import seed
from random import randint
import sys
import urllib.request
# jsonify builds the HTTP response in doSomeWork(); assuming this module
# runs inside a Flask app (e.g. a cloud function), import it explicitly
from flask import jsonify
stopChars = [',', '(', ')', '.', '-', '[', ']', '"']
# Corpus preprocessing and vocabulary building happen in yuh(), which
# doSomeWork() calls after downloading the corpus; running it at module
# level would execute before preprocessText/corpusToList are defined
def yuh():
    # Rebuild the vocabulary and lookup tables from the downloaded corpus.
    # Declared global so generateLyrics() and doSomeWork() can use them
    # after this function has run.
    global vocab, word2idx, idx2words
    corpus_path = "/tmp/data.txt"
    text = open(corpus_path, 'rb').read().decode(encoding='utf-8')
    text = preprocessText(text)
    corpus_words = corpusToList(text)
    corpus_words = [w.strip() for w in corpus_words]  # trim words
    vocab = sorted(set(corpus_words))
    print('Corpus length (in words):', len(corpus_words))
    print('Unique words in corpus: {}'.format(len(vocab)))
    word2idx = {u: i for i, u in enumerate(vocab)}
    idx2words = np.array(vocab)
    word_as_int = np.array([word2idx[c] for c in corpus_words])
    # The maximum length sentence we want for a single input in words
    seqLength = 10
    examples_per_epoch = len(corpus_words) // (seqLength + 1)
    # Create training examples / targets
    wordDataset = tf.data.Dataset.from_tensor_slices(word_as_int)
    # generating batches of 10 words each
    sequencesOfWords = wordDataset.batch(seqLength + 1, drop_remainder=True)
def preprocessText(text):
text = text.replace('\n', ' ').replace('\t', '')
processedText = text.lower()
for char in stopChars:
processedText = processedText.replace(char, ' ')
return processedText
def corpusToList(corpus):
corpusList = [w for w in corpus.split(' ')]
# removing empty strings from list
corpusList = [i for i in corpusList if i]
return corpusList
def split_input_target(chunk):
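    # Standard next-word setup: the input is every word but the last, the target is the same window shifted one ahead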
input_text = chunk[:-1]
target_text = chunk[1:]
return input_text, target_text
def loss(labels, logits):
return tf.keras.losses.sparse_categorical_crossentropy(labels, logits, from_logits=True)
def generateLyrics(model, startString, temp):
# Number of words to generate
num_generate = 30
# Converting our start string to numbers (vectorizing)
start_string_list = [w for w in startString.split(' ')]
input_eval = [word2idx[s] for s in start_string_list]
input_eval = tf.expand_dims(input_eval, 0)
text_generated = []
model.reset_states()
for i in range(num_generate):
predictions = model(input_eval)
predictions = tf.squeeze(predictions, 0)
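        # Temperature scaling: values below 1 sharpen the distribution (safer picks), above 1 flatten it (more variety)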
predictions = predictions / temp
predicted_id = tf.random.categorical(
predictions, num_samples=1)[-1, 0].numpy()
input_eval = tf.expand_dims([predicted_id], 0)
text_generated.append(' ' + idx2words[predicted_id])
return (startString + ''.join(text_generated))
def doSomeWork(artist):
url = '''https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkendrick.txt?alt=media&token=604b7b6c-2ef0-4611-ab6e-a08dd53e99be'''
urllib.request.urlretrieve(url, '/tmp/data.txt')
if artist == "kanye":
url = '''
https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkanye.h5?alt=media&token=a0b94c61-e696-453d-9a16-110af66f6afd'''
if artist == "nas":
url = '''
https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fnas.h5?alt=media&token=037ef224-be5f-4449-a89c-c1897e164289'''
if artist == "biggie":
url = '''https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fbiggie.h5?alt=media&token=3244a8e2-017c-472f-a66b-7810a198d038'''
if artist == "jayz":
url = '''https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fjayz.h5?alt=media&token=500ff44d-60fe-4774-9c85-5ea6f06da81b'''
if artist == "ross" or artist == "kendrick" or artist == "50cent":
url = '''
https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkendrick.h5?alt=media&token=6ceff75d-5a71-49d4-b927-e727888d872f
'''
named = "/tmp/" + artist + ".h5"
if (artist == "biggie") or artist == "50cent":
named = "/tmp/kendrick" + ".h5"
urllib.request.urlretrieve(url, named)
yuh()
model = tf.keras.models.load_model(named)
seed(1)
    input_str = vocab[randint(0, len(vocab) - 1)]  # randint is inclusive on both ends
lyricz = []
for i in range(10):
lyrics = generateLyrics(model, startString=input_str, temp=0.6)
temp = lyrics.replace("nigga", "homie").replace("niggas", "homies").replace("nigger", "homie").replace(
"niggers", "homies").replace("faggot", "maggot").replace("fag", "mag").replace('\r', '')
lyricz.append(lyrics.replace("nigga", "homie").replace('\r', ''))
input_str = temp.split()[-1]
return jsonify({
"Success": "It worked",
"Url": " ".join(lyricz)
})
|
normal
|
{
"blob_id": "5ff0c6bde8f3ffcb1f5988b0bbd1dfdd7fa2e818",
"index": 8800,
"step-1": "<mask token>\n\n\ndef yuh():\n corpus_path = '/tmp/data.txt'\n text = open(corpus_path, 'rb').read().decode(encoding='utf-8')\n text = preprocessText(text)\n corpus_words = corpusToList(text)\n map(str.strip, corpus_words)\n vocab = sorted(set(corpus_words))\n print('Corpus length (in words):', len(corpus_words))\n print('Unique words in corpus: {}'.format(len(vocab)))\n word2idx = {u: i for i, u in enumerate(vocab)}\n idx2words = np.array(vocab)\n word_as_int = np.array([word2idx[c] for c in corpus_words])\n seqLength = 10\n examples_per_epoch = len(corpus_words) // (seqLength + 1)\n wordDataset = tf.data.Dataset.from_tensor_slices(word_as_int)\n sequencesOfWords = wordDataset.batch(seqLength + 1, drop_remainder=True)\n\n\ndef preprocessText(text):\n text = text.replace('\\n', ' ').replace('\\t', '')\n processedText = text.lower()\n for char in stopChars:\n processedText = processedText.replace(char, ' ')\n return processedText\n\n\n<mask token>\n\n\ndef split_input_target(chunk):\n input_text = chunk[:-1]\n target_text = chunk[1:]\n return input_text, target_text\n\n\ndef loss(labels, logits):\n return tf.keras.losses.sparse_categorical_crossentropy(labels, logits,\n from_logits=True)\n\n\ndef generateLyrics(model, startString, temp):\n num_generate = 30\n start_string_list = [w for w in startString.split(' ')]\n input_eval = [word2idx[s] for s in start_string_list]\n input_eval = tf.expand_dims(input_eval, 0)\n text_generated = []\n model.reset_states()\n for i in range(num_generate):\n predictions = model(input_eval)\n predictions = tf.squeeze(predictions, 0)\n predictions = predictions / temp\n predicted_id = tf.random.categorical(predictions, num_samples=1)[-1, 0\n ].numpy()\n input_eval = tf.expand_dims([predicted_id], 0)\n text_generated.append(' ' + idx2words[predicted_id])\n return startString + ''.join(text_generated)\n\n\ndef doSomeWork(artist):\n url = (\n 'https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkendrick.txt?alt=media&token=604b7b6c-2ef0-4611-ab6e-a08dd53e99be'\n )\n urllib.request.urlretrieve(url, '/tmp/data.txt')\n if artist == 'kanye':\n url = \"\"\"\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkanye.h5?alt=media&token=a0b94c61-e696-453d-9a16-110af66f6afd\"\"\"\n if artist == 'nas':\n url = \"\"\"\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fnas.h5?alt=media&token=037ef224-be5f-4449-a89c-c1897e164289\"\"\"\n if artist == 'biggie':\n url = (\n 'https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fbiggie.h5?alt=media&token=3244a8e2-017c-472f-a66b-7810a198d038'\n )\n if artist == 'jayz':\n url = (\n 'https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fjayz.h5?alt=media&token=500ff44d-60fe-4774-9c85-5ea6f06da81b'\n )\n if artist == 'ross' or artist == 'kendrick' or artist == '50cent':\n url = \"\"\"\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkendrick.h5?alt=media&token=6ceff75d-5a71-49d4-b927-e727888d872f\n \"\"\"\n named = '/tmp/' + artist + '.h5'\n if artist == 'biggie' or artist == '50cent':\n named = '/tmp/kendrick' + '.h5'\n urllib.request.urlretrieve(url, named)\n yuh()\n model = tf.keras.models.load_model(named)\n seed(1)\n input_str = vocab[randint(0, len(vocab))]\n lyricz = []\n for i in range(10):\n lyrics = generateLyrics(model, startString=input_str, temp=0.6)\n temp = lyrics.replace('nigga', 'homie').replace('niggas', 
'homies'\n ).replace('nigger', 'homie').replace('niggers', 'homies').replace(\n 'faggot', 'maggot').replace('fag', 'mag').replace('\\r', '')\n lyricz.append(lyrics.replace('nigga', 'homie').replace('\\r', ''))\n input_str = temp.split()[-1]\n return jsonify({'Success': 'It worked', 'Url': ' '.join(lyricz)})\n",
"step-2": "<mask token>\n\n\ndef yuh():\n corpus_path = '/tmp/data.txt'\n text = open(corpus_path, 'rb').read().decode(encoding='utf-8')\n text = preprocessText(text)\n corpus_words = corpusToList(text)\n map(str.strip, corpus_words)\n vocab = sorted(set(corpus_words))\n print('Corpus length (in words):', len(corpus_words))\n print('Unique words in corpus: {}'.format(len(vocab)))\n word2idx = {u: i for i, u in enumerate(vocab)}\n idx2words = np.array(vocab)\n word_as_int = np.array([word2idx[c] for c in corpus_words])\n seqLength = 10\n examples_per_epoch = len(corpus_words) // (seqLength + 1)\n wordDataset = tf.data.Dataset.from_tensor_slices(word_as_int)\n sequencesOfWords = wordDataset.batch(seqLength + 1, drop_remainder=True)\n\n\ndef preprocessText(text):\n text = text.replace('\\n', ' ').replace('\\t', '')\n processedText = text.lower()\n for char in stopChars:\n processedText = processedText.replace(char, ' ')\n return processedText\n\n\ndef corpusToList(corpus):\n corpusList = [w for w in corpus.split(' ')]\n corpusList = [i for i in corpusList if i]\n return corpusList\n\n\ndef split_input_target(chunk):\n input_text = chunk[:-1]\n target_text = chunk[1:]\n return input_text, target_text\n\n\ndef loss(labels, logits):\n return tf.keras.losses.sparse_categorical_crossentropy(labels, logits,\n from_logits=True)\n\n\ndef generateLyrics(model, startString, temp):\n num_generate = 30\n start_string_list = [w for w in startString.split(' ')]\n input_eval = [word2idx[s] for s in start_string_list]\n input_eval = tf.expand_dims(input_eval, 0)\n text_generated = []\n model.reset_states()\n for i in range(num_generate):\n predictions = model(input_eval)\n predictions = tf.squeeze(predictions, 0)\n predictions = predictions / temp\n predicted_id = tf.random.categorical(predictions, num_samples=1)[-1, 0\n ].numpy()\n input_eval = tf.expand_dims([predicted_id], 0)\n text_generated.append(' ' + idx2words[predicted_id])\n return startString + ''.join(text_generated)\n\n\ndef doSomeWork(artist):\n url = (\n 'https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkendrick.txt?alt=media&token=604b7b6c-2ef0-4611-ab6e-a08dd53e99be'\n )\n urllib.request.urlretrieve(url, '/tmp/data.txt')\n if artist == 'kanye':\n url = \"\"\"\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkanye.h5?alt=media&token=a0b94c61-e696-453d-9a16-110af66f6afd\"\"\"\n if artist == 'nas':\n url = \"\"\"\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fnas.h5?alt=media&token=037ef224-be5f-4449-a89c-c1897e164289\"\"\"\n if artist == 'biggie':\n url = (\n 'https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fbiggie.h5?alt=media&token=3244a8e2-017c-472f-a66b-7810a198d038'\n )\n if artist == 'jayz':\n url = (\n 'https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fjayz.h5?alt=media&token=500ff44d-60fe-4774-9c85-5ea6f06da81b'\n )\n if artist == 'ross' or artist == 'kendrick' or artist == '50cent':\n url = \"\"\"\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkendrick.h5?alt=media&token=6ceff75d-5a71-49d4-b927-e727888d872f\n \"\"\"\n named = '/tmp/' + artist + '.h5'\n if artist == 'biggie' or artist == '50cent':\n named = '/tmp/kendrick' + '.h5'\n urllib.request.urlretrieve(url, named)\n yuh()\n model = tf.keras.models.load_model(named)\n seed(1)\n input_str = vocab[randint(0, len(vocab))]\n lyricz = []\n for i in range(10):\n 
lyrics = generateLyrics(model, startString=input_str, temp=0.6)\n temp = lyrics.replace('nigga', 'homie').replace('niggas', 'homies'\n ).replace('nigger', 'homie').replace('niggers', 'homies').replace(\n 'faggot', 'maggot').replace('fag', 'mag').replace('\\r', '')\n lyricz.append(lyrics.replace('nigga', 'homie').replace('\\r', ''))\n input_str = temp.split()[-1]\n return jsonify({'Success': 'It worked', 'Url': ' '.join(lyricz)})\n",
"step-3": "<mask token>\nmap(str.strip, corpus_words)\n<mask token>\nprint('Corpus length (in words):', len(corpus_words))\nprint('Unique words in corpus: {}'.format(len(vocab)))\n<mask token>\n\n\ndef yuh():\n corpus_path = '/tmp/data.txt'\n text = open(corpus_path, 'rb').read().decode(encoding='utf-8')\n text = preprocessText(text)\n corpus_words = corpusToList(text)\n map(str.strip, corpus_words)\n vocab = sorted(set(corpus_words))\n print('Corpus length (in words):', len(corpus_words))\n print('Unique words in corpus: {}'.format(len(vocab)))\n word2idx = {u: i for i, u in enumerate(vocab)}\n idx2words = np.array(vocab)\n word_as_int = np.array([word2idx[c] for c in corpus_words])\n seqLength = 10\n examples_per_epoch = len(corpus_words) // (seqLength + 1)\n wordDataset = tf.data.Dataset.from_tensor_slices(word_as_int)\n sequencesOfWords = wordDataset.batch(seqLength + 1, drop_remainder=True)\n\n\ndef preprocessText(text):\n text = text.replace('\\n', ' ').replace('\\t', '')\n processedText = text.lower()\n for char in stopChars:\n processedText = processedText.replace(char, ' ')\n return processedText\n\n\ndef corpusToList(corpus):\n corpusList = [w for w in corpus.split(' ')]\n corpusList = [i for i in corpusList if i]\n return corpusList\n\n\ndef split_input_target(chunk):\n input_text = chunk[:-1]\n target_text = chunk[1:]\n return input_text, target_text\n\n\ndef loss(labels, logits):\n return tf.keras.losses.sparse_categorical_crossentropy(labels, logits,\n from_logits=True)\n\n\ndef generateLyrics(model, startString, temp):\n num_generate = 30\n start_string_list = [w for w in startString.split(' ')]\n input_eval = [word2idx[s] for s in start_string_list]\n input_eval = tf.expand_dims(input_eval, 0)\n text_generated = []\n model.reset_states()\n for i in range(num_generate):\n predictions = model(input_eval)\n predictions = tf.squeeze(predictions, 0)\n predictions = predictions / temp\n predicted_id = tf.random.categorical(predictions, num_samples=1)[-1, 0\n ].numpy()\n input_eval = tf.expand_dims([predicted_id], 0)\n text_generated.append(' ' + idx2words[predicted_id])\n return startString + ''.join(text_generated)\n\n\ndef doSomeWork(artist):\n url = (\n 'https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkendrick.txt?alt=media&token=604b7b6c-2ef0-4611-ab6e-a08dd53e99be'\n )\n urllib.request.urlretrieve(url, '/tmp/data.txt')\n if artist == 'kanye':\n url = \"\"\"\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkanye.h5?alt=media&token=a0b94c61-e696-453d-9a16-110af66f6afd\"\"\"\n if artist == 'nas':\n url = \"\"\"\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fnas.h5?alt=media&token=037ef224-be5f-4449-a89c-c1897e164289\"\"\"\n if artist == 'biggie':\n url = (\n 'https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fbiggie.h5?alt=media&token=3244a8e2-017c-472f-a66b-7810a198d038'\n )\n if artist == 'jayz':\n url = (\n 'https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fjayz.h5?alt=media&token=500ff44d-60fe-4774-9c85-5ea6f06da81b'\n )\n if artist == 'ross' or artist == 'kendrick' or artist == '50cent':\n url = \"\"\"\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkendrick.h5?alt=media&token=6ceff75d-5a71-49d4-b927-e727888d872f\n \"\"\"\n named = '/tmp/' + artist + '.h5'\n if artist == 'biggie' or artist == '50cent':\n named = '/tmp/kendrick' + '.h5'\n 
urllib.request.urlretrieve(url, named)\n yuh()\n model = tf.keras.models.load_model(named)\n seed(1)\n input_str = vocab[randint(0, len(vocab))]\n lyricz = []\n for i in range(10):\n lyrics = generateLyrics(model, startString=input_str, temp=0.6)\n temp = lyrics.replace('nigga', 'homie').replace('niggas', 'homies'\n ).replace('nigger', 'homie').replace('niggers', 'homies').replace(\n 'faggot', 'maggot').replace('fag', 'mag').replace('\\r', '')\n lyricz.append(lyrics.replace('nigga', 'homie').replace('\\r', ''))\n input_str = temp.split()[-1]\n return jsonify({'Success': 'It worked', 'Url': ' '.join(lyricz)})\n",
"step-4": "<mask token>\nstopChars = [',', '(', ')', '.', '-', '[', ']', '\"']\ncorpus_path = '/tmp/data.txt'\ntext = open(corpus_path, 'rb').read().decode(encoding='utf-8')\ntext = preprocessText(text)\ncorpus_words = corpusToList(text)\nmap(str.strip, corpus_words)\nvocab = sorted(set(corpus_words))\nprint('Corpus length (in words):', len(corpus_words))\nprint('Unique words in corpus: {}'.format(len(vocab)))\nword2idx = {u: i for i, u in enumerate(vocab)}\nidx2words = np.array(vocab)\nword_as_int = np.array([word2idx[c] for c in corpus_words])\nseqLength = 10\nexamples_per_epoch = len(corpus_words) // (seqLength + 1)\nwordDataset = tf.data.Dataset.from_tensor_slices(word_as_int)\nsequencesOfWords = wordDataset.batch(seqLength + 1, drop_remainder=True)\n\n\ndef yuh():\n corpus_path = '/tmp/data.txt'\n text = open(corpus_path, 'rb').read().decode(encoding='utf-8')\n text = preprocessText(text)\n corpus_words = corpusToList(text)\n map(str.strip, corpus_words)\n vocab = sorted(set(corpus_words))\n print('Corpus length (in words):', len(corpus_words))\n print('Unique words in corpus: {}'.format(len(vocab)))\n word2idx = {u: i for i, u in enumerate(vocab)}\n idx2words = np.array(vocab)\n word_as_int = np.array([word2idx[c] for c in corpus_words])\n seqLength = 10\n examples_per_epoch = len(corpus_words) // (seqLength + 1)\n wordDataset = tf.data.Dataset.from_tensor_slices(word_as_int)\n sequencesOfWords = wordDataset.batch(seqLength + 1, drop_remainder=True)\n\n\ndef preprocessText(text):\n text = text.replace('\\n', ' ').replace('\\t', '')\n processedText = text.lower()\n for char in stopChars:\n processedText = processedText.replace(char, ' ')\n return processedText\n\n\ndef corpusToList(corpus):\n corpusList = [w for w in corpus.split(' ')]\n corpusList = [i for i in corpusList if i]\n return corpusList\n\n\ndef split_input_target(chunk):\n input_text = chunk[:-1]\n target_text = chunk[1:]\n return input_text, target_text\n\n\ndef loss(labels, logits):\n return tf.keras.losses.sparse_categorical_crossentropy(labels, logits,\n from_logits=True)\n\n\ndef generateLyrics(model, startString, temp):\n num_generate = 30\n start_string_list = [w for w in startString.split(' ')]\n input_eval = [word2idx[s] for s in start_string_list]\n input_eval = tf.expand_dims(input_eval, 0)\n text_generated = []\n model.reset_states()\n for i in range(num_generate):\n predictions = model(input_eval)\n predictions = tf.squeeze(predictions, 0)\n predictions = predictions / temp\n predicted_id = tf.random.categorical(predictions, num_samples=1)[-1, 0\n ].numpy()\n input_eval = tf.expand_dims([predicted_id], 0)\n text_generated.append(' ' + idx2words[predicted_id])\n return startString + ''.join(text_generated)\n\n\ndef doSomeWork(artist):\n url = (\n 'https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkendrick.txt?alt=media&token=604b7b6c-2ef0-4611-ab6e-a08dd53e99be'\n )\n urllib.request.urlretrieve(url, '/tmp/data.txt')\n if artist == 'kanye':\n url = \"\"\"\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkanye.h5?alt=media&token=a0b94c61-e696-453d-9a16-110af66f6afd\"\"\"\n if artist == 'nas':\n url = \"\"\"\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fnas.h5?alt=media&token=037ef224-be5f-4449-a89c-c1897e164289\"\"\"\n if artist == 'biggie':\n url = (\n 
'https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fbiggie.h5?alt=media&token=3244a8e2-017c-472f-a66b-7810a198d038'\n )\n if artist == 'jayz':\n url = (\n 'https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fjayz.h5?alt=media&token=500ff44d-60fe-4774-9c85-5ea6f06da81b'\n )\n if artist == 'ross' or artist == 'kendrick' or artist == '50cent':\n url = \"\"\"\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkendrick.h5?alt=media&token=6ceff75d-5a71-49d4-b927-e727888d872f\n \"\"\"\n named = '/tmp/' + artist + '.h5'\n if artist == 'biggie' or artist == '50cent':\n named = '/tmp/kendrick' + '.h5'\n urllib.request.urlretrieve(url, named)\n yuh()\n model = tf.keras.models.load_model(named)\n seed(1)\n input_str = vocab[randint(0, len(vocab))]\n lyricz = []\n for i in range(10):\n lyrics = generateLyrics(model, startString=input_str, temp=0.6)\n temp = lyrics.replace('nigga', 'homie').replace('niggas', 'homies'\n ).replace('nigger', 'homie').replace('niggers', 'homies').replace(\n 'faggot', 'maggot').replace('fag', 'mag').replace('\\r', '')\n lyricz.append(lyrics.replace('nigga', 'homie').replace('\\r', ''))\n input_str = temp.split()[-1]\n return jsonify({'Success': 'It worked', 'Url': ' '.join(lyricz)})\n",
"step-5": "# The project is based on Tensorflow's Text Generation with RNN tutorial\n# Copyright Petros Demetrakopoulos 2020\nimport tensorflow as tf\nimport numpy as np\nimport os\nimport time\n# The project is based on Tensorflow's Text Generation with RNN tutorial\n# Copyright Petros Demetrakopoulos 2020\nimport tensorflow as tf\nimport numpy as np\nimport os\nimport time\nfrom random import seed\nfrom random import randint\nimport sys\nimport urllib.request\n\nstopChars = [',', '(', ')', '.', '-', '[', ']', '\"']\n\n\ncorpus_path = \"/tmp/data.txt\"\ntext = open(corpus_path, 'rb').read().decode(encoding='utf-8')\ntext = preprocessText(text)\ncorpus_words = corpusToList(text)\nmap(str.strip, corpus_words) # trim words\n\nvocab = sorted(set(corpus_words))\nprint('Corpus length (in words):', len(corpus_words))\nprint('Unique words in corpus: {}'.format(len(vocab)))\n\n\nword2idx = {u: i for i, u in enumerate(vocab)}\nidx2words = np.array(vocab)\nword_as_int = np.array([word2idx[c] for c in corpus_words])\n# The maximum length sentence we want for a single input in words\nseqLength = 10\nexamples_per_epoch = len(corpus_words)//(seqLength + 1)\n\n# Create training examples / targets\nwordDataset = tf.data.Dataset.from_tensor_slices(word_as_int)\n\n# generating batches of 10 words each\nsequencesOfWords = wordDataset.batch(seqLength + 1, drop_remainder=True)\n\ndef yuh(): \n corpus_path = \"/tmp/data.txt\"\n text = open(corpus_path, 'rb').read().decode(encoding='utf-8')\n text = preprocessText(text)\n corpus_words = corpusToList(text)\n map(str.strip, corpus_words) # trim words\n\n vocab = sorted(set(corpus_words))\n print('Corpus length (in words):', len(corpus_words))\n print('Unique words in corpus: {}'.format(len(vocab)))\n\n\n word2idx = {u: i for i, u in enumerate(vocab)}\n idx2words = np.array(vocab)\n word_as_int = np.array([word2idx[c] for c in corpus_words])\n # The maximum length sentence we want for a single input in words\n seqLength = 10\n examples_per_epoch = len(corpus_words)//(seqLength + 1)\n\n# Create training examples / targets\n wordDataset = tf.data.Dataset.from_tensor_slices(word_as_int)\n\n# generating batches of 10 words each\n sequencesOfWords = wordDataset.batch(seqLength + 1, drop_remainder=True)\n\ndef preprocessText(text):\n text = text.replace('\\n', ' ').replace('\\t', '')\n processedText = text.lower()\n for char in stopChars:\n processedText = processedText.replace(char, ' ')\n return processedText\n\n\ndef corpusToList(corpus):\n corpusList = [w for w in corpus.split(' ')]\n # removing empty strings from list\n corpusList = [i for i in corpusList if i]\n return corpusList\n\ndef split_input_target(chunk):\n input_text = chunk[:-1]\n target_text = chunk[1:]\n return input_text, target_text\n\n\ndef loss(labels, logits):\n return tf.keras.losses.sparse_categorical_crossentropy(labels, logits, from_logits=True)\n\n\ndef generateLyrics(model, startString, temp):\n # Number of words to generate\n num_generate = 30\n\n # Converting our start string to numbers (vectorizing)\n start_string_list = [w for w in startString.split(' ')]\n input_eval = [word2idx[s] for s in start_string_list]\n input_eval = tf.expand_dims(input_eval, 0)\n\n text_generated = []\n\n model.reset_states()\n for i in range(num_generate):\n predictions = model(input_eval)\n predictions = tf.squeeze(predictions, 0)\n\n predictions = predictions / temp\n predicted_id = tf.random.categorical(\n predictions, num_samples=1)[-1, 0].numpy()\n\n input_eval = tf.expand_dims([predicted_id], 0)\n 
text_generated.append(' ' + idx2words[predicted_id])\n\n return (startString + ''.join(text_generated))\n\n\ndef doSomeWork(artist):\n url = '''https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkendrick.txt?alt=media&token=604b7b6c-2ef0-4611-ab6e-a08dd53e99be'''\n urllib.request.urlretrieve(url, '/tmp/data.txt')\n\n if artist == \"kanye\": \n url = '''\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkanye.h5?alt=media&token=a0b94c61-e696-453d-9a16-110af66f6afd'''\n if artist == \"nas\": \n url = '''\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fnas.h5?alt=media&token=037ef224-be5f-4449-a89c-c1897e164289'''\n if artist == \"biggie\": \n url = '''https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fbiggie.h5?alt=media&token=3244a8e2-017c-472f-a66b-7810a198d038'''\n if artist == \"jayz\": \n url = '''https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fjayz.h5?alt=media&token=500ff44d-60fe-4774-9c85-5ea6f06da81b'''\n if artist == \"ross\" or artist == \"kendrick\" or artist == \"50cent\": \n url = '''\n https://firebasestorage.googleapis.com/v0/b/shellhacks-327117.appspot.com/o/models%2Fkendrick.h5?alt=media&token=6ceff75d-5a71-49d4-b927-e727888d872f\n '''\n \n\n named = \"/tmp/\" + artist + \".h5\"\n if (artist == \"biggie\") or artist == \"50cent\":\n named = \"/tmp/kendrick\" + \".h5\"\n\n urllib.request.urlretrieve(url, named)\n\n\n\n\n yuh()\n \n\n model = tf.keras.models.load_model(named)\n\n seed(1)\n input_str = vocab[randint(0, len(vocab))]\n lyricz = []\n\n for i in range(10):\n lyrics = generateLyrics(model, startString=input_str, temp=0.6)\n temp = lyrics.replace(\"nigga\", \"homie\").replace(\"niggas\", \"homies\").replace(\"nigger\", \"homie\").replace(\n \"niggers\", \"homies\").replace(\"faggot\", \"maggot\").replace(\"fag\", \"mag\").replace('\\r', '')\n lyricz.append(lyrics.replace(\"nigga\", \"homie\").replace('\\r', ''))\n input_str = temp.split()[-1]\n\n return jsonify({\n \"Success\": \"It worked\",\n \"Url\": \" \".join(lyricz)\n })\n",
"step-ids": [
6,
7,
8,
9,
11
]
}
|
[
6,
7,
8,
9,
11
] |
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_ssh_public_key
from ingredients_http.schematics.types import ArrowType, KubeName
from schematics import Model
from schematics.exceptions import ValidationError
from schematics.types import UUIDType, IntType, StringType
from deli.kubernetes.resources.v1alpha1.keypair.keypair import Keypair
class ParamsKeypair(Model):
keypair_name = KubeName(required=True)
class ParamsListKeypair(Model):
limit = IntType(default=100, max_value=100, min_value=1)
marker = UUIDType()
class RequestCreateKeypair(Model):
name = KubeName(required=True, min_length=3)
public_key = StringType(required=True)
def validate_public_key(self, data, value):
try:
load_ssh_public_key(value.encode(), default_backend())
except ValueError:
raise ValidationError("public_key could not be decoded or is not in the proper format")
except UnsupportedAlgorithm:
raise ValidationError("public_key serialization type is not supported")
return value
class ResponseKeypair(Model):
name = KubeName(required=True, min_length=3)
public_key = StringType(required=True)
created_at = ArrowType(required=True)
updated_at = ArrowType(required=True)
@classmethod
def from_database(cls, keypair: Keypair):
model = cls()
model.name = keypair.name
model.public_key = keypair.public_key
model.created_at = keypair.created_at
model.updated_at = keypair.updated_at
return model
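
# Illustrative usage sketch (not part of the original module; the key below is
# a placeholder, not a real SSH public key):
# body = RequestCreateKeypair({'name': 'my-key', 'public_key': 'ssh-rsa AAAA...'})
# body.validate()  # runs validate_public_key and raises on malformed input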
|
normal
|
{
"blob_id": "a521220ac287a840b5c69e2d0f33daa588132083",
"index": 4983,
"step-1": "<mask token>\n\n\nclass RequestCreateKeypair(Model):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass ResponseKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n created_at = ArrowType(required=True)\n updated_at = ArrowType(required=True)\n\n @classmethod\n def from_database(cls, keypair: Keypair):\n model = cls()\n model.name = keypair.name\n model.public_key = keypair.public_key\n model.created_at = keypair.created_at\n model.updated_at = keypair.updated_at\n return model\n",
"step-2": "<mask token>\n\n\nclass RequestCreateKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n\n def validate_public_key(self, data, value):\n try:\n load_ssh_public_key(value.encode(), default_backend())\n except ValueError:\n raise ValidationError(\n 'public_key could not be decoded or is not in the proper format'\n )\n except UnsupportedAlgorithm:\n raise ValidationError(\n 'public_key serialization type is not supported')\n return value\n\n\nclass ResponseKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n created_at = ArrowType(required=True)\n updated_at = ArrowType(required=True)\n\n @classmethod\n def from_database(cls, keypair: Keypair):\n model = cls()\n model.name = keypair.name\n model.public_key = keypair.public_key\n model.created_at = keypair.created_at\n model.updated_at = keypair.updated_at\n return model\n",
"step-3": "<mask token>\n\n\nclass ParamsKeypair(Model):\n keypair_name = KubeName(required=True)\n\n\nclass ParamsListKeypair(Model):\n limit = IntType(default=100, max_value=100, min_value=1)\n marker = UUIDType()\n\n\nclass RequestCreateKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n\n def validate_public_key(self, data, value):\n try:\n load_ssh_public_key(value.encode(), default_backend())\n except ValueError:\n raise ValidationError(\n 'public_key could not be decoded or is not in the proper format'\n )\n except UnsupportedAlgorithm:\n raise ValidationError(\n 'public_key serialization type is not supported')\n return value\n\n\nclass ResponseKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n created_at = ArrowType(required=True)\n updated_at = ArrowType(required=True)\n\n @classmethod\n def from_database(cls, keypair: Keypair):\n model = cls()\n model.name = keypair.name\n model.public_key = keypair.public_key\n model.created_at = keypair.created_at\n model.updated_at = keypair.updated_at\n return model\n",
"step-4": "from cryptography.exceptions import UnsupportedAlgorithm\nfrom cryptography.hazmat.backends import default_backend\nfrom cryptography.hazmat.primitives.serialization import load_ssh_public_key\nfrom ingredients_http.schematics.types import ArrowType, KubeName\nfrom schematics import Model\nfrom schematics.exceptions import ValidationError\nfrom schematics.types import UUIDType, IntType, StringType\nfrom deli.kubernetes.resources.v1alpha1.keypair.keypair import Keypair\n\n\nclass ParamsKeypair(Model):\n keypair_name = KubeName(required=True)\n\n\nclass ParamsListKeypair(Model):\n limit = IntType(default=100, max_value=100, min_value=1)\n marker = UUIDType()\n\n\nclass RequestCreateKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n\n def validate_public_key(self, data, value):\n try:\n load_ssh_public_key(value.encode(), default_backend())\n except ValueError:\n raise ValidationError(\n 'public_key could not be decoded or is not in the proper format'\n )\n except UnsupportedAlgorithm:\n raise ValidationError(\n 'public_key serialization type is not supported')\n return value\n\n\nclass ResponseKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n created_at = ArrowType(required=True)\n updated_at = ArrowType(required=True)\n\n @classmethod\n def from_database(cls, keypair: Keypair):\n model = cls()\n model.name = keypair.name\n model.public_key = keypair.public_key\n model.created_at = keypair.created_at\n model.updated_at = keypair.updated_at\n return model\n",
"step-5": "from cryptography.exceptions import UnsupportedAlgorithm\nfrom cryptography.hazmat.backends import default_backend\nfrom cryptography.hazmat.primitives.serialization import load_ssh_public_key\nfrom ingredients_http.schematics.types import ArrowType, KubeName\nfrom schematics import Model\nfrom schematics.exceptions import ValidationError\nfrom schematics.types import UUIDType, IntType, StringType\n\nfrom deli.kubernetes.resources.v1alpha1.keypair.keypair import Keypair\n\n\nclass ParamsKeypair(Model):\n keypair_name = KubeName(required=True)\n\n\nclass ParamsListKeypair(Model):\n limit = IntType(default=100, max_value=100, min_value=1)\n marker = UUIDType()\n\n\nclass RequestCreateKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n\n def validate_public_key(self, data, value):\n try:\n load_ssh_public_key(value.encode(), default_backend())\n except ValueError:\n raise ValidationError(\"public_key could not be decoded or is not in the proper format\")\n except UnsupportedAlgorithm:\n raise ValidationError(\"public_key serialization type is not supported\")\n\n return value\n\n\nclass ResponseKeypair(Model):\n name = KubeName(required=True, min_length=3)\n public_key = StringType(required=True)\n created_at = ArrowType(required=True)\n updated_at = ArrowType(required=True)\n\n @classmethod\n def from_database(cls, keypair: Keypair):\n model = cls()\n model.name = keypair.name\n model.public_key = keypair.public_key\n model.created_at = keypair.created_at\n model.updated_at = keypair.updated_at\n\n return model\n",
"step-ids": [
4,
6,
10,
11,
12
]
}
|
[
4,
6,
10,
11,
12
] |
import unittest
from domain.Activity import Activity
from domain.NABException import NABException
from domain.Person import Person
from domain.ActivityValidator import ActivityValidator
from repository.PersonRepository import PersonRepository
from repository.PersonFileRepository import PersonFileRepository
from repository.ActivityRepository import ActivityRepository
from repository.ActivityFileRepository import ActivityFileRepository
from controller.StatsController import StatsController
class StatsControllerTestCase(unittest.TestCase):
def setUp(self):
pR = PersonRepository()
aR = ActivityRepository()
self.L = StatsController(pR, aR)
self.p = Person(1, "John", "1", "A")
self.q = Person(2, "Mary", "1", "B")
self.a1 = Activity(self.p, "2015.12.20", "12:12", "Swimming")
self.a2 = Activity(self.p, "2016.01.20", "12:12", "Mapping")
self.a3 = Activity(self.q, "2015.12.21", "12:12", "Swimming")
self.a4 = Activity(self.q, "2015.12.20", "10:12", "Reading")
pR.add(self.p)
pR.add(self.q)
aR.add(self.a1)
aR.add(self.a2)
aR.add(self.a3)
aR.add(self.a4)
def test_activities_for_person_alphabetically(self):
L = self.L
a1 = self.a1
a2 = self.a2
a3 = self.a3
a4 = self.a4
assert L.activities_for_person_alphabetically(1) == [a2, a1]
assert L.activities_for_person_alphabetically(2) == [a4, a3]
assert L.activities_for_person_alphabetically(4) == []
def test_activities_for_person_by_date(self):
L = self.L
a1 = self.a1
a2 = self.a2
a3 = self.a3
a4 = self.a4
assert L.activities_for_person_by_date(1) == [a1, a2]
assert L.activities_for_person_by_date(2) == [a4, a3]
assert L.activities_for_person_by_date(4) == []
def test_people_with_activities_in_interval(self):
L = self.L
p = self.p
q = self.q
assert L.people_with_activities_in_interval("2015.12.20", "2016.01.01") == [p, q]
assert L.people_with_activities_in_interval("2000.01.01", "2010.01.01") == []
assert L.people_with_activities_in_interval("2016.01.01", "2017.01.01") == [p]
assert L.people_with_activities_in_interval("2015.12.21", "2015.12.21") == [q]
def test_activities_in_interval_alphabetically(self):
L = self.L
a1 = self.a1
a2 = self.a2
a3 = self.a3
a4 = self.a4
assert L.activities_in_interval_alphabetically("2015.12.20", "2016.01.01") == [a4, a1, a3]
assert L.activities_in_interval_alphabetically("2000.01.01", "2010.01.01") == []
assert L.activities_in_interval_alphabetically("2016.01.01", "2017.01.01") == [a2]
assert L.activities_in_interval_alphabetically("2015.12.21", "2015.12.21") == [a3]
def test_activities_in_interval_by_date(self):
L = self.L
a1 = self.a1
a2 = self.a2
a3 = self.a3
a4 = self.a4
assert L.activities_in_interval_by_date("2015.12.20", "2016.01.01") == [a4, a1, a3]
assert L.activities_in_interval_by_date("2000.01.01", "2010.01.01") == []
assert L.activities_in_interval_by_date("2016.01.01", "2017.01.01") == [a2]
assert L.activities_in_interval_by_date("2015.12.21", "2015.12.21") == [a3]
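
# Entry point added for convenience (an addition, equivalent to running
# `python -m unittest` against this module):
if __name__ == '__main__':
    unittest.main()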
|
normal
|
{
"blob_id": "130581ddb0394dcceabc316468385d4e21959b63",
"index": 8682,
"step-1": "<mask token>\n\n\nclass StatsControllerTestCase(unittest.TestCase):\n\n def setUp(self):\n pR = PersonRepository()\n aR = ActivityRepository()\n self.L = StatsController(pR, aR)\n self.p = Person(1, 'John', '1', 'A')\n self.q = Person(2, 'Mary', '1', 'B')\n self.a1 = Activity(self.p, '2015.12.20', '12:12', 'Swimming')\n self.a2 = Activity(self.p, '2016.01.20', '12:12', 'Mapping')\n self.a3 = Activity(self.q, '2015.12.21', '12:12', 'Swimming')\n self.a4 = Activity(self.q, '2015.12.20', '10:12', 'Reading')\n pR.add(self.p)\n pR.add(self.q)\n aR.add(self.a1)\n aR.add(self.a2)\n aR.add(self.a3)\n aR.add(self.a4)\n <mask token>\n <mask token>\n\n def test_people_with_activities_in_interval(self):\n L = self.L\n p = self.p\n q = self.q\n assert L.people_with_activities_in_interval('2015.12.20', '2016.01.01'\n ) == [p, q]\n assert L.people_with_activities_in_interval('2000.01.01', '2010.01.01'\n ) == []\n assert L.people_with_activities_in_interval('2016.01.01', '2017.01.01'\n ) == [p]\n assert L.people_with_activities_in_interval('2015.12.21', '2015.12.21'\n ) == [q]\n <mask token>\n\n def test_activities_in_interval_by_date(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n assert L.activities_in_interval_by_date('2015.12.20', '2016.01.01'\n ) == [a4, a1, a3]\n assert L.activities_in_interval_by_date('2000.01.01', '2010.01.01'\n ) == []\n assert L.activities_in_interval_by_date('2016.01.01', '2017.01.01'\n ) == [a2]\n assert L.activities_in_interval_by_date('2015.12.21', '2015.12.21'\n ) == [a3]\n",
"step-2": "<mask token>\n\n\nclass StatsControllerTestCase(unittest.TestCase):\n\n def setUp(self):\n pR = PersonRepository()\n aR = ActivityRepository()\n self.L = StatsController(pR, aR)\n self.p = Person(1, 'John', '1', 'A')\n self.q = Person(2, 'Mary', '1', 'B')\n self.a1 = Activity(self.p, '2015.12.20', '12:12', 'Swimming')\n self.a2 = Activity(self.p, '2016.01.20', '12:12', 'Mapping')\n self.a3 = Activity(self.q, '2015.12.21', '12:12', 'Swimming')\n self.a4 = Activity(self.q, '2015.12.20', '10:12', 'Reading')\n pR.add(self.p)\n pR.add(self.q)\n aR.add(self.a1)\n aR.add(self.a2)\n aR.add(self.a3)\n aR.add(self.a4)\n <mask token>\n\n def test_activities_for_person_by_date(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n assert L.activities_for_person_by_date(1) == [a1, a2]\n assert L.activities_for_person_by_date(2) == [a4, a3]\n assert L.activities_for_person_by_date(4) == []\n\n def test_people_with_activities_in_interval(self):\n L = self.L\n p = self.p\n q = self.q\n assert L.people_with_activities_in_interval('2015.12.20', '2016.01.01'\n ) == [p, q]\n assert L.people_with_activities_in_interval('2000.01.01', '2010.01.01'\n ) == []\n assert L.people_with_activities_in_interval('2016.01.01', '2017.01.01'\n ) == [p]\n assert L.people_with_activities_in_interval('2015.12.21', '2015.12.21'\n ) == [q]\n <mask token>\n\n def test_activities_in_interval_by_date(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n assert L.activities_in_interval_by_date('2015.12.20', '2016.01.01'\n ) == [a4, a1, a3]\n assert L.activities_in_interval_by_date('2000.01.01', '2010.01.01'\n ) == []\n assert L.activities_in_interval_by_date('2016.01.01', '2017.01.01'\n ) == [a2]\n assert L.activities_in_interval_by_date('2015.12.21', '2015.12.21'\n ) == [a3]\n",
"step-3": "<mask token>\n\n\nclass StatsControllerTestCase(unittest.TestCase):\n\n def setUp(self):\n pR = PersonRepository()\n aR = ActivityRepository()\n self.L = StatsController(pR, aR)\n self.p = Person(1, 'John', '1', 'A')\n self.q = Person(2, 'Mary', '1', 'B')\n self.a1 = Activity(self.p, '2015.12.20', '12:12', 'Swimming')\n self.a2 = Activity(self.p, '2016.01.20', '12:12', 'Mapping')\n self.a3 = Activity(self.q, '2015.12.21', '12:12', 'Swimming')\n self.a4 = Activity(self.q, '2015.12.20', '10:12', 'Reading')\n pR.add(self.p)\n pR.add(self.q)\n aR.add(self.a1)\n aR.add(self.a2)\n aR.add(self.a3)\n aR.add(self.a4)\n\n def test_activities_for_person_alphabetically(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n assert L.activities_for_person_alphabetically(1) == [a2, a1]\n assert L.activities_for_person_alphabetically(2) == [a4, a3]\n assert L.activities_for_person_alphabetically(4) == []\n\n def test_activities_for_person_by_date(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n assert L.activities_for_person_by_date(1) == [a1, a2]\n assert L.activities_for_person_by_date(2) == [a4, a3]\n assert L.activities_for_person_by_date(4) == []\n\n def test_people_with_activities_in_interval(self):\n L = self.L\n p = self.p\n q = self.q\n assert L.people_with_activities_in_interval('2015.12.20', '2016.01.01'\n ) == [p, q]\n assert L.people_with_activities_in_interval('2000.01.01', '2010.01.01'\n ) == []\n assert L.people_with_activities_in_interval('2016.01.01', '2017.01.01'\n ) == [p]\n assert L.people_with_activities_in_interval('2015.12.21', '2015.12.21'\n ) == [q]\n <mask token>\n\n def test_activities_in_interval_by_date(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n assert L.activities_in_interval_by_date('2015.12.20', '2016.01.01'\n ) == [a4, a1, a3]\n assert L.activities_in_interval_by_date('2000.01.01', '2010.01.01'\n ) == []\n assert L.activities_in_interval_by_date('2016.01.01', '2017.01.01'\n ) == [a2]\n assert L.activities_in_interval_by_date('2015.12.21', '2015.12.21'\n ) == [a3]\n",
"step-4": "<mask token>\n\n\nclass StatsControllerTestCase(unittest.TestCase):\n\n def setUp(self):\n pR = PersonRepository()\n aR = ActivityRepository()\n self.L = StatsController(pR, aR)\n self.p = Person(1, 'John', '1', 'A')\n self.q = Person(2, 'Mary', '1', 'B')\n self.a1 = Activity(self.p, '2015.12.20', '12:12', 'Swimming')\n self.a2 = Activity(self.p, '2016.01.20', '12:12', 'Mapping')\n self.a3 = Activity(self.q, '2015.12.21', '12:12', 'Swimming')\n self.a4 = Activity(self.q, '2015.12.20', '10:12', 'Reading')\n pR.add(self.p)\n pR.add(self.q)\n aR.add(self.a1)\n aR.add(self.a2)\n aR.add(self.a3)\n aR.add(self.a4)\n\n def test_activities_for_person_alphabetically(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n assert L.activities_for_person_alphabetically(1) == [a2, a1]\n assert L.activities_for_person_alphabetically(2) == [a4, a3]\n assert L.activities_for_person_alphabetically(4) == []\n\n def test_activities_for_person_by_date(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n assert L.activities_for_person_by_date(1) == [a1, a2]\n assert L.activities_for_person_by_date(2) == [a4, a3]\n assert L.activities_for_person_by_date(4) == []\n\n def test_people_with_activities_in_interval(self):\n L = self.L\n p = self.p\n q = self.q\n assert L.people_with_activities_in_interval('2015.12.20', '2016.01.01'\n ) == [p, q]\n assert L.people_with_activities_in_interval('2000.01.01', '2010.01.01'\n ) == []\n assert L.people_with_activities_in_interval('2016.01.01', '2017.01.01'\n ) == [p]\n assert L.people_with_activities_in_interval('2015.12.21', '2015.12.21'\n ) == [q]\n\n def test_activities_in_interval_alphabetically(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n assert L.activities_in_interval_alphabetically('2015.12.20',\n '2016.01.01') == [a4, a1, a3]\n assert L.activities_in_interval_alphabetically('2000.01.01',\n '2010.01.01') == []\n assert L.activities_in_interval_alphabetically('2016.01.01',\n '2017.01.01') == [a2]\n assert L.activities_in_interval_alphabetically('2015.12.21',\n '2015.12.21') == [a3]\n\n def test_activities_in_interval_by_date(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n assert L.activities_in_interval_by_date('2015.12.20', '2016.01.01'\n ) == [a4, a1, a3]\n assert L.activities_in_interval_by_date('2000.01.01', '2010.01.01'\n ) == []\n assert L.activities_in_interval_by_date('2016.01.01', '2017.01.01'\n ) == [a2]\n assert L.activities_in_interval_by_date('2015.12.21', '2015.12.21'\n ) == [a3]\n",
"step-5": "import unittest\nfrom domain.Activity import Activity\nfrom domain.NABException import NABException\nfrom domain.Person import Person\nfrom domain.ActivityValidator import ActivityValidator\nfrom repository.PersonRepository import PersonRepository\nfrom repository.PersonFileRepository import PersonFileRepository\nfrom repository.ActivityRepository import ActivityRepository\nfrom repository.ActivityFileRepository import ActivityFileRepository\nfrom controller.StatsController import StatsController\n\n\nclass StatsControllerTestCase(unittest.TestCase):\n\n def setUp(self):\n pR = PersonRepository()\n aR = ActivityRepository()\n self.L = StatsController(pR, aR)\n self.p = Person(1, \"John\", \"1\", \"A\")\n self.q = Person(2, \"Mary\", \"1\", \"B\")\n self.a1 = Activity(self.p, \"2015.12.20\", \"12:12\", \"Swimming\")\n self.a2 = Activity(self.p, \"2016.01.20\", \"12:12\", \"Mapping\")\n self.a3 = Activity(self.q, \"2015.12.21\", \"12:12\", \"Swimming\")\n self.a4 = Activity(self.q, \"2015.12.20\", \"10:12\", \"Reading\")\n\n pR.add(self.p)\n pR.add(self.q)\n aR.add(self.a1)\n aR.add(self.a2)\n aR.add(self.a3)\n aR.add(self.a4)\n\n\n def test_activities_for_person_alphabetically(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n\n assert L.activities_for_person_alphabetically(1) == [a2, a1]\n assert L.activities_for_person_alphabetically(2) == [a4, a3]\n assert L.activities_for_person_alphabetically(4) == []\n\n\n def test_activities_for_person_by_date(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n\n assert L.activities_for_person_by_date(1) == [a1, a2]\n assert L.activities_for_person_by_date(2) == [a4, a3]\n assert L.activities_for_person_by_date(4) == []\n\n\n def test_people_with_activities_in_interval(self):\n L = self.L\n p = self.p\n q = self.q\n\n assert L.people_with_activities_in_interval(\"2015.12.20\", \"2016.01.01\") == [p, q]\n assert L.people_with_activities_in_interval(\"2000.01.01\", \"2010.01.01\") == []\n assert L.people_with_activities_in_interval(\"2016.01.01\", \"2017.01.01\") == [p]\n assert L.people_with_activities_in_interval(\"2015.12.21\", \"2015.12.21\") == [q]\n\n\n def test_activities_in_interval_alphabetically(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n\n assert L.activities_in_interval_alphabetically(\"2015.12.20\", \"2016.01.01\") == [a4, a1, a3]\n assert L.activities_in_interval_alphabetically(\"2000.01.01\", \"2010.01.01\") == []\n assert L.activities_in_interval_alphabetically(\"2016.01.01\", \"2017.01.01\") == [a2]\n assert L.activities_in_interval_alphabetically(\"2015.12.21\", \"2015.12.21\") == [a3]\n\n\n def test_activities_in_interval_by_date(self):\n L = self.L\n a1 = self.a1\n a2 = self.a2\n a3 = self.a3\n a4 = self.a4\n\n assert L.activities_in_interval_by_date(\"2015.12.20\", \"2016.01.01\") == [a4, a1, a3]\n assert L.activities_in_interval_by_date(\"2000.01.01\", \"2010.01.01\") == []\n assert L.activities_in_interval_by_date(\"2016.01.01\", \"2017.01.01\") == [a2]\n assert L.activities_in_interval_by_date(\"2015.12.21\", \"2015.12.21\") == [a3]",
"step-ids": [
4,
5,
6,
7,
9
]
}
|
[
4,
5,
6,
7,
9
] |
# -*- coding: utf-8 -*-
from matplotlib import pyplot as plt
from matplotlib import colors
import numpy as np
import sys
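
# Usage (inferred from the argv indexing below; the filenames are examples):
#   python plot_mean.py <max_value> <run1.csv> <run2.csv> <run3.csv>
# Note that sys.argv[1] (the max value) is also reused further down to derive
# the .sample and .svg output filenames.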
max_value = int(sys.argv[1])
file1 = open(sys.argv[2])
file2 = open(sys.argv[3])
file3 = open(sys.argv[4])
histogram = np.zeros(max_value, dtype=int).tolist()
highest_value = 0.0
sample_size = 0.5
sample = []
for i,line in enumerate(file1.readlines()):
values = line.strip().split(",")
for j, value in enumerate(values):
if(j == 0):
histogram[int(value.split("[")[1])] += 1
elif(j == len(values) - 1):
histogram[int(value.split("]")[0])] += 1
else:
histogram[int(value)] += 1
for i,line in enumerate(file2.readlines()):
values = line.strip().split(",")
for j, value in enumerate(values):
if(j == 0):
histogram[int(value.split("[")[1])] += 1
elif(j == len(values) - 1):
histogram[int(value.split("]")[0])] += 1
else:
histogram[int(value)] += 1
for i,line in enumerate(file3.readlines()):
values = line.strip().split(",")
for j, value in enumerate(values):
if(j == 0):
histogram[int(value.split("[")[1])] += 1
elif(j == len(values) - 1):
histogram[int(value.split("]")[0])] += 1
else:
histogram[int(value)] += 1
for i in range(len(histogram)):
histogram[i] = histogram[i] / 3.0
for value in histogram:
if(value > highest_value):
highest_value = float(value)
print(highest_value)
for i,value in enumerate(histogram):
if(value >= (highest_value - (highest_value * sample_size))):
sample.append(i)
sample_file = open(sys.argv[1].split("_mean.")[0] + ".sample", "w")
for value in sample:
sample_file.write(str(value) + " ")
sample_file.close()
objects = []
for i in range(max_value):
objects.append(str(i))
y_pos = np.arange(len(objects))
ibar = plt.bar(y_pos, histogram, align='center', alpha=0.5)
for i,element in enumerate(ibar):
norm = colors.Normalize(0.0, 1.0)
color = plt.cm.winter(norm(histogram[i]/highest_value))
element.set_color(color)
#plt.xticks(y_pos, objects)
plt.xlabel('Individuo')
plt.ylabel('Vezes Selecionado')
plt.title('GASIR - Genetic Algorithm for SIR Model')
plt.savefig(sys.argv[1].split(".")[0] + "_mean.svg", format="svg")
#plt.show()
|
normal
|
{
"blob_id": "8356bc92a3a8b561d55bf5f2d9aeb0da89b730ca",
"index": 1387,
"step-1": "# -*- coding: utf-8 -*-\nfrom matplotlib import pyplot as plt\nfrom matplotlib import colors\nimport numpy as np\nimport sys\n\nmax_value = int(sys.argv[1])\n\nfile1 = open(sys.argv[2])\nfile2 = open(sys.argv[3])\nfile3 = open(sys.argv[4])\n\n\nhistogram = np.zeros(max_value, dtype=int).tolist()\nhighest_value = 0.0\n\nsample_size = 0.5\nsample = []\n\nfor i,line in enumerate(file1.readlines()):\n\tvalues = line.strip().split(\",\")\n\tfor j, value in enumerate(values):\n\t\tif(j == 0):\n\t\t\thistogram[int(value.split(\"[\")[1])] += 1\n\t\telif(j == len(values) - 1):\n\t\t\thistogram[int(value.split(\"]\")[0])] += 1\n\t\telse:\n\t\t\thistogram[int(value)] += 1\n\nfor i,line in enumerate(file2.readlines()):\n\tvalues = line.strip().split(\",\")\n\tfor j, value in enumerate(values):\n\t\tif(j == 0):\n\t\t\thistogram[int(value.split(\"[\")[1])] += 1\n\t\telif(j == len(values) - 1):\n\t\t\thistogram[int(value.split(\"]\")[0])] += 1\n\t\telse:\n\t\t\thistogram[int(value)] += 1\n\nfor i,line in enumerate(file3.readlines()):\n\tvalues = line.strip().split(\",\")\n\tfor j, value in enumerate(values):\n\t\tif(j == 0):\n\t\t\thistogram[int(value.split(\"[\")[1])] += 1\n\t\telif(j == len(values) - 1):\n\t\t\thistogram[int(value.split(\"]\")[0])] += 1\n\t\telse:\n\t\t\thistogram[int(value)] += 1\n\nfor i in range(len(histogram)):\n histogram[i] = histogram[i] / 3.0\n\nfor value in histogram:\n\tif(value > highest_value):\n\t\thighest_value = float(value)\n\nprint highest_value\nfor i,value in enumerate(histogram):\n\tif(value >= (highest_value - (highest_value * sample_size))):\n\t\tsample.append(i)\n\nsample_file = open(sys.argv[1].split(\"_mean.\")[0] + \".sample\", \"w\")\nfor value in sample:\n\tsample_file.write(str(value) + \" \")\nsample_file.close()\n\nobjects = []\nfor i in range(max_value):\n\tobjects.append(str(i))\n\ny_pos = np.arange(len(objects))\n\nibar = plt.bar(y_pos, histogram, align='center', alpha=0.5)\n\nfor i,element in enumerate(ibar):\n\tnorm = colors.Normalize(0.0, 1.0)\n\tcolor = plt.cm.winter(norm(histogram[i]/highest_value))\n\telement.set_color(color)\n#plt.xticks(y_pos, objects)\nplt.xlabel('Individuo')\nplt.ylabel('Vezes Selecionado')\nplt.title('GASIR - Genetic Algorithm for SIR Model')\n\nplt.savefig(sys.argv[1].split(\".\")[0] + \"_mean.svg\", format=\"svg\")\n#plt.show()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
salario = float(input('Qual o valor do seu Salario atual? R$ '))
novo = salario + (salario * 15 / 100)
print('Um funcioario que ganhava R$ {:.2f} com o aumento de 15% passa a ganhar R$ {:.2f}'.format(salario, novo))
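# Worked example: for salario = 1000.00 the 15% raise is 150.00, so novo = 1150.00.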
|
normal
|
{
"blob_id": "ffcd3c0086ff73eb722d867b335df23382615d20",
"index": 1657,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(\n 'Um funcioario que ganhava R$ {:.2f} com o aumento de 15% passa a ganhar R$ {:.2f}'\n .format(salario, novo))\n",
"step-3": "salario = float(input('Qual o valor do seu Salario atual? R$ '))\nnovo = salario + salario * 15 / 100\nprint(\n 'Um funcioario que ganhava R$ {:.2f} com o aumento de 15% passa a ganhar R$ {:.2f}'\n .format(salario, novo))\n",
"step-4": "salario = float(input('Qual o valor do seu Salario atual? R$ '))\nnovo = salario + (salario * 15 / 100)\nprint('Um funcioario que ganhava R$ {:.2f} com o aumento de 15% passa a ganhar R$ {:.2f}'.format(salario, novo))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from allcode.controllers.image_classifiers.image_classifier import ImageClassifier
class ImageClassifierMockup(ImageClassifier):
def classify_images(self, images):
pass
def classify_image(self, image):
return {'final_class': 'dog',
'final_prob': .8}
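
# Minimal usage sketch (illustrative): the mockup ignores its input and always
# reports the same prediction.
# classifier = ImageClassifierMockup()
# classifier.classify_image(image=None)  # -> {'final_class': 'dog', 'final_prob': 0.8}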
|
normal
|
{
"blob_id": "71fb9dc9f9ac8b1cdbc6af8a859dbc211512b4d1",
"index": 1675,
"step-1": "<mask token>\n\n\nclass ImageClassifierMockup(ImageClassifier):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ImageClassifierMockup(ImageClassifier):\n <mask token>\n\n def classify_image(self, image):\n return {'final_class': 'dog', 'final_prob': 0.8}\n",
"step-3": "<mask token>\n\n\nclass ImageClassifierMockup(ImageClassifier):\n\n def classify_images(self, images):\n pass\n\n def classify_image(self, image):\n return {'final_class': 'dog', 'final_prob': 0.8}\n",
"step-4": "from allcode.controllers.image_classifiers.image_classifier import ImageClassifier\n\n\nclass ImageClassifierMockup(ImageClassifier):\n\n def classify_images(self, images):\n pass\n\n def classify_image(self, image):\n return {'final_class': 'dog', 'final_prob': 0.8}\n",
"step-5": "from allcode.controllers.image_classifiers.image_classifier import ImageClassifier\n\n\nclass ImageClassifierMockup(ImageClassifier):\n\n def classify_images(self, images):\n pass\n\n def classify_image(self, image):\n return {'final_class': 'dog',\n 'final_prob': .8}\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
def filter_lines(in_filename, in_filename2,out_filename):
"""Read records from in_filename and write records to out_filename if
the beginning of the line (taken up to the first comma at or after
position 11) is found in keys (which must be a set of byte strings).
"""
proper_convert = 0
missing_convert = 0
fourteen_set = set()
with open(in_filename, 'r') as in_f, open(in_filename2, 'r') as in_f2, open(out_filename, 'w') as out_f:
for line in in_f:
vals = line.strip().split(",")
fips = vals[0]
if(fips not in fourteen_set):
fourteen_set.add(fips)
for line in in_f2:
vals = line.strip().split(",")
fips = vals[0]
count = vals[1]
proper_convert += 1
if(fips not in fourteen_set):
new_line = str(fips)+","+str(count)+"\n"
out_f.write(new_line)
missing_convert += 1
return (proper_convert, missing_convert)
in_filename = "/Users/VamsiG/Music/2014_Data/FCC_Final_Output.csv"
in_filename1 = "/Users/VamsiG/Music/2016_Data/FCC_Final_Output.csv"
out_filename= "/Users/VamsiG/Music/FCC_Overlap_CompleteFips.csv"
counter1, new_vals1 = filter_lines(in_filename,in_filename1,out_filename)
print(counter1)
print(new_vals1)
|
normal
|
{
"blob_id": "502e0f0c6376617dc094fcdd47bea9773d011864",
"index": 900,
"step-1": "<mask token>\n",
"step-2": "def filter_lines(in_filename, in_filename2, out_filename):\n \"\"\"Read records from in_filename and write records to out_filename if\n the beginning of the line (taken up to the first comma at or after\n position 11) is found in keys (which must be a set of byte strings).\n\n \"\"\"\n proper_convert = 0\n missing_convert = 0\n fourteen_set = set()\n with open(in_filename, 'r') as in_f, open(in_filename2, 'r'\n ) as in_f2, open(out_filename, 'w') as out_f:\n for line in in_f:\n vals = line.strip().split(',')\n fips = vals[0]\n if fips not in fourteen_set:\n fourteen_set.add(fips)\n for line in in_f2:\n vals = line.strip().split(',')\n fips = vals[0]\n count = vals[1]\n proper_convert += 1\n if fips not in fourteen_set:\n new_line = str(fips) + ',' + str(count) + '\\n'\n out_f.write(new_line)\n missing_convert += 1\n return proper_convert, missing_convert\n\n\n<mask token>\n",
"step-3": "def filter_lines(in_filename, in_filename2, out_filename):\n \"\"\"Read records from in_filename and write records to out_filename if\n the beginning of the line (taken up to the first comma at or after\n position 11) is found in keys (which must be a set of byte strings).\n\n \"\"\"\n proper_convert = 0\n missing_convert = 0\n fourteen_set = set()\n with open(in_filename, 'r') as in_f, open(in_filename2, 'r'\n ) as in_f2, open(out_filename, 'w') as out_f:\n for line in in_f:\n vals = line.strip().split(',')\n fips = vals[0]\n if fips not in fourteen_set:\n fourteen_set.add(fips)\n for line in in_f2:\n vals = line.strip().split(',')\n fips = vals[0]\n count = vals[1]\n proper_convert += 1\n if fips not in fourteen_set:\n new_line = str(fips) + ',' + str(count) + '\\n'\n out_f.write(new_line)\n missing_convert += 1\n return proper_convert, missing_convert\n\n\n<mask token>\nprint(counter1)\nprint(new_vals1)\n",
"step-4": "def filter_lines(in_filename, in_filename2, out_filename):\n \"\"\"Read records from in_filename and write records to out_filename if\n the beginning of the line (taken up to the first comma at or after\n position 11) is found in keys (which must be a set of byte strings).\n\n \"\"\"\n proper_convert = 0\n missing_convert = 0\n fourteen_set = set()\n with open(in_filename, 'r') as in_f, open(in_filename2, 'r'\n ) as in_f2, open(out_filename, 'w') as out_f:\n for line in in_f:\n vals = line.strip().split(',')\n fips = vals[0]\n if fips not in fourteen_set:\n fourteen_set.add(fips)\n for line in in_f2:\n vals = line.strip().split(',')\n fips = vals[0]\n count = vals[1]\n proper_convert += 1\n if fips not in fourteen_set:\n new_line = str(fips) + ',' + str(count) + '\\n'\n out_f.write(new_line)\n missing_convert += 1\n return proper_convert, missing_convert\n\n\nin_filename = '/Users/VamsiG/Music/2014_Data/FCC_Final_Output.csv'\nin_filename1 = '/Users/VamsiG/Music/2016_Data/FCC_Final_Output.csv'\nout_filename = '/Users/VamsiG/Music/FCC_Overlap_CompleteFips.csv'\ncounter1, new_vals1 = filter_lines(in_filename, in_filename1, out_filename)\nprint(counter1)\nprint(new_vals1)\n",
"step-5": "def filter_lines(in_filename, in_filename2,out_filename):\n \"\"\"Read records from in_filename and write records to out_filename if\n the beginning of the line (taken up to the first comma at or after\n position 11) is found in keys (which must be a set of byte strings).\n\n \"\"\"\n proper_convert = 0\n missing_convert = 0\n fourteen_set = set()\n with open(in_filename, 'r') as in_f, open(in_filename2, 'r') as in_f2, open(out_filename, 'w') as out_f:\n for line in in_f:\n vals = line.strip().split(\",\")\n fips = vals[0]\n if(fips not in fourteen_set):\n fourteen_set.add(fips)\n \n for line in in_f2:\n vals = line.strip().split(\",\")\n fips = vals[0]\n count = vals[1]\n proper_convert += 1\n if(fips not in fourteen_set):\n new_line = str(fips)+\",\"+str(count)+\"\\n\"\n out_f.write(new_line)\n missing_convert += 1\n\n return (proper_convert, missing_convert)\n\nin_filename = \"/Users/VamsiG/Music/2014_Data/FCC_Final_Output.csv\"\nin_filename1 = \"/Users/VamsiG/Music/2016_Data/FCC_Final_Output.csv\"\nout_filename= \"/Users/VamsiG/Music/FCC_Overlap_CompleteFips.csv\"\n\ncounter1, new_vals1 = filter_lines(in_filename,in_filename1,out_filename)\nprint(counter1)\nprint(new_vals1)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import requests
import codecs
import urllib.request
import time
from bs4 import BeautifulSoup
from html.parser import HTMLParser
import re
import os
#input
Result_File="report.txt"
#deleting result file if exists
if os.path.exists(Result_File):
os.remove(Result_File)
#reading html file and parsing logic
f=codecs.open("test.html", 'r', 'utf-8')
xhtml = f.read()
data = []
# instantiate the parser and feed data to it
soup = BeautifulSoup(xhtml,"html.parser")
#print(soup)
main_table = soup.find('table', { 'id': 'octable' })
#print(main_table)
with open(Result_File, 'w') as r:
r.write("OI_CE|Chng_in_OI_CE |Volume_CE|IV_CE|LTP_CE|NetChng_CE|Bid_Qty_CE|Bid_Price_CE|Ask_Price_CE|Ask_Qty_CE|StrikePrice|Bid_Qty_PE|Bid_Price_PE|Ask_Price_PE|Ask_Qty_PE|Net_Chng_PE|LTP_PE|IV_PE|Volume_PE|Chng_in_OI_PE|OI_PE")
for rows in main_table.find_all('tr'):
for cell in rows.find_all('td'):
#print(data)
if(len(cell.text) != 0):
cell_text = cell.text.strip()
a = re.sub(r"\n", "", cell_text, 0)
r.write(a)
r.write("|")
r.write("\n")
|
normal
|
{
"blob_id": "869bbc8da8cdb5de0bcaf5664b5482814daae53a",
"index": 6212,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif os.path.exists(Result_File):\n os.remove(Result_File)\n<mask token>\nwith open(Result_File, 'w') as r:\n r.write(\n 'OI_CE|Chng_in_OI_CE |Volume_CE|IV_CE|LTP_CE|NetChng_CE|Bid_Qty_CE|Bid_Price_CE|Ask_Price_CE|Ask_Qty_CE|StrikePrice|Bid_Qty_PE|Bid_Price_PE|Ask_Price_PE|Ask_Qty_PE|Net_Chng_PE|LTP_PE|IV_PE|Volume_PE|Chng_in_OI_PE|OI_PE'\n )\n for rows in main_table.find_all('tr'):\n for cell in rows.find_all('td'):\n if len(cell.text) != 0:\n cell_text = cell.text.strip()\n a = re.sub('\\\\n', '', cell_text, 0)\n r.write(a)\n r.write('|')\n r.write('\\n')\n",
"step-3": "<mask token>\nResult_File = 'report.txt'\nif os.path.exists(Result_File):\n os.remove(Result_File)\nf = codecs.open('test.html', 'r', 'utf-8')\nxhtml = f.read()\ndata = []\nsoup = BeautifulSoup(xhtml, 'html.parser')\nmain_table = soup.find('table', {'id': 'octable'})\nwith open(Result_File, 'w') as r:\n r.write(\n 'OI_CE|Chng_in_OI_CE |Volume_CE|IV_CE|LTP_CE|NetChng_CE|Bid_Qty_CE|Bid_Price_CE|Ask_Price_CE|Ask_Qty_CE|StrikePrice|Bid_Qty_PE|Bid_Price_PE|Ask_Price_PE|Ask_Qty_PE|Net_Chng_PE|LTP_PE|IV_PE|Volume_PE|Chng_in_OI_PE|OI_PE'\n )\n for rows in main_table.find_all('tr'):\n for cell in rows.find_all('td'):\n if len(cell.text) != 0:\n cell_text = cell.text.strip()\n a = re.sub('\\\\n', '', cell_text, 0)\n r.write(a)\n r.write('|')\n r.write('\\n')\n",
"step-4": "import requests\nimport codecs\nimport urllib.request\nimport time\nfrom bs4 import BeautifulSoup\nfrom html.parser import HTMLParser\nimport re\nimport os\nResult_File = 'report.txt'\nif os.path.exists(Result_File):\n os.remove(Result_File)\nf = codecs.open('test.html', 'r', 'utf-8')\nxhtml = f.read()\ndata = []\nsoup = BeautifulSoup(xhtml, 'html.parser')\nmain_table = soup.find('table', {'id': 'octable'})\nwith open(Result_File, 'w') as r:\n r.write(\n 'OI_CE|Chng_in_OI_CE |Volume_CE|IV_CE|LTP_CE|NetChng_CE|Bid_Qty_CE|Bid_Price_CE|Ask_Price_CE|Ask_Qty_CE|StrikePrice|Bid_Qty_PE|Bid_Price_PE|Ask_Price_PE|Ask_Qty_PE|Net_Chng_PE|LTP_PE|IV_PE|Volume_PE|Chng_in_OI_PE|OI_PE'\n )\n for rows in main_table.find_all('tr'):\n for cell in rows.find_all('td'):\n if len(cell.text) != 0:\n cell_text = cell.text.strip()\n a = re.sub('\\\\n', '', cell_text, 0)\n r.write(a)\n r.write('|')\n r.write('\\n')\n",
"step-5": "import requests\nimport codecs\nimport urllib.request\nimport time\nfrom bs4 import BeautifulSoup\nfrom html.parser import HTMLParser\nimport re\nimport os\n\n#input\nResult_File=\"report.txt\"\n\n#deleting result file if exists\nif os.path.exists(Result_File):\n os.remove(Result_File)\n\n#reading html file and parsing logic\nf=codecs.open(\"test.html\", 'r', 'utf-8')\nxhtml = f.read()\ndata = []\n# instantiate the parser and feed data to it\nsoup = BeautifulSoup(xhtml,\"html.parser\")\n#print(soup)\nmain_table = soup.find('table', { 'id': 'octable' })\n#print(main_table)\nwith open(Result_File, 'w') as r:\n r.write(\"OI_CE|Chng_in_OI_CE |Volume_CE|IV_CE|LTP_CE|NetChng_CE|Bid_Qty_CE|Bid_Price_CE|Ask_Price_CE|Ask_Qty_CE|StrikePrice|Bid_Qty_PE|Bid_Price_PE|Ask_Price_PE|Ask_Qty_PE|Net_Chng_PE|LTP_PE|IV_PE|Volume_PE|Chng_in_OI_PE|OI_PE\")\n for rows in main_table.find_all('tr'):\n for cell in rows.find_all('td'):\n\n#print(data)\n if(len(cell.text) != 0):\n cell_text = cell.text.strip()\n a = re.sub(r\"\\n\", \"\", cell_text, 0)\n\n r.write(a)\n r.write(\"|\")\n r.write(\"\\n\")\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import requests, csv, configuration
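# Exports call detail records (DRs) stopped on or after DATE to calls.csv.
# The first request fetches the single earliest matching DR; the loop below
# then pages forward 100 records at a time with the cursor-style
# `after=<dr_sid>` parameter until no new items are returned.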
headers = {'Authorization': f'Bearer {configuration.CARRIERX_API_TOKEN}'}
url = f'{configuration.BASE_CARRIERX_API_URL}/core/v2/calls/call_drs'
date = configuration.DATE
i = 1
params = {'limit': '1', 'order': 'date_stop asc', 'filter':
f'date_stop ge {date}'}
r = requests.get(url, headers=headers, params=params)
dr_items = r.json()['items']
if len(dr_items):
with open('calls.csv', 'w', encoding='UTF8') as csv_file:
csv_writer = csv.writer(csv_file)
csv_header = ['dr_sid', 'date_start', 'number_src', 'number_dst',
'direction', 'duration', 'price']
csv_writer.writerow(csv_header)
dr_sid = dr_items[0]['dr_sid']
csv_row = [dr_items[0]['dr_sid'], dr_items[0]['date_start'],
dr_items[0]['number_src'], dr_items[0]['number_dst'], dr_items[
0]['direction'], dr_items[0]['duration'], dr_items[0]['price']]
csv_writer.writerow(csv_row)
print(f"{i}. {dr_items[0]['dr_sid']}")
while True:
params = {'limit': '100', 'order': 'date_stop asc', 'after': dr_sid
}
r = requests.get(url, headers=headers, params=params)
if len(r.json()['items']):
dr_items = r.json()['items']
for item in dr_items:
i += 1
dr_sid = dr_items[len(r.json()['items']) - 1]['dr_sid']
csv_row = [item['dr_sid'], item['date_start'], item[
'number_src'], item['number_dst'], item['direction'
], item['duration'], item['price']]
csv_writer.writerow(csv_row)
print(f"{i}. {item['dr_sid']}")
else:
print('No more new calls')
break
else:
print(f'No calls since {date}')
|
normal
|
{
"blob_id": "8262d8b5bbb156eccae021c1c9333d3cd1a6260f",
"index": 9030,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif len(dr_items):\n with open('calls.csv', 'w', encoding='UTF8') as csv_file:\n csv_writer = csv.writer(csv_file)\n csv_header = ['dr_sid', 'date_start', 'number_src', 'number_dst',\n 'direction', 'duration', 'price']\n csv_writer.writerow(csv_header)\n dr_sid = dr_items[0]['dr_sid']\n csv_row = [dr_items[0]['dr_sid'], dr_items[0]['date_start'],\n dr_items[0]['number_src'], dr_items[0]['number_dst'], dr_items[\n 0]['direction'], dr_items[0]['duration'], dr_items[0]['price']]\n csv_writer.writerow(csv_row)\n print(f\"{i}. {dr_items[0]['dr_sid']}\")\n while True:\n params = {'limit': '100', 'order': 'date_stop asc', 'after': dr_sid\n }\n r = requests.get(url, headers=headers, params=params)\n if len(r.json()['items']):\n dr_items = r.json()['items']\n for item in dr_items:\n i += 1\n dr_sid = dr_items[len(r.json()['items']) - 1]['dr_sid']\n csv_row = [item['dr_sid'], item['date_start'], item[\n 'number_src'], item['number_dst'], item['direction'\n ], item['duration'], item['price']]\n csv_writer.writerow(csv_row)\n print(f\"{i}. {item['dr_sid']}\")\n else:\n print('No more new calls')\n break\nelse:\n print(f'No calls since {date}')\n",
"step-3": "<mask token>\nheaders = {'Authorization': f'Bearer {configuration.CARRIERX_API_TOKEN}'}\nurl = f'{configuration.BASE_CARRIERX_API_URL}/core/v2/calls/call_drs'\ndate = configuration.DATE\ni = 1\nparams = {'limit': '1', 'order': 'date_stop asc', 'filter':\n f'date_stop ge {date}'}\nr = requests.get(url, headers=headers, params=params)\ndr_items = r.json()['items']\nif len(dr_items):\n with open('calls.csv', 'w', encoding='UTF8') as csv_file:\n csv_writer = csv.writer(csv_file)\n csv_header = ['dr_sid', 'date_start', 'number_src', 'number_dst',\n 'direction', 'duration', 'price']\n csv_writer.writerow(csv_header)\n dr_sid = dr_items[0]['dr_sid']\n csv_row = [dr_items[0]['dr_sid'], dr_items[0]['date_start'],\n dr_items[0]['number_src'], dr_items[0]['number_dst'], dr_items[\n 0]['direction'], dr_items[0]['duration'], dr_items[0]['price']]\n csv_writer.writerow(csv_row)\n print(f\"{i}. {dr_items[0]['dr_sid']}\")\n while True:\n params = {'limit': '100', 'order': 'date_stop asc', 'after': dr_sid\n }\n r = requests.get(url, headers=headers, params=params)\n if len(r.json()['items']):\n dr_items = r.json()['items']\n for item in dr_items:\n i += 1\n dr_sid = dr_items[len(r.json()['items']) - 1]['dr_sid']\n csv_row = [item['dr_sid'], item['date_start'], item[\n 'number_src'], item['number_dst'], item['direction'\n ], item['duration'], item['price']]\n csv_writer.writerow(csv_row)\n print(f\"{i}. {item['dr_sid']}\")\n else:\n print('No more new calls')\n break\nelse:\n print(f'No calls since {date}')\n",
"step-4": "import requests, csv, configuration\nheaders = {'Authorization': f'Bearer {configuration.CARRIERX_API_TOKEN}'}\nurl = f'{configuration.BASE_CARRIERX_API_URL}/core/v2/calls/call_drs'\ndate = configuration.DATE\ni = 1\nparams = {'limit': '1', 'order': 'date_stop asc', 'filter':\n f'date_stop ge {date}'}\nr = requests.get(url, headers=headers, params=params)\ndr_items = r.json()['items']\nif len(dr_items):\n with open('calls.csv', 'w', encoding='UTF8') as csv_file:\n csv_writer = csv.writer(csv_file)\n csv_header = ['dr_sid', 'date_start', 'number_src', 'number_dst',\n 'direction', 'duration', 'price']\n csv_writer.writerow(csv_header)\n dr_sid = dr_items[0]['dr_sid']\n csv_row = [dr_items[0]['dr_sid'], dr_items[0]['date_start'],\n dr_items[0]['number_src'], dr_items[0]['number_dst'], dr_items[\n 0]['direction'], dr_items[0]['duration'], dr_items[0]['price']]\n csv_writer.writerow(csv_row)\n print(f\"{i}. {dr_items[0]['dr_sid']}\")\n while True:\n params = {'limit': '100', 'order': 'date_stop asc', 'after': dr_sid\n }\n r = requests.get(url, headers=headers, params=params)\n if len(r.json()['items']):\n dr_items = r.json()['items']\n for item in dr_items:\n i += 1\n dr_sid = dr_items[len(r.json()['items']) - 1]['dr_sid']\n csv_row = [item['dr_sid'], item['date_start'], item[\n 'number_src'], item['number_dst'], item['direction'\n ], item['duration'], item['price']]\n csv_writer.writerow(csv_row)\n print(f\"{i}. {item['dr_sid']}\")\n else:\n print('No more new calls')\n break\nelse:\n print(f'No calls since {date}')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from typing import List


class Solution:
def countBits(self, num: int) -> List[int]:
total = []
for i in range(num + 1):
counter = bin(i).count('1')
# for j in bin(i):
# if j == '1':
# counter += 1
total.append(counter)
return total
# bin(i).count('1') is the easy way to do it with built in functions
# for loop to search each char in the returned string is slower
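    # A common O(n) alternative (not used here) reuses earlier results via the
    # recurrence bits[i] = bits[i >> 1] + (i & 1).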
|
normal
|
{
"blob_id": "c6554ff18c23a61d3694e73b808f44c96f9a19c4",
"index": 2012,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def countBits(self, num: int) ->List[int]:\n total = []\n for i in range(num + 1):\n counter = bin(i).count('1')\n total.append(counter)\n return total\n",
"step-4": "class Solution:\n def countBits(self, num: int) -> List[int]:\n total = []\n for i in range(num + 1):\n counter = bin(i).count('1')\n # for j in bin(i):\n # if j == '1':\n # counter += 1\n total.append(counter)\n \n return total\n \n # bin(i).count('1') is the easy way to do it with built in functions\n # for loop to search each char in the returned string is slower\n \n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from torchvision import datasets, transforms
import torch
def load_data(data_folder, batch_size, train, num_workers=0, **kwargs):
transform = {
'train': transforms.Compose(
[transforms.Resize([256, 256]),
transforms.RandomCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])]),
'test': transforms.Compose(
[transforms.Resize([224, 224]),
transforms.ToTensor(),
transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])])
}
data = datasets.ImageFolder(root=data_folder, transform=transform['train' if train else 'test'])
data_loader = get_data_loader(data, batch_size=batch_size,
shuffle=True if train else False,
num_workers=num_workers, **kwargs, drop_last=True if train else False)
n_class = len(data.classes)
return data_loader, n_class
def get_data_loader(dataset, batch_size, shuffle=True, drop_last=False, num_workers=0, infinite_data_loader=False, **kwargs):
    if not infinite_data_loader:
        # Honor the caller's shuffle flag rather than hard-coding True, so a
        # loader requested with shuffle=False is actually unshuffled.
        return torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=shuffle, drop_last=drop_last, num_workers=num_workers, **kwargs)
    else:
        return InfiniteDataLoader(dataset, batch_size=batch_size, shuffle=shuffle, drop_last=drop_last, num_workers=num_workers, **kwargs)
class _InfiniteSampler(torch.utils.data.Sampler):
"""Wraps another Sampler to yield an infinite stream."""
def __init__(self, sampler):
self.sampler = sampler
def __iter__(self):
while True:
for batch in self.sampler:
yield batch
class InfiniteDataLoader:
def __init__(self, dataset, batch_size, shuffle=True, drop_last=False, num_workers=0, weights=None, **kwargs):
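        # Note: `shuffle` is accepted for interface parity but not used here;
        # sampling below is always random (weighted or uniform).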
if weights is not None:
sampler = torch.utils.data.WeightedRandomSampler(weights,
replacement=False,
num_samples=batch_size)
else:
sampler = torch.utils.data.RandomSampler(dataset,
replacement=False)
batch_sampler = torch.utils.data.BatchSampler(
sampler,
batch_size=batch_size,
drop_last=drop_last)
self._infinite_iterator = iter(torch.utils.data.DataLoader(
dataset,
num_workers=num_workers,
batch_sampler=_InfiniteSampler(batch_sampler)
))
def __iter__(self):
while True:
yield next(self._infinite_iterator)
def __len__(self):
        return 0  # length of an infinite stream is undefined; 0 is a sentinel
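
# Illustrative usage sketch (the dataset path and sizes are assumptions, not
# part of the original module):
# train_loader, n_class = load_data('/data/office31/amazon', batch_size=32, train=True)
# images, labels = next(iter(train_loader))  # images: a (32, 3, 224, 224) tensor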
|
normal
|
{
"blob_id": "d99fd3dc63f6a40dde5a6230111b9f3598d3c5fd",
"index": 7830,
"step-1": "<mask token>\n\n\nclass _InfiniteSampler(torch.utils.data.Sampler):\n \"\"\"Wraps another Sampler to yield an infinite stream.\"\"\"\n\n def __init__(self, sampler):\n self.sampler = sampler\n\n def __iter__(self):\n while True:\n for batch in self.sampler:\n yield batch\n\n\nclass InfiniteDataLoader:\n\n def __init__(self, dataset, batch_size, shuffle=True, drop_last=False,\n num_workers=0, weights=None, **kwargs):\n if weights is not None:\n sampler = torch.utils.data.WeightedRandomSampler(weights,\n replacement=False, num_samples=batch_size)\n else:\n sampler = torch.utils.data.RandomSampler(dataset, replacement=False\n )\n batch_sampler = torch.utils.data.BatchSampler(sampler, batch_size=\n batch_size, drop_last=drop_last)\n self._infinite_iterator = iter(torch.utils.data.DataLoader(dataset,\n num_workers=num_workers, batch_sampler=_InfiniteSampler(\n batch_sampler)))\n\n def __iter__(self):\n while True:\n yield next(self._infinite_iterator)\n\n def __len__(self):\n return 0\n",
"step-2": "<mask token>\n\n\ndef get_data_loader(dataset, batch_size, shuffle=True, drop_last=False,\n num_workers=0, infinite_data_loader=False, **kwargs):\n if not infinite_data_loader:\n return torch.utils.data.DataLoader(dataset, batch_size=batch_size,\n shuffle=True, drop_last=drop_last, num_workers=num_workers, **\n kwargs)\n else:\n return InfiniteDataLoader(dataset, batch_size=batch_size, shuffle=\n True, drop_last=drop_last, num_workers=num_workers, **kwargs)\n\n\nclass _InfiniteSampler(torch.utils.data.Sampler):\n \"\"\"Wraps another Sampler to yield an infinite stream.\"\"\"\n\n def __init__(self, sampler):\n self.sampler = sampler\n\n def __iter__(self):\n while True:\n for batch in self.sampler:\n yield batch\n\n\nclass InfiniteDataLoader:\n\n def __init__(self, dataset, batch_size, shuffle=True, drop_last=False,\n num_workers=0, weights=None, **kwargs):\n if weights is not None:\n sampler = torch.utils.data.WeightedRandomSampler(weights,\n replacement=False, num_samples=batch_size)\n else:\n sampler = torch.utils.data.RandomSampler(dataset, replacement=False\n )\n batch_sampler = torch.utils.data.BatchSampler(sampler, batch_size=\n batch_size, drop_last=drop_last)\n self._infinite_iterator = iter(torch.utils.data.DataLoader(dataset,\n num_workers=num_workers, batch_sampler=_InfiniteSampler(\n batch_sampler)))\n\n def __iter__(self):\n while True:\n yield next(self._infinite_iterator)\n\n def __len__(self):\n return 0\n",
"step-3": "<mask token>\n\n\ndef load_data(data_folder, batch_size, train, num_workers=0, **kwargs):\n transform = {'train': transforms.Compose([transforms.Resize([256, 256]),\n transforms.RandomCrop(224), transforms.RandomHorizontalFlip(),\n transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, \n 0.406], std=[0.229, 0.224, 0.225])]), 'test': transforms.Compose([\n transforms.Resize([224, 224]), transforms.ToTensor(), transforms.\n Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])])}\n data = datasets.ImageFolder(root=data_folder, transform=transform[\n 'train' if train else 'test'])\n data_loader = get_data_loader(data, batch_size=batch_size, shuffle=True if\n train else False, num_workers=num_workers, **kwargs, drop_last=True if\n train else False)\n n_class = len(data.classes)\n return data_loader, n_class\n\n\ndef get_data_loader(dataset, batch_size, shuffle=True, drop_last=False,\n num_workers=0, infinite_data_loader=False, **kwargs):\n if not infinite_data_loader:\n return torch.utils.data.DataLoader(dataset, batch_size=batch_size,\n shuffle=True, drop_last=drop_last, num_workers=num_workers, **\n kwargs)\n else:\n return InfiniteDataLoader(dataset, batch_size=batch_size, shuffle=\n True, drop_last=drop_last, num_workers=num_workers, **kwargs)\n\n\nclass _InfiniteSampler(torch.utils.data.Sampler):\n \"\"\"Wraps another Sampler to yield an infinite stream.\"\"\"\n\n def __init__(self, sampler):\n self.sampler = sampler\n\n def __iter__(self):\n while True:\n for batch in self.sampler:\n yield batch\n\n\nclass InfiniteDataLoader:\n\n def __init__(self, dataset, batch_size, shuffle=True, drop_last=False,\n num_workers=0, weights=None, **kwargs):\n if weights is not None:\n sampler = torch.utils.data.WeightedRandomSampler(weights,\n replacement=False, num_samples=batch_size)\n else:\n sampler = torch.utils.data.RandomSampler(dataset, replacement=False\n )\n batch_sampler = torch.utils.data.BatchSampler(sampler, batch_size=\n batch_size, drop_last=drop_last)\n self._infinite_iterator = iter(torch.utils.data.DataLoader(dataset,\n num_workers=num_workers, batch_sampler=_InfiniteSampler(\n batch_sampler)))\n\n def __iter__(self):\n while True:\n yield next(self._infinite_iterator)\n\n def __len__(self):\n return 0\n",
"step-4": "from torchvision import datasets, transforms\nimport torch\n\n\ndef load_data(data_folder, batch_size, train, num_workers=0, **kwargs):\n transform = {'train': transforms.Compose([transforms.Resize([256, 256]),\n transforms.RandomCrop(224), transforms.RandomHorizontalFlip(),\n transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, \n 0.406], std=[0.229, 0.224, 0.225])]), 'test': transforms.Compose([\n transforms.Resize([224, 224]), transforms.ToTensor(), transforms.\n Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])])}\n data = datasets.ImageFolder(root=data_folder, transform=transform[\n 'train' if train else 'test'])\n data_loader = get_data_loader(data, batch_size=batch_size, shuffle=True if\n train else False, num_workers=num_workers, **kwargs, drop_last=True if\n train else False)\n n_class = len(data.classes)\n return data_loader, n_class\n\n\ndef get_data_loader(dataset, batch_size, shuffle=True, drop_last=False,\n num_workers=0, infinite_data_loader=False, **kwargs):\n if not infinite_data_loader:\n return torch.utils.data.DataLoader(dataset, batch_size=batch_size,\n shuffle=True, drop_last=drop_last, num_workers=num_workers, **\n kwargs)\n else:\n return InfiniteDataLoader(dataset, batch_size=batch_size, shuffle=\n True, drop_last=drop_last, num_workers=num_workers, **kwargs)\n\n\nclass _InfiniteSampler(torch.utils.data.Sampler):\n \"\"\"Wraps another Sampler to yield an infinite stream.\"\"\"\n\n def __init__(self, sampler):\n self.sampler = sampler\n\n def __iter__(self):\n while True:\n for batch in self.sampler:\n yield batch\n\n\nclass InfiniteDataLoader:\n\n def __init__(self, dataset, batch_size, shuffle=True, drop_last=False,\n num_workers=0, weights=None, **kwargs):\n if weights is not None:\n sampler = torch.utils.data.WeightedRandomSampler(weights,\n replacement=False, num_samples=batch_size)\n else:\n sampler = torch.utils.data.RandomSampler(dataset, replacement=False\n )\n batch_sampler = torch.utils.data.BatchSampler(sampler, batch_size=\n batch_size, drop_last=drop_last)\n self._infinite_iterator = iter(torch.utils.data.DataLoader(dataset,\n num_workers=num_workers, batch_sampler=_InfiniteSampler(\n batch_sampler)))\n\n def __iter__(self):\n while True:\n yield next(self._infinite_iterator)\n\n def __len__(self):\n return 0\n",
"step-5": "from torchvision import datasets, transforms\nimport torch\n\ndef load_data(data_folder, batch_size, train, num_workers=0, **kwargs):\n transform = {\n 'train': transforms.Compose(\n [transforms.Resize([256, 256]),\n transforms.RandomCrop(224),\n transforms.RandomHorizontalFlip(),\n transforms.ToTensor(),\n transforms.Normalize(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])]),\n 'test': transforms.Compose(\n [transforms.Resize([224, 224]),\n transforms.ToTensor(),\n transforms.Normalize(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])])\n }\n data = datasets.ImageFolder(root=data_folder, transform=transform['train' if train else 'test'])\n data_loader = get_data_loader(data, batch_size=batch_size, \n shuffle=True if train else False, \n num_workers=num_workers, **kwargs, drop_last=True if train else False)\n n_class = len(data.classes)\n return data_loader, n_class\n\n\ndef get_data_loader(dataset, batch_size, shuffle=True, drop_last=False, num_workers=0, infinite_data_loader=False, **kwargs):\n if not infinite_data_loader:\n return torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, drop_last=drop_last, num_workers=num_workers, **kwargs)\n else:\n return InfiniteDataLoader(dataset, batch_size=batch_size, shuffle=True, drop_last=drop_last, num_workers=num_workers, **kwargs)\n\nclass _InfiniteSampler(torch.utils.data.Sampler):\n \"\"\"Wraps another Sampler to yield an infinite stream.\"\"\"\n def __init__(self, sampler):\n self.sampler = sampler\n\n def __iter__(self):\n while True:\n for batch in self.sampler:\n yield batch\n\nclass InfiniteDataLoader:\n def __init__(self, dataset, batch_size, shuffle=True, drop_last=False, num_workers=0, weights=None, **kwargs):\n if weights is not None:\n sampler = torch.utils.data.WeightedRandomSampler(weights,\n replacement=False,\n num_samples=batch_size)\n else:\n sampler = torch.utils.data.RandomSampler(dataset,\n replacement=False)\n \n batch_sampler = torch.utils.data.BatchSampler(\n sampler,\n batch_size=batch_size,\n drop_last=drop_last)\n\n self._infinite_iterator = iter(torch.utils.data.DataLoader(\n dataset,\n num_workers=num_workers,\n batch_sampler=_InfiniteSampler(batch_sampler)\n ))\n\n def __iter__(self):\n while True:\n yield next(self._infinite_iterator)\n\n def __len__(self):\n return 0 # Always return 0",
"step-ids": [
8,
9,
10,
11,
12
]
}
|
[
8,
9,
10,
11,
12
] |
from rest_framework.generics import GenericAPIView
from rest_framework.response import Response
from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
from ...models.brand import Brand
from ...models.product import type_currency_choices, type_condition_choices, User, Product
from ...models.product_color import color_choices, Product_Color
from ...models.product_size import type_size_choices, Product_Size
from ...models.product_image import Product_Image
from ...models.product_specification import Product_Specification
from ...models.product_platform import Product_Platform
from ...models.product_recommended_use import Product_Recommended_Use
from ...models.product_terms_condition import Product_Terms_Condition
from ...serilaizers.products.updateSerializer import UpdateSerializer
from ...serilaizers.products.specificationSerializer import SpecificationSerializer
from ...serilaizers.products.imageSerializer import ImageSerializer
from ...serilaizers.products.colorSerializer import ColorSerializer
from ...serilaizers.products.platformSerializer import PlatformSerializer
from ...serilaizers.products.recommendedUseSerializer import RecommendedUseSerializer
from ...serilaizers.products.sizeSerializer import SizeSerializer
from ...serilaizers.products.termConditionSerializer import TermConditionSerializer


class UpdateProduct(GenericAPIView):
serializer_class = UpdateSerializer
_product_obj = None
_brands = Brand.objects.values("id", "name")
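    # NOTE: _brands is a class attribute, so this queryset is built once at
    # import time; after it is first evaluated its cached results are shared
    # across requests until the process restarts.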

    def get(self, request, *args, **kwargs):
        data = self.get_queryset()
        # get_queryset() returns False for a missing product, so bail out
        # before touching data.id.
        if not data:
            return Response({"errors": False}, status=HTTP_404_NOT_FOUND)
        extract_sp = self.extract_filter_data(Product_Specification.objects.values(
            "name", "value").filter(product=data.id))
        extract_img = self.extract_filter_data(
            Product_Image.objects.values('image').filter(product=data.id))
        return Response(self.get_data({
            "product": data,
            "specific": extract_sp,
            "img": extract_img
        }))

    def extract_filter_data(self, data):
        # Materialize the queryset into a plain list of row dicts.
        return list(data)
def get_extra_data(self, id):
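        # Gather the optional one-to-many relations (platform, color, size,
        # recommended use, terms) for the product; returns False when the
        # product has none of them.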
extra_data = {}
pl = Product_Platform.objects.values(
'platform').filter(product=id)
col = Product_Color.objects.values('color').filter(product=id)
siz = Product_Size.objects.values(
'size', 'type_size').filter(product=id)
recom = Product_Recommended_Use.objects.values(
'recommended_use').filter(product=id)
terms = Product_Terms_Condition.objects.values(
'terms_condition').filter(product=id)
if pl.exists():
extra_data['platform'] = self.extract_filter_data(pl)
if col.exists():
extra_data['color'] = self.extract_filter_data(col)
if siz.exists():
extra_data['size'] = self.extract_filter_data(siz)
if recom.exists():
extra_data['recom_use'] = self.extract_filter_data(recom)
if terms.exists():
extra_data['term_condition'] = self.extract_filter_data(terms)
if extra_data:
return extra_data
else:
return False

    def get_queryset(self):
        try:
            return Product.objects.get(id=self.kwargs['pk'])
        except (Product.DoesNotExist, ValueError):
            # The product does not exist; callers treat False as "not found".
            return False

    def put(self, request, *args, **kwargs):
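        # Update flow: validate the main product fields, then the optional
        # extras; updates are applied only after both validations pass.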
self._product_obj = self.get_queryset()
data = self.prepare_data(self.request.data, self.request.FILES)
main = self.validate_main_data(data)
if 'errors' in main:
return Response(main['errors'], status=HTTP_400_BAD_REQUEST)
else:
extra = self.validate_extra_data(data)
if extra:
if 'errors' in extra:
return Response(extra['errors'], status=HTTP_400_BAD_REQUEST)
else:
main = self.update_main_data(data, main)
self.update_extra_data(data, extra)
return Response(self.get_data(main))
self.update_extra_data(data, False)
main = self.update_main_data(data, main)
return Response(self.get_data(main))

    def get_data(self, main):
return {
"user": User.objects.values('id', 'username').get(username="root"),
"name": main['product'].title,
"brand": main['product'].brand.id,
"quantity": main['product'].quantity,
"price": main['product'].price,
"currency": main['product'].currency,
"condition": main['product'].condition,
"description": main['product'].description,
"brands": self._brands,
"conditions": type_condition_choices,
"currencys": type_currency_choices,
"colors": color_choices,
"sizes": type_size_choices,
"specific": self.extract_filter_data(main['specific']),
"images": self.extract_filter_data(main['img']),
"extra_data": self.get_extra_data(main['product'].id)
}

    def prepare_data(self, data, img_data=None):
        # Decode the JSON payload sent under the 'data' key, re-key the
        # current-image map by the index suffix of its field names, and
        # attach any uploaded image files re-keyed the same way.
        from json import loads

        data = loads(data['data'])
        data['img_current'] = {
            i.split("_")[2]: data['img_current'][i] for i in data['img_current']}
        # img_data defaults to None, so check it before taking len().
        if img_data and len(img_data) > 0:
            data['images'] = {i.split("_")[1]: img_data[i] for i in img_data}
        return data

    def update_main_data(self, data, ser_data):
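        # Apply the validated product update, upsert specifications (update
        # when a 'current' name is supplied, create otherwise), and swap any
        # replaced images before returning the refreshed querysets.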
pro = ser_data['product'].update(self._product_obj, data)
for i in data['specific']:
if 'current' in i:
if i['current'] != i['name']:
ser_data['specific'].update(Product_Specification.objects.get(
product=self._product_obj.id, name=i['current']), i)
else:
i['product'] = self._product_obj
ser_data['specific'].create(i)
if 'images' in data:
img = data['images']
for i in img['images']:
ser_data['image'].update(
Product_Image.objects.get(
product=self._product_obj.id,
image=img['current'][i]), img['images'][i])
return {
"product": pro,
"specific": Product_Specification.objects.values('name', 'value').filter(product=pro.id),
"img": Product_Image.objects.values('image').filter(product=pro.id)
}

    def update_extra_data(self, data, ser_data):
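        # For each optional relation: update when a *_current marker shows
        # the value changed, create when the relation is new, and delete the
        # stored rows when the field is absent from the payload.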
extra_d = {}
if ser_data and ('color' in ser_data):
if 'current' in data['color']:
if data['color']['current'] != data['color']['color']:
Product_Color.objects.filter(
product=self._product_obj.id).delete()
for i in data['color']['color']:
ser_data['color'].create(
{"product": self._product_obj, 'color': i})
else:
for i in data['color']['color']:
ser_data['color'].create(
{"product": self._product_obj, 'color': i})
else:
col = Product_Color.objects.filter(
product=self._product_obj.id)
if col.exists():
col.delete()
if ser_data and ('size' in ser_data):
siz = data['size']['size'][0]
typ = data['size']['size'][1]
            if 'current' in data['size']:
                cur_siz = data['size']['current'][0]
                cur_typ = data['size']['current'][1]
                # Update when either the size or the size type changed.
                if siz != cur_siz or typ != cur_typ:
                    ser_data['size'].update(Product_Size.objects.get(
                        product=self._product_obj.id), {"size": siz, "type_size": typ})
            else:
                ser_data['size'].create(
                    {"product": self._product_obj, "size": siz, "type_size": typ})
else:
siz = Product_Size.objects.filter(
product=self._product_obj.id)
if siz.exists():
siz.delete()
if ser_data and ('platform' in ser_data):
if 'platform_current' in data:
if data['platform_current'] != data['platform']:
extra_d['platform'] = ser_data['platform'].update(Product_Platform.objects.get(
product=self._product_obj.id), data['platform'])
else:
extra_d['platform'] = ser_data['platform'].create(
{"product": self._product_obj, "platform": data['platform']})
else:
pl = Product_Platform.objects.filter(
product=self._product_obj.id)
if pl.exists():
pl.delete()
if ser_data and ('recom_use' in ser_data):
if 'recom_use_current' in data:
if data['recom_use_current'] != data['recom_use']:
extra_d['recom_use'] = ser_data['recom_use'].update(Product_Recommended_Use.objects.get(
product=self._product_obj.id), data['recom_use'])
else:
extra_d['recom_use'] = ser_data['recom_use'].create(
{"product": self._product_obj, "recommended_use": data['recom_use']})
else:
recom = Product_Recommended_Use.objects.filter(
product=self._product_obj.id)
if recom.exists():
recom.delete()
if ser_data and ('term_condition' in ser_data):
if 'term_condition_current' in data:
if data['term_condition_current'] != data['term_condition']:
extra_d['term_condition'] = ser_data['term_condition'].update(
Product_Terms_Condition.objects.get(product=self._product_obj.id), data['term_condition'])
else:
extra_d['term_condition'] = ser_data['term_condition'].create(
{"product": self._product_obj, "terms_condition": data['term_condition']})
else:
terms = Product_Terms_Condition.objects.filter(
product=self._product_obj.id)
if terms.exists():
terms.delete()
extra_d['color'] = Product_Color.objects.filter(
product=self._product_obj.id)
extra_d['size'] = Product_Size.objects.filter(
product=self._product_obj.id)
return extra_d

    def validate_main_data(self, data):
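        # Returns a dict of bound serializers on success, or
        # {"errors": ...} from the first serializer that fails.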
pro_ser = UpdateSerializer(instance=self._product_obj, data=data)
ser_data = {}
if pro_ser.is_valid():
ser_data['product'] = pro_ser
sp = self.validate_specification(
self._product_obj, data['specific'])
if isinstance(sp, SpecificationSerializer):
ser_data['specific'] = sp
if 'images' in data:
data['images'] = {"images": data['images'],
'current': data['img_current']}
img = self.validate_image(
self._product_obj, data['images'])
if isinstance(img, ImageSerializer):
ser_data['image'] = img
return ser_data
else:
                        return {"errors": img}
else:
return ser_data
else:
return {"errors": sp} # return error
else:
return {"errors": pro_ser.errors}

    def validate_extra_data(self, data):
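        # Validates only the optional fields present in the payload; returns
        # False when none were sent, so put() can skip the extras.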
ser_data = {}
if 'color' in data:
col = self.validate_color(data['color']['color'])
if isinstance(col, ColorSerializer):
ser_data['color'] = col
else:
return {"errors": col}
if 'size' in data:
siz = self.validate_size(data['size']['size'])
if isinstance(siz, SizeSerializer):
ser_data['size'] = siz
else:
return {"errors": siz}
if 'platform' in data:
pl = PlatformSerializer(data={"platform": data['platform']})
if pl.is_valid():
ser_data['platform'] = pl
else:
return {"errors": pl.errors}
if 'recom_use' in data:
recom = RecommendedUseSerializer(
data={"recommended_use": data['recom_use']})
if recom.is_valid():
ser_data['recom_use'] = recom
else:
return {"errors": recom.errors}
if 'term_condition' in data:
term = TermConditionSerializer(
data={"terms_condition": data['term_condition']})
if term.is_valid():
ser_data['term_condition'] = term
else:
return {"errors": term.errors}
if ser_data:
return ser_data
else:
return False

    def validate_specification(self, pro, data):
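        # Like validate_image and validate_color below, this checks every
        # row but returns only the last serializer instance; the update path
        # then reuses that instance for each row's create/update call.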
for i in data:
sp = SpecificationSerializer(
data={"name": i['name'], "value": i['value']})
if not sp.is_valid():
return sp.errors
return sp

    def validate_image(self, pro, data):
for i in data['images']:
img = ImageSerializer(data={"image": data['images'][i]})
if not img.is_valid():
return img.errors
return img

    def validate_color(self, data):
for i in data:
col = ColorSerializer(data={"color": i})
if not col.is_valid():
return col.errors
return col

    def validate_size(self, data):
size = SizeSerializer(data={"size": data[0],
"type_size": data[1]})
if not size.is_valid():
return size.errors
return size
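
# A minimal sketch of how this view could be routed; the URL pattern and
# urls.py layout are assumptions for illustration, not taken from this
# project:
#
#   from django.urls import path
#   urlpatterns = [
#       path('products/<int:pk>/', UpdateProduct.as_view()),
#   ]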
|
normal
|
{
"blob_id": "47e9b73fc7f6b3c8295e78d0cdb5aa51ca4c5f8d",
"index": 8140,
"step-1": "<mask token>\n\n\nclass UpdateProduct(GenericAPIView):\n <mask token>\n <mask token>\n <mask token>\n\n def get(self, request, *args, **kwargs):\n data = self.get_queryset()\n extract_sp = self.extract_filter_data(Product_Specification.objects\n .values('name', 'value').filter(product=data.id))\n extract_img = self.extract_filter_data(Product_Image.objects.values\n ('image').filter(product=data.id))\n if data:\n return Response(self.get_data({'product': data, 'specific':\n extract_sp, 'img': extract_img}))\n else:\n return Response({'errors': False}, status=HTTP_404_NOT_FOUND)\n <mask token>\n\n def get_extra_data(self, id):\n extra_data = {}\n pl = Product_Platform.objects.values('platform').filter(product=id)\n col = Product_Color.objects.values('color').filter(product=id)\n siz = Product_Size.objects.values('size', 'type_size').filter(product\n =id)\n recom = Product_Recommended_Use.objects.values('recommended_use'\n ).filter(product=id)\n terms = Product_Terms_Condition.objects.values('terms_condition'\n ).filter(product=id)\n if pl.exists():\n extra_data['platform'] = self.extract_filter_data(pl)\n if col.exists():\n extra_data['color'] = self.extract_filter_data(col)\n if siz.exists():\n extra_data['size'] = self.extract_filter_data(siz)\n if recom.exists():\n extra_data['recom_use'] = self.extract_filter_data(recom)\n if terms.exists():\n extra_data['term_condition'] = self.extract_filter_data(terms)\n if extra_data:\n return extra_data\n else:\n return False\n\n def get_queryset(self):\n try:\n return Product.objects.get(id=self.kwargs['pk'])\n except:\n return False\n\n def put(self, request, *args, **kwargs):\n self._product_obj = self.get_queryset()\n data = self.prepare_data(self.request.data, self.request.FILES)\n main = self.validate_main_data(data)\n if 'errors' in main:\n return Response(main['errors'], status=HTTP_400_BAD_REQUEST)\n else:\n extra = self.validate_extra_data(data)\n if extra:\n if 'errors' in extra:\n return Response(extra['errors'], status=\n HTTP_400_BAD_REQUEST)\n else:\n main = self.update_main_data(data, main)\n self.update_extra_data(data, extra)\n return Response(self.get_data(main))\n self.update_extra_data(data, False)\n main = self.update_main_data(data, main)\n return Response(self.get_data(main))\n\n def get_data(self, main):\n return {'user': User.objects.values('id', 'username').get(username=\n 'root'), 'name': main['product'].title, 'brand': main['product'\n ].brand.id, 'quantity': main['product'].quantity, 'price': main\n ['product'].price, 'currency': main['product'].currency,\n 'condition': main['product'].condition, 'description': main[\n 'product'].description, 'brands': self._brands, 'conditions':\n type_condition_choices, 'currencys': type_currency_choices,\n 'colors': color_choices, 'sizes': type_size_choices, 'specific':\n self.extract_filter_data(main['specific']), 'images': self.\n extract_filter_data(main['img']), 'extra_data': self.\n get_extra_data(main['product'].id)}\n\n def prepare_data(self, data, img_data=None):\n from json import loads\n data = data['data']\n data = loads(data)\n data['img_current'] = {i.split('_')[2]: data['img_current'][i] for\n i in data['img_current']}\n if len(img_data) > 0:\n img = {i.split('_')[1]: img_data[i] for i in img_data}\n data['images'] = img\n return data\n\n def update_main_data(self, data, ser_data):\n pro = ser_data['product'].update(self._product_obj, data)\n for i in data['specific']:\n if 'current' in i:\n if i['current'] != i['name']:\n 
ser_data['specific'].update(Product_Specification.\n objects.get(product=self._product_obj.id, name=i[\n 'current']), i)\n else:\n i['product'] = self._product_obj\n ser_data['specific'].create(i)\n if 'images' in data:\n img = data['images']\n for i in img['images']:\n ser_data['image'].update(Product_Image.objects.get(product=\n self._product_obj.id, image=img['current'][i]), img[\n 'images'][i])\n return {'product': pro, 'specific': Product_Specification.objects.\n values('name', 'value').filter(product=pro.id), 'img':\n Product_Image.objects.values('image').filter(product=pro.id)}\n\n def update_extra_data(self, data, ser_data):\n extra_d = {}\n if ser_data and 'color' in ser_data:\n if 'current' in data['color']:\n if data['color']['current'] != data['color']['color']:\n Product_Color.objects.filter(product=self._product_obj.id\n ).delete()\n for i in data['color']['color']:\n ser_data['color'].create({'product': self.\n _product_obj, 'color': i})\n else:\n for i in data['color']['color']:\n ser_data['color'].create({'product': self._product_obj,\n 'color': i})\n else:\n col = Product_Color.objects.filter(product=self._product_obj.id)\n if col.exists():\n col.delete()\n if ser_data and 'size' in ser_data:\n siz = data['size']['size'][0]\n typ = data['size']['size'][1]\n if 'current' in data['size']:\n cur_siz = data['size']['current'][0]\n cur_typ = data['size']['current'][1]\n if siz != cur_siz:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n elif typ != cur_typ:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n else:\n ser_data['size'].create({'product': self._product_obj,\n 'size': siz, 'type_size': typ})\n else:\n siz = Product_Size.objects.filter(product=self._product_obj.id)\n if siz.exists():\n siz.delete()\n if ser_data and 'platform' in ser_data:\n if 'platform_current' in data:\n if data['platform_current'] != data['platform']:\n extra_d['platform'] = ser_data['platform'].update(\n Product_Platform.objects.get(product=self.\n _product_obj.id), data['platform'])\n else:\n extra_d['platform'] = ser_data['platform'].create({\n 'product': self._product_obj, 'platform': data['platform']}\n )\n else:\n pl = Product_Platform.objects.filter(product=self._product_obj.id)\n if pl.exists():\n pl.delete()\n if ser_data and 'recom_use' in ser_data:\n if 'recom_use_current' in data:\n if data['recom_use_current'] != data['recom_use']:\n extra_d['recom_use'] = ser_data['recom_use'].update(\n Product_Recommended_Use.objects.get(product=self.\n _product_obj.id), data['recom_use'])\n else:\n extra_d['recom_use'] = ser_data['recom_use'].create({\n 'product': self._product_obj, 'recommended_use': data[\n 'recom_use']})\n else:\n recom = Product_Recommended_Use.objects.filter(product=self.\n _product_obj.id)\n if recom.exists():\n recom.delete()\n if ser_data and 'term_condition' in ser_data:\n if 'term_condition_current' in data:\n if data['term_condition_current'] != data['term_condition']:\n extra_d['term_condition'] = ser_data['term_condition'\n ].update(Product_Terms_Condition.objects.get(\n product=self._product_obj.id), data['term_condition'])\n else:\n extra_d['term_condition'] = ser_data['term_condition'].create({\n 'product': self._product_obj, 'terms_condition': data[\n 'term_condition']})\n else:\n terms = Product_Terms_Condition.objects.filter(product=self.\n _product_obj.id)\n if terms.exists():\n terms.delete()\n extra_d['color'] = 
Product_Color.objects.filter(product=self.\n _product_obj.id)\n extra_d['size'] = Product_Size.objects.filter(product=self.\n _product_obj.id)\n return extra_d\n\n def validate_main_data(self, data):\n pro_ser = UpdateSerializer(instance=self._product_obj, data=data)\n ser_data = {}\n if pro_ser.is_valid():\n ser_data['product'] = pro_ser\n sp = self.validate_specification(self._product_obj, data[\n 'specific'])\n if isinstance(sp, SpecificationSerializer):\n ser_data['specific'] = sp\n if 'images' in data:\n data['images'] = {'images': data['images'], 'current':\n data['img_current']}\n img = self.validate_image(self._product_obj, data['images']\n )\n if isinstance(img, ImageSerializer):\n ser_data['image'] = img\n return ser_data\n else:\n return {'errors': img}\n else:\n return ser_data\n else:\n return {'errors': sp}\n else:\n return {'errors': pro_ser.errors}\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def validate_size(self, data):\n size = SizeSerializer(data={'size': data[0], 'type_size': data[1]})\n if not size.is_valid():\n return size.errors\n return size\n",
"step-2": "<mask token>\n\n\nclass UpdateProduct(GenericAPIView):\n <mask token>\n <mask token>\n <mask token>\n\n def get(self, request, *args, **kwargs):\n data = self.get_queryset()\n extract_sp = self.extract_filter_data(Product_Specification.objects\n .values('name', 'value').filter(product=data.id))\n extract_img = self.extract_filter_data(Product_Image.objects.values\n ('image').filter(product=data.id))\n if data:\n return Response(self.get_data({'product': data, 'specific':\n extract_sp, 'img': extract_img}))\n else:\n return Response({'errors': False}, status=HTTP_404_NOT_FOUND)\n <mask token>\n\n def get_extra_data(self, id):\n extra_data = {}\n pl = Product_Platform.objects.values('platform').filter(product=id)\n col = Product_Color.objects.values('color').filter(product=id)\n siz = Product_Size.objects.values('size', 'type_size').filter(product\n =id)\n recom = Product_Recommended_Use.objects.values('recommended_use'\n ).filter(product=id)\n terms = Product_Terms_Condition.objects.values('terms_condition'\n ).filter(product=id)\n if pl.exists():\n extra_data['platform'] = self.extract_filter_data(pl)\n if col.exists():\n extra_data['color'] = self.extract_filter_data(col)\n if siz.exists():\n extra_data['size'] = self.extract_filter_data(siz)\n if recom.exists():\n extra_data['recom_use'] = self.extract_filter_data(recom)\n if terms.exists():\n extra_data['term_condition'] = self.extract_filter_data(terms)\n if extra_data:\n return extra_data\n else:\n return False\n\n def get_queryset(self):\n try:\n return Product.objects.get(id=self.kwargs['pk'])\n except:\n return False\n\n def put(self, request, *args, **kwargs):\n self._product_obj = self.get_queryset()\n data = self.prepare_data(self.request.data, self.request.FILES)\n main = self.validate_main_data(data)\n if 'errors' in main:\n return Response(main['errors'], status=HTTP_400_BAD_REQUEST)\n else:\n extra = self.validate_extra_data(data)\n if extra:\n if 'errors' in extra:\n return Response(extra['errors'], status=\n HTTP_400_BAD_REQUEST)\n else:\n main = self.update_main_data(data, main)\n self.update_extra_data(data, extra)\n return Response(self.get_data(main))\n self.update_extra_data(data, False)\n main = self.update_main_data(data, main)\n return Response(self.get_data(main))\n\n def get_data(self, main):\n return {'user': User.objects.values('id', 'username').get(username=\n 'root'), 'name': main['product'].title, 'brand': main['product'\n ].brand.id, 'quantity': main['product'].quantity, 'price': main\n ['product'].price, 'currency': main['product'].currency,\n 'condition': main['product'].condition, 'description': main[\n 'product'].description, 'brands': self._brands, 'conditions':\n type_condition_choices, 'currencys': type_currency_choices,\n 'colors': color_choices, 'sizes': type_size_choices, 'specific':\n self.extract_filter_data(main['specific']), 'images': self.\n extract_filter_data(main['img']), 'extra_data': self.\n get_extra_data(main['product'].id)}\n\n def prepare_data(self, data, img_data=None):\n from json import loads\n data = data['data']\n data = loads(data)\n data['img_current'] = {i.split('_')[2]: data['img_current'][i] for\n i in data['img_current']}\n if len(img_data) > 0:\n img = {i.split('_')[1]: img_data[i] for i in img_data}\n data['images'] = img\n return data\n\n def update_main_data(self, data, ser_data):\n pro = ser_data['product'].update(self._product_obj, data)\n for i in data['specific']:\n if 'current' in i:\n if i['current'] != i['name']:\n 
ser_data['specific'].update(Product_Specification.\n objects.get(product=self._product_obj.id, name=i[\n 'current']), i)\n else:\n i['product'] = self._product_obj\n ser_data['specific'].create(i)\n if 'images' in data:\n img = data['images']\n for i in img['images']:\n ser_data['image'].update(Product_Image.objects.get(product=\n self._product_obj.id, image=img['current'][i]), img[\n 'images'][i])\n return {'product': pro, 'specific': Product_Specification.objects.\n values('name', 'value').filter(product=pro.id), 'img':\n Product_Image.objects.values('image').filter(product=pro.id)}\n\n def update_extra_data(self, data, ser_data):\n extra_d = {}\n if ser_data and 'color' in ser_data:\n if 'current' in data['color']:\n if data['color']['current'] != data['color']['color']:\n Product_Color.objects.filter(product=self._product_obj.id\n ).delete()\n for i in data['color']['color']:\n ser_data['color'].create({'product': self.\n _product_obj, 'color': i})\n else:\n for i in data['color']['color']:\n ser_data['color'].create({'product': self._product_obj,\n 'color': i})\n else:\n col = Product_Color.objects.filter(product=self._product_obj.id)\n if col.exists():\n col.delete()\n if ser_data and 'size' in ser_data:\n siz = data['size']['size'][0]\n typ = data['size']['size'][1]\n if 'current' in data['size']:\n cur_siz = data['size']['current'][0]\n cur_typ = data['size']['current'][1]\n if siz != cur_siz:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n elif typ != cur_typ:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n else:\n ser_data['size'].create({'product': self._product_obj,\n 'size': siz, 'type_size': typ})\n else:\n siz = Product_Size.objects.filter(product=self._product_obj.id)\n if siz.exists():\n siz.delete()\n if ser_data and 'platform' in ser_data:\n if 'platform_current' in data:\n if data['platform_current'] != data['platform']:\n extra_d['platform'] = ser_data['platform'].update(\n Product_Platform.objects.get(product=self.\n _product_obj.id), data['platform'])\n else:\n extra_d['platform'] = ser_data['platform'].create({\n 'product': self._product_obj, 'platform': data['platform']}\n )\n else:\n pl = Product_Platform.objects.filter(product=self._product_obj.id)\n if pl.exists():\n pl.delete()\n if ser_data and 'recom_use' in ser_data:\n if 'recom_use_current' in data:\n if data['recom_use_current'] != data['recom_use']:\n extra_d['recom_use'] = ser_data['recom_use'].update(\n Product_Recommended_Use.objects.get(product=self.\n _product_obj.id), data['recom_use'])\n else:\n extra_d['recom_use'] = ser_data['recom_use'].create({\n 'product': self._product_obj, 'recommended_use': data[\n 'recom_use']})\n else:\n recom = Product_Recommended_Use.objects.filter(product=self.\n _product_obj.id)\n if recom.exists():\n recom.delete()\n if ser_data and 'term_condition' in ser_data:\n if 'term_condition_current' in data:\n if data['term_condition_current'] != data['term_condition']:\n extra_d['term_condition'] = ser_data['term_condition'\n ].update(Product_Terms_Condition.objects.get(\n product=self._product_obj.id), data['term_condition'])\n else:\n extra_d['term_condition'] = ser_data['term_condition'].create({\n 'product': self._product_obj, 'terms_condition': data[\n 'term_condition']})\n else:\n terms = Product_Terms_Condition.objects.filter(product=self.\n _product_obj.id)\n if terms.exists():\n terms.delete()\n extra_d['color'] = 
Product_Color.objects.filter(product=self.\n _product_obj.id)\n extra_d['size'] = Product_Size.objects.filter(product=self.\n _product_obj.id)\n return extra_d\n\n def validate_main_data(self, data):\n pro_ser = UpdateSerializer(instance=self._product_obj, data=data)\n ser_data = {}\n if pro_ser.is_valid():\n ser_data['product'] = pro_ser\n sp = self.validate_specification(self._product_obj, data[\n 'specific'])\n if isinstance(sp, SpecificationSerializer):\n ser_data['specific'] = sp\n if 'images' in data:\n data['images'] = {'images': data['images'], 'current':\n data['img_current']}\n img = self.validate_image(self._product_obj, data['images']\n )\n if isinstance(img, ImageSerializer):\n ser_data['image'] = img\n return ser_data\n else:\n return {'errors': img}\n else:\n return ser_data\n else:\n return {'errors': sp}\n else:\n return {'errors': pro_ser.errors}\n\n def validate_extra_data(self, data):\n ser_data = {}\n if 'color' in data:\n col = self.validate_color(data['color']['color'])\n if isinstance(col, ColorSerializer):\n ser_data['color'] = col\n else:\n return {'errors': col}\n if 'size' in data:\n siz = self.validate_size(data['size']['size'])\n if isinstance(siz, SizeSerializer):\n ser_data['size'] = siz\n else:\n return {'errors': siz}\n if 'platform' in data:\n pl = PlatformSerializer(data={'platform': data['platform']})\n if pl.is_valid():\n ser_data['platform'] = pl\n else:\n return {'errors': pl.errors}\n if 'recom_use' in data:\n recom = RecommendedUseSerializer(data={'recommended_use': data[\n 'recom_use']})\n if recom.is_valid():\n ser_data['recom_use'] = recom\n else:\n return {'errors': recom.errors}\n if 'term_condition' in data:\n term = TermConditionSerializer(data={'terms_condition': data[\n 'term_condition']})\n if term.is_valid():\n ser_data['term_condition'] = term\n else:\n return {'errors': term.errors}\n if ser_data:\n return ser_data\n else:\n return False\n <mask token>\n <mask token>\n\n def validate_color(self, data):\n for i in data:\n col = ColorSerializer(data={'color': i})\n if not col.is_valid():\n return col.errors\n return col\n\n def validate_size(self, data):\n size = SizeSerializer(data={'size': data[0], 'type_size': data[1]})\n if not size.is_valid():\n return size.errors\n return size\n",
"step-3": "<mask token>\n\n\nclass UpdateProduct(GenericAPIView):\n <mask token>\n <mask token>\n <mask token>\n\n def get(self, request, *args, **kwargs):\n data = self.get_queryset()\n extract_sp = self.extract_filter_data(Product_Specification.objects\n .values('name', 'value').filter(product=data.id))\n extract_img = self.extract_filter_data(Product_Image.objects.values\n ('image').filter(product=data.id))\n if data:\n return Response(self.get_data({'product': data, 'specific':\n extract_sp, 'img': extract_img}))\n else:\n return Response({'errors': False}, status=HTTP_404_NOT_FOUND)\n\n def extract_filter_data(self, data):\n arr = []\n for i in data:\n arr.append(i)\n return arr\n\n def get_extra_data(self, id):\n extra_data = {}\n pl = Product_Platform.objects.values('platform').filter(product=id)\n col = Product_Color.objects.values('color').filter(product=id)\n siz = Product_Size.objects.values('size', 'type_size').filter(product\n =id)\n recom = Product_Recommended_Use.objects.values('recommended_use'\n ).filter(product=id)\n terms = Product_Terms_Condition.objects.values('terms_condition'\n ).filter(product=id)\n if pl.exists():\n extra_data['platform'] = self.extract_filter_data(pl)\n if col.exists():\n extra_data['color'] = self.extract_filter_data(col)\n if siz.exists():\n extra_data['size'] = self.extract_filter_data(siz)\n if recom.exists():\n extra_data['recom_use'] = self.extract_filter_data(recom)\n if terms.exists():\n extra_data['term_condition'] = self.extract_filter_data(terms)\n if extra_data:\n return extra_data\n else:\n return False\n\n def get_queryset(self):\n try:\n return Product.objects.get(id=self.kwargs['pk'])\n except:\n return False\n\n def put(self, request, *args, **kwargs):\n self._product_obj = self.get_queryset()\n data = self.prepare_data(self.request.data, self.request.FILES)\n main = self.validate_main_data(data)\n if 'errors' in main:\n return Response(main['errors'], status=HTTP_400_BAD_REQUEST)\n else:\n extra = self.validate_extra_data(data)\n if extra:\n if 'errors' in extra:\n return Response(extra['errors'], status=\n HTTP_400_BAD_REQUEST)\n else:\n main = self.update_main_data(data, main)\n self.update_extra_data(data, extra)\n return Response(self.get_data(main))\n self.update_extra_data(data, False)\n main = self.update_main_data(data, main)\n return Response(self.get_data(main))\n\n def get_data(self, main):\n return {'user': User.objects.values('id', 'username').get(username=\n 'root'), 'name': main['product'].title, 'brand': main['product'\n ].brand.id, 'quantity': main['product'].quantity, 'price': main\n ['product'].price, 'currency': main['product'].currency,\n 'condition': main['product'].condition, 'description': main[\n 'product'].description, 'brands': self._brands, 'conditions':\n type_condition_choices, 'currencys': type_currency_choices,\n 'colors': color_choices, 'sizes': type_size_choices, 'specific':\n self.extract_filter_data(main['specific']), 'images': self.\n extract_filter_data(main['img']), 'extra_data': self.\n get_extra_data(main['product'].id)}\n\n def prepare_data(self, data, img_data=None):\n from json import loads\n data = data['data']\n data = loads(data)\n data['img_current'] = {i.split('_')[2]: data['img_current'][i] for\n i in data['img_current']}\n if len(img_data) > 0:\n img = {i.split('_')[1]: img_data[i] for i in img_data}\n data['images'] = img\n return data\n\n def update_main_data(self, data, ser_data):\n pro = ser_data['product'].update(self._product_obj, data)\n for i in data['specific']:\n if 
'current' in i:\n if i['current'] != i['name']:\n ser_data['specific'].update(Product_Specification.\n objects.get(product=self._product_obj.id, name=i[\n 'current']), i)\n else:\n i['product'] = self._product_obj\n ser_data['specific'].create(i)\n if 'images' in data:\n img = data['images']\n for i in img['images']:\n ser_data['image'].update(Product_Image.objects.get(product=\n self._product_obj.id, image=img['current'][i]), img[\n 'images'][i])\n return {'product': pro, 'specific': Product_Specification.objects.\n values('name', 'value').filter(product=pro.id), 'img':\n Product_Image.objects.values('image').filter(product=pro.id)}\n\n def update_extra_data(self, data, ser_data):\n extra_d = {}\n if ser_data and 'color' in ser_data:\n if 'current' in data['color']:\n if data['color']['current'] != data['color']['color']:\n Product_Color.objects.filter(product=self._product_obj.id\n ).delete()\n for i in data['color']['color']:\n ser_data['color'].create({'product': self.\n _product_obj, 'color': i})\n else:\n for i in data['color']['color']:\n ser_data['color'].create({'product': self._product_obj,\n 'color': i})\n else:\n col = Product_Color.objects.filter(product=self._product_obj.id)\n if col.exists():\n col.delete()\n if ser_data and 'size' in ser_data:\n siz = data['size']['size'][0]\n typ = data['size']['size'][1]\n if 'current' in data['size']:\n cur_siz = data['size']['current'][0]\n cur_typ = data['size']['current'][1]\n if siz != cur_siz:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n elif typ != cur_typ:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n else:\n ser_data['size'].create({'product': self._product_obj,\n 'size': siz, 'type_size': typ})\n else:\n siz = Product_Size.objects.filter(product=self._product_obj.id)\n if siz.exists():\n siz.delete()\n if ser_data and 'platform' in ser_data:\n if 'platform_current' in data:\n if data['platform_current'] != data['platform']:\n extra_d['platform'] = ser_data['platform'].update(\n Product_Platform.objects.get(product=self.\n _product_obj.id), data['platform'])\n else:\n extra_d['platform'] = ser_data['platform'].create({\n 'product': self._product_obj, 'platform': data['platform']}\n )\n else:\n pl = Product_Platform.objects.filter(product=self._product_obj.id)\n if pl.exists():\n pl.delete()\n if ser_data and 'recom_use' in ser_data:\n if 'recom_use_current' in data:\n if data['recom_use_current'] != data['recom_use']:\n extra_d['recom_use'] = ser_data['recom_use'].update(\n Product_Recommended_Use.objects.get(product=self.\n _product_obj.id), data['recom_use'])\n else:\n extra_d['recom_use'] = ser_data['recom_use'].create({\n 'product': self._product_obj, 'recommended_use': data[\n 'recom_use']})\n else:\n recom = Product_Recommended_Use.objects.filter(product=self.\n _product_obj.id)\n if recom.exists():\n recom.delete()\n if ser_data and 'term_condition' in ser_data:\n if 'term_condition_current' in data:\n if data['term_condition_current'] != data['term_condition']:\n extra_d['term_condition'] = ser_data['term_condition'\n ].update(Product_Terms_Condition.objects.get(\n product=self._product_obj.id), data['term_condition'])\n else:\n extra_d['term_condition'] = ser_data['term_condition'].create({\n 'product': self._product_obj, 'terms_condition': data[\n 'term_condition']})\n else:\n terms = Product_Terms_Condition.objects.filter(product=self.\n _product_obj.id)\n if 
terms.exists():\n terms.delete()\n extra_d['color'] = Product_Color.objects.filter(product=self.\n _product_obj.id)\n extra_d['size'] = Product_Size.objects.filter(product=self.\n _product_obj.id)\n return extra_d\n\n def validate_main_data(self, data):\n pro_ser = UpdateSerializer(instance=self._product_obj, data=data)\n ser_data = {}\n if pro_ser.is_valid():\n ser_data['product'] = pro_ser\n sp = self.validate_specification(self._product_obj, data[\n 'specific'])\n if isinstance(sp, SpecificationSerializer):\n ser_data['specific'] = sp\n if 'images' in data:\n data['images'] = {'images': data['images'], 'current':\n data['img_current']}\n img = self.validate_image(self._product_obj, data['images']\n )\n if isinstance(img, ImageSerializer):\n ser_data['image'] = img\n return ser_data\n else:\n return {'errors': img}\n else:\n return ser_data\n else:\n return {'errors': sp}\n else:\n return {'errors': pro_ser.errors}\n\n def validate_extra_data(self, data):\n ser_data = {}\n if 'color' in data:\n col = self.validate_color(data['color']['color'])\n if isinstance(col, ColorSerializer):\n ser_data['color'] = col\n else:\n return {'errors': col}\n if 'size' in data:\n siz = self.validate_size(data['size']['size'])\n if isinstance(siz, SizeSerializer):\n ser_data['size'] = siz\n else:\n return {'errors': siz}\n if 'platform' in data:\n pl = PlatformSerializer(data={'platform': data['platform']})\n if pl.is_valid():\n ser_data['platform'] = pl\n else:\n return {'errors': pl.errors}\n if 'recom_use' in data:\n recom = RecommendedUseSerializer(data={'recommended_use': data[\n 'recom_use']})\n if recom.is_valid():\n ser_data['recom_use'] = recom\n else:\n return {'errors': recom.errors}\n if 'term_condition' in data:\n term = TermConditionSerializer(data={'terms_condition': data[\n 'term_condition']})\n if term.is_valid():\n ser_data['term_condition'] = term\n else:\n return {'errors': term.errors}\n if ser_data:\n return ser_data\n else:\n return False\n\n def validate_specification(self, pro, data):\n for i in data:\n sp = SpecificationSerializer(data={'name': i['name'], 'value':\n i['value']})\n if not sp.is_valid():\n return sp.errors\n return sp\n\n def validate_image(self, pro, data):\n for i in data['images']:\n img = ImageSerializer(data={'image': data['images'][i]})\n if not img.is_valid():\n return img.errors\n return img\n\n def validate_color(self, data):\n for i in data:\n col = ColorSerializer(data={'color': i})\n if not col.is_valid():\n return col.errors\n return col\n\n def validate_size(self, data):\n size = SizeSerializer(data={'size': data[0], 'type_size': data[1]})\n if not size.is_valid():\n return size.errors\n return size\n",
"step-4": "<mask token>\n\n\nclass UpdateProduct(GenericAPIView):\n serializer_class = UpdateSerializer\n _product_obj = None\n _brands = Brand.objects.values('id', 'name')\n\n def get(self, request, *args, **kwargs):\n data = self.get_queryset()\n extract_sp = self.extract_filter_data(Product_Specification.objects\n .values('name', 'value').filter(product=data.id))\n extract_img = self.extract_filter_data(Product_Image.objects.values\n ('image').filter(product=data.id))\n if data:\n return Response(self.get_data({'product': data, 'specific':\n extract_sp, 'img': extract_img}))\n else:\n return Response({'errors': False}, status=HTTP_404_NOT_FOUND)\n\n def extract_filter_data(self, data):\n arr = []\n for i in data:\n arr.append(i)\n return arr\n\n def get_extra_data(self, id):\n extra_data = {}\n pl = Product_Platform.objects.values('platform').filter(product=id)\n col = Product_Color.objects.values('color').filter(product=id)\n siz = Product_Size.objects.values('size', 'type_size').filter(product\n =id)\n recom = Product_Recommended_Use.objects.values('recommended_use'\n ).filter(product=id)\n terms = Product_Terms_Condition.objects.values('terms_condition'\n ).filter(product=id)\n if pl.exists():\n extra_data['platform'] = self.extract_filter_data(pl)\n if col.exists():\n extra_data['color'] = self.extract_filter_data(col)\n if siz.exists():\n extra_data['size'] = self.extract_filter_data(siz)\n if recom.exists():\n extra_data['recom_use'] = self.extract_filter_data(recom)\n if terms.exists():\n extra_data['term_condition'] = self.extract_filter_data(terms)\n if extra_data:\n return extra_data\n else:\n return False\n\n def get_queryset(self):\n try:\n return Product.objects.get(id=self.kwargs['pk'])\n except:\n return False\n\n def put(self, request, *args, **kwargs):\n self._product_obj = self.get_queryset()\n data = self.prepare_data(self.request.data, self.request.FILES)\n main = self.validate_main_data(data)\n if 'errors' in main:\n return Response(main['errors'], status=HTTP_400_BAD_REQUEST)\n else:\n extra = self.validate_extra_data(data)\n if extra:\n if 'errors' in extra:\n return Response(extra['errors'], status=\n HTTP_400_BAD_REQUEST)\n else:\n main = self.update_main_data(data, main)\n self.update_extra_data(data, extra)\n return Response(self.get_data(main))\n self.update_extra_data(data, False)\n main = self.update_main_data(data, main)\n return Response(self.get_data(main))\n\n def get_data(self, main):\n return {'user': User.objects.values('id', 'username').get(username=\n 'root'), 'name': main['product'].title, 'brand': main['product'\n ].brand.id, 'quantity': main['product'].quantity, 'price': main\n ['product'].price, 'currency': main['product'].currency,\n 'condition': main['product'].condition, 'description': main[\n 'product'].description, 'brands': self._brands, 'conditions':\n type_condition_choices, 'currencys': type_currency_choices,\n 'colors': color_choices, 'sizes': type_size_choices, 'specific':\n self.extract_filter_data(main['specific']), 'images': self.\n extract_filter_data(main['img']), 'extra_data': self.\n get_extra_data(main['product'].id)}\n\n def prepare_data(self, data, img_data=None):\n from json import loads\n data = data['data']\n data = loads(data)\n data['img_current'] = {i.split('_')[2]: data['img_current'][i] for\n i in data['img_current']}\n if len(img_data) > 0:\n img = {i.split('_')[1]: img_data[i] for i in img_data}\n data['images'] = img\n return data\n\n def update_main_data(self, data, ser_data):\n pro = 
ser_data['product'].update(self._product_obj, data)\n for i in data['specific']:\n if 'current' in i:\n if i['current'] != i['name']:\n ser_data['specific'].update(Product_Specification.\n objects.get(product=self._product_obj.id, name=i[\n 'current']), i)\n else:\n i['product'] = self._product_obj\n ser_data['specific'].create(i)\n if 'images' in data:\n img = data['images']\n for i in img['images']:\n ser_data['image'].update(Product_Image.objects.get(product=\n self._product_obj.id, image=img['current'][i]), img[\n 'images'][i])\n return {'product': pro, 'specific': Product_Specification.objects.\n values('name', 'value').filter(product=pro.id), 'img':\n Product_Image.objects.values('image').filter(product=pro.id)}\n\n def update_extra_data(self, data, ser_data):\n extra_d = {}\n if ser_data and 'color' in ser_data:\n if 'current' in data['color']:\n if data['color']['current'] != data['color']['color']:\n Product_Color.objects.filter(product=self._product_obj.id\n ).delete()\n for i in data['color']['color']:\n ser_data['color'].create({'product': self.\n _product_obj, 'color': i})\n else:\n for i in data['color']['color']:\n ser_data['color'].create({'product': self._product_obj,\n 'color': i})\n else:\n col = Product_Color.objects.filter(product=self._product_obj.id)\n if col.exists():\n col.delete()\n if ser_data and 'size' in ser_data:\n siz = data['size']['size'][0]\n typ = data['size']['size'][1]\n if 'current' in data['size']:\n cur_siz = data['size']['current'][0]\n cur_typ = data['size']['current'][1]\n if siz != cur_siz:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n elif typ != cur_typ:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n else:\n ser_data['size'].create({'product': self._product_obj,\n 'size': siz, 'type_size': typ})\n else:\n siz = Product_Size.objects.filter(product=self._product_obj.id)\n if siz.exists():\n siz.delete()\n if ser_data and 'platform' in ser_data:\n if 'platform_current' in data:\n if data['platform_current'] != data['platform']:\n extra_d['platform'] = ser_data['platform'].update(\n Product_Platform.objects.get(product=self.\n _product_obj.id), data['platform'])\n else:\n extra_d['platform'] = ser_data['platform'].create({\n 'product': self._product_obj, 'platform': data['platform']}\n )\n else:\n pl = Product_Platform.objects.filter(product=self._product_obj.id)\n if pl.exists():\n pl.delete()\n if ser_data and 'recom_use' in ser_data:\n if 'recom_use_current' in data:\n if data['recom_use_current'] != data['recom_use']:\n extra_d['recom_use'] = ser_data['recom_use'].update(\n Product_Recommended_Use.objects.get(product=self.\n _product_obj.id), data['recom_use'])\n else:\n extra_d['recom_use'] = ser_data['recom_use'].create({\n 'product': self._product_obj, 'recommended_use': data[\n 'recom_use']})\n else:\n recom = Product_Recommended_Use.objects.filter(product=self.\n _product_obj.id)\n if recom.exists():\n recom.delete()\n if ser_data and 'term_condition' in ser_data:\n if 'term_condition_current' in data:\n if data['term_condition_current'] != data['term_condition']:\n extra_d['term_condition'] = ser_data['term_condition'\n ].update(Product_Terms_Condition.objects.get(\n product=self._product_obj.id), data['term_condition'])\n else:\n extra_d['term_condition'] = ser_data['term_condition'].create({\n 'product': self._product_obj, 'terms_condition': data[\n 'term_condition']})\n else:\n terms = 
Product_Terms_Condition.objects.filter(product=self.\n _product_obj.id)\n if terms.exists():\n terms.delete()\n extra_d['color'] = Product_Color.objects.filter(product=self.\n _product_obj.id)\n extra_d['size'] = Product_Size.objects.filter(product=self.\n _product_obj.id)\n return extra_d\n\n def validate_main_data(self, data):\n pro_ser = UpdateSerializer(instance=self._product_obj, data=data)\n ser_data = {}\n if pro_ser.is_valid():\n ser_data['product'] = pro_ser\n sp = self.validate_specification(self._product_obj, data[\n 'specific'])\n if isinstance(sp, SpecificationSerializer):\n ser_data['specific'] = sp\n if 'images' in data:\n data['images'] = {'images': data['images'], 'current':\n data['img_current']}\n img = self.validate_image(self._product_obj, data['images']\n )\n if isinstance(img, ImageSerializer):\n ser_data['image'] = img\n return ser_data\n else:\n return {'errors': img}\n else:\n return ser_data\n else:\n return {'errors': sp}\n else:\n return {'errors': pro_ser.errors}\n\n def validate_extra_data(self, data):\n ser_data = {}\n if 'color' in data:\n col = self.validate_color(data['color']['color'])\n if isinstance(col, ColorSerializer):\n ser_data['color'] = col\n else:\n return {'errors': col}\n if 'size' in data:\n siz = self.validate_size(data['size']['size'])\n if isinstance(siz, SizeSerializer):\n ser_data['size'] = siz\n else:\n return {'errors': siz}\n if 'platform' in data:\n pl = PlatformSerializer(data={'platform': data['platform']})\n if pl.is_valid():\n ser_data['platform'] = pl\n else:\n return {'errors': pl.errors}\n if 'recom_use' in data:\n recom = RecommendedUseSerializer(data={'recommended_use': data[\n 'recom_use']})\n if recom.is_valid():\n ser_data['recom_use'] = recom\n else:\n return {'errors': recom.errors}\n if 'term_condition' in data:\n term = TermConditionSerializer(data={'terms_condition': data[\n 'term_condition']})\n if term.is_valid():\n ser_data['term_condition'] = term\n else:\n return {'errors': term.errors}\n if ser_data:\n return ser_data\n else:\n return False\n\n def validate_specification(self, pro, data):\n for i in data:\n sp = SpecificationSerializer(data={'name': i['name'], 'value':\n i['value']})\n if not sp.is_valid():\n return sp.errors\n return sp\n\n def validate_image(self, pro, data):\n for i in data['images']:\n img = ImageSerializer(data={'image': data['images'][i]})\n if not img.is_valid():\n return img.errors\n return img\n\n def validate_color(self, data):\n for i in data:\n col = ColorSerializer(data={'color': i})\n if not col.is_valid():\n return col.errors\n return col\n\n def validate_size(self, data):\n size = SizeSerializer(data={'size': data[0], 'type_size': data[1]})\n if not size.is_valid():\n return size.errors\n return size\n",
"step-5": "from rest_framework.generics import GenericAPIView\nfrom rest_framework.response import Response\nfrom rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND\nfrom ...models.brand import Brand\nfrom ...models.product import type_currency_choices, type_condition_choices, User, Product\nfrom ...models.product_color import color_choices, Product_Color\nfrom ...models.product_size import type_size_choices, Product_Size\nfrom ...models.product_image import Product_Image\nfrom ...models.product_specification import Product_Specification\nfrom ...models.product_platform import Product_Platform\nfrom ...models.product_recommended_use import Product_Recommended_Use\nfrom ...models.product_terms_condition import Product_Terms_Condition\nfrom ...serilaizers.products.updateSerializer import UpdateSerializer\nfrom ...serilaizers.products.specificationSerializer import SpecificationSerializer\nfrom ...serilaizers.products.imageSerializer import ImageSerializer\nfrom ...serilaizers.products.colorSerializer import ColorSerializer\nfrom ...serilaizers.products.platformSerializer import PlatformSerializer\nfrom ...serilaizers.products.recommendedUseSerializer import RecommendedUseSerializer\nfrom ...serilaizers.products.sizeSerializer import SizeSerializer\nfrom ...serilaizers.products.termConditionSerializer import TermConditionSerializer\n\n\nclass UpdateProduct(GenericAPIView):\n serializer_class = UpdateSerializer\n _product_obj = None\n _brands = Brand.objects.values(\"id\", \"name\")\n def get(self, request, *args, **kwargs):\n\n data = self.get_queryset()\n extract_sp = self.extract_filter_data(Product_Specification.objects.values(\n \"name\", \"value\").filter(product=data.id))\n extract_img = self.extract_filter_data(\n Product_Image.objects.values('image').filter(product=data.id))\n if data:\n return Response(self.get_data({\n \"product\": data,\n \"specific\": extract_sp,\n \"img\": extract_img\n }))\n else:\n return Response({\"errors\": False}, status=HTTP_404_NOT_FOUND)\n\n def extract_filter_data(self, data):\n arr = []\n for i in data:\n arr.append(i)\n return arr\n\n def get_extra_data(self, id):\n\n extra_data = {}\n pl = Product_Platform.objects.values(\n 'platform').filter(product=id)\n col = Product_Color.objects.values('color').filter(product=id)\n siz = Product_Size.objects.values(\n 'size', 'type_size').filter(product=id)\n recom = Product_Recommended_Use.objects.values(\n 'recommended_use').filter(product=id)\n terms = Product_Terms_Condition.objects.values(\n 'terms_condition').filter(product=id)\n\n if pl.exists():\n extra_data['platform'] = self.extract_filter_data(pl)\n if col.exists():\n extra_data['color'] = self.extract_filter_data(col)\n if siz.exists():\n extra_data['size'] = self.extract_filter_data(siz)\n if recom.exists():\n extra_data['recom_use'] = self.extract_filter_data(recom)\n if terms.exists():\n extra_data['term_condition'] = self.extract_filter_data(terms)\n\n if extra_data:\n return extra_data\n else:\n return False\n\n def get_queryset(self):\n try:\n return Product.objects.get(id=self.kwargs['pk'])\n except:\n return False\n\n def put(self, request, *args, **kwargs):\n\n self._product_obj = self.get_queryset()\n data = self.prepare_data(self.request.data, self.request.FILES)\n main = self.validate_main_data(data)\n if 'errors' in main:\n return Response(main['errors'], status=HTTP_400_BAD_REQUEST)\n else:\n extra = self.validate_extra_data(data)\n if extra:\n if 'errors' in extra:\n return Response(extra['errors'], 
status=HTTP_400_BAD_REQUEST)\n else:\n main = self.update_main_data(data, main)\n self.update_extra_data(data, extra)\n return Response(self.get_data(main))\n\n self.update_extra_data(data, False)\n main = self.update_main_data(data, main)\n return Response(self.get_data(main))\n\n def get_data(self, main):\n\n return {\n \"user\": User.objects.values('id', 'username').get(username=\"root\"),\n \"name\": main['product'].title,\n \"brand\": main['product'].brand.id,\n \"quantity\": main['product'].quantity,\n \"price\": main['product'].price,\n \"currency\": main['product'].currency,\n \"condition\": main['product'].condition,\n \"description\": main['product'].description,\n \"brands\": self._brands,\n \"conditions\": type_condition_choices,\n \"currencys\": type_currency_choices,\n \"colors\": color_choices,\n \"sizes\": type_size_choices,\n \"specific\": self.extract_filter_data(main['specific']),\n \"images\": self.extract_filter_data(main['img']),\n \"extra_data\": self.get_extra_data(main['product'].id)\n }\n\n def prepare_data(self, data, img_data=None):\n # prepared the data extract all data from request and loads using json\n # extract images from request files and\n # return data as a dict\n\n from json import loads\n\n data = data['data']\n data = loads(data)\n data['img_current'] = {\n i.split(\"_\")[2]: data['img_current'][i] for i in data['img_current']}\n if len(img_data) > 0:\n img = {i.split(\"_\")[1]: img_data[i] for i in img_data}\n data['images'] = img\n\n return data\n\n def update_main_data(self, data, ser_data):\n\n pro = ser_data['product'].update(self._product_obj, data)\n\n for i in data['specific']:\n if 'current' in i:\n if i['current'] != i['name']:\n ser_data['specific'].update(Product_Specification.objects.get(\n product=self._product_obj.id, name=i['current']), i)\n else:\n i['product'] = self._product_obj\n ser_data['specific'].create(i)\n\n if 'images' in data:\n img = data['images']\n for i in img['images']:\n ser_data['image'].update(\n Product_Image.objects.get(\n product=self._product_obj.id,\n image=img['current'][i]), img['images'][i])\n return {\n \"product\": pro,\n \"specific\": Product_Specification.objects.values('name', 'value').filter(product=pro.id),\n \"img\": Product_Image.objects.values('image').filter(product=pro.id)\n }\n\n def update_extra_data(self, data, ser_data):\n extra_d = {}\n\n if ser_data and ('color' in ser_data):\n\n if 'current' in data['color']:\n if data['color']['current'] != data['color']['color']:\n Product_Color.objects.filter(\n product=self._product_obj.id).delete()\n for i in data['color']['color']:\n ser_data['color'].create(\n {\"product\": self._product_obj, 'color': i})\n\n else:\n for i in data['color']['color']:\n ser_data['color'].create(\n {\"product\": self._product_obj, 'color': i})\n else:\n col = Product_Color.objects.filter(\n product=self._product_obj.id)\n if col.exists():\n col.delete()\n\n if ser_data and ('size' in ser_data):\n siz = data['size']['size'][0]\n typ = data['size']['size'][1]\n if 'current' in data['size']:\n cur_siz = data['size']['current'][0]\n cur_typ = data['size']['current'][1]\n if siz != cur_siz:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {\"size\": siz, \"type_size\": typ})\n elif typ != cur_typ:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {\"size\": siz, \"type_size\": typ})\n else:\n ser_data['size'].create(\n {\"product\": self._product_obj, \"size\": siz, \"type_size\": typ})\n else:\n siz = 
Product_Size.objects.filter(\n product=self._product_obj.id)\n if siz.exists():\n siz.delete()\n\n if ser_data and ('platform' in ser_data):\n if 'platform_current' in data:\n if data['platform_current'] != data['platform']:\n extra_d['platform'] = ser_data['platform'].update(Product_Platform.objects.get(\n product=self._product_obj.id), data['platform'])\n else:\n extra_d['platform'] = ser_data['platform'].create(\n {\"product\": self._product_obj, \"platform\": data['platform']})\n else:\n pl = Product_Platform.objects.filter(\n product=self._product_obj.id)\n if pl.exists():\n pl.delete()\n\n if ser_data and ('recom_use' in ser_data):\n\n if 'recom_use_current' in data:\n if data['recom_use_current'] != data['recom_use']:\n extra_d['recom_use'] = ser_data['recom_use'].update(Product_Recommended_Use.objects.get(\n product=self._product_obj.id), data['recom_use'])\n else:\n\n extra_d['recom_use'] = ser_data['recom_use'].create(\n {\"product\": self._product_obj, \"recommended_use\": data['recom_use']})\n else:\n recom = Product_Recommended_Use.objects.filter(\n product=self._product_obj.id)\n if recom.exists():\n recom.delete()\n\n if ser_data and ('term_condition' in ser_data):\n if 'term_condition_current' in data:\n if data['term_condition_current'] != data['term_condition']:\n extra_d['term_condition'] = ser_data['term_condition'].update(\n Product_Terms_Condition.objects.get(product=self._product_obj.id), data['term_condition'])\n else:\n extra_d['term_condition'] = ser_data['term_condition'].create(\n {\"product\": self._product_obj, \"terms_condition\": data['term_condition']})\n else:\n terms = Product_Terms_Condition.objects.filter(\n product=self._product_obj.id)\n if terms.exists():\n terms.delete()\n\n extra_d['color'] = Product_Color.objects.filter(\n product=self._product_obj.id)\n extra_d['size'] = Product_Size.objects.filter(\n product=self._product_obj.id)\n\n return extra_d\n\n def validate_main_data(self, data):\n\n pro_ser = UpdateSerializer(instance=self._product_obj, data=data)\n ser_data = {}\n if pro_ser.is_valid():\n ser_data['product'] = pro_ser\n sp = self.validate_specification(\n self._product_obj, data['specific'])\n if isinstance(sp, SpecificationSerializer):\n ser_data['specific'] = sp\n if 'images' in data:\n data['images'] = {\"images\": data['images'],\n 'current': data['img_current']}\n img = self.validate_image(\n self._product_obj, data['images'])\n if isinstance(img, ImageSerializer):\n ser_data['image'] = img\n return ser_data\n else:\n return{\"errors\": img}\n else:\n return ser_data\n else:\n return {\"errors\": sp} # return error\n else:\n return {\"errors\": pro_ser.errors}\n\n def validate_extra_data(self, data):\n\n ser_data = {}\n if 'color' in data:\n col = self.validate_color(data['color']['color'])\n if isinstance(col, ColorSerializer):\n ser_data['color'] = col\n else:\n return {\"errors\": col}\n if 'size' in data:\n siz = self.validate_size(data['size']['size'])\n if isinstance(siz, SizeSerializer):\n ser_data['size'] = siz\n else:\n return {\"errors\": siz}\n\n if 'platform' in data:\n pl = PlatformSerializer(data={\"platform\": data['platform']})\n if pl.is_valid():\n ser_data['platform'] = pl\n else:\n return {\"errors\": pl.errors}\n\n if 'recom_use' in data:\n recom = RecommendedUseSerializer(\n data={\"recommended_use\": data['recom_use']})\n if recom.is_valid():\n ser_data['recom_use'] = recom\n else:\n return {\"errors\": recom.errors}\n\n if 'term_condition' in data:\n term = TermConditionSerializer(\n 
data={\"terms_condition\": data['term_condition']})\n if term.is_valid():\n ser_data['term_condition'] = term\n else:\n return {\"errors\": term.errors}\n\n if ser_data:\n return ser_data\n else:\n return False\n\n def validate_specification(self, pro, data):\n\n for i in data:\n sp = SpecificationSerializer(\n data={\"name\": i['name'], \"value\": i['value']})\n if not sp.is_valid():\n return sp.errors\n return sp\n\n def validate_image(self, pro, data):\n\n for i in data['images']:\n img = ImageSerializer(data={\"image\": data['images'][i]})\n if not img.is_valid():\n return img.errors\n\n return img\n\n def validate_color(self, data):\n\n for i in data:\n col = ColorSerializer(data={\"color\": i})\n if not col.is_valid():\n return col.errors\n\n return col\n\n def validate_size(self, data):\n\n size = SizeSerializer(data={\"size\": data[0],\n \"type_size\": data[1]})\n if not size.is_valid():\n return size.errors\n\n return size\n",
"step-ids": [
11,
13,
16,
17,
19
]
}
|
[
11,
13,
16,
17,
19
] |
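The key rewriting that `prepare_data` performs on `img_current` in the Django example above is easy to misread; here is a minimal standalone sketch of the same dict comprehension, using hypothetical field names (the real keys come from the client form):

# Hypothetical payload shaped like the form field names the view expects;
# splitting on "_" and keeping index 2 reduces each key to its trailing index.
img_current = {"img_current_0": "front.jpg", "img_current_1": "side.jpg"}
rewritten = {k.split("_")[2]: img_current[k] for k in img_current}
print(rewritten)  # {'0': 'front.jpg', '1': 'side.jpg'}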
"""Functions for parsing various strings to RGB tuples."""
import json
import re
from pathlib import Path
import importlib.resources as resources
from pilutils.basic import hex_to_rgb
__all__ = [
"parse_hex6",
"parse_hex3",
"parse_rgbfunc_int",
"parse_rgbfunc_float",
"parse_rgbfunc_percent",
"parse_name_css",
"parse_name_crayola",
"parse_name_xkcd",
"parse_name_meodai_best",
"parse_name_meodai",
"parse",
]
_css_names = json.loads(resources.read_text("pilutils.colornames", "css.json"))
_crayola_names = json.loads(resources.read_text("pilutils.colornames", "crayola.json"))
_xkcd_names = json.loads(resources.read_text("pilutils.colornames", "xkcd.json"))
_meodai_best_names = json.loads(
resources.read_text("pilutils.colornames", "meodai-best.json")
)
_meodai_names = json.loads(resources.read_text("pilutils.colornames", "meodai.json"))
def parse_hex6(hex6):
"""Example: #ab34df"""
if m := re.match(r"^#?([0-9A-Fa-f]{6})$", hex6.strip()):
h = int(m.group(1), 16)
return hex_to_rgb(h)
raise ValueError(f"String {hex6!r} does not match hex6 format.")
def parse_hex3(hex3):
"""Example: #a3d"""
if m := re.match(r"^#?([0-9A-Fa-f]{3})$", hex3.strip()):
h3 = m.group(1)
return tuple(int(c * 2, 16) for c in h3)
raise ValueError(f"String {hex3!r} does not match hex3 format.")
def parse_rgbfunc_int(rgbfunc):
"""Example: rgb(171, 52, 223)"""
if m := re.match(
r"^rgb\(\s*(\d{1,3})\s*,\s*(\d{1,3})\s*,\s*(\d{1,3})\s*\)$", rgbfunc.strip()
):
t = tuple(map(int, m.groups()))
if not any(n > 255 for n in t):
return t
raise ValueError(f"String {rgbfunc!r} does not match rgbfunc_int format.")
def parse_rgbfunc_float(rgbfunc):
"""Example: rgb(0.67, 0.2, 0.87)"""
if m := re.match(
r"^rgb\(\s*([01]\.\d+)\s*,\s*([01]\.\d+)\s*,\s*([01]\.\d+)\s*\)$",
rgbfunc.strip(),
):
t = tuple(map(float, m.groups()))
if not any(n > 1 for n in t):
return tuple(int(round(n * 255)) for n in t)
raise ValueError(f"String {rgbfunc!r} does not match rgbfunc_float format.")
def parse_rgbfunc_percent(rgbfunc):
"""Example: rgb(67%, 20%, 87.5%)"""
if m := re.match(
r"^rgb\(\s*(\d{1,3}(?:\.\d+)?)%\s*,\s*(\d{1,3}(?:\.\d+)?)%\s*,\s*(\d{1,3}(?:\.\d+)?)%\s*\)$",
rgbfunc.strip(),
):
t = tuple(map(float, m.groups()))
if not any(n > 100 for n in t):
return tuple(int(round(n * 255 / 100)) for n in t)
raise ValueError(f"String {rgbfunc!r} does not match rgbfunc_percent format.")
def parse_name_css(name):
name = name.lower()
if name not in _css_names:
raise ValueError(f"Color {name!r} is not named in the CSS dataset.")
return parse_hex6(_css_names[name])
def parse_name_crayola(name):
name = name.lower()
if name not in _crayola_names:
raise ValueError(f"Color {name!r} is not named in the crayola dataset.")
return parse_hex6(_crayola_names[name])
def parse_name_xkcd(name):
name = name.lower()
if name not in _xkcd_names:
raise ValueError(f"Color {name!r} is not named in the xkcd dataset.")
return parse_hex6(_xkcd_names[name])
def parse_name_meodai_best(name):
name = name.lower()
if name not in _meodai_best_names:
raise ValueError(f"Color {name!r} is not named in the meodai-best dataset.")
return parse_hex6(_meodai_best_names[name])
def parse_name_meodai(name):
name = name.lower()
if name not in _meodai_names:
raise ValueError(f"Color {name!r} is not named in the meodai dataset.")
return parse_hex6(_meodai_names[name])
def parse(
colstr,
*,
hex6=True,
hex3=True,
rgbfunc_int=True,
rgbfunc_float=True,
rgbfunc_percent=True,
name_css=True,
name_crayola=True,
name_xkcd=True,
name_meodai_best=True,
name_meodai=True,
):
"""Combine all other parse functions into one "universal" function. Use kwargs to disable certain parsers."""
funcs = []
if hex6:
funcs.append(parse_hex6)
if hex3:
funcs.append(parse_hex3)
if rgbfunc_int:
funcs.append(parse_rgbfunc_int)
if rgbfunc_float:
funcs.append(parse_rgbfunc_float)
if rgbfunc_percent:
funcs.append(parse_rgbfunc_percent)
if name_css:
funcs.append(parse_name_css)
if name_crayola:
funcs.append(parse_name_crayola)
if name_xkcd:
funcs.append(parse_name_xkcd)
if name_meodai_best:
funcs.append(parse_name_meodai_best)
if name_meodai:
funcs.append(parse_name_meodai)
res = None
for func in funcs:
try:
res = func(colstr)
except ValueError:
pass
if res is None:
raise ValueError(f"Could not find a working parser for {colstr!r}.")
return res
|
normal
|
{
"blob_id": "978f3979aee1c4361483fd61b54352e7fff8d3b3",
"index": 697,
"step-1": "<mask token>\n\n\ndef parse_hex3(hex3):\n \"\"\"Example: #a3d\"\"\"\n if (m := re.match('^#?([0-9A-Fa-f]{3})$', hex3.strip())):\n h3 = m.group(1)\n return tuple(int(c * 2, 16) for c in h3)\n raise ValueError(f'String {hex3!r} does not match hex3 format.')\n\n\n<mask token>\n\n\ndef parse_rgbfunc_float(rgbfunc):\n \"\"\"Example: rgb(0.67, 0.2, 0.87)\"\"\"\n if (m := re.match(\n '^rgb\\\\(\\\\s*([01]\\\\.\\\\d+)\\\\s*,\\\\s*([01]\\\\.\\\\d+)\\\\s*,\\\\s*([01]\\\\.\\\\d+)\\\\s*\\\\)$'\n , rgbfunc.strip())):\n t = tuple(map(float, m.groups()))\n if not any(n > 1 for n in t):\n return tuple(int(round(n * 255)) for n in t)\n raise ValueError(f'String {rgbfunc!r} does not match rgbfunc_float format.'\n )\n\n\ndef parse_rgbfunc_percent(rgbfunc):\n \"\"\"Example: rgb(67%, 20%, 87.5%)\"\"\"\n if (m := re.match(\n '^rgb\\\\(\\\\s*(\\\\d{1,3}(?:\\\\.\\\\d+)?)%\\\\s*,\\\\s*(\\\\d{1,3}(?:\\\\.\\\\d+)?)%\\\\s*,\\\\s*(\\\\d{1,3}(?:\\\\.\\\\d+)?)%\\\\s*\\\\)$'\n , rgbfunc.strip())):\n t = tuple(map(float, m.groups()))\n if not any(n > 100 for n in t):\n return tuple(int(round(n * 255 / 100)) for n in t)\n raise ValueError(\n f'String {rgbfunc!r} does not match rgbfunc_percent format.')\n\n\n<mask token>\n\n\ndef parse_name_crayola(name):\n name = name.lower()\n if name not in _crayola_names:\n raise ValueError(f'Color {name!r} is not named in the crayola dataset.'\n )\n return parse_hex6(_crayola_names[name])\n\n\n<mask token>\n\n\ndef parse_name_meodai_best(name):\n name = name.lower()\n if name not in _meodai_best_names:\n raise ValueError(\n f'Color {name!r} is not named in the meodai-best dataset.')\n return parse_hex6(_meodai_best_names[name])\n\n\ndef parse_name_meodai(name):\n name = name.lower()\n if name not in _meodai_names:\n raise ValueError(f'Color {name!r} is not named in the meodai dataset.')\n return parse_hex6(_meodai_names[name])\n\n\ndef parse(colstr, *, hex6=True, hex3=True, rgbfunc_int=True, rgbfunc_float=\n True, rgbfunc_percent=True, name_css=True, name_crayola=True, name_xkcd\n =True, name_meodai_best=True, name_meodai=True):\n \"\"\"Combine all other parse functions into one \"universal\" function. Use kwargs to disable certain parsers.\"\"\"\n funcs = []\n if hex6:\n funcs.append(parse_hex6)\n if hex3:\n funcs.append(parse_hex3)\n if rgbfunc_int:\n funcs.append(parse_rgbfunc_int)\n if rgbfunc_float:\n funcs.append(parse_rgbfunc_float)\n if rgbfunc_percent:\n funcs.append(parse_rgbfunc_percent)\n if name_css:\n funcs.append(parse_name_css)\n if name_crayola:\n funcs.append(parse_name_crayola)\n if name_xkcd:\n funcs.append(parse_name_xkcd)\n if name_meodai_best:\n funcs.append(parse_name_meodai_best)\n if name_meodai:\n funcs.append(parse_name_meodai)\n res = None\n for func in funcs:\n try:\n res = func(colstr)\n except ValueError:\n pass\n if res is None:\n raise ValueError(f'Could not find a working parser for {colstr!r}.')\n return res\n",
"step-2": "<mask token>\n\n\ndef parse_hex6(hex6):\n \"\"\"Example: #ab34df\"\"\"\n if (m := re.match('^#?([0-9A-Fa-f]{6})$', hex6.strip())):\n h = int(m.group(1), 16)\n return hex_to_rgb(h)\n raise ValueError(f'String {hex6!r} does not match hex6 format.')\n\n\ndef parse_hex3(hex3):\n \"\"\"Example: #a3d\"\"\"\n if (m := re.match('^#?([0-9A-Fa-f]{3})$', hex3.strip())):\n h3 = m.group(1)\n return tuple(int(c * 2, 16) for c in h3)\n raise ValueError(f'String {hex3!r} does not match hex3 format.')\n\n\ndef parse_rgbfunc_int(rgbfunc):\n \"\"\"Example: rgb(171, 52, 223)\"\"\"\n if (m := re.match(\n '^rgb\\\\(\\\\s*(\\\\d{1,3})\\\\s*,\\\\s*(\\\\d{1,3})\\\\s*,\\\\s*(\\\\d{1,3})\\\\s*\\\\)$',\n rgbfunc.strip())):\n t = tuple(map(int, m.groups()))\n if not any(n > 255 for n in t):\n return t\n raise ValueError(f'String {rgbfunc!r} does not match rgbfunc_int format.')\n\n\ndef parse_rgbfunc_float(rgbfunc):\n \"\"\"Example: rgb(0.67, 0.2, 0.87)\"\"\"\n if (m := re.match(\n '^rgb\\\\(\\\\s*([01]\\\\.\\\\d+)\\\\s*,\\\\s*([01]\\\\.\\\\d+)\\\\s*,\\\\s*([01]\\\\.\\\\d+)\\\\s*\\\\)$'\n , rgbfunc.strip())):\n t = tuple(map(float, m.groups()))\n if not any(n > 1 for n in t):\n return tuple(int(round(n * 255)) for n in t)\n raise ValueError(f'String {rgbfunc!r} does not match rgbfunc_float format.'\n )\n\n\ndef parse_rgbfunc_percent(rgbfunc):\n \"\"\"Example: rgb(67%, 20%, 87.5%)\"\"\"\n if (m := re.match(\n '^rgb\\\\(\\\\s*(\\\\d{1,3}(?:\\\\.\\\\d+)?)%\\\\s*,\\\\s*(\\\\d{1,3}(?:\\\\.\\\\d+)?)%\\\\s*,\\\\s*(\\\\d{1,3}(?:\\\\.\\\\d+)?)%\\\\s*\\\\)$'\n , rgbfunc.strip())):\n t = tuple(map(float, m.groups()))\n if not any(n > 100 for n in t):\n return tuple(int(round(n * 255 / 100)) for n in t)\n raise ValueError(\n f'String {rgbfunc!r} does not match rgbfunc_percent format.')\n\n\ndef parse_name_css(name):\n name = name.lower()\n if name not in _css_names:\n raise ValueError(f'Color {name!r} is not named in the CSS dataset.')\n return parse_hex6(_css_names[name])\n\n\ndef parse_name_crayola(name):\n name = name.lower()\n if name not in _crayola_names:\n raise ValueError(f'Color {name!r} is not named in the crayola dataset.'\n )\n return parse_hex6(_crayola_names[name])\n\n\n<mask token>\n\n\ndef parse_name_meodai_best(name):\n name = name.lower()\n if name not in _meodai_best_names:\n raise ValueError(\n f'Color {name!r} is not named in the meodai-best dataset.')\n return parse_hex6(_meodai_best_names[name])\n\n\ndef parse_name_meodai(name):\n name = name.lower()\n if name not in _meodai_names:\n raise ValueError(f'Color {name!r} is not named in the meodai dataset.')\n return parse_hex6(_meodai_names[name])\n\n\ndef parse(colstr, *, hex6=True, hex3=True, rgbfunc_int=True, rgbfunc_float=\n True, rgbfunc_percent=True, name_css=True, name_crayola=True, name_xkcd\n =True, name_meodai_best=True, name_meodai=True):\n \"\"\"Combine all other parse functions into one \"universal\" function. 
Use kwargs to disable certain parsers.\"\"\"\n funcs = []\n if hex6:\n funcs.append(parse_hex6)\n if hex3:\n funcs.append(parse_hex3)\n if rgbfunc_int:\n funcs.append(parse_rgbfunc_int)\n if rgbfunc_float:\n funcs.append(parse_rgbfunc_float)\n if rgbfunc_percent:\n funcs.append(parse_rgbfunc_percent)\n if name_css:\n funcs.append(parse_name_css)\n if name_crayola:\n funcs.append(parse_name_crayola)\n if name_xkcd:\n funcs.append(parse_name_xkcd)\n if name_meodai_best:\n funcs.append(parse_name_meodai_best)\n if name_meodai:\n funcs.append(parse_name_meodai)\n res = None\n for func in funcs:\n try:\n res = func(colstr)\n except ValueError:\n pass\n if res is None:\n raise ValueError(f'Could not find a working parser for {colstr!r}.')\n return res\n",
"step-3": "<mask token>\n__all__ = ['parse_hex6', 'parse_hex3', 'parse_rgbfunc_int',\n 'parse_rgbfunc_float', 'parse_rgbfunc_percent', 'parse_name_css',\n 'parse_name_crayola', 'parse_name_xkcd', 'parse_name_meodai_best',\n 'parse_name_meodai', 'parse']\n_css_names = json.loads(resources.read_text('pilutils.colornames', 'css.json'))\n_crayola_names = json.loads(resources.read_text('pilutils.colornames',\n 'crayola.json'))\n_xkcd_names = json.loads(resources.read_text('pilutils.colornames',\n 'xkcd.json'))\n_meodai_best_names = json.loads(resources.read_text('pilutils.colornames',\n 'meodai-best.json'))\n_meodai_names = json.loads(resources.read_text('pilutils.colornames',\n 'meodai.json'))\n\n\ndef parse_hex6(hex6):\n \"\"\"Example: #ab34df\"\"\"\n if (m := re.match('^#?([0-9A-Fa-f]{6})$', hex6.strip())):\n h = int(m.group(1), 16)\n return hex_to_rgb(h)\n raise ValueError(f'String {hex6!r} does not match hex6 format.')\n\n\ndef parse_hex3(hex3):\n \"\"\"Example: #a3d\"\"\"\n if (m := re.match('^#?([0-9A-Fa-f]{3})$', hex3.strip())):\n h3 = m.group(1)\n return tuple(int(c * 2, 16) for c in h3)\n raise ValueError(f'String {hex3!r} does not match hex3 format.')\n\n\ndef parse_rgbfunc_int(rgbfunc):\n \"\"\"Example: rgb(171, 52, 223)\"\"\"\n if (m := re.match(\n '^rgb\\\\(\\\\s*(\\\\d{1,3})\\\\s*,\\\\s*(\\\\d{1,3})\\\\s*,\\\\s*(\\\\d{1,3})\\\\s*\\\\)$',\n rgbfunc.strip())):\n t = tuple(map(int, m.groups()))\n if not any(n > 255 for n in t):\n return t\n raise ValueError(f'String {rgbfunc!r} does not match rgbfunc_int format.')\n\n\ndef parse_rgbfunc_float(rgbfunc):\n \"\"\"Example: rgb(0.67, 0.2, 0.87)\"\"\"\n if (m := re.match(\n '^rgb\\\\(\\\\s*([01]\\\\.\\\\d+)\\\\s*,\\\\s*([01]\\\\.\\\\d+)\\\\s*,\\\\s*([01]\\\\.\\\\d+)\\\\s*\\\\)$'\n , rgbfunc.strip())):\n t = tuple(map(float, m.groups()))\n if not any(n > 1 for n in t):\n return tuple(int(round(n * 255)) for n in t)\n raise ValueError(f'String {rgbfunc!r} does not match rgbfunc_float format.'\n )\n\n\ndef parse_rgbfunc_percent(rgbfunc):\n \"\"\"Example: rgb(67%, 20%, 87.5%)\"\"\"\n if (m := re.match(\n '^rgb\\\\(\\\\s*(\\\\d{1,3}(?:\\\\.\\\\d+)?)%\\\\s*,\\\\s*(\\\\d{1,3}(?:\\\\.\\\\d+)?)%\\\\s*,\\\\s*(\\\\d{1,3}(?:\\\\.\\\\d+)?)%\\\\s*\\\\)$'\n , rgbfunc.strip())):\n t = tuple(map(float, m.groups()))\n if not any(n > 100 for n in t):\n return tuple(int(round(n * 255 / 100)) for n in t)\n raise ValueError(\n f'String {rgbfunc!r} does not match rgbfunc_percent format.')\n\n\ndef parse_name_css(name):\n name = name.lower()\n if name not in _css_names:\n raise ValueError(f'Color {name!r} is not named in the CSS dataset.')\n return parse_hex6(_css_names[name])\n\n\ndef parse_name_crayola(name):\n name = name.lower()\n if name not in _crayola_names:\n raise ValueError(f'Color {name!r} is not named in the crayola dataset.'\n )\n return parse_hex6(_crayola_names[name])\n\n\ndef parse_name_xkcd(name):\n name = name.lower()\n if name not in _xkcd_names:\n raise ValueError(f'Color {name!r} is not named in the xkcd dataset.')\n return parse_hex6(_xkcd_names[name])\n\n\ndef parse_name_meodai_best(name):\n name = name.lower()\n if name not in _meodai_best_names:\n raise ValueError(\n f'Color {name!r} is not named in the meodai-best dataset.')\n return parse_hex6(_meodai_best_names[name])\n\n\ndef parse_name_meodai(name):\n name = name.lower()\n if name not in _meodai_names:\n raise ValueError(f'Color {name!r} is not named in the meodai dataset.')\n return parse_hex6(_meodai_names[name])\n\n\ndef parse(colstr, *, hex6=True, hex3=True, rgbfunc_int=True, 
rgbfunc_float=\n True, rgbfunc_percent=True, name_css=True, name_crayola=True, name_xkcd\n =True, name_meodai_best=True, name_meodai=True):\n \"\"\"Combine all other parse functions into one \"universal\" function. Use kwargs to disable certain parsers.\"\"\"\n funcs = []\n if hex6:\n funcs.append(parse_hex6)\n if hex3:\n funcs.append(parse_hex3)\n if rgbfunc_int:\n funcs.append(parse_rgbfunc_int)\n if rgbfunc_float:\n funcs.append(parse_rgbfunc_float)\n if rgbfunc_percent:\n funcs.append(parse_rgbfunc_percent)\n if name_css:\n funcs.append(parse_name_css)\n if name_crayola:\n funcs.append(parse_name_crayola)\n if name_xkcd:\n funcs.append(parse_name_xkcd)\n if name_meodai_best:\n funcs.append(parse_name_meodai_best)\n if name_meodai:\n funcs.append(parse_name_meodai)\n res = None\n for func in funcs:\n try:\n res = func(colstr)\n except ValueError:\n pass\n if res is None:\n raise ValueError(f'Could not find a working parser for {colstr!r}.')\n return res\n",
"step-4": "<mask token>\nimport json\nimport re\nfrom pathlib import Path\nimport importlib.resources as resources\nfrom pilutils.basic import hex_to_rgb\n__all__ = ['parse_hex6', 'parse_hex3', 'parse_rgbfunc_int',\n 'parse_rgbfunc_float', 'parse_rgbfunc_percent', 'parse_name_css',\n 'parse_name_crayola', 'parse_name_xkcd', 'parse_name_meodai_best',\n 'parse_name_meodai', 'parse']\n_css_names = json.loads(resources.read_text('pilutils.colornames', 'css.json'))\n_crayola_names = json.loads(resources.read_text('pilutils.colornames',\n 'crayola.json'))\n_xkcd_names = json.loads(resources.read_text('pilutils.colornames',\n 'xkcd.json'))\n_meodai_best_names = json.loads(resources.read_text('pilutils.colornames',\n 'meodai-best.json'))\n_meodai_names = json.loads(resources.read_text('pilutils.colornames',\n 'meodai.json'))\n\n\ndef parse_hex6(hex6):\n \"\"\"Example: #ab34df\"\"\"\n if (m := re.match('^#?([0-9A-Fa-f]{6})$', hex6.strip())):\n h = int(m.group(1), 16)\n return hex_to_rgb(h)\n raise ValueError(f'String {hex6!r} does not match hex6 format.')\n\n\ndef parse_hex3(hex3):\n \"\"\"Example: #a3d\"\"\"\n if (m := re.match('^#?([0-9A-Fa-f]{3})$', hex3.strip())):\n h3 = m.group(1)\n return tuple(int(c * 2, 16) for c in h3)\n raise ValueError(f'String {hex3!r} does not match hex3 format.')\n\n\ndef parse_rgbfunc_int(rgbfunc):\n \"\"\"Example: rgb(171, 52, 223)\"\"\"\n if (m := re.match(\n '^rgb\\\\(\\\\s*(\\\\d{1,3})\\\\s*,\\\\s*(\\\\d{1,3})\\\\s*,\\\\s*(\\\\d{1,3})\\\\s*\\\\)$',\n rgbfunc.strip())):\n t = tuple(map(int, m.groups()))\n if not any(n > 255 for n in t):\n return t\n raise ValueError(f'String {rgbfunc!r} does not match rgbfunc_int format.')\n\n\ndef parse_rgbfunc_float(rgbfunc):\n \"\"\"Example: rgb(0.67, 0.2, 0.87)\"\"\"\n if (m := re.match(\n '^rgb\\\\(\\\\s*([01]\\\\.\\\\d+)\\\\s*,\\\\s*([01]\\\\.\\\\d+)\\\\s*,\\\\s*([01]\\\\.\\\\d+)\\\\s*\\\\)$'\n , rgbfunc.strip())):\n t = tuple(map(float, m.groups()))\n if not any(n > 1 for n in t):\n return tuple(int(round(n * 255)) for n in t)\n raise ValueError(f'String {rgbfunc!r} does not match rgbfunc_float format.'\n )\n\n\ndef parse_rgbfunc_percent(rgbfunc):\n \"\"\"Example: rgb(67%, 20%, 87.5%)\"\"\"\n if (m := re.match(\n '^rgb\\\\(\\\\s*(\\\\d{1,3}(?:\\\\.\\\\d+)?)%\\\\s*,\\\\s*(\\\\d{1,3}(?:\\\\.\\\\d+)?)%\\\\s*,\\\\s*(\\\\d{1,3}(?:\\\\.\\\\d+)?)%\\\\s*\\\\)$'\n , rgbfunc.strip())):\n t = tuple(map(float, m.groups()))\n if not any(n > 100 for n in t):\n return tuple(int(round(n * 255 / 100)) for n in t)\n raise ValueError(\n f'String {rgbfunc!r} does not match rgbfunc_percent format.')\n\n\ndef parse_name_css(name):\n name = name.lower()\n if name not in _css_names:\n raise ValueError(f'Color {name!r} is not named in the CSS dataset.')\n return parse_hex6(_css_names[name])\n\n\ndef parse_name_crayola(name):\n name = name.lower()\n if name not in _crayola_names:\n raise ValueError(f'Color {name!r} is not named in the crayola dataset.'\n )\n return parse_hex6(_crayola_names[name])\n\n\ndef parse_name_xkcd(name):\n name = name.lower()\n if name not in _xkcd_names:\n raise ValueError(f'Color {name!r} is not named in the xkcd dataset.')\n return parse_hex6(_xkcd_names[name])\n\n\ndef parse_name_meodai_best(name):\n name = name.lower()\n if name not in _meodai_best_names:\n raise ValueError(\n f'Color {name!r} is not named in the meodai-best dataset.')\n return parse_hex6(_meodai_best_names[name])\n\n\ndef parse_name_meodai(name):\n name = name.lower()\n if name not in _meodai_names:\n raise ValueError(f'Color {name!r} is not named in 
the meodai dataset.')\n return parse_hex6(_meodai_names[name])\n\n\ndef parse(colstr, *, hex6=True, hex3=True, rgbfunc_int=True, rgbfunc_float=\n True, rgbfunc_percent=True, name_css=True, name_crayola=True, name_xkcd\n =True, name_meodai_best=True, name_meodai=True):\n \"\"\"Combine all other parse functions into one \"universal\" function. Use kwargs to disable certain parsers.\"\"\"\n funcs = []\n if hex6:\n funcs.append(parse_hex6)\n if hex3:\n funcs.append(parse_hex3)\n if rgbfunc_int:\n funcs.append(parse_rgbfunc_int)\n if rgbfunc_float:\n funcs.append(parse_rgbfunc_float)\n if rgbfunc_percent:\n funcs.append(parse_rgbfunc_percent)\n if name_css:\n funcs.append(parse_name_css)\n if name_crayola:\n funcs.append(parse_name_crayola)\n if name_xkcd:\n funcs.append(parse_name_xkcd)\n if name_meodai_best:\n funcs.append(parse_name_meodai_best)\n if name_meodai:\n funcs.append(parse_name_meodai)\n res = None\n for func in funcs:\n try:\n res = func(colstr)\n except ValueError:\n pass\n if res is None:\n raise ValueError(f'Could not find a working parser for {colstr!r}.')\n return res\n",
"step-5": "\"\"\"Functions for parsing various strings to RGB tuples.\"\"\"\nimport json\nimport re\nfrom pathlib import Path\nimport importlib.resources as resources\n\nfrom pilutils.basic import hex_to_rgb\n\n__all__ = [\n \"parse_hex6\",\n \"parse_hex3\",\n \"parse_rgbfunc_int\",\n \"parse_rgbfunc_float\",\n \"parse_rgbfunc_percent\",\n \"parse_name_css\",\n \"parse_name_crayola\",\n \"parse_name_xkcd\",\n \"parse_name_meodai_best\",\n \"parse_name_meodai\",\n \"parse\",\n]\n\n_css_names = json.loads(resources.read_text(\"pilutils.colornames\", \"css.json\"))\n_crayola_names = json.loads(resources.read_text(\"pilutils.colornames\", \"crayola.json\"))\n_xkcd_names = json.loads(resources.read_text(\"pilutils.colornames\", \"xkcd.json\"))\n_meodai_best_names = json.loads(\n resources.read_text(\"pilutils.colornames\", \"meodai-best.json\")\n)\n_meodai_names = json.loads(resources.read_text(\"pilutils.colornames\", \"meodai.json\"))\n\n\ndef parse_hex6(hex6):\n \"\"\"Example: #ab34df\"\"\"\n if m := re.match(r\"^#?([0-9A-Fa-f]{6})$\", hex6.strip()):\n h = int(m.group(1), 16)\n return hex_to_rgb(h)\n raise ValueError(f\"String {hex6!r} does not match hex6 format.\")\n\n\ndef parse_hex3(hex3):\n \"\"\"Example: #a3d\"\"\"\n if m := re.match(r\"^#?([0-9A-Fa-f]{3})$\", hex3.strip()):\n h3 = m.group(1)\n return tuple(int(c * 2, 16) for c in h3)\n raise ValueError(f\"String {hex3!r} does not match hex3 format.\")\n\n\ndef parse_rgbfunc_int(rgbfunc):\n \"\"\"Example: rgb(171, 52, 223)\"\"\"\n if m := re.match(\n r\"^rgb\\(\\s*(\\d{1,3})\\s*,\\s*(\\d{1,3})\\s*,\\s*(\\d{1,3})\\s*\\)$\", rgbfunc.strip()\n ):\n t = tuple(map(int, m.groups()))\n if not any(n > 255 for n in t):\n return t\n raise ValueError(f\"String {rgbfunc!r} does not match rgbfunc_int format.\")\n\n\ndef parse_rgbfunc_float(rgbfunc):\n \"\"\"Example: rgb(0.67, 0.2, 0.87)\"\"\"\n if m := re.match(\n r\"^rgb\\(\\s*([01]\\.\\d+)\\s*,\\s*([01]\\.\\d+)\\s*,\\s*([01]\\.\\d+)\\s*\\)$\",\n rgbfunc.strip(),\n ):\n t = tuple(map(float, m.groups()))\n if not any(n > 1 for n in t):\n return tuple(int(round(n * 255)) for n in t)\n raise ValueError(f\"String {rgbfunc!r} does not match rgbfunc_float format.\")\n\n\ndef parse_rgbfunc_percent(rgbfunc):\n \"\"\"Example: rgb(67%, 20%, 87.5%)\"\"\"\n if m := re.match(\n r\"^rgb\\(\\s*(\\d{1,3}(?:\\.\\d+)?)%\\s*,\\s*(\\d{1,3}(?:\\.\\d+)?)%\\s*,\\s*(\\d{1,3}(?:\\.\\d+)?)%\\s*\\)$\",\n rgbfunc.strip(),\n ):\n t = tuple(map(float, m.groups()))\n if not any(n > 100 for n in t):\n return tuple(int(round(n * 255 / 100)) for n in t)\n raise ValueError(f\"String {rgbfunc!r} does not match rgbfunc_percent format.\")\n\n\ndef parse_name_css(name):\n name = name.lower()\n if name not in _css_names:\n raise ValueError(f\"Color {name!r} is not named in the CSS dataset.\")\n return parse_hex6(_css_names[name])\n\n\ndef parse_name_crayola(name):\n name = name.lower()\n if name not in _crayola_names:\n raise ValueError(f\"Color {name!r} is not named in the crayola dataset.\")\n return parse_hex6(_crayola_names[name])\n\n\ndef parse_name_xkcd(name):\n name = name.lower()\n if name not in _xkcd_names:\n raise ValueError(f\"Color {name!r} is not named in the xkcd dataset.\")\n return parse_hex6(_xkcd_names[name])\n\n\ndef parse_name_meodai_best(name):\n name = name.lower()\n if name not in _meodai_best_names:\n raise ValueError(f\"Color {name!r} is not named in the meodai-best dataset.\")\n return parse_hex6(_meodai_best_names[name])\n\n\ndef parse_name_meodai(name):\n name = name.lower()\n if name not in _meodai_names:\n 
raise ValueError(f\"Color {name!r} is not named in the meodai dataset.\")\n return parse_hex6(_meodai_names[name])\n\n\ndef parse(\n colstr,\n *,\n hex6=True,\n hex3=True,\n rgbfunc_int=True,\n rgbfunc_float=True,\n rgbfunc_percent=True,\n name_css=True,\n name_crayola=True,\n name_xkcd=True,\n name_meodai_best=True,\n name_meodai=True,\n):\n \"\"\"Combine all other parse functions into one \"universal\" function. Use kwargs to disable certain parsers.\"\"\"\n funcs = []\n if hex6:\n funcs.append(parse_hex6)\n if hex3:\n funcs.append(parse_hex3)\n if rgbfunc_int:\n funcs.append(parse_rgbfunc_int)\n if rgbfunc_float:\n funcs.append(parse_rgbfunc_float)\n if rgbfunc_percent:\n funcs.append(parse_rgbfunc_percent)\n if name_css:\n funcs.append(parse_name_css)\n if name_crayola:\n funcs.append(parse_name_crayola)\n if name_xkcd:\n funcs.append(parse_name_xkcd)\n if name_meodai_best:\n funcs.append(parse_name_meodai_best)\n if name_meodai:\n funcs.append(parse_name_meodai)\n\n res = None\n for func in funcs:\n try:\n res = func(colstr)\n except ValueError:\n pass\n if res is None:\n raise ValueError(f\"Could not find a working parser for {colstr!r}.\")\n return res\n",
"step-ids": [
7,
10,
12,
13,
14
]
}
|
[
7,
10,
12,
13,
14
] |
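A short usage sketch of the universal `parse` function above. The import path is an assumption (the module's real location inside the pilutils package is not shown), and the package's bundled color-name JSON files must be installed:

from pilutils.parse import parse  # hypothetical module path; adjust to where the file lives

print(parse("#ab34df"))               # (171, 52, 223)
print(parse("rgb(0.67, 0.2, 0.87)"))  # float form, scaled back to 0-255
print(parse("red"))                   # resolved via one of the name datasets
try:
    parse("not-a-color")
except ValueError as e:
    print(e)

Note that the loop in `parse` tries every enabled parser and keeps the last successful result rather than returning early, so an input that matches several name datasets resolves to whichever parser runs last.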
import redis
r = redis.StrictRedis()
r.set("counter", 40)
print(r.get("counter"))
print(r.incr("counter"))
print(r.incr("counter"))
print(r.get("counter"))
|
normal
|
{
"blob_id": "b38c9357030b2eac8298743cfb4d6c4d58c99ed4",
"index": 7463,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nr.set('counter', 40)\nprint(r.get('counter'))\nprint(r.incr('counter'))\nprint(r.incr('counter'))\nprint(r.get('counter'))\n",
"step-3": "<mask token>\nr = redis.StrictRedis()\nr.set('counter', 40)\nprint(r.get('counter'))\nprint(r.incr('counter'))\nprint(r.incr('counter'))\nprint(r.get('counter'))\n",
"step-4": "import redis\nr = redis.StrictRedis()\nr.set('counter', 40)\nprint(r.get('counter'))\nprint(r.incr('counter'))\nprint(r.incr('counter'))\nprint(r.get('counter'))\n",
"step-5": "import redis\nr = redis.StrictRedis()\n\nr.set(\"counter\", 40) \nprint(r.get(\"counter\"))\nprint(r.incr(\"counter\"))\nprint(r.incr(\"counter\"))\nprint(r.get(\"counter\"))\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
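Worth noting about the snippet above: with default settings redis-py returns bytes from `get()` while `incr()` returns an int. A small sketch of the `decode_responses` option that yields str instead (assumes a Redis server on localhost):

import redis

r = redis.StrictRedis(decode_responses=True)  # str values instead of raw bytes
r.set("counter", 40)
print(type(r.get("counter")))   # <class 'str'> (bytes without decode_responses)
print(type(r.incr("counter")))  # <class 'int'> -- incr always returns an int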
from setuptools import setup
setup(
name="CoreMLModules",
version="0.1.0",
url="https://github.com/AfricasVoices/CoreMLModules",
packages=["core_ml_modules"],
setup_requires=["pytest-runner"],
install_requires=["numpy", "scikit-learn", "nltk"],
tests_require=["pytest<=3.6.4"]
)
|
normal
|
{
"blob_id": "24cd3a1a05a1cfa638b8264fd89b36ee63b29f89",
"index": 1625,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetup(name='CoreMLModules', version='0.1.0', url=\n 'https://github.com/AfricasVoices/CoreMLModules', packages=[\n 'core_ml_modules'], setup_requires=['pytest-runner'], install_requires=\n ['numpy', 'scikit-learn', 'nltk'], tests_require=['pytest<=3.6.4'])\n",
"step-3": "from setuptools import setup\nsetup(name='CoreMLModules', version='0.1.0', url=\n 'https://github.com/AfricasVoices/CoreMLModules', packages=[\n 'core_ml_modules'], setup_requires=['pytest-runner'], install_requires=\n ['numpy', 'scikit-learn', 'nltk'], tests_require=['pytest<=3.6.4'])\n",
"step-4": "from setuptools import setup\n\nsetup(\n name=\"CoreMLModules\",\n version=\"0.1.0\",\n url=\"https://github.com/AfricasVoices/CoreMLModules\",\n packages=[\"core_ml_modules\"],\n setup_requires=[\"pytest-runner\"],\n install_requires=[\"numpy\", \"scikit-learn\", \"nltk\"],\n tests_require=[\"pytest<=3.6.4\"]\n)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
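Because the setup script above declares pytest-runner in `setup_requires`, setuptools gains a test command that dispatches to pytest. A tiny sketch of the equivalent direct invocation from Python, assuming pytest is installed and tests are discoverable from the current directory:

import pytest

# pytest.main returns the exit code pytest-runner would propagate.
raise SystemExit(pytest.main(["-q"]))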
CARD_SIZE = (70, 90)
SPACING = 3
|
normal
|
{
"blob_id": "b8ebbef7403a71d6165a5462bc08e2634b4cebc5",
"index": 4287,
"step-1": "<mask token>\n",
"step-2": "CARD_SIZE = 70, 90\nSPACING = 3\n",
"step-3": "CARD_SIZE = (70, 90)\nSPACING = 3",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# 1.- Create a plot showing the standard deviation of the data for each day across all patients
# 2.- Create a plot showing the maximum, mean, and minimum inflammation for each day
import numpy as np
data = np.loadtxt(fname='inflammation-01.csv', delimiter=',')
import matplotlib.pyplot as plt
plt.plot(data.std(axis=0)) # Standard deviation per day
plt.show()
plt.plot(data.max(axis=0)) # Maximum, mean, and minimum inflammation for each day
plt.plot(data.mean(axis=0))
plt.plot(data.min(axis=0))
|
normal
|
{
"blob_id": "52064b518ad067c9906e7de8542d9a399076a0b5",
"index": 4214,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nplt.plot(data.std(axis=0))\nplt.show()\nplt.plot(data.max(axis=0))\nplt.plot(data.mean(axis=0))\nplt.plot(data.min(axis=0))\n",
"step-3": "<mask token>\ndata = np.loadtxt(fname='inflammation-01.csv', delimiter=',')\n<mask token>\nplt.plot(data.std(axis=0))\nplt.show()\nplt.plot(data.max(axis=0))\nplt.plot(data.mean(axis=0))\nplt.plot(data.min(axis=0))\n",
"step-4": "import numpy as np\ndata = np.loadtxt(fname='inflammation-01.csv', delimiter=',')\nimport matplotlib.pyplot as plt\nplt.plot(data.std(axis=0))\nplt.show()\nplt.plot(data.max(axis=0))\nplt.plot(data.mean(axis=0))\nplt.plot(data.min(axis=0))\n",
"step-5": "# 1.- Crear una grafica que muestre la desviacion tipica de los datos cada dia para todos los pacientes\r\n# 2.- Crear una grafica que muestre a la vez la inflamacion maxima, media y minima para cada dia\r\n\r\nimport numpy as np\r\ndata = np.loadtxt(fname='inflammation-01.csv', delimiter=',')\r\n\r\nimport matplotlib.pyplot as plt\r\n\r\nplt.plot(data.std(axis=0)) # Desviacion tipica por dia\r\nplt.show()\r\n\r\nplt.plot(data.max(axis=0)) # Inflamacion maxima, media y minima para cada dia\r\nplt.plot(data.mean(axis=0))\r\nplt.plot(data.min(axis=0))\r\n\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
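A slightly fuller variant of the second plot above, labeling the three statistics so a legend can distinguish them (same inflammation-01.csv assumption as the original):

import numpy as np
import matplotlib.pyplot as plt

data = np.loadtxt(fname='inflammation-01.csv', delimiter=',')
for stat, label in ((data.max, 'max'), (data.mean, 'mean'), (data.min, 'min')):
    plt.plot(stat(axis=0), label=label)  # one line per daily statistic
plt.xlabel('day')
plt.ylabel('inflammation')
plt.legend()
plt.show()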
../../2.0.2/mpl_examples/axes_grid/simple_axesgrid2.py
|
normal
|
{
"blob_id": "73d1129418711c35046a99c1972a413357079836",
"index": 3022,
"step-1": "../../2.0.2/mpl_examples/axes_grid/simple_axesgrid2.py",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
"""API - Files endpoints."""
import os
import click
import cloudsmith_api
import requests
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor
from .. import ratelimits
from ..rest import create_requests_session
from ..utils import calculate_file_md5
from .exceptions import ApiException, catch_raise_api_exception
from .init import get_api_client
def get_files_api():
"""Get the files API client."""
return get_api_client(cloudsmith_api.FilesApi)
def validate_request_file_upload(owner, repo, filepath, md5_checksum=None):
"""Validate parameters for requesting a file upload."""
client = get_files_api()
md5_checksum = md5_checksum or calculate_file_md5(filepath)
with catch_raise_api_exception():
_, _, headers = client.files_validate_with_http_info(
owner=owner,
repo=repo,
data={"filename": os.path.basename(filepath), "md5_checksum": md5_checksum},
)
ratelimits.maybe_rate_limit(client, headers)
return md5_checksum
def request_file_upload(owner, repo, filepath, md5_checksum=None):
"""Request a new package file upload (for creating packages)."""
client = get_files_api()
md5_checksum = md5_checksum or calculate_file_md5(filepath)
with catch_raise_api_exception():
data, _, headers = client.files_create_with_http_info(
owner=owner,
repo=repo,
data={"filename": os.path.basename(filepath), "md5_checksum": md5_checksum},
)
# pylint: disable=no-member
# Pylint detects the returned value as a tuple
ratelimits.maybe_rate_limit(client, headers)
return data.identifier, data.upload_url, data.upload_fields
def upload_file(upload_url, upload_fields, filepath, callback=None):
"""Upload a pre-signed file to Cloudsmith."""
upload_fields = list(upload_fields.items())
upload_fields.append(
("file", (os.path.basename(filepath), click.open_file(filepath, "rb")))
)
encoder = MultipartEncoder(upload_fields)
monitor = MultipartEncoderMonitor(encoder, callback=callback)
config = cloudsmith_api.Configuration()
if config.proxy:
proxies = {"http": config.proxy, "https": config.proxy}
else:
proxies = None
headers = {"content-type": monitor.content_type}
client = get_files_api()
headers["user-agent"] = client.api_client.user_agent
session = create_requests_session()
resp = session.post(upload_url, data=monitor, headers=headers, proxies=proxies)
try:
resp.raise_for_status()
except requests.RequestException as exc:
raise ApiException(
resp.status_code, headers=exc.response.headers, body=exc.response.content
)
|
normal
|
{
"blob_id": "ee03263d92372899ec1feaf3a8ea48677b053676",
"index": 6281,
"step-1": "<mask token>\n\n\ndef get_files_api():\n \"\"\"Get the files API client.\"\"\"\n return get_api_client(cloudsmith_api.FilesApi)\n\n\ndef validate_request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Validate parameters for requesting a file upload.\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n with catch_raise_api_exception():\n _, _, headers = client.files_validate_with_http_info(owner=owner,\n repo=repo, data={'filename': os.path.basename(filepath),\n 'md5_checksum': md5_checksum})\n ratelimits.maybe_rate_limit(client, headers)\n return md5_checksum\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_files_api():\n \"\"\"Get the files API client.\"\"\"\n return get_api_client(cloudsmith_api.FilesApi)\n\n\ndef validate_request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Validate parameters for requesting a file upload.\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n with catch_raise_api_exception():\n _, _, headers = client.files_validate_with_http_info(owner=owner,\n repo=repo, data={'filename': os.path.basename(filepath),\n 'md5_checksum': md5_checksum})\n ratelimits.maybe_rate_limit(client, headers)\n return md5_checksum\n\n\n<mask token>\n\n\ndef upload_file(upload_url, upload_fields, filepath, callback=None):\n \"\"\"Upload a pre-signed file to Cloudsmith.\"\"\"\n upload_fields = list(upload_fields.items())\n upload_fields.append(('file', (os.path.basename(filepath), click.\n open_file(filepath, 'rb'))))\n encoder = MultipartEncoder(upload_fields)\n monitor = MultipartEncoderMonitor(encoder, callback=callback)\n config = cloudsmith_api.Configuration()\n if config.proxy:\n proxies = {'http': config.proxy, 'https': config.proxy}\n else:\n proxies = None\n headers = {'content-type': monitor.content_type}\n client = get_files_api()\n headers['user-agent'] = client.api_client.user_agent\n session = create_requests_session()\n resp = session.post(upload_url, data=monitor, headers=headers, proxies=\n proxies)\n try:\n resp.raise_for_status()\n except requests.RequestException as exc:\n raise ApiException(resp.status_code, headers=exc.response.headers,\n body=exc.response.content)\n",
"step-3": "<mask token>\n\n\ndef get_files_api():\n \"\"\"Get the files API client.\"\"\"\n return get_api_client(cloudsmith_api.FilesApi)\n\n\ndef validate_request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Validate parameters for requesting a file upload.\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n with catch_raise_api_exception():\n _, _, headers = client.files_validate_with_http_info(owner=owner,\n repo=repo, data={'filename': os.path.basename(filepath),\n 'md5_checksum': md5_checksum})\n ratelimits.maybe_rate_limit(client, headers)\n return md5_checksum\n\n\ndef request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Request a new package file upload (for creating packages).\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n with catch_raise_api_exception():\n data, _, headers = client.files_create_with_http_info(owner=owner,\n repo=repo, data={'filename': os.path.basename(filepath),\n 'md5_checksum': md5_checksum})\n ratelimits.maybe_rate_limit(client, headers)\n return data.identifier, data.upload_url, data.upload_fields\n\n\ndef upload_file(upload_url, upload_fields, filepath, callback=None):\n \"\"\"Upload a pre-signed file to Cloudsmith.\"\"\"\n upload_fields = list(upload_fields.items())\n upload_fields.append(('file', (os.path.basename(filepath), click.\n open_file(filepath, 'rb'))))\n encoder = MultipartEncoder(upload_fields)\n monitor = MultipartEncoderMonitor(encoder, callback=callback)\n config = cloudsmith_api.Configuration()\n if config.proxy:\n proxies = {'http': config.proxy, 'https': config.proxy}\n else:\n proxies = None\n headers = {'content-type': monitor.content_type}\n client = get_files_api()\n headers['user-agent'] = client.api_client.user_agent\n session = create_requests_session()\n resp = session.post(upload_url, data=monitor, headers=headers, proxies=\n proxies)\n try:\n resp.raise_for_status()\n except requests.RequestException as exc:\n raise ApiException(resp.status_code, headers=exc.response.headers,\n body=exc.response.content)\n",
"step-4": "<mask token>\nimport os\nimport click\nimport cloudsmith_api\nimport requests\nfrom requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor\nfrom .. import ratelimits\nfrom ..rest import create_requests_session\nfrom ..utils import calculate_file_md5\nfrom .exceptions import ApiException, catch_raise_api_exception\nfrom .init import get_api_client\n\n\ndef get_files_api():\n \"\"\"Get the files API client.\"\"\"\n return get_api_client(cloudsmith_api.FilesApi)\n\n\ndef validate_request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Validate parameters for requesting a file upload.\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n with catch_raise_api_exception():\n _, _, headers = client.files_validate_with_http_info(owner=owner,\n repo=repo, data={'filename': os.path.basename(filepath),\n 'md5_checksum': md5_checksum})\n ratelimits.maybe_rate_limit(client, headers)\n return md5_checksum\n\n\ndef request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Request a new package file upload (for creating packages).\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n with catch_raise_api_exception():\n data, _, headers = client.files_create_with_http_info(owner=owner,\n repo=repo, data={'filename': os.path.basename(filepath),\n 'md5_checksum': md5_checksum})\n ratelimits.maybe_rate_limit(client, headers)\n return data.identifier, data.upload_url, data.upload_fields\n\n\ndef upload_file(upload_url, upload_fields, filepath, callback=None):\n \"\"\"Upload a pre-signed file to Cloudsmith.\"\"\"\n upload_fields = list(upload_fields.items())\n upload_fields.append(('file', (os.path.basename(filepath), click.\n open_file(filepath, 'rb'))))\n encoder = MultipartEncoder(upload_fields)\n monitor = MultipartEncoderMonitor(encoder, callback=callback)\n config = cloudsmith_api.Configuration()\n if config.proxy:\n proxies = {'http': config.proxy, 'https': config.proxy}\n else:\n proxies = None\n headers = {'content-type': monitor.content_type}\n client = get_files_api()\n headers['user-agent'] = client.api_client.user_agent\n session = create_requests_session()\n resp = session.post(upload_url, data=monitor, headers=headers, proxies=\n proxies)\n try:\n resp.raise_for_status()\n except requests.RequestException as exc:\n raise ApiException(resp.status_code, headers=exc.response.headers,\n body=exc.response.content)\n",
"step-5": "\"\"\"API - Files endpoints.\"\"\"\n\nimport os\n\nimport click\nimport cloudsmith_api\nimport requests\nfrom requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor\n\nfrom .. import ratelimits\nfrom ..rest import create_requests_session\nfrom ..utils import calculate_file_md5\nfrom .exceptions import ApiException, catch_raise_api_exception\nfrom .init import get_api_client\n\n\ndef get_files_api():\n \"\"\"Get the files API client.\"\"\"\n return get_api_client(cloudsmith_api.FilesApi)\n\n\ndef validate_request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Validate parameters for requesting a file upload.\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n\n with catch_raise_api_exception():\n _, _, headers = client.files_validate_with_http_info(\n owner=owner,\n repo=repo,\n data={\"filename\": os.path.basename(filepath), \"md5_checksum\": md5_checksum},\n )\n\n ratelimits.maybe_rate_limit(client, headers)\n return md5_checksum\n\n\ndef request_file_upload(owner, repo, filepath, md5_checksum=None):\n \"\"\"Request a new package file upload (for creating packages).\"\"\"\n client = get_files_api()\n md5_checksum = md5_checksum or calculate_file_md5(filepath)\n\n with catch_raise_api_exception():\n data, _, headers = client.files_create_with_http_info(\n owner=owner,\n repo=repo,\n data={\"filename\": os.path.basename(filepath), \"md5_checksum\": md5_checksum},\n )\n\n # pylint: disable=no-member\n # Pylint detects the returned value as a tuple\n ratelimits.maybe_rate_limit(client, headers)\n return data.identifier, data.upload_url, data.upload_fields\n\n\ndef upload_file(upload_url, upload_fields, filepath, callback=None):\n \"\"\"Upload a pre-signed file to Cloudsmith.\"\"\"\n upload_fields = list(upload_fields.items())\n upload_fields.append(\n (\"file\", (os.path.basename(filepath), click.open_file(filepath, \"rb\")))\n )\n encoder = MultipartEncoder(upload_fields)\n monitor = MultipartEncoderMonitor(encoder, callback=callback)\n\n config = cloudsmith_api.Configuration()\n if config.proxy:\n proxies = {\"http\": config.proxy, \"https\": config.proxy}\n else:\n proxies = None\n\n headers = {\"content-type\": monitor.content_type}\n\n client = get_files_api()\n headers[\"user-agent\"] = client.api_client.user_agent\n\n session = create_requests_session()\n resp = session.post(upload_url, data=monitor, headers=headers, proxies=proxies)\n\n try:\n resp.raise_for_status()\n except requests.RequestException as exc:\n raise ApiException(\n resp.status_code, headers=exc.response.headers, body=exc.response.content\n )\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
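How the three helpers above compose in practice, as a hedged sketch: the owner, repo, and file name are hypothetical, and the callback signature follows what MultipartEncoderMonitor passes as the body streams out:

def progress(monitor):
    # monitor.bytes_read grows as the multipart body is streamed
    print("uploaded %d bytes" % monitor.bytes_read)

md5 = validate_request_file_upload("acme", "test-repo", "pkg-1.0.tar.gz")
identifier, url, fields = request_file_upload(
    "acme", "test-repo", "pkg-1.0.tar.gz", md5_checksum=md5
)
upload_file(url, fields, "pkg-1.0.tar.gz", callback=progress)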
import torch
import torchvision
from torch import nn
def get_resnet18(pre_imgnet=False, num_classes=64):
model = torchvision.models.resnet18(pretrained=pre_imgnet)
    model.fc = nn.Linear(512, num_classes)
return model
|
normal
|
{
"blob_id": "8e05b2723d8c50354e785b4bc7c5de8860aa706d",
"index": 5355,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_resnet18(pre_imgnet=False, num_classes=64):\n model = torchvision.models.resnet18(pretrained=pre_imgnet)\n model.fc = nn.Linear(512, 64)\n return model\n",
"step-3": "import torch\nimport torchvision\nfrom torch import nn\n\n\ndef get_resnet18(pre_imgnet=False, num_classes=64):\n model = torchvision.models.resnet18(pretrained=pre_imgnet)\n model.fc = nn.Linear(512, 64)\n return model\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
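A quick sanity check of the helper above with a dummy batch, reusing the imports from the snippet:

model = get_resnet18(pre_imgnet=False, num_classes=64)
x = torch.randn(2, 3, 224, 224)   # dummy batch of two RGB images
out = model(x)
print(out.shape)                  # torch.Size([2, 64])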
def add(a, b):
print "ADDING %d + %d" % (a, b)
return a + b
def subtract(a, b):
print "SUBTRACTING %d - %d" %(a, b)
return a - b
def multipy(a, b):
print "MULTIPLYING %d * %d" % (a, b)
return a * b
def divide(a, b):
print "DIVIDING %d / %d" % (a, b)
return a / b
print "Let's do some math with just functions!"
age = add(30, 5)
height = subtract(78, 4)
weight = multipy(90, 2)
iq = divide(100, 2)
print "Age: %d, Height: %d, Weight: %d, IQ: %d" %(age, height, weight, iq)
# A puzzle for the extra credit, type in anyway.
print "Here is a puzzle."
what = add(age, subtract(height, multipy(weight, divide(iq, 2))))
print "That becomes: ", what, "Can you do it by hand?"
print "Sure, let me show you!"
solution = ((30 + 5)+((78 - 4)-((90 * 2) * ((100 / 2) / 2))))
print "See how smart I am, look at this", solution, "!"
print "Now I break the formula by deleteing all ():"
solution2 = 30 + 5 + 78 - 4 - 90 * 2 * 100 / 2 / 2
print solution, "is the same as", solution2, ". Because python knows math rules."
solution3 = 30 * 2 + 5 * 100 + 78 / 2 - 4 / 2 - 90
print solution3, "is different, because I changed the order of the terms."
print "Now I will make my own formula into a function."
solution4 = 24.0 + 34.0 / 100.0 - 1023.0
term1 = divide(34.0, 100.0)
term2 = add(24.0, term1)
term3 = subtract(term2, 1023.0)
print term3, "and", solution4
# Mistakes I make:
# Typos
# forgot quotes
# forgot formatter %
# forgot \n
# things to remember:
# go back to study drills of ex 10
|
normal
|
{
"blob_id": "b4b80e40d12486881e37dd7ddeeef9c76417ebd9",
"index": 5906,
"step-1": "def add(a, b):\n print \"ADDING %d + %d\" % (a, b)\n return a + b\n\ndef subtract(a, b):\n print \"SUBTRACTING %d - %d\" %(a, b)\n return a - b\n\ndef multipy(a, b):\n print \"MULTIPLYING %d * %d\" % (a, b)\n return a * b\n\ndef divide(a, b):\n print \"DIVIDING %d / %d\" % (a, b)\n return a / b\n\n\nprint \"Let's do some math with just functions!\"\n\nage = add(30, 5)\nheight = subtract(78, 4)\nweight = multipy(90, 2)\niq = divide(100, 2)\n\nprint \"Age: %d, Height: %d, Weight: %d, IQ: %d\" %(age, height, weight, iq)\n\n\n# A puzzle for the extra credit, type in anyway.\n\nprint \"Here is a puzzle.\"\n\nwhat = add(age, subtract(height, multipy(weight, divide(iq, 2))))\n\nprint \"That becomes: \", what, \"Can you do it by hand?\"\n\nprint \"Sure, let me show you!\"\n\nsolution = ((30 + 5)+((78 - 4)-((90 * 2) * ((100 / 2) / 2))))\n\nprint \"See how smart I am, look at this\", solution, \"!\"\n\nprint \"Now I break the formula by deleteing all ():\"\n\nsolution2 = 30 + 5 + 78 - 4 - 90 * 2 * 100 / 2 / 2\n\nprint solution, \"is the same as\", solution2, \". Because python knows math rules.\"\n\nsolution3 = 30 * 2 + 5 * 100 + 78 / 2 - 4 / 2 - 90\n\nprint solution3, \"is different, because I changed the order of the terms.\"\n\nprint \"Now I will make my own formula into a function.\"\n\nsolution4 = 24.0 + 34.0 / 100.0 - 1023.0\n\nterm1 = divide(34.0, 100.0)\nterm2 = add(24.0, term1)\nterm3 = subtract(term2, 1023.0)\n\nprint term3, \"and\", solution4\n\n\n# Mistakes I make:\n# Typos\n# forgot quotes\n# forgot formatter %\n# forgot \\n\n\n# things to remember:\n# go back to study drills of ex 10\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
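The exercise above targets Python 2 (print statements, integer division). For reference, a minimal Python 3 rendering of two of the functions, assuming nothing beyond the original; note that `/` must become `//` to preserve the integer results:

def add(a, b):
    print("ADDING %d + %d" % (a, b))
    return a + b

def divide(a, b):
    print("DIVIDING %d / %d" % (a, b))
    return a // b   # Python 2's integer division; plain / would yield a float

iq = divide(100, 2)
print("IQ: %d" % iq)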
'''Mock classes that imitate idlelib modules or classes.
Attributes and methods will be added as needed for tests.
'''
from idlelib.idle_test.mock_tk import Text
class Editor:
'''Minimally imitate EditorWindow.EditorWindow class.
'''
def __init__(self, flist=None, filename=None, key=None, root=None):
self.text = Text()
self.undo = UndoDelegator()
def get_selection_indices(self):
first = self.text.index('1.0')
last = self.text.index('end')
return first, last
class UndoDelegator:
'''Minimally imitate UndoDelegator,UndoDelegator class.
'''
# A real undo block is only needed for user interaction.
def undo_block_start(*args):
pass
def undo_block_stop(*args):
pass
|
normal
|
{
"blob_id": "3b7c30718838a164eaf3aa12cd7b6a68930346f8",
"index": 8604,
"step-1": "<mask token>\n\n\nclass UndoDelegator:\n <mask token>\n\n def undo_block_start(*args):\n pass\n\n def undo_block_stop(*args):\n pass\n",
"step-2": "<mask token>\n\n\nclass Editor:\n <mask token>\n\n def __init__(self, flist=None, filename=None, key=None, root=None):\n self.text = Text()\n self.undo = UndoDelegator()\n\n def get_selection_indices(self):\n first = self.text.index('1.0')\n last = self.text.index('end')\n return first, last\n\n\nclass UndoDelegator:\n \"\"\"Minimally imitate UndoDelegator,UndoDelegator class.\n \"\"\"\n\n def undo_block_start(*args):\n pass\n\n def undo_block_stop(*args):\n pass\n",
"step-3": "<mask token>\n\n\nclass Editor:\n \"\"\"Minimally imitate EditorWindow.EditorWindow class.\n \"\"\"\n\n def __init__(self, flist=None, filename=None, key=None, root=None):\n self.text = Text()\n self.undo = UndoDelegator()\n\n def get_selection_indices(self):\n first = self.text.index('1.0')\n last = self.text.index('end')\n return first, last\n\n\nclass UndoDelegator:\n \"\"\"Minimally imitate UndoDelegator,UndoDelegator class.\n \"\"\"\n\n def undo_block_start(*args):\n pass\n\n def undo_block_stop(*args):\n pass\n",
"step-4": "<mask token>\nfrom idlelib.idle_test.mock_tk import Text\n\n\nclass Editor:\n \"\"\"Minimally imitate EditorWindow.EditorWindow class.\n \"\"\"\n\n def __init__(self, flist=None, filename=None, key=None, root=None):\n self.text = Text()\n self.undo = UndoDelegator()\n\n def get_selection_indices(self):\n first = self.text.index('1.0')\n last = self.text.index('end')\n return first, last\n\n\nclass UndoDelegator:\n \"\"\"Minimally imitate UndoDelegator,UndoDelegator class.\n \"\"\"\n\n def undo_block_start(*args):\n pass\n\n def undo_block_stop(*args):\n pass\n",
"step-5": "'''Mock classes that imitate idlelib modules or classes.\n\nAttributes and methods will be added as needed for tests.\n'''\n\nfrom idlelib.idle_test.mock_tk import Text\n\nclass Editor:\n '''Minimally imitate EditorWindow.EditorWindow class.\n '''\n def __init__(self, flist=None, filename=None, key=None, root=None):\n self.text = Text()\n self.undo = UndoDelegator()\n\n def get_selection_indices(self):\n first = self.text.index('1.0')\n last = self.text.index('end')\n return first, last\n\nclass UndoDelegator:\n '''Minimally imitate UndoDelegator,UndoDelegator class.\n '''\n # A real undo block is only needed for user interaction.\n def undo_block_start(*args):\n pass\n def undo_block_stop(*args):\n pass\n",
"step-ids": [
3,
7,
8,
9,
10
]
}
|
[
3,
7,
8,
9,
10
] |
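A brief sketch of how such a mock gets used in a test, assuming the idlelib test helpers above are importable; the exact index the mock Text returns for 'end' is not asserted:

editor = Editor()
editor.text.insert('1.0', 'print("hello")\n')
first, last = editor.get_selection_indices()
print(first, last)               # '1.0' and the mock Text's end index
editor.undo.undo_block_start()   # no-op stand-ins for the real undo block
editor.undo.undo_block_stop()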
import numpy as np
import cv2
face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
eye_cascade = cv2.CascadeClassifier('haarcascade_eye.xml')
img = cv2.imread('modi.jpg')
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(gray, 1.3, 5)
#Write the for loop code here
cv2.imshow('img',img)
cv2.waitKey(0)
cv2.destroyAllWindows()
|
normal
|
{
"blob_id": "759ff4cc123e85bdc8c1457bb521cd35841956cd",
"index": 482,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncv2.imshow('img', img)\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n",
"step-3": "<mask token>\nface_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')\neye_cascade = cv2.CascadeClassifier('haarcascade_eye.xml')\nimg = cv2.imread('modi.jpg')\ngray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\nfaces = face_cascade.detectMultiScale(gray, 1.3, 5)\ncv2.imshow('img', img)\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n",
"step-4": "import numpy as np\nimport cv2\nface_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')\neye_cascade = cv2.CascadeClassifier('haarcascade_eye.xml')\nimg = cv2.imread('modi.jpg')\ngray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\nfaces = face_cascade.detectMultiScale(gray, 1.3, 5)\ncv2.imshow('img', img)\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n",
"step-5": "import numpy as np\nimport cv2\n\nface_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')\neye_cascade = cv2.CascadeClassifier('haarcascade_eye.xml')\n\nimg = cv2.imread('modi.jpg')\ngray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n\nfaces = face_cascade.detectMultiScale(gray, 1.3, 5)\n#Write the for loop code here\n\ncv2.imshow('img',img)\ncv2.waitKey(0)\ncv2.destroyAllWindows()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
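The placeholder comment in the snippet above is typically filled with the standard cascade loop; a sketch drawing face boxes and nested eye boxes, where the colors and line widths are arbitrary choices:

for (x, y, w, h) in faces:
    cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)  # face box
    roi_gray = gray[y:y + h, x:x + w]    # restrict the eye search to the face
    roi_color = img[y:y + h, x:x + w]
    for (ex, ey, ew, eh) in eye_cascade.detectMultiScale(roi_gray):
        cv2.rectangle(roi_color, (ex, ey), (ex + ew, ey + eh), (0, 255, 0), 2)  # eye box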