query (stringlengths 9–9.05k) | document (stringlengths 10–222k) | negatives (listlengths 19–20) | metadata (dict)
---|---|---|---|
Calculate DIoU on box array
|
import numpy as np

def box_diou(boxes):
    # boxes: array of shape (N, 4) in [x, y, w, h] format;
    # the last row is the reference box that all preceding boxes are scored against
    x = boxes[:, 0]
    y = boxes[:, 1]
    w = boxes[:, 2]
    h = boxes[:, 3]
    areas = w * h
    # compute IoU of each box against the last box
    inter_xmin = np.maximum(x[:-1], x[-1])
    inter_ymin = np.maximum(y[:-1], y[-1])
    inter_xmax = np.minimum(x[:-1] + w[:-1], x[-1] + w[-1])
    inter_ymax = np.minimum(y[:-1] + h[:-1], y[-1] + h[-1])
    inter_w = np.maximum(0.0, inter_xmax - inter_xmin + 1)
    inter_h = np.maximum(0.0, inter_ymax - inter_ymin + 1)
    inter = inter_w * inter_h
    iou = inter / (areas[:-1] + areas[-1] - inter)
    # squared distance between box centers
    x_center = x + w / 2
    y_center = y + h / 2
    center_distance = np.power(x_center[:-1] - x_center[-1], 2) + np.power(
        y_center[:-1] - y_center[-1], 2)
    # smallest enclosing box (note: y/h must be used for the y-axis here)
    enclose_xmin = np.minimum(x[:-1], x[-1])
    enclose_ymin = np.minimum(y[:-1], y[-1])
    enclose_xmax = np.maximum(x[:-1] + w[:-1], x[-1] + w[-1])
    enclose_ymax = np.maximum(y[:-1] + h[:-1], y[-1] + h[-1])
    enclose_w = np.maximum(0.0, enclose_xmax - enclose_xmin + 1)
    enclose_h = np.maximum(0.0, enclose_ymax - enclose_ymin + 1)
    # squared diagonal of the enclosing box
    enclose_diagonal = np.power(enclose_w, 2) + np.power(enclose_h, 2)
    # DIoU = IoU - d^2 / c^2; epsilon in the denominator avoids dividing by 0
    diou = iou - 1.0 * center_distance / (
        enclose_diagonal + np.finfo(float).eps)
    return diou
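
A minimal usage sketch (the input layout is inferred from the code, not stated in the source: each row is [x, y, w, h], and the last row acts as the reference box):

# hypothetical example: two candidate boxes scored against a reference box
boxes = np.array([
    [0.0, 0.0, 10.0, 10.0],  # candidate 1
    [5.0, 5.0, 10.0, 10.0],  # candidate 2
    [1.0, 1.0, 10.0, 10.0],  # reference box (last row)
])
print(box_diou(boxes))  # array of two DIoU scores, one per candidate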
|
[
"def compute_iou(box, boxes, box_area, boxes_area):\n # Calculate intersection areas\n y1 = np.maximum(box[0], boxes[:, 0])\n y2 = np.minimum(box[2], boxes[:, 2])\n x1 = np.maximum(box[1], boxes[:, 1])\n x2 = np.minimum(box[3], boxes[:, 3])\n intersection = np.maximum(x2 - x1, 0) * np.maximum(y2 - y1, 0)\n union = box_area + boxes_area[:] - intersection[:]\n iou = intersection / union\n return iou",
"def bbox_iou(main_box, aux_boxes):\n\n max_Xmin = np.maximum(main_box[0], aux_boxes[0,:]) # Valor máximo de los \"X min\"\n max_Ymin = np.maximum(main_box[1], aux_boxes[1,:]) # Valor máximo de los \"Y min\"\n min_Xmax = np.minimum(main_box[2], aux_boxes[2,:]) # Valor mínimo de los \"X max\"\n min_Ymax = np.minimum(main_box[3], aux_boxes[3,:]) # Valor mínimo de los \"Y max\"\n\n X_overlap = np.maximum(0, min_Xmax - max_Xmin) # Overlap de las cajas sobre el eje X. Valor mínimo de los \"X max\" - Valor máximo de los \"X min\". Si la resta < 0, el valor se trunca en 0\n Y_overlap = np.maximum(0, min_Ymax - max_Ymin) # Overlap de las cajas sobre el eje Y. Valor mínimo de los \"Y max\" - Valor máximo de los \"Y min\". Si la resta < 0, el valor se trunca en 0\n Intersection = X_overlap * Y_overlap # Intersection = Multiplicación de los overlaps en ambos ejes.\n\n area_main = (main_box[2] - main_box[0]) * (main_box[3] - main_box[1]) # Area de la caja principal\n area_aux = (aux_boxes[2,:] - aux_boxes[0,:]) * (aux_boxes[3,:] - aux_boxes[1,:]) # Area de las cajas auxiliares\n Union = area_main + area_aux - Intersection # Union = La suma de las areas de cada caja - la intersección (Ya que de lo contrario la región de la intersección se incluiría dos veces.)\n\n return (Intersection/Union).astype(float) # Se retorna el IOU (Intersection/Union) en tipo float",
"def calc_ubudget(datafiles, ndays, lon1, lon2, plev=200):\n\n # Read data\n data = xray.Dataset()\n for nm in datafiles:\n print('Reading ' + datafiles[nm])\n with xray.open_dataset(datafiles[nm]) as ds:\n if nm in ds.data_vars:\n var = ds[nm]\n else:\n var = ds[nm + '%d' % plev]\n if 'Day' in var.dims:\n var = var.rename({'Day' : 'day'})\n data[nm] = atm.squeeze(var)\n data[nm].load()\n data['PHI'] = atm.constants.g.values * data['H']\n\n # Put zeros in for any missing variables (e.g. du/dp)\n for nm in ['OMEGA', 'DUDP', 'DOMEGADP', 'DUDTANA']:\n if nm not in data.data_vars:\n data[nm] = 0.0 * data['U']\n\n # Eddy decomposition\n taxis = 0\n for nm in data.data_vars:\n print('Eddy decomposition for ' + nm)\n comp = eddy_decomp(data[nm], ndays, lon1, lon2, taxis)\n for compnm in comp:\n data[compnm] = comp[compnm]\n\n # Momentum budget calcs\n # du/dt = sum of terms in ubudget\n ubudget = xray.Dataset()\n readme = 'Momentum budget: ACCEL = sum of all other data variables'\n ubudget.attrs['readme'] = readme\n ubudget.attrs['ndays'] = ndays\n ubudget.attrs['lon1'] = lon1\n ubudget.attrs['lon2'] = lon2\n\n # Advective terms\n keypairs = [ ('AVG', 'AVG'), ('AVG', 'ST'), ('ST', 'AVG')]\n print('Computing advective terms')\n for pair in keypairs:\n print(pair)\n ukey, flowkey = pair\n u = data['U_' + ukey]\n dudp = data['DUDP_' + ukey]\n uflow = data['U_' + flowkey]\n vflow = data['V_' + flowkey]\n omegaflow = data['OMEGA_' + flowkey]\n adv = advection(uflow, vflow, omegaflow, u, dudp)\n for nm in adv.data_vars:\n key = 'ADV_%s_%s_%s' % (ukey, flowkey, nm)\n ubudget[key] = - adv[nm]\n long_name = 'Advection of %s momentum by %s' % (ukey, flowkey)\n ubudget[key].attrs['long_name'] = long_name\n\n # EMFD terms\n keys = ['TR', 'ST']\n print('Computing EMFD terms')\n for key in keys:\n print(key)\n u = data['U_' + key]\n v = data['V_' + key]\n omega = data['OMEGA_' + key]\n dudp = data['DUDP_' + key]\n domegadp = data['DOMEGADP_' + key]\n emfd = fluxdiv(u, v, omega, dudp, domegadp)\n for nm in emfd.data_vars:\n ubudget['EMFC_%s_%s' % (key, nm)] = - emfd[nm]\n\n # Coriolis terms\n latlon = latlon_data(data['V_ST'])\n lat = latlon['LAT']\n f = atm.coriolis(lat)\n ubudget['COR_AVG'] = data['V_AVG'] * f\n ubudget['COR_ST'] = data['V_ST'] * f\n\n # Pressure gradient terms\n a = atm.constants.radius_earth.values\n coslat = latlon['COSLAT']\n lonrad = latlon['LONRAD']\n londim = atm.get_coord(data['PHI_ST'], 'lon', 'dim')\n ubudget['PGF_ST'] = - atm.gradient(data['PHI_ST'], lonrad, londim) / (a*coslat)\n\n # Analysis increment for dU/dt\n ubudget['ANA'] = data['DUDTANA']\n\n # Time mean\n print('Computing rolling time mean')\n for nm in ubudget.data_vars:\n ubudget[nm] = atm.rolling_mean(ubudget[nm], ndays, axis=taxis, center=True)\n\n # Acceleration\n nseconds = 60 * 60 * 24 * ndays\n delta_u = np.nan * data['U']\n u = data['U'].values\n delta_u.values[ndays//2:-ndays//2] = (u[ndays:] - u[:-ndays]) / nseconds\n ubudget['ACCEL'] = delta_u\n\n return ubudget, data",
"def boxify(phase):\n return np.sign(phase[:, 1] - phase[:, 1].mean())",
"def esquemadesaltos(dx,nu,TermNoHom,ValorInic,ContIzq,ContDer,tmax,dt):\r\n CantSubintEsp=int(1/dx)\r\n CantSubintTiempo=int(tmax/dt)\r\n u=zeros([CantSubintEsp+1,CantSubintTiempo+1])\r\n u[:,0]=ValorInic(linspace(0,1,CantSubintEsp+1))\r\n u[0,:]=ContIzq(linspace(0,tmax,CantSubintTiempo+1))\r\n u[CantSubintEsp,:]=ContDer(linspace(0,tmax,CantSubintTiempo+1))\r\n [Y,X]=meshgrid(linspace(0,tmax,CantSubintTiempo+1),\\\r\n linspace(0,1,CantSubintEsp+1))\r\n NoHom=TermNoHom(X,Y)\r\n factor=2*nu*dt/(dx**2)\r\n u[1:CantSubintEsp,1]=u[1:CantSubintEsp,0]+factor/2*\\\r\n (u[2:CantSubintEsp+1,0]-2*u[1:CantSubintEsp,0]+u[0:CantSubintEsp-1,0])\\\r\n +dt*NoHom[1:CantSubintEsp,0]\r\n for n in range(1,CantSubintTiempo):\r\n u[1:CantSubintEsp,n+1]=u[1:CantSubintEsp,n-1]\\\r\n +factor*(u[2:CantSubintEsp+1,n]-2*u[1:CantSubintEsp,n]\\\r\n +u[0:CantSubintEsp-1,n])+dt*NoHom[1:CantSubintEsp,n]\r\n fig = plt.figure()\r\n ax = Axes3D(fig)\r\n ax.plot_surface(X, Y, u, rstride=1, cstride=1, cmap=cm.coolwarm,\\\r\n linewidth=0, antialiased=False)\r\n plt.show()\r\n return X,Y,u",
"def dudz(self):\n\n if \"dudz\" not in self.ds:\n var = xroms.dudz(self.ds.u, self.grid, sboundary=\"extend\")\n self.ds[\"dudz\"] = var\n return self.ds[\"dudz\"]",
"def iou(box, clusters):\r\n x = np.minimum(clusters[:, 0], box[0])\r\n y = np.minimum(clusters[:, 1], box[1])\r\n if np.count_nonzero(x == 0) > 0 or np.count_nonzero(y == 0) > 0:\r\n raise ValueError(\"Box has no area\")\r\n\r\n intersection = x * y\r\n box_area = box[0] * box[1]\r\n cluster_area = clusters[:, 0] * clusters[:, 1]\r\n\r\n iou_ = np.true_divide(intersection, box_area + cluster_area - intersection + 1e-10)\r\n # iou_ = intersection / (box_area + cluster_area - intersection + 1e-10)\r\n\r\n return iou_",
"def dEdt( binEnergySums, binWidth ):\n return binEnergySums/binWidth",
"def vacuum_dm(self):\n vac = np.zeros(self.n_cav)\n vac[0] = 1.\n return ket2dm(vac)",
"def simulate_box_1d(self):\n \n Lx = 800.0\n Ly = 10.0\n Nx = 200\n Ny = 3\n T = 14.0\n dt = 0.04\n \n g = 9.81 # Acceleration of gravity (m/s^2)\n \n I0 = 0.0\n Ia = 140.0\n Im = 0.0\n Is = 40.2\n \n B0 = -300.0\n Ba = 275.0\n Bmx = 0.75*Lx\n Bs = 0.25*Lx\n \n zmin = -320.0\n zmax = 150.0\n \n t0 = time.clock()\n\n # Initial water surface with tsunami\n def I(x, y):\n return I0 + Ia*exp(-((x-Im)/Is)**2)\n \n # shape of sea bottom\n def bottom(x, y):\n if type(x) == float64:\n if (x >= Bmx-Bs) and (x <= Bmx+Bs):\n b_value = B0 + Ba\n else:\n b_value = B0\n else:\n b_value = zeros((x.shape[0], y.shape[1]))\n for i in range(0, x.shape[0]):\n for j in range(0, y.shape[1]):\n xx = x[i,0]\n yy = y[0,j]\n if (xx >= Bmx-Bs) and (xx <= Bmx+Bs):\n b_value[i,j] = B0 + Ba\n else:\n b_value[i,j] = B0\n return b_value\n \n def q(x, y):\n return -g*bottom(x, y)\n\n\n def plot_u(u, x, xv, y, yv, t, n):\n \n b_a = zeros((xv.shape[0],yv.shape[1]))\n b_a[:,:] = bottom(xv, yv)\n plot(xv[:,0], u[:,0], '-', xv[:,0], b_a[:,0], '--', ylim=[zmin, zmax])\n #show()\n #hold('on')\n \n #axis([0.0,400.0,0.0,430.0,-500.0,300.0])\n #show()\n #time.sleep(5.0)\n filename = 'tmp_1d_box%04d.png' % n\n savefig(filename)\n \n\n # Construct problem object\n problem = Problem(I=I, V=None, f=None, q=q, b=0.0, Lx=Lx, \n Ly=Ly, T=T)\n \n # Construct solver object\n solver = Solver(problem=problem, Nx=Nx, Ny=Ny,\n dt=dt, user_action=plot_u, \n version=self.version)\n \n # Solve the PDE\n solver.solve() \n \n t1 = time.clock()\n print 'used time: ', t1-t0",
"def to_box_ctr_np(boxes):\n h = boxes[:, 2] - boxes[:, 0]\n w = boxes[:, 3] - boxes[:, 1]\n ctr_y = boxes[:, 0] + 0.5 * h\n ctr_x = boxes[:, 1] + 0.5 * w\n\n boxes = np.vstack((ctr_y, ctr_x, h, w)).transpose()\n\n return boxes",
"def DMFluxneuDet(flavor,Enu,ch,DMm,DMsig,body,param,osc): \n ##B From Arxiv: 0506298 ec. 21 & 24\n #DM_annihilation_rate_Earth = 1.0e14*(100*param.GeV/DMm)**2/param.sec #[annhilations/s]\n #DM_annihilation_rate_Sun = ((1.0*param.AU)/(param.EARTHRADIUS*param.km))**2*DM_annihilation_rate_Earth\n DM_annihilation_rate_Sun = float(np.sum(DMSunAnnihilationRate(DMm,DMsig,param)))# [eV]\n ##E\n \n flux = 0.0\n \n if param.neutype == \"neutrino\":\n if osc :\n for flv in range(3):\n #p = DMParameters(flv)\n #if param.name == \"STD\":\n flux = flux + (DM_annihilation_rate_Sun/(4.0*np.pi*param.AU**2))*DMSweFlux(Enu/param.GeV,flv*2,ch,DMm/param.GeV)*no.AvgNeuProb_RK_STD(flv,flavor,Enu,param)\n #flux = flux + (1.0/(4.0*np.pi*param.AU**2))*DMFlux(Enu,DMm,ch,p)*no.AvgNeuProb_RK_STD(flv,flavor,Enu,param)\n #flux = flux + (DM_annihilation_rate_Sun/(4.0*np.pi*param.AU**2))*DMFlux(Enu,DMm,ch,p)*no.AvgNeuProb_RK_STD(flv,flavor,Enu,param)\n #else :\n # flux = flux + (DM_annihilation_rate_Sun/(4.0*np.pi*param.AU**2))*DMSweFlux(Enu/param.GeV,flv*2,ch,DMm/param.GeV)*no.AvgNeuProb_RK(flv,flavor,Enu,param)\n #flux = flux + (DM_annihilation_rate_Sun/(4.0*np.pi*param.AU**2))*DMFlux(Enu,DMm,ch,p)*no.AvgNeuProb_RK(flv,flavor,Enu,param)\n else :\n #p = DMParameters(flavor)\n flux = flux + (DM_annihilation_rate_Sun/(4.0*np.pi*param.AU**2))*DMSweFlux(Enu/param.GeV,flavor*2,ch,DMm/param.GeV)\n #flux = flux + (1.0/(4.0*np.pi*param.AU**2))*DMFlux(Enu,DMm,ch,p)\n #flux = flux + (DM_annihilation_rate_Sun/(4.0*np.pi*param.AU**2))*DMFlux(Enu,DMm,ch,p)\n return flux\n elif param.neutype == \"antineutrino\":\n if osc :\n for flv in range(3):\n #p = DMParameters(flv)\n #if param.name == \"STD\":\n flux = flux + (DM_annihilation_rate_Sun/(4.0*np.pi*param.AU**2))*DMSweFlux(Enu/param.GeV,flv*2+1,ch,DMm/param.GeV)*no.AvgNeuProb_RK_STD(flv,flavor,Enu,param)\n #flux = flux + (1.0/(4.0*np.pi*param.AU**2))*DMFlux(Enu,DMm,ch,p)*no.AvgNeuProb_RK_STD(flv,flavor,Enu,param)\n #flux = flux + (DM_annihilation_rate_Sun/(4.0*np.pi*param.AU**2))*DMFlux(Enu,DMm,ch,p)*no.AvgNeuProb_RK_STD(flv,flavor,Enu,param)\n #else :\n # flux = flux + (DM_annihilation_rate_Sun/(4.0*np.pi*param.AU**2))*DMSweFlux(Enu/param.GeV,flv*2+1,ch,DMm/param.GeV)*no.AvgNeuProb_RK(flv,flavor,Enu,param)\n #flux = flux + (DM_annihilation_rate_Sun/(4.0*np.pi*param.AU**2))*DMFlux(Enu,DMm,ch,p)*no.AvgNeuProb_RK(flv,flavor,Enu,param)\n else :\n #p = DMParameters(flavor)\n flux = flux + (DM_annihilation_rate_Sun/(4.0*np.pi*param.AU**2))*DMSweFlux(Enu/param.GeV,flavor*2+1,ch,DMm/param.GeV)\n #flux = flux + (1.0/(4.0*np.pi*param.AU**2))*DMFlux(Enu,DMm,ch,p)\n #flux = flux + (DM_annihilation_rate_Sun/(4.0*np.pi*param.AU**2))*DMFlux(Enu,DMm,ch,p)\n return flux\n else :\n print \"Wrong neutrino type.\"\n quit()",
"def dU_dx(U,z):\n\treturn [U[1], (g*V*(p_atm-p_He)-(m_b+m_p)*g+(1/2)*p_atm*U[1]**2*c_d*S)/(m_He+m_b+m_p)]",
"def get_Hu():\n \n ue = np.zeros((nx+1,ny)) \n uw = np.zeros((nx+1,ny))\n un = np.zeros((nx+1,ny))\n us = np.zeros((nx+1,ny))\n vn = np.zeros((nx+1,ny))\n vs = np.zeros((nx+1,ny))\n τxxe = np.zeros((nx+1,ny))\n τxxw = np.zeros((nx+1,ny))\n τxyn = np.zeros((nx+1,ny))\n τxys = np.zeros((nx+1,ny))\n Hu = np.zeros((nx+1,ny))\n \n i = np.arange(1,nx) # u-cell centers in domain interior\n \n ue[i,:] = (u[i+1,:] + u[i,:])/2\n uw[i,:] = (u[i,:] + u[i-1,:])/2\n \n j = np.arange(0,ny-1)\n un[IJ(i,j)] = (u[IJ(i,j+1)] + u[IJ(i,j)])/2\n un[i,ny-1] = ubc_t\n j = np.arange(1,ny)\n us[IJ(i,j)] = (u[IJ(i,j)] + u[IJ(i,j-1)])/2\n us[i,0] = ubc_b\n \n j = np.arange(0,ny)\n vn[IJ(i,j)] = (v[IJ(i-1,j+1)]+v[IJ(i,j+1)])/2\n vs[IJ(i,j)] = (v[IJ(i-1,j)] +v[IJ(i,j)]) /2\n \n τxxe[i,:] = -2*ν*(u[i+1,:] - u[i,:]) /Δx\n τxxw[i,:] = -2*ν*(u[i,:] - u[i-1,:])/Δx\n \n j = np.arange(0,ny-1)\n τxyn[IJ(i,j)] = -ν*(u[IJ(i,j+1)]-u[IJ(i,j)])/Δy - ν*(v[IJ(i,j+1)]-v[IJ(i-1,j+1)])/Δx\n τxyn[i,ny-1] = -ν*(ubc_t-u[i,ny-1])/(Δy/2) - ν*(v[i,ny]-v[i-1,ny])/Δx \n \n j = np.arange(1,ny)\n τxys[IJ(i,j)] = -ν*(u[IJ(i,j)]-u[IJ(i,j-1)])/Δy - ν*(v[IJ(i,j)]-v[IJ(i-1,j)])/Δx\n τxys[i,0] = -ν*(u[i,0]-ubc_b)/(Δy/2) - ν*(v[i,0]-v[i-1,0])/Δx\n \n Hu[i,:] = -((ue[i,:]*ue[i,:] - uw[i,:]*uw[i,:])/Δx + (un[i,:]*vn[i,:] - us[i,:]*vs[i,:])/Δy) \\\n -((τxxe[i,:] - τxxw[i,:])/Δx + (τxyn[i,:] - τxys[i,:])/Δy)\n \n return Hu",
"def _init_box2unit(self):\n self.box2unit = np.empty((9,9,3), dtype=np.int)\n count = 0\n for i in range(9):\n for j in range(9):\n # for each box, we add the row, column, and box unit indices\n self.box2unit[i, j] = [i, 9+j, 18 + (i // 3)*3 + (j // 3)]",
"def celdasNumeradas(self):\n\t\tfor coords in self.coordsSimple.keys():\n\t\t\tself.go(coords[0],coords[1])\n\t\t\tself.t.write(self.coordsSimple[coords])",
"def dichotomie(pFixedBox, pMovingBox, pMove):\n return _almathswig.dichotomie(pFixedBox, pMovingBox, pMove)",
"def aptitud(v):\n \n size = len(v)\n \n # Los ataques sólo pueden ser en las diagonales\n diagonal_izquierda_derecha = [0] * (2*size-1)\n diagonal_derecha_izquierda = [0] * (2*size-1)\n horizontal = [0] * size\n \n # Número de reinas en cada diagonal\n for i in range(size): # recorremos las columnas\n diagonal_izquierda_derecha[i+v[i]] += 1 # [columna + fila]\n diagonal_derecha_izquierda[size-1-i+v[i]] += 1 # [size-1-columna+ fila]\n horizontal[v[i]] += 1 \n \n # Número de ataques en cada diagonal\n s = 0\n for i in range(2*size-1): # recorremos todas las diagonales\n if diagonal_izquierda_derecha[i] > 1: # hay ataques\n s += diagonal_izquierda_derecha[i] - 1 # n-1 ataques\n if diagonal_derecha_izquierda[i] > 1:\n s += diagonal_derecha_izquierda[i] - 1\n \n # Numero de ataques en las horizontales\n for i in range(size):\n if horizontal[i] > 1:\n s += horizontal[i]\n\n return s",
"def ode(u: float) -> float:\n return u ** 2"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
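The metadata above marks each row as a (query, document, negatives) triplet. A minimal sketch of how such triplets are typically consumed for contrastive training, assuming the sentence-transformers library; the model name, loss choice, and the variable bindings to one row's fields are all assumptions, not from the source:

from sentence_transformers import SentenceTransformer, InputExample, losses
from torch.utils.data import DataLoader

# assumed bindings to a single row of the table above (strings truncated here)
query = "Calculate DIoU on box array"
document = "def box_diou(boxes): ..."
negatives = ["def compute_iou(box, boxes, box_area, boxes_area): ..."]

model = SentenceTransformer("all-MiniLM-L6-v2")  # assumed model choice
# one (anchor, positive, negative) example per negative in the row
examples = [InputExample(texts=[query, document, neg]) for neg in negatives]
loader = DataLoader(examples, shuffle=True, batch_size=16)
model.fit(train_objectives=[(loader, losses.TripletLoss(model))], epochs=1)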
conv => drop => conv => maxpooling
|
def conv_pooling(init_tensor, n_filters, drop_rate):
    # conv_twice applies the conv => drop => conv block named in the query;
    # a sketch of it is given below
    c = conv_twice(init_tensor, n_filters, drop_rate)
    p = MaxPooling2D((2, 2))(c)
    return c, p
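
Neither conv_twice nor the MaxPooling2D import appears in this snippet; a minimal sketch assuming the Keras functional API (the 3x3 kernel, 'same' padding, and ReLU activation are assumptions, not from the source):

from tensorflow.keras.layers import Conv2D, Dropout, MaxPooling2D

def conv_twice(init_tensor, n_filters, drop_rate):
    # conv => drop => conv, matching the query's pattern
    c = Conv2D(n_filters, (3, 3), padding='same', activation='relu')(init_tensor)
    c = Dropout(drop_rate)(c)
    c = Conv2D(n_filters, (3, 3), padding='same', activation='relu')(c)
    return c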
|
[
"def __apply_conv_pool(self, tensor_in, params,\n activation, op_name):\n weights, biases = self.__make_conv_wb(params,op_name)\n tensor_out = tf.nn.max_pool(\n activation(tf.nn.conv2d(\n tensor_in, weights, strides=self.conv_strides,\n padding=self.pad) + biases), ksize=self.pool_ksize,\n strides=self.pool_strides, padding=self.pad,\n name=op_name)\n return tensor_out",
"def max_pool_forward_naive(x, pool_param):\n out = None\n\n\n pool_width = pool_param['pool_width']\n pool_height = pool_param['pool_height']\n stride = pool_param['stride']\n\n sample_width = x.shape[3]\n sample_height = x.shape[2]\n N = x.shape[0]\n F = x.shape[1]\n\n outputSizeWidth = int((sample_width - pool_width) / stride + 1)\n outputSizeHeight = int((sample_height - pool_height) / stride + 1)\n\n\n\n out = np.zeros((N, F, outputSizeHeight, outputSizeWidth))\n\n for sample_index in range(N):\n for activationFilter_index in range(F):\n for poolOutput_row in range(outputSizeHeight):\n for poolOutput_column in range(outputSizeWidth):\n dataToCompute = x[sample_index, activationFilter_index][poolOutput_row * stride: poolOutput_row * stride + pool_height, poolOutput_column * stride: poolOutput_column * stride + pool_width]\n out[sample_index,activationFilter_index][poolOutput_row,poolOutput_column] = np.max(dataToCompute)\n\n\n\n cache = (x, pool_param)\n return out, cache",
"def dpcnn_pooling_two_conv(config, conv, layer_index, is_training_flag):\n with tf.variable_scope(\"pooling_two_conv_\" + str(layer_index)):\n # 1. pooling:max-pooling with size 3 and stride 2==>reduce shape to half\n pooling = tf.nn.max_pool(conv, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME',name=\"pool\") # [batch_size,total_sequence_length/2,embed_size/2,hpcnn_number_filters]\n print(layer_index, \"dpcnn_pooling_two_conv.pooling:\", pooling)\n\n # 2. two layer of conv\n conv = dpcnn_two_layers_conv(config, pooling, is_training_flag, \n double_num_filters=False) #TODO double num_filters\n # print(\"dpcnn_pooling_two_conv.layer_index\", layer_index, \"conv:\", conv)\n\n # 3. skip connection and activation\n conv = conv + pooling\n b = tf.get_variable(\"b-poolcnn%s\" % config.hpcnn_number_filters, [config.hpcnn_number_filters])\n conv = tf.nn.relu(tf.nn.bias_add(conv, b),\"relu-poolcnn\") # shape:[batch_size,total_sequence_length/2,embed_size/2,hpcnn_number_filters]\n return conv",
"def training_pool(self):",
"def backward_max_pool(data, pool_width, pool_height, output_grad):\n\n # *** START CODE HERE ***\n input_channels = data.shape[0]\n input_width = data.shape[1]\n input_height = data.shape[2]\n partial = np.zeros((input_channels, input_width , input_height))\n for x in range(0, input_width, pool_width):\n for y in range(0, input_height, pool_height):\n #25 iterations\n window_max = np.amax(data[:, x:(x + pool_width), y:(y + pool_height)], axis=(1, 2))\n for c in range(input_channels):\n found_it = False\n for dx in range(pool_width):\n for dy in range(pool_height):\n if (data[c,x + dx,y + dy] == window_max[c]) and (found_it == False):\n partial[c,x + dx, y + dy] = output_grad[c, x // pool_width, y // pool_height]\n found_it = True\n return partial \n # *** END CODE HERE ***",
"def squeeze_net(input, classes):\n\n weights = {'conv1': tf.Variable(tf.truncated_normal([7, 7, 3, 96])),\n 'conv10': tf.Variable(tf.truncated_normal([1, 1, 512, classes]))}\n\n biases = {'conv1': tf.Variable(tf.truncated_normal([96])),\n 'conv10': tf.Variable(tf.truncated_normal([classes]))}\n\n output = tf.nn.conv2d(input, weights['conv1'], strides=[1,2,2,1], padding='SAME', name='conv1')\n output = tf.nn.bias_add(output, biases['conv1'])\n\n output = tf.nn.max_pool(output, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', name='maxpool1')\n\n output = fire_module(output, s1=16, e1=64, e3=64, channel=96, fire_id='fire2')\n output = fire_module(output, s1=16, e1=64, e3=64, channel=128, fire_id='fire3')\n output = fire_module(output, s1=32, e1=128, e3=128, channel=128, fire_id='fire4')\n\n output = tf.nn.max_pool(output, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', name='maxpool4')\n\n output = fire_module(output, s1=32, e1=128, e3=128, channel=256, fire_id='fire5')\n output = fire_module(output, s1=48, e1=192, e3=192, channel=256, fire_id='fire6')\n output = fire_module(output, s1=48, e1=192, e3=192, channel=384, fire_id='fire7')\n output = fire_module(output, s1=64, e1=256, e3=256, channel=384, fire_id='fire8')\n\n output = tf.nn.max_pool(output, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', name='maxpool8')\n\n output = fire_module(output, s1=64, e1=256, e3=256, channel=512, fire_id='fire9')\n\n output = tf.nn.dropout(output, keep_prob=0.5, name='dropout9')\n\n output = tf.nn.conv2d(output, weights['conv10'], strides=[1, 1, 1, 1], padding='SAME', name='conv10')\n output = tf.nn.bias_add(output, biases['conv10'])\n\n output = tf.nn.avg_pool(output, ksize=[1, 13, 13, 1], strides=[1, 2, 2, 1], padding='SAME', name='avgpool10')\n\n return output",
"def max_pool_backward_naive(dout, cache):\n dx = None\n\n x, pool_param = cache\n\n pool_width = pool_param['pool_width']\n pool_height = pool_param['pool_height']\n stride = pool_param['stride']\n\n sample_width = x.shape[3]\n sample_height = x.shape[2]\n N = x.shape[0]\n F = x.shape[1]\n\n outputSizeWidth = int((sample_width - pool_width) / stride + 1)\n outputSizeHeight = int((sample_height - pool_height) / stride + 1)\n\n dx = np.zeros_like(x)\n\n # iterate to all items\n for sample_index in range(N):\n for activationFilter_index in range(F):\n for poolOutput_row in range(outputSizeHeight):\n for poolOutput_column in range(outputSizeWidth):\n dataToCompute = x[sample_index, activationFilter_index][\n poolOutput_row * stride: poolOutput_row * stride + pool_height,\n poolOutput_column * stride: poolOutput_column * stride + pool_width]\n\n arguments = np.unravel_index(np.argmax(dataToCompute), dataToCompute.shape)\n dx[sample_index, activationFilter_index][poolOutput_row * stride + arguments[0], poolOutput_column * stride +arguments[1]] += dout[sample_index,activationFilter_index,poolOutput_row,poolOutput_column]\n\n\n return dx",
"def max_pool_forward_naive(x, pool_param):\r\n out = None\r\n ###########################################################################\r\n # TODO: Implement the max-pooling forward pass #\r\n ###########################################################################\r\n # *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\r\n N, C, H, W = x.shape\r\n pool_height, pool_width, stride = pool_param['pool_height'], pool_param['pool_width'], pool_param['stride']\r\n H_out = 1 + (H - pool_height) // stride\r\n W_out = 1 + (W - pool_width) // stride\r\n out = np.zeros((N, C, H_out, W_out))\r\n for h_out in range(H_out):\r\n for w_out in range(W_out):\r\n xx = x[:, :, stride * h_out:stride * h_out + pool_height, stride * w_out:stride * w_out + pool_width]\r\n out[:, :, h_out, w_out] = np.max(xx, axis=(2, 3))\r\n # *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\r\n ###########################################################################\r\n # END OF YOUR CODE #\r\n ###########################################################################\r\n cache = (x, pool_param)\r\n return out, cache",
"def forward_max_pool(data, pool_width, pool_height):\n input_channels, input_width, input_height = data.shape\n \n output = np.zeros((input_channels, input_width // pool_width, input_height // pool_height))\n\n for x in range(0, input_width, pool_width):\n for y in range(0, input_height, pool_height):\n\n output[:, x // pool_width, y // pool_height] = np.amax(data[:, x:(x + pool_width), y:(y + pool_height)], axis=(1, 2))\n\n return output",
"def conv2d_maxpool(x_tensor, conv_num_outputs, conv_ksize, conv_strides, pool_ksize, pool_strides):\n filter_size = [conv_ksize[0], conv_ksize[1], x_tensor.get_shape().as_list()[3], conv_num_outputs]\n weight = tf.Variable(tf.truncated_normal(filter_size, stddev = 0.01))\n conv = tf.nn.conv2d(x_tensor, weight, [1, conv_strides[0], conv_strides[1], 1], padding = \"SAME\")\n \n bias = tf.Variable(tf.zeros([conv_num_outputs]))\n \n conv = tf.nn.bias_add(conv, bias)\n conv = tf.nn.relu(conv)\n \n conv = tf.nn.max_pool(conv, [1, pool_ksize[0], pool_ksize[1], 1], [1, pool_strides[0], pool_strides[1], 1], padding = \"SAME\")\n \n return conv",
"def _make_conv_pool_block(\n cls,\n in_channels: int,\n out_channels: int,\n kernel_size: int,\n activation: nn.Module,\n pool_size: int,\n ) -> nn.Module:\n return nn.Sequential(\n nn.ConstantPad1d((0, kernel_size - 1), 0),\n nn.Conv1d(\n in_channels=in_channels,\n out_channels=out_channels,\n kernel_size=kernel_size\n ),\n activation,\n nn.MaxPool1d(kernel_size=pool_size)\n )",
"def cnn_oned_60(self,BINS,WIN_LEN):\n\n print(\"Attention: The current TF Version requirs weights to be saved seperatly in sparsly connected Nets\")\n ceil_bins=joblib.load(\"ceil_bins3.pkl\")\n ceil_bins=list(ceil_bins)\n #when using customLoss squeeze axis 3:\n noise_in = keras.Input((BINS,WIN_LEN,1))\n noise_fft=tf.squeeze(noise_in,3)\n\n \"\"\"Split up Subbands from STFT\"\"\"\n group=[1]*60\n sum_of_bins=0\n ceil_bins[59]=9\n\n for k in range(0,len(ceil_bins)):\n print(k)\n ## FFT Bins getting split for processing with specific neurons\n sum_of_bins=sum_of_bins+ceil_bins[k]\n if k==0:\n group[k]= Lambda(lambda x: x[:,0:2,:], output_shape=((2,WIN_LEN)))(noise_fft)\n print(group[k])\n\n if k==59:\n print( Lambda(lambda x: x[248:,:], output_shape=((9,16)))(noise_fft))\n group[k]= Lambda(lambda x: x[:,248:,:], output_shape=((9,16)))(noise_fft)\n\n else:\n print(int(sum_of_bins+ceil_bins[k]))\n group[k]=Lambda(lambda x: x[:,int(sum_of_bins):int(sum_of_bins+ceil_bins[k]),:], output_shape=((int(ceil_bins[k]),WIN_LEN)))(noise_fft)\n print(group[k])\n\n\n for e in range(0,len(ceil_bins)):\n group[e]=tf.keras.layers.Conv1D(64, 4, strides=1, padding='same',dilation_rate=1, activation='relu')(group[e])\n group[e]=tf.keras.layers.MaxPooling1D(pool_size=2, strides=None, padding='same', data_format=None)(group[e])\n group[e]=tf.keras.layers.Conv1D(64, 8, strides=1, padding='same',dilation_rate=2, activation='relu')(group[e])\n group[e]=tf.keras.layers.MaxPooling1D(pool_size=2, strides=None, padding='same', data_format=None)(group[e])\n group[e]=tf.keras.layers.Conv1D(64, 16, strides=1, padding='same',dilation_rate=4, activation='relu')(group[e])\n group[e]=tf.keras.layers.MaxPooling1D(pool_size=2, strides=None, padding='same', data_format=None)(group[e])\n\n for j in range(0,len(ceil_bins)):\n group[j]=tf.keras.layers.GlobalAveragePooling1D()(group[j])\n\n for b in range(0,len(ceil_bins)):\n group[b]=tf.expand_dims(group[b],1)\n\n for i in range(0,len(ceil_bins)):\n group[i]=tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(32))(group[i])\n\n \"\"\"\n Concatenate Feature Vectors, init x_Tensor as first element\n \"\"\"\n x_Tensor = group[0]\n for g in range(1,60):\n x_Tensor = Concatenate(axis=1)([x_Tensor,group[g]])\n\n\n x_Tensor = Dense(60*64, activation='relu')(x_Tensor)\n x = tf.keras.layers.Dropout(0.05)(x_Tensor)\n outputs = tf.keras.layers.Dense(257, activation='sigmoid')(x)\n model = tf.keras.Model(noise_in, outputs)\n\n\n return model",
"def convolution_pooling(prev_layer, n_filters, hype_space):\n current_layer = tensorflow.keras.layers.Conv2D(\n filters=n_filters, kernel_size=(3, 3), strides=(2, 2),\n padding='same', activation='linear',\n kernel_regularizer=tensorflow.keras.regularizers.l2(\n STARTING_L2_REG * hype_space['l2_weight_reg_mult'])\n )(prev_layer)\n\n if hype_space['use_BN']:\n current_layer = bn(current_layer)\n\n return current_layer",
"def train_conv_net(datasets,\n U,\n word_idx_map,\n img_w=300, \n filter_hs=[3,4,5],\n hidden_units=[100,2], \n dropout_rate=[0.5],\n shuffle_batch=True,\n n_epochs=11, \n batch_size=50, \n lr_decay = 0.95,\n conv_non_linear=\"relu\",\n activations=[Iden],\n sqr_norm_lim=9,\n non_static=True,\n pi_params=[1.,0],\n C=1.0,\n patience=20): \n rng = np.random.RandomState(3435)\n # 其实为句子的长度sent_len\n img_h = len(datasets[0][0])-1\n filter_w = img_w \n feature_maps = hidden_units[0]\n filter_shapes = []\n pool_sizes = []\n for filter_h in filter_hs:\n filter_shapes.append((feature_maps, 1, filter_h, filter_w))\n # 在img_h×img_w大小的图片上进行s=1,f=f_h×f_w的卷积操作时,\n # 所得的卷积结果图大小为(img_h-f_h+1)×(img_w-f_w+1)\n # 然后经过大小为(img_h-f_h+1)×(img_w-f_w+1)的池化层后,就只剩下一个“点”了\n pool_sizes.append((img_h-filter_h+1, img_w-filter_w+1))\n # [('image shape', 61, 300), ('filter shape', [(100, 1, 3, 300), (100, 1, 4, 300), (100, 1, 5, 300)]),\n # ('hidden_units', [100, 2]), ('dropout', [0.4]), ('batch_size', 50), ('non_static', True),\n # ('learn_decay', 0.95), ('conv_non_linear', 'relu'), ('non_static', True), ('sqr_norm_lim', 9),\n # ('shuffle_batch', True), ('pi_params', [0.95, 0]), ('C', 6.0)]\n parameters = [(\"image shape\",img_h,img_w), (\"filter shape\",filter_shapes), (\"hidden_units\",hidden_units),\n (\"dropout\",dropout_rate), (\"batch_size\",batch_size), (\"non_static\",non_static),\n (\"learn_decay\",lr_decay), (\"conv_non_linear\",conv_non_linear), (\"non_static\",non_static),\n (\"sqr_norm_lim\",sqr_norm_lim), (\"shuffle_batch\",shuffle_batch), (\"pi_params\",pi_params),\n (\"C\",C)]\n print(parameters) \n \n #define model architecture\n index = T.lscalar()\n # shape=([sent_sum|batch_size], [sent_len|img_h]): 即共有sent_sum句话,每句由sent_len个单词的id组成\n x = T.matrix('x')\n # shape=(sent_sum, 1) \n y = T.ivector('y')\n # shape=(vocal_size, word_size)\n Words = theano.shared(value = U, name = \"Words\")\n zero_vec_tensor = T.vector()\n zero_vec = np.zeros(img_w)\n set_zero = theano.function([zero_vec_tensor], updates=[(Words, T.set_subtensor(Words[0,:], zero_vec_tensor))], allow_input_downcast=True)\n # x.flatten(): 将 x 按行展开\n # shape=(sent_sum,1,sent_len,word_size)\n # 对应于图像,其意思即为:共有sent_sum张图像,每张图像的通道为1且大小为sent_len×word_size\n layer0_input = Words[T.cast(x.flatten(),dtype=\"int32\")].reshape((x.shape[0], 1, x.shape[1], Words.shape[1]))\n conv_layers = []\n layer1_inputs = []\n # 第1层输入有filter_hs种卷积核\n for i in xrange(len(filter_hs)):\n # value=[filter_sum,filter_layer,filter_h,filter_w]\n # 即共有filter_sum个卷积核,每个卷积核的大小为word_h×word_w且层数/通道为filter_layer\n filter_shape = filter_shapes[i]\n pool_size = pool_sizes[i]\n # image_shape is actually the shape of input\n conv_layer = LeNetConvPoolLayer(rng, input=layer0_input, image_shape=(batch_size, 1, img_h, img_w),\n filter_shape=filter_shape, poolsize=pool_size, non_linear=conv_non_linear)\n # flatten(axis):axis>0, 即将tensor从axis维度开始的所有维度进行“坍缩”,具体如下\n # conv_layer.output: shape=(sent_sum,filter_sum)\n # layer1_input: shape=(sent_sum,filter_sum)\n layer1_input = conv_layer.output.flatten(2)\n conv_layers.append(conv_layer)\n layer1_inputs.append(layer1_input)\n # shape=(sent_sum, filter_sum*len(filter_hs)=300)\n layer1_input = T.concatenate(layer1_inputs, 1)\n hidden_units[0] = feature_maps*len(filter_hs)\n # 实际上,这里的CNN仅有两层:input-conv(-max_pool)-output\n classifier = MLPDropout(rng, input=layer1_input, layer_sizes=hidden_units, activations=activations, dropout_rates=dropout_rate)\n \n # build the feature of BUT-rule\n # shape=([sent_sum|batch_size], [sent_len|img_h]): 
即共有sent_sum句话,每句由sent_len个单词的id组成\n f_but = T.fmatrix('f_but')\n # shape=(batch_size,1)\n f_but_ind = T.fmatrix('f_ind') # indicators\n f_but_layer0_input = Words[T.cast(f_but.flatten(),dtype=\"int32\")].reshape((f_but.shape[0],1,f_but.shape[1],Words.shape[1]))\n f_but_pred_layers = []\n for conv_layer in conv_layers:\n # shape=(batch_size, filter_sum=filter_shape[0], 1, 1)\n # after flatten: shape=(batch_size, filter_sum)\n f_but_layer0_output = conv_layer.predict(f_but_layer0_input, batch_size)\n f_but_pred_layers.append(f_but_layer0_output.flatten(2))\n # shape=(batch_size, filter_sum*len(filter_hs)=300)\n f_but_layer1_input = T.concatenate(f_but_pred_layers, 1)\n # shape=(batch_size, class=2)\n f_but_y_pred_p = classifier.predict_p(f_but_layer1_input)\n # shape=(batch_size, label+class=1+2=3)\n f_but_full = T.concatenate([f_but_ind,f_but_y_pred_p], axis=1) # batch_size x 1 + batch_size x K\n f_but_full = theano.gradient.disconnected_grad(f_but_full)\n\n # add logic layer\n nclasses = 2\n rules = [FOL_But(nclasses, x, f_but_full)]\n rule_lambda = [1]\n new_pi = get_pi(cur_iter=0, params=pi_params)\n logic_nn = LogicNN(rng, input=x, network=classifier, rules=rules, rule_lambda=rule_lambda, pi=new_pi, C=C)\n \n # define parameters of the model and update functions using adadelta\n # list\n params_p = logic_nn.params_p\n for conv_layer in conv_layers:\n # append list\n params_p += conv_layer.params\n if non_static:\n #if word vectors are allowed to change, add them as model parameters\n params_p += [Words]\n # 公式 (2)——objective function\n cost_p = logic_nn.negative_log_likelihood(y) \n dropout_cost_p = logic_nn.dropout_negative_log_likelihood(y) \n grad_updates_p = sgd_updates_adadelta(params_p, dropout_cost_p, lr_decay, 1e-6, sqr_norm_lim)\n \n # shuffle dataset and assign to mini batches. 
if dataset size is not a multiple of mini batches, replicate\n # extra data (at random)\n np.random.seed(3435)\n # training data\n if datasets[0].shape[0] % batch_size > 0:\n extra_data_num = batch_size - datasets[0].shape[0] % batch_size\n # shuffle both train data and features\n permutation_order = np.random.permutation(datasets[0].shape[0])\n train_set = datasets[0][permutation_order]\n extra_data = train_set[:extra_data_num]\n new_data=np.append(datasets[0],extra_data,axis=0)\n new_fea = {}\n train_fea = datasets[3]\n for k in train_fea.keys():\n train_fea_k = train_fea[k][permutation_order]\n extra_fea = train_fea_k[:extra_data_num]\n new_fea[k] = np.append(train_fea[k],extra_fea,axis=0)\n train_text = datasets[6][permutation_order]\n extra_text = train_text[:extra_data_num]\n new_text=np.append(datasets[6],extra_text,axis=0)\n else:\n new_data = datasets[0]\n new_fea = datasets[3]\n new_text = datasets[6]\n # shuffle both training data and features\n permutation_order = np.random.permutation(new_data.shape[0])\n new_data = new_data[permutation_order]\n for k in new_fea.keys():\n new_fea[k] = new_fea[k][permutation_order]\n new_text = new_text[permutation_order]\n n_batches = new_data.shape[0] / batch_size\n n_train_batches = n_batches\n train_set = new_data\n train_set_x, train_set_y = shared_dataset((train_set[:,:img_h],train_set[:,-1]))\n train_fea = new_fea\n train_fea_but_ind = train_fea['but_ind'].reshape([train_fea['but_ind'].shape[0],1])\n train_fea_but_ind = shared_fea(train_fea_but_ind)\n for k in new_fea.keys():\n if k!='but_text':\n train_fea[k] = shared_fea(new_fea[k])\n\n # val data\n if datasets[1].shape[0] % batch_size > 0:\n extra_data_num = batch_size - datasets[1].shape[0] % batch_size\n # shuffle both val data and features\n permutation_order = np.random.permutation(datasets[1].shape[0])\n val_set = datasets[1][permutation_order]\n extra_data = val_set[:extra_data_num]\n new_val_data=np.append(datasets[1],extra_data,axis=0)\n new_val_fea = {}\n val_fea = datasets[4]\n for k in val_fea.keys():\n val_fea_k = val_fea[k][permutation_order]\n extra_fea = val_fea_k[:extra_data_num]\n new_val_fea[k] = np.append(val_fea[k],extra_fea,axis=0)\n val_text = datasets[7][permutation_order]\n extra_text = val_text[:extra_data_num]\n new_val_text = np.append(datasets[7],extra_text,axis=0)\n else:\n new_val_data = datasets[1]\n new_val_fea = datasets[4]\n new_val_text = datasets[7]\n val_set = new_val_data\n val_set_x, val_set_y = shared_dataset((val_set[:,:img_h],val_set[:,-1]))\n n_batches = new_val_data.shape[0] / batch_size\n n_val_batches = n_batches\n val_fea = new_val_fea\n val_fea_but_ind = val_fea['but_ind'].reshape([val_fea['but_ind'].shape[0],1])\n val_fea_but_ind = shared_fea(val_fea_but_ind)\n for k in val_fea.keys():\n if k!='but_text':\n val_fea[k] = shared_fea(val_fea[k])\n\n # test data\n test_set_x = datasets[2][:,:img_h] \n test_set_y = np.asarray(datasets[2][:,-1],\"int32\")\n test_fea = datasets[5]\n test_fea_but_ind = test_fea['but_ind']\n test_fea_but_ind = test_fea_but_ind.reshape([test_fea_but_ind.shape[0],1])\n test_text = datasets[8]\n\n ### compile theano functions to get train/val/test errors\n val_model = theano.function([index], logic_nn.errors(y),\n givens={\n x: val_set_x[index * batch_size: (index + 1) * batch_size],\n y: val_set_y[index * batch_size: (index + 1) * batch_size],\n f_but: val_fea['but'][index * batch_size: (index + 1) * batch_size],\n f_but_ind: val_fea_but_ind[index * batch_size: (index + 1) * batch_size,:] },\n 
allow_input_downcast=True,\n on_unused_input='warn')\n \n test_model = theano.function([index], logic_nn.errors(y),\n givens={\n x: train_set_x[index * batch_size: (index + 1) * batch_size],\n y: train_set_y[index * batch_size: (index + 1) * batch_size],\n f_but: train_fea['but'][index * batch_size: (index + 1) * batch_size],\n f_but_ind: train_fea_but_ind[index * batch_size: (index + 1) * batch_size,:]},\n allow_input_downcast=True,\n on_unused_input='warn')\n\n train_model = theano.function([index], cost_p, updates=grad_updates_p,\n givens={\n x: train_set_x[index*batch_size:(index+1)*batch_size],\n y: train_set_y[index*batch_size:(index+1)*batch_size],\n f_but: train_fea['but'][index*batch_size:(index+1)*batch_size],\n f_but_ind: train_fea_but_ind[index*batch_size:(index+1)*batch_size,:]},\n allow_input_downcast = True,\n on_unused_input='warn')\n\n ### setup testing\n test_size = test_set_x.shape[0]\n print('test size ', test_size) \n test_pred_layers = []\n test_layer0_input = Words[T.cast(x.flatten(),dtype=\"int32\")].reshape((test_size,1,img_h,Words.shape[1]))\n f_but_test_pred_layers = []\n f_but_test_layer0_input = Words[T.cast(f_but.flatten(),dtype=\"int32\")].reshape((test_size,1,img_h,Words.shape[1]))\n for conv_layer in conv_layers:\n test_layer0_output = conv_layer.predict(test_layer0_input, test_size)\n test_pred_layers.append(test_layer0_output.flatten(2))\n f_but_test_layer0_output = conv_layer.predict(f_but_test_layer0_input, test_size)\n f_but_test_pred_layers.append(f_but_test_layer0_output.flatten(2))\n test_layer1_input = T.concatenate(test_pred_layers, 1)\n f_but_test_layer1_input = T.concatenate(f_but_test_pred_layers, 1)\n f_but_test_y_pred_p = classifier.predict_p(f_but_test_layer1_input)\n f_but_test_full = T.concatenate([f_but_ind,f_but_test_y_pred_p],axis=1) # Ns x 1 + Ns x K\n\n # transform to shared variables\n test_set_x_shr, test_set_y_shr = shared_dataset((test_set_x,test_set_y))\n\n test_q_y_pred, test_p_y_pred = logic_nn.predict(test_layer1_input,\n test_set_x_shr,\n [f_but_test_full])\n test_q_error = T.mean(T.neq(test_q_y_pred, y))\n test_p_error = T.mean(T.neq(test_p_y_pred, y))\n test_model_all = theano.function([x,y,f_but,f_but_ind],\n [test_q_error, test_p_error], allow_input_downcast = True,\n on_unused_input='warn')\n \n ### start training over mini-batches\n print('... 
training')\n epoch = 0\n batch = 0\n best_val_q_perf = 0\n val_p_perf = 0\n val_q_perf = 0\n cost_epoch = 0 \n stop_count = 0\n while (epoch < n_epochs):\n start_time = time.time()\n epoch = epoch + 1\n # train\n if shuffle_batch:\n for minibatch_index in np.random.permutation(range(n_train_batches)):\n batch = batch + 1\n new_pi = get_pi(cur_iter=batch*1./n_train_batches, params=pi_params)\n logic_nn.set_pi(new_pi)\n cost_epoch = train_model(minibatch_index)\n set_zero(zero_vec)\n else:\n for minibatch_index in xrange(n_train_batches):\n batch = batch + 1\n new_pi = get_pi(cur_iter=batch*1./n_train_batches, params=pi_params)\n logic_nn.set_pi(new_pi)\n cost_epoch = train_model(minibatch_index) \n set_zero(zero_vec)\n # eval\n train_losses = [test_model(i) for i in xrange(n_train_batches)]\n train_losses = np.array(train_losses)\n train_q_perf = 1 - np.mean(train_losses[:,0])\n train_p_perf = 1 - np.mean(train_losses[:,1])\n val_losses = [val_model(i) for i in xrange(n_val_batches)]\n val_losses = np.array(val_losses)\n val_q_perf = 1 - np.mean(val_losses[:,0])\n val_p_perf = 1 - np.mean(val_losses[:,1])\n print('epoch: %i, training time: %.2f secs; (q): train perf: %.4f %%, val perf: %.4f %%; (p): train perf: %.4f %%, val perf: %.4f %%' % \\\n (epoch, time.time()-start_time, train_q_perf * 100., val_q_perf*100., train_p_perf * 100., val_p_perf*100.))\n test_loss = test_model_all(test_set_x,test_set_y,test_fea['but'],test_fea_but_ind)\n test_loss = np.array(test_loss)\n test_perf = 1 - test_loss\n print('test perf: q %.4f %%, p %.4f %%' % (test_perf[0]*100., test_perf[1]*100.))\n if val_q_perf > best_val_q_perf:\n best_val_q_perf = val_q_perf\n ret_test_perf = test_perf\n stop_count = 0\n else:\n stop_count += 1\n if stop_count == patience:\n break\n return ret_test_perf",
"async def infer_max_pool2d_grad(\n self,\n engine,\n input: lib.AbstractArray,\n kernel_size: lib.u64tup_typecheck,\n stride: lib.u64tup_typecheck,\n padding: lib.u64tup_typecheck,\n dilation: lib.u64tup_typecheck,\n ceil_mode: xtype.Bool,\n dout: lib.AbstractArray,\n):\n return input",
"def cnn_oned(self,BINS,WIN_LEN):\n\n print(\"Attention: The current TF Version requirs weights to be saved seperatly in sparsly connected Nets\")\n\n ceil_bins=joblib.load(\"ceil_bins.pkl\")\n ceil_bins=list(ceil_bins)\n\n noise_fft = keras.Input((BINS,WIN_LEN))\n\n \"\"\"Split up Subbands from STFT\"\"\"\n\n group=[1]*23\n sum_of_bins=0\n ceil_bins[22]=56\n\n for k in range(0,len(ceil_bins)):\n print(k)\n ## FFT Bins getting split for processing with specific neurons\n sum_of_bins=sum_of_bins+ceil_bins[k]\n if k==0:\n group[k]= Lambda(lambda x: x[:,0:2], output_shape=((2,WIN_LEN)))(noise_fft)\n print(group[k])\n if k==22:\n print(\"K=22\")\n print( Lambda(lambda x: x[:,201:], output_shape=((56,WIN_LEN)))(noise_fft))\n group[k]= Lambda(lambda x: x[:,201:], output_shape=((56,WIN_LEN)))(noise_fft)\n else:\n print(int(sum_of_bins+ceil_bins[k]))\n group[k]=Lambda(lambda x: x[:,int(sum_of_bins):int(sum_of_bins+ceil_bins[k])], output_shape=((int(ceil_bins[k]),WIN_LEN)))(noise_fft)\n print(group[k])\n\n\n for e in range(0,len(ceil_bins)):\n group[e]=tf.keras.layers.Conv1D(64, 4, strides=1, padding='same',dilation_rate=1, activation='relu')(group[e])\n group[e]=tf.keras.layers.MaxPooling1D(pool_size=2, strides=None, padding='same', data_format=None)(group[e])\n group[e]=tf.keras.layers.Conv1D(64, 8, strides=1, padding='same',dilation_rate=2, activation='relu')(group[e])\n group[e]=tf.keras.layers.MaxPooling1D(pool_size=2, strides=None, padding='same', data_format=None)(group[e])\n group[e]=tf.keras.layers.Conv1D(64, 16, strides=1, padding='same',dilation_rate=4, activation='relu')(group[e])\n group[e]=tf.keras.layers.MaxPooling1D(pool_size=2, strides=None, padding='same', data_format=None)(group[e])\n\n for j in range(0,len(ceil_bins)):\n group[j]=tf.keras.layers.GlobalAveragePooling1D()(group[j])\n\n for b in range(0,len(ceil_bins)):\n group[b]=tf.expand_dims(group[b],1)\n\n\n for i in range(0,len(ceil_bins)):\n group[i]=tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(32))(group[i])\n\n\n x_Tensor = Concatenate(axis=1)([group[0],group[1],group[2],group[3],group[4],group[5],group[6],group[7],group[8], \\\n group[9],group[10],group[11],group[12],group[13],group[14],group[15],group[16],group[17],\\\n group[18],group[19],group[20],group[21],group[22]])\n\n\n x_Tensor = Dense(23*64, activation='relu')(x_Tensor)\n x = tf.keras.layers.Dropout(0.05)(x_Tensor)\n outputs = tf.keras.layers.Dense(257, activation='sigmoid')(x)\n model = tf.keras.Model(noise_fft, outputs)\n\n return model",
"def graph_pooling(gw, node_feat, pool_type):\n graph_feat = op.nested_lod_reset(node_feat, gw.graph_lod)\n graph_feat = L.sequence_pool(graph_feat, pool_type)\n return graph_feat",
"def nasnet_maxpool():\n return nn.MaxPool2D(\n pool_size=3,\n strides=2,\n padding=1)",
"def inference(data, conv_settings, full_settings, n_labels, dropout_pl):\n assert len(conv_settings) > 0 and len(full_settings) > 0\n\n tf.image_summary('input', data, max_images=3, collections=None, name=None)\n\n # 2D convolution, with 'SAME' padding (i.e. the output feature map has\n # the same size as the input). Note that {strides} is a 4D array whose\n # shape matches the data layout: [image index, y, x, depth].\n\n # Add first convl layer\n with tf.variable_scope('conv1') as scope:\n initializer = tf.truncated_normal_initializer(stddev=0.1,\n seed=SEED,\n dtype=tf.float32)\n kernel = tf.get_variable('weights',\n [5, 5, N_CHANNELS, conv_settings[0]],\n initializer=initializer)\n conv = tf.nn.conv2d(data,\n kernel,\n strides=[1, 1, 1, 1],\n padding='SAME')\n initializer = tf.zeros_initializer([conv_settings[0]], dtype=data_type())\n biases = tf.get_variable('biases', initializer=initializer)\n bias = tf.nn.bias_add(conv, biases)\n relu = tf.nn.relu(bias, name=scope.name)\n\n pool = tf.nn.max_pool(relu,\n ksize=[1, 2, 2, 1],\n strides=[1, 2, 2, 1],\n padding='SAME',\n name='pool1')\n\n # tensor = tf.split(3, conv_settings[0], pool, name='split')\n # for i in xrange(len(tensor)):\n # tf.image_summary('conv1_kernel-' + str(i),\n # tensor[i],\n # max_images=3,\n # collections=None,\n # name=None)\n\n # Add second convl layer\n if len(conv_settings) > 1:\n with tf.variable_scope('conv2') as scope:\n initializer = tf.truncated_normal_initializer(stddev=0.1,\n seed=SEED,\n dtype=data_type())\n kernel = tf.get_variable('weights',\n [5, 5, conv_settings[0], conv_settings[1]],\n initializer=initializer)\n conv = tf.nn.conv2d(pool,\n kernel,\n strides=[1, 1, 1, 1],\n padding='SAME')\n initializer = tf.constant_initializer(0.1, dtype=data_type())\n biases = tf.get_variable('biases',\n shape=[conv_settings[1]],\n initializer=initializer)\n bias = tf.nn.bias_add(conv, biases)\n relu = tf.nn.relu(bias, name=scope.name)\n\n pool = tf.nn.max_pool(relu,\n ksize=[1, 2, 2, 1],\n strides=[1, 2, 2, 1],\n padding='SAME',\n name='pool2')\n\n # Add first dense layer\n with tf.variable_scope('local1') as scope:\n # Reshape the feature map cuboid into a 2D matrix to feed it to the\n # fully connected layers.\n pool_shape = pool.get_shape().as_list()\n reshape = tf.reshape(\n pool,\n [pool_shape[0], pool_shape[1] * pool_shape[2] * pool_shape[3]])\n # Fully connected layer. Note that the '+' operation automatically\n # broadcasts the biases.\n initializer = tf.truncated_normal_initializer(stddev=0.1,\n seed=SEED,\n dtype=data_type())\n # img height/width after pooling, note each convl layer is followed by a\n # single pool layer\n img_height = (IMAGE_SIZE // (2 * len(conv_settings)))\n img_width = (IMAGE_SIZE // (2 * len(conv_settings)))\n img_size = img_width * img_height\n # convl_sizes[-1] images are produced by the last convl layer, each pixel\n # in those images is connected with each node in the dense layer\n fc_size = conv_settings[-1] * img_size\n weights = tf.get_variable('weights',\n [fc_size, full_settings[0]],\n initializer=initializer)\n initializer = tf.constant_initializer(0.1, dtype=data_type())\n biases = tf.get_variable('biases',\n shape=[full_settings[0]],\n initializer=initializer)\n local1 = tf.nn.relu(tf.matmul(reshape, weights) + biases, name=scope.name)\n # Add a 50% dropout during training only. 
Dropout also scales\n # activations such that no rescaling is needed at evaluation time.\n\n with tf.name_scope('dropout'):\n local1 = tf.nn.dropout(local1, dropout_pl, seed=SEED)\n\n # Add final softmax layer\n with tf.variable_scope('softmax_linear') as scope:\n initializer = tf.truncated_normal_initializer(stddev=0.1,\n seed=SEED,\n dtype=data_type())\n weights = tf.get_variable('weights',\n shape=[full_settings[0], n_labels],\n initializer=initializer)\n initializer = tf.constant_initializer(0.1, dtype=data_type())\n biases = tf.get_variable('biases',\n shape=[n_labels],\n initializer=initializer)\n softmax_linear = tf.add(tf.matmul(local1, weights),\n biases,\n name=scope.name)\n\n return softmax_linear"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns the parent directory object that should be used for a given document. If the document is tagged with a project name (a non-RESERVED DocumentTag), a Directory object for the first project tag found is returned. Otherwise, the owner's home directory is returned.
|
def _get_parent_directory(self, document):
    parent_dir = self.home_dir
    # any tag outside the RESERVED set is treated as a project name
    project_tags = document.tags.exclude(tag__in=DocumentTag.RESERVED)
    if project_tags.exists():
        first_tag = project_tags[0]
        # reuse an existing project directory, or create it under the home dir
        parent_dir, created = Directory.objects.get_or_create(
            owner=self.user,
            name=first_tag.tag,
            parent_directory=self.home_dir
        )
    return parent_dir
|
[
"def parse_parent(docname):\n\n lineage = docname.split('/')\n lineage_count = len(lineage)\n\n if docname == 'index':\n # This is the top of the Sphinx project\n parent = None\n elif lineage_count == 1:\n # This is a non-index doc in root, e.g. about\n parent = 'index'\n elif lineage_count == 2 and lineage[-1] == 'index':\n # This is blog/index, parent is the root\n parent = 'index'\n elif lineage_count == 2:\n # This is blog/about\n parent = lineage[0] + '/index'\n elif lineage[-1] == 'index':\n # This is blog/sub/index\n parent = '/'.join(lineage[:-2]) + '/index'\n else:\n # This should be blog/sub/about\n parent = '/'.join(lineage[:-1]) + '/index'\n\n return parent",
"def _get_parentDocument(self) -> \"adsk::core::Ptr< adsk::core::Document >\" :\n return _core.Product__get_parentDocument(self)",
"def _get_parentProject(self) -> \"adsk::core::Ptr< adsk::core::DataProject >\" :\n return _core.DataFolder__get_parentProject(self)",
"def parent(self):\n if self.is_leaf:\n return self.relative('.')\n return self.relative('..')",
"def get_repo_parent(path):\n # path is a repository\n if is_repo(path):\n return Local(path)\n\n # path is inside a repository\n elif not os.path.isdir(path):\n _rel = ''\n while path and path != '/':\n if is_repo(path):\n return Local(path)\n else:\n _rel = os.path.join(os.path.basename(path), _rel)\n path = os.path.dirname(path)\n return path",
"def _get_parentProject(self) -> \"adsk::core::Ptr< adsk::core::DataProject >\" :\n return _core.DataFile__get_parentProject(self)",
"def get_parent_dir(path):\n\treturn os.path.dirname(os.path.abspath(path))",
"def f_get_parent(self):\n if self.v_is_root:\n raise TypeError('Root does not have a parent')\n elif self.v_location == '':\n return self.v_root\n else:\n return self.v_root.f_get(self.v_location, fast_access=False, shortcuts=False)",
"def parent(self):\n parent_key = self.parent_key()\n if parent_key:\n return db.get(parent_key)",
"def closest_parent(self):\n # type: () -> Optional[Tag]\n parent = self.parent\n while parent:\n if parent.name in self.PARENT_TAGS:\n return parent\n parent = parent.parent\n return None # pragma: no cover",
"def GetDocument(self):\n if self.parent:\n if isinstance(self.parent, Document):\n return self.parent\n else:\n return self.parent.GetDocument()\n else:\n return None",
"def _get_parentFolder(self) -> \"adsk::core::Ptr< adsk::core::DataFolder >\" :\n return _core.DataFolder__get_parentFolder(self)",
"def _get_parent(*, schema: oa_types.Schema, schemas: oa_types.Schemas) -> str:\n ref = peek.ref(schema=schema, schemas=schemas)\n assert ref is not None\n parent, _ = ref_helper.get_ref(ref=ref, schemas=schemas)\n return parent",
"def _GetParentContainer(self, cwc):\n if cwc.uid == 1:\n return self.ToDoList().root\n names_seen = set()\n for f, path in self.ToDoList().ContainersPreorder():\n names_seen.add(f.name)\n if f.uid == cwc.uid:\n if not path:\n raise InvalidPathError('Already at the root Folder; cannot ascend.')\n return path[0]\n raise InvalidPathError(\n 'No such folder. All folders:\\n%s'\n % (common.Indented('\\n'.join(sorted(names_seen)))))",
"def get_parent(reporter):\n\n parents = Institution.objects.filter(\n year=reporter.year,\n respondent_id=reporter.parent_id,\n zip_code__state=reporter.parent_state)\n if len(parents) > 0:\n return parents[0]\n else:\n # Use the RSSD ID to look for the parent. There's at least one case\n # where the RSSD ID matches, but the FFIEC ID does not. Also, in cases\n # where the RSSD ID matches, the state does not. We'll go based on\n # RSSD ID - but that still indicates weirdness in the data.\n parents = Institution.objects.filter(\n year=reporter.year,\n rssd_id=reporter.parent_rssd_id)\n\n if len(parents) > 0:\n return parents[0]",
"def _get_parent_element(self, root_element, element, tag):\n parent_element = root_element\n find_string = \".//%s[@id='%s']/..\" % (element.tag, element.get('id'))\n while parent_element is not None:\n parent_element = root_element.find(find_string)\n try:\n if parent_element.tag is tag:\n parent = parent_element\n parent_element = None\n else:\n find_string = \"%s/..\" % find_string\n except:\n continue\n\n return parent",
"def get_parent ( self ):\n return self.parent_ref.deref_safe()",
"def _get_parent(self) -> \"adsk::core::Ptr< adsk::core::Application >\" :\n return _core.Document__get_parent(self)",
"def GetParentFileEntry(self):\n location = getattr(self.path_spec, u'location', None)\n if location is None:\n return\n\n parent_location = self._file_system.DirnamePath(location)\n if parent_location is None:\n return\n if parent_location == u'':\n parent_location = self._file_system.PATH_SEPARATOR\n\n parent_path_spec = getattr(self.path_spec, u'parent', None)\n path_spec = zip_path_spec.ZipPathSpec(\n location=parent_location, parent=parent_path_spec)\n return ZipFileEntry(self._resolver_context, self._file_system, path_spec)",
"def GetParentFileEntry(self):\n location = getattr(self.path_spec, u'location', None)\n if location is None:\n return\n\n parent_location = self._file_system.DirnamePath(location)\n if parent_location is None:\n return\n if parent_location == u'':\n parent_location = self._file_system.PATH_SEPARATOR\n\n parent_path_spec = getattr(self.path_spec, u'parent', None)\n path_spec = tar_path_spec.TARPathSpec(\n location=parent_location, parent=parent_path_spec)\n return TARFileEntry(self._resolver_context, self._file_system, path_spec)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Syncs (creates) Document2Permissions based on the DocumentPermissions found for a given document.
|
def _sync_permissions(self, document, document2):
    doc_permissions = DocumentPermission.objects.filter(doc=document)
    for perm in doc_permissions:
        try:
            doc2_permission, created = Document2Permission.objects.get_or_create(
                doc=document2, perms=perm.perms)
            # related managers are always truthy, so check membership explicitly
            if perm.users.exists():
                doc2_permission.users.add(*perm.users.all())
            if perm.groups.exists():
                doc2_permission.groups.add(*perm.groups.all())
        except Exception:
            # a permission that cannot be synced should not abort the whole run
            pass
|
[
"def test_set_document_permissions(self):\n\t\tcreate_document()\n\t\t# Get a document to work with\n\t\tdocument = Document.objects.all()[0]\n\n\t\t# Save the Layers current permissions\n\t\tcurrent_perms = document.get_all_level_info() \n\t \n\t\t# Set the Permissions\n\t\tdocuments.views.set_document_permissions(document, self.perm_spec)\n\n\t\t# Test that the Permissions for ANONYMOUS_USERS and AUTHENTICATED_USERS were set correctly\t\t \n\t\tself.assertEqual(document.get_gen_level(geonode.core.models.ANONYMOUS_USERS), document.LEVEL_NONE) \n\t\tself.assertEqual(document.get_gen_level(geonode.core.models.AUTHENTICATED_USERS), document.LEVEL_NONE)\n\n\t\t# Test that previous permissions for users other than ones specified in\n\t\t# the perm_spec (and the document owner) were removed\n\t\tusers = [n for (n, p) in self.perm_spec['users']]\n\t\tlevels = document.get_user_levels().exclude(user__username__in = users + [document.owner])\n\t\tself.assertEqual(len(levels), 0)\n\t \n\t\t# Test that the User permissions specified in the perm_spec were applied properly\n\t\tfor username, level in self.perm_spec['users']:\n\t\t\tuser = geonode.maps.models.User.objects.get(username=username)\n\t\t\tself.assertEqual(document.get_user_level(user), level)",
"def update_document(self, document, doc_id, update_as_script):\n def _get_update_action(source, id_suffix=''):\n action = {'_id': doc_id + id_suffix, '_op_type': 'update'}\n if update_as_script:\n action.update(source)\n else:\n action['doc'] = source\n\n return action\n\n if self.plugin.requires_role_separation:\n user_doc = (self._remove_admin_fields(document)\n if update_as_script else document)\n actions = [_get_update_action(document, ADMIN_ID_SUFFIX),\n _get_update_action(user_doc, USER_ID_SUFFIX)]\n else:\n actions = [_get_update_action(document)]\n result = helpers.bulk(\n client=self.engine,\n index=self.index_name,\n doc_type=self.document_type,\n chunk_size=self.index_chunk_size,\n actions=actions)\n LOG.debug(\"Update result: %s\", result)",
"def add_user_perm(self, user, doc, write = False):\n\n try:\n perm = self.user_perms.get(doc = doc, user = user)\n perm.write = write\n perm.save()\n except ObjectDoesNotExist:\n self.user_perms.create(doc = doc, user = user, write = write)",
"def add_group_perm(self, group, doc, write = False):\n\n try:\n perm = self.group_perms.get(group = group, doc = doc)\n perm.write = write\n perm.save()\n except ObjectDoesNotExist:\n self.group_perms.create(doc = doc, group = group, write = write)",
"def _assign_permissions(contact, permission, targets):\n for target in targets:\n assign_perm(permission, target, contact)",
"def process_document_admin_acls(connection, document_ids, new_doc_id):\n acls_to_copy = get_acls_to_merge(connection, document_ids)\n for old_acl in acls_to_copy:\n create_acl(connection, old_acl, new_doc_id)\n old_acl_ids = [a.id for a in acls_to_copy]\n delete_old_acls(connection, old_acl_ids)\n delete_revisions(connection, \"AccessControlList\", old_acl_ids)",
"def process_document(connection, document_data):\n first_document_data = document_data[0]\n doc_id = copy_document(connection, first_document_data)\n document_ids = [d.id for d in document_data]\n process_document_admin_acls(connection, document_ids, doc_id)\n\n process_relationships(connection, document_data, doc_id)\n\n relationship_ids = [d.rel_id for d in document_data]\n delete_documents(connection, document_ids)\n delete_revisions(connection, \"Document\", document_ids)\n delete_relationships(connection, relationship_ids)",
"def bulkSave(self, objList: List[Permission], tokenData: TokenData):",
"def update_object_permissions(self, user, account, container, name,\n permissions):\n return",
"def update_document_sharing_info(self, user_role_assignments,\n validate_existing_permissions=None, additive_mode=None,\n send_server_managed_notification=None, custom_message=None,\n include_anonymous_links_in_notification=None, propagate_acl=None):\n\n return_type = ClientResult(self.context, ClientValueCollection(UserSharingResult))\n\n def _loaded():\n resource_address = SPResPath.create_absolute(self.context.base_url, str(self.server_relative_path))\n DocumentSharingManager.update_document_sharing_info(self.context,\n str(resource_address),\n user_role_assignments,\n validate_existing_permissions,\n additive_mode,\n send_server_managed_notification,\n custom_message,\n include_anonymous_links_in_notification,\n propagate_acl,\n return_type)\n\n self.ensure_property(\"ServerRelativePath\", _loaded)\n return return_type",
"def _updateCalendarPermissions(self, account):\n calendar = self.getCalendar(account)\n\n if calendar:\n calendar.setViewers(account, self.getAuthorisedUsers(account))",
"def index_document(self, document):\n\n from .models import Index\n\n with transaction.atomic():\n self.remove_document(document)\n\n # Only update indexes where the document type is found\n for index in Index.objects.filter(enabled=True, document_types=document.document_type):\n root_instance, created = self.get_or_create(\n index_template_node=index.template_root, parent=None\n )\n for template_node in index.template_root.get_children():\n self.cascade_eval(document, template_node, root_instance)",
"def test_documents_access_manager_read_write(self):\n\n folder_a = self.env['documents.folder'].create({\n 'name': 'folder A',\n 'group_ids': [(6, 0, [self.ref('documents.group_documents_manager')])],\n })\n\n document_a = self.env['documents.document'].create({\n 'name': 'document A',\n 'folder_id': folder_a.id,\n })\n\n with self.assertRaises(AccessError):\n document_a.with_user(self.basic_user).read()\n with self.assertRaises(AccessError):\n document_a.with_user(self.test_group_user).read()\n with self.assertRaises(AccessError):\n document_a.with_user(self.document_user).read()\n with self.assertRaises(AccessError):\n document_a.with_user(self.basic_user).write({'name': 'nameChangedA'})\n with self.assertRaises(AccessError):\n document_a.with_user(self.test_group_user).write({'name': 'nameChangedA'})\n with self.assertRaises(AccessError):\n document_a.with_user(self.document_user).write({'name': 'nameChangedA'})\n\n document_a.with_user(self.document_manager).write({'name': 'nameChangedManagerA'})\n self.assertEqual(document_a.name, 'nameChangedManagerA',\n 'document manager should be able to write document_a')",
"def SetPermissions(self, script):\n\n self.CountChildMetadata()\n\n def recurse(item, current):\n # current is the (uid, gid, dmode, fmode) tuple that the current\n # item (and all its children) have already been set to. We only\n # need to issue set_perm/set_perm_recursive commands if we're\n # supposed to be something different.\n if item.dir:\n if current != item.best_subtree:\n script.SetPermissionsRecursive(\"/\"+item.name, *item.best_subtree)\n current = item.best_subtree\n\n if item.uid != current[0] or item.gid != current[1] or \\\n item.mode != current[2]:\n if item.uid is not None and item.gid is not None:\n script.SetPermissions(\"/\"+item.name, item.uid, item.gid, item.mode)\n\n for i in item.children:\n recurse(i, current)\n else:\n if item.uid != current[0] or item.gid != current[1] or \\\n item.mode != current[3]:\n script.SetPermissions(\"/\"+item.name, item.uid, item.gid, item.mode)\n\n recurse(self, (-1, -1, -1, -1))",
"def perm_doc_filter(self, user, input_docs, write = False):\n\n if user.is_superuser:\n return input_docs\n\n docs = []\n if write:\n user_perms = self.user_perms.filter(user = user, write = write)\n else:\n user_perms = self.user_perms.filter(user = user)\n\n\n for user_perm in user_perms:\n if user_perm.doc not in docs and user_perm.doc in input_docs:\n docs.append(user_perm.doc)\n\n groups = user.groups\n for group in groups.all():\n\n if write:\n group_perms = self.group_perms.filter(group = group,\n write = True)\n else:\n group_perms = self.group_perms.filter(group = group)\n\n for group_perm in group_perms:\n if group_perm.doc not in docs and group_perm.doc in input_docs:\n docs.append(group_perm.doc)\n\n return docs",
"def update_permission(self):\n\n from stat import S_IEXEC\n\n for data in self.files:\n if data not in ['iana', 'dir_structure']:\n stats = stat(self.destination + self.files[data])\n chmod(\n self.destination +\n self.files[data],\n stats.st_mode | S_IEXEC)\n\n return",
"def setAccessList(self, doc, access, save=False, recurse=False, user=None,\n progress=noProgress, setPublic=None, publicFlags=None, force=False):\n progress.update(increment=1, message='Updating ' + doc['name'])\n if setPublic is not None:\n self.setPublic(doc, setPublic, save=False)\n\n if publicFlags is not None:\n doc = self.setPublicFlags(doc, publicFlags, user=user, save=False, force=force)\n\n doc = AccessControlledModel.setAccessList(\n self, doc, access, user=user, save=save, force=force)\n\n if recurse:\n from .folder import Folder\n\n folderModel = Folder()\n folders = folderModel.findWithPermissions({\n 'parentId': doc['_id'],\n 'parentCollection': 'collection'\n }, user=user, level=AccessType.ADMIN)\n\n for folder in folders:\n folderModel.setAccessList(\n folder, access, save=True, recurse=True, user=user,\n progress=progress, setPublic=setPublic, publicFlags=publicFlags)\n\n return doc",
"def test_document_upload_schedule_virus_scan(\n self,\n virus_scan_document_apply_async,\n permissions,\n ):\n user = create_test_user(permission_codenames=permissions)\n proposition = PropositionFactory()\n entity_document = PropositionDocument.objects.create(\n proposition_id=proposition.pk,\n original_filename='test.txt',\n created_by=user,\n )\n\n url = reverse(\n 'api-v3:investment:proposition:document-item-callback',\n kwargs={\n 'proposition_pk': proposition.pk,\n 'project_pk': proposition.investment_project.pk,\n 'entity_document_pk': entity_document.pk,\n },\n )\n\n api_client = self.create_api_client(user=user)\n response = api_client.post(url)\n assert response.status_code == status.HTTP_200_OK\n\n entity_document.document.refresh_from_db()\n\n assert response.data == {\n 'id': str(entity_document.pk),\n 'av_clean': None,\n 'created_by': {\n 'id': str(entity_document.created_by.pk),\n 'first_name': entity_document.created_by.first_name,\n 'last_name': entity_document.created_by.last_name,\n 'name': entity_document.created_by.name,\n },\n\n 'original_filename': 'test.txt',\n 'url': _get_document_url(entity_document.proposition, entity_document),\n 'status': UploadStatus.VIRUS_SCANNING_SCHEDULED,\n 'created_on': format_date_or_datetime(entity_document.created_on),\n 'uploaded_on': format_date_or_datetime(entity_document.document.uploaded_on),\n }\n virus_scan_document_apply_async.assert_called_once_with(\n args=(str(entity_document.document.pk), ),\n )",
"def backwards(self, orm):\r\n Permission.objects.filter(codename=\"project_perm.submit_translations\"\r\n ).update(codename=\"project_perm.submit_file\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Allows testing management commands in a temporary migrations module. Wrap all invocations to makemigrations and squashmigrations with this context manager in order to avoid creating migration files in your source tree inadvertently. Takes the application label that will be passed to makemigrations or squashmigrations and the Python path to a migrations module. The migrations module is used as a template for creating the temporary migrations module. If it isn't provided, the application's migrations module is used, if it exists. Returns the filesystem path to the temporary migrations module.
|
def temporary_migration_module(self, app_label="migrations", module=None):
with tempfile.TemporaryDirectory() as temp_dir:
target_dir = tempfile.mkdtemp(dir=temp_dir)
with open(os.path.join(target_dir, "__init__.py"), "w"):
pass
target_migrations_dir = os.path.join(target_dir, "migrations")
if module is None:
module = apps.get_app_config(app_label).name + ".migrations"
try:
source_migrations_dir = module_dir(import_module(module))
except (ImportError, ValueError):
pass
else:
shutil.copytree(source_migrations_dir, target_migrations_dir)
with extend_sys_path(temp_dir):
new_module = os.path.basename(target_dir) + ".migrations"
with self.settings(MIGRATION_MODULES={app_label: new_module}):
yield target_migrations_dir
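A usage sketch, assuming the method is decorated with contextlib.contextmanager (the bare yield implies it) and that call_command and os are imported as in Django's test modules; the module path and assertion are illustrative:

def test_makemigrations_uses_temp_module(self):
    # Generated migration files land in the temporary copy, not the source tree.
    with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir:
        call_command("makemigrations", "migrations", verbosity=0)
        self.assertTrue(os.path.exists(migration_dir))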
|
[
"def temporary_migration_module(self, app_label='migrations', module=None):\n temp_dir = tempfile.mkdtemp()\n try:\n target_dir = tempfile.mkdtemp(dir=temp_dir)\n with open(os.path.join(target_dir, '__init__.py'), 'w'):\n pass\n target_migrations_dir = os.path.join(target_dir, 'migrations')\n\n if module is None:\n module = apps.get_app_config(app_label).name + '.migrations'\n\n try:\n source_migrations_dir = module_dir(import_module(module))\n except (ImportError, ValueError):\n pass\n else:\n shutil.copytree(source_migrations_dir, target_migrations_dir)\n\n with extend_sys_path(temp_dir):\n new_module = os.path.basename(target_dir) + '.migrations'\n with self.settings(MIGRATION_MODULES={app_label: new_module}):\n yield target_migrations_dir\n\n finally:\n shutil.rmtree(temp_dir)",
"def make_migration(app_label=\"tests\", from_state=None, to_state=None):\n\n app_labels = [app_label]\n\n loader = MigrationLoader(None, ignore_no_migrations=True)\n loader.check_consistent_history(connection)\n\n questioner = NonInteractiveMigrationQuestioner(\n specified_apps=app_labels, dry_run=False\n )\n\n autodetector = MigrationAutodetector(\n from_state or loader.project_state(),\n to_state or ProjectState.from_apps(apps),\n questioner,\n )\n\n changes = autodetector.changes(\n graph=loader.graph,\n trim_to_apps=app_labels or None,\n convert_apps=app_labels or None,\n migration_name=\"test\",\n )\n\n changes_for_app = changes.get(app_label)\n if not changes_for_app or len(changes_for_app) == 0:\n return None\n\n return changes_for_app[0]",
"def migrations_dir(self):\n module_path = self.migrations_module()\n try:\n module = importlib.import_module(module_path)\n except ImportError:\n # There's no migrations module made yet; guess!\n try:\n parent = importlib.import_module(\".\".join(module_path.split(\".\")[:-1]))\n except ImportError:\n # The parent doesn't even exist, that's an issue.\n raise exceptions.InvalidMigrationModule(\n application=self.application.__name__,\n module=module_path,\n )\n else:\n # Good guess.\n return os.path.join(os.path.dirname(parent.__file__), module_path.split(\".\")[-1])\n else:\n # Get directory directly\n return os.path.dirname(module.__file__)",
"def set_application(self, application, force_creation=False, verbose_creation=True):\n self._application = application\n if not hasattr(application, 'migrations') and not hasattr(application, 'south_migrations'):\n try:\n module = importlib.import_module(self.migrations_module())\n self._migrations = application.migrations = module\n except ImportError:\n if force_creation:\n self.create_migrations_directory(verbose_creation)\n module = importlib.import_module(self.migrations_module())\n self._migrations = application.migrations = module\n else:\n raise exceptions.NoMigrations(application)\n if hasattr(application, 'south_migrations'):\n self._load_migrations_module(application.south_migrations)\n else:\n self._load_migrations_module(application.migrations)",
"def migrate(app=None):\n require('site_dir')\n with cd(env.site_dir):\n run(\"python manage.py migrate {0}\".format(app or ''))",
"def migrate(app):\n with cd(SITE_SETTINGS['repo_dir']):\n vsu('./manage.py migrate %s' % app)",
"def migrations():\n with cd('%(site_dir)s' % env):\n run('%(python_path)spython ./manage.py migrate' % env)",
"def create_squashed(py_package, name, migration_number, forward_content, backward_content):\n if name is None:\n name = '%04d_squashed.py' % migration_number\n else:\n name = MigrationHelper.generate_migration_name(name, migration_number)\n fs_migration_directory = FileSystemHelper.get_package_migrations_directory(py_package)\n fs_file_path = path.join(fs_migration_directory, name)\n with open(fs_file_path, 'w') as file_descriptor:\n file_descriptor.write(MigrationHelper.MIGRATION_TEMPLATE % (forward_content, backward_content, ))\n return Migration(py_package, FileSystemHelper.trim_py_extension(name))",
"def migration(app, intitial=False):\n require('site_dir')\n with cd(env.site_dir):\n if intitial:\n run(\"python manage.py schemamigration {0} --initial\".format(app))\n else:\n run(\"python manage.py schemamigration {0} --auto\".format(app))",
"def test_migrate():\n mock = MagicMock(return_value=True)\n with patch.dict(djangomod.__salt__, {\"cmd.run\": mock}):\n assert djangomod.migrate(\"DJANGO_SETTINGS_MODULE\")",
"def fixture_run_globaldb_migrations() -> bool:\n return True",
"def test_module(modules_tmpdir, test_app):\n fake_extension = modules_tmpdir.join('fake_extension.py')\n fake_extension.write('\\n'.join((\n 'from henson import Extension',\n 'class FakeExtension(Extension):',\n ' def register_cli(self): pass',\n )))",
"def test_migrate(self):\n # Make sure no tables are created\n self.assertTableNotExists(\"migrations_author\")\n self.assertTableNotExists(\"migrations_tribble\")\n self.assertTableNotExists(\"migrations_book\")\n # Run the migrations to 0001 only\n call_command(\"migrate\", \"migrations\", \"0001\", verbosity=0)\n # Make sure the right tables exist\n self.assertTableExists(\"migrations_author\")\n self.assertTableExists(\"migrations_tribble\")\n self.assertTableNotExists(\"migrations_book\")\n # Run migrations all the way\n call_command(\"migrate\", verbosity=0)\n # Make sure the right tables exist\n self.assertTableExists(\"migrations_author\")\n self.assertTableNotExists(\"migrations_tribble\")\n self.assertTableExists(\"migrations_book\")\n # Unmigrate everything\n call_command(\"migrate\", \"migrations\", \"zero\", verbosity=0)\n # Make sure it's all gone\n self.assertTableNotExists(\"migrations_author\")\n self.assertTableNotExists(\"migrations_tribble\")\n self.assertTableNotExists(\"migrations_book\")",
"def apply_migrations():\n applied_migrations = False\n retries = 0\n\n with app.app_context():\n # The migrations repo resides in the virtual env.\n # Specifically, Pipenv installs the mci-database repo in the `src` directory,\n # since the Pipfile marks it as \"editable.\"\n path_to_virtual_env = os.environ['VIRTUAL_ENV']\n migrations_dir = os.path.join(\n path_to_virtual_env, 'src', 'mci-database', 'mci_database', 'db', 'migrations')\n\n while retries < MAX_RETRIES and applied_migrations is False:\n print('Attempting to apply migrations ({} of {})...'.format(\n retries + 1, MAX_RETRIES))\n try:\n # apply the migrations\n upgrade(directory=migrations_dir)\n applied_migrations = True\n except Exception:\n retries += 1\n sleep(SLEEP)",
"def ensure_migrations():",
"def migrate(self, *apps):\n with cd(self.cfg['django']['DJANGO_ROOT']):\n if not apps:\n local('python manage.py migrate')\n else:\n for app in apps:\n local('python manage.py migrate %s' % app)",
"def _app_path(tm_env, instance, uniq):\n return os.path.join(tm_env.apps_dir,\n '%s-%s' % (instance.replace('#', '-'), uniq))",
"def all_migrations(applications=None):\n if applications is None:\n applications = models.get_apps()\n for model_module in applications:\n # The app they've passed is the models module - go up one level\n app_path = \".\".join(model_module.__name__.split(\".\")[:-1])\n app = ask_for_it_by_name(app_path)\n try:\n yield Migrations(app)\n except exceptions.NoMigrations:\n pass",
"def fix_deletion_django_core_management_base_AppCommand_handle_app(utils):\n\n from django.core.management.base import CommandError, AppCommand\n\n def handle_app_config(self, app_config, **options):\n \"\"\"\n Perform the command's actions for app_config, an AppConfig instance\n corresponding to an application label given on the command line.\n \"\"\"\n try:\n # During the deprecation path, keep delegating to handle_app if\n # handle_app_config isn't implemented in a subclass.\n handle_app = self.handle_app\n except AttributeError:\n # Keep only this exception when the deprecation completes.\n raise NotImplementedError(\n \"Subclasses of AppCommand must provide \"\n \"a handle_app_config() method.\")\n else:\n utils.emit_warning(\n \"AppCommand.handle_app() is superseded by \"\n \"AppCommand.handle_app_config().\",\n RemovedInDjango19Warning, stacklevel=2)\n if app_config.models_module is None:\n raise CommandError(\n \"AppCommand cannot handle app '%s' in legacy mode \"\n \"because it doesn't have a models module.\"\n % app_config.label)\n return handle_app(app_config.models_module, **options)\n\n utils.inject_callable(AppCommand, \"handle_app_config\", handle_app_config)",
"def fix_deletion_core_management_base_AppCommand_handle_app(utils):\n\n from django.core.management.base import CommandError, AppCommand\n\n def handle_app_config(self, app_config, **options):\n \"\"\"\n Perform the command's actions for app_config, an AppConfig instance\n corresponding to an application label given on the command line.\n \"\"\"\n try:\n # During the deprecation path, keep delegating to handle_app if\n # handle_app_config isn't implemented in a subclass.\n handle_app = self.handle_app\n except AttributeError:\n # Keep only this exception when the deprecation completes.\n raise NotImplementedError(\n \"Subclasses of AppCommand must provide \" \"a handle_app_config() method.\"\n )\n else:\n utils.emit_warning(\n \"AppCommand.handle_app() is superseded by \"\n \"AppCommand.handle_app_config().\",\n RemovedInDjango19Warning,\n stacklevel=2,\n )\n if app_config.models_module is None:\n raise CommandError(\n \"AppCommand cannot handle app '%s' in legacy mode \"\n \"because it doesn't have a models module.\" % app_config.label\n )\n return handle_app(app_config.models_module, **options)\n\n utils.inject_callable(AppCommand, \"handle_app_config\", handle_app_config)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Makes a test state using set_up_test_model and returns the original state and the state after the migration is applied.
|
def make_test_state(self, app_label, operation, **kwargs):
project_state = self.set_up_test_model(app_label, **kwargs)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
return project_state, new_state
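A short usage sketch in the style of Django's operation tests; the app label, operation, and model names are illustrative:

def test_rename_model_state(self):
    operation = migrations.RenameModel("Pony", "Horse")
    project_state, new_state = self.make_test_state("test_app", operation)
    # The original state keeps the old model; the cloned state has the rename applied.
    self.assertIn(("test_app", "pony"), project_state.models)
    self.assertIn(("test_app", "horse"), new_state.models)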
|
[
"def test_save_state(self):\n self.save_helper(\"State\")",
"def test_new_state(self):\n self.new_helper(\"State\")",
"def _test_update_state_fn(self):\n return encoding_stage._tf_style_update_state(\n lambda _, s, sut, name: {'state': s['state'] + sut['tensor']})",
"def test_state_seeded_to_db(self):\n \n seed_states = seed_database.seed_states_table()\n self.assertEqual('Alabama', seed_states[0].state_name)",
"def test_set_user_state(self):\n pass",
"def test_migrate(self):\n # Make sure no tables are created\n self.assertTableNotExists(\"migrations_author\")\n self.assertTableNotExists(\"migrations_tribble\")\n self.assertTableNotExists(\"migrations_book\")\n # Run the migrations to 0001 only\n call_command(\"migrate\", \"migrations\", \"0001\", verbosity=0)\n # Make sure the right tables exist\n self.assertTableExists(\"migrations_author\")\n self.assertTableExists(\"migrations_tribble\")\n self.assertTableNotExists(\"migrations_book\")\n # Run migrations all the way\n call_command(\"migrate\", verbosity=0)\n # Make sure the right tables exist\n self.assertTableExists(\"migrations_author\")\n self.assertTableNotExists(\"migrations_tribble\")\n self.assertTableExists(\"migrations_book\")\n # Unmigrate everything\n call_command(\"migrate\", \"migrations\", \"zero\", verbosity=0)\n # Make sure it's all gone\n self.assertTableNotExists(\"migrations_author\")\n self.assertTableNotExists(\"migrations_tribble\")\n self.assertTableNotExists(\"migrations_book\")",
"def test_set_transition_state():\n\n def assert_state(instance):\n \"\"\"\n ensure the running state is set\n \"\"\"\n assert instance.state == \"do_thing_running\"\n\n x = get_thing()\n x.do_thing(assert_state)\n\n # ensure the target transition is set when the process is done\n assert x.state == x.CHOICES.done",
"def _to_model(self, set_random_state):",
"async def test_reproducing_states(\n hass: HomeAssistant, caplog: pytest.LogCaptureFixture\n) -> None:\n\n assert await async_setup_component(\n hass,\n \"input_number\",\n {\n \"input_number\": {\n \"test_number\": {\"min\": \"5\", \"max\": \"100\", \"initial\": VALID_NUMBER1}\n }\n },\n )\n\n # These calls should do nothing as entities already in desired state\n await async_reproduce_state(\n hass,\n [\n State(\"input_number.test_number\", VALID_NUMBER1),\n # Should not raise\n State(\"input_number.non_existing\", \"234\"),\n ],\n )\n\n assert hass.states.get(\"input_number.test_number\").state == VALID_NUMBER1\n\n # Test reproducing with different state\n await async_reproduce_state(\n hass,\n [\n State(\"input_number.test_number\", VALID_NUMBER2),\n # Should not raise\n State(\"input_number.non_existing\", \"234\"),\n ],\n )\n\n assert hass.states.get(\"input_number.test_number\").state == VALID_NUMBER2\n\n # Test setting state to number out of range\n await async_reproduce_state(hass, [State(\"input_number.test_number\", \"150\")])\n\n # The entity states should be unchanged after trying to set them to out-of-range number\n assert hass.states.get(\"input_number.test_number\").state == VALID_NUMBER2\n\n await async_reproduce_state(\n hass,\n [\n # Test invalid state\n State(\"input_number.test_number\", \"invalid_state\"),\n # Set to state it already is.\n State(\"input_number.test_number\", VALID_NUMBER2),\n ],\n )",
"def make_migration(app_label=\"tests\", from_state=None, to_state=None):\n\n app_labels = [app_label]\n\n loader = MigrationLoader(None, ignore_no_migrations=True)\n loader.check_consistent_history(connection)\n\n questioner = NonInteractiveMigrationQuestioner(\n specified_apps=app_labels, dry_run=False\n )\n\n autodetector = MigrationAutodetector(\n from_state or loader.project_state(),\n to_state or ProjectState.from_apps(apps),\n questioner,\n )\n\n changes = autodetector.changes(\n graph=loader.graph,\n trim_to_apps=app_labels or None,\n convert_apps=app_labels or None,\n migration_name=\"test\",\n )\n\n changes_for_app = changes.get(app_label)\n if not changes_for_app or len(changes_for_app) == 0:\n return None\n\n return changes_for_app[0]",
"def test_reload_state(self):\n self.reload_helper(\"State\")",
"def test_save_load_state_dict(self):\n\n for qengine in supported_qengines:\n with override_quantized_engine(qengine):\n model = TwoLayerLinearModel()\n model = torch.ao.quantization.QuantWrapper(model)\n model.qconfig = torch.ao.quantization.get_default_qconfig(qengine)\n\n model = prepare(model)\n # calibrate\n test_only_eval_fn(model, self.calib_data)\n model = convert(model)\n x = torch.rand(2, 5, dtype=torch.float)\n ref = model(x)\n\n quant_state_dict = model.state_dict()\n\n # Create model again for eval\n model = TwoLayerLinearModel()\n model = torch.ao.quantization.QuantWrapper(model)\n model.qconfig = torch.ao.quantization.get_default_qconfig(qengine)\n model = prepare(model)\n model = convert(model)\n new_state_dict = model.state_dict()\n\n # Check to make sure the state dict keys match original model after convert.\n self.assertEqual(set(new_state_dict.keys()), set(quant_state_dict.keys()))\n\n model.load_state_dict(quant_state_dict)\n\n out = model(x)\n self.assertEqual(ref, out)",
"def Migrate(self):\n\n # TODO(amoser): This doesn't do anything yet.\n pass",
"def setMigrating(state): # @NoSelf",
"def setUp(self):\n db.create_all()\n self.db = db",
"def test_init_state(self) -> None:\n # Execute\n state = self.state_factory()\n\n # Assert\n assert isinstance(state, State)",
"def test_save_restore(self):\n with tempfile.TemporaryDirectory(prefix=\"phd_\") as d:\n tempdir = pathlib.Path(d)\n\n model_to_file = self.model_class(**self.model_init_opts)\n model_to_file.init(0, self.atomizer)\n model_to_file.save(tempdir / \"model\")\n\n model_from_file = self.model_class(**self.model_init_opts)\n model_from_file.restore(tempdir / \"model\")\n # We can't test that restoring the model from file actually does anything,\n # since we don't have __eq__ operator implemented for models.",
"async def test_restore_state(hass: HomeAssistant) -> None:\n # Home assistant is not running yet\n hass.state = CoreState.not_running\n last_reset = \"2022-11-29T00:00:00.000000+00:00\"\n mock_restore_cache_with_extra_data(\n hass,\n [\n (\n State(\n \"sensor.test_duration\",\n \"1234\",\n attributes={\n ATTR_LAST_RESET: last_reset,\n ATTR_UNIT_OF_MEASUREMENT: UnitOfTime.MINUTES,\n ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT,\n },\n ),\n {\n \"native_value\": 1234,\n \"native_unit_of_measurement\": UnitOfTime.MINUTES,\n \"icon\": \"mdi:car\",\n \"last_reset\": last_reset,\n },\n ),\n (\n State(\n \"sensor.test_duration_in_traffic\",\n \"5678\",\n attributes={\n ATTR_LAST_RESET: last_reset,\n ATTR_UNIT_OF_MEASUREMENT: UnitOfTime.MINUTES,\n ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT,\n },\n ),\n {\n \"native_value\": 5678,\n \"native_unit_of_measurement\": UnitOfTime.MINUTES,\n \"icon\": \"mdi:car\",\n \"last_reset\": last_reset,\n },\n ),\n (\n State(\n \"sensor.test_distance\",\n \"123\",\n attributes={\n ATTR_LAST_RESET: last_reset,\n ATTR_UNIT_OF_MEASUREMENT: UnitOfLength.KILOMETERS,\n ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT,\n },\n ),\n {\n \"native_value\": 123,\n \"native_unit_of_measurement\": UnitOfLength.KILOMETERS,\n \"icon\": \"mdi:car\",\n \"last_reset\": last_reset,\n },\n ),\n (\n State(\n \"sensor.test_origin\",\n \"Origin Address 1\",\n attributes={\n ATTR_LAST_RESET: last_reset,\n ATTR_LATITUDE: ORIGIN_LATITUDE,\n ATTR_LONGITUDE: ORIGIN_LONGITUDE,\n },\n ),\n {\n \"native_value\": \"Origin Address 1\",\n \"native_unit_of_measurement\": None,\n ATTR_LATITUDE: ORIGIN_LATITUDE,\n ATTR_LONGITUDE: ORIGIN_LONGITUDE,\n \"icon\": \"mdi:store-marker\",\n \"last_reset\": last_reset,\n },\n ),\n (\n State(\n \"sensor.test_destination\",\n \"Destination Address 1\",\n attributes={\n ATTR_LAST_RESET: last_reset,\n ATTR_LATITUDE: DESTINATION_LATITUDE,\n ATTR_LONGITUDE: DESTINATION_LONGITUDE,\n },\n ),\n {\n \"native_value\": \"Destination Address 1\",\n \"native_unit_of_measurement\": None,\n \"icon\": \"mdi:store-marker\",\n \"last_reset\": last_reset,\n },\n ),\n ],\n )\n\n # create and add entry\n mock_entry = MockConfigEntry(\n domain=DOMAIN, unique_id=DOMAIN, data=DEFAULT_CONFIG, options=DEFAULT_OPTIONS\n )\n mock_entry.add_to_hass(hass)\n\n await hass.config_entries.async_setup(mock_entry.entry_id)\n await hass.async_block_till_done()\n\n # restore from cache\n state = hass.states.get(\"sensor.test_duration\")\n assert state.state == \"1234\"\n assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTime.MINUTES\n assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT\n\n state = hass.states.get(\"sensor.test_duration_in_traffic\")\n assert state.state == \"5678\"\n assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTime.MINUTES\n assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT\n\n state = hass.states.get(\"sensor.test_distance\")\n assert state.state == \"123\"\n assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfLength.KILOMETERS\n assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT\n\n state = hass.states.get(\"sensor.test_origin\")\n assert state.state == \"Origin Address 1\"\n\n state = hass.states.get(\"sensor.test_destination\")\n assert state.state == \"Destination Address 1\"",
"def transition_model(self, state, action):\n ..."
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns the mount manager. You can use this to set the write directory and the base path.
|
def getMountManager(self):
return self.mountManager
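A usage sketch; the setter names are assumptions inferred from the query's description (write directory and base path), not confirmed API:

mount_mgr = pipeline.getMountManager()
mount_mgr.setWritePath("temp/")        # assumed setter name, per the query
mount_mgr.setBasePath("../pipeline/")  # assumed setter name, per the query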
|
[
"def mount_option(self):\n return self._mount_option",
"def get_shm_context_mount() -> k8s_schemas.V1VolumeMount:\n return k8s_schemas.V1VolumeMount(\n name=constants.VOLUME_MOUNT_SHM, mount_path=ctx_paths.CONTEXT_MOUNT_SHM\n )",
"def get_mapping_path_manager(self):\n return # osid.mapping.path.MappingPathManager",
"def get_manager(self):\n return self.__manager",
"def auto_mount(self):\n ret = self._get_attr(\"autoMount\")\n return ret",
"def get_mount_data(self):\n cmd = [\n self.mount_path,\n ]\n try:\n with subprocess.Popen(cmd, stdout=subprocess.PIPE) as result:\n return result.communicate()[0]\n except OSError as err:\n _LOG.error(\"mount execution failed: %s\", err)\n raise\n except Exception as err:\n _LOG.error(\"unknown error calling mount: %s\", err)\n raise",
"def pyre_mountPrivateFilespace(self):\n # get the file server\n vfs = self.vfs\n # get the namespace\n namespace = self.pyre_namespace\n # if i don't have a namespace\n if not namespace:\n # make an empty virtual filesystem and return it\n return vfs.virtual()\n\n # attempt to\n try:\n # get my private filespace\n pfs = vfs[namespace]\n # if not there\n except vfs.NotFoundError:\n # make it\n pfs = vfs.folder()\n # and mount it\n vfs[namespace] = pfs\n\n # check whether\n try:\n # the user directory is already mounted\n pfs[self.USER]\n # if not\n except pfs.NotFoundError:\n # check whether\n try:\n # i have a folder in the user area\n userdir = vfs[vfs.USER_DIR, namespace]\n # if not\n except vfs.NotFoundError:\n # make and mount an empty folder\n pfs[self.USER] = pfs.folder()\n # if it is there\n else:\n # look deeply\n userdir.discover()\n # and mount it\n pfs[self.USER] = userdir\n\n # get my prefix\n prefix = self.pyre_prefix\n # if i don't have one\n if not prefix:\n # attach an empty folder; must use {pfs} to do this to guarantee filesystem consistency\n pfs[self.SYSTEM] = pfs.folder()\n # and return\n return pfs\n # otherwise, get the associated filesystem\n home = vfs.retrieveFilesystem(root=prefix)\n # and mount my folders in my namespace\n self.pyre_mountApplicationFolders(pfs=pfs, prefix=home)\n\n # now, build the protocol resolution folders by assembling the contents of the\n # configuration folders in priority order\n for root in [self.SYSTEM, self.USER]:\n # build the work list: triplets of {name}, {source}, {destination}\n todo = [(root, pfs[root], pfs)]\n # now, for each triplet in the work list\n for path, source, destination in todo:\n # go through all the children of {source}\n for name, node in source.contents.items():\n # if the node is a folder\n if node.isFolder:\n # gingerly attempt to\n try:\n # grab the associated folder in {destination}\n link = destination[name]\n # if not there\n except destination.NotFoundError:\n # no worries, make it\n link = destination.folder()\n # and attach it\n destination[name] = link\n # add it to the work list\n todo.append((name, node, link))\n # otherwise\n else:\n # link the file into the destination folder\n destination[name] = node\n\n # all done\n return pfs",
"def get_manager():\n global _MANAGER\n if _MANAGER is None:\n _MANAGER = ResourceManager()\n return _MANAGER",
"def data_manager(self) -> CGSDataManager:\n if self._data_manager is None:\n self._data_manager: CGSDataManager = CommonDataManagerRegistry().locate_data_manager(ModInfo.get_identity())\n return self._data_manager",
"def command_manager(self):\n return self._parent.command_manager",
"def mount(path='/sd'):\n from machine import SD\n sd = SD()\n os.mount(sd, path)",
"def getconfigmgr(self):\r\n\t\treturn self.cfg",
"def filesystem( self ):\n return Filesystem.objects.get( pool_id=self.id, name=self.name )",
"def filesystem( self ):\n return Filesystem.objects.get( name=self.filesystem_name )",
"def manager():\n return _global_manager",
"def lock_manager(self):\n return self.__lock_manager",
"def docker_mount(self) -> str:\n result = self.dataset_type.docker_mount_func(self)\n if result is None:\n msg = \"Can't get Docker mount location for dataset {} of type {}\"\n raise DmodRuntimeError(msg.format(self.name, self.dataset_type.name))\n else:\n return result",
"def get_mount_class(paths: List[str] = None,\n types: List[str] = None,\n mount_path: str = MOUNT_PATH) -> Mount:\n class_map = {\n 'Darwin': BSDMount,\n 'FreeBSD': BSDMount,\n 'Linux': LinuxMount,\n }\n os_name = platform.system()\n try:\n new_class = class_map[os_name]\n except KeyError:\n raise NotImplementedError(f\"OS {os_name} not supported\") from None\n return new_class(paths, types, mount_path)",
"def filesystem(self):\n return self._attrs[\"filesystem\"]",
"def get_unit_manager_data(self):\n return self._dbs.get_unit_manager(self.uid)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Loads the pipeline settings from an ini file
|
def loadSettings(self, filename):
self.settings = PipelineSettingsManager()
self.settings.loadFromFile(filename)
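For illustration, a call paired with the kind of keys the setup methods further down read; the file name and exact key syntax are assumptions:

pipeline.loadSettings("Config/pipeline.ini")

# pipeline.ini (excerpt, hypothetical):
#   antialiasingTechnique = "SMAA"
#   occlusionTechnique = "SAO"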
|
[
"def load_settings():\n with open(os.path.join(SCRIPT_DIR, '../road-shields.yml'), 'r') as fh:\n return yaml.load(fh, Loader=yaml.FullLoader)",
"def read_ini_file( ):\n pkeys = {}\n for p in __para_name: pkeys[__para_name[p].lower()] = p\n set_defaults()\n iniFile = __param[init_file]\n cfg = ConfigParser()\n if iniFile and cfg.read(iniFile):\n for sect in cfg.sections():\n plist = [pname for pname in cfg[sect] if pname in pkeys]\n for pname in plist:\n v = cfg['PARAMETERS'][pname]\n p = pkeys[pname]\n __param[p]= ( float(v) if '.' in v else int(v)) if __is_number(v) else v",
"def read_ini(self, filename = '../src/initialize.ini'):\n\t\tconfig = configparser.ConfigParser()\n\t\tconfig.optionxform=str # preserves uppercase for keys\n\n\t\tif os.path.isfile(filename):\n\t\t\tconfig.read(filename)\n\t\telse: \n\t\t\tprint(\"\\nNo .ini in folder.\\nProceed with default tedlium-configurations.\\n\")\n\t\t\treturn\n\n\t\tfor entry in config['NUMERICAL_VALUES']:\n\t\t\ttry:\n\t\t\t\tself.params[entry]['value'] = self.params[entry]['type'](config['NUMERICAL_VALUES'][entry])\n\t\t\texcept: \n\t\t\t\tif config['NUMERICAL_VALUES'][entry] == 'None':\n\t\t\t\t\tself.params[entry]['value'] = None\n\t\t\t\telse: \n\t\t\t\t\tprint(\"Unexpected error: \", sys.exc_info()[0])\n\n\t\tfor entry in config['LITERAL_VALUES']:\n\t\t\tif not config['LITERAL_VALUES'][entry] == 'None':\n\t\t\t\tself.params[entry]['value'] = self.params[entry]['type'](config['LITERAL_VALUES'][entry])\n\t\t\telse:\n\t\t\t\tself.params[entry]['value'] = None",
"def load_settings():\n settings = {}\n with open('settings.ini', 'r') as f:\n lines = f.readlines()\n for line in lines:\n if '//' not in line and line != '\\n': # ignoring comments and blank lines\n setting = line.split(\"=\")\n settings[setting[0]] = setting[1].replace(\"\\n\", \"\") # getting the actual data we need\n settings[\"SUBREDDITS\"] = settings[\"SUBREDDITS\"].split(',') # splitting up the subreddits into a list instead of string\n try:\n settings[\"TIME_SLEEP\"] = float(settings[\"TIME_SLEEP\"]) * 60\n except ValueError:\n print(f'{settings[\"TIME_SLEEP\"]} is not a valid float.')\n return\n return settings",
"def load_settings():\n load_setting('status_format')\n load_setting('status_key')\n load_setting('start_on', 'on')",
"def read_config(self, args):\n # Try to load configuration file if provided\n import yaml\n\n # This is all the config information in the file, including\n # things for other stages\n overall_config = yaml.load(open(self.get_input('config')), yaml.FullLoader)\n \n # The user can define global options that are inherited by\n # all the other sections if not already specified there.\n input_config = overall_config.get('global', {})\n\n # This is just the config info in the file for this stage.\n # It may be incomplete - there may be things specified on the\n # command line instead, or just using their default values\n stage_config = overall_config.get(self.name, {})\n input_config.update(stage_config)\n\n # Here we build up the actual configuration we use on this\n # run from all these sources\n my_config = {}\n\n # Loop over the options of the pipeline stage\n for x in self.config_options:\n opt = None\n opt_type = None\n\n # First look for a default value,\n # if a type (like int) is provided as the default it indicates that\n # this option doesn't have a default (i.e. is mandatory) and should\n # be explicitly provided with the specified type\n if type(self.config_options[x]) is type:\n opt_type = self.config_options[x]\n\n elif type(self.config_options[x]) is list:\n v = self.config_options[x][0]\n if type(v) is type:\n opt_type = v\n else:\n opt = self.config_options[x]\n opt_type = type(v)\n else:\n opt = self.config_options[x]\n opt_type = type(opt)\n\n # Second, look for the option in the configuration file and override\n # default if provided TODO: Check types\n if x in input_config:\n opt = input_config[x]\n\n # Finally check for command line option that would override the value\n # in the configuration file. Note that the argument parser should\n # already have taken care of type\n if args[x] is not None:\n opt = args[x]\n\n # Finally, check that we got at least some value for this option\n if opt is None:\n raise ValueError(f\"Missing configuration option {x} for stage {self.name}\")\n\n my_config[x] = opt\n\n # Unspecified parameters can also be copied over.\n # This will be needed for parameters that are more complicated, such\n # as dictionaries or other more structured parameter information.\n for x,val in input_config.items():\n # Omit things we've already dealt with above\n if x in self.config_options:\n continue\n # copy over everything else\n else:\n my_config[x] = val\n\n\n\n return my_config",
"def load_settings():\n sl = SettingLoader()\n return sl.settings",
"def load(self):\n self.config.read(\"config.py\")\n pass",
"def load_cfg(self,filepath):\n config = configparser.ConfigParser()\n config.read([filepath])\n return config",
"def load_settings():\n # Load settings.ini\n config = configparser.ConfigParser()\n config.read(SETTINGS_PATH)\n config.sections()\n server_address = config['SERVER']['Address']\n server_port = config['SERVER']['Port']\n window_height = int(config['CLIENT']['Window height'])\n window_width = int(config['CLIENT']['Window width'])\n\n return server_address, server_port, window_height, window_width",
"def Load(self, filename):\n\t\tconfigdict = {}\n\t\tparser = configparser.ConfigParser()\n\t\tparser.read(filename)\n\t\tfor s in parser.sections():\n\t\t\tconfigdict[s] = {}\n\t\t\tfor o in parser.options(s):\n\t\t\t\tconfigdict[s][o] = parser.get(s, o)\n\t\tself.__mixer.ParseConfigDict(configdict)\n\t\tif self.__gui is not None:\n\t\t\tself.__gui.ParseConfigDict(configdict)",
"def _load_settings(self):\n with open(DEFAULT_PATH, 'rb') as file_:\n default_settings = yaml.load(file_)\n LOG.info('Loaded defaults: %s', default_settings)\n\n user_settings = {}\n if os.path.isfile(USERSETTINGS_PATH) and os.access(USERSETTINGS_PATH, os.R_OK):\n try:\n with open(USERSETTINGS_PATH, 'rb') as file_:\n user_settings = yaml.load(file_)\n LOG.info('Loaded user settings %s from path %s', user_settings,\n USERSETTINGS_PATH)\n except Exception:\n LOG.exception('Exception during loading of user settings')\n # FIXME check user_settings keys\n else:\n LOG.info('No user settings found, file %s does not exist or is not readable',\n USERSETTINGS_PATH)\n\n self.__class__.settings = ChainMap(user_settings, default_settings)\n self.__class__.settings_names = list(self.settings.keys())",
"def read_settings(settings_file):\n\n with open(settings_file, \"r\", encoding=\"utf-8\") as stream:\n config = yaml.safe_load(stream)\n\n return config",
"def load(file=None):\r\n if not file:\r\n folders = [Path.cwd(), location(), Path(__file__).parent]\r\n for folder in folders:\r\n file = folder/'MPh.ini'\r\n if file.exists():\r\n break\r\n else:\r\n log.debug('Using default configuration.')\r\n return\r\n log.debug(f'Loading configuration from \"{file}\".')\r\n parser = configparser.RawConfigParser(interpolation=None)\r\n parser.optionxform = str\r\n parser.read(file, encoding='UTF-8')\r\n section = 'config'\r\n if section not in parser.sections():\r\n log.debug(f'Section [{section}] missing in configuration file.')\r\n return\r\n for (key, value) in options.items():\r\n if key in parser[section]:\r\n if isinstance(value, bool):\r\n options[key] = parser.getboolean(section, key)\r\n elif isinstance(value, int):\r\n options[key] = parser.getint(section, key)\r\n elif isinstance(value, float):\r\n options[key] = parser.getfloat(section, key)\r\n else:\r\n options[key] = parser[section][key]",
"def load_settings(self):\n self.settings = {}\n print(\"loading settings from: {}\".format(self.settings_file_name))\n if os.path.isfile(self.settings_file_name):\n with open(self.settings_file_name, \"r\") as f:\n self.settings = json.load(f)",
"def readSettings():\n config_object.read(\"FileStorage.ini\")\n return config_object[\"Strategy's\"]",
"def load(self):\n if not self.file:\n raise ValueError(\"No configuration file configured\")\n try:\n reader = ConfigReader()\n with open(self.file, \"r\", encoding=\"utf-8\") as f:\n reader.read_file(f)\n for section, settings in self.settings.items():\n for key, setting in settings.items():\n try:\n setting.validate(reader)\n except ValueError as e:\n value = reader.get(section, key, fallback='(undefined)')\n logger.warning(\n \"config key '{}' in section '{}' has the invalid configuration value '{}': {}\".format(\n key, section, value, str(e)\n ))\n except KeyError as e:\n logger.warning(\"config key '{}' in section '{}' needs to be set\".format(key, section))\n self.reader = reader\n except FileNotFoundError as e:\n pass",
"def load_ini_config(filename, key=None):\n config = configparser.ConfigParser()\n config.read(filename)\n return _config_helper(config, key)",
"def load_params(run_dirpath: str) -> dict:\n params_path = os.path.join(run_dirpath, \"params.yml\")\n with open(params_path, \"r\") as f:\n return yaml.safe_load(f)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Creates the antialiasing technique
|
def _setupAntialiasing(self):
technique = self.settings.antialiasingTechnique
self.debug("Creating antialiasing handler for", technique)
if technique == "None":
self.antialias = AntialiasingTechniqueNone()
elif technique == "SMAA":
self.antialias = AntialiasingTechniqueSMAA()
elif technique == "FXAA":
self.antialias = AntialiasingTechniqueFXAA()
else:
self.error(
"Unkown antialiasing technique", technique, "-> using None:")
self.antialias = AntialiasingTechniqueNone()
if self.occlusion.requiresBlurring():
self.antialias.setColorTexture(
self.blurOcclusionH.getColorTexture())
else:
if self.haveCombiner and self.settings.enableTemporalReprojection:
self.antialias.setColorTexture(self.combiner.getColorTexture())
else:
self.antialias.setColorTexture(
self.lightingComputeContainer.getColorTexture())
self.antialias.setDepthTexture(self.deferredTarget.getDepthTexture())
self.antialias.setVelocityTexture(self.deferredTarget.getAuxTexture(1))
self.antialias.setup()
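The calls above imply a small interface that every antialiasing technique implements; a minimal sketch assembled from the method names used in this snippet, plus getResultTexture, which appears in a related snippet among the negatives below. The bodies are placeholders, not the real implementation:

class AntialiasingTechniqueBase(object):
    # Interface implied by the setup code; bodies are illustrative only.
    def setColorTexture(self, tex):
        self._colorTex = tex
    def setDepthTexture(self, tex):
        self._depthTex = tex
    def setVelocityTexture(self, tex):
        self._velocityTex = tex
    def setup(self):
        pass
    def getResultTexture(self):
        return self._colorTex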
|
[
"def antialias(self):\n return self._ripy.antialias",
"def getAntialiasing(self, smoothing: 'SbBool &', numPasses: 'int &') -> \"void\":\n return _coin.SoRenderManager_getAntialiasing(self, smoothing, numPasses)",
"def getAntialiasing(self, smoothing: 'SbBool &', numPasses: 'int &') -> \"void\":\n return _coin.SoSceneManager_getAntialiasing(self, smoothing, numPasses)",
"def setAntialiasing(self, smoothing: 'SbBool const', numPasses: 'int const') -> \"void\":\n return _coin.SoRenderManager_setAntialiasing(self, smoothing, numPasses)",
"def setAntialiasing(self, smoothing: 'SbBool const', numPasses: 'int const') -> \"void\":\n return _coin.SoSceneManager_setAntialiasing(self, smoothing, numPasses)",
"def scale_strokes2(self):",
"def draw(self, image, px, py, angle, color, map_resolution, alpha=1.0, draw_steering_details=True):",
"def brush_stroke_mask(W, H):\n min_num_vertex = 4\n max_num_vertex = 12\n mean_angle = 2*math.pi / 5\n angle_range = 2*math.pi / 15\n min_width = 12\n max_width = 40\n def generate_mask(W, H):\n average_radius = math.sqrt(H*H+W*W) / 8\n mask = Image.new('L', (W, H), 0)\n\n for _ in range(np.random.randint(1, 4)):\n num_vertex = np.random.randint(min_num_vertex, max_num_vertex)\n angle_min = mean_angle - np.random.uniform(0, angle_range)\n angle_max = mean_angle + np.random.uniform(0, angle_range)\n angles = []\n vertex = []\n for i in range(num_vertex):\n if i % 2 == 0:\n angles.append(2*math.pi - np.random.uniform(angle_min, angle_max))\n else:\n angles.append(np.random.uniform(angle_min, angle_max))\n\n h, w = mask.size\n vertex.append((int(np.random.randint(0, w)), int(np.random.randint(0, h))))\n for i in range(num_vertex):\n r = np.clip(\n np.random.normal(loc=average_radius, scale=average_radius//2),\n 0, 2*average_radius)\n new_x = np.clip(vertex[-1][0] + r * math.cos(angles[i]), 0, w)\n new_y = np.clip(vertex[-1][1] + r * math.sin(angles[i]), 0, h)\n vertex.append((int(new_x), int(new_y)))\n\n draw = ImageDraw.Draw(mask)\n width = int(np.random.uniform(min_width, max_width))\n draw.line(vertex, fill=1, width=width)\n for v in vertex:\n draw.ellipse((v[0] - width//2,\n v[1] - width//2,\n v[0] + width//2,\n v[1] + width//2),\n fill=1)\n\n if np.random.normal() > 0:\n mask.transpose(Image.FLIP_LEFT_RIGHT)\n if np.random.normal() > 0:\n mask.transpose(Image.FLIP_TOP_BOTTOM)\n mask = np.asarray(mask, np.float32)\n mask = np.reshape(mask, (W, H, 1))\n return mask\n\n return generate_mask(W, H)",
"def artAttrSkinPaintCtx(context, xrayJoints=bool, paintattrselected=\"string\", mappressure=\"string\", outline=bool, clampupper=float, reflectionaxis=\"string\", exportfilesave=\"string\", paintNodeArray=\"string\", image3=\"string\", rampMaxColor=float, useMaxMinColor=bool, reflection=bool, dragSlider=\"string\", tangentOutline=bool, surfaceConformedBrushVertices=bool, exportfilesizey=int, exportfiletype=\"string\", toolOnProc=\"string\", lowerradius=float, exportfilesizex=int, opacity=float, objattrArray=\"string\", paintmode=\"string\", skinPaintMode=int, paintSelectMode=int, attrSelected=\"string\", accopacity=bool, usepressure=bool, exists=bool, brushalignment=bool, colorRamp=\"string\", name=\"string\", showactive=bool, afterStrokeCmd=\"string\", selectedattroper=\"string\", exportfilemode=\"string\", clamplower=float, useColorRamp=bool, minvalue=float, influence=\"string\", colorrangelower=float, activeListChangedProc=\"string\", clamp=\"string\", expandfilename=bool, outwhilepaint=bool, filterNodes=bool, value=float, alphaclamp=\"string\", tablet=bool, colorfeedback=bool, importfileload=\"string\", image1=\"string\", stampProfile=\"string\", profileShapeFile=\"string\", projective=bool, duringStrokeCmd=\"string\", brushfeedback=bool, dataTypeIndex=int, interactiveUpdate=bool, whichTool=\"string\", clear=bool, importfilemode=\"string\", alphaclamplower=float, maxvalue=float, importreassign=bool, rampMinColor=float, toolOffProc=\"string\", colorrangeupper=float, history=bool, beforeStrokeCmd=\"string\", image2=\"string\", alphaclampupper=float, disablelighting=bool, radius=float):\n pass",
"def artSetPaintCtx(mappressure=\"string\", outline=bool, reflectionaxis=\"string\", exportfilesave=\"string\", image3=\"string\", reflection=bool, dragSlider=\"string\", image1=\"string\", exportfiletype=\"string\", lowerradius=float, exportfilesizex=int, opacity=float, setopertype=\"string\", setdisplaycvs=bool, settomodify=\"string\", accopacity=bool, usepressure=bool, exists=bool, brushalignment=bool, name=\"string\", showactive=bool, surfaceConformedBrushVertices=bool, exportfilemode=\"string\", setcolorfeedback=bool, exportfilesizey=int, outwhilepaint=bool, tablet=bool, importfileload=\"string\", paintmode=\"string\", profileShapeFile=\"string\", expandfilename=bool, brushfeedback=bool, stampProfile=\"string\", clear=bool, importfilemode=\"string\", projective=bool, importreassign=bool, history=bool, image2=\"string\", tangentOutline=bool, radius=float):\n pass",
"def art3dPaintCtx(mappressure=\"string\", outline=bool, soloAsDiffuse=bool, painttxtattr=\"string\", filetxtsizex=int, reflectionaxis=\"string\", exportfilesave=\"string\", commonattr=\"string\", saveonstroke=bool, reflection=bool, dragSlider=\"string\", usepressure=bool, paintoperationtype=\"string\", tangentOutline=bool, image1=\"string\", exportfiletype=\"string\", lowerradius=float, filetxtaspectratio=float, opacity=float, savetexture=bool, saveTextureOnStroke=bool, pfxScale=float, accopacity=bool, shapeattr=bool, exists=bool, brushalignment=bool, shapenames=\"string\", name=\"string\", showactive=bool, surfaceConformedBrushVertices=bool, exportfilemode=\"string\", keepaspectratio=bool, reloadtexfile=bool, filetxtsizey=int, resizetxt=bool, exportfilesizey=int, image3=\"string\", textureFilenames=bool, stampSpacing=float, outwhilepaint=bool, tablet=bool, shadernames=\"string\", importfileload=\"string\", paintmode=\"string\", profileShapeFile=\"string\", projective=bool, expandfilename=bool, brushfeedback=bool, stampProfile=\"string\", resizeratio=float, pfxWidth=float, clear=bool, importfilemode=\"string\", painttxtattrname=\"string\", importreassign=bool, extendFillColor=bool, history=bool, image2=\"string\", radius=float, assigntxt=bool, exportfilesizex=int, alphablendmode=\"string\"):\n pass",
"def _create_aliasing(self, patches):\n down_scale = [1 / self.scale_factor, 1]\n mode = Config().interp_mode\n results = F.interpolate(patches, scale_factor=down_scale, mode=mode)\n # up_scale = [self.scale_factor, 1]\n # results = F.interpolate(results, scale_factor=up_scale, mode=mode)\n return results",
"def artAttrPaintVertexCtx(*args, **kwargs):\n\n pass",
"def paint_pattern(self):\n pass",
"def artAttrPaintVertexCtx(context, paintattrselected=\"string\", mappressure=\"string\", paintComponent=int, outline=bool, clampupper=float, vertexColorRangeUpper=float, reflectionaxis=\"string\", exportfilesave=\"string\", paintNodeArray=\"string\", image3=\"string\", rampMaxColor=float, reflection=bool, dragSlider=\"string\", tangentOutline=bool, surfaceConformedBrushVertices=bool, exportfilesizey=int, exportfiletype=\"string\", toolOnProc=\"string\", lowerradius=float, exportfilesizex=int, opacity=float, objattrArray=\"string\", paintmode=\"string\", useMaxMinColor=bool, vertexColorRangeLower=float, paintVertexFace=bool, attrSelected=\"string\", accopacity=bool, usepressure=bool, exists=bool, brushalignment=bool, colorRamp=\"string\", name=\"string\", showactive=bool, afterStrokeCmd=\"string\", selectedattroper=\"string\", exportfilemode=\"string\", clamplower=float, useColorRamp=bool, minvalue=float, colorrangelower=float, activeListChangedProc=\"string\", clamp=\"string\", expandfilename=bool, paintRGBA=bool, outwhilepaint=bool, filterNodes=bool, value=float, alphaclamp=\"string\", tablet=bool, colorfeedback=bool, importfileload=\"string\", image1=\"string\", stampProfile=\"string\", profileShapeFile=\"string\", projective=bool, duringStrokeCmd=\"string\", brushfeedback=bool, dataTypeIndex=int, interactiveUpdate=bool, whichTool=\"string\", clear=bool, importfilemode=\"string\", alphaclamplower=float, maxvalue=float, vertexColorRange=bool, importreassign=bool, rampMinColor=float, toolOffProc=\"string\", colorrangeupper=float, history=bool, beforeStrokeCmd=\"string\", image2=\"string\", alphaclampupper=float, disablelighting=bool, radius=float):\n pass",
"def __init__(self, capiness = 0.5, interiorIncludesCaps = False, *args, **keywordArgs):\n \n Shape.__init__(self, *args, **keywordArgs)\n \n # TODO: use VBO's so all instances share the same data?\n # TODO: fix seams caused by texture coords\n \n self.capiness = capiness\n self.interiorIncludesCaps = interiorIncludesCaps\n \n steps = 32 # must be multiple of four\n angleIncrement = 2.0 * pi / steps\n capSteps = steps / 4\n azimuthIncrement = pi / 2.0 / capSteps\n \n topVertices = []\n topTexCoords = []\n bottomVertices = []\n bottomTexCoords = []\n for azimuthStep in range(0, capSteps):\n topAzimuth = pi / 2.0 - (azimuthStep + 1) * azimuthIncrement\n topY, topMag = (sin(topAzimuth) * (capiness / 2.0), cos(topAzimuth) * 0.5)\n bottomAzimuth = -azimuthStep * azimuthIncrement\n bottomY, bottomMag = (sin(bottomAzimuth) * (capiness / 2.0), cos(bottomAzimuth) * 0.5)\n for step in range(0, steps):\n angle = pi + step * angleIncrement\n topVertices += [(sin(angle) * topMag, topY + (0.5 * (1.0 - capiness)), cos(angle) * topMag)]\n topTexCoords += [(float(step) / steps, topVertices[-1][1] + 0.5)]\n bottomVertices += [(sin(angle) * bottomMag, -(0.5 * (1.0 - capiness)) + bottomY, cos(angle) * bottomMag)]\n bottomTexCoords += [(float(step) / steps, bottomVertices[-1][1] + 0.5)]\n\n vertices = [(0.0, 0.5, 0.0)] + topVertices + bottomVertices + [(0.0, -0.5, 0.0)]\n self.geometry().setVertexArray(Shape.vectorArrayFromList(vertices))\n \n normals = []\n for vertex in vertices:\n normals += [(vertex[0] / 2.0, vertex[1] / 2.0, vertex[2] / 2.0)]\n self.geometry().setNormalArray(Shape.vectorArrayFromList(normals))\n self.geometry().setNormalBinding(osg.Geometry.BIND_PER_VERTEX)\n \n texCoords = [(0.0, 1.0)] + topTexCoords + bottomTexCoords + [(0.0, 0.0)]\n self.geometry().setTexCoordArray(0, Shape.vectorArrayFromList(texCoords))\n \n faceSet = Shape.primitiveSetFromList(osg.PrimitiveSet.TRIANGLE_FAN, range(0, steps + 1) + [1, 0])\n self.geometry().addPrimitiveSet(faceSet)\n for stripNum in range(0, 2 * capSteps - 1):\n vertexIndices = []\n baseIndex = 1 + stripNum * steps\n for step in range(steps) + [0]:\n vertexIndices += [baseIndex + step, baseIndex + steps + step]\n faceSet = Shape.primitiveSetFromList(osg.PrimitiveSet.QUAD_STRIP, vertexIndices)\n self.geometry().addPrimitiveSet(faceSet)\n bottomFanBaseIndex = len(vertices) - steps - 1\n faceSet = Shape.primitiveSetFromList(osg.PrimitiveSet.TRIANGLE_FAN, [len(vertices) - 1] + range(bottomFanBaseIndex, bottomFanBaseIndex + steps) + [bottomFanBaseIndex, len(vertices) - 1])\n self.geometry().addPrimitiveSet(faceSet)",
"def CalculateAlpha(self):\n\n # Adaptive alpha = Base Alpha * Mask modifer * Hygiene modifie * Distancing modifier\n self.Alpha = self.BaseAlpha\n self.Alpha *= (1 - self.MASK * 0.3)\n self.Alpha *= (1 - self.HYGIENE * 0.8)\n self.Alpha *= (1 - self.DISTANCING * 0.7)\n\n return",
"def draw_housing():\r\n\r\n tess.pensize(3)\r\n\r\n tess.color(\"black\", \"darkgrey\")\r\n\r\n tess.begin_fill()\r\n\r\n tess.forward(80)\r\n\r\n tess.left(90)\r\n\r\n tess.forward(200)\r\n\r\n tess.circle(40, 180)\r\n\r\n tess.forward(200)\r\n\r\n tess.left(90)\r\n\r\n tess.end_fill()",
"def _setupFinalPass(self):\n # Set wrap for motion blur\n colorTex = self.antialias.getResultTexture()\n colorTex.setWrapU(Texture.WMClamp)\n colorTex.setWrapV(Texture.WMClamp)\n self._setFinalPassShader()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Creates the occlusion technique
|
def _setupOcclusion(self):
technique = self.settings.occlusionTechnique
self.debug("Creating occlusion handle for", technique)
if technique == "None":
self.occlusion = AmbientOcclusionTechniqueNone()
elif technique == "SAO":
self.occlusion = AmbientOcclusionTechniqueSAO()
else:
self.error("Unkown occlusion technique:", technique)
self.occlusion = AmbientOcclusionTechniqueNone()
|
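The document above picks an occlusion implementation from a settings string and degrades gracefully to a no-op technique when the name is unrecognized. Below is a hedged sketch of the same dispatch pattern expressed as a lookup table; the stub classes and the make_occlusion_technique factory are illustrative, not part of the original pipeline.

class AmbientOcclusionTechniqueNone:
    """No-op fallback (stub for illustration)."""

class AmbientOcclusionTechniqueSAO:
    """Scalable Ambient Obscurance variant (stub for illustration)."""

_OCCLUSION_TECHNIQUES = {
    "None": AmbientOcclusionTechniqueNone,
    "SAO": AmbientOcclusionTechniqueSAO,
}

def make_occlusion_technique(name):
    # Unknown names fall back to the no-op technique, mirroring the
    # error-and-fallback behaviour of _setupOcclusion above.
    technique_cls = _OCCLUSION_TECHNIQUES.get(name)
    if technique_cls is None:
        print("Unknown occlusion technique:", name)
        technique_cls = AmbientOcclusionTechniqueNone
    return technique_cls()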
[
"def _gen_occlusions(self):\n # self.oc_grid = self.grid.copy()\n self.oc_grid[:, 2] = 1.0\n # self.oc_grid[:, 3] = self.types\n c = np.squeeze(self.cam_loc)\n pos = self.oc_grid[:, :2]\n\n # Compute distances from Camera to each object. Obtain sorted indices, closest object to furthest\n dist = np.linalg.norm(self.cam_loc-pos, axis=1)\n sorted_indices = np.argsort(dist)\n\n # Lists of occluded objects, tangency points of each object\n occluded = []\n self.a_list = []\n self.b_list = []\n # print(sorted_indices)\n\n # For each object, check if the objects behind are occluded or not\n for count, i in enumerate(sorted_indices):\n # a, b, are (approximate) tangency points originating from camera till object periphery\n a = pos[i] + np.asarray([-self.obj_width, 0.0])\n b = pos[i] + np.asarray([ self.obj_width, 0.0])\n self.a_list.append(a)\n self.b_list.append(b)\n\n # check if each point is on the RIGHT of line (CA) and LEFT of line (CB)\n for _, j in enumerate(sorted_indices[count+1:]):\n p = pos[j]\n\n # Check if right of line (CA) and left of line (CB) i.e. camera -- point A\n # https://math.stackexchange.com/questions/274712/calculate-on-which-side-of-a-straight-line-is-a-given-point-located\n da = (p[0] - c[0]) * (a[1] - c[1]) - (p[1] - c[1]) * (a[0] - c[0])\n db = (p[0] - c[0]) * (b[1] - c[1]) - (p[1] - c[1]) * (b[0] - c[0])\n\n if da > 0:\n # Point P is on the RIGHT of line (CA)\n if db < 0:\n # Point P is on the LEFT of the line (CB)\n occluded.append(j)\n # Set in_view to -1 and obj_type to unknown\n # self.oc_grid[j, 2] = -1.0\n # self.oc_grid[j, 3] = -2.0\n self.oc_grid[j, :] = -10.0\n # print(\"Changed oc grid:\\n\", self.oc_grid == self.grid)\n\n # self.grid = oc_grid",
"def testRenderingWithOcclusion(self):\n self._testRendering(occlusion_in_layers=True)",
"def create_displacement_parms(self, node):\n\n # add_folder 'Shaders'\n self.add_folder(node, 'Shaders')\n \n #Create Parameters\n #------------------------------------------------------------------\n\n # parm_template_group\n parm_template_group = node.parmTemplateGroup()\n\n # folder_shaders\n folder_shaders = parm_template_group.findFolder('Shaders')\n # shop_disable_displace_shader\n hou_parm_template = hou.ToggleParmTemplate(\"shop_disable_displace_shader\", \"Disable Displace Shader Rendering\", default_value=False)\n hou_parm_template.setHelp(\"None\")\n hou_parm_template.setTags({\"spare_category\": \"Shaders\"})\n #append\n parm_template_group.appendToFolder(folder_shaders, hou_parm_template)\n #set in node\n node.setParmTemplateGroup(parm_template_group)\n\n #log\n parm = node.parm(\"shop_disable_displace_shader\")\n parm_name = parm.name()\n parm_value = parm.eval()\n print('Added parm. {0} - {1}'.format(parm_name, parm_value))\n\n\n #Adjust Parameters\n #------------------------------------------------------------------\n\n # shop_disable_displace_shader \n hou_parm = node.parm(\"shop_disable_displace_shader\")\n hou_parm.lock(False)\n hou_parm.set(0)\n hou_parm.setAutoscope(False)",
"def testRenderingWithoutOcclusion(self):\n self._testRendering(occlusion_in_layers=False)",
"def _createOcclusionBlurBuffer(self):\n self.blurOcclusionV = RenderTarget(\"blurOcclusionVertical\")\n self.blurOcclusionV.addColorTexture()\n self.blurOcclusionV.prepareOffscreenBuffer()\n\n self.blurOcclusionH = RenderTarget(\"blurOcclusionHorizontal\")\n self.blurOcclusionH.addColorTexture()\n self.blurOcclusionH.prepareOffscreenBuffer()\n\n # Mipmaps for blur?\n # self.blurOcclusionV.getColorTexture().setMinfilter(\n # Texture.FTLinearMipmapLinear)\n # self.combiner.getColorTexture().setMinfilter(\n # Texture.FTLinearMipmapLinear)",
"def gen_and_print_conclusions(self, prop, mod):\n # Generate the conclusion premise in words from prop.\n conclusion = PARSER.generate_conclusion(prop)\n # Repalce the last premise with the generated conclusion\n self.premises[-1] = [conclusion]\n # For indeterminate problems, there can be more than one\n # model. Get one such model by calling make false\n print(\"Attempting to falsify the generated conclusion/model.\")\n alt_mod = model_validation.make_false(prop, mod,\n self.premises)\n if mod != alt_mod:\n print(\"Different conclusions are found in different\"\n \" models\")\n print(\"Conclusion generated in initial model: {}\"\n .format(conclusion))\n print(\"Initial model: {}\".format(mod))\n # Note: make_false negates the relation in prop and\n # changes carry over to the calling function. We don't\n # need to negate the prop again.\n print(\"Conclusion generated in an altered model:\"\n \" {}\".format(PARSER.generate_conclusion(prop)))\n print(\"Altered model: {}\".format(alt_mod))\n print(\"NO VALID CONCLUSION possible!\")\n self.models.append(mod)\n self.models.append(alt_mod)\n else:\n print(\"No alternative conclusions found\")\n print(\"Generated conclusion: {}\".format(conclusion))\n print(\"Final model: {}\".format(mod))\n self.models.append(mod)",
"def Random_Occlusion_Augmentation(self,input,keypoints,size=(21,21),probability=0.5,block_nums = 2):\n w, h = input.shape[1], input.shape[0]\n rx, ry = (size[0]-1)/2 , (size[1]-1)/2 # block radius\n \n if np.random.random() <= probability:\n\n for num in range(block_nums):\n\n x, y = np.random.randint(w), np.random.randint(h) # block center \n left ,top ,right, bottom = int(max(0,x-rx)), int(max(0,y-ry)), int(min(x+rx,w)), int(min(y+ry,h)) #block region\n \n input[top:bottom,left:right,:] = 0 # zero value\n\n # judge the keypoint's visibility ; `*` here means `and` operation for bool array\n keypoints[:,2]= np.where((left <= keypoints[:,0])* (keypoints[:,0] <= right) * \n (top <= keypoints[:,1] ) * (keypoints[:,1] <= bottom)*\n (keypoints[:,1]!=0) * (keypoints[:,0]!=0), # consideration for keypoint [0,0,0]\n 1 ,keypoints[:,2] ) # True: = 1 invisible keypoints False: keep original \n \n return input, keypoints",
"def create_aspect_topo_code (topo_data):\n # 'x' \n topo_data['x'] = topo_data['TopC'] * 10\n \n # 'y'\n def get_y_code(y):\n if y == .5:\n return 1\n elif y == 1:\n return 2\n elif y == 1.5:\n return 3\n elif y in [2,2.5]:\n return 4\n elif y in [3,3.5]:\n return 5\n elif y in [4,4.5]:\n return 6\n else:\n return 7\n topo_data['y'] = 0\n topo_data['y'][topo_data['Top'] == 500] = \\\n topo_data['RH'][topo_data['Top'] == 500].map(get_y_code)\n \n # 500 -> n00\n #(old, consolidated, snow covered)\n bool_map = {\n '000': 500, #(False, False, False)\n '001': 600, #(False, False, True)\n '011': 700, #(False, True, True)\n '010': 700, #(False, True, False) # not in aspect\n '019': 700, #(False, True, N/a)\n '100': 800, #(True, False, False) # not in aspect\n '101': 800, #(True, False, True), # not in aspect\n '110': 800, #(True, True, False) \n '111': 800, #(True, True, True)\n '190': 800, #(True, N/a, False), \n '191': 800, #(True, N/a, True), \n \n }\n b_cols = ['Old','Cs','SC']\n topo_data['code'] = [\n ''.join(x) for x in list(\n topo_data[b_cols].astype(int).astype(str).values\n )\n ]\n \n #~ get_code = lambda x: bool_map[x]\n def get_code (x):\n try: \n return bool_map[x]\n except KeyError:\n return np.nan\n topo_data['code'][topo_data['Top'] != 500] = \\\n topo_data['Top'][topo_data['Top'] != 500]\n topo_data['code'][topo_data['Top'] == 500] = \\\n topo_data['code'][topo_data['Top'] == 500].map(get_code)\n \n topo_data['code'] = topo_data['code'] + topo_data['x'] + topo_data['y']\n \n topo_data['code'][topo_data['code'] == 0] = np.nan\n \n return topo_data['code']",
"def apply_occulter(self, wf):\n # Code here pulled directly from Proper Manual pg 86\n if self.mode == \"Gaussian\":\n r = proper.prop_radius(wf)\n h = np.sqrt(-0.5 * self.size**2 / np.log(1 - np.sqrt(0.5)))\n gauss_spot = 1 - np.exp(-0.5 * (r/h)**2)\n # gauss_spot = shift(gauss_spot, shift=tp.occult_loc, mode='wrap') # ???\n proper.prop_multiply(wf, gauss_spot)\n elif self.mode == \"Solid\":\n proper.prop_circular_obscuration(wf, self.size)\n elif self.mode == \"8th_Order\":\n proper.prop_8th_order_mask(wf, self.size, CIRCULAR=True)\n elif self.mode == 'Vortex':\n vortex = Vortex().occulter(wf)",
"def inCurredSideEffect(self,doctor):\n SideEffect = 0 \n if random.uniform(0,1) < self.params['SideEffect']:\n self.medicalRecords['TreatmentOverallStatus'] = 'SEorIneffective'\n self.medicalRecords['ContinueTreatment'] = True\n #Just call the doctor, no need to check for IOP target\n doctor.DoctorModule()\n self.monitor.UpdateCurrentMedicationType(self.name,self.medicalRecords)\n self.monitor.UpdateOverallStatus(self.name,self.medicalRecords)\n SideEffect = 1\n \"\"\" Update current QALY according to side effect\"\"\"\n #self.QALY += (0.88 - 0.101*SideEffect + 0.011*self.Attribute['MD'] - 0.065*self.medicalRecords['Cataract'])*(self.DiscountRate()/12)\n self.QALY += (0.88 - self.params['betaSE']*SideEffect + self.params['betaMD']*self.Attribute['MD'] - self.params['betaCataract']*self.medicalRecords['Cataract'])*(self.params['time_next_visit']/12)",
"def __init__(self, capiness = 0.5, interiorIncludesCaps = False, *args, **keywordArgs):\n \n Shape.__init__(self, *args, **keywordArgs)\n \n # TODO: use VBO's so all instances share the same data?\n # TODO: fix seams caused by texture coords\n \n self.capiness = capiness\n self.interiorIncludesCaps = interiorIncludesCaps\n \n steps = 32 # must be multiple of four\n angleIncrement = 2.0 * pi / steps\n capSteps = steps / 4\n azimuthIncrement = pi / 2.0 / capSteps\n \n topVertices = []\n topTexCoords = []\n bottomVertices = []\n bottomTexCoords = []\n for azimuthStep in range(0, capSteps):\n topAzimuth = pi / 2.0 - (azimuthStep + 1) * azimuthIncrement\n topY, topMag = (sin(topAzimuth) * (capiness / 2.0), cos(topAzimuth) * 0.5)\n bottomAzimuth = -azimuthStep * azimuthIncrement\n bottomY, bottomMag = (sin(bottomAzimuth) * (capiness / 2.0), cos(bottomAzimuth) * 0.5)\n for step in range(0, steps):\n angle = pi + step * angleIncrement\n topVertices += [(sin(angle) * topMag, topY + (0.5 * (1.0 - capiness)), cos(angle) * topMag)]\n topTexCoords += [(float(step) / steps, topVertices[-1][1] + 0.5)]\n bottomVertices += [(sin(angle) * bottomMag, -(0.5 * (1.0 - capiness)) + bottomY, cos(angle) * bottomMag)]\n bottomTexCoords += [(float(step) / steps, bottomVertices[-1][1] + 0.5)]\n\n vertices = [(0.0, 0.5, 0.0)] + topVertices + bottomVertices + [(0.0, -0.5, 0.0)]\n self.geometry().setVertexArray(Shape.vectorArrayFromList(vertices))\n \n normals = []\n for vertex in vertices:\n normals += [(vertex[0] / 2.0, vertex[1] / 2.0, vertex[2] / 2.0)]\n self.geometry().setNormalArray(Shape.vectorArrayFromList(normals))\n self.geometry().setNormalBinding(osg.Geometry.BIND_PER_VERTEX)\n \n texCoords = [(0.0, 1.0)] + topTexCoords + bottomTexCoords + [(0.0, 0.0)]\n self.geometry().setTexCoordArray(0, Shape.vectorArrayFromList(texCoords))\n \n faceSet = Shape.primitiveSetFromList(osg.PrimitiveSet.TRIANGLE_FAN, range(0, steps + 1) + [1, 0])\n self.geometry().addPrimitiveSet(faceSet)\n for stripNum in range(0, 2 * capSteps - 1):\n vertexIndices = []\n baseIndex = 1 + stripNum * steps\n for step in range(steps) + [0]:\n vertexIndices += [baseIndex + step, baseIndex + steps + step]\n faceSet = Shape.primitiveSetFromList(osg.PrimitiveSet.QUAD_STRIP, vertexIndices)\n self.geometry().addPrimitiveSet(faceSet)\n bottomFanBaseIndex = len(vertices) - steps - 1\n faceSet = Shape.primitiveSetFromList(osg.PrimitiveSet.TRIANGLE_FAN, [len(vertices) - 1] + range(bottomFanBaseIndex, bottomFanBaseIndex + steps) + [bottomFanBaseIndex, len(vertices) - 1])\n self.geometry().addPrimitiveSet(faceSet)",
"def _setOcclusionBlurShader(self):\n blurVShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurOcclusionVertical.fragment\")\n blurHShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurOcclusionHorizontal.fragment\")\n self.blurOcclusionV.setShader(blurVShader)\n self.blurOcclusionH.setShader(blurHShader)",
"def edit_singularity_mesh(pattern):",
"def apply_scattering_rules(self):\n pass",
"def spring_cutout_generator(spring_angle, r0, r1, chamfer0, chamfer1=0):\n\n assert 0 < spring_angle < 180\n\n r0 += arm_clearance\n r1 -= arm_clearance\n\n spring_r = vitamins.spring.diameter / 2 + arm_clearance\n cos = math.cos(math.radians(spring_angle)) * 2 * r1\n sin = math.sin(math.radians(spring_angle)) * 2 * r1\n\n points1 = [(0, 0), (2 * r1, 0)]\n if spring_angle > 90:\n points1.append((2 * r1, 2 * r1))\n points1.append((cos, sin))\n\n points2 = [(0, 0), (0, -r1)]\n if spring_angle < 90:\n points2.append((-r1, 0))\n points2.append((-sin, cos))\n\n p = polygon2d(points1)\n\n s = p.extruded(0).offset(spring_r) + \\\n p.offset(spring_r).extruded(spring_r, symmetrical=False) - \\\n polygon2d(points2).offset(r0 - chamfer0).extruded(float(\"inf\"))\n\n\n chamfer_poly = polygon2d([(r0 - chamfer0, 2 * vitamins.spring.diameter),\n (r0 - chamfer0, 0),\n (r0, -spring_r),\n (r1, -spring_r),\n (r1 + chamfer1, 0),\n (r1 + chamfer1, 2 * vitamins.spring.diameter)])\n mask = chamfer_poly.revolved().rotated_x(90)\n mask &= p.extruded(float(\"inf\"))\n mask += chamfer_poly.extruded(vitamins.spring.diameter).translated_z(0.95 * vitamins.spring.diameter / 2).rotated_x(90)\n mask += chamfer_poly.extruded(vitamins.spring.diameter).translated_z(-0.95 * vitamins.spring.diameter / 2).rotated_x(90).rotated_z(spring_angle)\n\n return s & mask",
"def preprocessing():",
"def CreateAlphaFile(self):\n\n tmpField = self.itrSpongeAreas()\n\n self._alpha.content['dimensions']= self.params['dimensions']\n self._alpha['internalField']=tmpField\n\n self._alpha.writeFileAs(os.path.join('constant',self.params['fileName']))",
"def makeSteric(salinity,salinityChg,temp,tempChg,outFileName,thetao,pressure):\n\n # Remap all variables to short names\n so = salinity\n so_chg = salinityChg\n temp = temp\n temp_chg = tempChg\n del(salinity,salinityChg,tempChg) ; gc.collect()\n\n # Strip attributes to maintain consistency between datasets\n for count,x in enumerate(so.attributes.keys()):\n delattr(so,x)\n #print so.listattributes() ; # Print remaining attributes\n for count,x in enumerate(so_chg.attributes.keys()):\n delattr(so_chg,x)\n for count,x in enumerate(temp.attributes.keys()):\n delattr(temp,x)\n for count,x in enumerate(temp_chg.attributes.keys()):\n delattr(temp_chg,x)\n del(count,x)\n \n # Create z-coordinate from salinity input\n if not pressure:\n z_coord = so.getAxis(0)\n y_coord = so.getAxis(1)\n y_coord = tile(y_coord,(so.shape[2],1)).transpose()\n depth_levels = tile(z_coord.getValue(),(so.shape[2],so.shape[1],1)).transpose()\n pressure_levels = sw.pres(np.array(depth_levels),np.array(y_coord))\n del(z_coord,y_coord,depth_levels) ; gc.collect()\n else:\n pressure_levels = so.getAxis(0)\n pressure_levels = transpose(tile(pressure_levels,(so.shape[2],so.shape[1],1)))\n \n pressure_levels = cdm.createVariable(pressure_levels,id='pressure_levels')\n pressure_levels.setAxis(0,so.getAxis(0))\n pressure_levels.setAxis(1,so.getAxis(1))\n pressure_levels.setAxis(2,so.getAxis(2))\n pressure_levels.id = 'pressure_levels'\n pressure_levels.units_long = 'decibar (pressure)'\n pressure_levels.positive = 'down'\n pressure_levels.long_name = 'sea_water_pressure'\n pressure_levels.standard_name = 'sea_water_pressure'\n pressure_levels.units = 'decibar'\n pressure_levels.axis = 'Z'\n \n # Cleanup depth axis attributes\n depth = so.getAxis(0)\n depth.id = 'depth'\n depth.name = 'depth'\n depth.long_name = 'depth'\n depth.standard_name = 'depth'\n depth.axis = 'Z'\n so.setAxis(0,depth)\n so_chg.setAxis(0,depth)\n temp.setAxis(0,depth)\n temp_chg.setAxis(0,depth)\n del(depth)\n \n # Convert using python-seawater library (v3.3.1 - 130807)\n if thetao:\n # Process potential temperature to in-situ - default conversion sets reference pressure to 0 (surface)\n #temp_chg = sw.temp(np.array(so),np.array(temp_chg),np.array(pressure_levels)); # units degrees C\n #temp = sw.temp(np.array(so),np.array(temp),np.array(pressure_levels)); # units degrees C\n #temp_chg = sw.ptmp(np.array(so),np.array(temp_chg),np.array(pressure_levels),np.array(pressure_levels)); # units degrees C\n #temp = sw.ptmp(np.array(so),np.array(temp),np.array(pressure_levels),np.array(pressure_levels)); # units degrees C\n temp_chg = np.array(temp_chg); # units degrees C\n temp = np.array(temp); # units degrees C\n \n # Climatologies - rho,cp,steric_height\n rho = sw.dens(np.array(so),np.array(temp),np.array(pressure_levels)) ; # units kg m-3\n cp = sw.cp(np.array(so),np.array(temp),np.array(pressure_levels)) ; # units J kg-1 C-1\n steric_height = sw.gpan(np.array(so),np.array(temp),np.array(pressure_levels)) ; # units m3 kg-1 Pa == m2 s-2 == J kg-1 (dynamic decimeter)\n \n # Halosteric - rho,cp\n ss = map(array,(so+so_chg))\n rho_halo = sw.dens(np.array(ss),np.array(temp),np.array(pressure_levels)) ; # units kg m-3\n cp_halo = sw.cp(np.array(ss),np.array(temp),np.array(pressure_levels)) ; # units J kg-1 C-1\n tmp = sw.gpan(np.array(ss),np.array(temp),np.array(pressure_levels)) ; # units m3 kg-1 Pa == m2 s-2 == J kg-1 (dynamic decimeter)\n steric_height_halo_anom2 = tmp-steric_height ; # units m3 kg-1 Pa == m2 s-2 == J kg-1 (dynamic decimeter)\n \n # Full steric 
- steric_height\n tt = map(array,(temp+temp_chg))\n tmp = sw.gpan(np.array(ss),np.array(tt),np.array(pressure_levels)) ; # units m3 kg-1 Pa == m2 s-2 == J kg-1 (dynamic decimeter)\n steric_height_anom = tmp-steric_height ; # units m3 kg-1 Pa == m2 s-2 == J kg-1 (dynamic decimeter)\n del(ss,tmp) ; gc.collect()\n \n # Thermosteric - rho,cp,steric_height\n rho_thermo = sw.dens(np.array(so),np.array(tt),np.array(pressure_levels)) ; # units kg m-3 \n cp_thermo = sw.cp(np.array(so),np.array(tt),np.array(pressure_levels)) ; # units J kg-1 C-1\n tmp = sw.gpan(np.array(so),np.array(tt),np.array(pressure_levels)) ; # units m3 kg-1 Pa == m2 s-2 == J kg-1 (dynamic decimeter)\n steric_height_thermo_anom = tmp-steric_height ; # units m3 kg-1 Pa == m2 s-2 == J kg-1 (dynamic decimeter)\n del(tt,tmp) ; gc.collect() \n\n # Halosteric - steric_height\n steric_height_halo_anom = steric_height_anom-steric_height_thermo_anom ; # units m3 kg-1 Pa == m2 s-2 == J kg-1 (dynamic decimeter)\n\n # Create heat content\n heat_content = np.array(temp)*np.array(rho)*np.array(cp) ; # units J\n heat_content_sanom = np.array(temp)*np.array(rho_halo)*np.array(cp_halo) ; # units J\n heat_content_tanom = np.array(temp_chg)*np.array(rho)*np.array(cp) ; # units J\n #heat_content_tanom = np.array(temp_chg)*np.array(1020)*np.array(4187) ; # units J - try hard-coded - AR5 numbers\n heat_content_tsanom = np.array(temp_chg)*np.array(rho_halo)*np.array(cp_halo) ; # units J\n \n # Correct all instances of NaN values and fix masks - applied before cdms variables are created otherwise names/ids/attributes are reset\n temp = scrubNaNAndMask(temp,so)\n temp_chg = scrubNaNAndMask(temp_chg,so)\n rho = scrubNaNAndMask(rho,so)\n cp = scrubNaNAndMask(cp,so)\n rho_halo = scrubNaNAndMask(rho_halo,so)\n cp_halo = scrubNaNAndMask(cp_halo,so)\n rho_thermo = scrubNaNAndMask(rho_thermo,so)\n cp_thermo = scrubNaNAndMask(cp_thermo,so)\n steric_height = scrubNaNAndMask(steric_height,so)\n steric_height_anom = scrubNaNAndMask(steric_height_anom,so)\n steric_height_thermo_anom = scrubNaNAndMask(steric_height_thermo_anom,so)\n steric_height_halo_anom = scrubNaNAndMask(steric_height_halo_anom,so)\n steric_height_halo_anom2 = scrubNaNAndMask(steric_height_halo_anom2,so)\n heat_content = scrubNaNAndMask(heat_content,so)\n heat_content_sanom = scrubNaNAndMask(heat_content_sanom,so)\n heat_content_tanom = scrubNaNAndMask(heat_content_tanom,so)\n heat_content_tsanom = scrubNaNAndMask(heat_content_tsanom,so)\n \n # Recreate and redress variables\n so.id = 'so_mean'\n so.units = '1e-3'\n so_chg.id = 'so_chg'\n so_chg.units = '1e-3'\n temp = cdm.createVariable(temp,id='temp_mean')\n temp.setAxis(0,so.getAxis(0))\n temp.setAxis(1,so.getAxis(1))\n temp.setAxis(2,so.getAxis(2))\n temp.units = 'degrees_C'\n temp_chg = cdm.createVariable(temp_chg,id='temp_chg')\n temp_chg.setAxis(0,so.getAxis(0))\n temp_chg.setAxis(1,so.getAxis(1))\n temp_chg.setAxis(2,so.getAxis(2)) \n temp_chg.units = 'degrees_C' \n rho = cdm.createVariable(rho,id='rho')\n rho.setAxis(0,so.getAxis(0))\n rho.setAxis(1,so.getAxis(1))\n rho.setAxis(2,so.getAxis(2))\n rho.name = 'density_mean'\n rho.units = 'kg m^-3'\n cp = cdm.createVariable(cp,id='cp')\n cp.setAxis(0,so.getAxis(0))\n cp.setAxis(1,so.getAxis(1))\n cp.setAxis(2,so.getAxis(2))\n cp.name = 'heat_capacity_mean'\n cp.units = 'J kg^-1 C^-1'\n rho_halo = cdm.createVariable(rho_halo,id='rho_halo')\n rho_halo.setAxis(0,so.getAxis(0))\n rho_halo.setAxis(1,so.getAxis(1))\n rho_halo.setAxis(2,so.getAxis(2))\n rho_halo.name = 'density_mean_halo'\n 
rho_halo.units = 'kg m^-3'\n cp_halo = cdm.createVariable(cp_halo,id='cp_halo')\n cp_halo.setAxis(0,so.getAxis(0))\n cp_halo.setAxis(1,so.getAxis(1))\n cp_halo.setAxis(2,so.getAxis(2))\n cp_halo.name = 'heat_capacity_mean_halo'\n cp_halo.units = 'J kg^-1 C^-1'\n rho_thermo = cdm.createVariable(rho_thermo,id='rho_thermo')\n rho_thermo.setAxis(0,so.getAxis(0))\n rho_thermo.setAxis(1,so.getAxis(1))\n rho_thermo.setAxis(2,so.getAxis(2))\n rho_thermo.name = 'density_mean_thermo'\n rho_thermo.units = 'kg m^-3'\n cp_thermo = cdm.createVariable(cp_thermo,id='cp_thermo')\n cp_thermo.setAxis(0,so.getAxis(0))\n cp_thermo.setAxis(1,so.getAxis(1))\n cp_thermo.setAxis(2,so.getAxis(2))\n cp_thermo.name = 'heat_capacity_mean_thermo'\n cp_thermo.units = 'J kg^-1 C^-1'\n steric_height = cdm.createVariable(steric_height,id='steric_height')\n steric_height.setAxis(0,so.getAxis(0))\n steric_height.setAxis(1,so.getAxis(1))\n steric_height.setAxis(2,so.getAxis(2))\n steric_height.units = 'm^3 kg^-1 Pa (dynamic decimeter)'\n steric_height_anom = cdm.createVariable(steric_height_anom,id='steric_height_anom')\n steric_height_anom.setAxis(0,so.getAxis(0))\n steric_height_anom.setAxis(1,so.getAxis(1))\n steric_height_anom.setAxis(2,so.getAxis(2))\n steric_height_anom.units = 'm^3 kg^-1 Pa (dynamic decimeter)'\n steric_height_thermo_anom = cdm.createVariable(steric_height_thermo_anom,id='steric_height_thermo_anom')\n steric_height_thermo_anom.setAxis(0,so.getAxis(0))\n steric_height_thermo_anom.setAxis(1,so.getAxis(1))\n steric_height_thermo_anom.setAxis(2,so.getAxis(2))\n steric_height_thermo_anom.units = 'm^3 kg^-1 Pa (dynamic decimeter)'\n steric_height_halo_anom = cdm.createVariable(steric_height_halo_anom,id='steric_height_halo_anom')\n steric_height_halo_anom.setAxis(0,so.getAxis(0))\n steric_height_halo_anom.setAxis(1,so.getAxis(1))\n steric_height_halo_anom.setAxis(2,so.getAxis(2))\n steric_height_halo_anom.units = 'm^3 kg^-1 Pa (dynamic decimeter)'\n steric_height_halo_anom2 = cdm.createVariable(steric_height_halo_anom2,id='steric_height_halo_anom2')\n steric_height_halo_anom2.setAxis(0,so.getAxis(0))\n steric_height_halo_anom2.setAxis(1,so.getAxis(1))\n steric_height_halo_anom2.setAxis(2,so.getAxis(2))\n steric_height_halo_anom2.units = 'm^3 kg^-1 Pa (dynamic decimeter)'\n heat_content = cdm.createVariable(heat_content,id='heat_content')\n heat_content.setAxis(0,so.getAxis(0))\n heat_content.setAxis(1,so.getAxis(1))\n heat_content.setAxis(2,so.getAxis(2))\n heat_content.units = 'J'\n heat_content_sanom = cdm.createVariable(heat_content_sanom,id='heat_content_sanom')\n heat_content_sanom.setAxis(0,so.getAxis(0))\n heat_content_sanom.setAxis(1,so.getAxis(1))\n heat_content_sanom.setAxis(2,so.getAxis(2))\n heat_content_sanom.units = 'J'\n heat_content_tanom = cdm.createVariable(heat_content_tanom,id='heat_content_tanom')\n heat_content_tanom.setAxis(0,so.getAxis(0))\n heat_content_tanom.setAxis(1,so.getAxis(1))\n heat_content_tanom.setAxis(2,so.getAxis(2))\n heat_content_tanom.units = 'J'\n heat_content_tsanom = cdm.createVariable(heat_content_tsanom,id='heat_content_tsanom')\n heat_content_tsanom.setAxis(0,so.getAxis(0))\n heat_content_tsanom.setAxis(1,so.getAxis(1))\n heat_content_tsanom.setAxis(2,so.getAxis(2))\n heat_content_tsanom.units = 'J'\n \n # Create model-based depth index for subset target levels\n newdepth = np.array([5,10,20,30,40,50,75,100,125,150,200,300,500,700,1000,1500,1800,2000]).astype('f');\n newdepth_bounds = 
np.array([[0,5],[5,10],[10,20],[20,30],[30,40],[40,50],[50,75],[75,100],[100,125],[125,150],\n [150,200],[200,300],[300,500],[500,700],[700,1000],[1000,1500],[1500,1800],[1800,2000]]).astype('f')\n #newdepth = np.array([200,300,500,700,1000,1500,1800,2000]).astype('f');\n #newdepth_bounds = np.array([[0,200],[200,300],[300,500],[500,700],[700,1000],[1000,1500],[1500,1800],[1800,2000]]).astype('f')\n \n # Interpolate to depths\n so_depthInterp = cdu.linearInterpolation(so,pressure_levels,levels=newdepth)\n temp_depthInterp = cdu.linearInterpolation(temp,pressure_levels,levels=newdepth)\n steric_height_depthInterp = cdu.linearInterpolation(steric_height,pressure_levels,levels=newdepth)\n steric_height_anom_depthInterp = cdu.linearInterpolation(steric_height_anom,pressure_levels,levels=newdepth)\n steric_height_thermo_anom_depthInterp = cdu.linearInterpolation(steric_height_thermo_anom,pressure_levels,levels=newdepth)\n steric_height_halo_anom_depthInterp = cdu.linearInterpolation(steric_height_halo_anom,pressure_levels,levels=newdepth)\n steric_height_halo_anom2_depthInterp = cdu.linearInterpolation(steric_height_halo_anom2,pressure_levels,levels=newdepth)\n heat_content_sanom_depthInterp = cdu.linearInterpolation(heat_content_sanom,pressure_levels,levels=newdepth)\n heat_content_tanom_depthInterp = cdu.linearInterpolation(heat_content_tanom,pressure_levels,levels=newdepth)\n heat_content_tsanom_depthInterp = cdu.linearInterpolation(heat_content_tanom,pressure_levels,levels=newdepth)\n \n # Fix masks - applied before cdms variables are created otherwise names/ids/attributes are reset\n temp_depthInterp = scrubNaNAndMask(temp_depthInterp,so_depthInterp)\n steric_height_depthInterp = scrubNaNAndMask(steric_height_depthInterp,so_depthInterp)\n steric_height_anom_depthInterp = scrubNaNAndMask(steric_height_anom_depthInterp,so_depthInterp)\n steric_height_thermo_anom_depthInterp = scrubNaNAndMask(steric_height_thermo_anom_depthInterp,so_depthInterp)\n steric_height_halo_anom_depthInterp = scrubNaNAndMask(steric_height_halo_anom_depthInterp,so_depthInterp)\n steric_height_halo_anom2_depthInterp = scrubNaNAndMask(steric_height_halo_anom2_depthInterp,so_depthInterp)\n heat_content_sanom_depthInterp = scrubNaNAndMask(heat_content_sanom_depthInterp,so_depthInterp)\n heat_content_tanom_depthInterp = scrubNaNAndMask(heat_content_tanom_depthInterp,so_depthInterp)\n heat_content_tsanom_depthInterp = scrubNaNAndMask(heat_content_tsanom_depthInterp,so_depthInterp)\n \n # Fix bounds\n newdepth = so_depthInterp.getAxis(0)\n newdepth.setBounds(newdepth_bounds)\n del(newdepth_bounds)\n newdepth.id = 'depth2'\n newdepth.units_long = 'decibar (pressure)'\n newdepth.positive = 'down'\n newdepth.long_name = 'sea_water_pressure'\n newdepth.standard_name = 'sea_water_pressure'\n newdepth.units = 'decibar'\n newdepth.axis = 'Z'\n \n # Assign corrected bounds\n so_depthInterp.setAxis(0,newdepth)\n temp_depthInterp.setAxis(0,newdepth)\n steric_height_depthInterp.setAxis(0,newdepth)\n steric_height_anom_depthInterp.setAxis(0,newdepth)\n steric_height_thermo_anom_depthInterp.setAxis(0,newdepth)\n steric_height_halo_anom_depthInterp.setAxis(0,newdepth)\n steric_height_halo_anom2_depthInterp.setAxis(0,newdepth)\n heat_content_sanom_depthInterp.setAxis(0,newdepth)\n heat_content_tanom_depthInterp.setAxis(0,newdepth)\n heat_content_tsanom_depthInterp.setAxis(0,newdepth)\n \n # Average/integrate to surface - configure bounds\n # Preallocate arrays\n so_depthAve = np.ma.zeros([len(newdepth),shape(so)[1],shape(so)[2]])\n 
temp_depthAve = so_depthAve.copy()\n heat_content_sanom_depthInteg = so_depthAve.copy()\n heat_content_tanom_depthInteg = so_depthAve.copy()\n heat_content_tsanom_depthInteg = so_depthAve.copy()\n for count,depth in enumerate(newdepth):\n tmp = cdu.averager(so_depthInterp[0:(count+1),...],axis=0,weights='weighted',action='average')\n so_depthAve[count,] = tmp;\n tmp = cdu.averager(temp_depthInterp[0:(count+1),...],axis=0,weights='weighted',action='average')\n temp_depthAve[count,] = tmp;\n tmp = cdu.averager(heat_content_sanom_depthInterp[0:(count+1),...],axis=0,weights='weighted',action='sum')\n heat_content_sanom_depthInteg[count,] = tmp\n tmp = cdu.averager(heat_content_tanom_depthInterp[0:(count+1),...],axis=0,weights='weighted',action='sum')\n heat_content_tanom_depthInteg[count,] = tmp\n tmp = cdu.averager(heat_content_tsanom_depthInterp[0:(count+1),...],axis=0,weights='weighted',action='sum')\n heat_content_tsanom_depthInteg[count,] = tmp\n del(heat_content_tanom_depthInterp,heat_content_tsanom_depthInterp); gc.collect()\n \n # Fix masks - applied before cdms variables are created otherwise names/ids/attributes are reset\n so_depthAve = scrubNaNAndMask(so_depthAve,so_depthInterp)\n temp_depthAve = scrubNaNAndMask(temp_depthAve,so_depthInterp)\n heat_content_sanom_depthInteg = scrubNaNAndMask(heat_content_sanom_depthInteg,so_depthInterp)\n heat_content_tanom_depthInteg = scrubNaNAndMask(heat_content_tanom_depthInteg,so_depthInterp)\n heat_content_tsanom_depthInteg = scrubNaNAndMask(heat_content_tsanom_depthInteg,so_depthInterp)\n del(so_depthInterp)\n \n # Convert numpy arrays to cdms objects\n heat_content_sanom_depthInteg = cdm.createVariable(heat_content_sanom_depthInteg,id='heat_content_sanom_depthInteg')\n heat_content_sanom_depthInteg.id = 'heat_content_sanom_depthInteg'\n heat_content_sanom_depthInteg.setAxis(0,newdepth)\n heat_content_sanom_depthInteg.setAxis(1,so.getAxis(1))\n heat_content_sanom_depthInteg.setAxis(2,so.getAxis(2))\n heat_content_sanom_depthInteg.units = 'J'\n heat_content_tanom_depthInteg = cdm.createVariable(heat_content_tanom_depthInteg,id='heat_content_tanom_depthInteg')\n heat_content_tanom_depthInteg.id = 'heat_content_tanom_depthInteg'\n heat_content_tanom_depthInteg.setAxis(0,newdepth)\n heat_content_tanom_depthInteg.setAxis(1,so.getAxis(1))\n heat_content_tanom_depthInteg.setAxis(2,so.getAxis(2))\n heat_content_tanom_depthInteg.units = 'J'\n heat_content_tsanom_depthInteg = cdm.createVariable(heat_content_tsanom_depthInteg,id='heat_content_tsanom_depthInteg')\n heat_content_tsanom_depthInteg.id = 'heat_content_tsanom_depthInteg'\n heat_content_tsanom_depthInteg.setAxis(0,newdepth)\n heat_content_tsanom_depthInteg.setAxis(1,so.getAxis(1))\n heat_content_tsanom_depthInteg.setAxis(2,so.getAxis(2))\n heat_content_tsanom_depthInteg.units = 'J'\n so_depthAve = cdm.createVariable(so_depthAve,id='so_depthAve')\n so_depthAve.id = 'so_depthAve'\n so_depthAve.setAxis(0,newdepth)\n so_depthAve.setAxis(1,so.getAxis(1))\n so_depthAve.setAxis(2,so.getAxis(2))\n so_depthAve.units = '1e-3'\n temp_depthAve = cdm.createVariable(temp_depthAve,id='temp_depthAve')\n temp_depthAve.id = 'temp_depthAve'\n temp_depthAve.setAxis(0,newdepth)\n temp_depthAve.setAxis(1,so.getAxis(1))\n temp_depthAve.setAxis(2,so.getAxis(2))\n temp_depthAve.units = 'degrees_C'\n steric_height_depthInterp = cdm.createVariable(steric_height_depthInterp,id='steric_height_depthInterp')\n steric_height_depthInterp.setAxis(0,newdepth)\n steric_height_depthInterp.setAxis(1,so.getAxis(1))\n 
steric_height_depthInterp.setAxis(2,so.getAxis(2))\n steric_height_depthInterp.units = 'm^3 kg^-1 Pa (dynamic decimeter)'\n steric_height_anom_depthInterp = cdm.createVariable(steric_height_anom_depthInterp,id='steric_height_anom_depthInterp')\n steric_height_anom_depthInterp.setAxis(0,newdepth)\n steric_height_anom_depthInterp.setAxis(1,so.getAxis(1))\n steric_height_anom_depthInterp.setAxis(2,rho.getAxis(2))\n steric_height_anom_depthInterp.units = 'm^3 kg^-1 Pa (dynamic decimeter)'\n steric_height_thermo_anom_depthInterp = cdm.createVariable(steric_height_thermo_anom_depthInterp,id='steric_height_thermo_anom_depthInterp')\n steric_height_thermo_anom_depthInterp.setAxis(0,newdepth)\n steric_height_thermo_anom_depthInterp.setAxis(1,so.getAxis(1))\n steric_height_thermo_anom_depthInterp.setAxis(2,so.getAxis(2))\n steric_height_thermo_anom_depthInterp.units = 'm^3 kg^-1 Pa (dynamic decimeter)'\n steric_height_halo_anom_depthInterp = cdm.createVariable(steric_height_halo_anom_depthInterp,id='steric_height_halo_anom_depthInterp')\n steric_height_halo_anom_depthInterp.setAxis(0,newdepth)\n steric_height_halo_anom_depthInterp.setAxis(1,so.getAxis(1))\n steric_height_halo_anom_depthInterp.setAxis(2,so.getAxis(2))\n steric_height_halo_anom_depthInterp.units = 'm^3 kg^-1 Pa (dynamic decimeter)'\n steric_height_halo_anom2_depthInterp = cdm.createVariable(steric_height_halo_anom2_depthInterp,id='steric_height_halo_anom2_depthInterp')\n steric_height_halo_anom2_depthInterp.setAxis(0,newdepth)\n steric_height_halo_anom2_depthInterp.setAxis(1,so.getAxis(1))\n steric_height_halo_anom2_depthInterp.setAxis(2,so.getAxis(2))\n steric_height_halo_anom2_depthInterp.units = 'm^3 kg^-1 Pa (dynamic decimeter)'\n # Cleanup workspace\n del(newdepth) ; gc.collect()\n \n \n # Write variables to file\n if os.path.isfile(outFileName):\n os.remove(outFileName)\n filehandle = cdm.open(outFileName,'w')\n # Global attributes\n globalAttWrite(filehandle,options=None) ; # Use function to write standard global atts\n # Write seawater version\n filehandle.seawater_library_version = sw.__version__\n # Write makeSteric version\n makeStericPath = str(makeSteric.__code__).split(' ')[6]\n makeStericPath = replace(replace(makeStericPath,'\"',''),',','') ; # Clean scraped path\n filehandle.makeSteric_version = ' '.join(getGitInfo(makeStericPath)[0:3])\n # Master variables\n filehandle.write(so.astype('float32'))\n filehandle.write(so_chg.astype('float32'))\n filehandle.write(so_depthAve.astype('float32'))\n filehandle.write(temp.astype('float32'))\n filehandle.write(temp_chg.astype('float32'))\n filehandle.write(temp_depthAve.astype('float32'))\n # Derived variables\n filehandle.write(cp.astype('float32'))\n filehandle.write(cp_halo.astype('float32'))\n filehandle.write(cp_thermo.astype('float32')) \n filehandle.write(rho.astype('float32'))\n filehandle.write(rho_halo.astype('float32'))\n filehandle.write(rho_thermo.astype('float32'))\n filehandle.write(heat_content.astype('float32'))\n filehandle.write(heat_content_sanom.astype('float32'))\n filehandle.write(heat_content_sanom_depthInteg.astype('float32'))\n filehandle.write(heat_content_tanom.astype('float32'))\n filehandle.write(heat_content_tanom_depthInteg.astype('float32'))\n filehandle.write(heat_content_tsanom.astype('float32'))\n filehandle.write(heat_content_tsanom_depthInteg.astype('float32'))\n filehandle.write(steric_height.astype('float32'))\n filehandle.write(steric_height_depthInterp.astype('float32'))\n filehandle.write(steric_height_anom.astype('float32'))\n 
filehandle.write(steric_height_anom_depthInterp.astype('float32'))\n filehandle.write(steric_height_halo_anom.astype('float32'))\n filehandle.write(steric_height_halo_anom2.astype('float32'))\n filehandle.write(steric_height_halo_anom_depthInterp.astype('float32'))\n filehandle.write(steric_height_halo_anom2_depthInterp.astype('float32'))\n filehandle.write(steric_height_thermo_anom.astype('float32'))\n filehandle.write(steric_height_thermo_anom_depthInterp.astype('float32'))\n filehandle.close()\n # Cleanup workspace\n del(outFileName) ; gc.collect()",
"def __init__(self, doc, profil, skin, name='disque'):\n\n self.data = {\n 'thick': 5., # mm\n 'hole radius': 30., # mm\n 'diameter': skin['diameter'] + skin['thick'], # mm\n }\n\n side = profil['side']\n radius = profil['radius']\n thick = self.data['thick']\n diam = self.data['diameter']\n\n # use profile shape to make suppressed parts of the disque\n shape = []\n\n # 1st part\n shape.append(Vector(radius, side / 2, 0))\n shape.append(Vector(radius + diam, side / 2, 0))\n shape.append(Vector(radius + diam, -side / 2, 0))\n shape.append(Vector(radius, -side / 2, 0))\n shape.append(Vector(radius, side / 2, 0))\n\n wire0 = Part.makePolygon(shape)\n face0 = Part.Face(wire0)\n\n # 2nd and 3rd parts\n face1 = Part.Face(wire0)\n face2 = Part.Face(wire0)\n\n # make the volumes\n cut0 = face0.extrude(Vector(0, 0, thick))\n cut0.rotate(Vector(0, 0, 0), Vector(0, 0, 1), 0)\n\n cut1 = face1.extrude(Vector(0, 0, thick))\n cut1.rotate(Vector(0, 0, 0), Vector(0, 0, 1), 120)\n\n cut2 = face2.extrude(Vector(0, 0, thick))\n cut2.rotate(Vector(0, 0, 0), Vector(0, 0, 1), 240)\n\n # make the disque\n disque = Part.makeCylinder(diam / 2, thick)\n disque = disque.cut(cut0)\n disque = disque.cut(cut1)\n disque = disque.cut(cut2)\n\n # dig the hole\n hole = Part.makeCylinder(self.data['hole radius'], thick)\n disque = disque.cut(hole)\n\n MecaComponent.__init__(self, doc, disque, name, (0.95, 1., 1.))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets up the final pass, which applies motion blur and other effects
|
def _setupFinalPass(self):
# Set wrap for motion blur
colorTex = self.antialias.getResultTexture()
colorTex.setWrapU(Texture.WMClamp)
colorTex.setWrapV(Texture.WMClamp)
self._setFinalPassShader()
|
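The document above clamps the antialiased color texture before handing it to the final pass: motion blur samples that texture at UV offsets along a per-pixel velocity vector, and clamped wrap modes stop samples that land outside [0, 1] from wrapping around to the opposite screen edge. A minimal sketch, assuming Panda3D's Texture API as already used above:

from panda3d.core import Texture

def clamp_for_motion_blur(color_tex):
    # WMClamp repeats the border texel instead of tiling the texture,
    # so blurred samples past the screen edge stay on the correct side.
    color_tex.setWrapU(Texture.WMClamp)
    color_tex.setWrapV(Texture.WMClamp)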
[
"def main():\n # Import a image ready to be blurred\n old_img = SimpleImage(\"images/smiley-face.png\")\n # Show the original image\n old_img.show()\n\n # Blur the original for one time\n blurred_img = blur(old_img)\n # Keep to blur the blurred image till the total times that user wished to blur the image are completed\n for i in range(BLUR_TIMES - 1):\n blurred_img = blur(blurred_img)\n # Show the final blurred image\n blurred_img.show()",
"def _setBlurShader(self):\n blurVShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurVertical.fragment\")\n blurHShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurHorizontal.fragment\")\n self.blurColorV.setShader(blurVShader)\n self.blurColorH.setShader(blurHShader)",
"def initialize(self):\n# n_pre = self.n_hc_in * self.n_mc_in\n# n_post = self.n_hc_out * self.n_mc_out\n# self.p_i = np.zeros(n_pre)\n# self.p_j = np.zeros(n_post)\n# self.p_ij = np.zeros((n_pre, n_post))\n# self.bias = np.ones(n_post) * np.log((1./ self.n_patterns)**2)\n\n # show all patterns once and activate units in the output layer and apply WTA to the post activity\n# for pn in xrange(self.n_patterns):\n# pre_activity = self.input_activity[pn, :]\n# for post in xrange(n_post): # mc index\n# in_j = 0.\n# for pre in xrange(n_pre):\n# in_j += (self.w_ij[pre, post] * pre_activity[pre])\n# self.post_activity[pn, post] = in_j\n\n# print \"Calculating probabilities: \", self.iteration\n# self.calculate_probabilities()\n# print \"Calculating weights and bias: \", self.iteration\n# self.calculate_weights_and_bias()\n\n debug_fn_1 = self.params['bcpnn_folder'] + \"/weights_after_init_wij_mc_hc.dat\"\n debug_fn_2 = self.params['bcpnn_folder'] + \"/bias_after_init.dat\"\n debug_fn_3 = self.params['bcpnn_folder'] + \"/p_ij_after_init.dat\"\n debug_fn_4 = self.params['bcpnn_folder'] + \"/post_activity_after_init.dat\"\n debug_fn_5 = self.params['bcpnn_folder'] + \"/pi_after_init.dat\"\n debug_fn_6 = self.params['bcpnn_folder'] + \"/pj_after_init.dat\"\n debug_fn_7 = self.params['bcpnn_folder'] + \"/input_activity_after_init.dat\"\n\n np.savetxt(debug_fn_1, self.w_ij)\n np.savetxt(debug_fn_2, self.bias)\n np.savetxt(debug_fn_3, self.p_ij)\n np.savetxt(debug_fn_4, self.post_activity)\n np.savetxt(debug_fn_5, self.p_i)\n np.savetxt(debug_fn_6, self.p_j)\n np.savetxt(debug_fn_7, self.input_activity)",
"def update_step(self, t, replay_buffer, lr):\r\n\r\n s_batch, a_batch, r_batch, sp_batch, done_mask_batch = replay_buffer.sample(\r\n self.config.batch_size)\r\n \r\n\r\n fd = {\r\n # inputs\r\n self.s: s_batch,\r\n self.a: a_batch,\r\n self.r: r_batch,\r\n self.sp: sp_batch, \r\n self.done_mask: done_mask_batch,\r\n self.lr: lr, \r\n # extra info\r\n self.avg_reward_placeholder: self.avg_reward, \r\n self.max_reward_placeholder: self.max_reward, \r\n self.std_reward_placeholder: self.std_reward, \r\n self.avg_q_placeholder: self.avg_q, \r\n self.max_q_placeholder: self.max_q, \r\n self.std_q_placeholder: self.std_q, \r\n self.eval_reward_placeholder: self.eval_reward, \r\n }\r\n\r\n if self.config.lwf:\r\n fd[self.eval_reward_old_placeholder] = self.eval_reward_old\r\n\r\n if self.config.noise:\r\n state_shape = list(self.env.observation_space.shape)\r\n img_height, img_width, nchannels = state_shape\r\n if t > 0 and t % self.config.num_adv_iter == 0 and self.config.adv:\r\n\t\t self.noise_update = 255*self.noise_update/np.max(self.noise_update, axis=(1,2,3), keepdims=True)\r\n #print 'Adding adversarial noise', self.noise_update\r\n noise = self.noise_update + self.prev_noise\r\n noise_min = np.min(noise, axis=(1,2,3), keepdims=True)\r\n noise_zero = noise - noise_min\r\n noise_max = np.max(noise_zero, axis=(1,2,3), keepdims=True)\r\n noise_scaled = 255*(noise_zero/noise_max)\r\n noise = noise_scaled.astype(np.uint8)\r\n else:\r\n noise = np.random.choice(np.arange(256, dtype=np.uint8), replace=True, size=[self.config.batch_size, \\\r\n img_height, img_width, nchannels*self.config.state_history])\r\n fd[self.n] = noise\r\n self.prev_noise = noise\r\n\r\n if self.config.adv:\r\n\t loss_eval, grad_norm_eval, summary, self.noise_update, _ = self.sess.run([self.loss, \\\r\n self.grad_norm, self.merged, self.noise_grad, self.train_op], feed_dict=fd)\r\n else:\r\n loss_eval, grad_norm_eval, summary, _ = self.sess.run([self.loss, \\\r\n self.grad_norm, self.merged, self.train_op], feed_dict=fd)\r\n \r\n\r\n\r\n # tensorboard stuff\r\n self.file_writer.add_summary(summary, t)\r\n \r\n return loss_eval, grad_norm_eval",
"def __init__(self, cam_width=640, cam_height=480, hue_min=20, hue_max=160,\n sat_min=100, sat_max=255, val_min=200, val_max=256,\n display_thresholds=False):\n\n self.cam_width = cam_width\n self.cam_height = cam_height\n self.hue_min = hue_min\n self.hue_max = hue_max\n self.sat_min = sat_min\n self.sat_max = sat_max\n self.val_min = val_min\n self.val_max = val_max\n self.display_thresholds = display_thresholds\n\n self.capture = None # camera capture device\n self.channels = {\n 'hue': None,\n 'saturation': None,\n 'value': None,\n 'laser': None,\n }\n\n self.mouse = Controller()\n self.corners=[False,False,False,False] #TL, TR, BR, BL\n self.refPts=[]\n \n global screenRoot\n self.wRatio = 1/(self.cam_width/screenRoot.winfo_screenwidth())\n self.hRatio = 1/(self.cam_height/screenRoot.winfo_screenheight())\n print('Cam resolution : {} x {}'.format(self.cam_width, self.cam_height))\n print('Screen resolution : {} x {}'.format(screenRoot.winfo_screenwidth(), screenRoot.winfo_screenheight()))\n print('calculated ratio {} x {}'.format(self.wRatio, self.hRatio))\n self.emulate = Emulate()\n self.previous_pos = []",
"def main():\n original_img = image.Image('pres_casey.gif')\n red_image = red_filter(original_img)\n win = image.ImageWin(original_img.getWidth(), original_img.getHeight())\n red_image.draw(win)\n\n grayscale_img = grayscale(original_img)\n grayscale_img.draw(win)\n\n cycle_colors_img = cycle_colors(original_img)\n cycle_colors_img.draw(win)\n\n negative_img = negative(original_img)\n negative_img.draw(win)\n\n brightness_img = brightness(original_img, 90)\n brightness_img.draw(win)\n\n increase_contrast_img = increase_contrast(original_img)\n increase_contrast_img.draw(win)\n\n vertical_flip_image = vertical_flip(original_img)\n vertical_flip_image.draw(win)\n\n posterize_image = posterize(original_img)\n posterize_image.draw(win)\n\n scroll_image = scroll(original_img, 10)\n scroll_image.draw(win)\n\n horizontal_mirror_image = horizontal_mirror(original_img)\n horizontal_mirror_image.draw(win)\n\n obamafy_image = obamafy(original_img)\n obamafy_image.draw(win)",
"def update(self):\n if self.first_iter:\n self.first_iter = False\n self.params = [i for i in self.opt.target.params()]\n for i, p in enumerate(self.params):\n self.init_params.append(xp.copy(p.data))\n if not os.path.exists(self.output_dir):\n os.makedirs(self.output_dir)\n xp.savez(os.path.join(self.output_dir, 'init_params_{0}'.format(self.time_stamp)),\n self.init_params)\n if self.tracked_size:\n self.frozen_masks = [None] * len(self.params)\n super(DropBack, self).update()\n if self.decay_init and not self.first_iter:\n for i, _ in enumerate(self.init_params):\n self.init_params[i] = self.init_params[i]*.90\n if self.tracked_size:\n if not self.freeze:\n abs_values = []\n for i, param in enumerate(self.params):\n if param.name == 'b':\n values = (xp.abs(param.data).flatten()).copy()\n else:\n values = (xp.abs(param.data - self.init_params[i]).flatten()).copy()\n abs_values.append(values)\n abs_vals = xp.concatenate(abs_values)\n thresh = xp.partition(abs_vals, self.tracked_size)[-self.tracked_size]\n for i, param in enumerate(self.params):\n if param.name == 'b':\n if self.freeze:\n mask = self.frozen_masks[i]\n else:\n mask = xp.abs(param.data) > thresh\n param.data = mask*param.data\n else:\n if self.freeze:\n mask = self.frozen_masks[i]\n else:\n mask = xp.abs(param.data - self.init_params[i]) > thresh\n param.data = mask*param.data + self.init_params[i]*~mask\n self.frozen_masks[i] = mask\n if self.iteration == 3465:\n print(\"Checking inv...\")\n total_sum = sum([xp.count_nonzero(p.data != self.init_params[i]) for i, p in enumerate(self.params)])\n print(\"********\\n\\n Total non zero is: {}\\n\\n1*********\".format(total_sum))\n assert total_sum <= self.tracked_size * 1.1\n if self.track:\n if (self.iteration-1) % 100 == 0:\n flat_now = xp.concatenate([i.array.ravel() for i in self.params])\n flat_0 = xp.concatenate([i.ravel() for i in self.init_params])\n xp.savez(os.path.join(self.output_dir, f'l2_{self.iteration-1}'), xp.linalg.norm(flat_now - flat_0))\n xp.savez(os.path.join(self.output_dir, f'param_hist_{self.iteration-1}'), xp.concatenate([i.array.ravel() for i in self.params if i.name == 'b' or i.name == 'W']))",
"def bg_init(self):\n self.bg_depth, self.bg_color = self.get_camera_data()",
"def _createOcclusionBlurBuffer(self):\n self.blurOcclusionV = RenderTarget(\"blurOcclusionVertical\")\n self.blurOcclusionV.addColorTexture()\n self.blurOcclusionV.prepareOffscreenBuffer()\n\n self.blurOcclusionH = RenderTarget(\"blurOcclusionHorizontal\")\n self.blurOcclusionH.addColorTexture()\n self.blurOcclusionH.prepareOffscreenBuffer()\n\n # Mipmaps for blur?\n # self.blurOcclusionV.getColorTexture().setMinfilter(\n # Texture.FTLinearMipmapLinear)\n # self.combiner.getColorTexture().setMinfilter(\n # Texture.FTLinearMipmapLinear)",
"def process(self, frame):\n self.increment_time()\n if self.in_gray_mode and len(frame.shape) == 3:\n frame = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY).astype(np.float)\n # self.particle_distibution()\n # frame = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)\n # move particles by gaussian noise\n self.render(np.copy(frame), \"out/frames/frame_\" + str(self.time) + \"_0.png\")\n # self.show_error_function(np.copy(frame))\n # likelihood of measurement\n self.update_measurement_likelihood(frame)\n # update weights\n self.update_weights()\n # self.render_best_weights(frame)\n # resample\n sampled_particles, weights = self.resample_particles()\n self.particles = sampled_particles\n self.weights = weights\n self.weights = self.weights / np.sum(self.weights)\n\n self.render(np.copy(frame), \"out/frame_\" + str(self.time) + \"_1.png\")\n\n self.diffuse()\n self.render(np.copy(frame), \"out/frame_\" + str(self.time) + \"_2.png\")\n\n # get mean x, mean y etc\n x, y, _ = self.get_mean()\n return x, y",
"def _setShaderInputs(self):\n\n # Shader inputs for the light-culling pass\n if self.haveLightingPass:\n self.lightBoundsComputeBuff.setShaderInput(\n \"destination\", self.lightPerTileStorage)\n self.lightBoundsComputeBuff.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n # Shader inputs for the light-applying pass\n self.lightingComputeContainer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.lightingComputeContainer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n self.lightingComputeContainer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n\n\n self.lightingComputeContainer.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightingComputeContainer.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightingComputeContainer.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n if self.occlusion.requiresViewSpacePosNrm():\n self.lightingComputeContainer.setShaderInput(\n \"viewSpaceNormals\",\n self.normalPrecompute.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"viewSpacePosition\",\n self.normalPrecompute.getAuxTexture(0))\n\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlas\", self.lightManager.getAtlasTex())\n\n if self.settings.useHardwarePCF:\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlasPCF\", self.lightManager.getAtlasTex(), self.lightManager.getPCFSampleState())\n\n self.lightingComputeContainer.setShaderInput(\n \"destination\", self.lightingComputeCombinedTex)\n self.lightingComputeContainer.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.lightingComputeContainer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n self.lightingComputeContainer.setShaderInput(\n \"noiseTexture\",\n self.showbase.loader.loadTexture(\"Data/Occlusion/noise4x4.png\"))\n self.lightingComputeContainer.setShaderInput(\n \"lightsPerTile\", self.lightPerTileStorage)\n\n\n if self.settings.enableGlobalIllumination:\n self.lightingComputeContainer.setShaderInput(\"giDiffuseTex\", self.giPrecomputeBuffer.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\"giReflectionTex\", self.giPrecomputeBuffer.getAuxTexture(0))\n\n\n # Shader inputs for the occlusion blur passes\n if self.occlusion.requiresBlurring() and self.haveCombiner:\n self.blurOcclusionH.setShaderInput(\n \"colorTex\", self.blurOcclusionV.getColorTexture())\n\n if self.settings.enableTemporalReprojection:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\", self.combiner.getColorTexture())\n else:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\",\n self.lightingComputeContainer.getColorTexture())\n\n self.blurOcclusionH.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionH.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n\n # Shader inputs for the blur passes\n if self.blurEnabled:\n self.blurColorH.setShaderInput(\n \"dofStorage\", self.dofStorage)\n self.blurColorV.setShaderInput(\n \"dofStorage\", 
self.dofStorage)\n self.blurColorH.setShaderInput(\"colorTex\",\n self.antialias.getResultTexture())\n self.blurColorH.setShaderInput(\"depthTex\",\n self.deferredTarget.getDepthTexture())\n self.blurColorV.setShaderInput(\"colorTex\",\n self.blurColorH.getColorTexture())\n\n # Shader inputs for the temporal reprojection\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.combiner.setShaderInput(\n \"currentComputation\",\n self.lightingComputeContainer.getColorTexture())\n self.combiner.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n self.combiner.setShaderInput(\n \"positionBuffer\", self.deferredTarget.getColorTexture())\n self.combiner.setShaderInput(\n \"velocityBuffer\", self.deferredTarget.getAuxTexture(1))\n self.combiner.setShaderInput(\"currentPixelShift\",\n self.currentPixelShift)\n self.combiner.setShaderInput(\"lastPixelShift\",\n self.lastPixelShift)\n\n if self.blurEnabled:\n self.combiner.setShaderInput(\n \"dofStorage\", self.dofStorage)\n\n self.combiner.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.combiner.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n self.combiner.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.combiner.setShaderInput(\"lastMVP\", self.lastMVP)\n self.combiner.setShaderInput(\"cameraPosition\", self.cameraPosition)\n self.combiner.setShaderInput(\"currentMVP\", self.lastMVP)\n\n # Shader inputs for the final pass\n if self.blurEnabled:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.blurColorV.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.antialias.getResultTexture())\n\n if self.occlusion.requiresBlurring():\n self.normalPrecompute.setShaderInput(\n \"positionTex\", self.deferredTarget.getColorTexture())\n self.normalPrecompute.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.normalPrecompute.setShaderInput(\n \"mainRender\", self.showbase.render)\n self.normalPrecompute.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n\n if self.haveMRT:\n self.deferredTarget.setShaderInput(\n \"velocityTex\", self.deferredTarget.getAuxTexture(1))\n\n self.deferredTarget.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.deferredTarget.setShaderInput(\n \"motionBlurFactor\", self.motionBlurFactor)\n\n if self.haveLightingPass:\n self.deferredTarget.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.deferredTarget.setShaderInput(\n \"newFrame\", self.combiner.getColorTexture())\n self.deferredTarget.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.combiner.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.antialias.getResultTexture())\n\n self.deferredTarget.setShaderInput(\n \"currentPosition\", self.deferredTarget.getColorTexture())\n\n # Set last / current mvp handles\n self.showbase.render.setShaderInput(\"lastMVP\", self.lastMVP)\n\n # Set GI inputs\n if self.settings.enableGlobalIllumination:\n self.globalIllum.bindTo(self.giPrecomputeBuffer, \"giData\")\n\n self.giPrecomputeBuffer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.giPrecomputeBuffer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.giPrecomputeBuffer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n 
self.giPrecomputeBuffer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n self.giPrecomputeBuffer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n # Finally, set shaders\n self.reloadShaders()",
"def on2Claps(self):\n print(\"Flashed light\")",
"def _setupAntialiasing(self):\n technique = self.settings.antialiasingTechnique\n self.debug(\"Creating antialiasing handler for\", technique)\n\n if technique == \"None\":\n self.antialias = AntialiasingTechniqueNone()\n elif technique == \"SMAA\":\n self.antialias = AntialiasingTechniqueSMAA()\n elif technique == \"FXAA\":\n self.antialias = AntialiasingTechniqueFXAA()\n else:\n self.error(\n \"Unkown antialiasing technique\", technique, \"-> using None:\")\n self.antialias = AntialiasingTechniqueNone()\n\n if self.occlusion.requiresBlurring():\n self.antialias.setColorTexture(\n self.blurOcclusionH.getColorTexture())\n else:\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.antialias.setColorTexture(self.combiner.getColorTexture())\n else:\n self.antialias.setColorTexture(\n self.lightingComputeContainer.getColorTexture())\n\n self.antialias.setDepthTexture(self.deferredTarget.getDepthTexture())\n self.antialias.setVelocityTexture(self.deferredTarget.getAuxTexture(1))\n self.antialias.setup()",
"def detector_stage(self):\n if self.is_detector_stage:\n return\n log.debug(\"Staging detector: removing hardware gains.\")\n self.channels.load_temporary_hardware_gains()\n int_nf.detector_stage(\n frame_data=self.frames.data,\n frame_valid=self.frames.valid,\n channel_indices=np.arange(self.channels.size),\n channel_hardware_gain=self.channels.data.hardware_gain)\n self.is_detector_stage = True",
"def _reset_image_wl(self):\n iv = self._image_viewer\n inp = iv.GetInput()\n if inp:\n r = inp.GetScalarRange()\n iv.SetColorWindow(r[1] - r[0])\n iv.SetColorLevel(0.5 * (r[1] + r[0]))",
"def CalibrateBrightness(self, beamwidth, control=False, color=\"none\", width=640, height=480):\r\n \r\n max_255 = int(beamwidth*self.dx*self.dy) # max number of bright pixels\r\n if control == True:\r\n print \"calibrating brightness and exposure compensation...\"\r\n print \"max_255=\", max_255\r\n\r\n \r\n # first round: step 10\r\n for b in xrange(71,0,-10):\r\n #self.P.SetParameter(\"Action\",\"ResetTimer\") # keep laser alive\r\n self.Cam.SetBrightness(b)\r\n self.Cam.QueryImage(width,height)\r\n if color == \"green\": # make sure green laser is turned on! \r\n simg = cv2.split(self.Cam.img)\r\n gimg = simg[1] # simg[1] is the green channel\r\n gimg = self.CropImg(gimg) # crop image\r\n hist = np.bincount(gimg.ravel(),minlength=256) # optimise on green channel\r\n if hist[255] < max_255:\r\n break \r\n else:\r\n gimg = self.CropImg(self.Cam.img) # crop image \r\n hist = np.bincount(gimg.ravel(),minlength=256) # optimise on all channels\r\n if hist[255] < 2*max_255:\r\n break \r\n \r\n # second round: step 1 \r\n for bb in xrange(b+9,b-1,-1):\r\n #self.P.SetParameter(\"Action\",\"ResetTimer\") # keep laser alive\r\n self.Cam.SetBrightness(bb)\r\n self.Cam.QueryImage(width,height)\r\n if color == \"green\": # make sure green laser is turned on! \r\n simg = cv2.split(self.Cam.img)\r\n gimg = simg[1] # simg[1] is the green channel\r\n gimg = self.CropImg(gimg) # crop image\r\n hist = np.bincount(gimg.ravel(),minlength=256) # optimise on green channel\r\n if hist[255] < max_255:\r\n break \r\n else:\r\n gimg = self.CropImg(self.Cam.img) # crop image \r\n hist = np.bincount(gimg.ravel(),minlength=256) # optimise on all channels\r\n if hist[255] < 2*max_255:\r\n break \r\n \r\n #self.Cam.best_brightness = self.Cam.best_brightness + 1 # one step back\r\n if control == True:\r\n print \"best_brightness: \", self.Cam.best_brightness\r\n \r\n # third round: find optimal camera.exposure_compensation\r\n # optimise on number of laser spot contours found\r\n for c in xrange(20,-25,-1):\r\n #self.P.SetParameter(\"Action\",\"ResetTimer\") # keep laser alive\r\n self.Cam.SetExposureCompensation(c)\r\n self.Cam.QueryImage(width,height)\r\n \r\n if color == \"green\": # make sure green laser is turned on!\r\n simg = cv2.split(self.Cam.img)\r\n gimg = simg[1] # simg[1] is the green channel\r\n else:\r\n gimg = self.Cam.img\r\n \r\n gimg = self.WarpImg(gimg) # warp\r\n gimg = self.CropImg(gimg) # crop image\r\n gimg = cv2.blur(gimg, (3,3) ) # blur\r\n ret, dst = cv2.threshold(gimg, 251, 255, cv2.THRESH_BINARY) # only keep the brightest pixels \r\n # detect contours\r\n contours, hierarchy = cv2.findContours( dst, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\r\n if contours != None and len(contours)<3:\r\n break\r\n \r\n if control == True:\r\n print \"best_exposure_compensation: \", self.Cam.best_exposure_compensation",
"def __post_init__(self):\n\n mods_shifted = np.fft.fftshift(self.mods, axes=(-1,-2))\n\n opix = self.obj_params.npix\n if self.obj is None:\n obj = getRandomComplexArray((opix, opix))\n object.__setattr__(self, 'obj', obj)\n\n ppix = self.probe_params.npix\n prop_kernel = getNFPropKernel(beam_shape=(ppix, ppix),\n pixel_pitch=self.det_params.pixel_pitch,\n wavelength=self.probe_params.wavelength,\n prop_dist=self.det_params.obj_dist)\n if self.probe is None:\n mods_avg = np.mean(mods_shifted, axis=0)\n probe = np.fft.ifftshift(np.fft.ifft2(np.fft.fft2(mods_avg)\n / prop_kernel))\n object.__setattr__(self, 'probe', probe)\n object.__setattr__(self, 'prop_kernel', prop_kernel)\n object.__setattr__(self, 'mods_shifted', mods_shifted)",
"def blur_beer(org_img, config_path, weights_path, labels_path):\r\n try:\r\n # PART 1 - Detection of bottles\r\n\r\n my_bar = st.progress(0)\r\n\r\n CONFIDENCE = 0.5\r\n SCORE_THRESHOLD = 0.5\r\n IOU_THRESHOLD = 0.5\r\n extra = 20\r\n # the neural network configuration\r\n config_path = config_path\r\n\r\n # the YOLO net weights file\r\n weights_path = weights_path\r\n\r\n # loading all the class labels (objects)\r\n labels = open(labels_path).read().strip().split(\"\\n\")\r\n # generating colors for each object for later plotting\r\n colors = np.random.randint(0, 255, size=(len(labels), 3), dtype=\"uint8\")\r\n\r\n # load the YOLO network\r\n net = cv.dnn.readNetFromDarknet(config_path, weights_path)\r\n\r\n # path_name = img_path\r\n\r\n padded = org_img.copy()\r\n padded = cv.copyMakeBorder(padded, 20, 20, 20, 20, cv.BORDER_REFLECT)\r\n\r\n image = padded.copy()\r\n h, w = image.shape[:2]\r\n\r\n # create 4D blob\r\n blob = cv.dnn.blobFromImage(image, 1/255.0, (416, 416), swapRB=True, crop=False)\r\n\r\n # sets the blob as the input of the network\r\n net.setInput(blob)\r\n\r\n # get all the layer names\r\n ln = net.getLayerNames()\r\n ln = [ln[i[0] - 1] for i in net.getUnconnectedOutLayers()]\r\n\r\n # feed forward (inference) and get the network output\r\n # measure how much it took in seconds\r\n start = time.perf_counter()\r\n layer_outputs = net.forward(ln)\r\n time_took = time.perf_counter() - start\r\n\r\n font_scale = 1\r\n thickness = 1\r\n boxes, confidences, class_ids = [], [], []\r\n\r\n # loop over each of the layer outputs\r\n for output in layer_outputs:\r\n\r\n # loop over each of the object detections\r\n for detection in output:\r\n\r\n # extract the class id (label) and confidence (as a probability) of\r\n # the current object detection\r\n scores = detection[5:]\r\n class_id = np.argmax(scores)\r\n confidence = scores[class_id]\r\n\r\n # discard out weak predictions by ensuring the detected\r\n # probability is greater than the minimum probability\r\n if confidence > CONFIDENCE:\r\n\r\n # scale the bounding box coordinates back relative to the\r\n # size of the image, keeping in mind that YOLO actually\r\n # returns the center (x, y)-coordinates of the bounding\r\n # box followed by the boxes' width and height\r\n box = detection[:4] * np.array([w, h, w, h])\r\n (centerX, centerY, width, height) = box.astype(\"int\")\r\n\r\n # use the center (x, y)-coordinates to derive the top and\r\n # and left corner of the bounding box\r\n x = int(centerX - (width / 2))\r\n y = int(centerY - (height / 2))\r\n\r\n # update our list of bounding box coordinates, confidences,\r\n # and class IDs\r\n boxes.append([x, y, int(width), int(height)])\r\n confidences.append(float(confidence))\r\n class_ids.append(class_id)\r\n\r\n my_bar.progress(10)\r\n\r\n # loop over the indexes we are keeping\r\n for i in range(len(boxes)):\r\n\r\n # extract the bounding box coordinates\r\n x, y = boxes[i][0], boxes[i][1]\r\n w, h = boxes[i][2], boxes[i][3]\r\n\r\n # draw a bounding box rectangle and label on the image\r\n color = [int(c) for c in colors[class_ids[i]]]\r\n cv.rectangle(image, (x, y), (x + w, y + h), color=color, thickness=thickness)\r\n text = f\"{labels[class_ids[i]]}: {confidences[i]:.2f}\"\r\n\r\n # calculate text width & height to draw the transparent boxes as background of the text\r\n (text_width, text_height) = cv.getTextSize(text, cv.FONT_HERSHEY_SIMPLEX, fontScale=font_scale, thickness=thickness)[0]\r\n text_offset_x = x\r\n text_offset_y = y - 5\r\n box_coords = 
((text_offset_x, text_offset_y), (text_offset_x + text_width + 2, text_offset_y - text_height))\r\n overlay = image.copy()\r\n cv.rectangle(overlay, box_coords[0], box_coords[1], color=color, thickness=cv.FILLED)\r\n\r\n # add opacity (transparency to the box)\r\n image = cv.addWeighted(overlay, 0.6, image, 0.4, 0)\r\n\r\n # now put the text (label: confidence %)\r\n cv.putText(image, text, (x, y - 5), cv.FONT_HERSHEY_SIMPLEX,\r\n fontScale=font_scale, color=(0, 0, 0), thickness=thickness)\r\n\r\n my_bar.progress(15)\r\n tl_cord = []\r\n br_cord = []\r\n\r\n # perform the non maximum suppression given the scores defined before\r\n idxs = cv.dnn.NMSBoxes(boxes, confidences, SCORE_THRESHOLD, IOU_THRESHOLD)\r\n\r\n # ensure at least one detection exists\r\n if len(idxs) > 0:\r\n # loop over the indexes we are keeping\r\n for i in idxs.flatten():\r\n\r\n # extract the bounding box coordinates\r\n x, y = boxes[i][0], boxes[i][1]\r\n w, h = boxes[i][2], boxes[i][3]\r\n\r\n # draw a bounding box rectangle and label on the image\r\n tl_cord.append((x,y))\r\n br_cord.append((x+w, y+h))\r\n color = [int(c) for c in colors[class_ids[i]]]\r\n cv.rectangle(image, (x, y), (x + w, y + h), color=color, thickness=thickness)\r\n text = f\"{labels[class_ids[i]]}: {confidences[i]:.2f}\"\r\n\r\n # calculate text width & height to draw the transparent boxes as background of the text\r\n (text_width, text_height) = cv.getTextSize(text, cv.FONT_HERSHEY_SIMPLEX, fontScale=font_scale, thickness=thickness)[0]\r\n text_offset_x = x\r\n text_offset_y = y - 5\r\n box_coords = ((text_offset_x, text_offset_y), (text_offset_x + text_width + 2, text_offset_y - text_height))\r\n overlay = image.copy()\r\n cv.rectangle(overlay, box_coords[0], box_coords[1], color=color, thickness=cv.FILLED)\r\n\r\n # add opacity (transparency to the box)\r\n image = cv.addWeighted(overlay, 0.6, image, 0.4, 0)\r\n\r\n # now put the text (label: confidence %)\r\n cv.putText(image, text, (x, y - 5), cv.FONT_HERSHEY_SIMPLEX,\r\n fontScale=font_scale, color=(0, 0, 0), thickness=thickness)\r\n\r\n\r\n crop_imgs = []\r\n\r\n for i in range(len(tl_cord)):\r\n crop_imgs.append(padded[tl_cord[i][1] - extra:br_cord[i][1] + extra, tl_cord[i][0] - extra:br_cord[i][0] + extra])\r\n\r\n my_bar.progress(25)\r\n\r\n\r\n # PART 2 - Segmentation of detected bottles\r\n\r\n\r\n output_img = padded.copy()\r\n for i, crop_img in enumerate(crop_imgs):\r\n orig = crop_img\r\n img = cv.blur(orig,(7,11))\r\n\r\n b_bg, g_bg, r_bg = cv.split(img)\r\n\r\n b1 = []\r\n g1 = []\r\n r1 = []\r\n\r\n h = img.shape[0]\r\n w = img.shape[1]\r\n\r\n # Aggregate the background pixels\r\n for color1, color2 in zip([b_bg, g_bg, r_bg], [b1, g1, r1]):\r\n for strip2d in [color1[:w//5,:], color1[:,:w//5], color1[-w//5:,:], color1[:,-w//5:]]:\r\n for strip1d in strip2d:\r\n for val in strip1d:\r\n color2.append(val)\r\n\r\n\r\n r1 = np.array(r1)\r\n g1 = np.array(g1)\r\n b1 = np.array(b1)\r\n\r\n # Histogram of the background pixels\r\n bg_hist1,bins = np.histogram(b1.ravel(),256,[0,256])\r\n bg_hist2,bins = np.histogram(g1.ravel(),256,[0,256])\r\n bg_hist3,bins = np.histogram(r1.ravel(),256,[0,256])\r\n my_bar.progress(35)\r\n\r\n b_bg, g_bg, r_bg = cv.split(img)\r\n\r\n b2 = []\r\n g2 = []\r\n r2 = []\r\n\r\n # Agregate the object pixels\r\n for color1, color2 in zip([b_bg, g_bg, r_bg], [b2, g2, r2]):\r\n for strip1d in [color1[h//8:h-h//8, w//4:-w//4]]:\r\n for val in strip1d:\r\n color2.append(val)\r\n\r\n r2 = np.array(r2)\r\n g2 = np.array(g2)\r\n b2 = np.array(b2)\r\n\r\n # 
Histogram of object pixels\r\n obj_hist1,bins = np.histogram(b2.ravel(),256,[0,256])\r\n obj_hist2,bins = np.histogram(g2.ravel(),256,[0,256])\r\n obj_hist3,bins = np.histogram(r2.ravel(),256,[0,256])\r\n my_bar.progress(40)\r\n\r\n b_all, g_all, r_all = cv.split(img)\r\n\r\n # Probability of being background\r\n prob_being_background = np.array(bg_hist1)[list(b_all)]/np.sum(bg_hist1) + np.array(bg_hist2)[list(g_all)]/np.sum(bg_hist2) + np.array(bg_hist3)[list(r_all)]/np.sum(bg_hist3)\r\n\r\n # Probability of being the object\r\n prob_being_object = np.array(obj_hist1)[list(b_all)]/np.sum(obj_hist1) + np.array(obj_hist2)[list(g_all)]/np.sum(obj_hist2) + np.array(obj_hist3)[list(r_all)]/np.sum(obj_hist3)\r\n\r\n y = np.array(range(w))\r\n\r\n # Weighted absolute distance of pixel from the center line\r\n dist_from_center = 0.035*np.abs(y - w/2)/w\r\n\r\n # Weighted absolute distance of pixel from the edge\r\n dist_from_edge = 0.035*(w/2 - np.abs(y - w/2))/w\r\n\r\n # Each pixel mapped to its probable output\r\n prob_matrix = (dist_from_edge + prob_being_object > dist_from_center+prob_being_background) * 255\r\n\r\n\r\n # For a smoother output\r\n prob_matrix_blurred = cv.blur(prob_matrix, (1,1))\r\n\r\n disp = orig.copy()\r\n blurred = cv.blur(orig,(50,50))\r\n\r\n # Background mask\r\n bg_mask = (cv.blur(prob_matrix_blurred,(25,35)) < 35) * 1\r\n bg_mask = np.stack((bg_mask, bg_mask, bg_mask), axis = 2)\r\n\r\n # Object mask\r\n obj_mask = (cv.blur(prob_matrix_blurred,(25,35)) > 35) * 1\r\n obj_mask = np.stack((obj_mask, obj_mask, obj_mask), axis = 2)\r\n final = np.multiply(bg_mask,disp) + np.multiply(obj_mask,blurred)\r\n\r\n # Replace the detected segment with the blurred segment\r\n output_img[tl_cord[i][1]-extra:br_cord[i][1]+extra, tl_cord[i][0]-extra:br_cord[i][0]+extra] = final\r\n\r\n my_bar.progress(100)\r\n\r\n output_img = output_img[20:-20, 20:-20]\r\n \r\n st.image(output_img, caption='Final Image.', use_column_width=True, channels=\"BGR\")\r\n return 'Done!'\r\n\r\n except:\r\n return 'Try another image!!'",
"def update_params(self): # computes gradient descent\n self.W=self.W-(self.rate*self.dW)\n self.b=self.b-(self.rate*self.db)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Creates a texture to store the lights per tile. Should be replaced with SSBOs later
|
def _makeLightPerTileStorage(self):
    # Each tile reserves an 8x8 block of texels, i.e. room for up to
    # 64 light indices per tile
    storageSizeX = self.precomputeSize.x * 8
    storageSizeY = self.precomputeSize.y * 8
self.debug(
"Creating per tile storage of size",
storageSizeX, "x", storageSizeY)
self.lightPerTileStorage = Texture("LightsPerTile")
self.lightPerTileStorage.setup2dTexture(
storageSizeX, storageSizeY, Texture.TUnsignedShort, Texture.FR32i)
    # Integer texture data, so filtering must stay disabled
    self.lightPerTileStorage.setMinfilter(Texture.FTNearest)
    self.lightPerTileStorage.setMagfilter(Texture.FTNearest)
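    # Hypothetical usage sketch (assumption, not from the original source):
    # the per-tile storage is written by the culling pass and read by the
    # lighting pass, both of which are created later in
    # _createLightingPipeline. Assuming the RenderTarget wrapper forwards
    # shader inputs like a NodePath, the bindings could look like:
    #
    #   self.lightBoundsComputeBuff.setShaderInput(
    #       "lightsPerTile", self.lightPerTileStorage)  # written via imageStore
    #   self.lightingComputeContainer.setShaderInput(
    #       "lightsPerTile", self.lightPerTileStorage)  # read per pixel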
|
[
"def world_texture(hdr_name):\r\n world=bpy.data.worlds['World']\r\n world.use_nodes = True\r\n links = world.node_tree.links\r\n nodes = world.node_tree.nodes\r\n for l in links:\r\n links.remove(l)\r\n for n in nodes:\r\n nodes.remove(n)\r\n world_output = nodes.new(type='ShaderNodeOutputWorld')\r\n background_node = nodes.new(type='ShaderNodeBackground')\r\n if hdr_name[-3:] == 'exr':\r\n background_node.inputs[1].default_value = 100\r\n env_node = nodes.new(type='ShaderNodeTexEnvironment')\r\n env_node.image = bpy.data.images.load(hdr_name)\r\n mapping_node = nodes.new(type='ShaderNodeMapping')\r\n mapping_node.inputs[2].default_value[1] = random.uniform(0, 3.14)\r\n cor_node = nodes.new(type='ShaderNodeTexCoord')\r\n links.new(cor_node.outputs['Generated'],mapping_node.inputs['Vector'])\r\n links.new(mapping_node.outputs['Vector'],env_node.inputs['Vector'])\r\n links.new(env_node.outputs['Color'],background_node.inputs['Color'])\r\n links.new(background_node.outputs['Background'],world_output.inputs['Surface'])\r\n return",
"def create_textures(self):\n self.log.info(__name__ + ': ' + 'def ' + self.create_textures.__name__ + '(): ' + self.create_textures.__doc__)\n\n button = pygame.Surface((self.button_x, self.button_y), pygame.SRCALPHA, 32)\n button.fill((0, 0, 0, 0), None, pygame.BLEND_RGBA_MULT)\n pygame.draw.rect(button, Colors.DEEPSKYBLUE, (0, 0, self.button_x, self.button_y))\n pygame.draw.rect(button, Colors.BLACK, (0, 0, self.button_x, self.button_y), 2)\n self.textures['button'] = button",
"def __init__(self):\n\n # Dimensions of the texture array.\n self.__width = 1024\n self.__height = 1024\n self.__depth = 20\n self.__scratch_depth = 2\n\n # Allocate the texture array.\n # NOTE: If this goes wrong, we're probably trying to do this before\n # the opengl context has been created, and things will go horribly\n # wrong later! For some reason glGetError() is returning 0 anyway.\n self.__texture = GL.glGenTextures(1)\n\n # Ok, initialise the texture.\n GL.glBindTexture(GL.GL_TEXTURE_2D_ARRAY, self.__texture)\n GL.glTexParameteri(GL.GL_TEXTURE_2D_ARRAY, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR)\n GL.glTexParameteri(GL.GL_TEXTURE_2D_ARRAY, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR)\n GL.glTexParameteri(GL.GL_TEXTURE_2D_ARRAY, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP_TO_EDGE)\n GL.glTexParameteri(GL.GL_TEXTURE_2D_ARRAY, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP_TO_EDGE)\n GL.glTexImage3D(\n GL.GL_TEXTURE_2D_ARRAY,\n 0, #level\n GL.GL_RGBA8, # internal format\n self.__width,\n self.__height,\n self.__depth + self.__scratch_depth,\n 0, #border\n GL.GL_RGBA, # format\n GL.GL_UNSIGNED_BYTE, # data type\n None # The data.\n )\n\n # We insert images one at a time, and keep track of the current\n # insertion point. When we reach the end of the row, the next\n # row starts at a y coordinate flush with the bottom of the tallest\n # item in the current row. Note that this will end up with lots of\n # wasted space, we don't do any work to optimise the packing!\n self.__cursor = TextureArray.Cursor()\n self.__cursor.end = self.__depth\n\n # Initialise the scratch cursor.\n self.__scratch_cursor = TextureArray.Cursor()\n self.__scratch_cursor.index = self.__depth\n self.__scratch_cursor.end = self.__depth + self.__scratch_depth\n\n # Map from filenames to virtual textures.\n self.__filename_map = {}",
"def __initTiles(self):\n for m in range(self.amountVertical):\n for n in range(self.amountHorizontal):\n tile = self.themeFactory.createThemeElement(self.mapfile[m][n])\n tile.setCoordinates(m, n)\n tile.number = (m * self.amountHorizontal) + n\n self.tiles.append(tile)\n self.sprites.add(tile)",
"def make_tiles(self):\n num_tiles = self._puzzle_height * self._puzzle_width\n #subsurface is a ract(left, top, width, height\n \n for idx in xrange(num_tiles):\n self._tiles.append(self._tiles_sprite.subsurface(\n (idx * TILE_SIZE, 0, TILE_SIZE, TILE_SIZE)))",
"def regenerate_skylight(self):\n\n lightmap = zeros((16, 16, 128), dtype=uint8)\n\n for x, z in product(xrange(16), repeat=2):\n # The maximum lighting value, unsurprisingly, is 0xf, which is the\n # biggest possible value for a nibble.\n light = 0xf\n\n # Apparently, skylights start at the block *above* the block on\n # which the light is incident?\n height = self.heightmap[x, z] + 1\n\n # The topmost block, regardless of type, is set to maximum\n # lighting, as are all the blocks above it.\n lightmap[x, z, height:] = light\n\n # Dim the light going throught the remaining blocks, until there\n # is no more light left.\n for y in range(height, -1, -1):\n dim = blocks[self.blocks[x, z, y]].dim\n light -= dim\n if light <= 0:\n break\n\n lightmap[x, z, y] = light\n\n # Now it's time to spread the light around. This flavor uses extra\n # memory to speed things up; the basic idea is to spread *all* light,\n # one glow level at a time, rather than spread each block\n # individually.\n max_height = amax(self.heightmap)\n lightable = vectorize(lambda block: blocks[block].dim < 15)(self.blocks)\n # Protip: This is a bitwise AND because logical ANDs on arrays can't\n # happen in Numpy.\n unlighted = logical_not(lightmap) & lightable\n\n # Create a mask to find all blocks that have an unlighted block\n # as a neighbour in the xz-plane.\n mask = zeros((16, 16, max_height), dtype=bool)\n mask[:-1,:,:max_height] |= unlighted[1:, :, :max_height]\n mask[:,:-1,:max_height] |= unlighted[:, 1:, :max_height]\n mask[1:,:,:max_height] |= unlighted[:-1, :, :max_height]\n mask[:,1:,:max_height] |= unlighted[:, :-1, :max_height]\n\n # Apply the mask to the lightmap to find all lighted blocks with one\n # or more unlighted blocks as neighbours.\n edges = logical_and(mask, lightmap[:, :, :max_height]).nonzero()\n\n spread = [tuple(coords) for coords in transpose(edges)]\n visited = set()\n\n # Run the actual glow loop. For each glow level, go over unvisited air\n # blocks and illuminate them.\n for glow in range(14, 0, -1):\n for coords in spread:\n if lightmap[coords] <= glow:\n visited.add(coords)\n continue\n\n for dx, dz, dy in (\n (1, 0, 0),\n (-1, 0, 0),\n (0, 1, 0),\n (0, -1, 0),\n (0, 0, 1),\n (0, 0, -1)):\n x, z, y = coords\n x += dx\n z += dz\n y += dy\n\n if not (0 <= x < 16 and\n 0 <= z < 16 and\n 0 <= y < 128):\n continue\n\n if (x, z, y) in visited:\n continue\n\n if lightable[x, z, y] and lightmap[x, z, y] < glow:\n lightmap[x, z, y] = glow - blocks[self.blocks[x, z, y]].dim\n visited.add((x, z, y))\n spread = visited\n visited = set()\n\n self.skylight = lightmap.clip(0, 15)",
"def image_process():\n\n texture = Image.new('RGBA', import_coords(4, 4), (0, 0, 0, 0))\n imgdir = sorted(os.listdir('textures'), key=get_block_id)\n files = len(imgdir)\n x = 0\n y = 0\n while x <= 4:\n while y <= 4:\n for fn in imgdir:\n fnpath = imgpath(fn)\n files -= 1\n if files < 0:\n break\n fnimg = flip_image(Image.open(fnpath))\n texture.paste(fnimg, import_coords(x, y))\n print('Pasted texture ' + fn + \" into textures with coords \" + str(x) + \", \" + str(y))\n x += 1\n if x == 4:\n y += 1\n x = 0\n if files < 0:\n break\n if files < 0:\n break\n texture = texture.transpose(Image.FLIP_TOP_BOTTOM)\n\n # Save texture internally\n\n try:\n texture.save(basepath('_texture.png'))\n except IOError:\n print(\"Couldn't save temponary texture file. Check write-access?\")\n else:\n print(\"Saved temponary texture file from memory, checking md5 checksum...\")\n\n # Compute hash texture in memory (that we created above)\n\n try:\n hash = md5_file(basepath('_texture.png'))\n except:\n print(\"Couldn't hash texture. md5 not installed?\")\n else:\n print(\"Succesfully hashed texture in memory. Checksum is: \" + hash)\n\n # Compute hash for old texture.png, if it exists\n\n try:\n newhash = md5_file('texture.png')\n except IOError:\n print(\"Couldn't open texture.png, check if it is properly saved, or maybe it doesn't exist now?\")\n newhash = 0\n else:\n print(\"Checksum for texture.png is: \" + newhash)\n\n # Saving texture.png from memory\n if hash != newhash:\n try:\n texture.save(TEXTURE_PATH)\n except:\n print('Failed to create texture.png! Maybe check if write-access has given?')\n raise IOError(\"Failed to create texture map.\")\n else:\n print(\"Successfully created texture.png, maybe it didn't exist or corrupted\")\n else:\n print(\"All okay, cached textures will do the job, no need to resave.\")",
"def new(self):\n self.all_sprites = pygame.sprite.LayeredUpdates()\n self.walls = pygame.sprite.Group()\n self.holes = pygame.sprite.Group()\n self.decelerations = pygame.sprite.Group()\n self.holdbacks = pygame.sprite.Group()\n self.viruses_shoot = pygame.sprite.Group()\n self.viruses_move = pygame.sprite.Group()\n self.shooting = pygame.sprite.Group()\n self.items = pygame.sprite.Group()\n self.map = Map(path.join(self.map_folder, 'new_tilemap.tmx'))\n self.map_img = self.map.make_map()\n self.map_rect = self.map_img.get_rect()\n self.dark = True\n for tile_object in self.map.tmxdata.objects:\n obj_centerx = tile_object.x + tile_object.width / 2\n obj_centery = tile_object.y + tile_object.height / 2\n if tile_object.name == 'player':\n if self.role1_col == YELLOW:\n self.player = Player(self, obj_centerx, obj_centery, 'role1')\n else:\n self.player = Player(self, obj_centerx, obj_centery, 'role2')\n if tile_object.name == 'wall':\n Wall(self, tile_object.x, tile_object.y, tile_object.width, tile_object.height)\n if tile_object.name == 'hole':\n Hole(self, tile_object.x, tile_object.y, tile_object.width, tile_object.height)\n if tile_object.name == 'deceleration':\n Deceleration(self, tile_object.x, tile_object.y, tile_object.width, tile_object.height)\n if tile_object.name == 'holdback':\n Holdback(self, tile_object.x, tile_object.y)\n if tile_object.name == 'virus_shoot':\n Virus(self, obj_centerx, obj_centery, 'shoot')\n if tile_object.name == 'virus_movex':\n Virus(self, obj_centerx, obj_centery, 'move_x')\n if tile_object.name == 'virus_movey':\n Virus(self, obj_centerx, obj_centery, 'move_y')\n if tile_object.name in ['treatment', 'key', 'light']:\n Item(self, obj_centerx, obj_centery, tile_object.name)\n self.camera = Camera(self.map.width, self.map.height)",
"def random_texture(n=100):\n m = Microstructure(name='random_texture')\n for i in range(n):\n m.grains.append(Grain(i + 1, Orientation.random()))\n return m",
"def init_tiles(self):\n for simple in [Game.TILE_SIMPLE_DOT, Game.TILE_SIMPLE_BAMBOO, Game.TILE_SIMPLE_CHAR]:\n for value in range(Game.SIZE_SIMPLE):\n self.tiles += [(simple, value) for i in range(4)]\n\n for value in ['east', 'west', 'north', 'south']:\n self.tiles += [(Game.TILE_HONOR_WIND, value) for i in range(4)]\n self.tiles += [(Game.TILE_BONUS_FLOWER, value)]\n self.tiles += [(Game.TILE_BONUS_SEASON, value)]\n\n for value in ['red', 'green', 'white']:\n self.tiles += [(Game.TILE_HONOR_DRAGON, value) for i in range(4)]\n\n random.shuffle(self.tiles)\n return",
"def draw_tile_backgrounds(self, tiles):\n\n def process_tile(tile):\n h = tile.height\n h_index = (h - self.parent.min_height) / (self.parent.max_height - self.parent.min_height)\n\n rgb_rand_1 = random.randint(0, self.ocean_noise)\n\n height_rgb = [0, 0, 0]\n height_rgb[0] = self.height_rgb_low[0] + h_index * (self.height_rgb_high[0] - self.height_rgb_low[0])\n height_rgb[1] = self.height_rgb_low[1] + h_index * (self.height_rgb_high[1] - self.height_rgb_low[1])\n height_rgb[2] = self.height_rgb_low[2] + h_index * (self.height_rgb_high[2] - self.height_rgb_low[2])\n\n water_rgb = (rgb_rand_1, rgb_rand_1, 255)\n if self.screen_mode == \"dark\":\n water_rgb = (rgb_rand_1 // 2, rgb_rand_1 // 2, 150)\n if self.screen_mode == \"martin\":\n water_rgb = (195 + rgb_rand_1 * 0.5, 234 + rgb_rand_1 * 0.5, 251)\n\n fillColors = [\n height_rgb, # Ground\n height_rgb, # Rail\n self.road_tile_rgb, # Road\n height_rgb, # Town building\n height_rgb, # Trees\n self.station_rgb, # Stations\n water_rgb, # Water\n height_rgb, # Void\n self.industry_rgb, # Industries\n self.torb_rgb, # Tunnel/bridge\n height_rgb, # Objects\n ]\n fillColor = fillColors[tile.kind % len(fillColors)]\n if tile.kind == 1:\n rail = tile.occupant\n if rail.is_depot:\n fillColor = self.rail_depot_rgb\n\n if tile.kind == 5:\n station = tile.occupant\n if station.station_type == 0:\n fillColor = self.rail_station_rgb\n if station.station_type == 1:\n fillColor = self.airport_rgb\n if station.station_type == 2:\n fillColor = self.bus_station_rgb\n if station.station_type == 3:\n fillColor = self.truck_station_rgb\n if station.station_type == 4:\n fillColor = self.heliport_rgb\n if station.station_type == 5:\n fillColor = self.seaport_rgb\n\n self.draw_square(tile, fillColor)\n if tile.kind == 1:\n rail = tile.occupant\n if not rail.is_depot:\n self.draw_rail_background(tile)\n\n if self.parent.show_progress_bar:\n with alive_bar(len(tiles)) as abar:\n for tile in tiles:\n process_tile(tile)\n abar()\n else:\n for tile in tiles:\n process_tile(tile)",
"def LoadTextures(): \n image_1 = open(\"/home/mikeodf/constr/images_opengl/steel_ball3.jpg\") \n image_2 = open(\"/home/mikeodf/constr/images_opengl/steel_green_ball3.jpg\") \n image_3 = open(\"/home/mikeodf/constr/images_opengl/steel_blue_ball3.jpg\") \n image_4 = open(\"/home/mikeodf/constr/images_opengl/steel_red_ball3.jpg\") \n\n image_1 = image_1.tostring(\"raw\", \"RGBX\", 0, -1) # convert bmp to the type needed for textures \n image_2 = image_2.tostring(\"raw\", \"RGBX\", 0, -1) # convert bmp to the type needed for textures \n image_3 = image_3.tostring(\"raw\", \"RGBX\", 0, -1) # convert bmp to the type needed for textures \n image_4 = image_4.tostring(\"raw\", \"RGBX\", 0, -1) # convert bmp to the type needed for textures \n glGenTextures(11, texture_1) # Create texture number and names and sizw. \n #===================================== \n texture_setup(image_1, 0, ix, iy) \n texture_setup(image_2, 1, ix, iy) \n texture_setup(image_3, 2, ix, iy) \n texture_setup(image_4, 3, ix, iy)",
"def AddTexture(self):\n\t\t\n\t\tpass",
"def _create_tiles(self):\r\n for column in range(self.columns):\r\n for row in range(self.rows):\r\n tile_name = str(column) + ',' + str(row)\r\n self.tiles[tile_name] = Tile(column=column, row=row)",
"def test_layer_factory(self):\n\t\ttile_map = [\n\t\t\t[0,3,2],\n\t\t\t[2,3,0],\n\t\t]\n\t\tself.graphic = TextureTileMap(tile_map, get_testing_tileset(2,2))\n\n\t\tassert_layer_factory(self, 'texture tile map')",
"def sync(self):\n if self.texture is None:\n self.texture = texture.TextureUniform(\"texture_tileset\", self.im, self.program)\n else:\n self.texture.setData(self.im)",
"def create_sprite(self):\n rgb = (84, 170, 232)\n height = 15\n length = 15\n self.sprite = BaseStationSprite(rgb)",
"def generateImage(self, **kwargs):\n\n start_x = kwargs.get('start_x', None)\n start_y = kwargs.get('start_y', None)\n tile_width = kwargs.get('tile_width', 5)\n tile_height = kwargs.get('tile_height', 5)\n\n # Check that we have x and y tile coordinates\n if start_x == None or start_y == None :\n start_x, start_y = self.getXY()\n\n # Determine the size of the image\n width, height = 256 * tile_width, 256 * tile_height\n\n #Create a new image of the size require\n map_img = Image.new('RGB', (width,height))\n sat_img = Image.new('RGB', (width,height))\n\n for x in range(0, tile_width):\n for y in range(0, tile_height) :\n if True:\n if args.label:\n # Store the image with labels\n url = 'https://mt0.google.com/vt/lyrs=y&?x=' + str(start_x + x) + '&y=' + str(start_y + y) + '&z=' + str( self._zoom)\n if args.debug: print(url)\n else:\n url = 'https://mt0.google.com/vt/lyrs=s&?x=' + str(start_x + x) + '&y=' + str(start_y + y) + '&z=' + str( self._zoom)\n if args.debug: print(url)\n current_tile = str(x)+'-'+str(y)\n urllib.request.urlretrieve(url, current_tile)\n\n im = Image.open(current_tile)\n sat_img.paste(im, (x*256, y*256))\n\n os.remove(current_tile)\n\n\n if True:\n if args.label:\n url = 'https://mt0.google.com/vt?x='+str(start_x+x)+'&y='+str(start_y+y)+'&z='+str(self._zoom)\n if args.debug: print(url)\n else:\n url = 'https://mt0.google.com/vt?x='+str(start_x+x)+'&y='+str(start_y+y)+'&z='+str(self._zoom) # work needs to be done\n if args.debug: print(url)\n\n current_tile = str(x)+'-'+str(y)\n urllib.request.urlretrieve(url, current_tile)\n\n im = Image.open(current_tile)\n map_img.paste(im, (x*256, y*256))\n\n os.remove(current_tile)\n\n return map_img, sat_img",
"def export_texture_effect(self, b_mat_texslot = None):\n texeff = NifFormat.NiTextureEffect()\n texeff.flags = 4\n texeff.rotation.set_identity()\n texeff.scale = 1.0\n texeff.model_projection_matrix.set_identity()\n texeff.texture_filtering = NifFormat.TexFilterMode.FILTER_TRILERP\n texeff.texture_clamping = NifFormat.TexClampMode.WRAP_S_WRAP_T\n texeff.texture_type = NifFormat.EffectType.EFFECT_ENVIRONMENT_MAP\n texeff.coordinate_generation_type = NifFormat.CoordGenType.CG_SPHERE_MAP\n if b_mat_texslot:\n texeff.source_texture = self.export_source_texture(b_mat_texslot.texture)\n if self.properties.game == 'MORROWIND':\n texeff.num_affected_node_list_pointers += 1\n texeff.affected_node_list_pointers.update_size()\n texeff.unknown_vector.x = 1.0\n return self.register_block(texeff)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Creates the half-resolution buffer which computes GI and GI reflections. We use half resolution for performance
|
def _creatGIPrecomputeBuffer(self):
self.giPrecomputeBuffer = RenderTarget("GICompute")
    # Half resolution for performance; one aux attachment, presumably
    # holding the GI reflection term alongside the main GI color
    self.giPrecomputeBuffer.setSize(self.size.x / 2, self.size.y / 2)
    self.giPrecomputeBuffer.addColorTexture()
    self.giPrecomputeBuffer.addAuxTextures(1)
self.giPrecomputeBuffer.setColorBits(16)
self.giPrecomputeBuffer.prepareOffscreenBuffer()
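    # Hypothetical follow-up sketch (assumption, not from the original
    # source): a half-resolution result like this is usually sampled back
    # at full resolution with bilinear filtering in a later combine pass:
    #
    #   giColor = self.giPrecomputeBuffer.getColorTexture()
    #   giColor.setMinfilter(Texture.FTLinear)
    #   giColor.setMagfilter(Texture.FTLinear)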
|
[
"def _makeLightBoundsComputationBuffer(self, w, h):\n self.debug(\"Creating light precomputation buffer of size\", w, \"x\", h)\n self.lightBoundsComputeBuff = RenderTarget(\"ComputeLightTileBounds\")\n self.lightBoundsComputeBuff.setSize(w, h)\n self.lightBoundsComputeBuff.setColorWrite(False)\n self.lightBoundsComputeBuff.prepareOffscreenBuffer()",
"def get_rings( bin_files, start, num_shots_per_proc, \n qi,ai,bi, \n panels, pan_inds, \n outfilename, wavelen, pixsize, \n detdist, pol_fac, num_phi ):\n \n# ~Form output arrays\n pol_intens = ones( ( num_shots_per_proc, num_phi) )*-1\n pol_intens_bin = ones_like( pol_intens )*-1\n\n\n for i_shot in xrange( num_shots_per_proc):\n# ~Load image and find center\n \n print bin_files[ start + i_shot], i_shot\n\n img = fromfile( bin_files[ start + i_shot ],dtype=float32).reshape( (2463, 2527) )\n\n q = qi\n a = ai\n b = bi\n\n# ~Define full detector coordinates\n #X,Y = meshgrid( arange(img.shape[0]), arange(img.shape[1]) ) # x,y dim of image\n X,Y = meshgrid( arange(img.shape[1]), arange(img.shape[0]) ) # x,y dim of image\n \n R = sqrt( (X-a)**2 + ( Y-b )**2 ) # distance of each pixel from center\n PHI = arctan2( ( Y-b) , ( X-a ) ) # azimuthal angle of each pixel\n phi_values = linspace( -pi, pi, num_phi ) # azimuthal angle of each interpolated pixel in each ring\n# ...and define x,y of each interpolated pixel around the ring\n ring = array( [ [ q*sin(phi)+b, q*cos( phi) + a] for phi in phi_values ] ) \n# ~Do a panel-wise interpolation of the ring\n for j_pan, i_pan in pan_inds:\n# parameter \"p\" is [ymin,ymax,xmin,xmax] of panel \n p = panels[j_pan,i_pan]\n pan = img[ p[0]:p[1], p[2]:p[3] ]\n # pan = common_mode( pan)\n pan = depolarize( pan, R[ p[0]:p[1], p[2]:p[3] ], \n PHI[ p[0]:p[1], p[2]:p[3] ], \n wavelen, pixsize,detdist, pol_fac )\n\n# ~Build the spline (rectBivariateSpline)\n rbs = RBS( arange( p[0],p[1] ), arange( p[2],p[3]), pan,kx=5,ky=5 ) \n# ...determine the x,y of each bragg-ring-pixel on this panel \n interp_here = ring[ ring[:,1] > p[2] ]\n interp_here = interp_here[ interp_here[:,1] < p[3]-1 ]\n interp_here = interp_here[ interp_here[:,0] > p[0] ]\n interp_here = interp_here[ interp_here[:,0] < p[1]-1 ]\n if list(interp_here) == []:\n print \"moving on\"\n continue\n# ...and compute the interpolated values\n \n vals = spline_eval( rbs, interp_here )\n# \n #~These are the phi_vals of the interpolated pixels\n phis = np.arctan2( interp_here[:,0]-b, interp_here[:,1]-a )\n# ...and the corresponding indices in our output array\n phi_inds = array( map( lambda x : find_nearest( phi_values, x ) , phis ) )\n pol_intens[ i_shot , phi_inds] = copy(vals)\n \n \n vm = vals.mean()\n vs = vals.std()\n# ~Make binary intensity\n #vals[vals > vm + 3.5*vs] = 0\n #vals[vals < vm - 3.5*vs] = 0\n\n vmask = ma.masked_equal( vals, 0 )\n\n\n #cutoff = vmask.mean() + vmask.std()\n cutoff = vm + vs\n vals[vals <= cutoff] = 0\n vals[ vals > 0] = 1\n pol_intens_bin[ i_shot , phi_inds] = copy(vals)\n \n \n# ~Make some plots\n #subplot(212);\n #plot( phi_inds[ where( vals==1)[0] ], pol_intens[i_shot,phi_inds[where( vals==1 )[0]] ] ,'bd',lw=2)\n #plot( phi_inds, pol_intens[i_shot,phi_inds], 'bx',ms=2.25, lw=2)\n #xlabel('phi (0-2PI)',fontsize=22)\n #ylabel('counts',fontsize=22)\n #subplot(211)\n #imshow( img, vmin=0, vmax=5500 )\n #colorbar()\n #gca().add_patch( Circle( (a,b), q, fc='none', lw=2, ec='k' ) )\n #show()\n\n# ~Output the rings\n out_file = h5py.File(outfilename, 'w' )\n out_file.create_dataset( 'pol_intens', data=pol_intens )\n out_file.create_dataset( 'pol_intens_bin', data=pol_intens_bin )\n out_file.create_dataset( 'file_names', data=bin_files )\n out_file.close()",
"def create_half_res_images(self) -> None:\n\n utils.create_if_not_exists(self.half_res_img_path)\n pngs = utils.sorted_glob(self.img_pngs_glob)\n\n for orig_path in pngs:\n img = cv2.imread(orig_path)\n img_half = im_helpers.resize_percent(img, 50)\n cv2.imwrite(f'{self.half_res_img_path}/{os.path.basename(orig_path)}', img_half)",
"def PGWB(self):\n h0 = 0.6766\n f = np.linspace(start=10**-6, stop=10, num=10000)\n # omega_gw = 10**-15\n Sh_PGWB = 8.85 * 10**-27 * h0 * (self.omega_gw / 10 **-15) ** (1/2) * f**(-3/2) ## units =\n\n return f, Sh_PGWB",
"def NEWsplitRingsIntoBins(bin_size, distances, si_objects): #Could possibly improve this to speed up a bit\n bin_list = [] #2-d array of each bin of ring types (types are ints)\n bin_mids = [] #midpoint distance of each ring/bin from edge of hole\n \n #Get maximum distance away that a ring is from the hole or axis (in nm)\n max_dist = float(numpy.max(distances))\n bin_start = 0\n while bin_start < max_dist:\n #creates the bins\n bin_mids.append(bin_start + (bin_size/2)) #save each midpoint in a list\n \n #put bins in the bin_list\n #for each bin, set up a list of frequencies for each ring type\n bin_list.append([0, 0, 0, 0, 0, 0]) \n bin_start += bin_size\n \n for i in range(len(si_objects)):\n si = si_objects[i] #This is the Si that we are looking at\n si_dist = distances[i] #Here is its distance from the hole or axis\n si_rings = si.get_rings() #Here is a list of 3 ring objects for each Si\n \n #cycle through bins to find which one to put these 3 ring types in\n bin_start = 0\n bin_num = 0\n while bin_start < max_dist:\n if bin_start <= si_dist < bin_start + bin_size:\n #If the distance for the si fits in the bin,\n for ring in si_rings:\n #increment each ring type for the si's rings\n ring_type = ring.get_type()\n #only add a fraction to the frequency,\n #ex: 1 Si in a 7-mem ring is only 1/7 of the ring\n bin_list[bin_num][ring_type - 4] += (1/ring_type)\n \n bin_start += bin_size\n bin_num += 1\n \n return bin_list, bin_mids",
"def __init__(self, capiness = 0.5, interiorIncludesCaps = False, *args, **keywordArgs):\n \n Shape.__init__(self, *args, **keywordArgs)\n \n # TODO: use VBO's so all instances share the same data?\n # TODO: fix seams caused by texture coords\n \n self.capiness = capiness\n self.interiorIncludesCaps = interiorIncludesCaps\n \n steps = 32 # must be multiple of four\n angleIncrement = 2.0 * pi / steps\n capSteps = steps / 4\n azimuthIncrement = pi / 2.0 / capSteps\n \n topVertices = []\n topTexCoords = []\n bottomVertices = []\n bottomTexCoords = []\n for azimuthStep in range(0, capSteps):\n topAzimuth = pi / 2.0 - (azimuthStep + 1) * azimuthIncrement\n topY, topMag = (sin(topAzimuth) * (capiness / 2.0), cos(topAzimuth) * 0.5)\n bottomAzimuth = -azimuthStep * azimuthIncrement\n bottomY, bottomMag = (sin(bottomAzimuth) * (capiness / 2.0), cos(bottomAzimuth) * 0.5)\n for step in range(0, steps):\n angle = pi + step * angleIncrement\n topVertices += [(sin(angle) * topMag, topY + (0.5 * (1.0 - capiness)), cos(angle) * topMag)]\n topTexCoords += [(float(step) / steps, topVertices[-1][1] + 0.5)]\n bottomVertices += [(sin(angle) * bottomMag, -(0.5 * (1.0 - capiness)) + bottomY, cos(angle) * bottomMag)]\n bottomTexCoords += [(float(step) / steps, bottomVertices[-1][1] + 0.5)]\n\n vertices = [(0.0, 0.5, 0.0)] + topVertices + bottomVertices + [(0.0, -0.5, 0.0)]\n self.geometry().setVertexArray(Shape.vectorArrayFromList(vertices))\n \n normals = []\n for vertex in vertices:\n normals += [(vertex[0] / 2.0, vertex[1] / 2.0, vertex[2] / 2.0)]\n self.geometry().setNormalArray(Shape.vectorArrayFromList(normals))\n self.geometry().setNormalBinding(osg.Geometry.BIND_PER_VERTEX)\n \n texCoords = [(0.0, 1.0)] + topTexCoords + bottomTexCoords + [(0.0, 0.0)]\n self.geometry().setTexCoordArray(0, Shape.vectorArrayFromList(texCoords))\n \n faceSet = Shape.primitiveSetFromList(osg.PrimitiveSet.TRIANGLE_FAN, range(0, steps + 1) + [1, 0])\n self.geometry().addPrimitiveSet(faceSet)\n for stripNum in range(0, 2 * capSteps - 1):\n vertexIndices = []\n baseIndex = 1 + stripNum * steps\n for step in range(steps) + [0]:\n vertexIndices += [baseIndex + step, baseIndex + steps + step]\n faceSet = Shape.primitiveSetFromList(osg.PrimitiveSet.QUAD_STRIP, vertexIndices)\n self.geometry().addPrimitiveSet(faceSet)\n bottomFanBaseIndex = len(vertices) - steps - 1\n faceSet = Shape.primitiveSetFromList(osg.PrimitiveSet.TRIANGLE_FAN, [len(vertices) - 1] + range(bottomFanBaseIndex, bottomFanBaseIndex + steps) + [bottomFanBaseIndex, len(vertices) - 1])\n self.geometry().addPrimitiveSet(faceSet)",
"def _makeLightingComputeBuffer(self):\n self.lightingComputeContainer = RenderTarget(\"ComputeLighting\")\n\n if self.settings.enableTemporalReprojection:\n self.lightingComputeContainer.setSize(self.size.x / 2, self.size.y)\n else:\n self.lightingComputeContainer.setSize(self.size.x, self.size.y)\n\n self.lightingComputeContainer.addColorTexture()\n self.lightingComputeContainer.setColorBits(16)\n self.lightingComputeContainer.prepareOffscreenBuffer()\n\n self.lightingComputeCombinedTex = Texture(\"Lighting-Compute-Combined\")\n self.lightingComputeCombinedTex.setup2dTexture(\n self.size.x, self.size.y, Texture.TFloat, Texture.FRgba8)\n self.lightingComputeCombinedTex.setMinfilter(Texture.FTLinear)\n self.lightingComputeCombinedTex.setMagfilter(Texture.FTLinear)\n\n self.lastPositionBuffer = Texture(\"Last-Position-Buffer\")\n self.lastPositionBuffer.setup2dTexture(\n self.size.x, self.size.y, Texture.TFloat, Texture.FRgba16)\n self.lastPositionBuffer.setMinfilter(Texture.FTNearest)\n self.lastPositionBuffer.setMagfilter(Texture.FTNearest)",
"def imagetransformer2d_base_8l_8_32_big():\n hparams = image_transformer2d_base()\n hparams.num_heads = 16\n hparams.hidden_size = 1024\n hparams.filter_size = 2048\n hparams.num_decoder_layers = 8\n hparams.batch_size = 1\n hparams.layer_prepostprocess_dropout = 0.3\n hparams.query_shape = (8, 16)\n hparams.memory_flange = (0, 32)\n hparams.unconditional = int(False)\n return hparams",
"def _gen_grid(self):\n self.grid = np.zeros((self.tot_obj, 4))\n self.oc_grid = np.zeros((self.tot_obj, 4))\n for obj_id in range(self.tot_obj):\n x, y = self._gen_pos()\n in_view = 1.0 # Fully visible grid. Mask it later during observation gneration\n if self.obj_types[obj_id] == \"c\":\n obj_type = 1.0\n elif self.obj_types[obj_id] == \"d\":\n obj_type = 2.0\n elif self.obj_types[obj_id] == \"h\":\n obj_type = 3.0\n else:\n obj_type = -2.0\n self.grid[obj_id] = np.asarray([x, y, in_view, obj_type])",
"def __init__(self):\n self.order = 1\n # Here is range of pixels to use in each dimension relative to ceil(u,v)\n self._duv = np.arange(-self.order, self.order, dtype=int)\n # And here are flattened arrays of u, v displacement for whole footprint\n self._du = np.ones( (2*self.order,2*self.order), dtype=int) * self._duv\n self._du = self._du.flatten()\n self._dv = np.ones( (2*self.order,2*self.order), dtype=int) * \\\n self._duv[:,np.newaxis]\n self._dv = self._dv.flatten()",
"def _gen_ref(self, verbose=False):\n\n # PSF information\n # opd = self.psf_info['opd']\n # fov_pix = self.psf_info['fov_pix']\n # oversample = self.psf_info['oversample']\n\n # Detector information\n det_info = self.det_info\n wind_mode = det_info['wind_mode']\n xpix, ypix = (det_info['xpix'], det_info['ypix'])\n x0, y0 = (det_info['x0'], det_info['y0'])\n\n try: \n del self.nrc_ref\n except AttributeError:\n pass\n\n nrc = NIRCam(filter=self.filter, pupil=self.pupil, mask=self.mask,\n module=self.module, wind_mode=wind_mode, xpix=xpix, ypix=ypix,\n x0=x0, y0=y0)\n\n self.nrc_ref = nrc\n\n # offset_r = self.psf_info['offset_r']\n # offset_theta = self.psf_info['offset_theta']\n\n # Create a NIRCam reference class\n # If it already exists, just update OPD info\n # try:\n # if verbose: print(\"Updating NIRCam reference coefficients...\")\n # self.nrc_ref.wfe_drift = self.wfe_ref_drift\n # except AttributeError:\n # if verbose: print(\"Creating NIRCam reference class...\")\n\n # nrc = nrc_hci(filter=self.filter, pupil=self.pupil, mask=self.mask,\n # module=self.module, wind_mode=wind_mode, xpix=xpix, ypix=ypix,\n # fov_pix=fov_pix, oversample=oversample, opd=opd,\n # offset_r=offset_r, offset_theta=offset_theta,\n # wfe_drift=0, bar_offset=self.bar_offset)\n # self.nrc_ref = nrc\n # self.nrc_ref.wfe_drift = self.wfe_ref_drift",
"def test_simplering():\n R=0.10 #meters\n cylind=Cylinder(Vector((0,0,0),type=\"CARTESIAN\"),Vector((0,0,1),type=\"CARTESIAN\"),R)\n SegList=[HelixSeg(Vector((R,0,0),type=\"CYLINDRICAL\"),\n Vector((R,pi,0),type=\"CYLINDRICAL\"),cylind),\n HelixSeg(Vector((R,pi,0),type=\"CYLINDRICAL\"),\n Vector((R,2*pi,0),type=\"CYLINDRICAL\"),cylind)]\n Fieldpoint=Vector((0,0,0),type=\"CARTESIAN\")\n print MagFieldHelixSegArray(SegList,Fieldpoint)",
"def secondDerivativesImages(self):\n secondDs_gal = {}\n for i in range(self.num_params):\n for j in range(self.num_params):\n param_i = self.param_names[i]\n param_j = self.param_names[j]\n\n params_iup_jup = copy.deepcopy(self.g_parameters.params)\n params_iup_jup[param_i] += self.steps[param_i]\n params_iup_jup[param_j] += self.steps[param_j]\n\n params_idown_jup = copy.deepcopy(self.g_parameters.params)\n params_idown_jup[param_i] -= self.steps[param_i]\n params_idown_jup[param_j] += self.steps[param_j] \n\n params_iup_jdown = copy.deepcopy(self.g_parameters.params)\n params_iup_jdown[param_i] += self.steps[param_i]\n params_iup_jdown[param_j] -= self.steps[param_j] \n\n\n params_idown_jdown = copy.deepcopy(self.g_parameters.params)\n params_idown_jdown[param_i] -= self.steps[param_i]\n params_idown_jdown[param_j] -= self.steps[param_j]\n\n gal_iup_jup = galfun.getGalaxiesModels(params_iup_jup)\n gal_idown_jup = galfun.getGalaxiesModels(params_idown_jup)\n gal_iup_jdown = galfun.getGalaxiesModels(params_iup_jdown)\n gal_idown_jdown = galfun.getGalaxiesModels(params_idown_jdown)\n\n img_iup_jup = self.image_renderer_partials.getImage(gal_iup_jup)\n img_idown_jup = self.image_renderer_partials.getImage(gal_idown_jup)\n img_iup_jdown = self.image_renderer_partials.getImage(gal_iup_jdown)\n img_idown_jdown = self.image_renderer_partials.getImage(gal_idown_jdown)\n\n secondDs_gal[param_i, param_j] = ((img_iup_jup + img_idown_jdown - \n img_idown_jup - img_iup_jdown)/\n (4*self.steps[param_i]*self.steps[param_j])).array\n\n return secondDs_gal",
"def create_high_res_topogaphy(arcpy, in_raster, hgt_m_raster, cellsize, sr2, projdir):\n\n if arcpy.CheckExtension(\"Spatial\") == \"Available\":\n arcpy.CheckOutExtension(\"Spatial\")\n from arcpy.sa import *\n\n # Second part of the process\n loglines = ['Step 2 initiated...'] # Initiate log list for this process\n arcpy.AddMessage(loglines[-1])\n\n #Get the extent information from raster object\n arcpy.MakeRasterLayer_management(hgt_m_raster, 'hgt_m_Layer')\n descData = arcpy.Describe('hgt_m_Layer')\n extent = descData.Extent\n arcpy.env.snapRaster = 'hgt_m_Layer' # Does this work or does it need to be hgt_m_raster?\n\n # Test to make sure hgt_m is an integer multiple of supplied output resolution\n cellsize1 = descData.children[0].meanCellHeight\n loglines.append(' The GEOGRID File resolution is %sm' %str(cellsize1))\n arcpy.AddMessage(loglines[-1])\n cellsize2 = (cellsize1/cellsize)\n loglines.append(' The High-resolution dataset will be %sm' %str(cellsize2))\n arcpy.AddMessage(loglines[-1])\n\n # List of coordinates from extent and create a polygon geometry object using an array object\n boundaryPolygon = extent.polygon # Added 2016-02-11 to simplify code\n extent1 = extent # Added 2016-02-11 to simplify code\n\n # Now project the polygon object to the raster catalog spatial reference\n sr3 = arcpy.Describe(in_raster).spatialReference\n arcpy.CreateCustomGeoTransformation_management(geoTransfmName, sr2, sr3, customGeoTransfm)\n loglines.append(' Tranformation: %s' %geoTransfmName)\n arcpy.AddMessage(loglines[-1])\n projpoly = boundaryPolygon.projectAs(sr3, geoTransfmName) # Should be: u'NAD_1983_To_WGS_1984_1'\n\n # Create raster layer from input raster or mosaic dataset\n MosaicLayer = \"MosaicLayer\"\n arcpy.MakeRasterLayer_management(in_raster, MosaicLayer, \"#\", projpoly.extent)\n loglines.append(' MakeRasterLayer process completed without error.')\n arcpy.AddMessage(loglines[-1])\n loglines.append(' The coarse grid has %s rows and %s columns.' %(arcpy.Describe(hgt_m_raster).height, arcpy.Describe(hgt_m_raster).width))\n arcpy.AddMessage(loglines[-1])\n loglines.append(' The input elevation grid (before reprojection) has %s rows and %s columns.' %(arcpy.Describe(MosaicLayer).height, arcpy.Describe(MosaicLayer).width))\n arcpy.AddMessage(loglines[-1])\n\n # Set environments to force creation of high-res raster to have exact extent and cellsize needed\n arcpy.env.extent = extent1 # using extent directly doesn't work.\n arcpy.env.outputCoordinateSystem = sr2\n arcpy.env.cellSize = cellsize2\n arcpy.env.snapRaster = hgt_m_raster # Redundant?\n\n # Now project the polygon object to the raster catalog spatial reference\n mosprj = os.path.join(projdir, 'mosaicprj')\n descData = arcpy.Describe('hgt_m_Layer')\n extent = descData.Extent\n loglines.append(' Projecting input elevation data to WRF coordinate system.')\n arcpy.AddMessage(loglines[-1])\n arcpy.ProjectRaster_management(MosaicLayer, mosprj, sr2, \"NEAREST\", cellsize2, geoTransfmName)\n loglines.append(' Finished projecting input elevation data to WRF coordinate system.')\n arcpy.AddMessage(loglines[-1])\n loglines.append(' The fine grid (before ExtractByMask) has %s rows and %s columns.' 
%(arcpy.Describe(mosprj).height, arcpy.Describe(mosprj).width))\n arcpy.AddMessage(loglines[-1])\n\n # Extract By Mask\n arcpy.env.cellSize = cellsize2\n mosprj2 = ExtractByMask(mosprj, hgt_m_raster) # Thin the raster down from the projected raster.\n arcpy.Delete_management(mosprj)\n mosprj2.save(os.path.join(projdir, 'mosaicprj'))\n\n # Check that the number of rows and columns are correct\n loglines.append(' Fine Grid has %s rows and %s columns.' %(arcpy.Describe(mosprj2).height, arcpy.Describe(mosprj2).width))\n arcpy.AddMessage(loglines[-1])\n\n # Clean up\n arcpy.Delete_management(\"MosaicLayer\")\n del MosaicLayer\n\n # Finish\n loglines.append(' Step 2 completed without error.')\n arcpy.AddMessage(loglines[-1])\n return mosprj, cellsize1, cellsize2, loglines",
"def hrsflat(rawpath, outpath, detname, obsmode, master_bias=None, f_limit=1000, first_order=53, \n y_start=30, y_limit=3920, smooth_length=20, smooth_fraction=0.4, filter_size=151,\n link=False, sdb=None, clobber=True):\n if not os.path.isdir(rawpath): return\n\n image_list = ImageFileCollection(rawpath)\n if len(image_list.files)==0: return\n\n #make output directory\n if not os.path.isdir(outpath): os.mkdir(outpath)\n\n #get the observing date\n obsdate=get_obsdate(image_list.summary['file'][0])\n\n #setup the instrument prefix\n \n if detname=='HRDET':\n prefix='R'\n process = red_process\n rdnoise=6.81*u.electron\n elif detname=='HBDET':\n prefix='H'\n process = blue_process\n rdnoise=7.11*u.electron\n else:\n raise ValueError('detname must be a valid HRS Detector name')\n\n #process the flat frames\n matches = (image_list.summary['obstype'] == 'Flat field') * (image_list.summary['detnam'] == detname) * (image_list.summary['obsmode'] == obsmode) * (image_list.summary['propid'] != 'JUNK')\n flat_list = []\n for fname in image_list.summary['file'][matches]:\n logging.info('Processing flat image {}'.format(fname))\n ccd = process(rawpath+fname, masterbias=master_bias, error=True, rdnoise=rdnoise)\n flat_list.append(ccd)\n if sdb is not None: dq_ccd_insert(rawpath + fname, sdb)\n\n if flat_list:\n outfile = \"{0}/{2}FLAT_{1}_{3}.fits\".format(outpath, obsdate, prefix, obsmode.replace(' ', '_'))\n logging.info('Created master flat {}'.format(os.path.basename(outfile)))\n if os.path.isfile(outfile) and clobber: os.remove(outfile)\n flat = ccdproc.combine(flat_list, method='median', output_file=outfile)\n\n norm = clean_flatimage(flat.data, filter_size=filter_size, flux_limit=0.3,\n block_size=100, percentile_low=30, median_size=5)\n\n norm[norm>0]=1\n if detname=='HRDET':\n xc = 1947 #int(xs/2.0)\n detect_kern = norm[1:100, xc]\n #these remove light that has bleed at the edges and may need adjusting\n norm[:,:20]=0\n norm[:,4040:]=0\n elif detname=='HBDET':\n ys, xs = norm.shape\n xc = int(xs/2.0)\n detect_kern = norm[32:110, xc]\n\n frame = create_orderframe(norm, first_order, xc, detect_kern, smooth_length=smooth_length, \n smooth_fraction=smooth_fraction, y_start=y_start, y_limit=y_limit)\n order_file = \"{0}/{2}ORDER_{1}_{3}.fits\".format(outpath, obsdate, prefix, obsmode.replace(' ', '_'))\n logging.info('Created order frame {}'.format(os.path.basename(order_file)))\n hdu = fits.PrimaryHDU(frame)\n hdu.writeto(order_file, clobber=True)\n if sdb: dq_order_insert(order_file, sdb)\n\n if link:\n link='/salt/HRS_Cals/CAL_FLAT/{0}/{1}/product/{2}'.format(obsdate[0:4], obsdate[4:8], os.path.basename(outfile))\n if os.path.islink(link) and clobber: os.remove(link)\n print(outfile)\n print(link)\n os.symlink(outfile, link)\n olink='/salt/HRS_Cals/CAL_FLAT/{0}/{1}/product/{2}'.format(obsdate[0:4], obsdate[4:8], os.path.basename(order_file))\n if os.path.islink(olink) and clobber: os.remove(olink)\n os.symlink(order_file, olink)",
"def build_quadrature(self) :\n\n# Compute the Gauss-Legendre quadrature\n [self.polar_nodes,self.polar_weight] = scipy.special.orthogonal.p_roots(self.sn) \n\n# Compute the Chebyshev quadrature\n [self.azith_nodes,self.azith_weight] = self.chebyshev()\n\n self.cos_theta = np.zeros((self.sn/2,1))\n for i in xrange(0,self.sn/2) :\n self.cos_theta[i] = np.real(self.polar_nodes[self.sn/2+i])\n self.sin_theta = np.sqrt(1-self.cos_theta**2)\n\n# Compute omega on one octant\n self.build_octant()\n\n# Compute omega by deploying the octant \n self.deploy_octant()\n\n# Compute the spherical harmonics\n self.compute_harmonics()\n\n# Compute D\n if self.galerkin == True :\n self.D = scipy.linalg.inv(self.M)\n else :\n self.D = np.dot(self.M.transpose(),np.diag(self.weight))",
"def resource_allocation():\n i, s, j, p, t, g, w = np.load(\"ForestData.npy\").T\n zeros = np.zeros_like(s)\n\n\n tgw = np.vstack((np.vstack((-t[0::], -g[0::])), -w[0::]))\n I = np.eye(21).astype(float)\n h1 = np.array([-40000., -5., -70*788.])\n\n # print p\n c = np.array(-p)\n G = np.vstack((tgw, -I))\n h = np.append(h1, zeros)\n\n # print \"c\\n\", c, \"\\nG\\n\", G, \"\\nh\\n\", h, \"\\n\"\n\n c = matrix(c)\n G = matrix(G)\n h = matrix(h)\n A = matrix(np.array([[1.,1.,1.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.],\n [0.,0.,0.,1.,1.,1.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.],\n [0.,0.,0.,0.,0.,0.,1.,1.,1.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.],\n [0.,0.,0.,0.,0.,0.,0.,0.,0.,1.,1.,1.,0.,0.,0.,0.,0.,0.,0.,0.,0.],\n [0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,1.,1.,1.,0.,0.,0.,0.,0.,0.],\n [0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,1.,1.,1.,0.,0.,0.],\n [0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,1.,1.,1.]]))\n b = matrix(s[0::3])\n\n sol = solvers.lp(c, G, h, A, b)\n\n return np.ravel(sol['x']), sol['primal objective']*-1000",
"def createBridgeRings():\n # Create a BridgeSplitter to assign the bridges to the different distributors.\n bridgesplitter_hashring = capitalbridges.BridgeSplitter(key)\n #<class 'capitalbridges.BridgeSplitter'>\n print(\"Created bridgesplitter_hashring: %r\" % bridgesplitter_hashring)\n\n # Create ring parameters.\n ringParams = capitalbridges.BridgeRingParameters(needPorts=[(443, 1)],\n needFlags=[(\"Stable\", 1)])\n\n # TODO Do we need a proxy list here?\n proxyList = []\n httpsDistributor = HTTPSDistributor(\n 4, # like the eample in httpsdistributor\n crypto.getHMAC(key, b\"HTTPS-IP-Dist-Key\"),\n proxyList,\n answerParameters=ringParams)\n HTTPS_SHARE = 10\n bridgesplitter_hashring.addRing(httpsDistributor.hashring, \"https\", HTTPS_SHARE)\n\n return bridgesplitter_hashring, httpsDistributor",
"def getRigBuildData (self):\n\n data = super (ArmComponentGuide, self).getRigBuildData ()\n\n # values\n bicepPosition = self.bicepCtrl.xfo.tr\n forearmPosition = self.forearmCtrl.xfo.tr\n wristPosition = self.wristCtrl.xfo.tr\n\n # Calculate Bicep Xfo\n rootToWrist = wristPosition.subtract (bicepPosition).unit ()\n rootToElbow = forearmPosition.subtract (bicepPosition).unit ()\n\n bone1Normal = rootToWrist.cross (rootToElbow).unit ()\n bone1ZAxis = rootToElbow.cross (bone1Normal).unit ()\n\n bicepXfo = Xfo ()\n bicepXfo.setFromVectors (rootToElbow, bone1Normal, bone1ZAxis, bicepPosition)\n\n # Calculate Forearm Xfo\n elbowToWrist = wristPosition.subtract (forearmPosition).unit ()\n elbowToRoot = bicepPosition.subtract (forearmPosition).unit ()\n bone2Normal = elbowToRoot.cross (elbowToWrist).unit ()\n bone2ZAxis = elbowToWrist.cross (bone2Normal).unit ()\n forearmXfo = Xfo ()\n forearmXfo.setFromVectors (elbowToWrist, bone2Normal, bone2ZAxis, forearmPosition)\n\n # Calculate Wrist Xfo\n wristXfo = Xfo ()\n wristXfo.tr = self.wristCtrl.xfo.tr\n wristXfo.ori = forearmXfo.ori\n\n upVXfo = xfoFromDirAndUpV (bicepPosition, wristPosition, forearmPosition)\n upVXfo.tr = forearmPosition\n upVXfo.tr = upVXfo.transformVector (Vec3 (0, 0, 5))\n\n # Lengths\n bicepLen = bicepPosition.subtract (forearmPosition).length ()\n forearmLen = forearmPosition.subtract (wristPosition).length ()\n\n data ['bicepXfo'] = bicepXfo\n data ['forearmXfo'] = forearmXfo\n data ['wristXfo'] = wristXfo\n data ['upVXfo'] = upVXfo\n data ['bicepLen'] = bicepLen\n data ['forearmLen'] = forearmLen\n\n return data"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Creates the lighting pipeline, including shadow handling
|
def _createLightingPipeline(self):
if not self.haveLightingPass:
self.debug("Skipping lighting pipeline")
return
self.debug("Creating lighting pipeline ..")
# size has to be a multiple of the compute unit size
# but still has to cover the whole screen
sizeX = int(math.ceil(float(self.size.x) / self.patchSize.x))
sizeY = int(math.ceil(float(self.size.y) / self.patchSize.y))
self.precomputeSize = LVecBase2i(sizeX, sizeY)
self.debug("Batch size =", sizeX, "x", sizeY,
"Actual Buffer size=", int(sizeX * self.patchSize.x),
"x", int(sizeY * self.patchSize.y))
self._makeLightPerTileStorage()
# Create a buffer which computes which light affects which tile
self._makeLightBoundsComputationBuffer(sizeX, sizeY)
# Create a buffer which applies the lighting
self._makeLightingComputeBuffer()
# Register for light manager
self.lightManager.setLightingComputator(self.lightingComputeContainer)
self.lightManager.setLightingCuller(self.lightBoundsComputeBuff)
self._loadFallbackCubemap()
self._loadLookupCubemap()
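    # Worked example of the tile math above (illustrative numbers only,
    # assuming a 1600x900 screen and a 32x32 patch size):
    #   sizeX = ceil(1600 / 32.0) = 50
    #   sizeY = ceil(900 / 32.0)  = 29
    # so the buffer covers 50*32 x 29*32 = 1600 x 928 pixels, slightly
    # overshooting the screen instead of undershooting it.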
|
[
"def create_light():\n\n # Add new plane\n bpy.ops.mesh.primitive_plane_add(location=(15, -5, 5))\n plane = bpy.context.active_object\n plane.name = 'Light Plane'\n plane.scale = mathutils.Vector((4, 4, 4))\n # tilt\n plane.rotation_euler.rotate_axis('Y', radians(40))\n\n # Create a new material\n material = bpy.data.materials.new(name=\"Plane Light Emission Shader\")\n material.use_nodes = True\n\n # Remove default\n material.node_tree.nodes.remove(material.node_tree.nodes.get('Diffuse BSDF'))\n material_output = material.node_tree.nodes.get('Material Output')\n emission = material.node_tree.nodes.new('ShaderNodeEmission')\n emission.inputs['Strength'].default_value = 5.0\n\n # link emission shader to material\n material.node_tree.links.new(material_output.inputs[0], emission.outputs[0])\n\n # set activer material to your new material\n plane.active_material = material",
"def add_lighting(obj, track_to=True):\r\n if np.random.rand() > 0.3:\r\n bpy.context.view_layer.objects.active = None\r\n # docrender using method\r\n # d = random.uniform(2, 5)\r\n # litpos = Vector((0, d, 0))\r\n # eul = Euler((0, 0, 0), 'XYZ')\r\n # eul.rotate_axis('Z', random.uniform(math.radians(0), math.radians(180)))\r\n # eul.rotate_axis('X', random.uniform(math.radians(45), math.radians(135)))\r\n # litpos.rotate(eul)\r\n # bpy.ops.object.select_all(action='DESELECT')\r\n # bpy.ops.object.light_add(type='POINT', radius=1, align='WORLD', location=litpos)\r\n bpy.ops.object.light_add(type='POINT', radius=1, align='WORLD', location=(0,0,0))\r\n point_light = bpy.data.objects['Point']\r\n select_object(point_light)\r\n point_light.data.use_nodes = True\r\n pos_z = random.uniform(5, 8)\r\n pos_x = random.uniform(-1.5, 1.5)\r\n pos_y = random.uniform(-1.5, 1.5)\r\n point_light.location = (pos_x, pos_y, pos_z)\r\n nodes=point_light.data.node_tree.nodes\r\n links=point_light.data.node_tree.links\r\n for node in nodes:\r\n if node.type=='OUTPUT':\r\n output_node = node\r\n elif node.type=='EMISSION':\r\n emission_node=node\r\n strngth=random.uniform(1,8)\r\n emission_node.inputs[1].default_value=strngth\r\n bbody=nodes.new(type='ShaderNodeBlackbody')\r\n color_temp=random.uniform(2700,10200)\r\n bbody.inputs[0].default_value=color_temp\r\n links.new(bbody.outputs[0],emission_node.inputs[0])\r\n if track_to:\r\n # Track to constrain\r\n point_light.constraints.new(\"TRACK_TO\")\r\n point_light.constraints['Track To'].target = obj#bpy.data.objects[label]\r\n point_light.constraints['Track To'].up_axis = 'UP_Y'\r\n point_light.constraints['Track To'].track_axis = 'TRACK_NEGATIVE_Z'\r\n # Damped Track constrain\r\n # point_light.constraints.new(\"DAMPED_TRACK\") \r\n # point_light.constraints['Damped Track'].target = bpy.data.objects[label]\r\n # point_light.constraints['Damped Track'].subtarget = \"Control\"#\"Group\"\r\n # point_light.constraints['Damped Track'].track_axis = 'TRACK_NEGATIVE_Z'\r\n else:\r\n # d = random.uniform(2, 4)\r\n # litpos = Vector((0, d, 0))\r\n # eul = Euler((0, 0, 0), 'XYZ')\r\n # eul.rotate_axis('Z', random.uniform(math.radians(0), math.radians(180)))\r\n # eul.rotate_axis('X', random.uniform(math.radians(45), math.radians(135)))\r\n # litpos.rotate(eul)\r\n # bpy.ops.object.light_add(type='AREA', align='WORLD', location=litpos)\r\n bpy.ops.object.light_add(type='AREA', align='WORLD', location=(0,0,0))\r\n area_light = bpy.data.objects['Area']\r\n area_light.data.use_nodes = True\r\n pos_z = random.uniform(4, 8)\r\n pos_x = random.uniform(-1.5, 1.5)\r\n pos_y = random.uniform(-1.5, 1.5)\r\n area_light.location = (pos_x, pos_y, pos_z)\r\n area_light.data.size = random.uniform(1,3)\r\n nodes=area_light.data.node_tree.nodes\r\n links=area_light.data.node_tree.links\r\n for node in nodes:\r\n if node.type=='OUTPUT':\r\n output_node = node\r\n elif node.type=='EMISSION':\r\n emission_node=node\r\n strngth=random.uniform(1,10)\r\n emission_node.inputs[1].default_value=strngth\r\n bbody=nodes.new(type='ShaderNodeBlackbody')\r\n color_temp=random.uniform(4000,9500)\r\n bbody.inputs[0].default_value=color_temp\r\n links.new(bbody.outputs[0],emission_node.inputs[0])\r\n if track_to:\r\n # Track to constrain\r\n area_light.constraints.new(\"TRACK_TO\")\r\n area_light.constraints['Track To'].target = obj#bpy.data.objects[label]\r\n area_light.constraints['Track To'].up_axis = 'UP_Y'\r\n area_light.constraints['Track To'].track_axis = 'TRACK_NEGATIVE_Z'\r\n # Damped Track 
constrain\r\n # area_light.constraints.new(\"DAMPED_TRACK\") \r\n # area_light.constraints['Damped Track'].target = bpy.data.objects[label]\r\n # area_light.constraints['Damped Track'].subtarget = \"Control\"#\"Group\"\r\n # area_light.constraints['Damped Track'].track_axis = 'TRACK_NEGATIVE_Z'\r\n return",
"def import_light_sources(self):\n\n print('################# importing light sources #################')\n for pattern_name in self.pattern_names_list:\n\n coll = bpy.data.collections[pattern_name] #Store the associated blender collection as variable\n\n bpy.data.lights.new(name=pattern_name, type='SPOT') #Create new light source\n \n light = bpy.data.lights[pattern_name] #Store blender light as variable\n light_obj = bpy.data.objects.new(name=pattern_name, object_data=light) #Store blender light object as variable\n coll.objects.link(light_obj) #Link light object to assiciated collection\n \n light.spot_blend = 0 #Edge blending of spotlight turned off\n light.spot_size = np.pi #Set spot field of view to 180 (Larger than the image field of view)\n light.shadow_soft_size = 0 #Makes edges of projected image sharp\n\n light_obj.parent = bpy.data.objects[self.camera.name] #Set light to child of camera\n light_obj.parent_type = 'OBJECT'\n\n light_obj.location = self.cam2proj_loc #Set light location relative to camera\n \n if len(self.cam2proj_rot) == 4:\n light_obj.rotation_mode = 'QUATERNION' #Set rotation mode to quaternion\n light_obj.rotation_quaternion = self.cam2proj_rot\n else:\n light_obj.rotation_mode = 'XYZ' #Set rotation mode to euler xyz\n light_obj.rotation_euler = self.cam2proj_rot\n\n light_obj.name = pattern_name #Rename light to be same as associated view layer and collection\n\n #Set up the node tree for the projector\n self.create_projector_node_tree(light=light)",
"def shadow_light_linker():\n\n lights = cmds.ls(\"*:shadowLight_light\")\n light_sets = cmds.ls(\"*_lightLink*\")\n\n if not lights:\n cmds.warning(\"No '*:shadowLight_light' in scene.\")\n return\n if not light_sets:\n cmds.warning(\"No '*_lightLink*' in scene.\")\n return\n\n for light in lights: \n for light_set in light_sets: \n cmds.lightlink(light=light, object=light_set)",
"def set_lighting(self):\n prop = self.GetProperty()\n prop.SetAmbient(0.)\n prop.SetDiffuse(0.)\n prop.SetSpecular(1.0)",
"def _setLightingShader(self):\n lightShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"ApplyLighting.fragment\")\n self.lightingComputeContainer.setShader(lightShader)",
"def res_make_funnel_light(inst: Entity) -> None:\n oran_on = inst.fixup.bool('$start_reversed')\n if inst.fixup['$conn_count_b'] != '0':\n import vbsp\n if not vbsp.settings['style_vars']['funnelallowswitchedlights']:\n # Allow disabling adding switchable lights.\n return\n name = conditions.local_name(inst, 'light')\n need_blue = need_oran = True\n else:\n name = ''\n if oran_on:\n need_oran = True\n need_blue = False\n else:\n need_blue = True\n need_oran = False\n\n loc = Vec(0, 0, -56) @ Angle.from_str(inst['angles']) + Vec.from_str(inst['origin'])\n\n if need_blue:\n inst.map.create_ent(\n classname='light',\n targetname=name + '_b' if name else '',\n spawnflags=int(oran_on), # 1 = Initially Dark\n origin=loc,\n _light='50 120 250 50',\n _lightHDR='-1 -1 -1 1',\n _lightscaleHDR=2,\n _fifty_percent_distance=48,\n _zero_percent_distance=96,\n _hardfalloff=1,\n _distance=0,\n style=0,\n )\n if need_oran:\n inst.map.create_ent(\n classname='light',\n targetname=name + '_o' if name else '',\n spawnflags=int(not oran_on),\n origin=loc,\n _light='250 120 50 50',\n _lightHDR='-1 -1 -1 1',\n _lightscaleHDR=2,\n _fifty_percent_distance=48,\n _zero_percent_distance=96,\n _hardfalloff=1,\n _distance=0,\n style=0,\n )",
"def _makeLightingComputeBuffer(self):\n self.lightingComputeContainer = RenderTarget(\"ComputeLighting\")\n\n if self.settings.enableTemporalReprojection:\n self.lightingComputeContainer.setSize(self.size.x / 2, self.size.y)\n else:\n self.lightingComputeContainer.setSize(self.size.x, self.size.y)\n\n self.lightingComputeContainer.addColorTexture()\n self.lightingComputeContainer.setColorBits(16)\n self.lightingComputeContainer.prepareOffscreenBuffer()\n\n self.lightingComputeCombinedTex = Texture(\"Lighting-Compute-Combined\")\n self.lightingComputeCombinedTex.setup2dTexture(\n self.size.x, self.size.y, Texture.TFloat, Texture.FRgba8)\n self.lightingComputeCombinedTex.setMinfilter(Texture.FTLinear)\n self.lightingComputeCombinedTex.setMagfilter(Texture.FTLinear)\n\n self.lastPositionBuffer = Texture(\"Last-Position-Buffer\")\n self.lastPositionBuffer.setup2dTexture(\n self.size.x, self.size.y, Texture.TFloat, Texture.FRgba16)\n self.lastPositionBuffer.setMinfilter(Texture.FTNearest)\n self.lastPositionBuffer.setMagfilter(Texture.FTNearest)",
"def place_camera_and_light():\n # Place Camera\n bpy.context.scene.use_nodes = True\n camera = bpy.data.cameras.new(\"Camera\")\n camera_obj = bpy.data.objects.new(\"Camera\", camera)\n camera_obj.location = (0,-200,0)\n camera_obj.rotation_euler = (radians(90),0,0)\n bpy.context.scene.camera = camera_obj\n bpy.context.scene.collection.objects.link(camera_obj)\n\n # create light datablock, set attributes\n light_data = bpy.data.lights.new(name=\"light_2.80\", type='POINT')\n light_data.energy = 30\n\n # create new object with our light datablock\n light_object = bpy.data.objects.new(name=\"light_2.80\", object_data=light_data)\n\n # link light object\n bpy.context.collection.objects.link(light_object)\n\n # make it active \n bpy.context.view_layer.objects.active = light_object\n\n #change location\n light_object.location = (0, -0.5, 0)",
"def _setupFinalPass(self):\n # Set wrap for motion blur\n colorTex = self.antialias.getResultTexture()\n colorTex.setWrapU(Texture.WMClamp)\n colorTex.setWrapV(Texture.WMClamp)\n self._setFinalPassShader()",
"def _setShaderInputs(self):\n\n # Shader inputs for the light-culling pass\n if self.haveLightingPass:\n self.lightBoundsComputeBuff.setShaderInput(\n \"destination\", self.lightPerTileStorage)\n self.lightBoundsComputeBuff.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n # Shader inputs for the light-applying pass\n self.lightingComputeContainer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.lightingComputeContainer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n self.lightingComputeContainer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n\n\n self.lightingComputeContainer.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightingComputeContainer.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightingComputeContainer.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n if self.occlusion.requiresViewSpacePosNrm():\n self.lightingComputeContainer.setShaderInput(\n \"viewSpaceNormals\",\n self.normalPrecompute.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"viewSpacePosition\",\n self.normalPrecompute.getAuxTexture(0))\n\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlas\", self.lightManager.getAtlasTex())\n\n if self.settings.useHardwarePCF:\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlasPCF\", self.lightManager.getAtlasTex(), self.lightManager.getPCFSampleState())\n\n self.lightingComputeContainer.setShaderInput(\n \"destination\", self.lightingComputeCombinedTex)\n self.lightingComputeContainer.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.lightingComputeContainer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n self.lightingComputeContainer.setShaderInput(\n \"noiseTexture\",\n self.showbase.loader.loadTexture(\"Data/Occlusion/noise4x4.png\"))\n self.lightingComputeContainer.setShaderInput(\n \"lightsPerTile\", self.lightPerTileStorage)\n\n\n if self.settings.enableGlobalIllumination:\n self.lightingComputeContainer.setShaderInput(\"giDiffuseTex\", self.giPrecomputeBuffer.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\"giReflectionTex\", self.giPrecomputeBuffer.getAuxTexture(0))\n\n\n # Shader inputs for the occlusion blur passes\n if self.occlusion.requiresBlurring() and self.haveCombiner:\n self.blurOcclusionH.setShaderInput(\n \"colorTex\", self.blurOcclusionV.getColorTexture())\n\n if self.settings.enableTemporalReprojection:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\", self.combiner.getColorTexture())\n else:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\",\n self.lightingComputeContainer.getColorTexture())\n\n self.blurOcclusionH.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionH.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n\n # Shader inputs for the blur passes\n if self.blurEnabled:\n self.blurColorH.setShaderInput(\n \"dofStorage\", self.dofStorage)\n self.blurColorV.setShaderInput(\n \"dofStorage\", 
self.dofStorage)\n self.blurColorH.setShaderInput(\"colorTex\",\n self.antialias.getResultTexture())\n self.blurColorH.setShaderInput(\"depthTex\",\n self.deferredTarget.getDepthTexture())\n self.blurColorV.setShaderInput(\"colorTex\",\n self.blurColorH.getColorTexture())\n\n # Shader inputs for the temporal reprojection\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.combiner.setShaderInput(\n \"currentComputation\",\n self.lightingComputeContainer.getColorTexture())\n self.combiner.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n self.combiner.setShaderInput(\n \"positionBuffer\", self.deferredTarget.getColorTexture())\n self.combiner.setShaderInput(\n \"velocityBuffer\", self.deferredTarget.getAuxTexture(1))\n self.combiner.setShaderInput(\"currentPixelShift\",\n self.currentPixelShift)\n self.combiner.setShaderInput(\"lastPixelShift\",\n self.lastPixelShift)\n\n if self.blurEnabled:\n self.combiner.setShaderInput(\n \"dofStorage\", self.dofStorage)\n\n self.combiner.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.combiner.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n self.combiner.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.combiner.setShaderInput(\"lastMVP\", self.lastMVP)\n self.combiner.setShaderInput(\"cameraPosition\", self.cameraPosition)\n self.combiner.setShaderInput(\"currentMVP\", self.lastMVP)\n\n # Shader inputs for the final pass\n if self.blurEnabled:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.blurColorV.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.antialias.getResultTexture())\n\n if self.occlusion.requiresBlurring():\n self.normalPrecompute.setShaderInput(\n \"positionTex\", self.deferredTarget.getColorTexture())\n self.normalPrecompute.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.normalPrecompute.setShaderInput(\n \"mainRender\", self.showbase.render)\n self.normalPrecompute.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n\n if self.haveMRT:\n self.deferredTarget.setShaderInput(\n \"velocityTex\", self.deferredTarget.getAuxTexture(1))\n\n self.deferredTarget.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.deferredTarget.setShaderInput(\n \"motionBlurFactor\", self.motionBlurFactor)\n\n if self.haveLightingPass:\n self.deferredTarget.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.deferredTarget.setShaderInput(\n \"newFrame\", self.combiner.getColorTexture())\n self.deferredTarget.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.combiner.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.antialias.getResultTexture())\n\n self.deferredTarget.setShaderInput(\n \"currentPosition\", self.deferredTarget.getColorTexture())\n\n # Set last / current mvp handles\n self.showbase.render.setShaderInput(\"lastMVP\", self.lastMVP)\n\n # Set GI inputs\n if self.settings.enableGlobalIllumination:\n self.globalIllum.bindTo(self.giPrecomputeBuffer, \"giData\")\n\n self.giPrecomputeBuffer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.giPrecomputeBuffer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.giPrecomputeBuffer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n 
self.giPrecomputeBuffer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n self.giPrecomputeBuffer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n # Finally, set shaders\n self.reloadShaders()",
"def create_collections_and_viewlayers(self):\n \n #Set parent collection to be scene master collection\n parent_collection = bpy.context.scene.collection\n \n view_layers = bpy.context.scene.view_layers\n \n if not len(view_layers) == 1:\n raise Exception('Blender file can not contain more than one view layer when running the pipeline\\nBlender file currently contains {} view layers'.format(len(view_layers)))\n\n bpy.context.view_layer.name = 'native_layer' #Rename native view layer\n\n #Seperate native lighting from projector lighting\n lights_in_scene = []\n for light in bpy.data.lights:\n\n if bpy.data.lights[light.name].users:\n lights_in_scene.append(light.name)\n\n native_lights_list = list(set(lights_in_scene) - set(self.pattern_names_list))\n \n #Create a collection to store native lighting\n native_light_collection = bpy.data.collections.new(name='native_lights')\n parent_collection.children.link(native_light_collection)\n\n #Add native lighting to collection\n for native_light_name in native_lights_list:\n\n native_light = bpy.data.objects[native_light_name]\n \n current_collection = native_light.users_collection\n\n #Unlink object from previous collection and link to new collection\n native_light_collection.objects.link(native_light)\n current_collection[0].objects.unlink(native_light)\n\n #Loop number of phase shift patterns in structured light algorithm\n for pattern_name in self.pattern_names_list:\n \n #Create viewlayers for both wave lengths at current shift\n bpy.context.scene.view_layers.new(name=pattern_name)\n \n #Create collections for both wave lengths at current shift\n collection = bpy.data.collections.new(name=\"{}\".format(pattern_name))\n\n #Make collections children of master collection/scene collection\n parent_collection.children.link(collection)",
"def light(self, **kwargs):\n del kwargs\n\n if not self.player:\n return\n\n self.machine.extra_ball_controller.light()",
"def __init__(self):\n this = _coin.new_SoDirectionalLightManip()\n try:\n self.this.append(this)\n except __builtin__.Exception:\n self.this = this",
"def shadingLightRelCtx(offCommand=\"string\", image1=\"string\", onCommand=\"string\", shadingCentric=bool, history=bool, exists=bool, image2=\"string\", name=\"string\", image3=\"string\"):\n pass",
"def __init__(self):\n this = _coin.new_SoDirectionalLight()\n try:\n self.this.append(this)\n except __builtin__.Exception:\n self.this = this",
"def _simple_scene_setup(self):\n colors = {\n \"body_color\": [0.5, 0.5, 0.7], \n \"cloth_color\": [0.8, 0.2, 0.2] if 'garment_color' not in self.config else self.config['garment_color'],\n \"floor_color\": [0.8, 0.8, 0.8]\n }\n\n self.scene = {\n 'floor': self._add_floor(self.body)\n }\n # materials\n self.scene['body_shader'], self.scene['body_SG'] = self._new_lambert(colors['body_color'], self.body)\n self.scene['cloth_shader'], self.scene['cloth_SG'] = self._new_lambert(colors['cloth_color'], self.body)\n self.scene['floor_shader'], self.scene['floor_SG'] = self._new_lambert(colors['floor_color'], self.body)\n\n self.scene['light'] = mutils.createLocator('aiSkyDomeLight', asLight=True)\n\n # Put camera\n self.cameras = [self._add_simple_camera()]\n\n # save config\n self.config['garment_color'] = colors['cloth_color']",
"def __init__(self):\n this = _coin.new_SoVRMLDirectionalLight()\n try:\n self.this.append(this)\n except __builtin__.Exception:\n self.this = this",
"def initialize():\n numeric_attr = OpenMaya.MFnNumericAttribute() \n type_attr = OpenMaya.MFnTypedAttribute()\n\n outMesh = OpenMayaMPx.cvar.MPxGeometryFilter_outputGeom # The driven output geometry\n \n # Creatting the blendMesh and blendValue attributes for the node\n SimpleBlendShapeNode.attr_mesh_blend = type_attr.create('blendMesh', 'blendMesh', OpenMaya.MFnData.kMesh)\n SimpleBlendShapeNode.addAttribute(SimpleBlendShapeNode.attr_mesh_blend)\n SimpleBlendShapeNode.attributeAffects(SimpleBlendShapeNode.attr_mesh_blend, outMesh)\n SimpleBlendShapeNode.attr_weight_blend = numeric_attr.create('blendValue', 'blendValue', OpenMaya.MFnNumericData.kFloat)\n numeric_attr.setKeyable(True)\n numeric_attr.setMin(0.0)\n numeric_attr.setMax(1.0)\n # Adding the attributes back to the class object.\n SimpleBlendShapeNode.addAttribute(SimpleBlendShapeNode.attr_weight_blend)\n SimpleBlendShapeNode.attributeAffects(SimpleBlendShapeNode.attr_weight_blend, outMesh)\n \n # Enabling Maya's paint weights to have influence on the blendshape node.\n OpenMaya.MGlobal.executeCommand(\"makePaintable -attrType multiFloat -sm deformer simpleBlendShapeNode weights;\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets most of the required shader inputs to the targets
|
def _setShaderInputs(self):
# Shader inputs for the light-culling pass
if self.haveLightingPass:
self.lightBoundsComputeBuff.setShaderInput(
"destination", self.lightPerTileStorage)
self.lightBoundsComputeBuff.setShaderInput(
"depth", self.deferredTarget.getDepthTexture())
self.lightBoundsComputeBuff.setShaderInput(
"mainCam", self.showbase.cam)
self.lightBoundsComputeBuff.setShaderInput(
"mainRender", self.showbase.render)
# Shader inputs for the light-applying pass
self.lightingComputeContainer.setShaderInput(
"data0", self.deferredTarget.getColorTexture())
self.lightingComputeContainer.setShaderInput(
"data1", self.deferredTarget.getAuxTexture(0))
self.lightingComputeContainer.setShaderInput(
"data2", self.deferredTarget.getAuxTexture(1))
self.lightingComputeContainer.setShaderInput(
"data3", self.deferredTarget.getAuxTexture(2))
self.lightingComputeContainer.setShaderInput(
"depth", self.deferredTarget.getDepthTexture())
self.lightingComputeContainer.setShaderInput(
"mainCam", self.showbase.cam)
self.lightingComputeContainer.setShaderInput(
"mainRender", self.showbase.render)
if self.occlusion.requiresViewSpacePosNrm():
self.lightingComputeContainer.setShaderInput(
"viewSpaceNormals",
self.normalPrecompute.getColorTexture())
self.lightingComputeContainer.setShaderInput(
"viewSpacePosition",
self.normalPrecompute.getAuxTexture(0))
self.lightingComputeContainer.setShaderInput(
"shadowAtlas", self.lightManager.getAtlasTex())
if self.settings.useHardwarePCF:
self.lightingComputeContainer.setShaderInput(
"shadowAtlasPCF", self.lightManager.getAtlasTex(), self.lightManager.getPCFSampleState())
self.lightingComputeContainer.setShaderInput(
"destination", self.lightingComputeCombinedTex)
self.lightingComputeContainer.setShaderInput(
"temporalProjXOffs", self.temporalProjXOffs)
self.lightingComputeContainer.setShaderInput(
"cameraPosition", self.cameraPosition)
self.lightingComputeContainer.setShaderInput(
"noiseTexture",
self.showbase.loader.loadTexture("Data/Occlusion/noise4x4.png"))
self.lightingComputeContainer.setShaderInput(
"lightsPerTile", self.lightPerTileStorage)
if self.settings.enableGlobalIllumination:
self.lightingComputeContainer.setShaderInput("giDiffuseTex", self.giPrecomputeBuffer.getColorTexture())
self.lightingComputeContainer.setShaderInput("giReflectionTex", self.giPrecomputeBuffer.getAuxTexture(0))
# Shader inputs for the occlusion blur passes
if self.occlusion.requiresBlurring() and self.haveCombiner:
self.blurOcclusionH.setShaderInput(
"colorTex", self.blurOcclusionV.getColorTexture())
if self.settings.enableTemporalReprojection:
self.blurOcclusionV.setShaderInput(
"colorTex", self.combiner.getColorTexture())
else:
self.blurOcclusionV.setShaderInput(
"colorTex",
self.lightingComputeContainer.getColorTexture())
self.blurOcclusionH.setShaderInput(
"normalTex", self.deferredTarget.getAuxTexture(0))
self.blurOcclusionV.setShaderInput(
"normalTex", self.deferredTarget.getAuxTexture(0))
self.blurOcclusionH.setShaderInput(
"normalsView", self.normalPrecompute.getAuxTexture(0))
self.blurOcclusionV.setShaderInput(
"normalsView", self.normalPrecompute.getAuxTexture(0))
# Shader inputs for the blur passes
if self.blurEnabled:
self.blurColorH.setShaderInput(
"dofStorage", self.dofStorage)
self.blurColorV.setShaderInput(
"dofStorage", self.dofStorage)
self.blurColorH.setShaderInput("colorTex",
self.antialias.getResultTexture())
self.blurColorH.setShaderInput("depthTex",
self.deferredTarget.getDepthTexture())
self.blurColorV.setShaderInput("colorTex",
self.blurColorH.getColorTexture())
# Shader inputs for the temporal reprojection
if self.haveCombiner and self.settings.enableTemporalReprojection:
self.combiner.setShaderInput(
"currentComputation",
self.lightingComputeContainer.getColorTexture())
self.combiner.setShaderInput(
"lastFrame", self.lightingComputeCombinedTex)
self.combiner.setShaderInput(
"positionBuffer", self.deferredTarget.getColorTexture())
self.combiner.setShaderInput(
"velocityBuffer", self.deferredTarget.getAuxTexture(1))
self.combiner.setShaderInput("currentPixelShift",
self.currentPixelShift)
self.combiner.setShaderInput("lastPixelShift",
self.lastPixelShift)
if self.blurEnabled:
self.combiner.setShaderInput(
"dofStorage", self.dofStorage)
self.combiner.setShaderInput(
"depthTex", self.deferredTarget.getDepthTexture())
self.combiner.setShaderInput(
"lastPosition", self.lastPositionBuffer)
self.combiner.setShaderInput(
"temporalProjXOffs", self.temporalProjXOffs)
self.combiner.setShaderInput("lastMVP", self.lastMVP)
self.combiner.setShaderInput("cameraPosition", self.cameraPosition)
self.combiner.setShaderInput("currentMVP", self.lastMVP)
# Shader inputs for the final pass
if self.blurEnabled:
self.deferredTarget.setShaderInput(
"colorTex", self.blurColorV.getColorTexture())
else:
self.deferredTarget.setShaderInput(
"colorTex", self.antialias.getResultTexture())
if self.occlusion.requiresBlurring():
self.normalPrecompute.setShaderInput(
"positionTex", self.deferredTarget.getColorTexture())
self.normalPrecompute.setShaderInput(
"mainCam", self.showbase.cam)
self.normalPrecompute.setShaderInput(
"mainRender", self.showbase.render)
self.normalPrecompute.setShaderInput(
"depthTex", self.deferredTarget.getDepthTexture())
if self.haveMRT:
self.deferredTarget.setShaderInput(
"velocityTex", self.deferredTarget.getAuxTexture(1))
self.deferredTarget.setShaderInput(
"depthTex", self.deferredTarget.getDepthTexture())
self.deferredTarget.setShaderInput(
"motionBlurFactor", self.motionBlurFactor)
if self.haveLightingPass:
self.deferredTarget.setShaderInput(
"lastFrame", self.lightingComputeCombinedTex)
if self.haveCombiner and self.settings.enableTemporalReprojection:
self.deferredTarget.setShaderInput(
"newFrame", self.combiner.getColorTexture())
self.deferredTarget.setShaderInput(
"lastPosition", self.lastPositionBuffer)
self.deferredTarget.setShaderInput("debugTex",
self.combiner.getColorTexture())
else:
self.deferredTarget.setShaderInput("debugTex",
self.antialias.getResultTexture())
self.deferredTarget.setShaderInput(
"currentPosition", self.deferredTarget.getColorTexture())
# Set last / current mvp handles
self.showbase.render.setShaderInput("lastMVP", self.lastMVP)
# Set GI inputs
if self.settings.enableGlobalIllumination:
self.globalIllum.bindTo(self.giPrecomputeBuffer, "giData")
self.giPrecomputeBuffer.setShaderInput(
"data0", self.deferredTarget.getColorTexture())
self.giPrecomputeBuffer.setShaderInput(
"data1", self.deferredTarget.getAuxTexture(0))
self.giPrecomputeBuffer.setShaderInput(
"data2", self.deferredTarget.getAuxTexture(1))
self.giPrecomputeBuffer.setShaderInput(
"data3", self.deferredTarget.getAuxTexture(2))
self.giPrecomputeBuffer.setShaderInput(
"cameraPosition", self.cameraPosition)
# Finally, set shaders
self.reloadShaders()
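
A note on the pattern above (not part of the original source): in Panda3D, setShaderInput on a node binds a value to the GLSL uniform of the same name for everything rendered beneath it, which is why the method simply pairs uniform names with textures and handles. A minimal, self-contained sketch of that binding, with illustrative names:

from panda3d.core import NodePath, Texture

lighting_pass = NodePath("lighting-pass")   # stand-in for a render target's node
depth_tex = Texture("depth")                # stand-in for the G-buffer depth texture
# Visible in the fragment shader as `uniform sampler2D depth;`
lighting_pass.setShaderInput("depth", depth_tex)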
|
[
"def bind_to(self, target):\r\n for key, val in iteritems(self.inputs):\r\n target.set_shader_input(self.name + \".\" + key, val)",
"def bind_to(self, target):\r\n\r\n for pta_name, pta_handle in iteritems(self.ptas):\r\n if self.use_ubo:\r\n target.set_shader_input(self.name + \"_UBO.\" + pta_name, pta_handle)\r\n else:\r\n target.set_shader_input(self.name + \".\" + pta_name, pta_handle)",
"def setUniformBindings(self, wireframe=False):\n normalMatrix = self._transform.normalMatrix()\n self._active_shader.setUniformValue(\"modelMatrix\", self._transform)\n self._active_shader.setUniformValue(\"viewMatrix\", self._scene.camera.viewMatrix)\n self._active_shader.setUniformValue(\"projectionMatrix\", self._scene.camera.projectionMatrix)\n self._active_shader.setUniformValue(\"normalMatrix\", normalMatrix)\n if self.texture() is not None:\n self._active_shader.setUniformValue(\"texObject\", 0)\n \n ## bind active material\n if self.isSelectable() and self.isSelected():\n self._active_shader.setUniformValue(\"selected\", 1.0)\n else:\n self._active_shader.setUniformValue(\"selected\", 0.65)\n\n ## set highlight color\n if self.isHighlighted():\n self._active_shader.setUniformValue(\"material.emission\", QVector3D(0.25, 0.25, 0.25))\n else:\n self._active_shader.setUniformValue(\"material.emission\", self._active_material.emissionColor)\n self._active_shader.setUniformValue(\"material.ambient\", self._active_material.ambientColor)\n \n ## set the enabled color\n if self.isEnabled():\n self._active_shader.setUniformValue(\"material.emission\", QVector3D(0.25, 0.25, 0.25))\n self._active_shader.setUniformValue(\"material.diffuse\", self._active_material.diffuseColor)\n else:\n self._active_shader.setUniformValue(\"material.diffuse\", self._active_material.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._active_material.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._active_material.shininess)\n \n ## set the error and warning colors\n if self._errorHighlight:\n self._active_shader.setUniformValue(\"material.ambient\", self._errorMaterial.ambientColor)\n self._active_shader.setUniformValue(\"material.diffuse\", self._errorMaterial.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._errorMaterial.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._errorMaterial.shininess)\n if self._warningHighlight:\n self._active_shader.setUniformValue(\"material.ambient\", self._warningMaterial.ambientColor)\n self._active_shader.setUniformValue(\"material.diffuse\", self._warningMaterial.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._warningMaterial.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._warningMaterial.shininess) \n \n ## bind lights\n camera_position = QVector4D(self._scene.camera.position[0], self._scene.camera.position[1], self._scene.camera.position[2], 1.0)\n if self._scene.light.headlight:\n if self._scene.light.directional:\n self._active_shader.setUniformValue(\"lightPosition\", QVector4D(0.0, 0.0, 1.0, 0.0))\n else:\n self._active_shader.setUniformValue(\"lightPosition\", QVector4D(0.0, 0.0, 0.0, 1.0))\n else:\n self._active_shader.setUniformValue(\"lightPosition\", self._scene.camera.viewMatrix * self._scene.light.position)\n\n self._active_shader.setUniformValue(\"light.ambient\", self._scene.light.ambientColor)\n self._active_shader.setUniformValue(\"light.diffuse\", self._scene.light.diffuseColor)\n self._active_shader.setUniformValue(\"light.specular\", self._scene.light.specularColor)\n self._active_shader.setUniformValue(\"lightAttenuation\", self._scene.light.attenuation)",
"def bindTo(self, node, uniformName):\n\n for propid, prop in self.properties.iteritems():\n name = propid.replace(\".\", \"_\")\n node.setShaderInput(name, prop.getPTA())",
"def bake_shaders(self):\n\n selected_shaders = cmds.ls(sl=True)\n del self.shaders_to_apply[:]\n for shdr in selected_shaders:\n self.shaders_to_apply.append(shdr)\n print self.shaders_to_apply",
"def set_input(self, inputs):\n for i, node in enumerate(self.input_nodes):\n node.activation_level = inputs[i]",
"def __init__(self, *shaders):\n self.shaders = list(shaders)\n self.programId = None",
"def applyEffects(shaderEffects, targetNodePath, baseShader=None):\n \n targetNodePath.setShader(getShader(shaderEffects, baseShader))",
"def setOutputShaderFiles(self, *args):\r\n return _osgDB.Output_setOutputShaderFiles(self, *args)",
"def _assign_obj_list_to_shader(self, obj_list=None, shader=None):\n\n shader_SG = self._get_SG_from_shader(shader)\n if obj_list:\n if shader_SG:\n cmds.sets(obj_list, e=True, forceElement=shader_SG)\n else:\n print 'The provided shader {} didn\\'t return a shader_SG'.format(shader)\n else:\n print 'Please select one or more objects'",
"def _addAllUniforms(self):\n for stage in self.stages:\n sourceText = self.stages[stage]\n structures = findUniformStruct(sourceText)\n\n #------------------------------------------------------------------\n # UBO checck: NOTE: preliminary\n uboLastLine = 0\n uboIndex = sourceText.find('layout (std140')\n if uboIndex >= 0:\n endLine = sourceText[uboIndex:].find('}')\n uboBlock = sourceText[uboIndex:uboIndex+endLine+1]\n uboLastLine = uboIndex+endLine\n sourceText = sourceText[:uboIndex] + sourceText[uboLastLine:]\n s0 = uboBlock.find('uniform')\n s1 = uboBlock.find('}')\n uboName = uboBlock[s0:s1].split()[1]\n #NOTE: MUST BE TESTED!!!\n uniformLocation = gl.glGetUniformBlockIndex(self.program,\n uboName)\n self.uniformLocations[uniformName] = uniformLocation\n\n #------------------------------------------------------------------\n index = sourceText.find('uniform')\n start = index\n while index != -1:\n endLine = sourceText[start:].find(';')\n uniformLine = sourceText[start: start + endLine]\n _, uniformType, uniformName, *rest = uniformLine.split()\n index = sourceText[start + endLine:].find('uniform')\n start += endLine + index\n self.uniformTypes[uniformName] = uniformType\n self._addUniformWithStructCheck(uniformName, uniformType,\n structures)",
"def _update(self):\n\n if self._vertices_buffer is not None:\n self._vertices_buffer._delete()\n self._vertices_buffer = self._vertices_list.data.view(VertexBuffer)\n\n if self.itype is not None:\n if self._indices_buffer is not None:\n self._indices_buffer._delete()\n self._indices_buffer = self._indices_list.data.view(IndexBuffer)\n\n if self.utype is not None:\n if self._uniforms_texture is not None:\n self._uniforms_texture._delete()\n\n # We take the whole array (_data), not the data one\n texture = self._uniforms_list._data.view(np.float32)\n size = len(texture)/self._uniforms_float_count\n shape = self._compute_texture_shape(size)\n\n # shape[2] = float count is only used in vertex shader code\n texture = texture.reshape(int(shape[0]), int(shape[1]), 4)\n self._uniforms_texture = texture.view(TextureFloat2D)\n self._uniforms_texture.interpolation = gl.GL_NEAREST\n\n if len(self._programs):\n for program in self._programs:\n program.bind(self._vertices_buffer)\n if self._uniforms_list is not None:\n program[\"uniforms\"] = self._uniforms_texture\n program[\"uniforms_shape\"] = self._ushape",
"def _init_materials(self):\r\n default_shader_pink = GLProgram(xml=GLRenderer.DEFAULT_SHADER)\r\n self.default_mat = Material(default_shader_pink)\r\n self.current_material = self.default_mat\r\n self.current_material._use()",
"def build_target(self):\n for model_name in self.src_models:\n self.dst_models[model_name] = build_model(\n **self.dst_kwargs[model_name])",
"def _initial_target_setup(self):\n # Targets\n self.target = []\n n_targets = self.config['simulation']['n_targets']\n for target in self.config['simulation']['target_building_id']:\n info = {}\n info['target_id'] = target\n info['probability_goals'] = 1 / n_targets\n info['progress_goals'] = 0\n info['probability_goals_indoor'] = 1 / n_targets\n info['progress_goals_indoor'] = 0\n info['defence_perimeter'] = 0\n\n building_info = self.building_info(target)\n info['position'] = building_info['position']\n info['perimeter'] = building_info['perimeter']\n info['area'] = building_info['area']\n info['n_floors'] = building_info['n_floors']\n info['n_defence_perimeter'] = building_info['perimeter'] / (\n self.config['ugv']['defense_radius'] * 2)\n\n self.target.append(info)",
"def input_data(self, inputs):\n for i, x in enumerate(inputs):\n self.activations[0][i] = x",
"def ShaderObjects(self, *args):\n return _Graphic3d.Graphic3d_ShaderProgram_ShaderObjects(self, *args)",
"def assignPartsShader(self, shader):\r\n #We assign the shader to the selected faces\r\n Selection = cmds.ls(sl=True)\r\n if not Selection:\r\n return\r\n cmds.sets(Selection,edit=True, forceElement = \"%sSG\" % shader)",
"def _build_uniforms(self):\n\n # We might rebuild the program because of snippets but we must\n # keep already bound uniforms\n\n count = 0\n for (name,gtype) in self.all_uniforms:\n if name not in self._uniforms.keys():\n uniform = Uniform(self, name, gtype)\n else:\n uniform = self._uniforms[name]\n gtype = uniform.gtype\n if gtype in (gl.GL_SAMPLER_1D, gl.GL_SAMPLER_2D, gl.GL_SAMPLER_CUBE):\n uniform._texture_unit = count\n count += 1\n self._uniforms[name] = uniform\n self._need_update = True"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Loads the cubemap for image-based lighting
|
def _loadFallbackCubemap(self):
    print(self.settings.defaultReflectionCubemap)
cubemap = self.showbase.loader.loadCubeMap(
self.settings.defaultReflectionCubemap)
cubemap.setMinfilter(Texture.FTLinearMipmapLinear)
cubemap.setMagfilter(Texture.FTLinearMipmapLinear)
cubemap.setFormat(Texture.F_srgb)
    print(math.log(cubemap.getXSize(), 2))
self.lightingComputeContainer.setShaderInput(
"fallbackCubemap", cubemap)
self.lightingComputeContainer.setShaderInput(
"fallbackCubemapMipmaps", math.log(cubemap.getXSize(), 2))
|
[
"def load_height_and_soil_map(self, map_name):\n map = self.maps[map_name]\n self.image_height_map = Image()\n self.image_height_map.load_image(map.height_map_path)\n self.soil_ids_map = Image()\n self.soil_ids_map.load_image(map.texture_map_path)\n # self.transform_and_save_soil_id_map(map.texture_map_path)\n # self.save_image_as_csv(self.image_height_map.image)",
"def render_map_3d(self):\n for y in range(0, self.dimensions[0], self.granularity):\n for z in range(0, self.dimensions[1], self.granularity):\n for x in range(0, self.dimensions[2], self.granularity):\n if self.map[y, z, x] == 1:\n bpy.ops.mesh.primitive_cube_add(location=(x-(self.dimensions[2]/2),\n z-(self.dimensions[1]/2),\n y+1))",
"def _load_heat_map(self):\n heat_map_path = \"media/heat_map_{}.png\".format(self.top1_label)\n heat_map = cv2.imread(heat_map_path)\n self.heat_map = cv2.resize(heat_map, (NET_WEIGHT, NET_HEIGHT))\n debug(\"Heat map loaded.\")",
"def __init__(self):\n this = _coin.new_SoSceneTextureCubeMap()\n try:\n self.this.append(this)\n except __builtin__.Exception:\n self.this = this",
"def test_texture_map_atlas(self):\n device = torch.device(\"cuda:0\")\n\n obj_filename = TUTORIAL_DATA_DIR / \"cow_mesh/cow.obj\"\n\n # Load mesh and texture as a per face texture atlas.\n verts, faces, aux = load_obj(\n obj_filename,\n device=device,\n load_textures=True,\n create_texture_atlas=True,\n texture_atlas_size=8,\n texture_wrap=None,\n )\n atlas = aux.texture_atlas\n mesh = Meshes(\n verts=[verts],\n faces=[faces.verts_idx],\n textures=TexturesAtlas(atlas=[atlas]),\n )\n\n # Init rasterizer settings\n R, T = look_at_view_transform(2.7, 0, 0)\n cameras = FoVPerspectiveCameras(device=device, R=R, T=T)\n\n raster_settings = RasterizationSettings(\n image_size=512,\n blur_radius=0.0,\n faces_per_pixel=1,\n cull_backfaces=True,\n perspective_correct=False,\n )\n\n # Init shader settings\n materials = Materials(device=device, specular_color=((0, 0, 0),), shininess=0.0)\n lights = PointLights(device=device)\n\n # Place light behind the cow in world space. The front of\n # the cow is facing the -z direction.\n lights.location = torch.tensor([0.0, 0.0, 2.0], device=device)[None]\n\n # The HardPhongShader can be used directly with atlas textures.\n rasterizer = MeshRasterizer(cameras=cameras, raster_settings=raster_settings)\n renderer = MeshRenderer(\n rasterizer=rasterizer,\n shader=HardPhongShader(lights=lights, cameras=cameras, materials=materials),\n )\n\n images = renderer(mesh)\n rgb = images[0, ..., :3].squeeze()\n\n # Load reference image\n image_ref = load_rgb_image(\"test_texture_atlas_8x8_back.png\", DATA_DIR)\n\n if DEBUG:\n Image.fromarray((rgb.detach().cpu().numpy() * 255).astype(np.uint8)).save(\n DATA_DIR / \"DEBUG_texture_atlas_8x8_back.png\"\n )\n\n self.assertClose(rgb.cpu(), image_ref, atol=0.05)\n\n # Check gradients are propagated\n # correctly back to the texture atlas.\n # Because of how texture sampling is implemented\n # for the texture atlas it is not possible to get\n # gradients back to the vertices.\n atlas.requires_grad = True\n mesh = Meshes(\n verts=[verts],\n faces=[faces.verts_idx],\n textures=TexturesAtlas(atlas=[atlas]),\n )\n raster_settings = RasterizationSettings(\n image_size=512,\n blur_radius=0.0001,\n faces_per_pixel=5,\n cull_backfaces=True,\n clip_barycentric_coords=True,\n )\n images = renderer(mesh, raster_settings=raster_settings)\n images[0, ...].sum().backward()\n\n fragments = rasterizer(mesh, raster_settings=raster_settings)\n # Some of the bary coordinates are outside the\n # [0, 1] range as expected because the blur is > 0\n self.assertTrue(fragments.bary_coords.ge(1.0).any())\n self.assertIsNotNone(atlas.grad)\n self.assertTrue(atlas.grad.sum().abs() > 0.0)",
"def load_grain(grains, k):\n grain = -np.ones(dims)\n ind = grains[k][0]-1\n [x, y, z] = np.unravel_index(ind, dims, order='F')\n val = grains[k][1]\n grain[y,x,z] = val\n verts, faces = measure.marching_cubes_classic(grain, 0, spacing=(1, 1, 1))\n return verts, faces",
"def load_resources(self, path):\n image = pygame.image.load_basic(os.path.join(path, \"board.bmp\"))\n self.resources['background'] = pygame.transform.scale(image, (self.width, self.height))\n\n for abbreviation in self.current_state.piece_values.keys(): # load each piece\n for color in ('white', 'black'): # load each color variant\n name = abbreviation + color # construct image name\n image = pygame.image.load_extended(os.path.join(path, name + '.png'))\n self.resources[name] = pygame.transform.scale(image, (self.tile_size, self.tile_size))",
"def __init__(self):\n this = _coin.new_SoTextureCubeMap()\n try:\n self.this.append(this)\n except __builtin__.Exception:\n self.this = this",
"def load_images_pygame(tmxdata, mapping, *args, **kwargs):\n from itertools import product\n from pygame import Surface\n import pygame, os\n\n\n def handle_transformation(tile, flags):\n if flags:\n fx = flags & TRANS_FLIPX == TRANS_FLIPX\n fy = flags & TRANS_FLIPY == TRANS_FLIPY\n r = flags & TRANS_ROT == TRANS_ROT\n\n if r:\n # not sure why the flip is required...but it is.\n newtile = pygame.transform.rotate(tile, 270)\n newtile = pygame.transform.flip(newtile, 1, 0)\n\n if fx or fy:\n newtile = pygame.transform.flip(newtile, fx, fy)\n\n elif fx or fy:\n newtile = pygame.transform.flip(tile, fx, fy)\n\n # preserve any flags that may have been lost after the transformation\n return newtile.convert(tile)\n\n else:\n return tile\n\n\n pixelalpha = kwargs.get(\"pixelalpha\", False)\n force_colorkey = kwargs.get(\"force_colorkey\", False)\n force_bitdepth = kwargs.get(\"depth\", False)\n\n if force_colorkey:\n try:\n force_colorkey = pygame.Color(*force_colorkey)\n except:\n msg = \"Cannot understand color: {0}\"\n raise Exception, msg.format(force_colorkey)\n\n tmxdata.images = [0] * tmxdata.maxgid\n\n for firstgid, t in sorted((t.firstgid, t) for t in tmxdata.tilesets):\n path = os.path.join(os.path.dirname(tmxdata.filename), t.source)\n\n image = pygame.image.load(path)\n\n w, h = image.get_size()\n tile_size = (t.tilewidth, t.tileheight)\n real_gid = t.firstgid - 1\n\n colorkey = None\n if t.trans:\n colorkey = pygame.Color(\"#{0}\".format(t.trans))\n\n # i dont agree with margins and spacing, but i'll support it anyway\n # such is life. okay.jpg\n tilewidth = t.tilewidth + t.spacing\n tileheight = t.tileheight + t.spacing\n\n # some tileset images may be slightly larger than the tile area\n # ie: may include a banner, copyright, ect. this compensates for that\n width = ((int((w-t.margin*2) + t.spacing) / tilewidth) * tilewidth) - t.spacing\n height = ((int((h-t.margin*2) + t.spacing) / tileheight) * tileheight) - t.spacing\n\n # using product avoids the overhead of nested loops\n p = product(xrange(t.margin, height+t.margin, tileheight),\n xrange(t.margin, width+t.margin, tilewidth))\n\n for (y, x) in p:\n real_gid += 1\n gids = tmxdata.mapGID(real_gid)\n if gids == []: continue\n\n original = image.subsurface(((x,y), tile_size))\n\n for gid, flags in gids:\n tile = handle_transformation(original, flags)\n tile = pygame_convert(tile, colorkey, force_colorkey, pixelalpha)\n tmxdata.images[gid] = tile",
"def load_bone_fracture(self, path):\n img = load(path)\n img_data = img.get_data()\n self.__bone_with_fracture = zeros(img_data.shape)\n self.__bone_with_fracture[img_data == SCAPHOID_COLOR] = SCAPHOID_COLOR\n self.__bone_with_fracture[img_data == FRACTURE_COLOR] = FRACTURE_COLOR\n del img, img_data",
"def LoadTextures(): \n image_1 = open(\"/home/mikeodf/constr/images_opengl/steel_ball3.jpg\") \n image_2 = open(\"/home/mikeodf/constr/images_opengl/steel_green_ball3.jpg\") \n image_3 = open(\"/home/mikeodf/constr/images_opengl/steel_blue_ball3.jpg\") \n image_4 = open(\"/home/mikeodf/constr/images_opengl/steel_red_ball3.jpg\") \n\n image_1 = image_1.tostring(\"raw\", \"RGBX\", 0, -1) # convert bmp to the type needed for textures \n image_2 = image_2.tostring(\"raw\", \"RGBX\", 0, -1) # convert bmp to the type needed for textures \n image_3 = image_3.tostring(\"raw\", \"RGBX\", 0, -1) # convert bmp to the type needed for textures \n image_4 = image_4.tostring(\"raw\", \"RGBX\", 0, -1) # convert bmp to the type needed for textures \n glGenTextures(11, texture_1) # Create texture number and names and sizw. \n #===================================== \n texture_setup(image_1, 0, ix, iy) \n texture_setup(image_2, 1, ix, iy) \n texture_setup(image_3, 2, ix, iy) \n texture_setup(image_4, 3, ix, iy)",
"def test_texture_map(self):\n device = torch.device(\"cuda:0\")\n\n obj_filename = TUTORIAL_DATA_DIR / \"cow_mesh/cow.obj\"\n\n # Load mesh + texture\n verts, faces, aux = load_obj(\n obj_filename, device=device, load_textures=True, texture_wrap=None\n )\n tex_map = list(aux.texture_images.values())[0]\n tex_map = tex_map[None, ...].to(faces.textures_idx.device)\n textures = TexturesUV(\n maps=tex_map, faces_uvs=[faces.textures_idx], verts_uvs=[aux.verts_uvs]\n )\n mesh = Meshes(verts=[verts], faces=[faces.verts_idx], textures=textures)\n\n # Init rasterizer settings\n R, T = look_at_view_transform(2.7, 0, 0)\n cameras = FoVPerspectiveCameras(device=device, R=R, T=T)\n\n raster_settings = RasterizationSettings(\n image_size=512, blur_radius=0.0, faces_per_pixel=1\n )\n\n # Init shader settings\n materials = Materials(device=device)\n lights = PointLights(device=device)\n\n # Place light behind the cow in world space. The front of\n # the cow is facing the -z direction.\n lights.location = torch.tensor([0.0, 0.0, 2.0], device=device)[None]\n\n blend_params = BlendParams(\n sigma=1e-1,\n gamma=1e-4,\n background_color=torch.tensor([1.0, 1.0, 1.0], device=device),\n )\n # Init renderer\n renderer = MeshRenderer(\n rasterizer=MeshRasterizer(cameras=cameras, raster_settings=raster_settings),\n shader=TexturedSoftPhongShader(\n lights=lights,\n cameras=cameras,\n materials=materials,\n blend_params=blend_params,\n ),\n )\n\n # Load reference image\n image_ref = load_rgb_image(\"test_texture_map_back.png\", DATA_DIR)\n\n for bin_size in [0, None]:\n # Check both naive and coarse to fine produce the same output.\n renderer.rasterizer.raster_settings.bin_size = bin_size\n images = renderer(mesh)\n rgb = images[0, ..., :3].squeeze().cpu()\n\n if DEBUG:\n Image.fromarray((rgb.numpy() * 255).astype(np.uint8)).save(\n DATA_DIR / \"DEBUG_texture_map_back.png\"\n )\n\n # NOTE some pixels can be flaky and will not lead to\n # `cond1` being true. Add `cond2` and check `cond1 or cond2`\n cond1 = torch.allclose(rgb, image_ref, atol=0.05)\n cond2 = ((rgb - image_ref).abs() > 0.05).sum() < 5\n self.assertTrue(cond1 or cond2)\n\n # Check grad exists\n [verts] = mesh.verts_list()\n verts.requires_grad = True\n mesh2 = Meshes(verts=[verts], faces=mesh.faces_list(), textures=mesh.textures)\n images = renderer(mesh2)\n images[0, ...].sum().backward()\n self.assertIsNotNone(verts.grad)\n\n ##########################################\n # Check rendering of the front of the cow\n ##########################################\n\n R, T = look_at_view_transform(2.7, 0, 180)\n cameras = FoVPerspectiveCameras(device=device, R=R, T=T)\n\n # Move light to the front of the cow in world space\n lights.location = torch.tensor([0.0, 0.0, -2.0], device=device)[None]\n\n # Load reference image\n image_ref = load_rgb_image(\"test_texture_map_front.png\", DATA_DIR)\n\n for bin_size in [0, None]:\n # Check both naive and coarse to fine produce the same output.\n renderer.rasterizer.raster_settings.bin_size = bin_size\n\n images = renderer(mesh, cameras=cameras, lights=lights)\n rgb = images[0, ..., :3].squeeze().cpu()\n\n if DEBUG:\n Image.fromarray((rgb.numpy() * 255).astype(np.uint8)).save(\n DATA_DIR / \"DEBUG_texture_map_front.png\"\n )\n\n # NOTE some pixels can be flaky and will not lead to\n # `cond1` being true. 
Add `cond2` and check `cond1 or cond2`\n cond1 = torch.allclose(rgb, image_ref, atol=0.05)\n cond2 = ((rgb - image_ref).abs() > 0.05).sum() < 5\n self.assertTrue(cond1 or cond2)\n\n #################################\n # Add blurring to rasterization\n #################################\n R, T = look_at_view_transform(2.7, 0, 180)\n cameras = FoVPerspectiveCameras(device=device, R=R, T=T)\n blend_params = BlendParams(sigma=5e-4, gamma=1e-4)\n raster_settings = RasterizationSettings(\n image_size=512,\n blur_radius=np.log(1.0 / 1e-4 - 1.0) * blend_params.sigma,\n faces_per_pixel=100,\n clip_barycentric_coords=True,\n perspective_correct=False,\n )\n\n # Load reference image\n image_ref = load_rgb_image(\"test_blurry_textured_rendering.png\", DATA_DIR)\n\n for bin_size in [0, None]:\n # Check both naive and coarse to fine produce the same output.\n renderer.rasterizer.raster_settings.bin_size = bin_size\n\n images = renderer(\n mesh.clone(),\n cameras=cameras,\n raster_settings=raster_settings,\n blend_params=blend_params,\n )\n rgb = images[0, ..., :3].squeeze().cpu()\n\n if DEBUG:\n Image.fromarray((rgb.numpy() * 255).astype(np.uint8)).save(\n DATA_DIR / \"DEBUG_blurry_textured_rendering.png\"\n )\n\n self.assertClose(rgb, image_ref, atol=0.05)",
"def load_map(self, node=0):\n # Set current node to new loaded map.\n self.current_node = node\n self.map._map_matrix = self.map.load_mapfile(node)\n \n for y in range(len(self.map._map_matrix)):\n for x in range(len(self.map._map_matrix[y])):\n location = (x*TILE_DIMENSION+10, y*TILE_DIMENSION+10)\n tile_key = self.map._map_matrix[y][x]\n tile_area = self.map.TILES[tile_key].area\n screen.blit(self.map.parent_image,location,tile_area)\n \n pygame.display.flip()\n return",
"def opacitymaps(self):\n\t\treturn load_opacitymaps()",
"def loadTiles(self):\n self.tile = pygame.image.load(\"./hextile.png\").convert()\n self.tile.set_colorkey((0x80, 0x00, 0x80), RLEACCEL) \n\n self.cursor = pygame.image.load(\"./hexcursor.png\").convert()\n self.cursor.set_colorkey((0x80, 0x00, 0x80), RLEACCEL) \n self.cursorPos = self.cursor.get_rect()",
"def SoMultiTextureEnabledElement_enableCubeMap(state: 'SoState', node: 'SoNode', unit: 'int const'=0) -> \"void\":\n return _coin.SoMultiTextureEnabledElement_enableCubeMap(state, node, unit)",
"def load_new_map(self):\n self.map = gamemap.GameMap(self.datamap)",
"def enableCubeMap(state: 'SoState', node: 'SoNode', unit: 'int const'=0) -> \"void\":\n return _coin.SoMultiTextureEnabledElement_enableCubeMap(state, node, unit)",
"def load_images(self):\n self.astim1 = pygame.image.load(data.filepath('roid1.png')).\\\n convert_alpha()\n self.astim2 = pygame.image.load(data.filepath('roid2.png')).\\\n convert_alpha() \n self.astim3 = pygame.image.load(data.filepath('roid3.png')).\\\n convert_alpha() \n self.astim4 = pygame.image.load(data.filepath('roid4.png')).\\\n convert_alpha()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Creates the buffer which precomputes the lights per tile
|
def _makeLightBoundsComputationBuffer(self, w, h):
self.debug("Creating light precomputation buffer of size", w, "x", h)
self.lightBoundsComputeBuff = RenderTarget("ComputeLightTileBounds")
self.lightBoundsComputeBuff.setSize(w, h)
self.lightBoundsComputeBuff.setColorWrite(False)
self.lightBoundsComputeBuff.prepareOffscreenBuffer()
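
The w and h passed in here are the screen dimensions divided by the compute patch size, rounded up (see _createLightingPipeline among the snippets below). A minimal sketch of that tile-grid computation; the 32-pixel patch size is illustrative, not taken from the source:

import math

def tile_grid(screen_w, screen_h, patch_w=32, patch_h=32):
    # Round up so the tile grid still covers the whole screen when the
    # resolution is not an exact multiple of the patch size.
    return (int(math.ceil(screen_w / float(patch_w))),
            int(math.ceil(screen_h / float(patch_h))))

print(tile_grid(1920, 1080))  # -> (60, 34)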
|
[
"def _makeLightingComputeBuffer(self):\n self.lightingComputeContainer = RenderTarget(\"ComputeLighting\")\n\n if self.settings.enableTemporalReprojection:\n self.lightingComputeContainer.setSize(self.size.x / 2, self.size.y)\n else:\n self.lightingComputeContainer.setSize(self.size.x, self.size.y)\n\n self.lightingComputeContainer.addColorTexture()\n self.lightingComputeContainer.setColorBits(16)\n self.lightingComputeContainer.prepareOffscreenBuffer()\n\n self.lightingComputeCombinedTex = Texture(\"Lighting-Compute-Combined\")\n self.lightingComputeCombinedTex.setup2dTexture(\n self.size.x, self.size.y, Texture.TFloat, Texture.FRgba8)\n self.lightingComputeCombinedTex.setMinfilter(Texture.FTLinear)\n self.lightingComputeCombinedTex.setMagfilter(Texture.FTLinear)\n\n self.lastPositionBuffer = Texture(\"Last-Position-Buffer\")\n self.lastPositionBuffer.setup2dTexture(\n self.size.x, self.size.y, Texture.TFloat, Texture.FRgba16)\n self.lastPositionBuffer.setMinfilter(Texture.FTNearest)\n self.lastPositionBuffer.setMagfilter(Texture.FTNearest)",
"def _makeLightPerTileStorage(self):\n storageSizeX = self.precomputeSize.x * 8\n storageSizeY = self.precomputeSize.y * 8\n\n self.debug(\n \"Creating per tile storage of size\",\n storageSizeX, \"x\", storageSizeY)\n\n self.lightPerTileStorage = Texture(\"LightsPerTile\")\n self.lightPerTileStorage.setup2dTexture(\n storageSizeX, storageSizeY, Texture.TUnsignedShort, Texture.FR32i)\n self.lightPerTileStorage.setMinfilter(Texture.FTNearest)\n self.lightPerTileStorage.setMagfilter(Texture.FTNearest)",
"def _createLightingPipeline(self):\n\n if not self.haveLightingPass:\n self.debug(\"Skipping lighting pipeline\")\n return\n\n self.debug(\"Creating lighting pipeline ..\")\n\n # size has to be a multiple of the compute unit size\n # but still has to cover the whole screen\n sizeX = int(math.ceil(float(self.size.x) / self.patchSize.x))\n sizeY = int(math.ceil(float(self.size.y) / self.patchSize.y))\n\n self.precomputeSize = LVecBase2i(sizeX, sizeY)\n\n self.debug(\"Batch size =\", sizeX, \"x\", sizeY,\n \"Actual Buffer size=\", int(sizeX * self.patchSize.x),\n \"x\", int(sizeY * self.patchSize.y))\n\n self._makeLightPerTileStorage()\n\n # Create a buffer which computes which light affects which tile\n self._makeLightBoundsComputationBuffer(sizeX, sizeY)\n\n # Create a buffer which applies the lighting\n self._makeLightingComputeBuffer()\n\n # Register for light manager\n self.lightManager.setLightingComputator(self.lightingComputeContainer)\n self.lightManager.setLightingCuller(self.lightBoundsComputeBuff)\n\n self._loadFallbackCubemap()\n self._loadLookupCubemap()",
"def regenerate_skylight(self):\n\n lightmap = zeros((16, 16, 128), dtype=uint8)\n\n for x, z in product(xrange(16), repeat=2):\n # The maximum lighting value, unsurprisingly, is 0xf, which is the\n # biggest possible value for a nibble.\n light = 0xf\n\n # Apparently, skylights start at the block *above* the block on\n # which the light is incident?\n height = self.heightmap[x, z] + 1\n\n # The topmost block, regardless of type, is set to maximum\n # lighting, as are all the blocks above it.\n lightmap[x, z, height:] = light\n\n # Dim the light going throught the remaining blocks, until there\n # is no more light left.\n for y in range(height, -1, -1):\n dim = blocks[self.blocks[x, z, y]].dim\n light -= dim\n if light <= 0:\n break\n\n lightmap[x, z, y] = light\n\n # Now it's time to spread the light around. This flavor uses extra\n # memory to speed things up; the basic idea is to spread *all* light,\n # one glow level at a time, rather than spread each block\n # individually.\n max_height = amax(self.heightmap)\n lightable = vectorize(lambda block: blocks[block].dim < 15)(self.blocks)\n # Protip: This is a bitwise AND because logical ANDs on arrays can't\n # happen in Numpy.\n unlighted = logical_not(lightmap) & lightable\n\n # Create a mask to find all blocks that have an unlighted block\n # as a neighbour in the xz-plane.\n mask = zeros((16, 16, max_height), dtype=bool)\n mask[:-1,:,:max_height] |= unlighted[1:, :, :max_height]\n mask[:,:-1,:max_height] |= unlighted[:, 1:, :max_height]\n mask[1:,:,:max_height] |= unlighted[:-1, :, :max_height]\n mask[:,1:,:max_height] |= unlighted[:, :-1, :max_height]\n\n # Apply the mask to the lightmap to find all lighted blocks with one\n # or more unlighted blocks as neighbours.\n edges = logical_and(mask, lightmap[:, :, :max_height]).nonzero()\n\n spread = [tuple(coords) for coords in transpose(edges)]\n visited = set()\n\n # Run the actual glow loop. For each glow level, go over unvisited air\n # blocks and illuminate them.\n for glow in range(14, 0, -1):\n for coords in spread:\n if lightmap[coords] <= glow:\n visited.add(coords)\n continue\n\n for dx, dz, dy in (\n (1, 0, 0),\n (-1, 0, 0),\n (0, 1, 0),\n (0, -1, 0),\n (0, 0, 1),\n (0, 0, -1)):\n x, z, y = coords\n x += dx\n z += dz\n y += dy\n\n if not (0 <= x < 16 and\n 0 <= z < 16 and\n 0 <= y < 128):\n continue\n\n if (x, z, y) in visited:\n continue\n\n if lightable[x, z, y] and lightmap[x, z, y] < glow:\n lightmap[x, z, y] = glow - blocks[self.blocks[x, z, y]].dim\n visited.add((x, z, y))\n spread = visited\n visited = set()\n\n self.skylight = lightmap.clip(0, 15)",
"def create_individual_building_raster(self):\n canvas = np.zeros((self.max_y - self.min_y + 1,\n self.max_x - self.min_x + 1))\n for point in self.points:\n canvas[point[1] - self.min_y, point[0] - self.min_x] = 1\n return canvas",
"def __init__(self):\n\n # Dimensions of the texture array.\n self.__width = 1024\n self.__height = 1024\n self.__depth = 20\n self.__scratch_depth = 2\n\n # Allocate the texture array.\n # NOTE: If this goes wrong, we're probably trying to do this before\n # the opengl context has been created, and things will go horribly\n # wrong later! For some reason glGetError() is returning 0 anyway.\n self.__texture = GL.glGenTextures(1)\n\n # Ok, initialise the texture.\n GL.glBindTexture(GL.GL_TEXTURE_2D_ARRAY, self.__texture)\n GL.glTexParameteri(GL.GL_TEXTURE_2D_ARRAY, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR)\n GL.glTexParameteri(GL.GL_TEXTURE_2D_ARRAY, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR)\n GL.glTexParameteri(GL.GL_TEXTURE_2D_ARRAY, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP_TO_EDGE)\n GL.glTexParameteri(GL.GL_TEXTURE_2D_ARRAY, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP_TO_EDGE)\n GL.glTexImage3D(\n GL.GL_TEXTURE_2D_ARRAY,\n 0, #level\n GL.GL_RGBA8, # internal format\n self.__width,\n self.__height,\n self.__depth + self.__scratch_depth,\n 0, #border\n GL.GL_RGBA, # format\n GL.GL_UNSIGNED_BYTE, # data type\n None # The data.\n )\n\n # We insert images one at a time, and keep track of the current\n # insertion point. When we reach the end of the row, the next\n # row starts at a y coordinate flush with the bottom of the tallest\n # item in the current row. Note that this will end up with lots of\n # wasted space, we don't do any work to optimise the packing!\n self.__cursor = TextureArray.Cursor()\n self.__cursor.end = self.__depth\n\n # Initialise the scratch cursor.\n self.__scratch_cursor = TextureArray.Cursor()\n self.__scratch_cursor.index = self.__depth\n self.__scratch_cursor.end = self.__depth + self.__scratch_depth\n\n # Map from filenames to virtual textures.\n self.__filename_map = {}",
"def lightning(self, start=0, length=10, flashes=5, brightness=None):\n \n current = start\n end = current + length\n\n #for i in range(current, end):\n # self.pixels[i] = (255,255,255)\n # time.sleep(0.01)\n #time.sleep(0.05)\n\n original = []\n lights = []\n dark = []\n for i in range(current, end):\n original.append(self.pixels[i])\n for i in range(0,length):\n lights.append((255, 255, 255))\n dark.append((0,0,0))\n \n\n for i in range(0,flashes):\n #for j in range(current,end):\n # self.pixels[j] = (0,0,0)\n self.write_strip(lights, start=current, end=end)\n time.sleep(0.01)\n #for j in range(current,end):\n # self.pixels[j] = (255,255,255)\n self.write_strip(dark, start=current, end=end)\n time.sleep(0.03)\n self.pixels[current:end] = original\n #for i in range(current, end):\n # self.pixels[i] = (0,0,0)\n # time.sleep(0.01)",
"def __init__(self):\n DebugObject.__init__(self, \"ShadowAtlas\")\n self.size = 512\n self.freeTiles = 0\n self.tileSize = 32",
"def _creatGIPrecomputeBuffer(self):\n\n self.giPrecomputeBuffer = RenderTarget(\"GICompute\")\n self.giPrecomputeBuffer.setSize(self.size.x / 2, self.size.y / 2)\n self.giPrecomputeBuffer.addColorTexture()\n self.giPrecomputeBuffer.addAuxTextures(1)\n self.giPrecomputeBuffer.setColorBits(16)\n self.giPrecomputeBuffer.prepareOffscreenBuffer()",
"def _create_blended(self):\n hm_cpy = self.heat_map.copy()\n # Make filtered channels to be neutral after blend\n for c in range(self.hm_lvl):\n hm_cpy[:, :, c] = GREY_BLEND\n blended = cv2.addWeighted(self.im, 0.5, hm_cpy, 0.5, 0)\n self.output_filename = \"blended_{}.jpg\".format(self.top1_label)\n cv2.imwrite(os.path.join(\"media/\", self.output_filename), blended)",
"def draw_tile_backgrounds(self, tiles):\n\n def process_tile(tile):\n h = tile.height\n h_index = (h - self.parent.min_height) / (self.parent.max_height - self.parent.min_height)\n\n rgb_rand_1 = random.randint(0, self.ocean_noise)\n\n height_rgb = [0, 0, 0]\n height_rgb[0] = self.height_rgb_low[0] + h_index * (self.height_rgb_high[0] - self.height_rgb_low[0])\n height_rgb[1] = self.height_rgb_low[1] + h_index * (self.height_rgb_high[1] - self.height_rgb_low[1])\n height_rgb[2] = self.height_rgb_low[2] + h_index * (self.height_rgb_high[2] - self.height_rgb_low[2])\n\n water_rgb = (rgb_rand_1, rgb_rand_1, 255)\n if self.screen_mode == \"dark\":\n water_rgb = (rgb_rand_1 // 2, rgb_rand_1 // 2, 150)\n if self.screen_mode == \"martin\":\n water_rgb = (195 + rgb_rand_1 * 0.5, 234 + rgb_rand_1 * 0.5, 251)\n\n fillColors = [\n height_rgb, # Ground\n height_rgb, # Rail\n self.road_tile_rgb, # Road\n height_rgb, # Town building\n height_rgb, # Trees\n self.station_rgb, # Stations\n water_rgb, # Water\n height_rgb, # Void\n self.industry_rgb, # Industries\n self.torb_rgb, # Tunnel/bridge\n height_rgb, # Objects\n ]\n fillColor = fillColors[tile.kind % len(fillColors)]\n if tile.kind == 1:\n rail = tile.occupant\n if rail.is_depot:\n fillColor = self.rail_depot_rgb\n\n if tile.kind == 5:\n station = tile.occupant\n if station.station_type == 0:\n fillColor = self.rail_station_rgb\n if station.station_type == 1:\n fillColor = self.airport_rgb\n if station.station_type == 2:\n fillColor = self.bus_station_rgb\n if station.station_type == 3:\n fillColor = self.truck_station_rgb\n if station.station_type == 4:\n fillColor = self.heliport_rgb\n if station.station_type == 5:\n fillColor = self.seaport_rgb\n\n self.draw_square(tile, fillColor)\n if tile.kind == 1:\n rail = tile.occupant\n if not rail.is_depot:\n self.draw_rail_background(tile)\n\n if self.parent.show_progress_bar:\n with alive_bar(len(tiles)) as abar:\n for tile in tiles:\n process_tile(tile)\n abar()\n else:\n for tile in tiles:\n process_tile(tile)",
"def create_light():\n\n # Add new plane\n bpy.ops.mesh.primitive_plane_add(location=(15, -5, 5))\n plane = bpy.context.active_object\n plane.name = 'Light Plane'\n plane.scale = mathutils.Vector((4, 4, 4))\n # tilt\n plane.rotation_euler.rotate_axis('Y', radians(40))\n\n # Create a new material\n material = bpy.data.materials.new(name=\"Plane Light Emission Shader\")\n material.use_nodes = True\n\n # Remove default\n material.node_tree.nodes.remove(material.node_tree.nodes.get('Diffuse BSDF'))\n material_output = material.node_tree.nodes.get('Material Output')\n emission = material.node_tree.nodes.new('ShaderNodeEmission')\n emission.inputs['Strength'].default_value = 5.0\n\n # link emission shader to material\n material.node_tree.links.new(material_output.inputs[0], emission.outputs[0])\n\n # set activer material to your new material\n plane.active_material = material",
"def _createNormalPrecomputeBuffer(self):\n self.normalPrecompute = RenderTarget(\"PrecomputeNormals\")\n self.normalPrecompute.addColorTexture()\n self.normalPrecompute.addAuxTextures(1)\n self.normalPrecompute.setColorBits(16)\n self.normalPrecompute.setAuxBits(16)\n self.normalPrecompute.prepareOffscreenBuffer()",
"def __initTiles(self):\n for m in range(self.amountVertical):\n for n in range(self.amountHorizontal):\n tile = self.themeFactory.createThemeElement(self.mapfile[m][n])\n tile.setCoordinates(m, n)\n tile.number = (m * self.amountHorizontal) + n\n self.tiles.append(tile)\n self.sprites.add(tile)",
"def __init__(self, width, height):\n self.width, self.height, self.tiles, w = width, height, {}, 0\n while (w < self.width):\n h = 0\n while (h < self.height):\n self.tiles[(w,h)] = 0\n h += 1\n w += 1",
"def _initialize(self):\n y = 0 # initial y height\n for x in xrange(-BOUND, BOUND + 1, STEP):\n for z in xrange(-BOUND, BOUND + 1, STEP):\n # create a layer stone an grass everywhere.\n self.add_block((x, y - 3, z), DISPLAY2TEXTURE['stonebrick_carved'], immediate=False)\n self.add_block((x, y - 2, z), DISPLAY2TEXTURE['redstone_ore'], immediate=False)\n self.add_block((x, y - 1, z), DISPLAY2TEXTURE['gravel'], immediate=False)\n self.add_block((x, y - 0, z), DISPLAY2TEXTURE['grass_side'], immediate=False)\n if x in (-BOUND, BOUND) or z in (-BOUND, BOUND):\n # create outer walls.\n for dy in xrange(-3, 8):\n self.add_block((x, y + dy, z), ['stonebrick_carved']*6, immediate=False)\n \n \"\"\" #add random walking block\n for i in range(5):\n x, y, z = random.randint(-50, 50),1,random.randint(-50, 50)\n block = Block((x, y, z),DISPLAY2TEXTURE['brick'],speed=5)\n ex, ey, ez = random.randint(-50, 50),1,random.randint(-50, 50)\n block.add_pinpoint((ex,ey,ez))\n self.move_set.append(block)\n self.add_block((x, y, z), DISPLAY2TEXTURE['brick'], immediate=False,zoom=0.5)\"\"\"\n \n \"\"\"\n for i in range(30):\n x, y, z = random.randint(-50, 50),random.randint(0, 20),random.randint(-50, 50)\n block = Block((x, y, z),DISPLAY2TEXTURE['brick'],speed=0,acceleration_y=GRAVITY) \n end_point=self.check_below((x,y,z))\n if end_point:\n block.add_pinpoint(end_point)\n self.move_set.append(block)\n self.add_block((x, y, z), DISPLAY2TEXTURE['brick'], immediate=False,zoom=0.5)\"\"\"\n \n #self._show_block ((5, 2, 0), DISPLAY2TEXTURE['diamond'])\n #self.add_destroy_stage((5, 2, 0), 'destroy_stage_5')\n #self._show_tri((5, 3, 5),'diamond')",
"def to_tiling(self) -> Tiling:\n pos = [len(self) - 1, self.odd_len()]\n tiling: Tiling = {}\n\n for pbit in self.parity_vector:\n tpos = tuple(pos)\n # print(tpos)\n if pbit == 0:\n if tpos not in tiling:\n tiling[tpos] = [None] * 4\n tiling[tpos][0] = 0\n pos[0] -= 1\n else:\n tpos_east = tpos[0] + 1, tpos[1]\n tpos_south = tpos[0], tpos[1] - 1\n if tpos_east not in tiling:\n tiling[tpos_east] = [None] * 4\n if tpos_south not in tiling:\n tiling[tpos_south] = [None] * 4\n # print(\"east\",tpos_east)\n tiling[tpos_east][3] = 1\n tiling[tpos_south][0] = 0\n pos[0] -= 1\n pos[1] -= 1\n\n for pos in tiling:\n tiling[pos] = SquareGlues(*tiling[pos])\n\n return Tiling(tiling, Collatz_tileset)",
"def create_raster_from_building_objects(building_list, x_res, y_res):\n canvas = np.zeros((x_res, y_res))\n canvas = np.uint16(canvas)\n for current_building in building_list.items():\n for current_point in current_building[1].points:\n canvas[current_point[0], current_point[1]] = current_building[1].label\n return canvas",
"def _setShaderInputs(self):\n\n # Shader inputs for the light-culling pass\n if self.haveLightingPass:\n self.lightBoundsComputeBuff.setShaderInput(\n \"destination\", self.lightPerTileStorage)\n self.lightBoundsComputeBuff.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n # Shader inputs for the light-applying pass\n self.lightingComputeContainer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.lightingComputeContainer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n self.lightingComputeContainer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n\n\n self.lightingComputeContainer.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightingComputeContainer.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightingComputeContainer.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n if self.occlusion.requiresViewSpacePosNrm():\n self.lightingComputeContainer.setShaderInput(\n \"viewSpaceNormals\",\n self.normalPrecompute.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"viewSpacePosition\",\n self.normalPrecompute.getAuxTexture(0))\n\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlas\", self.lightManager.getAtlasTex())\n\n if self.settings.useHardwarePCF:\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlasPCF\", self.lightManager.getAtlasTex(), self.lightManager.getPCFSampleState())\n\n self.lightingComputeContainer.setShaderInput(\n \"destination\", self.lightingComputeCombinedTex)\n self.lightingComputeContainer.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.lightingComputeContainer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n self.lightingComputeContainer.setShaderInput(\n \"noiseTexture\",\n self.showbase.loader.loadTexture(\"Data/Occlusion/noise4x4.png\"))\n self.lightingComputeContainer.setShaderInput(\n \"lightsPerTile\", self.lightPerTileStorage)\n\n\n if self.settings.enableGlobalIllumination:\n self.lightingComputeContainer.setShaderInput(\"giDiffuseTex\", self.giPrecomputeBuffer.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\"giReflectionTex\", self.giPrecomputeBuffer.getAuxTexture(0))\n\n\n # Shader inputs for the occlusion blur passes\n if self.occlusion.requiresBlurring() and self.haveCombiner:\n self.blurOcclusionH.setShaderInput(\n \"colorTex\", self.blurOcclusionV.getColorTexture())\n\n if self.settings.enableTemporalReprojection:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\", self.combiner.getColorTexture())\n else:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\",\n self.lightingComputeContainer.getColorTexture())\n\n self.blurOcclusionH.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionH.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n\n # Shader inputs for the blur passes\n if self.blurEnabled:\n self.blurColorH.setShaderInput(\n \"dofStorage\", self.dofStorage)\n self.blurColorV.setShaderInput(\n \"dofStorage\", 
self.dofStorage)\n self.blurColorH.setShaderInput(\"colorTex\",\n self.antialias.getResultTexture())\n self.blurColorH.setShaderInput(\"depthTex\",\n self.deferredTarget.getDepthTexture())\n self.blurColorV.setShaderInput(\"colorTex\",\n self.blurColorH.getColorTexture())\n\n # Shader inputs for the temporal reprojection\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.combiner.setShaderInput(\n \"currentComputation\",\n self.lightingComputeContainer.getColorTexture())\n self.combiner.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n self.combiner.setShaderInput(\n \"positionBuffer\", self.deferredTarget.getColorTexture())\n self.combiner.setShaderInput(\n \"velocityBuffer\", self.deferredTarget.getAuxTexture(1))\n self.combiner.setShaderInput(\"currentPixelShift\",\n self.currentPixelShift)\n self.combiner.setShaderInput(\"lastPixelShift\",\n self.lastPixelShift)\n\n if self.blurEnabled:\n self.combiner.setShaderInput(\n \"dofStorage\", self.dofStorage)\n\n self.combiner.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.combiner.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n self.combiner.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.combiner.setShaderInput(\"lastMVP\", self.lastMVP)\n self.combiner.setShaderInput(\"cameraPosition\", self.cameraPosition)\n self.combiner.setShaderInput(\"currentMVP\", self.lastMVP)\n\n # Shader inputs for the final pass\n if self.blurEnabled:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.blurColorV.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.antialias.getResultTexture())\n\n if self.occlusion.requiresBlurring():\n self.normalPrecompute.setShaderInput(\n \"positionTex\", self.deferredTarget.getColorTexture())\n self.normalPrecompute.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.normalPrecompute.setShaderInput(\n \"mainRender\", self.showbase.render)\n self.normalPrecompute.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n\n if self.haveMRT:\n self.deferredTarget.setShaderInput(\n \"velocityTex\", self.deferredTarget.getAuxTexture(1))\n\n self.deferredTarget.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.deferredTarget.setShaderInput(\n \"motionBlurFactor\", self.motionBlurFactor)\n\n if self.haveLightingPass:\n self.deferredTarget.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.deferredTarget.setShaderInput(\n \"newFrame\", self.combiner.getColorTexture())\n self.deferredTarget.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.combiner.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.antialias.getResultTexture())\n\n self.deferredTarget.setShaderInput(\n \"currentPosition\", self.deferredTarget.getColorTexture())\n\n # Set last / current mvp handles\n self.showbase.render.setShaderInput(\"lastMVP\", self.lastMVP)\n\n # Set GI inputs\n if self.settings.enableGlobalIllumination:\n self.globalIllum.bindTo(self.giPrecomputeBuffer, \"giData\")\n\n self.giPrecomputeBuffer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.giPrecomputeBuffer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.giPrecomputeBuffer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n 
self.giPrecomputeBuffer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n self.giPrecomputeBuffer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n # Finally, set shaders\n self.reloadShaders()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Creates the buffer which applies the lighting
|
def _makeLightingComputeBuffer(self):
    self.lightingComputeContainer = RenderTarget("ComputeLighting")

    # With temporal reprojection only every other column is shaded per
    # frame, so the compute target is half the screen width ('//' keeps
    # the size integral under Python 3 as well).
    if self.settings.enableTemporalReprojection:
        self.lightingComputeContainer.setSize(self.size.x // 2, self.size.y)
    else:
        self.lightingComputeContainer.setSize(self.size.x, self.size.y)

    self.lightingComputeContainer.addColorTexture()
    self.lightingComputeContainer.setColorBits(16)
    self.lightingComputeContainer.prepareOffscreenBuffer()

    # Full-size accumulation texture for the merged (reprojected) result.
    self.lightingComputeCombinedTex = Texture("Lighting-Compute-Combined")
    self.lightingComputeCombinedTex.setup2dTexture(
        self.size.x, self.size.y, Texture.TFloat, Texture.FRgba8)
    self.lightingComputeCombinedTex.setMinfilter(Texture.FTLinear)
    self.lightingComputeCombinedTex.setMagfilter(Texture.FTLinear)

    # Last frame's positions for reprojection; nearest filtering, since
    # interpolating positions across pixels would produce garbage.
    self.lastPositionBuffer = Texture("Last-Position-Buffer")
    self.lastPositionBuffer.setup2dTexture(
        self.size.x, self.size.y, Texture.TFloat, Texture.FRgba16)
    self.lastPositionBuffer.setMinfilter(Texture.FTNearest)
    self.lastPositionBuffer.setMagfilter(Texture.FTNearest)
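
For context, a minimal NumPy sketch of the interleaving this half-width target enables: each frame shades every other column, and the alternating x offset (the temporalProjXOffs input wired up in _setShaderInputs) merges the two half-frames into the full-size combined texture. Array shapes and the function name are illustrative assumptions, not pipeline API:

import numpy as np

def combine_half_frames(prev_combined, half_frame, x_offset):
    # prev_combined: (H, W, C) last merged result (full width)
    # half_frame:    (H, W // 2, C) freshly shaded columns
    # x_offset:      0 or 1, flipped every frame
    out = prev_combined.copy()
    out[:, x_offset::2, :] = half_frame
    return out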
|
[
"def _makeLightBoundsComputationBuffer(self, w, h):\n self.debug(\"Creating light precomputation buffer of size\", w, \"x\", h)\n self.lightBoundsComputeBuff = RenderTarget(\"ComputeLightTileBounds\")\n self.lightBoundsComputeBuff.setSize(w, h)\n self.lightBoundsComputeBuff.setColorWrite(False)\n self.lightBoundsComputeBuff.prepareOffscreenBuffer()",
"def _createOcclusionBlurBuffer(self):\n self.blurOcclusionV = RenderTarget(\"blurOcclusionVertical\")\n self.blurOcclusionV.addColorTexture()\n self.blurOcclusionV.prepareOffscreenBuffer()\n\n self.blurOcclusionH = RenderTarget(\"blurOcclusionHorizontal\")\n self.blurOcclusionH.addColorTexture()\n self.blurOcclusionH.prepareOffscreenBuffer()\n\n # Mipmaps for blur?\n # self.blurOcclusionV.getColorTexture().setMinfilter(\n # Texture.FTLinearMipmapLinear)\n # self.combiner.getColorTexture().setMinfilter(\n # Texture.FTLinearMipmapLinear)",
"def create_light():\n\n # Add new plane\n bpy.ops.mesh.primitive_plane_add(location=(15, -5, 5))\n plane = bpy.context.active_object\n plane.name = 'Light Plane'\n plane.scale = mathutils.Vector((4, 4, 4))\n # tilt\n plane.rotation_euler.rotate_axis('Y', radians(40))\n\n # Create a new material\n material = bpy.data.materials.new(name=\"Plane Light Emission Shader\")\n material.use_nodes = True\n\n # Remove default\n material.node_tree.nodes.remove(material.node_tree.nodes.get('Diffuse BSDF'))\n material_output = material.node_tree.nodes.get('Material Output')\n emission = material.node_tree.nodes.new('ShaderNodeEmission')\n emission.inputs['Strength'].default_value = 5.0\n\n # link emission shader to material\n material.node_tree.links.new(material_output.inputs[0], emission.outputs[0])\n\n # set activer material to your new material\n plane.active_material = material",
"def add_lighting(obj, track_to=True):\r\n if np.random.rand() > 0.3:\r\n bpy.context.view_layer.objects.active = None\r\n # docrender using method\r\n # d = random.uniform(2, 5)\r\n # litpos = Vector((0, d, 0))\r\n # eul = Euler((0, 0, 0), 'XYZ')\r\n # eul.rotate_axis('Z', random.uniform(math.radians(0), math.radians(180)))\r\n # eul.rotate_axis('X', random.uniform(math.radians(45), math.radians(135)))\r\n # litpos.rotate(eul)\r\n # bpy.ops.object.select_all(action='DESELECT')\r\n # bpy.ops.object.light_add(type='POINT', radius=1, align='WORLD', location=litpos)\r\n bpy.ops.object.light_add(type='POINT', radius=1, align='WORLD', location=(0,0,0))\r\n point_light = bpy.data.objects['Point']\r\n select_object(point_light)\r\n point_light.data.use_nodes = True\r\n pos_z = random.uniform(5, 8)\r\n pos_x = random.uniform(-1.5, 1.5)\r\n pos_y = random.uniform(-1.5, 1.5)\r\n point_light.location = (pos_x, pos_y, pos_z)\r\n nodes=point_light.data.node_tree.nodes\r\n links=point_light.data.node_tree.links\r\n for node in nodes:\r\n if node.type=='OUTPUT':\r\n output_node = node\r\n elif node.type=='EMISSION':\r\n emission_node=node\r\n strngth=random.uniform(1,8)\r\n emission_node.inputs[1].default_value=strngth\r\n bbody=nodes.new(type='ShaderNodeBlackbody')\r\n color_temp=random.uniform(2700,10200)\r\n bbody.inputs[0].default_value=color_temp\r\n links.new(bbody.outputs[0],emission_node.inputs[0])\r\n if track_to:\r\n # Track to constrain\r\n point_light.constraints.new(\"TRACK_TO\")\r\n point_light.constraints['Track To'].target = obj#bpy.data.objects[label]\r\n point_light.constraints['Track To'].up_axis = 'UP_Y'\r\n point_light.constraints['Track To'].track_axis = 'TRACK_NEGATIVE_Z'\r\n # Damped Track constrain\r\n # point_light.constraints.new(\"DAMPED_TRACK\") \r\n # point_light.constraints['Damped Track'].target = bpy.data.objects[label]\r\n # point_light.constraints['Damped Track'].subtarget = \"Control\"#\"Group\"\r\n # point_light.constraints['Damped Track'].track_axis = 'TRACK_NEGATIVE_Z'\r\n else:\r\n # d = random.uniform(2, 4)\r\n # litpos = Vector((0, d, 0))\r\n # eul = Euler((0, 0, 0), 'XYZ')\r\n # eul.rotate_axis('Z', random.uniform(math.radians(0), math.radians(180)))\r\n # eul.rotate_axis('X', random.uniform(math.radians(45), math.radians(135)))\r\n # litpos.rotate(eul)\r\n # bpy.ops.object.light_add(type='AREA', align='WORLD', location=litpos)\r\n bpy.ops.object.light_add(type='AREA', align='WORLD', location=(0,0,0))\r\n area_light = bpy.data.objects['Area']\r\n area_light.data.use_nodes = True\r\n pos_z = random.uniform(4, 8)\r\n pos_x = random.uniform(-1.5, 1.5)\r\n pos_y = random.uniform(-1.5, 1.5)\r\n area_light.location = (pos_x, pos_y, pos_z)\r\n area_light.data.size = random.uniform(1,3)\r\n nodes=area_light.data.node_tree.nodes\r\n links=area_light.data.node_tree.links\r\n for node in nodes:\r\n if node.type=='OUTPUT':\r\n output_node = node\r\n elif node.type=='EMISSION':\r\n emission_node=node\r\n strngth=random.uniform(1,10)\r\n emission_node.inputs[1].default_value=strngth\r\n bbody=nodes.new(type='ShaderNodeBlackbody')\r\n color_temp=random.uniform(4000,9500)\r\n bbody.inputs[0].default_value=color_temp\r\n links.new(bbody.outputs[0],emission_node.inputs[0])\r\n if track_to:\r\n # Track to constrain\r\n area_light.constraints.new(\"TRACK_TO\")\r\n area_light.constraints['Track To'].target = obj#bpy.data.objects[label]\r\n area_light.constraints['Track To'].up_axis = 'UP_Y'\r\n area_light.constraints['Track To'].track_axis = 'TRACK_NEGATIVE_Z'\r\n # Damped Track 
constrain\r\n # area_light.constraints.new(\"DAMPED_TRACK\") \r\n # area_light.constraints['Damped Track'].target = bpy.data.objects[label]\r\n # area_light.constraints['Damped Track'].subtarget = \"Control\"#\"Group\"\r\n # area_light.constraints['Damped Track'].track_axis = 'TRACK_NEGATIVE_Z'\r\n return",
"def _createLightingPipeline(self):\n\n if not self.haveLightingPass:\n self.debug(\"Skipping lighting pipeline\")\n return\n\n self.debug(\"Creating lighting pipeline ..\")\n\n # size has to be a multiple of the compute unit size\n # but still has to cover the whole screen\n sizeX = int(math.ceil(float(self.size.x) / self.patchSize.x))\n sizeY = int(math.ceil(float(self.size.y) / self.patchSize.y))\n\n self.precomputeSize = LVecBase2i(sizeX, sizeY)\n\n self.debug(\"Batch size =\", sizeX, \"x\", sizeY,\n \"Actual Buffer size=\", int(sizeX * self.patchSize.x),\n \"x\", int(sizeY * self.patchSize.y))\n\n self._makeLightPerTileStorage()\n\n # Create a buffer which computes which light affects which tile\n self._makeLightBoundsComputationBuffer(sizeX, sizeY)\n\n # Create a buffer which applies the lighting\n self._makeLightingComputeBuffer()\n\n # Register for light manager\n self.lightManager.setLightingComputator(self.lightingComputeContainer)\n self.lightManager.setLightingCuller(self.lightBoundsComputeBuff)\n\n self._loadFallbackCubemap()\n self._loadLookupCubemap()",
"def create_buffer(self, data):\n vbo = self.context.buffer(data)\n vao = self.context.vertex_array(self.program, [(vbo, \"3f4 2f4 1f4 /v\", \"aPos\", \"aTexCoord\", \"blockType\")])\n return vbo, vao",
"def _createNormalPrecomputeBuffer(self):\n self.normalPrecompute = RenderTarget(\"PrecomputeNormals\")\n self.normalPrecompute.addColorTexture()\n self.normalPrecompute.addAuxTextures(1)\n self.normalPrecompute.setColorBits(16)\n self.normalPrecompute.setAuxBits(16)\n self.normalPrecompute.prepareOffscreenBuffer()",
"def _setupFinalPass(self):\n # Set wrap for motion blur\n colorTex = self.antialias.getResultTexture()\n colorTex.setWrapU(Texture.WMClamp)\n colorTex.setWrapV(Texture.WMClamp)\n self._setFinalPassShader()",
"def _createFrameBuffer(self):\n print(f'QmlOffscreenRenderer._createFrameBufferObject: {self.size}')\n self._framebuffer = fbo = QOpenGLFramebufferObject(self.size, QOpenGLFramebufferObject.CombinedDepthStencil)\n self._window.setRenderTarget(fbo)",
"def _create_blended(self):\n hm_cpy = self.heat_map.copy()\n # Make filtered channels to be neutral after blend\n for c in range(self.hm_lvl):\n hm_cpy[:, :, c] = GREY_BLEND\n blended = cv2.addWeighted(self.im, 0.5, hm_cpy, 0.5, 0)\n self.output_filename = \"blended_{}.jpg\".format(self.top1_label)\n cv2.imwrite(os.path.join(\"media/\", self.output_filename), blended)",
"def _bind_frame_buffer(self):\n # Release the color and depth buffers if they exist:\n if self._framebuf is not None:\n glDeleteRenderbuffers(2, [self._colorbuf, self._depthbuf])\n glDeleteFramebuffers([self._framebuf])\n\n # Initialize the Framebuffer into which we will perform off-screen rendering\n self._colorbuf, self._depthbuf = glGenRenderbuffers(2)\n glBindRenderbuffer(GL_RENDERBUFFER, self._colorbuf)\n glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA, self._width, self._height)\n glBindRenderbuffer(GL_RENDERBUFFER, self._depthbuf)\n glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, self._width, self._height)\n\n self._framebuf = glGenFramebuffers(1)\n glBindFramebuffer(GL_DRAW_FRAMEBUFFER, self._framebuf)\n glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, self._colorbuf)\n glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, self._depthbuf)",
"def initializeGL(self):\n self.ctx = ModernGL.create_context()\n self.resizeGL(self.width(), self.height())\n\n self.volume_texture = self.ctx.texture3d(self.volume_size, 1, self.volume_data.tobytes(), alignment=4, floats=True)\n self.volume_texture.repeat_x = True\n self.volume_texture.repeat_y = True\n # @Todo: ModernGL this raises an error - probably missing wrapper\n #self.volume_texture.repeat_z = True\n self.volume_texture.filter = ModernGL.LINEAR\n\n tff_data = self.get_tff_data()\n self.tff_texture = self.ctx.texture((len(tff_data),1), 4, tff_data.tobytes(), alignment=4, floats=4)\n self.tff_texture.repeat_x = True\n self.tff_texture.repeat_y = True\n self.tff_texture.filter = ModernGL.NEAREST\n\n\n self.unf_screensize = None\n self.unf_stepsize = None\n self.unf_transferfunc = None\n\n self.color_texture = None\n self.depth_texture = None\n\n self.volume_texture.use(0)\n self.tff_texture.use(1)\n\n # These are the vertices that make up our cube bounding volume. Every row specifies\n # one corner of our unit cube\n self.vbo_vertex = self.ctx.buffer(struct.pack(\n '24f',\n 0.0, 0.0, 0.0,\n 0.0, 0.0, self._z,\n 0.0, self._y, 0.0,\n 0.0, self._y, self._z,\n self._x, 0.0, 0.0,\n self._x, 0.0, self._z,\n self._x, self._y, 0.0,\n self._x, self._y, self._z\n ))\n\n # This is the index buffer for our bounding geometry. Every row specifies a triangle\n # by three indices of our vbo_index vertex buffer\n self.vbo_veridx = self.ctx.buffer(struct.pack(\n '36I',\n 1,5,7,\n 7,3,1,\n 0,2,6,\n 6,4,0,\n 0,1,3,\n 3,2,0,\n 7,5,4,\n 4,6,7,\n 2,3,7,\n 7,6,2,\n 1,0,4,\n 4,5,1\n ))\n\n self.reload_shaders()",
"def shadingLightRelCtx(offCommand=\"string\", image1=\"string\", onCommand=\"string\", shadingCentric=bool, history=bool, exists=bool, image2=\"string\", name=\"string\", image3=\"string\"):\n pass",
"def make_acrylic_surface_wlsmode():\n acrylic_surface = Surface(\"acrylic_surface_detector\")\n acrylic_surface.set('reflect_diffuse', 0.0)\n acrylic_surface.set('reflect_specular',0.0)\n acrylic_surface.set('detect',0.0)\n acrylic_surface.set('absorb',0.0)\n acrylic_surface.set('reemit', datatools.load_hist_data( os.path.dirname(__file__)+\"/raw_tpb_emission.dat\", 350, 640 ) ) # 100% reemission. Actually, should be 120%!! Need to think about this.\n acrylic_surface.transmissive = 1\n return acrylic_surface",
"def draw_buffs(multiplier, color, offset):\n pyglet.graphics.draw(4, pyglet.gl.GL_POLYGON,\n (\"v2i\", (offset, buff_height, offset, buff_height + buff_size,\n offset + buff_size, buff_height + buff_size, offset + buff_size, buff_height)),\n (\"c3B\", color * 4))\n mult = str(\"x{:.2f}\".format(multiplier))\n label = pyglet.text.Label(mult, font_name='Courier New', font_size=11, bold=True,\n x=offset + buff_size*3 // 2, y=buff_height + buff_size // 2, anchor_x='left',\n anchor_y='center')\n label.draw()",
"def place_camera_and_light():\n # Place Camera\n bpy.context.scene.use_nodes = True\n camera = bpy.data.cameras.new(\"Camera\")\n camera_obj = bpy.data.objects.new(\"Camera\", camera)\n camera_obj.location = (0,-200,0)\n camera_obj.rotation_euler = (radians(90),0,0)\n bpy.context.scene.camera = camera_obj\n bpy.context.scene.collection.objects.link(camera_obj)\n\n # create light datablock, set attributes\n light_data = bpy.data.lights.new(name=\"light_2.80\", type='POINT')\n light_data.energy = 30\n\n # create new object with our light datablock\n light_object = bpy.data.objects.new(name=\"light_2.80\", object_data=light_data)\n\n # link light object\n bpy.context.collection.objects.link(light_object)\n\n # make it active \n bpy.context.view_layer.objects.active = light_object\n\n #change location\n light_object.location = (0, -0.5, 0)",
"def update(self):\n # For some reason, simply updating the current overlay causes\n # PiCameraMMALError every time we update. To avoid that, we create a new\n # overlay each time we want to update.\n # We use a temp overlay object because if we remove the current overlay\n # first, it causes flickering (the overlay visibly disappears for a moment).\n temp_overlay = self._camera.add_overlay(\n self._buffer.tobytes(), format='rgba', layer=3, size=self._buffer_dims)\n if self._overlay is not None:\n self._camera.remove_overlay(self._overlay)\n self._overlay = temp_overlay\n self._overlay.update(self._buffer.tobytes())",
"def _creatGIPrecomputeBuffer(self):\n\n self.giPrecomputeBuffer = RenderTarget(\"GICompute\")\n self.giPrecomputeBuffer.setSize(self.size.x / 2, self.size.y / 2)\n self.giPrecomputeBuffer.addColorTexture()\n self.giPrecomputeBuffer.addAuxTextures(1)\n self.giPrecomputeBuffer.setColorBits(16)\n self.giPrecomputeBuffer.prepareOffscreenBuffer()",
"def render(obj_path, viewpoint):\n\n# for index, vp in enumerate(viewpoint_list):\n vp = viewpoint\n cam_location = camera_location(vp.azimuth, vp.elevation, vp.distance)\n cam_rot = camera_rot_XYZEuler(vp.azimuth, vp.elevation, vp.tilt)\n \n bpy.data.objects['Camera'].location[0] = cam_location[0]\n bpy.data.objects['Camera'].location[1] = cam_location[1]\n bpy.data.objects['Camera'].location[2] = cam_location[2]\n\n bpy.data.objects['Camera'].rotation_euler[0] = cam_rot[0]\n bpy.data.objects['Camera'].rotation_euler[1] = cam_rot[1]\n bpy.data.objects['Camera'].rotation_euler[2] = cam_rot[2]\n\n if not os.path.exists(g_syn_depth_folder):\n os.mkdir(g_syn_depth_folder)\n\n file_output_node = bpy.context.scene.node_tree.nodes[2]\n file_output_node.file_slots[0].path = 'blender-######.depth.png' # blender placeholder #\n\n bpy.ops.render.render(write_still=True)\n\n current_frame = bpy.context.scene.frame_current\n bpy.context.scene.frame_set(current_frame + 1)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Creates the buffers needed to blur the occlusion
|
def _createOcclusionBlurBuffer(self):
    self.blurOcclusionV = RenderTarget("blurOcclusionVertical")
    self.blurOcclusionV.addColorTexture()
    self.blurOcclusionV.prepareOffscreenBuffer()

    self.blurOcclusionH = RenderTarget("blurOcclusionHorizontal")
    self.blurOcclusionH.addColorTexture()
    self.blurOcclusionH.prepareOffscreenBuffer()

    # Mipmaps for blur?
    # self.blurOcclusionV.getColorTexture().setMinfilter(
    #     Texture.FTLinearMipmapLinear)
    # self.combiner.getColorTexture().setMinfilter(
    #     Texture.FTLinearMipmapLinear)
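
The split into vertical and horizontal targets is the usual separable-blur trick: the V pass runs first and the H pass reads its color texture (the matching fragment shaders appear in _setOcclusionBlurShader among the negatives below), so an r-wide 2-D blur costs two r-tap passes instead of one r-squared-tap pass. A rough NumPy sketch of that idea with an illustrative box kernel:

import numpy as np

def separable_box_blur(img, radius=2):
    # img: (H, W) single channel. Two 1-D convolutions reproduce a 2-D
    # box blur but touch O(radius) texels per pass, not O(radius^2).
    k = np.ones(2 * radius + 1) / (2 * radius + 1)
    blur_1d = lambda a, axis: np.apply_along_axis(
        np.convolve, axis, a, k, mode='same')
    return blur_1d(blur_1d(img, 0), 1)  # vertical pass, then horizontal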
|
[
"def create_buffer(self, data):\n vbo = self.context.buffer(data)\n vao = self.context.vertex_array(self.program, [(vbo, \"3f4 2f4 1f4 /v\", \"aPos\", \"aTexCoord\", \"blockType\")])\n return vbo, vao",
"def _create_blended(self):\n hm_cpy = self.heat_map.copy()\n # Make filtered channels to be neutral after blend\n for c in range(self.hm_lvl):\n hm_cpy[:, :, c] = GREY_BLEND\n blended = cv2.addWeighted(self.im, 0.5, hm_cpy, 0.5, 0)\n self.output_filename = \"blended_{}.jpg\".format(self.top1_label)\n cv2.imwrite(os.path.join(\"media/\", self.output_filename), blended)",
"def _setOcclusionBlurShader(self):\n blurVShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurOcclusionVertical.fragment\")\n blurHShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurOcclusionHorizontal.fragment\")\n self.blurOcclusionV.setShader(blurVShader)\n self.blurOcclusionH.setShader(blurHShader)",
"def _makeLightBoundsComputationBuffer(self, w, h):\n self.debug(\"Creating light precomputation buffer of size\", w, \"x\", h)\n self.lightBoundsComputeBuff = RenderTarget(\"ComputeLightTileBounds\")\n self.lightBoundsComputeBuff.setSize(w, h)\n self.lightBoundsComputeBuff.setColorWrite(False)\n self.lightBoundsComputeBuff.prepareOffscreenBuffer()",
"def _makeLightingComputeBuffer(self):\n self.lightingComputeContainer = RenderTarget(\"ComputeLighting\")\n\n if self.settings.enableTemporalReprojection:\n self.lightingComputeContainer.setSize(self.size.x / 2, self.size.y)\n else:\n self.lightingComputeContainer.setSize(self.size.x, self.size.y)\n\n self.lightingComputeContainer.addColorTexture()\n self.lightingComputeContainer.setColorBits(16)\n self.lightingComputeContainer.prepareOffscreenBuffer()\n\n self.lightingComputeCombinedTex = Texture(\"Lighting-Compute-Combined\")\n self.lightingComputeCombinedTex.setup2dTexture(\n self.size.x, self.size.y, Texture.TFloat, Texture.FRgba8)\n self.lightingComputeCombinedTex.setMinfilter(Texture.FTLinear)\n self.lightingComputeCombinedTex.setMagfilter(Texture.FTLinear)\n\n self.lastPositionBuffer = Texture(\"Last-Position-Buffer\")\n self.lastPositionBuffer.setup2dTexture(\n self.size.x, self.size.y, Texture.TFloat, Texture.FRgba16)\n self.lastPositionBuffer.setMinfilter(Texture.FTNearest)\n self.lastPositionBuffer.setMagfilter(Texture.FTNearest)",
"def render(obj_path, viewpoint):\n\n# for index, vp in enumerate(viewpoint_list):\n vp = viewpoint\n cam_location = camera_location(vp.azimuth, vp.elevation, vp.distance)\n cam_rot = camera_rot_XYZEuler(vp.azimuth, vp.elevation, vp.tilt)\n \n bpy.data.objects['Camera'].location[0] = cam_location[0]\n bpy.data.objects['Camera'].location[1] = cam_location[1]\n bpy.data.objects['Camera'].location[2] = cam_location[2]\n\n bpy.data.objects['Camera'].rotation_euler[0] = cam_rot[0]\n bpy.data.objects['Camera'].rotation_euler[1] = cam_rot[1]\n bpy.data.objects['Camera'].rotation_euler[2] = cam_rot[2]\n\n if not os.path.exists(g_syn_depth_folder):\n os.mkdir(g_syn_depth_folder)\n\n file_output_node = bpy.context.scene.node_tree.nodes[2]\n file_output_node.file_slots[0].path = 'blender-######.depth.png' # blender placeholder #\n\n bpy.ops.render.render(write_still=True)\n\n current_frame = bpy.context.scene.frame_current\n bpy.context.scene.frame_set(current_frame + 1)",
"def _bind_frame_buffer(self):\n # Release the color and depth buffers if they exist:\n if self._framebuf is not None:\n glDeleteRenderbuffers(2, [self._colorbuf, self._depthbuf])\n glDeleteFramebuffers([self._framebuf])\n\n # Initialize the Framebuffer into which we will perform off-screen rendering\n self._colorbuf, self._depthbuf = glGenRenderbuffers(2)\n glBindRenderbuffer(GL_RENDERBUFFER, self._colorbuf)\n glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA, self._width, self._height)\n glBindRenderbuffer(GL_RENDERBUFFER, self._depthbuf)\n glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, self._width, self._height)\n\n self._framebuf = glGenFramebuffers(1)\n glBindFramebuffer(GL_DRAW_FRAMEBUFFER, self._framebuf)\n glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, self._colorbuf)\n glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, self._depthbuf)",
"def clearBuffers(self) -> None:\n ...",
"def FBOCreate(self, *args):\n return _Graphic3d.Graphic3d_GraphicDriver_FBOCreate(self, *args)",
"def _createNormalPrecomputeBuffer(self):\n self.normalPrecompute = RenderTarget(\"PrecomputeNormals\")\n self.normalPrecompute.addColorTexture()\n self.normalPrecompute.addAuxTextures(1)\n self.normalPrecompute.setColorBits(16)\n self.normalPrecompute.setAuxBits(16)\n self.normalPrecompute.prepareOffscreenBuffer()",
"def create(lucid_kernel=..., blur_kernel=...) -> retval:\n ...",
"def createCommandBuffers(self):\n cmdBufAllocateInfo = vk.VkCommandBufferAllocateInfo(\n sType = vk.VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,\n commandPool = self.cmdPool,\n level = vk.VK_COMMAND_BUFFER_LEVEL_PRIMARY,\n commandBufferCount = self.swapChain.imageCount)\n\n self.drawCmdBuffers = vk.vkAllocateCommandBuffers(self.device, cmdBufAllocateInfo)",
"def _creatGIPrecomputeBuffer(self):\n\n self.giPrecomputeBuffer = RenderTarget(\"GICompute\")\n self.giPrecomputeBuffer.setSize(self.size.x / 2, self.size.y / 2)\n self.giPrecomputeBuffer.addColorTexture()\n self.giPrecomputeBuffer.addAuxTextures(1)\n self.giPrecomputeBuffer.setColorBits(16)\n self.giPrecomputeBuffer.prepareOffscreenBuffer()",
"def pc_output_buffers_full(self, *args):\n return _wavelet_swig.squash_ff_sptr_pc_output_buffers_full(self, *args)",
"def pc_output_buffers_full(self, *args):\n return _mediatools_swig.mediatools_audiosource_s_sptr_pc_output_buffers_full(self, *args)",
"def _render_obs(self):\n pixels = self.env.render(mode=\"rgb_array\")\n if self._res_hw is not None:\n h, w = self._res_hw\n pixels = cv2.resize(\n pixels,\n dsize=(w, h),\n interpolation=cv2.INTER_CUBIC,\n )\n return pixels",
"def sample_then_blur(self,pts_t,text_1d,mag,psf_x,psf_y,x,y):\n\t\t# propagate the coordinate of each sensor pixel onto the \n\t\t# texture plane\n\t\tX_s = (np.arange(self.cfg['szx_sensor']\\\n\t\t\t+psf_x.shape[1]-1+psf_y.shape[1]-1)\\\n\t\t\t-self.cfg['x_prinpts']-(psf_x.shape[1]-1)/2\\\n\t\t\t-(psf_y.shape[1]-1)/2)/mag\\\n\t\t\t-x/self.img_cfg['pix_in_m']\n\t\tY_s = (np.arange(self.cfg['szy_sensor']\\\n\t\t\t+psf_x.shape[0]-1+psf_y.shape[0]-1)\\\n\t\t\t-self.cfg['y_prinpts']-(psf_y.shape[0]-1)/2\\\n\t\t\t-(psf_x.shape[0]-1)/2)/mag\\\n\t\t\t-y/self.img_cfg['pix_in_m']\n\n\t\t# As we are using round padding, we need to mod the X_s\n\t\t# and Y_s to make them within the range of XX and YY\n\t\tX_s = np.remainder(X_s, self.img_cfg['res'][1]-1)\n\t\tY_s = np.remainder(Y_s, self.img_cfg['res'][0]-1)\n\n\t\tX_s, Y_s = np.meshgrid(\n\t\t\tX_s, Y_s\n\t\t)\n\t\tpts_s = np.concatenate(\n\t\t\t(\n\t\t\t\tnp.reshape(X_s,(-1,1)),\n\t\t\t\tnp.reshape(Y_s,(-1,1))\n\t\t\t),\n\t\t\taxis = 1\n\t\t)\n\t\t# the sharp image captured by camera can be approximated\n\t\t# as the interpolation of the sensor coordinates onto\n\t\t# the texture coordinate map\n\t\tP = interpolate.griddata(pts_t, text_1d, pts_s, method = 'linear')\n\t\tP = np.reshape(P,X_s.shape)\n\t\t\n\t\t# We then convolve the sharp image with the blur kernel\n\t\ttemp = signal.convolve2d(P,psf_x,mode='valid')\n\t\treturn signal.convolve2d(temp,psf_y,mode='valid')",
"def _createFrameBuffer(self):\n print(f'QmlOffscreenRenderer._createFrameBufferObject: {self.size}')\n self._framebuffer = fbo = QOpenGLFramebufferObject(self.size, QOpenGLFramebufferObject.CombinedDepthStencil)\n self._window.setRenderTarget(fbo)",
"def __init__(self, source, spec):\n\n # Source image is now blurred, then encoded to be a HLS-encoded array.\n logging.debug(\"Converting to HLS color space.\")\n self.img = source\n self.img_size = source.shape\n self.img_blurred = ops.blur(source, BLUR_SIGMA)\n self.img_luminosity = ops.rgb2hls(self.img_blurred)[:,:,1]\n\n # Now we make a histogram of the blurred luminosities, each in bins.\n logging.debug(\"Preparing first version of output.\")\n L = window(self.img_luminosity)\n hist, bins = np.histogram(L, density=True, bins=BIN_COUNT)\n L_indices = np.digitize(L.flatten(), bins)\n\n # Store the center of all patches by using the luminosity bins. \n coordinates = np.indices((source.shape[0]-PATCH_SIZE, source.shape[1]-PATCH_SIZE)).swapaxes(0,2).swapaxes(0,1)\n coordinates += [PATCH_HALF, PATCH_HALF]\n self.c_coords = self.createBins(L_indices, coordinates)\n\n # For each bin we calculate the average color, per-luminosity which assumes\n # the image patterns don't have too much hue variation.\n c_buckets = self.createBins(L_indices, window(self.img_blurred))\n c_averages = [np.average(bucket, axis=0) for bucket in c_buckets]\n\n # Normalize the specification image based on what our luminosity can provide.\n ml = min(L.flatten())\n sl = max(L.flatten()) - ml\n self.spec = ml + spec * sl\n\n # Apply the same binning process to the spec image....\n S_indices = np.digitize(self.spec.flatten(), bins)\n self.spec_bins = {}\n for i, bn in enumerate(S_indices):\n # Check coordinates and discard if it's out of bounds.\n ty, tx = i//self.spec.shape[1], i%self.spec.shape[1]\n if ty+PATCH_START < 0 or ty+PATCH_FINISH > self.spec.shape[0]:\n continue\n if tx+PATCH_START < 0 or tx+PATCH_FINISH > self.spec.shape[1]:\n continue\n self.spec_bins[(ty, tx)] = min(bn-1, BIN_COUNT-1)\n\n # Generate a first version of the output based on the average given the luminosity\n # of the specification. There are no interesting patterns, just colors.\n self.output = np.array([c_averages[min(bn-1, BIN_COUNT-1)] for bn in S_indices], dtype=np.float32)\\\n .reshape(self.spec.shape[0], self.spec.shape[1], 3)\n self.coverage = np.zeros(self.output.shape[:2], dtype=np.float32)\n\n # Prepare a masking array used for blending and feathering out the edges of patches.\n self.createMask()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Creates a buffer which reconstructs view-space normals and positions
|
def _createNormalPrecomputeBuffer(self):
    self.normalPrecompute = RenderTarget("PrecomputeNormals")
    self.normalPrecompute.addColorTexture()   # view-space normals
    self.normalPrecompute.addAuxTextures(1)   # view-space positions
    self.normalPrecompute.setColorBits(16)
    self.normalPrecompute.setAuxBits(16)
    self.normalPrecompute.prepareOffscreenBuffer()
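
What this pass computes can be sketched on the CPU: given a view-space position buffer (the aux texture), per-pixel normals fall out of the cross product of screen-space finite differences. A hedged NumPy illustration, assuming a (H, W, 3) position array; nothing here is pipeline API:

import numpy as np

def normals_from_positions(pos):
    # pos: (H, W, 3) view-space positions, one per pixel
    dx = np.diff(pos, axis=1, append=pos[:, -1:, :])  # horizontal gradient
    dy = np.diff(pos, axis=0, append=pos[-1:, :, :])  # vertical gradient
    n = np.cross(dx, dy)
    length = np.linalg.norm(n, axis=-1, keepdims=True)
    return n / np.maximum(length, 1e-8)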
|
[
"def create_buffer(self, data):\n vbo = self.context.buffer(data)\n vao = self.context.vertex_array(self.program, [(vbo, \"3f4 2f4 1f4 /v\", \"aPos\", \"aTexCoord\", \"blockType\")])\n return vbo, vao",
"def _update(self):\n\n if self._vertices_buffer is not None:\n self._vertices_buffer._delete()\n self._vertices_buffer = self._vertices_list.data.view(VertexBuffer)\n\n if self.itype is not None:\n if self._indices_buffer is not None:\n self._indices_buffer._delete()\n self._indices_buffer = self._indices_list.data.view(IndexBuffer)\n\n if self.utype is not None:\n if self._uniforms_texture is not None:\n self._uniforms_texture._delete()\n\n # We take the whole array (_data), not the data one\n texture = self._uniforms_list._data.view(np.float32)\n size = len(texture)/self._uniforms_float_count\n shape = self._compute_texture_shape(size)\n\n # shape[2] = float count is only used in vertex shader code\n texture = texture.reshape(int(shape[0]), int(shape[1]), 4)\n self._uniforms_texture = texture.view(TextureFloat2D)\n self._uniforms_texture.interpolation = gl.GL_NEAREST\n\n if len(self._programs):\n for program in self._programs:\n program.bind(self._vertices_buffer)\n if self._uniforms_list is not None:\n program[\"uniforms\"] = self._uniforms_texture\n program[\"uniforms_shape\"] = self._ushape",
"def initial_pos(self,num):\n\n pos, col, vel = self.init_np(num)\n print pos\n print vel\n\n #create the Vertex Buffer Objects\n from OpenGL.arrays import vbo\n pos_vbo = vbo.VBO(data=pos, usage=GL_DYNAMIC_DRAW, target=GL_ARRAY_BUFFER)\n pos_vbo.bind()\n col_vbo = vbo.VBO(data=col, usage=GL_DYNAMIC_DRAW, target=GL_ARRAY_BUFFER)\n col_vbo.bind()\n\n return (pos_vbo, col_vbo, vel)",
"def initializeVertexBuffer(self):\n self.vertexBufferObject = glGenBuffers(1)\n glBindBuffer(GL_ARRAY_BUFFER, self.vertexBufferObject)\n glBufferData(GL_ARRAY_BUFFER, self.vertices, GL_STATIC_DRAW)\n \n glBindBuffer(GL_ARRAY_BUFFER, 0)\n \n self.indexBufferObject = glGenBuffers(1)\n glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.indexBufferObject)\n glBufferData(GL_ELEMENT_ARRAY_BUFFER, self.indices, GL_STATIC_DRAW)\n glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0)",
"def _makeLightingComputeBuffer(self):\n self.lightingComputeContainer = RenderTarget(\"ComputeLighting\")\n\n if self.settings.enableTemporalReprojection:\n self.lightingComputeContainer.setSize(self.size.x / 2, self.size.y)\n else:\n self.lightingComputeContainer.setSize(self.size.x, self.size.y)\n\n self.lightingComputeContainer.addColorTexture()\n self.lightingComputeContainer.setColorBits(16)\n self.lightingComputeContainer.prepareOffscreenBuffer()\n\n self.lightingComputeCombinedTex = Texture(\"Lighting-Compute-Combined\")\n self.lightingComputeCombinedTex.setup2dTexture(\n self.size.x, self.size.y, Texture.TFloat, Texture.FRgba8)\n self.lightingComputeCombinedTex.setMinfilter(Texture.FTLinear)\n self.lightingComputeCombinedTex.setMagfilter(Texture.FTLinear)\n\n self.lastPositionBuffer = Texture(\"Last-Position-Buffer\")\n self.lastPositionBuffer.setup2dTexture(\n self.size.x, self.size.y, Texture.TFloat, Texture.FRgba16)\n self.lastPositionBuffer.setMinfilter(Texture.FTNearest)\n self.lastPositionBuffer.setMagfilter(Texture.FTNearest)",
"def from_normal_modes(self, pos):\n pos = pos.copy()\n pos[:4] = self.R @ pos[:4]\n return pos",
"def gen_buffers(mesh):\n data = list(itertools.chain(*[[*item[0], *item[1], *item[2]]\n for item in zip(mesh.verts, mesh.normals, mesh.texcoords)]))\n indices = list(itertools.chain(*mesh.triangles))\n\n vbo = gl.glGenBuffers(1)\n gl.glBindBuffer(gl.GL_ARRAY_BUFFER, vbo)\n gl.glBufferData(gl.GL_ARRAY_BUFFER, len(data) * float_size,\n convert(c_float, data), gl.GL_STATIC_DRAW)\n ibo = gl.glGenBuffers(1)\n gl.glBindBuffer(gl.GL_ELEMENT_ARRAY_BUFFER, ibo)\n gl.glBufferData(gl.GL_ELEMENT_ARRAY_BUFFER, len(indices),\n convert(c_ubyte, indices), gl.GL_STATIC_DRAW)\n return vbo, ibo",
"def gen_array():\n vao = gl.glGenVertexArrays(1)\n gl.glBindVertexArray(vao)\n gl.glVertexAttribPointer(\n 0, 3, gl.GL_FLOAT, gl.GL_FALSE, 8 * float_size, None)\n gl.glEnableVertexAttribArray(0)\n gl.glVertexAttribPointer(\n 1, 3, gl.GL_FLOAT, gl.GL_FALSE, 8 * float_size, c_void_p(3 * float_size))\n gl.glEnableVertexAttribArray(1)\n gl.glVertexAttribPointer(\n 2, 2, gl.GL_FLOAT, gl.GL_FALSE, 8 * float_size, c_void_p(6 * float_size))\n gl.glEnableVertexAttribArray(2)\n return vao",
"def _makeLightBoundsComputationBuffer(self, w, h):\n self.debug(\"Creating light precomputation buffer of size\", w, \"x\", h)\n self.lightBoundsComputeBuff = RenderTarget(\"ComputeLightTileBounds\")\n self.lightBoundsComputeBuff.setSize(w, h)\n self.lightBoundsComputeBuff.setColorWrite(False)\n self.lightBoundsComputeBuff.prepareOffscreenBuffer()",
"def generate_dataset_using_independent_normals(target_statistics):\n pass",
"def _generate_mesh(self):\n self._mesh_points = self._make_pos()",
"def render_normal(outpath, cam=None, obj_names=None, camera_space=True):\n from .object import add_sphere\n from .camera import point_camera_to, get_2d_bounding_box\n\n logger_name = thisfile + '->render_normal()'\n\n cam_name, obj_names, scene, outnode = _render_prepare(cam, obj_names)\n\n # # Make normals consistent\n # for obj_name in obj_names:\n # scene.objects.active = bpy.data.objects[obj_name]\n # bpy.ops.object.mode_set(mode='EDIT')\n # bpy.ops.mesh.select_all()\n # bpy.ops.mesh.normals_make_consistent()\n # bpy.ops.object.mode_set(mode='OBJECT')\n\n # Add reference normal ball\n if 'ref-ball' in obj_names:\n world_origin = (0, 0, 0)\n sphere = add_sphere(location=world_origin)\n point_camera_to(cam, world_origin, up=(0, 0, 1)) # point camera to there\n # Decide scale of the ball so that it, when projected, fits into the frame\n bbox = get_2d_bounding_box(sphere, cam)\n s = max((bbox[1, 0] - bbox[0, 0]) / scene.render.resolution_x,\n (bbox[3, 1] - bbox[0, 1]) / scene.render.resolution_y) * 1.2\n sphere.scale = (1 / s, 1 / s, 1 / s)\n\n # Set up scene node tree\n node_tree = scene.node_tree\n nodes = node_tree.nodes\n scene.render.layers['RenderLayer'].use_pass_normal = True\n set_alpha_node = nodes.new('CompositorNodeSetAlpha')\n node_tree.links.new(nodes['Render Layers'].outputs['Alpha'],\n set_alpha_node.inputs['Alpha'])\n node_tree.links.new(nodes['Render Layers'].outputs['Normal'],\n set_alpha_node.inputs['Image'])\n result_socket = set_alpha_node.outputs['Image']\n\n # Select rendering engine based on whether camera or object space is desired\n if camera_space:\n scene.render.engine = 'BLENDER_RENDER'\n scene.render.alpha_mode = 'TRANSPARENT'\n else:\n scene.render.engine = 'CYCLES'\n scene.cycles.film_transparent = True\n scene.cycles.samples = 16 # for anti-aliased edges\n\n # Render\n outpath = _render(scene, outnode, result_socket, outpath)\n\n logger.name = logger_name\n logger.info(\"Normal map of %s rendered through '%s' to %s\", obj_names, cam_name, outpath)\n logger.warning(\"The scene node tree has changed\")",
"def to_prototypes_view(self, var_buffer: np.ndarray) -> np.ndarray:\n raise NotImplementedError(\"You should implement this!\")",
"def generate_mesh(self):\n length = self.length\n Nx = self.Nx\n Nz = self.Nz\n self.mesh = RectangleMesh(Point(0,0), Point(length, 1), Nx, Nz, \"left/right\")\n\n # Now deform top and bottom based on surface and base profiles\n coordinates = self.mesh.coordinates()\n surf = self.surf_fun(coordinates[:,0])\n bot = self.bot_fun(coordinates[:,0])\n thick = surf-bot\n coordinates[:,1] = coordinates[:,1]*thick + bot\n self.mesh.bounding_box_tree().build(self.mesh)",
"def create_vertex_buffers(self):\n buffer_formats = []\n for name in self.__attribute_types:\n size, data_type = self.__attribute_types[name]\n buffer_formats.append((name, size, data_type))\n return VertexData(self, buffer_formats)",
"def updateTangentSpace(self):\n # set up tangent space\n self.tangentsData = CgfFormat.DataStreamChunk()\n self.tangentsData.dataStreamType = CgfFormat.DataStreamType.TANGENTS\n self.tangentsData.bytesPerElement = 16\n self.tangentsData.numElements = self.numVertices\n self.tangentsData.tangents.updateSize()\n selftangentsData_iter = iter(self.tangentsData.tangents)\n\n # set Crysis tangents info\n tangents, binormals, orientations = pyffi.utils.tangentspace.getTangentSpace(\n vertices = list((vert.x, vert.y, vert.z)\n for vert in self.verticesData.vertices),\n normals = list((norm.x, norm.y, norm.z)\n for norm in self.normalsData.normals),\n uvs = list((uv.u, uv.v)\n for uv in self.uvsData.uvs),\n triangles = list(self.getTriangles()),\n orientation = True)\n\n for crytangent, tan, bin, orient in izip(self.tangentsData.tangents,\n tangents, binormals, orientations):\n if orient > 0:\n tangent_w = 32767\n else:\n tangent_w = -32767\n crytangent[1].x = int(32767 * tan[0])\n crytangent[1].y = int(32767 * tan[1])\n crytangent[1].z = int(32767 * tan[2])\n crytangent[1].w = tangent_w\n crytangent[0].x = int(32767 * bin[0])\n crytangent[0].y = int(32767 * bin[1])\n crytangent[0].z = int(32767 * bin[2])\n crytangent[0].w = tangent_w",
"def render(obj_path, viewpoint):\n\n# for index, vp in enumerate(viewpoint_list):\n vp = viewpoint\n cam_location = camera_location(vp.azimuth, vp.elevation, vp.distance)\n cam_rot = camera_rot_XYZEuler(vp.azimuth, vp.elevation, vp.tilt)\n \n bpy.data.objects['Camera'].location[0] = cam_location[0]\n bpy.data.objects['Camera'].location[1] = cam_location[1]\n bpy.data.objects['Camera'].location[2] = cam_location[2]\n\n bpy.data.objects['Camera'].rotation_euler[0] = cam_rot[0]\n bpy.data.objects['Camera'].rotation_euler[1] = cam_rot[1]\n bpy.data.objects['Camera'].rotation_euler[2] = cam_rot[2]\n\n if not os.path.exists(g_syn_depth_folder):\n os.mkdir(g_syn_depth_folder)\n\n file_output_node = bpy.context.scene.node_tree.nodes[2]\n file_output_node.file_slots[0].path = 'blender-######.depth.png' # blender placeholder #\n\n bpy.ops.render.render(write_still=True)\n\n current_frame = bpy.context.scene.frame_current\n bpy.context.scene.frame_set(current_frame + 1)",
"def DrawAll(quad_buffer,texture):\n if quad_buffer.is_ui:\n ui_buffers.Add(quad_buffer,texture)\n return\n DrawAllNowNormals(quad_buffer,texture,geom_shader)",
"def __init__(self, scene):\n self.scene = scene\n self._width = self.scene.camera.intrinsics.width\n self._height = self.scene.camera.intrinsics.height\n self._vaids = None\n self._colorbuf, self._depthbuf = None, None\n self._framebuf = None\n self._window = None\n\n # Initialize the OpenGL context with a 1x1 window and hide it immediately\n try:\n conf = pyglet.gl.Config(\n depth_size=24,\n double_buffer=True,\n major_version=3,\n minor_version=2\n )\n self._window = pyglet.window.Window(config=conf, visible=False, resizable=False, width=1, height=1)\n except:\n raise ValueError('Meshrender requires OpenGL 3+!')\n\n # Bind the frame buffer for offscreen rendering\n self._bind_frame_buffer()\n\n # Use the depth test functionality of OpenGL. Don't clip -- many normals may be backwards.\n glEnable(GL_DEPTH_TEST)\n glDepthMask(GL_TRUE)\n glDepthFunc(GL_LESS)\n glDepthRange(0.0, 1.0)\n\n # Load the meshes into VAO's\n self._vaids = self._load_meshes()\n\n # Load the shaders\n # Fix for pyopengl -- bind a framebuffer\n glBindVertexArray(self._vaids[0])\n self._full_shader = self._load_shaders(vertex_shader, fragment_shader)\n self._depth_shader = self._load_shaders(depth_vertex_shader, depth_fragment_shader)\n glBindVertexArray(0)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Creates the texture in which the DOF factor is stored, so we don't recompute it each pass
|
def _createDofStorage(self):
    # Two 16-bit float channels (FRg16) are enough to cache the blur factor.
    self.dofStorage = Texture("DOFStorage")
    self.dofStorage.setup2dTexture(
        self.size.x, self.size.y,
        Texture.TFloat, Texture.FRg16)
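
As a hedged illustration of the kind of value typically cached in such a texture, here is one common thin-lens circle-of-confusion formulation; the parameter names are assumptions for the sketch, not the pipeline's API:

def circle_of_confusion(depth, focus_dist, focal_len, aperture):
    # Thin-lens CoC diameter; the sign separates near-field from
    # far-field blur, which is one reason a float format is used.
    return aperture * focal_len * (depth - focus_dist) / (
        depth * (focus_dist - focal_len))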
|
[
"def create_3d_texture(perlin_noise, scale, image):\n\tstart = time.time()\n\t\n\twidth = len(perlin_noise)\n\tcoords = range(width)\n\ttexel = (ctypes.c_byte * width**3)()\n\thalf = 0 #width * scale / 2.0 \n\t\n\tfor z in coords:\n\t\tfor y in coords:\n\t\t\tfor x in coords:\n\t\t\t\tv = (perlin_noise[x][y][z])# + 1.0)/2.0\n\n\t\t\t\t# v = v #* math.sqrt(imagenumber) # + imagenumber**6\n\t\t\t\t# texel[x + (y * width) + (z * width**2)] = int(v * 127.0)\n\t\t\t\timagenumber = int(image[y][x])/255.0\n\t\t\t\t# if imagenumber > .7:\n\t\t\t\t# \tprint imagenumber\n\t\t\t\tv = v * math.sqrt(imagenumber) + imagenumber**6\n\t\t\t\tif v < .9:\n\t\t\t\t\tv = v + .1\n\t\t\t\telif v > 1:\n\t\t\t\t\tv = 1\n\t\t\t\tif v > .4:\n\t\t\t\t\ttexel[x + (y * width) + (z * width**2)] = 127\n\t\t\t\telse: \n\t\t\t\t\ttexel[x + (y * width) + (z * width**2)] = 0\n\t\t\n\tglPixelStorei(GL_UNPACK_ALIGNMENT, 1)\n\tglTexImage3D(GL_TEXTURE_3D, 0, GL_LUMINANCE, width, width, width, 0, \n\t\tGL_LUMINANCE, GL_BYTE, ctypes.byref(texel))\n\tend = time.time()\n\tprint end - start\n\treturn texel",
"def _add_texture_coefficients(crystal_sym, sample_sym, name, degree):\n pass",
"def make_cube_1(texture, texture_index): \n glBindTexture(GL_TEXTURE_2D,texture[texture_index])\t \n # Front Face (Each texture's corner is matched a quad's corner.) \n glBegin(GL_QUADS)\t \n\tglTexCoord2f(0.0, 0.0); glVertex3f(-1.0, -1.0, 1.0)\t# Bottom Left Of The Texture and Quad \n\tglTexCoord2f(1.0, 0.0); glVertex3f( 1.0, -1.0, 1.0)\t# Bottom Right Of The Texture and Quad \n\tglTexCoord2f(1.0, 1.0); glVertex3f( 1.0, 1.0, 1.0)\t# Top Right Of The Texture and Quad \n\tglTexCoord2f(0.0, 1.0); glVertex3f(-1.0, 1.0, 1.0)\t# Top Left Of The Texture and Quad\t \n\tglEnd();",
"def __init__(self):\n\n # Dimensions of the texture array.\n self.__width = 1024\n self.__height = 1024\n self.__depth = 20\n self.__scratch_depth = 2\n\n # Allocate the texture array.\n # NOTE: If this goes wrong, we're probably trying to do this before\n # the opengl context has been created, and things will go horribly\n # wrong later! For some reason glGetError() is returning 0 anyway.\n self.__texture = GL.glGenTextures(1)\n\n # Ok, initialise the texture.\n GL.glBindTexture(GL.GL_TEXTURE_2D_ARRAY, self.__texture)\n GL.glTexParameteri(GL.GL_TEXTURE_2D_ARRAY, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR)\n GL.glTexParameteri(GL.GL_TEXTURE_2D_ARRAY, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR)\n GL.glTexParameteri(GL.GL_TEXTURE_2D_ARRAY, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP_TO_EDGE)\n GL.glTexParameteri(GL.GL_TEXTURE_2D_ARRAY, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP_TO_EDGE)\n GL.glTexImage3D(\n GL.GL_TEXTURE_2D_ARRAY,\n 0, #level\n GL.GL_RGBA8, # internal format\n self.__width,\n self.__height,\n self.__depth + self.__scratch_depth,\n 0, #border\n GL.GL_RGBA, # format\n GL.GL_UNSIGNED_BYTE, # data type\n None # The data.\n )\n\n # We insert images one at a time, and keep track of the current\n # insertion point. When we reach the end of the row, the next\n # row starts at a y coordinate flush with the bottom of the tallest\n # item in the current row. Note that this will end up with lots of\n # wasted space, we don't do any work to optimise the packing!\n self.__cursor = TextureArray.Cursor()\n self.__cursor.end = self.__depth\n\n # Initialise the scratch cursor.\n self.__scratch_cursor = TextureArray.Cursor()\n self.__scratch_cursor.index = self.__depth\n self.__scratch_cursor.end = self.__depth + self.__scratch_depth\n\n # Map from filenames to virtual textures.\n self.__filename_map = {}",
"def generate_image(self) -> None:",
"def textureWindow(docTag=\"string\", displayPreselection=bool, useTemplate=\"string\", toggleGamma=bool, displayStyle=\"string\", singleBuffer=bool, panel=\"string\", setUvSet=int, loadImage=\"string\", imagePixelSnap=bool, imageBaseColor=float, forceMainConnection=\"string\", scaleBlue=float, reset=bool, displayAxes=bool, drawAxis=bool, checkerDensity=int, scaleGreen=float, defineTemplate=\"string\", parent=\"string\", tileLabels=bool, useFaceGroup=bool, doubleBuffer=bool, displaySolidMap=bool, internalFaces=bool, divisions=int, writeImage=\"string\", displayCheckered=bool, exposure=float, stateString=bool, control=bool, toggleExposure=bool, imageNumber=int, imageDisplay=bool, selectionConnection=\"string\", removeImage=bool, numberOfImages=int, viewTransformName=\"string\", uvSets=bool, viewPortImage=bool, selectRelatedFaces=bool, lockMainConnection=bool, nbImages=bool, spacing=float, toggle=bool, mainListConnection=\"string\", displayDivisionLines=bool, imageSize=bool, updateMainConnection=bool, style=int, frontFacingColor=float, capture=\"string\", backFacingColor=float, unlockMainConnection=bool, displayLabels=bool, displayImage=int, relatedFaces=bool, removeAllImages=bool, numUvSets=bool, imageTileRange=float, selectInternalFaces=bool, maxResolution=int, refresh=bool, unParent=bool, changeCommand=\"string\", imageRatio=bool, forceRebake=bool, distortionPerObject=bool, drawSubregions=bool, highlightConnection=\"string\", displayGridLines=bool, exists=bool, frameAll=bool, displayDistortion=bool, captureSequenceNumber=int, labelPosition=\"string\", gamma=float, frameSelected=bool, realSize=bool, rendererString=\"string\", size=float, saveImage=bool, imageUnfiltered=bool, clearImage=bool, imageNames=bool, scaleRed=float, filter=\"string\", cmEnabled=bool):\n pass",
"def createTextureView(self, QOpenGLTexture_Target, QOpenGLTexture_TextureFormat, p_int, p_int_1, p_int_2, p_int_3): # real signature unknown; restored from __doc__\n return QOpenGLTexture",
"def image_process():\n\n texture = Image.new('RGBA', import_coords(4, 4), (0, 0, 0, 0))\n imgdir = sorted(os.listdir('textures'), key=get_block_id)\n files = len(imgdir)\n x = 0\n y = 0\n while x <= 4:\n while y <= 4:\n for fn in imgdir:\n fnpath = imgpath(fn)\n files -= 1\n if files < 0:\n break\n fnimg = flip_image(Image.open(fnpath))\n texture.paste(fnimg, import_coords(x, y))\n print('Pasted texture ' + fn + \" into textures with coords \" + str(x) + \", \" + str(y))\n x += 1\n if x == 4:\n y += 1\n x = 0\n if files < 0:\n break\n if files < 0:\n break\n texture = texture.transpose(Image.FLIP_TOP_BOTTOM)\n\n # Save texture internally\n\n try:\n texture.save(basepath('_texture.png'))\n except IOError:\n print(\"Couldn't save temponary texture file. Check write-access?\")\n else:\n print(\"Saved temponary texture file from memory, checking md5 checksum...\")\n\n # Compute hash texture in memory (that we created above)\n\n try:\n hash = md5_file(basepath('_texture.png'))\n except:\n print(\"Couldn't hash texture. md5 not installed?\")\n else:\n print(\"Succesfully hashed texture in memory. Checksum is: \" + hash)\n\n # Compute hash for old texture.png, if it exists\n\n try:\n newhash = md5_file('texture.png')\n except IOError:\n print(\"Couldn't open texture.png, check if it is properly saved, or maybe it doesn't exist now?\")\n newhash = 0\n else:\n print(\"Checksum for texture.png is: \" + newhash)\n\n # Saving texture.png from memory\n if hash != newhash:\n try:\n texture.save(TEXTURE_PATH)\n except:\n print('Failed to create texture.png! Maybe check if write-access has given?')\n raise IOError(\"Failed to create texture map.\")\n else:\n print(\"Successfully created texture.png, maybe it didn't exist or corrupted\")\n else:\n print(\"All okay, cached textures will do the job, no need to resave.\")",
"def create_torus(Nx, Ny, R=1., r=0.4):\n\n import numpy as np\n\n # Discretizing parameters space\n x = np.linspace(0, 2*np.pi, Nx+1)[:-1]\n y = np.linspace(0, 2*np.pi, Ny+1)[:-1]\n x2d, y2d = np.meshgrid(x, y)\n\n # Mesh size\n num_vertices = Nx * Ny\n num_triangles = 2 * num_vertices\n\n # Assembling texture coordinates\n tcoords = np.hstack((x2d.reshape(-1, 1), y2d.reshape(-1, 1)))\n\n # Assembling vertices coordinates\n vertices = np.empty((num_vertices, 3))\n vertices[:, 0] = (R + r * np.cos(tcoords[:, 1])) * np.cos(tcoords[:, 0])\n vertices[:, 1] = (R + r * np.cos(tcoords[:, 1])) * np.sin(tcoords[:, 0])\n vertices[:, 2] = r * np.sin(tcoords[:, 1])\n\n # Assembling normals\n normals = np.empty((num_vertices, 3))\n normals[:, 0] = np.cos(tcoords[:, 1]) * np.cos(tcoords[:, 0])\n normals[:, 1] = np.cos(tcoords[:, 1]) * np.sin(tcoords[:, 0])\n normals[:, 2] = np.sin(tcoords[:, 1])\n\n # Assembling triangles\n triangles = np.empty((num_triangles, 3), dtype=np.int64)\n\n # Low part\n triangle_archetype = np.array([0, 1, Nx], dtype=np.int64)\n triangles[:num_vertices, :] = triangle_archetype + np.arange(num_vertices).reshape(-1, 1)\n triangles[Nx - 1:num_vertices:Nx, 1] -= Nx\n triangles[num_vertices - Nx:num_vertices, 2] -= num_vertices\n\n # High part\n triangle_archetype = np.array([1, Nx + 1, Nx], dtype=np.int64)\n triangles[-num_vertices:, :] = triangle_archetype + np.arange(num_vertices).reshape(-1, 1)\n triangles[-num_vertices + Nx - 1::Nx, :2] -= Nx\n triangles[-Nx:, 1:] -= num_vertices\n\n return vertices, triangles, normals, tcoords / (2 * np.pi)",
"def gen_powphase2d_old(si, phiF, rF, inner, outer, dx, dy, xW, yW, normfres=True, debug=True):\r\n # specified diffraction and refraction scales\r\n ld = rF / phiF \r\n lr = rF * phiF \r\n\r\n nx = int(xW/dx)\r\n ny = nx\r\n if debug: print 'targeted number of x,y samples = ', nx,ny\n \n #print \"nx\", nx\n #print \"ny\", ny\n \r\n xvec = (arange(0.,nx)-nx/2+1)*dx\r\n yvec = (arange(0.,ny)-ny/2+1)*dy\r\n\r\n dqx = 2.*pi / xW \r\n dqy = 2.*pi / yW\r\n qmaxx = (2.*pi) / (2.*dx)\r\n qmaxy = (2.*pi) / (2.*dy)\r\n\r\n nqx = 2*int(qmaxx/dqx)\r\n nqy = 2*int(qmaxy/dqy)\r\n if debug: print 'targeted number of q samples = ', nqx, nqy \r\n if nqx != nx: \r\n print \"Forcing nqx = nx = \", nx\r\n nqx = nx\r\n if nqy != ny: \r\n print \"Forcing nqy = ny = \", ny\r\n nqy = ny\r\n qxvec = (arange(0.,nqx)-nqx/2+1)*dqx\r\n qxvec = roll(qxvec,nqx/2+1)\r\n qyvec = (arange(0.,nqy)-nqy/2+1)*dqy\r\n qyvec = roll(qyvec,nqy/2+1)\r\n\r\n qin = 2.*pi / inner\r\n qout = 2.*pi / outer\r\n qshape = zeros((nqx, nqy))\r\n \r\n for i, qxi in enumerate(qxvec):\r\n for j, qyj in enumerate(qyvec):\r\n qsq = qxi**2 + qyj**2\r\n qshape[i,j] = (qout**2 + qsq)**(-si/4.) \r\n #qshape[i,j] = (qout**2 + qsq)**(-si/4.) * exp(-(qsq/(2.*qin**2))) \r\n npoints = size(qshape)\r\n\r\n if debug:\r\n print si, inner, outer, dx, npoints\r\n print dqx, dqy, qin, qout\r\n\r\n xformr=randn(nqx, nqy)*qshape\r\n xformi=randn(nqx, nqy)*qshape\r\n xform = xformr + 1j*xformi\r\n spectrum=real(xform*conj(xform))\r\n xseries = real(ifft2(xform))\r\n\r\n if normfres:\r\n frindx = int(rF/dx)\r\n x1dcut = xseries[0,:]\r\n var_fres_in = var(x1dcut[0:size(x1dcut)-frindx]-x1dcut[frindx:])\r\n xseries_norm = xseries * rF / sqrt(var_fres_in) \r\n xn1dcut = xseries_norm[0,:]\r\n var_fres_out = var(xn1dcut[0:size(xn1dcut)-frindx]-xn1dcut[frindx:])\r\n #var_fres_out = var(xseries_norm[0:size(xseries_norm)-frindx]-xseries_norm[frindx:])\r\n print \"index of fresnel scale = \", frindx\r\n print var_fres_in, var_fres_out\r\n\r\n return xvec, yvec, xseries, xseries_norm, qxvec, qyvec, qshape",
"def write_3d_lut(self, process_function, file_path, preset):\n pass",
"def preDraw(self):\n self.imageTexture.bindTexture(gl.GL_TEXTURE0)",
"def initialize_fragment(self):\n # if self.ndim == 1:\n # shader_pointcoord = \".x\"\n # else:\n # shader_pointcoord = \"\"\n shader_pointcoord = \"\"\n fragment = \"\"\"\n out_color = texture%dD(tex_sampler, varying_tex_coords%s);\n \"\"\" % (self.ndim, shader_pointcoord)\n # print fragment\n self.add_fragment_main(fragment)",
"def _generate_template(object_name):\n object_color, object_type = object_name.split()\n template = np.zeros((UPSAMPLE_SIZE, UPSAMPLE_SIZE))\n half = UPSAMPLE_SIZE // 2\n if object_type == \"triangle\":\n for i in range(UPSAMPLE_SIZE):\n for j in range(UPSAMPLE_SIZE):\n if (j <= half and i >= 2 * (half - j)) or (j > half and i >= 2 *\n (j - half)):\n template[i, j] = 1.\n elif object_type == \"square\":\n template[:, :] = 1.\n elif object_type == \"empty_square\":\n template[:2, :] = 1.\n template[-2:, :] = 1.\n template[:, :2] = 1.\n template[:, -2:] = 1.\n elif object_type == \"plus\":\n template[:, half - 1:half + 2] = 1.\n template[half - 1:half + 2, :] = 1.\n elif object_type == \"inverse_plus\":\n template[:, :] = 1.\n template[:, half - 1:half + 2] = 0.\n template[half - 1:half + 2, :] = 0.\n elif object_type == \"ex\":\n for i in range(UPSAMPLE_SIZE):\n for j in range(UPSAMPLE_SIZE):\n if abs(i - j) <= 1 or abs(UPSAMPLE_SIZE - 1 - j - i) <= 1:\n template[i, j] = 1.\n elif object_type == \"inverse_ex\":\n for i in range(UPSAMPLE_SIZE):\n for j in range(UPSAMPLE_SIZE):\n if not (abs(i - j) <= 1 or abs(UPSAMPLE_SIZE - 1 - j - i) <= 1):\n template[i, j] = 1.\n elif object_type == \"circle\":\n for i in range(UPSAMPLE_SIZE):\n for j in range(UPSAMPLE_SIZE):\n if (i - half)**2 + (j - half)**2 <= half**2:\n template[i, j] = 1.\n elif object_type == \"empty_circle\":\n for i in range(UPSAMPLE_SIZE):\n for j in range(UPSAMPLE_SIZE):\n if abs((i - half)**2 + (j - half)**2 - half**2) < 6:\n template[i, j] = 1.\n elif object_type == \"tee\":\n template[:, half - 1:half + 2] = 1.\n template[:3, :] = 1.\n elif object_type == \"upside_down_tee\":\n template[:, half - 1:half + 2] = 1.\n template[-3:, :] = 1.\n elif object_type == \"h\":\n template[:, :3] = 1.\n template[:, -3:] = 1.\n template[half - 1:half + 2, :] = 1.\n elif object_type == \"u\":\n template[:, :3] = 1.\n template[:, -3:] = 1.\n template[-3:, :] = 1.\n elif object_type == \"upside_down_u\":\n template[:, :3] = 1.\n template[:, -3:] = 1.\n template[:3, :] = 1.\n elif object_type == \"vertical_stripes\":\n for j in range(half + UPSAMPLE_SIZE % 2):\n template[:, 2*j] = 1.\n elif object_type == \"horizontal_stripes\":\n for i in range(half + UPSAMPLE_SIZE % 2):\n template[2*i, :] = 1.\n else:\n raise ValueError(\"Unknown object: {}\".format(object_type))\n\n if object_color not in COLORS:\n raise ValueError(\"Unknown color: {}\".format(object_color))\n\n template = np.tensordot(template, COLORS[object_color], axes=0)\n\n return template",
"def initializeGL(self):\n self.ctx = ModernGL.create_context()\n self.resizeGL(self.width(), self.height())\n\n self.volume_texture = self.ctx.texture3d(self.volume_size, 1, self.volume_data.tobytes(), alignment=4, floats=True)\n self.volume_texture.repeat_x = True\n self.volume_texture.repeat_y = True\n # @Todo: ModernGL this raises an error - probably missing wrapper\n #self.volume_texture.repeat_z = True\n self.volume_texture.filter = ModernGL.LINEAR\n\n tff_data = self.get_tff_data()\n self.tff_texture = self.ctx.texture((len(tff_data),1), 4, tff_data.tobytes(), alignment=4, floats=4)\n self.tff_texture.repeat_x = True\n self.tff_texture.repeat_y = True\n self.tff_texture.filter = ModernGL.NEAREST\n\n\n self.unf_screensize = None\n self.unf_stepsize = None\n self.unf_transferfunc = None\n\n self.color_texture = None\n self.depth_texture = None\n\n self.volume_texture.use(0)\n self.tff_texture.use(1)\n\n # These are the vertices that make up our cube bounding volume. Every row specifies\n # one corner of our unit cube\n self.vbo_vertex = self.ctx.buffer(struct.pack(\n '24f',\n 0.0, 0.0, 0.0,\n 0.0, 0.0, self._z,\n 0.0, self._y, 0.0,\n 0.0, self._y, self._z,\n self._x, 0.0, 0.0,\n self._x, 0.0, self._z,\n self._x, self._y, 0.0,\n self._x, self._y, self._z\n ))\n\n # This is the index buffer for our bounding geometry. Every row specifies a triangle\n # by three indices of our vbo_index vertex buffer\n self.vbo_veridx = self.ctx.buffer(struct.pack(\n '36I',\n 1,5,7,\n 7,3,1,\n 0,2,6,\n 6,4,0,\n 0,1,3,\n 3,2,0,\n 7,5,4,\n 4,6,7,\n 2,3,7,\n 7,6,2,\n 1,0,4,\n 4,5,1\n ))\n\n self.reload_shaders()",
"def generate_pastiche(content_image):\n return Variable(content_image.data.clone(), requires_grad = True)",
"def createSquareDL(txid,wdth,hght,coords):\n lsid = glGenLists(1)\n glNewList(lsid,GL_COMPILE)\n glBindTexture(GL_TEXTURE_2D, txid)\n (l,r,b,t) = coords\n \n glBegin(GL_QUADS)\n glTexCoord2f(0, 0); glVertex2f(l*wdth, b*hght)\n glTexCoord2f(0, 1); glVertex2f(l*wdth, t*hght)\n glTexCoord2f(1, 1); glVertex2f(r*wdth, t*hght)\n glTexCoord2f(1, 0); glVertex2f(r*wdth, b*hght)\n glEnd()\n glFinish()\n\n glEndList()\n\n return lsid",
"def create_test_image(self):\n return np.tile(np.arange(0, 255).repeat(2), (100, 1)).astype(np.float32) / 255",
"def world_texture(hdr_name):\r\n world=bpy.data.worlds['World']\r\n world.use_nodes = True\r\n links = world.node_tree.links\r\n nodes = world.node_tree.nodes\r\n for l in links:\r\n links.remove(l)\r\n for n in nodes:\r\n nodes.remove(n)\r\n world_output = nodes.new(type='ShaderNodeOutputWorld')\r\n background_node = nodes.new(type='ShaderNodeBackground')\r\n if hdr_name[-3:] == 'exr':\r\n background_node.inputs[1].default_value = 100\r\n env_node = nodes.new(type='ShaderNodeTexEnvironment')\r\n env_node.image = bpy.data.images.load(hdr_name)\r\n mapping_node = nodes.new(type='ShaderNodeMapping')\r\n mapping_node.inputs[2].default_value[1] = random.uniform(0, 3.14)\r\n cor_node = nodes.new(type='ShaderNodeTexCoord')\r\n links.new(cor_node.outputs['Generated'],mapping_node.inputs['Vector'])\r\n links.new(mapping_node.outputs['Vector'],env_node.inputs['Vector'])\r\n links.new(env_node.outputs['Color'],background_node.inputs['Color'])\r\n links.new(background_node.outputs['Background'],world_output.inputs['Surface'])\r\n return"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets the shaders which blur the occlusion
|
def _setOcclusionBlurShader(self):
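    # Two-pass separable blur: the occlusion term is blurred vertically,
    # then horizontally; both passes reuse the default post-process vertex shader.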
blurVShader = Shader.load(Shader.SLGLSL,
"DefaultPostProcess.vertex",
"BlurOcclusionVertical.fragment")
blurHShader = Shader.load(Shader.SLGLSL,
"DefaultPostProcess.vertex",
"BlurOcclusionHorizontal.fragment")
self.blurOcclusionV.setShader(blurVShader)
self.blurOcclusionH.setShader(blurHShader)
|
[
"def _setBlurShader(self):\n blurVShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurVertical.fragment\")\n blurHShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurHorizontal.fragment\")\n self.blurColorV.setShader(blurVShader)\n self.blurColorH.setShader(blurHShader)",
"def _createOcclusionBlurBuffer(self):\n self.blurOcclusionV = RenderTarget(\"blurOcclusionVertical\")\n self.blurOcclusionV.addColorTexture()\n self.blurOcclusionV.prepareOffscreenBuffer()\n\n self.blurOcclusionH = RenderTarget(\"blurOcclusionHorizontal\")\n self.blurOcclusionH.addColorTexture()\n self.blurOcclusionH.prepareOffscreenBuffer()\n\n # Mipmaps for blur?\n # self.blurOcclusionV.getColorTexture().setMinfilter(\n # Texture.FTLinearMipmapLinear)\n # self.combiner.getColorTexture().setMinfilter(\n # Texture.FTLinearMipmapLinear)",
"def bake_shaders(self):\n\n selected_shaders = cmds.ls(sl=True)\n del self.shaders_to_apply[:]\n for shdr in selected_shaders:\n self.shaders_to_apply.append(shdr)\n print self.shaders_to_apply",
"def _setShaderInputs(self):\n\n # Shader inputs for the light-culling pass\n if self.haveLightingPass:\n self.lightBoundsComputeBuff.setShaderInput(\n \"destination\", self.lightPerTileStorage)\n self.lightBoundsComputeBuff.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n # Shader inputs for the light-applying pass\n self.lightingComputeContainer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.lightingComputeContainer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n self.lightingComputeContainer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n\n\n self.lightingComputeContainer.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightingComputeContainer.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightingComputeContainer.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n if self.occlusion.requiresViewSpacePosNrm():\n self.lightingComputeContainer.setShaderInput(\n \"viewSpaceNormals\",\n self.normalPrecompute.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"viewSpacePosition\",\n self.normalPrecompute.getAuxTexture(0))\n\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlas\", self.lightManager.getAtlasTex())\n\n if self.settings.useHardwarePCF:\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlasPCF\", self.lightManager.getAtlasTex(), self.lightManager.getPCFSampleState())\n\n self.lightingComputeContainer.setShaderInput(\n \"destination\", self.lightingComputeCombinedTex)\n self.lightingComputeContainer.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.lightingComputeContainer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n self.lightingComputeContainer.setShaderInput(\n \"noiseTexture\",\n self.showbase.loader.loadTexture(\"Data/Occlusion/noise4x4.png\"))\n self.lightingComputeContainer.setShaderInput(\n \"lightsPerTile\", self.lightPerTileStorage)\n\n\n if self.settings.enableGlobalIllumination:\n self.lightingComputeContainer.setShaderInput(\"giDiffuseTex\", self.giPrecomputeBuffer.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\"giReflectionTex\", self.giPrecomputeBuffer.getAuxTexture(0))\n\n\n # Shader inputs for the occlusion blur passes\n if self.occlusion.requiresBlurring() and self.haveCombiner:\n self.blurOcclusionH.setShaderInput(\n \"colorTex\", self.blurOcclusionV.getColorTexture())\n\n if self.settings.enableTemporalReprojection:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\", self.combiner.getColorTexture())\n else:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\",\n self.lightingComputeContainer.getColorTexture())\n\n self.blurOcclusionH.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionH.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n\n # Shader inputs for the blur passes\n if self.blurEnabled:\n self.blurColorH.setShaderInput(\n \"dofStorage\", self.dofStorage)\n self.blurColorV.setShaderInput(\n \"dofStorage\", 
self.dofStorage)\n self.blurColorH.setShaderInput(\"colorTex\",\n self.antialias.getResultTexture())\n self.blurColorH.setShaderInput(\"depthTex\",\n self.deferredTarget.getDepthTexture())\n self.blurColorV.setShaderInput(\"colorTex\",\n self.blurColorH.getColorTexture())\n\n # Shader inputs for the temporal reprojection\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.combiner.setShaderInput(\n \"currentComputation\",\n self.lightingComputeContainer.getColorTexture())\n self.combiner.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n self.combiner.setShaderInput(\n \"positionBuffer\", self.deferredTarget.getColorTexture())\n self.combiner.setShaderInput(\n \"velocityBuffer\", self.deferredTarget.getAuxTexture(1))\n self.combiner.setShaderInput(\"currentPixelShift\",\n self.currentPixelShift)\n self.combiner.setShaderInput(\"lastPixelShift\",\n self.lastPixelShift)\n\n if self.blurEnabled:\n self.combiner.setShaderInput(\n \"dofStorage\", self.dofStorage)\n\n self.combiner.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.combiner.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n self.combiner.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.combiner.setShaderInput(\"lastMVP\", self.lastMVP)\n self.combiner.setShaderInput(\"cameraPosition\", self.cameraPosition)\n self.combiner.setShaderInput(\"currentMVP\", self.lastMVP)\n\n # Shader inputs for the final pass\n if self.blurEnabled:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.blurColorV.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.antialias.getResultTexture())\n\n if self.occlusion.requiresBlurring():\n self.normalPrecompute.setShaderInput(\n \"positionTex\", self.deferredTarget.getColorTexture())\n self.normalPrecompute.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.normalPrecompute.setShaderInput(\n \"mainRender\", self.showbase.render)\n self.normalPrecompute.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n\n if self.haveMRT:\n self.deferredTarget.setShaderInput(\n \"velocityTex\", self.deferredTarget.getAuxTexture(1))\n\n self.deferredTarget.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.deferredTarget.setShaderInput(\n \"motionBlurFactor\", self.motionBlurFactor)\n\n if self.haveLightingPass:\n self.deferredTarget.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.deferredTarget.setShaderInput(\n \"newFrame\", self.combiner.getColorTexture())\n self.deferredTarget.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.combiner.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.antialias.getResultTexture())\n\n self.deferredTarget.setShaderInput(\n \"currentPosition\", self.deferredTarget.getColorTexture())\n\n # Set last / current mvp handles\n self.showbase.render.setShaderInput(\"lastMVP\", self.lastMVP)\n\n # Set GI inputs\n if self.settings.enableGlobalIllumination:\n self.globalIllum.bindTo(self.giPrecomputeBuffer, \"giData\")\n\n self.giPrecomputeBuffer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.giPrecomputeBuffer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.giPrecomputeBuffer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n 
self.giPrecomputeBuffer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n self.giPrecomputeBuffer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n # Finally, set shaders\n self.reloadShaders()",
"def apply_shader(hz, act):\n\n frag_decl = \\\n \"\"\"\n uniform float selected;\n uniform float opacity_level;\n \"\"\"\n\n frag_impl = \\\n \"\"\"\n if (selected == 1){\n fragOutput0 = fragOutput0 + vec4(0.2, 0.2, 0, opacity_level);\n }\n \"\"\"\n\n shaders.shader_to_actor(act, \"vertex\", impl_code=\"\\n\",\n replace_first=False,\n replace_all=False)\n shaders.shader_to_actor(act, \"fragment\", decl_code=frag_decl,\n block=\"coincident\")\n shaders.shader_to_actor(act, \"fragment\", impl_code=frag_impl,\n block=\"light\")\n\n def shader_selected_callback(caller, event, calldata=None):\n program = calldata\n if program is not None:\n try:\n program.SetUniformf(\"selected\",\n hz.cea[act]['selected'])\n except KeyError:\n pass\n try:\n program.SetUniformf(\"selected\",\n hz.cla[act]['selected'])\n except KeyError:\n pass\n program.SetUniformf(\"opacity_level\", 1)\n\n shaders.add_shader_callback(act, shader_selected_callback, priority=100)",
"def SetFilter(self, *args):\n return _Graphic3d.Graphic3d_TextureParams_SetFilter(self, *args)",
"def _setLightingShader(self):\n lightShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"ApplyLighting.fragment\")\n self.lightingComputeContainer.setShader(lightShader)",
"def load_shaders(self):\n context = self.context\n self.prog = load_shaders(context, 'tiny_gl_engine/primitives/shaders/cube_vertex.glsl',\n 'tiny_gl_engine/primitives/shaders/cube_fragment.glsl')",
"def DetachShader(self, *args):\n return _Graphic3d.Graphic3d_ShaderProgram_DetachShader(self, *args)",
"def AttachShader(self, *args):\n return _Graphic3d.Graphic3d_ShaderProgram_AttachShader(self, *args)",
"def _setGIComputeShader(self):\n giShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"ComputeGI.fragment\")\n self.giPrecomputeBuffer.setShader(giShader)",
"def Filter(self, *args):\n return _Graphic3d.Graphic3d_TextureParams_Filter(self, *args)",
"def __init__(self, *shaders):\n self.shaders = list(shaders)\n self.programId = None",
"def setUniformBindings(self, wireframe=False):\n normalMatrix = self._transform.normalMatrix()\n self._active_shader.setUniformValue(\"modelMatrix\", self._transform)\n self._active_shader.setUniformValue(\"viewMatrix\", self._scene.camera.viewMatrix)\n self._active_shader.setUniformValue(\"projectionMatrix\", self._scene.camera.projectionMatrix)\n self._active_shader.setUniformValue(\"normalMatrix\", normalMatrix)\n if self.texture() is not None:\n self._active_shader.setUniformValue(\"texObject\", 0)\n \n ## bind active material\n if self.isSelectable() and self.isSelected():\n self._active_shader.setUniformValue(\"selected\", 1.0)\n else:\n self._active_shader.setUniformValue(\"selected\", 0.65)\n\n ## set highlight color\n if self.isHighlighted():\n self._active_shader.setUniformValue(\"material.emission\", QVector3D(0.25, 0.25, 0.25))\n else:\n self._active_shader.setUniformValue(\"material.emission\", self._active_material.emissionColor)\n self._active_shader.setUniformValue(\"material.ambient\", self._active_material.ambientColor)\n \n ## set the enabled color\n if self.isEnabled():\n self._active_shader.setUniformValue(\"material.emission\", QVector3D(0.25, 0.25, 0.25))\n self._active_shader.setUniformValue(\"material.diffuse\", self._active_material.diffuseColor)\n else:\n self._active_shader.setUniformValue(\"material.diffuse\", self._active_material.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._active_material.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._active_material.shininess)\n \n ## set the error and warning colors\n if self._errorHighlight:\n self._active_shader.setUniformValue(\"material.ambient\", self._errorMaterial.ambientColor)\n self._active_shader.setUniformValue(\"material.diffuse\", self._errorMaterial.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._errorMaterial.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._errorMaterial.shininess)\n if self._warningHighlight:\n self._active_shader.setUniformValue(\"material.ambient\", self._warningMaterial.ambientColor)\n self._active_shader.setUniformValue(\"material.diffuse\", self._warningMaterial.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._warningMaterial.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._warningMaterial.shininess) \n \n ## bind lights\n camera_position = QVector4D(self._scene.camera.position[0], self._scene.camera.position[1], self._scene.camera.position[2], 1.0)\n if self._scene.light.headlight:\n if self._scene.light.directional:\n self._active_shader.setUniformValue(\"lightPosition\", QVector4D(0.0, 0.0, 1.0, 0.0))\n else:\n self._active_shader.setUniformValue(\"lightPosition\", QVector4D(0.0, 0.0, 0.0, 1.0))\n else:\n self._active_shader.setUniformValue(\"lightPosition\", self._scene.camera.viewMatrix * self._scene.light.position)\n\n self._active_shader.setUniformValue(\"light.ambient\", self._scene.light.ambientColor)\n self._active_shader.setUniformValue(\"light.diffuse\", self._scene.light.diffuseColor)\n self._active_shader.setUniformValue(\"light.specular\", self._scene.light.specularColor)\n self._active_shader.setUniformValue(\"lightAttenuation\", self._scene.light.attenuation)",
"def _generateShaderConfiguration(self):\n\n self.debug(\"(Re)Generating shader configuration\")\n\n # Generate list of defines\n defines = []\n\n if self.settings.antialiasingTechnique == \"SMAA\":\n quality = self.settings.smaaQuality.upper()\n if quality in [\"LOW\", \"MEDIUM\", \"HIGH\", \"ULTRA\"]:\n defines.append((\"SMAA_PRESET_\" + quality, \"\"))\n else:\n self.error(\"Unrecognized SMAA quality:\", quality)\n return\n\n defines.append(\n (\"LIGHTING_COMPUTE_PATCH_SIZE_X\", self.settings.computePatchSizeX))\n defines.append(\n (\"LIGHTING_COMPUTE_PATCH_SIZE_Y\", self.settings.computePatchSizeY))\n defines.append(\n (\"LIGHTING_MIN_MAX_DEPTH_ACCURACY\", self.settings.minMaxDepthAccuracy))\n\n if self.blurEnabled:\n defines.append((\"USE_DOF\", 1))\n\n if self.settings.useSimpleLighting:\n defines.append((\"USE_SIMPLE_LIGHTING\", 1))\n\n if self.settings.anyLightBoundCheck:\n defines.append((\"LIGHTING_ANY_BOUND_CHECK\", 1))\n\n if self.settings.accurateLightBoundCheck:\n defines.append((\"LIGHTING_ACCURATE_BOUND_CHECK\", 1))\n\n if self.settings.renderShadows:\n defines.append((\"USE_SHADOWS\", 1))\n\n defines.append((\"AMBIENT_CUBEMAP_SAMPLES\", self.settings.ambientCubemapSamples))\n\n defines.append(\n (\"SHADOW_MAP_ATLAS_SIZE\", self.settings.shadowAtlasSize))\n defines.append(\n (\"SHADOW_MAX_UPDATES_PER_FRAME\", self.settings.maxShadowUpdatesPerFrame))\n defines.append(\n (\"SHADOW_GEOMETRY_MAX_VERTICES\", self.settings.maxShadowUpdatesPerFrame * 3))\n\n\n defines.append((\"SHADOW_NUM_PCF_SAMPLES\", self.settings.numPCFSamples))\n defines.append((\"SHADOW_NUM_PCSS_SEARCH_SAMPLES\", self.settings.numPCSSSearchSamples))\n defines.append((\"SHADOW_NUM_PCSS_FILTER_SAMPLES\", self.settings.numPCSSFilterSamples))\n\n defines.append((\"SHADOW_PSSM_BORDER_PERCENTAGE\", self.settings.shadowCascadeBorderPercentage))\n\n if self.settings.useHardwarePCF:\n defines.append((\"USE_HARDWARE_PCF\", 1))\n\n defines.append((\"WINDOW_WIDTH\", self.size.x))\n defines.append((\"WINDOW_HEIGHT\", self.size.y))\n\n if self.settings.motionBlurEnabled:\n defines.append((\"USE_MOTION_BLUR\", 1))\n\n defines.append(\n (\"MOTION_BLUR_SAMPLES\", self.settings.motionBlurSamples))\n\n # Occlusion\n defines.append(\n (\"OCCLUSION_TECHNIQUE_\" + self.occlusion.getIncludeName(), 1))\n defines.append(\n (\"OCCLUSION_RADIUS\", self.settings.occlusionRadius))\n defines.append(\n (\"OCCLUSION_STRENGTH\", self.settings.occlusionStrength))\n defines.append(\n (\"OCCLUSION_SAMPLES\", self.settings.occlusionSampleCount))\n\n if self.settings.displayOnscreenDebugger:\n defines.append((\"DEBUGGER_ACTIVE\", 1))\n\n extraSettings = self.guiManager.getDefines()\n defines += extraSettings\n\n if self.settings.enableTemporalReprojection:\n defines.append((\"USE_TEMPORAL_REPROJECTION\", 1))\n\n if self.settings.enableGlobalIllumination:\n defines.append((\"USE_GLOBAL_ILLUMINATION\", 1))\n\n if self.settings.enableScattering:\n defines.append((\"USE_SCATTERING\", 1))\n\n # Pass near far\n defines.append((\"CAMERA_NEAR\", Globals.base.camLens.getNear()))\n defines.append((\"CAMERA_FAR\", Globals.base.camLens.getFar()))\n\n # Generate\n\toutput = \"#pragma once\\n\"\n output += \"// Autogenerated by RenderingPipeline.py\\n\"\n output += \"// Do not edit! 
Your changes will be lost.\\n\\n\"\n\n for key, value in defines:\n output += \"#define \" + key + \" \" + str(value) + \"\\n\"\n\n # Try to write the file\n\n try:\n with open(\"PipelineTemp/ShaderAutoConfig.include\", \"w\") as handle:\n handle.write(output)\n except Exception, msg:\n self.fatal(\"Error writing shader autoconfig. Maybe no write-access?\")\n return",
"def _setupFinalPass(self):\n # Set wrap for motion blur\n colorTex = self.antialias.getResultTexture()\n colorTex.setWrapU(Texture.WMClamp)\n colorTex.setWrapV(Texture.WMClamp)\n self._setFinalPassShader()",
"def ShaderObjects(self, *args):\n return _Graphic3d.Graphic3d_ShaderProgram_ShaderObjects(self, *args)",
"def doBlur(length=float, sharpness=float, colorFile=\"string\", smoothColor=bool, smooth=float, vectorFile=\"string\"):\n pass",
"def check_shaders(self, context):\n settings = context.scene.foo\n\n # Check for source file changes or other setting changes\n try:\n self.user_shader.update_settings(settings)\n settings.last_shader_error = self.user_shader.last_error\n except Exception as e:\n settings.last_shader_error = str(e)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets the shader which computes the GI
|
def _setGIComputeShader(self):
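    # Fullscreen pass that computes global illumination into the GI
    # precompute buffer; reuses the default post-process vertex shader.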
giShader = Shader.load(Shader.SLGLSL,
"DefaultPostProcess.vertex",
"ComputeGI.fragment")
self.giPrecomputeBuffer.setShader(giShader)
|
[
"def assignPartsShader(self, shader):\r\n #We assign the shader to the selected faces\r\n Selection = cmds.ls(sl=True)\r\n if not Selection:\r\n return\r\n cmds.sets(Selection,edit=True, forceElement = \"%sSG\" % shader)",
"def reloadShader(cls, shader, *args, **kwargs):\r\n cgfxFile = pm.getAttr(shader+'.shader')\r\n if cgfxFile:\r\n pm.cgfxShader(shader, edit=True, fx=cgfxFile)",
"def compile(self, shader_type):\n shader = gl.glext_arb.glCreateShaderObjectARB(shader_type)\n gl.glext_arb.glShaderSourceARB(shader, 1)\n gl.glext_arb.glCompileShaderARB(shader)\n gl.glext_arb.glAttachObjectARB(self.program, shader)\n gl.glext_arb.glDeleteObjectARB(shader)",
"def setSolidShader(self, shader):\n self._solid_shader = shader",
"def _setLightingShader(self):\n lightShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"ApplyLighting.fragment\")\n self.lightingComputeContainer.setShader(lightShader)",
"def CreateCgShader(self):\n dirPath = self.GetShadersDirectory()\n shaderPath = self.get_unique_asset_name(SHADER_FILE_NAME, dirPath)\n shader = ''\n self.CreateAsset(shaderPath, shader)",
"def repathShader(cls, shader, newPath, *args, **kwargs):\r\n cgfxFile = pm.getAttr(shader+'.shader')\r\n if cgfxFile:\r\n pm.cgfxShader(shader, edit=True, fx=path.repath(cgfxFile, newPath) )",
"def setWireframeShader(self, shader):\n self._wireframe_shader = shader",
"def load_shaders(self):\n context = self.context\n self.prog = load_shaders(context, 'tiny_gl_engine/primitives/shaders/cube_vertex.glsl',\n 'tiny_gl_engine/primitives/shaders/cube_fragment.glsl')",
"def setUniformBindings(self, wireframe=False):\n normalMatrix = self._transform.normalMatrix()\n self._active_shader.setUniformValue(\"modelMatrix\", self._transform)\n self._active_shader.setUniformValue(\"viewMatrix\", self._scene.camera.viewMatrix)\n self._active_shader.setUniformValue(\"projectionMatrix\", self._scene.camera.projectionMatrix)\n self._active_shader.setUniformValue(\"normalMatrix\", normalMatrix)\n if self.texture() is not None:\n self._active_shader.setUniformValue(\"texObject\", 0)\n \n ## bind active material\n if self.isSelectable() and self.isSelected():\n self._active_shader.setUniformValue(\"selected\", 1.0)\n else:\n self._active_shader.setUniformValue(\"selected\", 0.65)\n\n ## set highlight color\n if self.isHighlighted():\n self._active_shader.setUniformValue(\"material.emission\", QVector3D(0.25, 0.25, 0.25))\n else:\n self._active_shader.setUniformValue(\"material.emission\", self._active_material.emissionColor)\n self._active_shader.setUniformValue(\"material.ambient\", self._active_material.ambientColor)\n \n ## set the enabled color\n if self.isEnabled():\n self._active_shader.setUniformValue(\"material.emission\", QVector3D(0.25, 0.25, 0.25))\n self._active_shader.setUniformValue(\"material.diffuse\", self._active_material.diffuseColor)\n else:\n self._active_shader.setUniformValue(\"material.diffuse\", self._active_material.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._active_material.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._active_material.shininess)\n \n ## set the error and warning colors\n if self._errorHighlight:\n self._active_shader.setUniformValue(\"material.ambient\", self._errorMaterial.ambientColor)\n self._active_shader.setUniformValue(\"material.diffuse\", self._errorMaterial.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._errorMaterial.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._errorMaterial.shininess)\n if self._warningHighlight:\n self._active_shader.setUniformValue(\"material.ambient\", self._warningMaterial.ambientColor)\n self._active_shader.setUniformValue(\"material.diffuse\", self._warningMaterial.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._warningMaterial.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._warningMaterial.shininess) \n \n ## bind lights\n camera_position = QVector4D(self._scene.camera.position[0], self._scene.camera.position[1], self._scene.camera.position[2], 1.0)\n if self._scene.light.headlight:\n if self._scene.light.directional:\n self._active_shader.setUniformValue(\"lightPosition\", QVector4D(0.0, 0.0, 1.0, 0.0))\n else:\n self._active_shader.setUniformValue(\"lightPosition\", QVector4D(0.0, 0.0, 0.0, 1.0))\n else:\n self._active_shader.setUniformValue(\"lightPosition\", self._scene.camera.viewMatrix * self._scene.light.position)\n\n self._active_shader.setUniformValue(\"light.ambient\", self._scene.light.ambientColor)\n self._active_shader.setUniformValue(\"light.diffuse\", self._scene.light.diffuseColor)\n self._active_shader.setUniformValue(\"light.specular\", self._scene.light.specularColor)\n self._active_shader.setUniformValue(\"lightAttenuation\", self._scene.light.attenuation)",
"def apply_shader(hz, act):\n\n frag_decl = \\\n \"\"\"\n uniform float selected;\n uniform float opacity_level;\n \"\"\"\n\n frag_impl = \\\n \"\"\"\n if (selected == 1){\n fragOutput0 = fragOutput0 + vec4(0.2, 0.2, 0, opacity_level);\n }\n \"\"\"\n\n shaders.shader_to_actor(act, \"vertex\", impl_code=\"\\n\",\n replace_first=False,\n replace_all=False)\n shaders.shader_to_actor(act, \"fragment\", decl_code=frag_decl,\n block=\"coincident\")\n shaders.shader_to_actor(act, \"fragment\", impl_code=frag_impl,\n block=\"light\")\n\n def shader_selected_callback(caller, event, calldata=None):\n program = calldata\n if program is not None:\n try:\n program.SetUniformf(\"selected\",\n hz.cea[act]['selected'])\n except KeyError:\n pass\n try:\n program.SetUniformf(\"selected\",\n hz.cla[act]['selected'])\n except KeyError:\n pass\n program.SetUniformf(\"opacity_level\", 1)\n\n shaders.add_shader_callback(act, shader_selected_callback, priority=100)",
"def __init__(self, shader_dir):\n\n # Note: see the following, which was referenced in the PyOpenGL\n # documentation:\n # https://bitbucket.org/rndblnch/opengl-programmable/src/tip/10-g\n # l3.2core.py?fileviewer=file-view-default\n\n # Create the program object.\n self.__shader_program = GL.glCreateProgram()\n\n # We're going to build up a list of inputs.\n program_uniforms = set()\n program_attributes = set()\n self.__attribute_types = {}\n\n # Compile all of the source files and attach the resulting\n # shader objects to our shader program.\n for (filename, shader_type) in self.__list_shader_files(shader_dir):\n (file_uniforms, file_attributes, attribute_types) = \\\n self.__parse_uniforms_and_attributes(filename)\n program_uniforms.update(file_uniforms);\n program_attributes.update(file_attributes);\n self.__attribute_types.update(attribute_types)\n shader = GL.glCreateShader(shader_type)\n GL.glShaderSource(shader, open(filename, 'r').read())\n GL.glCompileShader(shader)\n if GL.glGetShaderiv(shader, GL.GL_COMPILE_STATUS) != GL.GL_TRUE:\n raise Exception(GL.glGetShaderInfoLog(shader))\n GL.glAttachShader(self.__shader_program, shader)\n\n # Assign locations to vertex attributes. We'll bind them in the program later...\n self.__attrib_locations = dict((k, v) for (v, k) in enumerate(program_attributes))\n\n # Uniform locations will be determined by OpenGL, we'll get them later.\n self.__uniform_locations = {}\n\n # Now we can bind all of the vertex attributes to their\n # assigned locations.\n for attrib in program_attributes:\n GL.glBindAttribLocation(self.__shader_program,\n self.__attrib_locations[attrib],\n attrib)\n\n # Now link the program.\n GL.glLinkProgram(self.__shader_program)\n if GL.glGetProgramiv(self.__shader_program, GL.GL_LINK_STATUS) != GL.GL_TRUE:\n raise Exception(GL.glGetProgramInfoLog(self.__shader_program))\n\n # Retrieve the uniform locations and remember them.\n for uniform in program_uniforms:\n self.__uniform_locations[uniform] = GL.glGetUniformLocation(self.__shader_program, uniform)\n if self.__uniform_locations[uniform] == -1:\n print (\"Warning: Uniform '%s' does not exist.\" % uniform)",
"def use(self):\n\n gl.glUseProgram(self.__program)",
"def _setBlurShader(self):\n blurVShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurVertical.fragment\")\n blurHShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurHorizontal.fragment\")\n self.blurColorV.setShader(blurVShader)\n self.blurColorH.setShader(blurHShader)",
"def __init__(self, *shaders):\n self.shaders = list(shaders)\n self.programId = None",
"def AttachShader(self, *args):\n return _Graphic3d.Graphic3d_ShaderProgram_AttachShader(self, *args)",
"def __init__(self):\n \n OpenMayaMPx.MPxNode.__init__(self)\n\n # translation table for shader\n self.attributes = {}\n self.luxType = \"glossy\"\n self.attributes['Kd'] = ShaderColorAttribute('color')\n self.attributes['Ks'] = ShaderColorAttribute('specularColor')\n self.attributes['uroughness'] = ShaderFloatAttribute('cosinePower', preScale = 0.01, invert=True, reciprocal = True, postScale = 0.1)\n self.attributes['vroughness'] = ShaderFloatAttribute('cosinePower', preScale = 0.01, invert=True, reciprocal = True, postScale = 0.1)",
"def register_shader(name, **kwargs):\n\n ShaderPart(name, **kwargs)",
"def initialize_fragment(self):\n # if self.ndim == 1:\n # shader_pointcoord = \".x\"\n # else:\n # shader_pointcoord = \"\"\n shader_pointcoord = \"\"\n fragment = \"\"\"\n out_color = texture%dD(tex_sampler, varying_tex_coords%s);\n \"\"\" % (self.ndim, shader_pointcoord)\n # print fragment\n self.add_fragment_main(fragment)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets the shaders which blur the color
|
def _setBlurShader(self):
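    # Two-pass separable blur of the scene color: a vertical and a
    # horizontal fragment shader, sharing the default post-process vertex shader.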
blurVShader = Shader.load(Shader.SLGLSL,
"DefaultPostProcess.vertex",
"BlurVertical.fragment")
blurHShader = Shader.load(Shader.SLGLSL,
"DefaultPostProcess.vertex",
"BlurHorizontal.fragment")
self.blurColorV.setShader(blurVShader)
self.blurColorH.setShader(blurHShader)
|
[
"def _setOcclusionBlurShader(self):\n blurVShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurOcclusionVertical.fragment\")\n blurHShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurOcclusionHorizontal.fragment\")\n self.blurOcclusionV.setShader(blurVShader)\n self.blurOcclusionH.setShader(blurHShader)",
"def _createOcclusionBlurBuffer(self):\n self.blurOcclusionV = RenderTarget(\"blurOcclusionVertical\")\n self.blurOcclusionV.addColorTexture()\n self.blurOcclusionV.prepareOffscreenBuffer()\n\n self.blurOcclusionH = RenderTarget(\"blurOcclusionHorizontal\")\n self.blurOcclusionH.addColorTexture()\n self.blurOcclusionH.prepareOffscreenBuffer()\n\n # Mipmaps for blur?\n # self.blurOcclusionV.getColorTexture().setMinfilter(\n # Texture.FTLinearMipmapLinear)\n # self.combiner.getColorTexture().setMinfilter(\n # Texture.FTLinearMipmapLinear)",
"def _setLightingShader(self):\n lightShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"ApplyLighting.fragment\")\n self.lightingComputeContainer.setShader(lightShader)",
"def doBlur(length=float, sharpness=float, colorFile=\"string\", smoothColor=bool, smooth=float, vectorFile=\"string\"):\n pass",
"def DetachShader(self, *args):\n return _Graphic3d.Graphic3d_ShaderProgram_DetachShader(self, *args)",
"def _setupFinalPass(self):\n # Set wrap for motion blur\n colorTex = self.antialias.getResultTexture()\n colorTex.setWrapU(Texture.WMClamp)\n colorTex.setWrapV(Texture.WMClamp)\n self._setFinalPassShader()",
"def setUniformBindings(self, wireframe=False):\n normalMatrix = self._transform.normalMatrix()\n self._active_shader.setUniformValue(\"modelMatrix\", self._transform)\n self._active_shader.setUniformValue(\"viewMatrix\", self._scene.camera.viewMatrix)\n self._active_shader.setUniformValue(\"projectionMatrix\", self._scene.camera.projectionMatrix)\n self._active_shader.setUniformValue(\"normalMatrix\", normalMatrix)\n if self.texture() is not None:\n self._active_shader.setUniformValue(\"texObject\", 0)\n \n ## bind active material\n if self.isSelectable() and self.isSelected():\n self._active_shader.setUniformValue(\"selected\", 1.0)\n else:\n self._active_shader.setUniformValue(\"selected\", 0.65)\n\n ## set highlight color\n if self.isHighlighted():\n self._active_shader.setUniformValue(\"material.emission\", QVector3D(0.25, 0.25, 0.25))\n else:\n self._active_shader.setUniformValue(\"material.emission\", self._active_material.emissionColor)\n self._active_shader.setUniformValue(\"material.ambient\", self._active_material.ambientColor)\n \n ## set the enabled color\n if self.isEnabled():\n self._active_shader.setUniformValue(\"material.emission\", QVector3D(0.25, 0.25, 0.25))\n self._active_shader.setUniformValue(\"material.diffuse\", self._active_material.diffuseColor)\n else:\n self._active_shader.setUniformValue(\"material.diffuse\", self._active_material.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._active_material.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._active_material.shininess)\n \n ## set the error and warning colors\n if self._errorHighlight:\n self._active_shader.setUniformValue(\"material.ambient\", self._errorMaterial.ambientColor)\n self._active_shader.setUniformValue(\"material.diffuse\", self._errorMaterial.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._errorMaterial.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._errorMaterial.shininess)\n if self._warningHighlight:\n self._active_shader.setUniformValue(\"material.ambient\", self._warningMaterial.ambientColor)\n self._active_shader.setUniformValue(\"material.diffuse\", self._warningMaterial.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._warningMaterial.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._warningMaterial.shininess) \n \n ## bind lights\n camera_position = QVector4D(self._scene.camera.position[0], self._scene.camera.position[1], self._scene.camera.position[2], 1.0)\n if self._scene.light.headlight:\n if self._scene.light.directional:\n self._active_shader.setUniformValue(\"lightPosition\", QVector4D(0.0, 0.0, 1.0, 0.0))\n else:\n self._active_shader.setUniformValue(\"lightPosition\", QVector4D(0.0, 0.0, 0.0, 1.0))\n else:\n self._active_shader.setUniformValue(\"lightPosition\", self._scene.camera.viewMatrix * self._scene.light.position)\n\n self._active_shader.setUniformValue(\"light.ambient\", self._scene.light.ambientColor)\n self._active_shader.setUniformValue(\"light.diffuse\", self._scene.light.diffuseColor)\n self._active_shader.setUniformValue(\"light.specular\", self._scene.light.specularColor)\n self._active_shader.setUniformValue(\"lightAttenuation\", self._scene.light.attenuation)",
"def apply_shader(hz, act):\n\n frag_decl = \\\n \"\"\"\n uniform float selected;\n uniform float opacity_level;\n \"\"\"\n\n frag_impl = \\\n \"\"\"\n if (selected == 1){\n fragOutput0 = fragOutput0 + vec4(0.2, 0.2, 0, opacity_level);\n }\n \"\"\"\n\n shaders.shader_to_actor(act, \"vertex\", impl_code=\"\\n\",\n replace_first=False,\n replace_all=False)\n shaders.shader_to_actor(act, \"fragment\", decl_code=frag_decl,\n block=\"coincident\")\n shaders.shader_to_actor(act, \"fragment\", impl_code=frag_impl,\n block=\"light\")\n\n def shader_selected_callback(caller, event, calldata=None):\n program = calldata\n if program is not None:\n try:\n program.SetUniformf(\"selected\",\n hz.cea[act]['selected'])\n except KeyError:\n pass\n try:\n program.SetUniformf(\"selected\",\n hz.cla[act]['selected'])\n except KeyError:\n pass\n program.SetUniformf(\"opacity_level\", 1)\n\n shaders.add_shader_callback(act, shader_selected_callback, priority=100)",
"def _setShaderInputs(self):\n\n # Shader inputs for the light-culling pass\n if self.haveLightingPass:\n self.lightBoundsComputeBuff.setShaderInput(\n \"destination\", self.lightPerTileStorage)\n self.lightBoundsComputeBuff.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n # Shader inputs for the light-applying pass\n self.lightingComputeContainer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.lightingComputeContainer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n self.lightingComputeContainer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n\n\n self.lightingComputeContainer.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightingComputeContainer.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightingComputeContainer.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n if self.occlusion.requiresViewSpacePosNrm():\n self.lightingComputeContainer.setShaderInput(\n \"viewSpaceNormals\",\n self.normalPrecompute.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"viewSpacePosition\",\n self.normalPrecompute.getAuxTexture(0))\n\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlas\", self.lightManager.getAtlasTex())\n\n if self.settings.useHardwarePCF:\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlasPCF\", self.lightManager.getAtlasTex(), self.lightManager.getPCFSampleState())\n\n self.lightingComputeContainer.setShaderInput(\n \"destination\", self.lightingComputeCombinedTex)\n self.lightingComputeContainer.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.lightingComputeContainer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n self.lightingComputeContainer.setShaderInput(\n \"noiseTexture\",\n self.showbase.loader.loadTexture(\"Data/Occlusion/noise4x4.png\"))\n self.lightingComputeContainer.setShaderInput(\n \"lightsPerTile\", self.lightPerTileStorage)\n\n\n if self.settings.enableGlobalIllumination:\n self.lightingComputeContainer.setShaderInput(\"giDiffuseTex\", self.giPrecomputeBuffer.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\"giReflectionTex\", self.giPrecomputeBuffer.getAuxTexture(0))\n\n\n # Shader inputs for the occlusion blur passes\n if self.occlusion.requiresBlurring() and self.haveCombiner:\n self.blurOcclusionH.setShaderInput(\n \"colorTex\", self.blurOcclusionV.getColorTexture())\n\n if self.settings.enableTemporalReprojection:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\", self.combiner.getColorTexture())\n else:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\",\n self.lightingComputeContainer.getColorTexture())\n\n self.blurOcclusionH.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionH.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n\n # Shader inputs for the blur passes\n if self.blurEnabled:\n self.blurColorH.setShaderInput(\n \"dofStorage\", self.dofStorage)\n self.blurColorV.setShaderInput(\n \"dofStorage\", 
self.dofStorage)\n self.blurColorH.setShaderInput(\"colorTex\",\n self.antialias.getResultTexture())\n self.blurColorH.setShaderInput(\"depthTex\",\n self.deferredTarget.getDepthTexture())\n self.blurColorV.setShaderInput(\"colorTex\",\n self.blurColorH.getColorTexture())\n\n # Shader inputs for the temporal reprojection\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.combiner.setShaderInput(\n \"currentComputation\",\n self.lightingComputeContainer.getColorTexture())\n self.combiner.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n self.combiner.setShaderInput(\n \"positionBuffer\", self.deferredTarget.getColorTexture())\n self.combiner.setShaderInput(\n \"velocityBuffer\", self.deferredTarget.getAuxTexture(1))\n self.combiner.setShaderInput(\"currentPixelShift\",\n self.currentPixelShift)\n self.combiner.setShaderInput(\"lastPixelShift\",\n self.lastPixelShift)\n\n if self.blurEnabled:\n self.combiner.setShaderInput(\n \"dofStorage\", self.dofStorage)\n\n self.combiner.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.combiner.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n self.combiner.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.combiner.setShaderInput(\"lastMVP\", self.lastMVP)\n self.combiner.setShaderInput(\"cameraPosition\", self.cameraPosition)\n self.combiner.setShaderInput(\"currentMVP\", self.lastMVP)\n\n # Shader inputs for the final pass\n if self.blurEnabled:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.blurColorV.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.antialias.getResultTexture())\n\n if self.occlusion.requiresBlurring():\n self.normalPrecompute.setShaderInput(\n \"positionTex\", self.deferredTarget.getColorTexture())\n self.normalPrecompute.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.normalPrecompute.setShaderInput(\n \"mainRender\", self.showbase.render)\n self.normalPrecompute.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n\n if self.haveMRT:\n self.deferredTarget.setShaderInput(\n \"velocityTex\", self.deferredTarget.getAuxTexture(1))\n\n self.deferredTarget.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.deferredTarget.setShaderInput(\n \"motionBlurFactor\", self.motionBlurFactor)\n\n if self.haveLightingPass:\n self.deferredTarget.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.deferredTarget.setShaderInput(\n \"newFrame\", self.combiner.getColorTexture())\n self.deferredTarget.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.combiner.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.antialias.getResultTexture())\n\n self.deferredTarget.setShaderInput(\n \"currentPosition\", self.deferredTarget.getColorTexture())\n\n # Set last / current mvp handles\n self.showbase.render.setShaderInput(\"lastMVP\", self.lastMVP)\n\n # Set GI inputs\n if self.settings.enableGlobalIllumination:\n self.globalIllum.bindTo(self.giPrecomputeBuffer, \"giData\")\n\n self.giPrecomputeBuffer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.giPrecomputeBuffer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.giPrecomputeBuffer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n 
self.giPrecomputeBuffer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n self.giPrecomputeBuffer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n # Finally, set shaders\n self.reloadShaders()",
"def blur(self):\n return self._blur_cuda.detach().cpu()",
"def bake_shaders(self):\n\n selected_shaders = cmds.ls(sl=True)\n del self.shaders_to_apply[:]\n for shdr in selected_shaders:\n self.shaders_to_apply.append(shdr)\n print self.shaders_to_apply",
"def blur(img, blur=(5, 5)):\n \"\"\"Returns an image with blur\"\"\"\n kernel = np.ones(blur, np.float32)/(blur[0]*blur[1])\n dst = cv2.filter2D(img, -1, kernel)\n return dst",
"def SetFilter(self, *args):\n return _Graphic3d.Graphic3d_TextureParams_SetFilter(self, *args)",
"def AttachShader(self, *args):\n return _Graphic3d.Graphic3d_ShaderProgram_AttachShader(self, *args)",
"def colorfran(self,):\r\n self.objeto_varios.color(self.random,self.color)",
"def applyEffects(shaderEffects, targetNodePath, baseShader=None):\n \n targetNodePath.setShader(getShader(shaderEffects, baseShader))",
"def setColorSaturation(self, saturateColors=..., colorSaturationValue=...) -> None:\n ...",
"def main():\n # Import a image ready to be blurred\n old_img = SimpleImage(\"images/smiley-face.png\")\n # Show the original image\n old_img.show()\n\n # Blur the original for one time\n blurred_img = blur(old_img)\n # Keep to blur the blurred image till the total times that user wished to blur the image are completed\n for i in range(BLUR_TIMES - 1):\n blurred_img = blur(blurred_img)\n # Show the final blurred image\n blurred_img.show()",
"def polyColorBlindData(useMin=bool, aboveMaxColorBlue=float, minColorGreen=float, noColorBlue=float, minColorBlue=float, maxColorBlue=float, colorRed=float, noColorRed=float, value=\"string\", dataType=\"string\", belowMinColorGreen=float, maxColorRed=float, enableFalseColor=bool, belowMinColorRed=float, mode=int, minColorRed=float, clashColorGreen=float, aboveMaxColorRed=float, typeId=int, aboveMaxColorGreen=float, maxColorGreen=float, clashColorRed=float, useMax=bool, numIdTypes=int, noColorGreen=float, queryMode=bool, colorGreen=float, minValue=float, maxValue=float, attrName=\"string\", clashColorBlue=float, colorBlue=float, belowMinColorBlue=float):\n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets the shader which applies the lighting
|
def _setLightingShader(self):
lightShader = Shader.load(Shader.SLGLSL,
"DefaultPostProcess.vertex",
"ApplyLighting.fragment")
self.lightingComputeContainer.setShader(lightShader)
|
[
"def set_lighting(self):\n prop = self.GetProperty()\n prop.SetAmbient(0.)\n prop.SetDiffuse(0.)\n prop.SetSpecular(1.0)",
"def setSolidShader(self, shader):\n self._solid_shader = shader",
"def setWireframeShader(self, shader):\n self._wireframe_shader = shader",
"def apply_shader(hz, act):\n\n frag_decl = \\\n \"\"\"\n uniform float selected;\n uniform float opacity_level;\n \"\"\"\n\n frag_impl = \\\n \"\"\"\n if (selected == 1){\n fragOutput0 = fragOutput0 + vec4(0.2, 0.2, 0, opacity_level);\n }\n \"\"\"\n\n shaders.shader_to_actor(act, \"vertex\", impl_code=\"\\n\",\n replace_first=False,\n replace_all=False)\n shaders.shader_to_actor(act, \"fragment\", decl_code=frag_decl,\n block=\"coincident\")\n shaders.shader_to_actor(act, \"fragment\", impl_code=frag_impl,\n block=\"light\")\n\n def shader_selected_callback(caller, event, calldata=None):\n program = calldata\n if program is not None:\n try:\n program.SetUniformf(\"selected\",\n hz.cea[act]['selected'])\n except KeyError:\n pass\n try:\n program.SetUniformf(\"selected\",\n hz.cla[act]['selected'])\n except KeyError:\n pass\n program.SetUniformf(\"opacity_level\", 1)\n\n shaders.add_shader_callback(act, shader_selected_callback, priority=100)",
"def AttachShader(self, *args):\n return _Graphic3d.Graphic3d_ShaderProgram_AttachShader(self, *args)",
"def _setBlurShader(self):\n blurVShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurVertical.fragment\")\n blurHShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"BlurHorizontal.fragment\")\n self.blurColorV.setShader(blurVShader)\n self.blurColorH.setShader(blurHShader)",
"def setSolidFlatShader(self, shader):\n self._solid_flat_shader = shader",
"def setUniformBindings(self, wireframe=False):\n normalMatrix = self._transform.normalMatrix()\n self._active_shader.setUniformValue(\"modelMatrix\", self._transform)\n self._active_shader.setUniformValue(\"viewMatrix\", self._scene.camera.viewMatrix)\n self._active_shader.setUniformValue(\"projectionMatrix\", self._scene.camera.projectionMatrix)\n self._active_shader.setUniformValue(\"normalMatrix\", normalMatrix)\n if self.texture() is not None:\n self._active_shader.setUniformValue(\"texObject\", 0)\n \n ## bind active material\n if self.isSelectable() and self.isSelected():\n self._active_shader.setUniformValue(\"selected\", 1.0)\n else:\n self._active_shader.setUniformValue(\"selected\", 0.65)\n\n ## set highlight color\n if self.isHighlighted():\n self._active_shader.setUniformValue(\"material.emission\", QVector3D(0.25, 0.25, 0.25))\n else:\n self._active_shader.setUniformValue(\"material.emission\", self._active_material.emissionColor)\n self._active_shader.setUniformValue(\"material.ambient\", self._active_material.ambientColor)\n \n ## set the enabled color\n if self.isEnabled():\n self._active_shader.setUniformValue(\"material.emission\", QVector3D(0.25, 0.25, 0.25))\n self._active_shader.setUniformValue(\"material.diffuse\", self._active_material.diffuseColor)\n else:\n self._active_shader.setUniformValue(\"material.diffuse\", self._active_material.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._active_material.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._active_material.shininess)\n \n ## set the error and warning colors\n if self._errorHighlight:\n self._active_shader.setUniformValue(\"material.ambient\", self._errorMaterial.ambientColor)\n self._active_shader.setUniformValue(\"material.diffuse\", self._errorMaterial.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._errorMaterial.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._errorMaterial.shininess)\n if self._warningHighlight:\n self._active_shader.setUniformValue(\"material.ambient\", self._warningMaterial.ambientColor)\n self._active_shader.setUniformValue(\"material.diffuse\", self._warningMaterial.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._warningMaterial.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._warningMaterial.shininess) \n \n ## bind lights\n camera_position = QVector4D(self._scene.camera.position[0], self._scene.camera.position[1], self._scene.camera.position[2], 1.0)\n if self._scene.light.headlight:\n if self._scene.light.directional:\n self._active_shader.setUniformValue(\"lightPosition\", QVector4D(0.0, 0.0, 1.0, 0.0))\n else:\n self._active_shader.setUniformValue(\"lightPosition\", QVector4D(0.0, 0.0, 0.0, 1.0))\n else:\n self._active_shader.setUniformValue(\"lightPosition\", self._scene.camera.viewMatrix * self._scene.light.position)\n\n self._active_shader.setUniformValue(\"light.ambient\", self._scene.light.ambientColor)\n self._active_shader.setUniformValue(\"light.diffuse\", self._scene.light.diffuseColor)\n self._active_shader.setUniformValue(\"light.specular\", self._scene.light.specularColor)\n self._active_shader.setUniformValue(\"lightAttenuation\", self._scene.light.attenuation)",
"def setShading(self, shading=True):\n if shading is True:\n self.GetProperty().LightingOn()\n if shading is False:\n self.GetProperty().LightingOff()",
"def assignPartsShader(self, shader):\r\n #We assign the shader to the selected faces\r\n Selection = cmds.ls(sl=True)\r\n if not Selection:\r\n return\r\n cmds.sets(Selection,edit=True, forceElement = \"%sSG\" % shader)",
"def create_light():\n\n # Add new plane\n bpy.ops.mesh.primitive_plane_add(location=(15, -5, 5))\n plane = bpy.context.active_object\n plane.name = 'Light Plane'\n plane.scale = mathutils.Vector((4, 4, 4))\n # tilt\n plane.rotation_euler.rotate_axis('Y', radians(40))\n\n # Create a new material\n material = bpy.data.materials.new(name=\"Plane Light Emission Shader\")\n material.use_nodes = True\n\n # Remove default\n material.node_tree.nodes.remove(material.node_tree.nodes.get('Diffuse BSDF'))\n material_output = material.node_tree.nodes.get('Material Output')\n emission = material.node_tree.nodes.new('ShaderNodeEmission')\n emission.inputs['Strength'].default_value = 5.0\n\n # link emission shader to material\n material.node_tree.links.new(material_output.inputs[0], emission.outputs[0])\n\n # set activer material to your new material\n plane.active_material = material",
"def setTargetLight(self, target):\n self.targetLight = target",
"def add_light(self, light):\n self.viewer.SetLightOn(light.GetHandle())",
"def _setShaderInputs(self):\n\n # Shader inputs for the light-culling pass\n if self.haveLightingPass:\n self.lightBoundsComputeBuff.setShaderInput(\n \"destination\", self.lightPerTileStorage)\n self.lightBoundsComputeBuff.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n # Shader inputs for the light-applying pass\n self.lightingComputeContainer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.lightingComputeContainer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n self.lightingComputeContainer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n\n\n self.lightingComputeContainer.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightingComputeContainer.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightingComputeContainer.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n if self.occlusion.requiresViewSpacePosNrm():\n self.lightingComputeContainer.setShaderInput(\n \"viewSpaceNormals\",\n self.normalPrecompute.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"viewSpacePosition\",\n self.normalPrecompute.getAuxTexture(0))\n\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlas\", self.lightManager.getAtlasTex())\n\n if self.settings.useHardwarePCF:\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlasPCF\", self.lightManager.getAtlasTex(), self.lightManager.getPCFSampleState())\n\n self.lightingComputeContainer.setShaderInput(\n \"destination\", self.lightingComputeCombinedTex)\n self.lightingComputeContainer.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.lightingComputeContainer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n self.lightingComputeContainer.setShaderInput(\n \"noiseTexture\",\n self.showbase.loader.loadTexture(\"Data/Occlusion/noise4x4.png\"))\n self.lightingComputeContainer.setShaderInput(\n \"lightsPerTile\", self.lightPerTileStorage)\n\n\n if self.settings.enableGlobalIllumination:\n self.lightingComputeContainer.setShaderInput(\"giDiffuseTex\", self.giPrecomputeBuffer.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\"giReflectionTex\", self.giPrecomputeBuffer.getAuxTexture(0))\n\n\n # Shader inputs for the occlusion blur passes\n if self.occlusion.requiresBlurring() and self.haveCombiner:\n self.blurOcclusionH.setShaderInput(\n \"colorTex\", self.blurOcclusionV.getColorTexture())\n\n if self.settings.enableTemporalReprojection:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\", self.combiner.getColorTexture())\n else:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\",\n self.lightingComputeContainer.getColorTexture())\n\n self.blurOcclusionH.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionH.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n\n # Shader inputs for the blur passes\n if self.blurEnabled:\n self.blurColorH.setShaderInput(\n \"dofStorage\", self.dofStorage)\n self.blurColorV.setShaderInput(\n \"dofStorage\", 
self.dofStorage)\n self.blurColorH.setShaderInput(\"colorTex\",\n self.antialias.getResultTexture())\n self.blurColorH.setShaderInput(\"depthTex\",\n self.deferredTarget.getDepthTexture())\n self.blurColorV.setShaderInput(\"colorTex\",\n self.blurColorH.getColorTexture())\n\n # Shader inputs for the temporal reprojection\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.combiner.setShaderInput(\n \"currentComputation\",\n self.lightingComputeContainer.getColorTexture())\n self.combiner.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n self.combiner.setShaderInput(\n \"positionBuffer\", self.deferredTarget.getColorTexture())\n self.combiner.setShaderInput(\n \"velocityBuffer\", self.deferredTarget.getAuxTexture(1))\n self.combiner.setShaderInput(\"currentPixelShift\",\n self.currentPixelShift)\n self.combiner.setShaderInput(\"lastPixelShift\",\n self.lastPixelShift)\n\n if self.blurEnabled:\n self.combiner.setShaderInput(\n \"dofStorage\", self.dofStorage)\n\n self.combiner.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.combiner.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n self.combiner.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.combiner.setShaderInput(\"lastMVP\", self.lastMVP)\n self.combiner.setShaderInput(\"cameraPosition\", self.cameraPosition)\n self.combiner.setShaderInput(\"currentMVP\", self.lastMVP)\n\n # Shader inputs for the final pass\n if self.blurEnabled:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.blurColorV.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.antialias.getResultTexture())\n\n if self.occlusion.requiresBlurring():\n self.normalPrecompute.setShaderInput(\n \"positionTex\", self.deferredTarget.getColorTexture())\n self.normalPrecompute.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.normalPrecompute.setShaderInput(\n \"mainRender\", self.showbase.render)\n self.normalPrecompute.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n\n if self.haveMRT:\n self.deferredTarget.setShaderInput(\n \"velocityTex\", self.deferredTarget.getAuxTexture(1))\n\n self.deferredTarget.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.deferredTarget.setShaderInput(\n \"motionBlurFactor\", self.motionBlurFactor)\n\n if self.haveLightingPass:\n self.deferredTarget.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.deferredTarget.setShaderInput(\n \"newFrame\", self.combiner.getColorTexture())\n self.deferredTarget.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.combiner.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.antialias.getResultTexture())\n\n self.deferredTarget.setShaderInput(\n \"currentPosition\", self.deferredTarget.getColorTexture())\n\n # Set last / current mvp handles\n self.showbase.render.setShaderInput(\"lastMVP\", self.lastMVP)\n\n # Set GI inputs\n if self.settings.enableGlobalIllumination:\n self.globalIllum.bindTo(self.giPrecomputeBuffer, \"giData\")\n\n self.giPrecomputeBuffer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.giPrecomputeBuffer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.giPrecomputeBuffer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n 
self.giPrecomputeBuffer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n self.giPrecomputeBuffer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n # Finally, set shaders\n self.reloadShaders()",
"def apply_material(self, mat):\n\n if not hasattr(mat, \"gl_mat\"): # evaluate once the mat properties, and cache the values in a glDisplayList.\n diffuse = numpy.array( [0.8, 0.8, 0.8, .5])\n specular = numpy.array([0., 0., 0., 0.5])\n ambient = numpy.array([0.2, 0.2, 0.2, 0.5])\n emissive = numpy.array([0., 0., 0., 1.0])\n shininess = 128\n wireframe = 0\n twosided = 1 #mat.properties.get(\"twosided\", 1)\n\n mat[\"gl_mat\"] = glGenLists(1)\n glNewList(mat[\"gl_mat\"], GL_COMPILE)\n \n glMaterialfv(GL_FRONT_AND_BACK, GL_DIFFUSE, diffuse)\n glMaterialfv(GL_FRONT_AND_BACK, GL_SPECULAR, specular)\n glMaterialfv(GL_FRONT_AND_BACK, GL_AMBIENT, ambient)\n glMaterialfv(GL_FRONT_AND_BACK, GL_EMISSION, emissive)\n glMaterialf(GL_FRONT_AND_BACK, GL_SHININESS, shininess)\n glPolygonMode(GL_FRONT_AND_BACK, GL_LINE if wireframe else GL_FILL)\n glDisable(GL_CULL_FACE) if twosided else glEnable(GL_CULL_FACE)\n \n glEndList()\n \n glCallList(mat[\"gl_mat\"])",
"def setNoLightWireframeShader(self, shader):\n self._nolight_wireframe_shader = shader",
"def register_shader(name, **kwargs):\n\n ShaderPart(name, **kwargs)",
"def commit_properties(self):\r\n glLightfv(self.id, GL_AMBIENT, self.ambient)\r\n glLightfv(self.id, GL_DIFFUSE, self.diffuse)\r\n glLightfv(self.id, GL_SPECULAR, self.specular)\r\n glLightfv(self.id, GL_POSITION, self.position + [self.type])",
"def load_shaders(self):\n context = self.context\n self.prog = load_shaders(context, 'tiny_gl_engine/primitives/shaders/cube_vertex.glsl',\n 'tiny_gl_engine/primitives/shaders/cube_fragment.glsl')"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets the shader which constructs the normals from position
|
def _setNormalExtractShader(self):
npShader = Shader.load(Shader.SLGLSL,
"DefaultPostProcess.vertex",
"ExtractNormals.fragment")
self.normalPrecompute.setShader(npShader)
|
[
"def setNumNormals(self, num: 'int const') -> \"void\":\n return _coin.SoNormalGenerator_setNumNormals(self, num)",
"def getNormals(self, points, normals=...) -> normals:\n ...",
"def setNormal(self, *args):\n return _coin.SoPrimitiveVertex_setNormal(self, *args)",
"def setVerticesNormals(self, vertices, normals):\n # Far Cry\n self.numVertices = len(vertices)\n self.vertices.updateSize()\n\n # Crysis\n self.verticesData = CgfFormat.DataStreamChunk()\n self.verticesData.dataStreamType = CgfFormat.DataStreamType.VERTICES\n self.verticesData.bytesPerElement = 12\n self.verticesData.numElements = len(vertices)\n self.verticesData.vertices.updateSize()\n\n self.normalsData = CgfFormat.DataStreamChunk()\n self.normalsData.dataStreamType = CgfFormat.DataStreamType.NORMALS\n self.normalsData.bytesPerElement = 12\n self.normalsData.numElements = len(vertices)\n self.normalsData.normals.updateSize()\n\n # set vertex coordinates and normals for Far Cry\n for cryvert, vert, norm in izip(self.vertices, vertices, normals):\n cryvert.p.x = vert[0]\n cryvert.p.y = vert[1]\n cryvert.p.z = vert[2]\n cryvert.n.x = norm[0]\n cryvert.n.y = norm[1]\n cryvert.n.z = norm[2]\n\n # set vertex coordinates and normals for Crysis\n for cryvert, crynorm, vert, norm in izip(self.verticesData.vertices,\n self.normalsData.normals,\n vertices, normals):\n cryvert.x = vert[0]\n cryvert.y = vert[1]\n cryvert.z = vert[2]\n crynorm.x = norm[0]\n crynorm.y = norm[1]\n crynorm.z = norm[2]",
"def setNormal(self, *args) -> \"void\":\n return _coin.SoPrimitiveVertex_setNormal(self, *args)",
"def SoNormalElement_set(state: 'SoState', node: 'SoNode', numNormals: 'int32_t const', normals: 'SbVec3f', normalsAreUnitLength: 'SbBool const'=0) -> \"void\":\n return _coin.SoNormalElement_set(state, node, numNormals, normals, normalsAreUnitLength)",
"def set(state: 'SoState', node: 'SoNode', numNormals: 'int32_t const', normals: 'SbVec3f', normalsAreUnitLength: 'SbBool const'=0) -> \"void\":\n return _coin.SoNormalElement_set(state, node, numNormals, normals, normalsAreUnitLength)",
"def generateNormals(self, onoff: 'SbBool') -> \"void\":\n return _coin.SoReorganizeAction_generateNormals(self, onoff)",
"def getNormals(self) -> \"SbVec3f const *\":\n return _coin.SoNormalGenerator_getNormals(self)",
"def _createNormalPrecomputeBuffer(self):\n self.normalPrecompute = RenderTarget(\"PrecomputeNormals\")\n self.normalPrecompute.addColorTexture()\n self.normalPrecompute.addAuxTextures(1)\n self.normalPrecompute.setColorBits(16)\n self.normalPrecompute.setAuxBits(16)\n self.normalPrecompute.prepareOffscreenBuffer()",
"def show_normals(self):\n \n for polydata in self.polydatas:\n normals = vtk.vtkPolyDataNormals()\n normals.ConsistencyOff()\n # normals.ComputePointNormalsOn()\n normals.ComputeCellNormalsOn()\n if vtk.VTK_MAJOR_VERSION <= 5:\n normals.SetInput(polydata)\n else:\n normals.SetInputData(polydata)\n normals.Update()\n\n normals_at_centers = vtk.vtkCellCenters()\n normals_at_centers.SetInputConnection(normals.GetOutputPort())\n\n normals_mapper = vtk.vtkPolyDataMapper()\n if vtk.VTK_MAJOR_VERSION <= 5:\n normals_output = normals.GetOutput()\n normals_mapper.SetInput(normals_output)\n else:\n normals_mapper.SetInputData(normals.GetOutput())\n normals_actor = vtk.vtkActor()\n normals_actor.SetMapper(normals_mapper)\n\n arrows = vtk.vtkArrowSource()\n arrows.SetTipResolution(16)\n arrows.SetTipLength(0.5)\n arrows.SetTipRadius(0.1)\n\n glyph = vtk.vtkGlyph3D()\n glyph.SetSourceConnection(arrows.GetOutputPort())\n glyph.SetInputConnection(normals_at_centers.GetOutputPort())\n glyph.SetVectorModeToUseNormal()\n glyph.SetScaleFactor(1) # FIXME: may be too big ...\n # glyph.SetVectorModeToUseNormal()\n # glyph.SetVectorModeToUseVector()\n # glyph.SetScaleModeToDataScalingOff()\n glyph.Update()\n\n glyph_mapper = vtk.vtkPolyDataMapper()\n glyph_mapper.SetInputConnection(glyph.GetOutputPort())\n\n glyph_actor = vtk.vtkActor()\n glyph_actor.SetMapper(glyph_mapper)\n\n self.renderer.AddActor(glyph_actor)\n self.normals.append(glyph_actor)",
"def generateDefaultNormals(self, *args) -> \"SbBool\":\n return _coin.SoVRMLVertexShape_generateDefaultNormals(self, *args)",
"def setNormal(self, index: 'int32_t const', normal: 'SbVec3f') -> \"void\":\n return _coin.SoNormalGenerator_setNormal(self, index, normal)",
"def SetVertexNormal(self, *args):\n return _Graphic3d.Graphic3d_ArrayOfPrimitives_SetVertexNormal(self, *args)",
"def getNormals(self) -> \"SbVec3f const *\":\n return _coin.SoNormalCache_getNormals(self)",
"def from_normal_modes(self, pos):\n pos = pos.copy()\n pos[:4] = self.R @ pos[:4]\n return pos",
"def normalVector(self):\n for i in range(0, len(self.tris)):\n # read indices\n t1 = self.tris[i].iv[0]\n t2 = self.tris[i].iv[1]\n t3 = self.tris[i].iv[2]\n # Calculate vectors of the edges\n edge1 = self.pts[t2]-self.pts[t1]\n edge2 = self.pts[t3]-self.pts[t1]\n # Calculate normal vector\n normalvec = edge1%edge2;\n # save normal vector in Triangle class\n self.tris[i].nvec = normalvec\n # save normal vector in Mesh class vector\n self.nvec.append(normalvec)",
"def generateDefaultNormals(self, *args) -> \"SbBool\":\n return _coin.SoVertexShape_generateDefaultNormals(self, *args)",
"def getNormals(self):\n if self.vertices:\n for vert in self.vertices:\n yield vert.n\n elif self.normalsData:\n for norm in self.normalsData.normals:\n yield norm"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Attaches the update tasks to the showbase
|
def _attachUpdateTask(self):
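        # Tasks with a lower sort value run earlier in the frame,
        # so pre-render work gets large negative sorts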
self.showbase.addTask(
self._preRenderCallback, "RP_BeforeRender", sort=-5000)
self.showbase.addTask(
self._update, "RP_Update", sort=-10)
if self.haveLightingPass:
self.showbase.addTask(
self._updateLights, "RP_UpdateLights", sort=-9)
self.showbase.addTask(
self._updateShadows, "RP_UpdateShadows", sort=-8)
self.showbase.addTask(
self._processShadowCallbacks, "RP_ShadowCallbacks", sort=-5)
if self.settings.displayOnscreenDebugger:
self.showbase.addTask(
self._updateGUI, "RP_UpdateGUI", sort=7)
self.showbase.addTask(
self._postRenderCallback, "RP_AfterRender", sort=5000)
|
[
"def do_update(self):\n pass",
"def update_widgets(self):\n self.request_update = True",
"def update_tasks(self):\n for atask in self.task_running_queue:\n atask.update()\n self.update_task_queue()",
"def send_update_to_task_server(self):\n if self.server:\n data = {'finished': self.finished, 'active': self.active,\n 'max_links': self.max_links, 'whitelist': self.get_parsed_whitelist(),\n 'blacklist': self.get_parsed_blacklist(),\n 'expire_date': str(self.expire_date), 'mime_type': self.mime_type.split()}\n self.server.send('/update', 'post', json.dumps(data))",
"def run_update(self, **kw):\n\n for task in self.preprocessed_task_list:\n qubit = [qb for qb in self.meas_objs if qb.name == task['qb']][0]\n T1 = self.analysis.proc_data_dict['analysis_params_dict'][\n qubit.name]['T1']\n qubit.set(f'T1{task[\"transition_name\"]}', T1)",
"def update_controller():\n update_items(\n inst, config_entry, mikrotik_controller, async_add_entities, tracked\n )",
"def test_terminal_v1_tasks_update(self):\n pass",
"async def put(self): # bulk update of tasks\n input_tasks = json_decode(self.request.body)\n async with self.__database.transaction():\n for input_task in input_tasks[\"tasks\"]:\n id = input_task[\"id\"]\n query = self.__tasks.update().where(self.__tasks.c.id == id).values(**input_task)\n await self.__database.execute(query=query)\n await self.__scheduler.add_task(*get_task_schedule(input_task))",
"def update_application_tasks(self) -> None:\n\n if self.current_task_type and self.task:\n end_process_task(self.task, self.request.user)\n\n if self.next_task_type:\n Task.objects.create(\n process=self.application, task_type=self.next_task_type, previous=self.task\n )",
"def _taskUpdateUrl(self, task):\n return '/tasks/gci/task/update/%s' %task.key().id()",
"def update(self, task):\n\t\tself.tasks.append(db.Text(task))\n\t\treturn self",
"def ReloadTasksView(self):\n print(\"Reloading tasks view\")\n self.ClearTaskGrid()\n rawTasks = self.dataManip.GetTasksToView()\n guiTasks = RawToGuiTasks(rawTasks)\n self.AddTasksToGrid(guiTasks)",
"def update_application_tasks(self) -> None:\n\n if self.current_task_type and self.task:\n end_process_task(self.task, self.request.user) # type: ignore[attr-defined]\n\n if self.next_task_type:\n Task.objects.create(\n process=self.application, task_type=self.next_task_type, previous=self.task\n )",
"def _update_info(self):",
"def refresh_task_data(self):\n # Pull updated task data (tasklists and tasks) from the services.\n self.tasktree = self.tasktree_service.refresh()\n \n # Update the UI task tree.\n self.view.update_task_tree(self.tasktree)",
"def run_update(self, **kw):\n\n for task in self.preprocessed_task_list:\n qubit = [qb for qb in self.meas_objs if qb.name == task['qb']][0]\n pulse_par = self.analysis.proc_data_dict['analysis_params_dict'][\n qubit.name]['qscale']\n if self.analysis.pulse_par_name == 'motzoi':\n qubit.set(f'{task[\"transition_name_input\"]}_motzoi', pulse_par)\n else:\n qubit.set(f'{task[\"transition_name_input\"]}_env_mod_freq',\n pulse_par)",
"def update_relative(self, other: \"TaskMixin\", upstream=True) -> None:",
"def test_terminal_v1_tasks_partial_update(self):\n pass",
"def test_update_instructions(self):\n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Computes the current camera bounds, e.g. for light culling
|
def _computeCameraBounds(self):
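        # makeBounds() returns the lens' frustum volume in camera space;
        # xform() then transforms it into world (render) space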
cameraBounds = self.camera.node().getLens().makeBounds()
cameraBounds.xform(self.camera.getMat(self.showbase.render))
return cameraBounds
|
[
"def _computeLightBounds(self):\n self.bounds = BoundingSphere(Point3(self.position), self.radius)",
"def camera_view_bounds_2d(scene, camera_object, mesh_object):\r\n\r\n \"\"\" Get the inverse transformation matrix. \"\"\"\r\n matrix = camera_object.matrix_world.normalized().inverted()\r\n \"\"\" Create a new mesh data block, using the inverse transform matrix to undo any transformations. \"\"\"\r\n mesh = mesh_object.to_mesh()#scene, True, 'RENDER')\r\n mesh.transform(mesh_object.matrix_world)\r\n mesh.transform(matrix)\r\n\r\n \"\"\" Get the world coordinates for the camera frame bounding box, before any transformations. \"\"\"\r\n frame = [-v for v in camera_object.data.view_frame(scene=scene)[:3]]\r\n\r\n lx = []\r\n ly = []\r\n\r\n for v in mesh.vertices:\r\n co_local = v.co\r\n z = -co_local.z\r\n\r\n if z <= 0.0:\r\n \"\"\" Vertex is behind the camera; ignore it. \"\"\"\r\n continue\r\n else:\r\n \"\"\" Perspective division \"\"\"\r\n frame = [(v / (v.z / z)) for v in frame]\r\n\r\n min_x, max_x = frame[1].x, frame[2].x\r\n min_y, max_y = frame[0].y, frame[1].y\r\n\r\n x = (co_local.x - min_x) / (max_x - min_x)\r\n y = (co_local.y - min_y) / (max_y - min_y)\r\n\r\n lx.append(x)\r\n ly.append(y)\r\n try:\r\n bpy.data.meshes.remove(mesh)\r\n except: None\r\n \"\"\" Image is not in view if all the mesh verts were ignored \"\"\"\r\n if not lx or not ly:\r\n return None\r\n\r\n min_x = np.clip(min(lx), 0.0, 1.0)\r\n min_y = np.clip(min(ly), 0.0, 1.0)\r\n max_x = np.clip(max(lx), 0.0, 1.0)\r\n max_y = np.clip(max(ly), 0.0, 1.0)\r\n\r\n \"\"\" Image is not in view if both bounding points exist on the same side \"\"\"\r\n if min_x == max_x or min_y == max_y:\r\n return None\r\n\r\n \"\"\" Figure out the rendered image size \"\"\"\r\n render = scene.render\r\n fac = render.resolution_percentage * 0.01\r\n dim_x = render.resolution_x * fac\r\n dim_y = render.resolution_y * fac\r\n\r\n return (min_x, min_y), (max_x, max_y)",
"def getBounds(self, *args):\n return _coin.SbBox3d_getBounds(self, *args)",
"def _get_spatial_bounds(self): \n # This should be a MultiRasterIO method\n with rasterio.open(self._mrio._get_template_for_given_resolution(self._mrio.dst_res, \"path\")) as src_layer:\n pass # later we need src_layer for src_layer.window_transform(win)\n win_transform = src_layer.window_transform(self._window)\n bounds = rasterio.windows.bounds(window=self._window,\n transform=win_transform,\n height=0, width=0)\n return bounds",
"def bounds(self):\n xmin, ymax = self.top_left\n nrow, ncol = self.shape\n xmax = xmin + ncol * self.resolution\n ymin = ymax - nrow * self.resolution\n return xmin, ymin, xmax, ymax",
"def cf_bounds(self,r,c):\n return self.cfs[r,c].get_bounds(self.src)",
"def getBounds(self, *args):\n return _coin.SbBox3f_getBounds(self, *args)",
"def getBounds(self, *args):\n return _coin.SbBox2d_getBounds(self, *args)",
"def bounds(self):\n raise NotImplementedError",
"def getBounds(self, *args) -> \"void\":\n return _coin.SbBox2d_getBounds(self, *args)",
"def bounds(self):\n return [p.bounds for p in self.params]",
"def compute_world_bounds(homographies, height, width):\n xbounds = [0, width - 1]\n ybounds = [0, height - 1]\n\n for h in homographies: \n # find transformed image bounding box\n x = np.array([0, width - 1, 0, width - 1])\n y = np.array([0, 0, height - 1, height - 1])\n [xt, yt] = transform2h(x, y, np.linalg.inv(h))\n xbounds[0] = min(xbounds[0], min(xt))\n xbounds[1] = max(xbounds[1], max(xt))\n ybounds[0] = min(ybounds[0], min(yt))\n ybounds[1] = max(ybounds[1], max(yt))\n\n return xbounds, ybounds",
"def adjustCameraDepthExtent(self):\n bounds = self.scene.bounds(transformed=True)\n if bounds is None:\n bounds = numpy.array(((0., 0., 0.), (1., 1., 1.)),\n dtype=numpy.float32)\n bounds = self.camera.extrinsic.transformBounds(bounds)\n\n if isinstance(self.camera.intrinsic, transform.Perspective):\n # This needs to be reworked\n zbounds = - bounds[:, 2]\n zextent = max(numpy.fabs(zbounds[0] - zbounds[1]), 0.0001)\n near = max(zextent / 1000., 0.95 * zbounds[1])\n far = max(near + 0.1, 1.05 * zbounds[0])\n\n self.camera.intrinsic.setDepthExtent(near, far)\n elif isinstance(self.camera.intrinsic, transform.Orthographic):\n # Makes sure z bounds are included\n border = max(abs(bounds[:, 2]))\n self.camera.intrinsic.setDepthExtent(-border, border)\n else:\n raise RuntimeError('Unsupported camera', self.camera.intrinsic)",
"def cal_bounds(self):\n pass",
"def _get_bounds(self, img: np.ndarray, limit) -> Tuple[List[list], list]:\n\n def bound_limit(value):\n return np.clip(value - limit, 0, 255), np.clip(value + limit, 0, 255)\n\n minbounds, maxbounds, bounds, initial = [], [], [], []\n\n for i, j, k in product(range(img.shape[-3]), range(img.shape[-2]), range(img.shape[-1])):\n temp = img[i, j, k]\n initial += [temp]\n bound = bound_limit(temp)\n if self.es == 0:\n minbounds += [bound[0]]\n maxbounds += [bound[1]]\n else:\n bounds += [bound]\n if self.es == 0:\n bounds = [minbounds, maxbounds]\n\n return bounds, initial",
"def get_bounding_box(vehicle, camera):\n\n bb_cords = BBoxUtil._create_bb_points(vehicle)\n cords_x_y_z = BBoxUtil._vehicle_to_sensor(bb_cords, vehicle, camera)[:3, :]\n cords_y_minus_z_x = np.concatenate([cords_x_y_z[1, :], -cords_x_y_z[2, :], cords_x_y_z[0, :]])\n bbox = np.transpose(np.dot(camera.calibration, cords_y_minus_z_x))\n camera_bbox = np.concatenate([bbox[:, 0] / bbox[:, 2], bbox[:, 1] / bbox[:, 2], bbox[:, 2]], axis=1)\n return camera_bbox",
"def find_bounded_wcs(self):\n if self.total_mask is None:\n print(\"Please add exposures before computing bounding box WCS...\")\n\n # start by computing the bounding box for the footprint\n ymin, ymax, xmin, xmax = calc_bounding_box(self.total_mask)\n # make a copy of the full WCS to be revised\n self.bounded_wcs = self.meta_wcs.deepcopy()\n self.bounding_box = [slice(ymin, ymax), slice(xmin, xmax)]\n\n # Use this box to compute new CRPIX position\n self.bounded_wcs.wcs.crpix -= [xmin, ymin]\n self.bounded_wcs.pixel_shape = [xmax - xmin + 1, ymax - ymin + 1]",
"def _makeLightBoundsComputationBuffer(self, w, h):\n self.debug(\"Creating light precomputation buffer of size\", w, \"x\", h)\n self.lightBoundsComputeBuff = RenderTarget(\"ComputeLightTileBounds\")\n self.lightBoundsComputeBuff.setSize(w, h)\n self.lightBoundsComputeBuff.setColorWrite(False)\n self.lightBoundsComputeBuff.prepareOffscreenBuffer()",
"def get_bounds(obj):\n \n xmin, ymin, zmin = tuple(obj.data.vertices[0].co)\n xmax, ymax, zmax = xmin, ymin, zmin\n \n for point in obj.data.vertices:\n x, y, z = tuple(point.co)\n if x < xmin:\n xmin = x\n elif x > xmax:\n xmax = x\n \n if y < ymin:\n ymin = y\n elif y > ymax:\n ymax = y\n \n if z < zmin:\n zmin = z\n elif z > zmax:\n zmax = z\n \n return xmin, xmax, ymin, ymax, zmin, zmax"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Task which updates/culls the lights
|
def _updateLights(self, task=None):
self.lightManager.updateLights()
if task is not None:
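            # Returning task.cont keeps the task scheduled for the next frame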
return task.cont
|
[
"def _updateShadows(self, task=None):\n self.lightManager.updateShadows()\n if task is not None:\n return task.cont",
"async def update_led(led):\n\n global flash_count\n flash_count = 0\n\n while True:\n if flash_count > 0:\n await flash_led(led, 100)\n flash_count -= 1\n else:\n await fade_led(led)",
"def handle_lighting():\n\n status_led = RGBLED(13, 19, 26)\n steps = 100\n current_step = 0\n\n while not QUIT_EVENT.is_set():\n if GPS_STATUS in GPSStatus.locked_states():\n set_rgb_colour(status_led, Colour.GREEN)\n sleep(1)\n else:\n current_step = (current_step + 1) % steps\n cycle_rgb_led(status_led, current_step, steps)\n sleep(1 / steps)\n\n status_led.off()\n status_led.close()",
"async def async_update(self) -> None:\n if self.device.appliance.status.get(self._key, {}).get(ATTR_VALUE) is True:\n self._state = True\n elif self.device.appliance.status.get(self._key, {}).get(ATTR_VALUE) is False:\n self._state = False\n else:\n self._state = None\n\n _LOGGER.debug(\"Updated, new light state: %s\", self._state)\n\n if self._ambient:\n color = self.device.appliance.status.get(self._custom_color_key, {})\n\n if not color:\n self._hs_color = None\n self._brightness = None\n else:\n colorvalue = color.get(ATTR_VALUE)[1:]\n rgb = color_util.rgb_hex_to_rgb_list(colorvalue)\n hsv = color_util.color_RGB_to_hsv(rgb[0], rgb[1], rgb[2])\n self._hs_color = [hsv[0], hsv[1]]\n self._brightness = ceil((hsv[2] - 10) * 255 / 90)\n _LOGGER.debug(\"Updated, new brightness: %s\", self._brightness)\n\n else:\n brightness = self.device.appliance.status.get(self._brightness_key, {})\n if brightness is None:\n self._brightness = None\n else:\n self._brightness = ceil((brightness.get(ATTR_VALUE) - 10) * 255 / 90)\n _LOGGER.debug(\"Updated, new brightness: %s\", self._brightness)",
"def lighting_process(db, controls):\n try:\n # Get the current hour & the corresponding RGB data\n hour = str(datetime.datetime.now().hour)\n rgb_data = db['RGB_data'][hour]\n red = rgb_data['R']\n green = rgb_data['G']\n blue = rgb_data['B']\n\n # Check for manual override on the RGB LED Strip\n if not db['Manual Overrides']['RGB LED']:\n # Adjust the RGB Accordingly and update the status\n controls['RGB LED'].adjust_color(red_content=red, green_content=green, blue_content=blue)\n db['RGB LED Status'] = [red, green, blue]\n\n # Check for manual override on the UV LED Strip\n if not db['Manual Overrides']['UV LED']:\n # Get the UV light data for the current hour\n if db['UV_data'][hour]:\n controls['UV LED'].turn_on()\n db['UV LED Status'] = \"ON\"\n else:\n controls['UV LED'].turn_off()\n db['UV LED Status'] = \"OFF\"\n except Exception as err:\n return err\n return 0",
"async def _wled_turn_on(self) -> None:\n await self.wled.nightlight(on=True)",
"def _triggersensorupdate(self):\r\n\r\n\r\n self._sendPacket(self._activation_packet)\r\n self._sendPacket(b'\\x52\\x02\\x13\\x05\\x9a')\r\n\r\n # Sending OFF signal\r\n for dev_id, device in self.devices.items():\r\n self._hass.add_job(\r\n self.async_see(dev_id, STATE_OFF)\r\n )",
"def swatchRefresh():\n pass",
"async def hue_colourloop(self, ctx, *, name=None):\n if not await self.get_bridge():\n await ctx.send(\"No IP has been set.\")\n return\n for light in self.lights:\n if name is None or light.name.lower() == name.lower():\n if light.effect != \"colorloop\" and light.on:\n light.effect = \"colorloop\"\n continue\n if light.effect == \"colorloop\" and light.on:\n light.effect = \"none\"\n continue",
"async def _wled_turn_off(self) -> None:\n await self.wled.nightlight(on=False)",
"def initLights(self):\n\t\tself.interface.sendClear(False)\n\t\tself.interface.drainBytes()\n\t\tself.clearTime(0)\n\t\tself.sendChangesForTime([interfaceProtocol.ColorChangeMessage(i, lightColor.Color(0xcc, 0, 0, 0, True)) for i in range(50)], 0) # Turn everything off\n\t\ttime.sleep(1) # Make sure everything is set",
"async def _wled_turn_on(self) -> None:\n await self.wled.sync(receive=True)",
"def operate_lane_signal(self):\n self.signal.turn_on_green()\n time.sleep(7)\n self.signal.turn_on_yellow()\n time.sleep(4)\n self.signal.turn_on_red()\n time.sleep(2)",
"def mainLoop(self):\n\t\tcurrColors = [lightColor.Color(0xcc, 0, 0, 0) for i in range(50)]\n\t\tcurrTime = 0\n\t\tresetTime = 0\n\t\twhile True:\n\t\t\tif self.syncTime and (resetTime is not None):\n\t\t\t\tself.waitForRealTime() # Allows blocking\n\n\t\t\tnextColors = copy.deepcopy(currColors)\n\t\t\tif self.microInterval == 0:\n\t\t\t\tself.runColorListUpdate(currTime, nextColors)\n\t\t\telse:\n\t\t\t\tself.runColorListUpdate(self.getNextTime(currTime), nextColors)\n\n\t\t\tif resetTime is not None:\n\t\t\t\tif self.syncTime:\n\t\t\t\t\tself.interface.sendClear()\n\t\t\t\tself.clearTime(resetTime)\n\t\t\t\tresetTime = None\n\n\t\t\tif self.microInterval != 0:\n\t\t\t\tmicroTemp = copy.deepcopy(currColors)\n\t\t\t\tmicroTime = 0\n\t\t\t\twhile microTime < self.interval:\n\t\t\t\t\tupdates = self.runInterpolation(currColors, microTemp, nextColors, microTime)\n\t\t\t\t\tself.sendChangesForTime(updates, currTime + microTime)\n\n\t\t\t\t\tmicroTime += self.microInterval\n\t\t\t\tupdates = self.computeChanges(microTemp, nextColors) # Make sure everything is up to date (even if no gradient)\n\t\t\t\tself.sendChangesForTime(updates, currTime + self.interval)\n\t\t\t\tcurrColors = microTemp\n\n\t\t\telse:\n\t\t\t\tupdates = self.computeChanges(currColors, nextColors)\n\t\t\t\tself.sendChangesForTime(updates, currTime)\n\n\t\t\tcurrColors = nextColors\n\t\t\tnewTime = self.getNextTime(currTime)\n\t\t\tif newTime == 0:\n\t\t\t\tresetTime = currTime + self.interval\n\n\t\t\tcurrTime = newTime",
"def update_shading(self):\n self.simulation.update_colourin()",
"async def test_white_light(hass: HomeAssistant) -> None:\n config_entry = MockConfigEntry(\n domain=DOMAIN,\n data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE},\n unique_id=MAC_ADDRESS,\n )\n config_entry.add_to_hass(hass)\n bulb = _mocked_bulb()\n bulb.mode = \"ww\"\n bulb.protocol = None\n bulb.color_modes = {FLUX_COLOR_MODE_DIM}\n bulb.color_mode = FLUX_COLOR_MODE_DIM\n with _patch_discovery(), _patch_wifibulb(device=bulb):\n await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}})\n await hass.async_block_till_done()\n\n entity_id = \"light.bulb_rgbcw_ddeeff\"\n\n state = hass.states.get(entity_id)\n assert state.state == STATE_ON\n attributes = state.attributes\n assert attributes[ATTR_BRIGHTNESS] == 128\n assert attributes[ATTR_COLOR_MODE] == \"brightness\"\n assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [\"brightness\"]\n assert ATTR_EFFECT_LIST in attributes # single channel now supports effects\n\n await hass.services.async_call(\n LIGHT_DOMAIN, \"turn_off\", {ATTR_ENTITY_ID: entity_id}, blocking=True\n )\n bulb.async_turn_off.assert_called_once()\n await async_mock_device_turn_off(hass, bulb)\n\n assert hass.states.get(entity_id).state == STATE_OFF\n\n await hass.services.async_call(\n LIGHT_DOMAIN, \"turn_on\", {ATTR_ENTITY_ID: entity_id}, blocking=True\n )\n bulb.async_turn_on.assert_called_once()\n bulb.async_turn_on.reset_mock()\n\n await hass.services.async_call(\n LIGHT_DOMAIN,\n \"turn_on\",\n {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100},\n blocking=True,\n )\n bulb.async_set_brightness.assert_called_with(100)\n bulb.async_set_brightness.reset_mock()",
"async def trigger_led(self):\n self.led_on()\n await asyncio.sleep(self.relay_output_duration)\n self.led_off()",
"async def hue_switch(self, ctx, *, name=None):\n if not await self.get_bridge():\n await ctx.send(\"No IP has been set.\")\n return\n for light in self.lights:\n if name is None or light.name.lower() == name.lower():\n if light.on:\n light.on = False\n continue\n if not light.on:\n light.on = True\n continue",
"async def _wled_turn_on(self) -> None:\n await self.wled.sync(send=True)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Task which updates the shadow maps
|
def _updateShadows(self, task=None):
self.lightManager.updateShadows()
if task is not None:
return task.cont
|
[
"def _updateShadowSources(self):\n self.shadowSources[0].setPos(self.position + self.direction * 500.0)\n self.shadowSources[0].lookAt(self.position)\n self.shadowSources[0].invalidate()",
"def _updateShadowSources(self):\n\n cubemapDirections = [\n Vec3(-1, 0, 0),\n Vec3(1, 0, 0),\n Vec3(0, -1, 0),\n Vec3(0, 1, 0),\n Vec3(0, 0, -1),\n Vec3(0, 0, 1),\n ]\n\n for index, direction in enumerate(cubemapDirections):\n self.shadowSources[index].setPos(self.position)\n self.shadowSources[index].lookAt(self.position + direction)\n\n # self.shadowSources[0].setPos(\n # self.position + Vec3(0, self.spacing * 2.0, 0))\n # self.shadowSources[0].setHpr(Vec3(180, 0, 0))\n\n # self.shadowSources[1].setPos(\n # self.position - Vec3(0, self.spacing * 2.0, 0))\n # self.shadowSources[1].setHpr(Vec3(0, 0, 0))",
"def recreate_map(self):\n self.create_map()\n for item in self.saved_positions.items():\n print(item[1][-1])\n self.update_position(item[1][-1])\n self.draw_historic_path(device_id=item[1][-1]['device_id'],last=20)\n m.draw_map()",
"def after_map(self, map):",
"def tax_maps_staging_update():\r\n LOG.info(\"Start: Update tax map staging repository.\")\r\n start_time = datetime.datetime.now()\r\n conn = credential.UNCPathCredential(\r\n path.RLID_DATA_STAGING_SHARE, **credential.RLID_DATA_SHARE\r\n )\r\n with conn:\r\n count = Counter()\r\n for source_path in document.repository_file_paths(path.LANE_TAX_MAP_IMAGES):\r\n staging_path = os.path.join(\r\n REPO_PATH[\"tax-map-staging\"],\r\n # Tax maps have a one-deep bin.\r\n os.path.split(os.path.dirname(source_path))[-1],\r\n os.path.basename(source_path),\r\n )\r\n if document.changed(staging_path, source_path):\r\n result_key = document.update_document(source_path, staging_path)\r\n count[result_key] += 1\r\n document.log_state_counts(count, documents_type=\"tax maps (staging)\")\r\n elapsed(start_time, LOG)\r\n LOG.info(\"End: Update.\")",
"def tax_maps_update():\r\n start_time = datetime.datetime.now()\r\n conn = credential.UNCPathCredential(\r\n path.RLID_DATA_SHARE, **credential.RLID_DATA_SHARE\r\n )\r\n with conn:\r\n # Attach logfile handler for repository update logfile.\r\n logfile = logging.FileHandler(\r\n os.path.join(\r\n REPO_PATH[\"tax-map\"], \"Tax_Map_Update_{}.log\".format(start_time.year)\r\n )\r\n )\r\n logfile.setLevel(logging.INFO)\r\n logfile.setFormatter(LOGFILE_FORMATTER)\r\n LOG.addHandler(logfile)\r\n LOG.info(\"START SCRIPT: Update RLID tax map repository from staging.\")\r\n file_name_release_date = tax_map_file_name_release_map(\r\n start_datetime=rlid_data_currency(\"Tax Maps\")\r\n )\r\n count = Counter()\r\n # Iterate through path/date map, adding, archiving & updating.\r\n for file_name, release_date in file_name_release_date.items():\r\n rlid_path = rlid_document_path(file_name, document_type=\"tax-map\")\r\n staging_path = rlid_document_path(\r\n file_name, document_type=\"tax-map-staging\"\r\n )\r\n result_key = update_tax_map(\r\n staging_path, rlid_path, release_date, archive_previous=True\r\n )\r\n count[result_key] += 1\r\n document.log_state_counts(count, documents_type=\"tax maps\")\r\n # Finally, update tax map repository currency date (if we placed any).\r\n if count[\"updated\"]:\r\n rlid_data_currency_setter(\"Tax Maps\", max(file_name_release_date.values()))\r\n elapsed(start_time, LOG)\r\n LOG.info(\"END SCRIPT: Update\")",
"def _updateLights(self, task=None):\n self.lightManager.updateLights()\n\n if task is not None:\n return task.cont",
"def updateParams(self,mapName):\n pass",
"def _attachUpdateTask(self):\n\n self.showbase.addTask(\n self._preRenderCallback, \"RP_BeforeRender\", sort=-5000)\n\n self.showbase.addTask(\n self._update, \"RP_Update\", sort=-10)\n\n if self.haveLightingPass:\n self.showbase.addTask(\n self._updateLights, \"RP_UpdateLights\", sort=-9)\n self.showbase.addTask(\n self._updateShadows, \"RP_UpdateShadows\", sort=-8)\n\n self.showbase.addTask(\n self._processShadowCallbacks, \"RP_ShadowCallbacks\", sort=-5)\n\n\n if self.settings.displayOnscreenDebugger:\n self.showbase.addTask(\n self._updateGUI, \"RP_UpdateGUI\", sort=7)\n\n self.showbase.addTask(\n self._postRenderCallback, \"RP_AfterRender\", sort=5000)",
"def update_light(self, tiles, light_map):\n light_distance = self.light_distance()\n self.emit_light(light_distance, tiles, light_map)",
"def update_objects(self):\n\t\tself.update_projectiles()",
"def update_weather_async(self, _):\n threading.Thread(target=self.update_weather).start()",
"async def update_area_registry() -> None:\n\n get_base().areas = await hass_areas()",
"def request_rebuild_midi_map(self):\n pass",
"async def update_color_zones(self):\n zone = 0\n top = 1\n while self.available and zone < top:\n # Each get_color_zones can update 8 zones at once\n resp = await AwaitAioLIFX().wait(\n partial(self.bulb.get_color_zones, start_index=zone)\n )\n if resp:\n zone += 8\n top = resp.count\n\n # We only await multizone responses so don't ask for just one\n if zone == top - 1:\n zone -= 1",
"def run(self, dryrun=False):\n\n if self.running:\n print(\n \"[WARN] The run() function was called during a running scan. This request was ignored.\"\n )\n pass\n\n self.running = True\n subprocess.run(self.zmap_cmd())\n self.running = False",
"async def on_ready(self):\n try:\n self.update_townless.start() # pylint: disable=no-member\n except RuntimeError:\n pass",
"def update(self):\n if self.passive:\n self.do_mount_ss(True)\n else:\n self.thin_out_snapshots()\n self.create_ss()",
"def can_update_maps(self):\n return # boolean"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Task which updates the onscreen GUI debugger
|
def _updateGUI(self, task=None):
self.guiManager.update()
if task is not None:
return task.cont
|
[
"def _attachUpdateTask(self):\n\n self.showbase.addTask(\n self._preRenderCallback, \"RP_BeforeRender\", sort=-5000)\n\n self.showbase.addTask(\n self._update, \"RP_Update\", sort=-10)\n\n if self.haveLightingPass:\n self.showbase.addTask(\n self._updateLights, \"RP_UpdateLights\", sort=-9)\n self.showbase.addTask(\n self._updateShadows, \"RP_UpdateShadows\", sort=-8)\n\n self.showbase.addTask(\n self._processShadowCallbacks, \"RP_ShadowCallbacks\", sort=-5)\n\n\n if self.settings.displayOnscreenDebugger:\n self.showbase.addTask(\n self._updateGUI, \"RP_UpdateGUI\", sort=7)\n\n self.showbase.addTask(\n self._postRenderCallback, \"RP_AfterRender\", sort=5000)",
"def run( self, ):\r\n # move from controller to decouple type of gui\r\n self.gui_running = True\r\n self.root.after( self.parameters.gt_delta_t, self.controller.polling )\r\n\r\n # when to close or flush is a bit of issue, flush when using edit button ??\r\n if self.parameters.comm_logging_fn is not None:\r\n # !! may need work to make sure in right directory\r\n self.comm_log = open( self.parameters.comm_logging_fn, \"a\" )\r\n else:\r\n self.comm_log = None\r\n\r\n self.root.mainloop()\r\n self.gui_running = False\r\n if self.comm_log is not None:\r\n self.comm_log.close()",
"def start(self):\n self.update = True\n self.show()",
"def display_tasks(self):\n skuld_tab, skuld_window = \\\n __find_vim_window__(self.SKULD_BUFFER_NAME)\n if skuld_window is None:\n vim.command('tabedit ' + self.SKULD_BUFFER_NAME)\n else:\n vim.current.tabpage = skuld_tab\n vim.current.window = skuld_window\n self.update_buf_content(vim.current.window.buffer)\n vim.current.buffer.options['modified'] = False\n vim.current.buffer.options['buftype'] = 'nofile'\n vim.current.buffer.options['bufhidden'] = 'hide'\n vim.current.buffer.options['swapfile'] = False\n vim.command('call SkuldBufOpenHook()')",
"def Execute(self):\n if DEBUG_SHOW_FPS:\n self.fps_text.text = \"fps: \" + str(self.core.current_fps)\n\n if DEBUG_SHOW_NUM_PROCESSES:\n self.num_process_text.text = \"pro: \" + str(self.core.process_count)",
"def update(self, task):\r\n dt = globalClock.getDt()\r\n\r\n # Getting mouse position\r\n md = self._showbase.win.getPointer(0)\r\n\t \r\n x = md.getX()\r\n y = md.getY()\r\n center_x = self._showbase.win.getXSize() // 2\r\n center_y = self._showbase.win.getYSize() // 2\r\n \r\n if self._showbase.win.movePointer(0, center_x, center_y):\r\n self._heading = self._heading - (x - center_x) * self._mouse_sensivity\r\n self._pitch = self._pitch - (y - center_y) * self._mouse_sensivity\r\n \r\n # Set camera rotation based on mouse position\r\n self._showbase.camera.setHpr(self._heading, self._pitch, 0)\r\n\r\n pos_increment = dt * self._velocity * 60.0\r\n\r\n # Setting camera position based on inputs \r\n if self._input_state.isSet('forward'):\r\n self._showbase.camera.setY(self._showbase.camera, pos_increment)\r\n\r\n if self._input_state.isSet('backward'):\r\n self._showbase.camera.setY(self._showbase.camera, -pos_increment)\r\n\r\n if self._input_state.isSet('left'):\r\n self._showbase.camera.setX(self._showbase.camera, -pos_increment)\r\n\r\n if self._input_state.isSet('right'):\r\n self._showbase.camera.setX(self._showbase.camera, pos_increment)\r\n\r\n if self._input_state.isSet('up'):\r\n self._showbase.camera.setZ(self._showbase.camera, pos_increment)\r\n\r\n if self._input_state.isSet('down'):\r\n self._showbase.camera.setZ(self._showbase.camera, -pos_increment) \r\n \r\n return Task.cont",
"def start(self):\n\n\t\t#refresh the view\n\t\ttry:\n\t\t\tself.details.destroy()\n\t\texcept AttributeError:\n\t\t\tpass\n\n\t\ttask = self._get_select_item()\n\t\t\n\t\t\n\t\tWritter.event('start to work on {}'.format(task.name))\n\n\n\t\tif task:\n\n\t\t\tself.new_worktime = WorkTime(task)\n\n\t\t\tstarted_time = time.localtime(self.new_worktime.begin )\n\n\t\t\t#call back stop button clicked\n\t\t\tdef callback():\n\t\t\t\tself.new_worktime.add()\n\t\t\t\tself.show_details()\n\t\t\t\tself._tree()\n\n\n\n\t\t\tself.details = LabelFrame(self, text='\"{}\" in progress...'.\n\t\t\t\tformat(task.name), \n\t\t\t\t\trelief=FLAT,\n\t\t\t\t\tpadx=Setting.PADDING, pady=Setting.PADDING, \n\t\t\t\t\tfont=Setting.FONT_TITLE ,\n\t\t\t\t\tforeground=Setting.COLOR_TXT, background=Setting.COLOR_BKG)\n\n\t\t\tself.time_value = StringVar()\n\n\n\t\t\tself.time_value.set(\"Tâche en cours\")\n\t\t\tLabel(self.details , \n\t\t\t\ttext='Started @{}'.format(time.strftime('%H:%M',started_time)),\n\t\t\t\tfont=Setting.FONT_TEXT , \n\t\t\t\tforeground=Setting.COLOR_TXT, \n\t\t\t\tbackground=Setting.COLOR_BKG).pack(fill=X)\n\t\t\tLabel(self.details , textvariable=self.time_value,font=Setting.FONT_TEXT , \n\t\t\t\tforeground=Setting.COLOR_TXT, \n\t\t\t\tbackground=Setting.COLOR_BKG).pack(fill=X)\n\t\t\tButton(self.details, text=\"stop\", command=callback).pack(fill=X)\n\n\n\n\t\t\tdef update_time():\n\t\t\t\t\"\"\"get time delat & update string var\"\"\"\n\t\t\t\tself.time_value.set( self.new_worktime.spend_from_now() )\n\t\t\t\tself.after(100, update_time)\n\n\t\t\tupdate_time()\n\n\n\n\t\t\tself.details.pack(fill=X )\n\n\t\telse:\n\t\t\tprint('task not found')",
"def update(self, task=None):\n #update inputs\n if self.update_clips:\n self.update_clips=False\n self.clip_tex.load(self.pfm_clips)\n #self.gui_root.set_shader_input('clips', self.clip_tex)\n if self.update_pos_scale:\n self.update_pos_scale=False\n self.pos_scale_tex.load(self.pfm_pos_scale)\n #self.gui_root.set_shader_input('pos_scale', self.pos_scale_tex)\n #track mouse\n if base.mouseWatcherNode.hasMouse():\n mouse_pos = (base.mouseWatcherNode.get_mouse()+Point2(1.0, 1.0))/2.0\n mouse_pos.x=mouse_pos.x*self.win_size[0]\n mouse_pos.y=self.win_size[1]-(mouse_pos.y*self.win_size[1])\n self.mouse_cam.set_pos(mouse_pos.x, mouse_pos.y, 100)\n #dispatch click events if any\n if not self.mouse_is_down and self.last_frame_mouse_is_down:\n self.on_mouse_click()\n elif self.mouse_is_down:\n delta=mouse_pos-self.last_frame_mouse_pos\n self.on_mouse_hold(delta)\n #store for next frame\n self.last_frame_mouse_pos=mouse_pos\n self.last_frame_mouse_is_down=self.mouse_is_down\n #run task again, if called from a task\n if task:\n return task.again",
"def visualize_progress_f(self):\n if self.app != False:\n self.app.end()\n root = self.master\n self.app = VisualizeProgressUi(self.user_logs, self.game_data, \n master=root, index=7)\n self.app.mainloop()",
"def runEpisode(self):\n self.mainUpdate()",
"def run_async(self):\n self.remotes = list(self.get_remotes().keys())\n if not self.remotes:\n self.view.window().show_quick_panel([NO_REMOTES_MESSAGE], None)\n else:\n self.view.window().show_quick_panel(\n self.remotes,\n self.on_select_remote,\n flags=sublime.MONOSPACE_FONT\n )",
"def update(self):\n self.tk_gui.update()",
"def inc_ir_prog(self):\r\n\r\n global ir_prog\r\n ir_prog += 1\r\n self._parent._clock.draw_clock()\r\n self._parent._master.update_idletasks()\r\n self._parent._master.update()",
"def mainloop(self):\n while self.running:\n self.updateview();\n self.handlekey(self.scr.getch());",
"def test_terminal_v1_tasks_update(self):\n pass",
"def update(self):\n self.step += 1\n self.step %= 4\n self.l4.setText('waiting ' + self.step * '.')\n self.b1.setEnabled(False)\n self.b2.setEnabled(False)\n self.transaction.setEnabled(False)\n\n if self.transactionIsVisible:\n self.lTransaction.setVisible(False)\n self.transactionTable.setVisible(False)\n self.transactionIsVisible = False\n \n self.deactivateButton.emit()",
"def update(self) -> None:\n self.run_id += 1\n time.sleep(1)\n self.create_threads()\n print(\"Updated.\")",
"def exec_refresh(self):\n # Create new thread each time Refresh button is pressed.\n nodes_thread = Thread(target=self.blue_node.find_nodes, name=\"NodesThread\")\n nodes_thread.daemon = True\n\n nodes_thread.start()\n\n # Wait for searching for other nodes to finish.\n nodes_thread.join()\n\n self.gui_input_queue.put((CommandTypes.REFRESH, \"(Refresh) Searching for nodes has finished\"))",
"def run(self):\n\n self.main_window.show()\n self.work_thread.start()\n \n return self.exec_()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Computes the current mvp. Actually, this is the worldViewProjectionMatrix, but for convenience it's called mvp.
|
def _computeMVP(self):
camLens = self.showbase.camLens
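        # Projection matrix, converted so that it accepts view-space
        # coordinates in Y-up right-handed space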
projMat = Mat4.convertMat(
CSYupRight,
camLens.getCoordinateSystem()) * camLens.getProjectionMat()
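        # Conversion from the GSG's internal coordinate system to
        # Z-up right-handed space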
transformMat = TransformState.makeMat(
Mat4.convertMat(self.showbase.win.getGsg().getInternalCoordinateSystem(),
CSZupRight))
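        # View matrix: the render-to-camera transform, brought into the
        # coordinate system set up above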
modelViewMat = transformMat.invertCompose(
self.showbase.render.getTransform(self.showbase.cam)).getMat()
return UnalignedLMatrix4f(modelViewMat * projMat)
|
[
"def get_matrix(self):\n return self.mvp",
"def build_proj_matrix(self):\n self.p = Matrix44.perspective_projection(self.fov, \n self.width / self.height, self.near_plane, self.far_plane)\n\n self.mvp = numpy.array(self.p * self.m).astype(\"f4\")",
"def build_matrix(self):\n # Note that by nature, a camera perspective inverts everything\n # So we negate everything and also do it in reverse\n\n # Overrides PositionMatrix, reverse everything, ignore scale \n m = Matrix44.identity()\n m = Matrix44.from_translation(-1 * Vector3(self.position)) * m\n m = Matrix44.from_z_rotation(-math.radians(self.roll)) * m\n m = Matrix44.from_y_rotation(-math.radians(self.yaw)) * m\n m = Matrix44.from_x_rotation(-math.radians(self.pitch)) * m\n if self.tp:\n # Third person enabled\n m = Matrix44.from_translation([0,0,-self.tp_distance]) * m\n \n self.m = m\n self.mvp = numpy.array(self.p * self.m).astype(\"f4\")",
"def getCameraSpaceMatrix(self) -> \"SbDPMatrix\":\n return _coin.SbDPViewVolume_getCameraSpaceMatrix(self)",
"def projectionMatrix(*args, **kwargs):\n \n pass",
"def project(self, win_width, win_height, vision_field=512, viewer_dist=5):\n factor = vision_field / (viewer_dist + self.z)\n x = self.x * factor + win_width / 2\n y = -self.y * factor + win_height / 2\n return Point3D(x, y, 1)",
"def computeProjection(self):\n if (not self.MComputed):\n self.M = np.zeros((self.nZernike,self.nZernike,self.nHeight,self.nStars))\n for i in tqdm(range(self.nHeight), desc='Height'): \n for j in tqdm(range(self.nStars), desc='Stars'): \n if (self.numericalProjection):\n self.M[:,:,i,j] = projection.zernikeProjectionMatrixNumerical(self.nZernike, self.beta[i,j], self.t[i,j], self.angle[i,j], verbose=True, radius=128, includePiston=self.addPiston)\n else:\n self.M[:,:,i,j] = projection.zernikeProjectionMatrix(self.nZernike, self.beta[i,j], self.t[i,j], self.angle[i,j], verbose=True, includePiston=self.addPiston)\n np.savez('matrices/transformationMatrices_{0}.npz'.format(uuid.uuid4()), self.M, self.heights, self.nStars, self.nZernike, self.fov, self.DTel)\n self.stackProjection()",
"def getProjection(self):\r\n return self.__projection",
"def world_matrix(self) -> Matrix44:\n return self._world_matrix",
"def world_to_screen(self, p):\n\t\td = self.d or 1e8\n\t\t# TODO: add zoom effect\n\t\tnp = self.coor.transform(worldCoor, p)\n\t\tx, y, z = np[0, 0], np[1, 0], np[2, 0]\n\t\tk = d / (d + z)\n\t\treturn (self.w + k * x, self.h + k * y), (z >= 0)",
"def setProjection(self, perspective=False, viewport=0):\n cam = self.rens[viewport].GetActiveCamera()\n cam.SetParallelProjection(perspective)",
"def getCameraSpaceMatrix(self) -> \"SbMatrix\":\n return _coin.SbViewVolume_getCameraSpaceMatrix(self)",
"def getWorldToLocalMatrix(self) -> \"SbMatrix\":\n return _coin.SoDragger_getWorldToLocalMatrix(self)",
"def setView3D( self ):\n\t\t(vup,vpn,vrp,d,b,du,f,C,R) = \\\n\t\t\tself.config('vup','vpn','vrp','d','b','basis','f','cols','rows')\n\t\t\n\t\tdv = du * R / C\n\t\tU = vup.cross(vpn)\n\t\tvup = vpn.cross(U) # vrc needs to be orthogonal\n\t\tvtm = Mtx()\n\t\tvtm.translate(-vrp[0],-vrp[1],-vrp[2])\n\t\t\n\t\tU.normalize()\n\t\tvup.normalize()\n\t\tvpn.normalize()\n\t\tvtm.rotateXYZ( U, vup,vpn )\n\t\t\n\t\tvtm.translate(0,0,d)\n\t\t\n\t\t# scale to cvv\n\t\tvrp = vtm.form_vector( vrp )\n\t\tb += d\n\n\t\tvtm.scale(2*d/(b*du),2*d/(b*dv),1/b)\n\t\tvtm.transform[3,2] /= b\n\t\tf = ( vrp[2] + f ) / b\n\t\t\t\t\n\t\td /= b\n\t\tvtm.perspective( d )\n\n\t\tvtm.scale2D( -C/(2*d), -R/(2*d) )\n\t\tvtm.translate2D( C/2, R/2 )\n\t\t\n\t\tself._camera['vtm'] = vtm\n\t\tself.config(vup=vup,vrp=vrp,b=b,f=f,d=d)\n\n\t\tvtm.camera = self # tricksy cyclical hack, done with care\n\t\treturn vtm",
"def projected(self, point: Vector3) -> pygame.Vector2:\n # Check if point projects\n if point.z > 0:\n # Project onto viewport\n point = pygame.Vector2(\n self.observer.focal/point.z * point.x,\n self.observer.focal/point.z * point.y\n )\n # Return projected point\n return point\n else:\n # Point is not in front of observer, so not projected\n return None",
"def getProjectionMatrix(self) -> \"SbMatrix const &\":\n return _coin.SoCallbackAction_getProjectionMatrix(self)",
"def get_perspective_matrix():\n pp_src = np.array([(200,684),(1120,684),(542,475),(743,475)]).astype(np.float32)\n pp_dst = np.array([(320,720),(960,720),(320,0),(960,0)]).astype(np.float32)\n pp_mtx = cv2.getPerspectiveTransform(pp_src, pp_dst)\n pp_mtx_inv = cv2.getPerspectiveTransform(pp_dst, pp_src)\n return pp_mtx, pp_mtx_inv",
"def getProjectionPoint(self) -> \"SbVec3d const &\":\n return _coin.SbDPViewVolume_getProjectionPoint(self)",
"def point_projection(self, scene_point):\n dist = scene_point - self.position\n d = np.dot(dist, self.optical_axis())\n if d == 0:\n # to avoid explosion!!!\n d = np.finfo(np.float32).eps\n\n u = self.u0 + self.focal * np.dot(dist, self.horizontal_axis()) * self.bu / d\n v = self.v0 + self.focal * np.dot(dist, self.vertical_axis()) * self.bv / d\n return box_coord(u), box_coord(v)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns a handle to the light manager
|
def getLightManager(self):
return self.lightManager
|
[
"def light(self):\n return self._light",
"def light_details(self):\n return self.__fetch_dict(\n {'smartlife.iot.smartbulb.lightingservice': {'get_light_details': ''}}\n )",
"def light(self, name):\n if name is None:\n group = GroupController(name='{0}.light:{1}'.format(self.name, name))\n group.add_members(self._lights)\n return group\n try:\n return self._lights[name]\n except KeyError:\n return None",
"def _read_light(self):\n return self._readadc(LIGHT_ADC)",
"def __init__(self, hue_IP, hue_light=''):\n # Load logging config\n logging.config.dictConfig(configure_logger())\n self.logger = logging.getLogger('EDHue.HueLight')\n self.logger.debug('Initializing HueLightControl')\n self.star_red = 255\n self.star_green = 255\n self.star_blue = 255\n self.star_bright = 0.8\n self.red = 1\n self.green = 1\n self.blue = 1\n self.bright = 0.8\n self.ciex = 0.3122\n self.ciey = 0.3282\n self.color_loop = False\n self.state = False\n self.alert_status = 'none'\n self.light = hue_light\n\n try:\n self.logger.debug('Trying to connect to Hue bridge')\n self.validate_connection(hue_IP)\n self.bridge = phue.Bridge(hue_IP)\n except phue.PhueRequestTimeout:\n self.logger.debug('Failed to connect to Hue bridge')\n raise\n self.logger.debug('Getting light status.')\n if self.light != '':\n self.logger.debug('Light object: ' + str(self.light))\n self.state = self.bridge.get_light(light_id=self.light, parameter='on')\n self.logger.debug('Light status: ' + str(self.state))\n else:\n self.logger.debug(\"Light undefined. Unable to control hue light.\\n\"\n \"n.b.: This is expected if a light hasn't been \"\n \"selected yet.\")",
"def get_manager(self):\n return self.__manager",
"def load_light_color(self):\n if hasattr(self, \"lightcolor\"):\n return self.lightcolor\n try:\n lightcolor = list(self.load_image(\"light_normal.png\").getdata())\n except Exception:\n logging.warning(\"Light color image could not be found.\")\n lightcolor = None\n self.lightcolor = lightcolor\n return lightcolor",
"def manager():\n return _global_manager",
"def handle(self):\n return self.__texture",
"def getLightType(self):\n return LightType.NoType",
"def add_light(self, light):\n self.viewer.SetLightOn(light.GetHandle())",
"def get_manager():\n global _MANAGER\n if _MANAGER is None:\n _MANAGER = ResourceManager()\n return _MANAGER",
"def lighting_process(db, controls):\n try:\n # Get the current hour & the corresponding RGB data\n hour = str(datetime.datetime.now().hour)\n rgb_data = db['RGB_data'][hour]\n red = rgb_data['R']\n green = rgb_data['G']\n blue = rgb_data['B']\n\n # Check for manual override on the RGB LED Strip\n if not db['Manual Overrides']['RGB LED']:\n # Adjust the RGB Accordingly and update the status\n controls['RGB LED'].adjust_color(red_content=red, green_content=green, blue_content=blue)\n db['RGB LED Status'] = [red, green, blue]\n\n # Check for manual override on the UV LED Strip\n if not db['Manual Overrides']['UV LED']:\n # Get the UV light data for the current hour\n if db['UV_data'][hour]:\n controls['UV LED'].turn_on()\n db['UV LED Status'] = \"ON\"\n else:\n controls['UV LED'].turn_off()\n db['UV LED Status'] = \"OFF\"\n except Exception as err:\n return err\n return 0",
"def getLightModel(arg1: 'SoState') -> \"int32_t\":\n return _coin.SoLazyElement_getLightModel(arg1)",
"def light_detection(self) -> bool:\n return self.details['light_detection_switch']",
"def lock_manager(self):\n return self.__lock_manager",
"def GetManager(*args, **kwargs):\n return _aui.AuiManager_GetManager(*args, **kwargs)",
"def get_light_color(self, light_name: str):\n self.__send_command(CommandsBytes.GET_LIGHT_COLOR)\n # Send the name\n self.__send_string(light_name)\n result = self.__receive_string()\n if result == \"ok\":\n # Receive the color\n r = self.__receive_int()\n g = self.__receive_int()\n b = self.__receive_int()\n return r, g, b\n print(\"Error getting light color\")\n return None",
"def lightsCollectionInstance(self):\n \n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns the default shader for objects
|
def getDefaultObjectShader(self, tesselated=False):
if not tesselated:
shader = Shader.load(Shader.SLGLSL,
"DefaultObjectShader/vertex.glsl",
"DefaultObjectShader/fragment.glsl")
else:
self.warn(
"Tesselation is only experimental! Remember "
"to convert the geometry to patches first!")
shader = Shader.load(Shader.SLGLSL,
"DefaultObjectShader/vertex.glsl",
"DefaultObjectShader/fragment.glsl",
"",
"DefaultObjectShader/tesscontrol.glsl",
"DefaultObjectShader/tesseval.glsl")
return shader
|
[
"def wireframeShader(self):\n return self._wireframe_shader",
"def solidShader(self):\n return self._solid_shader",
"def solidFlatShader(self):\n return self._solid_flat_shader",
"def ShaderObjects(self, *args):\n return _Graphic3d.Graphic3d_ShaderProgram_ShaderObjects(self, *args)",
"def _init_materials(self):\r\n default_shader_pink = GLProgram(xml=GLRenderer.DEFAULT_SHADER)\r\n self.default_mat = Material(default_shader_pink)\r\n self.current_material = self.default_mat\r\n self.current_material._use()",
"def noLightWireframeShader(self):\n return self._nolight_wireframe_shader",
"def noLightSolidShader(self):\n return self._nolight_solid_shader",
"def CreateShader():\n\n\n def inText(windowName=\"ShaderName\", type=\"ZebraRedshift\"):\n text1, accept = QInputDialog.getText(None, type, windowName)\n\n if accept:\n\n return text1\n\n else:\n return None\n\n\n InputText = inText()\n\n if InputText:\n Mesh = pm.ls(type=\"mesh\", dag=True, selection=True)[0]\n\n GetParent = Mesh.getAllParents()[-1]\n\n ShaderSG = pm.sets(renderable=True, noSurfaceShader=True, empty=True,\n name=(GetParent + '_' + InputText + '_' + 'SG'))\n\n ShaderRaySwitch = pm.shadingNode('RedshiftRaySwitch', asShader=True,\n name=(GetParent + '_' + InputText + '_' + 'SW'))\n\n ShaderStandard = pm.shadingNode('RedshiftMaterial', asShader=True, name=(GetParent + '_' + InputText + '_' + 'SH'))\n\n ShaderSimple = pm.shadingNode('RedshiftMaterial', asShader=True,\n name=(GetParent + '_' + InputText + '_' + 'Simple'))\n ShaderSimple.refl_color.set(0, 0, 0)\n ShaderSimple.refl_weight.set(0)\n\n ShaderRaySwitch.outColor >> ShaderSG.surfaceShader\n\n ShaderRaySwitch.cameraSwitchFrontBack.set(1)\n ShaderStandard.outColor >> ShaderRaySwitch.cameraColor\n ShaderStandard.outColor >> ShaderRaySwitch.cameraColorBack\n\n ShaderRaySwitch.reflectionSwitch.set(1)\n ShaderSimple.outColor >> ShaderRaySwitch.reflectionColor\n\n ShaderRaySwitch.refractionSwitch.set(1)\n ShaderSimple.outColor >> ShaderRaySwitch.refractionColor\n\n ShaderRaySwitch.giSwitch.set(1)\n ShaderSimple.outColor >> ShaderRaySwitch.giColor\n\n pm.select(Mesh)\n\n pm.hyperShade(a=ShaderRaySwitch)\n\n pm.inViewMessage(amg=\"<hl>Create</hl>_%s_Shader\" % (InputText), font='Bold', pos='midCenter',\n fade=True)",
"def Type(self, *args):\n return _Graphic3d.Graphic3d_ShaderObject_Type(self, *args)",
"def CreateCgShader(self):\n dirPath = self.GetShadersDirectory()\n shaderPath = self.get_unique_asset_name(SHADER_FILE_NAME, dirPath)\n shader = ''\n self.CreateAsset(shaderPath, shader)",
"def create_empty_shadernetwork(self):\r\n NODE_COLOR = (0.282353, 0.819608, 0.8)\r\n NODE_POS = (3, 0)\r\n\r\n custom_name = self.ui.sel_asset_name.text().upper()\r\n\r\n scene_root = hou.node('/obj/')\r\n shaderpack_name = 'SHADER_' + custom_name\r\n shaderpack_root = '/obj/' + shaderpack_name\r\n if hou.node(shaderpack_root):\r\n hou.node(shaderpack_root).destroy()\r\n else:\r\n pass\r\n shaderpack_node = scene_root.createNode('geo', shaderpack_name)\r\n shaderpack_node.setColor(hou.Color(*NODE_COLOR))\r\n\r\n shaderpack_node.createNode('shopnet', 'shopnet')\r\n shaderpack_node.createNode('matnet', 'matnet').setPosition(\r\n hou.Vector2(*NODE_POS)\r\n )",
"def readShader(self, *args):\r\n return _osgDB.Input_readShader(self, *args)",
"def _create_shader_group(self, material, name='shader'):\n shader_group = cmds.sets(renderable=True, noSurfaceShader=True, empty=True, name=name)\n cmds.connectAttr(material + '.outColor', shader_group + '.surfaceShader')\n return shader_group",
"def Source(self, *args):\n return _Graphic3d.Graphic3d_ShaderObject_Source(self, *args)",
"def getShaderSuffix():\n\n if float(fslgl.GL_COMPATIBILITY) < 2.1: return 'prog'\n else: return 'glsl'",
"def readShader(self, *args):\r\n return _osgDB.DeprecatedDotOsgWrapperManager_readShader(self, *args)",
"def _makeShader(shaderEffects, baseShader=None):\n\n \n \n source=shadereffects.makeSource(shaderEffects, baseShader)\n key=source\n if key not in builtEffectsShaders:\n if useShaderFiles:\n # preflatten should not impact anything much, but lets us get the full names\n shaderEffects=[s.flatten() for s in shaderEffects]\n name='debug('+','.join([e.name for e in shaderEffects])+')'\n outLoc='ShadersOut/'+name+'.sha'\n print 'Making Shader: '+outLoc\n \n builtEffectsShaders[key]=Shader.make(source)\n \n if useShaderFiles:\n fOut=open(outLoc, 'w')\n fOut.write(source)\n fOut.close()\n \n return builtEffectsShaders[key]",
"def Graphic3d_ShaderObject_CreateFromSource(*args):\n return _Graphic3d.Graphic3d_ShaderObject_CreateFromSource(*args)",
"def getAllShaders(cls, *args, **kwargs):\r\n return pm.ls(type='cgfxShader')"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets a scattering model to use. Only has an effect if enableScattering is enabled.
|
def setScattering(self, scatteringModel):
self.debug("Loading scattering model ..")
if not self.settings.enableScattering:
self.error("You cannot set a scattering model as scattering is not"
" enabled in your pipeline.ini!")
return
self.lightingComputeContainer.setShaderInput(
"transmittanceSampler", scatteringModel.getTransmittanceResult())
self.lightingComputeContainer.setShaderInput(
"inscatterSampler", scatteringModel.getInscatterTexture())
scatteringModel.bindTo(
self.lightingComputeContainer, "scatteringOptions")
|
[
"def enableDefaultEarthScattering(self):\n earthScattering = Scattering()\n\n scale = 1000000000\n earthScattering.setSettings({\n \"atmosphereOffset\": Vec3(0, 0, - (6360.0 + 9.5) * scale),\n \"atmosphereScale\": Vec3(scale)\n })\n earthScattering.precompute()\n self.setScattering(earthScattering)",
"def configureScatterSimulation(self, scatterSimulation):\n\t\ttry:\n\t\t\tscatterSimulation.gasJetRadius = self.__getNumericFieldValue(\"gasJetDiameter\") / 2.0\n\t\t\tscatterSimulation.gasJetIntersectionDistance = self.__getNumericFieldValue(\"gasJetIntersectionDistance\")\n\t\t\tscatterSimulation.gasJetCosineSquaredDistribution = self.mainWindow.gasJetCosineSquaredDistribution.isChecked()\n\n\t\t\tscatterSimulation.electronBeamRadius = self.__getNumericFieldValue(\"electronBeamDiameter\") / 2.0\n\t\t\tscatterSimulation.electronsCount = self.__getNumericFieldValue(\"electronsCount\")\n\n\t\t\tscatterSimulation.laserBeamRadius = self.__getNumericFieldValue(\"laserBeamDiameter\") / 2.0\n\t\t\tscatterSimulation.laserBeamIntersectionDistance = self.__getNumericFieldValue(\"laserBeamIntersectionDistance\")\n\t\t\tscatterSimulation.laserBeamApexLength = self.__getNumericFieldValue(\"laserBeamApexLength\")\n\t\t\tscatterSimulation.laserBeamWavelength = self.__getNumericFieldValue(\"laserBeamWavelength\")\n\t\t\tscatterSimulation.laserBeamElectronEnergy = self.__getNumericFieldValue(\"laserBeamElectronEnergy\")\n\t\t\tscatterSimulation.laserBeamPower = self.__getNumericFieldValue(\"laserBeamPower\")\n\t\t\tscatterSimulation.laserBeamGaussianDistribution = self.mainWindow.laserBeamGaussianDistribution.isChecked()\n\t\texcept ValueError as exception:\n\t\t\terrorMessage = QMessageBox.critical(self, \"Input Error\", ('Could not understand the value of the field \"%s\".\\n\\nPlease make sure that it\\'s a number.' % exception.fieldName))\n\t\t\treturn False\n\n\t\t# These are not implemented yet\n\t\tscatterSimulation.horizontalAngleInDegrees = 90\n\t\tscatterSimulation.maximumBoundLength = 1e10\n\t\tscatterSimulation.laserBeamPolarizationAngleInDegrees = 0\n\n\t\treturn scatterSimulation",
"def add_scatter(self, x, y, **props):\n # self._add_trace(_scatter, x, y, props)\n self._add_trace(igo.Scatter, x, y, self.defs, props)\n return self",
"def show_scatterplot(self, *args, **kwargs):\n raise NotImplementedError()",
"def toggle_data_line_scatter (self):\n if len(self.plot_data)==0: return\n cur_linestyle, cur_marker, alpha = self.get_current_data_style()\n new_marker, new_linestyle, alpha = data_line_scatter(cur_linestyle,cur_marker,alpha)\n self.apply_prev_data_style(new_linestyle,new_marker,alpha)",
"def setUseGlobalSmootherFilter(self, use_FGS=...) -> None:\n ...",
"def scatter(self, dim, index, source): # real signature unknown; restored from __doc__\n pass",
"def addSeriesScatterPlot(self, dataSrc, fieldY, fieldX = None, fieldLabel = None):\n self.graphSeries.append(_SeriesScatterPlot(dataSrc, fieldY, fieldX, fieldLabel))",
"def add_scatter_plot_step(self, attributes=None):\n if attributes is None:\n attributes = self.DEFAULT_SCATTER_PLOT_ATTRIBUTES\n scatter_dir = os.path.join(self.eval_dir, \"scatter\")\n\n def make_scatter_plot(config_nick, rev1, rev2, attribute):\n name = \"-\".join([self.name, rev1, rev2, attribute, config_nick])\n print \"Make scatter plot for\", name\n algo1 = \"%s-%s\" % (rev1, config_nick)\n algo2 = \"%s-%s\" % (rev2, config_nick)\n report = ScatterPlotReport(\n filter_config=[algo1, algo2],\n attributes=[attribute],\n get_category=lambda run1, run2: run1[\"domain\"],\n legend_location=(1.3, 0.5))\n report(\n self.eval_dir,\n os.path.join(scatter_dir, rev1 + \"-\" + rev2, name))\n\n def make_scatter_plots():\n for config in self._configs:\n for rev1, rev2 in itertools.combinations(self._revisions, 2):\n for attribute in self.get_supported_attributes(\n config.nick, attributes):\n make_scatter_plot(config.nick, rev1, rev2, attribute)\n\n self.add_step(Step(\"make-scatter-plots\", make_scatter_plots))",
"def apply_scattering_rules(self):\n pass",
"def shap_plot(self, name, **kw):\n shap_plot(\n X=self.x_train,\n y=self.y_train,\n ct=self.ct,\n model=self.models[name],\n **kw)",
"def scatter_(self, dim, index, src): # real signature unknown; restored from __doc__\n pass",
"def create_scattering(self, plot_name=\"S Parameter Plot Nominal\", sweep_name=None, port_names=None, port_excited=None,\n variations=None):\n\n Families = [\"Freq:=\", [\"All\"]]\n if variations:\n Families += variations\n else:\n Families += self.get_nominal_variation()\n if not sweep_name:\n sweep_name = self.existing_analysis_sweeps[1]\n elif sweep_name not in self.existing_analysis_sweeps:\n self._messenger.add_error_message(\n \"Setup {} doesn't exist in the Setup list.\".format(sweep_name))\n return False\n if not port_names:\n port_names = self.modeler.get_excitations_name()\n full_matrix = False\n if not port_excited:\n port_excited = port_names\n full_matrix = True\n if type(port_names) is str:\n port_names = [port_names]\n if type(port_excited) is str:\n port_excited = [port_excited]\n list_y = []\n for p in list(port_names):\n for q in list(port_excited):\n if not full_matrix:\n list_y.append(\"dB(S(\" + p + \",\" + q + \"))\")\n elif port_excited.index(q) >= port_names.index(p):\n list_y.append(\"dB(S(\" + p + \",\" + q + \"))\")\n\n Trace = [\"X Component:=\", \"Freq\", \"Y Component:=\", list_y]\n solution_data = \"\"\n if self.solution_type == \"DrivenModal\":\n solution_data = \"Modal Solution Data\"\n elif self.solution_type == \"DrivenTerminal\":\n solution_data = \"Terminal Solution Data\"\n if solution_data != \"\":\n # run CreateReport function\n\n self.post.oreportsetup.CreateReport(\n plot_name,\n solution_data,\n \"Rectangular Plot\",\n sweep_name,\n [\"Domain:=\", \"Sweep\"],\n Families,\n Trace,\n [])\n return True\n return False",
"def create_scatter_plot(self):\n xy = self.get_x_and_y_as_dict()\n x = xy[\"x\"]\n y = xy[\"y\"]\n plt.scatter(x, y)\n plt.xlabel(\"x\")\n plt.ylabel(\"y\")\n plt.title(\"Scatter plot of x and y values\")\n plt.savefig(f\"{self.save_directory}/task_2_scatter_plot.png\")",
"def setupModelInstance(self, geom, dssatexe):\n return super(Model, self).setupModelInstance(geom, \"DSSAT_Ex.exe\")",
"def setTransportModel(self, transp, withSoret = 0):\n itr = transp.transport_hndl()\n _cantera.stflow_setTransport(self._hndl, itr, withSoret)",
"def notebook_scatter(self,**kwargs):\n \n cols_x = self.data.columns\n cols_y = [self.data.columns[1],self.data.columns[0],*self.data.columns[2:]]\n\n @interact(\n x = cols_x,\n y = cols_y,\n hue = [self.dataset.target,*self.dataset.features,None],\n size = [None,*self.data.columns]\n )\n def show(x,y,hue,size):\n self.show_scatter(x,y,hue,size,**kwargs)",
"def add_scatterplot(self):\n new_plot = {\"type\":\"xy\", \"xseries\":\"\", \"yseries\":\"\"}\n parent = QtGui.QStandardItem(\"New scatter plot\")\n for key in new_plot:\n val = new_plot[key]\n child0 = QtGui.QStandardItem(key)\n child1 = QtGui.QStandardItem(str(val))\n parent.appendRow([child0, child1])\n self.sections[\"Plots\"].appendRow(parent)\n self.update_tab_text()",
"def show_scatterplot(self, array, name=None, *args, **kwargs):\n\n figure = super().show_scatterplot(array, name, show=False, *args, **kwargs)\n figure_image = figure_to_image(figure)\n\n return figure_image"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Adds a standard scattering model, representing the atmosphere of the earth. This is a shortcut for creating a Scattering instance and precomputing it.
|
def enableDefaultEarthScattering(self):
earthScattering = Scattering()
scale = 1000000000
earthScattering.setSettings({
"atmosphereOffset": Vec3(0, 0, - (6360.0 + 9.5) * scale),
"atmosphereScale": Vec3(scale)
})
earthScattering.precompute()
self.setScattering(earthScattering)
|
[
"def setScattering(self, scatteringModel):\n self.debug(\"Loading scattering model ..\")\n if not self.settings.enableScattering:\n self.error(\"You cannot set a scattering model as scattering is not\"\n \" enabled in your pipeline.ini!\")\n return\n\n self.lightingComputeContainer.setShaderInput(\n \"transmittanceSampler\", scatteringModel.getTransmittanceResult())\n self.lightingComputeContainer.setShaderInput(\n \"inscatterSampler\", scatteringModel.getInscatterTexture())\n scatteringModel.bindTo(\n self.lightingComputeContainer, \"scatteringOptions\")",
"def add_scatter(self, x, y, **props):\n # self._add_trace(_scatter, x, y, props)\n self._add_trace(igo.Scatter, x, y, self.defs, props)\n return self",
"def setupModelInstance(self, geom, dssatexe):\n return super(Model, self).setupModelInstance(geom, \"DSSAT_Ex.exe\")",
"def add_scatterplot(self):\n new_plot = {\"type\":\"xy\", \"xseries\":\"\", \"yseries\":\"\"}\n parent = QtGui.QStandardItem(\"New scatter plot\")\n for key in new_plot:\n val = new_plot[key]\n child0 = QtGui.QStandardItem(key)\n child1 = QtGui.QStandardItem(str(val))\n parent.appendRow([child0, child1])\n self.sections[\"Plots\"].appendRow(parent)\n self.update_tab_text()",
"def new_solar_system():\n sun = Particle()\n sun.name = 'SUN'\n sun.mass = 1.0 | units.MSun\n sun.radius = 1.0 | units.RSun\n planets = _planets_only()\n \n particles = Particles()\n particles.add_particle(sun)\n particles.add_particles(planets)\n particles.move_to_center()\n return particles",
"def create_single_local_smoother(self,sz,spacing):\n from . import module_parameters as pars\n s_m_params = pars.ParameterDict()\n s_m_params['smoother']['type'] = 'gaussian'\n s_m_params['smoother']['gaussian_std'] = self.params['forward_model']['smoother']['deep_smoother'][\n 'deep_network_local_weight_smoothing']\n self.embedded_smoother = SF.SmootherFactory(sz[2:], spacing).create_smoother(s_m_params)",
"def add_3d_scatter(self, root, params, color_bar=True, alpha=1, extra_thin=1, scatter_size=None,\n ax=None, alpha_samples=False, **kwargs):\n ax = self.get_axes(ax)\n params = self.get_param_array(root, params)\n if alpha_samples:\n mcsamples = self.sample_analyser.samples_for_root(root)\n weights, pts = mcsamples.weights, mcsamples.samples\n else:\n pts = self.sample_analyser.load_single_samples(root)\n weights = 1\n mcsamples = None\n names = self.param_names_for_root(root)\n samples = []\n for param in params:\n if hasattr(param, 'getDerived'):\n samples.append(param.getDerived(self._make_param_object(names, pts)))\n else:\n samples.append(pts[:, names.numberOfName(param.name)])\n fixed_color = kwargs.get('fixed_color') # if actually just a plain scatter plot\n if mcsamples:\n # use most samples, but alpha with weight\n from matplotlib.cm import ScalarMappable\n from matplotlib.colors import Normalize, to_rgb\n max_weight = np.max(weights)\n dup_fac = 4\n filt = weights > max_weight / (100 * dup_fac)\n x = samples[0][filt]\n y = samples[1][filt]\n z = samples[2][filt]\n # split up high-weighted samples into multiple copies\n weights = weights[filt] / max_weight * dup_fac\n intweights = np.ceil(weights)\n thin_ix = mcsamples.thin_indices(1, intweights)\n x = x[thin_ix]\n y = y[thin_ix]\n z = z[thin_ix]\n weights /= intweights\n weights = weights[thin_ix]\n mappable = ScalarMappable(Normalize(z.min(), z.max()), self.settings.colormap_scatter)\n mappable.set_array(z)\n cols = mappable.to_rgba(z)\n if fixed_color:\n cols[:, :3] = to_rgb(fixed_color)\n cols[:, 3] = weights / dup_fac * alpha\n alpha = None\n self.last_scatter = mappable\n ax.scatter(x, y, edgecolors='none', s=scatter_size or self.settings.scatter_size,\n c=cols, alpha=alpha)\n else:\n if extra_thin > 1:\n samples = [pts[::extra_thin] for pts in samples]\n self.last_scatter = ax.scatter(samples[0], samples[1], edgecolors='none',\n s=scatter_size or self.settings.scatter_size,\n c=fixed_color or samples[2],\n cmap=self.settings.colormap_scatter, alpha=alpha)\n\n if color_bar and not fixed_color:\n self.last_colorbar = self.add_colorbar(params[2], mappable=self.last_scatter, ax=ax, **kwargs)\n xbounds = [min(samples[0]), max(samples[0])]\n r = xbounds[1] - xbounds[0]\n xbounds[0] -= r / 20\n xbounds[1] += r / 20\n ybounds = [min(samples[1]), max(samples[1])]\n r = ybounds[1] - ybounds[0]\n ybounds[0] -= r / 20\n ybounds[1] += r / 20\n return [xbounds, ybounds]",
"def create_scatter_plot(self):\n xy = self.get_x_and_y_as_dict()\n x = xy[\"x\"]\n y = xy[\"y\"]\n plt.scatter(x, y)\n plt.xlabel(\"x\")\n plt.ylabel(\"y\")\n plt.title(\"Scatter plot of x and y values\")\n plt.savefig(f\"{self.save_directory}/task_2_scatter_plot.png\")",
"def ellModel(galaxy):\n\n ellipse=defineEllipse(galaxy) # unsheared ellipse\n diskRadiusSheared, diskBASheared, diskPASheared=shearEllipse(ellipse, galaxy.g1, galaxy.g2)\n ellmodel=np.array([diskPASheared, diskBASheared]) # model sheared ellipse observables\n\n return ellmodel",
"def apply_scattering_rules(self):\n pass",
"def add_scatterplot(self):\n new_plot = {\"Type\":\"xy\",\"Title\":\"\", \"XSeries\":\"[]\", \"YSeries\":\"[]\"}\n parent = QtGui.QStandardItem(\"New scatter plot\")\n for key in new_plot:\n val = new_plot[key]\n child0 = QtGui.QStandardItem(key)\n child0.setEditable(False)\n child1 = QtGui.QStandardItem(str(val))\n parent.appendRow([child0, child1])\n self.sections[\"Plots\"].appendRow(parent)\n self.update_tab_text()",
"def get_model_atmosphere(self, grid='odfnew'):\n if self.teff == None or self.logg == None or self.feh == None:\n logger.error('To create model atmosphere, star must have all '+\n 'three fundamental parameters: Teff, logg, and '+\n '[Fe/H].')\n return None\n if hasattr(self, 'feh_model'):\n feh = self.feh_model\n else:\n feh = self.feh\n x = modatm.interpolate(self.teff, self.logg,\n feh, grid)\n if x != None:\n self.model_atmosphere = x\n self.model_atmosphere_grid = grid",
"def __add__(s,o):\n #\n # First, check if the particles are compatible\n #\n if ((s.np > 1) or (o.np>1)):\n raise TypeError('Cannot add multi-particle objects')\n if ((s.nlam != o.nlam) or (np.abs((s.lam-o.lam)/s.lam).any()>1e-4)):\n raise RuntimeError('Wavelength grids differ')\n if (s.scat):\n if ((s.nang != o.nang) or\n (np.abs((s.scatang[1:]-o.scatang[1:])/s.scatang[1:]).any()>1e-4)):\n # We don't check the first value, could be 0\n raise RuntimeError('Angular grids differ')\n if (s.norm != o.norm):\n raise RuntimeError('Scattering normalizations differ')\n #\n # Now do the adding\n #\n x = copy.deepcopy(s)\n x.kabs = x.kabs+o.kabs\n x.ksca = x.ksca+o.ksca\n x.kext = x.kext+o.kext\n # F11 is linear in the integral for the computation of g.\n # So we can just take the weighted mean for g.\n x.gsca = (x.ksca*x.gsca + o.ksca*o.gsca) / (x.ksca+o.ksca)\n x.massscale = s.massscale + o.massscale\n\n if s.scat:\n # There is a scattering matrix.\n if s.norm == 'hovenier':\n # Add, weighted by kappa_scat\n ws = s.ksca[:,:,None]\n wo = o.ksca[:,:,None]\n wn = ws+wo\n else:\n # Just add the values\n ws, wo, wn = 1.,1.,1.\n x.f11 = (s.f11*ws + o.f11*wo) / wn\n x.f12 = (s.f12*ws + o.f12*wo) / wn\n x.f22 = (s.f22*ws + o.f22*wo) / wn\n x.f33 = (s.f33*ws + o.f33*wo) / wn\n x.f34 = (s.f34*ws + o.f34*wo) / wn\n x.f44 = (s.f44*ws + o.f44*wo) / wn\n #\n # Invalidate attributes that no longer make sense.\n #\n x.materials = np.hstack((x.materials,o.materials))\n if (x.fmax != o.fmax ): x.fmax = -1\n if (x.pcore != o.pcore ): x.pcore = -1\n if (x.pmantle != o.pmantle): x.pmantle = -1\n if (x.amin != o.amin ): x.amin = -1\n if (x.amax != o.amax ): x.amax = -1\n if (x.nsub != o.nsub ): x.nsub = -1\n if (x.apow != o.apow ): x.apow = -1\n if (x.rho != o.rho ): x.rho = -1\n if (x.chop != o.chop ): x.chop = -1\n x.a1,x.a2,x.a3 = -1,-1,-1\n\n if hasattr(s, 'kplanck'):\n kplanck = -1\n kross = -1 \n temp = -1\n\n return x",
"def update_rasters(self):\n # Update preview_overview_scatter_plot\n self.plots.preview_overview_scatter_plot.setData(self.plots_data.all_spots)\n if self.Includes2DActiveWindowScatter:\n self.plots.scatter_plot.setData(self.plots_data.all_spots)",
"def createTSNE():\r\n \r\n embedded = TSNE(n_components=2).fit_transform(allData)\r\n if(not outliers):\r\n embedded = removeOutliers(embedded)\r\n\r\n clusters = kmeans2(embedded,CANCER_TYPES,minit='points')\r\n labels = clusters[1]\r\n clusters = clusters[0]\r\n \r\n\r\n cancerType = 0\r\n for i in range(embedded.shape[0]):\r\n if(startingPositions[cancerType+1]==i):\r\n cancerType+=1\r\n\r\n plt.scatter(embedded[i][0],embedded[i][1],c=colors[cancerType],label=cancerNames[cancerType],marker=markerStyles[labels[i] % len(markerStyles)])\r\n\r\n handles, labels = plt.gca().get_legend_handles_labels()\r\n newLabels, newHandles = [], []\r\n for handle, label in zip(handles, labels):\r\n if label not in newLabels:\r\n newLabels.append(label)\r\n newHandles.append(handle)\r\n\r\n\r\n plt.scatter(clusters[:,0],clusters[:,1],s=80,c=\"red\")\r\n plt.legend(newHandles,newLabels)\r\n plt.show()",
"def add_scatter_plot_step(self, attributes=None):\n if attributes is None:\n attributes = self.DEFAULT_SCATTER_PLOT_ATTRIBUTES\n scatter_dir = os.path.join(self.eval_dir, \"scatter\")\n\n def make_scatter_plot(config_nick, rev1, rev2, attribute):\n name = \"-\".join([self.name, rev1, rev2, attribute, config_nick])\n print \"Make scatter plot for\", name\n algo1 = \"%s-%s\" % (rev1, config_nick)\n algo2 = \"%s-%s\" % (rev2, config_nick)\n report = ScatterPlotReport(\n filter_config=[algo1, algo2],\n attributes=[attribute],\n get_category=lambda run1, run2: run1[\"domain\"],\n legend_location=(1.3, 0.5))\n report(\n self.eval_dir,\n os.path.join(scatter_dir, rev1 + \"-\" + rev2, name))\n\n def make_scatter_plots():\n for config in self._configs:\n for rev1, rev2 in itertools.combinations(self._revisions, 2):\n for attribute in self.get_supported_attributes(\n config.nick, attributes):\n make_scatter_plot(config.nick, rev1, rev2, attribute)\n\n self.add_step(Step(\"make-scatter-plots\", make_scatter_plots))",
"def send_solid_earth(self):\n self._generate_solid_earth_forcing_file()\n self._describe_grid() # See section general send\n self._describe_variables() # See section general send",
"def scatter(self, x, y, s=20, c='b', marker='o', cmap=None, norm=None,\n vmin=None, vmax=None, alpha=1.0, linewidths=None,\n faceted=True, verts=None,\n **kwargs):\n\n if not self._hold: self.cla()\n\n syms = { # a dict from symbol to (numsides, angle)\n 's' : (4,math.pi/4.0,0), # square\n 'o' : (20,0,0), # circle\n '^' : (3,0,0), # triangle up\n '>' : (3,math.pi/2.0,0), # triangle right\n 'v' : (3,math.pi,0), # triangle down\n '<' : (3,3*math.pi/2.0,0), # triangle left\n 'd' : (4,0,0), # diamond\n 'p' : (5,0,0), # pentagram\n 'h' : (6,0,0), # hexagon\n '8' : (8,0,0), # octagon\n '+' : (4,0,2), # plus\n 'x' : (4,math.pi/4.0,2) # cross\n }\n\n self._process_unit_info(xdata=x, ydata=y, kwargs=kwargs)\n\n x, y, s, c = delete_masked_points(x, y, s, c)\n\n # The inherent ambiguity is resolved in favor of color\n # mapping, not interpretation as rgb or rgba.\n\n if not is_string_like(c):\n sh = npy.shape(c)\n if len(sh) == 1 and sh[0] == len(x):\n colors = None # use cmap, norm after collection is created\n else:\n colors = mcolors.colorConverter.to_rgba_list(c, alpha)\n else:\n colors = mcolors.colorConverter.to_rgba_list(c, alpha)\n\n if not iterable(s):\n scales = (s,)\n else:\n scales = s\n\n if faceted: edgecolors = None\n else: edgecolors = 'None'\n\n sym = None\n symstyle = 0\n\n # to be API compatible\n if marker is None and not (verts is None):\n marker = (verts, 0)\n verts = None\n\n if is_string_like(marker):\n # the standard way to define symbols using a string character\n sym = syms.get(marker)\n if sym is None and verts is None:\n raise ValueError('Unknown marker symbol to scatter')\n numsides, rotation, symstyle = syms[marker]\n\n elif iterable(marker):\n # accept marker to be:\n # (numsides, style, [angle])\n # or\n # (verts[], style, [angle])\n\n if len(marker)<2 or len(marker)>3:\n raise ValueError('Cannot create markersymbol from marker')\n\n if cbook.is_numlike(marker[0]):\n # (numsides, style, [angle])\n\n if len(marker)==2:\n numsides, rotation = marker[0], 0.\n elif len(marker)==3:\n numsides, rotation = marker[0], marker[2]\n sym = True\n\n if marker[1] in (1,2):\n symstyle = marker[1]\n\n else:\n verts = npy.asarray(marker[0])\n\n if sym is not None:\n if symstyle==0:\n\n collection = mcoll.RegularPolyCollection(\n self.figure.dpi,\n numsides, rotation, scales,\n facecolors = colors,\n edgecolors = edgecolors,\n linewidths = linewidths,\n offsets = zip(x,y),\n transOffset = self.transData,\n )\n elif symstyle==1:\n collection = mcoll.StarPolygonCollection(\n self.figure.dpi,\n numsides, rotation, scales,\n facecolors = colors,\n edgecolors = edgecolors,\n linewidths = linewidths,\n offsets = zip(x,y),\n transOffset = self.transData,\n )\n elif symstyle==2:\n collection = mcoll.AsteriskPolygonCollection(\n self.figure.dpi,\n numsides, rotation, scales,\n facecolors = colors,\n edgecolors = edgecolors,\n linewidths = linewidths,\n offsets = zip(x,y),\n transOffset = self.transData,\n )\n else:\n # rescale verts\n rescale = npy.sqrt(max(verts[:,0]**2+verts[:,1]**2))\n verts /= rescale\n\n scales = npy.asarray(scales)\n scales = npy.sqrt(scales * self.figure.dpi.get() / 72.)\n if len(scales)==1:\n verts = [scales[0]*verts]\n else:\n # todo -- make this nx friendly\n verts = [verts*s for s in scales]\n collection = mcoll.PolyCollection(\n verts,\n facecolors = colors,\n edgecolors = edgecolors,\n linewidths = linewidths,\n offsets = zip(x,y),\n transOffset = self.transData,\n )\n collection.set_transform(mtrans.identity_transform())\n collection.set_alpha(alpha)\n 
collection.update(kwargs)\n\n if colors is None:\n if norm is not None: assert(isinstance(norm, mcolors.Normalize))\n if cmap is not None: assert(isinstance(cmap, mcolors.Colormap))\n collection.set_array(npy.asarray(c))\n collection.set_cmap(cmap)\n collection.set_norm(norm)\n\n if vmin is not None or vmax is not None:\n collection.set_clim(vmin, vmax)\n else:\n collection.autoscale_None()\n\n temp_x = x\n temp_y = y\n\n minx = npy.amin(temp_x)\n maxx = npy.amax(temp_x)\n miny = npy.amin(temp_y)\n maxy = npy.amax(temp_y)\n\n w = maxx-minx\n h = maxy-miny\n\n # the pad is a little hack to deal with the fact that we don't\n # want to transform all the symbols whose scales are in points\n # to data coords to get the exact bounding box for efficiency\n # reasons. It can be done right if this is deemed important\n padx, pady = 0.05*w, 0.05*h\n corners = (minx-padx, miny-pady), (maxx+padx, maxy+pady)\n self.update_datalim( corners)\n self.autoscale_view()\n\n # add the collection last\n self.add_collection(collection)\n return collection",
"def configureScatterSimulation(self, scatterSimulation):\n\t\ttry:\n\t\t\tscatterSimulation.gasJetRadius = self.__getNumericFieldValue(\"gasJetDiameter\") / 2.0\n\t\t\tscatterSimulation.gasJetIntersectionDistance = self.__getNumericFieldValue(\"gasJetIntersectionDistance\")\n\t\t\tscatterSimulation.gasJetCosineSquaredDistribution = self.mainWindow.gasJetCosineSquaredDistribution.isChecked()\n\n\t\t\tscatterSimulation.electronBeamRadius = self.__getNumericFieldValue(\"electronBeamDiameter\") / 2.0\n\t\t\tscatterSimulation.electronsCount = self.__getNumericFieldValue(\"electronsCount\")\n\n\t\t\tscatterSimulation.laserBeamRadius = self.__getNumericFieldValue(\"laserBeamDiameter\") / 2.0\n\t\t\tscatterSimulation.laserBeamIntersectionDistance = self.__getNumericFieldValue(\"laserBeamIntersectionDistance\")\n\t\t\tscatterSimulation.laserBeamApexLength = self.__getNumericFieldValue(\"laserBeamApexLength\")\n\t\t\tscatterSimulation.laserBeamWavelength = self.__getNumericFieldValue(\"laserBeamWavelength\")\n\t\t\tscatterSimulation.laserBeamElectronEnergy = self.__getNumericFieldValue(\"laserBeamElectronEnergy\")\n\t\t\tscatterSimulation.laserBeamPower = self.__getNumericFieldValue(\"laserBeamPower\")\n\t\t\tscatterSimulation.laserBeamGaussianDistribution = self.mainWindow.laserBeamGaussianDistribution.isChecked()\n\t\texcept ValueError as exception:\n\t\t\terrorMessage = QMessageBox.critical(self, \"Input Error\", ('Could not understand the value of the field \"%s\".\\n\\nPlease make sure that it\\'s a number.' % exception.fieldName))\n\t\t\treturn False\n\n\t\t# These are not implemented yet\n\t\tscatterSimulation.horizontalAngleInDegrees = 90\n\t\tscatterSimulation.maximumBoundLength = 1e10\n\t\tscatterSimulation.laserBeamPolarizationAngleInDegrees = 0\n\n\t\treturn scatterSimulation"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets the light source for the global illumination. The GI uses this light to shade the voxels, so this light is the only light which "casts" global illumination. When GI is disabled, this has no effect.
|
def setGILightSource(self, light):
if self.settings.enableGlobalIllumination:
self.globalIllum.setTargetLight(light)
|
[
"def setTargetLight(self, target):\n self.targetLight = target",
"def set_lighting(self):\n prop = self.GetProperty()\n prop.SetAmbient(0.)\n prop.SetDiffuse(0.)\n prop.SetSpecular(1.0)",
"def on(self):\n self.light.turnOn()",
"def GUI_Set_Light_Intensity(self):\n print \"Setting Light Intensity\"\n self.light_intensity=float(self.light_intensity_ent.get())\n self.VA_Set_Light_Intensity(self.light_intensity)\n self.master.after(300, self.Read_Photodiode)\n return",
"def turn_light_on(self):\n self.ui.bl(103)",
"def add_light(self, light):\n self.viewer.SetLightOn(light.GetHandle())",
"def UpdateLighting(self, block):\n LightMaster.SetLightValue(self.environment, block)",
"def Illumination(self, illum, level):\n self.bus.sb9600_send(MODULE_FRONTPANEL, illum, level, 0x58)",
"def ambient_light(self):\n return _read_sysfs(self._opt3002 + '/in_illuminance_input')",
"def light(self, **kwargs):\n del kwargs\n\n if not self.player:\n return\n\n self.machine.extra_ball_controller.light()",
"def initLights(self):\n\t\tself.interface.sendClear(False)\n\t\tself.interface.drainBytes()\n\t\tself.clearTime(0)\n\t\tself.sendChangesForTime([interfaceProtocol.ColorChangeMessage(i, lightColor.Color(0xcc, 0, 0, 0, True)) for i in range(50)], 0) # Turn everything off\n\t\ttime.sleep(1) # Make sure everything is set",
"def low_light(self, img):\n retval, threshold = cv2.threshold(img, 12, 255, cv2.THRESH_BINARY)\n gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n gaus = cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C,\\\n cv2.THRESH_BINARY, 115, 1)\n cv2.imshow('low_light', gaus)\n def color_filtering(self, lower, upper):\n \"\"\"\n Allows you to filter an image for a selected color. lower argument is\n a list of three numbers representing lower range limit for\n Hue, Saturation, Value. e.g. [50, 0, 0]. upper argument has the same\n form and represents the upper range limit.\n \"\"\"\n hsv = cv2.cvtColor(self.img, cv2.COLOR_BGR2HSV)\n lower = np.array(lower)\n upper = np.array(upper)\n \n mask = cv2.inRange(hsv, lower, upper)\n res = cv2.bitwise_and(self.img, self.img, mask=mask)\n\n cv2.imshow('img', self.img)\n cv2.imshow('mask', mask)\n cv2.imshow('result', res)\n return res",
"def _setLightingShader(self):\n lightShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"ApplyLighting.fragment\")\n self.lightingComputeContainer.setShader(lightShader)",
"def on(self):\n self.transite_light_state(on_off=1)",
"def VA_Set_Light_Intensity(self, light_intensity):\n print \"LightIntensity = %f\"%light_intensity \n if light_intensity < 0.0 or light_intensity > 100.0:\n print \"Light Intensity value not between 0 and 100. Exiting\"\n sys.exit()\n\n self.VA_Drive_Shutter(12000)\n # This opens the shutter all the way\n time.sleep(0.5)\n\n sp = self.VA_Calculate_Shutter_Position(light_intensity)\n # This linearizes the non-linearity of the shutter\n Shutter_Position = int(12000 * sp / 100) - 12000\n self.VA_Drive_Shutter(Shutter_Position)\n # This opens the shutter the requested amount\n time.sleep(0.5)\n return",
"def set_ambient_light_color(self, r: int, g: int, b: int):\n self.__send_command(CommandsBytes.SET_AMBIENT_LIGHT_COLOR)\n # Send the color\n self.__send_int(r)\n self.__send_int(g)\n self.__send_int(b)\n result = self.__receive_string()\n if result != \"ok\":\n print(\"Error setting ambient light color\")",
"def saturation(self, saturation):\n self.transite_light_state(saturation=saturation)",
"def ambient_light(self, color):\n self.scene.ambient_light(tuple(color))",
"async def saturation_set(self, ctx, saturation: int = 254, *, name=None):\n if not await self.get_bridge():\n await ctx.send(\"No IP has been set.\")\n return\n saturation = await self.max_min_check(saturation, 254, 0)\n for light in self.lights:\n if name is None or light.name.lower() == name.lower() and light.on:\n light.saturation = saturation"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Generates the global shader include which defines most values used in the shaders.
|
def _generateShaderConfiguration(self):
self.debug("(Re)Generating shader configuration")
# Generate list of defines
defines = []
if self.settings.antialiasingTechnique == "SMAA":
quality = self.settings.smaaQuality.upper()
if quality in ["LOW", "MEDIUM", "HIGH", "ULTRA"]:
defines.append(("SMAA_PRESET_" + quality, ""))
else:
self.error("Unrecognized SMAA quality:", quality)
return
defines.append(
("LIGHTING_COMPUTE_PATCH_SIZE_X", self.settings.computePatchSizeX))
defines.append(
("LIGHTING_COMPUTE_PATCH_SIZE_Y", self.settings.computePatchSizeY))
defines.append(
("LIGHTING_MIN_MAX_DEPTH_ACCURACY", self.settings.minMaxDepthAccuracy))
if self.blurEnabled:
defines.append(("USE_DOF", 1))
if self.settings.useSimpleLighting:
defines.append(("USE_SIMPLE_LIGHTING", 1))
if self.settings.anyLightBoundCheck:
defines.append(("LIGHTING_ANY_BOUND_CHECK", 1))
if self.settings.accurateLightBoundCheck:
defines.append(("LIGHTING_ACCURATE_BOUND_CHECK", 1))
if self.settings.renderShadows:
defines.append(("USE_SHADOWS", 1))
defines.append(("AMBIENT_CUBEMAP_SAMPLES", self.settings.ambientCubemapSamples))
defines.append(
("SHADOW_MAP_ATLAS_SIZE", self.settings.shadowAtlasSize))
defines.append(
("SHADOW_MAX_UPDATES_PER_FRAME", self.settings.maxShadowUpdatesPerFrame))
defines.append(
("SHADOW_GEOMETRY_MAX_VERTICES", self.settings.maxShadowUpdatesPerFrame * 3))
defines.append(("SHADOW_NUM_PCF_SAMPLES", self.settings.numPCFSamples))
defines.append(("SHADOW_NUM_PCSS_SEARCH_SAMPLES", self.settings.numPCSSSearchSamples))
defines.append(("SHADOW_NUM_PCSS_FILTER_SAMPLES", self.settings.numPCSSFilterSamples))
defines.append(("SHADOW_PSSM_BORDER_PERCENTAGE", self.settings.shadowCascadeBorderPercentage))
if self.settings.useHardwarePCF:
defines.append(("USE_HARDWARE_PCF", 1))
defines.append(("WINDOW_WIDTH", self.size.x))
defines.append(("WINDOW_HEIGHT", self.size.y))
if self.settings.motionBlurEnabled:
defines.append(("USE_MOTION_BLUR", 1))
defines.append(
("MOTION_BLUR_SAMPLES", self.settings.motionBlurSamples))
# Occlusion
defines.append(
("OCCLUSION_TECHNIQUE_" + self.occlusion.getIncludeName(), 1))
defines.append(
("OCCLUSION_RADIUS", self.settings.occlusionRadius))
defines.append(
("OCCLUSION_STRENGTH", self.settings.occlusionStrength))
defines.append(
("OCCLUSION_SAMPLES", self.settings.occlusionSampleCount))
if self.settings.displayOnscreenDebugger:
defines.append(("DEBUGGER_ACTIVE", 1))
extraSettings = self.guiManager.getDefines()
defines += extraSettings
if self.settings.enableTemporalReprojection:
defines.append(("USE_TEMPORAL_REPROJECTION", 1))
if self.settings.enableGlobalIllumination:
defines.append(("USE_GLOBAL_ILLUMINATION", 1))
if self.settings.enableScattering:
defines.append(("USE_SCATTERING", 1))
# Pass near far
defines.append(("CAMERA_NEAR", Globals.base.camLens.getNear()))
defines.append(("CAMERA_FAR", Globals.base.camLens.getFar()))
# Generate
output = "#pragma once\n"
output += "// Autogenerated by RenderingPipeline.py\n"
output += "// Do not edit! Your changes will be lost.\n\n"
for key, value in defines:
output += "#define " + key + " " + str(value) + "\n"
# Try to write the file
try:
with open("PipelineTemp/ShaderAutoConfig.include", "w") as handle:
handle.write(output)
except Exception, msg:
self.fatal("Error writing shader autoconfig. Maybe no write-access?")
return
|
[
"def _build_uniforms(self):\n\n # We might rebuild the program because of snippets but we must\n # keep already bound uniforms\n\n count = 0\n for (name,gtype) in self.all_uniforms:\n if name not in self._uniforms.keys():\n uniform = Uniform(self, name, gtype)\n else:\n uniform = self._uniforms[name]\n gtype = uniform.gtype\n if gtype in (gl.GL_SAMPLER_1D, gl.GL_SAMPLER_2D, gl.GL_SAMPLER_CUBE):\n uniform._texture_unit = count\n count += 1\n self._uniforms[name] = uniform\n self._need_update = True",
"def _addAllUniforms(self):\n for stage in self.stages:\n sourceText = self.stages[stage]\n structures = findUniformStruct(sourceText)\n\n #------------------------------------------------------------------\n # UBO checck: NOTE: preliminary\n uboLastLine = 0\n uboIndex = sourceText.find('layout (std140')\n if uboIndex >= 0:\n endLine = sourceText[uboIndex:].find('}')\n uboBlock = sourceText[uboIndex:uboIndex+endLine+1]\n uboLastLine = uboIndex+endLine\n sourceText = sourceText[:uboIndex] + sourceText[uboLastLine:]\n s0 = uboBlock.find('uniform')\n s1 = uboBlock.find('}')\n uboName = uboBlock[s0:s1].split()[1]\n #NOTE: MUST BE TESTED!!!\n uniformLocation = gl.glGetUniformBlockIndex(self.program,\n uboName)\n self.uniformLocations[uniformName] = uniformLocation\n\n #------------------------------------------------------------------\n index = sourceText.find('uniform')\n start = index\n while index != -1:\n endLine = sourceText[start:].find(';')\n uniformLine = sourceText[start: start + endLine]\n _, uniformType, uniformName, *rest = uniformLine.split()\n index = sourceText[start + endLine:].find('uniform')\n start += endLine + index\n self.uniformTypes[uniformName] = uniformType\n self._addUniformWithStructCheck(uniformName, uniformType,\n structures)",
"def bake_shaders(self):\n\n selected_shaders = cmds.ls(sl=True)\n del self.shaders_to_apply[:]\n for shdr in selected_shaders:\n self.shaders_to_apply.append(shdr)\n print self.shaders_to_apply",
"def _setGIComputeShader(self):\n giShader = Shader.load(Shader.SLGLSL, \n \"DefaultPostProcess.vertex\",\n \"ComputeGI.fragment\")\n self.giPrecomputeBuffer.setShader(giShader)",
"def generate_shader_code(self):\r\n\r\n content = \"#pragma once\\n\\n\"\r\n content += \"// Autogenerated by the render pipeline\\n\"\r\n content += \"// Do not edit! Your changes will be lost.\\n\\n\"\r\n\r\n structs = {}\r\n inputs = []\r\n\r\n for input_name, handle in iteritems(self.ptas):\r\n parts = input_name.split(\".\")\r\n\r\n # Single input, simply add it to the input list\r\n if len(parts) == 1:\r\n inputs.append(self.pta_to_glsl_type(handle) + \" \" + input_name + \";\")\r\n\r\n # Nested input, like scattering.sun_color\r\n elif len(parts) == 2:\r\n struct_name = parts[0]\r\n actual_input_name = parts[1]\r\n if struct_name in structs:\r\n # Struct is already defined, add member definition\r\n structs[struct_name].append(\r\n self.pta_to_glsl_type(handle) + \" \" + actual_input_name + \";\")\r\n else:\r\n # Construct a new struct and add it to the list of inputs\r\n inputs.append(struct_name + \"_UBOSTRUCT \" + struct_name + \";\")\r\n structs[struct_name] = [\r\n self.pta_to_glsl_type(handle) + \" \" + actual_input_name + \";\"\r\n ]\r\n\r\n # Nested input, like scattering.some_setting.sun_color, not supported yet\r\n else:\r\n self.warn(\"Structure definition too nested, not supported (yet):\", input_name)\r\n\r\n # Add structures\r\n for struct_name, members in iteritems(structs):\r\n content += \"struct \" + struct_name + \"_UBOSTRUCT {\\n\"\r\n for member in members:\r\n content += \" \" * 4 + member + \"\\n\"\r\n content += \"};\\n\\n\"\r\n\r\n # Add actual inputs\r\n if len(inputs) < 1:\r\n self.debug(\"No UBO inputs present for\", self.name)\r\n else:\r\n if self.use_ubo:\r\n\r\n content += \"layout(shared, binding={}) uniform {}_UBO {{\\n\".format(\r\n self.bind_id, self.name)\r\n for ipt in inputs:\r\n content += \" \" * 4 + ipt + \"\\n\"\r\n content += \"} \" + self.name + \";\\n\"\r\n else:\r\n content += \"uniform struct {\\n\"\r\n for ipt in inputs:\r\n content += \" \" * 4 + ipt + \"\\n\"\r\n content += \"} \" + self.name + \";\\n\"\r\n\r\n content += \"\\n\"\r\n return content",
"def shaderCollection(self):\n return self._shader_collection",
"def add_initialize_constants(self):\n constants = self.model.map[\"initialize\"][0]['constants']\n constants[\"spo_pop_size\"] = self.spo_pop_size\n constants[\"gam_pop_size\"] = self.gam_pop_size\n constants[\"microspore_pool\"] = self.microspore_pool\n constants[\"spo_mutation_rate\"] = self.spo_mutation_rate\n constants[\"gam_mutation_rate\"] = self.gam_mutation_rate\n constants[\"spo_female_to_male_ratio\"] = self.spo_female_to_male_ratio\n constants[\"gam_female_to_male_ratio\"] = self.gam_female_to_male_ratio\n constants[\"spo_megaspores_per\"] = self.spo_megaspores_per\n constants[\"spo_microspores_per\"] = self.spo_microspores_per\n constants[\"gam_eggs_per_megaspore\"] = self.gam_eggs_per_megaspore\n constants[\"gam_sperm_per_microspore\"] = self.gam_sperm_per_microspore\n constants[\"spo_clone_rate\"] = self.spo_clone_rate\n constants[\"spo_clones_per\"] = self.spo_clones_per\n constants[\"spo_self_chance\"] = self.spo_self_chance\n constants[\"gam_self_rate\"] = self.gam_self_rate\n constants[\"gam_clone_rate\"] = self.gam_clone_rate\n constants[\"gam_clones_per\"] = self.gam_clones_per\n constants[\"spo_maternal_effect\"] = self.spo_maternal_effect\n constants[\"gam_maternal_effect\"] = self.gam_maternal_effect\n constants[\"spo_random_death_chance\"] = self.spo_random_death_chance\n constants[\"gam_random_death_chance\"] = self.gam_random_death_chance",
"def getAllShaders(cls, *args, **kwargs):\r\n return pm.ls(type='cgfxShader')",
"def _setShaderInputs(self):\n\n # Shader inputs for the light-culling pass\n if self.haveLightingPass:\n self.lightBoundsComputeBuff.setShaderInput(\n \"destination\", self.lightPerTileStorage)\n self.lightBoundsComputeBuff.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightBoundsComputeBuff.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n # Shader inputs for the light-applying pass\n self.lightingComputeContainer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.lightingComputeContainer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n self.lightingComputeContainer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n\n\n self.lightingComputeContainer.setShaderInput(\n \"depth\", self.deferredTarget.getDepthTexture())\n self.lightingComputeContainer.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.lightingComputeContainer.setShaderInput(\n \"mainRender\", self.showbase.render)\n\n if self.occlusion.requiresViewSpacePosNrm():\n self.lightingComputeContainer.setShaderInput(\n \"viewSpaceNormals\",\n self.normalPrecompute.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\n \"viewSpacePosition\",\n self.normalPrecompute.getAuxTexture(0))\n\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlas\", self.lightManager.getAtlasTex())\n\n if self.settings.useHardwarePCF:\n self.lightingComputeContainer.setShaderInput(\n \"shadowAtlasPCF\", self.lightManager.getAtlasTex(), self.lightManager.getPCFSampleState())\n\n self.lightingComputeContainer.setShaderInput(\n \"destination\", self.lightingComputeCombinedTex)\n self.lightingComputeContainer.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.lightingComputeContainer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n self.lightingComputeContainer.setShaderInput(\n \"noiseTexture\",\n self.showbase.loader.loadTexture(\"Data/Occlusion/noise4x4.png\"))\n self.lightingComputeContainer.setShaderInput(\n \"lightsPerTile\", self.lightPerTileStorage)\n\n\n if self.settings.enableGlobalIllumination:\n self.lightingComputeContainer.setShaderInput(\"giDiffuseTex\", self.giPrecomputeBuffer.getColorTexture())\n self.lightingComputeContainer.setShaderInput(\"giReflectionTex\", self.giPrecomputeBuffer.getAuxTexture(0))\n\n\n # Shader inputs for the occlusion blur passes\n if self.occlusion.requiresBlurring() and self.haveCombiner:\n self.blurOcclusionH.setShaderInput(\n \"colorTex\", self.blurOcclusionV.getColorTexture())\n\n if self.settings.enableTemporalReprojection:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\", self.combiner.getColorTexture())\n else:\n self.blurOcclusionV.setShaderInput(\n \"colorTex\",\n self.lightingComputeContainer.getColorTexture())\n\n self.blurOcclusionH.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalTex\", self.deferredTarget.getAuxTexture(0))\n self.blurOcclusionH.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n self.blurOcclusionV.setShaderInput(\n \"normalsView\", self.normalPrecompute.getAuxTexture(0))\n\n # Shader inputs for the blur passes\n if self.blurEnabled:\n self.blurColorH.setShaderInput(\n \"dofStorage\", self.dofStorage)\n self.blurColorV.setShaderInput(\n \"dofStorage\", 
self.dofStorage)\n self.blurColorH.setShaderInput(\"colorTex\",\n self.antialias.getResultTexture())\n self.blurColorH.setShaderInput(\"depthTex\",\n self.deferredTarget.getDepthTexture())\n self.blurColorV.setShaderInput(\"colorTex\",\n self.blurColorH.getColorTexture())\n\n # Shader inputs for the temporal reprojection\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.combiner.setShaderInput(\n \"currentComputation\",\n self.lightingComputeContainer.getColorTexture())\n self.combiner.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n self.combiner.setShaderInput(\n \"positionBuffer\", self.deferredTarget.getColorTexture())\n self.combiner.setShaderInput(\n \"velocityBuffer\", self.deferredTarget.getAuxTexture(1))\n self.combiner.setShaderInput(\"currentPixelShift\",\n self.currentPixelShift)\n self.combiner.setShaderInput(\"lastPixelShift\",\n self.lastPixelShift)\n\n if self.blurEnabled:\n self.combiner.setShaderInput(\n \"dofStorage\", self.dofStorage)\n\n self.combiner.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.combiner.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n self.combiner.setShaderInput(\n \"temporalProjXOffs\", self.temporalProjXOffs)\n self.combiner.setShaderInput(\"lastMVP\", self.lastMVP)\n self.combiner.setShaderInput(\"cameraPosition\", self.cameraPosition)\n self.combiner.setShaderInput(\"currentMVP\", self.lastMVP)\n\n # Shader inputs for the final pass\n if self.blurEnabled:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.blurColorV.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\n \"colorTex\", self.antialias.getResultTexture())\n\n if self.occlusion.requiresBlurring():\n self.normalPrecompute.setShaderInput(\n \"positionTex\", self.deferredTarget.getColorTexture())\n self.normalPrecompute.setShaderInput(\n \"mainCam\", self.showbase.cam)\n self.normalPrecompute.setShaderInput(\n \"mainRender\", self.showbase.render)\n self.normalPrecompute.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n\n if self.haveMRT:\n self.deferredTarget.setShaderInput(\n \"velocityTex\", self.deferredTarget.getAuxTexture(1))\n\n self.deferredTarget.setShaderInput(\n \"depthTex\", self.deferredTarget.getDepthTexture())\n self.deferredTarget.setShaderInput(\n \"motionBlurFactor\", self.motionBlurFactor)\n\n if self.haveLightingPass:\n self.deferredTarget.setShaderInput(\n \"lastFrame\", self.lightingComputeCombinedTex)\n\n if self.haveCombiner and self.settings.enableTemporalReprojection:\n self.deferredTarget.setShaderInput(\n \"newFrame\", self.combiner.getColorTexture())\n self.deferredTarget.setShaderInput(\n \"lastPosition\", self.lastPositionBuffer)\n\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.combiner.getColorTexture())\n else:\n self.deferredTarget.setShaderInput(\"debugTex\",\n self.antialias.getResultTexture())\n\n self.deferredTarget.setShaderInput(\n \"currentPosition\", self.deferredTarget.getColorTexture())\n\n # Set last / current mvp handles\n self.showbase.render.setShaderInput(\"lastMVP\", self.lastMVP)\n\n # Set GI inputs\n if self.settings.enableGlobalIllumination:\n self.globalIllum.bindTo(self.giPrecomputeBuffer, \"giData\")\n\n self.giPrecomputeBuffer.setShaderInput(\n \"data0\", self.deferredTarget.getColorTexture())\n self.giPrecomputeBuffer.setShaderInput(\n \"data1\", self.deferredTarget.getAuxTexture(0))\n self.giPrecomputeBuffer.setShaderInput(\n \"data2\", self.deferredTarget.getAuxTexture(1))\n 
self.giPrecomputeBuffer.setShaderInput(\n \"data3\", self.deferredTarget.getAuxTexture(2))\n self.giPrecomputeBuffer.setShaderInput(\n \"cameraPosition\", self.cameraPosition)\n\n # Finally, set shaders\n self.reloadShaders()",
"def load_shaders(self):\n context = self.context\n self.prog = load_shaders(context, 'tiny_gl_engine/primitives/shaders/cube_vertex.glsl',\n 'tiny_gl_engine/primitives/shaders/cube_fragment.glsl')",
"def setUniformBindings(self, wireframe=False):\n normalMatrix = self._transform.normalMatrix()\n self._active_shader.setUniformValue(\"modelMatrix\", self._transform)\n self._active_shader.setUniformValue(\"viewMatrix\", self._scene.camera.viewMatrix)\n self._active_shader.setUniformValue(\"projectionMatrix\", self._scene.camera.projectionMatrix)\n self._active_shader.setUniformValue(\"normalMatrix\", normalMatrix)\n if self.texture() is not None:\n self._active_shader.setUniformValue(\"texObject\", 0)\n \n ## bind active material\n if self.isSelectable() and self.isSelected():\n self._active_shader.setUniformValue(\"selected\", 1.0)\n else:\n self._active_shader.setUniformValue(\"selected\", 0.65)\n\n ## set highlight color\n if self.isHighlighted():\n self._active_shader.setUniformValue(\"material.emission\", QVector3D(0.25, 0.25, 0.25))\n else:\n self._active_shader.setUniformValue(\"material.emission\", self._active_material.emissionColor)\n self._active_shader.setUniformValue(\"material.ambient\", self._active_material.ambientColor)\n \n ## set the enabled color\n if self.isEnabled():\n self._active_shader.setUniformValue(\"material.emission\", QVector3D(0.25, 0.25, 0.25))\n self._active_shader.setUniformValue(\"material.diffuse\", self._active_material.diffuseColor)\n else:\n self._active_shader.setUniformValue(\"material.diffuse\", self._active_material.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._active_material.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._active_material.shininess)\n \n ## set the error and warning colors\n if self._errorHighlight:\n self._active_shader.setUniformValue(\"material.ambient\", self._errorMaterial.ambientColor)\n self._active_shader.setUniformValue(\"material.diffuse\", self._errorMaterial.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._errorMaterial.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._errorMaterial.shininess)\n if self._warningHighlight:\n self._active_shader.setUniformValue(\"material.ambient\", self._warningMaterial.ambientColor)\n self._active_shader.setUniformValue(\"material.diffuse\", self._warningMaterial.diffuseColor)\n self._active_shader.setUniformValue(\"material.specular\", self._warningMaterial.specularColor)\n self._active_shader.setUniformValue(\"material.shininess\", self._warningMaterial.shininess) \n \n ## bind lights\n camera_position = QVector4D(self._scene.camera.position[0], self._scene.camera.position[1], self._scene.camera.position[2], 1.0)\n if self._scene.light.headlight:\n if self._scene.light.directional:\n self._active_shader.setUniformValue(\"lightPosition\", QVector4D(0.0, 0.0, 1.0, 0.0))\n else:\n self._active_shader.setUniformValue(\"lightPosition\", QVector4D(0.0, 0.0, 0.0, 1.0))\n else:\n self._active_shader.setUniformValue(\"lightPosition\", self._scene.camera.viewMatrix * self._scene.light.position)\n\n self._active_shader.setUniformValue(\"light.ambient\", self._scene.light.ambientColor)\n self._active_shader.setUniformValue(\"light.diffuse\", self._scene.light.diffuseColor)\n self._active_shader.setUniformValue(\"light.specular\", self._scene.light.specularColor)\n self._active_shader.setUniformValue(\"lightAttenuation\", self._scene.light.attenuation)",
"def apply_shader(hz, act):\n\n frag_decl = \\\n \"\"\"\n uniform float selected;\n uniform float opacity_level;\n \"\"\"\n\n frag_impl = \\\n \"\"\"\n if (selected == 1){\n fragOutput0 = fragOutput0 + vec4(0.2, 0.2, 0, opacity_level);\n }\n \"\"\"\n\n shaders.shader_to_actor(act, \"vertex\", impl_code=\"\\n\",\n replace_first=False,\n replace_all=False)\n shaders.shader_to_actor(act, \"fragment\", decl_code=frag_decl,\n block=\"coincident\")\n shaders.shader_to_actor(act, \"fragment\", impl_code=frag_impl,\n block=\"light\")\n\n def shader_selected_callback(caller, event, calldata=None):\n program = calldata\n if program is not None:\n try:\n program.SetUniformf(\"selected\",\n hz.cea[act]['selected'])\n except KeyError:\n pass\n try:\n program.SetUniformf(\"selected\",\n hz.cla[act]['selected'])\n except KeyError:\n pass\n program.SetUniformf(\"opacity_level\", 1)\n\n shaders.add_shader_callback(act, shader_selected_callback, priority=100)",
"def add_shader_integer_extra_datas(self, trishape):\n for shaderindex in self.USED_EXTRA_SHADER_TEXTURES[self.properties.game]:\n shadername = self.EXTRA_SHADER_TEXTURES[shaderindex]\n trishape.add_integer_extra_data(shadername, shaderindex)",
"def draw_loom_preset_flags(self, context):\n preset_flags = context.scene.loom.render_preset_flags\n layout = self.layout\n layout.use_property_split = True\n layout.use_property_decorate = False\n layout.separator(factor=0.5)\n layout.emboss='NORMAL'\n col = layout.column(heading=\"Also include:\")\n #act = col.column()\n #act.prop(preset_flags, \"include_engine_settings\")\n #act.enabled = False\n col.prop(preset_flags, \"include_resolution\")\n col.prop(preset_flags, \"include_file_format\")\n col.prop(preset_flags, \"include_output_path\")\n col.prop(preset_flags, \"include_scene_settings\", text=\"Scene Camera\")\n col.prop(preset_flags, \"include_passes\")\n col.prop(preset_flags, \"include_color_management\")\n col.prop(preset_flags, \"include_metadata\")\n #col.prop(preset_flags, \"include_post_processing\")\n layout.separator(factor=0.3)",
"def get(self, partnames):\n\n rv = self.cache.get(partnames, None)\n if rv is not None:\n return rv\n\n partnameset = set(partnames)\n partnameset.add(renpy.config.default_shader)\n sortedpartnames = tuple(sorted(partnameset))\n\n rv = self.cache.get(sortedpartnames, None)\n if rv is not None:\n self.cache[partnames] = rv\n return rv\n\n # If the cache missed entirely, we have to generate the source code for the\n # shaders.\n\n vertex_variables = set()\n vertex_parts = [ ]\n vertex_functions = [ ]\n\n fragment_variables = set()\n fragment_parts = [ ]\n fragment_functions = [ ]\n\n for i in sortedpartnames:\n\n p = shader_part.get(i, None)\n\n if p is None:\n raise Exception(\"{!r} is not a known shader part.\".format(i))\n\n vertex_variables |= p.vertex_variables\n vertex_parts.extend(p.vertex_parts)\n vertex_functions.append(p.vertex_functions)\n\n fragment_variables |= p.fragment_variables\n fragment_parts.extend(p.fragment_parts)\n fragment_functions.append(p.fragment_functions)\n\n vertex = source(vertex_variables, vertex_parts, vertex_functions, False, self.gles)\n fragment = source(fragment_variables, fragment_parts, fragment_functions, True, self.gles)\n\n from renpy.gl2.gl2shader import Program\n\n rv = Program(sortedpartnames, vertex, fragment)\n rv.load()\n\n self.cache[partnames] = rv\n self.cache[sortedpartnames] = rv\n return rv",
"def getAllShaders(cls, *args, **kwargs):\r\n return pm.ls(type='hlslShader')",
"def __init__(self, shader_dir):\n\n # Note: see the following, which was referenced in the PyOpenGL\n # documentation:\n # https://bitbucket.org/rndblnch/opengl-programmable/src/tip/10-g\n # l3.2core.py?fileviewer=file-view-default\n\n # Create the program object.\n self.__shader_program = GL.glCreateProgram()\n\n # We're going to build up a list of inputs.\n program_uniforms = set()\n program_attributes = set()\n self.__attribute_types = {}\n\n # Compile all of the source files and attach the resulting\n # shader objects to our shader program.\n for (filename, shader_type) in self.__list_shader_files(shader_dir):\n (file_uniforms, file_attributes, attribute_types) = \\\n self.__parse_uniforms_and_attributes(filename)\n program_uniforms.update(file_uniforms);\n program_attributes.update(file_attributes);\n self.__attribute_types.update(attribute_types)\n shader = GL.glCreateShader(shader_type)\n GL.glShaderSource(shader, open(filename, 'r').read())\n GL.glCompileShader(shader)\n if GL.glGetShaderiv(shader, GL.GL_COMPILE_STATUS) != GL.GL_TRUE:\n raise Exception(GL.glGetShaderInfoLog(shader))\n GL.glAttachShader(self.__shader_program, shader)\n\n # Assign locations to vertex attributes. We'll bind them in the program later...\n self.__attrib_locations = dict((k, v) for (v, k) in enumerate(program_attributes))\n\n # Uniform locations will be determined by OpenGL, we'll get them later.\n self.__uniform_locations = {}\n\n # Now we can bind all of the vertex attributes to their\n # assigned locations.\n for attrib in program_attributes:\n GL.glBindAttribLocation(self.__shader_program,\n self.__attrib_locations[attrib],\n attrib)\n\n # Now link the program.\n GL.glLinkProgram(self.__shader_program)\n if GL.glGetProgramiv(self.__shader_program, GL.GL_LINK_STATUS) != GL.GL_TRUE:\n raise Exception(GL.glGetProgramInfoLog(self.__shader_program))\n\n # Retrieve the uniform locations and remember them.\n for uniform in program_uniforms:\n self.__uniform_locations[uniform] = GL.glGetUniformLocation(self.__shader_program, uniform)\n if self.__uniform_locations[uniform] == -1:\n print (\"Warning: Uniform '%s' does not exist.\" % uniform)",
"def surfaceShaderList(remove=\"string\", add=\"string\"):\n pass",
"def initializeShaders(self,shaderDict):\n shaderObjects = []\n self.shaderProgram = glCreateProgram()\n \n for shaderType, shaderString in shaderDict.items():\n shaderObjects.append(glCreateShader(shaderType))\n glShaderSource(shaderObjects[-1], shaderString)\n \n glCompileShader(shaderObjects[-1])\n status = glGetShaderiv(shaderObjects[-1], GL_COMPILE_STATUS)\n if status == GL_FALSE:\n if shaderType is GL_VERTEX_SHADER:\n strShaderType = \"vertex\"\n elif shaderType is GL_GEOMETRY_SHADER:\n strShaderType = \"geometry\"\n elif shaderType is GL_FRAGMENT_SHADER:\n strShaderType = \"fragment\"\n raise RuntimeError(\"Compilation failure (\" + strShaderType + \" shader):\\n\" + glGetShaderInfoLog(shaderObjects[-1]).decode('utf-8'))\n \n glAttachShader(self.shaderProgram, shaderObjects[-1])\n \n glLinkProgram(self.shaderProgram)\n status = glGetProgramiv(self.shaderProgram, GL_LINK_STATUS)\n \n if status == GL_FALSE:\n raise RuntimeError(\"Link failure:\\n\" + glGetProgramInfoLog(self.shaderProgram).decode('utf-8'))\n \n for shader in shaderObjects:\n glDetachShader(self.shaderProgram, shader)\n glDeleteShader(shader)"
] |
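Several negatives above (initializeShaders, the PyOpenGL constructor) share the same compile, attach, link flow for building a GLSL program. A minimal sketch of that flow, assuming PyOpenGL and a current OpenGL context; the function name and error handling are illustrative, not from any of the snippets:

import OpenGL.GL as GL

def build_program(vertex_src, fragment_src):
    # Requires a current OpenGL context (e.g. from GLUT, GLFW, or Qt).
    program = GL.glCreateProgram()
    for source, stage in ((vertex_src, GL.GL_VERTEX_SHADER),
                          (fragment_src, GL.GL_FRAGMENT_SHADER)):
        shader = GL.glCreateShader(stage)
        GL.glShaderSource(shader, source)
        GL.glCompileShader(shader)
        # Compile errors only surface through the info log, so check status.
        if GL.glGetShaderiv(shader, GL.GL_COMPILE_STATUS) != GL.GL_TRUE:
            raise RuntimeError(GL.glGetShaderInfoLog(shader))
        GL.glAttachShader(program, shader)
        GL.glDeleteShader(shader)  # deletion is deferred until the program releases it
    GL.glLinkProgram(program)
    if GL.glGetProgramiv(program, GL.GL_LINK_STATUS) != GL.GL_TRUE:
        raise RuntimeError(GL.glGetProgramInfoLog(program))
    return program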
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Call this whenever the window is resized
|
def onWindowResized(self):
raise NotImplementedError()
|
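onWindowResized() above is an abstract hook: the pipeline calls it, and concrete targets override it to rebuild any size-dependent state. A minimal sketch of that template-method pattern; the class and attribute names below are invented for illustration:

class RenderTarget:
    """Base class: subclasses react to window resizes."""
    def onWindowResized(self):
        raise NotImplementedError()

class ScreenBuffer(RenderTarget):
    def __init__(self, window):
        self.window = window
        self.size = None

    def onWindowResized(self):
        # Rebuild anything whose dimensions depend on the window.
        self.size = (self.window.width, self.window.height)

window = type("Window", (), {"width": 1280, "height": 720})()
buf = ScreenBuffer(window)
buf.onWindowResized()
assert buf.size == (1280, 720)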
[
"def resize(self, event):\n self.redraw()",
"def resizeEvent(self, e):\n\n self.initUI()",
"def window_resize(self):\r\n\r\n offset = (self.canvas.winfo_width()-self.container.winfo_reqwidth(),\r\n self.canvas.winfo_height()-self.container.winfo_reqheight())\r\n self.check_scrollbar(self.hsb, offset[0])\r\n self.check_scrollbar(self.vsb, offset[1])\r\n self.canvas.itemconfig(\r\n self.window,\r\n width=max(self.canvas.winfo_width(), self.container.winfo_reqwidth()),\r\n height=max(self.canvas.winfo_height(), self.container.winfo_reqheight())\r\n )",
"def on_resize(self):\n # Notice that we are calling refresh() here instead of \n # request_refresh() since we want the refresh to happen\n # immediately. Otherwise the resize layouts will appear \n # to lag in the ui. This is a safe operation since by the\n # time we get this resize event, the widget has already \n # changed size. Further, the only geometry that gets set\n # by the layout manager is that of our children. And should\n # it be required to resize this widget from within the layout\n # call, then the layout manager will do that asynchronously.\n self.shell_obj.refresh()",
"def on_resize(self, size: tuple[int, int]) -> None:\n\n width, height = size\n\n for window in self._windows:\n newx = max(0, min(window.pos[0], width - window.width))\n newy = max(0, min(window.pos[1], height - window.height))\n\n window.pos = (newx, newy)\n\n self.print()",
"def handle_resize(self, event):\n self.update()\n self._return_joystick_to_center()",
"def on_window_resize(self, event):\n image_width = event.width\n image_height = int(event.width / self.aspect_ratio)\n\n if image_height > event.height:\n image_height = event.height\n image_width = int(event.height * self.aspect_ratio)\n\n self.cv_displayed_image = cv2.resize(self.cv_image, (image_width, image_height))\n self.zoom_ratio = self.cv_displayed_image.shape[1] / self.cv_image.shape[1]\n self.add_rectangles()\n self.show_cv_image(self.cv_displayed_image)",
"def onTimer(self):\r\n\t\t\r\n\t\t#self.setMinimumSize(10,10)\r\n\t\tself.setMaximumSize(10000,10000)",
"def _resize(self, event):\n h,w = Graph.reset(self)\n \n w_spacing = w/len(self._tf)\n Graph.draw_axes(self, h, w)\n self._draw_connections(h, w_spacing) \n self._draw_points(h, w_spacing)\n print(\"Graph Displayed\") # Confirmation",
"def on_screen_size_change(self, zscreen):\n\n pass",
"def windowEvent(self, *args, **kwargs):\n super().windowEvent(*args, **kwargs)\n\n for win, cam, pixel2d in self.forcedAspectWins:\n aspectRatio = self.getAspectRatio(win)\n cam.node().getLens().setAspectRatio(aspectRatio)\n\n # Fix pixel2d scale for new window size\n # Temporary hasattr for old Pandas\n if not hasattr(win, 'getSbsLeftXSize'):\n pixel2d.setScale(2.0 / win.getXSize(), 1.0, 2.0 / win.getYSize())\n else:\n pixel2d.setScale(2.0 / win.getSbsLeftXSize(), 1.0, 2.0 / win.getSbsLeftYSize())",
"def adjustWindowSize(self):\r\n w = 600\r\n h = 900\r\n self.width = w\r\n self.height = h",
"def _on_child_resize(self, event: DOMResizeEvent):\n self._rerender(resize=event.resize_axis)",
"def on_resize(self, width, height):\n\n # call overrided function\n super().on_resize(width, height)\n\n # update camera value\n (width, height) = self.get_size()\n self.left = -self.zoom_level * width/2\n self.right = self.zoom_level * width/2\n self.bottom = -self.zoom_level * height/2\n self.top = self.zoom_level * height/2\n self.zoomed_width = self.zoom_level * width\n self.zoomed_height = self.zoom_level * height",
"def resizeEvent(self, ev):\n\t\tself.q_buffer\t\t\t\t\t\t\t= QImage()\n\t\tself.q_buffer.create(self.width(),self.height(),8)",
"def glfw_window_resize_callback(self, window, width, height):\r\n self._width, self._height = width, height\r\n self._buffer_width, self._buffer_height = glfw.get_framebuffer_size(\r\n self._window\r\n )\r\n self.set_default_viewport()\r\n\r\n super().resize(self._buffer_width, self._buffer_height)",
"def adjust_window_to_current_width(self, event=None):\n width = self.root.winfo_width()\n self.set_window_size(width)",
"def _updateSizes(self) -> None:\n print(f'QmlOffscreenRenderer._updateSizes: {self.size}')\n width, height = self.size.toTuple()\n self._window.setGeometry(0, 0, width, height)\n self._rootItem.setWidth(width)\n self._rootItem.setHeight(height)",
"def adjust_window_to_current_state(self, event=None):\n # zoomed to normal\n if((self.unit == self.root.winfo_screenheight()//GRID_ROWS-2 or\n self.unit == self.root.winfo_screenwidth()//GRID_COLUMNS) and\n self.root.state() == \"normal\"):\n width = DEFAULT_WIDTH_WINDOW\n self.set_window_size(width)\n # normal to zoomed\n if(not (self.unit == self.root.winfo_screenheight()//GRID_ROWS-2 or\n self.unit == self.root.winfo_screenwidth()//GRID_COLUMNS) and\n self.root.state() == \"zoomed\"):\n width = self.root.winfo_screenwidth()\n self.set_window_size(width)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
This reloads the whole pipeline, same as destroy(); create()
|
def reload(self):
self.debug("Reloading pipeline")
self.destroy()
self.create()
|
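Note that reload() composes destroy() and create() rather than duplicating their logic, so a reload is guaranteed to behave like a fresh start. A self-contained sketch of the same pattern, with placeholder resources standing in for the real setup and teardown:

class Pipeline:
    def __init__(self):
        self.resources = None

    def create(self):
        self.resources = ["framebuffer", "shaders"]  # stand-in for real setup

    def destroy(self):
        self.resources = None  # stand-in for real teardown

    def reload(self):
        # Tear everything down, then rebuild from scratch.
        self.destroy()
        self.create()

p = Pipeline()
p.create()
p.reload()
assert p.resources == ["framebuffer", "shaders"]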
[
"def reload_pipeline():\n\n api.uninstall()\n _uninstall_menu()\n\n for module in (\"avalon.api\",\n \"avalon.io\",\n \"avalon.lib\",\n \"avalon.pipeline\",\n \"avalon.tools\",\n \"avalon.nuke\",\n \"avalon.nuke.pipeline\",\n \"avalon.nuke.lib\",\n \"avalon.nuke.workio\"\n ):\n\n log.info(\"Reloading module: {}...\".format(module))\n\n module = importlib.import_module(module)\n reload(module)\n\n import avalon.nuke\n api.install(avalon.nuke)\n\n _register_events()",
"def cmd_new_scene(self, **kwargs):\n self.canvas.delete(\"all\")\n self.models = list()",
"def setUp(self): \n self.pipeline.clearmodels()",
"def rebuild (self):\n\n return self.recreate(self)",
"def unload(self):\n self.loaded = False",
"def reload(self):\n flow = self.reloading()\n self._set_config(flow._config)\n return self",
"def rebuild():",
"def reset(self):\n if self._thread is not None:\n self.stop_loading()\n super(Loader, self).reset()\n self._load = True",
"def reload(self):\n self.model.load_image()",
"def do_refresh():\n importlib.reload(sprite_splitter)\n print(\"Refreshed sprite loader!\")",
"def reset(cls):\n cls.instances = []\n cls.next_id = 0",
"def ReloadPool(self):\n with tempfile.NamedTemporaryFile() as f:\n cPickle.dump(self.sync_stage.pool, f)\n f.flush()\n self._run.options.validation_pool = f.name\n self.sync_stage = sync_stages.CommitQueueSyncStage(self._run)\n self.sync_stage.HandleSkip()",
"def clear_old_scenes(self): \n pass",
"def get_finalize_pipeline(self):\n raise NotImplementedError('Must be implemented in subclasses.')",
"def destroy(self):\n self.__shader.destroy()\n self.__shader = None",
"def reload_transforms(self):\n to_load = self.transform_collections[:]\n\n for module in to_load:\n TransformPresenter.delete_module(module.package_name)\n\n self.transform_collections = []\n self.load_files(\n map(lambda x: x.collection_name, to_load)\n )",
"def purgeScenes(self) -> None:\r\n\r\n\r\n print(\"\\n\\nPurging scenes!\\n\\n\")\r\n\r\n self.scenes = []\r\n self.requests.append({\"type\": \"GetSceneList\"})",
"def unload(self):\n\t\tself.config(image=None)\n\t\tself.frames = None",
"def snapshot(self):\n self._instances = self._instances_active\n self._instances_active = set()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Method that makes a decision about worker placement
|
def decide_place(self, wid):
    x = 0
    y = 0
    # NB: x and y advance together, so only the diagonal cells
    # (0, 0) through (5, 5) are ever examined.
    while x < 6 and y < 6:
        worker_present = self.__state.get_worker_id(x, y)
        if worker_present is None and self.__rule_checker.check_place(self.__pid, wid, x, y):
            return (x, y)
        x += 1
        y += 1
    raise ValueError("Nowhere to place")
|
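Since x and y are incremented together, decide_place() only ever examines the diagonal cells (0,0) through (5,5), a simple but valid placement strategy on a 6x6 board. A standalone sketch of the same scan, with the state and rule checker replaced by an occupied-set and a predicate (both stand-ins, not from the source):

def decide_place(occupied, is_legal):
    """Scan the board diagonal for the first free, legal cell."""
    for x in range(6):  # x == y on every iteration
        if (x, x) not in occupied and is_legal(x, x):
            return (x, x)
    raise ValueError("Nowhere to place")

# With (0,0) and (1,1) taken, the next diagonal cell wins:
print(decide_place({(0, 0), (1, 1)}, lambda x, y: True))  # -> (2, 2)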
[
"def getWorker(self, position: Union[Unit, Point2, Point3]):\n workersGathering: Units = self.workers.gathering\n\n if workersGathering:\n # select worker closest to pos or unit\n return workersGathering.closest_to(position)\n else:\n raise Exception(\"There are no gathering workers which could be used to build \" + self.currentTask)",
"def _is_worker(self):\n return (\n dist_utils.has_chief_oracle() and not dist_utils.is_chief_oracle()\n )",
"def spawn_worker_bees_in_zone(self, zone, count):\n self.worker_bees = self.worker_bees + zone.spawn_workers(count, self.function)\n return zone.best_bee",
"def selectWorker(self):\n min_ = float(\"inf\")\n selected = None\n\n print \"class Coordinator: selectWorker(): cache = \"\n print cache[nodes]\n if cache[nodes]:\n for key, value in cache[nodes].items():\n if int(value)< min_:\n selected = key\n min_=int(value)\n\n print \"class Coordinator: selectWorker(): selected worker: \" + selected\n\n if not self.checkWorkload(selected, min_):\n self.selectWorker()\n\n else:\n print \"class Coordinator: selectWorker(): no cache[nodes], returning None\"\n return selected",
"def is_root_worker():\n return get_rank() == ROOT_RANK",
"def is_worker():\n return _IN_WORKER is True",
"def assign_work(workorder):\n\n # get cert techs\n elig_workers = get_eligible_workers(workorder['equipment_type'])\n\n # get facility location and calculate travel time and note if full\n wo_fac = data_tools.get_facility_detail(workorder['facility'])\n gmaps_tools.get_drive_time(wo_fac['latit'], wo_fac['longit'])\n\n # need to figure out how to store current time left of job -- feature in workers?\n\n # check priority\n #",
"def _barrier_worker(self):\n pass",
"def fetch_free_worker(self):\n for w in range(self.N):\n if self.matched_workers[w] == -1:\n return w\n else:\n return w + 1",
"def _startSomeWorkers(self):\r\n if len(self.processes) < self.max:\r\n self.startAWorker()",
"def find_worker_at_time(self, time_dt):\n\n self.logger.debug(\"Finding worker for time: {}\".format(time_dt.strftime(\"%H:%M\")))\n ending_shift = {}\n for shift_position in settings[\"order_to_assign_general_shift\"]:\n self.logger.debug(\"For position: {}\".format(shift_position))\n for worker in schedule[shift_position]:\n self.logger.debug(\" Checking worker: {}\".format(worker[\"last_name\"] + \" , \" + worker[\"first_name\"]))\n start_time, end_time = self.convert_times_to_datetime(worker[\"start_time\"], worker[\"end_time\"])\n if self.compare_times(start_time, time_dt) <= 0 and self.compare_times(end_time, time_dt) == 1:\n self.logger.debug(\" Found worker!\")\n return worker\n elif self.compare_times(start_time, time_dt) <= 0 and self.compare_times(end_time, time_dt) <= 0:\n if ending_shift == {}:\n self.logger.debug(\" Worker is possibly the last worker of the night\")\n ending_shift = {\"end_time\": end_time,\n \"last_name\": worker[\"last_name\"],\n \"first_name\": worker[\"first_name\"],\n \"position\": shift_position}\n elif self.compare_times(end_time, ending_shift[\"end_time\"]) > 0:\n self.logger.debug(\" Worker is possibly the last worker of the night\")\n ending_shift = {\"end_time\": end_time,\n \"last_name\": worker[\"last_name\"],\n \"first_name\": worker[\"first_name\"],\n \"position\": shift_position}\n elif \"Manager\" in shift_position:\n self.logger.debug(\" Checking if worker is the last worker of the night\")\n if self.compare_times(ending_shift[\"end_time\"], time_dt) <= 0:\n self.logger.debug(\" Worker is possibly the last worker of the night\")\n ending_shift = {\"end_time\": end_time,\n \"last_name\": worker[\"last_name\"],\n \"first_name\": worker[\"first_name\"]}\n else:\n self.logger.debug(\" Worker is not the last worker of the night\")\n\n if ending_shift == {}:\n self.logger.info(\"Time '{}' has no workers\".format(time_dt.strftime(\"%H:%M\")))\n return {\"last_name\": \"{Unassigned}\", \"first_name\": \"{Unassigned}\"}\n else:\n self.logger.info(\"Time '{0}' has worker '{1}, '{2}'\".format(time_dt.strftime(\"%H:%M\"),\n ending_shift[\"last_name\"],\n ending_shift[\"first_name\"]))\n return {\"last_name\": ending_shift[\"last_name\"], \"first_name\": ending_shift[\"first_name\"]}\n # Managers only?",
"def allocate_worker(self):\n raise NotImplementedError",
"def do_worker_verification(self, worker_obj):\n pass",
"def is_local_root_worker():\n return get_local_rank() == ROOT_RANK",
"def addHardWorkerWithTaskMustHaveShift(self):\n\n\n for d in range(self.num_days):\n for w in range(self.num_workers):\n self.solver.Add((self.task[(w, d)] >= 1) == (self.shift[(w, d)] >= 1))",
"def addHardWorkersMustBeAssignedToAllowedTasks(self):\n #Example:\n #At least 2 M shifts must be set on day 0\n #exp1 = [self.shifts[(w, 0)] == 1 for w in range(self.num_workers)]\n #self.solver.Add(self.solver.Sum(exp1) >= 3)\n #numero de supervisores assignados =1 en turno manana\n #exp2 = [self.tasks[(w, 0)] == 1 for w in range(self.num_workers)]\n #self.solver.Add(self.solver.Sum(exp2) == 1)\n\n exp1 = [(self.task[(w, 0)] == 1) * (self.shift[(w, 0)] == 1) for w in range(self.num_workers)]\n exp2 = [(self.task[(w, 0)] == 2) * (self.shift[(w, 0)] == 1) for w in range(self.num_workers)]\n self.solver.Add(self.solver.Sum(exp1) >= 4)\n self.solver.Add(self.solver.Sum(exp2) >= 2)",
"def workRequirement(world, action):",
"def myWorkerDistribution(self):\n\n mineralTags = [x.tag for x in self.mineral_field]\n gas_buildingTags = [x.tag for x in self.gas_buildings]\n\n workerPool = Units([], self)\n workerPoolTags = set()\n\n # # Find all gas_buildings that have surplus or deficit\n deficit_gas_buildings = {}\n surplusgas_buildings = {}\n for g in self.gas_buildings.filter(lambda x: x.vespene_contents > 0):\n # Only loop over gas_buildings that have still gas in them\n deficit = g.ideal_harvesters - g.assigned_harvesters\n if deficit > 0:\n deficit_gas_buildings[g.tag] = {\"unit\": g, \"deficit\": deficit}\n elif deficit < 0:\n surplusWorkers = self.workers.closer_than(10, g).filter(\n lambda w: w not in workerPoolTags\n and len(w.orders) == 1\n and w.orders[0].ability.id in [AbilityId.HARVEST_GATHER]\n and w.orders[0].target in gas_buildingTags\n )\n for i in range(-deficit):\n if surplusWorkers.amount > 0:\n w = surplusWorkers.pop()\n workerPool.append(w)\n workerPoolTags.add(w.tag)\n surplusgas_buildings[g.tag] = {\"unit\": g, \"deficit\": deficit}\n\n # # Find all townhalls that have surplus or deficit\n deficitTownhalls = {}\n surplusTownhalls = {}\n for th in self.townhalls:\n deficit = th.ideal_harvesters - th.assigned_harvesters\n if deficit > 0:\n deficitTownhalls[th.tag] = {\"unit\": th, \"deficit\": deficit}\n elif deficit < 0:\n surplusWorkers = self.workers.closer_than(10, th).filter(\n lambda w: w.tag not in workerPoolTags\n and len(w.orders) == 1\n and w.orders[0].ability.id in [AbilityId.HARVEST_GATHER]\n and w.orders[0].target in mineralTags\n )\n # workerPool.extend(surplusWorkers)\n for i in range(-deficit):\n if surplusWorkers.amount > 0:\n w = surplusWorkers.pop()\n workerPool.append(w)\n workerPoolTags.add(w.tag)\n surplusTownhalls[th.tag] = {\"unit\": th, \"deficit\": deficit}\n \n # ---------------------------------------------\n\n # We now know which building has a deficit and which one has a surplus. If a building has a surplus\n # the workers are added to the worker pool. 
Whenever we have anything in the worker pool we want to\n # distribute those first.\n\n if bool(workerPool):\n\n # iterate deficit townhalls\n for townhallTag, info in deficitTownhalls.items():\n # get the minerals close to the current townhall\n mineralFields: Units = self.mineral_field.closer_than(10, info[\"unit\"])\n # if there are any\n if mineralFields:\n # get the deficit (missing worker to optimal performance)\n deficit = info[\"deficit\"]\n # check if the worker pool does contain anything\n workersLeft = bool(workerPool)\n # if there is a deficit move one worker to the townhall from the worker pool\n if deficit > 0 and workersLeft:\n worker: Unit = workerPool.pop()\n mineralField: Unit = mineralFields.closest_to(worker)\n self.loggerBase.info(\"Moving one worker to harvest minerals at \" + str(info[\"unit\"]))\n if len(worker.orders) == 1 and worker.orders[0].ability.id in [AbilityId.HARVEST_RETURN]:\n worker.gather(mineralField, queue=True)\n else:\n worker.gather(mineralField)\n # iterate deficit gas buildings\n for gasTag, info in deficit_gas_buildings.items():\n # get the deficit (missing worker to optimal performance)\n deficit = info[\"deficit\"]\n # check if the worker pool does contain anything\n workersLeft = bool(workerPool)\n # if there is a deficit move one worker to the townhall from the worker pool\n if deficit > 0 and workersLeft:\n worker: Unit = workerPool.pop()\n self.loggerBase.info(\"Moving one worker to harvest gas at \" + str(info[\"unit\"]))\n if len(worker.orders) == 1 and worker.orders[0].ability.id in [AbilityId.HARVEST_RETURN]:\n worker.gather(info[\"unit\"], queue=True)\n else:\n worker.gather(info[\"unit\"])\n else:\n # Whenever we do not have worker in the worker pool we want to move some workers to harvest gas but only if a certain ratio between\n # total vespene workers and total mineral workers is not exceeded.\n\n totalMineralWorkers = 0\n totalVespeneWorkers = 0\n\n for townhall in self.townhalls.ready:\n totalMineralWorkers += townhall.assigned_harvesters\n for gasBuilding in self.gas_buildings.ready:\n totalVespeneWorkers += gasBuilding.assigned_harvesters\n\n # only if less than 33% workers are on vespene\n if (totalVespeneWorkers / (totalMineralWorkers + totalVespeneWorkers)) < 0.34:\n for gasTag, info in deficit_gas_buildings.items():\n worker: Unit = self.workers.gathering.closest_to(info[\"unit\"].position)\n self.loggerBase.info(\"Moving one worker to \" + str(info[\"unit\"]))\n if len(worker.orders) == 1 and worker.orders[0].ability.id in [AbilityId.HARVEST_RETURN]:\n worker.gather(info[\"unit\"], queue=True)\n else:\n worker.gather(info[\"unit\"])\n \n # redistribute idle workers\n if len(self.workers.idle) > 0:\n if self.townhalls:\n for worker in self.workers.idle:\n townhall: Unit = self.townhalls.closest_to(worker)\n mineralFields: Units = self.mineral_field.closer_than(10, townhall)\n if mineralFields:\n mineralField: Unit = mineralFields.closest_to(worker)\n self.loggerBase.info(\"Moving one worker to harvest minerals at \" + str(mineralField))\n worker.gather(mineralField)",
"def survey_component(self, environment: env.map.Map):\n\n position_before = np.copy(self.geometry.position)\n\n if self.current_path is None:\n self.current_path = Path()\n self.current_path.add_position(self.geometry.position)\n\n if self.current_visited_sites is None:\n self.current_visited_sites = []\n\n next_position, current_direction = self.move(environment)\n if simple_distance(self.geometry.position, next_position) <= Agent.MOVEMENT_PER_STEP:\n self.geometry.position = next_position\n ret = self.current_path.advance()\n\n if not ret:\n self.update_local_occupancy_map(environment)\n\n # if the component was considered to be unfinished but is now confirmed to be, switch to next another\n if self.check_component_finished(self.local_occupancy_map):\n self.current_task = Task.FIND_NEXT_COMPONENT\n self.task_history.append(self.current_task)\n self.current_path = None\n self.current_visited_sites = None\n return\n\n current_site_tuple = (tuple(self.current_grid_position), tuple(self.current_grid_direction))\n if current_site_tuple in self.current_visited_sites:\n if self.check_component_finished(self.local_occupancy_map):\n self.current_task = Task.TRANSPORT_BLOCK\n self.task_history.append(self.current_task)\n self.current_path = None\n self.current_visited_sites = None\n else:\n self.current_task = Task.RETURN_BLOCK\n self.task_history.append(self.current_task)\n self.current_path = None\n self.current_visited_sites = None\n return\n\n # adding location and direction here to check for revisiting\n self.current_visited_sites.append(current_site_tuple)\n\n # the checks need to determine whether the current position is a valid attachment site\n position_ahead_occupied = environment.check_occupancy_map(\n self.current_grid_position + self.current_grid_direction)\n position_around_corner_empty = environment.check_occupancy_map(\n self.current_grid_position + self.current_grid_direction +\n np.array([-self.current_grid_direction[1], self.current_grid_direction[0], 0], dtype=\"int64\"),\n lambda x: x == 0)\n\n # if block ahead, turn right\n # if position around corner empty, turn left\n # if neither of these, continue straight\n if position_ahead_occupied:\n # turn right\n self.current_grid_direction = np.array([self.current_grid_direction[1],\n -self.current_grid_direction[0], 0],\n dtype=\"int64\")\n elif position_around_corner_empty:\n # first move forward (to the corner)\n self.current_path.add_position(self.geometry.position + Block.SIZE * self.current_grid_direction)\n reference_position = self.current_path.positions[-1]\n\n # then turn left\n self.current_grid_position += self.current_grid_direction\n self.current_grid_direction = np.array([-self.current_grid_direction[1],\n self.current_grid_direction[0], 0],\n dtype=\"int64\")\n self.current_grid_position += self.current_grid_direction\n self.current_path.add_position(reference_position + Block.SIZE * self.current_grid_direction)\n else:\n # otherwise site \"around the corner\" occupied -> continue straight ahead\n self.current_grid_position += self.current_grid_direction\n self.current_path.add_position(self.geometry.position + Block.SIZE * self.current_grid_direction)\n else:\n self.geometry.position = self.geometry.position + current_direction\n\n self.per_task_distance_travelled[Task.SURVEY_COMPONENT] += simple_distance(position_before,\n self.geometry.position)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Method that makes a decision about where to build
|
def decide_build(self, action):
pass
|
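decide_build() is a stub that a concrete strategy is meant to fill in. A hedged sketch of one trivial policy; the parameter shape and the policy itself are invented for illustration and do not come from the source:

def decide_build(worker_position, board_size=6):
    # Naive policy: return the first in-bounds cell adjacent to the worker.
    x, y = worker_position
    for dx in (-1, 0, 1):
        for dy in (-1, 0, 1):
            if (dx, dy) == (0, 0):
                continue
            bx, by = x + dx, y + dy
            if 0 <= bx < board_size and 0 <= by < board_size:
                return (bx, by)
    raise ValueError("Nowhere to build")

print(decide_build((0, 0)))  # -> (0, 1): first in-bounds neighbour scanned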
[
"def _check_custom_build(self):\n pass",
"def build(self):\n if self.moveCloseToObject(self.radius[Locals.BUILD], self.currentTask):\n Builder.build(self)",
"def buildBase(self):\n if bool(self.expansionLocations):\n location: Point2 = self.expansionLocations.pop(0)\n worker: Unit = self.workers.gathering.closest_to(location)\n if self.can_place(self.currentTask, location):\n worker.build(self.currentTask, location)\n else:\n raise Exception(\"could not build the command center where it was supposed to be built!\")\n else:\n raise Exception(\"No more places to build command expansions!\")",
"def build_path(self):\n print(\"Attempting to build path from {} to {}\".format(self.world_start, self.world_goal))\n try:\n self.determine_path()\n except PathNotFoundError:\n print(\"Couldn't find a legal path from {} to {}, skipping this edge of MST!\".format(self.world_start, self.world_goal))\n if self.build_in_minecraft:\n self.place_path()",
"def _build(self):\n if self.args.VM_NAME:\n bld = Builder(self.args.VM_NAME)\n else:\n bld = Builder(self._discover_templates())\n if self.args.stable:\n bld.build('stable')\n result = bld.upload(build='stable')\n else:\n bld.build()\n result = bld.upload()\n # Send mail only if asked and Builder.upload() return\n # not empty 'uploaded' list.\n if self.args.mail and result[1]:\n bld.mail(result[0])\n return result",
"def conditionBuild(self, batch):\n pass",
"def prepare_leo(self):",
"def test_build_creation(self):",
"def build2(_, clean=False):\n if clean:\n print('cleaning')\n print('building')",
"def build_model(self):\n ...",
"def build_cfg(self):\n for block in self.basic_blocks:\n if not block.ends_unconditional():\n if block.next:\n block.next.parents.append(block)\n block.children.append(block.next)\n targets = block.get_targets()\n if len(targets) > 0:\n for b in self.basic_blocks:\n starters = b.get_start_markers()\n for t in targets:\n if t in starters:\n b.parents.append(block)\n block.children.append(b)\n break\n if 'onOptionsItemSelected' in self.signature and 'MainActivity' in self.file.name:\n from graphviz import Digraph\n dot = Digraph()\n self.basic_blocks[0].graph(dot, done=[])\n # dot.render('OUT.png', view=True)\n with open('cfg.dot', 'w+') as f:\n f.write(dot.source)",
"def test_multiple_build_retrieval(self):",
"def setup_build_properties(self):",
"def build_steps_to_fetch_from(self, build_steps_to_check):\n release_build_steps = set()\n debug_build_steps = set()\n for builder, step in build_steps_to_check:\n port = self._tool.port_factory.get_from_builder_name(builder)\n if port.test_configuration().build_type == 'release':\n release_build_steps.add((builder, step))\n else:\n debug_build_steps.add((builder, step))\n\n build_steps_to_fallback_paths = defaultdict(dict)\n #TODO: we should make the selection of (builder, step) deterministic\n for builder, step in list(release_build_steps) + list(\n debug_build_steps):\n if not self._tool.builders.is_wpt_builder(builder):\n # Some result db related unit tests set step to None\n is_legacy_step = step is None or 'blink_web_tests' in step\n flag_spec_option = self._tool.builders.flag_specific_option(\n builder, step)\n port = self._tool.port_factory.get_from_builder_name(builder)\n port.set_option_default('flag_specific', flag_spec_option)\n fallback_path = port.baseline_search_path()\n if fallback_path not in list(\n build_steps_to_fallback_paths[is_legacy_step].values()):\n build_steps_to_fallback_paths[\n is_legacy_step][builder, step] = fallback_path\n return (set(build_steps_to_fallback_paths[True])\n | set(build_steps_to_fallback_paths[False]))",
"def create_reference_project(info_list):\n prj = Project(True)\n\n for building in info_list[:]:\n print('------------')\n print(building.building_number)\n print(building.area)\n print(building)\n\n if building.usage_type == 'office':\n prj.type_bldg_office(\n name=str(building.building_number),\n year_of_construction=building.year_of_construction,\n number_of_floors=building.floors,\n height_of_floors=building.height_of_floors,\n net_leased_area=building.area,\n office_layout=0,\n window_layout=0,\n construction_type=building.weight)\n elif building.usage_type == 'institute8':\n prj.type_bldg_institute8(\n name=str(building.building_number),\n year_of_construction=building.year_of_construction,\n number_of_floors=building.floors,\n height_of_floors=building.height_of_floors,\n net_leased_area=building.area,\n office_layout=0,\n window_layout=0,\n construction_type=building.weight)\n elif building.usage_type == 'institute4':\n prj.type_bldg_institute4(\n name=str(building.building_number),\n year_of_construction=building.year_of_construction,\n number_of_floors=building.floors,\n height_of_floors=building.height_of_floors,\n net_leased_area=building.area,\n office_layout=0,\n window_layout=0,\n construction_type=building.weight)\n elif building.usage_type == 'institute':\n prj.type_bldg_institute(\n name=str(building.building_number),\n year_of_construction=building.year_of_construction,\n number_of_floors=building.floors,\n height_of_floors=building.height_of_floors,\n net_leased_area=building.area,\n office_layout=0,\n window_layout=0,\n construction_type=building.weight)\n elif building.usage_type == 'residential':\n prj.type_bldg_residential(\n name=str(building.building_number),\n year_of_construction=building.year_of_construction,\n number_of_floors=building.floors,\n height_of_floors=building.height_of_floors,\n net_leased_area=building.area,\n residential_layout=0,\n neighbour_buildings=0,\n attic=0,\n cellar=0,\n dormer=0,\n construction_type=building.weight)\n return prj",
"def _build(self):\r\n self._buildWindow = Toplevel()\r\n\r\n Label(self._buildWindow, text=\"What would you like to do?\").grid(\r\n row=0, column=0, columnspan=2)\r\n Button(self._buildWindow, text=\"Build\", command=self._createBuildWindow).grid(row=1, column=0)\r\n Button(self._buildWindow, text=\"Sell\", command=self._createSellWindow).grid(row=1, column=1)",
"def test_build(self):\n self.assertTrue(hasattr(template.Page, 'build'))\n self.assertTrue(\n hasattr(template.Page.build, '__isabstractmethod__') and\n getattr(template.Page.build, '__isabstractmethod__', False))",
"def any_builds_running(self):",
"def buildGasBuilding(self):\n # cant build more gas buildings than townhalls\n if len(self.gas_buildings.ready) + self.already_pending(self.currentTask) <= len(self.townhalls) * 2:\n \n # prefer townhalls that are ready\n for townhall in self.townhalls.ready:\n return self.buildGasBuildingAtTownhall(townhall)\n # townhalls that are not ready\n for townhall in self.townhalls.not_ready:\n return self.buildGasBuildingAtTownhall(townhall)",
"def _build(self, builder: Any) -> Any:\n try:\n return build(builder)\n except MissingFieldError as err:\n raise ASTEvaluationError(\n f\"missing required attribute '{err.field.name}'\", source=self.source\n )"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Reads file and stores base64 data in database based on creation date.
|
def read_file_data(filepath, pin, time_session):
    # gmtime() + mktime() round-trips the mtime through a UTC struct_time;
    # the result is effectively a naive datetime of the file's mtime in UTC.
    create_time = time.gmtime(os.path.getmtime(filepath))
    create_datetime = datetime.fromtimestamp(time.mktime(create_time))
# Open file and read data
with open(filepath, "rb") as f:
data_raw = f.read()
# Encode to base64
b64data = base64.b64encode(data_raw)
# Save to DB
try:
with connection.cursor() as cursor:
cursor.execute("SELECT MAX(dashr_create_time) FROM "
"dashr WHERE pin = {}"
.format(pin))
max_time = cursor.fetchone()["MAX(dashr_create_time)"]
if max_time is None:
max_time = datetime.min
except Exception as e:
print(str(e))
return str(e)
try:
with connection.cursor() as cursor:
if create_datetime > max_time:
# Assuming DASHR RTCs don't reset and/or go backwards
print("time is greater - save data to db")
cursor.execute("INSERT INTO dashr VALUES ({},'{}','{}','{}')".
format(pin, b64data.decode("utf-8"),
create_datetime, time_session))
else:
# if data past this time has previously been inserted into DB
print("don't save")
print("create time: " + datetime.strftime(create_datetime,
"%Y-%m-%d %H:%M:%S"))
print("max time: " + datetime.strftime(max_time,
"%Y-%m-%d %H:%M:%S"))
return 0
# Commit changes (insert) to DB
connection.commit()
return 1
except Exception as e:
print(str(e))
return str(e)
|
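The document above splices pin and the base64 payload into SQL with str.format, which breaks on quotes in the data and is open to injection. A safer sketch of the same select-then-insert flow using DB-API parameter binding (PyMySQL-style %s placeholders; the dashr table layout is assumed from the original):

from datetime import datetime

def save_reading(connection, pin, b64data, create_datetime, time_session):
    with connection.cursor() as cursor:
        # The driver binds %s placeholders itself; nothing is spliced into SQL.
        cursor.execute(
            "SELECT MAX(dashr_create_time) FROM dashr WHERE pin = %s", (pin,))
        row = cursor.fetchone()
        max_time = row[0] if row and row[0] is not None else datetime.min
        if create_datetime <= max_time:
            return 0  # an equal or newer capture is already stored
        cursor.execute(
            "INSERT INTO dashr VALUES (%s, %s, %s, %s)",
            (pin, b64data.decode("utf-8"), create_datetime, time_session))
    connection.commit()
    return 1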
[
"def _encode_file_base64_(self, file_path):\n encoded_file = base64.b64encode(open(file_path, 'rb').read())\n return self._base64_to_str(encoded_file)\n # return str(encoded_file)[2:-1]",
"def base64_read_file(filepath):\n with open(filepath, 'rb') as stream:\n data = stream.read()\n file_64_encode = base64.standard_b64encode(data)\n sys.stdout.write(file_64_encode.decode('utf-8'))\n sys.stdout.flush()",
"def store_file(self, input_data):\n absolute_path = os.path.join(systems['systems'][input_data['source_system']]['base_path'], input_data['source_path'])\n logging.info(\"Storing file at: {0}\".format(absolute_path))\n try:\n os.mkdir(os.path.split(absolute_path)[0])\n with open(absolute_path, 'wb') as f:\n f.write(base64.b64decode(input_data['data'].replace(\"\\n\", \"\"), validate=True))\n del input_data['data']\n except OSError as e:\n logging.error(\"Unable to store uploaded file: {0} - {1}\".format(absolute_path, e))\n except binascii.Error:\n logging.error(\"Invalid base64 encoded data\")\n return input_data",
"def File_to_DB():\n conn = mysql.connector.connect(\n user='root',\n password='MaximumHaze16',\n host='localhost',\n database='seniordesign'\n )\n cur = conn.cursor()\n fr = open(\"C:\\\\users\\\\sarah\\\\desktop\\\\dbtransfer2\\\\transferFile.txt\", 'r')\n count =0\n for line in fr:\n id = int(line[0:line.find(\"%\")])\n title= line[line.find(\"%\")+1:line.find(\"%%\")]\n author = line[line.find(\"%%\")+2:line.find(\"%%%\")]\n genre = line[line.find(\"%%%\")+3:line.find(\"%%%%\")]\n length = int(line[line.find(\"%%%%\")+4:line.find(\"%%%%%\")])\n cur.execute(\"insert into example values(%s,%s,%s,%s,%s)\",(id,title,author,genre,length))\n\n conn.commit()\n conn.close()\n fr.close()",
"def read_data_base(self):\n try:\n with open(self.path, 'rb') as file:\n self.data = pickle.load(file)\n except (OSError, IOError):\n print('users_database not found')",
"def decode_db(self, fp, append_queue=False):\n (length,) = int_encoding.unpack(fp.read(4))\n collection_id = fp.read(length)\n col_filename = os.path.join(self.dir, 'new_collection_id.txt')\n col_fp = open_create(col_filename)\n try:\n col_fp.write(collection_id)\n finally:\n col_fp.close()\n (length,) = int_encoding.unpack(fp.read(4))\n collection_secret = fp.read(length)\n col_filename = os.path.join(self.dir, 'new_collection_secret.txt')\n col_fp = open_create(col_filename)\n try:\n col_fp.write(collection_secret)\n finally:\n col_fp.close()\n (length,) = int_encoding.unpack(fp.read(4))\n db_name = os.path.join(self.dir, 'new_database')\n queue_filename = os.path.join(self.dir, 'queue')\n queue_index_fp = None\n if os.path.exists(queue_filename + '.index'):\n queue_index_fp = open(queue_filename + '.index', 'rb')\n lock_file(queue_index_fp, LOCK_EX, 0, 0, os.SEEK_SET)\n new_fp = open_create(db_name + '.index')\n try:\n self._copy_chunked(fp, new_fp, length)\n if queue_index_fp is not None:\n new_fp.write(queue_index_fp.read())\n finally:\n new_fp.close()\n (length,) = int_encoding.unpack(fp.read(4))\n new_fp = open_create(db_name)\n try:\n self._copy_chunked(fp, new_fp, length)\n if append_queue and os.path.exists(queue_filename):\n with open(queue_filename, 'rb') as copy_fp:\n ## FIXME: chunk\n new_fp.write(copy_fp.read())\n finally:\n new_fp.close()\n for name in 'new_collection_id.txt', 'new_collection_secret.txt', 'new_database.index', 'new_database':\n os.rename(os.path.join(self.dir, name),\n os.path.join(self.dir, name[4:]))\n if append_queue:\n ## FIXME: also not atomic:\n for name in 'queue', 'queue.index':\n name = os.path.join(self.dir, name)\n if os.path.exists(name):\n os.unlink(name)\n if queue_index_fp is not None:\n lock_file(queue_index_fp, LOCK_UN, 0, 0, os.SEEK_SET)",
"def do_POST(self):\n\n content_len = int(self.headers.get_all('content-length')[0])\n x = self.rfile.read(content_len)\n \n uuid,img = x.split(b';')\n uuid = (uuid.decode('ascii'))\n \n img = bs.b64decode(img)\n\n params = (uuid,memoryview(img))\n self.c.execute('insert into images values(?, ?)', params)\n self.send_response(200)\n self.end_headers()\n dat = self.c.execute('select * from images;')\n\n self.conn.commit()",
"def process_file(self, file, load):\n with gzip.open(file, mode='rt', encoding=\"ISO-8859-1\") as f:\n # add to susp users list for plotting later\n file_content = f.readlines()\n last_tweet = json.loads(file_content[0])\n user = {\n \"user_id\": last_tweet[\"user\"][\"id\"],\n \"tweets\": file_content\n }\n if self.is_suspicious(file_content):\n self.suspicious_users.append(user)\n # Build objects that will be inserted into dynamodb\n objs = self.build_objects_to_store(\n file_content,\n user[\"user_id\"]\n )\n print(file)\n for obj in objs:\n load(obj)\n else:\n self.legit_users.append(user)",
"def b64content(self) -> bytes:\n with self.as_file() as file:\n return base64.b64encode(file.read())",
"def b64_to_image(self,data,savepath):\r\n fl = open(savepath,\"wb\")\r\n fl.write(data.decode('base4'))\r\n fl.close()",
"def add_file_to_db(filename, dbname='HuGaDB.db'):\n con = sqlite3.connect(dbname) \n cur = con.cursor() \n \n data = np.genfromtxt(filename, delimiter='\\t', skip_header=4)\n \n if isabs(filename):\n filename=basename(filename)\n\n cur.execute(\"INSERT INTO files VALUES(NULL, '{0}')\".format(filename))\n con.commit()\n row_id = cur.execute('SELECT id FROM files WHERE filename=\"{0}\"'.format(filename)).fetchone()[0]\n con.commit()\n \n for i, row in enumerate(data):\n cur.execute(\"INSERT INTO data VALUES(NULL, {0}, {1}, \".format(row_id, i) + str(tuple(row.tolist())).replace(\"(\", \"\"))\n con.commit()\n con.close()",
"def save(self):\n data = {\n \"file_path\": self._file_path,\n \"local_md5\": self._local_md5,\n \"remote_md5\": self._remote_md5,\n \"created_at\": self._created_at.strftime(\n DbndLocalFileMetadataRegistry._date_format\n ),\n \"ttl\": self._ttl,\n }\n with open(self._cache_file_path, \"w+\") as f:\n f.write(json.dumps(data))",
"def t_createfile(self, filepath, uesec_ctime=None):",
"def to_internal_value(self, data):\n if not hasattr(data, 'file'):\n data = self.to_base64(data)\n return super().to_internal_value(data)",
"def load_fabrics(file_name):\n\n\traw_data = open(file_name)\n\n\tfor line in raw_data:\n\t\trow = line.rstrip().lstrip().split(\"|\")\n\t\tfabric_name = row[0]\n\t\tfabric_description = row[1]\n\t\tfabric_thumbnail = row[2]\n\t\tdiscontinued = row[3]\n\n\t\tfabric = Fabric(fabric_name=fabric_name, fabric_description=fabric_description, fabric_thumbnail=fabric_thumbnail, discontinued=discontinued)\n\n\t\tdb.session.add(fabric)\n\tdb.session.commit()",
"def HashFileToDatabase(self, filename):\n stat = os.stat(filename)\n timestamp_seconds = int(stat.st_mtime)\n size = stat.st_size\n path, base_name = os.path.split(filename)\n from_database = self.repository.Get(path, base_name)\n if from_database:\n if (from_database.GetTimestampSeconds() == timestamp_seconds\n and from_database.GetSize() == size):\n return from_database\n md5hash = HashFile(filename, self.console)\n if not md5hash:\n return None\n file_stats = FileStats(path, base_name, md5hash, size, timestamp_seconds)\n self.repository.Upsert(file_stats)\n return file_stats",
"def b64_to_image(self, data, savepath):\n\t\tfl = open(savepath, \"wb\")\n\t\tfl.write(data.decode('base64'))\n\t\tfl.close()",
"def test_read_img_as_b64(file_path, b64_path):\n from image import read_img_as_b64\n\n # Read expected string from text file\n with open(b64_path, \"r\") as file_obj:\n expected = file_obj.read()\n\n # Strip newline\n expected = (expected.split(\"\\n\"))[0]\n\n b64_string = read_img_as_b64(file_path)\n\n assert b64_string == expected",
"def read_raw_file(self) -> bytes:\n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling ``bridges.Bridge.getID()`` should return the binary encoded ``fingerprint``.
|
def test_integration_getID(self):
bridge = bridges.Bridge(self.nickname, self.ip, self.orport,
self.fingerprint)
self.assertEqual(self.id_digest, bridge.getID())
|
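The binary-encoded fingerprint is simply the 40-hex-digit fingerprint decoded to its 20 raw bytes. A small standard-library sketch of that conversion; the fingerprint value is an arbitrary example, not a real bridge:

import binascii

fingerprint = "4C17FB53631471196FAAFF55E5C6A15E9298CFCF"  # arbitrary example
id_digest = binascii.a2b_hex(fingerprint)

assert len(id_digest) == 20  # SHA-1 sized identity digest
assert binascii.b2a_hex(id_digest).decode().upper() == fingerprint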
[
"def getparticipantid(conn, fingerprint):\n oid = intquery(conn, \"select original_id from participant where \" \\\n + \"fingerprint=%s\", (fingerprint,))\n if oid != 0:\n return oid\n\n return intquery(conn, \"select idparticipant from participant where \" \\\n + \"fingerprint=%s\", (fingerprint,))",
"def test_Bridge_str_without_fingerprint(self):\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n del(bridge.fingerprint)\n\n identifier = str(bridge)\n self.assertEqual(identifier,\n ''.join(['$', '0'*40,\n '~', bridge.nickname]))",
"def test_Bridge_str_without_fingerprint_without_nickname(self):\n bridge = bridges.Bridge()\n identifier = str(bridge)\n self.assertEqual(identifier, ''.join(['$', '0'*40, '~', 'Unnamed']))",
"def get_datapath_id(self):\n return self.db_get_val('Bridge', self.br_name, 'datapath_id')",
"def _getfingerprint(self):\n\n return base64.b64encode(encryption.sha512(self.publickeyxml.encode())).decode()",
"def get_hub_identity():\n # TODO - implement reading from beaglebone IDPROM\n # For now this is a test data (same as backend/models/ExampleData.SQL)\n return 'I8FJPAN11X', 'AUTH_KEY IS EMPTY'",
"def fingerprint(self):\n return \"%s|%s\" % (self._debit_note_uid, self._uid)",
"def get_iphone_GUID(self):\n return self.parsed_info_file['GUID']",
"def fingerprint(self):\n return self.read_metadata_by_name(self.FINGERPRINT_KEY)",
"def fingerprint(self):\n if self._fingerprint is None:\n if self.molecule:\n self._fingerprint = self.molecule[0].fingerprint\n return self._fingerprint",
"def fingerprint(blob):\n # is it an integer?\n try:\n mpz_value = mpz(blob)\n except (TypeError, ValueError):\n pass\n else:\n string = utils.mpztob64(mpz_value)\n return b64encode(crypthash(string).digest())\n\n # is it a string?\n if isinstance(blob, str):\n return b64encode(crypthash(blob).digest())\n # is it a list?\n if isinstance(blob, (list, tuple)):\n list_of_fingerprints = [fingerprint(i) for i in blob]\n string = json.dumps(list_of_fingerprints, separators=(',',':'))\n return b64encode(crypthash(string).digest())\n # is it a dict?\n if isinstance(blob, dict):\n # is this dict already a hash of something?\n if \"#\" in blob:\n return blob[\"#\"]\n # otherwise, transform dict into array and fingerprint it\n keys = sorted(blob)\n list_of_fingerprints = [fingerprint([k, blob[k]]) \\\n for k in keys]\n string = json.dumps(list_of_fingerprints, separators=(',',':'))\n return b64encode(crypthash(string).digest())\n # is it None\n if blob is None:\n return fingerprint('None')\n # is it an object?\n try:\n # is it a class for which we can compute a fingerprint?\n return blob.fingerprint()\n except AttributeError:\n pass\n assert False, \"fingerprint cannot parse object\"",
"def fingerprint(self):\n # check whether the hash of this object is already known\n if self.attr_fingerprint[\"#\"] is not None:\n return self.attr_fingerprint[\"#\"]\n list_to_hash = []\n # Going through all fields that need to be taken into account\n for key in sorted(self.to_fingerprint):\n # Computing missing hashes\n if self.attr_fingerprint[key] is None:\n self.attr_fingerprint[key] = \\\n\t\t fingerprint([key, getattr(self, key)])\n # Building final string\n list_to_hash.append(self.attr_fingerprint[key])\n string = json.dumps(list_to_hash, separators=(',',':'))\n result = b64encode(crypthash(string).digest())\n self.attr_fingerprint[\"#\"] = result\n return result",
"def _get_fingerprint(arg, controller):\n\n if not arg:\n try:\n return controller.get_info('fingerprint')\n except:\n raise ValueError(\"We aren't a relay, no information to provide\")\n elif stem.util.tor_tools.is_valid_fingerprint(arg):\n return arg\n elif stem.util.tor_tools.is_valid_nickname(arg):\n try:\n return controller.get_network_status(arg).fingerprint\n except:\n raise ValueError(\"Unable to find a relay with the nickname of '%s'\" % arg)\n elif ':' in arg or stem.util.connection.is_valid_ipv4_address(arg):\n if ':' in arg:\n address, port = arg.rsplit(':', 1)\n\n if not stem.util.connection.is_valid_ipv4_address(address):\n raise ValueError(\"'%s' isn't a valid IPv4 address\" % address)\n elif port and not stem.util.connection.is_valid_port(port):\n raise ValueError(\"'%s' isn't a valid port\" % port)\n\n port = int(port)\n else:\n address, port = arg, None\n\n matches = {}\n\n for desc in controller.get_network_statuses():\n if desc.address == address:\n if not port or desc.or_port == port:\n matches[desc.or_port] = desc.fingerprint\n\n if len(matches) == 0:\n raise ValueError('No relays found at %s' % arg)\n elif len(matches) == 1:\n return list(matches.values())[0]\n else:\n response = \"There's multiple relays at %s, include a port to specify which.\\n\\n\" % arg\n\n for i, or_port in enumerate(matches):\n response += ' %i. %s:%s, fingerprint: %s\\n' % (i + 1, address, or_port, matches[or_port])\n\n raise ValueError(response)\n else:\n raise ValueError(\"'%s' isn't a fingerprint, nickname, or IP address\" % arg)",
"def fingerprint(self):\n public_key = self.key.publickey().exportKey('DER')\n return SHA256.new(public_key).hexdigest()",
"def public_id(self):\n return modhex(pack('>I', self.id))",
"def by_x509_fingerprint(self, fingerprint):\n dao = self.session.query(CertificateFingerprint)\\\n .filter(CertificateFingerprint.fingerprint == fingerprint)\\\n .one()\n return self.dto({\n 'type': 'x509.fingerprint',\n 'gsid': dao.gsid.hex,\n })",
"def unique_id(self):\n return _raw_util.raw_peak_detector2_fb_sptr_unique_id(self)",
"def ftduino_id_get(self):\n return self.comm('ftduino_id_get')",
"def hashid(self) :\n\t\ttry :\n\t\t\treturn self._hashid\n\t\texcept Exception as e:\n\t\t\traise e"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Test setting the `verified` attribute on a Bridge.
|
def test_integration_setVerified(self):
raise unittest.SkipTest(
("The setVerified() and isVerified() methods were not refactored "
"into the new bridgedb.bridges.Bridge class, as it's not clear "
"yet if they are necessary. Skip these tests for now."))
bridge = bridges.Bridge(self.nickname, self.ip, self.orport,
self.fingerprint)
bridge.setVerified()
self.assertTrue(bridge.isVerified())
self.assertTrue(bridge.verified)
self.assertEqual(self.id_digest, bridge.getID())
|
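Everything after the raise in the test above is unreachable; the body is kept only for when the methods return. The usual way to express that intent is unittest's skip decorator, which preserves the body without a dead raise. A minimal sketch:

import unittest

class BridgeTests(unittest.TestCase):
    @unittest.skip("setVerified()/isVerified() not yet refactored into "
                   "bridgedb.bridges.Bridge")
    def test_integration_setVerified(self):
        # Body is preserved verbatim but never executed while skipped.
        self.fail("should not run")

if __name__ == "__main__":
    unittest.main()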
[
"def test_account_verified(self):\n user = User.objects.get()\n token, uid = RegistrationAPIView.send_account_activation_email(user=user, send_email=False)\n response = self.verify_account(token, uid)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n user = User.objects.get()\n self.assertTrue(user.is_verified)",
"def test_user_can_be_verified(self):\n # Given\n form_data = {\"username\": \"testuser@email.com\", \"password\": \"password\"}\n self.client.post(\"/api/account/create\", data=form_data, headers=self.headers)\n\n # When\n\n form_data = {\"username\": \"testuser@email.com\", \"account_verified\": \"true\"}\n response = self.client.put(\"/api/account/create\", data=form_data, headers=self.headers)\n\n # Then\n self.assertEqual(response.status_code, 201)",
"def test_integration_setRunningStable(self):\n bridge = bridges.Bridge(self.nickname, self.ip, self.orport,\n self.fingerprint)\n self.assertFalse(bridge.running)\n self.assertFalse(bridge.stable)\n bridge.setStatus(True, True)\n self.assertTrue(bridge.running)\n self.assertTrue(bridge.stable)",
"def verify(self, value, save=True): # pragma: no cover\n raise NotImplementedError(\"This method must be implemented\")",
"def set_identity_verification(self, country_id, is_verified):\n is_verified = bool(is_verified)\n action = 'verify' if is_verified else 'unverify'\n\n with self.db.get_cursor() as cursor:\n old = cursor.one(\"\"\"\n\n SELECT id, is_verified\n FROM participant_identities\n WHERE participant_id=%(participant_id)s\n AND country_id=%(country_id)s\n\n \"\"\", dict(locals(), participant_id=self.id))\n\n cursor.run(\"\"\"\n\n UPDATE participant_identities\n SET is_verified=%(is_verified)s\n WHERE participant_id=%(participant_id)s\n AND country_id=%(country_id)s\n\n \"\"\", dict(locals(), participant_id=self.id))\n\n payload = dict( id=self.id\n , identity_id=old.id if old else None\n , country_id=country_id\n , new_value=is_verified\n , old_value=old.is_verified if old else None\n , action=action + ' identity'\n )\n\n self.app.add_event(cursor, 'participant', payload)\n self._update_has_verified_identity(cursor)",
"def test_proofing_with_a_verified_nin(self):\n user = self.app.central_userdb.get_user_by_eppn(self.test_user_eppn)\n verified_nin = Nin(\n number=self.test_user_nin, created_by='test', is_verified=True, verified_by='test', is_primary=True\n )\n user.nins.add(verified_nin)\n self.app.central_userdb.save(user)\n\n response = self.send_letter(self.test_user_nin, validate_response=False)\n self._check_error_response(\n response, type_='POST_LETTER_PROOFING_PROOFING_FAIL', payload={'message': 'User is already verified'},\n )\n\n proofing_state = self.app.proofing_statedb.get_state_by_eppn(user.eppn, raise_on_missing=False)\n assert proofing_state is None",
"def set_email_verified_status(self, is_verified: bool):\n self.is_email_verified = is_verified\n db.session.commit()",
"def _isTweetVerified(self):\n if self._tweet[\"user\"][\"verified\"] == True:\n return 1\n else:\n return 0",
"def test_voting_deploy(voting_pre):\n assert len(voting_pre.account) == 42\n assert len(voting_pre.address) == 42\n assert voting_pre.account != voting_pre.address",
"def test_ach_save_success(self):\n\n funding_source = FundingSources.get_user_ach_funding_source()\n\n amounts = self.client.funding_sources.ach(\n funding_source.token).verification_amounts()\n\n ach_verification = {\n \"verify_amount1\": amounts.verify_amount1,\n \"verify_amount2\": amounts.verify_amount2\n }\n\n result = self.client.funding_sources.ach.save(\n funding_source.token, ach_verification)\n\n verify = self.get_funding_source_verify(funding_source)\n\n verify['verification_status'] = 'ACH_VERIFIED'\n verify['active'] = True\n\n verify_ach_response_model(self, result, verify)",
"def test_provision_good(self):\n f = Mock()\n f.provision_user.return_value = 'http://example.org/a/user'\n app.config['PROVIDERS'] = {'foo': f}\n\n with uaac_set(app):\n with app.test_client() as c:\n rv = c.post('/api/v1/provision', data={'provider': 'foo', 'id': 'bar'})\n assert rv.status_code == 201\n assert rv.location == 'http://example.org/a/user'",
"def test_BridgeBackwardsCompatibility_setStatus_stable(self):\n bridge = bridges.BridgeBackwardsCompatibility(\n nickname=self.nickname,\n ip=self.address,\n orport=self.orPort,\n fingerprint=self.fingerprint,\n or_addresses={\"2006:42::123F\": 443, \"2006:42::123E\": 9001})\n self.assertIsInstance(bridge, bridges.BridgeBackwardsCompatibility)\n self.assertFalse(bridge.stable)\n self.assertFalse(bridge.flags.stable)\n\n bridge.setStatus(stable=True)\n self.assertTrue(bridge.stable)\n self.assertTrue(bridge.flags.stable)",
"def _is_verified(cls, contact_point: ContactPoint):\n return any([\n 'pmi-verified' in extension.url and getattr(extension, 'valueBoolean', False)\n for extension in contact_point.extension or []\n ])",
"def test_BridgeBackwardsCompatibility_setStatus_running(self):\n bridge = bridges.BridgeBackwardsCompatibility(\n nickname=self.nickname,\n ip=self.address,\n orport=\"anyport\",\n fingerprint=self.fingerprint,\n or_addresses={\"2006:42::123F\": 443, \"2006:42::123E\": 9001})\n self.assertIsInstance(bridge, bridges.BridgeBackwardsCompatibility)\n self.assertFalse(bridge.running)\n self.assertFalse(bridge.flags.running)\n\n bridge.setStatus(running=True)\n self.assertTrue(bridge.running)\n self.assertTrue(bridge.flags.running)",
"def test_is_avalible(self, obj, session):\n assert obj.is_avalible() is False\n\n session['convent_id'] = 'something'\n assert obj.is_avalible() is True",
"def test_Bridge_verifyExtraInfoSignature_good_signature(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.assertIsNone(self.bridge._verifyExtraInfoSignature(self.extrainfo))",
"def test_Bridge_setBlockedIn_IR_address(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.bridge.setBlockedIn('IR', address='179.178.155.140')\n self.assertTrue(self.bridge.isBlockedIn('ir'))\n self.assertFalse(self.bridge.isBlockedIn('cn'))",
"def test_Bridge_allVanillaAddresses_idempotency_self(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_setBlockedIn_CN_obfs2(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.bridge.setBlockedIn('CN', methodname='obfs2')\n self.assertTrue(self.bridge.isBlockedIn('CN'))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Test setting the `running` and `stable` attributes on a Bridge.
|
def test_integration_setRunningStable(self):
bridge = bridges.Bridge(self.nickname, self.ip, self.orport,
self.fingerprint)
self.assertFalse(bridge.running)
self.assertFalse(bridge.stable)
bridge.setStatus(True, True)
self.assertTrue(bridge.running)
self.assertTrue(bridge.stable)
|
[
"def test_BridgeBackwardsCompatibility_setStatus_running(self):\n bridge = bridges.BridgeBackwardsCompatibility(\n nickname=self.nickname,\n ip=self.address,\n orport=\"anyport\",\n fingerprint=self.fingerprint,\n or_addresses={\"2006:42::123F\": 443, \"2006:42::123E\": 9001})\n self.assertIsInstance(bridge, bridges.BridgeBackwardsCompatibility)\n self.assertFalse(bridge.running)\n self.assertFalse(bridge.flags.running)\n\n bridge.setStatus(running=True)\n self.assertTrue(bridge.running)\n self.assertTrue(bridge.flags.running)",
"def test_BridgeBackwardsCompatibility_setStatus_stable(self):\n bridge = bridges.BridgeBackwardsCompatibility(\n nickname=self.nickname,\n ip=self.address,\n orport=self.orPort,\n fingerprint=self.fingerprint,\n or_addresses={\"2006:42::123F\": 443, \"2006:42::123E\": 9001})\n self.assertIsInstance(bridge, bridges.BridgeBackwardsCompatibility)\n self.assertFalse(bridge.stable)\n self.assertFalse(bridge.flags.stable)\n\n bridge.setStatus(stable=True)\n self.assertTrue(bridge.stable)\n self.assertTrue(bridge.flags.stable)",
"def test_settingStable(self):\n self.flags.stable = True\n self.assertTrue(self.flags.stable, \"The Stable flag should be True\")",
"def test_settingRunning(self):\n self.flags.running = True\n self.assertTrue(self.flags.running, \"The Running flag should be True\")",
"async def test_attributes(hass: HomeAssistant) -> None:\n await setup_platform(hass, SWITCH_DOMAIN)\n\n state = hass.states.get(DEVICE_ID)\n assert state.state == STATE_OFF",
"async def test_binary_sensors(spa, setup_entry, hass: HomeAssistant) -> None:\n\n entity_id = f\"binary_sensor.{spa.brand}_{spa.model}_online\"\n state = hass.states.get(entity_id)\n # disabled by default\n assert state is None\n\n entity_id = f\"binary_sensor.{spa.brand}_{spa.model}_error\"\n state = hass.states.get(entity_id)\n assert state is not None\n assert state.state == STATE_OFF",
"async def test_state_detection(self):\n await self.assertUpdate([False, None, -1, None, None, None],\n (constants.STATE_OFF, None, None))\n\n await self.assertUpdate([True, False, -1, None, None, None],\n (constants.STATE_STANDBY, None, None))\n\n await self.assertUpdate([True, True, 1, \"com.amazon.tv.launcher\", None, None],\n (constants.STATE_IDLE, \"com.amazon.tv.launcher\", [\"com.amazon.tv.launcher\"]))\n\n # Amazon Video\n await self.assertUpdate([True, True, 1, constants.APP_AMAZON_VIDEO, 3, [constants.APP_AMAZON_VIDEO]],\n (constants.STATE_PLAYING, constants.APP_AMAZON_VIDEO, [constants.APP_AMAZON_VIDEO]))\n\n await self.assertUpdate([True, True, 1, constants.APP_AMAZON_VIDEO, 2, [constants.APP_AMAZON_VIDEO]],\n (constants.STATE_PAUSED, constants.APP_AMAZON_VIDEO, [constants.APP_AMAZON_VIDEO]))\n\n await self.assertUpdate([True, True, 1, constants.APP_AMAZON_VIDEO, 1, [constants.APP_AMAZON_VIDEO]],\n (constants.STATE_IDLE, constants.APP_AMAZON_VIDEO, [constants.APP_AMAZON_VIDEO]))\n\n # Amazon Video with custom state detection rules\n self.ftv._state_detection_rules = {constants.APP_AMAZON_VIDEO: ['media_session_state']}\n\n await self.assertUpdate([True, True, 2, constants.APP_AMAZON_VIDEO, 2, [constants.APP_AMAZON_VIDEO]],\n (constants.STATE_PAUSED, constants.APP_AMAZON_VIDEO, [constants.APP_AMAZON_VIDEO]))\n\n await self.assertUpdate([True, True, 5, constants.APP_AMAZON_VIDEO, 3, [constants.APP_AMAZON_VIDEO]],\n (constants.STATE_PLAYING, constants.APP_AMAZON_VIDEO, [constants.APP_AMAZON_VIDEO]))\n\n await self.assertUpdate([True, True, 5, constants.APP_AMAZON_VIDEO, 1, [constants.APP_AMAZON_VIDEO]],\n (constants.STATE_IDLE, constants.APP_AMAZON_VIDEO, [constants.APP_AMAZON_VIDEO]))\n\n self.ftv._state_detection_rules = {constants.APP_AMAZON_VIDEO: [{'standby': {'media_session_state': 2}}]}\n await self.assertUpdate([True, True, 2, constants.APP_AMAZON_VIDEO, None, [constants.APP_AMAZON_VIDEO]],\n (constants.STATE_IDLE, constants.APP_AMAZON_VIDEO, [constants.APP_AMAZON_VIDEO]))\n\n # Firefox\n await self.assertUpdate([True, True, 3, constants.APP_FIREFOX, 3, [constants.APP_FIREFOX]],\n (constants.STATE_PLAYING, constants.APP_FIREFOX, [constants.APP_FIREFOX]))\n\n await self.assertUpdate([True, True, 1, constants.APP_FIREFOX, 3, [constants.APP_FIREFOX]],\n (constants.STATE_IDLE, constants.APP_FIREFOX, [constants.APP_FIREFOX]))\n\n # Hulu\n await self.assertUpdate([True, True, 4, constants.APP_HULU, 3, [constants.APP_HULU]],\n (constants.STATE_PLAYING, constants.APP_HULU, [constants.APP_HULU]))\n\n await self.assertUpdate([True, True, 2, constants.APP_HULU, 3, [constants.APP_HULU]],\n (constants.STATE_PAUSED, constants.APP_HULU, [constants.APP_HULU]))\n\n await self.assertUpdate([True, True, 1, constants.APP_HULU, 3, [constants.APP_HULU]],\n (constants.STATE_IDLE, constants.APP_HULU, [constants.APP_HULU]))\n\n # Jellyfin\n await self.assertUpdate([True, True, 2, constants.APP_JELLYFIN_TV, 3, [constants.APP_JELLYFIN_TV]],\n (constants.STATE_PLAYING, constants.APP_JELLYFIN_TV, [constants.APP_JELLYFIN_TV]))\n\n await self.assertUpdate([True, True, 4, constants.APP_JELLYFIN_TV, 3, [constants.APP_JELLYFIN_TV]],\n (constants.STATE_PAUSED, constants.APP_JELLYFIN_TV, [constants.APP_JELLYFIN_TV]))\n\n # Netfilx\n await self.assertUpdate([True, True, 1, constants.APP_NETFLIX, 3, [constants.APP_NETFLIX]],\n (constants.STATE_PLAYING, constants.APP_NETFLIX, [constants.APP_NETFLIX]))\n\n await self.assertUpdate([True, True, 1, constants.APP_NETFLIX, 2, [constants.APP_NETFLIX]],\n 
(constants.STATE_PAUSED, constants.APP_NETFLIX, [constants.APP_NETFLIX]))\n\n await self.assertUpdate([True, True, 1, constants.APP_NETFLIX, 1, [constants.APP_NETFLIX]],\n (constants.STATE_IDLE, constants.APP_NETFLIX, [constants.APP_NETFLIX]))\n\n # Plex\n await self.assertUpdate([True, True, 1, constants.APP_PLEX, 3, [constants.APP_PLEX]],\n (constants.STATE_PLAYING, constants.APP_PLEX, [constants.APP_PLEX]))\n\n await self.assertUpdate([True, True, 2, constants.APP_PLEX, 3, [constants.APP_PLEX]],\n (constants.STATE_PAUSED, constants.APP_PLEX, [constants.APP_PLEX]))\n\n await self.assertUpdate([True, True, 1, constants.APP_PLEX, 1, [constants.APP_PLEX]],\n (constants.STATE_IDLE, constants.APP_PLEX, [constants.APP_PLEX]))\n\n # Sport 1\n await self.assertUpdate([True, True, 3, constants.APP_SPORT1, 3, [constants.APP_SPORT1]],\n (constants.STATE_PLAYING, constants.APP_SPORT1, [constants.APP_SPORT1]))\n\n await self.assertUpdate([True, True, 2, constants.APP_SPORT1, 3, [constants.APP_SPORT1]],\n (constants.STATE_PAUSED, constants.APP_SPORT1, [constants.APP_SPORT1]))\n\n await self.assertUpdate([True, True, 1, constants.APP_SPORT1, 3, [constants.APP_SPORT1]],\n (constants.STATE_IDLE, constants.APP_SPORT1, [constants.APP_SPORT1]))\n\n # Spotify\n await self.assertUpdate([True, True, 1, constants.APP_SPOTIFY, 3, [constants.APP_SPOTIFY]],\n (constants.STATE_PLAYING, constants.APP_SPOTIFY, [constants.APP_SPOTIFY]))\n\n await self.assertUpdate([True, True, 1, constants.APP_SPOTIFY, 2, [constants.APP_SPOTIFY]],\n (constants.STATE_PAUSED, constants.APP_SPOTIFY, [constants.APP_SPOTIFY]))\n\n await self.assertUpdate([True, True, 1, constants.APP_SPOTIFY, 1, [constants.APP_SPOTIFY]],\n (constants.STATE_IDLE, constants.APP_SPOTIFY, [constants.APP_SPOTIFY]))\n\n # Twitch\n await self.assertUpdate([True, True, 2, constants.APP_TWITCH, 3, [constants.APP_TWITCH]],\n (constants.STATE_PAUSED, constants.APP_TWITCH, [constants.APP_TWITCH]))\n\n await self.assertUpdate([True, True, 1, constants.APP_TWITCH, 3, [constants.APP_TWITCH]],\n (constants.STATE_PLAYING, constants.APP_TWITCH, [constants.APP_TWITCH]))\n\n await self.assertUpdate([True, True, 1, constants.APP_TWITCH, 4, [constants.APP_TWITCH]],\n (constants.STATE_PLAYING, constants.APP_TWITCH, [constants.APP_TWITCH]))\n\n await self.assertUpdate([True, True, 1, constants.APP_TWITCH, 1, [constants.APP_TWITCH]],\n (constants.STATE_IDLE, constants.APP_TWITCH, [constants.APP_TWITCH]))\n\n # Waipu TV\n await self.assertUpdate([True, True, 3, constants.APP_WAIPU_TV, 1, [constants.APP_WAIPU_TV]],\n (constants.STATE_PLAYING, constants.APP_WAIPU_TV, [constants.APP_WAIPU_TV]))\n\n await self.assertUpdate([True, True, 2, constants.APP_WAIPU_TV, 1, [constants.APP_WAIPU_TV]],\n (constants.STATE_PAUSED, constants.APP_WAIPU_TV, [constants.APP_WAIPU_TV]))\n\n await self.assertUpdate([True, True, 1, constants.APP_WAIPU_TV, 1, [constants.APP_WAIPU_TV]],\n (constants.STATE_IDLE, constants.APP_WAIPU_TV, [constants.APP_WAIPU_TV]))\n\n # Unknown app\n await self.assertUpdate([True, True, 1, 'unknown', 3, ['unknown']],\n (constants.STATE_PLAYING, 'unknown', ['unknown']))\n\n await self.assertUpdate([True, True, 1, 'unknown', 2, ['unknown']],\n (constants.STATE_PAUSED, 'unknown', ['unknown']))\n\n await self.assertUpdate([True, True, 1, 'unknown', 1, ['unknown']],\n (constants.STATE_IDLE, 'unknown', ['unknown']))\n\n await self.assertUpdate([True, True, 1, 'unknown', None, ['unknown']],\n (constants.STATE_PLAYING, 'unknown', ['unknown']))\n\n await self.assertUpdate([True, True, 
2, 'unknown', None, ['unknown']],\n (constants.STATE_PAUSED, 'unknown', ['unknown']))",
"def test_dev(self):\r\n dev = Config.dev()\r\n self.assertIsInstance(dev, bool)\r\n \r\n Config.data['dev'] = 'True'\r\n dev = Config.dev()\r\n self.assertFalse(dev)\r\n \r\n Config.data['dev'] = True\r\n dev = Config.dev()\r\n self.assertTrue(dev)\r\n \r\n Config.data['dev'] = 'Yes'\r\n dev = Config.dev()\r\n self.assertFalse(dev)",
"def test_integration_setVerified(self):\n raise unittest.SkipTest(\n (\"The setVerified() and isVerified() methods were not refactored \"\n \"into the new bridgedb.bridges.Bridge class, as it's not clear \"\n \"yet if they are necessary. Skip these tests for now.\"))\n\n bridge = bridges.Bridge(self.nickname, self.ip, self.orport,\n self.fingerprint)\n bridge.setVerified()\n self.assertTrue(bridge.isVerified())\n self.assertTrue(bridge.verified)\n self.assertEqual(self.id_digest, bridge.getID())",
"def test_update_Fast_Stable(self):\n self.flags.update([\"Fast\", \"Stable\"])\n self.assertTrue(self.flags.fast)\n self.assertTrue(self.flags.stable)",
"def test_report_active_slave(self, bond):\n mode = self.hosts_nets_nic_dict.get(0).get(bond).get(\"mode\")\n testflow.step(\n \"Check that the active slave name bond %s mode %s that reported \"\n \"via engine match to the active slave name on the host\", bond, mode\n )\n assert helper.compare_active_slave_from_host_to_engine(\n bond=bond\n ), (\n \"Active slave name bond %s mode %s that reported via engine \"\n \"isn't match to the active slave name on the host\" % (bond, mode)\n )",
"def test_Bridge_updateFromServerDescriptor_ignoreNetworkstatus_no_networkstatus(self):\n self.bridge.updateFromServerDescriptor(self.serverdescriptor,\n ignoreNetworkstatus=True)\n self.assertIsNone(self.bridge.descriptors['networkstatus'])\n self.assertIsNotNone(self.bridge.descriptors['server'])",
"async def test_brightness_mode(\n hass: HomeAssistant,\n mock_lametric: MagicMock,\n device_registry: dr.DeviceRegistry,\n entity_registry: er.EntityRegistry,\n) -> None:\n state = hass.states.get(\"select.frenck_s_lametric_brightness_mode\")\n assert state\n assert (\n state.attributes.get(ATTR_FRIENDLY_NAME) == \"Frenck's LaMetric Brightness mode\"\n )\n assert state.attributes.get(ATTR_ICON) == \"mdi:brightness-auto\"\n assert state.attributes.get(ATTR_OPTIONS) == [\"auto\", \"manual\"]\n assert state.state == BrightnessMode.AUTO\n\n entry = entity_registry.async_get(state.entity_id)\n assert entry\n assert entry.device_id\n assert entry.entity_category is EntityCategory.CONFIG\n assert entry.unique_id == \"SA110405124500W00BS9-brightness_mode\"\n\n device = device_registry.async_get(entry.device_id)\n assert device\n assert device.configuration_url is None\n assert device.connections == {(dr.CONNECTION_NETWORK_MAC, \"aa:bb:cc:dd:ee:ff\")}\n assert device.entry_type is None\n assert device.hw_version is None\n assert device.identifiers == {(DOMAIN, \"SA110405124500W00BS9\")}\n assert device.manufacturer == \"LaMetric Inc.\"\n assert device.name == \"Frenck's LaMetric\"\n assert device.sw_version == \"2.2.2\"\n\n await hass.services.async_call(\n SELECT_DOMAIN,\n SERVICE_SELECT_OPTION,\n {\n ATTR_ENTITY_ID: \"select.frenck_s_lametric_brightness_mode\",\n ATTR_OPTION: \"manual\",\n },\n blocking=True,\n )\n\n assert len(mock_lametric.display.mock_calls) == 1\n mock_lametric.display.assert_called_once_with(brightness_mode=BrightnessMode.MANUAL)",
"def test_stable_true(self):\n try:\n self.view001(stable=True)\n except Exception as err:\n self.fail('An unexpected error was encountered: '+str(err))",
"async def test_sensor_state(hass: HomeAssistant) -> None:\n prior = 0.2\n config = {\n \"binary_sensor\": {\n \"name\": \"Test_Binary\",\n \"platform\": \"bayesian\",\n \"observations\": [\n {\n \"platform\": \"state\",\n \"entity_id\": \"sensor.test_monitored\",\n \"to_state\": \"off\",\n \"prob_given_true\": 0.8,\n \"prob_given_false\": 0.4,\n }\n ],\n \"prior\": prior,\n \"probability_threshold\": 0.32,\n }\n }\n\n assert await async_setup_component(hass, \"binary_sensor\", config)\n await hass.async_block_till_done()\n\n hass.states.async_set(\"sensor.test_monitored\", \"on\")\n await hass.async_block_till_done()\n state = hass.states.get(\"binary_sensor.test_binary\")\n\n assert state.attributes.get(\"occurred_observation_entities\") == [\n \"sensor.test_monitored\"\n ]\n assert state.attributes.get(\"observations\")[0][\"prob_given_true\"] == 0.8\n assert state.attributes.get(\"observations\")[0][\"prob_given_false\"] == 0.4\n assert abs(0.0769 - state.attributes.get(\"probability\")) < 0.01\n # Calculated using bayes theorum where P(A) = 0.2, P(~B|A) = 0.2 (as negative observation), P(~B|notA) = 0.6\n assert state.state == \"off\"\n\n hass.states.async_set(\"sensor.test_monitored\", \"off\")\n await hass.async_block_till_done()\n state = hass.states.get(\"binary_sensor.test_binary\")\n\n assert state.attributes.get(\"occurred_observation_entities\") == [\n \"sensor.test_monitored\"\n ]\n assert abs(0.33 - state.attributes.get(\"probability\")) < 0.01\n # Calculated using bayes theorum where P(A) = 0.2, P(~B|A) = 0.8 (as negative observation), P(~B|notA) = 0.4\n assert state.state == \"on\"\n\n hass.states.async_remove(\"sensor.test_monitored\")\n await hass.async_block_till_done()\n state = hass.states.get(\"binary_sensor.test_binary\")\n\n assert state.attributes.get(\"occurred_observation_entities\") == []\n assert abs(prior - state.attributes.get(\"probability\")) < 0.01\n assert state.state == \"off\"\n\n hass.states.async_set(\"sensor.test_monitored\", STATE_UNAVAILABLE)\n await hass.async_block_till_done()\n state = hass.states.get(\"binary_sensor.test_binary\")\n\n assert state.attributes.get(\"occurred_observation_entities\") == []\n assert abs(prior - state.attributes.get(\"probability\")) < 0.01\n assert state.state == \"off\"\n\n hass.states.async_set(\"sensor.test_monitored\", STATE_UNKNOWN)\n await hass.async_block_till_done()\n state = hass.states.get(\"binary_sensor.test_binary\")\n\n assert state.attributes.get(\"occurred_observation_entities\") == []\n assert abs(prior - state.attributes.get(\"probability\")) < 0.01\n assert state.state == \"off\"",
"def test_update(self):\n name = 'test'\n switch = Switch.objects.create(name=name, active=True)\n\n call_command('waffle_switch', name, 'off')\n switch.refresh_from_db()\n self.assertFalse(switch.active)\n\n call_command('waffle_switch', name, 'on')\n switch.refresh_from_db()\n self.assertTrue(switch.active)",
"def test_network(self):\n self.net.pingAll()\n self.pingAllV6()\n for link in self.switch_matrix:\n s1, s2 = link[0], link[2]\n output(f\"Setting link between {s1} and {s2} down\\n\")\n self.net.configLinkStatus(s1, s2, \"down\")\n self.net.pingAll()\n self.pingAllV6()\n output(f\"Setting link between {s1} and {s2} up\\n\")\n self.net.configLinkStatus(s1, s2, \"up\")\n self.net.pingAll()\n self.pingAllV6()",
"def test_properties(mqtt_client: MockedMQTT):\n device = DysonPureHotCoolLink(SERIAL, CREDENTIAL, DEVICE_TYPE)\n device.connect(HOST)\n\n # Status\n assert device.focus_mode is True\n\n new_status = {\"product-state\": {\"ffoc\": [\"ON\", \"OFF\"]}}\n mqtt_client.state_change(new_status)\n assert device.focus_mode is False",
"def test_toggle_sysinfo(self):\n Bridge.toggle_sysinfo(self.model, self.gui)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Should return a config line without a fingerprint.
|
def test_integration_getConfigLine_vanilla_withoutFingerprint(self):
#self.skip = True
bridge = bridges.Bridge('nofpr', '23.23.23.23', 2323, self.fingerprint,
or_addresses=self.or_addresses)
bridgeLine = bridge.getConfigLine()
ip = bridgeLine.split(':')[0]
self.assertTrue(ipaddr.IPAddress(ip))
|
[
"def test_integration_getConfigLine_vanilla_withFingerprint(self):\n bridge = bridges.Bridge('fpr', '23.23.23.23', 2323,\n id_digest=self.id_digest,\n or_addresses=self.or_addresses)\n bridgeLine = bridge.getConfigLine(includeFingerprint=True)\n self.assertIsNotNone(bridgeLine)\n self.assertSubstring(self.fingerprint, bridgeLine)\n ip = bridgeLine.split(':')[0]\n self.assertTrue(ipaddr.IPAddress(ip))",
"def test_Bridge_getBridgeLine_no_include_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n line = self.bridge.getBridgeLine(request, includeFingerprint=False)\n\n self.assertIsNotNone(line)\n self.assertIn('179.178.155.140:36489', line)\n self.assertNotIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def rem_comment(line):\n return line.split(\"#\", 1)[0].rstrip()",
"def _prepare_line(self, line):\r\n return line.rstrip('\\r\\n').strip()",
"def __unlabel_line(self, line):\n\n if line[0:3] == '!*!':\n line = line[3:]\n return line",
"def without(self, line: FileLine) -> 'Localization':\n return self.exclude_lines([line])",
"def test_get_configdict_from_configfile_with_lines_commented_out(tmp_path):\n os.chdir(tmp_path)\n configfile_content = \"verbose: False\\n\" \"# htmlify: True\\n\"\n Path(CONFIGFILE_NAME).write_text(configfile_content)\n expected = {\"verbose\": False}\n assert get_configdict() == expected",
"def _build_disabled_config(self, ifname):\n return 'except-interface=%s\\n' % ifname",
"def consume_line(line):\n if gc_args.trace_on == gc_arg_set[0]: # on: Remove comments, if any.\n if gc_all_ex.findall(line) and gc_dslash_ex.match(line):\n return False, gc_dslash_ex.sub(\"\", line, 1)\n return False, line\n if gc_args.trace_on == gc_arg_set[1]: # off: Comment out, if not commented.\n if gc_all_ex.findall(line) and not gc_dslash_ex.match(line):\n return False, '//' + line\n return False, line\n if gc_args.trace_on == gc_arg_set[2]: # remove: Remove uncommented std lines.\n if gc_ini_ex.findall(line) and not gc_dslash_ex.match(line):\n return False, '//' + line\n if gc_std_ex.findall(line) and not gc_dslash_ex.match(line):\n return True, line\n return False, line\n if gc_args.trace_on == gc_arg_set[3]: # removeall: Remove every std line.\n if gc_ini_ex.findall(line) and not gc_dslash_ex.match(line):\n return False, '//' + line\n if gc_std_ex.findall(line):\n return True, line\n return False, line\n return False, line",
"def _remove_prompt(self, line):\n if line.startswith(self.prompt_first):\n return line[len(self.prompt_first):]\n elif line.startswith(self.prompt_next):\n return line[len(self.prompt_next):]\n else:\n return line",
"def strip_definer(mysqldump_line):\n if not mysqldump_line.startswith(\"/*\") or len(mysqldump_line) > 500:\n # speed things up, lines with DEFINER in them \n # (1) start with '/*'\n # (2) are shorter than 500 characters.\n return mysqldump_line\n return DEFINER_PATTERN.sub('', mysqldump_line)",
"def _strip_doctest_line(line: str) -> str:\n stripped = re.sub(\"(>>>|\\.\\.\\.)\\s?\", \"\", line)\n\n if re.match(\"\\s*$\", stripped):\n stripped = \"\"\n\n return stripped",
"def get_ignore_checksum():\n return env_to_bool(\"CRDS_IGNORE_MAPPING_CHECKSUM\", False)",
"def test_Bridge_getBridgeLine_IPv6_no_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n request.withIPv6()\n line = self.bridge.getBridgeLine(request, includeFingerprint=False)\n\n self.assertIsNotNone(line)\n self.assertTrue(\n line.startswith('[6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488'))\n self.assertNotIn('179.178.155.140:36493', line)\n self.assertNotIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def clean_line(line):\n return line.replace(\"\\0\", \"\").strip()",
"def test_ignored_line(indent, line_to_ignore):\n example = f\"{indent}{line_to_ignore}\\n\"\n result = ignored_line.parse(example)\n assert result == \"\"",
"def _read_line(self, f):\n l = f.readline().strip()\n while l == \"\" or l[0] == \"#\": # comment or an empty line\n l = f.readline().strip()\n return l",
"def skippable_line(self, line):\n\n # Skip date lines\n if re.match('--\\d{4}-\\d{2}-\\d{2}', line):\n return True\n if re.match('\\d{4}-\\d{2}-\\d{2}', line):\n return True\n # Skip HTTP status code lines since we already have that info\n if re.match('\\d{3} ', line):\n return True\n # Skip Saving to and progress lines\n if re.match('(Saving to:|\\s*\\d+K)', line):\n return True\n # Skip notice about ignoring body on HTTP error\n if re.match('Skipping \\d+ byte', line):\n return True",
"def pop_line(self, line_number):\n if line_number not in self.lines:\n print('Line # {} not found. Available lines in this group ({}) are:'.format(line_number, self.name))\n self.print_lines()\n \n label, description, dtype = self.lines[line_number]\n \n del self.mappings[description]\n del self.lines[line_number]\n\n return (label, description, dtype)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Should return a config line with a fingerprint.
|
def test_integration_getConfigLine_vanilla_withFingerprint(self):
bridge = bridges.Bridge('fpr', '23.23.23.23', 2323,
id_digest=self.id_digest,
or_addresses=self.or_addresses)
bridgeLine = bridge.getConfigLine(includeFingerprint=True)
self.assertIsNotNone(bridgeLine)
self.assertSubstring(self.fingerprint, bridgeLine)
ip = bridgeLine.split(':')[0]
self.assertTrue(ipaddr.IPAddress(ip))
|
[
"def test_integration_getConfigLine_vanilla_withoutFingerprint(self):\n #self.skip = True\n bridge = bridges.Bridge('nofpr', '23.23.23.23', 2323, self.fingerprint,\n or_addresses=self.or_addresses)\n bridgeLine = bridge.getConfigLine()\n ip = bridgeLine.split(':')[0]\n self.assertTrue(ipaddr.IPAddress(ip))",
"def test_integration_getConfigLine_scramblesuit_withFingerprint(self):\n bridge = bridges.Bridge('philipkdick', '23.23.23.23', 2323,\n id_digest=self.id_digest,\n or_addresses=self.or_addresses)\n ptArgs = {'password': 'NEQGQYLUMUQGK5TFOJ4XI2DJNZTS4LRO'}\n pt = bridges.PluggableTransport(bridge.fingerprint, 'scramblesuit',\n ipaddr.IPAddress('42.42.42.42'), 4242,\n ptArgs)\n bridge.transports.append(pt)\n bridgeLine = bridge.getConfigLine(includeFingerprint=True,\n transport='scramblesuit')\n ptArgsList = ' '.join([\"{0}={1}\".format(k,v) for k,v in ptArgs.items()])\n self.assertEqual(\"scramblesuit 42.42.42.42:4242 %s %s\"\n % (self.fingerprint, ptArgsList),\n bridgeLine)",
"def fingerprint(self):\n return self.read_metadata_by_name(self.FINGERPRINT_KEY)",
"def read_fingerprint(node):\n return _get_attr(node, ATTR_FINGERPRINT)",
"def test_Bridge_getBridgeLine_no_include_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n line = self.bridge.getBridgeLine(request, includeFingerprint=False)\n\n self.assertIsNotNone(line)\n self.assertIn('179.178.155.140:36489', line)\n self.assertNotIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def test_get_configdict_from_configfile_with_lines_commented_out(tmp_path):\n os.chdir(tmp_path)\n configfile_content = \"verbose: False\\n\" \"# htmlify: True\\n\"\n Path(CONFIGFILE_NAME).write_text(configfile_content)\n expected = {\"verbose\": False}\n assert get_configdict() == expected",
"def parse(self, procfile):\r\n cfg = OrderedDict()\r\n with open(procfile) as f:\r\n lines = f.readlines()\r\n for line in lines:\r\n m = RE_LINE.match(line)\r\n if m:\r\n cfg[m.group(1)] = m.group(2)\r\n return cfg",
"def get_config_section(self):\n\n if self.label:\n r = \"{name}:{label}\".format(name=self.name, label=self.label)\n else:\n r = \"{name}\".format(name=self.name)\n\n return r",
"def get_line_identifier(self):",
"def _hostconfigfile():\n h = socket.gethostname().split('.')[0]\n return pyperc('Config.%s' % h)",
"def extract_alias_config(defined_config):\n\n regex = '(^ alias.*?\\n)$'\n match = re.search(regex, defined_config, re.M|re.S)\n\n return match.group(1)",
"def parse_line(self, line, conf):\n line = line.split('=', 1)\n if len(line) == 2:\n key = line[0]\n if line[0].startswith('monitoring_logging_') \\\n or line[0].startswith('fuse_kafka_') \\\n or line[0] == 'monitoring_top_substitutions':\n key = key.replace('monitoring_', '')\n key = key.replace('fuse_kafka_', '')\n key = key.replace('logging_', '').replace('top_', '')\n if not key in conf.keys(): conf[key] = []\n parsed = json.loads(line[1])\n if type(parsed) is dict:\n for parsed_key in parsed.keys():\n conf[key].append(parsed_key)\n conf[key].append(parsed[parsed_key])\n else:\n conf[key].extend(parsed)",
"def test_config_attribute_returns_list_it_is_given(baseline):\n config = baseline.split(\"\\n\")\n assert config == transintentlation.Configuring(config).config",
"def find_line_with_hash(lines, hex_id):\n for line_id, line in enumerate(lines):\n if hex_id in line: \n return (line_id, line)\n\n return None",
"def to_config_line(event):\n areas = []\n area: ActivityArea\n for area in event.activity_areas:\n areas.append(area.to_config_line())\n activity_text = Configuration.string_encode(\"\\n\".join(areas))\n\n output = (f\"{event.type}={event.profile}:_profilename={event._profilename}:_profilestate={event._profilestate}:\"\n f\"ima_dead={event.ima_dead}:ima_sens={event.ima_sens}:activity_level:{event.activity_level}:\"\n f\"vm_list={event.vm_list}:ot_type={event.ot_type}:activity_directions={event.activity_directions}:\"\n f\"activity_area={activity_text}\")\n\n return output",
"def config_hash():\n global config\n f = StringIO()\n yaml.dump(config, f)\n return str(hash(f.getvalue()))",
"def fingerprint(self):\n if self._fingerprint is None:\n if self.molecule:\n self._fingerprint = self.molecule[0].fingerprint\n return self._fingerprint",
"def test_Bridge_getBridgeLine_IPv6_no_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n request.withIPv6()\n line = self.bridge.getBridgeLine(request, includeFingerprint=False)\n\n self.assertIsNotNone(line)\n self.assertTrue(\n line.startswith('[6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488'))\n self.assertNotIn('179.178.155.140:36493', line)\n self.assertNotIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def address_line2(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"address_line2\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Should return a scramblesuit config line with a fingerprint.
|
def test_integration_getConfigLine_scramblesuit_withFingerprint(self):
bridge = bridges.Bridge('philipkdick', '23.23.23.23', 2323,
id_digest=self.id_digest,
or_addresses=self.or_addresses)
ptArgs = {'password': 'NEQGQYLUMUQGK5TFOJ4XI2DJNZTS4LRO'}
pt = bridges.PluggableTransport(bridge.fingerprint, 'scramblesuit',
ipaddr.IPAddress('42.42.42.42'), 4242,
ptArgs)
bridge.transports.append(pt)
bridgeLine = bridge.getConfigLine(includeFingerprint=True,
transport='scramblesuit')
ptArgsList = ' '.join(["{0}={1}".format(k,v) for k,v in ptArgs.items()])
self.assertEqual("scramblesuit 42.42.42.42:4242 %s %s"
% (self.fingerprint, ptArgsList),
bridgeLine)
|
[
"def test_integration_getConfigLine_vanilla_withFingerprint(self):\n bridge = bridges.Bridge('fpr', '23.23.23.23', 2323,\n id_digest=self.id_digest,\n or_addresses=self.or_addresses)\n bridgeLine = bridge.getConfigLine(includeFingerprint=True)\n self.assertIsNotNone(bridgeLine)\n self.assertSubstring(self.fingerprint, bridgeLine)\n ip = bridgeLine.split(':')[0]\n self.assertTrue(ipaddr.IPAddress(ip))",
"def sbatch_line(script_name):\n\n line = ['sbatch']\n\n for key in sbatch_info:\n line.append( sbatch_info[key].output() )\n\n line.append( script_name )\n\n return ' '.join( line )",
"def test_integration_getConfigLine_vanilla_withoutFingerprint(self):\n #self.skip = True\n bridge = bridges.Bridge('nofpr', '23.23.23.23', 2323, self.fingerprint,\n or_addresses=self.or_addresses)\n bridgeLine = bridge.getConfigLine()\n ip = bridgeLine.split(':')[0]\n self.assertTrue(ipaddr.IPAddress(ip))",
"def fingerprint(spki, hash):\n return \":\".join(c.encode(\"hex\") for c in hash(spki).digest())",
"def batch_shipyard_encryption_pfx_sha1_thumbprint(config):\n # type: (dict) -> str\n try:\n tp = config['batch_shipyard']['encryption']['pfx']['sha1_thumbprint']\n except KeyError:\n tp = None\n return tp",
"def _get_sk_from_file(self):\n pass",
"def show_bios_configuration(ctx, profile, configuration):\n\n bios_recipe = BIOSRecipe(ctx.obj['client'])\n config_data = bios_recipe.get_selected_configuration(configuration, profile=profile)\n print(json.dumps(config_data, indent=4, sort_keys=True))",
"def config_hash():\n global config\n f = StringIO()\n yaml.dump(config, f)\n return str(hash(f.getvalue()))",
"def test_config_attribute_returns_list_it_is_given(baseline):\n config = baseline.split(\"\\n\")\n assert config == transintentlation.Configuring(config).config",
"def _getfingerprint(self):\n\n return base64.b64encode(encryption.sha512(self.publickeyxml.encode())).decode()",
"def generate_GRA_6_default_config(self, filename):\n # Create the actual config for GRA_6\n\n # Load device_a with registration in SAS Test Harness.\n device_c1 = json_load(\n os.path.join('testcases', 'testdata', 'device_a.json'))\n\n # Load grant request.\n grant_g1 = json_load(\n os.path.join('testcases', 'testdata', 'grant_0.json'))\n\n grant_g2 = json_load(\n os.path.join('testcases', 'testdata', 'grant_0.json'))\n grant_g2['operationParam']['operationFrequencyRange'][\n 'lowFrequency'] = 3645000000\n grant_g2['operationParam']['operationFrequencyRange'][\n 'highFrequency'] = 3655000000\n sas_harness_config = {\n 'sasTestHarnessName': 'SAS-TestHarness-1',\n 'hostName': getFqdnLocalhost(),\n 'port': getUnusedPort(),\n 'serverCert': getCertFilename('sas.cert'),\n 'serverKey': getCertFilename('sas.key'),\n 'caCert': getCertFilename('ca.cert')\n }\n sas_harness_dump_records = {\n 'cbsdRecords': generateCbsdRecords([device_c1],\n [[grant_g2]])\n }\n\n config = {\n 'registrationRequestC1': device_c1,\n 'grantRequestG1': grant_g1,\n 'sasTestHarnessConfig': sas_harness_config,\n 'sasTestHarnessDumpRecords': sas_harness_dump_records\n }\n writeConfig(filename, config)",
"def print_cpu_hand_mask_first_card(hand):\n print(\"Dealer's hand:\")\n print(\"??\", end=' ')\n print_hand(hand[2:])",
"def configHDCP(qd, hdcpin):\n if hdcpin !='follow':\n if 'None' == hdcpin:\n qd.hdcp_alyzSwitch('0')\n elif '14' == hdcpin:\n qd.hdcp_alyzSwitch('1')\n elif '22' == hdcpin:\n qd.hdcp_alyzSwitch('2')\n else:\n raise (\"Unknow Quantum hdcp key!\")",
"def read_configuration_header(stream):\n surveyname = read_string(stream, 128) # \"Loch Ness\"\n transectname = read_string(stream, 128)\n soundername = read_string(stream, 128) # \"ER60\"\n version = read_string(stream, 30)\n read_bytes(stream, 98) # spare\n transducercount = read_long(stream) # 1 to 7\n\n return ConfigurationHeader(surveyname, transectname,\n soundername, version, transducercount)",
"def _get_linecard(self):\n return self.__linecard",
"def filter_pskcrack_output(results, ip, ip_dir, psk_file):\n output = \"\"\n cracked = \"\"\n lines = results.splitlines()\n for line in lines:\n if not line.startswith(('Starting', 'Ending', 'Running')):\n if not line.startswith('no match found'):\n LOG.info('PSK Cracked!: {}'.format(line))\n cracked = \"Cracked psk on ip: {}. PSK file: {}, psk-crack output: {}\".format(\n ip, os.path.join(ip_dir, psk_file), line)\n output = line\n return (output, cracked)",
"def fingerprint():\n directory_walker(fingerprint_audiofile, (os.path.join(STORAGE_BASE_PATH,\n FILEHANDLING_CONFIG['checksummed_path']),\n os.path.join(STORAGE_BASE_PATH,\n FILEHANDLING_CONFIG['fingerprinted_path'])))",
"def phred_autodetect(input_file, USER_PHRED):\n\n if input_file.endswith('.gz'): # Open file\n infile = gzip.open(input_file, 'rt')\n else: \n infile = open(input_file, 'r') \n\n # Phred sets\n phred64_set = set(\"@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefgh\")\n phred33_set = set(\"!\\\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJ\")\n\n quality_string = '' # Initialize variables\n line_count = 0\n is_phred33 = False \n is_phred64 = False\n phred_determined = False\n\n line = infile.readline()[:-1] # Read line by line, until phred type is found\n while phred_determined == False:\n line_count += 1\n\n if line_count == 4: # At this point, we are looking at a quality string\n quality_string = line\n is_phred33 = set(quality_string).issubset(phred33_set)\n is_phred64 = set(quality_string).issubset(phred64_set)\n line_count = 0\n\n if is_phred33 and not is_phred64:\n phred_determined = True\n return \"33\"\n\n elif not is_phred33 and is_phred64:\n phred_determined = True\n return \"64\"\n \n line = infile.readline().strip()\n\n infile.close()\n\n # In case phred can't be determined, use the users input. \n if not phred_determined: \n # If user did not specify phred type \n if USER_PHRED == '':\n print('ERROR: We cannot autodetect the phred encoding type of your file(s). Please specify it in the input.')\n sys.exit(1)\n phred_determined = True\n return USER_PHRED",
"def get_config_digest(self) -> FormattedSHA256:\n return formatted_digest(self.config)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Upon initialisation, all flags should be ``False``.
|
def test_init(self):
for flag in self._all_flag_names:
f = getattr(self.flags, flag, None)
self.assertFalse(f, "%s should be False" % flag)
|
[
"def IsInitOnly(self) -> bool:",
"def setInitialFlags(self):\n\n self.isRebalanceTriggered()\n self.isRestructureTriggered()",
"def disabledInit(self) -> None:\n ...",
"def __initialise_states(self):\n\n # Start not dead and not powered up\n self.powered_up = False\n self.dead = False",
"def initialized(self) -> bool:",
"def __init__(self):\n self.bases = [False,False,False]\n self.runs = 0",
"def __init__(self, **kwds: Any) -> None:\n self._available = False\n super().__init__(**kwds)",
"def initialized(self, value):\n\n\t\tif value is not None:\n\t\t\tassert type(value) is bool, \"'{0}' attribute: '{1}' type is not 'bool'!\".format(\"initialized\", value)\n\t\tself.__initialized = value",
"def setUp(self):\n class FXF(Flags):\n # Implicitly assign three flag values based on definition order\n READ = FlagConstant()\n WRITE = FlagConstant()\n APPEND = FlagConstant()\n\n # Explicitly assign one flag value by passing it in\n EXCLUSIVE = FlagConstant(0x20)\n\n # Implicitly assign another flag value, following the previously\n # specified explicit value.\n TEXT = FlagConstant()\n\n self.FXF = FXF",
"def _check_flags(self):\n for flag in self.required_flags:\n if not getattr(self.configuration, flag, None):\n raise exception.CinderException(_('%s is not set') % flag)",
"def prep_flags(flags):\n\n if flags:\n setattr(flags, \"logging_level\", flags.logging)\n setattr(flags, \"auth_host_name\", \"localhost\")\n setattr(flags, \"auth_host_port\", [8000, 8085])\n setattr(flags, \"noauth_local_webserver\", False)\n return flags",
"def __initialise_frightened_mode(self):\n\n self.frightened = False",
"def clearFlags(self):\n\n self.sorted = False\n self.statted = False",
"def _setup(self):\n tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.DEBUG)\n if KerasBenchmark.local_flags is None:\n for flag_method in self.flag_methods:\n flag_method()\n # Loads flags to get defaults to then override. List cannot be empty.\n flags.FLAGS(['foo'])\n # Overrides flag values with defaults for the class of tests.\n for k, v in self.default_flags.items():\n setattr(FLAGS, k, v)\n saved_flag_values = flagsaver.save_flag_values()\n KerasBenchmark.local_flags = saved_flag_values\n else:\n flagsaver.restore_flag_values(KerasBenchmark.local_flags)",
"def initializeStateFlags(*args):\n return set(args)",
"def initial_global_state(self):\n\n return ()",
"def initialize(self):\n self.initialized = False\n self.initialize_cameras()\n self.initialize_electronics()\n self.initialized = True",
"def test_settingRunning(self):\n self.flags.running = True\n self.assertTrue(self.flags.running, \"The Running flag should be True\")",
"def test_settingStable(self):\n self.flags.stable = True\n self.assertTrue(self.flags.stable, \"The Stable flag should be True\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setting the Stable flag to ``True`` should result in Flags.stable being ``True``.
|
def test_settingStable(self):
self.flags.stable = True
self.assertTrue(self.flags.stable, "The Stable flag should be True")
|
[
"def test_update_Fast_Stable(self):\n self.flags.update([\"Fast\", \"Stable\"])\n self.assertTrue(self.flags.fast)\n self.assertTrue(self.flags.stable)",
"def test_update_Fast(self):\n self.flags.update([\"Fast\"])\n self.assertTrue(self.flags.fast)\n self.assertFalse(self.flags.stable)",
"def stableMode(self, use=True):\n # if option changes, reset everything\n if self.options.stable != use:\n self.initialized = False\n\n if use is True:\n self.options.stable = True\n elif use is False:\n self.options.stable = False\n else:\n raise NameError('Incorrect option input')",
"def test_stable_true(self):\n try:\n self.view001(stable=True)\n except Exception as err:\n self.fail('An unexpected error was encountered: '+str(err))",
"def test_integration_setRunningStable(self):\n bridge = bridges.Bridge(self.nickname, self.ip, self.orport,\n self.fingerprint)\n self.assertFalse(bridge.running)\n self.assertFalse(bridge.stable)\n bridge.setStatus(True, True)\n self.assertTrue(bridge.running)\n self.assertTrue(bridge.stable)",
"def cmd_setflag(self):\n for entry in self.generate_entries():\n if self.args.verbose:\n messager.msg(entry.feed, entry.title)\n entry.set_flag(self.args.new_flag)",
"def test_toggle_staged_state_true(self):\n rule_name = 'unstaged_rule'\n self._create_db_rule_with_name(rule_name)\n\n # Make sure the item that was added is not staged\n item = self.rule_table._table.get_item(Key={'RuleName': rule_name})\n assert_equal(item['Item']['Staged'], False)\n\n # Try to toggle the state to staged\n self.rule_table.toggle_staged_state(rule_name, True)\n\n # Make sure the item is now staged\n item = self.rule_table._table.get_item(Key={'RuleName': rule_name})\n assert_equal(item['Item']['Staged'], True)",
"def test_settingRunning(self):\n self.flags.running = True\n self.assertTrue(self.flags.running, \"The Running flag should be True\")",
"def set_bool(self,obj,key,val='',test=0):\n val=val.lower()\n if val not in ['0','1','no','yes','true','false','on','off']:\n return (1,'%s is not a boolean value' % val)\n if test : return (0,'')\n if val in ['1','yes','true','on'] : obj.__dict__[key]=True\n else : obj.__dict__[key]=False\n return (0,'')",
"def set_flag(flag = 'exit'):\n FLAG[flag] = True",
"def test_BridgeBackwardsCompatibility_setStatus_stable(self):\n bridge = bridges.BridgeBackwardsCompatibility(\n nickname=self.nickname,\n ip=self.address,\n orport=self.orPort,\n fingerprint=self.fingerprint,\n or_addresses={\"2006:42::123F\": 443, \"2006:42::123E\": 9001})\n self.assertIsInstance(bridge, bridges.BridgeBackwardsCompatibility)\n self.assertFalse(bridge.stable)\n self.assertFalse(bridge.flags.stable)\n\n bridge.setStatus(stable=True)\n self.assertTrue(bridge.stable)\n self.assertTrue(bridge.flags.stable)",
"def setValueMutable(self, boolean: bool) -> None:\n ...",
"def set_flag(self, flag, state = True):\r\n arg_str = p2e._base._util._convert_args_to_string(\"set.object.flag\", \r\n self._object._eco_id, flag, state)\r\n p2e._app.Exec(arg_str)",
"def _flag(self, series, meta=dict()):\n meta = self._parse_meta(series, meta)\n self.flags[series['series_id']] = meta",
"def set_bool(self, key: str, value: bool):\n self.set_str(key, \"True\" if value else \"False\")",
"def test_stable_update_true(self):\n try:\n self.view001(update='true')\n except Exception as err:\n self.fail('An unexpected error was encountered: '+str(err))",
"def _update_use_command_table_flag(self):\n device_param = f\"{self._awg.name}_use_command_table\"\n device_value = self.pulsar.get(device_param) \\\n if hasattr(self.pulsar, device_param) else False\n\n channel_param = f\"{self.i_channel_name}_use_command_table\"\n channel_value = self.pulsar.get(channel_param) \\\n if hasattr(self.pulsar, channel_param) else False\n\n self._use_command_table = device_value | channel_value",
"def test_toggle_staged_state_false(self):\n rule_name = 'staged_rule'\n self._create_db_rule_with_name(rule_name, True)\n\n # Make sure the item that was added is staged\n item = self.rule_table._table.get_item(Key={'RuleName': rule_name})\n assert_equal(item['Item']['Staged'], True)\n\n # Try to toggle the state to unstaged\n self.rule_table.toggle_staged_state(rule_name, False)\n\n # Make sure the item is now unstaged\n item = self.rule_table._table.get_item(Key={'RuleName': rule_name})\n assert_equal(item['Item']['Staged'], False)",
"def setVerbosity(self, flag: 'SbBool') -> \"void\":\n return _coin.SoToVRMLAction_setVerbosity(self, flag)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Setting the Running flag to ``True`` should result in Flags.running being ``True``.
|
def test_settingRunning(self):
self.flags.running = True
self.assertTrue(self.flags.running, "The Running flag should be True")
|
[
"def is_running(self, is_running):\n\n self._is_running = is_running",
"def mark_as_running(self):\n self.status = self.STATUS_RUNNING\n self.started_running_datetime = timezone.now()\n self.clean()\n self.save()",
"def set_cmdrunning(self, cmdrunning):\n self.cmdrunning = cmdrunning # toggle state variable\n \n # enable or disable run-related buttons\n if cmdrunning:\n disable_on_run = 'disable'\n enable_on_run = 'normal'\n else:\n disable_on_run = 'normal'\n enable_on_run = 'disable'\n self.b_reset.config(state=disable_on_run)\n self.e_cmd.config(state=disable_on_run)\n self.b_run_batch.config(state=disable_on_run)\n self.b_stop.config(state=enable_on_run)",
"def set_flag(self, flag, state = True):\r\n arg_str = p2e._base._util._convert_args_to_string(\"set.object.flag\", \r\n self._object._eco_id, flag, state)\r\n p2e._app.Exec(arg_str)",
"async def test_sets_is_running_with_streamlit_flag(self):\n # This will frequently be True from other tests\n streamlit._is_running_with_streamlit = False\n await self.runtime.start()\n self.assertTrue(streamlit._is_running_with_streamlit)",
"def cmd_setflag(self):\n for entry in self.generate_entries():\n if self.args.verbose:\n messager.msg(entry.feed, entry.title)\n entry.set_flag(self.args.new_flag)",
"def running(self):\r\n with self._condition:\r\n return self._state == RUNNING",
"def send_run(self) -> None:\n self._set_state(\"RUNNING\")",
"def running(self):\n return self.status == \"STARTED\"",
"def set_flag(flag = 'exit'):\n FLAG[flag] = True",
"def fan_running(self):\n if self._data['hasFan']:\n return self._data['fanData']['fanIsRunning']\n else:\n return False",
"def test_integration_setRunningStable(self):\n bridge = bridges.Bridge(self.nickname, self.ip, self.orport,\n self.fingerprint)\n self.assertFalse(bridge.running)\n self.assertFalse(bridge.stable)\n bridge.setStatus(True, True)\n self.assertTrue(bridge.running)\n self.assertTrue(bridge.stable)",
"def is_running(self) -> bool:\n return self.game_running",
"def start(self):\n self.running = True",
"def TurnOn(self, flag_name):\n flag = self.flags.get(flag_name)\n if flag is None:\n return\n\n flag.TurnOn()",
"def test_settingStable(self):\n self.flags.stable = True\n self.assertTrue(self.flags.stable, \"The Stable flag should be True\")",
"def display_state(self, running_state):\n if not running_state in [\"running_continuous\",\n \"running_single\",\n \"paused\",\n \"stopped\"]:\n raise ValueError(\"Na running_state should be either \"\n \"running_continuous, \"\n \"running_single, \"\n \"paused or \"\n \"stopped\")\n if running_state==\"running_continuous\":\n self.button_single.setEnabled(False)\n self.button_single.setText(\"Run single\")\n self.button_continuous.setEnabled(True)\n self.button_continuous.setText(\"Pause\")\n return\n if running_state== \"running_single\":\n self.button_single.setEnabled(True)\n self.button_single.setText(\"Pause\")\n self.button_continuous.setEnabled(False)\n self.button_continuous.setText(\"Run continuous\")\n return\n if running_state == \"paused\":\n self.button_continuous.setText(\"Resume continuous\")\n self.button_single.setText(\"Run single\")\n self.button_continuous.setEnabled(True)\n self.button_single.setEnabled(False)\n return\n if running_state == \"stopped\":\n self.button_continuous.setText(\"Run continuous\")\n self.button_single.setText(\"Run single\")\n self.button_continuous.setEnabled(True)\n self.button_single.setEnabled(True)\n return",
"def is_running(self):\n return self._job.state(jobset.NoCache()) == jobset._RUNNING",
"def measurement_running(self):\n running = self.comm('STW')[6] == '0'\n return running"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Test changing flags with the update() method.
|
def test_update_Fast_Stable(self):
self.flags.update(["Fast", "Stable"])
self.assertTrue(self.flags.fast)
self.assertTrue(self.flags.stable)
|
[
"def test_update_Fast(self):\n self.flags.update([\"Fast\"])\n self.assertTrue(self.flags.fast)\n self.assertFalse(self.flags.stable)",
"def test_stable_update_true(self):\n try:\n self.view001(update='true')\n except Exception as err:\n self.fail('An unexpected error was encountered: '+str(err))",
"def test_enabled_feature_update(self):\n self._test_method('put', True, dummy=123)",
"def test_update_deactivate_everyone(self):\n name = 'test'\n flag = Flag.objects.create(name=name)\n self.assertIsNone(flag.percent)\n self.assertIsNone(flag.everyone)\n self.assertTrue(flag.superusers)\n self.assertFalse(flag.staff)\n self.assertFalse(flag.authenticated)\n self.assertFalse(flag.rollout)\n\n percent = 30\n call_command('waffle_flag', name, everyone=False, percent=percent,\n superusers=False, staff=True, authenticated=True,\n rollout=True)\n\n flag.refresh_from_db()\n self.assertEqual(flag.percent, percent)\n self.assertFalse(flag.everyone)\n self.assertFalse(flag.superusers)\n self.assertTrue(flag.staff)\n self.assertTrue(flag.authenticated)\n self.assertTrue(flag.rollout)",
"def test_update_activate_everyone(self):\n name = 'test'\n flag = Flag.objects.create(name=name)\n self.assertIsNone(flag.percent)\n self.assertIsNone(flag.everyone)\n self.assertTrue(flag.superusers)\n self.assertFalse(flag.staff)\n self.assertFalse(flag.authenticated)\n self.assertFalse(flag.rollout)\n\n percent = 30\n call_command('waffle_flag', name, everyone=True, percent=percent,\n superusers=False, staff=True, authenticated=True,\n rollout=True)\n\n flag.refresh_from_db()\n self.assertEqual(flag.percent, percent)\n self.assertTrue(flag.everyone)\n self.assertFalse(flag.superusers)\n self.assertTrue(flag.staff)\n self.assertTrue(flag.authenticated)\n self.assertTrue(flag.rollout)",
"def test_settingStable(self):\n self.flags.stable = True\n self.assertTrue(self.flags.stable, \"The Stable flag should be True\")",
"def test_patch_feature_flag(self):\n pass",
"def test_settingRunning(self):\n self.flags.running = True\n self.assertTrue(self.flags.running, \"The Running flag should be True\")",
"def test_update_instructions(self):\n pass",
"def test_changes_invalid_flag_A(self):\n self.game.invalidFlagA = True\n self.game.roll_the_dice(\"a\")\n self.assertEqual(False,self.game.invalidFlagA,\"Didn't changed a flag it should have.\")\n print(\"Correctly, it unset a set flag\")",
"def test_changes_invalid_flag_B(self):\n self.game.invalidFlagB = True\n self.game.roll_the_dice(\"b\")\n self.assertEqual(False,self.game.invalidFlagB,\"Didn't changed a flag it should have.\")\n print(\"Correctly, it unset a set flag\")",
"def test_update(self):\n # this is really tested graphically, no unit test here\n pass",
"def testSetModified(self):\n\n\t\tself.assertEqual(self.testFilterBasedModule.modified, self.oldModifiedFlag)\n\t\tself.testFilterBasedModule.setModified(self.newModifiedFlag)\n\t\tself.assertEqual(self.testFilterBasedModule.modified, self.newModifiedFlag)",
"def test_update(self):\n name = 'test'\n switch = Switch.objects.create(name=name, active=True)\n\n call_command('waffle_switch', name, 'off')\n switch.refresh_from_db()\n self.assertFalse(switch.active)\n\n call_command('waffle_switch', name, 'on')\n switch.refresh_from_db()\n self.assertTrue(switch.active)",
"def test_post_apply_feature_flag_change_request(self):\n pass",
"def test_get_feature_flag_change_request(self):\n pass",
"def test_mutate(self):\n\n\t\tpass",
"def test_post_feature_flag_change_request(self):\n pass",
"def test_update_status_of_redflag(self):\n self.app.post(\n \"/api/v2/redflags\", headers=self.headers, data=json.dumps(self.redflag_data))\n response = self.app.patch(\n \"/api/v2/redflags/1/status\", headers=self.headers, data=json.dumps({\"status\": \"resolved\"}))\n result = json.loads(response.data)\n self.assertEqual(response.status_code, 200)\n self.assertEqual(result['data'][0]['message'],\n \"Updated redflag record's status\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Test changing flags with the update() method.
|
def test_update_Fast(self):
self.flags.update(["Fast"])
self.assertTrue(self.flags.fast)
self.assertFalse(self.flags.stable)
|
[
"def test_update_Fast_Stable(self):\n self.flags.update([\"Fast\", \"Stable\"])\n self.assertTrue(self.flags.fast)\n self.assertTrue(self.flags.stable)",
"def test_stable_update_true(self):\n try:\n self.view001(update='true')\n except Exception as err:\n self.fail('An unexpected error was encountered: '+str(err))",
"def test_enabled_feature_update(self):\n self._test_method('put', True, dummy=123)",
"def test_update_deactivate_everyone(self):\n name = 'test'\n flag = Flag.objects.create(name=name)\n self.assertIsNone(flag.percent)\n self.assertIsNone(flag.everyone)\n self.assertTrue(flag.superusers)\n self.assertFalse(flag.staff)\n self.assertFalse(flag.authenticated)\n self.assertFalse(flag.rollout)\n\n percent = 30\n call_command('waffle_flag', name, everyone=False, percent=percent,\n superusers=False, staff=True, authenticated=True,\n rollout=True)\n\n flag.refresh_from_db()\n self.assertEqual(flag.percent, percent)\n self.assertFalse(flag.everyone)\n self.assertFalse(flag.superusers)\n self.assertTrue(flag.staff)\n self.assertTrue(flag.authenticated)\n self.assertTrue(flag.rollout)",
"def test_update_activate_everyone(self):\n name = 'test'\n flag = Flag.objects.create(name=name)\n self.assertIsNone(flag.percent)\n self.assertIsNone(flag.everyone)\n self.assertTrue(flag.superusers)\n self.assertFalse(flag.staff)\n self.assertFalse(flag.authenticated)\n self.assertFalse(flag.rollout)\n\n percent = 30\n call_command('waffle_flag', name, everyone=True, percent=percent,\n superusers=False, staff=True, authenticated=True,\n rollout=True)\n\n flag.refresh_from_db()\n self.assertEqual(flag.percent, percent)\n self.assertTrue(flag.everyone)\n self.assertFalse(flag.superusers)\n self.assertTrue(flag.staff)\n self.assertTrue(flag.authenticated)\n self.assertTrue(flag.rollout)",
"def test_settingStable(self):\n self.flags.stable = True\n self.assertTrue(self.flags.stable, \"The Stable flag should be True\")",
"def test_patch_feature_flag(self):\n pass",
"def test_settingRunning(self):\n self.flags.running = True\n self.assertTrue(self.flags.running, \"The Running flag should be True\")",
"def test_update_instructions(self):\n pass",
"def test_changes_invalid_flag_A(self):\n self.game.invalidFlagA = True\n self.game.roll_the_dice(\"a\")\n self.assertEqual(False,self.game.invalidFlagA,\"Didn't changed a flag it should have.\")\n print(\"Correctly, it unset a set flag\")",
"def test_changes_invalid_flag_B(self):\n self.game.invalidFlagB = True\n self.game.roll_the_dice(\"b\")\n self.assertEqual(False,self.game.invalidFlagB,\"Didn't changed a flag it should have.\")\n print(\"Correctly, it unset a set flag\")",
"def test_update(self):\n # this is really tested graphically, no unit test here\n pass",
"def testSetModified(self):\n\n\t\tself.assertEqual(self.testFilterBasedModule.modified, self.oldModifiedFlag)\n\t\tself.testFilterBasedModule.setModified(self.newModifiedFlag)\n\t\tself.assertEqual(self.testFilterBasedModule.modified, self.newModifiedFlag)",
"def test_update(self):\n name = 'test'\n switch = Switch.objects.create(name=name, active=True)\n\n call_command('waffle_switch', name, 'off')\n switch.refresh_from_db()\n self.assertFalse(switch.active)\n\n call_command('waffle_switch', name, 'on')\n switch.refresh_from_db()\n self.assertTrue(switch.active)",
"def test_post_apply_feature_flag_change_request(self):\n pass",
"def test_get_feature_flag_change_request(self):\n pass",
"def test_mutate(self):\n\n\t\tpass",
"def test_post_feature_flag_change_request(self):\n pass",
"def test_update_status_of_redflag(self):\n self.app.post(\n \"/api/v2/redflags\", headers=self.headers, data=json.dumps(self.redflag_data))\n response = self.app.patch(\n \"/api/v2/redflags/1/status\", headers=self.headers, data=json.dumps({\"status\": \"resolved\"}))\n result = json.loads(response.data)\n self.assertEqual(response.status_code, 200)\n self.assertEqual(result['data'][0]['message'],\n \"Updated redflag record's status\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Test adding the HSDir flag with the update() method.
|
def test_update_HSDir(self):
self.flags.update(["Fast", "Stable", "HSDir"])
self.assertTrue(self.flags.fast)
self.assertTrue(self.flags.stable)
# We don't care about the HSDir flag:
self.assertIsNone(getattr(self.flags, "hsdir", None))
|
[
"def test_vrfs_update(self):\n pass",
"def test_update(self):\n # this is really tested graphically, no unit test here\n pass",
"def test_full_update_system(self):\n pass",
"def test_update_level(self):\n pass",
"def test_update_on_close(self):\n buff = self.make_copy(self.fh)\n fh2 = WeldxFile(buff, mode=\"rw\", sync=True)\n fh2[\"test\"] = True\n fh2.close()\n buff.seek(0)\n fh3 = WeldxFile(buff, mode=\"r\")\n assert fh3[\"test\"]",
"def testSingleDailyUpdate(self):\n l0pid = self.addProduct('level 0')\n l1pid = self.addProduct('level 1', level=1)\n l01process, l01code = self.addProcess('level 0-1', l1pid)\n self.addProductProcessLink(l0pid, l01process)\n l0fid = self.addFile('level_0_20120101_v1.0.0', l0pid)\n l1fid = self.addFile('level_1_20120101_v1.0.0', l1pid)\n self.dbu.addFilefilelink(l1fid, l0fid)\n expected = []\n # Should be up to date\n self.checkCommandLines(l0fid, expected)\n #Updated version of L0\n fid = self.addFile('level_0_20120101_v1.1.0', l0pid)\n expected = [[\n os.path.join(self.td, 'codes', 'scripts', 'junk.py'),\n 'level_0-1_args',\n os.path.join(self.td, 'data', 'junk', 'level_0_20120101_v1.1.0'),\n 'level_1_20120101_v1.1.0']]",
"def test_update_mft_folder(self):\n pass",
"def endOfDirectory(handle, succeeded=None, updateListing=None, cacheToDisc=None):\n if os.path.exists(\"/etc/debugxb\"):\n\t pass#print \"*** endOfDirectory ***\"",
"def test_reload_state(self):\n self.reload_helper(\"State\")",
"def test_update_existing_proper_update(tmpdir):\n d1 = np.ones((10, 3)) * 2\n d2 = np.ones(3) * 3\n d3 = np.ones(17) * 4\n d4 = np.ones((10, 4)) * 5\n d5 = np.ones(14)\n trees = [\n {\"d1\": d1, \"d2\": d2, \"d3\": d3, \"d4\": d4},\n {\"d1\": d1, \"d3\": d3},\n {\"d1\": d1},\n {\"d1\": d1, \"d5\": d5},\n {\"d1\": d1, \"d2\": d2, \"d5\": d5},\n {\"d3\": d3},\n ]\n\n os.chdir(tmpdir)\n for tree in trees:\n WeldxFile(\"test.wx\", mode=\"rw\", tree=tree)\n\n # AsdfFile version\n asdf.AsdfFile(trees[0]).write_to(\"test.asdf\")\n\n for tree in trees[1:]:\n f = asdf.open(\"test.asdf\", mode=\"rw\")\n f.tree = tree\n f.update()\n f.close()\n\n # file sizes should be almost equal (array inlining in wxfile).\n a = pathlib.Path(\"test.asdf\").stat().st_size\n b = pathlib.Path(\"test.wx\").stat().st_size\n assert a >= b\n\n if a == b:\n\n def _read(fn):\n with open(fn, \"br\") as fh:\n return fh.read()\n\n assert _read(\"test.asdf\") == _read(\"test.wx\")",
"def test_update_submodules():",
"def setup_hds(self):\n if self.hds_kperk is None or len(self.hds_kperk) == 0:\n return\n from .gw_utils import setup_hds_obs\n # if len(self.hds_kperk) == 2:\n # try:\n # if len(self.hds_kperk[0] == 2):\n # pass\n # except:\n # self.hds_kperk = [self.hds_kperk]\n oc = self.m.get_package(\"OC\")\n if oc is None:\n raise Exception(\"can't find OC package in model to setup hds grid obs\")\n if not oc.savehead:\n raise Exception(\"OC not saving hds, can't setup grid obs\")\n hds_unit = oc.iuhead\n hds_file = self.m.get_output(unit=hds_unit)\n assert os.path.exists(os.path.join(self.org_model_ws,hds_file)),\\\n \"couldn't find existing hds file {0} in org_model_ws\".format(hds_file)\n shutil.copy2(os.path.join(self.org_model_ws,hds_file),\n os.path.join(self.m.model_ws,hds_file))\n inact = None\n if self.m.lpf is not None:\n inact = self.m.lpf.hdry\n elif self.m.upw is not None:\n inact = self.m.upw.hdry\n if inact is None:\n skip = lambda x: np.NaN if x == self.m.bas6.hnoflo else x\n else:\n skip = lambda x: np.NaN if x == self.m.bas6.hnoflo or x == inact else x\n print(self.hds_kperk)\n setup_hds_obs(os.path.join(self.m.model_ws,hds_file),\n kperk_pairs=self.hds_kperk,skip=skip)\n self.frun_post_lines.append(\"pyemu.gw_utils.apply_hds_obs('{0}')\".format(hds_file))\n self.tmp_files.append(hds_file)",
"def TestOsModifyValid():\n hv_dict = {\n constants.HT_XEN_PVM: {\n constants.HV_ROOT_PATH: \"/dev/sda5\",\n },\n constants.HT_XEN_HVM: {\n constants.HV_ACPI: False,\n constants.HV_PAE: True,\n },\n }\n\n return _TestOsModify(hv_dict)",
"def test_update_dog(self):\n pass",
"def test_initialize_new(self):\n new_temp_dir = self.temp_dir + \"e09dia0d\"\n directory = Directory(os.path.join(new_temp_dir, \"test\"), rewrite_config=False)\n assert directory.new\n try:\n directory.initialize()\n assert not directory.new, \"directory should not be new after initialization\"\n finally:\n if os.path.exists(new_temp_dir):\n shutil.rmtree(new_temp_dir)",
"def test_update_instructions(self):\n pass",
"def test_update_domain_only(self):\r\n self.test_update()",
"def test_update_device_group(self):\n pass",
"async def test_device_registry_update(\n hass: HomeAssistant, device_registry: dr.DeviceRegistry\n) -> None:\n MOCK_CONFIG_ENTRY.add_to_hass(hass)\n\n device_registry.async_get_or_create(\n config_entry_id=MOCK_CONFIG_ENTRY.entry_id,\n identifiers={(DOMAIN, MOCK_GATEWAY_ID)},\n name=\"Mock Gateway\",\n manufacturer=\"Schelte Bron\",\n model=\"OpenTherm Gateway\",\n sw_version=VERSION_OLD,\n )\n\n with patch(\n \"homeassistant.components.opentherm_gw.OpenThermGatewayDevice.cleanup\",\n return_value=None,\n ), patch(\"pyotgw.OpenThermGateway.connect\", return_value=MINIMAL_STATUS_UPD):\n await setup.async_setup_component(hass, DOMAIN, {})\n\n await hass.async_block_till_done()\n gw_dev = device_registry.async_get_device(identifiers={(DOMAIN, MOCK_GATEWAY_ID)})\n assert gw_dev.sw_version == VERSION_NEW"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Test adding a completely made-up flag, "Unicorn", with the update() method. (It shouldn't get added.)
|
def test_update_Unicorn(self):
self.flags.update(["Unicorn"])
# We don't care about the make-believe Unicorn flag:
self.assertIsNone(getattr(self.flags, "unicorn", None))
|
[
"def test_update_Fast(self):\n self.flags.update([\"Fast\"])\n self.assertTrue(self.flags.fast)\n self.assertFalse(self.flags.stable)",
"def test_patch_feature_flag(self):\n pass",
"def test_update_Fast_Stable(self):\n self.flags.update([\"Fast\", \"Stable\"])\n self.assertTrue(self.flags.fast)\n self.assertTrue(self.flags.stable)",
"def test_post_feature_flag(self):\n pass",
"def test_settingStable(self):\n self.flags.stable = True\n self.assertTrue(self.flags.stable, \"The Stable flag should be True\")",
"def test_changes_invalid_flag_B(self):\n self.game.invalidFlagB = True\n self.game.roll_the_dice(\"b\")\n self.assertEqual(False,self.game.invalidFlagB,\"Didn't changed a flag it should have.\")\n print(\"Correctly, it unset a set flag\")",
"def test_post_apply_feature_flag_change_request(self):\n pass",
"def test_changes_invalid_flag_A(self):\n self.game.invalidFlagA = True\n self.game.roll_the_dice(\"a\")\n self.assertEqual(False,self.game.invalidFlagA,\"Didn't changed a flag it should have.\")\n print(\"Correctly, it unset a set flag\")",
"def test_settingRunning(self):\n self.flags.running = True\n self.assertTrue(self.flags.running, \"The Running flag should be True\")",
"def test_init(self):\n for flag in self._all_flag_names:\n f = getattr(self.flags, flag, None)\n self.assertFalse(f, \"%s should be False\" % flag)",
"def test_add_proper_red_flag(self):\n response = base.post_incident(self.credentials)\n self.assertEqual(response.status_code, 201)",
"def test_post_feature_flag_change_request(self):\n pass",
"def test_leaves_invalid_flag_B(self):\n self.game.invalidFlagB = False\n self.game.roll_the_dice(\"b\")\n self.assertEqual(False,self.game.invalidFlagB,\"Changed a flag it shouldn't have.\")\n print(\"Correctly, it didn't set an unset flag\")",
"def test_update_activate_everyone(self):\n name = 'test'\n flag = Flag.objects.create(name=name)\n self.assertIsNone(flag.percent)\n self.assertIsNone(flag.everyone)\n self.assertTrue(flag.superusers)\n self.assertFalse(flag.staff)\n self.assertFalse(flag.authenticated)\n self.assertFalse(flag.rollout)\n\n percent = 30\n call_command('waffle_flag', name, everyone=True, percent=percent,\n superusers=False, staff=True, authenticated=True,\n rollout=True)\n\n flag.refresh_from_db()\n self.assertEqual(flag.percent, percent)\n self.assertTrue(flag.everyone)\n self.assertFalse(flag.superusers)\n self.assertTrue(flag.staff)\n self.assertTrue(flag.authenticated)\n self.assertTrue(flag.rollout)",
"def test_get_feature_flag_status(self):\n pass",
"def test_update_deactivate_everyone(self):\n name = 'test'\n flag = Flag.objects.create(name=name)\n self.assertIsNone(flag.percent)\n self.assertIsNone(flag.everyone)\n self.assertTrue(flag.superusers)\n self.assertFalse(flag.staff)\n self.assertFalse(flag.authenticated)\n self.assertFalse(flag.rollout)\n\n percent = 30\n call_command('waffle_flag', name, everyone=False, percent=percent,\n superusers=False, staff=True, authenticated=True,\n rollout=True)\n\n flag.refresh_from_db()\n self.assertEqual(flag.percent, percent)\n self.assertFalse(flag.everyone)\n self.assertFalse(flag.superusers)\n self.assertTrue(flag.staff)\n self.assertTrue(flag.authenticated)\n self.assertTrue(flag.rollout)",
"def test_enabled_feature_update(self):\n self._test_method('put', True, dummy=123)",
"def test_post_review_feature_flag_change_request(self):\n pass",
"def test_full_update_layer(self):\n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
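The three update() examples above (Fast, HSDir, Unicorn) all assume a flags object that tracks a fixed whitelist of flag names and silently drops anything else. A minimal sketch of that behaviour, in which the class name and the _known tuple are illustrative assumptions rather than the actual bridgedb implementation:

class Flags(object):
    """Illustrative only: a flag store that whitelists known flag names."""

    _known = ("fast", "stable", "running")

    def __init__(self):
        # Every known flag starts out False.
        for name in self._known:
            setattr(self, name, False)

    def update(self, flags):
        # Set each incoming flag only if its lowercased name is known;
        # unrecognised names like "HSDir" or "Unicorn" never become
        # attributes, so getattr(flags, "unicorn", None) stays None.
        for flag in flags:
            name = flag.lower()
            if name in self._known:
                setattr(self, name, True)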
The BridgeAddressBase's _address and _fingerprint should be None.
|
def test_BridgeAddressBase_init(self):
self.assertIsNone(self.bab._address)
self.assertIsNone(self.bab._fingerprint)
|
[
"def test_BridgeAddressBase_address_del(self):\n self.bab.address = '11.12.13.14'\n self.assertEqual(self.bab.address, ipaddr.IPv4Address('11.12.13.14'))\n\n del(self.bab.address)\n self.assertIsNone(self.bab.address)\n self.assertIsNone(self.bab._address)",
"def test_integration_getConfigLine_vanilla_withoutFingerprint(self):\n #self.skip = True\n bridge = bridges.Bridge('nofpr', '23.23.23.23', 2323, self.fingerprint,\n or_addresses=self.or_addresses)\n bridgeLine = bridge.getConfigLine()\n ip = bridgeLine.split(':')[0]\n self.assertTrue(ipaddr.IPAddress(ip))",
"def test_BridgeAddressBase_fingerprint_del(self):\n self.bab.fingerprint = self.fingerprint\n self.assertEqual(self.bab.fingerprint, self.fingerprint)\n\n del(self.bab.fingerprint)\n self.assertIsNone(self.bab.fingerprint)\n self.assertIsNone(self.bab._fingerprint)",
"def test_Bridge_getBridgeLine_no_vanilla_addresses(self):\n request = BridgeRequestBase()\n request.isValid(True)\n\n self.assertIsNone(self.bridge.getBridgeLine(request))",
"def test_Bridge_getBridgeLine_no_include_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n line = self.bridge.getBridgeLine(request, includeFingerprint=False)\n\n self.assertIsNotNone(line)\n self.assertIn('179.178.155.140:36489', line)\n self.assertNotIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def test_integration_getConfigLine_vanilla_withFingerprint(self):\n bridge = bridges.Bridge('fpr', '23.23.23.23', 2323,\n id_digest=self.id_digest,\n or_addresses=self.or_addresses)\n bridgeLine = bridge.getConfigLine(includeFingerprint=True)\n self.assertIsNotNone(bridgeLine)\n self.assertSubstring(self.fingerprint, bridgeLine)\n ip = bridgeLine.split(':')[0]\n self.assertTrue(ipaddr.IPAddress(ip))",
"def address_obj(self):\n if not self._address_obj:\n self.address()\n return self._address_obj",
"def init(self):\n if not valid_ovsdb_addr(self.ovsdb_addr):\n raise ValueError('Invalid OVSDB address: %s' % self.ovsdb_addr)\n if self.br_name is None:\n self.br_name = self._get_bridge_name()",
"def test_Bridge_str_without_fingerprint(self):\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n del(bridge.fingerprint)\n\n identifier = str(bridge)\n self.assertEqual(identifier,\n ''.join(['$', '0'*40,\n '~', bridge.nickname]))",
"def hub_address(self) -> str | None:\n return self.ip",
"def test_Bridge_allVanillaAddresses_reentrancy_address(self):\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(None, 443, 4)])\n self.bridge.address = '1.1.1.1'\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def setup(self):\n try:\n return Bridge(self.bridge_ip)\n except PhueRegistrationException:\n raise Exception('Press the link button on your Hue bridge and then try again within 30 seconds.')\n except:\n raise Exception('Could not connect to the Hue bridge. Are you sure you have the correct IP address?')",
"def test_Bridge_allVanillaAddresses_idempotency_self(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def get_address_info(self):\n self.build_url(self.ADDRESS_INFO, self.address)\n return self.connect()",
"def ndaoAddress() -> address:\n return self.ndao",
"def peer_address(self): \n return self._peer_addr",
"def addr(self):\n return self.__addr",
"def test_Bridge_getBridgeLine_IPv6_no_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n request.withIPv6()\n line = self.bridge.getBridgeLine(request, includeFingerprint=False)\n\n self.assertIsNotNone(line)\n self.assertTrue(\n line.startswith('[6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488'))\n self.assertNotIn('179.178.155.140:36493', line)\n self.assertNotIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def test_get_xrp__ripple_address_details(self):\n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
The del method for the fingerprint property should reset the fingerprint to None.
|
def test_BridgeAddressBase_fingerprint_del(self):
self.bab.fingerprint = self.fingerprint
self.assertEqual(self.bab.fingerprint, self.fingerprint)
del(self.bab.fingerprint)
self.assertIsNone(self.bab.fingerprint)
self.assertIsNone(self.bab._fingerprint)
|
[
"def propdel(self, key):\n self.properties[key] = None",
"def __del__(self):\n self.clear()",
"def __del__( self ):\n\t\tLiFlame.degrid()",
"def __del__(self):\n self.__class__.reference_count -= 1\n if self.__class__.reference_count == 0:\n print \"Number of reference_count is 0, Deleting cached objec ...\"\n del self.__class__.cached_object\n print 'Deleted object count of object = ', self.__class__.reference_count",
"def delete(self, instance):\n self.descriptor.__delete__(instance)",
"def delete(self):\n self.device_buffer.delete() # pytype: disable=attribute-error\n self.device_buffer = deleted_buffer\n self._npy_value = None",
"def destroy(self):\n try:\n del Thing.ID_dict[self.id]\n except KeyError:\n self.log.error('%s was already removed from Thing.ID_dict' % self)\n if self.location and hasattr(self.location, \"extract\"):\n self.location.extract(self)\n self.location = None\n if self in Thing.game.heartbeat_users:\n Thing.game.deregister_heartbeat(self)",
"def remove_property(self, key):",
"def delete(self):\n Multipass.delete(self.name)",
"def deprovision_dictionary(self):\n self._dictionary = None",
"def __del__(self):\n\n # Delete sprite (if it has been defined)\n try:\n self._canvas.delete(self._sprite)\n except AttributeError:\n pass\n except tk.TclError:\n pass\n\n # Delete all missile objects\n del self._missiles[:]",
"def _remove_ref(self, index: int):\n dataset = self[index]\n if hasattr(dataset, 'memory_address'):\n self._refs.pop(dataset.memory_address, None) # type: ignore",
"def delete(self):\n self._assert_c8y()\n self.c8y.identity.delete(self.external_id, self.external_type)",
"def delete(self):\n self.tap.delete()\n self.port.close()",
"def _del(self, _del):\n\n self.__del = _del",
"def del_kb(self):\n self.kb = None",
"def delete_node_property(self):\n raise NotImplementedError(\"delete_node_property()\")",
"def _default_deleter(self, obj):\n try:\n delattr(obj, self._name)\n except AttributeError:\n pass\n except TypeError:\n raise",
"def __delitem__(self, pbft_public_key):\n try:\n del self._store_db[pbft_public_key]\n\n # If the key is the active key, then also clear the active key\n if self.active_key == pbft_public_key:\n self.active_key = None\n except KeyError:\n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
The del method for the address property should reset the address to None.
|
def test_BridgeAddressBase_address_del(self):
self.bab.address = '11.12.13.14'
self.assertEqual(self.bab.address, ipaddr.IPv4Address('11.12.13.14'))
del(self.bab.address)
self.assertIsNone(self.bab.address)
self.assertIsNone(self.bab._address)
|
[
"def remove_address(self, address_id):\n pass",
"def _clear_address(self):\n for part_addr in [\n \"street\",\n \"house\",\n \"slash\",\n \"letter\",\n \"corpus\",\n \"building\",\n \"room\",\n \"hotel\",\n \"num_address_type\",\n \"region\",\n \"area\",\n \"location\",\n \"place\",\n ]:\n setattr(self, part_addr, \"\")",
"def delete_address(self, address):\n params = {'address': address}\n self._make_request('deleteAddress', **params)\n return True",
"def remove_address(self, address: str):\n receiver = self.receivers.pop(address)\n receiver.close()",
"def delete_address(self, ip_address):\n str_command = 'netsh interface ipv4 delete address \"{}\" addr={}'.format(self.name, ip_address)\n command = Popen(str_command) \n stdout, stderr = command.communicate()\n if stdout is None and stderr is None:\n print('Success - {} removed from {}'.format(ip_address, self.name))\n else:\n print('Failure - {} was not removed from {}'.format(ip_address, self.name))\n print('\\t' + str(stdout))\n print('\\t' + str(stderr))\n self = self.__init__(self.interface)",
"def clear_address_from_mycity_object(mycity_object):\n if intent_constants.ZIP_CODE_KEY in mycity_object.session_attributes:\n del(mycity_object.session_attributes[intent_constants.ZIP_CODE_KEY])\n\n if intent_constants.CURRENT_ADDRESS_KEY in mycity_object.session_attributes:\n del(mycity_object.session_attributes[\n intent_constants.CURRENT_ADDRESS_KEY])\n\n return mycity_object",
"def _remove_ref(self, index: int):\n dataset = self[index]\n if hasattr(dataset, 'memory_address'):\n self._refs.pop(dataset.memory_address, None) # type: ignore",
"def propdel(self, key):\n self.properties[key] = None",
"def _delete(self):\n\n # Validate that we should be able to perform a delete on this\n # AddressBook object based on a valid ID value being defined\n self.validate_id('Sorry unable to delete address book as no ID value'\n 'is defined for it')\n\n # Attempt to issue the delete request to DotMailer to remove the\n # address book\n type(self).delete(self.id)\n\n # Clear the current ID value so we can't accidently call this\n # delete call multiple times\n self.id = None",
"def __del__(self):\n self.clear()",
"def test_BridgeAddressBase_fingerprint_del(self):\n self.bab.fingerprint = self.fingerprint\n self.assertEqual(self.bab.fingerprint, self.fingerprint)\n\n del(self.bab.fingerprint)\n self.assertIsNone(self.bab.fingerprint)\n self.assertIsNone(self.bab._fingerprint)",
"def remove_watch_address(self, coin_symbol: str, addresses: Iterable[str]):\n self.wallet[coin_symbol].addresses.difference_update(addresses)\n self.dump()",
"def delete(self, instance):\n self.descriptor.__delete__(instance)",
"def test_view_can_delete_a_physical_address(self):\n\n self.create_org()\n self.create_location()\n # create an address\n self.client().post('/api/organizations/1/locations/1/addresses/',\n data=self.address_data)\n # delete the address\n res = self.client().delete(\n '/api/organizations/1/locations/1/addresses/1')\n self.assertEqual(res.status_code, 202)\n self.assertNotIn(\"Chicago\", str(res.data))\n self.assertEqual({}, json.loads(res.data.decode()))\n # check to see if it's deleted\n response = self.client().get(\n '/api/organizations/1/locations/1/addresses/1')\n self.assertEqual(response.status_code, 404)",
"def removeExternalLocation(self, externalAddr: ghidra.program.model.address.Address) -> bool:\n ...",
"def disassociate_address(DryRun=None, PublicIp=None, AssociationId=None):\n pass",
"def removeReference(self,addr,referenced):\n return HopperLowLevel.removeReference(self.__internal_segment_addr__,addr,referenced)",
"def setAddress(self, address: ghidra.program.model.address.Address) -> None:\n ...",
"def unbind(self):\n self.tap.delete()\n self.update({'binding:host_id': ''})"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
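The BridgeAddressBase tests above (the init test and the two del tests) all exercise one idiom: a property whose deleter resets the private backing attribute to None instead of removing it. A hedged sketch of that idiom with illustrative names; the fingerprint property would follow the same pattern:

class BridgeAddressBase(object):
    """Illustrative only: a property whose deleter resets to None."""

    def __init__(self):
        # Both backing attributes start as None, as the init test expects.
        self._address = None
        self._fingerprint = None

    @property
    def address(self):
        return self._address

    @address.setter
    def address(self, value):
        self._address = value

    @address.deleter
    def address(self):
        # del obj.address resets the backing field rather than removing it,
        # so obj.address and obj._address both read as None afterwards.
        self._address = None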
Initialising a PluggableTransport with args should work.
|
def test_PluggableTransport_init_with_parameters(self):
pt = bridges.PluggableTransport(self.fingerprint,
"voltronPT", "1.2.3.4", 443,
{'sharedsecret': 'foobar'})
self.assertIsInstance(pt, bridges.PluggableTransport)
|
[
"def test_PluggableTransport_init(self):\n pt = bridges.PluggableTransport()\n self.assertIsInstance(pt, bridges.PluggableTransport)",
"def _InitTransport(self):\n if self.transport is None:\n self.transport = \\\n self.transport_class(self._GetAddress(),\n timeouts=self.timeouts,\n allow_non_master=self.allow_non_master)",
"def _prepare_transport(self):\n pass",
"def test_PluggableTransport_runChecks_invalid_pt_args(self):\n try:\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n 'sharedsecret=foobar')\n except Exception as error:\n self.failUnlessIsInstance(error,\n bridges.MalformedPluggableTransport)",
"def __init__(self, service_user, *args):\n self.service_user = service_user\n self.is_service_user = service_user is not None\n # super(UnifiedTransport, self).__init__(*args)\n xmlrpclib.Transport.__init__(self, *args)",
"def __init__(self, args, config):\n pass",
"def __init__(self, timeouts=None, transport=t.Transport,\n allow_non_master=False):\n self.timeouts = timeouts\n self.transport_class = transport\n self.allow_non_master = allow_non_master\n self.transport = None\n # The version used in RPC communication, by default unused:\n self.version = None",
"def add_transport_process(self, transport, **transport_process_kwargs):",
"def _make_transport(self, *args, **kwargs):\n transport = paramiko.Transport(*args, **kwargs)\n self.addCleanup(transport.close)\n return transport",
"def __init__(self, *args):\n _ida_pro.channel_redir_t_swiginit(self, _ida_pro.new_channel_redir_t(*args))",
"def __init__(self, **params):\n super(Remote, self).__init__(**params)",
"def __init__(self, *args, **kwargs):\n # Call the super class' __init__\n super().__init__(*args, **kwargs)\n\n # Set the queue's prefix\n plugin_queue.prefix = self.prefix",
"def __init__(self, *args):\n _snap.TSStr_swiginit(self,_snap.new_TSStr(*args))",
"def __init__(\n self,\n connect_to_host=bridge.DEFAULT_HOST,\n connect_to_port=DEFAULT_SERVER_PORT,\n loglevel=None,\n response_timeout=bridge.DEFAULT_RESPONSE_TIMEOUT,\n do_import=True,\n hook_import=False,\n record_stats=False,\n **kwargs\n ):\n super().__init__(\n connect_to_host=connect_to_host,\n connect_to_port=connect_to_port,\n loglevel=loglevel,\n response_timeout=response_timeout,\n hook_import=hook_import,\n record_stats=record_stats,\n **kwargs\n )\n\n if do_import:\n self.get_idaapi(do_import=True)\n self.get_idc(do_import=True)\n self.get_idautils(do_import=True)",
"def __init__(self, *args):\n _ida_pro.__qthread_t_swiginit(self, _ida_pro.new___qthread_t(*args))",
"def __init__(self):\n super(TNL3ServicePlugin, self).__init__()\n self._tn_info = None\n # self._driver = None\n self.task_manager = tasks.TaskManager()\n self.task_manager.start()\n self.tn_init()",
"def __init__(self, *args):\n _snap.TCh_swiginit(self,_snap.new_TCh(*args))",
"def __init__(self, transport, stream_id=\"default\"):\n self.transport = transport\n self.stream_id = None\n self.stream_id_buff = None\n self.stream_id_length = 0\n self.set_stream_id(stream_id)\n self.transport.connect()",
"def __init__(__self__,\n resource_name: str,\n args: PluginArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ..."
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Initialising a PluggableTransport without args should work.
|
def test_PluggableTransport_init(self):
pt = bridges.PluggableTransport()
self.assertIsInstance(pt, bridges.PluggableTransport)
|
[
"def test_PluggableTransport_init_with_parameters(self):\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n {'sharedsecret': 'foobar'})\n self.assertIsInstance(pt, bridges.PluggableTransport)",
"def _InitTransport(self):\n if self.transport is None:\n self.transport = \\\n self.transport_class(self._GetAddress(),\n timeouts=self.timeouts,\n allow_non_master=self.allow_non_master)",
"def _prepare_transport(self):\n pass",
"def test_PluggableTransport_runChecks_invalid_pt_args(self):\n try:\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n 'sharedsecret=foobar')\n except Exception as error:\n self.failUnlessIsInstance(error,\n bridges.MalformedPluggableTransport)",
"def __init__(self, service_user, *args):\n self.service_user = service_user\n self.is_service_user = service_user is not None\n # super(UnifiedTransport, self).__init__(*args)\n xmlrpclib.Transport.__init__(self, *args)",
"def __init__(self, timeouts=None, transport=t.Transport,\n allow_non_master=False):\n self.timeouts = timeouts\n self.transport_class = transport\n self.allow_non_master = allow_non_master\n self.transport = None\n # The version used in RPC communication, by default unused:\n self.version = None",
"def __init__(self, transport, stream_id=\"default\"):\n self.transport = transport\n self.stream_id = None\n self.stream_id_buff = None\n self.stream_id_length = 0\n self.set_stream_id(stream_id)\n self.transport.connect()",
"def _make_transport(self, *args, **kwargs):\n transport = paramiko.Transport(*args, **kwargs)\n self.addCleanup(transport.close)\n return transport",
"def __init__(self):\n super(TNL3ServicePlugin, self).__init__()\n self._tn_info = None\n # self._driver = None\n self.task_manager = tasks.TaskManager()\n self.task_manager.start()\n self.tn_init()",
"def add_transport_process(self, transport, **transport_process_kwargs):",
"def __init__(self, **params):\n super(Remote, self).__init__(**params)",
"def __init__(self, *args, **kwargs):\n # Call the super class' __init__\n super().__init__(*args, **kwargs)\n\n # Set the queue's prefix\n plugin_queue.prefix = self.prefix",
"def open_transport(transport):\n if not transport.isOpen():\n transport.open()",
"def __init__(self, *args):\n _ida_pro.channel_redir_t_swiginit(self, _ida_pro.new_channel_redir_t(*args))",
"def __init__(self, args, config):\n pass",
"def __init__(\n self,\n connect_to_host=bridge.DEFAULT_HOST,\n connect_to_port=DEFAULT_SERVER_PORT,\n loglevel=None,\n response_timeout=bridge.DEFAULT_RESPONSE_TIMEOUT,\n do_import=True,\n hook_import=False,\n record_stats=False,\n **kwargs\n ):\n super().__init__(\n connect_to_host=connect_to_host,\n connect_to_port=connect_to_port,\n loglevel=loglevel,\n response_timeout=response_timeout,\n hook_import=hook_import,\n record_stats=record_stats,\n **kwargs\n )\n\n if do_import:\n self.get_idaapi(do_import=True)\n self.get_idc(do_import=True)\n self.get_idautils(do_import=True)",
"def __init__(self, session=None):\n super(PerspectiveAIOHTTPClient, self).__init__(PerspectiveAIOHTTPWebsocketConnection(session=session))",
"def test_transport_instantiation(sync_transport_no_abc):\n assert sync_transport_no_abc",
"def __init__(self, *args):\n _ida_pro.__qthread_t_swiginit(self, _ida_pro.new___qthread_t(*args))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
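The two init tests above imply that every constructor parameter is optional: PluggableTransport() and the fully-specified five-argument form must both succeed. A minimal sketch of a constructor with that shape, where the parameter names are assumptions inferred from the calls in the tests:

class PluggableTransport(object):
    """Illustrative only: accepts both constructor forms used above."""

    def __init__(self, fingerprint=None, methodname=None,
                 address=None, port=None, arguments=None):
        # Defaulting every parameter keeps the bare PluggableTransport()
        # form valid; value checks would happen elsewhere.
        self.fingerprint = fingerprint
        self.methodname = methodname
        self.address = address
        self.port = port
        self.arguments = arguments or {}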
Parsing a valid list of PT args should return a dictionary.
|
def test_PluggableTransport_parseArgumentsIntoDict_valid_list(self):
pt = bridges.PluggableTransport()
args = pt._parseArgumentsIntoDict(["sharedsecret=foobar",
"publickey=1234"])
self.assertIsInstance(args, dict)
self.assertItemsEqual(args, {"sharedsecret": "foobar",
"publickey": "1234"})
|
[
"def test_PluggableTransport_parseArgumentsIntoDict_valid_list_multi(self):\n pt = bridges.PluggableTransport()\n args = pt._parseArgumentsIntoDict([\"sharedsecret=foobar,password=baz\",\n \"publickey=1234\"])\n self.assertIsInstance(args, dict)\n self.assertItemsEqual(args, {\"sharedsecret\": \"foobar\",\n \"password\": \"baz\",\n \"publickey\": \"1234\"})",
"def parse_ptask(l) :\n task = {}\n l = l.strip()\n s = l.split(',')\n task['max_inst'] = 3\n for x in s :\n prop_extract(x, 'name', task)\n prop_extract(x, 'ctime', task)\n prop_extract(x, 'period', task)\n prop_extract(x, 'deadline', task)\n prop_extract(x, 'max_inst', task)\n return task",
"def parseArgs (args):\n result = {}\n \n for arg in args:\n try:\n (var, val) = string.split (arg, '=', 1)\n except:\n raise (SyntaxError, '%s is in the wrond format' % (arg))\n \n if (var[:2] != '--'):\n raise (SyntaxError, 'variable names must start with a ' +\n 'double dash (%s)' % (var))\n \n result[var[2:]] = val\n return (result)",
"def parseInputPars(input_dict,in_list):\n input_pars = {}\n for par in in_list:\n if par in input_dict: \n input_pars[par] = input_dict[par]\n else: \n input_pars[par] = DEFAULT_PARS[par]\n \n return input_pars",
"def parseArgs(args):\n args_map = {}\n curkey = None\n for i in xrange(1, len(args)):\n if args[i][0] == '-':\n args_map[args[i]] = True\n curkey = args[i]\n else:\n assert curkey\n args_map[curkey] = args[i]\n curkey = None\n return args_map",
"def parse_arg_list(arg_list):\n arg_dict = OrderedDict()\n for arg in arg_list:\n if '=' in arg:\n arg_name, arg_value = arg.split('=', 1)\n else:\n arg_name = arg\n arg_value = None\n arg_dict[arg_name] = arg_value\n return arg_dict",
"def parse_arguments(self, arguments: List[str]) -> Dict[str, Any]:\n parameters = list(map(lambda x: x.name, self.params.values()))\n idx = 0\n result = dict()\n\n while idx < len(arguments):\n name, val, incr = read_param_pair(idx, arguments, parameters)\n idx += incr\n result[name] = val\n\n for (key, val) in result.items():\n if key in self.params.keys():\n annotation = self.params[key].annotation\n\n if annotation is not None:\n result[key] = annotation(val)\n\n return result",
"def parseArgs(args):\n filelist = []\n filenames = []\n plotoptions = {}\n for a in args:\n asplit = a.split(':')\n path = asplit[0]\n filelist.append(path)\n filenames.append(path)\n plotoptions[path] = []\n has_title = False\n has_name = \"\"\n for i in range(1, len(asplit)):\n ## Add 'Title' if there is no = sign before math mode\n if '=' not in asplit[i] or ('$' in asplit[i] and asplit[i].index('$') < asplit[i].index('=')):\n asplit[i] = 'Title=%s' % asplit[i]\n if asplit[i].startswith('Title='):\n has_title = True\n plotoptions[path].append(asplit[i])\n if asplit[i].startswith('Name='):\n has_name = asplit[i].split('=', 1)[1]\n filenames[-1] = has_name\n if has_name != \"\":\n plotoptions[has_name] = plotoptions[path]\n del plotoptions[path]\n if path != \"PLOT\" and not has_title:\n plotoptions[has_name if has_name != \"\" else path].append('Title=%s' % sanitiseString(os.path.basename( os.path.splitext(path)[0] )) )\n return filelist, filenames, plotoptions",
"def test_list_argument_parsing():\n arguments = [\n {\n \"name\": \"places\",\n \"type\": \"list\",\n \"default\": None\n }\n ]\n parser = reading.build_template_argparser(arguments)\n values = parser.parse_args([\"--places\", \"hawaii\", \"california\", \"oregon\"])\n assert values.places == [\"hawaii\", \"california\", \"oregon\"]\n\n values_with_spaces = parser.parse_args(['--places', \"california\",\n \"new mexico\", \"washington\"])\n assert values_with_spaces.places == [\"california\",\n \"new mexico\",\n \"washington\"]",
"def process_func_args(func_name, in_args):\n\n\tdebug(2, \"reading arguments for function \" + func_name)\n\n\t# initialize the structure for the function.\n\tg.args[func_name] = {}\n\tg.args[func_name]['dict'] = {}\n\tg.args[func_name]['list'] = []\n\n\tif not in_args:\n\t\treturn\n\n\t# split in_args into each argument and its type\n\t# split on commas\n\tags = split_args(in_args)\n\n\tsplit_in_args = []\n\tfor a in ags:\n\t\t#arg = re.match(\"(\\w+)\\s*([&*]{0,1})*\\s*(\\w+)\", a)\n\t\targ = re.match(\"([&]{0,1})\\s*[\\(]{0,1}\\s*(\\w+)\\s*[\\)]{0,1}\", a)\n\t\tif arg:\n\t\t\t#arg_type = arg.group(1)\n\t\t\targ_modifier = arg.group(1)\n\t\t\targ_name = arg.group(2)\n\t\t\t#split_in_args.append({'name': arg_name, 'modifier': arg_modifier, 'type': arg_type})\n\t\t\tsplit_in_args.append({'name': arg_name, 'modifier': arg_modifier})\n\t\telse:\n\t\t\terror(\"couldn't match the arg in process_func_args: '\" + a + \"' from in_args: '\" + in_args + \"', ags: \" + str(ags))\n\n\t# set the type for each arg from the spit information\n\tfor sia in split_in_args:\n\t\t# output debug info\n\t\tdebug_string = \"\"\n\t\tif sia['modifier']:\n\t\t\tdebug_string += \" \" + sia['modifier']\n\t\tdebug_string += \" \" + sia['name'] + \"\\n\"\n\t\tdebug(3, debug_string)\n\n\t\t# save info\n\t\tg.args[func_name]['list'].append(sia['name'])\n\t\tg.args[func_name]['dict'][sia['name']] = {}\n\t\tif sia['modifier']:\n\t\t\tg.args[func_name]['dict'][sia['name']]['modifier'] = sia['modifier']\n\t\telse:\n\t\t\tg.args[func_name]['dict'][sia['name']]['modifier'] = None",
"def _parse_args(self):\n self._verify(self.args + list(self.kwargs))\n\n self.name = self.args[0]\n self.nodes = self.args[1:1+self.num_nodes]\n self.value = self._parse_values(self.args[1+self.num_nodes:])\n self.kwargs = self._parse_pairs(self.kwargs)\n # for key, value in self.kwargs.items():\n # setattr(self, key, value)",
"def arg_plist(self, line):\n if not line:\n raise IllegalClientResponse(\"Missing argument\")\n\n if line[:1] != b\"(\":\n raise IllegalClientResponse(\"Missing parenthesis\")\n\n i = line.find(b\")\")\n\n if i == -1:\n raise IllegalClientResponse(\"Mismatched parenthesis\")\n\n return (parseNestedParens(line[1:i],0), line[i+2:])",
"def parse_args_string(val: str) -> TypeInspectionsArgs:\n out = {}\n\n for chunk in val.split(';'):\n args = {}\n\n alias, _, argstr = chunk.strip().partition(':')\n argstr = argstr.strip()\n\n for arg in argstr.split(','):\n name, _, val = arg.partition('=')\n val = val.strip()\n\n if val:\n args[name.strip()] = val\n\n if args:\n out[alias.strip()] = args\n\n return out",
"def prepare_args(self, session_data_tasks: List[str],\n args: dict, decrypt: bool) -> dict:\n parsed_args = {}\n for arg in args:\n value = args[arg]\n\n # value is data id\n if self.contains_data_task_id(value):\n data_task_id, filters = self.parse_value(value)\n df = self.data_task_id_to_data_frame(\n data_task_id, session_data_tasks, decrypt)\n if filters:\n df = self.apply_filters(df, filters)\n value = df\n\n # value is list containing data ids\n if (isinstance(value, list) and\n value and self.contains_data_task_id(value[0])):\n dfs = []\n for el in value:\n data_task_id, filters = self.parse_value(el)\n df = self.data_task_id_to_data_frame(\n data_task_id, session_data_tasks, decrypt)\n if filters:\n df = self.apply_filters(df, filters)\n dfs.append(df)\n value = dfs\n\n parsed_args[arg] = value\n\n return parsed_args",
"def _individual_args(args) -> None:\n if args is None:\n return\n\n if not isinstance(args, list):\n raise PluginValidationError(\n f\"Invalid {ConfigKeys.PLUGIN_ARGS.name} entry '{args}': must be a list\"\n )\n\n for arg in args:\n if not isinstance(arg, str):\n raise PluginValidationError(\n f\"Invalid plugin argument '{arg}': must be a string\"\n )",
"def _args_to_dict() -> Dict[str, str]:\r\n arguments = {}\r\n for argument in sys.argv[1:]:\r\n if '=' in argument:\r\n separated = argument.find('=')\r\n key, value = argument[:separated], argument[separated + 1:]\r\n arguments[key] = value\r\n return arguments",
"def get_parameters(args):\n posargs = []\n kwargs = {}\n for arg in args:\n if '=' in arg:\n k,_,v = arg.rpartition('=')\n if ',' in v:\n v = [i for i in v.split(',') if i]\n kwargs[k] = v\n else:\n if ',' in arg:\n arg = [i for i in arg.split(',') if i]\n posargs.append(arg)\n return posargs, kwargs",
"def readInputParameters(inputList):\n paramDict = dict()\n for e in inputList:\n eSplit = e.split(\"=\")\n paramDict[eSplit[0]] = eSplit[1]\n return paramDict",
"def get_args (args):\n\n args = args[1:]\n args_list = ''.join(args).strip().replace(']][[',']]|[[').split('|')\n\n adjM = ast.literal_eval(args_list[0])\n samples = ast.literal_eval(args_list[1])\n return adjM, samples"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Parsing a valid list with multiple PT args in a single list element should return a dictionary.
|
def test_PluggableTransport_parseArgumentsIntoDict_valid_list_multi(self):
pt = bridges.PluggableTransport()
args = pt._parseArgumentsIntoDict(["sharedsecret=foobar,password=baz",
"publickey=1234"])
self.assertIsInstance(args, dict)
self.assertItemsEqual(args, {"sharedsecret": "foobar",
"password": "baz",
"publickey": "1234"})
|
[
"def parse_ptask(l) :\n task = {}\n l = l.strip()\n s = l.split(',')\n task['max_inst'] = 3\n for x in s :\n prop_extract(x, 'name', task)\n prop_extract(x, 'ctime', task)\n prop_extract(x, 'period', task)\n prop_extract(x, 'deadline', task)\n prop_extract(x, 'max_inst', task)\n return task",
"def parseInputPars(input_dict,in_list):\n input_pars = {}\n for par in in_list:\n if par in input_dict: \n input_pars[par] = input_dict[par]\n else: \n input_pars[par] = DEFAULT_PARS[par]\n \n return input_pars",
"def parse_arg_list(arg_list):\n arg_dict = OrderedDict()\n for arg in arg_list:\n if '=' in arg:\n arg_name, arg_value = arg.split('=', 1)\n else:\n arg_name = arg\n arg_value = None\n arg_dict[arg_name] = arg_value\n return arg_dict",
"def test_PluggableTransport_parseArgumentsIntoDict_valid_list(self):\n pt = bridges.PluggableTransport()\n args = pt._parseArgumentsIntoDict([\"sharedsecret=foobar\",\n \"publickey=1234\"])\n self.assertIsInstance(args, dict)\n self.assertItemsEqual(args, {\"sharedsecret\": \"foobar\",\n \"publickey\": \"1234\"})",
"def _parse_list(self, inputs):\n # Lists can only be used as inputs in the case where there is a single input node.\n # Validate that this is true. If so, resolve the list into a dict and parse it.\n input_nodes = self.get_nodes_by_role(NodeRole.INPUT)\n if len(input_nodes) == 1:\n _inputs = {next(iter(input_nodes)): inputs}\n else:\n raise CompositionError(\n f\"Inputs to {self.name} must be specified in a dictionary with a key for each of its \"\n f\"{len(input_nodes)} INPUT nodes ({[n.name for n in input_nodes]}).\")\n input_dict, num_inputs_sets = self._parse_dict(_inputs)\n return input_dict, num_inputs_sets",
"def arg_plist(self, line):\n if not line:\n raise IllegalClientResponse(\"Missing argument\")\n\n if line[:1] != b\"(\":\n raise IllegalClientResponse(\"Missing parenthesis\")\n\n i = line.find(b\")\")\n\n if i == -1:\n raise IllegalClientResponse(\"Mismatched parenthesis\")\n\n return (parseNestedParens(line[1:i],0), line[i+2:])",
"def convertToDict( p ):\n\n # Check for a ParseResult which is actually a list (i.e. all values are\n # empty)\n if not p.keys():\n out = []\n for item in p:\n if isinstance( item, ParseResults):\n out.append(convertToDict(item))\n else:\n out.append(item)\n return out\n else:\n out = {}\n for k in p.keys():\n v = p[k]\n if isinstance( k, ParseResults):\n k = listToTuple(k.asList())\n if isinstance( v, ParseResults):\n v = convertToDict(v)\n out[k] = v\n return out",
"def readInputParameters(inputList):\n paramDict = dict()\n for e in inputList:\n eSplit = e.split(\"=\")\n paramDict[eSplit[0]] = eSplit[1]\n return paramDict",
"def parse_keyword_list(iter):\n return parse_list_template(parse_keyword, iter)",
"def parseArgs (args):\n result = {}\n \n for arg in args:\n try:\n (var, val) = string.split (arg, '=', 1)\n except:\n raise (SyntaxError, '%s is in the wrond format' % (arg))\n \n if (var[:2] != '--'):\n raise (SyntaxError, 'variable names must start with a ' +\n 'double dash (%s)' % (var))\n \n result[var[2:]] = val\n return (result)",
"def test_list_argument_parsing():\n arguments = [\n {\n \"name\": \"places\",\n \"type\": \"list\",\n \"default\": None\n }\n ]\n parser = reading.build_template_argparser(arguments)\n values = parser.parse_args([\"--places\", \"hawaii\", \"california\", \"oregon\"])\n assert values.places == [\"hawaii\", \"california\", \"oregon\"]\n\n values_with_spaces = parser.parse_args(['--places', \"california\",\n \"new mexico\", \"washington\"])\n assert values_with_spaces.places == [\"california\",\n \"new mexico\",\n \"washington\"]",
"def parse_arguments(self, arguments: List[str]) -> Dict[str, Any]:\n parameters = list(map(lambda x: x.name, self.params.values()))\n idx = 0\n result = dict()\n\n while idx < len(arguments):\n name, val, incr = read_param_pair(idx, arguments, parameters)\n idx += incr\n result[name] = val\n\n for (key, val) in result.items():\n if key in self.params.keys():\n annotation = self.params[key].annotation\n\n if annotation is not None:\n result[key] = annotation(val)\n\n return result",
"def map_listjobs(item):\n fields = item.split()\n\n fields = [x.split(\":\", 1)[-1] for x in fields]\n\n return tuple( fields )",
"def list_to_dict(lst):\n\n if len(lst) == 6:\n qtn = tf.transformations.quaternion_from_euler(lst[3], lst[4], lst[5])\n elif len(lst) == 7:\n qtn = Quaternion()\n qtn.x = lst[3]\n qtn.y = lst[4]\n qtn.z = lst[5]\n qtn.w = lst[6]\n else:\n raise MoveItCommanderException(\"\"\"Expected either 6 or 7 elements\n in list: (x,y,z,r,p,y) or (x,y,z,qx,qy,qz,qw)\"\"\")\n\n pnt = Point()\n pnt.x = lst[0]\n pnt.y = lst[1]\n pnt.z = lst[2]\n\n pose_dict = {\n 'position': pnt,\n 'orientation': qtn\n }\n return pose_dict",
"def parse_instructions(instruction_list):\n instruction_dict = []\n for instruction in instruction_list:\n regex_match = re.match(r\"(?P<direction>\\w)(?P<value>\\d*)\",instruction)\n if regex_match:\n instruction_dict.append(regex_match.groupdict())\n return instruction_dict",
"def try_parse_list(list_vals, valid_types=None):\n valid = True\n result = []\n for item in list_vals:\n (v, value) = try_parse(item, valid_types)\n valid = valid and v\n result.append(value)\n\n return v, result",
"def parseHints(self, hintlist):\n print(\"Got to self.parseHints with hints = \", hintlist)\n parsed = []\n for hint in hintlist:\n parsed.append(hint.split('='))\n\n return parsed",
"def parse_field_list(input: str, field_sep: str = \";\", kv_sep: str = \"=\") -> dict:\n try:\n fields = input.split(field_sep)\n kv_pairs = (f.split(kv_sep) for f in fields)\n return {k: v for k, v in kv_pairs}\n except ValueError as err:\n raise NotAFieldListError(\n f\"Expected {field_sep}-separated list of key{kv_sep}value pairs: {input}\"\n ) from err",
"def parse_parameters(iter):\n # check that the parameters definition starts with the correct keyword\n if not iter.try_match(':parameters'):\n raise ValueError('Error keyword \":parameters\" required before '\n 'parameter list!')\n varList = parse_typed_var_list(next(iter))\n return varList"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
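Both _parseArgumentsIntoDict tests above expect key=value tokens, possibly comma-joined inside a single list element, to flatten into one dictionary. A minimal sketch of such a parser; this is an assumption about the method's behaviour reconstructed from the two tests, not a copy of bridgedb's code:

def parse_arguments_into_dict(arg_list):
    """Illustrative only: flatten ["k1=v1,k2=v2", "k3=v3"] into a dict."""
    args = {}
    for element in arg_list:
        # Each list element may carry several comma-separated k=v pairs.
        for pair in element.split(','):
            if not pair:
                continue
            key, _, value = pair.partition('=')
            args[key] = value
    return args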
Calling _checkArguments on a scramblesuit PT without a password should raise a MalformedPluggableTransport exception.
|
def test_PluggableTransport_checkArguments_scramblesuit_missing_password(self):
pt = bridges.PluggableTransport()
self.assertRaises(
bridges.MalformedPluggableTransport,
pt.updateFromStemTransport,
self.fingerprint, 'scramblesuit', ('34.230.223.87', 37341, []))
|
[
"def test_PluggableTransport_runChecks_invalid_pt_args(self):\n try:\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n 'sharedsecret=foobar')\n except Exception as error:\n self.failUnlessIsInstance(error,\n bridges.MalformedPluggableTransport)",
"def test_PluggableTransport_checkArguments_obfs4_missing_cert(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, ['iat-mode=1']))",
"def test_PluggableTransport_getTransportLine_ptargs_space_delimited(self):\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n {'sharedsecret': 'foobar',\n 'password': 'unicorns'})\n bridgeLine = pt.getTransportLine()\n self.assertTrue(\n (\"password=unicorns sharedsecret=foobar\" in bridgeLine) or\n (\"sharedsecret=foobar password=unicorns\" in bridgeLine))",
"def test_PluggableTransport_checkArguments_obfs4_missing_publickey(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, [\n ('iat-mode=1,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722')]))",
"def test_PluggableTransport_runChecks_invalid_port_type(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', \"anyport\", [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_encrypt_missing_password_positional_argument(self):\n self.assertRaises(TypeError, lambda: scrypt.encrypt(self.input))",
"def test_PluggableTransport_runChecks_invalid_port_range(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 65536, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_fingerprint(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n \"INVALIDFINGERPRINT\", 'obfs4', ('34.230.223.87', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,'\n 'public-key=0a5b046d07f6f971b7776de682f57c5b9cdc8fa060db7ef59de82e721c8098f4')]))",
"def verifyPlaintextPassword(password):",
"def test_PluggableTransport_checkArguments_obfs4_missing_iatmode(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, [\n 'cert=UXj/cWm0qolGrROYpkl0UyD/7PEhzkoZkZXrOpjRKwImvkpQZwmF0nSzBXfyfbT9afBZEw']))",
"def check_passport(mandatory_fields, line):\n pass",
"def bad_password_handler(spawn):\n raise UniconAuthenticationError('Bad Password sent to device %s' % (str(spawn),))",
"def test_password_prompt(self, fake_getpass, fake_stderr):\n cli_args = ['--clusters', 'myCluster', '--location', '/foo', '--username', 'pat']\n\n iiqtools_cluster_backup.parse_args(cli_args)\n\n fake_getpass.assert_called()",
"def acceptable_password(password):\n LOG.debug(\"PASS\")\n LOG.debug(password)\n\n if password is not None:\n LOG.debug(len(password))\n\n if password is None:\n return False\n\n if len(password) < 3:\n return False\n\n return True",
"def test_check_pw_failure(dbtransaction, auth_env):\n from .. security import check_password\n password = 'not secret'\n assert check_password(password) is False",
"def handle_pass(self, args):\n if not args:\n self.error(IRC.ERR_NEEDMOREPARAMS)\n return\n self.password = args",
"def test_validate_ticket_track_arguments_successful_execution():\n\n # Verify valid value\n assert not ExtraHop_v2.validate_ticket_track_arguments(\"3\")",
"def test_integration_getConfigLine_scramblesuit_withFingerprint(self):\n bridge = bridges.Bridge('philipkdick', '23.23.23.23', 2323,\n id_digest=self.id_digest,\n or_addresses=self.or_addresses)\n ptArgs = {'password': 'NEQGQYLUMUQGK5TFOJ4XI2DJNZTS4LRO'}\n pt = bridges.PluggableTransport(bridge.fingerprint, 'scramblesuit',\n ipaddr.IPAddress('42.42.42.42'), 4242,\n ptArgs)\n bridge.transports.append(pt)\n bridgeLine = bridge.getConfigLine(includeFingerprint=True,\n transport='scramblesuit')\n ptArgsList = ' '.join([\"{0}={1}\".format(k,v) for k,v in ptArgs.items()])\n self.assertEqual(\"scramblesuit 42.42.42.42:4242 %s %s\"\n % (self.fingerprint, ptArgsList),\n bridgeLine)",
"def test_PluggableTransport_parseArgumentsIntoDict_valid_list(self):\n pt = bridges.PluggableTransport()\n args = pt._parseArgumentsIntoDict([\"sharedsecret=foobar\",\n \"publickey=1234\"])\n self.assertIsInstance(args, dict)\n self.assertItemsEqual(args, {\"sharedsecret\": \"foobar\",\n \"publickey\": \"1234\"})"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling _checkArguments on an obfs4 PT without an iat-mode argument should raise a MalformedPluggableTransport exception.
|
def test_PluggableTransport_checkArguments_obfs4_missing_iatmode(self):
pt = bridges.PluggableTransport()
self.assertRaises(
bridges.MalformedPluggableTransport,
pt.updateFromStemTransport,
self.fingerprint, 'obfs4', ('34.230.223.87', 37341, [
'cert=UXj/cWm0qolGrROYpkl0UyD/7PEhzkoZkZXrOpjRKwImvkpQZwmF0nSzBXfyfbT9afBZEw']))
|
[
"def test_PluggableTransport_checkArguments_obfs4_missing_cert(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, ['iat-mode=1']))",
"def test_PluggableTransport_runChecks_invalid_pt_args(self):\n try:\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n 'sharedsecret=foobar')\n except Exception as error:\n self.failUnlessIsInstance(error,\n bridges.MalformedPluggableTransport)",
"def test_PluggableTransport_checkArguments_obfs4_missing_publickey(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, [\n ('iat-mode=1,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722')]))",
"def test_PluggableTransport_runChecks_invalid_port_type(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', \"anyport\", [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_ip(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.InvalidPluggableTransportIP,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_port_range(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 65536, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_checkArguments_scramblesuit_missing_password(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'scramblesuit', ('34.230.223.87', 37341, []))",
"def test_bluetoothctl_with_invalid_args(self):\n\n output='Too many arguments: 2 > 1'\n self.assertEqual(parse(output, quiet=True), [])",
"def test_require_arg(self):\n self.layer.require_arg('bobofet')\n self.assertRaises(outline.layer.LayerException, self.layer.check_required_args)\n self.layer.set_arg('bobofet', 1)\n self.layer.check_required_args()",
"def test_PluggableTransport_getTransportLine_ptargs_space_delimited(self):\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n {'sharedsecret': 'foobar',\n 'password': 'unicorns'})\n bridgeLine = pt.getTransportLine()\n self.assertTrue(\n (\"password=unicorns sharedsecret=foobar\" in bridgeLine) or\n (\"sharedsecret=foobar password=unicorns\" in bridgeLine))",
"def test_PluggableTransport_runChecks_invalid_fingerprint(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n \"INVALIDFINGERPRINT\", 'obfs4', ('34.230.223.87', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,'\n 'public-key=0a5b046d07f6f971b7776de682f57c5b9cdc8fa060db7ef59de82e721c8098f4')]))",
"def test_params_type_check(test_endpoint):\n\n with pytest.raises(ValueError):\n test_endpoint.params = False",
"def check_type_arg_validity(arg):\n\n return arg.lower() in (\"o\", \"output\", \"i\", \"input\")",
"def _checkMode(self, ax_args):\n mode = ax_args.get('mode')\n if isinstance(mode, bytes):\n mode = str(mode, encoding=\"utf-8\")\n if mode != self.mode:\n if not mode:\n raise NotAXMessage()\n else:\n raise AXError('Expected mode %r; got %r' % (self.mode, mode))",
"def test_api_endpoint_param():\n _ = Apptuit(sanitize_mode=None, token=\"test_token\", api_endpoint=\"https://api.apptuit.ai/\")\n with assert_raises(ValueError):\n _ = Apptuit(sanitize_mode=None, token=\"test_token\", api_endpoint=None)\n with assert_raises(ValueError):\n _ = Apptuit(sanitize_mode=None, token=\"test_token\", api_endpoint=\"\")",
"def test_prepare_missing_param(self):\n with self.assertRaises(vnf.VnfPreparationException):\n self.ims_vnf.prepare()",
"def _validate_args(recognition_provider, ingestion_pipelinerun_id, ingestion_provider):\n if bool(ingestion_pipelinerun_id) == bool(ingestion_provider):\n raise ValueError('pipeline requires exactly one of out of ingestion pipeline run \\\n and ingestion provider - zero or two were given')\n if ingestion_pipelinerun_id and\\\n not isinstance(ingestion_pipelinerun_id, str):\n raise ValueError('ingestion pipeline run id is not a string')\n if ingestion_provider and not isinstance(ingestion_provider, str):\n raise ValueError('ingestion pipeline provider id is not a string')\n if not isinstance(recognition_provider, str):\n raise ValueError('recognition provider is not a string')",
"def test_bad_rt(self):\n self.assertEqual(check_args(self.bad_rt), {'rt': 168})",
"def _handle_arguments() -> None:\n # Check used arguments\n all_features = (\"--all\" in sys.argv) or (\"-a\" in sys.argv)\n ardupilot_commands = \"--with-commands\" in sys.argv\n ardupilot_geofence = \"--with-geofence\" in sys.argv\n ardupilot_telemetry = \"--with-telemetry\" in sys.argv\n qt_visual = \"--with-qt\" in sys.argv\n\n # Enable features according to arguments\n if ardupilot_commands or all_features:\n send_commands.Commander()\n if ardupilot_geofence or all_features:\n _start_mavlink_geofence()\n if ardupilot_telemetry or all_features:\n _start_telemetry()\n if qt_visual or all_features:\n _init_qt()\n\n # CLI if visual portion is not enabled\n if not (qt_visual or all_features):\n _console()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling _checkArguments on an obfs4 PT without a cert argument should raise a MalformedPluggableTransport exception.
|
def test_PluggableTransport_checkArguments_obfs4_missing_cert(self):
pt = bridges.PluggableTransport()
self.assertRaises(
bridges.MalformedPluggableTransport,
pt.updateFromStemTransport,
self.fingerprint, 'obfs4', ('34.230.223.87', 37341, ['iat-mode=1']))
|
[
"def test_PluggableTransport_runChecks_invalid_pt_args(self):\n try:\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n 'sharedsecret=foobar')\n except Exception as error:\n self.failUnlessIsInstance(error,\n bridges.MalformedPluggableTransport)",
"def test_PluggableTransport_checkArguments_obfs4_missing_iatmode(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, [\n 'cert=UXj/cWm0qolGrROYpkl0UyD/7PEhzkoZkZXrOpjRKwImvkpQZwmF0nSzBXfyfbT9afBZEw']))",
"def test_PluggableTransport_checkArguments_obfs4_missing_publickey(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, [\n ('iat-mode=1,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722')]))",
"def test_PluggableTransport_checkArguments_scramblesuit_missing_password(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'scramblesuit', ('34.230.223.87', 37341, []))",
"def test_PluggableTransport_getTransportLine_ptargs_space_delimited(self):\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n {'sharedsecret': 'foobar',\n 'password': 'unicorns'})\n bridgeLine = pt.getTransportLine()\n self.assertTrue(\n (\"password=unicorns sharedsecret=foobar\" in bridgeLine) or\n (\"sharedsecret=foobar password=unicorns\" in bridgeLine))",
"def test_PluggableTransport_runChecks_invalid_port_type(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', \"anyport\", [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_port_range(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 65536, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_ip(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.InvalidPluggableTransportIP,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_validate_ticket_track_arguments_successful_execution():\n\n # Verify valid value\n assert not ExtraHop_v2.validate_ticket_track_arguments(\"3\")",
"def test_bluetoothctl_with_invalid_args(self):\n\n output='Too many arguments: 2 > 1'\n self.assertEqual(parse(output, quiet=True), [])",
"def test_require_arg(self):\n self.layer.require_arg('bobofet')\n self.assertRaises(outline.layer.LayerException, self.layer.check_required_args)\n self.layer.set_arg('bobofet', 1)\n self.layer.check_required_args()",
"def validatePhEDExSubscription(arguments):\n for site in arguments.get(\"AutoApproveSubscriptionSites\", []):\n if site.endswith('_MSS'):\n raise WMSpecFactoryException(\"Auto-approval to MSS endpoint is not allowed: %s\" % site)\n if arguments.get(\"SubscriptionPriority\", \"Low\") not in [\"Low\", \"Normal\", \"High\"]:\n raise WMSpecFactoryException(\"Invalid subscription priority: %s\" % arguments[\"SubscriptionPriority\"])\n if arguments.get(\"CustodialSubType\", \"Replica\") not in [\"Move\", \"Replica\"]:\n raise WMSpecFactoryException(\"Invalid custodial subscription type: %s\" % arguments[\"CustodialSubType\"])\n if arguments.get(\"NonCustodialSubType\", \"Replica\") not in [\"Move\", \"Replica\"]:\n raise WMSpecFactoryException(\"Invalid non custodial subscription type: %s\" % arguments[\"NonCustodialSubType\"])\n\n if 'CustodialGroup' in arguments and not isinstance(arguments[\"CustodialGroup\"], basestring):\n raise WMSpecFactoryException(\"Invalid custodial PhEDEx group: %s\" % arguments[\"CustodialGroup\"])\n if 'NonCustodialGroup' in arguments and not isinstance(arguments[\"NonCustodialGroup\"], basestring):\n raise WMSpecFactoryException(\"Invalid non custodial PhEDEx group: %s\" % arguments[\"NonCustodialGroup\"])\n if 'DeleteFromSource' in arguments and not isinstance(arguments[\"DeleteFromSource\"], bool):\n raise WMSpecFactoryException(\"Invalid DeleteFromSource type, it must be boolean\")\n\n return",
"def ValidateOptions(self, opt, args):",
"def test_constructorDoesNotAllowExtraChainWithoutPrivateKey(self):\n self.assertRaises(\n ValueError,\n sslverify.OpenSSLCertificateOptions,\n certificate=self.sCert,\n extraCertChain=self.extraCertChain,\n )",
"def test_PluggableTransport_runChecks_invalid_fingerprint(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n \"INVALIDFINGERPRINT\", 'obfs4', ('34.230.223.87', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,'\n 'public-key=0a5b046d07f6f971b7776de682f57c5b9cdc8fa060db7ef59de82e721c8098f4')]))",
"def test_PluggableTransport_parseArgumentsIntoDict_valid_list(self):\n pt = bridges.PluggableTransport()\n args = pt._parseArgumentsIntoDict([\"sharedsecret=foobar\",\n \"publickey=1234\"])\n self.assertIsInstance(args, dict)\n self.assertItemsEqual(args, {\"sharedsecret\": \"foobar\",\n \"publickey\": \"1234\"})",
"def test_validate_ticket_track_arguments_failed_execution():\n # Verify invalid value\n with pytest.raises(ExtraHop_v2.InvalidValueError) as err:\n ExtraHop_v2.validate_ticket_track_arguments(\"4\")\n\n assert (\n str(err.value)\n == \"4 is an invalid value for incident_status. Possible values are: ['0', '1', '2', '3']\"\n )",
"def test_api_endpoint_param():\n _ = Apptuit(sanitize_mode=None, token=\"test_token\", api_endpoint=\"https://api.apptuit.ai/\")\n with assert_raises(ValueError):\n _ = Apptuit(sanitize_mode=None, token=\"test_token\", api_endpoint=None)\n with assert_raises(ValueError):\n _ = Apptuit(sanitize_mode=None, token=\"test_token\", api_endpoint=\"\")",
"def test_check_kwargs_strict():\n kwargs = dict(poolsize=100, volume_fraction=0.9, augment_dims=1)\n with pytest.raises(\n RuntimeError,\n match=\"Keyword arguments contain unknown keys: {'augment_dims'}\",\n ):\n check_proposal_kwargs(FlowProposal, kwargs, strict=True)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling _checkArguments on an obfs4 PT without a publickey argument should raise a MalformedPluggableTransport exception.
|
def test_PluggableTransport_checkArguments_obfs4_missing_publickey(self):
pt = bridges.PluggableTransport()
self.assertRaises(
bridges.MalformedPluggableTransport,
pt.updateFromStemTransport,
self.fingerprint, 'obfs4', ('34.230.223.87', 37341, [
('iat-mode=1,'
'node-id=2a79f14120945873482b7823caabe2fcde848722')]))
|
[
"def test_PluggableTransport_runChecks_invalid_pt_args(self):\n try:\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n 'sharedsecret=foobar')\n except Exception as error:\n self.failUnlessIsInstance(error,\n bridges.MalformedPluggableTransport)",
"def test_PluggableTransport_checkArguments_obfs4_missing_cert(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, ['iat-mode=1']))",
"def test_PluggableTransport_checkArguments_obfs4_missing_iatmode(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, [\n 'cert=UXj/cWm0qolGrROYpkl0UyD/7PEhzkoZkZXrOpjRKwImvkpQZwmF0nSzBXfyfbT9afBZEw']))",
"def test_PluggableTransport_checkArguments_scramblesuit_missing_password(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'scramblesuit', ('34.230.223.87', 37341, []))",
"def test_PluggableTransport_runChecks_invalid_fingerprint(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n \"INVALIDFINGERPRINT\", 'obfs4', ('34.230.223.87', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,'\n 'public-key=0a5b046d07f6f971b7776de682f57c5b9cdc8fa060db7ef59de82e721c8098f4')]))",
"def test_PluggableTransport_parseArgumentsIntoDict_valid_list(self):\n pt = bridges.PluggableTransport()\n args = pt._parseArgumentsIntoDict([\"sharedsecret=foobar\",\n \"publickey=1234\"])\n self.assertIsInstance(args, dict)\n self.assertItemsEqual(args, {\"sharedsecret\": \"foobar\",\n \"publickey\": \"1234\"})",
"def test_PluggableTransport_runChecks_invalid_port_type(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', \"anyport\", [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_ip(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.InvalidPluggableTransportIP,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_getTransportLine_ptargs_space_delimited(self):\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n {'sharedsecret': 'foobar',\n 'password': 'unicorns'})\n bridgeLine = pt.getTransportLine()\n self.assertTrue(\n (\"password=unicorns sharedsecret=foobar\" in bridgeLine) or\n (\"sharedsecret=foobar password=unicorns\" in bridgeLine))",
"def test_PluggableTransport_runChecks_invalid_port_range(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 65536, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_parseArgumentsIntoDict_valid_list_multi(self):\n pt = bridges.PluggableTransport()\n args = pt._parseArgumentsIntoDict([\"sharedsecret=foobar,password=baz\",\n \"publickey=1234\"])\n self.assertIsInstance(args, dict)\n self.assertItemsEqual(args, {\"sharedsecret\": \"foobar\",\n \"password\": \"baz\",\n \"publickey\": \"1234\"})",
"def test_PluggableTransport_init_with_parameters(self):\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n {'sharedsecret': 'foobar'})\n self.assertIsInstance(pt, bridges.PluggableTransport)",
"def validate_pub_key(pub_key: str):\n try:\n Keypair.from_public_key(pub_key)\n return True\n except:\n return False",
"def test_check_kwargs_strict():\n kwargs = dict(poolsize=100, volume_fraction=0.9, augment_dims=1)\n with pytest.raises(\n RuntimeError,\n match=\"Keyword arguments contain unknown keys: {'augment_dims'}\",\n ):\n check_proposal_kwargs(FlowProposal, kwargs, strict=True)",
"def test_keys_valid(self):\n self.assertEqual(None, tsig_keys.check({'keys':\n ['testkey:QklORCAxMCBpcyBjb29sCg==',\n 'test.key:QklORCAxMCBpcyBjb29sCg==:hmac-sha1']}))",
"def clean(self):\n super(SignedSSHKey, self).clean()\n if not self.certificate.startswith('ssh-rsa-cert-v01@openssh.com'):\n raise BadRequestError(\"Certificate is not a valid signed RSA key.\")",
"def test_require_arg(self):\n self.layer.require_arg('bobofet')\n self.assertRaises(outline.layer.LayerException, self.layer.check_required_args)\n self.layer.set_arg('bobofet', 1)\n self.layer.check_required_args()",
"def test_validate_ticket_track_arguments_successful_execution():\n\n # Verify valid value\n assert not ExtraHop_v2.validate_ticket_track_arguments(\"3\")",
"def _validate_init_args(self):\r\n\r\n birdseed_args = {\r\n 'access_key': self.access_key,\r\n 'access_secret': self.access_secret,\r\n 'consumer_key': self.consumer_key,\r\n 'consumer_secret': self.consumer_secret,\r\n 'query': self.query\r\n }\r\n\r\n # iterate through the keys of the dict\r\n # check that the value it represents is \"truthy\" (in this case, not None)\r\n # if it IS None, raise a ValueError telling the caller it must provide that argument\r\n for key in birdseed_args:\r\n if not birdseed_args[key]:\r\n raise ValueError('Please provide `{}`'.format(key))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling _runChecks() on a PluggableTransport with an invalid fingerprint should raise a MalformedPluggableTransport exception.
|
def test_PluggableTransport_runChecks_invalid_fingerprint(self):
pt = bridges.PluggableTransport()
self.assertRaises(
bridges.MalformedPluggableTransport,
pt.updateFromStemTransport,
"INVALIDFINGERPRINT", 'obfs4', ('34.230.223.87', 37341, [
('iat-mode=0,'
'node-id=2a79f14120945873482b7823caabe2fcde848722,'
'public-key=0a5b046d07f6f971b7776de682f57c5b9cdc8fa060db7ef59de82e721c8098f4')]))
|
[
"def test_PluggableTransport_runChecks_invalid_ip(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.InvalidPluggableTransportIP,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_pt_args(self):\n try:\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n 'sharedsecret=foobar')\n except Exception as error:\n self.failUnlessIsInstance(error,\n bridges.MalformedPluggableTransport)",
"def test_PluggableTransport_runChecks_invalid_port_type(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', \"anyport\", [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_port_range(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 65536, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_checkArguments_obfs4_missing_cert(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, ['iat-mode=1']))",
"def test_PluggableTransport_checkArguments_obfs4_missing_publickey(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, [\n ('iat-mode=1,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722')]))",
"def test_PluggableTransport_checkArguments_scramblesuit_missing_password(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'scramblesuit', ('34.230.223.87', 37341, []))",
"def test_PluggableTransport_checkArguments_obfs4_missing_iatmode(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, [\n 'cert=UXj/cWm0qolGrROYpkl0UyD/7PEhzkoZkZXrOpjRKwImvkpQZwmF0nSzBXfyfbT9afBZEw']))",
"async def test_invalid_dumpling(\n self, mocker, mock_websocket, test_dumpling_dns,\n test_dumpling_pktcount, eater_with_mocked_handlers):\n mock_websocket.recv.side_effect = [\n json.dumps(test_dumpling_dns),\n '{invalid',\n json.dumps(test_dumpling_pktcount),\n RuntimeError,\n ]\n\n mock_logger = mocker.patch.object(eater_with_mocked_handlers, 'logger')\n\n try:\n await eater_with_mocked_handlers._grab_dumplings()\n except RuntimeError:\n pass\n\n assert eater_with_mocked_handlers.on_dumpling.call_count == 2\n assert mock_logger.error.call_count >= 1",
"def test_jwt_auth_invalid_transport(self):\n try:\n connect(auth_mechanism=\"JWT\", jwt=\"dummy.jwt.arg\")\n assert False, \"'connect' method should have thrown an exception but did not\"\n except NotSupportedError as e:\n assert \"JWT authentication is only supported for HTTP transport\" in str(e)",
"def _failed():\n raise BaseException",
"def test_process_speed_wrong_payload(self): # pylint: disable=invalid-name\n xknx = XKNX()\n fan = Fan(xknx, name=\"TestFan\", group_address_speed=\"1/2/3\")\n telegram = Telegram(\n destination_address=GroupAddress(\"1/2/3\"),\n payload=GroupValueWrite(DPTBinary(1)),\n )\n with self.assertRaises(CouldNotParseTelegram):\n self.loop.run_until_complete(fan.process(telegram))",
"def test_instantiate_with_bad_credentials_type(self):\n self.assertRaises(TypeError, self._auth, [])",
"def test_fail_verify_when_not_connected(self, debug_session, tdevice):\n with pytest.raises(Exception):\n debug_session.verify(tdevice[\"hex-image\"])",
"def test_process_fan_payload_invalid_length(self):\n # pylint: disable=invalid-name\n xknx = XKNX()\n fan = Fan(xknx, name=\"TestFan\", group_address_speed=\"1/2/3\")\n telegram = Telegram(\n destination_address=GroupAddress(\"1/2/3\"),\n payload=GroupValueWrite(DPTArray((23, 24))),\n )\n with self.assertRaises(CouldNotParseTelegram):\n self.loop.run_until_complete(fan.process(telegram))",
"def test_hostFromBlankSSLTransport(self):\n x = self.assertRaises(CertificateError,\n sslverify.Certificate.hostFromTransport,\n _MaybeSSLTransport())\n self.assertTrue(str(x).startswith(\"TLS\"))",
"def test_hostFromNonSSLTransport(self):\n x = self.assertRaises(CertificateError,\n sslverify.Certificate.hostFromTransport,\n _NotSSLTransport())\n self.assertTrue(str(x).startswith(\"non-TLS\"))",
"def _prepare_transport(self):\n pass",
"def test_post_bad_telemetry(self):\n t0 = Telemetry(latitude=38,\n longitude=-76,\n altitude_msl=100,\n uas_heading=90)\n # The Telemetry constructor prevents us from passing invalid\n # values, but we can still screw things up in an update\n t0.latitude = 'baz'\n with self.assertRaises(InteropError):\n self.client.post_telemetry(t0)\n with self.assertRaises(InteropError):\n self.async_client.post_telemetry(t0).result()\n\n # We only accept Telemetry objects (or objects that behave like\n # Telemetry, not dicts.\n t1 = {\n 'latitude': 38,\n 'longitude': -76,\n 'altitude_msl': 100,\n 'uas_heading': 90\n }\n with self.assertRaises(AttributeError):\n self.client.post_telemetry(t1)\n with self.assertRaises(AttributeError):\n self.async_client.post_telemetry(t1).result()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling _runChecks() on a PluggableTransport with an invalid IP address should raise an InvalidPluggableTransportIP exception.
|
def test_PluggableTransport_runChecks_invalid_ip(self):
pt = bridges.PluggableTransport()
self.assertRaises(
bridges.InvalidPluggableTransportIP,
pt.updateFromStemTransport,
self.fingerprint, 'obfs4', ('34.230.223', 37341, [
('iat-mode=0,'
'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))
|
[
"def test_PluggableTransport_runChecks_invalid_pt_args(self):\n try:\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n 'sharedsecret=foobar')\n except Exception as error:\n self.failUnlessIsInstance(error,\n bridges.MalformedPluggableTransport)",
"def test_PluggableTransport_runChecks_invalid_port_range(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 65536, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_port_type(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', \"anyport\", [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_wrong_ip(self):\n\n requirement = self.tool_basic_requirement()\n\n requirement.sender_ip = \"1.1.1.1\"\n requirement.sender_ip_cidr = \"32\"\n\n requirement.save()\n\n helper = self.tool_get_helper()\n\n # Try to mimic a connection from the right IP\n\n helper.connect(\"\", \"\", \"1.1.1.1\", \"\", {})\n\n self.assertTrue(\n helper.enabled,\n \"Helper wasn't enabled after connecting with the right IP\"\n )\n\n # Try to mimic a connection from the wrong IP\n\n helper = self.tool_get_helper()\n\n helper.connect(\"\", \"\", \"1.1.1.2\", \"\", {})\n\n self.assertFalse(\n helper.enabled,\n \"Helper was enabled after connecting with the wrong IP\"\n )",
"def test_PluggableTransport_runChecks_invalid_fingerprint(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n \"INVALIDFINGERPRINT\", 'obfs4', ('34.230.223.87', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,'\n 'public-key=0a5b046d07f6f971b7776de682f57c5b9cdc8fa060db7ef59de82e721c8098f4')]))",
"def test_validate_ip_for_get_peer_command_failure(requests_mock) -> None:\n args = {\"ip_or_id\": \"1:1:1\"}\n mock_client = init_mock_client(on_cloud=False, requests_mock=requests_mock)\n with pytest.raises(ExtraHop_v2.DemistoException) as error:\n _ = ExtraHop_v2.peers_get_command(mock_client, args, False)\n assert \"Error parsing IP Address 1:1:1\" == str(error.value)",
"def test_extrahop_protocols_get_invalid_ip(args, message, requests_mock):\n client = init_mock_client(requests_mock, on_cloud=False)\n with pytest.raises(Exception) as error:\n ExtraHop_v2.protocols_get_command(client, args, False)\n\n assert str(error.value) == message",
"def display_invalid_ip_error(self):\r\n self._display_error(\"The given IP address cannot be used. Please try again.\")",
"def test_bad_ip():\n ip = \"1.1.0.1\"\n server = Server(ip)\n assert str(server) == ip\n assert not server.ping()",
"def test_hostFromNonSSLTransport(self):\n x = self.assertRaises(CertificateError,\n sslverify.Certificate.hostFromTransport,\n _NotSSLTransport())\n self.assertTrue(str(x).startswith(\"non-TLS\"))",
"def _failed():\n raise BaseException",
"def test_extrahop_protocols_get_ip_not_present_in_extrahop(requests_mock):\n client = init_mock_client(requests_mock, on_cloud=False)\n args = {\"ip_or_id\": \"0.0.0.0\"}\n expected_error_message = (\n f\"Error the IP Address {args['ip_or_id']} was not found in ExtraHop.\"\n )\n expected_response = []\n requests_mock.post(\n f\"{BASE_URL}/api/v1/devices/search\", json=expected_response, status_code=200\n )\n with pytest.raises(DemistoException) as error:\n ExtraHop_v2.protocols_get_command(client, args, False)\n\n assert str(error.value) == expected_error_message",
"def test_PluggableTransport_checkArguments_obfs4_missing_cert(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, ['iat-mode=1']))",
"def test_invalid_ipv4(self):\n self.assertEqual(is_valid_ip_address(\"192.168.0.256\"), False)",
"def display_ip_validation_error(self):\r\n self._display_error(\"The given IP address is not valid. Please try again.\")",
"def testPortComparisonValidation(self):\n bytecode = sock_diag.InetDiagBcOp((sock_diag.INET_DIAG_BC_D_GE, 4, 8))\n self.assertEquals(\"???\",\n self.sock_diag.DecodeBytecode(bytecode))\n self.assertRaisesErrno(\n EINVAL,\n self.sock_diag.DumpAllInetSockets, IPPROTO_TCP, bytecode.Pack())",
"async def test_validate_access_cloud(hass: HomeAssistant, provider) -> None:\n await async_setup_component(\n hass,\n \"http\",\n {\n \"http\": {\n CONF_TRUSTED_PROXIES: [\"192.168.128.0/31\", \"fd00::1\"],\n CONF_USE_X_FORWARDED_FOR: True,\n }\n },\n )\n hass.config.components.add(\"cloud\")\n\n provider.async_validate_access(ip_address(\"192.168.128.2\"))\n\n remote.is_cloud_request.set(True)\n with pytest.raises(tn_auth.InvalidAuthError):\n provider.async_validate_access(ip_address(\"192.168.128.2\"))",
"def __validate_ip_fields(self, data, field_name):\n\n regex = r\"^\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}$\"\n errs, result = self.match_regular_expression(\n regex, data[field_name], \"ip\")\n\n # in case the RE did not match or their was a key error\n if not result:\n errs.append(self.return_field_message(field_name, \"ip\"))\n return errs",
"def ip_check():\n ip_address = socket.gethostbyname(\"localhost\")\n if ip_address != \"127.0.0.1\":\n subject = \"Error - localhost cannot be resolved to 127.0.0.1\"\n message = email.generate_error_report(subject)\n emails.send(message)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling _runChecks() on a PluggableTransport with an invalid port should raise a MalformedPluggableTransport exception.
|
def test_PluggableTransport_runChecks_invalid_port_type(self):
pt = bridges.PluggableTransport()
self.assertRaises(
bridges.MalformedPluggableTransport,
pt.updateFromStemTransport,
self.fingerprint, 'obfs4', ('34.230.223.87', "anyport", [
('iat-mode=0,'
'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))
|
[
"def test_PluggableTransport_runChecks_invalid_port_range(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 65536, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_pt_args(self):\n try:\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n 'sharedsecret=foobar')\n except Exception as error:\n self.failUnlessIsInstance(error,\n bridges.MalformedPluggableTransport)",
"def test_PluggableTransport_runChecks_invalid_ip(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.InvalidPluggableTransportIP,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_fingerprint(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n \"INVALIDFINGERPRINT\", 'obfs4', ('34.230.223.87', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,'\n 'public-key=0a5b046d07f6f971b7776de682f57c5b9cdc8fa060db7ef59de82e721c8098f4')]))",
"def test_bad_port():\n pytest.xfail(\"Bad port.\")\n connect_to_dremio_flight_server_endpoint(\"localhost\",\n \"12345\", \"dremio\", \"dremio123\", False, False, False)",
"def validate_port(self):\n\n if self.port == None:\n self.port = \"\"\n else:\n try:\n self.port = int(self.port)\n if not 1 <= self.port <= 65535:\n raise ValueError\n except ValueError:\n end(UNKNOWN, \"port number must be a whole number between \" \\\n + \"1 and 65535\")",
"def test_tls_port(self):\n if self._tls_port is None:\n return\n try:\n int(self.tls_port)\n except ValueError:\n msg = '{} is not a valid port number.'.format(self.tls_port)\n raise ConfigException(msg)\n if self.port == self.tls_port:\n raise ConfigException(\"SMTP and SMTP/TLS ports must be different.\")",
"def _valid_port(self, port):\n if port.data is None or port.data.empty:\n return False\n if \"epoch\" not in port.meta:\n return False\n if port.data.shape[0] != self._num_times:\n if self._reporting == \"error\":\n raise WorkerInterrupt(\n f\"Received an epoch with {port.data.shape[0]} \"\n f\"samples instead of {self._num_times}.\"\n )\n elif self._reporting == \"warn\":\n self.logger.warning(\n f\"Received an epoch with {port.data.shape[0]} \"\n f\"samples instead of {self._num_times}. \"\n f\"Skipping.\"\n )\n return False\n else: # reporting is None\n # be cool\n return False\n return True",
"def test_PluggableTransport_checkArguments_scramblesuit_missing_password(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'scramblesuit', ('34.230.223.87', 37341, []))",
"def test_disallowed_service_ports(\n config_path, virtualPort,\n disallowed_ports=(\n 0, # no wildcard listeners\n 6667, # disallowed port\n )\n ):\n print(f\"currently processing: {config_path}, virtualPort: {virtualPort}\", end=\" \")\n if virtualPort is not None:\n assert isinstance(virtualPort, int) # virtualPort must be an integer\n assert (\n virtualPort not in disallowed_ports\n ) # virtualPort must not be in disallowed_ports",
"def test_PluggableTransport_checkArguments_obfs4_missing_cert(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, ['iat-mode=1']))",
"def is_valid(host_port):\n\n if len(host_port.split(\":\")) != 2:\n return False\n\n return True",
"def validate_rule_port(port):\n if isinstance(port, int):\n if port < 0 or port > 65535:\n return \"integer out of range\"\n return None\n\n # If not an integer, must be format N:M, i.e. a port range.\n try:\n fields = port.split(\":\")\n except AttributeError:\n return \"neither integer nor string\"\n\n if not len(fields) == 2:\n return \"range unparseable\"\n\n try:\n start = int(fields.pop(0))\n end = int(fields.pop(0))\n except ValueError:\n return \"range invalid\"\n\n if start >= end or start < 0 or end > 65535:\n return \"range invalid\"\n\n return None",
"def validate_unique_port(port):\n if not (1024 < port and port < 65536):\n arg_error(\n 'Port {} is unsupported; must be between 1024 and 65536, exclusive.'.format(port))\n if port in all_ports:\n arg_error('Port {} specified more than once.'.format(port))\n all_ports.append(port)",
"def test_invalid_port(device, port):\n with pytest.raises(ValueError):\n device.start_repeated_i2c_transceive(\n port, interval_us=1000, address=1, tx_data=[0x00], rx_length=1,\n timeout_us=1000, read_delay_us=1000)",
"def test_bad_password():\n pytest.xfail(\"Bad port.\")\n connect_to_dremio_flight_server_endpoint(\"localhost\",\n \"32010\", \"dremio\", \"badPassword\", False, False, False)",
"def _validate_port_range(self):\n # Let port_range override global value - if set on kernelspec...\n port_range = self.kernel_manager.port_range\n if self.proxy_config.get('port_range'):\n port_range = self.proxy_config.get('port_range')\n\n try:\n port_ranges = port_range.split(\"..\")\n\n self.lower_port = int(port_ranges[0])\n self.upper_port = int(port_ranges[1])\n\n port_range_size = self.upper_port - self.lower_port\n if port_range_size != 0:\n if port_range_size < min_port_range_size:\n self.log_and_raise(http_status_code=500, reason=\"Port range validation failed for range: '{}'. \"\n \"Range size must be at least {} as specified by env EG_MIN_PORT_RANGE_SIZE\".\n format(port_range, min_port_range_size))\n\n # According to RFC 793, port is a 16-bit unsigned int. Which means the port\n # numbers must be in the range (0, 65535). However, within that range,\n # ports 0 - 1023 are called \"well-known ports\" and are typically reserved for\n # specific purposes. For example, 0 is reserved for random port assignment,\n # 80 is used for HTTP, 443 for TLS/SSL, 25 for SMTP, etc. But, there is\n # flexibility as one can choose any port with the aforementioned protocols.\n # Ports 1024 - 49151 are called \"user or registered ports\" that are bound to\n # services running on the server listening to client connections. And, ports\n # 49152 - 65535 are called \"dynamic or ephemeral ports\". A TCP connection\n # has two endpoints. Each endpoint consists of an IP address and a port number.\n # And, each connection is made up of a 4-tuple consisting of -- client-IP,\n # client-port, server-IP, and server-port. A service runs on a server with a\n # specific IP and is bound to a specific \"user or registered port\" that is\n # advertised for clients to connect. So, when a client connects to a service\n # running on a server, three out of 4-tuple - client-IP, client-port, server-IP -\n # are already known. To be able to serve multiple clients concurrently, the\n # server's IP stack assigns an ephemeral port for the connection to complete\n # the 4-tuple.\n #\n # In case of JEG, we will accept ports in the range 1024 - 65535 as these days\n # admins use dedicated hosts for individual services.\n if self.lower_port < 1024 or self.lower_port > 65535:\n self.log_and_raise(http_status_code=500, reason=\"Invalid port range '{}' specified. \"\n \"Range for valid port numbers is (1024, 65535).\".format(port_range))\n if self.upper_port < 1024 or self.upper_port > 65535:\n self.log_and_raise(http_status_code=500, reason=\"Invalid port range '{}' specified. \"\n \"Range for valid port numbers is (1024, 65535).\".format(port_range))\n except ValueError as ve:\n self.log_and_raise(http_status_code=500, reason=\"Port range validation failed for range: '{}'. \"\n \"Error was: {}\".format(port_range, ve))\n except IndexError as ie:\n self.log_and_raise(http_status_code=500, reason=\"Port range validation failed for range: '{}'. \"\n \"Error was: {}\".format(port_range, ie))\n\n self.kernel_manager.port_range = port_range",
"def _port_validator(port_num: Optional[int]) -> bool:\n\n if port_num is None:\n pass # OK\n \n elif not isinstance(port_num, int):\n return False\n elif not PORT_NUMBER_MIN_VALUE <= port_num <= PORT_NUMBER_MAX_VALUE:\n return False\n \n return True",
"def port_num(port):\n print(\"checking port numbers\")\n if port not in PORT_RANGE:\n return False\n else:\n return True"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling _runChecks() on a PluggableTransport with an invalid port (too high) should raise a MalformedPluggableTransport exception.
|
def test_PluggableTransport_runChecks_invalid_port_range(self):
pt = bridges.PluggableTransport()
self.assertRaises(
bridges.MalformedPluggableTransport,
pt.updateFromStemTransport,
self.fingerprint, 'obfs4', ('34.230.223.87', 65536, [
('iat-mode=0,'
'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))
|
[
"def test_PluggableTransport_runChecks_invalid_port_type(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', \"anyport\", [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_pt_args(self):\n try:\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n 'sharedsecret=foobar')\n except Exception as error:\n self.failUnlessIsInstance(error,\n bridges.MalformedPluggableTransport)",
"def test_PluggableTransport_runChecks_invalid_ip(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.InvalidPluggableTransportIP,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_bad_port():\n pytest.xfail(\"Bad port.\")\n connect_to_dremio_flight_server_endpoint(\"localhost\",\n \"12345\", \"dremio\", \"dremio123\", False, False, False)",
"def test_PluggableTransport_runChecks_invalid_fingerprint(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n \"INVALIDFINGERPRINT\", 'obfs4', ('34.230.223.87', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,'\n 'public-key=0a5b046d07f6f971b7776de682f57c5b9cdc8fa060db7ef59de82e721c8098f4')]))",
"def test_tls_port(self):\n if self._tls_port is None:\n return\n try:\n int(self.tls_port)\n except ValueError:\n msg = '{} is not a valid port number.'.format(self.tls_port)\n raise ConfigException(msg)\n if self.port == self.tls_port:\n raise ConfigException(\"SMTP and SMTP/TLS ports must be different.\")",
"def validate_port(self):\n\n if self.port == None:\n self.port = \"\"\n else:\n try:\n self.port = int(self.port)\n if not 1 <= self.port <= 65535:\n raise ValueError\n except ValueError:\n end(UNKNOWN, \"port number must be a whole number between \" \\\n + \"1 and 65535\")",
"def test_PluggableTransport_checkArguments_scramblesuit_missing_password(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'scramblesuit', ('34.230.223.87', 37341, []))",
"def _valid_port(self, port):\n if port.data is None or port.data.empty:\n return False\n if \"epoch\" not in port.meta:\n return False\n if port.data.shape[0] != self._num_times:\n if self._reporting == \"error\":\n raise WorkerInterrupt(\n f\"Received an epoch with {port.data.shape[0]} \"\n f\"samples instead of {self._num_times}.\"\n )\n elif self._reporting == \"warn\":\n self.logger.warning(\n f\"Received an epoch with {port.data.shape[0]} \"\n f\"samples instead of {self._num_times}. \"\n f\"Skipping.\"\n )\n return False\n else: # reporting is None\n # be cool\n return False\n return True",
"def test_PluggableTransport_checkArguments_obfs4_missing_cert(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, ['iat-mode=1']))",
"def test_disallowed_service_ports(\n config_path, virtualPort,\n disallowed_ports=(\n 0, # no wildcard listeners\n 6667, # disallowed port\n )\n ):\n print(f\"currently processing: {config_path}, virtualPort: {virtualPort}\", end=\" \")\n if virtualPort is not None:\n assert isinstance(virtualPort, int) # virtualPort must be an integer\n assert (\n virtualPort not in disallowed_ports\n ) # virtualPort must not be in disallowed_ports",
"def is_valid(host_port):\n\n if len(host_port.split(\":\")) != 2:\n return False\n\n return True",
"def test_bad_password():\n pytest.xfail(\"Bad port.\")\n connect_to_dremio_flight_server_endpoint(\"localhost\",\n \"32010\", \"dremio\", \"badPassword\", False, False, False)",
"def testPortComparisonValidation(self):\n bytecode = sock_diag.InetDiagBcOp((sock_diag.INET_DIAG_BC_D_GE, 4, 8))\n self.assertEquals(\"???\",\n self.sock_diag.DecodeBytecode(bytecode))\n self.assertRaisesErrno(\n EINVAL,\n self.sock_diag.DumpAllInetSockets, IPPROTO_TCP, bytecode.Pack())",
"def _validate_port_range(self):\n # Let port_range override global value - if set on kernelspec...\n port_range = self.kernel_manager.port_range\n if self.proxy_config.get('port_range'):\n port_range = self.proxy_config.get('port_range')\n\n try:\n port_ranges = port_range.split(\"..\")\n\n self.lower_port = int(port_ranges[0])\n self.upper_port = int(port_ranges[1])\n\n port_range_size = self.upper_port - self.lower_port\n if port_range_size != 0:\n if port_range_size < min_port_range_size:\n self.log_and_raise(http_status_code=500, reason=\"Port range validation failed for range: '{}'. \"\n \"Range size must be at least {} as specified by env EG_MIN_PORT_RANGE_SIZE\".\n format(port_range, min_port_range_size))\n\n # According to RFC 793, port is a 16-bit unsigned int. Which means the port\n # numbers must be in the range (0, 65535). However, within that range,\n # ports 0 - 1023 are called \"well-known ports\" and are typically reserved for\n # specific purposes. For example, 0 is reserved for random port assignment,\n # 80 is used for HTTP, 443 for TLS/SSL, 25 for SMTP, etc. But, there is\n # flexibility as one can choose any port with the aforementioned protocols.\n # Ports 1024 - 49151 are called \"user or registered ports\" that are bound to\n # services running on the server listening to client connections. And, ports\n # 49152 - 65535 are called \"dynamic or ephemeral ports\". A TCP connection\n # has two endpoints. Each endpoint consists of an IP address and a port number.\n # And, each connection is made up of a 4-tuple consisting of -- client-IP,\n # client-port, server-IP, and server-port. A service runs on a server with a\n # specific IP and is bound to a specific \"user or registered port\" that is\n # advertised for clients to connect. So, when a client connects to a service\n # running on a server, three out of 4-tuple - client-IP, client-port, server-IP -\n # are already known. To be able to serve multiple clients concurrently, the\n # server's IP stack assigns an ephemeral port for the connection to complete\n # the 4-tuple.\n #\n # In case of JEG, we will accept ports in the range 1024 - 65535 as these days\n # admins use dedicated hosts for individual services.\n if self.lower_port < 1024 or self.lower_port > 65535:\n self.log_and_raise(http_status_code=500, reason=\"Invalid port range '{}' specified. \"\n \"Range for valid port numbers is (1024, 65535).\".format(port_range))\n if self.upper_port < 1024 or self.upper_port > 65535:\n self.log_and_raise(http_status_code=500, reason=\"Invalid port range '{}' specified. \"\n \"Range for valid port numbers is (1024, 65535).\".format(port_range))\n except ValueError as ve:\n self.log_and_raise(http_status_code=500, reason=\"Port range validation failed for range: '{}'. \"\n \"Error was: {}\".format(port_range, ve))\n except IndexError as ie:\n self.log_and_raise(http_status_code=500, reason=\"Port range validation failed for range: '{}'. \"\n \"Error was: {}\".format(port_range, ie))\n\n self.kernel_manager.port_range = port_range",
"def test_invalid_tcp_rule(self, setup_info, setup_mirror_session, ptfadapter, duthost):\n pass\n\n # NOTE: This type of rule won't really function since you need a TCP packet to have TCP flags.\n # However, we have still included such a rule in the acl.json file to validate that the SAI\n # will not crash if such a rule is installed. If this does happen, we expect the whole test\n # suite + loganaylzer + the sanity check to fail.",
"def validate_rule_port(port):\n if isinstance(port, int):\n if port < 0 or port > 65535:\n return \"integer out of range\"\n return None\n\n # If not an integer, must be format N:M, i.e. a port range.\n try:\n fields = port.split(\":\")\n except AttributeError:\n return \"neither integer nor string\"\n\n if not len(fields) == 2:\n return \"range unparseable\"\n\n try:\n start = int(fields.pop(0))\n end = int(fields.pop(0))\n except ValueError:\n return \"range invalid\"\n\n if start >= end or start < 0 or end > 65535:\n return \"range invalid\"\n\n return None",
"def test_invalid_port(device, port):\n with pytest.raises(ValueError):\n device.start_repeated_i2c_transceive(\n port, interval_us=1000, address=1, tx_data=[0x00], rx_length=1,\n timeout_us=1000, read_delay_us=1000)",
"def test_process_fan_payload_invalid_length(self):\n # pylint: disable=invalid-name\n xknx = XKNX()\n fan = Fan(xknx, name=\"TestFan\", group_address_speed=\"1/2/3\")\n telegram = Telegram(\n destination_address=GroupAddress(\"1/2/3\"),\n payload=GroupValueWrite(DPTArray((23, 24))),\n )\n with self.assertRaises(CouldNotParseTelegram):\n self.loop.run_until_complete(fan.process(telegram))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling _runChecks() on a PluggableTransport with invalid PT args should raise a MalformedPluggableTransport exception.
|
def test_PluggableTransport_runChecks_invalid_pt_args(self):
    # assertRaises also fails the test if no exception is raised at all,
    # unlike a bare try/except, which would pass silently in that case.
    self.assertRaises(
        bridges.MalformedPluggableTransport,
        bridges.PluggableTransport,
        self.fingerprint, "voltronPT", "1.2.3.4", 443,
        'sharedsecret=foobar')
|
[
"def test_PluggableTransport_runChecks_invalid_ip(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.InvalidPluggableTransportIP,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_port_type(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', \"anyport\", [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_port_range(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 65536, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,')]))",
"def test_PluggableTransport_runChecks_invalid_fingerprint(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n \"INVALIDFINGERPRINT\", 'obfs4', ('34.230.223.87', 37341, [\n ('iat-mode=0,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722,'\n 'public-key=0a5b046d07f6f971b7776de682f57c5b9cdc8fa060db7ef59de82e721c8098f4')]))",
"def test_PluggableTransport_checkArguments_obfs4_missing_cert(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, ['iat-mode=1']))",
"def test_PluggableTransport_checkArguments_scramblesuit_missing_password(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'scramblesuit', ('34.230.223.87', 37341, []))",
"def test_PluggableTransport_checkArguments_obfs4_missing_publickey(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, [\n ('iat-mode=1,'\n 'node-id=2a79f14120945873482b7823caabe2fcde848722')]))",
"def test_PluggableTransport_getTransportLine_ptargs_space_delimited(self):\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n {'sharedsecret': 'foobar',\n 'password': 'unicorns'})\n bridgeLine = pt.getTransportLine()\n self.assertTrue(\n (\"password=unicorns sharedsecret=foobar\" in bridgeLine) or\n (\"sharedsecret=foobar password=unicorns\" in bridgeLine))",
"def test_PluggableTransport_checkArguments_obfs4_missing_iatmode(self):\n pt = bridges.PluggableTransport()\n self.assertRaises(\n bridges.MalformedPluggableTransport,\n pt.updateFromStemTransport,\n self.fingerprint, 'obfs4', ('34.230.223.87', 37341, [\n 'cert=UXj/cWm0qolGrROYpkl0UyD/7PEhzkoZkZXrOpjRKwImvkpQZwmF0nSzBXfyfbT9afBZEw']))",
"def test_PluggableTransport_init_with_parameters(self):\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n {'sharedsecret': 'foobar'})\n self.assertIsInstance(pt, bridges.PluggableTransport)",
"def test_bad_rt(self):\n self.assertEqual(check_args(self.bad_rt), {'rt': 168})",
"def test_PluggableTransport_init(self):\n pt = bridges.PluggableTransport()\n self.assertIsInstance(pt, bridges.PluggableTransport)",
"def _checkValidTopping(self, topping):\n\n if not isinstance(topping, Topping):\n raise Exception(\"The topping given is not a valid one. Please enter valid topping.\")",
"def test_validate_ticket_track_arguments_successful_execution():\n\n # Verify valid value\n assert not ExtraHop_v2.validate_ticket_track_arguments(\"3\")",
"def _sanity_check(self) -> None:\n if self.archive_action:\n script_path, _ = self.archive_action\n if not script_path.exists() and script_path.is_file():\n self.errors += [f\"Unable to locate custom archive action script: {script_path}\"]\n if not self.payload_uris:\n self.errors += [f\"[{self.package_name}] payload contains no payload URIs\"]\n if not self.payload_base_uri and not self.payload_uris[0].endswith(\n self.supported_arch_formats\n ):\n if self.package_strip_dirs != 0:\n self.errors += [f\"[{self.package_name}] strip dirs set for a non-archive\"]\n if self.package_finalize_items:\n self.errors += [f\"[{self.package_name}] finalize items set for a non-archive\"]",
"def test_validate_ticket_track_arguments_failed_execution():\n # Verify invalid value\n with pytest.raises(ExtraHop_v2.InvalidValueError) as err:\n ExtraHop_v2.validate_ticket_track_arguments(\"4\")\n\n assert (\n str(err.value)\n == \"4 is an invalid value for incident_status. Possible values are: ['0', '1', '2', '3']\"\n )",
"def _failed():\n raise BaseException",
"def _check_args(self):\n if not self.wm_class and not self.wm_instance and not self.wm_title:\n raise RaiseorlaunchError(\n \"You need to specify \" '\"wm_class\", \"wm_instance\" or \"wm_title.'\n )\n if (self.workspace or self.target_workspace) and self.scratch:\n raise RaiseorlaunchError(\n \"You cannot use the scratchpad on a specific workspace.\"\n )\n if not check_positive(self.event_time_limit):\n raise RaiseorlaunchError(\n \"The event time limit must be a positive integer or float!\"\n )\n if self.workspace and self.target_workspace:\n if not self.workspace == self.target_workspace:\n raise RaiseorlaunchError(\n \"Setting workspace and initial workspace is ambiguous!\"\n )",
"def check_teleporter(self):\n if len(self._blue_teleporter) % 2 != 0:\n raise Exception(\"One of the blue teleporter is alone.\")\n if len(self._red_teleporter) % 2 != 0:\n raise Exception(\"One of the red teleporter is alone.\")\n if len(self._pink_teleporter) % 2 != 0:\n raise Exception(\"One of the pink teleporter is alone.\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
If the 'Bridge ' prefix was requested, then it should be at the beginning of the bridge line.
|
def test_PluggableTransport_getTransportLine_bridge_prefix(self):
pt = bridges.PluggableTransport(self.fingerprint,
"voltronPT", "1.2.3.4", 443,
{'sharedsecret': 'foobar',
'password': 'unicorns'})
bridgeLine = pt.getTransportLine(bridgePrefix=True)
self.assertTrue(bridgeLine.startswith("Bridge "))
|
[
"def test_Bridge_getBridgeLine_bridge_prefix(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n line = self.bridge.getBridgeLine(request, bridgePrefix=True)\n\n self.assertIsNotNone(line)\n self.assertIn('179.178.155.140:36489', line)\n self.assertIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)\n self.assertTrue(line.startswith('Bridge'))",
"def test_Bridge_constructBridgeLine_IPv6(self):\n bridge = bridges.Bridge()\n addrport = (u'6bf3:806b:78cd::4ced:cfad:dad4', 36488, 6)\n\n bridgeline = bridge._constructBridgeLine(addrport,\n includeFingerprint=False,\n bridgePrefix=True)\n self.assertEqual(bridgeline, 'Bridge [6bf3:806b:78cd::4ced:cfad:dad4]:36488')",
"def test_Bridge_str_without_fingerprint_without_nickname(self):\n bridge = bridges.Bridge()\n identifier = str(bridge)\n self.assertEqual(identifier, ''.join(['$', '0'*40, '~', 'Unnamed']))",
"def test_Bridge_getBridgeLine_googlygooglybegone(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n request.withPluggableTransportType('googlygooglybegone')\n\n self.assertRaises(bridges.PluggableTransportUnavailable,\n self.bridge.getBridgeLine,\n request)",
"def addPrefix(self, prefix):\n \n pass",
"def test_Bridge_str_without_fingerprint(self):\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n del(bridge.fingerprint)\n\n identifier = str(bridge)\n self.assertEqual(identifier,\n ''.join(['$', '0'*40,\n '~', bridge.nickname]))",
"def _NSPrefix(self, ns):\n if ns == self.defaultNS:\n return ''\n prefix = self.nsMap[ns]\n return prefix and prefix + ':' or ''",
"def AddPrefix(patch, text):\n return '%s%s' % (site_config.params.CHANGE_PREFIX[patch.remote], text)",
"def test_Bridge_str_without_safelogging(self):\n bridges.safelog.safe_logging = False\n\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n\n identifier = str(bridge)\n self.assertEqual(identifier,\n ''.join(['$', bridge.fingerprint,\n '~', bridge.nickname]))",
"def _allocate_bridge_name(self):\n name = None\n while name is None:\n name = self._random_name()\n if name in self.iso_bridge_mappings.values() or name in self.ovs_bridge_mappings.values():\n name = None\n if self._if_bridge_exists(name):\n name = None\n return name",
"def test_Bridge_getBridgeLine_IPv6(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n request.withIPv6()\n line = self.bridge.getBridgeLine(request)\n\n self.assertIsNotNone(line)\n self.assertTrue(\n line.startswith('[6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488'))\n self.assertNotIn('179.178.155.140:36493', line)\n self.assertIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def set_bidsprefix(self, bidsprefix):\n\n # remove '_bold.nii(.gz)' or '_events' if present **at the end of the bidsPrefix**\n for mystr in ['.gz', '.nii', '_bold', '_events']:\n bidsprefix = bidsprefix[:-len(mystr)] if bidsprefix.endswith(mystr) else bidsprefix\n\n # Whatever is left, we assign to the bidsprefix class attribute:\n self.bidsprefix = bidsprefix",
"def test_Bridge_getBridgeLine_IPv6_no_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n request.withIPv6()\n line = self.bridge.getBridgeLine(request, includeFingerprint=False)\n\n self.assertIsNotNone(line)\n self.assertTrue(\n line.startswith('[6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488'))\n self.assertNotIn('179.178.155.140:36493', line)\n self.assertNotIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def test_Bridge_str_with_safelogging(self):\n bridges.safelog.safe_logging = True\n\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n\n identifier = str(bridge)\n self.assertEqual(\n identifier,\n ''.join(['$$',\n hashlib.sha1(bridge.fingerprint).hexdigest().upper(),\n '~', bridge.nickname]))",
"def bridgeName(self):\n ret = libvirtmod.virNetworkGetBridgeName(self._o)\n if ret is None: raise libvirtError ('virNetworkGetBridgeName() failed', net=self)\n return ret",
"def _get_bridge_name(self):\n command = ovs_vsctl.VSCtlCommand(\n 'find',\n ('Bridge',\n 'datapath_id=%s' % dpid_lib.dpid_to_str(self.datapath_id)))\n self.run_command([command])\n if not isinstance(command.result, list) or len(command.result) != 1:\n raise OVSBridgeNotFound(\n datapath_id=dpid_lib.dpid_to_str(self.datapath_id))\n return command.result[0].name",
"def test_Bridge_getBridgeLine_no_include_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n line = self.bridge.getBridgeLine(request, includeFingerprint=False)\n\n self.assertIsNotNone(line)\n self.assertIn('179.178.155.140:36489', line)\n self.assertNotIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def has_obo_prefix(obj):\n return (\"uri_prefix\" not in obj) or (\n obj[\"uri_prefix\"] == \"http://purl.obolibrary.org/obo/\"\n )",
"def test_barname_stripper(self):\n assert bu.stripper(\"base-nto+armle-v7+signed.bar\") == \"base\""
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Check the order and content of the bridge line string.
|
def test_PluggableTransport_getTransportLine_content_order(self):
pt = bridges.PluggableTransport(self.fingerprint,
"voltronPT", "1.2.3.4", 443,
{'sharedsecret': 'foobar',
'password': 'unicorns'})
bridgeLine = pt.getTransportLine()
# We have to check for substrings because we don't know which order
# the PT arguments will end up in the bridge line. We also have to
# check for the lowercased transport name. Fortunately, the following
# three are the only ones which are important to have in order:
self.assertTrue(bridgeLine.startswith("voltronpt"))
self.assertSubstring("voltronpt 1.2.3.4:443 " + self.fingerprint,
bridgeLine)
# These ones can be in any order, but they should be at the end of the
# bridge line:
self.assertSubstring("password=unicorns", bridgeLine)
self.assertSubstring("sharedsecret=foobar", bridgeLine)
|
[
"def test_Bridge_getBridgeLine_request_valid(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n line = self.bridge.getBridgeLine(request)\n\n self.assertIsNotNone(line)\n self.assertIn('179.178.155.140:36489', line)\n self.assertIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def test_Bridge_getBridgeLine_bridge_prefix(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n line = self.bridge.getBridgeLine(request, bridgePrefix=True)\n\n self.assertIsNotNone(line)\n self.assertIn('179.178.155.140:36489', line)\n self.assertIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)\n self.assertTrue(line.startswith('Bridge'))",
"def _is_ingredient_heading_2(line):\n return line.strip() == '-------- ------------ --------------------------------'",
"def _is_line_break(self, char):\n return ord(char) == 10 or ord(char) == 13",
"def check_in_buffer(self):\n lines = []\n while \"\\n\" in self.in_buffer:\n nl = self.in_buffer.find(\"\\n\")\n if nl == -1:\n break\n line = self.in_buffer[:nl]\n # strip \\r newlines\n line = line.replace(\"\\r\", \"\")\n lines.append(line)\n self.in_buffer = self.in_buffer[nl + 1:]\n\n for line in lines:\n try:\n line = line.decode('utf-8', 'replace')\n except UnicodeDecodeError:\n log.error(\"Invalid encoding for irc line: %r\", line)\n else:\n self.parse_line(line)",
"def test_ends_newline(self):\r\n text = 'A line\\nAnother line\\nAnd a final one.\\n'\r\n expected_res = text.split('\\n')\r\n for res, expected in zip(split_by_newline(text), expected_res):\r\n self.assertEqual(res[1], expected)",
"def test_split_line(self):\n text = \" 1 2 3 4 5 6 7 8 9 0 \"\n for size, prefix, expect in (\n (3, 0, [\"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\", \"9\", \"0\"]),\n (3, 3, [\"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\", \"9\", \"0\"]),\n (4, 0, [\"1\", \"2 3\", \"4\", \"5\", \"6 7\", \"8\", \"9\", \"0\"]),\n (2, 7, [\"3\", \"4\", \"\", \"5\", \"6\", \"\", \"7\", \"8\", \"\", \"9\", \"0\"]),\n ):\n self.assertEqual(gfs._split_line(text, size, prefix), expect)\n # MAV line\n text = \"HR 06 09 12 15 18 21 00 03 06 09 12 15 18 21 00 03 06 09 12 18 00 \"\n line = gfs._split_line(text)\n self.assertEqual(len(line), 21)\n self.assertEqual(line[0], \"06\")\n self.assertEqual(line[-1], \"00\")\n # MEX line\n text = \"FHR 24 36| 48 60| 72 84| 96 108|120 132|144 156|168 180|192\"\n line = gfs._split_line(text, size=4)\n self.assertEqual(len(line), 15)\n self.assertEqual(line[0], \"24\")\n self.assertEqual(line[-1], \"192\")",
"def _validate_line(self):\n if self.comp_type == \"U\" and self.gap_len != 100:\n raise AGPError(self.fname, self.line_number, \"invalid gap length for component type 'U': %d (should be 100)\" % self.gap_len)\n\n if self.gap_type not in AGPGapLine.allowed_gap_types:\n raise AGPError(self.fname, self.line_number, \"invalid gap type: %s\" % self.gap_type)\n\n if self.linkage not in AGPGapLine.allowed_linkage_types:\n raise AGPError(self.fname, self.line_number, \"invalid linkage field: %s\" % self.linkage)\n\n all_evidence = self.linkage_evidence.split(\";\")\n for e in all_evidence:\n if e not in AGPGapLine.allowed_evidence_types:\n raise AGPError(self.fname, self.line_number, \"invalid linkage evidence: %s\" % e)\n\n if self.linkage == \"no\":\n if self.gap_type == \"scaffold\":\n raise AGPError(self.fname, self.line_number, \"invalid 'scaffold' gap without linkage evidence\")\n\n if self.linkage_evidence != \"na\":\n raise AGPError(self.fname, self.line_number, \"linkage evidence must be 'na' when not asserting linkage. Got {}\".format(self.linkage_evidence))\n else:\n if \"na\" in all_evidence:\n raise AGPError(self.fname, self.line_number, \"'na' is invalid linkage evidence when asserting linkage\")",
"def _IsExtraneousLine(line, send_cmd):\n return send_cmd.rstrip() in line",
"def test_Bridge_getBridgeLine_no_include_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n line = self.bridge.getBridgeLine(request, includeFingerprint=False)\n\n self.assertIsNotNone(line)\n self.assertIn('179.178.155.140:36489', line)\n self.assertNotIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def testMakeLine(self):\n self.assertEqual(\n r'\\path [line] (test1-0) -- (test1-1);',\n self.sf._makeLine(0, 1)\n )",
"def split_line(self):\n # coordinate of the # symbol or end of the line (-1) if not found\n hash_or_end = self.line.find(\"#\")\n temp = self.line[self.region_end:hash_or_end].strip(\" |\")\n self.coord_str = regex_paren.sub(\"\", temp)\n\n # don't want any meta_str if there is no metadata found\n if hash_or_end >= 0:\n self.meta_str = self.line[hash_or_end:]\n else:\n self.meta_str = \"\"",
"def test_Bridge_getBridgeLine_IPv6(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n request.withIPv6()\n line = self.bridge.getBridgeLine(request)\n\n self.assertIsNotNone(line)\n self.assertTrue(\n line.startswith('[6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488'))\n self.assertNotIn('179.178.155.140:36493', line)\n self.assertIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def _validate_msg(line: Line, string_idx: int) -> TResult[None]:\n # We first check for \"inner\" stand-alone comments (i.e. stand-alone\n # comments that have a string leaf before them AND after them).\n for inc in [1, -1]:\n i = string_idx\n found_sa_comment = False\n is_valid_index = is_valid_index_factory(line.leaves)\n while is_valid_index(i) and line.leaves[i].type in [\n token.STRING,\n STANDALONE_COMMENT,\n ]:\n if line.leaves[i].type == STANDALONE_COMMENT:\n found_sa_comment = True\n elif found_sa_comment:\n return TErr(\n \"StringMerger does NOT merge string groups which contain \"\n \"stand-alone comments.\"\n )\n\n i += inc\n\n num_of_inline_string_comments = 0\n set_of_prefixes = set()\n num_of_strings = 0\n for leaf in line.leaves[string_idx:]:\n if leaf.type != token.STRING:\n # If the string group is trailed by a comma, we count the\n # comments trailing the comma to be one of the string group's\n # comments.\n if leaf.type == token.COMMA and id(leaf) in line.comments:\n num_of_inline_string_comments += 1\n break\n\n if has_triple_quotes(leaf.value):\n return TErr(\"StringMerger does NOT merge multiline strings.\")\n\n num_of_strings += 1\n prefix = get_string_prefix(leaf.value).lower()\n if \"r\" in prefix:\n return TErr(\"StringMerger does NOT merge raw strings.\")\n\n set_of_prefixes.add(prefix)\n\n if id(leaf) in line.comments:\n num_of_inline_string_comments += 1\n if contains_pragma_comment(line.comments[id(leaf)]):\n return TErr(\"Cannot merge strings which have pragma comments.\")\n\n if num_of_strings < 2:\n return TErr(\n f\"Not enough strings to merge (num_of_strings={num_of_strings}).\"\n )\n\n if num_of_inline_string_comments > 1:\n return TErr(\n f\"Too many inline string comments ({num_of_inline_string_comments}).\"\n )\n\n if len(set_of_prefixes) > 1 and set_of_prefixes != {\"\", \"f\"}:\n return TErr(f\"Too many different prefixes ({set_of_prefixes}).\")\n\n return Ok(None)",
"def test_ends_character(self):\r\n text = 'A line\\nAnother line\\nAnd a final one.'\r\n expected_res = text.split('\\n')\r\n for res, expected in zip(split_by_newline(text), expected_res):\r\n self.assertEqual(res[1], expected)",
"def test_PluggableTransport_getTransportLine_ptargs_space_delimited(self):\n pt = bridges.PluggableTransport(self.fingerprint,\n \"voltronPT\", \"1.2.3.4\", 443,\n {'sharedsecret': 'foobar',\n 'password': 'unicorns'})\n bridgeLine = pt.getTransportLine()\n self.assertTrue(\n (\"password=unicorns sharedsecret=foobar\" in bridgeLine) or\n (\"sharedsecret=foobar password=unicorns\" in bridgeLine))",
"def pre_parse_line(self, line):\n datas = line.split(':')\n if len(datas) >= 2 and self.parse_label(datas[0]):\n return",
"def support_exact_line_search(self):\n return False",
"def check_orderline(self):\n if not self.line_ids:\n raise ValidationError(\"\"\"Cannot process return as there is no order line\n associated with this record!\"\"\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
The PT arguments in a bridge line should be space-separated.
|
def test_PluggableTransport_getTransportLine_ptargs_space_delimited(self):
pt = bridges.PluggableTransport(self.fingerprint,
"voltronPT", "1.2.3.4", 443,
{'sharedsecret': 'foobar',
'password': 'unicorns'})
bridgeLine = pt.getTransportLine()
self.assertTrue(
("password=unicorns sharedsecret=foobar" in bridgeLine) or
("sharedsecret=foobar password=unicorns" in bridgeLine))
|
[
"def parse_instruction(self, line):\n instruction, *args = line.strip().replace(',', '').split()\n return instruction, args",
"def _split_args_line(line):\n lexer = lap.Lexer(line)\n scanner = lap.Parser(lexer)\n tree = scanner.input_line()\n\n extractor = lap.QueryParamsExtractor()\n params_option_value, rest_of_args = extractor.visit(tree)\n\n return params_option_value, rest_of_args",
"def arg_plist(self, line):\n if not line:\n raise IllegalClientResponse(\"Missing argument\")\n\n if line[:1] != b\"(\":\n raise IllegalClientResponse(\"Missing parenthesis\")\n\n i = line.find(b\")\")\n\n if i == -1:\n raise IllegalClientResponse(\"Mismatched parenthesis\")\n\n return (parseNestedParens(line[1:i],0), line[i+2:])",
"def parse_parms(*parms):\n parameters = (\"shell {0} {1} {2} {3} {4} {5} {6}\".\n format(parms[0],\n '-p {0}'.format(parms[1]) if parms[1] else '',\n parms[2],\n parms[3],\n '-l {0}'.format(parms[4]) if parms[4] else '',\n base64.b64decode(parms[5]),\n parms[6]))\n return parameters.split()",
"def split_args(in_args):\n\n\t# this will split the arguments on commas. However, if the argument is\n\t# passed by reference, it will look like '&arg' or '& arg'\n\tsplit_args = re.split(\"\\s*,\\s*\", in_args)\n\n\treturn split_args",
"def _split_args(args):\n all_args = ([], [])\n wargb = False\n for an_arg in args:\n if wargb is True:\n all_args[0].append(an_arg)\n wargb = False\n continue\n if an_arg == \"--rropt\":\n wargb = True\n continue\n all_args[1].append(an_arg)\n return all_args",
"def get_args(text):\r\n\targ_tokens = text.split(\"=\")\r\n\tif len(arg_tokens) != 2:\r\n\t\traise Exception('invalid filter format',text)\r\n\treturn arg_tokens",
"def test_smart_print_multiple_args(self):\n arg_1 = 'Hello'\n arg_2 = 'everyone!'\n print_str = \"%s %s %s supports multiple args in smart_print.\"\n self.utils.smart_print(\n print_str, migrate_utils.DEBUG,\n arg_1, arg_2, platform.python_version())",
"def _append_args_line(self, line):\n name = line.split(' ', 1)[0]\n\n if name.startswith('@') and name.endswith(':'):\n line = line[len(name)+1:]\n self._start_args_section(name[1:-1])\n elif self._is_section_tag(name):\n self._append_line = self._append_various_line\n self._append_various_line(line)\n return\n elif (self._section.text.endswith('\\n\\n')\n and line and not line[0].isspace()):\n if line == 'Features:':\n self._append_line = self._append_features_line\n else:\n self._start_section()\n self._append_line = self._append_various_line\n self._append_various_line(line)\n return\n\n self._append_freeform(line.strip())",
"def dump_args(self) -> None:\n x = self\n table = (\n (x.old_sent_lines, 'old private lines'),\n (x.a, 'old public lines'),\n (x.b, 'new public lines'),\n )\n for lines, title in table:\n x.dump_lines(lines, title)\n g.pr()",
"def get_args (args):\n\n args = args[1:]\n args_list = ''.join(args).strip().replace(']][[',']]|[[').split('|')\n\n adjM = ast.literal_eval(args_list[0])\n samples = ast.literal_eval(args_list[1])\n return adjM, samples",
"def Args(pyparseSymbol):\n\treturn pyparseSymbol.setResultsName(\"args\", listAllMatches=True)",
"def argument_string(self):\n if self.templ[\"argument_list\"] is None:\n return \"\"\n return \" \".join(self.templ[\"argument_list\"])",
"def divider(self, *args, dash=\">\"):\n assert len(dash) == 1\n msg = \" \".join([str(a) for a in args])\n dashes = 100 - len(msg) - 2\n log.info(dash * 80)\n log.info(dash * 5, self.ipppssoot, msg, dash * (dashes - 6 - len(self.ipppssoot) - len(msg) - 1))",
"def __tokenize_rmode_argument_string(self, args: str) -> List[str]:\n if args == '':\n return []\n open_bracket_index = args.find('#[')\n if open_bracket_index == -1: # no open bracket found\n arguments = args.split(',')\n else:\n if open_bracket_index == 0:\n arguments = []\n else:\n arguments = args[0:open_bracket_index - 1].split(',')\n\n close_bracket_index = args.index(']')\n arguments.append(args[open_bracket_index:close_bracket_index + 1])\n\n if close_bracket_index + 2 <= len(args):\n args_after = self.__tokenize_rmode_argument_string(args[close_bracket_index + 2:])\n arguments.extend(args_after)\n return arguments",
"def _line_parser(self, line):\n line = (line.rstrip()).split(' ')\n return line",
"def test_arguments_extraction(self):\n\n for item in self.test_lines:\n physplan = TruncatedPhysicalPlan(item[\"line\"], self.ppid)\n self.assertEqual(physplan.arguments, item[\"expected_arguments\"])",
"def args():\n return []",
"def parse_execution_line_for_python_program(execution_line):\n split_line = execution_line.split(' ')\n\n execution_list = []\n for item in split_line:\n if item:\n cleaned_item = item.strip()\n execution_list.append(cleaned_item)\n\n return execution_list"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Using setStatus() to set the Stable flag should set Bridge.stable and Bridge.flags.stable to True.
|
def test_BridgeBackwardsCompatibility_setStatus_stable(self):
bridge = bridges.BridgeBackwardsCompatibility(
nickname=self.nickname,
ip=self.address,
orport=self.orPort,
fingerprint=self.fingerprint,
or_addresses={"2006:42::123F": 443, "2006:42::123E": 9001})
self.assertIsInstance(bridge, bridges.BridgeBackwardsCompatibility)
self.assertFalse(bridge.stable)
self.assertFalse(bridge.flags.stable)
bridge.setStatus(stable=True)
self.assertTrue(bridge.stable)
self.assertTrue(bridge.flags.stable)
|
[
"def test_integration_setRunningStable(self):\n bridge = bridges.Bridge(self.nickname, self.ip, self.orport,\n self.fingerprint)\n self.assertFalse(bridge.running)\n self.assertFalse(bridge.stable)\n bridge.setStatus(True, True)\n self.assertTrue(bridge.running)\n self.assertTrue(bridge.stable)",
"def test_settingStable(self):\n self.flags.stable = True\n self.assertTrue(self.flags.stable, \"The Stable flag should be True\")",
"def test_BridgeBackwardsCompatibility_setStatus_running(self):\n bridge = bridges.BridgeBackwardsCompatibility(\n nickname=self.nickname,\n ip=self.address,\n orport=\"anyport\",\n fingerprint=self.fingerprint,\n or_addresses={\"2006:42::123F\": 443, \"2006:42::123E\": 9001})\n self.assertIsInstance(bridge, bridges.BridgeBackwardsCompatibility)\n self.assertFalse(bridge.running)\n self.assertFalse(bridge.flags.running)\n\n bridge.setStatus(running=True)\n self.assertTrue(bridge.running)\n self.assertTrue(bridge.flags.running)",
"def SetStatus(self, status):\n self.status = status\n self.put()",
"def _update_charm_status(self, status: model.StatusBase):\n self.unit.status = status\n if self.unit.is_leader():\n self.app.status = status",
"def __set_status(self, status):\n self.__status = status",
"def test_update_Fast_Stable(self):\n self.flags.update([\"Fast\", \"Stable\"])\n self.assertTrue(self.flags.fast)\n self.assertTrue(self.flags.stable)",
"def UpdateCurrentPortStatusGood(self, status):\n if status:\n self.CurrentSerialPortStatus = self._GreenLightIconPath\n else:\n self.CurrentSerialPortStatus = self._RedLightIconPath\n self.update_menu_options()",
"def on_set_status(self, status, pokemon, setter, battle):",
"def advapi32_SetServiceStatus(jitter):\n ret_ad, args = jitter.func_args_stdcall([\"hServiceStatus\", \"lpServiceStatus\"])\n raise RuntimeError('API not implemented')\n jitter.func_ret_stdcall(ret_ad, ret_value)",
"def set_switch_table_OnOff(self, state='Off'):\n\n if(state == 'On'):\n self.master_payloads['BattTable_OnOff'][3] = 1\n elif(state == 'Off'):\n self.master_payloads['BattTable_OnOff'][3] = 0\n else:\n print('Error : Wrong State entered')",
"def _set_status(self, status: str = 'none'):\n if status == 'loop':\n self.color_loop = True\n else:\n self.color_loop = False\n self.alert_status = status\n if status == 'select':\n self.bright = 1.0\n elif status == 'lselect':\n self.bright = 1.0\n else:\n self.bright = 0.8",
"def set_status(self, status):\n if status in [\"w\", \"b\", \"d\", \"-\"]:\n self.status = status\n else:\n raise ValueError(\"Status of game can only be \\\"w\\\", \\\"b\\\" or \\\"d\\\", you tried to set status \"+status)",
"def statusSet(self, status, callback):\n self.statusSet(None, status, callback)",
"def serverStatusChanged(self, status, description):\n # update status bar button\n if status == 2: # receiving data\n self.statusbutton.setStyleSheet('background-color: yellow;')\n elif status == 3: # processing request (blocking)\n self.statusbutton.setStyleSheet('background-color: red;')\n elif status == 1: # listening\n self.statusbutton.setStyleSheet('')\n else: # only 0 = server is switched off\n self.statusbutton.setStyleSheet('')\n\n self.statusbutton.setChecked(bool(status))\n self.statusbutton.setToolTip(description)\n\n # update text field and toggle button in the plugin config dialog\n self.dlg.status.setText(description)\n self.dlg.toggle.setChecked(bool(status))\n self.dlg.toggle.setText('Disable API' if bool(status) else 'Enable API')",
"def set_status(self, status: CommitStatus):\n raise NotImplementedError",
"def connection_status_mutate(self):\n\n\t\t# pick a random connection and switch its connection status to false\n\t\tindex = np.random.randint(0,len(self.connections))\n\t\tself.connections[index].setStatus(not self.connections[index].getStatus())",
"def change_status_interface(self, name, status):\n\n try:\n with self.ipdb_controller.interfaces[name] as iface:\n if status == 'up':\n iface.up()\n if status == 'down':\n iface.down()\n except Exception:\n logging.error('Cannot change interface status')\n return",
"def __update_status(self, pin, new_status):\n self.pi.write(pin, new_status)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Using setStatus() to set the Running flag should set Bridge.running and Bridge.flags.running to True.
|
def test_BridgeBackwardsCompatibility_setStatus_running(self):
bridge = bridges.BridgeBackwardsCompatibility(
nickname=self.nickname,
ip=self.address,
orport="anyport",
fingerprint=self.fingerprint,
or_addresses={"2006:42::123F": 443, "2006:42::123E": 9001})
self.assertIsInstance(bridge, bridges.BridgeBackwardsCompatibility)
self.assertFalse(bridge.running)
self.assertFalse(bridge.flags.running)
bridge.setStatus(running=True)
self.assertTrue(bridge.running)
self.assertTrue(bridge.flags.running)
|
[
"def test_integration_setRunningStable(self):\n bridge = bridges.Bridge(self.nickname, self.ip, self.orport,\n self.fingerprint)\n self.assertFalse(bridge.running)\n self.assertFalse(bridge.stable)\n bridge.setStatus(True, True)\n self.assertTrue(bridge.running)\n self.assertTrue(bridge.stable)",
"def __set_status(self, status):\n self.__status = status",
"def SetStatus(self, status):\n self.status = status\n self.put()",
"def test_settingRunning(self):\n self.flags.running = True\n self.assertTrue(self.flags.running, \"The Running flag should be True\")",
"def change_status(self):\n self.completed = not self.completed",
"def set_status(self, status):\n if status in [\"w\", \"b\", \"d\", \"-\"]:\n self.status = status\n else:\n raise ValueError(\"Status of game can only be \\\"w\\\", \\\"b\\\" or \\\"d\\\", you tried to set status \"+status)",
"def status_set(self, status, message='', *, is_app=False):\n if not isinstance(is_app, bool):\n raise TypeError('is_app parameter must be boolean')\n return self._run('status-set', f'--application={is_app}', status, message)",
"def _set_status(self, status: str = 'none'):\n if status == 'loop':\n self.color_loop = True\n else:\n self.color_loop = False\n self.alert_status = status\n if status == 'select':\n self.bright = 1.0\n elif status == 'lselect':\n self.bright = 1.0\n else:\n self.bright = 0.8",
"def mark_as_running(self):\n self.status = self.STATUS_RUNNING\n self.started_running_datetime = timezone.now()\n self.clean()\n self.save()",
"def change_status_interface(self, name, status):\n\n try:\n with self.ipdb_controller.interfaces[name] as iface:\n if status == 'up':\n iface.up()\n if status == 'down':\n iface.down()\n except Exception:\n logging.error('Cannot change interface status')\n return",
"def test_BridgeBackwardsCompatibility_setStatus_stable(self):\n bridge = bridges.BridgeBackwardsCompatibility(\n nickname=self.nickname,\n ip=self.address,\n orport=self.orPort,\n fingerprint=self.fingerprint,\n or_addresses={\"2006:42::123F\": 443, \"2006:42::123E\": 9001})\n self.assertIsInstance(bridge, bridges.BridgeBackwardsCompatibility)\n self.assertFalse(bridge.stable)\n self.assertFalse(bridge.flags.stable)\n\n bridge.setStatus(stable=True)\n self.assertTrue(bridge.stable)\n self.assertTrue(bridge.flags.stable)",
"def statusSet(self, status, callback):\n self.statusSet(None, status, callback)",
"def _update_charm_status(self, status: model.StatusBase):\n self.unit.status = status\n if self.unit.is_leader():\n self.app.status = status",
"def __setup_status_pins(self):\n self.pi.set_mode(self.RUNNING_LED_PIN, pigpio.OUTPUT)\n self.pi.set_mode(self.FLASH_STATUS_PIN, pigpio.OUTPUT)\n self.pi.set_mode(self.CLEAR_MODE_STATUS_PIN, pigpio.OUTPUT)\n self.pi.set_mode(self.DECK_EMPTY_STATUS_PIN, pigpio.OUTPUT)\n self.pi.set_mode(self.FLASH_ERROR_STATUS_PIN, pigpio.OUTPUT)\n\n self.turn_status_leds_off()\n self.pi.write(self.RUNNING_LED_PIN, 1)",
"def _push_status(self):\n\n self.data['status'] = self._status\n event_manager.device_changed(self)",
"async def change_status():\n await bot.change_presence(activity=discord.Game(next(STATUS)))",
"def set_open( self, status ):\r\n self.lables[ 0 ].config( text = \"Port: \" + status )\r\n\r\n return",
"def _set_status(self, status):\n\n if status not in AnalysisTask.STATUS_LIST:\n raise ValueError(\"Invalid Status: %s\" % status)\n self.status = status",
"def setStatus(self, ifname, status):\n ifreq = (ifname + '\\0' * self.IFNAMSIZ)[:self.IFNAMSIZ]\n\n if status is \"UP\":\n flags = self.IFF_UP\n flags |= self.IFF_RUNNING\n flags |= self.IFF_BROADCAST\n flags |= self.IFF_MULTICAST\n flags &= ~self.IFF_NOARP\n flags &= ~self.IFF_PROMISC\n elif status is \"DOWN\":\n result = self._call(ifname, self.SIOCGIFFLAGS)\n flags, = struct.unpack('H', result[16:18])\n flags &= ~self.IFF_UP\n else:\n return None\n\n data = struct.pack(\"16sh\", ifreq, flags)\n result = self._ioctl(self.SIOCSIFFLAGS, data)\n return result"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Reset safelogging to its default (disabled) state, since test_Bridge_str_with_safelogging changes it.
|
def tearDown(self):
bridges.safelog.safe_logging = False
|
[
"def disable_logging():\n logging.shutdown()",
"def silence_log_messages_by_default():\n logging.basicConfig(handlers=(logging.NullHandler(),))",
"def disable():\n ActionLogger.disable()",
"def disable_scribe_logging():\n app.set_option(\"_SCRIBE_LOG_LEVEL\", LogOptions._LOG_LEVEL_NONE_KEY, force=True)",
"def clear_logging() -> None:\n logger = logging.getLogger('mltk')\n logger.propagate = True\n logger.setLevel(logging.NOTSET)\n logger.handlers.clear()",
"def reset_level():\n _StandardLogger.logger.setLevel(logging.INFO)",
"def resetToSafeState(self) -> None:\n raise NotImplementedError()",
"def test_disabled_via_env(self):\n # Disable the code that injects the pylogging_sink.\n env = dict(os.environ)\n env[\"DRAKE_PYTHON_LOGGING\"] = \"0\"\n # Configure the Python logging to not print anything.\n python_level = CRITICAL + 1\n try:\n output = subprocess.check_output(\n [\"bindings/pydrake/common/text_logging_example\",\n \"--use_nice_format=1\",\n \"--use_native_cpp_logging=0\",\n f\"--root_level={python_level}\",\n f\"--drake_level={python_level}\"],\n stderr=subprocess.STDOUT,\n encoding=\"utf8\",\n env=env)\n except subprocess.CalledProcessError as e:\n print(e.output, file=sys.stderr, flush=True)\n raise\n # The C++ logger should still have printed (INFO and higher)\n self.assertIn(\"Test Info message\", output)",
"def unset_logger():\n raise NotImplementedError('Unset logger function is not implemented yet.')",
"def Reset():\n _log_manager.Reset(sys.stdout, sys.stderr)",
"def test_Bridge_str_without_safelogging(self):\n bridges.safelog.safe_logging = False\n\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n\n identifier = str(bridge)\n self.assertEqual(identifier,\n ''.join(['$', bridge.fingerprint,\n '~', bridge.nickname]))",
"def sanitizer_log_always(msg, log_prefix=True):\n sanitizer_log(msg, 0, force=True, log_prefix=log_prefix)",
"def syslog_off(self):\n self.logger.removeHandler(self.syslog_handler)",
"def reset_level():\n ActionLogger.reset_level()",
"def test_Bridge_str_with_safelogging(self):\n bridges.safelog.safe_logging = True\n\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n\n identifier = str(bridge)\n self.assertEqual(\n identifier,\n ''.join(['$$',\n hashlib.sha1(bridge.fingerprint).hexdigest().upper(),\n '~', bridge.nickname]))",
"def disable() -> None:\n global _CHECK_ACCESS # pylint: disable=global-statement\n _CHECK_ACCESS = False",
"def disable_custom_logger(): \n logger = logging.getLogger() #create logger object\n logger.disabled = True #set logger is disable \n return logger",
"def _warn_about_disabled_safety_check(safety_check, logger):\n if not safety_check:\n logger.warning('*************************************************************************')\n logger.warning('WARNING: --no-safety-check option passed to dirbs-classify')\n logger.warning('*************************************************************************')\n logger.warning('')\n logger.warning('This should not be done in a production DIRBS deployment for the following reasons:')\n logger.warning('')\n logger.warning('1. The safety check is in place to prevent a misconfigured condition from classifying')\n logger.warning(' a large proportion of the subscriber population. In the worst case, a list could')\n logger.warning(' then be generated and a large number of subscribers would be notified or blacklisted.')\n logger.warning(' Even in the best case where the error is found before list generation, this generates')\n logger.warning(' bloat in the classification_state table that must be pruned to avoid a performance impact')\n logger.warning(' in other parts of DIRBS Core.')\n logger.warning('')",
"def set_debug_off():\n global _debug\n _debug = False\n print 'Debug off.'"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
The del method for the nickname property should reset the nickname to None.
|
def test_Bridge_nickname_del(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.assertEqual(self.bridge.nickname, "FourfoldQuirked")
del(self.bridge.nickname)
self.assertIsNone(self.bridge.nickname)
self.assertIsNone(self.bridge._nickname)
|
[
"def delName(self, value):\n value = valueToInt(value)\n if value < 0 or value > len(self._nameList) - 1:\n self.log.warning('invalid value: {0}'.format(value))\n return\n self._nameList.pop(value)",
"def deluser(self, nick):\n if self.users.has_key(nick):\n self.list.remove_items([self.users[nick]])\n if self.name == nick:\n self.name = None\n self.targetlabel.set_text(\"No target\")\n del self.users[nick]\n # select first entry we find (?)\n if len(self.users) > 0:\n self.setselection(self.users.keys()[0])",
"async def remove(self, ctx: commands.Context):\n if len(mentions := ctx.message.mentions) == 0:\n await ctx.send(\"Please supply a user to remove a nickname override from.\")\n return\n\n self.check_guild_data_exists(ctx.guild.id)\n current_guild_overrides = self.guild_data[ctx.guild.id][\"overrides\"]\n override_user = mentions[0]\n\n if override_user.id not in current_guild_overrides:\n await ctx.send(f\"{override_user.name}'s nickname is not overridden.\")\n return\n\n # Remove the override and write changes\n del current_guild_overrides[override_user.id]\n await override_user.edit(nick=None)\n self.write_guild_data_changes()\n\n await ctx.send(f\"{override_user.name}'s nickname is no longer overridden.\")",
"def delete(self):\n assert self.name, \"Device name must be defined for deletion.\"\n device_username = 'device_' + self.name\n super().delete()\n self.c8y.users.delete(device_username)",
"def removeMember(*args, **kwargs):\n \n pass",
"def delete_user(self, key: str):\n\t\tip_to_del = self.__user[key]\n\t\tdel self.__user[key]\n\t\tdel self.__ip[ip_to_del]",
"def _deleteofflinenick( self, mess, args):\n\t\tif mess.getType() != 'groupchat':\n\t\t\treturn 'This feature is only available in group chats'\n\t\tif len(args) == 0:\n\t\t\treturn 'Please supply a nick as argument'\n\t\tnick = args.split(' ')[0]\n\t\tuser = mess.getFrom()\n\t\tchannel = user.getStripped()\n\t\tuniqueKey = nick+' '+channel\n\t\tif uniqueKey in self.offlineUsers:\n\t\t\tif self.offlineUsers[uniqueKey] == user:\n\t\t\t\tdel self.offlineUsers[uniqueKey]\n\t\t\t\tself.config.remove_option('offlinemessages', uniqueKey)\n\t\t\t\tself.save_config()\n\t\t\t\tself.log.info('Deleted %s from the offline message system' % nick)\n\t\t\t\treturn 'Deleted %s from the offline message system' % nick\n\t\telse:\n\t\t\treturn 'Nick %s not found' % nick",
"def del_label_name(self, label_name):\n try:\n if self.label_name == label_name:\n self.label_name = \"\"\n self.label_id = False\n except AttributeError:\n # No label name? ok.. carry on\n pass",
"def remove_member(self, username):\n warnings.warn(\n 'This is no longer supported by the GitHub API, see '\n 'https://developer.github.com/changes/2014-09-23-one-more-week'\n '-before-the-add-team-member-api-breaking-change/',\n DeprecationWarning)\n url = self._build_url('members', username, base_url=self._api)\n return self._boolean(self._delete(url), 204, 404)",
"def remove_owner(self, nick=\"*\", ident=\"*\", host=\"*\"):\n return self._del_rank(User(nick, ident, host), rank=self.OWNER)",
"def clear_username_entry(event):\n username_entry.delete(0, END)",
"def delete_user(self, instance, name):\r\n return instance.delete_user(name)",
"def delete(self):\n Multipass.delete(self.name)",
"def clean(self, nick=None):\n if nick:\n message = \"{} has left the channel\\n\".format(nick)\n self.lock.acquire()\n try:\n userDel(nick)\n finally:\n self.lock.release()\n sendAll(self.nick, message)\n self.conn.close()",
"def remove(self, propertyName: unicode) -> None:\n ...",
"def delete_user(self):\n \n User.user_list.remove(self)",
"def set_nick_name(self, val):\n self.nick = val",
"def delete_member():\n db.delete(doc)",
"def remove_username(self, ip, port, username, timeout=5):\n api_url = \"http://{}/api/{}\".format(ip+':'+port, username)\n url = api_url + \"/config/whitelist/{}\".format(username)\n self.logger.info(\"remove_username: url = {}\".format(url))\n res = qhue.qhue.Resource(url, timeout)\n\n devicetype = \"SmartHomeNG#{}\".format(getfqdn())\n\n # raises QhueException if something went wrong\n try:\n response = res(devicetype=devicetype, http_method=\"delete\")\n except Exception as e:\n self.logger.error(\"remove_username: res-delete exception {}\".format(e))\n response = [{'error': str(e)}]\n\n if not('success' in response[0]):\n self.logger.warning(\"remove_username: Error removing username/application key {} - {}\".format(username, response[0]))\n else:\n self.logger.info(\"remove_username: username/application key {} removed\".format(username))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
The del method for the orPort property should reset the orPort to None.
|
def test_Bridge_orport_del(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.assertEqual(self.bridge.orPort, 36489)
del(self.bridge.orPort)
self.assertIsNone(self.bridge.orPort)
self.assertIsNone(self.bridge._orPort)
|
[
"def delete_port(port):\n return IMPL.delete_port(port)",
"def _port_unbound_update(self, context, port):\n LOG.info(\"Port becoming unbound: destroy.\")\n self.transport.endpoint_deleted(port)",
"def test_delete_logical_router_port(self):\n lrport = self._mocked_lrport()\n\n uuid = test_constants_v3.FAKE_ROUTER_PORT['id']\n lrport.delete(uuid)\n test_client.assert_json_call(\n 'delete', lrport,\n 'https://1.2.3.4/api/v1/logical-router-ports/%s' % uuid)",
"def __del__(self):\n self.usb_port.close()",
"def free_port(self, port):\n \n self.logging.debug(\"Freeing port %d\" %(port))\n try:\n os.remove(self.get_file_name(port))\n except OSError:\n pass",
"def delete_port(self, port_name):\n\n try:\n port_num = self.get_port_number(port_name)\n\n mask = np.arange(len(self.ports)) != port_num\n s = self.s[mask]\n self.s = s[:,mask]\n self.z0 = self.z0[mask]\n\n ports = list(self.ports)\n ports.remove(port_name)\n self.ports = tuple(ports)\n\n except:\n print(\"The \\\"{}\\\" port does not exist.\".format(port_name))",
"def delete(self):\n for port in self.ports:\n port.delete()\n self.ports = []\n self.subnet.close()",
"def del_port(self, user, port):\n try:\n self.c.execute(sql['del_port'], (user, port))\n self.c.execute(sql['del_stocks'], (user, port))\n self.db.commit()\n except sqlite3.Error as e:\n self.db.rollback()\n flash(\"Can't delete port because \"+str(e))",
"def detach_port(self, instance_obj, network_obj):\n raise NotImplementedError()",
"def without_port(self):\n return self.__replace(port=None)",
"def delete_port(self, port_name):\n command = ovs_vsctl.VSCtlCommand(\n 'del-port', (self.br_name, port_name), '--if-exists')\n self.run_command([command])",
"def __del__(self):\n if DEBUG:\n print(\"start deconstrutor\")\n if self.is_start is True:\n self.stop()\n if self.is_lsl is True:\n self.stop_lsl()\n if self.serial_port.isOpen():\n self.serial_port.close()",
"def _destroy_tunnel(self, reg_req_packet):\n\n tid = self._get_binding_id(reg_req_packet.home_address)\n _destroy_interface(name=\"mip\"+str(tid))",
"def test_model_delete_port_rollback(self):\n with self._create_port_res() as res:\n\n # After port is created, we should have one binding for this\n # vlan/nexus switch.\n port = self.deserialize(self.fmt, res)\n start_rows = nexus_db_v2.get_nexusvlan_binding(self.vlan_start,\n self.switch_ip)\n self.assertEqual(len(start_rows), 1)\n\n # Inject an exception in the OVS plugin delete_port\n # processing, and attempt a port deletion.\n inserted_exc = q_exc.Conflict\n expected_http = base.FAULT_MAP[inserted_exc].code\n with mock.patch.object(l3_db.L3_NAT_db_mixin,\n 'disassociate_floatingips',\n side_effect=inserted_exc):\n self._delete('ports', port['port']['id'],\n expected_code=expected_http)\n\n # Confirm that the Cisco model plugin has restored\n # the nexus configuration for this port after deletion failure.\n end_rows = nexus_db_v2.get_nexusvlan_binding(self.vlan_start,\n self.switch_ip)\n self.assertEqual(start_rows, end_rows)",
"def delete(self):\n self.tap.delete()\n self.port.close()",
"def leave_update(self):\n self.port = None\n self.flag = False\n self.ttl = 0",
"def transport_jlink_reset(self, port=0):",
"def remove_in_port(self, id):\n\n del self.in_ports[id]",
"def _destroy_binding(self, reg_req_packet):\n\n if reg_req_packet.home_address in self._binding_table:\n self._destroy_tunnel(reg_req_packet)\n self._binding_table_lock.acquire()\n logging.debug(\"Destroing [home address=%s, CoA=%s] binding.\",\n reg_req_packet.home_address,\n reg_req_packet.care_of_address)\n del self._binding_table[reg_req_packet.home_address]\n self._binding_table_lock.release()\n else:\n logging.warning(\"Unable to find binding for home address=%s.\",\n home_address)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
The str() method of a Bridge should return an identifier for the Bridge, which should be different if safelogging is enabled.
|
def test_Bridge_str_without_safelogging(self):
bridges.safelog.safe_logging = False
bridge = bridges.Bridge()
bridge.updateFromNetworkStatus(self.networkstatus)
identifier = str(bridge)
self.assertEqual(identifier,
''.join(['$', bridge.fingerprint,
'~', bridge.nickname]))
|
[
"def test_Bridge_str_with_safelogging(self):\n bridges.safelog.safe_logging = True\n\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n\n identifier = str(bridge)\n self.assertEqual(\n identifier,\n ''.join(['$$',\n hashlib.sha1(bridge.fingerprint).hexdigest().upper(),\n '~', bridge.nickname]))",
"def test_Bridge_str_without_fingerprint(self):\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n del(bridge.fingerprint)\n\n identifier = str(bridge)\n self.assertEqual(identifier,\n ''.join(['$', '0'*40,\n '~', bridge.nickname]))",
"def test_Bridge_str_without_fingerprint_without_nickname(self):\n bridge = bridges.Bridge()\n identifier = str(bridge)\n self.assertEqual(identifier, ''.join(['$', '0'*40, '~', 'Unnamed']))",
"def __str__(self):\n \n return '{0}!{1}@{2}'.format(self.nickname, self.ident, self.host)",
"def identifier(self) -> str:\n return str(self)",
"def get_uuid(self): # real signature unknown; restored from __doc__\n return \"\"",
"def _get_bridge_name(self):\n command = ovs_vsctl.VSCtlCommand(\n 'find',\n ('Bridge',\n 'datapath_id=%s' % dpid_lib.dpid_to_str(self.datapath_id)))\n self.run_command([command])\n if not isinstance(command.result, list) or len(command.result) != 1:\n raise OVSBridgeNotFound(\n datapath_id=dpid_lib.dpid_to_str(self.datapath_id))\n return command.result[0].name",
"def bridgeName(self):\n ret = libvirtmod.virNetworkGetBridgeName(self._o)\n if ret is None: raise libvirtError ('virNetworkGetBridgeName() failed', net=self)\n return ret",
"def unique_id(self):\n return \"{}.{}\".format(self.__class__, self.wink.deviceId())",
"def __str__(self):\n return str(getattr(self, self.primary))",
"def __str__(self):\n \n return '{0}'.format(self.servername)",
"def _get_identifier_from_sdc(self, sdc_infos: Dict[str, Any]) -> str:",
"def __str__(G):\n return G.value",
"def instance_identifier(self) -> str:\n ...",
"def unique_id(self) -> str:\n return f\"{self._device.uuid}-LIGHT\"",
"def get_log_id(self) -> str:\n pass",
"def serialized_id(self) -> str:",
"def idString(self):\n return \"%s_%s\" % (self.equipment,self.booking_id)",
"def test_integration_getID(self):\n bridge = bridges.Bridge(self.nickname, self.ip, self.orport,\n self.fingerprint)\n self.assertEqual(self.id_digest, bridge.getID())"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
The str() method of a Bridge should return an identifier for the Bridge, which should be different if safelogging is enabled.
|
def test_Bridge_str_with_safelogging(self):
bridges.safelog.safe_logging = True
bridge = bridges.Bridge()
bridge.updateFromNetworkStatus(self.networkstatus)
identifier = str(bridge)
self.assertEqual(
identifier,
''.join(['$$',
hashlib.sha1(bridge.fingerprint).hexdigest().upper(),
'~', bridge.nickname]))
|
[
"def test_Bridge_str_without_safelogging(self):\n bridges.safelog.safe_logging = False\n\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n\n identifier = str(bridge)\n self.assertEqual(identifier,\n ''.join(['$', bridge.fingerprint,\n '~', bridge.nickname]))",
"def test_Bridge_str_without_fingerprint(self):\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n del(bridge.fingerprint)\n\n identifier = str(bridge)\n self.assertEqual(identifier,\n ''.join(['$', '0'*40,\n '~', bridge.nickname]))",
"def test_Bridge_str_without_fingerprint_without_nickname(self):\n bridge = bridges.Bridge()\n identifier = str(bridge)\n self.assertEqual(identifier, ''.join(['$', '0'*40, '~', 'Unnamed']))",
"def __str__(self):\n \n return '{0}!{1}@{2}'.format(self.nickname, self.ident, self.host)",
"def identifier(self) -> str:\n return str(self)",
"def get_uuid(self): # real signature unknown; restored from __doc__\n return \"\"",
"def _get_bridge_name(self):\n command = ovs_vsctl.VSCtlCommand(\n 'find',\n ('Bridge',\n 'datapath_id=%s' % dpid_lib.dpid_to_str(self.datapath_id)))\n self.run_command([command])\n if not isinstance(command.result, list) or len(command.result) != 1:\n raise OVSBridgeNotFound(\n datapath_id=dpid_lib.dpid_to_str(self.datapath_id))\n return command.result[0].name",
"def bridgeName(self):\n ret = libvirtmod.virNetworkGetBridgeName(self._o)\n if ret is None: raise libvirtError ('virNetworkGetBridgeName() failed', net=self)\n return ret",
"def unique_id(self):\n return \"{}.{}\".format(self.__class__, self.wink.deviceId())",
"def __str__(self):\n return str(getattr(self, self.primary))",
"def __str__(self):\n \n return '{0}'.format(self.servername)",
"def _get_identifier_from_sdc(self, sdc_infos: Dict[str, Any]) -> str:",
"def __str__(G):\n return G.value",
"def instance_identifier(self) -> str:\n ...",
"def unique_id(self) -> str:\n return f\"{self._device.uuid}-LIGHT\"",
"def get_log_id(self) -> str:\n pass",
"def serialized_id(self) -> str:",
"def idString(self):\n return \"%s_%s\" % (self.equipment,self.booking_id)",
"def test_integration_getID(self):\n bridge = bridges.Bridge(self.nickname, self.ip, self.orport,\n self.fingerprint)\n self.assertEqual(self.id_digest, bridge.getID())"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
The str() method of a Bridge should return an identifier for the Bridge, which should be different if the fingerprint is unknown.
|
def test_Bridge_str_without_fingerprint(self):
bridge = bridges.Bridge()
bridge.updateFromNetworkStatus(self.networkstatus)
del(bridge.fingerprint)
identifier = str(bridge)
self.assertEqual(identifier,
''.join(['$', '0'*40,
'~', bridge.nickname]))
|
[
"def test_Bridge_str_without_fingerprint_without_nickname(self):\n bridge = bridges.Bridge()\n identifier = str(bridge)\n self.assertEqual(identifier, ''.join(['$', '0'*40, '~', 'Unnamed']))",
"def test_Bridge_str_without_safelogging(self):\n bridges.safelog.safe_logging = False\n\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n\n identifier = str(bridge)\n self.assertEqual(identifier,\n ''.join(['$', bridge.fingerprint,\n '~', bridge.nickname]))",
"def test_Bridge_str_with_safelogging(self):\n bridges.safelog.safe_logging = True\n\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n\n identifier = str(bridge)\n self.assertEqual(\n identifier,\n ''.join(['$$',\n hashlib.sha1(bridge.fingerprint).hexdigest().upper(),\n '~', bridge.nickname]))",
"def __str__(self):\n \n return '{0}!{1}@{2}'.format(self.nickname, self.ident, self.host)",
"def test_integration_getID(self):\n bridge = bridges.Bridge(self.nickname, self.ip, self.orport,\n self.fingerprint)\n self.assertEqual(self.id_digest, bridge.getID())",
"def get_uuid(self): # real signature unknown; restored from __doc__\n return \"\"",
"def guid_as_string(self, guid_blob):\n blob = \"%s\" % guid_blob\n stops = [4, 2, 2, 2, 6]\n index = 0\n res = \"\"\n x = 0\n while x < len(stops):\n tmp = \"\"\n y = 0\n while y < stops[x]:\n c = hex(ord(blob[index])).replace(\"0x\", \"\")\n c = [None, \"0\" + c, c][len(c)]\n if 2 * index < len(blob):\n tmp = c + tmp\n else:\n tmp += c\n index += 1\n y += 1\n res += tmp + \" \"\n x += 1\n assert index == len(blob)\n return res.strip().replace(\" \", \"-\")",
"def str_to_uuid(value):\n ...",
"def _get_identifier_from_sdc(self, sdc_infos: Dict[str, Any]) -> str:",
"def protocol_str(protocol: Protocol) -> str:\n return {\n Protocol.MRP: \"MRP\",\n Protocol.DMAP: \"DMAP\",\n Protocol.AirPlay: \"AirPlay\",\n Protocol.Companion: \"Companion\",\n Protocol.RAOP: \"RAOP\",\n }.get(protocol, \"Unknown\")",
"def test_for_str_with_id(self):\n Base._Base__nb_objects = 0\n r1 = Rectangle(4, 6, 2, 1, 12)\n self.assertEqual(r1.__str__(), \"[Rectangle] (12) 2/1 - 4/6\")",
"def serialized_id(self) -> str:",
"def _allocate_bridge_name(self):\n name = None\n while name is None:\n name = self._random_name()\n if name in self.iso_bridge_mappings.values() or name in self.ovs_bridge_mappings.values():\n name = None\n if self._if_bridge_exists(name):\n name = None\n return name",
"def unique_id(self):\n return \"{}.{}\".format(self.__class__, self.wink.deviceId())",
"def bridgeName(self):\n ret = libvirtmod.virNetworkGetBridgeName(self._o)\n if ret is None: raise libvirtError ('virNetworkGetBridgeName() failed', net=self)\n return ret",
"def get_hub_identity():\n # TODO - implement reading from beaglebone IDPROM\n # For now this is a test data (same as backend/models/ExampleData.SQL)\n return 'I8FJPAN11X', 'AUTH_KEY IS EMPTY'",
"def fingerprint(self):\n return \"%s|%s\" % (self._debit_note_uid, self._uid)",
"def test_str(self):\n uri = 'netstring://192.168.0.1:9999'\n version = get_version()\n last_seen = 123\n contact = PeerNode(PUBLIC_KEY, version, uri, last_seen)\n expected = str({\n 'network_id': contact.network_id,\n 'public_key': contact.public_key,\n 'version': contact.version,\n 'uri': contact.uri,\n 'last_seen': contact.last_seen,\n 'failed_rpc': contact.failed_RPCs\n })\n self.assertEqual(expected, str(contact))",
"def _get_bridge_name(self):\n command = ovs_vsctl.VSCtlCommand(\n 'find',\n ('Bridge',\n 'datapath_id=%s' % dpid_lib.dpid_to_str(self.datapath_id)))\n self.run_command([command])\n if not isinstance(command.result, list) or len(command.result) != 1:\n raise OVSBridgeNotFound(\n datapath_id=dpid_lib.dpid_to_str(self.datapath_id))\n return command.result[0].name"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling str(Bridge) on a Bridge whose fingerprint and nickname were not set should return a Bridge identifier string where the fingerprint is all 0's and the nickname is "Unnamed".
|
def test_Bridge_str_without_fingerprint_without_nickname(self):
bridge = bridges.Bridge()
identifier = str(bridge)
self.assertEqual(identifier, ''.join(['$', '0'*40, '~', 'Unnamed']))
|
[
"def test_Bridge_str_without_fingerprint(self):\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n del(bridge.fingerprint)\n\n identifier = str(bridge)\n self.assertEqual(identifier,\n ''.join(['$', '0'*40,\n '~', bridge.nickname]))",
"def test_Bridge_str_without_safelogging(self):\n bridges.safelog.safe_logging = False\n\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n\n identifier = str(bridge)\n self.assertEqual(identifier,\n ''.join(['$', bridge.fingerprint,\n '~', bridge.nickname]))",
"def test_Bridge_str_with_safelogging(self):\n bridges.safelog.safe_logging = True\n\n bridge = bridges.Bridge()\n bridge.updateFromNetworkStatus(self.networkstatus)\n\n identifier = str(bridge)\n self.assertEqual(\n identifier,\n ''.join(['$$',\n hashlib.sha1(bridge.fingerprint).hexdigest().upper(),\n '~', bridge.nickname]))",
"def __str__(self):\n \n return '{0}!{1}@{2}'.format(self.nickname, self.ident, self.host)",
"def bridgeName(self):\n ret = libvirtmod.virNetworkGetBridgeName(self._o)\n if ret is None: raise libvirtError ('virNetworkGetBridgeName() failed', net=self)\n return ret",
"def _allocate_bridge_name(self):\n name = None\n while name is None:\n name = self._random_name()\n if name in self.iso_bridge_mappings.values() or name in self.ovs_bridge_mappings.values():\n name = None\n if self._if_bridge_exists(name):\n name = None\n return name",
"def _get_bridge_name(self):\n command = ovs_vsctl.VSCtlCommand(\n 'find',\n ('Bridge',\n 'datapath_id=%s' % dpid_lib.dpid_to_str(self.datapath_id)))\n self.run_command([command])\n if not isinstance(command.result, list) or len(command.result) != 1:\n raise OVSBridgeNotFound(\n datapath_id=dpid_lib.dpid_to_str(self.datapath_id))\n return command.result[0].name",
"def protocol_str(protocol: Protocol) -> str:\n return {\n Protocol.MRP: \"MRP\",\n Protocol.DMAP: \"DMAP\",\n Protocol.AirPlay: \"AirPlay\",\n Protocol.Companion: \"Companion\",\n Protocol.RAOP: \"RAOP\",\n }.get(protocol, \"Unknown\")",
"def get_uuid(self): # real signature unknown; restored from __doc__\n return \"\"",
"def interconnect_to_name(interconnect):\n return \"_\".join(sorted(check_and_format_interconnect(interconnect)))",
"def _get_identifier_from_sdc(self, sdc_infos: Dict[str, Any]) -> str:",
"def make_unique_nickname(nickname):\n if User.query.filter_by(nickname=nickname).first() is None:\n return nickname\n version = 2\n while True:\n new_nickname = nickname = str(version)\n if User.query.filter_by(nickname=new_nickname).first() is None:\n break\n version += 1\n return new_nickname",
"def get_hub_identity():\n # TODO - implement reading from beaglebone IDPROM\n # For now this is a test data (same as backend/models/ExampleData.SQL)\n return 'I8FJPAN11X', 'AUTH_KEY IS EMPTY'",
"def researcher_id(firstname, lastname=\"\", id=\"\"):\n\treturn str(lastname)\n\treturn \"::\".join([str(firstname), str(lastname), str(id)])",
"def GetDefaultBoardID(self):\n #TODO\n return \"beaglebone\"",
"def device_name(id):\n return device_id_to_name_mapping[id] if id in device_id_to_name_mapping else 'Unknown Device'",
"def guid_as_string(self, guid_blob):\n blob = \"%s\" % guid_blob\n stops = [4, 2, 2, 2, 6]\n index = 0\n res = \"\"\n x = 0\n while x < len(stops):\n tmp = \"\"\n y = 0\n while y < stops[x]:\n c = hex(ord(blob[index])).replace(\"0x\", \"\")\n c = [None, \"0\" + c, c][len(c)]\n if 2 * index < len(blob):\n tmp = c + tmp\n else:\n tmp += c\n index += 1\n y += 1\n res += tmp + \" \"\n x += 1\n assert index == len(blob)\n return res.strip().replace(\" \", \"-\")",
"def tag_id_str(tag_id):\n if tag_id == OTA_UPG_TAG_ID_UPG_IMG:\n id_str = \"Upgrade Image\"\n elif tag_id == OTA_UPG_TAG_ID_ECDSA_SIG:\n id_str = \"ECDSA Signature\"\n elif tag_id == OTA_UPG_TAG_ID_ECDSA_SIGN_CERT:\n id_str = \"ECDSA Signing Certificate\"\n elif (tag_id >= 0x0003) and (tag_id <= 0xefff):\n id_str = \"Reserved\"\n else:\n id_str = \"Manufacturer Specific\"\n return id_str",
"def hexify(rank):\n return hex(rank_table[rank])[-1].upper()"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Bridge._constructBridgeLine() called with an IPv6 address should wrap the IPv6 address in '[]' in the returned bridge line.
|
def test_Bridge_constructBridgeLine_IPv6(self):
bridge = bridges.Bridge()
addrport = (u'6bf3:806b:78cd::4ced:cfad:dad4', 36488, 6)
bridgeline = bridge._constructBridgeLine(addrport,
includeFingerprint=False,
bridgePrefix=True)
self.assertEqual(bridgeline, 'Bridge [6bf3:806b:78cd::4ced:cfad:dad4]:36488')
|
[
"def test_Bridge_getBridgeLine_IPv6(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n request.withIPv6()\n line = self.bridge.getBridgeLine(request)\n\n self.assertIsNotNone(line)\n self.assertTrue(\n line.startswith('[6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488'))\n self.assertNotIn('179.178.155.140:36493', line)\n self.assertIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def test_Bridge_getBridgeLine_IPv6_no_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n request.withIPv6()\n line = self.bridge.getBridgeLine(request, includeFingerprint=False)\n\n self.assertIsNotNone(line)\n self.assertTrue(\n line.startswith('[6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488'))\n self.assertNotIn('179.178.155.140:36493', line)\n self.assertNotIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def test_Bridge_getBridgeLine_obfs3_IPv6(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n request.withIPv6()\n request.withPluggableTransportType('obfs3')\n\n self.assertRaises(bridges.PluggableTransportUnavailable,\n self.bridge.getBridgeLine,\n request)",
"def ipv6(self, ipv6: SubUnnumberedTop):\n\n self._ipv6 = ipv6",
"def create_ipv6(self):\n int1 = Interface('eth1/1')\n int2 = Interface('eth1/2')\n pc1 = PortChannel('211')\n ipv6 = IPV6()\n ipv6.add_interface_address(int1, '2004:0DB8::1/10', link_local='FE83::1')\n ipv6.add_interface_address(int2, '2104:0DB8::1/11')\n ipv6.add_interface_address(int2, '2002:0DB8::1/12')\n ipv6.add_interface_address(pc1, '2022:0DB8::1/13')\n return ipv6",
"def test_ipv6address_format(self):\n n = 10**5\n data = ['s', 'b', 'x', 'n', '#b', '_b', '#_x']\n a1 = '1:2:3::6'\n addr = ip.IPv6Address(a1)\n eaddr = eip.IPv6Address(a1)\n fns = addr.__format__, eaddr.__format__\n for args in data:\n generic_test(self.report_6a, fn_name(), n, fns, args)",
"def _ParseIp6Neighbors(self):\n ip6neigh = subprocess.Popen(IP6NEIGH, stdout=subprocess.PIPE)\n out, _ = ip6neigh.communicate(None)\n result = []\n\n for line in out.splitlines():\n fields = line.split()\n if len(fields) < 5:\n continue\n ip6 = tr.helpers.NormalizeIPAddr(fields[0])\n dev = fields[2]\n mac = fields[4]\n try:\n type(self)._MacValidator.Set( # pylint:disable=protected-access\n self, mac)\n except ValueError:\n continue\n active = 'REACHABLE' in line\n result.append((mac, ip6, dev, active))\n return result",
"def test_resolveOnlyIPv6(self):\n self._resolveOnlyTest([IPv6Address], AF_INET6)",
"def create_ipv6_route(self):\n # Create Interfaces\n int1 = Interface('eth1/1')\n int2 = Interface('eth1/2')\n # Create a L3 port channel\n pc1 = PortChannel('211', layer='Layer3')\n route = IPV6Route('2000:0::0/12')\n route.add_next_hop('234E:44::1', int1, vrf='default', track_id='0',\n tag='1')\n route.add_next_hop('234E:44::2', int2)\n route.add_next_hop('234E:44::4', pc1, vrf='default', track_id='1',\n tag='2')\n return route",
"def test_ipv6interface_format(self):\n n = 10**5\n data = ['s', 'b', 'x', 'n', '#b', '_b', '#_x']\n a1 = '1:2:3::6'\n addr = ip.IPv6Interface(a1)\n eaddr = eip.IPv6Interface(a1)\n fns = addr.__format__, eaddr.__format__\n for args in data:\n generic_test(self.report_6i, fn_name(), n, fns, args)",
"def ipv6_address(self):\n ret = self._get_attr(\"IPV6Address\")\n return ret",
"def _ipv6_to_num(self) -> int:\n\n halves = self._address.split('::')\n segments = []\n\n if len(halves) == 2:\n # Address with zero-skip part\n left, right = map(lambda x: x.split(':'), halves)\n total_length = len(left) + len(right)\n\n if halves[0]:\n segments.extend(left)\n else:\n segments.append('0000')\n \n segments.extend(['0000' for _ in range(IPV6_MAX_SEGMENT_COUNT - total_length)])\n\n if halves[1]:\n segments.extend(right)\n else:\n segments.append('0000')\n\n elif len(halves) == 1:\n # Full address\n segments.extend(halves[0].split(':'))\n\n else:\n raise ValueError(\"Invalid IPv6 address format; only one zero-skip allowed\")\n \n try:\n processed_segments: List[int] = list(map(lambda num: int(num, 16) if num != '' else 0, segments[::-1]))\n except ValueError:\n raise ValueError(f\"Invalid IPv6 address format; address contains invalid characters\")\n\n segment_count = len(processed_segments)\n if segment_count > IPV6_MAX_SEGMENT_COUNT:\n raise ValueError(f\"Invalid IPv6 address format; too many segments ({segment_count} > {IPV6_MAX_SEGMENT_COUNT})\")\n\n highest = max(processed_segments)\n if highest > IPV6_MAX_SEGMENT_VALUE:\n raise ValueError(f\"Invalid IPv6 address format; segment max value passed ({highest} > {IPV6_MAX_SEGMENT_VALUE})\")\n\n lowest = min(processed_segments)\n if 0 > lowest:\n raise ValueError(f\"Invalid IPv6 address format; segment min value passed ({lowest} < 0)\")\n \n total = 0\n for idx, num in enumerate(processed_segments):\n total += num * 2**(idx * 16)\n\n return total",
"def _GetHostsFromIp6Neigh(self, hosts):\n for (mac, ip6, iface, active) in self._ParseIp6Neighbors():\n ip6 = tr.helpers.NormalizeIPAddr(ip6)\n mac = mac.lower()\n host = hosts.get(mac, dict())\n self._AddLayer1Interface(host, iface)\n host['PhysAddress'] = mac\n if active:\n # Only store if known active. We don't want to override\n # Active=True from some other source.\n host['Active'] = active\n self._AddIpToHostDict(entry=host, ip=ip6)\n hosts[mac] = host",
"def validate_ipv6_address(value: Any):\n try:\n ipaddress.IPv6Address(value)\n except ValueError:\n raise ValidationError(f\"'{value}' is not a valid IPv6 address.\")",
"def ipv6_address(self):\n try:\n return ipaddress.ip_interface(self._ipv6['address'])\n except (KeyError, ValueError, TypeError):\n return None",
"def calculate_link_ip_addresses_ipv6(address_str, cidr_str, supernet_str, ip_count):\n if '::' in address_str:\n add_str = ''\n count = (address_str.count(':'))\n if address_str[-1] == ':':\n count -= 2\n while count < 7:\n add_str += ':0'\n count += 1\n else:\n while count < 8:\n add_str += ':0'\n count += 1\n add_str += ':'\n\n address_str = address_str.replace('::', add_str)\n\n address = address_str.split(':')\n cidr = int(cidr_str)\n supernet = int(supernet_str)\n\n mask_cidr = find_mask_v6(cidr)\n network = find_network_v6(address, mask_cidr)\n broadcast = find_broadcast_v6(network, cidr)\n\n mask_supernet = find_mask_v6(supernet)\n network_hex = []\n for i in range(8):\n network_hex.append(hex(network[i])[2:])\n network_supernet = find_network_v6(address, mask_supernet)\n broadcast_supernet = find_broadcast_v6(network_supernet, supernet)\n\n initial_ip = network_supernet[7]\n ip_checking = list(network_supernet)\n while not(initial_ip >= broadcast[7] and ip_checking[:7] == broadcast[:7]):\n initial_ip = network_supernet[7]\n ips_list = []\n no_of_ip = 0\n while initial_ip <= broadcast_supernet[7] and no_of_ip < ip_count:\n ip = list(network_supernet)\n ip[7] = initial_ip\n\n for i in range(0, 8):\n ip[i] = hex(ip[i])[2:]\n\n ip = ':'.join(ip)\n ip += '/' + str(supernet)\n ips_list.append(ip)\n initial_ip += 1\n no_of_ip += 1\n ip_checking = list(broadcast_supernet)\n initial_ip = broadcast_supernet[7]\n network_supernet = find_network_supernet_v6(broadcast_supernet, cidr, supernet)\n broadcast_supernet = find_broadcast_v6(network_supernet, supernet)\n\n yield ips_list",
"def test_hostBracketIPv6AddressLiteral(self):\n uri = client.URI.fromBytes(b\"http://[::1]:80/index.html\")\n\n self.assertEqual(uri.host, b\"::1\")\n self.assertEqual(uri.netloc, b\"[::1]:80\")\n self.assertEqual(uri.toBytes(), b\"http://[::1]:80/index.html\")",
"def assign_ipv6_address_on_creation(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"assign_ipv6_address_on_creation\")",
"def build_ipv6(anm):\n import netaddr\n import autonetkit.plugins.ipv6 as ipv6\n\n # uses the nodes and edges from ipv4\n\n g_ipv6 = anm.add_overlay('ipv6')\n g_ip = anm['ip']\n g_in = anm['input']\n g_ipv6.add_nodes_from(g_ip, retain=['label', 'asn', 'broadcast_domain']) # retain if collision domain or not\n g_ipv6.add_edges_from(g_ip.edges())\n\n #TODO: tidy up naming consitency of secondary_loopback_block and vrf_loopback_block\n (infra_block, loopback_block, secondary_loopback_block) = \\\n extract_ipv6_blocks(anm)\n\n block_message = \"IPv6 allocations: Infrastructure: %s, Loopback: %s\" % (infra_block, loopback_block)\n if any(i for n in g_ip.nodes() for i in\n n.loopback_interfaces if not i.is_loopback_zero):\n block_message += \" Secondary Loopbacks: %s\" % secondary_loopback_block\n log.info(block_message)\n\n # TODO: replace this with direct allocation to interfaces in ip alloc plugin\n allocated = sorted([n for n in g_ip if n['input'].loopback_v6])\n if len(allocated) == len(g_ip.l3devices()):\n # all allocated\n #TODO: need to infer subnetomanual_ipv6_loopback_allocation\n log.info(\"Using user-specified IPv6 loopback addresses\")\n manual_ipv6_loopback_allocation(anm)\n else:\n if len(allocated):\n log.warning(\"Using automatic IPv6 loopback allocation. IPv6 loopback addresses specified on nodes %s will be ignored.\" % allocated)\n else:\n log.info(\"Automatically assigning IPv6 loopback addresses\")\n\n ipv6.allocate_loopbacks(g_ipv6, loopback_block)\n\n l3_devices = [d for d in g_in if d.device_type in ('router', 'server')]\n\n manual_alloc_devices = set()\n for device in l3_devices:\n physical_interfaces = list(device.physical_interfaces)\n allocated = list(interface.ipv6_address for interface in physical_interfaces if interface.is_bound)\n if all(interface.ipv6_address for interface in\n physical_interfaces if interface.is_bound):\n manual_alloc_devices.add(device) # add as a manual allocated device\n\n if manual_alloc_devices == set(l3_devices):\n log.info(\"Using user-specified IPv6 infrastructure addresses\")\n manual_alloc_ipv6_infrastructure = True\n else:\n manual_alloc_ipv6_infrastructure = False\n # warn if any set\n allocated = []\n unallocated = []\n for node in l3_devices:\n allocated += sorted([i for i in node.physical_interfaces if i.is_bound and i.ipv6_address])\n unallocated += sorted([i for i in node.physical_interfaces if i.is_bound and not i.ipv6_address])\n\n #TODO: what if IP is set but not a prefix?\n if len(allocated):\n #TODO: if set is > 50% of nodes then list those that are NOT set\n log.warning(\"Using automatic IPv6 interface allocation. 
IPv6 interface addresses specified on interfaces %s will be ignored.\" % allocated)\n else:\n log.info(\"Automatically assigning IPv6 infrastructure addresses\")\n\n if manual_alloc_ipv6_infrastructure:\n manual_ipv6_infrastructure_allocation(anm)\n else:\n ipv6.allocate_infra(g_ipv6, infra_block)\n #TODO: see if this is still needed or if can allocate direct from the ipv6 allocation plugin\n for node in g_ipv6.l3devices():\n for interface in node:\n edges = list(interface.edges())\n if len(edges):\n edge = edges[0] # first (only) edge\n interface.ip_address = edge.ip # TODO: make this consistent\n interface.subnet = edge.dst.subnet # from collision domain\n\n ipv6.allocate_vrf_loopbacks(g_ipv6, secondary_loopback_block)\n\n for node in g_ipv6.routers():\n #TODO: test this code\n node.loopback_zero.ip_address = node.loopback\n node.loopback_zero.subnet = netaddr.IPNetwork(\"%s/32\" % node.loopback)\n for interface in node.loopback_interfaces:\n if not interface.is_loopback_zero:\n interface.ip_address = interface.loopback #TODO: fix this inconsistency elsewhere"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Bridge.allVanillaAddresses should be idempotent, i.e. calling allVanillaAddresses should not affect the results of subsequent calls.
|
def test_Bridge_allVanillaAddresses_idempotency_self(self):
self.bridge.address = '1.1.1.1'
self.bridge.orPort = 443
self.assertItemsEqual(self.bridge.allVanillaAddresses,
[(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])
self.assertItemsEqual(self.bridge.allVanillaAddresses,
[(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])
self.assertItemsEqual(self.bridge.allVanillaAddresses,
[(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])
|
[
"def test_Bridge_allVanillaAddresses_reentrancy_address(self):\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(None, 443, 4)])\n self.bridge.address = '1.1.1.1'\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_allVanillaAddresses_idempotency_others(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])",
"def test_Bridge_allVanillaAddresses_reentrancy_orAddresses(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.bridge.orAddresses.append(\n (ipaddr.IPv4Address('2.2.2.2'), 4443, 4))\n self.assertItemsEqual(self.bridge.orAddresses,\n [(ipaddr.IPv4Address('2.2.2.2'), 4443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('2.2.2.2'), 4443, 4),\n (ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_allVanillaAddresses_reentrancy_orPort(self):\n self.bridge.address = '1.1.1.1'\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), None, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])",
"def sendall_recipient_addresses() -> List[str]:\n return [to_address(0x1234)]",
"def all_addresses() -> list[str]:\n\n servers = graphql(\"\"\"\n query {\n queryGameServer {\n address\n }\n }\n \"\"\"\n )['queryGameServer']\n\n return [server['address'] for server in servers]",
"def nextAddresses(self) -> List[ghidra.program.model.address.Address]:\n ...",
"def getVolatileAddresses(self) -> ghidra.program.model.address.AddressSetView:\n ...",
"def z_listaddresses(self):\n return self._call('z_listaddresses')",
"def _getAddresses(self, *ues):\n return [self._s1_util.get_ip(ue.ue_id) for ue in ues]",
"def get_addresses_voters(self) -> Set[bytes]:\n return self.voters_addresses",
"def GetAddrList(self):\n return list(range(ADDR_START, ADDR_END + 1))",
"def get_all_appliance_locations(self) -> list:\n return self._get(\"/gms/grNode\")",
"def addresses(self) -> typing.List[str]:\n return typing.cast(\n typing.List[str],\n self._properties.get(\"addresses\"),\n )",
"def test_list_address_neighbors(self):\n test_service.list_address_neighbors(self)\n\n query_string = [('direction', 'out'),\n ('',''),\n ('',''),\n ('','')]\n headers = { \n 'Accept': 'application/json',\n }\n response = self.client.open(\n '/{currency}/addresses/{address}/neighbors'.format(currency='btc', address='1Archive1n2C579dMsAu3iC6tWzuQJz8dN'),\n method='GET',\n headers=headers,\n query_string=query_string)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))",
"def test_Bridge_getBridgeLine_no_vanilla_addresses(self):\n request = BridgeRequestBase()\n request.isValid(True)\n\n self.assertIsNone(self.bridge.getBridgeLine(request))",
"def have_any_address():\n if config.addresses():\n return True\n return False",
"def get_enabled_addresses():\n addresses = [addr for addr in config.addresses()\n if config.getboolean(str(addr), 'enabled')]\n return addresses",
"def all_fixed_ips(self) -> Sequence[str]:\n return pulumi.get(self, \"all_fixed_ips\")"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Bridge.allVanillaAddresses should be idempotent, i.e. calling allVanillaAddresses should not affect any of the Bridge's other attributes (such as Bridge.orAddresses).
|
def test_Bridge_allVanillaAddresses_idempotency_others(self):
self.bridge.address = '1.1.1.1'
self.bridge.orPort = 443
self.assertItemsEqual(self.bridge.orAddresses, [])
self.assertItemsEqual(self.bridge.allVanillaAddresses,
[(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])
self.assertItemsEqual(self.bridge.orAddresses, [])
self.assertItemsEqual(self.bridge.allVanillaAddresses,
[(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])
self.assertItemsEqual(self.bridge.allVanillaAddresses,
[(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])
self.assertItemsEqual(self.bridge.allVanillaAddresses,
[(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])
self.assertItemsEqual(self.bridge.orAddresses, [])
|
[
"def test_Bridge_allVanillaAddresses_idempotency_self(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_allVanillaAddresses_reentrancy_address(self):\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(None, 443, 4)])\n self.bridge.address = '1.1.1.1'\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_allVanillaAddresses_reentrancy_orAddresses(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.bridge.orAddresses.append(\n (ipaddr.IPv4Address('2.2.2.2'), 4443, 4))\n self.assertItemsEqual(self.bridge.orAddresses,\n [(ipaddr.IPv4Address('2.2.2.2'), 4443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('2.2.2.2'), 4443, 4),\n (ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_allVanillaAddresses_reentrancy_orPort(self):\n self.bridge.address = '1.1.1.1'\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), None, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])",
"def sendall_recipient_addresses() -> List[str]:\n return [to_address(0x1234)]",
"def getVolatileAddresses(self) -> ghidra.program.model.address.AddressSetView:\n ...",
"def nextAddresses(self) -> List[ghidra.program.model.address.Address]:\n ...",
"def all_addresses() -> list[str]:\n\n servers = graphql(\"\"\"\n query {\n queryGameServer {\n address\n }\n }\n \"\"\"\n )['queryGameServer']\n\n return [server['address'] for server in servers]",
"def _getAddresses(self, *ues):\n return [self._s1_util.get_ip(ue.ue_id) for ue in ues]",
"def test_Bridge_updateORAddresses_valid_and_invalid(self):\n orAddresses = [\n (u'1.1.1.1', 1111, False), # valid\n (u'127.0.0.1', 2222, False), # invalid IPv4 loopback\n (u'FE80::1234', 3333, True)] # invalid IPv6 link local\n bridge = bridges.Bridge()\n bridge._updateORAddresses(orAddresses)\n\n self.assertEqual(len(bridge.orAddresses), 1)\n addr, port, version = bridge.orAddresses[0]\n self.assertEqual(addr, ipaddr.IPAddress('1.1.1.1'))\n self.assertEqual(port, 1111)\n self.assertEqual(version, 4)",
"def get_addresses_voters(self) -> Set[bytes]:\n return self.voters_addresses",
"def test_Bridge_getBridgeLine_no_vanilla_addresses(self):\n request = BridgeRequestBase()\n request.isValid(True)\n\n self.assertIsNone(self.bridge.getBridgeLine(request))",
"def z_listaddresses(self):\n return self._call('z_listaddresses')",
"def addresses(self) -> typing.List[str]:\n return typing.cast(\n typing.List[str],\n self._properties.get(\"addresses\"),\n )",
"def set_proxy_arp_for_all(value):\n\n link_list = _ipr.get_links()\n for link in link_list:\n if link.get_attr(\"IFLA_OPERSTATE\") == \"UP\":\n set_proxy_arp(link.get_attr(\"IFLA_IFNAME\"), value)",
"def have_any_address():\n if config.addresses():\n return True\n return False",
"def GetAddrList(self):\n return list(range(ADDR_START, ADDR_END + 1))",
"def test_list_address_neighbors(self):\n test_service.list_address_neighbors(self)\n\n query_string = [('direction', 'out'),\n ('',''),\n ('',''),\n ('','')]\n headers = { \n 'Accept': 'application/json',\n }\n response = self.client.open(\n '/{currency}/addresses/{address}/neighbors'.format(currency='btc', address='1Archive1n2C579dMsAu3iC6tWzuQJz8dN'),\n method='GET',\n headers=headers,\n query_string=query_string)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))",
"def addresses(self):\n addrs = {u.recieved_raw['ingress-address']\n for u in self.all_joined_units}\n return list(sorted(addrs))"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling Bridge.allVanillaAddresses before Bridge.orPort is set should return ``None`` for the port value, and after Bridge.orPort is set, it should return the orPort.
|
def test_Bridge_allVanillaAddresses_reentrancy_orPort(self):
self.bridge.address = '1.1.1.1'
self.assertItemsEqual(self.bridge.orAddresses, [])
self.assertItemsEqual(self.bridge.allVanillaAddresses,
[(ipaddr.IPv4Address('1.1.1.1'), None, 4)])
self.assertItemsEqual(self.bridge.orAddresses, [])
self.bridge.orPort = 443
self.assertItemsEqual(self.bridge.allVanillaAddresses,
[(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])
self.assertItemsEqual(self.bridge.orAddresses, [])
|
[
"def test_Bridge_allVanillaAddresses_reentrancy_orAddresses(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.bridge.orAddresses.append(\n (ipaddr.IPv4Address('2.2.2.2'), 4443, 4))\n self.assertItemsEqual(self.bridge.orAddresses,\n [(ipaddr.IPv4Address('2.2.2.2'), 4443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('2.2.2.2'), 4443, 4),\n (ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_allVanillaAddresses_reentrancy_address(self):\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(None, 443, 4)])\n self.bridge.address = '1.1.1.1'\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_allVanillaAddresses_idempotency_others(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])",
"def test_Bridge_allVanillaAddresses_idempotency_self(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_updateORAddresses_valid_and_invalid(self):\n orAddresses = [\n (u'1.1.1.1', 1111, False), # valid\n (u'127.0.0.1', 2222, False), # invalid IPv4 loopback\n (u'FE80::1234', 3333, True)] # invalid IPv6 link local\n bridge = bridges.Bridge()\n bridge._updateORAddresses(orAddresses)\n\n self.assertEqual(len(bridge.orAddresses), 1)\n addr, port, version = bridge.orAddresses[0]\n self.assertEqual(addr, ipaddr.IPAddress('1.1.1.1'))\n self.assertEqual(port, 1111)\n self.assertEqual(version, 4)",
"def GetDestHomeAlternatives(wl):\n return [node for node in api.GetNaplesHostnames() if node != wl.node_name] if wl else []",
"def without_port(self):\n return self.__replace(port=None)",
"def ports(self):\n return self.__ports[:]",
"def r_port_obj(self, port):\r\n for switch_obj in self.r_switch_objects():\r\n port_obj = switch_obj.r_port_obj(port)\r\n if port_obj is not None:\r\n return port_obj\r\n return None",
"def _get_router_port_networks():\n global _L3Rebalance\n\n router = _L3Rebalance.get_current_working_router()\n\n if router is not None:\n nfvi.nfvi_get_router_ports(router, _get_router_ports_callback(router))\n elif _L3Rebalance.router_ports_done():\n # we're done getting routers port networks,\n # advance to next state\n _L3Rebalance.set_state(\n L3_REBALANCE_STATE.GET_PHYSICAL_NETWORK_FROM_NETWORKS)\n else:\n # We get here if there are no routers on this agent,\n # Stay in same state, but advance to next agent\n _L3Rebalance.state_machine_in_progress = False",
"def fetch_nat_interface_port_ol(self, device, **kwargs):\n all_entry_list = self._common_get_processing(device=device, cmd_keyword=\"nat-interface-port-ol\", kwargs=kwargs)\n device.log(message=\"{} return value:\\n{}\".format(self.tool.get_current_function_name(), self.tool.pprint(all_entry_list)))\n return all_entry_list",
"def get_ports( self, preserve_hierarchy=False ):\n if not preserve_hierarchy:\n return self._inports + self._outports\n else:\n return self._hports",
"def addresses(self):\n ports = self.list_ports()\n return [port.address for port in ports]",
"def broadcast(self):\n if self._module.version == 4 and (self._module.width - self._prefixlen) <= 1:\n return None\n else:\n return IPAddress(self._value | self._hostmask_int, self._module.version)",
"def _check_that_node_from_body(node):\n n_ports = len(node.out_edges())\n internal_port_in_out_ports = ['internal_port_id' in edge for edge in node.out_edges()]\n return np.all(internal_port_in_out_ports) and n_ports",
"def test_Bridge_updateFromNetworkStatus_IPv4_ORAddress(self):\n # Add an additional IPv4 ORAddress:\n ns = BRIDGE_NETWORKSTATUS.replace(\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488',\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488\\na 123.34.56.78:36488')\n self._writeNetworkstatus(ns)\n self._parseAllDescriptorFiles()\n\n self.assertWarns(\n FutureWarning,\n \"Got IPv4 address in 'a'/'or-address' line! Descriptor format may have changed!\",\n bridges.__file__, # filename\n self.bridge.updateFromNetworkStatus,\n self.networkstatus)\n\n self.assertEqual(self.bridge.fingerprint,\n '2C3225C4805331025E211F4B6E5BF45C333FDD2C')\n self.assertIn((ipaddr.IPAddress('123.34.56.78'), 36488, 4),\n self.bridge.allVanillaAddresses)",
"def get_ports(self):\n raise NotImplementedError() #pragma: no cover",
"def test_Bridge_getBridgeLine_no_vanilla_addresses(self):\n request = BridgeRequestBase()\n request.isValid(True)\n\n self.assertIsNone(self.bridge.getBridgeLine(request))",
"def test_Bridge_orport_del(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.assertEqual(self.bridge.orPort, 36489)\n\n del(self.bridge.orPort)\n self.assertIsNone(self.bridge.orPort)\n self.assertIsNone(self.bridge._orPort)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling Bridge.allVanillaAddresses before Bridge.address is set should return ``None`` for the address value, and after Bridge.address is set, it should return the address.
|
def test_Bridge_allVanillaAddresses_reentrancy_address(self):
self.bridge.orPort = 443
self.assertItemsEqual(self.bridge.allVanillaAddresses,
[(None, 443, 4)])
self.bridge.address = '1.1.1.1'
self.assertItemsEqual(self.bridge.allVanillaAddresses,
[(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])
|
[
"def test_Bridge_allVanillaAddresses_idempotency_self(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_allVanillaAddresses_idempotency_others(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])",
"def test_Bridge_allVanillaAddresses_reentrancy_orAddresses(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.bridge.orAddresses.append(\n (ipaddr.IPv4Address('2.2.2.2'), 4443, 4))\n self.assertItemsEqual(self.bridge.orAddresses,\n [(ipaddr.IPv4Address('2.2.2.2'), 4443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('2.2.2.2'), 4443, 4),\n (ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_allVanillaAddresses_reentrancy_orPort(self):\n self.bridge.address = '1.1.1.1'\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), None, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])",
"def test_Bridge_getBridgeLine_no_vanilla_addresses(self):\n request = BridgeRequestBase()\n request.isValid(True)\n\n self.assertIsNone(self.bridge.getBridgeLine(request))",
"def getVolatileAddresses(self) -> ghidra.program.model.address.AddressSetView:\n ...",
"def have_any_address():\n if config.addresses():\n return True\n return False",
"def z_listaddresses(self):\n return self._call('z_listaddresses')",
"def fetch_nat_nopat_address(self, device, **kwargs):\n all_entry_list = self._common_get_processing(device=device, cmd_keyword=\"nat-nopat-address\", kwargs=kwargs)\n device.log(message=\"{} return value:\\n{}\".format(self.tool.get_current_function_name(), self.tool.pprint(all_entry_list)))\n return all_entry_list",
"def use_for_local_addresses(self):\n if \"useForLocalAddresses\" in self._prop_dict:\n return self._prop_dict[\"useForLocalAddresses\"]\n else:\n return None",
"def nextAddresses(self) -> List[ghidra.program.model.address.Address]:\n ...",
"def test_case06(self):\n response = appd.get_node_address()\n self.assertEqual(response, None)",
"def jsonrpc_address_unused(self, account_id=None, wallet_id=None):\n wallet = self.wallet_manager.get_wallet_or_default(wallet_id)\n return wallet.get_account_or_default(account_id).receiving.get_or_create_usable_address()",
"def address_obj(self):\n if not self._address_obj:\n self.address()\n return self._address_obj",
"def sendall_recipient_addresses() -> List[str]:\n return [to_address(0x1234)]",
"def all_addresses() -> list[str]:\n\n servers = graphql(\"\"\"\n query {\n queryGameServer {\n address\n }\n }\n \"\"\"\n )['queryGameServer']\n\n return [server['address'] for server in servers]",
"def _getAddresses(self, *ues):\n return [self._s1_util.get_ip(ue.ue_id) for ue in ues]",
"def addresses(self) -> typing.List[str]:\n return typing.cast(\n typing.List[str],\n self._properties.get(\"addresses\"),\n )",
"def get_address(self):\n return self.account.address"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling Bridge.allVanillaAddresses before Bridge.orAddresses is set should return only the Bridge's address and orPort.
|
def test_Bridge_allVanillaAddresses_reentrancy_orAddresses(self):
self.bridge.address = '1.1.1.1'
self.bridge.orPort = 443
self.assertItemsEqual(self.bridge.allVanillaAddresses,
[(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])
self.assertItemsEqual(self.bridge.orAddresses, [])
self.bridge.orAddresses.append(
(ipaddr.IPv4Address('2.2.2.2'), 4443, 4))
self.assertItemsEqual(self.bridge.orAddresses,
[(ipaddr.IPv4Address('2.2.2.2'), 4443, 4)])
self.assertItemsEqual(self.bridge.allVanillaAddresses,
[(ipaddr.IPv4Address('2.2.2.2'), 4443, 4),
(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])
|
[
"def test_Bridge_allVanillaAddresses_reentrancy_orPort(self):\n self.bridge.address = '1.1.1.1'\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), None, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])",
"def test_Bridge_allVanillaAddresses_idempotency_others(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])",
"def test_Bridge_allVanillaAddresses_reentrancy_address(self):\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(None, 443, 4)])\n self.bridge.address = '1.1.1.1'\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_allVanillaAddresses_idempotency_self(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_updateORAddresses_valid_and_invalid(self):\n orAddresses = [\n (u'1.1.1.1', 1111, False), # valid\n (u'127.0.0.1', 2222, False), # invalid IPv4 loopback\n (u'FE80::1234', 3333, True)] # invalid IPv6 link local\n bridge = bridges.Bridge()\n bridge._updateORAddresses(orAddresses)\n\n self.assertEqual(len(bridge.orAddresses), 1)\n addr, port, version = bridge.orAddresses[0]\n self.assertEqual(addr, ipaddr.IPAddress('1.1.1.1'))\n self.assertEqual(port, 1111)\n self.assertEqual(version, 4)",
"def test_Bridge_getBridgeLine_no_vanilla_addresses(self):\n request = BridgeRequestBase()\n request.isValid(True)\n\n self.assertIsNone(self.bridge.getBridgeLine(request))",
"def _getAddresses(self, *ues):\n return [self._s1_util.get_ip(ue.ue_id) for ue in ues]",
"def test_Bridge_updateFromNetworkStatus_IPv4_ORAddress(self):\n # Add an additional IPv4 ORAddress:\n ns = BRIDGE_NETWORKSTATUS.replace(\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488',\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488\\na 123.34.56.78:36488')\n self._writeNetworkstatus(ns)\n self._parseAllDescriptorFiles()\n\n self.assertWarns(\n FutureWarning,\n \"Got IPv4 address in 'a'/'or-address' line! Descriptor format may have changed!\",\n bridges.__file__, # filename\n self.bridge.updateFromNetworkStatus,\n self.networkstatus)\n\n self.assertEqual(self.bridge.fingerprint,\n '2C3225C4805331025E211F4B6E5BF45C333FDD2C')\n self.assertIn((ipaddr.IPAddress('123.34.56.78'), 36488, 4),\n self.bridge.allVanillaAddresses)",
"def sendall_recipient_addresses() -> List[str]:\n return [to_address(0x1234)]",
"def getVolatileAddresses(self) -> ghidra.program.model.address.AddressSetView:\n ...",
"def nextAddresses(self) -> List[ghidra.program.model.address.Address]:\n ...",
"def _get_receivers_addresses(self):\n receivers = socket.getaddrinfo(\n self.config['server'], self.config['port'],\n proto=socket.IPPROTO_TCP\n )\n # Only keep the actual address\n addresses = [r[4][0] for r in receivers]\n try:\n addresses.remove(self._last_good_receiver_address)\n addresses = [self._last_good_receiver_address] + addresses\n except ValueError:\n pass\n return addresses",
"def get_remote_destination_addresses(self, *, Range=None, filter=None, fields=None, **kwargs):\n headers = kwargs.get('headers', {}).update({'Allow-Hidden': True})\n function_endpoint = urljoin(self._baseurl, 'remote_destination_addresses')\n return self._call('GET', function_endpoint, headers=headers, **kwargs)",
"def GetAddrList(self):\n return list(range(ADDR_START, ADDR_END + 1))",
"def _GetHostsFromBridges(self, hosts):\n for brname in self.bridges:\n try:\n for (mac, iface) in self._GetHostsInBridge(brname):\n mac = mac.lower()\n host = hosts.get(mac, dict())\n self._AddLayer1Interface(host, iface)\n host['PhysAddress'] = mac\n host['Active'] = True\n hosts[mac] = host\n except (OSError, IOError):\n print '_GetHostsFromBridges unable to process %s' % brname",
"def GetDestHomeAlternatives(wl):\n return [node for node in api.GetNaplesHostnames() if node != wl.node_name] if wl else []",
"def get_all_bds_oper_data(node):\n\n status_code, resp = HcUtil.\\\n get_honeycomb_data(node, \"oper_bridge_domains\")\n if status_code != HTTPCodes.OK:\n raise HoneycombError(\n \"Not possible to get operational information about the \"\n \"bridge domains. Status code: {0}.\".format(status_code))\n try:\n return resp[\"bridge-domains-state\"][\"bridge-domain\"]\n\n except (KeyError, TypeError):\n return []",
"def inet_visible_ip(self):\n def handle(results):\n ips = [result[1][0] for result in results if result[0]]\n logger.debug(\"other nodes think our ip is %s\", ips)\n return ips\n\n ds = []\n for neighbor in self.bootstrappable_neighbors():\n ds.append(self.protocol.stun(neighbor))\n future_list(ds, handle)",
"def z_listaddresses(self):\n return self._call('z_listaddresses')"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Bridge._updateORAddresses() called with a mixture of valid and invalid ORAddress tuples should only retain the valid ones.
|
def test_Bridge_updateORAddresses_valid_and_invalid(self):
orAddresses = [
(u'1.1.1.1', 1111, False), # valid
(u'127.0.0.1', 2222, False), # invalid IPv4 loopback
(u'FE80::1234', 3333, True)] # invalid IPv6 link local
bridge = bridges.Bridge()
bridge._updateORAddresses(orAddresses)
self.assertEqual(len(bridge.orAddresses), 1)
addr, port, version = bridge.orAddresses[0]
self.assertEqual(addr, ipaddr.IPAddress('1.1.1.1'))
self.assertEqual(port, 1111)
self.assertEqual(version, 4)
|
[
"def test_Bridge_updateFromNetworkStatus_IPv4_ORAddress(self):\n # Add an additional IPv4 ORAddress:\n ns = BRIDGE_NETWORKSTATUS.replace(\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488',\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488\\na 123.34.56.78:36488')\n self._writeNetworkstatus(ns)\n self._parseAllDescriptorFiles()\n\n self.assertWarns(\n FutureWarning,\n \"Got IPv4 address in 'a'/'or-address' line! Descriptor format may have changed!\",\n bridges.__file__, # filename\n self.bridge.updateFromNetworkStatus,\n self.networkstatus)\n\n self.assertEqual(self.bridge.fingerprint,\n '2C3225C4805331025E211F4B6E5BF45C333FDD2C')\n self.assertIn((ipaddr.IPAddress('123.34.56.78'), 36488, 4),\n self.bridge.allVanillaAddresses)",
"def test_Bridge_allVanillaAddresses_reentrancy_orAddresses(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.bridge.orAddresses.append(\n (ipaddr.IPv4Address('2.2.2.2'), 4443, 4))\n self.assertItemsEqual(self.bridge.orAddresses,\n [(ipaddr.IPv4Address('2.2.2.2'), 4443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('2.2.2.2'), 4443, 4),\n (ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_Bridge_allVanillaAddresses_reentrancy_orPort(self):\n self.bridge.address = '1.1.1.1'\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), None, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])",
"def test_Bridge_allVanillaAddresses_idempotency_others(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.orAddresses, [])",
"def _update_addresses(device, address_data, is_management=False):\n ipaddress_ids = []\n for ip in address_data:\n try:\n ipaddress = IPAddress.objects.get(address=ip)\n except IPAddress.DoesNotExist:\n ipaddress = IPAddress(address=ip)\n ipaddress.device = device\n ipaddress.is_management = is_management\n ipaddress.save(update_last_seen=False)\n ipaddress_ids.append(ipaddress.id)\n # Disconnect the rest of addresses from this device\n for ipaddress in IPAddress.objects.filter(\n device=device,\n is_management=is_management,\n ).exclude(id__in=ipaddress_ids):\n ipaddress.device = None\n ipaddress.save(update_last_seen=False)",
"def test_ip_addresses_partial_update(self):\n pass",
"def validate_address_ranges(self):\n if not (self.source.data or self.dest.data):\n whole_world_member = whole_world_range(self.net_ranges, self.zero_address)\n if not whole_world_member:\n self.source.errors.append(\"Source or dest must be in organization range : {}.\".format(self.net_ranges))\n self.dest.errors.append(\"Source or dest must be in organization range : {}.\".format(self.net_ranges))\n return False\n else:\n source_in_range = network_in_range(self.source.data, self.source_mask.data, self.net_ranges)\n dest_in_range = network_in_range(self.dest.data, self.dest_mask.data, self.net_ranges)\n if not (source_in_range or dest_in_range):\n self.source.errors.append(\"Source or dest must be in organization range : {}.\".format(self.net_ranges))\n self.dest.errors.append(\"Source or dest must be in organization range : {}.\".format(self.net_ranges))\n return False\n\n return True",
"def test_Bridge_allVanillaAddresses_reentrancy_address(self):\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(None, 443, 4)])\n self.bridge.address = '1.1.1.1'\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def update_address(self, address_details):\n pass",
"def test_Bridge_allVanillaAddresses_idempotency_self(self):\n self.bridge.address = '1.1.1.1'\n self.bridge.orPort = 443\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])\n self.assertItemsEqual(self.bridge.allVanillaAddresses,\n [(ipaddr.IPv4Address('1.1.1.1'), 443, 4)])",
"def test_ip_addresses_update(self):\n pass",
"def process_addresses(\n self, safe_addresses: List[str]\n ) -> Optional[Tuple[List[Any], bool]]:\n assert safe_addresses, \"Safe addresses cannot be empty!\"\n assert all(\n [Web3.isChecksumAddress(safe_address) for safe_address in safe_addresses]\n ), (\"A safe address has invalid checksum: %s\" % safe_addresses)\n\n parameters = self.get_block_numbers_for_search(safe_addresses)\n if parameters is None:\n return\n from_block_number, to_block_number = parameters\n updated = to_block_number == (\n self.ethereum_client.current_block_number - self.confirmations\n )\n tx_hashes = self.find_relevant_tx_hashes(\n safe_addresses, from_block_number, to_block_number\n )\n processed_objects = [self.process_tx_hash(tx_hash) for tx_hash in tx_hashes]\n flatten_processed_objects = [\n item for sublist in processed_objects for item in sublist\n ]\n\n self.update_safe_tx_status(safe_addresses, to_block_number)\n return flatten_processed_objects, updated",
"def fix_consolidated_modify(self, ixf_member_data):\n\n for other in self.pending_save:\n if other.asn == ixf_member_data.asn:\n if (\n other.init_ipaddr4\n and other.init_ipaddr4 == ixf_member_data.init_ipaddr4\n ) or (\n other.init_ipaddr6\n and other.init_ipaddr6 == ixf_member_data.init_ipaddr6\n ):\n if not other.modify_speed:\n other.speed = ixf_member_data.speed\n\n if not other.modify_is_rs_peer:\n other.is_rs_peer = ixf_member_data.is_rs_peer\n\n break",
"def clean_up_email_addresses(sender, instance, created, **kwargs):\n if instance.verified:\n others = EmailAddress.objects.filter(email__iexact=instance.email, verified=False)\n for o in others:\n u = o.user\n o.delete()\n if u.emailaddress_set.count() == 0:\n try:\n u.delete()\n except:\n # sometimes the db can generate a ForeignKey error on the delete...\n # so do this as a fallback\n u.is_active = False\n u.save()\n u = instance.user\n if u.is_bulk:\n u.is_bulk = False\n u.save()",
"def test_view_can_update_a_physical_address(self):\n\n self.create_org()\n self.create_location()\n\n new_data = {\n \"postal_code\": \"60603\"\n }\n # create address\n self.client().post('/api/organizations/1/locations/1/addresses/',\n data=self.address_data)\n # update the address\n res = self.client().put('/api/organizations/1/locations/1/addresses/1',\n data=new_data)\n self.assertEqual(res.status_code, 200)\n self.assertIn(\"60603\", str(res.data))",
"def or_types(self, new_or_types):\n self._or_types = list()\n if new_or_types is not None:\n for (base, decs) in new_or_types:\n adjusted_decs = _remove_blanks_repeats(decs, ['', base])\n self._or_types.append((base, adjusted_decs))",
"def test_invalid_change_add_debtor_address():\n statement = copy.deepcopy(CHANGE_STATEMENT)\n del statement['baseDebtor']\n del statement['addDebtors'][0]['address']\n del statement['addDebtors'][0]['partyId']\n\n is_valid, errors = validate(statement, 'changeStatement', 'ppr')\n\n if errors:\n for err in errors:\n print(err.message)\n print(errors)\n\n assert not is_valid",
"def update_or_create(\n self, location, contact_addresses, with_status=False, overwrite_existing=False, **kw\n ):\n updated, created = False, False\n location_ref = location_helper(location)\n if location_ref in self:\n for loc in self:\n if loc.location_ref == location_ref:\n if overwrite_existing:\n loc[\"addresses\"][:] = contact_addresses\n updated = True\n else:\n for ca in contact_addresses:\n if ca not in loc.addresses:\n loc[\"addresses\"].append(ca)\n updated = True\n else:\n self.data.setdefault(\"multi_contact_addresses\", []).append(\n dict(addresses=contact_addresses, location_ref=location_ref)\n )\n created = True\n\n if updated or created:\n self.update()\n if with_status:\n return self, updated, created\n return self",
"def __save_each_address_tuple(address_values, user, person=None, organization=None):\n ADDRESS1 = 0\n ADDRESS2 = 1\n CITY = 2\n STATE = 3\n ZIP_CODE = 4\n COUNTRY = 5\n ADDRESS_ID = 6\n\n for values in address_values:\n address=Address.objects.get(id=values[ADDRESS_ID]) if values[ADDRESS_ID] else Address()\n address.address1=values[ADDRESS1]\n address.address2=values[ADDRESS2]\n address.city=values[CITY]\n address.state=values[STATE]\n address.zip_code=values[ZIP_CODE]\n address.country=values[COUNTRY]\n address.person=person\n address.organization=organization\n\n address.save(user)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling updateFromNetworkStatus() with a descriptor which has an IPv4 address as an additional ORAddress should result in a FutureWarning before continuing parsing.
|
def test_Bridge_updateFromNetworkStatus_IPv4_ORAddress(self):
# Add an additional IPv4 ORAddress:
ns = BRIDGE_NETWORKSTATUS.replace(
'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488',
'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488\na 123.34.56.78:36488')
self._writeNetworkstatus(ns)
self._parseAllDescriptorFiles()
self.assertWarns(
FutureWarning,
"Got IPv4 address in 'a'/'or-address' line! Descriptor format may have changed!",
bridges.__file__, # filename
self.bridge.updateFromNetworkStatus,
self.networkstatus)
self.assertEqual(self.bridge.fingerprint,
'2C3225C4805331025E211F4B6E5BF45C333FDD2C')
self.assertIn((ipaddr.IPAddress('123.34.56.78'), 36488, 4),
self.bridge.allVanillaAddresses)
|
[
"def test_Bridge_updateORAddresses_valid_and_invalid(self):\n orAddresses = [\n (u'1.1.1.1', 1111, False), # valid\n (u'127.0.0.1', 2222, False), # invalid IPv4 loopback\n (u'FE80::1234', 3333, True)] # invalid IPv6 link local\n bridge = bridges.Bridge()\n bridge._updateORAddresses(orAddresses)\n\n self.assertEqual(len(bridge.orAddresses), 1)\n addr, port, version = bridge.orAddresses[0]\n self.assertEqual(addr, ipaddr.IPAddress('1.1.1.1'))\n self.assertEqual(port, 1111)\n self.assertEqual(version, 4)",
"def test_Bridge_updateFromExtraInfoDescriptor_pt_changed_port(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 4)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'obfs4':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 36493)\n\n # Change the port of obfs4 transport in the extrainfo descriptor:\n transportline = self.extrainfo.transport['obfs4']\n self.extrainfo.transport['obfs4'] = (transportline[0],\n 31337,\n transportline[2])\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'obfs4':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 31337)",
"def test_Bridge_updateFromExtraInfoDescriptor_pt_died(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 4)\n\n # Remove the obfs3 transport from the extrainfo descriptor:\n self.extrainfo.transport.pop('obfs3')\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 3)\n\n for pt in self.bridge.transports:\n self.failIfEqual(pt.methodname, 'obfs3')",
"def test_Bridge_updateFromServerDescriptor_ignoreNetworkstatus_no_networkstatus(self):\n self.bridge.updateFromServerDescriptor(self.serverdescriptor,\n ignoreNetworkstatus=True)\n self.assertIsNone(self.bridge.descriptors['networkstatus'])\n self.assertIsNotNone(self.bridge.descriptors['server'])",
"def test_Bridge_updateFromServerDescriptor_no_networkstatus(self):\n self.assertRaises(bridges.ServerDescriptorWithoutNetworkstatus,\n self.bridge.updateFromServerDescriptor,\n self.serverdescriptor)",
"def update_address(self, address_details):\n pass",
"def test_ip_addresses_partial_update(self):\n pass",
"def test_resolveOnlyIPv4(self):\n self._resolveOnlyTest([IPv4Address], AF_INET)",
"def update_node_status(self, address, status):\n\n query = f\"\"\"UPDATE public.nodes SET\n status = {status},\n verified_at = CURRENT_TIMESTAMP \n where address = '{address}';\"\"\"\n self.cursor.execute(query)\n self.chunks_db_connection.commit()",
"def update_device_status(task: Task) -> Result:\n\n if not config.netbox[\"status_update\"]:\n logger.debug(f\"{task.host.name} | status_update disabled skipping\")\n return Result(host=task.host, result=False)\n\n if not task.host.data[\"obj\"].remote:\n logger.debug(f\"{task.host.name} | remote not present skipping\")\n return Result(host=task.host, result=False)\n\n new_status = None\n prev_status = task.host.data[\"obj\"].remote.status.value\n\n if task.host.data[\"status\"] == \"fail-ip\":\n new_status = config.netbox[\"status_on_unreachable\"]\n\n elif \"fail\" in task.host.data[\"status\"]:\n new_status = config.netbox[\"status_on_fail\"]\n\n else:\n new_status = config.netbox[\"status_on_pass\"]\n\n if new_status not in (None, prev_status):\n\n task.host.data[\"obj\"].remote.update(data={\"status\": new_status})\n logger.info(\n f\"{task.host.name} | Updated status on netbox {prev_status} > {new_status}\"\n )\n return Result(host=task.host, result=True)\n\n logger.debug(f\"{task.host.name} | no status update required\")\n\n return Result(host=task.host, result=False)",
"def validate_ipv4(self, addr):\n try:\n socket.inet_aton(addr)\n except socket.error:\n # Not legal\n self.logger.error('Invalid IPv4 address recieved: %s', addr)\n else:\n # legal\n return addr",
"def rel_ipv4_address(self, rel_ipv4_address):\n\n self._rel_ipv4_address = rel_ipv4_address",
"def test_ip_addresses_update(self):\n pass",
"def test_update_from_address_router_config(self):\n\n resp = yield self.post('/channels/', {\n 'type': 'telnet',\n 'config': {\n 'twisted_endpoint': 'tcp:0',\n }\n })\n channel_id = (yield resp.json())['result']['id']\n\n old_config = self.create_router_config(\n label='old', type='from_address',\n config={'channel': channel_id})\n resp = yield self.post('/routers/', old_config)\n router_id = (yield resp.json())['result']['id']\n\n update = {'config': {'channel': channel_id}}\n new_config = deepcopy(old_config)\n new_config.update(update)\n resp = yield self.patch_request(\n '/routers/{}'.format(router_id), new_config)\n\n yield self.assert_response(\n resp, http.OK, 'router updated', new_config, ignore=['id'])",
"def _autoscan_address(address):\n\n try:\n ipaddress = IPAddress.objects.get(address=address)\n except IPAddress.DoesNotExist:\n ipaddress = None\n if ipaddress and ipaddress.is_buried:\n return\n pinged = ping(address)\n if pinged:\n if not ipaddress:\n ipaddress, created = IPAddress.objects.get_or_create(\n address=address,\n )\n ipaddress.http_family = get_http_family(ipaddress.address)\n (\n ipaddress.snmp_name,\n ipaddress.snmp_community,\n ipaddress.snmp_version,\n ) = get_snmp(ipaddress)\n ipaddress.dead_ping_count = 0\n ipaddress.save(update_last_seen=True)\n else:\n if ipaddress:\n ipaddress.http_family = None\n ipaddress.snmp_name = None\n ipaddress.snmp_community = None\n ipaddress.snmp_version = None\n ipaddress.dead_ping_count += 1\n ipaddress.save(update_last_seen=False)",
"def validateIPV4(address):\n try:\n socket.inet_aton(address)\n except socket.error:\n print(address + \"sorry the addr is not valid ip v4 address\")",
"def new_address(self, name, address):\n if address not in self.ip_addresses:\n if any([regex.findall(name) for regex in self.regex_set]):\n self.ip_addresses.update([address])",
"def test_Bridge_checkServerDescriptor(self):\n # Create a networkstatus descriptor without a server descriptor digest:\n filename = self._networkstatusFile + \"-missing-digest\"\n fh = open(filename, 'w')\n invalid = BRIDGE_NETWORKSTATUS.replace(\"c4EVu2rO/iD/DJYBX/Ll38DGQWI\", \"foo\")\n fh.seek(0)\n fh.write(invalid)\n fh.flush()\n fh.close()\n\n realdigest = \"738115BB6ACEFE20FF0C96015FF2E5DFC0C64162\"\n\n #networkstatus = descriptors.parseNetworkStatusFile(filename)\n #self.bridge.updateFromNetworkStatus(networkstatus[0])\n #self.assertRaises(bridges.MissingServerDescriptorDigest,\n # self.bridge.updateFromNetworkStatus,\n # networkstatus[0])",
"def fix_consolidated_modify(self, ixf_member_data):\n\n for other in self.pending_save:\n if other.asn == ixf_member_data.asn:\n if (\n other.init_ipaddr4\n and other.init_ipaddr4 == ixf_member_data.init_ipaddr4\n ) or (\n other.init_ipaddr6\n and other.init_ipaddr6 == ixf_member_data.init_ipaddr6\n ):\n if not other.modify_speed:\n other.speed = ixf_member_data.speed\n\n if not other.modify_is_rs_peer:\n other.is_rs_peer = ixf_member_data.is_rs_peer\n\n break"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Parsing a server descriptor for a bridge which wasn't included in the networkstatus document from the BridgeAuthority should raise a ServerDescriptorWithoutNetworkstatus exception.
|
def test_Bridge_updateFromServerDescriptor_no_networkstatus(self):
self.assertRaises(bridges.ServerDescriptorWithoutNetworkstatus,
self.bridge.updateFromServerDescriptor,
self.serverdescriptor)
|
[
"def test_Bridge_updateFromServerDescriptor_ignoreNetworkstatus_no_networkstatus(self):\n self.bridge.updateFromServerDescriptor(self.serverdescriptor,\n ignoreNetworkstatus=True)\n self.assertIsNone(self.bridge.descriptors['networkstatus'])\n self.assertIsNotNone(self.bridge.descriptors['server'])",
"def test_Bridge_checkServerDescriptor(self):\n # Create a networkstatus descriptor without a server descriptor digest:\n filename = self._networkstatusFile + \"-missing-digest\"\n fh = open(filename, 'w')\n invalid = BRIDGE_NETWORKSTATUS.replace(\"c4EVu2rO/iD/DJYBX/Ll38DGQWI\", \"foo\")\n fh.seek(0)\n fh.write(invalid)\n fh.flush()\n fh.close()\n\n realdigest = \"738115BB6ACEFE20FF0C96015FF2E5DFC0C64162\"\n\n #networkstatus = descriptors.parseNetworkStatusFile(filename)\n #self.bridge.updateFromNetworkStatus(networkstatus[0])\n #self.assertRaises(bridges.MissingServerDescriptorDigest,\n # self.bridge.updateFromNetworkStatus,\n # networkstatus[0])",
"def test_Bridge_checkServerDescriptor_digest_missing(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n\n self.bridge.descriptorDigest = None\n self.assertRaises(bridges.MissingServerDescriptorDigest,\n self.bridge._checkServerDescriptor,\n self.serverdescriptor)",
"def test_Bridge_checkServerDescriptor_digest_mismatch_sd(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n\n self.bridge.descriptorDigest = 'deadbeef'\n self.assertRaises(bridges.ServerDescriptorDigestMismatch,\n self.bridge._checkServerDescriptor,\n self.serverdescriptor)",
"def test_Bridge_checkServerDescriptor_digest_mismatch_ns(self):\n # Create a networkstatus descriptor without a server descriptor digest:\n filename = self._networkstatusFile + \"-mismatched-digest\"\n fh = open(filename, 'w')\n invalid = BRIDGE_NETWORKSTATUS.replace(\"c4EVu2rO/iD/DJYBX/Ll38DGQWI\",\n \"c4EVu2r1/iD/DJYBX/Ll38DGQWI\")\n fh.seek(0)\n fh.write(invalid)\n fh.flush()\n fh.close()\n\n realdigest = \"738115BB6ACEFE20FF0C96015FF2E5DFC0C64162\"\n networkstatus = descriptors.parseNetworkStatusFile(filename)\n self.bridge.updateFromNetworkStatus(networkstatus[0])\n #self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n\n self.assertRaises(bridges.ServerDescriptorDigestMismatch,\n self.bridge.updateFromServerDescriptor,\n self.serverdescriptor)",
"def test_Bridge_getBridgeLine_request_invalid(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(False)\n\n self.assertIsNone(self.bridge.getBridgeLine(request))",
"def test_Bridge_updateFromExtraInfoDescriptor_pt_died(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 4)\n\n # Remove the obfs3 transport from the extrainfo descriptor:\n self.extrainfo.transport.pop('obfs3')\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 3)\n\n for pt in self.bridge.transports:\n self.failIfEqual(pt.methodname, 'obfs3')",
"def load(state, hashring, clear=False):\n if not state:\n logging.fatal(\"bridgedb.main.load() could not retrieve state!\")\n sys.exit(2)\n\n if clear:\n logging.info(\"Clearing old bridges...\")\n hashring.clear()\n\n logging.info(\"Loading bridges...\")\n\n ignoreNetworkstatus = state.IGNORE_NETWORKSTATUS\n if ignoreNetworkstatus:\n logging.info(\"Ignoring BridgeAuthority networkstatus documents.\")\n\n for auth in state.BRIDGE_AUTHORITY_DIRECTORIES:\n logging.info(\"Processing descriptors in %s directory...\" % auth)\n\n bridges = {}\n timestamps = {}\n\n fn = expandBridgeAuthDir(auth, state.STATUS_FILE)\n logging.info(\"Opening networkstatus file: %s\" % fn)\n networkstatuses = descriptors.parseNetworkStatusFile(fn)\n logging.debug(\"Closing networkstatus file: %s\" % fn)\n\n logging.info(\"Processing networkstatus descriptors...\")\n for router in networkstatuses:\n bridge = Bridge()\n bridge.updateFromNetworkStatus(router, ignoreNetworkstatus)\n try:\n bridge.assertOK()\n except MalformedBridgeInfo as error:\n logging.warn(str(error))\n else:\n bridges[bridge.fingerprint] = bridge\n\n for filename in state.BRIDGE_FILES:\n fn = expandBridgeAuthDir(auth, filename)\n logging.info(\"Opening bridge-server-descriptor file: '%s'\" % fn)\n serverdescriptors = descriptors.parseServerDescriptorsFile(fn)\n logging.debug(\"Closing bridge-server-descriptor file: '%s'\" % fn)\n\n for router in serverdescriptors:\n try:\n bridge = bridges[router.fingerprint]\n except KeyError:\n logging.warn(\n (\"Received server descriptor for bridge '%s' which wasn't \"\n \"in the networkstatus!\") % router.fingerprint)\n if ignoreNetworkstatus:\n bridge = Bridge()\n else:\n continue\n\n try:\n bridge.updateFromServerDescriptor(router, ignoreNetworkstatus)\n except (ServerDescriptorWithoutNetworkstatus,\n MissingServerDescriptorDigest,\n ServerDescriptorDigestMismatch) as error:\n logging.warn(str(error))\n # Reject any routers whose server descriptors didn't pass\n # :meth:`~bridges.Bridge._checkServerDescriptor`, i.e. those\n # bridges who don't have corresponding networkstatus\n # documents, or whose server descriptor digests don't check\n # out:\n bridges.pop(router.fingerprint)\n continue\n\n if state.COLLECT_TIMESTAMPS:\n # Update timestamps from server descriptors, not from network\n # status descriptors (because networkstatus documents and\n # descriptors aren't authenticated in any way):\n if bridge.fingerprint in timestamps.keys():\n timestamps[bridge.fingerprint].append(router.published)\n else:\n timestamps[bridge.fingerprint] = [router.published]\n\n eifiles = [expandBridgeAuthDir(auth, fn) for fn in state.EXTRA_INFO_FILES]\n extrainfos = descriptors.parseExtraInfoFiles(*eifiles)\n for fingerprint, router in extrainfos.items():\n try:\n bridges[fingerprint].updateFromExtraInfoDescriptor(router)\n except MalformedBridgeInfo as error:\n logging.warn(str(error))\n except KeyError as error:\n logging.warn((\"Received extrainfo descriptor for bridge '%s', \"\n \"but could not find bridge with that fingerprint.\")\n % router.fingerprint)\n\n blacklist = parseBridgeBlacklistFile(state.NO_DISTRIBUTION_FILE)\n\n inserted = 0\n logging.info(\"Inserting %d bridges into hashring...\" % len(bridges))\n for fingerprint, bridge in bridges.items():\n # Skip insertion of bridges which are geolocated to be in one of the\n # NO_DISTRIBUTION_COUNTRIES, a.k.a. 
the countries we don't distribute\n # bridges from:\n if bridge.country in state.NO_DISTRIBUTION_COUNTRIES:\n logging.warn(\"Not distributing Bridge %s %s:%s in country %s!\" %\n (bridge, bridge.address, bridge.orPort, bridge.country))\n # Skip insertion of blacklisted bridges.\n elif bridge in blacklist.keys():\n logging.warn(\"Not distributing blacklisted Bridge %s %s:%s: %s\" %\n (bridge, bridge.address, bridge.orPort, blacklist[bridge]))\n else:\n # If the bridge is not running, then it is skipped during the\n # insertion process.\n hashring.insert(bridge)\n inserted += 1\n logging.info(\"Done inserting %d bridges into hashring.\" % inserted)\n\n if state.COLLECT_TIMESTAMPS:\n reactor.callInThread(updateBridgeHistory, bridges, timestamps)\n\n state.save()",
"def _check_descriptor_dependencies(self, session, descriptor):\n if not descriptor.get(\"netslice-subnet\"):\n return\n for nsd in descriptor[\"netslice-subnet\"]:\n nsd_id = nsd[\"nsd-ref\"]\n filter_q = self._get_project_filter(session)\n filter_q[\"id\"] = nsd_id\n if not self.db.get_list(\"nsds\", filter_q):\n raise EngineException(\"Descriptor error at 'netslice-subnet':'nsd-ref'='{}' references a non \"\n \"existing nsd\".format(nsd_id), http_code=HTTPStatus.CONFLICT)",
"def network_by_bridge(bridge, server, virt=\"Xen\"):\n\n networks = net_list(server, virt)\n if len(networks) == 0:\n return None\n\n for network in networks:\n if bridge == get_bridge_from_network_xml(network, server, virt):\n return network\n\n return None",
"def test_Bridge_updateFromNetworkStatus_IPv4_ORAddress(self):\n # Add an additional IPv4 ORAddress:\n ns = BRIDGE_NETWORKSTATUS.replace(\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488',\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488\\na 123.34.56.78:36488')\n self._writeNetworkstatus(ns)\n self._parseAllDescriptorFiles()\n\n self.assertWarns(\n FutureWarning,\n \"Got IPv4 address in 'a'/'or-address' line! Descriptor format may have changed!\",\n bridges.__file__, # filename\n self.bridge.updateFromNetworkStatus,\n self.networkstatus)\n\n self.assertEqual(self.bridge.fingerprint,\n '2C3225C4805331025E211F4B6E5BF45C333FDD2C')\n self.assertIn((ipaddr.IPAddress('123.34.56.78'), 36488, 4),\n self.bridge.allVanillaAddresses)",
"def test_Bridge_updateFromExtraInfoDescriptor_bad_signature_changed(self):\n # Make the signature uppercased\n BEGIN_SIG = '-----BEGIN SIGNATURE-----'\n doc, sig = BRIDGE_EXTRAINFO.split(BEGIN_SIG)\n ei = BEGIN_SIG.join([doc, sig.upper()])\n self._writeExtrainfo(ei)\n self._parseAllDescriptorFiles()\n\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 0)\n self.assertIsNone(self.bridge.descriptors['extrainfo'])",
"def get_bridge_desciption(self, ip, port):\n br_info = {}\n\n protocol = 'http'\n if str(port) == '443':\n protocol = 'https'\n\n requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)\n r = requests.get(protocol + '://' + ip + ':' + str(port) + '/description.xml', verify=False)\n if r.status_code == 200:\n xmldict = xmltodict.parse(r.text)\n br_info['ip'] = ip\n br_info['port'] = str(port)\n br_info['friendlyName'] = str(xmldict['root']['device']['friendlyName'])\n br_info['manufacturer'] = str(xmldict['root']['device']['manufacturer'])\n br_info['manufacturerURL'] = str(xmldict['root']['device']['manufacturerURL'])\n br_info['modelDescription'] = str(xmldict['root']['device']['modelDescription'])\n br_info['modelName'] = str(xmldict['root']['device']['modelName'])\n br_info['modelURL'] = str(xmldict['root']['device']['modelURL'])\n br_info['modelNumber'] = str(xmldict['root']['device']['modelNumber'])\n br_info['serialNumber'] = str(xmldict['root']['device']['serialNumber'])\n br_info['UDN'] = str(xmldict['root']['device']['UDN'])\n br_info['gatewayName'] = str(xmldict['root']['device'].get('gatewayName', ''))\n\n br_info['URLBase'] = str(xmldict['root']['URLBase'])\n if br_info['modelName'] == 'Philips hue bridge 2012':\n br_info['version'] = 'v1'\n elif br_info['modelName'] == 'Philips hue bridge 2015':\n br_info['version'] = 'v2'\n else:\n br_info['version'] = 'unknown'\n\n # get API information\n api_config = self.get_api_config_of_bridge(br_info['URLBase'])\n br_info['datastoreversion'] = api_config.get('datastoreversion', '')\n br_info['apiversion'] = api_config.get('apiversion', '')\n br_info['swversion'] = api_config.get('swversion', '')\n\n return br_info",
"def _retrieve_server_status(status):\n valid = False\n try:\n idx = int(status)\n try:\n status = _server.MySQLServer.get_status(idx)\n valid = True\n except IndexError:\n pass\n except ValueError:\n try:\n status = str(status).upper()\n _server.MySQLServer.get_status_idx(status)\n valid = True\n except ValueError:\n pass\n\n if not valid:\n values = [ str((_server.MySQLServer.get_status_idx(value), value))\n for value in _server.MySQLServer.SERVER_STATUS ]\n raise _errors.ServerError(\"Trying to use an invalid status (%s). \"\n \"Possible values are %s.\" % (status, \", \".join(values))\n )\n\n return status",
"def serverStatusChanged(self, status, description):\n # update status bar button\n if status == 2: # receiving data\n self.statusbutton.setStyleSheet('background-color: yellow;')\n elif status == 3: # processing request (blocking)\n self.statusbutton.setStyleSheet('background-color: red;')\n elif status == 1: # listening\n self.statusbutton.setStyleSheet('')\n else: # only 0 = server is switched off\n self.statusbutton.setStyleSheet('')\n\n self.statusbutton.setChecked(bool(status))\n self.statusbutton.setToolTip(description)\n\n # update text field and toggle button in the plugin config dialog\n self.dlg.status.setText(description)\n self.dlg.toggle.setChecked(bool(status))\n self.dlg.toggle.setText('Disable API' if bool(status) else 'Enable API')",
"def test_Bridge_descriptorDigest(self):\n realdigest = \"738115BB6ACEFE20FF0C96015FF2E5DFC0C64162\"\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.assertEqual(self.bridge.descriptorDigest, realdigest)",
"def test_Bridge_getBridgeLine_no_vanilla_addresses(self):\n request = BridgeRequestBase()\n request.isValid(True)\n\n self.assertIsNone(self.bridge.getBridgeLine(request))",
"def test_NeighbourInterfaces(self):\n self.assertTrue(\n self.cdp.parse_state(\n pattern='cdp_nei_remote_int',\n cmd_key='sh_cdp_entry') == 'GigabitEthernet0/1',\n 'CDP Neighbour: local interface not found')",
"def test_Bridge_getBridgeLine_request_valid(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n line = self.bridge.getBridgeLine(request)\n\n self.assertIsNotNone(line)\n self.assertIn('179.178.155.140:36489', line)\n self.assertIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Parsing a server descriptor for a bridge which wasn't included in the networkstatus document from the BridgeAuthority, when ignoreNetworkstatus=True, should not raise any warnings.
|
def test_Bridge_updateFromServerDescriptor_ignoreNetworkstatus_no_networkstatus(self):
self.bridge.updateFromServerDescriptor(self.serverdescriptor,
ignoreNetworkstatus=True)
self.assertIsNone(self.bridge.descriptors['networkstatus'])
self.assertIsNotNone(self.bridge.descriptors['server'])
|
[
"def test_Bridge_checkServerDescriptor(self):\n # Create a networkstatus descriptor without a server descriptor digest:\n filename = self._networkstatusFile + \"-missing-digest\"\n fh = open(filename, 'w')\n invalid = BRIDGE_NETWORKSTATUS.replace(\"c4EVu2rO/iD/DJYBX/Ll38DGQWI\", \"foo\")\n fh.seek(0)\n fh.write(invalid)\n fh.flush()\n fh.close()\n\n realdigest = \"738115BB6ACEFE20FF0C96015FF2E5DFC0C64162\"\n\n #networkstatus = descriptors.parseNetworkStatusFile(filename)\n #self.bridge.updateFromNetworkStatus(networkstatus[0])\n #self.assertRaises(bridges.MissingServerDescriptorDigest,\n # self.bridge.updateFromNetworkStatus,\n # networkstatus[0])",
"def test_Bridge_updateFromServerDescriptor_no_networkstatus(self):\n self.assertRaises(bridges.ServerDescriptorWithoutNetworkstatus,\n self.bridge.updateFromServerDescriptor,\n self.serverdescriptor)",
"def test_Bridge_checkServerDescriptor_digest_missing(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n\n self.bridge.descriptorDigest = None\n self.assertRaises(bridges.MissingServerDescriptorDigest,\n self.bridge._checkServerDescriptor,\n self.serverdescriptor)",
"def test_Bridge_checkServerDescriptor_digest_mismatch_sd(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n\n self.bridge.descriptorDigest = 'deadbeef'\n self.assertRaises(bridges.ServerDescriptorDigestMismatch,\n self.bridge._checkServerDescriptor,\n self.serverdescriptor)",
"def test_Bridge_checkServerDescriptor_digest_mismatch_ns(self):\n # Create a networkstatus descriptor without a server descriptor digest:\n filename = self._networkstatusFile + \"-mismatched-digest\"\n fh = open(filename, 'w')\n invalid = BRIDGE_NETWORKSTATUS.replace(\"c4EVu2rO/iD/DJYBX/Ll38DGQWI\",\n \"c4EVu2r1/iD/DJYBX/Ll38DGQWI\")\n fh.seek(0)\n fh.write(invalid)\n fh.flush()\n fh.close()\n\n realdigest = \"738115BB6ACEFE20FF0C96015FF2E5DFC0C64162\"\n networkstatus = descriptors.parseNetworkStatusFile(filename)\n self.bridge.updateFromNetworkStatus(networkstatus[0])\n #self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n\n self.assertRaises(bridges.ServerDescriptorDigestMismatch,\n self.bridge.updateFromServerDescriptor,\n self.serverdescriptor)",
"def test_Bridge_updateFromExtraInfoDescriptor_pt_died(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 4)\n\n # Remove the obfs3 transport from the extrainfo descriptor:\n self.extrainfo.transport.pop('obfs3')\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 3)\n\n for pt in self.bridge.transports:\n self.failIfEqual(pt.methodname, 'obfs3')",
"def test_Bridge_updateFromNetworkStatus_IPv4_ORAddress(self):\n # Add an additional IPv4 ORAddress:\n ns = BRIDGE_NETWORKSTATUS.replace(\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488',\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488\\na 123.34.56.78:36488')\n self._writeNetworkstatus(ns)\n self._parseAllDescriptorFiles()\n\n self.assertWarns(\n FutureWarning,\n \"Got IPv4 address in 'a'/'or-address' line! Descriptor format may have changed!\",\n bridges.__file__, # filename\n self.bridge.updateFromNetworkStatus,\n self.networkstatus)\n\n self.assertEqual(self.bridge.fingerprint,\n '2C3225C4805331025E211F4B6E5BF45C333FDD2C')\n self.assertIn((ipaddr.IPAddress('123.34.56.78'), 36488, 4),\n self.bridge.allVanillaAddresses)",
"def test_Bridge_getBridgeLine_request_invalid(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(False)\n\n self.assertIsNone(self.bridge.getBridgeLine(request))",
"def load(state, hashring, clear=False):\n if not state:\n logging.fatal(\"bridgedb.main.load() could not retrieve state!\")\n sys.exit(2)\n\n if clear:\n logging.info(\"Clearing old bridges...\")\n hashring.clear()\n\n logging.info(\"Loading bridges...\")\n\n ignoreNetworkstatus = state.IGNORE_NETWORKSTATUS\n if ignoreNetworkstatus:\n logging.info(\"Ignoring BridgeAuthority networkstatus documents.\")\n\n for auth in state.BRIDGE_AUTHORITY_DIRECTORIES:\n logging.info(\"Processing descriptors in %s directory...\" % auth)\n\n bridges = {}\n timestamps = {}\n\n fn = expandBridgeAuthDir(auth, state.STATUS_FILE)\n logging.info(\"Opening networkstatus file: %s\" % fn)\n networkstatuses = descriptors.parseNetworkStatusFile(fn)\n logging.debug(\"Closing networkstatus file: %s\" % fn)\n\n logging.info(\"Processing networkstatus descriptors...\")\n for router in networkstatuses:\n bridge = Bridge()\n bridge.updateFromNetworkStatus(router, ignoreNetworkstatus)\n try:\n bridge.assertOK()\n except MalformedBridgeInfo as error:\n logging.warn(str(error))\n else:\n bridges[bridge.fingerprint] = bridge\n\n for filename in state.BRIDGE_FILES:\n fn = expandBridgeAuthDir(auth, filename)\n logging.info(\"Opening bridge-server-descriptor file: '%s'\" % fn)\n serverdescriptors = descriptors.parseServerDescriptorsFile(fn)\n logging.debug(\"Closing bridge-server-descriptor file: '%s'\" % fn)\n\n for router in serverdescriptors:\n try:\n bridge = bridges[router.fingerprint]\n except KeyError:\n logging.warn(\n (\"Received server descriptor for bridge '%s' which wasn't \"\n \"in the networkstatus!\") % router.fingerprint)\n if ignoreNetworkstatus:\n bridge = Bridge()\n else:\n continue\n\n try:\n bridge.updateFromServerDescriptor(router, ignoreNetworkstatus)\n except (ServerDescriptorWithoutNetworkstatus,\n MissingServerDescriptorDigest,\n ServerDescriptorDigestMismatch) as error:\n logging.warn(str(error))\n # Reject any routers whose server descriptors didn't pass\n # :meth:`~bridges.Bridge._checkServerDescriptor`, i.e. those\n # bridges who don't have corresponding networkstatus\n # documents, or whose server descriptor digests don't check\n # out:\n bridges.pop(router.fingerprint)\n continue\n\n if state.COLLECT_TIMESTAMPS:\n # Update timestamps from server descriptors, not from network\n # status descriptors (because networkstatus documents and\n # descriptors aren't authenticated in any way):\n if bridge.fingerprint in timestamps.keys():\n timestamps[bridge.fingerprint].append(router.published)\n else:\n timestamps[bridge.fingerprint] = [router.published]\n\n eifiles = [expandBridgeAuthDir(auth, fn) for fn in state.EXTRA_INFO_FILES]\n extrainfos = descriptors.parseExtraInfoFiles(*eifiles)\n for fingerprint, router in extrainfos.items():\n try:\n bridges[fingerprint].updateFromExtraInfoDescriptor(router)\n except MalformedBridgeInfo as error:\n logging.warn(str(error))\n except KeyError as error:\n logging.warn((\"Received extrainfo descriptor for bridge '%s', \"\n \"but could not find bridge with that fingerprint.\")\n % router.fingerprint)\n\n blacklist = parseBridgeBlacklistFile(state.NO_DISTRIBUTION_FILE)\n\n inserted = 0\n logging.info(\"Inserting %d bridges into hashring...\" % len(bridges))\n for fingerprint, bridge in bridges.items():\n # Skip insertion of bridges which are geolocated to be in one of the\n # NO_DISTRIBUTION_COUNTRIES, a.k.a. 
the countries we don't distribute\n # bridges from:\n if bridge.country in state.NO_DISTRIBUTION_COUNTRIES:\n logging.warn(\"Not distributing Bridge %s %s:%s in country %s!\" %\n (bridge, bridge.address, bridge.orPort, bridge.country))\n # Skip insertion of blacklisted bridges.\n elif bridge in blacklist.keys():\n logging.warn(\"Not distributing blacklisted Bridge %s %s:%s: %s\" %\n (bridge, bridge.address, bridge.orPort, blacklist[bridge]))\n else:\n # If the bridge is not running, then it is skipped during the\n # insertion process.\n hashring.insert(bridge)\n inserted += 1\n logging.info(\"Done inserting %d bridges into hashring.\" % inserted)\n\n if state.COLLECT_TIMESTAMPS:\n reactor.callInThread(updateBridgeHistory, bridges, timestamps)\n\n state.save()",
"def get_bridge_desciption(self, ip, port):\n br_info = {}\n\n protocol = 'http'\n if str(port) == '443':\n protocol = 'https'\n\n requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)\n r = requests.get(protocol + '://' + ip + ':' + str(port) + '/description.xml', verify=False)\n if r.status_code == 200:\n xmldict = xmltodict.parse(r.text)\n br_info['ip'] = ip\n br_info['port'] = str(port)\n br_info['friendlyName'] = str(xmldict['root']['device']['friendlyName'])\n br_info['manufacturer'] = str(xmldict['root']['device']['manufacturer'])\n br_info['manufacturerURL'] = str(xmldict['root']['device']['manufacturerURL'])\n br_info['modelDescription'] = str(xmldict['root']['device']['modelDescription'])\n br_info['modelName'] = str(xmldict['root']['device']['modelName'])\n br_info['modelURL'] = str(xmldict['root']['device']['modelURL'])\n br_info['modelNumber'] = str(xmldict['root']['device']['modelNumber'])\n br_info['serialNumber'] = str(xmldict['root']['device']['serialNumber'])\n br_info['UDN'] = str(xmldict['root']['device']['UDN'])\n br_info['gatewayName'] = str(xmldict['root']['device'].get('gatewayName', ''))\n\n br_info['URLBase'] = str(xmldict['root']['URLBase'])\n if br_info['modelName'] == 'Philips hue bridge 2012':\n br_info['version'] = 'v1'\n elif br_info['modelName'] == 'Philips hue bridge 2015':\n br_info['version'] = 'v2'\n else:\n br_info['version'] = 'unknown'\n\n # get API information\n api_config = self.get_api_config_of_bridge(br_info['URLBase'])\n br_info['datastoreversion'] = api_config.get('datastoreversion', '')\n br_info['apiversion'] = api_config.get('apiversion', '')\n br_info['swversion'] = api_config.get('swversion', '')\n\n return br_info",
"def test_Bridge_getBridgeLine_bridge_prefix(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n line = self.bridge.getBridgeLine(request, bridgePrefix=True)\n\n self.assertIsNotNone(line)\n self.assertIn('179.178.155.140:36489', line)\n self.assertIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)\n self.assertTrue(line.startswith('Bridge'))",
"def test_NeighbourInterfaces(self):\n self.assertTrue(\n self.cdp.parse_state(\n pattern='cdp_nei_remote_int',\n cmd_key='sh_cdp_entry') == 'GigabitEthernet0/1',\n 'CDP Neighbour: local interface not found')",
"def test_Bridge_getBridgeLine_no_vanilla_addresses(self):\n request = BridgeRequestBase()\n request.isValid(True)\n\n self.assertIsNone(self.bridge.getBridgeLine(request))",
"def network_by_bridge(bridge, server, virt=\"Xen\"):\n\n networks = net_list(server, virt)\n if len(networks) == 0:\n return None\n\n for network in networks:\n if bridge == get_bridge_from_network_xml(network, server, virt):\n return network\n\n return None",
"def GetBridgeInfoFromConf():\n bridges = {}\n with open('/usr/local/bluedon/www/cache/waf_bridge.conf', 'r') as f:\n for line in f.readlines():\n bridgeInfo = line.strip().split() # br0 vEth0,vEth1 num\n if len(bridgeInfo) == 3:\n bridges[bridgeInfo[0]] = [bridgeInfo[1]]\n return bridges",
"def serverStatusChanged(self, status, description):\n # update status bar button\n if status == 2: # receiving data\n self.statusbutton.setStyleSheet('background-color: yellow;')\n elif status == 3: # processing request (blocking)\n self.statusbutton.setStyleSheet('background-color: red;')\n elif status == 1: # listening\n self.statusbutton.setStyleSheet('')\n else: # only 0 = server is switched off\n self.statusbutton.setStyleSheet('')\n\n self.statusbutton.setChecked(bool(status))\n self.statusbutton.setToolTip(description)\n\n # update text field and toggle button in the plugin config dialog\n self.dlg.status.setText(description)\n self.dlg.toggle.setChecked(bool(status))\n self.dlg.toggle.setText('Disable API' if bool(status) else 'Enable API')",
"def _parse_server_capabilities(self, server_capabilities):\n module_list = []\n server_caps = []\n try:\n for sc in server_capabilities:\n # urn:ietf:params:netconf:capability:{name}:1.x\n server_caps_match = re.match(\n r'urn:ietf:params:netconf:capability:(\\S+):\\d+.\\d+',\n sc)\n if server_caps_match:\n server_caps.append(server_caps_match.group(1))\n modules_match = re.findall(\n r'(\\S+)\\?module=(\\S+)&revision=' +\n '(\\d{4}-\\d{2}-\\d{2})&?(features=(\\S+))?',\n sc)\n if modules_match:\n namespace, name, revision, _, features = modules_match[0]\n if features:\n module_list.append(\n {\"name\": name, \"revision\": revision,\n \"namespace\": namespace,\n \"features\": features.split(\",\")})\n else:\n module_list.append({\"name\":name,\n \"revision\":revision,\n \"namespace\": namespace})\n\n module_dict = {\"module-info\": module_list}\n return module_dict, server_caps\n except NcclientException as e:\n logger.error(list(server_capabilities))\n logger.error(str(e))\n raise str(e)",
"def test_Bridge_getBridgeLine_no_include_fingerprint(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n request = BridgeRequestBase()\n request.isValid(True)\n line = self.bridge.getBridgeLine(request, includeFingerprint=False)\n\n self.assertIsNotNone(line)\n self.assertIn('179.178.155.140:36489', line)\n self.assertNotIn('2C3225C4805331025E211F4B6E5BF45C333FDD2C', line)",
"def _retrieve_server_status(status):\n valid = False\n try:\n idx = int(status)\n try:\n status = _server.MySQLServer.get_status(idx)\n valid = True\n except IndexError:\n pass\n except ValueError:\n try:\n status = str(status).upper()\n _server.MySQLServer.get_status_idx(status)\n valid = True\n except ValueError:\n pass\n\n if not valid:\n values = [ str((_server.MySQLServer.get_status_idx(value), value))\n for value in _server.MySQLServer.SERVER_STATUS ]\n raise _errors.ServerError(\"Trying to use an invalid status (%s). \"\n \"Possible values are %s.\" % (status, \", \".join(values))\n )\n\n return status"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling _verifyExtraInfoSignature() with a descriptor which has a good signature should return None.
|
def test_Bridge_verifyExtraInfoSignature_good_signature(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.assertIsNone(self.bridge._verifyExtraInfoSignature(self.extrainfo))
|
[
"def test_Bridge_updateFromExtraInfoDescriptor_bad_signature_changed(self):\n # Make the signature uppercased\n BEGIN_SIG = '-----BEGIN SIGNATURE-----'\n doc, sig = BRIDGE_EXTRAINFO.split(BEGIN_SIG)\n ei = BEGIN_SIG.join([doc, sig.upper()])\n self._writeExtrainfo(ei)\n self._parseAllDescriptorFiles()\n\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 0)\n self.assertIsNone(self.bridge.descriptors['extrainfo'])",
"def signature_checking(self,meta):\n if self.vertification(meta):\n pass\n else:\n raise Exception('Incorrect Signature')",
"def _verify_fallback_signature(self):\n sig = self.signature\n\n if not (\n len(sig.parameters) == 0\n and sig.return_annotation in (None, Signature.empty)\n ):\n raise IllegalFormatException(\"Invalid fallback signature\")",
"def validate_sig_integrity(signer_info: cms.SignedData,\n cert: x509.Certificate,\n expected_content_type: str,\n actual_digest: bytes) -> Tuple[bool, bool]:\n\n signature_algorithm: cms.SignedDigestAlgorithm = \\\n signer_info['signature_algorithm']\n digest_algorithm_obj = signer_info['digest_algorithm']\n md_algorithm = digest_algorithm_obj['algorithm'].native\n signature = signer_info['signature'].native\n\n # signed_attrs comes with some context-specific tagging\n # because it's an implicit field. This breaks validation\n signed_attrs = signer_info['signed_attrs'].untag()\n # TODO if there are no signed_attrs, we should validate the signature\n # against actual_digest. Find some real-world exmples to test this\n # Also, signed_attrs is mandatory if content_type is not id-data\n\n # check the CMSAlgorithmProtection attr, if present\n try:\n cms_algid_protection, = find_cms_attribute(\n signed_attrs, 'cms_algorithm_protection'\n )\n signed_digest_algorithm = \\\n cms_algid_protection['digest_algorithm'].native\n if signed_digest_algorithm != digest_algorithm_obj.native:\n raise SignatureValidationError(\n \"Digest algorithm does not match CMS algorithm protection \"\n \"attribute.\"\n )\n signed_sig_algorithm = \\\n cms_algid_protection['signature_algorithm'].native\n if signed_sig_algorithm is None:\n raise SignatureValidationError(\n \"CMS algorithm protection attribute not valid for signed data\"\n )\n elif signed_sig_algorithm != signature_algorithm.native:\n raise SignatureValidationError(\n \"Signature mechanism does not match CMS algorithm \"\n \"protection attribute.\"\n )\n except KeyError:\n pass\n except SignatureValidationError:\n raise\n except ValueError:\n raise SignatureValidationError(\n 'Multiple CMS protection attributes present'\n )\n\n signed_blob = signed_attrs.dump(force=True)\n try:\n content_type, = find_cms_attribute(signed_attrs, 'content_type')\n content_type = content_type.native\n if content_type != expected_content_type:\n raise SignatureValidationError(\n 'Content type did not match expected value'\n )\n except (KeyError, ValueError):\n raise SignatureValidationError(\n 'Content type not found in signature, or multiple content-type '\n 'attributes present.'\n )\n\n try:\n embedded_digest, = find_cms_attribute(signed_attrs, 'message_digest')\n embedded_digest = embedded_digest.native\n except (KeyError, ValueError):\n raise SignatureValidationError(\n 'Message digest not found in signature, or multiple message '\n 'digest attributes present.'\n )\n intact = actual_digest == embedded_digest\n\n try:\n _validate_raw(\n signature, signed_blob, cert, signature_algorithm, md_algorithm\n )\n valid = True\n except SignatureError:\n valid = False\n\n return intact, valid",
"def test_simple_object_detector(*args, **kwargs): # real signature unknown; restored from __doc__\n pass",
"def verify(self, key, signature, data):\n # type: (Any, bytes, bytes) -> None",
"def version_checking(self,meta):\n if meta[0] == self._valid_metadata:\n pass\n else:\n raise Exception('Incorrect Metadata format')",
"def verify_receipt_signature(self, receipt_update_retrieve_res):\n pass",
"def advapi32_IsValidSecurityDescriptor(jitter):\n ret_ad, args = jitter.func_args_stdcall([\"pSecurityDescriptor\"])\n raise RuntimeError('API not implemented')\n jitter.func_ret_stdcall(ret_ad, ret_value)",
"def write_signature_info(self, signature_info):\n pass",
"def verify(self, key, msg, sig): # pragma: no cover\n raise NotImplementedError()",
"def advapi32_CryptVerifySignature(jitter, get_str, set_str):\n ret_ad, args = jitter.func_args_stdcall([\"hHash\", \"pbSignature\", \"dwSigLen\", \"hPubKey\", \"sDescription\", \"dwFlags\"])\n raise RuntimeError('API not implemented')\n jitter.func_ret_stdcall(ret_ad, ret_value)",
"def _check_signature(self, request, key):\n supercls = super(TokenServerAuthenticationPolicy, self)\n try:\n return supercls._check_signature(request, key)\n except HTTPUnauthorized:\n logger.warn(\"Authentication Failed: invalid hawk signature\")\n raise",
"def _check_signature(self, request, key):\n supercls = super(SagradaAuthenticationPolicy, self)\n try:\n return supercls._check_signature(request, key)\n except HTTPUnauthorized:\n log_cef(\"Authentication Failed: invalid MAC signature\", 5,\n request.environ, request.registry.settings,\n \"\", signature=AUTH_FAILURE)\n raise",
"def is_signature(signature):\n\n try:\n return check_signature(signature)\n except:\n return False",
"def get_maybe(self): # real signature unknown; restored from __doc__\n pass",
"def verify(self, key, signature, data):\n # type: (Any, bytes, bytes) -> None\n # narrow down the key type\n # https://github.com/aws/aws-dynamodb-encryption-python/issues/66\n if hasattr(key, \"private_bytes\"):\n _key = key.public_key()\n else:\n _key = key\n try:\n _key.verify(signature, data, self.padding_type(), self.hash_type())\n except Exception:\n message = \"Unable to verify signature\"\n _LOGGER.exception(message)\n raise SignatureVerificationError(message)",
"def _get_extra_info(self):\n # pylint: disable=no-self-use\n return None",
"def rsa_check_signature(signature, public_key):\n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling updateFromExtraInfoDescriptor() with a descriptor which has a bad signature should not continue to process the descriptor.
|
def test_Bridge_updateFromExtraInfoDescriptor_bad_signature_changed(self):
# Make the signature uppercased
BEGIN_SIG = '-----BEGIN SIGNATURE-----'
doc, sig = BRIDGE_EXTRAINFO.split(BEGIN_SIG)
ei = BEGIN_SIG.join([doc, sig.upper()])
self._writeExtrainfo(ei)
self._parseAllDescriptorFiles()
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
self.assertEqual(len(self.bridge.transports), 0)
self.assertIsNone(self.bridge.descriptors['extrainfo'])
|
[
"def test_Bridge_updateFromExtraInfoDescriptor_pt_died(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 4)\n\n # Remove the obfs3 transport from the extrainfo descriptor:\n self.extrainfo.transport.pop('obfs3')\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 3)\n\n for pt in self.bridge.transports:\n self.failIfEqual(pt.methodname, 'obfs3')",
"def test_Bridge_verifyExtraInfoSignature_good_signature(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.assertIsNone(self.bridge._verifyExtraInfoSignature(self.extrainfo))",
"def test_Bridge_updateFromExtraInfoDescriptor_pt_changed_args(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 4)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'scramblesuit':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 36492)\n\n # Change the args of scramblesuit transport in the extrainfo descriptor:\n transportline = self.extrainfo.transport['scramblesuit']\n self.extrainfo.transport['scramblesuit'] = (transportline[0],\n transportline[1],\n ['password=PASSWORD'])\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'scramblesuit':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 36492)\n self.assertEqual(pt.arguments['password'], 'PASSWORD')",
"def test_Bridge_checkServerDescriptor_digest_missing(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n\n self.bridge.descriptorDigest = None\n self.assertRaises(bridges.MissingServerDescriptorDigest,\n self.bridge._checkServerDescriptor,\n self.serverdescriptor)",
"def update ( self, package_info ):\n\n if package_info.compare_version ( self._package_info ) > 0:\n desc_data = package_info.get (\n 'desc_data', fallback_value=None, do_fallback=True\n )\n if desc_data and any (\n desc_data.get ( key, None ) for key in self.DATA_KEYS\n ):\n # another solution would be to merge data from several\n # PackageInfo instances (while preferring pkgs with higher\n # versions), doesn't make sense for one metadata field, though\n self._package_info = package_info",
"def handle_unknown(self, name, spec, attrs):\n inherited = self.find_inherited(name, spec, attrs)\n\n attributes = spec\n if attributes.get(\"__extend__\", True):\n attributes = self.combine_dicts(inherited, spec)\n\n kls = attributes.get(\"__main__\")\n kwargs = self.attributes_from(attributes)\n\n self.handle_attributes(name, {name.lower():(name, kls, kwargs)}, None, attrs, bookkeeper_method=\"add_custom\")",
"def test_Bridge_checkServerDescriptor_digest_mismatch_sd(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n\n self.bridge.descriptorDigest = 'deadbeef'\n self.assertRaises(bridges.ServerDescriptorDigestMismatch,\n self.bridge._checkServerDescriptor,\n self.serverdescriptor)",
"def test_disabled_feature_forbidden_update(self):\n self._test_method('put', False, dummy=123)",
"def _Deprecate(self, proto, field_or_value):\n if self._envoy_internal_shadow:\n field_or_value.name = 'hidden_envoy_deprecated_' + field_or_value.name\n else:\n reserved = proto.reserved_range.add()\n reserved.start = field_or_value.number\n reserved.end = field_or_value.number + 1\n proto.reserved_name.append(field_or_value.name)\n options.AddHideOption(field_or_value.options)",
"def TestOsModifyInvalid():\n hv_dict = {\n \"blahblahblubb\": {\"bar\": \"\"},\n }\n\n return _TestOsModify(hv_dict, fail=True)",
"def lift_descriptor(self, descriptor):\n return UnboundAttribute(descriptor, self.owner)",
"def update_warning(self):\n\t\tpass",
"def unknown_meta_event(self, meta_type, data):\n pass",
"def test_Bridge_updateFromExtraInfoDescriptor_pt_changed_port(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 4)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'obfs4':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 36493)\n\n # Change the port of obfs4 transport in the extrainfo descriptor:\n transportline = self.extrainfo.transport['obfs4']\n self.extrainfo.transport['obfs4'] = (transportline[0],\n 31337,\n transportline[2])\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'obfs4':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 31337)",
"def save_hints_discriminant_problem(sender, **kwargs):\n logger.debug('Signal post_save for %s %s', str(sender), str(kwargs['instance']))\n if hasattr(kwargs['instance'], 'hints_info'):\n save_hints(kwargs['instance'])",
"def testEditPackageInfoWhenUpdatingManifestsCatalogMismatch(self):\n self.mox.StubOutWithMock(self.mpcc, 'ValidatePackageConfig')\n self.mpcc.client = self.mox.CreateMockAnything()\n self.mox.StubOutWithMock(cli.plist, 'MunkiPackageInfoPlist')\n\n filename = 'file.dmg'\n filepath = '/path/to/%s' % filename\n description = 'snark!'\n display_name = None\n pkginfo_name = None\n manifests = ['stable']\n # catalogs this package is already in\n pkg_catalogs = ['unstable']\n catalogs = None\n install_types = None\n unattended_install = None\n unattended_uninstall = None\n sha256_hash = 'hash'\n pkginfo_xml = 'xml'\n\n mock_plist = self.mox.CreateMockAnything()\n\n self.mpcc.config = {\n 'edit_pkginfo': None,\n 'template_pkginfo': None,\n }\n\n self.mpcc.ValidatePackageConfig(defaults=False).AndReturn((\n filepath, description, display_name, pkginfo_name, manifests, catalogs,\n install_types, unattended_install, unattended_uninstall))\n self.mpcc.client.GetPackageInfo(filename, get_hash=True).AndReturn((\n sha256_hash, pkginfo_xml))\n\n cli.plist.MunkiPackageInfoPlist(pkginfo_xml).AndReturn(mock_plist)\n mock_plist.Parse().AndReturn(None)\n\n mock_plist.SetDescription(description).AndReturn(None)\n\n mock_plist.GetContents().AndReturn({'catalogs': pkg_catalogs})\n\n self.mox.ReplayAll()\n self.assertRaises(cli.CliError, self.mpcc.EditPackageInfo)\n self.mox.VerifyAll()",
"def set_unknown_specials_spec(self, specialsspec):\n if self.frozen:\n raise RuntimeError(\"You attempted to modify a frozen LatexContextDb object.\")\n self.unknown_specials_spec = specialsspec",
"def test_cannot_update_del_protected_keys(self, protected_key):\n expected_match = \"manipulate an ASDF internal structure\"\n warning_type = UserWarning\n\n # reading is also forbidden\n with pytest.raises(KeyError):\n _ = self.fh[protected_key]\n\n with pytest.warns(warning_type, match=expected_match):\n self.fh.update({protected_key: None})\n with pytest.warns(warning_type, match=expected_match):\n self.fh.pop(protected_key)\n with pytest.warns(warning_type, match=expected_match):\n self.fh[protected_key] = NotImplemented",
"def invalidate(self, old_signature, new_signature, force=False):\n return"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling updateFromExtraInfoDescriptor() with a descriptor which includes a different port for a known bridge with a known pluggable transport should update that transport.
|
def test_Bridge_updateFromExtraInfoDescriptor_pt_changed_port(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
self.assertEqual(len(self.bridge.transports), 4)
for pt in self.bridge.transports:
if pt.methodname == 'obfs4':
self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))
self.assertEqual(pt.port, 36493)
# Change the port of obfs4 transport in the extrainfo descriptor:
transportline = self.extrainfo.transport['obfs4']
self.extrainfo.transport['obfs4'] = (transportline[0],
31337,
transportline[2])
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
for pt in self.bridge.transports:
if pt.methodname == 'obfs4':
self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))
self.assertEqual(pt.port, 31337)
|
[
"def test_Bridge_updateFromExtraInfoDescriptor_pt_died(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 4)\n\n # Remove the obfs3 transport from the extrainfo descriptor:\n self.extrainfo.transport.pop('obfs3')\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 3)\n\n for pt in self.bridge.transports:\n self.failIfEqual(pt.methodname, 'obfs3')",
"def test_Bridge_updateFromExtraInfoDescriptor_pt_changed_args(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 4)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'scramblesuit':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 36492)\n\n # Change the args of scramblesuit transport in the extrainfo descriptor:\n transportline = self.extrainfo.transport['scramblesuit']\n self.extrainfo.transport['scramblesuit'] = (transportline[0],\n transportline[1],\n ['password=PASSWORD'])\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'scramblesuit':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 36492)\n self.assertEqual(pt.arguments['password'], 'PASSWORD')",
"def plug_port_into_network(self, device_id, host_id, port_id,\n net_id, tenant_id, port_name, device_owner,\n sg, orig_sg, vnic_type, segments=None,\n switch_bindings=None):",
"def test_Bridge_updateFromExtraInfoDescriptor_bad_signature_changed(self):\n # Make the signature uppercased\n BEGIN_SIG = '-----BEGIN SIGNATURE-----'\n doc, sig = BRIDGE_EXTRAINFO.split(BEGIN_SIG)\n ei = BEGIN_SIG.join([doc, sig.upper()])\n self._writeExtrainfo(ei)\n self._parseAllDescriptorFiles()\n\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 0)\n self.assertIsNone(self.bridge.descriptors['extrainfo'])",
"async def _async_create_bridge_with_updated_data(\n hass: HomeAssistant, entry: ConfigEntry\n) -> SamsungTVBridge:\n updated_data: dict[str, str | int] = {}\n host: str = entry.data[CONF_HOST]\n port: int | None = entry.data.get(CONF_PORT)\n method: str | None = entry.data.get(CONF_METHOD)\n load_info_attempted = False\n info: dict[str, Any] | None = None\n\n if not port or not method:\n LOGGER.debug(\"Attempting to get port or method for %s\", host)\n if method == METHOD_LEGACY:\n port = LEGACY_PORT\n else:\n # When we imported from yaml we didn't setup the method\n # because we didn't know it\n _result, port, method, info = await async_get_device_info(hass, host)\n load_info_attempted = True\n if not port or not method:\n raise ConfigEntryNotReady(\n \"Failed to determine connection method, make sure the device is on.\"\n )\n\n LOGGER.info(\"Updated port to %s and method to %s for %s\", port, method, host)\n updated_data[CONF_PORT] = port\n updated_data[CONF_METHOD] = method\n\n bridge = _async_get_device_bridge(hass, {**entry.data, **updated_data})\n\n mac: str | None = entry.data.get(CONF_MAC)\n model: str | None = entry.data.get(CONF_MODEL)\n if (not mac or not model) and not load_info_attempted:\n info = await bridge.async_device_info()\n\n if not mac:\n LOGGER.debug(\"Attempting to get mac for %s\", host)\n if info:\n mac = mac_from_device_info(info)\n\n if not mac:\n mac = await hass.async_add_executor_job(\n partial(getmac.get_mac_address, ip=host)\n )\n\n if mac:\n LOGGER.info(\"Updated mac to %s for %s\", mac, host)\n updated_data[CONF_MAC] = mac\n else:\n LOGGER.info(\"Failed to get mac for %s\", host)\n\n if not model:\n LOGGER.debug(\"Attempting to get model for %s\", host)\n if info:\n model = info.get(\"device\", {}).get(\"modelName\")\n if model:\n LOGGER.info(\"Updated model to %s for %s\", model, host)\n updated_data[CONF_MODEL] = model\n\n if model_requires_encryption(model) and method != METHOD_ENCRYPTED_WEBSOCKET:\n LOGGER.info(\n (\n \"Detected model %s for %s. Some televisions from H and J series use \"\n \"an encrypted protocol but you are using %s which may not be supported\"\n ),\n model,\n host,\n method,\n )\n\n if updated_data:\n data = {**entry.data, **updated_data}\n hass.config_entries.async_update_entry(entry, data=data)\n\n return bridge",
"def update_plugin_config(self):\n conf_dict = {}\n # conf_dict['bridge'] = self.bridge\n conf_dict['bridge_serial'] = self.bridge.get('serialNumber','')\n conf_dict['bridge_user'] = self.bridge.get('username','')\n conf_dict['bridge_ip'] = self.bridge.get('ip','')\n conf_dict['bridge_port'] = self.bridge.get('port','')\n self.update_config_section(conf_dict)\n return",
"def _port_bound_update(self, context, port):\n # TODO: Can we avoid re-writing the security profile here? Put another\n # way, does the security profile change during migration steps, or does\n # a separate port update event occur?\n LOG.info(\"Port becoming bound: create.\")\n port = self.db.get_port(context._plugin_context, port['id'])\n port = self.add_extra_port_information(context._plugin_context, port)\n profiles = self.get_security_profiles(\n context._plugin_context, port\n )\n self.transport.endpoint_created(port)\n\n for profile in profiles:\n self.transport.write_profile_to_etcd(profile)",
"def add_extra_port_information(self, context, port):\n port['fixed_ips'] = self.get_fixed_ips_for_port(\n context, port\n )\n port['security_groups'] = self.get_security_groups_for_port(\n context, port\n )\n self.add_port_gateways(port, context)\n self.add_port_interface_name(port)\n return port",
"def update_ports(self):\n\n if self.to_i != None:\n self.from_e.ports[self.from_i - 1].networks = self.to_e.ports[self.to_i - 1].networks\n else:\n self.from_e.ports[self.from_i - 1].networks = [self.to_e]",
"def test_update_from_address_router_config(self):\n\n resp = yield self.post('/channels/', {\n 'type': 'telnet',\n 'config': {\n 'twisted_endpoint': 'tcp:0',\n }\n })\n channel_id = (yield resp.json())['result']['id']\n\n old_config = self.create_router_config(\n label='old', type='from_address',\n config={'channel': channel_id})\n resp = yield self.post('/routers/', old_config)\n router_id = (yield resp.json())['result']['id']\n\n update = {'config': {'channel': channel_id}}\n new_config = deepcopy(old_config)\n new_config.update(update)\n resp = yield self.patch_request(\n '/routers/{}'.format(router_id), new_config)\n\n yield self.assert_response(\n resp, http.OK, 'router updated', new_config, ignore=['id'])",
"def _populate_neutron_extension_values(self, container,\n pci_request_id,\n port_req_body):\n self._refresh_neutron_extensions_cache()\n has_port_binding_extension = (\n self._has_port_binding_extension())\n if has_port_binding_extension:\n self._populate_neutron_binding_profile(container,\n pci_request_id,\n port_req_body)",
"def update_port_postcommit(self, context):\n LOG.info('UPDATE_PORT_POSTCOMMIT: %s', context)\n port = context._port\n original = context.original\n\n # Abort early if we're manging non-endpoint ports.\n if not self._port_is_endpoint_port(port):\n return\n\n # If this port update is purely for a status change, don't do anything:\n # we don't care about port statuses.\n if port_status_change(port, original):\n LOG.info('Called for port status change, no action.')\n return\n\n # Now, re-read the port.\n with context._plugin_context.session.begin(subtransactions=True):\n port = self.db.get_port(context._plugin_context, port['id'])\n\n # Now, fork execution based on the type of update we're performing.\n # There are a few:\n # - a port becoming bound (binding vif_type from unbound to bound);\n # - a port becoming unbound (binding vif_type from bound to\n # unbound);\n # - an Icehouse migration (binding host id changed and port bound);\n # - an update (port bound at all times);\n # - a change to an unbound port (which we don't care about, because\n # we do nothing with unbound ports).\n if port_bound(port) and not port_bound(original):\n self._port_bound_update(context, port)\n elif port_bound(original) and not port_bound(port):\n self._port_unbound_update(context, original)\n elif original['binding:host_id'] != port['binding:host_id']:\n LOG.info(\"Icehouse migration\")\n self._icehouse_migration_step(context, port, original)\n elif port_bound(original) and port_bound(port):\n LOG.info(\"Port update\")\n self._update_port(context, port)\n else:\n LOG.info(\"Update on unbound port: no action\")\n pass",
"def test_Bridge_updateFromNetworkStatus_IPv4_ORAddress(self):\n # Add an additional IPv4 ORAddress:\n ns = BRIDGE_NETWORKSTATUS.replace(\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488',\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488\\na 123.34.56.78:36488')\n self._writeNetworkstatus(ns)\n self._parseAllDescriptorFiles()\n\n self.assertWarns(\n FutureWarning,\n \"Got IPv4 address in 'a'/'or-address' line! Descriptor format may have changed!\",\n bridges.__file__, # filename\n self.bridge.updateFromNetworkStatus,\n self.networkstatus)\n\n self.assertEqual(self.bridge.fingerprint,\n '2C3225C4805331025E211F4B6E5BF45C333FDD2C')\n self.assertIn((ipaddr.IPAddress('123.34.56.78'), 36488, 4),\n self.bridge.allVanillaAddresses)",
"def test_Bridge_updateFromServerDescriptor_ignoreNetworkstatus_no_networkstatus(self):\n self.bridge.updateFromServerDescriptor(self.serverdescriptor,\n ignoreNetworkstatus=True)\n self.assertIsNone(self.bridge.descriptors['networkstatus'])\n self.assertIsNotNone(self.bridge.descriptors['server'])",
"def test_update_port_another_fwg_added(self):\n self._create_ports_and_fwgs()\n self._check_port('b', 2)\n port_dict = {'firewall_group': 3}\n self.map.update_port(self.map.ports['b'], port_dict)\n self._check_port('a', 1)\n self._check_port('b', 3)\n self._check_port('c', 2)\n self._check_fwg(1, ['a'])\n self._check_fwg(2, ['c'])\n self._check_fwg(3, ['b'])",
"def patch(self, request):\n address = request.DATA['address_id']\n port = request.DATA.get('port_id')\n if port is None:\n api.neutron.floating_ip_disassociate(request, address)\n else:\n api.neutron.floating_ip_associate(request, address, port)",
"def _make_port_dict(self, port, fields=None):\n\n if not fields:\n port.update(self.base_binding_dict)\n else:\n for key in self.base_binding_dict:\n if key in fields:\n port.update(self.base_binding_dict[key])\n return port",
"def test_model_update_port_rollback(self):\n with self.port(fmt=self.fmt) as orig_port:\n\n inserted_exc = ValueError\n with mock.patch.object(\n virt_phy_sw_v2.VirtualPhysicalSwitchModelV2,\n '_invoke_nexus_for_net_create',\n side_effect=inserted_exc):\n\n # Send an update port request with a new device ID\n device_id = \"00fff4d0-e4a8-4a3a-8906-4c4cdafb59f1\"\n if orig_port['port']['device_id'] == device_id:\n device_id = \"600df00d-e4a8-4a3a-8906-feed600df00d\"\n data = {'port': {'device_id': device_id,\n portbindings.HOST_ID: 'testhost'}}\n port_id = orig_port['port']['id']\n req = self.new_update_request('ports', data, port_id)\n res = req.get_response(self.api)\n\n # Sanity check failure result code\n self._assertExpectedHTTP(res.status_int, inserted_exc)\n\n # Check that the port still has the original device ID\n plugin = base_plugin.NeutronDbPluginV2()\n ctx = context.get_admin_context()\n db_port = plugin._get_port(ctx, port_id)\n self.assertEqual(db_port['device_id'],\n orig_port['port']['device_id'])",
"def _action_change_port(self, machine, node):\n # this exist here cause of docker host implementation\n if machine.machine_type == 'container-host':\n return\n container_info = self.inspect_node(node)\n\n try:\n port = container_info.extra[\n 'network_settings']['Ports']['22/tcp'][0]['HostPort']\n except (KeyError, TypeError):\n # add TypeError in case of 'Ports': {u'22/tcp': None}\n port = 22\n\n from mist.api.machines.models import KeyMachineAssociation\n key_associations = KeyMachineAssociation.objects(machine=machine)\n for key_assoc in key_associations:\n key_assoc.port = port\n key_assoc.save()\n return True"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling updateFromExtraInfoDescriptor() with a descriptor which includes different PT args for a known bridge with a known pluggable transport should update that transport.
|
def test_Bridge_updateFromExtraInfoDescriptor_pt_changed_args(self):
self.bridge.updateFromNetworkStatus(self.networkstatus)
self.bridge.updateFromServerDescriptor(self.serverdescriptor)
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
self.assertEqual(len(self.bridge.transports), 4)
for pt in self.bridge.transports:
if pt.methodname == 'scramblesuit':
self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))
self.assertEqual(pt.port, 36492)
# Change the args of scramblesuit transport in the extrainfo descriptor:
transportline = self.extrainfo.transport['scramblesuit']
self.extrainfo.transport['scramblesuit'] = (transportline[0],
transportline[1],
['password=PASSWORD'])
self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)
for pt in self.bridge.transports:
if pt.methodname == 'scramblesuit':
self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))
self.assertEqual(pt.port, 36492)
self.assertEqual(pt.arguments['password'], 'PASSWORD')
|
[
"def test_Bridge_updateFromExtraInfoDescriptor_pt_died(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 4)\n\n # Remove the obfs3 transport from the extrainfo descriptor:\n self.extrainfo.transport.pop('obfs3')\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 3)\n\n for pt in self.bridge.transports:\n self.failIfEqual(pt.methodname, 'obfs3')",
"def test_Bridge_updateFromExtraInfoDescriptor_pt_changed_port(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 4)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'obfs4':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 36493)\n\n # Change the port of obfs4 transport in the extrainfo descriptor:\n transportline = self.extrainfo.transport['obfs4']\n self.extrainfo.transport['obfs4'] = (transportline[0],\n 31337,\n transportline[2])\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'obfs4':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 31337)",
"def test_Bridge_updateFromExtraInfoDescriptor_bad_signature_changed(self):\n # Make the signature uppercased\n BEGIN_SIG = '-----BEGIN SIGNATURE-----'\n doc, sig = BRIDGE_EXTRAINFO.split(BEGIN_SIG)\n ei = BEGIN_SIG.join([doc, sig.upper()])\n self._writeExtrainfo(ei)\n self._parseAllDescriptorFiles()\n\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 0)\n self.assertIsNone(self.bridge.descriptors['extrainfo'])",
"def _updateTransportParamerters(self, updateObject, dest):\n leg = 0\n for tp in updateObject:\n for key, value in tp.items():\n if key in dest[__tp__][leg]:\n dest[__tp__][leg][key] = updateObject[leg][key]\n leg += 1",
"def update_plugin_config(self):\n conf_dict = {}\n # conf_dict['bridge'] = self.bridge\n conf_dict['bridge_serial'] = self.bridge.get('serialNumber','')\n conf_dict['bridge_user'] = self.bridge.get('username','')\n conf_dict['bridge_ip'] = self.bridge.get('ip','')\n conf_dict['bridge_port'] = self.bridge.get('port','')\n self.update_config_section(conf_dict)\n return",
"def _populate_neutron_extension_values(self, container,\n pci_request_id,\n port_req_body):\n self._refresh_neutron_extensions_cache()\n has_port_binding_extension = (\n self._has_port_binding_extension())\n if has_port_binding_extension:\n self._populate_neutron_binding_profile(container,\n pci_request_id,\n port_req_body)",
"async def _async_create_bridge_with_updated_data(\n hass: HomeAssistant, entry: ConfigEntry\n) -> SamsungTVBridge:\n updated_data: dict[str, str | int] = {}\n host: str = entry.data[CONF_HOST]\n port: int | None = entry.data.get(CONF_PORT)\n method: str | None = entry.data.get(CONF_METHOD)\n load_info_attempted = False\n info: dict[str, Any] | None = None\n\n if not port or not method:\n LOGGER.debug(\"Attempting to get port or method for %s\", host)\n if method == METHOD_LEGACY:\n port = LEGACY_PORT\n else:\n # When we imported from yaml we didn't setup the method\n # because we didn't know it\n _result, port, method, info = await async_get_device_info(hass, host)\n load_info_attempted = True\n if not port or not method:\n raise ConfigEntryNotReady(\n \"Failed to determine connection method, make sure the device is on.\"\n )\n\n LOGGER.info(\"Updated port to %s and method to %s for %s\", port, method, host)\n updated_data[CONF_PORT] = port\n updated_data[CONF_METHOD] = method\n\n bridge = _async_get_device_bridge(hass, {**entry.data, **updated_data})\n\n mac: str | None = entry.data.get(CONF_MAC)\n model: str | None = entry.data.get(CONF_MODEL)\n if (not mac or not model) and not load_info_attempted:\n info = await bridge.async_device_info()\n\n if not mac:\n LOGGER.debug(\"Attempting to get mac for %s\", host)\n if info:\n mac = mac_from_device_info(info)\n\n if not mac:\n mac = await hass.async_add_executor_job(\n partial(getmac.get_mac_address, ip=host)\n )\n\n if mac:\n LOGGER.info(\"Updated mac to %s for %s\", mac, host)\n updated_data[CONF_MAC] = mac\n else:\n LOGGER.info(\"Failed to get mac for %s\", host)\n\n if not model:\n LOGGER.debug(\"Attempting to get model for %s\", host)\n if info:\n model = info.get(\"device\", {}).get(\"modelName\")\n if model:\n LOGGER.info(\"Updated model to %s for %s\", model, host)\n updated_data[CONF_MODEL] = model\n\n if model_requires_encryption(model) and method != METHOD_ENCRYPTED_WEBSOCKET:\n LOGGER.info(\n (\n \"Detected model %s for %s. Some televisions from H and J series use \"\n \"an encrypted protocol but you are using %s which may not be supported\"\n ),\n model,\n host,\n method,\n )\n\n if updated_data:\n data = {**entry.data, **updated_data}\n hass.config_entries.async_update_entry(entry, data=data)\n\n return bridge",
"def update_tracking_info_by_entity(self, tracker: Tracker) -> PaypalApiResponse[Tracker]:\n body = tracker.json_data\n url = tracker.update_link\n\n for item in tracker.to_dict().items():\n key = item.key\n if key in body.keys():\n body[key] = item.value\n\n api_response = self._execute_action_link(url, body)\n error = api_response.status_code != 204\n return PaypalApiResponse(error, api_response)",
"def update_param_hints(pars, **kwargs):\r\n for pname, hints in kwargs.items():\r\n if pname in pars:\r\n for hint, val in hints.items():\r\n if val is not None:\r\n setattr(pars[pname], hint, val)\r\n return pars",
"def update ( self, package_info ):\n\n if package_info.compare_version ( self._package_info ) > 0:\n desc_data = package_info.get (\n 'desc_data', fallback_value=None, do_fallback=True\n )\n if desc_data and any (\n desc_data.get ( key, None ) for key in self.DATA_KEYS\n ):\n # another solution would be to merge data from several\n # PackageInfo instances (while preferring pkgs with higher\n # versions), doesn't make sense for one metadata field, though\n self._package_info = package_info",
"def update_source(self, **kwargs):\n feature_tmp = '_neighbor{0}_update_source'\n callback = kwargs.pop('callback', self._callback)\n ip_addr = ip_interface(unicode(kwargs.pop('neighbor')))\n rbridge_id = kwargs.pop('rbridge_id', '1')\n afi = 'ipv4' if ip_addr.version == 4 else 'ipv6'\n vrf = kwargs.pop('vrf', 'default')\n int_type = kwargs.pop('int_type')\n int_name = kwargs.pop('int_name')\n if vrf == 'default':\n if 'ipv4' == afi:\n feature = feature_tmp.format('_neighbor_addr')\n elif 'ipv6' == afi:\n feature = feature_tmp.format('_neighbor_ipv6_addr')\n afi = None\n elif 'ipv4' == afi:\n feature = feature_tmp.format('_af_ipv4_neighbor_addr')\n elif 'ipv6' == afi:\n feature = feature_tmp.format('_af_ipv6_neighbor_addr')\n if int_type == 'loopback':\n args = dict(loopback=int_name)\n feature += '_loopback'\n else:\n args = dict(\n interface_type=int_type,\n ethernet=str(int_name),\n rbridge_id=rbridge_id)\n feature += '_ethernet'\n if kwargs.pop('get', False):\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n afi=afi,\n vrf=vrf,\n feature=feature,\n n_addr=str(\n ip_addr.ip),\n op='_get',\n os=self.os)\n ret = callback(config, handler='get_config')\n search = './/' + int_type\n bgp = Util(ret.data)\n ret = bgp.findText(bgp.root, search)\n ret = ret if ret else None\n return ret\n if kwargs.pop('delete', False):\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n afi=afi,\n vrf=vrf,\n feature=feature,\n n_addr=str(\n ip_addr.ip),\n op='_delete',\n os=self.os)\n return callback(config)\n config = util.get_bgp_api(\n rbridge_id=rbridge_id,\n afi=afi,\n vrf=vrf,\n feature=feature,\n n_addr=str(\n ip_addr.ip),\n args=args,\n os=self.os)\n return callback(config)",
"def plug_port_into_network(self, device_id, host_id, port_id,\n net_id, tenant_id, port_name, device_owner,\n sg, orig_sg, vnic_type, segments=None,\n switch_bindings=None):",
"def test_update_from_address_router_config(self):\n\n resp = yield self.post('/channels/', {\n 'type': 'telnet',\n 'config': {\n 'twisted_endpoint': 'tcp:0',\n }\n })\n channel_id = (yield resp.json())['result']['id']\n\n old_config = self.create_router_config(\n label='old', type='from_address',\n config={'channel': channel_id})\n resp = yield self.post('/routers/', old_config)\n router_id = (yield resp.json())['result']['id']\n\n update = {'config': {'channel': channel_id}}\n new_config = deepcopy(old_config)\n new_config.update(update)\n resp = yield self.patch_request(\n '/routers/{}'.format(router_id), new_config)\n\n yield self.assert_response(\n resp, http.OK, 'router updated', new_config, ignore=['id'])",
"def tunnel_update(self, context, **kwargs):\n try:\n LOG.debug('received tunnel_update: %s' % kwargs)\n self.lbdriver.tunnel_update(**kwargs)\n except q_exception.NeutronException as exc:\n LOG.error(\"tunnel_update: NeutronException: %s\" % exc.msg)\n except Exception as exc:\n LOG.error(\"tunnel_update: Exception: %s\" % exc.message)",
"def _update_ptp_parameters(self):\n # TODO: this method is supposed to be called in the context of the same\n # patch that is deprecating the former PTP APIs. Thus, in a future\n # release (probably the next one) it can be removed\n check_file = tsc.PTP_UPDATE_PARAMETERS_FLAG\n if os.path.isfile(check_file):\n LOG.debug(\"Already done with legacy PTP configuration\")\n return\n\n # Add check file to avoid re-running this method (with late creation of\n # legacy instances)\n open(check_file, 'w').close()\n\n try:\n # This additional check ensures that patch re-apply won't fail\n # because legacy entries weren't removed together with the check\n # file and the patch itself, when it got removed earlier\n legacy_names = [constants.PTP_INSTANCE_LEGACY_PTP4L,\n constants.PTP_INSTANCE_LEGACY_PHC2SYS]\n for name in legacy_names:\n try:\n ptp_instance = self.dbapi.ptp_instance_get_by_name(name)\n LOG.info(\"Legacy PTP instance %s found with id = %s, \"\n \"skipping update\" % (name, ptp_instance['id']))\n return\n except exception.NotFound:\n LOG.debug(\"Legacy PTP instance %s not found\" % name)\n\n # List all the hosts with clock_synchronization=ptp\n hosts_list = self.dbapi.ihost_get_list()\n ptp_hosts_list = [\n host\n for host in hosts_list\n if host['clock_synchronization'] == constants.PTP]\n LOG.debug(\"There are %d hosts with clock_synchronization=ptp\" %\n len(ptp_hosts_list))\n\n # List all PTP parameters in service-parameters table (to be\n # migrated)\n ptp_svc_parameters_list = self.dbapi.service_parameter_get_all(\n service=constants.SERVICE_TYPE_PTP)\n LOG.debug(\"There are %d PTP rows in 'service_parameter' table\" %\n len(ptp_svc_parameters_list))\n\n if len(ptp_hosts_list) == 0 and len(ptp_svc_parameters_list) == 0:\n # No need for upgrade\n return\n\n # List all the interfaces with ptp_role!=none\n ifaces_list = self.dbapi.iinterface_get_list()\n ptp_ifaces_list = [\n iface\n for iface in ifaces_list\n if iface['ptp_role'] != constants.INTERFACE_PTP_ROLE_NONE]\n LOG.debug(\"There are %d interfaces with ptp_role != none\" %\n len(ptp_ifaces_list))\n\n LOG.info(\"Creating PTP instances for legacy parameters\")\n\n # Take system-wide parameters from legacy configuration\n ptp_config = self.dbapi.ptp_get_one() # there is a single entry\n delay_mechanism = str(ptp_config.mechanism).upper()\n time_stamping = str(ptp_config.mode).lower()\n network_transport = str(ptp_config.transport).upper()\n\n # Legacy instance for system-wide parameters and those of section\n # \"global\" in service-parameters table\n (ptp4l_id, ptp4l_uuid) = self._update_ptp_create_instance(\n constants.PTP_INSTANCE_LEGACY_PTP4L,\n constants.PTP_INSTANCE_TYPE_PTP4L)\n\n # Legacy PTP interface associated to legacy ptp4l instance\n ptp4lif_id = self._update_ptp_create_interface(\n constants.PTP_INTERFACE_LEGACY_PTP4L, ptp4l_id)\n\n # Legacy instance for parameters of section \"phc2sys\"\n (phc2sys_id, phc2sys_uuid) = self._update_ptp_create_instance(\n constants.PTP_INSTANCE_LEGACY_PHC2SYS,\n constants.PTP_INSTANCE_TYPE_PHC2SYS)\n\n # Legacy PTP interface associated to legacy phc2sys instance\n phc2sysif_id = self._update_ptp_create_interface(\n constants.PTP_INTERFACE_LEGACY_PHC2SYS, phc2sys_id)\n\n # Add 'uds_address' parameter to phy2sys instance for linkage with\n # ptp4l instance\n uds_address_path = \\\n '/var/run/ptp4l-%s' % constants.PTP_INSTANCE_LEGACY_PTP4L\n self._update_ptp_add_parameter_to_instance(\n phc2sys_uuid,\n constants.PTP_PARAMETER_UDS_ADDRESS,\n uds_address_path)\n\n # Assign legacy instances 
to all hosts with\n # clock_synchronization=ptp\n for host in ptp_hosts_list:\n self._update_ptp_assign_instance_to_host(ptp4l_id, host['id'])\n self._update_ptp_assign_instance_to_host(phc2sys_id, host['id'])\n\n # Assign legacy PTP interfaces to all interfaces with ptp_role!=none\n for iface in ptp_ifaces_list:\n self._update_ptp_assign_ptp_to_interface(ptp4lif_id,\n iface['id'])\n self._update_ptp_assign_ptp_to_interface(phc2sysif_id,\n iface['id'])\n\n # Copy service-parameter PTP entries, if any\n domain_number = constants.PTP_PARAMETER_DEFAULT_DOMAIN\n for param in ptp_svc_parameters_list:\n\n if param['name'] == constants.PTP_PARAMETER_UPDATE_RATE or \\\n (param['name'] ==\n constants.PTP_PARAMETER_SUMMARY_UPDATES):\n LOG.info(\"Found %s parameter, ignored\" % param['name'])\n continue\n\n if param['name'] == constants.PTP_PARAMETER_DOMAIN_NUMBER:\n domain_number = param['value'] # overwrite default\n continue # skip it for below\n\n if param['name'] == constants.PTP_PARAMETER_DELAY_MECHANISM:\n delay_mechanism = str(param['value']).upper() # overwrite\n continue # skip it for below\n\n if param['name'] == constants.PTP_PARAMETER_TIME_STAMPING:\n time_stamping = str(param['value']).lower() # overwrite\n continue # skip it for below\n\n if param['name'] == constants.PTP_PARAMETER_NETWORK_TRANSPORT:\n network_transport = str(param['value']).upper() # overwrt\n continue # skip it for below\n\n if param['section'] == \\\n constants.SERVICE_PARAM_SECTION_PTP_GLOBAL:\n owner_uuid = ptp4l_uuid\n elif param['section'] == \\\n constants.SERVICE_PARAM_SECTION_PTP_PHC2SYS:\n owner_uuid = phc2sys_uuid\n else:\n raise Exception(\"Unexpected PTP section in \"\n \"'service-parameter' table\")\n\n self._update_ptp_add_parameter_to_instance(owner_uuid,\n param['name'],\n param['value'])\n\n # Whatever 'global' parameter has been found, it must be\n # added also to phc2sys instance, since now this has own\n # configuration file\n if param['section'] == \\\n constants.SERVICE_PARAM_SECTION_PTP_GLOBAL:\n self._update_ptp_add_parameter_to_instance(phc2sys_uuid,\n param['name'],\n param['value'])\n\n self._update_ptp_add_parameter_to_instance(\n ptp4l_uuid,\n constants.PTP_PARAMETER_DOMAIN_NUMBER,\n domain_number)\n self._update_ptp_add_parameter_to_instance(\n phc2sys_uuid,\n constants.PTP_PARAMETER_DOMAIN_NUMBER,\n domain_number)\n self._update_ptp_add_parameter_to_instance(\n ptp4l_uuid,\n constants.PTP_PARAMETER_DELAY_MECHANISM,\n delay_mechanism)\n self._update_ptp_add_parameter_to_instance(\n phc2sys_uuid,\n constants.PTP_PARAMETER_DELAY_MECHANISM,\n delay_mechanism)\n self._update_ptp_add_parameter_to_instance(\n ptp4l_uuid,\n constants.PTP_PARAMETER_TIME_STAMPING,\n time_stamping)\n self._update_ptp_add_parameter_to_instance(\n phc2sys_uuid,\n constants.PTP_PARAMETER_TIME_STAMPING,\n time_stamping)\n self._update_ptp_add_parameter_to_instance(\n ptp4l_uuid,\n constants.PTP_PARAMETER_NETWORK_TRANSPORT,\n network_transport)\n self._update_ptp_add_parameter_to_instance(\n phc2sys_uuid,\n constants.PTP_PARAMETER_NETWORK_TRANSPORT,\n network_transport)\n\n # Add 'boundary_clock_jbod' parameter to ptp4l instance if mode is\n # \"hardware\"\n if time_stamping == 'hardware':\n self._update_ptp_add_parameter_to_instance(\n ptp4l_uuid,\n constants.PTP_PARAMETER_BC_JBOD,\n constants.PTP_BOUNDARY_CLOCK_JBOD_1)\n\n except Exception as e:\n LOG.exception(e)",
"def zCatalogUpdateProxiesPType(catalog, mapping):\n raise NotImplementedError",
"def test_Bridge_updateFromServerDescriptor_ignoreNetworkstatus_no_networkstatus(self):\n self.bridge.updateFromServerDescriptor(self.serverdescriptor,\n ignoreNetworkstatus=True)\n self.assertIsNone(self.bridge.descriptors['networkstatus'])\n self.assertIsNotNone(self.bridge.descriptors['server'])",
"def test_Bridge_updateFromNetworkStatus_IPv4_ORAddress(self):\n # Add an additional IPv4 ORAddress:\n ns = BRIDGE_NETWORKSTATUS.replace(\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488',\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488\\na 123.34.56.78:36488')\n self._writeNetworkstatus(ns)\n self._parseAllDescriptorFiles()\n\n self.assertWarns(\n FutureWarning,\n \"Got IPv4 address in 'a'/'or-address' line! Descriptor format may have changed!\",\n bridges.__file__, # filename\n self.bridge.updateFromNetworkStatus,\n self.networkstatus)\n\n self.assertEqual(self.bridge.fingerprint,\n '2C3225C4805331025E211F4B6E5BF45C333FDD2C')\n self.assertIn((ipaddr.IPAddress('123.34.56.78'), 36488, 4),\n self.bridge.allVanillaAddresses)",
"def inject_network_info(self, *args, **kwargs):\n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calling updateFromExtraInfoDescriptor() with a descriptor which doesn't include a previously known transport should remove that transport.
|
def test_Bridge_updateFromExtraInfoDescriptor_pt_died(self):
    self.bridge.updateFromNetworkStatus(self.networkstatus)
    self.bridge.updateFromServerDescriptor(self.serverdescriptor)
    self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)

    self.assertEqual(len(self.bridge.transports), 4)

    # Remove the obfs3 transport from the extrainfo descriptor:
    self.extrainfo.transport.pop('obfs3')
    self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)

    self.assertEqual(len(self.bridge.transports), 3)

    for pt in self.bridge.transports:
        self.failIfEqual(pt.methodname, 'obfs3')
|
[
"def test_Bridge_updateFromExtraInfoDescriptor_pt_changed_port(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 4)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'obfs4':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 36493)\n\n # Change the port of obfs4 transport in the extrainfo descriptor:\n transportline = self.extrainfo.transport['obfs4']\n self.extrainfo.transport['obfs4'] = (transportline[0],\n 31337,\n transportline[2])\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'obfs4':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 31337)",
"def test_Bridge_updateFromExtraInfoDescriptor_pt_changed_args(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 4)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'scramblesuit':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 36492)\n\n # Change the args of scramblesuit transport in the extrainfo descriptor:\n transportline = self.extrainfo.transport['scramblesuit']\n self.extrainfo.transport['scramblesuit'] = (transportline[0],\n transportline[1],\n ['password=PASSWORD'])\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n for pt in self.bridge.transports:\n if pt.methodname == 'scramblesuit':\n self.assertEqual(pt.address, ipaddr.IPv4Address('179.178.155.140'))\n self.assertEqual(pt.port, 36492)\n self.assertEqual(pt.arguments['password'], 'PASSWORD')",
"def test_Bridge_updateFromExtraInfoDescriptor_bad_signature_changed(self):\n # Make the signature uppercased\n BEGIN_SIG = '-----BEGIN SIGNATURE-----'\n doc, sig = BRIDGE_EXTRAINFO.split(BEGIN_SIG)\n ei = BEGIN_SIG.join([doc, sig.upper()])\n self._writeExtrainfo(ei)\n self._parseAllDescriptorFiles()\n\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n self.bridge.updateFromExtraInfoDescriptor(self.extrainfo)\n\n self.assertEqual(len(self.bridge.transports), 0)\n self.assertIsNone(self.bridge.descriptors['extrainfo'])",
"def disconnect(self, transport):\n if transport == self._transport:\n self._transport = None",
"def delete_last_transport_process(self):",
"def _resync_extra_ports(self, ports_to_delete):\n for endpoint in ports_to_delete:\n try:\n self.transport.atomic_delete_endpoint(endpoint)\n except (ValueError, etcd.EtcdKeyNotFound):\n # If the atomic CAD doesn't successfully delete, that's ok, it\n # means the endpoint was created or updated elsewhere.\n LOG.info('Endpoint %s was deleted elsewhere', endpoint)\n continue",
"def torrent_removed(self, infohash):\n raise NotImplementedError('BasicApp.torrent_removed() removing missing torrents not implemented')",
"def __remove_einfo_object__(einfo) :\n\n global __data__, __enclave_name_map__\n\n try :\n # we use the old information because the name may change\n # in the new enclave (though the enclave_id will not)\n old_einfo = __data__[einfo.enclave_id]\n __data__.pop(einfo.enclave_id, None)\n __enclave_name_map__.pop(old_einfo.name, None)\n except :\n pass",
"def CloseTransport(self):\n self.logger.info(\n \"Close transport for localAddr:%s, remoteAddr:%s, localConnectionID:%d, remoteConnection:%d\" % (\n self.localAddr, self.remoteAddr, self.connection.localConnID, self.connection.remoteConnID\n ))\n self.dispatcher.request_unregister({\n \"unregType\": \"transport\",\n \"value\": self\n })",
"def replace(self, sender, receiver):\n logging.info(('ADAPTIVE before' + str(self.store)))\n\n # Determine send cost before messing around with the send histories\n self.assert_history() # still intact, but we also don't use them elsewehere\n cost = self.mod.get_send_cost(sender, receiver)\n\n # Remove previous sender\n for (s, l_receivers) in self.store.items():\n logging.info(('ADAPTIVE', 'looking for ', receiver, 'in', l_receivers))\n self.store[s] = [ (c, r) for (c, r) in l_receivers if \\\n r != receiver ]\n\n # Add new pair\n self.store[sender].append((cost, receiver))\n\n logging.info(('ADAPTIVE after' + str(self.store)))",
"def remove_package_entry ( self, p_info, with_ebuild=True ):\n filename = p_info.get ( 'package_src_destpath', do_fallback=True )\n\n if with_ebuild:\n efile = p_info.get ( 'ebuild_file' )\n if not efile:\n raise Exception ( \"package info object has no ebuild file\" )\n self.remove_entry ( 'EBUILD', os.path.basename ( efile ) )\n\n self.remove_entry ( 'DIST', filename )",
"def peer_access_unset(self, peer, data):\n pass",
"async def test_options_remove_device_override(hass: HomeAssistant) -> None:\n config_entry = MockConfigEntry(\n domain=DOMAIN,\n entry_id=\"abcde12345\",\n data={**MOCK_USER_INPUT_HUB_V2, CONF_HUB_VERSION: 2},\n options={\n CONF_OVERRIDE: [\n {CONF_ADDRESS: \"1A.2B.3C\", CONF_CAT: 6, CONF_SUBCAT: 100},\n {CONF_ADDRESS: \"4D.5E.6F\", CONF_CAT: 7, CONF_SUBCAT: 200},\n ]\n },\n )\n\n config_entry.add_to_hass(hass)\n result = await _options_init_form(hass, config_entry.entry_id, STEP_REMOVE_OVERRIDE)\n\n user_input = {CONF_ADDRESS: \"1A.2B.3C\"}\n result, _ = await _options_form(hass, result[\"flow_id\"], user_input)\n\n assert result[\"type\"] == data_entry_flow.FlowResultType.CREATE_ENTRY\n assert len(config_entry.options[CONF_OVERRIDE]) == 1",
"def _resolve_transport(transport):\n nested_transport = getattr(transport, 'transport', None)\n if nested_transport is not None:\n return nested_transport\n\n return _resolve_transport(nested_transport)",
"def delete_extra( self, pick=None ):\n if pick is not None:\n self._delextra = pick\n return self._delextra",
"def _CloseTransport(self):\n if self.transport is None:\n return\n try:\n old_transp = self.transport\n self.transport = None\n old_transp.Close()\n except Exception: # pylint: disable=W0703\n pass",
"def add_transport_process(self, transport, **transport_process_kwargs):",
"def unlink(self):\n if self._linked_target:\n warnings.warn(\"This might lead to unexpected behaviour as forwarded attributes are not frozen. Parent pulse\"\n \" templates might rely on certain properties to be constant (for example due to caching).\",\n UnlinkWarning)\n self._linked_target = None",
"def _remove_d_info(ctx, stmt):\n t = stmt.parent.i_target_node\n if t is None:\n return\n if stmt.arg == 'not-supported':\n t = t.parent\n if hasattr(t, 'd_children') and hasattr(t, 'd_substmts'):\n if hasattr(t, 'i_children'):\n t.i_children = t.d_children\n t.substmts = t.d_substmts\n del t.d_children\n del t.d_substmts"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Parsing a networkstatus descriptor should result in Bridge.descriptorDigest being set.
|
def test_Bridge_descriptorDigest(self):
    realdigest = "738115BB6ACEFE20FF0C96015FF2E5DFC0C64162"
    self.bridge.updateFromNetworkStatus(self.networkstatus)
    self.assertEqual(self.bridge.descriptorDigest, realdigest)
|
[
"def test_Bridge_checkServerDescriptor(self):\n # Create a networkstatus descriptor without a server descriptor digest:\n filename = self._networkstatusFile + \"-missing-digest\"\n fh = open(filename, 'w')\n invalid = BRIDGE_NETWORKSTATUS.replace(\"c4EVu2rO/iD/DJYBX/Ll38DGQWI\", \"foo\")\n fh.seek(0)\n fh.write(invalid)\n fh.flush()\n fh.close()\n\n realdigest = \"738115BB6ACEFE20FF0C96015FF2E5DFC0C64162\"\n\n #networkstatus = descriptors.parseNetworkStatusFile(filename)\n #self.bridge.updateFromNetworkStatus(networkstatus[0])\n #self.assertRaises(bridges.MissingServerDescriptorDigest,\n # self.bridge.updateFromNetworkStatus,\n # networkstatus[0])",
"def test_Bridge_checkServerDescriptor_digest_missing(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n\n self.bridge.descriptorDigest = None\n self.assertRaises(bridges.MissingServerDescriptorDigest,\n self.bridge._checkServerDescriptor,\n self.serverdescriptor)",
"def test_Bridge_checkServerDescriptor_digest_mismatch_sd(self):\n self.bridge.updateFromNetworkStatus(self.networkstatus)\n\n self.bridge.descriptorDigest = 'deadbeef'\n self.assertRaises(bridges.ServerDescriptorDigestMismatch,\n self.bridge._checkServerDescriptor,\n self.serverdescriptor)",
"def test_Bridge_updateFromServerDescriptor_ignoreNetworkstatus_no_networkstatus(self):\n self.bridge.updateFromServerDescriptor(self.serverdescriptor,\n ignoreNetworkstatus=True)\n self.assertIsNone(self.bridge.descriptors['networkstatus'])\n self.assertIsNotNone(self.bridge.descriptors['server'])",
"def test_Bridge_checkServerDescriptor_digest_mismatch_ns(self):\n # Create a networkstatus descriptor without a server descriptor digest:\n filename = self._networkstatusFile + \"-mismatched-digest\"\n fh = open(filename, 'w')\n invalid = BRIDGE_NETWORKSTATUS.replace(\"c4EVu2rO/iD/DJYBX/Ll38DGQWI\",\n \"c4EVu2r1/iD/DJYBX/Ll38DGQWI\")\n fh.seek(0)\n fh.write(invalid)\n fh.flush()\n fh.close()\n\n realdigest = \"738115BB6ACEFE20FF0C96015FF2E5DFC0C64162\"\n networkstatus = descriptors.parseNetworkStatusFile(filename)\n self.bridge.updateFromNetworkStatus(networkstatus[0])\n #self.bridge.updateFromServerDescriptor(self.serverdescriptor)\n\n self.assertRaises(bridges.ServerDescriptorDigestMismatch,\n self.bridge.updateFromServerDescriptor,\n self.serverdescriptor)",
"def test_Bridge_updateFromServerDescriptor_no_networkstatus(self):\n self.assertRaises(bridges.ServerDescriptorWithoutNetworkstatus,\n self.bridge.updateFromServerDescriptor,\n self.serverdescriptor)",
"def gattc_read_descriptor(\n self,\n descriptor: Union[_DescriptorHandle, _DescriptorTuple],\n /,\n ) -> bytes:\n ...",
"def _parse_status(response: str) -> Status:\n status_str = re.match(r'.*\\[(.*)\\]\\n$', response, re.DOTALL).group(1)\n if status_str == 'on':\n return Status.ON\n elif status_str == 'off':\n return Status.OFF\n else:\n raise ValueError('Could not parse amixer response status')",
"def _read_status(self):\n self._status = shellutils.read_status(self._status_file, self._status)",
"def readStatus(message):",
"def distinguishingDescriptor(self, descriptor):\n if descriptor == slipnet.letter:\n return False\n if descriptor == slipnet.group:\n return False\n for number in slipnet.numbers:\n if number == descriptor:\n return False\n return True",
"def set_host_status(self, node_state_tag):\n self.host_status[node_state_tag.get('uname')] = \\\n node_state_tag.get('crmd')",
"def read_status_packet(self):\n pkt = packet.Packet()\n while True:\n start = pyb.micros()\n byte = self.serial_port.read_byte()\n if byte is None:\n raise BusError(packet.ErrorCode.TIMEOUT)\n err = pkt.process_byte(byte)\n if err != packet.ErrorCode.NOT_DONE:\n break\n if err != packet.ErrorCode.NONE:\n raise BusError(err)\n if self.show & Bus.SHOW_COMMANDS:\n log('Rcvd Status: {}'.format(packet.ErrorCode(err)))\n if self.show & Bus.SHOW_PACKETS:\n dump_mem(pkt.pkt_bytes, prefix=' R', show_ascii=True, log=log)\n err = pkt.error_code()\n if err != packet.ErrorCode.NONE:\n raise BusError(err)\n return pkt",
"def create_descriptor(descriptor_name, type_descriptor, new_data, data_type, file_uploaded):\n log.debug('Create descriptor')\n\n try:\n client = Client()\n if type_descriptor == 'nsd':\n result = client.nsd_onboard(file_uploaded)\n elif type_descriptor == 'vnfd':\n result = client.vnfd_onboard(file_uploaded)\n\n else:\n log.debug('Create descriptor: Unknown data type')\n return False\n\n except Exception as e:\n log.exception(e)\n result = False\n return result",
"def parse(segment: bytes) -> 'RDTSegment':\n if RDTSegment.calc_checksum(segment) != 0:\n return None\n head = segment[0:16]\n\n src_port, dest_port, seq_num, ack_num = struct.unpack('!HHLL', head[0:12])\n flags, unused, checksum = struct.unpack('!BBH', head[12:16])\n head_length = (flags & 0xF0) >> 2\n payload = segment[head_length:]\n rdt_seg = RDTSegment(src_port, dest_port, seq_num, ack_num, DEFAULT_WIN_SIZE, payload)\n rdt_seg._decode_flags(flags)\n if rdt_seg.header_len > 16:\n rdt_seg._decode_options(segment[16:head_length])\n return rdt_seg",
"def test_Bridge_updateFromNetworkStatus_IPv4_ORAddress(self):\n # Add an additional IPv4 ORAddress:\n ns = BRIDGE_NETWORKSTATUS.replace(\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488',\n 'a [6bf3:806b:78cd:d4b4:f6a7:4ced:cfad:dad4]:36488\\na 123.34.56.78:36488')\n self._writeNetworkstatus(ns)\n self._parseAllDescriptorFiles()\n\n self.assertWarns(\n FutureWarning,\n \"Got IPv4 address in 'a'/'or-address' line! Descriptor format may have changed!\",\n bridges.__file__, # filename\n self.bridge.updateFromNetworkStatus,\n self.networkstatus)\n\n self.assertEqual(self.bridge.fingerprint,\n '2C3225C4805331025E211F4B6E5BF45C333FDD2C')\n self.assertIn((ipaddr.IPAddress('123.34.56.78'), 36488, 4),\n self.bridge.allVanillaAddresses)",
"def parse_remote_card(self, json_string: str) -> list(tuple):\n entity = re.search('input_boolean.learning_mode_remote_\\d*', json_string).group(0) \n mac_address = re.search(\"[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\\\\1[0-9a-f]{2}){4}\", json_string).group(0)\n remote_type = self.get_remote_type(json_string) \n title = f\"Comando {remote_type}\"\n additional_information = {'mac': mac_address, 'type': remote_type} \n return [(entity, title, additional_information)] #We output a list of a tuple to ensure compatibility with the rest of entites cards ",
"def parseStatus(self):\n if self.parseSuccessful:\n self.completionMessage += \"\\nvalue: \" + str(self.tree.value()) + \\\n \"\\nPreorder: \" + self.tree.prefix() + \\\n \"\\nInorder: \" + self.tree.infix() + \\\n \"\\nPostorder: \" + self.tree.postfix()\n return self.completionMessage",
"def checkNetworkStatus(self):\r\n pass"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|