focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Submits a Grand Exchange trade update to the GE tracker client.
 * <p>
 * Handles three reportable cases: a brand new BUYING/SELLING offer, a
 * cancelled offer, and an incremental fill of an existing offer. Any other
 * offer state is ignored. The current offer is diffed against the snapshot
 * previously saved for this slot so unchanged or desynced offers are not
 * re-submitted.
 *
 * @param slot  the GE slot index the offer occupies
 * @param offer the current offer in that slot
 */
@VisibleForTesting
void submitTrade(int slot, GrandExchangeOffer offer)
{
    GrandExchangeOfferState state = offer.getState();
    // Only these four states represent something worth reporting.
    if (state != GrandExchangeOfferState.CANCELLED_BUY && state != GrandExchangeOfferState.CANCELLED_SELL && state != GrandExchangeOfferState.BUYING && state != GrandExchangeOfferState.SELLING)
    {
        return;
    }
    SavedOffer savedOffer = getOffer(slot);
    // Flag offers seen within GE_LOGIN_BURST_WINDOW ticks of login —
    // presumably because existing offers are replayed in a burst then (TODO confirm).
    boolean login = client.getTickCount() <= lastLoginTick + GE_LOGIN_BURST_WINDOW;
    if (savedOffer == null && (state == GrandExchangeOfferState.BUYING || state == GrandExchangeOfferState.SELLING) && offer.getQuantitySold() == 0)
    {
        // new offer
        GrandExchangeTrade grandExchangeTrade = new GrandExchangeTrade();
        grandExchangeTrade.setBuy(state == GrandExchangeOfferState.BUYING);
        grandExchangeTrade.setItemId(offer.getItemId());
        grandExchangeTrade.setTotal(offer.getTotalQuantity());
        grandExchangeTrade.setOffer(offer.getPrice());
        grandExchangeTrade.setSlot(slot);
        grandExchangeTrade.setWorldType(getGeWorldType());
        grandExchangeTrade.setLogin(login);
        grandExchangeTrade.setSeq(tradeSeq++);
        grandExchangeTrade.setResetTime(getLimitResetTime(offer.getItemId()));
        log.debug("Submitting new trade: {}", grandExchangeTrade);
        grandExchangeClient.submit(grandExchangeTrade);
        return;
    }
    // Saved snapshot does not match the live offer's identity fields: desync,
    // so nothing trustworthy can be derived from the diff.
    if (savedOffer == null || savedOffer.getItemId() != offer.getItemId() || savedOffer.getPrice() != offer.getPrice() || savedOffer.getTotalQuantity() != offer.getTotalQuantity())
    {
        // desync
        return;
    }
    if (savedOffer.getState() == offer.getState() && savedOffer.getQuantitySold() == offer.getQuantitySold())
    {
        // no change
        return;
    }
    if (state == GrandExchangeOfferState.CANCELLED_BUY || state == GrandExchangeOfferState.CANCELLED_SELL)
    {
        // Offer was cancelled: report final quantity/spent figures and persist.
        GrandExchangeTrade grandExchangeTrade = new GrandExchangeTrade();
        grandExchangeTrade.setBuy(state == GrandExchangeOfferState.CANCELLED_BUY);
        grandExchangeTrade.setCancel(true);
        grandExchangeTrade.setItemId(offer.getItemId());
        grandExchangeTrade.setQty(offer.getQuantitySold());
        grandExchangeTrade.setTotal(offer.getTotalQuantity());
        grandExchangeTrade.setSpent(offer.getSpent());
        grandExchangeTrade.setOffer(offer.getPrice());
        grandExchangeTrade.setSlot(slot);
        grandExchangeTrade.setWorldType(getGeWorldType());
        grandExchangeTrade.setLogin(login);
        grandExchangeTrade.setSeq(tradeSeq++);
        grandExchangeTrade.setResetTime(getLimitResetTime(offer.getItemId()));
        log.debug("Submitting cancelled: {}", grandExchangeTrade);
        grandExchangeClient.submit(grandExchangeTrade);
        saveTrade(grandExchangeTrade);
        return;
    }
    // Incremental fill: compute deltas against the saved snapshot.
    final int qty = offer.getQuantitySold() - savedOffer.getQuantitySold();
    final int dspent = offer.getSpent() - savedOffer.getSpent();
    // Non-positive deltas indicate nothing new was bought/sold (or a rollback).
    if (qty <= 0 || dspent <= 0)
    {
        return;
    }
    GrandExchangeTrade grandExchangeTrade = new GrandExchangeTrade();
    grandExchangeTrade.setBuy(state == GrandExchangeOfferState.BUYING);
    grandExchangeTrade.setItemId(offer.getItemId());
    grandExchangeTrade.setQty(offer.getQuantitySold());
    grandExchangeTrade.setDqty(qty);
    grandExchangeTrade.setTotal(offer.getTotalQuantity());
    grandExchangeTrade.setDspent(dspent);
    grandExchangeTrade.setSpent(offer.getSpent());
    grandExchangeTrade.setOffer(offer.getPrice());
    grandExchangeTrade.setSlot(slot);
    grandExchangeTrade.setWorldType(getGeWorldType());
    grandExchangeTrade.setLogin(login);
    grandExchangeTrade.setSeq(tradeSeq++);
    grandExchangeTrade.setResetTime(getLimitResetTime(offer.getItemId()));
    log.debug("Submitting trade: {}", grandExchangeTrade);
    grandExchangeClient.submit(grandExchangeTrade);
    saveTrade(grandExchangeTrade);
}
/**
 * An offer identical to the saved snapshot (same state and quantity sold)
 * must not be re-submitted to the GE tracker.
 */
@Test
public void testDuplicateTrade()
{
    // Saved snapshot for slot 0.
    SavedOffer savedOffer = new SavedOffer();
    savedOffer.setItemId(ItemID.ABYSSAL_WHIP);
    savedOffer.setQuantitySold(1);
    savedOffer.setTotalQuantity(10);
    savedOffer.setPrice(1000);
    savedOffer.setSpent(25);
    savedOffer.setState(GrandExchangeOfferState.BUYING);
    when(configManager.getRSProfileConfiguration("geoffer", "0")).thenReturn(gson.toJson(savedOffer));
    // Live offer mirrors the snapshot exactly, so the "no change" branch applies.
    GrandExchangeOffer grandExchangeOffer = mock(GrandExchangeOffer.class);
    when(grandExchangeOffer.getQuantitySold()).thenReturn(1);
    when(grandExchangeOffer.getItemId()).thenReturn(ItemID.ABYSSAL_WHIP);
    when(grandExchangeOffer.getTotalQuantity()).thenReturn(10);
    when(grandExchangeOffer.getPrice()).thenReturn(1000);
    // getSpent() is never reached on the "no change" path, hence lenient().
    lenient().when(grandExchangeOffer.getSpent()).thenReturn(25);
    when(grandExchangeOffer.getState()).thenReturn(GrandExchangeOfferState.BUYING);
    grandExchangePlugin.submitTrade(0, grandExchangeOffer);
    verify(grandExchangeClient, never()).submit(any(GrandExchangeTrade.class));
}
/**
 * Records the given value as this mapping value's error value and marks the
 * status as {@code FAILED_WITH_ERROR}.
 *
 * @param errorValue the value that caused the failure; may be {@code null}
 */
public void setErrorValue(Object errorValue) {
    // The two assignments are independent; mark the failure state first,
    // then keep the offending value for later inspection.
    this.status = FactMappingValueStatus.FAILED_WITH_ERROR;
    this.errorValue = errorValue;
}
/**
 * setErrorValue must flip the status to FAILED_WITH_ERROR, store the error
 * value, and leave the exception message and collection path untouched.
 */
@Test
public void setErrorValue() {
    value.setErrorValue(VALUE);
    assertThat(value.getStatus()).isEqualTo(FactMappingValueStatus.FAILED_WITH_ERROR);
    assertThat(value.getExceptionMessage()).isNull();
    assertThat(value.getCollectionPathToValue()).isNull();
    assertThat(value.getErrorValue()).isEqualTo(VALUE);
}
/**
 * Begins an approximate-equality assertion: the chained {@code of(expected)}
 * call passes when the actual int is within {@code tolerance} of it.
 *
 * @param tolerance maximum allowed absolute difference; validated by
 *                  {@code checkTolerance} when {@code of()} runs
 */
public TolerantIntegerComparison isWithin(int tolerance) {
    return new TolerantIntegerComparison() {
        @Override
        public void of(int expected) {
            Integer actual = IntegerSubject.this.actual;
            // Inputs are validated lazily, at of() time, so the error messages
            // can report both the tolerance and the expected value.
            checkNotNull(
                actual, "actual value cannot be null. tolerance=%s expected=%s", tolerance, expected);
            checkTolerance(tolerance);
            if (!equalWithinTolerance(actual, expected, tolerance)) {
                failWithoutActual(
                    fact("expected", Integer.toString(expected)),
                    butWas(),
                    fact("outside tolerance", Integer.toString(tolerance)));
            }
        }
    };
}
/**
 * Exercises isWithin().of() at exact matches, tolerance boundaries, and the
 * int extremes, including cases that would pass if the difference were
 * computed with overflowing int arithmetic.
 */
@Test
public void isWithinOf() {
    // Exact and within-tolerance matches succeed.
    assertThat(20000).isWithin(0).of(20000);
    assertThat(20000).isWithin(1).of(20000);
    assertThat(20000).isWithin(10000).of(20000);
    assertThat(20000).isWithin(10000).of(30000);
    // Boundary behaviour near the int extremes.
    assertThat(Integer.MIN_VALUE).isWithin(1).of(Integer.MIN_VALUE + 1);
    assertThat(Integer.MAX_VALUE).isWithin(1).of(Integer.MAX_VALUE - 1);
    assertThat(Integer.MAX_VALUE / 2).isWithin(Integer.MAX_VALUE).of(-Integer.MAX_VALUE / 2);
    assertThat(-Integer.MAX_VALUE / 2).isWithin(Integer.MAX_VALUE).of(Integer.MAX_VALUE / 2);
    // Just-outside-tolerance cases must fail.
    assertThatIsWithinFails(20000, 9999, 30000);
    assertThatIsWithinFails(20000, 10000, 30001);
    assertThatIsWithinFails(Integer.MIN_VALUE, 0, Integer.MAX_VALUE);
    assertThatIsWithinFails(Integer.MAX_VALUE, 0, Integer.MIN_VALUE);
    assertThatIsWithinFails(Integer.MIN_VALUE, 1, Integer.MIN_VALUE + 2);
    assertThatIsWithinFails(Integer.MAX_VALUE, 1, Integer.MAX_VALUE - 2);
    // Don't fall for rollover
    assertThatIsWithinFails(Integer.MIN_VALUE, 1, Integer.MAX_VALUE);
    assertThatIsWithinFails(Integer.MAX_VALUE, 1, Integer.MIN_VALUE);
}
/** Returns the status captured for this response. */
@Override
public Status status() {
    return this.status;
}
/** status() must return the exact Status instance the response was built with. */
@Test
void status() {
    assertThat(response.status()).isSameAs(status);
}
/**
 * Returns the physical location(s) of the given file. For this UFS every
 * file lives on the local worker, so the single location reported is this
 * worker's RPC connect host.
 *
 * @param path the file path (unused: all data is local regardless of path)
 * @return a mutable single-element list holding the worker RPC connect host
 * @throws IOException declared by the interface contract
 */
@Override
public List<String> getFileLocations(String path) throws IOException {
    String connectHost = NetworkAddressUtils.getConnectHost(ServiceType.WORKER_RPC, mUfsConf);
    List<String> locations = new ArrayList<>(1);
    locations.add(connectHost);
    return locations;
}
/**
 * A file written to the local UFS must report exactly one location: the
 * local host name.
 */
@Test
public void getFileLocations() throws IOException {
    // Write a file into the local UFS root.
    byte[] bytes = getBytes();
    String filepath = PathUtils.concatPath(mLocalUfsRoot, getUniqueFileName());
    OutputStream os = mLocalUfs.create(filepath);
    os.write(bytes);
    os.close();
    // The single reported location is this host.
    List<String> fileLocations = mLocalUfs.getFileLocations(filepath);
    assertEquals(1, fileLocations.size());
    assertEquals(NetworkAddressUtils.getLocalHostName(
        (int) CONF.getMs(PropertyKey.NETWORK_HOST_RESOLUTION_TIMEOUT_MS)), fileLocations.get(0));
}
/**
 * Updates the state of the instance identified by {@code id} (and its related
 * compute node instances) to the given status.
 *
 * @param id     instance id whose state should change
 * @param status textual instance state; unknown values are silently ignored
 */
public void updateStatus(final String id, final String status) {
    // An unrecognised status string yields an empty Optional and is a no-op.
    InstanceState.get(status).ifPresent(state -> {
        if (instance.getMetaData().getId().equals(id)) {
            instance.switchState(state);
        }
        updateRelatedComputeNodeInstancesStatus(id, state);
    });
}
/**
 * updateStatus must switch the owning instance's state when the id matches,
 * in both directions (OK -> CIRCUIT_BREAK -> OK).
 */
@Test
void assertUpdateComputeNodeState() {
    // Given a context whose instance starts in state OK.
    InstanceMetaData instanceMetaData = mock(InstanceMetaData.class);
    when(instanceMetaData.getId()).thenReturn("foo_instance_id");
    ComputeNodeInstanceContext context = new ComputeNodeInstanceContext(
        new ComputeNodeInstance(instanceMetaData), mock(WorkerIdGenerator.class), modeConfig, lockContext, eventBusContext);
    InstanceState actual = context.getInstance().getState().getCurrentState();
    assertThat(actual, is(InstanceState.OK));
    // Switching to CIRCUIT_BREAK and back updates the current state each time.
    context.updateStatus(instanceMetaData.getId(), InstanceState.CIRCUIT_BREAK.name());
    actual = context.getInstance().getState().getCurrentState();
    assertThat(actual, is(InstanceState.CIRCUIT_BREAK));
    context.updateStatus(instanceMetaData.getId(), InstanceState.OK.name());
    actual = context.getInstance().getState().getCurrentState();
    assertThat(actual, is(InstanceState.OK));
}
/**
 * Parses a single migration SQL command, substituting the given variables
 * into it.
 * <p>
 * The text is parsed twice: once to perform variable substitution on the
 * original statement, and once more on the substituted text to classify it
 * and (for non-generic statements) build its AST.
 *
 * @param sql       the raw SQL text of one statement
 * @param variables session variables to substitute
 * @return the substituted command, with a parsed AST when the statement is
 *         not of the generic STATEMENT type
 * @throws MigrationException if the original statement fails to parse
 */
public static ParsedCommand parse(
    // CHECKSTYLE_RULES.ON: CyclomaticComplexity
    final String sql, final Map<String, String> variables) {
    validateSupportedStatementType(sql);
    final String substituted;
    try {
        substituted = VariableSubstitutor.substitute(KSQL_PARSER.parse(sql).get(0), variables);
    } catch (ParseFailedException e) {
        throw new MigrationException(String.format(
            "Failed to parse the statement. Statement: %s. Reason: %s", sql, e.getMessage()));
    }
    // Re-parse the substituted text to classify it.
    final SqlBaseParser.SingleStatementContext statementContext = KSQL_PARSER.parse(substituted)
        .get(0).getStatement();
    final boolean isStatement = StatementType.get(statementContext.statement().getClass()) == StatementType.STATEMENT;
    // Generic statements carry no AST; everything else is fully built.
    return new ParsedCommand(substituted,
        isStatement ? Optional.empty() : Optional.of(new AstBuilder(TypeRegistry.EMPTY)
            .buildStatement(statementContext)));
}
/**
 * A CREATE TYPE command parses into exactly one command whose AST is present
 * (getStatement() holds a node, so isPresent() on the plain-statement view
 * is false) and whose text is preserved verbatim.
 */
@Test
public void shouldParseCreateTypeStatement() {
    // When:
    List<CommandParser.ParsedCommand> commands = parse("create type address as struct<street varchar, number int, city string, zip varchar>;");
    // Then:
    assertThat(commands.size(), is(1));
    assertThat(commands.get(0).getStatement().isPresent(), is (false));
    assertThat(commands.get(0).getCommand(), is("create type address as struct<street varchar, number int, city string, zip varchar>;"));
}
/**
 * Validates a method invocation's arguments with JSR-303 bean validation.
 * <p>
 * Validation groups are assembled in a fixed order: Default first, then the
 * service interface itself, then a per-method marker interface (if one was
 * generated) and any groups declared via {@code @MethodValidated}. Both the
 * synthesized parameter bean (if any) and each individual argument are
 * validated against those groups.
 *
 * @param methodName     name of the invoked method
 * @param parameterTypes the method's parameter types, used to resolve it
 * @param arguments      the actual invocation arguments
 * @throws ConstraintViolationException if any constraint is violated
 * @throws Exception                    if the method cannot be resolved
 */
@Override
public void validate(String methodName, Class<?>[] parameterTypes, Object[] arguments) throws Exception {
    List<Class<?>> groups = new ArrayList<>();
    // A generated per-method marker interface acts as an extra validation group.
    Class<?> methodClass = methodClass(methodName);
    if (methodClass != null) {
        groups.add(methodClass);
    }
    Method method = clazz.getMethod(methodName, parameterTypes);
    Class<?>[] methodClasses;
    // Groups explicitly declared on the method via @MethodValidated.
    if (method.isAnnotationPresent(MethodValidated.class)) {
        methodClasses = method.getAnnotation(MethodValidated.class).value();
        groups.addAll(Arrays.asList(methodClasses));
    }
    // add into default group
    groups.add(0, Default.class);
    groups.add(1, clazz);
    // convert list to array
    Class<?>[] classGroups = groups.toArray(new Class[0]);
    Set<ConstraintViolation<?>> violations = new HashSet<>();
    // The parameter bean aggregates all arguments for cross-parameter constraints.
    Object parameterBean = getMethodParameterBean(clazz, method, arguments);
    if (parameterBean != null) {
        violations.addAll(validator.validate(parameterBean, classGroups));
    }
    for (Object arg : arguments) {
        validate(violations, arg, classGroups);
    }
    if (!violations.isEmpty()) {
        logger.info("Failed to validate service: " + clazz.getName() + ", method: " + methodName + ", cause: " + violations);
        throw new ConstraintViolationException("Failed to validate service: " + clazz.getName() + ", method: " + methodName + ", cause: " + violations, violations);
    }
}
/**
 * Validating an existing method with a constraint-satisfying argument must
 * complete without throwing.
 */
@Test
void testItWithExistMethod() throws Exception {
    URL url = URL.valueOf("test://test:11/org.apache.dubbo.validation.support.jvalidation.mock.JValidatorTestTarget");
    JValidator jValidator = new JValidator(url);
    // No exception expected: "anything" passes someMethod1's constraints.
    jValidator.validate("someMethod1", new Class<?>[] {String.class}, new Object[] {"anything"});
}
/**
 * Returns the snapshot value at the given quantile.
 *
 * @param quantile a value in [0, 1]
 * @return the value whose precomputed quantile is the largest one not
 *         exceeding {@code quantile}; 0 when the snapshot holds no values
 * @throws IllegalArgumentException if {@code quantile} is outside [0, 1] or NaN
 */
@Override
public double getValue(double quantile) {
    if (quantile < 0.0 || quantile > 1.0 || Double.isNaN( quantile )) {
        throw new IllegalArgumentException(quantile + " is not in [0..1]");
    }
    if (values.length == 0) {
        return 0.0;
    }
    int posx = Arrays.binarySearch(quantiles, quantile);
    // On a miss binarySearch returns (-(insertionPoint) - 1); recover the
    // index of the largest precomputed quantile strictly below the request.
    if (posx < 0) posx = ((-posx) - 1) - 1;
    // Clamp to the available value range.
    if (posx < 1) {
        return values[0];
    }
    if (posx >= values.length) {
        return values[values.length - 1];
    }
    return values[posx];
}
/** Quantiles above 1.0 must be rejected with IllegalArgumentException. */
@Test(expected = IllegalArgumentException.class)
public void disallowsQuantileOverOne() {
    snapshot.getValue( 1.5 );
}
/**
 * Wraps request processing with per-request platform state: enables DB
 * session caching, loads thread-local settings, then delegates to the
 * internal doFilter overload with the optional UserSessionInitializer.
 * The nested try/finally guarantees settings are unloaded before caching is
 * disabled, mirroring the setup order in reverse.
 *
 * @param servletRequest  the incoming request (must be an HttpServletRequest)
 * @param servletResponse the outgoing response (must be an HttpServletResponse)
 * @param chain           the remaining filter chain
 */
@Override
public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain chain) throws IOException, ServletException {
    HttpServletRequest request = (HttpServletRequest) servletRequest;
    HttpServletResponse response = (HttpServletResponse) servletResponse;
    // Components are resolved per-request from the platform container.
    DBSessions dbSessions = platform.getContainer().getComponentByType(DBSessions.class);
    ThreadLocalSettings settings = platform.getContainer().getComponentByType(ThreadLocalSettings.class);
    UserSessionInitializer userSessionInitializer = platform.getContainer().getOptionalComponentByType(UserSessionInitializer.class).orElse(null);
    LOG.trace("{} serves {}", Thread.currentThread(), request.getRequestURI());
    dbSessions.enableCaching();
    try {
        settings.load();
        try {
            doFilter(request, response, chain, userSessionInitializer);
        } finally {
            settings.unload();
        }
    } finally {
        // Always disable caching, even if the chain or settings handling throws.
        dbSessions.disableCaching();
    }
}
/**
 * When the downstream chain throws, the exception must propagate unchanged
 * and DB session caching must still be disabled (finally semantics).
 */
@Test
public void doFilter_disables_caching_in_DbSessions_even_if_chain_throws_exception() throws Exception {
    RuntimeException thrown = mockChainDoFilterError();
    try {
        underTest.doFilter(request, response, chain);
        fail("A RuntimeException should have been thrown");
    } catch (RuntimeException e) {
        // Same instance propagates, and cleanup still happened.
        assertThat(e).isSameAs(thrown);
        verify(dbSessions).disableCaching();
    }
}
/**
 * Deletes the file configuration with the given id.
 *
 * @param id the configuration id
 * @throws exception FILE_CONFIG_DELETE_FAIL_MASTER when the config is the
 *         master config, which must never be deleted
 */
@Override
public void deleteFileConfig(Long id) {
    // Verify the config exists before deleting.
    FileConfigDO config = validateFileConfigExists(id);
    if (Boolean.TRUE.equals(config.getMaster())) {
        // The master configuration cannot be deleted.
        throw exception(FILE_CONFIG_DELETE_FAIL_MASTER);
    }
    // Delete the record.
    fileConfigMapper.deleteById(id);
    // Evict the cached entry for this config.
    clearCache(id, null);
}
/** Deleting a non-existent config must raise FILE_CONFIG_NOT_EXISTS. */
@Test
public void testDeleteFileConfig_notExists() {
    // Prepare an id that matches no stored config.
    Long id = randomLongId();
    // Invoke and assert the service exception.
    assertServiceException(() -> fileConfigService.deleteFileConfig(id), FILE_CONFIG_NOT_EXISTS);
}
/**
 * Looks up a row in the fully pre-loaded cache.
 * <p>
 * Index-backed conditions first narrow the candidate set via a
 * SearchingContext; the survivors are then checked one by one against the
 * non-indexed conditions (currently only BETWEEN is supported there). The
 * first candidate satisfying every condition is returned.
 *
 * @param lookupMeta meta for the lookup row's values
 * @param lookupRow  the condition arguments, aligned with the key columns
 *                   (BETWEEN conditions consume two consecutive entries)
 * @return the first matching cached row, or null when nothing matches or an
 *         unsupported DB-side condition was encountered
 */
@Override
public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException {
    if ( stepData.hasDBCondition ) {
        // actually, there was no sense in executing SELECT from db in this case,
        // should be reported as improvement
        return null;
    }
    SearchingContext context = new SearchingContext();
    context.init( keys.length );
    for ( Index index : indexes ) {
        int column = index.getColumn();
        // IS (NOT) NULL operation does not require second argument
        // hence, lookupValue can be absent
        // basically, the index ignores both meta and value, so we can pass everything there
        Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null;
        index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue );
        if ( context.isEmpty() ) {
            // if nothing matches, break the search
            return null;
        }
    }
    // iterate through all elements survived after filtering stage
    // and find the first matching
    BitSet candidates = context.getCandidates();
    int candidate = candidates.nextSetBit( 0 );
    while ( candidate != -1 ) {
        Object[] dataKeys = keys[ candidate ];
        boolean matches = true;
        // lookupShift tracks the extra lookup entries consumed by BETWEEN conditions.
        int lookupShift = 0;
        for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) {
            int[] columnConditionPair = otherConditions[ i ];
            final int column = columnConditionPair[ 0 ];
            Object keyData = dataKeys[ column ];
            ValueMetaInterface keyMeta = keysMeta.getValueMeta( column );
            int lookupIndex = column + lookupShift;
            Object cmpData = lookupRow[ lookupIndex ];
            ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex );
            int condition = columnConditionPair[ 1 ];
            if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) {
                // BETWEEN is a special condition demanding two arguments
                // technically there are no obstacles to implement it,
                // as it is just a short form of: (a <= b) && (b <= c)
                // however, let it be so for now
                matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 );
                if ( matches ) {
                    lookupShift++;
                    lookupIndex++;
                    ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex );
                    Object cmpData2 = lookupRow[ lookupIndex ];
                    matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 );
                }
            } else {
                // if not BETWEEN, than it is LIKE (or some new operator)
                // for now, LIKE is not supported here
                matches = false;
                stepData.hasDBCondition = true;
            }
        }
        if ( matches ) {
            return data[ candidate ];
        } else {
            candidate = candidates.nextSetBit( candidate + 1 );
        }
    }
    return null;
}
/**
 * With a mixed condition set (=, IS NOT NULL, <=, IS NULL), the cache must
 * return the first row satisfying all conditions.
 */
@Test
public void lookup_Finds_FirstMatching() throws Exception {
    ReadAllCache cache = buildCache( "=,IS NOT NULL,<=,IS NULL" );
    Object[] found = cache.getRowFromCache( keysMeta.clone(), new Object[] { 1L, null, new Date( 1000000 ), null } );
    assertArrayEquals( "(keys[0] == 1) && (keys[2] < 1000000) --> row 3", data[ 3 ], found );
}
/** Returns the number of elements currently held. */
@Override
public int size() {
    return this.size;
}
/** A freshly constructed set must report size 0. */
@Test
public void sizeIsInitiallyZero() {
    assertEquals(0, set.size());
}
@PostConstruct public void init() { // blockRequestHandlerOptional has low priority blockRequestHandlerOptional.ifPresent(GatewayCallbackManager::setBlockHandler); initAppType(); initFallback(); }
/**
 * With fallback mode set to redirect, the registered block handler must
 * answer blocked requests with a response whose Location header is the
 * configured redirect target.
 */
@Test
public void testInitWithFallbackRedirect() {
    // Given redirect-mode fallback config pointing at /test.
    FallbackProperties fallbackProperties = mock(FallbackProperties.class);
    when(gatewayProperties.getFallback()).thenReturn(fallbackProperties);
    when(fallbackProperties.getMode()).thenReturn(ConfigConstants.FALLBACK_REDIRECT);
    when(fallbackProperties.getRedirect()).thenReturn("/test");
    config.init();
    // When the installed block handler handles a request.
    Mono<ServerResponse> responseMono = GatewayCallbackManager.getBlockHandler()
        .handleRequest(mock(ServerWebExchange.class), null);
    HttpHeaders headers = Objects.requireNonNull(responseMono.block()).headers();
    // Then it redirects to /test.
    List<String> location = headers.get("Location");
    Assert.assertNotNull(location);
    Assert.assertEquals("/test", location.get(0));
}
/**
 * Returns the stored next name-parts for {@code currentWord}, or an empty
 * iterable when none are known.
 * <p>
 * NOTE(review): maxResults and minWordUsage are ignored here — presumably
 * filtering happens elsewhere; confirm against the interface contract.
 * NOTE(review): containsKey+get performs two lookups; a single get with a
 * null check would suffice if map values are never null — verify.
 *
 * @param currentWord the word whose followers are requested
 * @param maxResults  unused by this implementation
 * @param minWordUsage unused by this implementation
 */
@Override
@NonNull
public Iterable<String> getNextWords(
    @NonNull String currentWord, int maxResults, int minWordUsage) {
    if (mNextNameParts.containsKey(currentWord)) {
        return Arrays.asList(mNextNameParts.get(currentWord));
    } else {
        return Collections.emptyList();
    }
}
/**
 * The dictionary must register a content observer on the contacts URI and,
 * after a simulated contact update, expose the new contact's next name part.
 */
@Test
public void testRegisterObserver() throws Exception {
    ShadowContentResolver shadowContentResolver = Shadows.shadowOf(getApplicationContext().getContentResolver());
    // Exactly one observer registered on the contacts URI.
    final Collection<ContentObserver> contentObservers = shadowContentResolver.getContentObservers(ContactsContract.Contacts.CONTENT_URI);
    Assert.assertEquals(1, contentObservers.size());
    // now, simulating contacts update
    mProvider.addRow(10, "Hagar Even-Danan", true, 10);
    TestRxSchedulers.drainAllTasks();
    // "Hagar" now yields exactly one next word: "Even-Danan".
    Iterator<String> nextWords = mDictionaryUnderTest.getNextWords("Hagar", 2, 1).iterator();
    Assert.assertTrue(nextWords.hasNext());
    Assert.assertEquals("Even-Danan", nextWords.next());
    Assert.assertFalse(nextWords.hasNext());
}
/**
 * Tells whether a reference of type {@code clazz} can be assigned a value of
 * type {@code cls}, i.e. {@code clazz} is the same as, or a supertype of,
 * {@code cls}.
 *
 * @param clazz the target (super) type; must not be null
 * @param cls   the candidate (sub) type; must not be null
 * @return true if {@code cls} is assignable to {@code clazz}
 * @throws NullPointerException if either argument is null
 */
public static boolean isAssignableFrom(Class<?> clazz, Class<?> cls) {
    // Check both arguments explicitly: the original only checked cls, so a
    // null clazz produced a message-less NPE from the isAssignableFrom call.
    Objects.requireNonNull(clazz, "clazz");
    Objects.requireNonNull(cls, "cls");
    return clazz.isAssignableFrom(cls);
}
/** Integer must be assignable to an Object reference. */
@Test
void testIsAssignableFrom() {
    assertTrue(ClassUtils.isAssignableFrom(Object.class, Integer.class));
}
/**
 * Computes and stores the average and maximum slope encoded values for a way
 * from its 3D point list.
 * <p>
 * average_slope is derived from the elevation difference between the first
 * and last point over the 2D distance, stored as a magnitude plus direction
 * bit. max_slope smooths pillar-node noise by averaging the two previous
 * elevations, and falls back to the tower-node slope for tunnels, bridges
 * and steps where pillar elevations cannot be trusted.
 *
 * @param edgeId        edge to write the encoded values for
 * @param edgeIntAccess backing storage for the encoded values
 * @param way           the OSM way, expected to carry a "point_list" tag
 * @param relationFlags unused here
 * @throws IllegalArgumentException if a computed slope is NaN
 */
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay way, IntsRef relationFlags) {
    PointList pointList = way.getTag("point_list", null);
    if (pointList != null) {
        if (pointList.isEmpty() || !pointList.is3D()) {
            // No elevation data: store explicit zeros.
            if (maxSlopeEnc != null)
                maxSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, 0);
            if (averageSlopeEnc != null)
                averageSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, 0);
            return;
        }

        // Calculate 2d distance, although pointList might be 3D.
        // This calculation is a bit expensive and edge_distance is available already, but this would be in 3D
        double distance2D = DistanceCalcEarth.calcDistance(pointList, false);
        if (distance2D < MIN_LENGTH) {
            if (averageSlopeEnc != null)
                // default is minimum of average_slope is negative so we have to explicitly set it to 0
                averageSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, 0);
            return;
        }

        // Slope between the two tower nodes (first and last point).
        double towerNodeSlope = calcSlope(pointList.getEle(pointList.size() - 1) - pointList.getEle(0), distance2D);
        if (Double.isNaN(towerNodeSlope))
            throw new IllegalArgumentException("average_slope was NaN for OSM way ID " + way.getId());

        if (averageSlopeEnc != null) {
            // Stored as magnitude; the reverse bit encodes downhill direction.
            if (towerNodeSlope >= 0)
                averageSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, Math.min(towerNodeSlope, averageSlopeEnc.getMaxStorableDecimal()));
            else
                averageSlopeEnc.setDecimal(true, edgeId, edgeIntAccess, Math.min(Math.abs(towerNodeSlope), averageSlopeEnc.getMaxStorableDecimal()));
        }

        if (maxSlopeEnc != null) {
            // max_slope is more error-prone as the shorter distances increase the fluctuation
            // so apply some more filtering (here we use the average elevation delta of the previous two points)
            double maxSlope = 0, prevDist = 0, prevLat = pointList.getLat(0), prevLon = pointList.getLon(0);
            for (int i = 1; i < pointList.size(); i++) {
                double pillarDistance2D = DistanceCalcEarth.DIST_EARTH.calcDist(prevLat, prevLon, pointList.getLat(i), pointList.getLon(i));
                if (i > 1 && prevDist > MIN_LENGTH) {
                    double averagedPrevEle = (pointList.getEle(i - 1) + pointList.getEle(i - 2)) / 2;
                    double tmpSlope = calcSlope(pointList.getEle(i) - averagedPrevEle, pillarDistance2D + prevDist / 2);
                    // Keep the slope of largest magnitude, preserving its sign.
                    maxSlope = Math.abs(tmpSlope) > Math.abs(maxSlope) ? tmpSlope : maxSlope;
                }
                prevDist = pillarDistance2D;
                prevLat = pointList.getLat(i);
                prevLon = pointList.getLon(i);
            }

            // For tunnels and bridges we cannot trust the pillar node elevation and ignore all changes.
            // Probably we should somehow recalculate even the average_slope after elevation interpolation? See EdgeElevationInterpolator
            if (way.hasTag("tunnel", "yes") || way.hasTag("bridge", "yes") || way.hasTag("highway", "steps"))
                maxSlope = towerNodeSlope;
            else
                maxSlope = Math.abs(towerNodeSlope) > Math.abs(maxSlope) ? towerNodeSlope : maxSlope;
            if (Double.isNaN(maxSlope))
                throw new IllegalArgumentException("max_slope was NaN for OSM way ID " + way.getId());

            // Clamp into the encoder's storable range before writing.
            double val = Math.max(maxSlope, maxSlopeEnc.getMinStorableDecimal());
            maxSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, Math.min(maxSlopeEnc.getMaxStorableDecimal(), val));
        }
    }
}
/**
 * max_slope must be computed from elevation deltas averaged over the two
 * previous points, yielding the expected signed/rounded value for a small
 * synthetic descent-then-ascent profile.
 */
@Test
public void testAveragingOfMaxSlope() {
    // point=49.977518%2C11.564285&point=49.979878%2C11.563663&profile=bike
    DecimalEncodedValue averageEnc = AverageSlope.create();
    DecimalEncodedValue maxEnc = MaxSlope.create();
    new EncodingManager.Builder().add(averageEnc).add(maxEnc).build();
    SlopeCalculator creator = new SlopeCalculator(maxEnc, averageEnc);
    ArrayEdgeIntAccess intAccess = new ArrayEdgeIntAccess(1);
    int edgeId = 0;
    ReaderWay way = new ReaderWay(1L);
    // 3D point list: elevations 10, 8, 8, 0, 7 over ~420m total.
    PointList pointList = new PointList(5, true);
    pointList.add(51.0, 12.0010, 10);
    pointList.add(51.0, 12.0014, 8); // 28m
    pointList.add(51.0, 12.0034, 8); // 140m
    pointList.add(51.0, 12.0054, 0); // 140m
    pointList.add(51.0, 12.0070, 7); // 112m
    way.setTag("point_list", pointList);
    creator.handleWayTags(edgeId, intAccess, way, IntsRef.EMPTY);
    // Forward direction is negative (net descent); reverse is its mirror.
    assertEquals(-Math.round(8.0 / 210 * 100), maxEnc.getDecimal(false, edgeId, intAccess), 1e-3);
    assertEquals(Math.round(8.0 / 210 * 100), maxEnc.getDecimal(true, edgeId, intAccess), 1e-3);
}
/**
 * Post-intercept hook: unless this task was cancelled, (lazily) initializes
 * it, then always records the check round-trip time and schedules the next
 * health check — even when initialization fails.
 */
@Override
public void afterIntercept() {
    if (cancelled) {
        // A cancelled task neither initializes nor re-schedules itself.
        return;
    }
    try {
        initIfNecessary();
    } finally {
        initCheckRT();
        HealthCheckReactor.scheduleCheck(this);
    }
}
/**
 * Smoke test: afterIntercept must complete without throwing.
 * NOTE(review): this test has no assertions or verifications — consider
 * verifying that the task is re-scheduled.
 */
@Test
void testAfterIntercept() {
    healthCheckTaskV2.afterIntercept();
}
/**
 * Stores a new OpenStack network and its augmented counterpart.
 *
 * @param osNet the network to create; must be non-null with a non-empty id
 * @throws IllegalArgumentException if the id is missing or a network with
 *         the same id already exists in the augmented map
 */
@Override
public void createNetwork(Network osNet) {
    checkNotNull(osNet, ERR_NULL_NETWORK);
    checkArgument(!Strings.isNullOrEmpty(osNet.getId()), ERR_NULL_NETWORK_ID);
    osNetworkStore.createNetwork(osNet);
    OpenstackNetwork finalAugmentedNetwork = buildAugmentedNetworkFromType(osNet);
    // compute() gives an atomic duplicate check on the augmented map;
    // a pre-existing entry for this id fails with ERR_DUPLICATE.
    augmentedNetworkMap.compute(osNet.getId(), (id, existing) -> {
        final String error = osNet.getId() + ERR_DUPLICATE;
        checkArgument(existing == null, error);
        return finalAugmentedNetwork;
    });
    log.info(String.format(MSG_NETWORK, deriveResourceName(osNet), MSG_CREATED));
}
/** Creating the same network twice must fail with IllegalArgumentException. */
@Test(expected = IllegalArgumentException.class)
public void testCreateDuplicateNetwork() {
    target.createNetwork(NETWORK);
    target.createNetwork(NETWORK);
}
/**
 * Caches metric short ids returned by the service.
 * <p>
 * Walks the request statuses and response states pairwise; for each short id
 * message in a state, the referenced CounterUpdate from the matching request
 * status is inserted into the cache under that short id.
 *
 * @param request the status report that was sent
 * @param reply   the service response, aligned index-by-index with the request
 * @throws IllegalArgumentException if request/response sizes differ, a state
 *         carries short ids without counter updates, or a metric index is
 *         out of range
 */
public void storeNewShortIds(
    final ReportWorkItemStatusRequest request, final ReportWorkItemStatusResponse reply) {
    checkArgument(
        request.getWorkItemStatuses() != null
            && reply.getWorkItemServiceStates() != null
            && request.getWorkItemStatuses().size() == reply.getWorkItemServiceStates().size(),
        "RequestWorkItemStatus request and response are unbalanced, status: %s, states: %s",
        request.getWorkItemStatuses(),
        reply.getWorkItemServiceStates());
    for (int i = 0; i < request.getWorkItemStatuses().size(); i++) {
        WorkItemServiceState state = reply.getWorkItemServiceStates().get(i);
        WorkItemStatus status = request.getWorkItemStatuses().get(i);
        if (state.getMetricShortId() == null) {
            // Nothing to cache for this work item.
            continue;
        }
        checkArgument(
            status.getCounterUpdates() != null,
            "Response has shortids but no corresponding CounterUpdate");
        for (MetricShortId shortIdMsg : state.getMetricShortId()) {
            // A missing metric index defaults to 0.
            int metricIndex = MoreObjects.firstNonNull(shortIdMsg.getMetricIndex(), 0);
            checkArgument(
                metricIndex < status.getCounterUpdates().size(),
                "Received aggregate index outside range of sent update %s >= %s",
                shortIdMsg.getMetricIndex(),
                status.getCounterUpdates().size());
            CounterUpdate update = status.getCounterUpdates().get(metricIndex);
            cache.insert(update, checkNotNull(shortIdMsg.getShortId(), "Shortid should be non-null"));
        }
    }
}
/**
 * A request with two statuses but a reply with one state must be rejected
 * as unbalanced.
 */
@Test
public void testValidateNumberStatusesAndStates() {
    CounterShortIdCache cache = new CounterShortIdCache();
    ReportWorkItemStatusRequest request = new ReportWorkItemStatusRequest();
    ReportWorkItemStatusResponse reply = new ReportWorkItemStatusResponse();
    // Two statuses vs. one service state.
    request.setWorkItemStatuses(
        createWorkStatusNameAndKind(new String[] {"counter"}, new String[] {"counter2"}));
    reply.setWorkItemServiceStates(
        createWorkServiceState(new MetricShortId[] {createMetricShortId(0, 1000L)}));
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("RequestWorkItemStatus request and response are unbalanced");
    cache.storeNewShortIds(request, reply);
}
/**
 * Builds result rows for SHOW ENCRYPT RULES: one row per column of each
 * encrypt table configuration, optionally restricted to the statement's
 * table name (case-insensitive). No table name means every table is shown.
 */
@Override
public Collection<LocalDataQueryResultRow> getRows(final ShowEncryptRulesStatement sqlStatement, final ContextManager contextManager) {
    return rule.getConfiguration().getTables().stream()
            .filter(tableConfig -> null == sqlStatement.getTableName() || tableConfig.getName().equalsIgnoreCase(sqlStatement.getTableName()))
            .map(tableConfig -> buildColumnData(tableConfig, rule.getConfiguration().getEncryptors()))
            .flatMap(Collection::stream)
            .collect(Collectors.toList());
}
/**
 * SHOW ENCRYPT RULES must yield a single row whose cells expose the table,
 * column, cipher/assisted/like columns, encryptor type, and empty strings
 * for unset properties.
 */
@Test
void assertGetRowData() throws SQLException {
    engine.executeQuery();
    Collection<LocalDataQueryResultRow> actual = engine.getRows();
    assertThat(actual.size(), is(1));
    Iterator<LocalDataQueryResultRow> iterator = actual.iterator();
    LocalDataQueryResultRow row = iterator.next();
    assertThat(row.getCell(1), is("t_encrypt"));
    assertThat(row.getCell(2), is("user_id"));
    assertThat(row.getCell(3), is("user_cipher"));
    assertThat(row.getCell(4), is("user_assisted"));
    assertThat(row.getCell(5), is("user_like"));
    assertThat(row.getCell(6), is("md5"));
    // Unconfigured encryptor properties surface as empty strings.
    assertThat(row.getCell(7), is(""));
    assertThat(row.getCell(8), is(""));
    assertThat(row.getCell(9), is(""));
    assertThat(row.getCell(10), is(""));
    assertThat(row.getCell(11), is(""));
}
/**
 * Traverses the graph depth-first in reverse, starting from its leaves.
 * Convenience overload delegating to {@code reverseDepthFirst(Collection)}.
 *
 * @param g the graph whose leaves seed the traversal
 * @return a stream of vertices in reverse depth-first order
 */
public static Stream<Vertex> reverseDepthFirst(Graph g) {
    return reverseDepthFirst(g.getLeaves());
}
/**
 * Reverse DFS from the graph's leaves must visit each of the 3 nodes exactly
 * once.
 */
@Test
public void testReverseDFSVertex() {
    DepthFirst.reverseDepthFirst(g.getLeaves()).forEach(v -> visitCount.incrementAndGet());
    // Fixed: assertEquals(message, expected, actual) — the original passed the
    // arguments as (message, actual, expected), which produces a misleading
    // failure message ("expected <N> but was <3>").
    assertEquals("It should visit each node once", 3, visitCount.get());
}
/**
 * Parses game/spam chat messages to keep charge counts of degradable items
 * in sync (dodgy necklace, binding necklace, ring of forging, amulets of
 * chemistry/bounty, chronicle, slayer bracelets, blood essence, bracelet of
 * clay) and to fire break notifications. At most one branch below fires per
 * message.
 *
 * @param event the chat message event
 */
@Subscribe
public void onChatMessage(ChatMessage event)
{
    if (event.getType() == ChatMessageType.GAMEMESSAGE || event.getType() == ChatMessageType.SPAM)
    {
        String message = Text.removeTags(event.getMessage());
        // One matcher per tracked message shape; matched lazily via find() below.
        Matcher dodgyCheckMatcher = DODGY_CHECK_PATTERN.matcher(message);
        Matcher dodgyProtectMatcher = DODGY_PROTECT_PATTERN.matcher(message);
        Matcher dodgyBreakMatcher = DODGY_BREAK_PATTERN.matcher(message);
        Matcher bindingNecklaceCheckMatcher = BINDING_CHECK_PATTERN.matcher(message);
        Matcher bindingNecklaceUsedMatcher = BINDING_USED_PATTERN.matcher(message);
        Matcher ringOfForgingCheckMatcher = RING_OF_FORGING_CHECK_PATTERN.matcher(message);
        Matcher amuletOfChemistryCheckMatcher = AMULET_OF_CHEMISTRY_CHECK_PATTERN.matcher(message);
        Matcher amuletOfChemistryUsedMatcher = AMULET_OF_CHEMISTRY_USED_PATTERN.matcher(message);
        Matcher amuletOfChemistryBreakMatcher = AMULET_OF_CHEMISTRY_BREAK_PATTERN.matcher(message);
        Matcher amuletOfBountyCheckMatcher = AMULET_OF_BOUNTY_CHECK_PATTERN.matcher(message);
        Matcher amuletOfBountyUsedMatcher = AMULET_OF_BOUNTY_USED_PATTERN.matcher(message);
        Matcher chronicleAddMatcher = CHRONICLE_ADD_PATTERN.matcher(message);
        Matcher chronicleUseAndCheckMatcher = CHRONICLE_USE_AND_CHECK_PATTERN.matcher(message);
        Matcher slaughterActivateMatcher = BRACELET_OF_SLAUGHTER_ACTIVATE_PATTERN.matcher(message);
        Matcher slaughterCheckMatcher = BRACELET_OF_SLAUGHTER_CHECK_PATTERN.matcher(message);
        Matcher expeditiousActivateMatcher = EXPEDITIOUS_BRACELET_ACTIVATE_PATTERN.matcher(message);
        Matcher expeditiousCheckMatcher = EXPEDITIOUS_BRACELET_CHECK_PATTERN.matcher(message);
        Matcher bloodEssenceCheckMatcher = BLOOD_ESSENCE_CHECK_PATTERN.matcher(message);
        Matcher bloodEssenceExtractMatcher = BLOOD_ESSENCE_EXTRACT_PATTERN.matcher(message);
        Matcher braceletOfClayCheckMatcher = BRACELET_OF_CLAY_CHECK_PATTERN.matcher(message);
        if (message.contains(RING_OF_RECOIL_BREAK_MESSAGE))
        {
            notifier.notify(config.recoilNotification(), "Your Ring of Recoil has shattered");
        }
        else if (dodgyBreakMatcher.find())
        {
            notifier.notify(config.dodgyNotification(), "Your dodgy necklace has crumbled to dust.");
            updateDodgyNecklaceCharges(MAX_DODGY_CHARGES);
        }
        else if (dodgyCheckMatcher.find())
        {
            updateDodgyNecklaceCharges(Integer.parseInt(dodgyCheckMatcher.group(1)));
        }
        else if (dodgyProtectMatcher.find())
        {
            updateDodgyNecklaceCharges(Integer.parseInt(dodgyProtectMatcher.group(1)));
        }
        else if (amuletOfChemistryCheckMatcher.find())
        {
            updateAmuletOfChemistryCharges(Integer.parseInt(amuletOfChemistryCheckMatcher.group(1)));
        }
        else if (amuletOfChemistryUsedMatcher.find())
        {
            // The message spells out "one" instead of the digit.
            final String match = amuletOfChemistryUsedMatcher.group(1);
            int charges = 1;
            if (!match.equals("one"))
            {
                charges = Integer.parseInt(match);
            }
            updateAmuletOfChemistryCharges(charges);
        }
        else if (amuletOfChemistryBreakMatcher.find())
        {
            notifier.notify(config.amuletOfChemistryNotification(), "Your amulet of chemistry has crumbled to dust.");
            updateAmuletOfChemistryCharges(MAX_AMULET_OF_CHEMISTRY_CHARGES);
        }
        else if (amuletOfBountyCheckMatcher.find())
        {
            updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyCheckMatcher.group(1)));
        }
        else if (amuletOfBountyUsedMatcher.find())
        {
            updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyUsedMatcher.group(1)));
        }
        else if (message.equals(AMULET_OF_BOUNTY_BREAK_TEXT))
        {
            updateAmuletOfBountyCharges(MAX_AMULET_OF_BOUNTY_CHARGES);
        }
        else if (message.contains(BINDING_BREAK_TEXT))
        {
            notifier.notify(config.bindingNotification(), BINDING_BREAK_TEXT);
            // This chat message triggers before the used message so add 1 to the max charges to ensure proper sync
            updateBindingNecklaceCharges(MAX_BINDING_CHARGES + 1);
        }
        else if (bindingNecklaceUsedMatcher.find())
        {
            // NOTE(review): equipment may be null here (other branches null-check
            // the container first) — potential NPE; verify.
            final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
            if (equipment.contains(ItemID.BINDING_NECKLACE))
            {
                updateBindingNecklaceCharges(getItemCharges(ItemChargeConfig.KEY_BINDING_NECKLACE) - 1);
            }
        }
        else if (bindingNecklaceCheckMatcher.find())
        {
            final String match = bindingNecklaceCheckMatcher.group(1);
            int charges = 1;
            if (!match.equals("one"))
            {
                charges = Integer.parseInt(match);
            }
            updateBindingNecklaceCharges(charges);
        }
        else if (ringOfForgingCheckMatcher.find())
        {
            final String match = ringOfForgingCheckMatcher.group(1);
            int charges = 1;
            if (!match.equals("one"))
            {
                charges = Integer.parseInt(match);
            }
            updateRingOfForgingCharges(charges);
        }
        else if (message.equals(RING_OF_FORGING_USED_TEXT) || message.equals(RING_OF_FORGING_VARROCK_PLATEBODY))
        {
            // NOTE(review): inventory may be null when the count() below runs — verify.
            final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY);
            final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
            // Determine if the player smelted with a Ring of Forging equipped.
            if (equipment == null)
            {
                return;
            }
            if (equipment.contains(ItemID.RING_OF_FORGING) && (message.equals(RING_OF_FORGING_USED_TEXT) || inventory.count(ItemID.IRON_ORE) > 1))
            {
                int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_RING_OF_FORGING) - 1, 0, MAX_RING_OF_FORGING_CHARGES);
                updateRingOfForgingCharges(charges);
            }
        }
        else if (message.equals(RING_OF_FORGING_BREAK_TEXT))
        {
            notifier.notify(config.ringOfForgingNotification(), "Your ring of forging has melted.");
            // This chat message triggers before the used message so add 1 to the max charges to ensure proper sync
            updateRingOfForgingCharges(MAX_RING_OF_FORGING_CHARGES + 1);
        }
        else if (chronicleAddMatcher.find())
        {
            final String match = chronicleAddMatcher.group(1);
            if (match.equals("one"))
            {
                setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1);
            }
            else
            {
                setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(match));
            }
        }
        else if (chronicleUseAndCheckMatcher.find())
        {
            setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(chronicleUseAndCheckMatcher.group(1)));
        }
        else if (message.equals(CHRONICLE_ONE_CHARGE_TEXT))
        {
            setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1);
        }
        else if (message.equals(CHRONICLE_EMPTY_TEXT) || message.equals(CHRONICLE_NO_CHARGES_TEXT))
        {
            setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 0);
        }
        else if (message.equals(CHRONICLE_FULL_TEXT))
        {
            setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1000);
        }
        else if (slaughterActivateMatcher.find())
        {
            // A null capture group means the bracelet broke (no count in the message).
            final String found = slaughterActivateMatcher.group(1);
            if (found == null)
            {
                updateBraceletOfSlaughterCharges(MAX_SLAYER_BRACELET_CHARGES);
                notifier.notify(config.slaughterNotification(), BRACELET_OF_SLAUGHTER_BREAK_TEXT);
            }
            else
            {
                updateBraceletOfSlaughterCharges(Integer.parseInt(found));
            }
        }
        else if (slaughterCheckMatcher.find())
        {
            updateBraceletOfSlaughterCharges(Integer.parseInt(slaughterCheckMatcher.group(1)));
        }
        else if (expeditiousActivateMatcher.find())
        {
            // Same break-vs-count convention as the slaughter bracelet above.
            final String found = expeditiousActivateMatcher.group(1);
            if (found == null)
            {
                updateExpeditiousBraceletCharges(MAX_SLAYER_BRACELET_CHARGES);
                notifier.notify(config.expeditiousNotification(), EXPEDITIOUS_BRACELET_BREAK_TEXT);
            }
            else
            {
                updateExpeditiousBraceletCharges(Integer.parseInt(found));
            }
        }
        else if (expeditiousCheckMatcher.find())
        {
            updateExpeditiousBraceletCharges(Integer.parseInt(expeditiousCheckMatcher.group(1)));
        }
        else if (bloodEssenceCheckMatcher.find())
        {
            updateBloodEssenceCharges(Integer.parseInt(bloodEssenceCheckMatcher.group(1)));
        }
        else if (bloodEssenceExtractMatcher.find())
        {
            updateBloodEssenceCharges(getItemCharges(ItemChargeConfig.KEY_BLOOD_ESSENCE) - Integer.parseInt(bloodEssenceExtractMatcher.group(1)));
        }
        else if (message.contains(BLOOD_ESSENCE_ACTIVATE_TEXT))
        {
            updateBloodEssenceCharges(MAX_BLOOD_ESSENCE_CHARGES);
        }
        else if (braceletOfClayCheckMatcher.find())
        {
            updateBraceletOfClayCharges(Integer.parseInt(braceletOfClayCheckMatcher.group(1)));
        }
        else if (message.equals(BRACELET_OF_CLAY_USE_TEXT) || message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN))
        {
            final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
            // Determine if the player mined with a Bracelet of Clay equipped.
            if (equipment != null && equipment.contains(ItemID.BRACELET_OF_CLAY))
            {
                final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY);
                // Charge is not used if only 1 inventory slot is available when mining in Prifddinas
                boolean ignore = inventory != null
                    && inventory.count() == 27
                    && message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN);
                if (!ignore)
                {
                    int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_BRACELET_OF_CLAY) - 1, 0, MAX_BRACELET_OF_CLAY_CHARGES);
                    updateBraceletOfClayCharges(charges);
                }
            }
        }
        else if (message.equals(BRACELET_OF_CLAY_BREAK_TEXT))
        {
            notifier.notify(config.braceletOfClayNotification(), "Your bracelet of clay has crumbled to dust");
            updateBraceletOfClayCharges(MAX_BRACELET_OF_CLAY_CHARGES);
        }
    }
}
@Test
public void testBraceletOfClayUsed() {
    // Seed the profile with 25 stored bracelet-of-clay charges.
    when(configManager.getRSProfileConfiguration(ItemChargeConfig.GROUP, ItemChargeConfig.KEY_BRACELET_OF_CLAY, Integer.class)).thenReturn(25);
    // Create equipment inventory with bracelet of clay
    ItemContainer equipmentItemContainer = mock(ItemContainer.class);
    when(client.getItemContainer(InventoryID.EQUIPMENT)).thenReturn(equipmentItemContainer);
    when(equipmentItemContainer.contains(ItemID.BRACELET_OF_CLAY)).thenReturn(true);
    when(equipmentItemContainer.getItems()).thenReturn(new Item[0]);
    // Run message
    ChatMessage chatMessage = new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "", USED_BRACELET_OF_CLAY, "", 0);
    itemChargePlugin.onChatMessage(chatMessage);
    // One charge consumed: 25 -> 24 must be written back to the profile config.
    verify(configManager).setRSProfileConfiguration(ItemChargeConfig.GROUP, ItemChargeConfig.KEY_BRACELET_OF_CLAY, 24);
}
/**
 * OAuth token endpoint: exchanges an authorization code for tokens.
 *
 * <p>Rejects any grant type other than {@code authorization_code}, authenticates the
 * client, then redeems the code. The response is marked non-cacheable.
 */
@POST
@Path("/token")
@Produces(MediaType.APPLICATION_JSON)
public Response token(
    @FormParam("code") String code,
    @FormParam("grant_type") String grantType,
    @FormParam("redirect_uri") String redirectUri,
    @FormParam("client_id") String clientId,
    @FormParam("client_assertion_type") String clientAssertionType,
    @FormParam("client_assertion") String clientAssertion) {

  // Only the authorization-code grant is supported.
  if (!"authorization_code".equals(grantType)) {
    return Response.status(Status.BAD_REQUEST).entity("bad 'grant_type': " + grantType).build();
  }

  // Authenticate the caller before redeeming the code on its behalf.
  var client = authenticator.authenticate(new Request(clientId, clientAssertionType, clientAssertion));

  var token = tokenIssuer.redeem(code, redirectUri, client.clientId());
  if (token == null) {
    return Response.status(Status.BAD_REQUEST).entity("invalid code").build();
  }

  // Token responses must not be stored by caches.
  var noStore = new CacheControl();
  noStore.setNoStore(true);

  var body =
      new TokenResponse(
          token.accessToken(), "Bearer", null, (int) token.expiresInSeconds(), token.idToken());
  return Response.ok(body).cacheControl(noStore).build();
}
@Test
void token() {
    // Wire the endpoint with mocked collaborators.
    var tokenIssuer = mock(TokenIssuer.class);
    var authenticator = mock(ClientAuthenticator.class);
    var sut = new TokenEndpoint(tokenIssuer, authenticator);
    var clientId = "myapp";
    var grantType = "authorization_code";
    var idToken = UUID.randomUUID().toString();
    var accessToken = UUID.randomUUID().toString();
    var expiresIn = 3600;
    var code = "6238e4504332468aa0c12e300787fded";
    var token = new Token(accessToken, idToken, expiresIn);
    // Redeeming this exact (code, redirectUri, clientId) triple yields the token.
    when(tokenIssuer.redeem(code, REDIRECT_URI.toString(), clientId)).thenReturn(token);
    when(authenticator.authenticate(any())).thenReturn(new Client(clientId));
    // when
    try (var res = sut.token(code, grantType, REDIRECT_URI.toString(), clientId, null, null)) {
        // then: 200 OK and the token fields are echoed back in the response body
        assertEquals(Status.OK.getStatusCode(), res.getStatus());
        var got = res.readEntity(TokenResponse.class);
        assertEquals(idToken, got.idToken());
        assertEquals(accessToken, got.accessToken());
        assertEquals(expiresIn, got.expiresIn());
    }
}
@Override
public final void getSize(@NonNull SizeReadyCallback cb) {
    // Delegate size resolution to the SizeDeterminer, which notifies cb with the
    // target's dimensions (immediately or later — determined by its own logic).
    sizeDeterminer.getSize(cb);
}
@Test public void testSizeCallbackIsCalledSynchronouslyIfViewSizeSet() { int dimens = 333; // activity.get().setContentView(view); view.layout(0, 0, dimens, dimens); target.getSize(cb); verify(cb).onSizeReady(eq(dimens), eq(dimens)); }
@Override public List<SnowflakeIdentifier> listSchemas(SnowflakeIdentifier scope) { StringBuilder baseQuery = new StringBuilder("SHOW SCHEMAS"); String[] queryParams = null; switch (scope.type()) { case ROOT: // account-level listing baseQuery.append(" IN ACCOUNT"); break; case DATABASE: // database-level listing baseQuery.append(" IN DATABASE IDENTIFIER(?)"); queryParams = new String[] {scope.toIdentifierString()}; break; default: throw new IllegalArgumentException( String.format("Unsupported scope type for listSchemas: %s", scope)); } final String finalQuery = baseQuery.toString(); final String[] finalQueryParams = queryParams; List<SnowflakeIdentifier> schemas; try { schemas = connectionPool.run( conn -> queryHarness.query( conn, finalQuery, SCHEMA_RESULT_SET_HANDLER, finalQueryParams)); } catch (SQLException e) { throw snowflakeExceptionToIcebergException( scope, e, String.format("Failed to list schemas for scope '%s'", scope)); } catch (InterruptedException e) { throw new UncheckedInterruptedException( e, "Interrupted while listing schemas for scope '%s'", scope); } schemas.forEach( schema -> Preconditions.checkState( schema.type() == SnowflakeIdentifier.Type.SCHEMA, "Expected SCHEMA, got identifier '%s' for scope '%s'", schema, scope)); return schemas; }
@SuppressWarnings("unchecked")
@Test
public void testListSchemasSQLExceptionAtRootLevel() throws SQLException, InterruptedException {
    // Make the connection pool fail with a generic (error code 0) SQLException.
    Exception injectedException =
        new SQLException(String.format("SQL exception with Error Code %d", 0), "2000", 0, null);
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
    // The client must wrap it in UncheckedSQLException, keep the original as cause,
    // and include the scope in the message.
    assertThatExceptionOfType(UncheckedSQLException.class)
        .isThrownBy(() -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofRoot()))
        .withMessageContaining("Failed to list schemas for scope 'ROOT: '''")
        .withCause(injectedException);
}
/**
 * Folds one input into the running minimum.
 *
 * <p>Null inputs are skipped. The first non-null input becomes the current value;
 * thereafter a candidate replaces it only when it compares strictly smaller.
 */
@Override
public void accumulate(Object value) {
    if (value == null) {
        // SQL aggregate semantics: NULL inputs do not participate.
        return;
    }
    final Object current = this.value;
    // Adopt the candidate when nothing has been seen yet, or it is smaller.
    if (current == null || compare(current, value) > 0) {
        this.value = value;
    }
}
@Test
public void test_serialization() {
    // Accumulate one value so the round-tripped state is non-trivial.
    MinSqlAggregation original = new MinSqlAggregation();
    original.accumulate(1);

    InternalSerializationService ss = new DefaultSerializationServiceBuilder().build();
    MinSqlAggregation serialized = ss.toObject(ss.toData(original));

    // usingRecursiveComparison() replaces the deprecated isEqualToComparingFieldByField().
    assertThat(serialized).usingRecursiveComparison().isEqualTo(original);
}
/**
 * Builds a Connect STRUCT schema whose fields mirror the given columns, in order.
 *
 * @param columns source columns; each contributes one field named after the column
 * @return the built schema, cast to {@link ConnectSchema}
 */
public static ConnectSchema columnsToConnectSchema(final List<? extends SimpleColumn> columns) {
  final SqlToConnectTypeConverter typeConverter = SchemaConverters.sqlToConnectConverter();
  final SchemaBuilder struct = SchemaBuilder.struct();
  // Field order follows column order, so downstream field indexes are stable.
  columns.forEach(
      column -> struct.field(column.name().text(), typeConverter.toConnectSchema(column.type())));
  return (ConnectSchema) struct.build();
}
@Test
public void shouldConvertColumnsToStructSchema() {
    // Given: a two-column logical schema (DOUBLE, BIGINT).
    final LogicalSchema schema = LogicalSchema.builder()
        .valueColumn(ColumnName.of("Vic"), DOUBLE)
        .valueColumn(ColumnName.of("Bob"), BIGINT)
        .build();
    // When:
    final ConnectSchema result = ConnectSchemas.columnsToConnectSchema(schema.value());
    // Then: a STRUCT with optional Connect types, fields in declaration order.
    assertThat(result.type(), is(Type.STRUCT));
    assertThat(result.fields(), contains(
        connectField("Vic", 0, Schema.OPTIONAL_FLOAT64_SCHEMA),
        connectField("Bob", 1, Schema.OPTIONAL_INT64_SCHEMA)
    ));
}
@Override public String getStatementName(StatementContext statementContext) { final ExtensionMethod extensionMethod = statementContext.getExtensionMethod(); if (extensionMethod == null) { return null; } final Class<?> clazz = extensionMethod.getType(); final Timed classTimed = clazz.getAnnotation(Timed.class); final Method method = extensionMethod.getMethod(); final Timed methodTimed = method.getAnnotation(Timed.class); // If the method is timed, figure out the name if (methodTimed != null) { String methodName = methodTimed.name().isEmpty() ? method.getName() : methodTimed.name(); if (methodTimed.absolute()) { return methodName; } else { // We need to check if the class has a custom timer name return classTimed == null || classTimed.name().isEmpty() ? MetricRegistry.name(clazz, methodName) : MetricRegistry.name(classTimed.name(), methodName); } } else if (classTimed != null) { // Maybe the class is timed? return classTimed.name().isEmpty() ? MetricRegistry.name(clazz, method.getName()) : MetricRegistry.name(classTimed.name(), method.getName()); } else { // No timers neither on the method or the class return null; } }
@Test
public void testNoAnnotations() throws Exception {
    // Dummy.show() carries no @Timed on either the method or the class,
    // so no timer name must be produced.
    when(ctx.getExtensionMethod()).thenReturn(new ExtensionMethod(Dummy.class, Dummy.class.getMethod("show")));
    assertThat(timedAnnotationNameStrategy.getStatementName(ctx)).isNull();
}
@ScalarFunction
@LiteralParameters({"x", "y"})
@SqlType("array(varchar(x))")
public static Block split(@SqlType("varchar(x)") Slice string, @SqlType("varchar(y)") Slice delimiter)
{
    // No explicit limit variant: string.length() + 1 is an upper bound on the
    // number of parts any split can produce, so this overload never truncates.
    return split(string, delimiter, string.length() + 1);
}
@Test
public void testSplit()
{
    // Basic splitting; the result's varchar length matches the input length.
    assertFunction("SPLIT('a.b.c', '.')", new ArrayType(createVarcharType(5)), ImmutableList.of("a", "b", "c"));
    assertFunction("SPLIT('ab', '.', 1)", new ArrayType(createVarcharType(2)), ImmutableList.of("ab"));
    assertFunction("SPLIT('a.b', '.', 1)", new ArrayType(createVarcharType(3)), ImmutableList.of("a.b"));
    assertFunction("SPLIT('a.b.c', '.')", new ArrayType(createVarcharType(5)), ImmutableList.of("a", "b", "c"));
    // Multi-character delimiter.
    assertFunction("SPLIT('a..b..c', '..')", new ArrayType(createVarcharType(7)), ImmutableList.of("a", "b", "c"));
    // Limit caps the number of parts; the last part keeps the remainder.
    assertFunction("SPLIT('a.b.c', '.', 2)", new ArrayType(createVarcharType(5)), ImmutableList.of("a", "b.c"));
    assertFunction("SPLIT('a.b.c', '.', 3)", new ArrayType(createVarcharType(5)), ImmutableList.of("a", "b", "c"));
    assertFunction("SPLIT('a.b.c', '.', 4)", new ArrayType(createVarcharType(5)), ImmutableList.of("a", "b", "c"));
    // Trailing delimiter yields a trailing empty part (when within the limit).
    assertFunction("SPLIT('a.b.c.', '.', 4)", new ArrayType(createVarcharType(6)), ImmutableList.of("a", "b", "c", ""));
    assertFunction("SPLIT('a.b.c.', '.', 3)", new ArrayType(createVarcharType(6)), ImmutableList.of("a", "b", "c."));
    // Adjacent delimiters produce empty parts.
    assertFunction("SPLIT('...', '.')", new ArrayType(createVarcharType(3)), ImmutableList.of("", "", "", ""));
    assertFunction("SPLIT('..a...a..', '.')", new ArrayType(createVarcharType(9)), ImmutableList.of("", "", "a", "", "", "a", "", ""));
    // Empty delimiter splits between every character.
    assertFunction("SPLIT('a.b.', '')", new ArrayType(createVarcharType(4)), ImmutableList.of("a", ".", "b", ".", ""));
    // Test SPLIT for non-ASCII
    assertFunction("SPLIT('\u4FE1\u5FF5,\u7231,\u5E0C\u671B', ',', 3)", new ArrayType(createVarcharType(7)), ImmutableList.of("\u4FE1\u5FF5", "\u7231", "\u5E0C\u671B"));
    assertFunction("SPLIT('\u8B49\u8BC1\u8A3C', '\u8BC1', 2)", new ArrayType(createVarcharType(3)), ImmutableList.of("\u8B49", "\u8A3C"));
    // Leading delimiter yields a leading empty part.
    assertFunction("SPLIT('.a.b.c', '.', 4)", new ArrayType(createVarcharType(6)), ImmutableList.of("", "a", "b", "c"));
    assertFunction("SPLIT('.a.b.c', '.', 3)", new ArrayType(createVarcharType(6)), ImmutableList.of("", "a", "b.c"));
    assertFunction("SPLIT('.a.b.c', '.', 2)", new ArrayType(createVarcharType(6)), ImmutableList.of("", "a.b.c"));
    assertFunction("SPLIT('a..b..c', '.', 3)", new ArrayType(createVarcharType(7)), ImmutableList.of("a", "", "b..c"));
    assertFunction("SPLIT('a.b..', '.', 3)", new ArrayType(createVarcharType(5)), ImmutableList.of("a", "b", "."));
    // Invalid limits.
    assertInvalidFunction("SPLIT('a.b.c', '.', 0)", "Limit must be positive");
    assertInvalidFunction("SPLIT('a.b.c', '.', -1)", "Limit must be positive");
    assertInvalidFunction("SPLIT('a.b.c', '.', 2147483648)", "Limit is too large");
}
public void add(int value) {
    // Widen via the unsigned conversion so negative ints map to their
    // unsigned-long interpretation before delegating to the long overload.
    add(Util.toUnsignedLong(value));
}
@Test
public void testBitmapValueAdd() {
    // test add int
    BitmapValue bitmapValue1 = new BitmapValue();
    for (int i = 0; i < 10; i++) {
        bitmapValue1.add(i);
    }
    // checkBitmap ranges appear to be half-open [start, end) — values 0..9 expected.
    checkBitmap(bitmapValue1, BitmapValue.SET_VALUE, 0, 10);
    // test add long
    BitmapValue bitmapValue2 = new BitmapValue();
    for (long i = Long.MAX_VALUE - 1; i > Long.MAX_VALUE - 11; i--) {
        bitmapValue2.add(i);
    }
    checkBitmap(bitmapValue2, BitmapValue.SET_VALUE, Long.MAX_VALUE - 10, Long.MAX_VALUE);
    // test add int and long
    for (int i = 0; i < 10; i++) {
        bitmapValue2.add(i);
    }
    checkBitmap(bitmapValue2, BitmapValue.SET_VALUE, 0, 10, Long.MAX_VALUE - 10, Long.MAX_VALUE);
    // test distinct — duplicate adds keep a single value, so still SINGLE_VALUE.
    BitmapValue bitmapValue = new BitmapValue();
    bitmapValue.add(1);
    bitmapValue.add(1);
    checkBitmap(bitmapValue, BitmapValue.SINGLE_VALUE, 1, 2);
}
/**
 * Starts an authorization flow: validates the request, begins the downstream
 * authentication with a fresh PKCE pair, persists the session, and returns the
 * identity-provider options for the user to choose from.
 */
@NonNull
public AuthorizationResponse auth(@NonNull AuthorizationRequest request) {
  validateAuthorizationRequest(request);

  // Fresh PKCE verifier/challenge pair for the downstream flow.
  var codeVerifier = generatePkceCodeVerifier();
  var challenge = calculateS256CodeChallenge(codeVerifier);

  var callback = baseUri.resolve("/auth/callback");
  var step1 =
      authenticationFlow.start(
          new AuthenticationFlow.Session(
              request.state(), request.nonce(), callback, challenge, federationConfig.scopes()));

  var identityProviders = step1.fetchIdpOptions();

  // Persist everything needed to resume the flow after the user picks an IdP.
  var sessionId = IdGenerator.generateID();
  sessionRepo.save(
      Session.create()
          .id(sessionId)
          .state(request.state())
          .nonce(request.nonce())
          .redirectUri(request.redirectUri())
          .clientId(request.clientId())
          .codeVerifier(codeVerifier)
          .selectSectoralIdpStep(step1)
          .build());

  return new AuthorizationResponse(identityProviders, sessionId);
}
@Test
void auth_badResponseType() {
    // Only "code" is a supported response type for this relying party.
    var config = new RelyingPartyConfig(List.of("code"), List.of(REDIRECT_URI));
    var sessionRepo = mock(SessionRepo.class);
    var sut = new AuthService(BASE_URI, config, null, sessionRepo, null, null);
    var scope = "openid";
    var state = UUID.randomUUID().toString();
    var nonce = UUID.randomUUID().toString();
    var responseType = "badtype";
    var clientId = "myapp";
    var req = new AuthorizationRequest(scope, state, responseType, clientId, REDIRECT_URI, nonce);
    var e = assertThrows(ValidationException.class, () -> sut.auth(req));
    // when
    // The redirect carries the OAuth error code and echoes the request state.
    assertEquals(
        "https://myapp.example.com?error=unsupported_response_type&error_description=error.unsupportedResponseType&state="
            + state,
        e.seeOther().toString());
}
@Override
public void transform(Message message, DataType fromType, DataType toType) {
    final Map<String, Object> headers = message.getHeaders();

    CloudEvent cloudEvent = CloudEvents.v1_0;

    // Keep any CloudEvent id/version already set upstream; otherwise derive them.
    // NOTE(review): version uses the CloudEvent constant while the rest use
    // CloudEvents — confirm both resolve to the same header name.
    headers.putIfAbsent(CloudEvents.CAMEL_CLOUD_EVENT_ID, message.getExchange().getExchangeId());
    headers.putIfAbsent(CloudEvent.CAMEL_CLOUD_EVENT_VERSION, cloudEvent.version());
    headers.put(CloudEvents.CAMEL_CLOUD_EVENT_TYPE, "org.apache.camel.event.aws.sqs.receiveMessage");

    // Source can only be derived when the SQS receipt handle header is present.
    // NOTE(review): the source is built from the receipt handle, not a queue name
    // — confirm this is intended.
    if (message.getHeaders().containsKey(Sqs2Constants.RECEIPT_HANDLE)) {
        headers.put(CloudEvents.CAMEL_CLOUD_EVENT_SOURCE,
                "aws.sqs.queue." + message.getHeader(Sqs2Constants.RECEIPT_HANDLE, String.class));
    }

    headers.put(CloudEvents.CAMEL_CLOUD_EVENT_SUBJECT, message.getHeader(Sqs2Constants.MESSAGE_ID, String.class));
    headers.put(CloudEvents.CAMEL_CLOUD_EVENT_TIME, cloudEvent.getEventTime(message.getExchange()));
}
@Test
void shouldMapToCloudEvent() throws Exception {
    // Incoming SQS message with a receipt handle and message id.
    Exchange exchange = new DefaultExchange(camelContext);
    exchange.getMessage().setHeader(Sqs2Constants.RECEIPT_HANDLE, "myQueue");
    exchange.getMessage().setHeader(Sqs2Constants.MESSAGE_ID, "1234");
    exchange.getMessage().setBody(new ByteArrayInputStream("Test1".getBytes(StandardCharsets.UTF_8)));
    transformer.transform(exchange.getMessage(), DataType.ANY, DataType.ANY);
    // Original SQS headers are preserved and CloudEvent headers are derived from them.
    Assertions.assertTrue(exchange.getMessage().hasHeaders());
    Assertions.assertTrue(exchange.getMessage().getHeaders().containsKey(Sqs2Constants.RECEIPT_HANDLE));
    assertEquals("org.apache.camel.event.aws.sqs.receiveMessage",
        exchange.getMessage().getHeader(CloudEvent.CAMEL_CLOUD_EVENT_TYPE));
    assertEquals("1234", exchange.getMessage().getHeader(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT));
    assertEquals("aws.sqs.queue.myQueue", exchange.getMessage().getHeader(CloudEvent.CAMEL_CLOUD_EVENT_SOURCE));
}
/**
 * Appends a new child element under every element matching {@code containerNodeName}.
 *
 * @param document          the document to modify
 * @param containerNodeName tag name of the elements to receive a new child
 * @param childNodeName     tag name of the created child elements
 * @param nodeContent       text content for each child; {@code null} leaves it empty
 * @return map from each container node to the child node created under it
 */
public static Map<Node, Node> createNodes(Document document, String containerNodeName, String childNodeName, String nodeContent) {
    return asStream(document.getElementsByTagName(containerNodeName))
            .collect(Collectors.toMap(
                    container -> container,
                    container -> {
                        final Node created = document.createElement(childNodeName);
                        container.appendChild(created);
                        // null content means "leave the new node empty"
                        if (nodeContent != null) {
                            created.setTextContent(nodeContent);
                        }
                        return created;
                    }));
}
@Test
public void createNodes() throws Exception {
    final String newNodeName = "NEW_NODE_NAME";
    final String newNodeValue = "NEW_NODE_VALUE";
    Document document = DOMParserUtil.getDocument(XML);
    // One MAIN_NODE container: one created child, with content.
    Map<Node, Node> retrieved = DOMParserUtil.createNodes(document, MAIN_NODE, newNodeName, newNodeValue);
    assertThat(retrieved).hasSize(1);
    Node created = (Node) retrieved.values().toArray()[0];
    assertThat(created).isNotNull();
    assertThat(created.getNodeName()).isEqualTo(newNodeName);
    assertThat(created.getTextContent()).isEqualTo(newNodeValue);
    // Null content must leave the created node's text empty.
    retrieved = DOMParserUtil.createNodes(document, MAIN_NODE, newNodeName, null);
    assertThat(retrieved).hasSize(1);
    created = (Node) retrieved.values().toArray()[0];
    assertThat(created).isNotNull();
    assertThat(created.getNodeName()).isEqualTo(newNodeName);
    assertThat(created.getTextContent()).isEmpty();
    // Two CHILD_NODE containers: a child is created under each.
    retrieved = DOMParserUtil.createNodes(document, CHILD_NODE, newNodeName, newNodeValue);
    assertThat(retrieved).hasSize(2);
    retrieved.forEach((key, createdNode) -> {
        assertThat(createdNode).isNotNull();
        assertThat(createdNode.getNodeName()).isEqualTo(newNodeName);
        assertThat(createdNode.getTextContent()).isEqualTo(newNodeValue);
    });
    retrieved = DOMParserUtil.createNodes(document, CHILD_NODE, newNodeName, null);
    assertThat(retrieved).hasSize(2);
    retrieved.forEach((key, createdNode) -> {
        assertThat(createdNode).isNotNull();
        assertThat(createdNode.getNodeName()).isEqualTo(newNodeName);
        assertThat(createdNode.getTextContent()).isEmpty();
    });
}
/**
 * Handles rule deletion by unsubscribing every rule in the batch.
 *
 * @param dataList the rules being removed
 */
@Override
protected void doDelete(final List<RuleData> dataList) {
    for (RuleData ruleData : dataList) {
        pluginDataSubscriber.unRuleSubscribe(ruleData);
    }
}
@Test
public void testDoDelete() {
    List<RuleData> ruleDataList = createFakeRuleDateObjects(3);
    ruleDataHandler.doDelete(ruleDataList);
    // verify(subscriber) yields the verification proxy; invoking unRuleSubscribe on it
    // for each element asserts every rule was unsubscribed.
    ruleDataList.forEach(verify(subscriber)::unRuleSubscribe);
}
@Override
public void remove(NamedNode master) {
    // Issue SENTINEL REMOVE <master-name> synchronously on the sentinel connection.
    connection.sync(RedisCommands.SENTINEL_REMOVE, master.getName());
}
@Test
public void testRemove() {
    Collection<RedisServer> masters = connection.masters();
    // Remove the first monitored master.
    // NOTE(review): no assertion follows — the test only proves the call does not
    // throw; consider asserting the master list shrinks afterwards.
    connection.remove(masters.iterator().next());
}
public static void addLiveSstFilesSizeMetric(final StreamsMetricsImpl streamsMetrics,
                                             final RocksDBMetricContext metricContext,
                                             final Gauge<BigInteger> valueProvider) {
    // Register the live-SST-files-size gauge under the shared RocksDB metric context.
    addMutableMetric(
        streamsMetrics,
        metricContext,
        valueProvider,
        LIVE_SST_FILES_SIZE,
        LIVE_SST_FILES_SIZE_DESCRIPTION
    );
}
@Test
public void shouldAddLiveSstFilesSizeMetric() {
    final String name = "live-sst-files-size";
    final String description = "Total size in bytes of all SST files that belong to the latest LSM tree";
    // Shared harness asserts a mutable gauge with this name/description is registered.
    runAndVerifyMutableMetric(
        name,
        description,
        () -> RocksDBMetrics.addLiveSstFilesSizeMetric(streamsMetrics, ROCKSDB_METRIC_CONTEXT, VALUE_PROVIDER)
    );
}
public TurnServerOptions getRoutingFor(
    @Nonnull final UUID aci,
    @Nonnull final Optional<InetAddress> clientAddress,
    final int instanceLimit
) {
  try {
    return getRoutingForInner(aci, clientAddress, instanceLimit);
  } catch(Exception e) {
    // Deliberately broad catch: routing must never fail the caller. Log and fall
    // back to the statically configured hostname and random URL set.
    logger.error("Failed to perform routing", e);
    return new TurnServerOptions(this.configTurnRouter.getHostname(), null, this.configTurnRouter.randomUrls());
  }
}
@Test
public void testRandomizes() throws UnknownHostException {
    // With randomization forced on, routing returns the randomized URL set
    // regardless of the client address.
    when(configTurnRouter.shouldRandomize())
        .thenReturn(true);
    assertThat(router().getRoutingFor(aci, Optional.of(InetAddress.getByName("0.0.0.1")), 10))
        .isEqualTo(new TurnServerOptions(
            TEST_HOSTNAME,
            null,
            TEST_URLS_WITH_HOSTS
        ));
}
/**
 * Validates every param-info entry, stopping at the first failure.
 *
 * @param paramInfos entries to check; {@code null} is treated as trivially valid
 * @return the first failing check's response, or a successful response
 */
@Override
public ParamCheckResponse checkParamInfoList(List<ParamInfo> paramInfos) {
    if (paramInfos == null) {
        ParamCheckResponse ok = new ParamCheckResponse();
        ok.setSuccess(true);
        return ok;
    }
    // Fail fast: the first entry whose format check fails decides the result.
    for (ParamInfo paramInfo : paramInfos) {
        ParamCheckResponse result = checkParamInfoFormat(paramInfo);
        if (!result.isSuccess()) {
            return result;
        }
    }
    ParamCheckResponse ok = new ParamCheckResponse();
    ok.setSuccess(true);
    return ok;
}
@Test
void testCheckParamInfoForCluster() {
    ParamInfo paramInfo = new ParamInfo();
    ArrayList<ParamInfo> paramInfos = new ArrayList<>();
    paramInfos.add(paramInfo);
    // Max Length — 65 chars exceeds the 64-char limit.
    String cluster = buildStringLength(65);
    paramInfo.setCluster(cluster);
    ParamCheckResponse actual = paramChecker.checkParamInfoList(paramInfos);
    assertFalse(actual.isSuccess());
    assertEquals("Param 'cluster' is illegal, the param length should not exceed 64.", actual.getMessage());
    // Pattern — characters outside the allowed set are rejected.
    paramInfo.setCluster("@hsbfkj$@@!#khdkad啊@@");
    actual = paramChecker.checkParamInfoList(paramInfos);
    assertFalse(actual.isSuccess());
    assertEquals("Param 'cluster' is illegal, illegal characters should not appear in the param.", actual.getMessage());
    // Success — digits, letters, hyphen and underscore are allowed.
    paramInfo.setCluster("0-9a-zA-Z-_");
    actual = paramChecker.checkParamInfoList(paramInfos);
    assertTrue(actual.isSuccess());
}
@Override
@GetMapping("/presigned-url")
@PreAuthorize("isAuthenticated()")
public ResponseEntity<?> getPresignedUrl(@Validated PresignedUrlDto.Req request, @AuthenticationPrincipal SecurityUserDetails user) {
    // Issue a presigned upload URL scoped to the authenticated user's id.
    return ResponseEntity.ok(storageUseCase.getPresignedUrl(user.getUserId(), request));
}
@Test
@WithSecurityMockUser
@DisplayName("Type이 CHAT이고, ChatroomId가 NULL일 때 400 응답을 반환한다.")
void getPresignedUrlWithNullChatroomId() throws Exception {
    // given: CHAT uploads require a chatroom id; a null id triggers a StorageException.
    PresignedUrlDto.Req request = new PresignedUrlDto.Req("CHAT", "jpg", null);
    given(storageUseCase.getPresignedUrl(1L, request)).willThrow(new StorageException(StorageErrorCode.MISSING_REQUIRED_PARAMETER));
    // when
    ResultActions resultActions = getPresignedUrlRequest(request);
    // then: the missing parameter surfaces as HTTP 400.
    resultActions.andExpect(status().isBadRequest());
}
public StaticDirectory(List<Invoker<T>> invokers) {
    // Convenience constructor: delegates with a null URL and null router chain.
    this(null, invokers, null);
}
@Test
void testStaticDirectory() {
    // Condition router: match consumers whose host equals the local host.
    StateRouter router = new ConditionStateRouterFactory()
        .getRouter(String.class, getRouteUrl(" => " + " host = " + NetUtils.getLocalHost()));
    List<StateRouter> routers = new ArrayList<StateRouter>();
    routers.add(router);
    // Three invokers; only the two bound to the local host should survive routing.
    List<Invoker<String>> originInvokers = new ArrayList<Invoker<String>>();
    Invoker<String> invoker1 = new MockInvoker<String>(URL.valueOf("dubbo://10.20.3.3:20880/com.foo.BarService"), true);
    Invoker<String> invoker2 = new MockInvoker<String>(
        URL.valueOf("dubbo://" + NetUtils.getLocalHost() + ":20880/com.foo.BarService"), true);
    Invoker<String> invoker3 = new MockInvoker<String>(
        URL.valueOf("dubbo://" + NetUtils.getLocalHost() + ":20880/com.foo.BarService"), true);
    originInvokers.add(invoker1);
    originInvokers.add(invoker2);
    originInvokers.add(invoker3);
    BitList<Invoker<String>> invokers = new BitList<>(originInvokers);
    List<Invoker<String>> filteredInvokers = router.route(
        invokers.clone(),
        URL.valueOf("consumer://" + NetUtils.getLocalHost() + "/com.foo.BarService"),
        new RpcInvocation(),
        false,
        new Holder<>());
    ApplicationModel.defaultModel().getBeanFactory().registerBean(MetricsDispatcher.class);
    // A directory over the routed invokers is available and lists them.
    StaticDirectory<String> staticDirectory = new StaticDirectory<>(filteredInvokers);
    boolean isAvailable = staticDirectory.isAvailable();
    Assertions.assertTrue(isAvailable);
    List<Invoker<String>> newInvokers = staticDirectory.list(new MockDirInvocation());
    Assertions.assertTrue(newInvokers.size() > 0);
    // After destroy the directory must hold no invokers at all.
    staticDirectory.destroy();
    Assertions.assertEquals(0, staticDirectory.getInvokers().size());
    Assertions.assertEquals(0, staticDirectory.getValidInvokers().size());
}
@Override
public <T extends State> T state(StateNamespace namespace, StateTag<T> address) {
    // Resolve state with a null StateContext (no side-input or window access needed).
    return workItemState.get(namespace, address, StateContexts.nullContext());
}
@Test
@SuppressWarnings("ArraysAsListPrimitiveArray")
public void testCombiningAddBeforeRead() throws Exception {
    GroupingState<Integer, Integer> value = underTest.state(NAMESPACE, COMBINING_ADDR);

    // Back the state with a controllable future so the read resolves on demand.
    SettableFuture<Iterable<int[]>> future = SettableFuture.create();
    when(mockReader.bagFuture(eq(COMBINING_KEY), eq(STATE_FAMILY), Mockito.<Coder<int[]>>any()))
        .thenReturn(future);
    value.readLater();

    // Local adds (5, 6) are merged with persisted accumulators (8, 10): sum = 29.
    value.add(5);
    value.add(6);
    waitAndSet(future, Arrays.asList(new int[] {8}, new int[] {10}), 200);
    assertThat(value.read(), Matchers.equalTo(29));

    // That get "compressed" the combiner. So, the underlying future should change:
    future.set(Collections.singletonList(new int[] {29}));

    // A further add on top of the compressed accumulator: 29 + 2 = 31.
    value.add(2);
    assertThat(value.read(), Matchers.equalTo(31));
}
/**
 * Resolves a property name to its config item by prefix.
 *
 * <p>Request-scoped KSQL properties win, then plain KSQL properties (excluding the
 * streams-prefixed ones), and everything else is treated as a Streams config.
 */
@Override
public Optional<ConfigItem> resolve(final String propertyName, final boolean strict) {
  if (propertyName.startsWith(KSQL_REQUEST_CONFIG_PROPERTY_PREFIX)) {
    return resolveRequestConfig(propertyName);
  }

  // KSQL-prefixed, but not streams-prefixed: a plain KSQL config.
  final boolean ksqlProperty =
      propertyName.startsWith(KSQL_CONFIG_PROPERTY_PREFIX)
          && !propertyName.startsWith(KSQL_STREAMS_PREFIX);
  if (ksqlProperty) {
    return resolveKsqlConfig(propertyName);
  }

  return resolveStreamsConfig(propertyName, strict);
}
@Test
public void shouldReturnUnresolvedForOtherConfigIfNotStrict() {
    // Non-strict mode: an unknown (non-ksql, non-streams) property is passed
    // through as an unresolved item rather than rejected.
    assertThat(resolver.resolve("confluent.monitoring.interceptor.topic", false),
        is(unresolvedItem("confluent.monitoring.interceptor.topic")));
}
@Override
public Flux<BooleanResponse<RenameCommand>> rename(Publisher<RenameCommand> commands) {
    return execute(commands, command -> {

        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getNewName(), "New name must not be null!");

        byte[] keyBuf = toByteArray(command.getKey());
        byte[] newKeyBuf = toByteArray(command.getNewName());

        // Same cluster slot: a plain RENAME works, so defer to the base implementation.
        if (executorService.getConnectionManager().calcSlot(keyBuf) == executorService.getConnectionManager().calcSlot(newKeyBuf)) {
            return super.rename(commands);
        }

        // Cross-slot rename: DUMP the source, pair it with its remaining TTL
        // (missing/negative TTL becomes 0 = no expiry), RESTORE under the new key,
        // then delete the source.
        return read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.DUMP, keyBuf)
                .filter(Objects::nonNull)
                .zipWith(
                        Mono.defer(() -> pTtl(command.getKey())
                                .filter(Objects::nonNull)
                                .map(ttl -> Math.max(0, ttl))
                                .switchIfEmpty(Mono.just(0L))
                        )
                )
                .flatMap(valueAndTtl -> {
                    return write(newKeyBuf, StringCodec.INSTANCE, RedisCommands.RESTORE, newKeyBuf, valueAndTtl.getT2(), valueAndTtl.getT1());
                })
                .thenReturn(new BooleanResponse<>(command, true))
                // NOTE(review): del(...) returns a publisher that doOnSuccess never
                // subscribes to — confirm the source key is actually deleted here.
                .doOnSuccess((ignored) -> del(command.getKey()));
    });
}
@Test
public void testRename_keyNotExist() {
    Integer originalSlot = getSlotForKey(originalKey);
    newKey = getNewKeyForSlot(new String(originalKey.array()), getTargetSlot(originalSlot));

    if (sameSlot) {
        // This is a quirk of the implementation - since same-slot renames use the non-cluster version,
        // the result is a Redis error. This behavior matches other spring-data-redis implementations
        assertThatThrownBy(() -> connection.keyCommands().rename(originalKey, newKey).block())
            .isInstanceOf(RedisSystemException.class);
    } else {
        // Cross-slot path reports success even for a missing source key…
        Boolean response = connection.keyCommands().rename(originalKey, newKey).block();
        assertThat(response).isTrue();

        // …but nothing was copied, so the new key holds no value.
        final ByteBuffer newKeyValue = connection.stringCommands().get(newKey).block();
        assertThat(newKeyValue).isEqualTo(null);
    }
}
public Span nextSpan(Message message) {
    // Extract any propagated trace context from the message properties; the helper's
    // name indicates the trace-ID properties are cleared so they aren't re-propagated.
    TraceContextOrSamplingFlags extracted =
        extractAndClearTraceIdProperties(processorExtractor, message, message);
    Span result = tracer.nextSpan(extracted); // Processor spans use the normal sampler.

    // When an upstream context was not present, lookup keys are unlikely added
    if (extracted.context() == null && !result.isNoop()) {
        // simplify code by re-using an existing MessagingRequest impl
        tagQueueOrTopic(new MessageConsumerRequest(message, destination(message)), result);
    }
    return result;
}
@Test
void nextSpan_should_use_span_from_headers_as_parent() {
    // Single-header b3 format: traceId 1, spanId 2, sampled flag 1.
    setStringProperty(message, "b3", "0000000000000001-0000000000000002-1");
    Span span = jmsTracing.nextSpan(message);
    // The propagated span (id 2) must become the parent of the new span.
    assertThat(span.context().parentId()).isEqualTo(2L);
}
/**
 * Parses DistCp command-line arguments into a {@link DistCpOptions}.
 *
 * <p>Boolean switches map straight onto builder flags; valued switches are parsed
 * and validated, with invalid numbers rethrown as IllegalArgumentException.
 *
 * @throws IllegalArgumentException on unparseable arguments or invalid values
 */
public static DistCpOptions parse(String[] args)
    throws IllegalArgumentException {

  CommandLineParser parser = new CustomParser();

  CommandLine command;
  try {
    command = parser.parse(cliOptions, args, true);
  } catch (ParseException e) {
    throw new IllegalArgumentException("Unable to parse arguments. "
        + Arrays.toString(args), e);
  }

  DistCpOptions.Builder builder = parseSourceAndTargetPaths(command);

  // Simple boolean switches. Note BLOCKING is inverted: the flag *disables* blocking.
  builder
      .withAtomicCommit(
          command.hasOption(DistCpOptionSwitch.ATOMIC_COMMIT.getSwitch()))
      .withSyncFolder(
          command.hasOption(DistCpOptionSwitch.SYNC_FOLDERS.getSwitch()))
      .withDeleteMissing(
          command.hasOption(DistCpOptionSwitch.DELETE_MISSING.getSwitch()))
      .withIgnoreFailures(
          command.hasOption(DistCpOptionSwitch.IGNORE_FAILURES.getSwitch()))
      .withOverwrite(
          command.hasOption(DistCpOptionSwitch.OVERWRITE.getSwitch()))
      .withAppend(
          command.hasOption(DistCpOptionSwitch.APPEND.getSwitch()))
      .withSkipCRC(
          command.hasOption(DistCpOptionSwitch.SKIP_CRC.getSwitch()))
      .withBlocking(
          !command.hasOption(DistCpOptionSwitch.BLOCKING.getSwitch()))
      .withVerboseLog(
          command.hasOption(DistCpOptionSwitch.VERBOSE_LOG.getSwitch()))
      .withDirectWrite(
          command.hasOption(DistCpOptionSwitch.DIRECT_WRITE.getSwitch()))
      .withUseIterator(
          command.hasOption(DistCpOptionSwitch.USE_ITERATOR.getSwitch()))
      .withUpdateRoot(
          command.hasOption(DistCpOptionSwitch.UPDATE_ROOT.getSwitch()));

  // Snapshot-diff modes each take a (from, to) snapshot pair.
  if (command.hasOption(DistCpOptionSwitch.DIFF.getSwitch())) {
    String[] snapshots = getVals(command,
        DistCpOptionSwitch.DIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    builder.withUseDiff(snapshots[0], snapshots[1]);
  }
  if (command.hasOption(DistCpOptionSwitch.RDIFF.getSwitch())) {
    String[] snapshots = getVals(command,
        DistCpOptionSwitch.RDIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    builder.withUseRdiff(snapshots[0], snapshots[1]);
  }

  if (command.hasOption(DistCpOptionSwitch.FILTERS.getSwitch())) {
    builder.withFiltersFile(
        getVal(command, DistCpOptionSwitch.FILTERS.getSwitch()));
  }

  if (command.hasOption(DistCpOptionSwitch.LOG_PATH.getSwitch())) {
    builder.withLogPath(
        new Path(getVal(command, DistCpOptionSwitch.LOG_PATH.getSwitch())));
  }

  // Work path only matters when non-empty (used as the atomic-commit staging dir).
  if (command.hasOption(DistCpOptionSwitch.WORK_PATH.getSwitch())) {
    final String workPath = getVal(command,
        DistCpOptionSwitch.WORK_PATH.getSwitch());
    if (workPath != null && !workPath.isEmpty()) {
      builder.withAtomicWorkPath(new Path(workPath));
    }
  }
  if (command.hasOption(DistCpOptionSwitch.TRACK_MISSING.getSwitch())) {
    builder.withTrackMissing(
        new Path(getVal(
            command,
            DistCpOptionSwitch.TRACK_MISSING.getSwitch())));
  }

  if (command.hasOption(DistCpOptionSwitch.BANDWIDTH.getSwitch())) {
    try {
      final Float mapBandwidth = Float.parseFloat(
          getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()));
      builder.withMapBandwidth(mapBandwidth);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("Bandwidth specified is invalid: " +
          getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()), e);
    }
  }

  if (command.hasOption(
      DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch())) {
    try {
      final Integer numThreads = Integer.parseInt(getVal(command,
          DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()));
      builder.withNumListstatusThreads(numThreads);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException(
          "Number of liststatus threads is invalid: " + getVal(command,
              DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()), e);
    }
  }

  if (command.hasOption(DistCpOptionSwitch.MAX_MAPS.getSwitch())) {
    try {
      final Integer maps = Integer.parseInt(
          getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()));
      builder.maxMaps(maps);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("Number of maps is invalid: " +
          getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()), e);
    }
  }

  if (command.hasOption(DistCpOptionSwitch.COPY_STRATEGY.getSwitch())) {
    builder.withCopyStrategy(
        getVal(command, DistCpOptionSwitch.COPY_STRATEGY.getSwitch()));
  }

  if (command.hasOption(DistCpOptionSwitch.PRESERVE_STATUS.getSwitch())) {
    builder.preserve(
        getVal(command, DistCpOptionSwitch.PRESERVE_STATUS.getSwitch()));
  }

  // Deprecated limits are accepted but ignored, with a warning.
  if (command.hasOption(DistCpOptionSwitch.FILE_LIMIT.getSwitch())) {
    LOG.warn(DistCpOptionSwitch.FILE_LIMIT.getSwitch() + " is a deprecated" +
        " option. Ignoring.");
  }

  if (command.hasOption(DistCpOptionSwitch.SIZE_LIMIT.getSwitch())) {
    LOG.warn(DistCpOptionSwitch.SIZE_LIMIT.getSwitch() + " is a deprecated" +
        " option. Ignoring.");
  }

  if (command.hasOption(DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch())) {
    // NOTE(review): trim() is applied to the switch constant, not the user-supplied
    // value — likely intended as getVal(...).trim(); confirm before changing.
    final String chunkSizeStr = getVal(command,
        DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch().trim());
    try {
      int csize = Integer.parseInt(chunkSizeStr);
      csize = csize > 0 ? csize : 0;  // negative values are clamped to 0 (disabled)
      LOG.info("Set distcp blocksPerChunk to " + csize);
      builder.withBlocksPerChunk(csize);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("blocksPerChunk is invalid: "
          + chunkSizeStr, e);
    }
  }

  if (command.hasOption(DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch())) {
    // NOTE(review): same misplaced trim() as blocksPerChunk above.
    final String copyBufferSizeStr = getVal(command,
        DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch().trim());
    try {
      int copyBufferSize = Integer.parseInt(copyBufferSizeStr);
      builder.withCopyBufferSize(copyBufferSize);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("copyBufferSize is invalid: "
          + copyBufferSizeStr, e);
    }
  }

  return builder.build();
}
@Test
public void testParseUpdateRoot() {
    // Without the -updateRoot flag the option must default to false.
    DistCpOptions options = OptionsParser.parse(new String[] {
        "hdfs://localhost:8020/source/first",
        "hdfs://localhost:8020/target/"});
    Assert.assertFalse(options.shouldUpdateRoot());
    // Passing -updateRoot must enable the option.
    options = OptionsParser.parse(new String[] {
        "-updateRoot",
        "hdfs://localhost:8020/source/first",
        "hdfs://localhost:8020/target/"});
    Assert.assertTrue(options.shouldUpdateRoot());
}
/**
 * Buffers the incoming rows into batches and pushes each batch through the
 * sub-transformation, returning a blocking iterable of the results.
 */
@Override
public Iterable<Result> buffer( Flowable<I> flowable ) {
    // Choose the buffering mode:
    //  - millis > 0 and batchSize > 0: flush on whichever comes first (time or size);
    //  - only millis > 0: purely time-based windows;
    //  - otherwise: purely size-based windows.
    Flowable<List<I>> buffer = millis > 0
        ? batchSize > 0
            ? flowable.buffer( millis, MILLISECONDS, Schedulers.io(), batchSize, ArrayList::new, true )
            : flowable.buffer( millis, MILLISECONDS )
        : flowable.buffer( batchSize );
    return buffer
        // Process up to `parallelism` batches concurrently.
        .parallel( parallelism, rxBatchCount )
        // Run on the shared pool when one is configured, otherwise the io() scheduler.
        .runOn( sharedStreamingBatchPoolSize > 0 ? Schedulers.from( sharedStreamingBatchPool ) : Schedulers.io(), rxBatchCount )
        .filter( list -> !list.isEmpty() )
        .map( this.bufferFilter ) // apply any filtering for data that should no longer be processed
        .filter( list -> !list.isEmpty() ) // ensure at least one record is left before sending to subtrans
        .map( this::sendBufferToSubtrans )
        .filter( Optional::isPresent )
        .map( Optional::get )
        .sequential()
        // Surface sub-transformation errors as exceptions on the iterable.
        .doOnNext( this::failOnError )
        .doOnNext( postProcessor )
        .map( Map.Entry::getValue )
        .blockingIterable();
}
@Test
public void abortedSubtransThrowsAnError() throws KettleException {
    // A sub-transformation result carrying errors must surface as an exception
    // whose cause message is the localized "SubtransFailed" string.
    Result result1 = new Result();
    result1.setNrErrors( 1 );
    when( subtransExecutor.execute( any() ) ).thenReturn( Optional.of( result1 ) );
    when( subtransExecutor.getPrefetchCount() ).thenReturn( 10 );
    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta( new ValueMetaString( "field" ) );
    FixedTimeStreamWindow<List> window = new FixedTimeStreamWindow<>( subtransExecutor, rowMeta, 0, 2, 1 );
    try {
        window.buffer( Flowable.fromIterable( singletonList( asList( "v1", "v2" ) ) ) ).forEach( result -> {
        } );
    } catch ( Exception e ) {
        // NOTE(review): if no exception is thrown the test passes vacuously —
        // consider adding a fail() after the forEach; confirm the intent.
        assertEquals( getString( BaseStreamStep.class, "FixedTimeStreamWindow.SubtransFailed" ),
            e.getCause().getMessage().trim() );
    }
}
/**
 * Returns the stored name-length field of this entry.
 */
public int getNameLength() {
    return name_length;
}
@Test
public void testGetNameLength() {
    // The getter must return the raw name-length value the entry was built with.
    assertEquals(TestParameters.nameLength, dle.getNameLength());
}
/**
 * The validation key for this type; by construction it equals the name of the
 * {@code PropertyType.SINGLE_SELECT_LIST} enum constant.
 */
@Override
public String key() {
    return PropertyType.SINGLE_SELECT_LIST.name();
}
@Test
public void key() {
    // The key must match the PropertyType enum constant's name exactly.
    assertThat(validation.key()).isEqualTo("SINGLE_SELECT_LIST");
}
/**
 * Resolves schema compatibility against a previous POJO serializer snapshot.
 * Checks, in order: snapshot type, POJO class identity, restorability of
 * subclass registrations, field compatibility, registered-subclass
 * compatibility; then decides between compatible-after-migration,
 * compatible-with-reconfigured-serializer, and compatible-as-is.
 */
@Override
public TypeSerializerSchemaCompatibility<T> resolveSchemaCompatibility(
        TypeSerializerSnapshot<T> oldSerializerSnapshot) {
    // Only another PojoSerializerSnapshot can possibly be compatible.
    if (!(oldSerializerSnapshot instanceof PojoSerializerSnapshot)) {
        return TypeSerializerSchemaCompatibility.incompatible();
    }
    PojoSerializerSnapshot<T> previousPojoSerializerSnapshot =
            (PojoSerializerSnapshot<T>) oldSerializerSnapshot;
    final Class<T> previousPojoClass =
            previousPojoSerializerSnapshot.snapshotData.getPojoClass();
    final LinkedOptionalMap<Field, TypeSerializerSnapshot<?>> fieldSerializerSnapshots =
            previousPojoSerializerSnapshot.snapshotData.getFieldSerializerSnapshots();
    final LinkedOptionalMap<Class<?>, TypeSerializerSnapshot<?>>
            registeredSubclassSerializerSnapshots =
                    previousPojoSerializerSnapshot.snapshotData
                            .getRegisteredSubclassSerializerSnapshots();
    final LinkedOptionalMap<Class<?>, TypeSerializerSnapshot<?>>
            nonRegisteredSubclassSerializerSnapshots =
                    previousPojoSerializerSnapshot.snapshotData
                            .getNonRegisteredSubclassSerializerSnapshots();
    // The POJO class itself must be the exact same class.
    if (previousPojoClass != snapshotData.getPojoClass()) {
        return TypeSerializerSchemaCompatibility.incompatible();
    }
    // Absent keys/values mean classes or nested snapshots could not be restored.
    if (registeredSubclassSerializerSnapshots.hasAbsentKeysOrValues()) {
        return TypeSerializerSchemaCompatibility.incompatible();
    }
    if (nonRegisteredSubclassSerializerSnapshots.hasAbsentKeysOrValues()) {
        return TypeSerializerSchemaCompatibility.incompatible();
    }
    final IntermediateCompatibilityResult<T> preExistingFieldSerializersCompatibility =
            getCompatibilityOfPreExistingFields(fieldSerializerSnapshots);
    if (preExistingFieldSerializersCompatibility.isIncompatible()) {
        return TypeSerializerSchemaCompatibility.incompatible();
    }
    final IntermediateCompatibilityResult<T> preExistingRegistrationsCompatibility =
            getCompatibilityOfPreExistingRegisteredSubclasses(
                    registeredSubclassSerializerSnapshots);
    if (preExistingRegistrationsCompatibility.isIncompatible()) {
        return TypeSerializerSchemaCompatibility.incompatible();
    }
    // Migration takes precedence over reconfiguration.
    if (newPojoSerializerIsCompatibleAfterMigration(
            preExistingFieldSerializersCompatibility,
            preExistingRegistrationsCompatibility,
            fieldSerializerSnapshots)) {
        return TypeSerializerSchemaCompatibility.compatibleAfterMigration();
    }
    if (newPojoSerializerIsCompatibleWithReconfiguredSerializer(
            preExistingFieldSerializersCompatibility,
            preExistingRegistrationsCompatibility,
            registeredSubclassSerializerSnapshots,
            nonRegisteredSubclassSerializerSnapshots)) {
        return TypeSerializerSchemaCompatibility.compatibleWithReconfiguredSerializer(
                constructReconfiguredPojoSerializer(
                        preExistingFieldSerializersCompatibility,
                        registeredSubclassSerializerSnapshots,
                        preExistingRegistrationsCompatibility,
                        nonRegisteredSubclassSerializerSnapshots));
    }
    return TypeSerializerSchemaCompatibility.compatibleAsIs();
}
@Test
void testResolveSchemaCompatibilityWithCompatibleAfterMigrationFieldSerializers() {
    // One field's serializer reports compatible-after-migration; the whole POJO
    // snapshot must therefore resolve to compatible-after-migration.
    final PojoSerializerSnapshot<TestPojo> oldSnapshot =
            buildTestSnapshot(
                    Arrays.asList(
                            ID_FIELD,
                            NAME_FIELD,
                            mockFieldSerializerSnapshot(
                                    HEIGHT_FIELD,
                                    new SchemaCompatibilityTestingSerializer()
                                            .snapshotConfiguration())));
    final PojoSerializerSnapshot<TestPojo> newSnapshot =
            buildTestSnapshot(
                    Arrays.asList(
                            ID_FIELD,
                            NAME_FIELD,
                            mockFieldSerializerSnapshot(
                                    HEIGHT_FIELD,
                                    SchemaCompatibilityTestingSnapshot
                                            .thatIsCompatibleWithLastSerializerAfterMigration())));
    final TypeSerializerSchemaCompatibility<TestPojo> resultCompatibility =
            newSnapshot.resolveSchemaCompatibility(oldSnapshot);
    assertThat(resultCompatibility.isCompatibleAfterMigration()).isTrue();
}
/**
 * Renders {@code length} bytes of {@code data}, starting at {@code offset}, as
 * upper-case hex pairs separated by single spaces, e.g. {0x0A, 0xFF} -> "0A FF".
 *
 * @param data   source byte array
 * @param offset index of the first byte to render
 * @param length number of bytes to render; 0 yields the empty string
 * @return the space-separated hex representation
 */
public static String prettyHex(byte[] data, int offset, int length) {
    if (length == 0) {
        return "";
    }
    // Each byte contributes two hex digits plus one separator (except the first).
    final StringBuilder out = new StringBuilder(length * 3 - 1);
    for (int i = 0; i < length; i++) {
        if (i > 0) {
            out.append(' ');
        }
        out.append(String.format("%02X", data[offset + i]));
    }
    return out.toString();
}
@Test
public void prettyHexEmptyByteArray() {
    // An empty array must render as the empty string, not throw.
    assertEquals("", ByteArrayUtils.prettyHex(new byte[0]));
}
/**
 * Looks up, for each partition, the offset of the first message with a timestamp
 * at or after the requested timestamp, bounded by the given timer.
 */
public Map<TopicPartition, OffsetAndTimestamp> offsetsForTimes(Map<TopicPartition, Long> timestampsToSearch, Timer timer) {
    // Register the topics as transient so their metadata is fetched even when the
    // consumer is not subscribed to them; always clear the registration on exit.
    metadata.addTransientTopics(topicsForPartitions(timestampsToSearch.keySet()));
    try {
        Map<TopicPartition, ListOffsetData> fetchedOffsets = fetchOffsetsByTimes(timestampsToSearch, timer, true).fetchedOffsets;
        return buildOffsetsForTimesResult(timestampsToSearch, fetchedOffsets);
    } finally {
        metadata.clearTransientTopics();
    }
}
@Test
public void testListOffsetsWithZeroTimeout() {
    // With a zero timeout no broker round-trip can complete, so every requested
    // partition should map to a null OffsetAndTimestamp rather than block or throw.
    buildFetcher();
    Map<TopicPartition, Long> offsetsToSearch = new HashMap<>();
    offsetsToSearch.put(tp0, ListOffsetsRequest.EARLIEST_TIMESTAMP);
    offsetsToSearch.put(tp1, ListOffsetsRequest.EARLIEST_TIMESTAMP);
    Map<TopicPartition, OffsetAndTimestamp> offsetsToExpect = new HashMap<>();
    offsetsToExpect.put(tp0, null);
    offsetsToExpect.put(tp1, null);
    assertEquals(offsetsToExpect, offsetFetcher.offsetsForTimes(offsetsToSearch, time.timer(0)));
}
/**
 * Fits the model on the given data using default training properties.
 *
 * @param formula the model formula (response ~ predictors).
 * @param data    the training data.
 * @return the fitted linear model.
 */
public static LinearModel fit(Formula formula, DataFrame data) {
    return fit(formula, data, new Properties());
}
@Test
public void testCPU() {
    System.out.println("CPU");
    MathEx.setSeed(19650218); // to get repeatable results.
    LinearModel model = RidgeRegression.fit(CPU.formula, CPU.data, 0.1);
    System.out.println(model);
    // 10-fold cross-validation; the expected RMSE is pinned to 4 decimals as a
    // regression guard for the fixed seed above.
    RegressionValidations<LinearModel> result = CrossValidation.regression(10, CPU.formula, CPU.data,
            (f, x) -> RidgeRegression.fit(f, x, 0.1));
    System.out.println(result);
    assertEquals(50.9911, result.avg.rmse, 1E-4);
}
@Override public ProviderInfo doSelect(SofaRequest request, List<ProviderInfo> providerInfos) { String key = getServiceKey(request); // 每个方法级自己轮询,互不影响 int length = providerInfos.size(); // 总个数 PositiveAtomicCounter sequence = sequences.get(key); if (sequence == null) { sequences.putIfAbsent(key, new PositiveAtomicCounter()); sequence = sequences.get(key); } return providerInfos.get(sequence.getAndIncrement() % length); }
@Test
public void doSelect() throws Exception {
    // Plain round-robin must spread calls perfectly evenly, for both same-weight
    // and different-weight provider lists (this strategy does not use weights).
    RoundRobinLoadBalancer loadBalancer = new RoundRobinLoadBalancer(null);
    Map<Integer, Integer> cnt = new HashMap<Integer, Integer>();
    int size = 20;
    int total = 190000;
    SofaRequest request = new SofaRequest();
    {
        // Same-weight providers: expect an exactly even split across all ports.
        for (int i = 0; i < size; i++) {
            cnt.put(9000 + i, 0);
        }
        List<ProviderInfo> providers = buildSameWeightProviderList(size);
        long start = System.currentTimeMillis();
        for (int i = 0; i < total; i++) {
            ProviderInfo provider = loadBalancer.doSelect(request, providers);
            int port = provider.getPort();
            cnt.put(port, cnt.get(port) + 1);
        }
        long end = System.currentTimeMillis();
        LOGGER.info("elapsed" + (end - start) + "ms");
        LOGGER.info("avg " + (end - start) * 1000 * 1000 / total + "ns");
        int avg = total / size;
        for (int i = 0; i < size; i++) {
            Assert.assertTrue(avg == cnt.get(9000 + i));
        }
    }
    {
        // Different-weight providers: the split is still exactly even.
        for (int i = 0; i < size; i++) {
            cnt.put(9000 + i, 0);
        }
        List<ProviderInfo> providers = buildDiffWeightProviderList(size);
        long start = System.currentTimeMillis();
        for (int i = 0; i < total; i++) {
            ProviderInfo provider = loadBalancer.doSelect(request, providers);
            int port = provider.getPort();
            cnt.put(port, cnt.get(port) + 1);
        }
        long end = System.currentTimeMillis();
        LOGGER.info("elapsed" + (end - start) + "ms");
        LOGGER.info("avg " + (end - start) * 1000 * 1000 / total + "ns");
        // Weights are ignored by the round-robin strategy.
        int avg = total / size;
        for (int i = 0; i < size; i++) {
            Assert.assertTrue(avg == cnt.get(9000 + i));
        }
    }
}
@Config("router.config-file") public RouterConfig setConfigFile(String configFile) { this.configFile = configFile; return this; }
@Test
public void testDefaults() {
    // With no explicit configuration, router.config-file must default to null.
    assertRecordedDefaults(recordDefaults(RouterConfig.class).setConfigFile(null));
}
/**
 * Returns true when the given JSON array string contains an element equal to
 * {@code val}. Streams the array token-by-token so the whole input is never
 * materialized; malformed JSON or non-array input yields false.
 */
@Udf
public <T> Boolean contains(
    @UdfParameter final String jsonArray,
    @UdfParameter final T val
) {
    try (JsonParser parser = PARSER_FACTORY.createParser(jsonArray)) {
        // Must start with '[' to be a JSON array at all.
        if (parser.nextToken() != START_ARRAY) {
            return false;
        }
        while (parser.nextToken() != null) {
            final JsonToken token = parser.currentToken();
            if (token == null) {
                // NOTE(review): currentToken() mirrors the non-null nextToken()
                // above, so this branch looks unreachable — confirm before relying on it.
                return val == null;
            } else if (token == END_ARRAY) {
                // Reached the end without a match.
                return false;
            }
            // Skip over nested arrays/objects so only top-level elements are compared.
            parser.skipChildren();
            // Only attempt deserialization when the token's JSON type can match
            // the runtime type of `val` (per the TOKEN_COMPAT table).
            if (TOKEN_COMPAT.getOrDefault(token, foo -> false).test(val)) {
                if (token == VALUE_NULL || (val != null && Objects.equals(parser.readValueAs(val.getClass()), val))) {
                    return true;
                }
            }
        }
        return false;
    } catch (final IOException e) {
        // Parse/read failures are treated as "not contained".
        return false;
    }
}
@Test
public void shouldFindBooleansInJsonArray() {
    // Boolean matching must respect the exact boolean value searched for.
    assertEquals(true, jsonUdf.contains("[false, false, true, false]", true));
    assertEquals(true, jsonUdf.contains("[true, true, false]", false));
    assertEquals(false, jsonUdf.contains("[true, true]", false));
    assertEquals(false, jsonUdf.contains("[false, false]", true));
}
/**
 * Collects all metrics once and publishes them to every registered publisher.
 */
void collectMetrics() {
    // Snapshot the registered publishers, gather metrics through a collector
    // that fans out to all of them, then flush what was collected.
    MetricsPublisher[] publisherSnapshot = publishers.toArray(new MetricsPublisher[0]);
    PublisherMetricsCollector fanOutCollector = new PublisherMetricsCollector(publisherSnapshot);
    collectMetrics(fanOutCollector);
    fanOutCollector.publishCollectedMetrics();
}
@Test
public void testUpdatesRenderedInOrder() {
    // Two collect cycles with different probe values: each metric must be rendered
    // once per cycle, in update order, and not rendered again afterwards.
    MetricsService metricsService = prepareMetricsService();
    testProbeSource.update(1, 1.5D);
    metricsService.collectMetrics(metricsCollectorMock);
    testProbeSource.update(2, 5.5D);
    metricsService.collectMetrics(metricsCollectorMock);
    InOrder inOrderLong = inOrder(metricsCollectorMock);
    InOrder inOrderDouble = inOrder(metricsCollectorMock);
    MetricDescriptor descRoot = metricsRegistry.newMetricDescriptor()
        .withPrefix("test")
        .withUnit(COUNT);
    // Long metric: 1 then 2, nothing more.
    MetricDescriptor descLongValue = descRoot.copy().withMetric("longValue");
    inOrderLong.verify(metricsCollectorMock).collectLong(descLongValue, 1);
    inOrderLong.verify(metricsCollectorMock).collectLong(descLongValue, 2);
    inOrderLong.verify(metricsCollectorMock, never()).collectLong(eq(descLongValue), anyLong());
    // Double metric: 1.5 then 5.5, nothing more.
    MetricDescriptor descDoubleValue = descRoot.copy().withMetric("doubleValue");
    inOrderDouble.verify(metricsCollectorMock).collectDouble(descDoubleValue, 1.5D);
    inOrderDouble.verify(metricsCollectorMock).collectDouble(descDoubleValue, 5.5D);
    inOrderDouble.verify(metricsCollectorMock, never()).collectDouble(eq(descDoubleValue), anyDouble());
}
/**
 * Creates an unmodifiable, insertion-ordered {@link Set} holding the given values
 * (duplicates collapse to the first occurrence).
 *
 * <p>A {@code null} or empty array yields the shared empty set.
 *
 * <p>Fix: the previous version computed the load factor as {@code 1/(size+1)} —
 * inversely proportional to the element count — which made the backing set's
 * resize threshold tiny and caused repeated rehashing during population. The set
 * is now sized so all elements fit at the default 0.75 load factor without any
 * resize. Observable behavior (contents, order, unmodifiability) is unchanged.
 *
 * @param values the elements of the set; may be null or empty.
 * @return an unmodifiable set preserving first-occurrence insertion order.
 */
@SafeVarargs
public static <T> Set<T> ofSet(T... values) {
    int size = values == null ? 0 : values.length;
    if (size < 1) {
        return emptySet();
    }
    // capacity * 0.75 >= size guarantees no rehash while populating.
    Set<T> elements = new LinkedHashSet<>((int) (size / 0.75f) + 1);
    for (int i = 0; i < size; i++) {
        elements.add(values[i]);
    }
    return unmodifiableSet(elements);
}
@Test
void testOfSet() {
    // No arguments -> the empty set.
    Set<String> set = ofSet();
    assertEquals(emptySet(), set);
    // A null array is treated the same as no arguments.
    set = ofSet(((String[]) null));
    assertEquals(emptySet(), set);
    // Values are kept with insertion order preserved.
    set = ofSet("A", "B", "C");
    Set<String> expectedSet = new LinkedHashSet<>();
    expectedSet.add("A");
    expectedSet.add("B");
    expectedSet.add("C");
    assertEquals(expectedSet, set);
}
/**
 * Builds semantic properties for a single-input projection: every flat sub-field
 * of each projected top-level field is forwarded from its flat source position to
 * its flat target position.
 *
 * <p>Fix: removed the stray no-op statement {@code Character.isJavaIdentifierStart(1);}
 * (dead leftover with no effect on the result).
 *
 * @param fields the projected top-level field indexes of the input, in target order.
 * @param inType the composite input type.
 * @return the forwarded-field properties of the projection.
 */
public static SingleInputSemanticProperties createProjectionPropertiesSingle(
        int[] fields, CompositeType<?> inType) {
    SingleInputSemanticProperties ssp = new SingleInputSemanticProperties();
    // Flat-field offset of each top-level field of the input type.
    int[] sourceOffsets = new int[inType.getArity()];
    sourceOffsets[0] = 0;
    for (int i = 1; i < inType.getArity(); i++) {
        sourceOffsets[i] = inType.getTypeAt(i - 1).getTotalFields() + sourceOffsets[i - 1];
    }
    // Forward every flat sub-field of each projected field, in projection order.
    int targetOffset = 0;
    for (int i = 0; i < fields.length; i++) {
        int sourceOffset = sourceOffsets[fields[i]];
        int numFieldsToCopy = inType.getTypeAt(fields[i]).getTotalFields();
        for (int j = 0; j < numFieldsToCopy; j++) {
            ssp.addForwardedField(sourceOffset + j, targetOffset + j);
        }
        targetOffset += numFieldsToCopy;
    }
    return ssp;
}
@Test
void testSingleProjectionProperties() {
    // Flat tuple: target position i is fed from source position pMap[i].
    int[] pMap = new int[] {3, 0, 4};
    SingleInputSemanticProperties sp =
            SemanticPropUtil.createProjectionPropertiesSingle(
                    pMap, (CompositeType<?>) fiveIntTupleType);
    assertThat(sp.getForwardingTargetFields(0, 0)).contains(1);
    assertThat(sp.getForwardingTargetFields(0, 3)).contains(0);
    assertThat(sp.getForwardingTargetFields(0, 4)).contains(2);
    // Duplicated source fields: one source may forward to several targets.
    pMap = new int[] {2, 2, 1, 1};
    sp =
            SemanticPropUtil.createProjectionPropertiesSingle(
                    pMap, (CompositeType<?>) fiveIntTupleType);
    assertThat(sp.getForwardingTargetFields(0, 1)).containsExactly(2, 3);
    assertThat(sp.getForwardingTargetFields(0, 2)).containsExactly(0, 1);
    // Nested tuple: projecting a composite field forwards all of its flat sub-fields.
    pMap = new int[] {2, 0};
    sp =
            SemanticPropUtil.createProjectionPropertiesSingle(
                    pMap, (CompositeType<?>) nestedTupleType);
    assertThat(sp.getForwardingTargetFields(0, 4)).contains(0);
    assertThat(sp.getForwardingTargetFields(0, 0)).contains(1);
    assertThat(sp.getForwardingTargetFields(0, 1)).contains(2);
    assertThat(sp.getForwardingTargetFields(0, 2)).contains(3);
    // Deeply nested tuple.
    pMap = new int[] {2, 0, 1};
    sp =
            SemanticPropUtil.createProjectionPropertiesSingle(
                    pMap, (CompositeType<?>) deepNestedTupleType);
    assertThat(sp.getForwardingTargetFields(0, 6)).contains(0);
    assertThat(sp.getForwardingTargetFields(0, 0)).contains(1);
    assertThat(sp.getForwardingTargetFields(0, 1)).contains(2);
    assertThat(sp.getForwardingTargetFields(0, 2)).contains(3);
    assertThat(sp.getForwardingTargetFields(0, 3)).contains(4);
    assertThat(sp.getForwardingTargetFields(0, 4)).contains(5);
    assertThat(sp.getForwardingTargetFields(0, 5)).contains(6);
    // POJO nested inside a tuple.
    pMap = new int[] {2, 1};
    sp =
            SemanticPropUtil.createProjectionPropertiesSingle(
                    pMap, (CompositeType<?>) pojoInTupleType);
    assertThat(sp.getForwardingTargetFields(0, 2)).contains(0);
    assertThat(sp.getForwardingTargetFields(0, 3)).contains(1);
    assertThat(sp.getForwardingTargetFields(0, 4)).contains(2);
    assertThat(sp.getForwardingTargetFields(0, 5)).contains(3);
    assertThat(sp.getForwardingTargetFields(0, 1)).contains(4);
}
/**
 * Pushes this step's BLOB to the target registry, skipping the push when the
 * BLOB already exists there (unless forcePush is set). Progress and timing are
 * reported through the event dispatchers.
 *
 * @return the descriptor of the (now present) BLOB.
 */
@Override
public BlobDescriptor call() throws IOException, RegistryException {
    EventHandlers eventHandlers = buildContext.getEventHandlers();
    DescriptorDigest blobDigest = blobDescriptor.getDigest();
    try (ProgressEventDispatcher progressEventDispatcher =
            progressEventDispatcherFactory.create(
                "pushing blob " + blobDigest, blobDescriptor.getSize());
        TimerEventDispatcher ignored =
            new TimerEventDispatcher(eventHandlers, DESCRIPTION + blobDescriptor);
        // Throttles progress updates so the dispatcher is not flooded.
        ThrottledAccumulatingConsumer throttledProgressReporter =
            new ThrottledAccumulatingConsumer(progressEventDispatcher::dispatchProgress)) {
        // check if the BLOB is available
        if (!forcePush && registryClient.checkBlob(blobDigest).isPresent()) {
            eventHandlers.dispatch(
                LogEvent.info(
                    "Skipping push; BLOB already exists on target registry : " + blobDescriptor));
            return blobDescriptor;
        }
        // If base and target images are in the same registry, then use mount/from to try mounting the
        // BLOB from the base image repository to the target image repository and possibly avoid
        // having to push the BLOB. See
        // https://docs.docker.com/registry/spec/api/#cross-repository-blob-mount for details.
        String baseRegistry = buildContext.getBaseImageConfiguration().getImageRegistry();
        String baseRepository = buildContext.getBaseImageConfiguration().getImageRepository();
        String targetRegistry = buildContext.getTargetImageConfiguration().getImageRegistry();
        String sourceRepository = targetRegistry.equals(baseRegistry) ? baseRepository : null;
        registryClient.pushBlob(blobDigest, blob, sourceRepository, throttledProgressReporter);
        return blobDescriptor;
    }
}
@Test
public void testCall_forcePushWithNoBlobCheck() throws IOException, RegistryException {
    // With forcePush=true the existence check must be skipped entirely and the
    // blob pushed unconditionally.
    call(true);
    Mockito.verify(registryClient, Mockito.never()).checkBlob(Mockito.any());
    Mockito.verify(registryClient)
        .pushBlob(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any());
}
/**
 * Returns the average of the per-element values in this bin, truncated to a
 * long. An empty bin yields 0. The {@code totalElements} argument is not used
 * by this metric.
 */
@Override
public Long computeValue(final Collection<T> elementsInBin, final int totalElements) {
    return (long) elementsInBin.stream()
        .mapToLong(valueRetrievalFunction)
        .average()
        .orElse(0L);
}
@Test
void testReturnsAverage() {
    // (10 + 20 + 60) / 3 = 30; the totalElements argument (42) must not affect the result.
    final Long result = toTest.computeValue(
        List.of(
            QueryExecutionStats.builder().duration(10).build(),
            QueryExecutionStats.builder().duration(20).build(),
            QueryExecutionStats.builder().duration(60).build()
        ),
        42
    );
    assertEquals(30, result);
}
/**
 * FEEL max(list): returns the greatest element of the list, or a parameter
 * error for a null/empty list or a list of mutually non-comparable items.
 */
public FEELFnResult<Object> invoke(@ParameterName("list") List list) {
    if ( list == null || list.isEmpty() ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null or empty"));
    } else {
        try {
            return FEELFnResult.ofResult(Collections.max(list, new InterceptNotComparableComparator()));
        } catch (ClassCastException e) {
            // The comparator raises CCE when items cannot be compared to each other.
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "contains items that are not comparable"));
        }
    }
}
@Test
void invokeNullList() {
    // A null list must produce an InvalidParametersEvent error, not a result.
    FunctionTestUtil.assertResultError(maxFunction.invoke((List) null), InvalidParametersEvent.class);
}
// Resolves all values reachable from the record at `ordinal` along this field
// path, starting the traversal at the root type and path position 0.
Object[] findValues(int ordinal) {
    return getAllValues(ordinal, type, 0);
}
@Test
public void testMapKeyReferenceToList() throws Exception {
    // A map whose key is a List-typed object: the field path must traverse
    // mapValues.key.intValues.element.value and surface all three list elements.
    ListType listType = new ListType();
    listType.intValues = Arrays.asList(1, 2, 3);
    Map<ListType, Integer> map = new HashMap<>();
    map.put(listType, 1);
    MapKeyReferenceAsList mapKeyReferenceAsList = new MapKeyReferenceAsList();
    mapKeyReferenceAsList.mapValues = map;
    objectMapper.add(mapKeyReferenceAsList);
    StateEngineRoundTripper.roundTripSnapshot(writeStateEngine, readStateEngine);
    FieldPath fieldPath;
    Object[] values;
    fieldPath = new FieldPath(readStateEngine, "MapKeyReferenceAsList", "mapValues.key.intValues.element.value");
    values = fieldPath.findValues(0);
    Assert.assertEquals(3, values.length);
}
/**
 * Returns a window spec of calendar months of the given length, each window
 * starting on the 1st day of the month, anchored at the default start date, in UTC.
 */
public static MonthsWindows months(int number) {
    return new MonthsWindows(number, 1, DEFAULT_START_DATE, DateTimeZone.UTC);
}
@Test
public void testDefaultWindowMappingFn() {
    // The default side-input mapping must map a timestamp into the same calendar
    // window the window fn itself would assign, and require no lookback.
    MonthsWindows windowFn = CalendarWindows.months(2);
    WindowMappingFn<?> mapping = windowFn.getDefaultWindowMappingFn();
    assertThat(
        mapping.getSideInputWindow(
            new BoundedWindow() {
                @Override
                public Instant maxTimestamp() {
                    return new Instant(100L);
                }
            }),
        equalTo(windowFn.assignWindow(new Instant(100L))));
    assertThat(mapping.maximumLookback(), equalTo(Duration.ZERO));
}
@Override public int compare(String version1, String version2) { if(ObjectUtil.equal(version1, version2)) { return 0; } if (version1 == null && version2 == null) { return 0; } else if (version1 == null) {// null或""视为最小版本,排在前 return -1; } else if (version2 == null) { return 1; } return CompareUtil.compare(Version.of(version1), Version.of(version2)); }
@Test
public void I8Z3VETest() {
    // Transitivity check: "260" > "a-34" and "a-34" > "a-3" must imply "260" > "a-3".
    int compare = VersionComparator.INSTANCE.compare("260", "a-34");
    assertTrue(compare > 0);
    compare = VersionComparator.INSTANCE.compare("a-34", "a-3");
    assertTrue(compare > 0);
    compare = VersionComparator.INSTANCE.compare("260", "a-3");
    assertTrue(compare > 0);
}
/**
 * Handles a gRPC ReceiveMessage call: derives the long-polling time from the
 * request/settings/deadline, validates topic, group, invisible time and filter
 * expression, then pops messages and streams the result back to the client.
 * All failures are reported through the response writer, never thrown to gRPC.
 */
public void receiveMessage(ProxyContext ctx, ReceiveMessageRequest request,
    StreamObserver<ReceiveMessageResponse> responseObserver) {
    ReceiveMessageResponseStreamWriter writer = createWriter(ctx, responseObserver);
    try {
        Settings settings = this.grpcClientSettingsManager.getClientSettings(ctx);
        Subscription subscription = settings.getSubscription();
        boolean fifo = subscription.getFifo();
        int maxAttempts = settings.getBackoffPolicy().getMaxAttempts();
        ProxyConfig config = ConfigurationManager.getProxyConfig();
        Long timeRemaining = ctx.getRemainingMs();
        // Polling time: explicit from the request, otherwise derived from the
        // remaining deadline minus half the configured request timeout.
        long pollingTime;
        if (request.hasLongPollingTimeout()) {
            pollingTime = Durations.toMillis(request.getLongPollingTimeout());
        } else {
            pollingTime = timeRemaining - Durations.toMillis(settings.getRequestTimeout()) / 2;
        }
        // Clamp into the configured [min, max] long-polling window.
        if (pollingTime < config.getGrpcClientConsumerMinLongPollingTimeoutMillis()) {
            pollingTime = config.getGrpcClientConsumerMinLongPollingTimeoutMillis();
        }
        if (pollingTime > config.getGrpcClientConsumerMaxLongPollingTimeoutMillis()) {
            pollingTime = config.getGrpcClientConsumerMaxLongPollingTimeoutMillis();
        }
        if (pollingTime > timeRemaining) {
            if (timeRemaining >= config.getGrpcClientConsumerMinLongPollingTimeoutMillis()) {
                pollingTime = timeRemaining;
            } else {
                // Deadline too short to poll at all. Older clients predate the
                // ILLEGAL_POLLING_TIME code, so they receive BAD_REQUEST instead.
                final String clientVersion = ctx.getClientVersion();
                Code code = null == clientVersion
                    || ILLEGAL_POLLING_TIME_INTRODUCED_CLIENT_VERSION.compareTo(clientVersion) > 0
                    ? Code.BAD_REQUEST : Code.ILLEGAL_POLLING_TIME;
                writer.writeAndComplete(ctx, code, "The deadline time remaining is not enough" +
                    " for polling, please check network condition");
                return;
            }
        }
        validateTopicAndConsumerGroup(request.getMessageQueue().getTopic(), request.getGroup());
        String topic = request.getMessageQueue().getTopic().getName();
        String group = request.getGroup().getName();
        long actualInvisibleTime = Durations.toMillis(request.getInvisibleDuration());
        ProxyConfig proxyConfig = ConfigurationManager.getProxyConfig();
        // With auto-renew enabled the proxy manages visibility itself and
        // overrides the client-requested invisible time with the default.
        if (proxyConfig.isEnableProxyAutoRenew() && request.getAutoRenew()) {
            actualInvisibleTime = proxyConfig.getDefaultInvisibleTimeMills();
        } else {
            validateInvisibleTime(actualInvisibleTime,
                ConfigurationManager.getProxyConfig().getMinInvisibleTimeMillsForRecv());
        }
        FilterExpression filterExpression = request.getFilterExpression();
        SubscriptionData subscriptionData;
        try {
            subscriptionData = FilterAPI.build(topic, filterExpression.getExpression(),
                GrpcConverter.getInstance().buildExpressionType(filterExpression.getType()));
        } catch (Exception e) {
            writer.writeAndComplete(ctx, Code.ILLEGAL_FILTER_EXPRESSION, e.getMessage());
            return;
        }
        this.messagingProcessor.popMessage(
                ctx,
                new ReceiveMessageQueueSelector(
                    request.getMessageQueue().getBroker().getName()
                ),
                group,
                topic,
                request.getBatchSize(),
                actualInvisibleTime,
                pollingTime,
                ConsumeInitMode.MAX,
                subscriptionData,
                fifo,
                new PopMessageResultFilterImpl(maxAttempts),
                request.hasAttemptId() ? request.getAttemptId() : null,
                timeRemaining
            ).thenAccept(popResult -> {
                // When auto-renew is active, register each popped message's receipt
                // handle so the proxy can keep renewing its invisibility.
                if (proxyConfig.isEnableProxyAutoRenew() && request.getAutoRenew()) {
                    if (PopStatus.FOUND.equals(popResult.getPopStatus())) {
                        List<MessageExt> messageExtList = popResult.getMsgFoundList();
                        for (MessageExt messageExt : messageExtList) {
                            String receiptHandle = messageExt.getProperty(MessageConst.PROPERTY_POP_CK);
                            if (receiptHandle != null) {
                                MessageReceiptHandle messageReceiptHandle =
                                    new MessageReceiptHandle(group, topic, messageExt.getQueueId(), receiptHandle,
                                        messageExt.getMsgId(), messageExt.getQueueOffset(),
                                        messageExt.getReconsumeTimes());
                                messagingProcessor.addReceiptHandle(ctx,
                                    grpcChannelManager.getChannel(ctx.getClientID()), group,
                                    messageExt.getMsgId(), messageReceiptHandle);
                            }
                        }
                    }
                }
                writer.writeAndComplete(ctx, request, popResult);
            })
            .exceptionally(t -> {
                writer.writeAndComplete(ctx, request, t);
                return null;
            });
    } catch (Throwable t) {
        // Any synchronous failure is also routed through the stream writer.
        writer.writeAndComplete(ctx, request, t);
    }
}
@Test
public void testReceiveMessage() {
    // Pop returns NO_NEW_MSG, so the activity must answer MESSAGE_NOT_FOUND.
    StreamObserver<ReceiveMessageResponse> receiveStreamObserver = mock(ServerCallStreamObserver.class);
    ArgumentCaptor<ReceiveMessageResponse> responseArgumentCaptor =
        ArgumentCaptor.forClass(ReceiveMessageResponse.class);
    doNothing().when(receiveStreamObserver).onNext(responseArgumentCaptor.capture());
    when(this.grpcClientSettingsManager.getClientSettings(any())).thenReturn(Settings.newBuilder().getDefaultInstanceForType());
    PopResult popResult = new PopResult(PopStatus.NO_NEW_MSG, new ArrayList<>());
    when(this.messagingProcessor.popMessage(
        any(),
        any(),
        anyString(),
        anyString(),
        anyInt(),
        anyLong(),
        anyLong(),
        anyInt(),
        any(),
        anyBoolean(),
        any(),
        isNull(),
        anyLong())).thenReturn(CompletableFuture.completedFuture(popResult));
    this.receiveMessageActivity.receiveMessage(
        createContext(),
        ReceiveMessageRequest.newBuilder()
            .setGroup(Resource.newBuilder().setName(CONSUMER_GROUP).build())
            .setMessageQueue(MessageQueue.newBuilder().setTopic(Resource.newBuilder().setName(TOPIC).build()).build())
            .setAutoRenew(true)
            .setFilterExpression(FilterExpression.newBuilder()
                .setType(FilterType.TAG)
                .setExpression("*")
                .build())
            .build(),
        receiveStreamObserver
    );
    assertEquals(Code.MESSAGE_NOT_FOUND,
        getResponseCodeFromReceiveMessageResponseList(responseArgumentCaptor.getAllValues()));
}
/**
 * Issues an RS256-signed JWT for the given claims, using the private key and
 * provider id configured in the security config ({@code jwtConfig}).
 *
 * @param claims the JWT claims to sign.
 * @return the compact JWS serialization (Header.Payload.Signature).
 * @throws JoseException if signing fails.
 * @deprecated presumably superseded by an overload that takes the kid and
 *     private key explicitly — confirm against callers before removing.
 */
@Deprecated
public static String getJwt(JwtClaims claims) throws JoseException {
    String jwt;
    // Load the signing key from the configured key store.
    RSAPrivateKey privateKey = (RSAPrivateKey) getPrivateKey(
        jwtConfig.getKey().getFilename(), jwtConfig.getKey().getPassword(), jwtConfig.getKey().getKeyName());
    // A JWT is a JWS and/or a JWE with JSON claims as the payload.
    // In this example it is a JWS nested inside a JWE
    // So we first create a JsonWebSignature object.
    JsonWebSignature jws = new JsonWebSignature();
    // The payload of the JWS is JSON content of the JWT Claims
    jws.setPayload(claims.toJson());
    // The JWT is signed using the sender's private key
    jws.setKey(privateKey);
    // Get provider from security config file, it should be two digit
    // And the provider id will set as prefix for keyid in the token header, for example: 05100
    // if there is no provider id, we use "00" for the default value
    String provider_id = "";
    if (jwtConfig.getProviderId() != null) {
        provider_id = jwtConfig.getProviderId();
        if (provider_id.length() == 1) {
            // Left-pad single-digit ids to two digits.
            provider_id = "0" + provider_id;
        } else if (provider_id.length() > 2) {
            // Over-long ids are truncated to the first two characters.
            logger.error("provider_id defined in the security.yml file is invalid; the length should be 2");
            provider_id = provider_id.substring(0, 2);
        }
    }
    jws.setKeyIdHeaderValue(provider_id + jwtConfig.getKey().getKid());
    // Set the signature algorithm on the JWT/JWS that will integrity protect the claims
    jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256);
    // Sign the JWS and produce the compact serialization, which will be the inner JWT/JWS
    // representation, which is a string consisting of three dot ('.') separated
    // base64url-encoded parts in the form Header.Payload.Signature
    jwt = jws.getCompactSerialization();
    return jwt;
}
@Test
public void AcGroupAccessControlWrong() throws Exception {
    // Builds a long-lived (10-year) group-based token for a backOffice user;
    // the printed JWT is intended to be copied into other access-control tests.
    JwtClaims claims = ClaimsUtil.getTestClaimsGroup("stevehu", "EMPLOYEE", "f7d42348-c647-4efb-a52d-4c5787421e72", Arrays.asList("account.r", "account.w"), "backOffice");
    claims.setExpirationTimeMinutesInTheFuture(5256000);
    String jwt = JwtIssuer.getJwt(claims, long_kid, KeyUtil.deserializePrivateKey(long_key, KeyUtil.RSA));
    System.out.println("***Long lived token Authorization code customer with roles***: " + jwt);
}
/**
 * Returns the streams this engine was configured with.
 */
public List<Stream> getStreams() {
    return streams;
}
@Test
public void testGetStreams() throws Exception {
    // The engine must report exactly the streams it was constructed with.
    final StreamMock stream = getStreamMock("test");
    final StreamRouterEngine engine = newEngine(Lists.newArrayList(stream));
    assertEquals(Lists.newArrayList(stream), engine.getStreams());
}
/**
 * Parses a hex string into an int, interpreting the bytes as big-endian.
 */
public static int parseHexToInt(String hex) {
    return parseHexToInt(hex, true);
}
@Test
public void parseHexToInt() {
    // Default is big-endian; passing 'false' parses the string's bytes as little-endian.
    Assertions.assertEquals(0xAB, TbUtils.parseHexToInt("AB"));
    Assertions.assertEquals(0xABBA, TbUtils.parseHexToInt("ABBA", true));
    Assertions.assertEquals(0xBAAB, TbUtils.parseHexToInt("ABBA", false));
    Assertions.assertEquals(0xAABBCC, TbUtils.parseHexToInt("AABBCC", true));
    Assertions.assertEquals(0xAABBCC, TbUtils.parseHexToInt("CCBBAA", false));
    // Four bytes with the high bit set would overflow a signed int.
    Assertions.assertThrows(NumberFormatException.class, () -> TbUtils.parseHexToInt("AABBCCDD", true));
    Assertions.assertEquals(0x11BBCC22, TbUtils.parseHexToInt("11BBCC22", true));
    Assertions.assertEquals(0x11BBCC22, TbUtils.parseHexToInt("22CCBB11", false));
}
public static void checkNullOrNonNullNonEmptyEntries( @Nullable Collection<String> values, String propertyName) { if (values == null) { // pass return; } for (String value : values) { Preconditions.checkNotNull( value, "Property '" + propertyName + "' cannot contain null entries"); Preconditions.checkArgument( !value.trim().isEmpty(), "Property '" + propertyName + "' cannot contain empty strings"); } }
@Test
public void testCheckNullOrNonNullNonEmptyEntries_mapNullKeyFail() {
    // A map containing a null key must be rejected with the null-keys message.
    try {
        Validator.checkNullOrNonNullNonEmptyEntries(Collections.singletonMap(null, "val1"), "test");
        Assert.fail();
    } catch (NullPointerException npe) {
        Assert.assertEquals("Property 'test' cannot contain null keys", npe.getMessage());
    }
}
/**
 * Converts an Oracle column type definition into the engine's physical column
 * model, mapping each Oracle data type onto the closest engine type and length.
 *
 * @throws RuntimeException (via CommonError) for unsupported Oracle types.
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    PhysicalColumn.PhysicalColumnBuilder builder =
        PhysicalColumn.builder()
            .name(typeDefine.getName())
            .sourceType(typeDefine.getColumnType())
            .nullable(typeDefine.isNullable())
            .defaultValue(typeDefine.getDefaultValue())
            .comment(typeDefine.getComment());
    String oracleType = typeDefine.getDataType().toUpperCase();
    switch (oracleType) {
        case ORACLE_INTEGER:
            builder.dataType(new DecimalType(DEFAULT_PRECISION, 0));
            builder.columnLength((long) DEFAULT_PRECISION);
            break;
        case ORACLE_NUMBER:
            // Missing/zero/oversized precision falls back to the default precision.
            Long precision = typeDefine.getPrecision();
            if (precision == null || precision == 0 || precision > DEFAULT_PRECISION) {
                precision = Long.valueOf(DEFAULT_PRECISION);
            }
            // A missing scale means an unconstrained NUMBER; 127 marks that case.
            Integer scale = typeDefine.getScale();
            if (scale == null) {
                scale = 127;
            }
            if (scale <= 0) {
                // Non-positive scale: an integral value with precision - scale digits;
                // pick the narrowest integral engine type that can hold it.
                int newPrecision = (int) (precision - scale);
                if (newPrecision == 1) {
                    builder.dataType(BasicType.BOOLEAN_TYPE);
                } else if (newPrecision <= 9) {
                    builder.dataType(BasicType.INT_TYPE);
                } else if (newPrecision <= 18) {
                    builder.dataType(BasicType.LONG_TYPE);
                } else if (newPrecision < 38) {
                    builder.dataType(new DecimalType(newPrecision, 0));
                    builder.columnLength((long) newPrecision);
                } else {
                    builder.dataType(new DecimalType(DEFAULT_PRECISION, 0));
                    builder.columnLength((long) DEFAULT_PRECISION);
                }
            } else if (scale <= DEFAULT_SCALE) {
                builder.dataType(new DecimalType(precision.intValue(), scale));
                builder.columnLength(precision);
                builder.scale(scale);
            } else {
                // Scale beyond the engine maximum is capped at DEFAULT_SCALE.
                builder.dataType(new DecimalType(precision.intValue(), DEFAULT_SCALE));
                builder.columnLength(precision);
                builder.scale(DEFAULT_SCALE);
            }
            break;
        case ORACLE_FLOAT:
            // The float type will be converted to DecimalType(10, -127),
            // which will lose precision in the spark engine
            DecimalType floatDecimal = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
            builder.dataType(floatDecimal);
            builder.columnLength((long) floatDecimal.getPrecision());
            builder.scale(floatDecimal.getScale());
            break;
        case ORACLE_BINARY_FLOAT:
        case ORACLE_REAL:
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case ORACLE_BINARY_DOUBLE:
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        case ORACLE_CHAR:
        case ORACLE_VARCHAR:
        case ORACLE_VARCHAR2:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(typeDefine.getLength());
            break;
        case ORACLE_NCHAR:
        case ORACLE_NVARCHAR2:
            builder.dataType(BasicType.STRING_TYPE);
            // National character types: widen the declared length for multi-byte storage.
            builder.columnLength(
                TypeDefineUtils.doubleByteTo4ByteLength(typeDefine.getLength()));
            break;
        case ORACLE_ROWID:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(MAX_ROWID_LENGTH);
            break;
        case ORACLE_XML:
        case ORACLE_SYS_XML:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(typeDefine.getLength());
            break;
        case ORACLE_LONG:
            builder.dataType(BasicType.STRING_TYPE);
            // The maximum length of the column is 2GB-1
            builder.columnLength(BYTES_2GB - 1);
            break;
        case ORACLE_CLOB:
        case ORACLE_NCLOB:
            builder.dataType(BasicType.STRING_TYPE);
            // The maximum length of the column is 4GB-1
            builder.columnLength(BYTES_4GB - 1);
            break;
        case ORACLE_BLOB:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            // The maximum length of the column is 4GB-1
            builder.columnLength(BYTES_4GB - 1);
            break;
        case ORACLE_RAW:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            // RAW with no declared length defaults to its maximum.
            if (typeDefine.getLength() == null || typeDefine.getLength() == 0) {
                builder.columnLength(MAX_RAW_LENGTH);
            } else {
                builder.columnLength(typeDefine.getLength());
            }
            break;
        case ORACLE_LONG_RAW:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            // The maximum length of the column is 2GB-1
            builder.columnLength(BYTES_2GB - 1);
            break;
        case ORACLE_DATE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            break;
        case ORACLE_TIMESTAMP:
        case ORACLE_TIMESTAMP_WITH_TIME_ZONE:
        case ORACLE_TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            if (typeDefine.getScale() == null) {
                builder.scale(TIMESTAMP_DEFAULT_SCALE);
            } else {
                builder.scale(typeDefine.getScale());
            }
            break;
        default:
            throw CommonError.convertToSeaTunnelTypeError(
                DatabaseIdentifier.ORACLE, oracleType, typeDefine.getName());
    }
    return builder.build();
}
@Test
public void testConvertFloat() {
    // FLOAT with no precision maps to the widest decimal the converter supports.
    BasicTypeDefine<Object> floatDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("float")
                    .dataType("float")
                    .build();
    Column converted = OracleTypeConverter.INSTANCE.convert(floatDefine);
    Assertions.assertEquals(floatDefine.getName(), converted.getName());
    Assertions.assertEquals(new DecimalType(38, 18), converted.getDataType());
    Assertions.assertEquals(floatDefine.getColumnType(), converted.getSourceType());

    // BINARY_FLOAT maps to the engine's 4-byte float type.
    floatDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("binary_float")
                    .dataType("binary_float")
                    .build();
    converted = OracleTypeConverter.INSTANCE.convert(floatDefine);
    Assertions.assertEquals(floatDefine.getName(), converted.getName());
    Assertions.assertEquals(BasicType.FLOAT_TYPE, converted.getDataType());
    Assertions.assertEquals(floatDefine.getColumnType(), converted.getSourceType());

    // REAL is treated the same way as BINARY_FLOAT.
    floatDefine =
            BasicTypeDefine.builder().name("test").columnType("real").dataType("real").build();
    converted = OracleTypeConverter.INSTANCE.convert(floatDefine);
    Assertions.assertEquals(floatDefine.getName(), converted.getName());
    Assertions.assertEquals(BasicType.FLOAT_TYPE, converted.getDataType());
    Assertions.assertEquals(floatDefine.getColumnType(), converted.getSourceType());
}
Map<String, Object> targetAdminConfig(String role) {
    // Start from the target-cluster client settings, keep only recognised
    // client keys, then let the generic and target-specific admin prefixes
    // override them; finally stamp on a role-specific client id.
    Map<String, Object> adminProps = new HashMap<>(originalsWithPrefix(TARGET_CLUSTER_PREFIX));
    adminProps.keySet().retainAll(MirrorClientConfig.CLIENT_CONFIG_DEF.names());
    adminProps.putAll(originalsWithPrefix(ADMIN_CLIENT_PREFIX));
    adminProps.putAll(originalsWithPrefix(TARGET_PREFIX + ADMIN_CLIENT_PREFIX));
    addClientId(adminProps, role);
    return adminProps;
}
@Test
public void testTargetAdminConfig() {
    // A plain admin-prefixed setting must flow into the target admin config.
    Map<String, String> connectorProps = makeProps(
            MirrorConnectorConfig.ADMIN_CLIENT_PREFIX + "connections.max.idle.ms", "10000");
    MirrorConnectorConfig config = new TestMirrorConnectorConfig(connectorProps);

    Map<String, Object> actualAdminProps = config.targetAdminConfig("test");

    Map<String, Object> expectedAdminProps = new HashMap<>();
    expectedAdminProps.put("connections.max.idle.ms", "10000");
    // client.id is derived from source->target flow, connector name and role.
    expectedAdminProps.put("client.id", "source1->target2|ConnectorName|test");
    assertEquals(expectedAdminProps, actualAdminProps,
            MirrorConnectorConfig.ADMIN_CLIENT_PREFIX + " target connector admin props not matching");
}
static Time toTime(final JsonNode object) {
    // Numeric JSON values are used directly as millis-of-day.
    if (object instanceof NumericNode) {
        return returnTimeOrThrow(object.asLong());
    }
    // String values must parse as a long millis-of-day.
    if (object instanceof TextNode) {
        final String text = object.textValue();
        try {
            return returnTimeOrThrow(Long.parseLong(text));
        } catch (final NumberFormatException e) {
            throw failedStringCoercionException(SqlBaseType.TIME);
        }
    }
    // Any other node type (boolean, object, array, null) cannot be a TIME.
    throw invalidConversionException(object, SqlBaseType.TIME);
}
@Test
public void shouldNotConvertNegativeNumberToTime() {
    try {
        JsonSerdeUtils.toTime(JsonNodeFactory.instance.numberNode(-5));
        // FIX: previously the test passed vacuously when no exception was thrown.
        // AssertionError is an Error, so the catch (Exception) below cannot swallow it.
        throw new AssertionError("expected toTime to reject a negative value");
    } catch (Exception e) {
        assertThat(e.getMessage(), equalTo("Time values must use number of milliseconds greater than 0 and less than 86400000."));
    }
}
public static FindKV findKV(String regex, int keyGroup, int valueGroup) {
    // Compile the regex once and delegate to the Pattern-based overload.
    final Pattern pattern = Pattern.compile(regex);
    return findKV(pattern, keyGroup, valueGroup);
}
@Test
@Category(NeedsRunner.class)
public void testKVFindName() {
    // Extract the named groups "keyname"/"valuename" as a KV pair.
    PCollection<KV<String, String>> result =
            p.apply(Create.of("a b c"))
             .apply(Regex.findKV("a (?<keyname>b) (?<valuename>c)", "keyname", "valuename"));
    PAssert.that(result).containsInAnyOrder(KV.of("b", "c"));
    p.run();
}
public File getJvmOptions() {
    // jvm.options lives directly inside the Elasticsearch conf directory.
    final File jvmOptionsFile = new File(confDirectory, "jvm.options");
    return jvmOptionsFile;
}
@Test
public void getJvmOptions_is_in_es_conf_directory() throws IOException {
    // PATH_TEMP is the root under which the embedded ES config tree is created.
    File esTempDir = temp.newFolder();
    Props props = new Props(new Properties());
    props.set(PATH_DATA.getKey(), temp.newFolder().getAbsolutePath());
    props.set(PATH_HOME.getKey(), temp.newFolder().getAbsolutePath());
    props.set(PATH_TEMP.getKey(), esTempDir.getAbsolutePath());
    props.set(PATH_LOGS.getKey(), temp.newFolder().getAbsolutePath());

    EsInstallation installation = new EsInstallation(props);

    assertThat(installation.getJvmOptions()).isEqualTo(new File(esTempDir, "conf/es/jvm.options"));
}
public long position() {
    // A closed image reports the position frozen at close time;
    // otherwise read the live subscriber position.
    return isClosed ? finalPosition : subscriberPosition.get();
}
@Test
void shouldAllowValidPosition() {
    // Start just below the term length, then move exactly to the boundary.
    final Image image = createImage();
    final long initialPosition = TERM_BUFFER_LENGTH - 32;
    position.setOrdered(initialPosition);
    assertThat(image.position(), is(initialPosition));

    image.position(TERM_BUFFER_LENGTH);
    assertThat(image.position(), is((long) TERM_BUFFER_LENGTH));
}
/**
 * Evaluates a JEXL filter expression against a feed entry.
 *
 * <p>The entry's title, author, content, url and categories are exposed to the
 * expression as lower-cased, HTML-stripped strings; {@code year} is the current year.
 * Evaluation runs on an executor with a configured timeout so a malicious or
 * runaway expression cannot block feed refresh.
 *
 * @param filter JEXL expression; blank matches everything
 * @param entry  the entry to test
 * @return the boolean result of the expression
 * @throws FeedEntryFilterException if the expression fails to parse, fails to
 *         evaluate, times out, or does not yield a boolean
 */
public boolean filterMatchesEntry(String filter, FeedEntry entry) throws FeedEntryFilterException {
    // Blank filter matches everything.
    if (StringUtils.isBlank(filter)) {
        return true;
    }

    Script script;
    try {
        script = ENGINE.createScript(filter);
    } catch (JexlException e) {
        throw new FeedEntryFilterException("Exception while parsing expression " + filter, e);
    }

    // Expose normalized (lower-cased, tag-stripped) entry fields to the expression.
    JexlContext context = new MapContext();
    context.set("title", entry.getContent().getTitle() == null ? "" : Jsoup.parse(entry.getContent().getTitle()).text().toLowerCase());
    context.set("author", entry.getContent().getAuthor() == null ? "" : entry.getContent().getAuthor().toLowerCase());
    context.set("content", entry.getContent().getContent() == null ? "" : Jsoup.parse(entry.getContent().getContent()).text().toLowerCase());
    context.set("url", entry.getUrl() == null ? "" : entry.getUrl().toLowerCase());
    context.set("categories", entry.getContent().getCategories() == null ? "" : entry.getContent().getCategories().toLowerCase());
    context.set("year", Year.now().getValue());

    Callable<Object> callable = script.callable(context);
    Future<Object> future = executor.submit(callable);
    Object result;
    try {
        result = future.get(config.feedRefresh().filteringExpressionEvaluationTimeout().toMillis(), TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        // Stop the evaluation before propagating, then restore the interrupt flag.
        future.cancel(true);
        Thread.currentThread().interrupt();
        throw new FeedEntryFilterException("interrupted while evaluating expression " + filter, e);
    } catch (ExecutionException e) {
        throw new FeedEntryFilterException("Exception while evaluating expression " + filter, e);
    } catch (TimeoutException e) {
        // FIX: cancel the task on timeout; otherwise a non-terminating expression
        // (e.g. "while(true) {}") keeps occupying an executor thread forever.
        future.cancel(true);
        throw new FeedEntryFilterException("Took too long evaluating expression " + filter, e);
    }

    try {
        return (boolean) result;
    } catch (ClassCastException e) {
        throw new FeedEntryFilterException(e.getMessage(), e);
    }
}
@Test
void cannotLoopForever() {
    // A never-terminating expression must be aborted by the evaluation timeout.
    final Duration evaluationTimeout = Duration.ofMillis(200);
    Mockito.when(config.feedRefresh().filteringExpressionEvaluationTimeout()).thenReturn(evaluationTimeout);
    service = new FeedEntryFilteringService(config);
    Assertions.assertThrows(FeedEntryFilterException.class,
            () -> service.filterMatchesEntry("while(true) {}", entry));
}
@Override
@SneakyThrows
public WxJsapiSignature createWxMpJsapiSignature(Integer userType, String url) {
    // Resolve the MP service bound to this user type, then sign the URL with it.
    final WxMpService mpService = getWxMpService(userType);
    return mpService.createJsapiSignature(url);
}
@Test public void testCreateWxMpJsapiSignature() throws WxErrorException { // 准备参数 Integer userType = randomPojo(UserTypeEnum.class).getValue(); String url = randomString(); // mock 方法 WxJsapiSignature signature = randomPojo(WxJsapiSignature.class); when(wxMpService.createJsapiSignature(eq(url))).thenReturn(signature); // 调用 WxJsapiSignature result = socialClientService.createWxMpJsapiSignature(userType, url); // 断言 assertSame(signature, result); }
@SuppressWarnings({"unchecked", "rawtypes"})
public Collection<DataNode> getDataNodes(final String tableName) {
    Collection<DataNode> dataNodes = getDataNodesByTableName(tableName);
    if (dataNodes.isEmpty()) {
        return dataNodes;
    }
    // Let every registered builder decorate the node collection in turn.
    for (Entry<ShardingSphereRule, DataNodeBuilder> each : dataNodeBuilders.entrySet()) {
        dataNodes = each.getValue().build(dataNodes, each.getKey());
    }
    return dataNodes;
}
@Test
void assertGetDataNodesForSingleTableWithDataNodeContainedRuleWithoutDataSourceContainedRule() {
    DataNodes dataNodes = new DataNodes(mockDataNodeRules());
    Collection<DataNode> actual = dataNodes.getDataNodes("t_single");
    assertThat(actual.size(), is(1));
    // The single node must point at the readwrite data source.
    DataNode node = actual.iterator().next();
    assertThat(node.getDataSourceName(), is("readwrite_ds"));
    assertThat(node.getTableName(), is("t_single"));
}
/**
 * Fetches ONU GEM/Ethernet statistics from the device over NETCONF.
 *
 * @param target PON link id, optionally followed by an ONU id (parsed by
 *               {@code checkIdString}); {@code null} requests all ONU statistics
 * @return the raw NETCONF reply, or {@code null} when this node is not master
 *         for the device, the target id is malformed, or the session fails
 */
@Override
public String getOnuStatistics(String target) {
    DriverHandler handler = handler();
    NetconfController controller = handler.get(NetconfController.class);
    MastershipService mastershipService = handler.get(MastershipService.class);
    DeviceId ncDeviceId = handler.data().deviceId();
    checkNotNull(controller, "Netconf controller is null");
    String reply = null;
    String[] onuId = null;
    // Only the mastership owner may talk to the device.
    if (!mastershipService.isLocalMaster(ncDeviceId)) {
        log.warn("Not master for {} Use {} to execute command",
                 ncDeviceId,
                 mastershipService.getMasterFor(ncDeviceId));
        return null;
    }
    // Parse "ponlink[-onu]" into its parts; a null result means a bad id.
    if (target != null) {
        onuId = checkIdString(target);
        if (onuId == null) {
            log.error("Failed to check ID: {}", target);
            return null;
        }
    }
    try {
        // Build the <volt-statistics> request body.
        StringBuilder request = new StringBuilder();
        request.append(VOLT_NE_OPEN + VOLT_NE_NAMESPACE);
        request.append(ANGLE_RIGHT + NEW_LINE);
        request.append(buildStartTag(VOLT_STATISTICS));
        if (onuId != null) {
            // Targeted query: GEM statistics for the given PON link (and ONU, if present)...
            request.append(buildStartTag(ONU_STATISTICS))
                    .append(buildStartTag(ONU_GEM_STATS))
                    .append(buildStartTag(GEM_STATS))
                    .append(buildStartTag(PONLINK_ID, false))
                    .append(onuId[FIRST_PART])
                    .append(buildEndTag(PONLINK_ID));
            if (onuId.length > ONE) {
                request.append(buildStartTag(ONU_ID, false))
                        .append(onuId[SECOND_PART])
                        .append(buildEndTag(ONU_ID));
            }
            request.append(buildEndTag(GEM_STATS))
                    .append(buildEndTag(ONU_GEM_STATS));
            // ...followed by the matching Ethernet statistics section.
            request.append(buildStartTag(ONU_ETH_STATS))
                    .append(buildStartTag(ETH_STATS))
                    .append(buildStartTag(PONLINK_ID, false))
                    .append(onuId[FIRST_PART])
                    .append(buildEndTag(PONLINK_ID));
            if (onuId.length > ONE) {
                request.append(buildStartTag(ONU_ID, false))
                        .append(onuId[SECOND_PART])
                        .append(buildEndTag(ONU_ID));
            }
            request.append(buildEndTag(ETH_STATS))
                    .append(buildEndTag(ONU_ETH_STATS))
                    .append(buildEndTag(ONU_STATISTICS));
        } else {
            // No target: an empty tag requests statistics for every ONU.
            request.append(buildEmptyTag(ONU_STATISTICS));
        }
        request.append(buildEndTag(VOLT_STATISTICS))
                .append(VOLT_NE_CLOSE);
        // Issue the get over the device's NETCONF session.
        reply = controller
                .getDevicesMap()
                .get(ncDeviceId)
                .getSession()
                .get(request.toString(), REPORT_ALL);
    } catch (NetconfException e) {
        // Best effort: log and return null rather than propagate transport errors.
        log.error("Cannot communicate to device {} exception {}", ncDeviceId, e);
    }
    return reply;
}
@Test
public void testValidGetOnuStats() throws Exception {
    // Every valid target string must yield a non-null reply.
    for (int tc = ZERO; tc < VALID_GET_STATS_TCS.length; tc++) {
        currentKey = tc;
        String target = VALID_GET_STATS_TCS[tc];
        String reply = voltConfig.getOnuStatistics(target);
        assertNotNull("Incorrect response for VALID_GET_STATS_TCS", reply);
    }
}
/** Returns the configured period, in milliseconds. */
@Override
public long getPeriodMillis() {
    return this.periodMillis;
}
@Test
public void testGetPeriodMillis() {
    // The plugin is expected to report a fixed 1-second period.
    final long expectedPeriodMillis = 1000;
    assertEquals(expectedPeriodMillis, plugin.getPeriodMillis());
}
/**
 * Parses a journal PDF: first extracts scholarly metadata via the Grobid REST
 * service, then runs regular PDF text extraction into the same handler/metadata.
 *
 * @throws IOException   on stream or temp-file failures
 * @throws SAXException  from the content handler
 * @throws TikaException from either underlying parser
 */
public void parse(InputStream stream, ContentHandler handler, Metadata metadata,
        ParseContext context) throws IOException, SAXException, TikaException {
    // Spool the input to a temp file so it can be consumed twice.
    TikaInputStream tis = TikaInputStream.get(stream, new TemporaryResources(), metadata);
    File tmpFile = tis.getFile();

    // Pass 1: scholarly metadata from Grobid (works on the file path).
    GrobidRESTParser grobidParser = new GrobidRESTParser();
    grobidParser.parse(tmpFile.getAbsolutePath(), handler, metadata, context);

    // Pass 2: plain PDF extraction.
    // FIX: the FileInputStream was previously never closed (resource leak).
    PDFParser parser = new PDFParser();
    try (InputStream fileStream = new FileInputStream(tmpFile)) {
        parser.parse(fileStream, handler, metadata, context);
    }
}
@Test
public void testJournalParser() {
    String path = "/test-documents/testJournalParser.pdf";
    ContentHandler handler = new BodyContentHandler();
    Metadata metadata = new Metadata();
    assumeTrue(canRun());

    JournalParser jParser = new JournalParser();
    // FIX: close the resource stream (was leaked) and fail with context
    // instead of printStackTrace + message-only fail.
    try (InputStream stream = JournalParserTest.class.getResourceAsStream(path)) {
        jParser.parse(stream, handler, metadata, new ParseContext());
    } catch (Exception e) {
        fail("parsing " + path + " failed: " + e);
    }
    assertNotNull(metadata.get("grobid:header_Title"));
}
/**
 * Handles the "downgrade" sub-command by delegating to the shared
 * upgrade/downgrade implementation with the downgrade type parsed from the
 * command-line namespace.
 *
 * @throws TerseException if any requested downgrade operation fails
 */
static void handleDowngrade(Namespace namespace, Admin adminClient) throws TerseException {
    handleUpgradeOrDowngrade("downgrade", namespace, adminClient, downgradeType(namespace));
}
@Test
public void testHandleDowngradeDryRun() {
    // Request a metadata downgrade plus one feature downgrade, dry-run only.
    Map<String, Object> parsedArgs = new HashMap<>();
    parsedArgs.put("metadata", "3.3-IV3");
    parsedArgs.put("feature", Collections.singletonList("foo.bar=1"));
    parsedArgs.put("dry_run", true);

    String downgradeOutput = ToolsTestUtils.captureStandardOut(() -> {
        // The metadata downgrade is invalid, so one of the two operations fails.
        Throwable thrown = assertThrows(TerseException.class,
                () -> FeatureCommand.handleDowngrade(new Namespace(parsedArgs), buildAdminClient()));
        assertTrue(thrown.getMessage().contains("1 out of 2 operation(s) failed."));
    });
    assertEquals(format("foo.bar can be downgraded to 1.%n"
            + "Can not downgrade metadata.version to 7. Can't downgrade to newer version."), downgradeOutput);
}
public static File applyBaseDirIfRelative(File baseDir, File actualFileToUse) {
    // No override given: fall back to the base directory itself.
    if (actualFileToUse == null) {
        return baseDir;
    }
    // Absolute paths are used as-is; so are relative paths when the base
    // has an empty path and therefore cannot anchor anything.
    if (actualFileToUse.isAbsolute() || StringUtils.isBlank(baseDir.getPath())) {
        return actualFileToUse;
    }
    // Resolve the relative path against the base directory.
    return new File(baseDir, actualFileToUse.getPath());
}
@Test
void shouldUseDefaultIfActualIsNull() {
    // A null override resolves to the base directory itself.
    final File base = new File("xyz");
    assertThat(FileUtil.applyBaseDirIfRelative(base, null)).isEqualTo(base);
}
public List<String> listOfConfigKeys() {
    // Collect the key name of every property, preserving iteration order.
    List<String> keyNames = new ArrayList<>();
    this.forEach(property -> keyNames.add(property.getConfigurationKey().getName()));
    return keyNames;
}
@Test
void shouldGetConfigurationKeysAsList() {
    // Two properties in, their key names out — in declaration order.
    Configuration config = new Configuration(
            new ConfigurationProperty(new ConfigurationKey("key1"), new ConfigurationValue("value1"), null, null),
            new ConfigurationProperty(new ConfigurationKey("key2"), new ConfigurationValue("value2"), null, null));
    assertThat(config.listOfConfigKeys()).isEqualTo(List.of("key1", "key2"));
}
/**
 * Applies the member-point deduction to the order price calculation.
 *
 * <p>Skips silently (leaving usePoint at 0) when the buyer did not opt in,
 * point deduction is disabled, or the user has no point balance. Otherwise the
 * computed discount is split across the selected items, recorded as a
 * promotion, and all prices are recounted.
 */
@Override
public void calculate(TradePriceCalculateReqBO param, TradePriceCalculateRespBO result) {
    // Default: no points used.
    result.setUsePoint(0);
    // 1.1 Skip unless the buyer opted in to paying with points.
    if (!BooleanUtil.isTrue(param.getPointStatus())) {
        result.setUsePoint(0);
        return;
    }
    // 1.2 Skip when point deduction is disabled in the member configuration.
    MemberConfigRespDTO config = memberConfigApi.getConfig();
    if (!isDeductPointEnable(config)) {
        return;
    }
    // 1.3 Skip when the user has no positive point balance.
    MemberUserRespDTO user = memberUserApi.getUser(param.getUserId());
    if (user.getPoint() == null || user.getPoint() <= 0) {
        return;
    }
    // 2.1 Compute the discount amount (in cents) granted by the points.
    // NOTE(review): calculatePointPrice presumably also records the consumed
    // points via result.setUsePoint — result.getUsePoint() is read below; confirm.
    int pointPrice = calculatePointPrice(config, user.getPoint(), result);
    // 2.2 Split the discount and the used points across the selected items.
    List<TradePriceCalculateRespBO.OrderItem> orderItems = filterList(result.getItems(), TradePriceCalculateRespBO.OrderItem::getSelected);
    List<Integer> dividePointPrices = TradePriceCalculatorHelper.dividePrice(orderItems, pointPrice);
    List<Integer> divideUsePoints = TradePriceCalculatorHelper.dividePrice(orderItems, result.getUsePoint());
    // 3.1 Record the promotion detail for the point deduction.
    TradePriceCalculatorHelper.addPromotion(result, orderItems, param.getUserId(), "积分抵扣", PromotionTypeEnum.POINT.getType(),
            StrUtil.format("积分抵扣:省 {} 元", TradePriceCalculatorHelper.formatPrice(pointPrice)), dividePointPrices);
    // 3.2 Apply each item's share and recount its pay price.
    for (int i = 0; i < orderItems.size(); i++) {
        TradePriceCalculateRespBO.OrderItem orderItem = orderItems.get(i);
        orderItem.setPointPrice(dividePointPrices.get(i));
        orderItem.setUsePoint(divideUsePoints.get(i));
        TradePriceCalculatorHelper.recountPayPrice(orderItem);
    }
    // Recompute the order-level totals from the updated items.
    TradePriceCalculatorHelper.recountAllPrice(result);
}
@Test
public void testCalculate_TradeDeductMaxPrice() {
    // Arrange: user 233 opts into point deduction; two selected items, one unselected.
    TradePriceCalculateReqBO param = new TradePriceCalculateReqBO()
            .setUserId(233L).setPointStatus(true) // whether points are used
            .setItems(asList(
                    new TradePriceCalculateReqBO.Item().setSkuId(10L).setCount(2).setSelected(true), // uses points
                    new TradePriceCalculateReqBO.Item().setSkuId(20L).setCount(3).setSelected(true), // uses points
                    new TradePriceCalculateReqBO.Item().setSkuId(30L).setCount(5).setSelected(false) // not selected, no points
            ));
    TradePriceCalculateRespBO result = new TradePriceCalculateRespBO()
            .setType(TradeOrderTypeEnum.NORMAL.getType())
            .setPrice(new TradePriceCalculateRespBO.Price())
            .setPromotions(new ArrayList<>())
            .setItems(asList(
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(10L).setCount(2).setSelected(true)
                            .setPrice(100).setSpuId(1L),
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(20L).setCount(3).setSelected(true)
                            .setPrice(50).setSpuId(2L),
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(30L).setCount(5).setSelected(false)
                            .setPrice(30).setSpuId(3L)
            ));
    // Make sure the price fields are initialized before calculation.
    TradePriceCalculatorHelper.recountPayPrice(result.getItems());
    TradePriceCalculatorHelper.recountAllPrice(result);
    // Mock the member point configuration.
    MemberConfigRespDTO memberConfig = randomPojo(MemberConfigRespDTO.class,
            o -> o.setPointTradeDeductEnable(true) // point deduction enabled
                    .setPointTradeDeductUnitPrice(1) // cents deducted per point
                    .setPointTradeDeductMaxPrice(50)); // maximum deductible amount
    when(memberConfigApi.getConfig()).thenReturn(memberConfig);
    // Mock the member user with a 100-point balance.
    MemberUserRespDTO user = randomPojo(MemberUserRespDTO.class, o -> o.setId(param.getUserId()).setPoint(100));
    when(memberUserApi.getUser(user.getId())).thenReturn(user);
    // Act
    tradePointUsePriceCalculator.calculate(param, result);
    // Assert: points used is capped at the configured maximum (50).
    assertEquals(result.getUsePoint(), 50);
    // Assert: Price part.
    TradePriceCalculateRespBO.Price price = result.getPrice();
    assertEquals(price.getTotalPrice(), 350);
    assertEquals(price.getPayPrice(), 300);
    assertEquals(price.getPointPrice(), 50);
    // Assert: SKU 1 — carries its proportional share of the 50-cent discount.
    TradePriceCalculateRespBO.OrderItem orderItem01 = result.getItems().get(0);
    assertEquals(orderItem01.getSkuId(), 10L);
    assertEquals(orderItem01.getCount(), 2);
    assertEquals(orderItem01.getPrice(), 100);
    assertEquals(orderItem01.getPointPrice(), 28);
    assertEquals(orderItem01.getPayPrice(), 172);
    // Assert: SKU 2 — receives the remainder of the split.
    TradePriceCalculateRespBO.OrderItem orderItem02 = result.getItems().get(1);
    assertEquals(orderItem02.getSkuId(), 20L);
    assertEquals(orderItem02.getCount(), 3);
    assertEquals(orderItem02.getPrice(), 50);
    assertEquals(orderItem02.getPointPrice(), 22);
    assertEquals(orderItem02.getPayPrice(), 128);
    // Assert: SKU 3 — unselected, so no discount applies.
    TradePriceCalculateRespBO.OrderItem orderItem03 = result.getItems().get(2);
    assertEquals(orderItem03.getSkuId(), 30L);
    assertEquals(orderItem03.getCount(), 5);
    assertEquals(orderItem03.getPrice(), 30);
    assertEquals(orderItem03.getPointPrice(), 0);
    assertEquals(orderItem03.getPayPrice(), 150);
    // Assert: Promotion part — a single point-deduction promotion over both selected SKUs.
    assertEquals(result.getPromotions().size(), 1);
    TradePriceCalculateRespBO.Promotion promotion01 = result.getPromotions().get(0);
    assertEquals(promotion01.getId(), user.getId());
    assertEquals(promotion01.getName(), "积分抵扣");
    assertEquals(promotion01.getType(), PromotionTypeEnum.POINT.getType());
    assertEquals(promotion01.getTotalPrice(), 350);
    assertEquals(promotion01.getDiscountPrice(), 50);
    assertTrue(promotion01.getMatch());
    assertEquals(promotion01.getDescription(), "积分抵扣:省 0.50 元");
    assertEquals(promotion01.getItems().size(), 2);
    TradePriceCalculateRespBO.PromotionItem promotionItem011 = promotion01.getItems().get(0);
    assertEquals(promotionItem011.getSkuId(), 10L);
    assertEquals(promotionItem011.getTotalPrice(), 200);
    assertEquals(promotionItem011.getDiscountPrice(), 28);
    TradePriceCalculateRespBO.PromotionItem promotionItem012 = promotion01.getItems().get(1);
    assertEquals(promotionItem012.getSkuId(), 20L);
    assertEquals(promotionItem012.getTotalPrice(), 150);
    assertEquals(promotionItem012.getDiscountPrice(), 22);
}
/**
 * Updates a user's password and publishes a password-changed event.
 *
 * <p>If the stored password already equals {@code newPassword}, the filter
 * empties the chain: nothing is written and no event is published.
 */
@Override
public Mono<User> updatePassword(String username, String newPassword) {
    return getUser(username)
            // Skip the write (and the event) when the password is unchanged.
            .filter(user -> !Objects.equals(user.getSpec().getPassword(), newPassword))
            .flatMap(user -> {
                user.getSpec().setPassword(newPassword);
                return client.update(user);
            })
            // Fires only when an update actually happened.
            // NOTE(review): the value is stored/compared as-is; confirm callers
            // pass an already-encoded password rather than plain text.
            .doOnNext(user -> publishPasswordChangedEvent(username));
}
@Test
void shouldUpdatePasswordIfUserFoundInExtension() {
    // An existing user with a different password gets updated in place.
    var fakeUser = new User();
    fakeUser.setSpec(new User.UserSpec());
    when(client.get(User.class, "faker")).thenReturn(Mono.just(fakeUser));
    when(client.update(eq(fakeUser))).thenReturn(Mono.just(fakeUser));

    StepVerifier.create(userService.updatePassword("faker", "new-fake-password"))
            .expectNext(fakeUser)
            .verifyComplete();

    verify(client, times(1)).get(eq(User.class), eq("faker"));
    // The persisted user must carry the new password.
    verify(client, times(1)).update(argThat(extension -> {
        var user = (User) extension;
        return "new-fake-password".equals(user.getSpec().getPassword());
    }));
    // The password change is announced to listeners.
    verify(eventPublisher).publishEvent(any(PasswordChangedEvent.class));
}
@Override
public boolean deleteAll(JobID jobId) {
    // Remove the job's entire blob storage directory in one operation.
    final String storagePath = BlobUtils.getStorageLocationPath(basePath, jobId);
    return delete(storagePath);
}
@Test
void testDeleteAllWithNotExistingJobId() {
    // Deleting blobs of an unknown job is a successful no-op.
    final JobID unknownJobId = new JobID();
    assertThat(testInstance.deleteAll(unknownJobId)).isTrue();
    assertThat(getPath(unknownJobId)).doesNotExist();
}
public Editor edit(String key) throws IOException {
    // No expected sequence number: accept whatever snapshot is current.
    final long anySequence = ANY_SEQUENCE_NUMBER;
    return edit(key, anySequence);
}
@Test
public void cannotOperateOnEditAfterRevert() throws Exception {
    // Abort an edit after staging values; the editor must then reject everything.
    DiskLruCache.Editor abortedEditor = cache.edit("k1");
    abortedEditor.set(0, "A");
    abortedEditor.set(1, "B");
    abortedEditor.abort();
    assertInoperable(abortedEditor);
}
public static <T extends PipelineOptions> T validateCli(Class<T> klass, PipelineOptions options) {
    // CLI mode: validation failures are reported using command-line flag names.
    final boolean isCliContext = true;
    return validate(klass, options, isCliContext);
}
@Test
public void testValidationOnOverriddenMethodsCli() {
    // Expect a CLI-style error that names the flag and its description.
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage(
        "Missing required value for "
        + "[--object, \"Fake Description\"].");
    // Build options without supplying the required value, then validate in CLI mode.
    SubClassValidation required =
        PipelineOptionsFactory.fromArgs(new String[] {}).as(SubClassValidation.class);
    PipelineOptionsValidator.validateCli(Required.class, required);
}
public void poll(final long timeoutMs, final long currentTimeMs) {
    // Flush queued requests before entering the network poll.
    trySend(currentTimeMs);

    // While requests remain unsent, cap the wait so we retry sending promptly.
    final long pollTimeout = unsentRequests.isEmpty()
            ? timeoutMs
            : Math.min(retryBackoffMs, timeoutMs);
    this.client.poll(pollTimeout, currentTimeMs);

    maybePropagateMetadataError();
    checkDisconnects(currentTimeMs);
}
@Test
public void testPropagateMetadataError() {
    // Make metadata surface an authentication failure on the next poll.
    AuthenticationException authException = new AuthenticationException("Test Auth Exception");
    doThrow(authException).when(metadata).maybeThrowAnyException();

    LinkedList<BackgroundEvent> backgroundEventQueue = new LinkedList<>();
    this.backgroundEventHandler = new BackgroundEventHandler(backgroundEventQueue);
    NetworkClientDelegate networkClientDelegate = newNetworkClientDelegate();
    assertEquals(0, backgroundEventQueue.size());

    // poll() must convert the metadata error into exactly one ERROR background event.
    networkClientDelegate.poll(0, time.milliseconds());
    assertEquals(1, backgroundEventQueue.size());

    BackgroundEvent event = backgroundEventQueue.poll();
    assertNotNull(event);
    assertEquals(BackgroundEvent.Type.ERROR, event.type());
    assertEquals(authException, ((ErrorEvent) event).error());
}
/**
 * Sets whether this service is ephemeral.
 *
 * @param ephemeral {@code true} for an ephemeral service, {@code false} for a persistent one
 */
public void setEphemeral(boolean ephemeral) {
    this.ephemeral = ephemeral;
}
@Test
void testSetEphemeral() {
    // The setter value must be reflected by the matching getter.
    serviceMetadata.setEphemeral(false);
    assertFalse(serviceMetadata.isEphemeral());
}
public static ClusterHealthStatus isHealth(List<RemoteInstance> remoteInstances) {
    // No members at all means the cluster state is unreadable.
    if (CollectionUtils.isEmpty(remoteInstances)) {
        return ClusterHealthStatus.unHealth("can't get the instance list");
    }
    // Non-receiver roles must find themselves in the member list.
    if (!CoreModuleConfig.Role.Receiver.equals(ROLE)) {
        boolean containsSelf = remoteInstances.stream()
                .anyMatch(remoteInstance -> remoteInstance.getAddress().isSelf());
        if (!containsSelf) {
            return ClusterHealthStatus.unHealth("can't get itself");
        }
    }
    // Loopback-style addresses are only legal for a single-node cluster.
    if (remoteInstances.size() > 1 && hasIllegalNodeAddress(remoteInstances)) {
        return ClusterHealthStatus.unHealth("find illegal node in cluster mode such as 127.0.0.1, localhost");
    }
    return ClusterHealthStatus.HEALTH;
}
@Test
public void unHealthWithEmptyInstance() {
    // An empty member list can never be a healthy cluster.
    ClusterHealthStatus status = OAPNodeChecker.isHealth(Lists.newArrayList());
    Assertions.assertFalse(status.isHealth());
}
/**
 * Returns the configured DNS resolution cache, or {@code null} if none was set.
 */
@Nullable
public DnsCache resolveCache() {
    return resolveCache;
}
@Test
void resolveCacheBadValues() {
    // A default build leaves the cache unset (null).
    assertThat(builder.build().resolveCache()).isNull();
    // Explicitly passing a null cache to the builder is rejected.
    assertThatExceptionOfType(NullPointerException.class)
        .isThrownBy(() -> builder.resolveCache(null));
}
/**
 * Creates the given topics (or verifies topics that already exist) using a
 * short-lived admin client built from the supplied configs.
 *
 * @param failOnExisting whether a pre-existing topic is treated as an error
 * @throws Throwable any failure from topic creation/verification, re-thrown
 *         after logging so the admin client is still closed
 */
public static void createTopics(
    Logger log, String bootstrapServers,
    Map<String, String> commonClientConf,
    Map<String, String> adminClientConf,
    Map<String, NewTopic> topics,
    boolean failOnExisting) throws Throwable {
    // this method wraps the call to createTopics() that takes admin client, so that we can
    // unit test the functionality with MockAdminClient. The exception is caught and
    // re-thrown so that admin client is closed when the method returns.
    try (Admin adminClient
             = createAdminClient(bootstrapServers, commonClientConf, adminClientConf)) {
        createTopics(log, adminClient, topics, failOnExisting);
    } catch (Exception e) {
        log.warn("Failed to create or verify topics {}", topics, e);
        throw e;
    }
}
@Test
public void testCreatesOneTopicVerifiesOneTopic() throws Throwable {
    final String existingTopic = "existing-topic";
    // Pre-create "existing-topic" with two partitions in the mock admin client.
    List<TopicPartitionInfo> tpInfo = new ArrayList<>();
    tpInfo.add(new TopicPartitionInfo(0, broker1, singleReplica, Collections.emptyList()));
    tpInfo.add(new TopicPartitionInfo(1, broker2, singleReplica, Collections.emptyList()));
    adminClient.addTopic(
        false,
        existingTopic,
        tpInfo,
        null);
    // Request both the existing topic (verify path) and a new one (create path).
    Map<String, NewTopic> topics = new HashMap<>();
    topics.put(existingTopic,
               new NewTopic(existingTopic, tpInfo.size(), TEST_REPLICATION_FACTOR));
    topics.put(TEST_TOPIC, NEW_TEST_TOPIC);

    // failOnExisting = false: the pre-existing topic must be tolerated.
    WorkerUtils.createTopics(log, adminClient, topics, false);
    assertEquals(Utils.mkSet(existingTopic, TEST_TOPIC),
                 adminClient.listTopics().names().get());
}
public void resumePollingForPartitionsWithAvailableSpace() {
    // Fan the resume signal out to every active task.
    tasks.activeTasks().forEach(Task::resumePollingForPartitionsWithAvailableSpace);
}
@Test
public void shouldResumePollingForPartitionsWithAvailableSpaceForAllActiveTasks() {
    // Two running stateful tasks registered as the active set.
    final StreamTask activeTask1 = statefulTask(taskId00, taskId00ChangelogPartitions)
        .inState(State.RUNNING)
        .withInputPartitions(taskId00Partitions).build();
    final StreamTask activeTask2 = statefulTask(taskId01, taskId01ChangelogPartitions)
        .inState(State.RUNNING)
        .withInputPartitions(taskId01Partitions).build();
    final TasksRegistry tasks = mock(TasksRegistry.class);
    final TaskManager taskManager = setUpTaskManager(ProcessingMode.AT_LEAST_ONCE, tasks, true);
    when(tasks.activeTasks()).thenReturn(mkSet(activeTask1, activeTask2));

    taskManager.resumePollingForPartitionsWithAvailableSpace();

    // The call must fan out to every active task.
    verify(activeTask1).resumePollingForPartitionsWithAvailableSpace();
    verify(activeTask2).resumePollingForPartitionsWithAvailableSpace();
}
public static void setTransferEncodingChunked(HttpMessage m, boolean chunked) {
    if (chunked) {
        // Chunked transfer and Content-Length are mutually exclusive.
        m.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED);
        m.headers().remove(HttpHeaderNames.CONTENT_LENGTH);
        return;
    }
    List<String> encodings = m.headers().getAll(HttpHeaderNames.TRANSFER_ENCODING);
    if (encodings.isEmpty()) {
        return;
    }
    // Strip every "chunked" token while keeping any other transfer encodings.
    List<CharSequence> remaining = new ArrayList<CharSequence>(encodings);
    for (Iterator<CharSequence> it = remaining.iterator(); it.hasNext();) {
        if (HttpHeaderValues.CHUNKED.contentEqualsIgnoreCase(it.next())) {
            it.remove();
        }
    }
    if (remaining.isEmpty()) {
        m.headers().remove(HttpHeaderNames.TRANSFER_ENCODING);
    } else {
        m.headers().set(HttpHeaderNames.TRANSFER_ENCODING, remaining);
    }
}
@Test
public void testDoubleChunkedHeader() {
    HttpMessage message = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
    message.headers().add(HttpHeaderNames.TRANSFER_ENCODING, "chunked");

    HttpUtil.setTransferEncodingChunked(message, true);

    // set(...) must replace the pre-existing header rather than append a duplicate.
    assertEquals(singletonList("chunked"),
            message.headers().getAll(HttpHeaderNames.TRANSFER_ENCODING));
}
static SelType callJavaMethod(Object javaObj, SelType[] args, MethodHandle m, String methodName) {
    try {
        // Dispatch by arity; only 0-2 argument calls are supported.
        switch (args.length) {
            case 0:
                return callJavaMethod0(javaObj, m);
            case 1:
                return callJavaMethod1(javaObj, args[0], m);
            case 2:
                return callJavaMethod2(javaObj, args[0], args[1], m);
            default:
                break; // fall through to the unsupported-arity error below
        }
    } catch (IllegalStateException e) {
        // Internal invariant violations propagate unchanged.
        throw e;
    } catch (Throwable t) {
        throw new IllegalArgumentException("Failed calling method " + methodName, t);
    }
    throw new UnsupportedOperationException(
        "DO NOT support calling method: " + methodName + " with args: " + Arrays.toString(args));
}
@Test
public void testCallJavaMethodWithTwoArgs() throws Throwable {
    // Static two-arg target: staticTwoArgs(double, boolean) -> double.
    m1 =
        MethodHandles.lookup()
            .findStatic(
                MockType.class,
                "staticTwoArgs",
                MethodType.methodType(double.class, double.class, boolean.class));
    // Virtual two-arg target: twoArgs(String, DateTime) -> String.
    m2 =
        MethodHandles.lookup()
            .findVirtual(
                MockType.class,
                "twoArgs",
                MethodType.methodType(String.class, String.class, DateTime.class));

    // Static call: no receiver, so javaObj is null.
    SelType res =
        SelTypeUtil.callJavaMethod(
            null, new SelType[] {SelDouble.of(1.2), SelBoolean.of(true)}, m1, "staticTwoArgs");
    assertEquals(SelTypes.DOUBLE, res.type());
    assertEquals(2.2, ((SelDouble) res).doubleVal(), 0.01);

    // Virtual call: a receiver instance is required.
    res =
        SelTypeUtil.callJavaMethod(
            new MockType(),
            new SelType[] {SelString.of("foo"), SelJodaDateTime.of(new DateTime(DateTimeZone.UTC))},
            m2,
            "twoArgs");
    assertEquals(SelTypes.STRING, res.type());
    assertEquals("foo1970-01-01T00:00:12.345Z", res.toString());
}
public static List<ReservationAllocationState> convertAllocationsToReservationInfo(
    Set<ReservationAllocation> res, boolean includeResourceAllocations) {
  List<ReservationAllocationState> reservationInfo = new ArrayList<>();
  for (ReservationAllocation allocation : res) {
    // Per-allocation resource requests, materialized only when asked for.
    List<ResourceAllocationRequest> allocations = new ArrayList<>();
    if (includeResourceAllocations) {
      for (Map.Entry<ReservationInterval, Resource> request
          : allocation.getAllocationRequests().entrySet()) {
        ReservationInterval interval = request.getKey();
        allocations.add(ResourceAllocationRequest.newInstance(
            interval.getStartTime(), interval.getEndTime(), request.getValue()));
      }
    }
    reservationInfo.add(ReservationAllocationState.newInstance(
        allocation.getAcceptanceTime(), allocation.getUser(), allocations,
        allocation.getReservationId(), allocation.getReservationDefinition()));
  }
  return reservationInfo;
}
@Test
public void testConvertAllocationsToReservationInfoEmptySet() {
    // An empty allocation set must convert to an empty info list.
    List<ReservationAllocationState> converted =
        ReservationSystemUtil.convertAllocationsToReservationInfo(
            Collections.<ReservationAllocation>emptySet(), false);
    assertThat(converted).isEmpty();
}
/**
 * Validates a consumer {@code group.instance.id} using the same character and
 * length rules as topic names.
 *
 * @param id the group instance id to check
 * @throws InvalidConfigurationException if the id violates the rules
 */
public static void validateGroupInstanceId(String id) {
    Topic.validate(id, "Group instance id", message -> {
        throw new InvalidConfigurationException(message);
    });
}
@Test public void shouldThrowOnInvalidGroupInstanceIds() { char[] longString = new char[250]; Arrays.fill(longString, 'a'); String[] invalidGroupInstanceIds = {"", "foo bar", "..", "foo:bar", "foo=bar", ".", new String(longString)}; for (String instanceId : invalidGroupInstanceIds) { try { JoinGroupRequest.validateGroupInstanceId(instanceId); fail("No exception was thrown for invalid instance id: " + instanceId); } catch (InvalidConfigurationException e) { // Good } } }