focal_method (string, length 13 to 60.9k)
test_case (string, length 25 to 109k)
@Override public GetNewReservationResponse getNewReservation( GetNewReservationRequest request) throws YarnException, IOException { if (request == null) { routerMetrics.incrGetNewReservationFailedRetrieved(); String errMsg = "Missing getNewReservation request."; RouterAuditLogger.logFailure(user.getShortUserName(), GET_NEW_RESERVATION, UNKNOWN, TARGET_CLIENT_RM_SERVICE, errMsg); RouterServerUtil.logAndThrowException(errMsg, null); } long startTime = clock.getTime(); Map<SubClusterId, SubClusterInfo> subClustersActive = federationFacade.getSubClusters(true); for (int i = 0; i < numSubmitRetries; ++i) { SubClusterId subClusterId = getRandomActiveSubCluster(subClustersActive); LOG.info("getNewReservation try #{} on SubCluster {}.", i, subClusterId); ApplicationClientProtocol clientRMProxy = getClientRMProxyForSubCluster(subClusterId); try { GetNewReservationResponse response = clientRMProxy.getNewReservation(request); if (response != null) { long stopTime = clock.getTime(); routerMetrics.succeededGetNewReservationRetrieved(stopTime - startTime); RouterAuditLogger.logSuccess(user.getShortUserName(), GET_NEW_RESERVATION, TARGET_CLIENT_RM_SERVICE); return response; } } catch (Exception e) { String logFormatted = "Unable to create a new Reservation in SubCluster {}."; LOG.warn(logFormatted, subClusterId.getId(), e); RouterAuditLogger.logFailure(user.getShortUserName(), GET_NEW_RESERVATION, UNKNOWN, TARGET_CLIENT_RM_SERVICE, logFormatted, subClusterId.getId()); subClustersActive.remove(subClusterId); } } routerMetrics.incrGetNewReservationFailedRetrieved(); String errMsg = "Failed to create a new reservation."; RouterAuditLogger.logFailure(user.getShortUserName(), GET_NEW_RESERVATION, UNKNOWN, TARGET_CLIENT_RM_SERVICE, errMsg); throw new YarnException(errMsg); }
@Test public void testGetNewReservation() throws Exception { LOG.info("Test FederationClientInterceptor : Get NewReservation request."); // null request LambdaTestUtils.intercept(YarnException.class, "Missing getNewReservation request.", () -> interceptor.getNewReservation(null)); // normal request GetNewReservationRequest request = GetNewReservationRequest.newInstance(); GetNewReservationResponse response = interceptor.getNewReservation(request); Assert.assertNotNull(response); ReservationId reservationId = response.getReservationId(); Assert.assertNotNull(reservationId); Assert.assertTrue(reservationId.toString().contains("reservation")); Assert.assertEquals(reservationId.getClusterTimestamp(), ResourceManager.getClusterTimeStamp()); }
@Override public CompletableFuture<Set<String>> availableResources(NodeId nodeId, Type resourceType) { if (clusterService.getLocalNode().id().equals(nodeId)) { Set<String> resources = getLocalAvailableResources(resourceType); return CompletableFuture.completedFuture(resources); } else { return communicationService.sendAndReceive(createResourceRequest(resourceType), CONTROL_RESOURCE, SERIALIZER::encode, SERIALIZER::decode, nodeId); } }
@Test public void testAvailableResources() { MetricValue mv = new MetricValue.Builder().load(50).add(); Set<String> diskSet = ImmutableSet.of("disk1", "disk2"); diskSet.forEach(disk -> DISK_METRICS.forEach(cmt -> testUpdateMetricWithResource(cmt, mv, disk))); Set<String> networkSet = ImmutableSet.of("eth0", "eth1"); networkSet.forEach(network -> NETWORK_METRICS.forEach(cmt -> testUpdateMetricWithResource(cmt, mv, network))); assertThat(monitor.availableResourcesSync(nodeId, Type.DISK), is(diskSet)); assertThat(monitor.availableResourcesSync(nodeId, Type.NETWORK), is(networkSet)); }
@SuppressWarnings({"unchecked", "rawtypes"}) @Override public GenericRow apply( final GenericKey k, final GenericRow rowValue, final GenericRow aggRowValue ) { final GenericRow result = GenericRow.fromList(aggRowValue.values()); for (int idx = 0; idx < nonAggColumnCount; idx++) { result.set(idx, rowValue.get(idx)); } for (int idx = nonAggColumnCount; idx < columnCount; idx++) { final TableAggregationFunction function = aggregateFunctions.get(idx - nonAggColumnCount); final Object argument = getCurrentValue( rowValue, function.getArgIndicesInValue(), function::convertToInput ); final Object previous = result.get(idx); result.set(idx, function.undo(argument, previous)); } return result; }
@Test public void shouldApplyUndoableVariadicAggregateFunctions() { when(func1.convertToInput(any())).thenAnswer( (invocation) -> { List<?> inputs = invocation.getArgument(0, List.class); return Pair.of(inputs.get(0), new VariadicArgs<>(inputs.subList(1, 4))); } ); when(func1.getArgIndicesInValue()).thenReturn(Arrays.asList(0, 1, 2, 3)); // Given: final GenericRow value = genericRow(1, 2L, 3L, 4L); final GenericRow aggRow = genericRow(1, 2L, 3); // When: final GenericRow resultRow = aggregator.apply(key, value, aggRow); // Then: assertThat(resultRow, equalTo(genericRow(1, 2L, "func1-undone"))); }
public synchronized void post(Map<String, List<Map<String, Object>>> ruleMap) { if (ruleMap.isEmpty()) { // clear rule for (Map.Entry<String, Set<MeshRuleListener>> entry : listenerMap.entrySet()) { for (MeshRuleListener listener : entry.getValue()) { listener.clearRule(appName); } } } else { for (Map.Entry<String, List<Map<String, Object>>> entry : ruleMap.entrySet()) { String ruleType = entry.getKey(); Set<MeshRuleListener> listeners = listenerMap.get(ruleType); if (CollectionUtils.isNotEmpty(listeners)) { for (MeshRuleListener listener : listeners) { listener.onRuleChange(appName, entry.getValue()); } } else { logger.warn( CLUSTER_NO_RULE_LISTENER, "Receive mesh rule but none of listener has been registered", "", "Receive rule but none of listener has been registered. Maybe type not matched. Rule Type: " + ruleType); } } // clear rule listener not being notified in this time for (Map.Entry<String, Set<MeshRuleListener>> entry : listenerMap.entrySet()) { if (!ruleMap.containsKey(entry.getKey())) { for (MeshRuleListener listener : entry.getValue()) { listener.clearRule(appName); } } } } }
@Test void post() { MeshRuleDispatcher meshRuleDispatcher = new MeshRuleDispatcher("TestApp"); Map<String, List<Map<String, Object>>> ruleMap = new HashMap<>(); List<Map<String, Object>> type1 = new LinkedList<>(); List<Map<String, Object>> type2 = new LinkedList<>(); List<Map<String, Object>> type3 = new LinkedList<>(); ruleMap.put("Type1", type1); ruleMap.put("Type2", type2); ruleMap.put("Type3", type3); AtomicInteger count = new AtomicInteger(0); MeshRuleListener listener1 = new MeshRuleListener() { @Override public void onRuleChange(String appName, List<Map<String, Object>> rules) { Assertions.assertEquals("TestApp", appName); Assertions.assertEquals(System.identityHashCode(type1), System.identityHashCode(rules)); count.incrementAndGet(); } @Override public void clearRule(String appName) {} @Override public String ruleSuffix() { return "Type1"; } }; MeshRuleListener listener2 = new MeshRuleListener() { @Override public void onRuleChange(String appName, List<Map<String, Object>> rules) { Assertions.assertEquals("TestApp", appName); Assertions.assertEquals(System.identityHashCode(type2), System.identityHashCode(rules)); count.incrementAndGet(); } @Override public void clearRule(String appName) {} @Override public String ruleSuffix() { return "Type2"; } }; MeshRuleListener listener4 = new MeshRuleListener() { @Override public void onRuleChange(String appName, List<Map<String, Object>> rules) { Assertions.fail(); } @Override public void clearRule(String appName) { Assertions.assertEquals("TestApp", appName); count.incrementAndGet(); } @Override public String ruleSuffix() { return "Type4"; } }; meshRuleDispatcher.register(listener1); meshRuleDispatcher.register(listener2); meshRuleDispatcher.register(listener4); meshRuleDispatcher.post(ruleMap); Assertions.assertEquals(3, count.get()); }
public static <T> Partition<T> of( int numPartitions, PartitionWithSideInputsFn<? super T> partitionFn, Requirements requirements) { Contextful ctfFn = Contextful.fn( (T element, Contextful.Fn.Context c) -> partitionFn.partitionFor(element, numPartitions, c), requirements); return new Partition<>(new PartitionDoFn<T>(numPartitions, ctfFn, partitionFn)); }
@Test @Category(NeedsRunner.class) public void testModPartition() { PCollectionList<Integer> outputs = pipeline.apply(Create.of(1, 2, 4, 5)).apply(Partition.of(3, new ModFn())); assertTrue(outputs.size() == 3); PAssert.that(outputs.get(0)).empty(); PAssert.that(outputs.get(1)).containsInAnyOrder(1, 4); PAssert.that(outputs.get(2)).containsInAnyOrder(2, 5); pipeline.run(); }
@Override public String resolve(Method method, Object[] arguments, String spelExpression) { if (StringUtils.isEmpty(spelExpression)) { return spelExpression; } if (spelExpression.matches(PLACEHOLDER_SPEL_REGEX) && stringValueResolver != null) { return stringValueResolver.resolveStringValue(spelExpression); } if (spelExpression.matches(METHOD_SPEL_REGEX)) { SpelRootObject rootObject = new SpelRootObject(method, arguments); MethodBasedEvaluationContext evaluationContext = new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer); Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext); return (String) evaluated; } if (spelExpression.matches(BEAN_SPEL_REGEX)) { SpelRootObject rootObject = new SpelRootObject(method, arguments); MethodBasedEvaluationContext evaluationContext = new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer); evaluationContext.setBeanResolver(new BeanFactoryResolver(this.beanFactory)); Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext); return (String) evaluated; } return spelExpression; }
@Test public void testA0() throws Exception { String testExpression = "#a0"; String firstArgument = "test"; DefaultSpelResolverTest target = new DefaultSpelResolverTest(); Method testMethod = target.getClass().getMethod("testMethod", String.class); String result = sut.resolve(testMethod, new Object[]{firstArgument}, testExpression); assertThat(result).isEqualTo(firstArgument); }
@Override public int complete(String buffer, int cursor, List<CharSequence> candidates) { if (cursor <= 0) { return cursor; } int blankPos = findLastBlank(buffer.substring(0, cursor)); String prefix = buffer.substring(blankPos + 1, cursor); String schemaName = queryRunner.getSession().getSchema(); if (schemaName != null) { List<String> functionNames = functionCache.getIfPresent(schemaName); List<String> tableNames = tableCache.getIfPresent(schemaName); SortedSet<String> sortedCandidates = new TreeSet<>(); if (functionNames != null) { sortedCandidates.addAll(filterResults(functionNames, prefix)); } if (tableNames != null) { sortedCandidates.addAll(filterResults(tableNames, prefix)); } candidates.addAll(sortedCandidates); } return blankPos + 1; }
@Test public void testAutoCompleteWithoutSchema() { QueryRunner runner = createQueryRunner(new ClientOptions().toClientSession()); TableNameCompleter completer = new TableNameCompleter(runner); assertEquals(completer.complete("SELECT is_infi", 14, ImmutableList.of()), 7); }
@GwtIncompatible("java.util.regex.Pattern") public void containsMatch(@Nullable Pattern regex) { checkNotNull(regex); if (actual == null) { failWithActual("expected a string that contains a match for", regex); } else if (!regex.matcher(actual).find()) { failWithActual("expected to contain a match for", regex); } }
@Test public void stringContainsMatchString() { assertThat("aba").containsMatch(".*b.*"); expectFailureWhenTestingThat("aaa").containsMatch(".*b.*"); assertFailureValue("expected to contain a match for", ".*b.*"); }
public Predicate convert(ScalarOperator operator) { if (operator == null) { return null; } return operator.accept(this, null); }
@Test public void testEq() { ConstantOperator value = ConstantOperator.createInt(5); ScalarOperator op = new BinaryPredicateOperator(BinaryType.EQ, F0, value); Predicate result = CONVERTER.convert(op); Assert.assertTrue(result instanceof LeafPredicate); LeafPredicate leafPredicate = (LeafPredicate) result; Assert.assertTrue(leafPredicate.function() instanceof Equal); Assert.assertEquals(5, leafPredicate.literals().get(0)); }
public static VersionNumber v(String version) { return new VersionNumber(version); }
@Test void testVersionNumberConstructor() { assertThatCode(() -> v("5.0.0")).doesNotThrowAnyException(); assertThatCode(() -> v("7.0.0-alpha.1")).doesNotThrowAnyException(); assertThatCode(() -> v("23.ea.3")).doesNotThrowAnyException(); assertThatCode(() -> v("21-ea")).doesNotThrowAnyException(); assertThatCode(() -> v("this is even not parseable as a version")).doesNotThrowAnyException(); }
public static TransformExecutorService parallel(ExecutorService executor) { return new ParallelTransformExecutor(executor); }
@Test public void parallelScheduleMultipleSchedulesBothImmediately() { @SuppressWarnings("unchecked") DirectTransformExecutor<Object> first = mock(DirectTransformExecutor.class); @SuppressWarnings("unchecked") DirectTransformExecutor<Object> second = mock(DirectTransformExecutor.class); TransformExecutorService parallel = TransformExecutorServices.parallel(executorService); parallel.schedule(first); parallel.schedule(second); verify(first).run(); verify(second).run(); parallel.complete(first); parallel.complete(second); }
private String getUpstreamIp() { ShenyuContext shenyuContext = exchange.getAttribute(Constants.CONTEXT); assert shenyuContext != null; if (RpcTypeEnum.HTTP.getName().equals(shenyuContext.getRpcType())) { URI uri = exchange.getAttribute(Constants.HTTP_URI); if (Objects.nonNull(uri)) { return uri.getHost(); } else { return getUpstreamIpFromHttpDomain(); } } else { String domain = (String) exchange.getAttributes().get(Constants.HTTP_DOMAIN); if (StringUtils.isNotBlank(domain)) { return getUpstreamIpFromHttpDomain(); } // The current context is difficult to obtain the upstream IP of grpc and Dubbo. need change plugin code. } return ""; }
@Test public void testGetUpstreamIp() throws Exception { loggingServerHttpResponse.setExchange(exchange); Method method1 = loggingServerHttpResponse.getClass().getDeclaredMethod("getUpstreamIp"); method1.setAccessible(true); String upstreamIp1 = (String) method1.invoke(loggingServerHttpResponse); Assertions.assertEquals(upstreamIp1, ""); exchange.getAttributes().put(Constants.HTTP_DOMAIN, "http://localhost:9195/http/order/path/123/name"); loggingServerHttpResponse.setExchange(exchange); Method method2 = loggingServerHttpResponse.getClass().getDeclaredMethod("getUpstreamIp"); method2.setAccessible(true); String upstreamIp2 = (String) method2.invoke(loggingServerHttpResponse); Assertions.assertEquals(upstreamIp2, "localhost"); ShenyuContext shenyuContext2 = new ShenyuContext(); shenyuContext2.setRpcType("http"); exchange.getAttributes().put(Constants.CONTEXT, shenyuContext2); loggingServerHttpResponse.setExchange(exchange); Method method3 = loggingServerHttpResponse.getClass().getDeclaredMethod("getUpstreamIp"); method3.setAccessible(true); String upstreamIp3 = (String) method3.invoke(loggingServerHttpResponse); Assertions.assertEquals(upstreamIp3, "localhost"); exchange.getAttributes().put(Constants.HTTP_URI, new URI("test", "localhost", "/test", "test")); loggingServerHttpResponse.setExchange(exchange); Method method4 = loggingServerHttpResponse.getClass().getDeclaredMethod("getUpstreamIp"); method4.setAccessible(true); String uri = (String) method4.invoke(loggingServerHttpResponse); Assertions.assertEquals(uri, "localhost"); }
public static String findBsn(List<Container> categorieList){ return findValue(categorieList, CATEGORIE_IDENTIFICATIENUMMERS, ELEMENT_BURGERSERVICENUMMER); }
@Test public void testEmptyElement() { Container container = new Container(); container.setNummer(CategorieUtil.CATEGORIE_IDENTIFICATIENUMMERS); Element element = new Element(); container.getElement().add(element); assertThat(CategorieUtil.findBsn(List.of(container)), nullValue()); }
@Override public void validateNameUniqueness(Map<CaseInsensitiveString, AbstractMaterialConfig> map) { if (StringUtils.isBlank(scmId)) { return; } if (map.containsKey(new CaseInsensitiveString(scmId))) { AbstractMaterialConfig material = map.get(new CaseInsensitiveString(scmId)); material.addError(SCM_ID, "Duplicate SCM material detected!"); addError(SCM_ID, "Duplicate SCM material detected!"); } else { map.put(new CaseInsensitiveString(scmId), this); } }
@Test public void shouldAddErrorIfSCMNameUniquenessValidationFails() { Map<CaseInsensitiveString, AbstractMaterialConfig> nameToMaterialMap = new HashMap<>(); PluggableSCMMaterialConfig existingMaterial = new PluggableSCMMaterialConfig("scm-id"); nameToMaterialMap.put(new CaseInsensitiveString("scm-id"), existingMaterial); nameToMaterialMap.put(new CaseInsensitiveString("foo"), git("url")); pluggableSCMMaterialConfig.validateNameUniqueness(nameToMaterialMap); assertThat(pluggableSCMMaterialConfig.errors().getAll().size(), is(1)); assertThat(pluggableSCMMaterialConfig.errors().on(PluggableSCMMaterialConfig.SCM_ID), is("Duplicate SCM material detected!")); assertThat(existingMaterial.errors().getAll().size(), is(1)); assertThat(existingMaterial.errors().on(PluggableSCMMaterialConfig.SCM_ID), is("Duplicate SCM material detected!")); assertThat(nameToMaterialMap.size(), is(2)); }
public static <T> JSONSchema<T> of(SchemaDefinition<T> schemaDefinition) { SchemaReader<T> reader = schemaDefinition.getSchemaReaderOpt() .orElseGet(() -> new JacksonJsonReader<>(jsonMapper(), schemaDefinition.getPojo())); SchemaWriter<T> writer = schemaDefinition.getSchemaWriterOpt() .orElseGet(() -> new JacksonJsonWriter<>(jsonMapper())); return new JSONSchema<>(parseSchemaInfo(schemaDefinition, SchemaType.JSON), schemaDefinition.getPojo(), reader, writer); }
@Test public void testAllowNullEncodeAndDecode() { JSONSchema<Foo> jsonSchema = JSONSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build()); Bar bar = new Bar(); bar.setField1(true); Foo foo1 = new Foo(); foo1.setField1("foo1"); foo1.setField2("bar1"); foo1.setField4(bar); foo1.setColor(SchemaTestUtils.Color.BLUE); Foo foo2 = new Foo(); foo2.setField1("foo2"); foo2.setField2("bar2"); byte[] bytes1 = jsonSchema.encode(foo1); Assert.assertTrue(bytes1.length > 0); byte[] bytes2 = jsonSchema.encode(foo2); Assert.assertTrue(bytes2.length > 0); Foo object1 = jsonSchema.decode(bytes1); Foo object2 = jsonSchema.decode(bytes2); Assert.assertEquals(object1, foo1); Assert.assertEquals(object2, foo2); }
public Stream<T> stream() { return values.stream(); }
@Test public void shouldGetAllArgsByStream() { final VariadicArgs<Integer> varArgs = new VariadicArgs<>(ImmutableList.of(1, 2, 3)); final List<Integer> foundArgs = varArgs.stream().collect(Collectors.toList()); assertEquals(Arrays.asList(1, 2, 3), foundArgs); }
public static <T> ConsumerPath<T> matchBestPath(String requestMethod, String requestPath, List<ConsumerPath<T>> consumerPaths) { ConsumerPath<T> answer = null; List<ConsumerPath<T>> candidates = new ArrayList<>(); // first match by http method for (ConsumerPath<T> entry : consumerPaths) { if (matchRestMethod(requestMethod, entry.getRestrictMethod())) { candidates.add(entry); } } // then see if we got a direct match Iterator<ConsumerPath<T>> it = candidates.iterator(); while (it.hasNext()) { ConsumerPath<T> consumer = it.next(); if (matchRestPath(requestPath, consumer.getConsumerPath(), false)) { answer = consumer; break; } } // we could not find a direct match, and if the request is OPTIONS then we need all candidates if (answer == null && isOptionsMethod(requestMethod)) { candidates.clear(); candidates.addAll(consumerPaths); // then try again to see if we can find a direct match it = candidates.iterator(); while (it.hasNext()) { ConsumerPath<T> consumer = it.next(); if (matchRestPath(requestPath, consumer.getConsumerPath(), false)) { answer = consumer; break; } } } // if there are no uri template, then select the matching with the longest path boolean noCurlyBraces = candidates.stream().allMatch(p -> countCurlyBraces(p.getConsumerPath()) == 0); if (noCurlyBraces) { // grab first which is the longest that matched the request path answer = candidates.stream() .filter(c -> matchPath(requestPath, c.getConsumerPath(), c.isMatchOnUriPrefix())) // sort by longest by inverting the sort by multiply with -1 .sorted(Comparator.comparingInt(o -> -1 * o.getConsumerPath().length())).findFirst().orElse(null); } // is there a direct match by with a different VERB, as then this call is not allowed if (answer == null) { for (ConsumerPath<T> entry : consumerPaths) { if (matchRestPath(requestPath, entry.getConsumerPath(), false)) { // okay we have direct match but for another VERB so this call is not allowed return null; } } } if (answer != null) { return answer; } // then match by uri template path it = candidates.iterator(); List<ConsumerPath<T>> uriTemplateCandidates = new ArrayList<>(); while (it.hasNext()) { ConsumerPath<T> consumer = it.next(); // filter non matching paths if (matchRestPath(requestPath, consumer.getConsumerPath(), true)) { uriTemplateCandidates.add(consumer); } } // if there is multiple candidates with uri template then pick anyone with the least number of uri template ConsumerPath<T> best = null; Map<Integer, List<ConsumerPath<T>>> pathMap = new HashMap<>(); if (uriTemplateCandidates.size() > 1) { it = uriTemplateCandidates.iterator(); while (it.hasNext()) { ConsumerPath<T> entry = it.next(); int curlyBraces = countCurlyBraces(entry.getConsumerPath()); if (curlyBraces > 0) { List<ConsumerPath<T>> consumerPathsList = pathMap.computeIfAbsent(curlyBraces, key -> new ArrayList<>()); consumerPathsList.add(entry); } } OptionalInt min = pathMap.keySet().stream().mapToInt(Integer::intValue).min(); if (min.isPresent()) { List<ConsumerPath<T>> bestConsumerPaths = pathMap.get(min.getAsInt()); if (bestConsumerPaths.size() > 1 && !canBeAmbiguous(requestMethod, requestMethod)) { String exceptionMsg = "Ambiguous paths " + bestConsumerPaths.stream().map(ConsumerPath::getConsumerPath) .collect(Collectors.joining(",")) + " for request path " + requestPath; throw new IllegalStateException(exceptionMsg); } best = bestConsumerPaths.get(0); } if (best != null) { // pick the best among uri template answer = best; } } // if there is one left then it's our answer if (answer == null && uriTemplateCandidates.size() == 1) { return uriTemplateCandidates.get(0); } // last match by wildcard path it = candidates.iterator(); while (it.hasNext()) { ConsumerPath<T> consumer = it.next(); // filter non matching paths if (matchWildCard(requestPath, consumer.getConsumerPath())) { answer = consumer; break; } } return answer; }
@Test public void testRestConsumerContextPathMatcherWithAmbiguousPaths() { List<RestConsumerContextPathMatcher.ConsumerPath<MockConsumerPath>> consumerPaths = new ArrayList<>(); consumerPaths.add(new MockConsumerPath("GET", "/camel/{a}/b/{c}")); consumerPaths.add(new MockConsumerPath("GET", "/camel/a/{b}/{c}")); IllegalStateException illegalStateException = assertThrows(IllegalStateException.class, () -> { RestConsumerContextPathMatcher.matchBestPath("GET", "/camel/a/b/3", consumerPaths); }); assertEquals("Ambiguous paths /camel/{a}/b/{c},/camel/a/{b}/{c} for request path /camel/a/b/3", illegalStateException.getMessage()); }
@Override public void modifyHandshake(final ServerEndpointConfig sec, final HandshakeRequest request, final HandshakeResponse response) { HttpSession httpSession = (HttpSession) request.getHttpSession(); sec.getUserProperties().put(WebsocketListener.CLIENT_IP_NAME, httpSession.getAttribute(WebsocketListener.CLIENT_IP_NAME)); super.modifyHandshake(sec, request, response); }
@Test public void testModifyHandshake() { WebsocketConfigurator websocketConfigurator = new WebsocketConfigurator(); ServerEndpointConfig sec = mock(ServerEndpointConfig.class); Map<String, Object> userProperties = mock(Map.class); when(sec.getUserProperties()).thenReturn(userProperties); HandshakeRequest request = mock(HandshakeRequest.class); HttpSession httpSession = mock(HttpSession.class); when(request.getHttpSession()).thenReturn(httpSession); HandshakeResponse response = mock(HandshakeResponse.class); websocketConfigurator.modifyHandshake(sec, request, response); verify(userProperties).put(WebsocketListener.CLIENT_IP_NAME, httpSession.getAttribute(WebsocketListener.CLIENT_IP_NAME)); }
@Override public HealthStatus getStatus() { if (cr.getStatus() == ComponentStatus.INITIALIZING) return HealthStatus.INITIALIZING; PartitionHandlingManager partitionHandlingManager = cr.getComponent(PartitionHandlingManager.class); if (!isComponentHealthy() || partitionHandlingManager.getAvailabilityMode() == AvailabilityMode.DEGRADED_MODE) { return HealthStatus.DEGRADED; } DistributionManager distributionManager = cr.getDistributionManager(); if (distributionManager != null && distributionManager.isRehashInProgress()) { return HealthStatus.HEALTHY_REBALANCING; } return HealthStatus.HEALTHY; }
@Test public void testUnhealthyStatusWithStoppingComponent() { //given ComponentRegistry componentRegistryMock = mock(ComponentRegistry.class); doReturn(ComponentStatus.STOPPING).when(componentRegistryMock).getStatus(); CacheHealth cacheHealth = new CacheHealthImpl(componentRegistryMock); //when HealthStatus status = cacheHealth.getStatus(); //then assertEquals(status, HealthStatus.DEGRADED); }
public static Read read() { return new AutoValue_HCatalogIO_Read.Builder() .setDatabase(DEFAULT_DATABASE) .setPartitionCols(new ArrayList<>()) .build(); }
@Test public void testReadFailureTableDoesNotExist() { defaultPipeline.apply( HCatalogIO.read() .withConfigProperties(getConfigPropertiesAsMap(service.getHiveConf())) .withTable("myowntable")); thrown.expectCause(isA(NoSuchObjectException.class)); defaultPipeline.run(); }
public static boolean canDrop( FilterPredicate pred, List<ColumnChunkMetaData> columns, DictionaryPageReadStore dictionaries) { Objects.requireNonNull(pred, "pred cannot be null"); Objects.requireNonNull(columns, "columns cannot be null"); return pred.accept(new DictionaryFilter(columns, dictionaries)); }
@Test public void testNotEqMissingColumn() throws Exception { BinaryColumn b = binaryColumn("missing_column"); assertFalse( "Should not drop block for non-null query", canDrop(notEq(b, Binary.fromString("any")), ccmd, dictionaries)); assertTrue("Should not drop block null query", canDrop(notEq(b, null), ccmd, dictionaries)); }
@Override public boolean equals(Object other) { if (this == other) return true; if(!(other instanceof HollowObjectSchema)) return false; HollowObjectSchema otherSchema = (HollowObjectSchema) other; if(!getName().equals(otherSchema.getName())) return false; if(otherSchema.numFields() != numFields()) return false; if (!isNullableObjectEquals(primaryKey, otherSchema.getPrimaryKey())) return false; for(int i=0;i<numFields();i++) { if(getFieldType(i) != otherSchema.getFieldType(i)) return false; if(getFieldType(i) == FieldType.REFERENCE && !getReferencedType(i).equals(otherSchema.getReferencedType(i))) return false; if(!getFieldName(i).equals(otherSchema.getFieldName(i))) return false; } return true; }
@Test public void testEquals() { { HollowObjectSchema s1 = new HollowObjectSchema("Test", 2); s1.addField("F1", FieldType.INT); s1.addField("F2", FieldType.LONG); HollowObjectSchema s2 = new HollowObjectSchema("Test", 2); s2.addField("F1", FieldType.INT); s2.addField("F2", FieldType.LONG); Assert.assertEquals(s1, s2); } { HollowObjectSchema s1 = new HollowObjectSchema("Test", 2); s1.addField("F1", FieldType.INT); s1.addField("F2", FieldType.LONG); HollowObjectSchema s2 = new HollowObjectSchema("Test", 1); s2.addField("F1", FieldType.INT); Assert.assertNotEquals(s1, s2); } }
public MapStateDescriptor( String name, TypeSerializer<UK> keySerializer, TypeSerializer<UV> valueSerializer) { super(name, new MapSerializer<>(keySerializer, valueSerializer), null); }
@Test void testMapStateDescriptor() throws Exception { TypeSerializer<Integer> keySerializer = new KryoSerializer<>(Integer.class, new SerializerConfigImpl()); TypeSerializer<String> valueSerializer = new KryoSerializer<>(String.class, new SerializerConfigImpl()); MapStateDescriptor<Integer, String> descr = new MapStateDescriptor<>("testName", keySerializer, valueSerializer); assertThat(descr.getName()).isEqualTo("testName"); assertThat(descr.getSerializer()).isNotNull(); assertThat(descr.getSerializer()).isInstanceOf(MapSerializer.class); assertThat(descr.getKeySerializer()).isNotNull(); assertThat(descr.getKeySerializer()).isEqualTo(keySerializer); assertThat(descr.getValueSerializer()).isNotNull(); assertThat(descr.getValueSerializer()).isEqualTo(valueSerializer); MapStateDescriptor<Integer, String> copy = CommonTestUtils.createCopySerializable(descr); assertThat(copy.getName()).isEqualTo("testName"); assertThat(copy.getSerializer()).isNotNull(); assertThat(copy.getSerializer()).isInstanceOf(MapSerializer.class); assertThat(copy.getKeySerializer()).isNotNull(); assertThat(copy.getKeySerializer()).isEqualTo(keySerializer); assertThat(copy.getValueSerializer()).isNotNull(); assertThat(copy.getValueSerializer()).isEqualTo(valueSerializer); }
@Override public MetricType getType() { return MetricType.GAUGE_BOOLEAN; }
@Test public void set() { BooleanGauge gauge = new BooleanGauge("bar"); assertThat(gauge.getValue()).isNull(); gauge.set(true); assertThat(gauge.getValue()).isTrue(); gauge.set(false); assertThat(gauge.getValue()).isFalse(); assertThat(gauge.getType()).isEqualTo(MetricType.GAUGE_BOOLEAN); assertThat(gauge.getValue()).isFalse(); }
public Materialization create( final StreamsMaterialization delegate, final MaterializationInfo info, final QueryId queryId, final QueryContext.Stacker contextStacker ) { final TransformVisitor transformVisitor = new TransformVisitor(queryId, contextStacker); final List<Transform> transforms = info .getTransforms() .stream() .map(xform -> xform.visit(transformVisitor)) .collect(Collectors.toList()); return materializationFactory.create( delegate, info.getSchema(), transforms ); }
@Test public void shouldBuildMaterializationWithMapTransform() { // Given: factory.create(materialization, info, queryId, contextStacker); when(mapper.transform(any(), any(), any())).thenReturn(rowOut); final Transform transform = getTransform(0); // When: final Optional<GenericRow> result = transform.apply(keyIn, rowIn, ctx); // Then: assertThat(result, is(Optional.of(rowOut))); }
public FEELFnResult<List<BigDecimal>> invoke(@ParameterName( "list" ) List list, @ParameterName( "match" ) Object match) { if ( list == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null")); } final List<BigDecimal> result = new ArrayList<>(); for( int i = 0; i < list.size(); i++ ) { Object o = list.get( i ); if ( o == null && match == null) { result.add( BigDecimal.valueOf( i+1L ) ); } else if ( o != null && match != null ) { if ( equalsAsBigDecimals(o, match) || o.equals(match) ) { result.add( BigDecimal.valueOf( i+1L ) ); } } } return FEELFnResult.ofResult( result ); }
@Test void invokeMatchNotNull() { FunctionTestUtil.assertResult(indexOfFunction.invoke(Arrays.asList("test", null, 12), "testttt"), Collections.emptyList()); FunctionTestUtil.assertResult( indexOfFunction.invoke(Arrays.asList("test", null, BigDecimal.valueOf(12)), "test"), Collections.singletonList(BigDecimal.valueOf(1))); FunctionTestUtil.assertResult( indexOfFunction.invoke(Arrays.asList("test", null, "test"), "test"), Arrays.asList(BigDecimal.valueOf(1), BigDecimal.valueOf(3))); }
public void isEqualTo(@Nullable Object expected) { standardIsEqualTo(expected); }
@Test public void isEqualToFailureWithObjects() { Object a = OBJECT_1; Object b = OBJECT_2; expectFailure.whenTesting().that(a).isEqualTo(b); assertFailureKeys("expected", "but was"); assertFailureValue("expected", "Object 2"); assertFailureValue("but was", "Object 1"); }
public MaterializedConfiguration getConfiguration() { MaterializedConfiguration conf = new SimpleMaterializedConfiguration(); FlumeConfiguration fconfig = getFlumeConfiguration(); AgentConfiguration agentConf = fconfig.getConfigurationFor(getAgentName()); if (agentConf != null) { Map<String, ChannelComponent> channelComponentMap = Maps.newHashMap(); Map<String, SourceRunner> sourceRunnerMap = Maps.newHashMap(); Map<String, SinkRunner> sinkRunnerMap = Maps.newHashMap(); try { loadChannels(agentConf, channelComponentMap); loadSources(agentConf, channelComponentMap, sourceRunnerMap); loadSinks(agentConf, channelComponentMap, sinkRunnerMap); Set<String> channelNames = new HashSet<String>(channelComponentMap.keySet()); for (String channelName : channelNames) { ChannelComponent channelComponent = channelComponentMap.get(channelName); if (channelComponent.components.isEmpty()) { LOGGER.warn("Channel {} has no components connected" + " and has been removed.", channelName); channelComponentMap.remove(channelName); Map<String, Channel> nameChannelMap = channelCache.get(channelComponent.channel.getClass()); if (nameChannelMap != null) { nameChannelMap.remove(channelName); } } else { LOGGER.info("Channel {} connected to {}", channelName, channelComponent.components.toString()); conf.addChannel(channelName, channelComponent.channel); } } for (Map.Entry<String, SourceRunner> entry : sourceRunnerMap.entrySet()) { conf.addSourceRunner(entry.getKey(), entry.getValue()); } for (Map.Entry<String, SinkRunner> entry : sinkRunnerMap.entrySet()) { conf.addSinkRunner(entry.getKey(), entry.getValue()); } } catch (InstantiationException ex) { LOGGER.error("Failed to instantiate component", ex); } finally { channelComponentMap.clear(); sourceRunnerMap.clear(); sinkRunnerMap.clear(); } } else { LOGGER.warn("No configuration found for this host:{}", getAgentName()); } return conf; }
@Test public void testSourceAndSinkThrowExceptionDuringConfiguration() throws Exception { String agentName = "agent1"; String sourceType = UnconfigurableSource.class.getName(); String channelType = "memory"; String sinkType = UnconfigurableSink.class.getName(); Map<String, String> properties = getProperties(agentName, sourceType, channelType, sinkType); MemoryConfigurationProvider provider = new MemoryConfigurationProvider(agentName, properties); MaterializedConfiguration config = provider.getConfiguration(); assertEquals(config.getSourceRunners().size(), 0); assertEquals(config.getChannels().size(), 0); assertEquals(config.getSinkRunners().size(), 0); }
@Deprecated public static String getJwt(JwtClaims claims) throws JoseException { String jwt; RSAPrivateKey privateKey = (RSAPrivateKey) getPrivateKey( jwtConfig.getKey().getFilename(),jwtConfig.getKey().getPassword(), jwtConfig.getKey().getKeyName()); // A JWT is a JWS and/or a JWE with JSON claims as the payload. // In this example it is a JWS nested inside a JWE // So we first create a JsonWebSignature object. JsonWebSignature jws = new JsonWebSignature(); // The payload of the JWS is JSON content of the JWT Claims jws.setPayload(claims.toJson()); // The JWT is signed using the sender's private key jws.setKey(privateKey); // Get provider from security config file, it should be two digit // And the provider id will set as prefix for keyid in the token header, for example: 05100 // if there is no provider id, we use "00" for the default value String provider_id = ""; if (jwtConfig.getProviderId() != null) { provider_id = jwtConfig.getProviderId(); if (provider_id.length() == 1) { provider_id = "0" + provider_id; } else if (provider_id.length() > 2) { logger.error("provider_id defined in the security.yml file is invalid; the length should be 2"); provider_id = provider_id.substring(0, 2); } } jws.setKeyIdHeaderValue(provider_id + jwtConfig.getKey().getKid()); // Set the signature algorithm on the JWT/JWS that will integrity protect the claims jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256); // Sign the JWS and produce the compact serialization, which will be the inner JWT/JWS // representation, which is a string consisting of three dot ('.') separated // base64url-encoded parts in the form Header.Payload.Signature jwt = jws.getCompactSerialization(); return jwt; }
@Test public void AcRoleAccessControlRight() throws Exception { JwtClaims claims = ClaimsUtil.getTestClaims("stevehu", "CUSTOMER", "f7d42348-c647-4efb-a52d-4c5787421e72", Arrays.asList("account.r", "account.w"), "customer"); claims.setExpirationTimeMinutesInTheFuture(5256000); String jwt = JwtIssuer.getJwt(claims, long_kid, KeyUtil.deserializePrivateKey(long_key, KeyUtil.RSA)); System.out.println("***Long lived token Authorization code customer with roles***: " + jwt); }
@PatchMapping("/api/v1/meetings/{uuid}/lock") public void lock(@PathVariable String uuid, @AuthAttendee long id) { meetingService.lock(uuid, id); }
@DisplayName("Locking a meeting returns 200 OK.") @Test void lock() { Meeting meeting = meetingRepository.save(MeetingFixture.DINNER.create()); Attendee attendee = attendeeRepository.save(AttendeeFixture.HOST_JAZZ.create(meeting)); String token = getToken(attendee, meeting); RestAssured.given().log().all() .cookie("ACCESS_TOKEN", token) .contentType(ContentType.JSON) .pathParam("uuid", meeting.getUuid()) .when().patch("/api/v1/meetings/{uuid}/lock") .then().log().all() .statusCode(HttpStatus.OK.value()); }
@Override public String toString() { return "Configuration [asyncExecutorFactory=" + asyncExecutorFactory + ", balancingStrategyFactory=()->" + balancingStrategyFactory.get() + ",classLoader=" + classLoader + ", clientIntelligence=" + clientIntelligence + ", connectionPool=" + connectionPool + ", connectionTimeout=" + connectionTimeout + ", consistentHashImpl=" + Arrays.toString(consistentHashImpl) + ", forceReturnValues=" + forceReturnValues + ", keySizeEstimate=" + keySizeEstimate + ", marshallerClass=" + marshallerClass + ", marshaller=" + marshaller + ", protocolVersion=" + protocolVersion + ", servers=" + servers + ", socketTimeout=" + socketTimeout + ", security=" + security + ", tcpNoDelay=" + tcpNoDelay + ", tcpKeepAlive=" + tcpKeepAlive + ", valueSizeEstimate=" + valueSizeEstimate + ", maxRetries=" + maxRetries + ", serialAllowList=" + serialAllowList + ", batchSize=" + batchSize + ", nearCache=" + nearCache + ", remoteCaches= " + remoteCaches + ", transaction=" + transaction + ", statistics=" + statistics + "]"; }
@Test public void testHotRodURItoString() { HotRodURI uri = HotRodURI.create("hotrod://user:secret@host1?client_intelligence=BASIC"); assertEquals("hotrod://host1?client_intelligence=BASIC", uri.toString()); assertEquals("hotrod://user:secret@host1?client_intelligence=BASIC", uri.toString(true)); }
@Override public void process() { JMeterContext context = getThreadContext(); Sampler sam = context.getCurrentSampler(); SampleResult res = context.getPreviousResult(); HTTPSamplerBase sampler; HTTPSampleResult result; if (!(sam instanceof HTTPSamplerBase) || !(res instanceof HTTPSampleResult)) { log.info("Can't apply HTML Link Parser when the previous" + " sampler run is not an HTTP Request."); return; } else { sampler = (HTTPSamplerBase) sam; result = (HTTPSampleResult) res; } List<HTTPSamplerBase> potentialLinks = new ArrayList<>(); String responseText = result.getResponseDataAsString(); int index = responseText.indexOf('<'); // $NON-NLS-1$ if (index == -1) { index = 0; } if (log.isDebugEnabled()) { log.debug("Check for matches against: "+sampler.toString()); } Document html = (Document) HtmlParsingUtils.getDOM(responseText.substring(index)); addAnchorUrls(html, result, sampler, potentialLinks); addFormUrls(html, result, sampler, potentialLinks); addFramesetUrls(html, result, sampler, potentialLinks); if (!potentialLinks.isEmpty()) { HTTPSamplerBase url = potentialLinks.get(ThreadLocalRandom.current().nextInt(potentialLinks.size())); if (log.isDebugEnabled()) { log.debug("Selected: "+url.toString()); } sampler.setDomain(url.getDomain()); sampler.setPath(url.getPath()); if (url.getMethod().equals(HTTPConstants.POST)) { for (JMeterProperty jMeterProperty : sampler.getArguments()) { Argument arg = (Argument) jMeterProperty.getObjectValue(); modifyArgument(arg, url.getArguments()); } } else { sampler.setArguments(url.getArguments()); } sampler.setProtocol(url.getProtocol()); } else { log.debug("No matches found"); } }
@Test public void testSimpleParse3() throws Exception { HTTPSamplerBase config = makeUrlConfig(".*index.*"); config.getArguments().addArgument("param1", "value1"); HTTPSamplerBase context = makeContext("http://www.apache.org/subdir/previous.html"); String responseText = "<html><head><title>Test page</title></head><body>" + "<a href=\"/home/index.html?param1=value1\">" + "Goto index page</a></body></html>"; HTTPSampleResult result = new HTTPSampleResult(); result.setResponseData(responseText, null); result.setSampleLabel(context.toString()); result.setURL(context.getUrl()); jmctx.setCurrentSampler(context); jmctx.setCurrentSampler(config); jmctx.setPreviousResult(result); parser.process(); String newUrl = config.getUrl().toString(); assertEquals("http://www.apache.org/home/index.html?param1=value1", newUrl); }
public void generate() throws IOException { packageNameByTypes.clear(); generatePackageInfo(); generateTypeStubs(); generateMessageHeaderStub(); for (final List<Token> tokens : ir.messages()) { final Token msgToken = tokens.get(0); final List<Token> messageBody = getMessageBody(tokens); final boolean hasVarData = -1 != findSignal(messageBody, Signal.BEGIN_VAR_DATA); int i = 0; final List<Token> fields = new ArrayList<>(); i = collectFields(messageBody, i, fields); final List<Token> groups = new ArrayList<>(); i = collectGroups(messageBody, i, groups); final List<Token> varData = new ArrayList<>(); collectVarData(messageBody, i, varData); final String decoderClassName = formatClassName(decoderName(msgToken.name())); final String decoderStateClassName = decoderClassName + "#CodecStates"; final FieldPrecedenceModel decoderPrecedenceModel = precedenceChecks.createDecoderModel( decoderStateClassName, tokens); generateDecoder(decoderClassName, msgToken, fields, groups, varData, hasVarData, decoderPrecedenceModel); final String encoderClassName = formatClassName(encoderName(msgToken.name())); final String encoderStateClassName = encoderClassName + "#CodecStates"; final FieldPrecedenceModel encoderPrecedenceModel = precedenceChecks.createEncoderModel( encoderStateClassName, tokens); generateEncoder(encoderClassName, msgToken, fields, groups, varData, hasVarData, encoderPrecedenceModel); } }
@Test void shouldCreateTypesInDifferentPackages() throws Exception { try (InputStream in = Tests.getLocalResource("explicit-package-test-schema.xml")) { final ParserOptions options = ParserOptions.builder().stopOnError(true).build(); final MessageSchema schema = parse(in, options); final IrGenerator irg = new IrGenerator(); ir = irg.generate(schema); outputManager.clear(); outputManager.setPackageName(ir.applicableNamespace()); final JavaGenerator generator = new JavaGenerator( ir, BUFFER_NAME, READ_ONLY_BUFFER_NAME, false, false, false, true, outputManager); generator.generate(); final String encoderFqcn = ir.applicableNamespace() + ".TestMessageEncoder"; final Class<?> encoderClazz = compile(encoderFqcn); assertNotNull(encoderClazz); final String decoderFqcn = ir.applicableNamespace() + ".TestMessageDecoder"; final Class<?> decoderClazz = compile(decoderFqcn); assertNotNull(decoderClazz); final Map<String, CharSequence> sources = outputManager.getSources(); assertNotNull(sources.get("test.message.schema.common.CarEncoder")); assertNotNull(sources.get("test.message.schema.common.CarDecoder")); assertNotNull(sources.get("outside.schema.BooleanType")); assertNotNull(sources.get("outside.schema.DaysEncoder")); assertNotNull(sources.get("outside.schema.DaysDecoder")); assertNotNull(sources.get(ir.applicableNamespace() + ".MessageHeaderEncoder")); } }
@Override public boolean addClass(final Class<?> stepClass) { if (stepClasses.contains(stepClass)) { return true; } if (injectorSourceFromProperty == null) { if (hasInjectorSource(stepClass)) { checkOnlyOneClassHasInjectorSource(stepClass); withInjectorSource = stepClass; // Eager init to allow for static binding prior to before all // hooks injector = instantiateUserSpecifiedInjectorSource(withInjectorSource).getInjector(); } } stepClasses.add(stepClass); return true; }
@Test void shouldThrowExceptionIfTwoDifferentInjectorSourcesAreFound() { factory = new GuiceFactory(); assertTrue(factory.addClass(YourInjectorSource.class)); Executable testMethod = () -> factory.addClass(SecondInjectorSource.class); CucumberBackendException actualThrown = assertThrows(CucumberBackendException.class, testMethod); String exceptionMessage = String.format("" + "Glue class %1$s and %2$s are both implementing io.cucumber.guice.InjectorSource.\n" + "Please ensure only one class configures the Guice context\n" + "\n" + "By default Cucumber scans the entire classpath for context configuration.\n" + "You can restrict this by configuring the glue path.\n" + ClasspathSupport.configurationExamples(), SecondInjectorSource.class, YourInjectorSource.class); assertThat("Unexpected exception message", actualThrown.getMessage(), is(exceptionMessage)); }
public static Path compose(final Path root, final String path) { if(StringUtils.startsWith(path, String.valueOf(Path.DELIMITER))) { // Mount absolute path final String normalized = normalize(StringUtils.replace(path, "\\", String.valueOf(Path.DELIMITER)), true); if(StringUtils.equals(normalized, String.valueOf(Path.DELIMITER))) { return root; } return new Path(normalized, normalized.equals(String.valueOf(Path.DELIMITER)) ? EnumSet.of(Path.Type.volume, Path.Type.directory) : EnumSet.of(Path.Type.directory)); } else { final String normalized; if(StringUtils.startsWith(path, String.format("%s%s", Path.HOME, Path.DELIMITER))) { // Relative path to the home directory normalized = normalize(StringUtils.removeStart(StringUtils.removeStart( StringUtils.replace(path, "\\", String.valueOf(Path.DELIMITER)), Path.HOME), String.valueOf(Path.DELIMITER)), false); } else { // Relative path normalized = normalize(StringUtils.replace(path, "\\", String.valueOf(Path.DELIMITER)), false); } if(StringUtils.equals(normalized, String.valueOf(Path.DELIMITER))) { return root; } return new Path(String.format("%s%s%s", root.getAbsolute(), root.isRoot() ? StringUtils.EMPTY : Path.DELIMITER, normalized), EnumSet.of(Path.Type.directory)); } }
@Test public void testStartingWithHome() { final Path home = PathNormalizer.compose(new Path("/", EnumSet.of(Path.Type.directory)), String.format("%smyfolder/sub", Path.HOME)); assertEquals(new Path(String.format("/%smyfolder/sub", Path.HOME), EnumSet.of(Path.Type.directory)), home); assertEquals(new Path(String.format("/%smyfolder", Path.HOME), EnumSet.of(Path.Type.directory)), home.getParent()); }
@HighFrequencyInvocation public Optional<EncryptAlgorithm> findEncryptor(final String logicColumnName) { return columns.containsKey(logicColumnName) ? Optional.of(columns.get(logicColumnName).getCipher().getEncryptor()) : Optional.empty(); }
@Test void assertFindEncryptorName() { assertTrue(encryptTable.findEncryptor("logicColumn").isPresent()); }
@Override public boolean createTopic( final String topic, final int numPartitions, final short replicationFactor, final Map<String, ?> configs, final CreateTopicsOptions createOptions ) { final Optional<Long> retentionMs = KafkaTopicClient.getRetentionMs(configs); if (isTopicExists(topic)) { validateTopicProperties(topic, numPartitions, replicationFactor, retentionMs); return false; } final short resolvedReplicationFactor = replicationFactor == TopicProperties.DEFAULT_REPLICAS ? getDefaultClusterReplication() : replicationFactor; final NewTopic newTopic = new NewTopic(topic, numPartitions, resolvedReplicationFactor); newTopic.configs(toStringConfigs(configs)); try { LOG.info("Creating topic '{}' {}", topic, (createOptions.shouldValidateOnly()) ? "(ONLY VALIDATE)" : "" ); ExecutorUtil.executeWithRetries( () -> adminClient.get().createTopics( Collections.singleton(newTopic), createOptions ).all().get(), ExecutorUtil.RetryBehaviour.ON_RETRYABLE); return true; } catch (final InterruptedException e) { Thread.currentThread().interrupt(); throw new KafkaResponseGetFailedException( "Failed to guarantee existence of topic " + topic, e); } catch (final TopicExistsException e) { // if the topic already exists, it is most likely because another node just created it. // ensure that it matches the partition count, replication factor, and retention // before returning success validateTopicProperties(topic, numPartitions, replicationFactor, retentionMs); return false; } catch (final TopicAuthorizationException e) { throw new KsqlTopicAuthorizationException( AclOperation.CREATE, Collections.singleton(topic)); } catch (final Exception e) { throw new KafkaResponseGetFailedException( "Failed to guarantee existence of topic " + topic, e); } }
@Test public void shouldCreateTopicWithEmptyConfigs() { Map<String, ?> configs = ImmutableMap.of(); // When: kafkaTopicClient.createTopic("someTopic", 1, (short) 2); // Then: verify(adminClient).createTopics( eq(ImmutableSet.of(newTopic("someTopic", 1, 2, configs))), argThat(createOptions -> !createOptions.shouldValidateOnly()) ); }
@Override public void onMsg(TbContext ctx, TbMsg msg) { JsonObject json = JsonParser.parseString(msg.getData()).getAsJsonObject(); String tmp; if (msg.getOriginator().getEntityType() != EntityType.DEVICE) { ctx.tellFailure(msg, new RuntimeException("Message originator is not a device entity!")); } else if (!json.has("method")) { ctx.tellFailure(msg, new RuntimeException("Method is not present in the message!")); } else if (!json.has("params")) { ctx.tellFailure(msg, new RuntimeException("Params are not present in the message!")); } else { int requestId = json.has("requestId") ? json.get("requestId").getAsInt() : random.nextInt(); boolean restApiCall = msg.isTypeOf(TbMsgType.RPC_CALL_FROM_SERVER_TO_DEVICE); tmp = msg.getMetaData().getValue("oneway"); boolean oneway = !StringUtils.isEmpty(tmp) && Boolean.parseBoolean(tmp); tmp = msg.getMetaData().getValue(DataConstants.PERSISTENT); boolean persisted = !StringUtils.isEmpty(tmp) && Boolean.parseBoolean(tmp); tmp = msg.getMetaData().getValue("requestUUID"); UUID requestUUID = !StringUtils.isEmpty(tmp) ? UUID.fromString(tmp) : Uuids.timeBased(); tmp = msg.getMetaData().getValue("originServiceId"); String originServiceId = !StringUtils.isEmpty(tmp) ? tmp : null; tmp = msg.getMetaData().getValue(DataConstants.EXPIRATION_TIME); long expirationTime = !StringUtils.isEmpty(tmp) ? Long.parseLong(tmp) : (System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(config.getTimeoutInSeconds())); tmp = msg.getMetaData().getValue(DataConstants.RETRIES); Integer retries = !StringUtils.isEmpty(tmp) ? Integer.parseInt(tmp) : null; String params = parseJsonData(json.get("params")); String additionalInfo = parseJsonData(json.get(DataConstants.ADDITIONAL_INFO)); RuleEngineDeviceRpcRequest request = RuleEngineDeviceRpcRequest.builder() .oneway(oneway) .method(json.get("method").getAsString()) .body(params) .tenantId(ctx.getTenantId()) .deviceId(new DeviceId(msg.getOriginator().getId())) .requestId(requestId) .requestUUID(requestUUID) .originServiceId(originServiceId) .expirationTime(expirationTime) .retries(retries) .restApiCall(restApiCall) .persisted(persisted) .additionalInfo(additionalInfo) .build(); ctx.getRpcService().sendRpcRequestToDevice(request, ruleEngineDeviceRpcResponse -> { if (ruleEngineDeviceRpcResponse.getError().isEmpty()) { TbMsg next = ctx.newMsg(msg.getQueueName(), msg.getType(), msg.getOriginator(), msg.getCustomerId(), msg.getMetaData(), ruleEngineDeviceRpcResponse.getResponse().orElse(TbMsg.EMPTY_JSON_OBJECT)); ctx.enqueueForTellNext(next, TbNodeConnectionType.SUCCESS); } else { TbMsg next = ctx.newMsg(msg.getQueueName(), msg.getType(), msg.getOriginator(), msg.getCustomerId(), msg.getMetaData(), wrap("error", ruleEngineDeviceRpcResponse.getError().get().name())); ctx.enqueueForTellFailure(next, ruleEngineDeviceRpcResponse.getError().get().name()); } }); ctx.ack(msg); } }
@Test public void givenRetries_whenOnMsg_thenVerifyRequest() { given(ctxMock.getRpcService()).willReturn(rpcServiceMock); given(ctxMock.getTenantId()).willReturn(TENANT_ID); Integer retries = 3; TbMsgMetaData metadata = new TbMsgMetaData(); metadata.putValue(DataConstants.RETRIES, String.valueOf(retries)); TbMsg msg = TbMsg.newMsg(TbMsgType.RPC_CALL_FROM_SERVER_TO_DEVICE, DEVICE_ID, metadata, MSG_DATA); node.onMsg(ctxMock, msg); ArgumentCaptor<RuleEngineDeviceRpcRequest> requestCaptor = captureRequest(); assertThat(requestCaptor.getValue().getRetries()).isEqualTo(retries); }
@Override public Schema getSourceSchema() { if (schema == null) { try { Schema.Parser parser = new Schema.Parser(); schema = parser.parse(schemaString); } catch (Exception e) { throw new HoodieSchemaException("Failed to parse schema: " + schemaString, e); } } return schema; }
@Test public void validateWrappedPrimitiveAndTimestampsAsRecordSchemaGeneration() throws IOException { TypedProperties properties = new TypedProperties(); properties.setProperty(ProtoClassBasedSchemaProviderConfig.PROTO_SCHEMA_CLASS_NAME.key(), Sample.class.getName()); properties.setProperty(ProtoClassBasedSchemaProviderConfig.PROTO_SCHEMA_WRAPPED_PRIMITIVES_AS_RECORDS.key(), "true"); properties.setProperty(ProtoClassBasedSchemaProviderConfig.PROTO_SCHEMA_TIMESTAMPS_AS_RECORDS.key(), "true"); ProtoClassBasedSchemaProvider protoToAvroSchemaProvider = new ProtoClassBasedSchemaProvider(properties, null); Schema convertedSchema = protoToAvroSchemaProvider.getSourceSchema(); Schema.Parser parser = new Schema.Parser(); Schema expectedSchema = parser.parse(getClass().getClassLoader().getResourceAsStream("schema-provider/proto/sample_schema_wrapped_and_timestamp_as_record.avsc")); Assertions.assertEquals(expectedSchema, convertedSchema); }
@Override public Timestamp getTimestamp(final int columnIndex) throws SQLException { return (Timestamp) ResultSetUtils.convertValue(mergeResultSet.getValue(columnIndex, Timestamp.class), Timestamp.class); }
@Test void assertGetTimestampWithColumnLabel() throws SQLException { when(mergeResultSet.getValue(1, Timestamp.class)).thenReturn(new Timestamp(0L)); assertThat(shardingSphereResultSet.getTimestamp("label"), is(new Timestamp(0L))); }
@Override public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException { final PathAttributes attr = super.find(file, listener); if(StringUtils.isNotBlank(file.attributes().getVersionId())) { return attr.withVersionId(file.attributes().getVersionId()).withFileId(file.attributes().getFileId()); } return attr; }
@Test public void testFindLock() throws Exception { final Path test = new DAVTouchFeature(new NextcloudWriteFeature(session)).touch(new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); final NextcloudAttributesFinderFeature f = new NextcloudAttributesFinderFeature(session); assertNull(f.find(test).getLockId()); final String lockId = new DAVLockFeature(session).lock(test); assertNotNull(f.find(test).getLockId()); assertThrows(LockedException.class, () -> new DAVDeleteFeature(session).delete(Collections.singletonList(test), new DisabledPasswordCallback(), new Delete.DisabledCallback())); new DAVLockFeature(session).unlock(test, lockId); new DAVDeleteFeature(session).delete(Collections.singletonList(test), new DisabledPasswordCallback(), new Delete.DisabledCallback()); }
public String getPrefix() { return ENABLE_HTTPS ? HTTPS_PREFIX : HTTP_PREFIX; }
@Test void testGetPrefix() { assertEquals(HTTP_PREFIX, NamingHttpClientManager.getInstance().getPrefix()); }
@Override public OAuth2AccessTokenDO grantAuthorizationCodeForAccessToken(String clientId, String code, String redirectUri, String state) { OAuth2CodeDO codeDO = oauth2CodeService.consumeAuthorizationCode(code); Assert.notNull(codeDO, "授权码不能为空"); // defensive programming // verify that the clientId matches if (!StrUtil.equals(clientId, codeDO.getClientId())) { throw exception(ErrorCodeConstants.OAUTH2_GRANT_CLIENT_ID_MISMATCH); } // verify that the redirectUri matches if (!StrUtil.equals(redirectUri, codeDO.getRedirectUri())) { throw exception(ErrorCodeConstants.OAUTH2_GRANT_REDIRECT_URI_MISMATCH); } // verify that the state matches state = StrUtil.nullToDefault(state, ""); // a null state in the database is stored as an empty string "" if (!StrUtil.equals(state, codeDO.getState())) { throw exception(ErrorCodeConstants.OAUTH2_GRANT_STATE_MISMATCH); } // create the access token return oauth2TokenService.createAccessToken(codeDO.getUserId(), codeDO.getUserType(), codeDO.getClientId(), codeDO.getScopes()); }
@Test public void testGrantAuthorizationCodeForAccessToken() { // prepare parameters String clientId = randomString(); String code = randomString(); List<String> scopes = Lists.newArrayList("read", "write"); String redirectUri = randomString(); String state = randomString(); // mock method (authorization code) OAuth2CodeDO codeDO = randomPojo(OAuth2CodeDO.class, o -> { o.setClientId(clientId); o.setRedirectUri(redirectUri); o.setState(state); o.setScopes(scopes); }); when(oauth2CodeService.consumeAuthorizationCode(eq(code))).thenReturn(codeDO); // mock method (create token) OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class); when(oauth2TokenService.createAccessToken(eq(codeDO.getUserId()), eq(codeDO.getUserType()), eq(codeDO.getClientId()), eq(codeDO.getScopes()))).thenReturn(accessTokenDO); // invoke and assert assertPojoEquals(accessTokenDO, oauth2GrantService.grantAuthorizationCodeForAccessToken( clientId, code, redirectUri, state)); }
public static <T> T getYamlDataObject(String path, Class<T> clazz) { try (FileInputStream fis = new FileInputStream(path)) { return getYamlDataObject(fis, clazz); } catch (FileNotFoundException ignore) { return null; } catch (Exception e) { throw new AclException(e.getMessage(), e); } }
@SuppressWarnings("unchecked") @Test public void testGetYamlDataObject() throws IOException { try (InputStream is = AclUtilsTest.class.getClassLoader().getResourceAsStream("conf/plain_acl_correct.yml")) { Map<String, Object> map = AclUtils.getYamlDataObject(is, Map.class); Assert.assertNotNull(map); Assert.assertFalse(map.isEmpty()); } }
@Override public RouteContext route(final ShardingRule shardingRule) { RouteContext result = new RouteContext(); Collection<Set<String>> broadcastDataSourceGroup = getBroadcastDataSourceGroup(getDataSourceGroup(shardingRule)); for (Set<String> each : broadcastDataSourceGroup) { String dataSourceName = getRandomDataSourceName(each); result.getRouteUnits().add(new RouteUnit(new RouteMapper(dataSourceName, dataSourceName), Collections.emptyList())); } return result; }
@Test void assertRoute() { List<List<String>> shards = new LinkedList<>(); shards.add(Arrays.asList("ds1", "ds2", "ds3")); shards.add(Arrays.asList("ds1", "ds2", "ds3")); shards.add(Arrays.asList("ds1", "ds2", "ds3")); Map<String, ShardingTable> shardingTables = mockShardingTables(shards); when(shardingRule.getShardingTables()).thenReturn(shardingTables); RouteContext routeContext = shardingDataSourceGroupBroadcastRoutingEngine.route(shardingRule); assertThat(routeContext.getRouteUnits().size(), is(1)); Iterator<RouteUnit> iterator = routeContext.getRouteUnits().iterator(); assertThat(Arrays.asList("ds1", "ds2", "ds3"), hasItems(iterator.next().getDataSourceMapper().getActualName())); }
@Override public ObjectNode encode(Criterion criterion, CodecContext context) { EncodeCriterionCodecHelper encoder = new EncodeCriterionCodecHelper(criterion, context); return encoder.encode(); }
@Test public void matchOchSignalTypeTest() { Criterion criterion = Criteria.matchOchSignalType(OchSignalType.FIXED_GRID); ObjectNode result = criterionCodec.encode(criterion, context); assertThat(result, matchesCriterion(criterion)); }
public static TObjectDependencyRes listObjectDependencies(TObjectDependencyReq req) { TAuthInfo auth = req.getAuth_info(); TObjectDependencyRes response = new TObjectDependencyRes(); UserIdentity currentUser; if (auth.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(auth.getCurrent_user_ident()); } else { currentUser = UserIdentity.createAnalyzedUserIdentWithIp(auth.getUser(), auth.getUser_ip()); } // list dependencies of mv Locker locker = new Locker(); Collection<Database> dbs = GlobalStateMgr.getCurrentState().getLocalMetastore().getFullNameToDb().values(); for (Database db : CollectionUtils.emptyIfNull(dbs)) { String catalog = Optional.ofNullable(db.getCatalogName()) .orElse(InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME); locker.lockDatabase(db, LockType.READ); try { for (Table table : db.getTables()) { // If it is not a materialized view, we do not need to verify permissions if (!table.isMaterializedView()) { continue; } // Only show tables with privilege try { Authorizer.checkAnyActionOnTableLikeObject(currentUser, null, db.getFullName(), table); } catch (AccessDeniedException e) { continue; } MaterializedView mv = (MaterializedView) table; for (BaseTableInfo refObj : CollectionUtils.emptyIfNull(mv.getBaseTableInfos())) { TObjectDependencyItem item = new TObjectDependencyItem(); item.setObject_id(mv.getId()); item.setObject_name(mv.getName()); item.setDatabase(db.getFullName()); item.setCatalog(catalog); item.setObject_type(mv.getType().toString()); item.setRef_object_id(refObj.getTableId()); item.setRef_database(refObj.getDbName()); item.setRef_catalog(refObj.getCatalogName()); Optional<Table> refTable = MvUtils.getTableWithIdentifier(refObj); item.setRef_object_type(getRefObjectType(refTable, mv.getName())); // If the ref table is dropped/swapped/renamed, the actual info would be inconsistent with // BaseTableInfo, so we use the source-of-truth information if (refTable.isEmpty()) { item.setRef_object_name(refObj.getTableName()); } else { item.setRef_object_name(refTable.get().getName()); } response.addToItems(item); } } } finally { locker.unLockDatabase(db, LockType.READ); } } return response; }
@Test public void testObjectDependencies() throws Exception { starRocksAssert.withTable("CREATE TABLE test.test_mv_base_table\n" + "(\n" + " k1 date,\n" + " k2 int,\n" + " v1 int sum\n" + ")\n" + "PARTITION BY RANGE(k1)\n" + "(\n" + " PARTITION p1 values [('2022-02-01'),('2022-02-16')),\n" + " PARTITION p2 values [('2022-02-16'),('2022-03-01'))\n" + ")\n" + "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" + "PROPERTIES('replication_num' = '1');") .withMaterializedView("create materialized view test.mv_1\n" + "PARTITION BY k1\n" + "distributed by hash(k2) buckets 3\n" + "refresh async\n" + "as select k1, k2, sum(v1) as total from test_mv_base_table group by k1, k2;") .withMaterializedView("create materialized view test.mv_2\n" + "PARTITION BY date_trunc('month', k1)\n" + "distributed by hash(k2) buckets 3\n" + "refresh async\n" + "as select k1, k2, sum(v1) as total from test_mv_base_table group by k1, k2;"); String grantSql2 = "GRANT ALL ON TABLE test.test_mv_base_table TO USER `test_mv`@`%`;"; String grantSql = "GRANT ALL ON MATERIALIZED VIEW test.mv_1 TO USER `test_mv`@`%`;"; String grantSql1 = "GRANT ALL ON MATERIALIZED VIEW test.mv_2 TO USER `test_mv`@`%`;"; DDLStmtExecutor.execute(UtFrameUtils.parseStmtWithNewParser(grantSql2, connectContext), connectContext); DDLStmtExecutor.execute(UtFrameUtils.parseStmtWithNewParser(grantSql, connectContext), connectContext); DDLStmtExecutor.execute(UtFrameUtils.parseStmtWithNewParser(grantSql1, connectContext), connectContext); TObjectDependencyReq dependencyReq = buildRequest(); TObjectDependencyRes objectDependencyRes = SysObjectDependencies.listObjectDependencies(dependencyReq); Assert.assertNotNull(objectDependencyRes); Assert.assertEquals(2, objectDependencyRes.getItemsSize()); Assert.assertEquals("OLAP", objectDependencyRes.getItems().get(0).getRef_object_type()); }
void handleSegmentWithDeleteSegmentFinishedState(RemoteLogSegmentId remoteLogSegmentId) { // It completely removes the tracking of this segment as it is considered as deleted. unreferencedSegmentIds.remove(remoteLogSegmentId); }
@Test void handleSegmentWithDeleteSegmentFinishedState() { RemoteLogSegmentId segmentId = new RemoteLogSegmentId(tpId, Uuid.randomUuid()); epochState.handleSegmentWithCopySegmentStartedState(segmentId); assertEquals(1, epochState.unreferencedSegmentIds().size()); epochState.handleSegmentWithDeleteSegmentFinishedState(segmentId); assertTrue(epochState.unreferencedSegmentIds().isEmpty()); }
Double applyMax(double predictionDouble) { return targetField.getMax() != null ? Math.min(targetField.getMax(), predictionDouble) : predictionDouble; }
@Test void applyMax() { TargetField targetField = new TargetField(Collections.emptyList(), null, "string", null, null, null, null, null); KiePMMLTarget kiePMMLTarget = getBuilder(targetField).build(); assertThat(kiePMMLTarget.applyMax(4.33)).isCloseTo(4.33, Offset.offset(0.0)); targetField = new TargetField(Collections.emptyList(), null, "string", null, null, 4.34, null, null); kiePMMLTarget = getBuilder(targetField).build(); assertThat(kiePMMLTarget.applyMax(4.33)).isCloseTo(4.33, Offset.offset(0.0)); assertThat(kiePMMLTarget.applyMax(4.35)).isCloseTo(4.34, Offset.offset(0.0)); }
public Containerizer withAdditionalTag(String tag) { Preconditions.checkArgument(ImageReference.isValidTag(tag), "invalid tag '%s'", tag); additionalTags.add(tag); return this; }
@Test public void testWithAdditionalTag() throws InvalidImageReferenceException { Containerizer containerizer = Containerizer.to(DockerDaemonImage.named("image")); containerizer.withAdditionalTag("tag"); try { containerizer.withAdditionalTag("+invalid+"); Assert.fail(); } catch (IllegalArgumentException ex) { Assert.assertEquals("invalid tag '+invalid+'", ex.getMessage()); } }
public Node parse() throws ScanException { if (tokenList == null || tokenList.isEmpty()) return null; return E(); }
@Test public void withColon() throws ScanException { Tokenizer tokenizer = new Tokenizer("a:${b}"); Parser parser = new Parser(tokenizer.tokenize()); Node node = parser.parse(); Node witness = new Node(Node.Type.LITERAL, "a"); Node t = witness.next = new Node(Node.Type.LITERAL, ":"); t.next = new Node(Node.Type.VARIABLE, new Node(Node.Type.LITERAL, "b")); assertEquals(witness, node); }
public void doesNotContainCell( @Nullable Object rowKey, @Nullable Object colKey, @Nullable Object value) { doesNotContainCell( Tables.<@Nullable Object, @Nullable Object, @Nullable Object>immutableCell( rowKey, colKey, value)); }
@Test public void doesNotContainCell() { ImmutableTable<String, String, String> table = ImmutableTable.of("row", "col", "val"); assertThat(table).doesNotContainCell("row", "row", "val"); assertThat(table).doesNotContainCell("col", "row", "val"); assertThat(table).doesNotContainCell("col", "col", "val"); assertThat(table).doesNotContainCell(null, null, null); assertThat(table).doesNotContainCell(cell("row", "row", "val")); assertThat(table).doesNotContainCell(cell("col", "row", "val")); assertThat(table).doesNotContainCell(cell("col", "col", "val")); assertThat(table).doesNotContainCell(cell(null, null, null)); }
@Override public void setNullAt(int ordinal) { if (ordinal < metaFields.length) { metaFields[ordinal] = null; } else { sourceRow.setNullAt(rebaseOrdinal(ordinal)); } }
@Test public void testIsNullCheck() { for (int i = 0; i < 16; i++) { Object[] values = getRandomValue(true); InternalRow row = new GenericInternalRow(values); HoodieInternalRow hoodieInternalRow = new HoodieInternalRow(UTF8String.fromString("commitTime"), UTF8String.fromString("commitSeqNo"), UTF8String.fromString("recordKey"), UTF8String.fromString("partitionPath"), UTF8String.fromString("fileName"), row, true); hoodieInternalRow.setNullAt(i); nullIndices.clear(); nullIndices.add(i); assertValues(hoodieInternalRow, "commitTime", "commitSeqNo", "recordKey", "partitionPath", "fileName", values, nullIndices); } // try setting multiple values as null // run it for 5 rounds for (int i = 0; i < 5; i++) { int numNullValues = 1 + RANDOM.nextInt(4); List<Integer> nullsSoFar = new ArrayList<>(); while (nullsSoFar.size() < numNullValues) { int randomIndex = RANDOM.nextInt(16); if (!nullsSoFar.contains(randomIndex)) { nullsSoFar.add(randomIndex); } } Object[] values = getRandomValue(true); InternalRow row = new GenericInternalRow(values); HoodieInternalRow hoodieInternalRow = new HoodieInternalRow(UTF8String.fromString("commitTime"), UTF8String.fromString("commitSeqNo"), UTF8String.fromString("recordKey"), UTF8String.fromString("partitionPath"), UTF8String.fromString("fileName"), row, true); nullIndices.clear(); for (Integer index : nullsSoFar) { hoodieInternalRow.setNullAt(index); nullIndices.add(index); } assertValues(hoodieInternalRow, "commitTime", "commitSeqNo", "recordKey", "partitionPath", "fileName", values, nullIndices); } }
public static PMML4Result evaluate(final KiePMMLModel model, final PMMLRuntimeContext context) { if (logger.isDebugEnabled()) { logger.debug("evaluate {} {}", model, context); } addStep(() -> getStep(START, model, context.getRequestData()), context); final ProcessingDTO processingDTO = preProcess(model, context); addStep(() -> getStep(PRE_EVALUATION, model, context.getRequestData()), context); PMMLModelEvaluator executor = getFromPMMLModelType(model.getPmmlMODEL()) .orElseThrow(() -> new KiePMMLException(String.format("PMMLModelEvaluator not found for model %s", model.getPmmlMODEL()))); PMML4Result toReturn = executor.evaluate(model, context); addStep(() -> getStep(POST_EVALUATION, model, context.getRequestData()), context); postProcess(toReturn, model, context, processingDTO); addStep(() -> getStep(END, model, context.getRequestData()), context); return toReturn; }
@Test void evaluate() { modelLocalUriId = getModelLocalUriIdFromPmmlIdFactory(FILE_NAME, MODEL_NAME); PMMLRuntimeContext pmmlContext = getPMMLContext(FILE_NAME, MODEL_NAME, memoryCompilerClassLoader); KiePMMLModelFactory kiePmmlModelFactory = PMMLLoaderUtils.loadKiePMMLModelFactory(modelLocalUriId, pmmlContext); List<KiePMMLModel> kiePMMLModels = kiePmmlModelFactory.getKiePMMLModels(); PMML4Result retrieved = PMMLRuntimeHelper.evaluate(kiePMMLModels, pmmlContext); assertThat(retrieved).isNotNull(); commonEvaluatePMML4Result(retrieved, pmmlContext.getRequestData()); }
public static SchemaKStream<?> buildSource( final PlanBuildContext buildContext, final DataSource dataSource, final QueryContext.Stacker contextStacker ) { final boolean windowed = dataSource.getKsqlTopic().getKeyFormat().isWindowed(); switch (dataSource.getDataSourceType()) { case KSTREAM: return windowed ? buildWindowedStream( buildContext, dataSource, contextStacker ) : buildStream( buildContext, dataSource, contextStacker ); case KTABLE: return windowed ? buildWindowedTable( buildContext, dataSource, contextStacker ) : buildTable( buildContext, dataSource, contextStacker ); default: throw new UnsupportedOperationException("Source type:" + dataSource.getDataSourceType()); } }
@Test public void shouldReplaceWindowedTableSourceWithMatchingPseudoColumnVersion() { // Given: givenWindowedTable(); givenExistingQueryWithOldPseudoColumnVersion(windowedTableSource); // When: final SchemaKStream<?> result = SchemaKSourceFactory.buildSource( buildContext, dataSource, contextStacker ); // Then: assertThat(((WindowedTableSource) result.getSourceStep()).getPseudoColumnVersion(), equalTo(LEGACY_PSEUDOCOLUMN_VERSION_NUMBER)); assertValidSchema(result); }
@Override public <K, T> UncommittedBundle<T> createKeyedBundle( StructuralKey<K> key, PCollection<T> output) { return UncommittedImmutableListBundle.create(output, key); }
@Test public void keyedWithNullKeyShouldCreateKeyedBundle() throws Exception { createKeyedBundle(VoidCoder.of(), null); }
public EndpointResponse getStatus(final String type, final String entity, final String action) { final CommandId commandId = new CommandId(type, entity, action); final Optional<CommandStatus> commandStatus = statementExecutor.getStatus(commandId); return commandStatus.map(EndpointResponse::ok) .orElseGet(() -> Errors.notFound("Command not found")); }
@Test public void testGetStatusNotFound() throws Exception { final StatusResource testResource = getTestStatusResource(); final EndpointResponse response = testResource.getStatus( CommandId.Type.STREAM.name(), "foo", CommandId.Action.CREATE.name()); assertThat(response.getStatus(), equalTo(NOT_FOUND.code())); assertThat(response.getEntity(), instanceOf(KsqlErrorMessage.class)); final KsqlErrorMessage errorMessage = (KsqlErrorMessage)response.getEntity(); assertThat(errorMessage.getErrorCode(), equalTo(Errors.ERROR_CODE_NOT_FOUND)); assertThat(errorMessage.getMessage(), equalTo("Command not found")); }
public LeaderService(WorkerService workerService, PulsarClient pulsarClient, FunctionAssignmentTailer functionAssignmentTailer, SchedulerManager schedulerManager, FunctionRuntimeManager functionRuntimeManager, FunctionMetaDataManager functionMetaDataManager, MembershipManager membershipManager, ErrorNotifier errorNotifier) { this.workerConfig = workerService.getWorkerConfig(); this.pulsarClient = pulsarClient; this.functionAssignmentTailer = functionAssignmentTailer; this.schedulerManager = schedulerManager; this.functionRuntimeManager = functionRuntimeManager; this.functionMetaDataManager = functionMetaDataManager; this.membershipManager = membershipManager; this.errorNotifier = errorNotifier; consumerName = String.format( "%s:%s:%d", workerConfig.getWorkerId(), workerConfig.getWorkerHostname(), workerConfig.getTlsEnabled() ? workerConfig.getWorkerPortTls() : workerConfig.getWorkerPort() ); }
@Test public void testLeaderService() throws Exception { MessageId messageId = new MessageIdImpl(1, 2, -1); when(schedulerManager.getLastMessageProduced()).thenReturn(messageId); assertFalse(leaderService.isLeader()); verify(mockClient, times(1)).newConsumer(); listenerHolder.get().becameActive(mockConsumer, 0); assertTrue(leaderService.isLeader()); verify(functionMetadataManager, times(1)).getIsInitialized(); verify(metadataManagerInitFuture, times(1)).get(); verify(functionRuntimeManager, times(1)).getIsInitialized(); verify(runtimeManagerInitFuture, times(1)).get(); verify(functionMetadataManager, times(1)).acquireExclusiveWrite(any()); verify(schedulerManager, times(1)).acquireExclusiveWrite(any()); verify(functionAssignmentTailer, times(1)).triggerReadToTheEndAndExit(); verify(functionAssignmentTailer, times(1)).close(); verify(schedulerManager, times((1))).initialize(any()); listenerHolder.get().becameInactive(mockConsumer, 0); assertFalse(leaderService.isLeader()); verify(functionAssignmentTailer, times(1)).startFromMessage(messageId); verify(schedulerManager, times(1)).close(); verify(functionMetadataManager, times(1)).giveupLeadership(); }
public static void assignFieldParams(Object bean, Map<String, Param> params) throws TikaConfigException { Class<?> beanClass = bean.getClass(); if (!PARAM_INFO.containsKey(beanClass)) { synchronized (TikaConfig.class) { if (!PARAM_INFO.containsKey(beanClass)) { List<AccessibleObject> aObjs = collectInfo(beanClass, org.apache.tika.config.Field.class); List<ParamField> fields = new ArrayList<>(aObjs.size()); for (AccessibleObject aObj : aObjs) { fields.add(new ParamField(aObj)); } PARAM_INFO.put(beanClass, fields); } } } List<ParamField> fields = PARAM_INFO.get(beanClass); for (ParamField field : fields) { Param<?> param = params.get(field.getName()); if (param != null) { if (field.getType().isAssignableFrom(param.getType())) { try { field.assignValue(bean, param.getValue()); } catch (InvocationTargetException e) { LOG.error("Error assigning value '{}' to '{}'", param.getValue(), param.getName()); final Throwable cause = e.getCause() == null ? e : e.getCause(); throw new TikaConfigException(cause.getMessage(), cause); } catch (IllegalAccessException e) { LOG.error("Error assigning value '{}' to '{}'", param.getValue(), param.getName()); throw new TikaConfigException(e.getMessage(), e); } } else { String msg = String.format(Locale.ROOT, "Value '%s' of type '%s' can't be" + " assigned to field '%s' of defined type '%s'", param.getValue(), param.getValue().getClass(), field.getName(), field.getType()); throw new TikaConfigException(msg); } } else if (field.isRequired()) { //param not supplied but field is declared as required? String msg = String.format(Locale.ROOT, "Param %s is required for %s," + " but it is not given in config.", field.getName(), bean.getClass().getName()); throw new TikaConfigException(msg); } else { LOG.debug("Param not supplied, field is not mandatory"); } } }
@Test public void testParserInheritance() { class Parent { @Field(required = true) int overridden; @Field(required = true) int parentField; } class Child extends Parent { @Field(required = true) int overridden; @Field(required = true) int childField; } int val = 1; Map<String, Param> params = new HashMap<>(); params.put("overridden", new Param<>("overridden", val)); params.put("parentField", new Param<>("parentField", val)); params.put("childField", new Param<>("childField", val)); try { Child child = new Child(); AnnotationUtils.assignFieldParams(child, params); assertEquals(child.overridden, val); assertEquals(child.parentField, val); assertEquals(child.childField, val); } catch (TikaConfigException e) { e.printStackTrace(); fail("Exception Not expected"); } try { params.remove("parentField"); AnnotationUtils.assignFieldParams(new Child(), params); fail("Exception expected, parent class field not set"); } catch (TikaConfigException e) { //expected } }
@Override public Expression getExpression(String tableName, Alias tableAlias) { // only apply data permission handling when there is a logged-in user LoginUser loginUser = SecurityFrameworkUtils.getLoginUser(); if (loginUser == null) { return null; } // only apply data permission handling for admin-type users if (ObjectUtil.notEqual(loginUser.getUserType(), UserTypeEnum.ADMIN.getValue())) { return null; } // obtain the data permission DeptDataPermissionRespDTO deptDataPermission = loginUser.getContext(CONTEXT_KEY, DeptDataPermissionRespDTO.class); // if it is not in the context, fetch it via the permission API if (deptDataPermission == null) { deptDataPermission = permissionApi.getDeptDataPermission(loginUser.getId()); if (deptDataPermission == null) { log.error("[getExpression][LoginUser({}) data permission is null]", JsonUtils.toJsonString(loginUser)); throw new NullPointerException(String.format("LoginUser(%d) Table(%s/%s) returned no data permission", loginUser.getId(), tableName, tableAlias.getName())); } // cache it in the context to avoid recomputation loginUser.setContext(CONTEXT_KEY, deptDataPermission); } // case 1: ALL means everything is visible, so no condition is needed if (deptDataPermission.getAll()) { return null; } // case 2: neither departments nor self are visible, so there is no permission at all if (CollUtil.isEmpty(deptDataPermission.getDeptIds()) && Boolean.FALSE.equals(deptDataPermission.getSelf())) { return new EqualsTo(null, null); // WHERE null = null guarantees that no rows are returned } // case 3: build the Dept and User conditions, then combine them Expression deptExpression = buildDeptExpression(tableName, tableAlias, deptDataPermission.getDeptIds()); Expression userExpression = buildUserExpression(tableName, tableAlias, deptDataPermission.getSelf(), loginUser.getId()); if (deptExpression == null && userExpression == null) { // TODO 芋艿: when no condition can be built, do not throw for now; return no data instead log.warn("[getExpression][LoginUser({}) Table({}/{}) DeptDataPermission({}) built an empty condition]", JsonUtils.toJsonString(loginUser), tableName, tableAlias, JsonUtils.toJsonString(deptDataPermission)); // throw new NullPointerException(String.format("LoginUser(%d) Table(%s/%s) built an empty condition", // loginUser.getId(), tableName, tableAlias.getName())); return EXPRESSION_NULL; } if (deptExpression == null) { return userExpression; } if (userExpression == null) { return deptExpression; } // currently, when specific departments plus self-visibility are both granted, use an OR condition, i.e. WHERE (dept_id IN ? OR user_id = ?) return new Parenthesis(new OrExpression(deptExpression, userExpression)); }
@Test // no LoginUser public void testGetExpression_noLoginUser() { // prepare parameters String tableName = randomString(); Alias tableAlias = new Alias(randomString()); // mock methods // invoke Expression expression = rule.getExpression(tableName, tableAlias); // assert assertNull(expression); }
@Override public Optional<DatabaseAdminExecutor> create(final SQLStatementContext sqlStatementContext) { SQLStatement sqlStatement = sqlStatementContext.getSqlStatement(); if (sqlStatement instanceof ShowFunctionStatusStatement) { return Optional.of(new ShowFunctionStatusExecutor((ShowFunctionStatusStatement) sqlStatement)); } if (sqlStatement instanceof ShowProcedureStatusStatement) { return Optional.of(new ShowProcedureStatusExecutor((ShowProcedureStatusStatement) sqlStatement)); } if (sqlStatement instanceof ShowTablesStatement) { return Optional.of(new ShowTablesExecutor((ShowTablesStatement) sqlStatement, sqlStatementContext.getDatabaseType())); } return Optional.empty(); }
@Test void assertCreateWithSelectStatementForShowConnectionId() { MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); when(selectStatement.getFrom()).thenReturn(Optional.empty()); ProjectionsSegment projectionsSegment = mock(ProjectionsSegment.class); when(projectionsSegment.getProjections()).thenReturn(Collections.singletonList(new ExpressionProjectionSegment(0, 10, "CONNECTION_ID()"))); when(selectStatement.getProjections()).thenReturn(projectionsSegment); when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional<DatabaseAdminExecutor> actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select CONNECTION_ID()", "", Collections.emptyList()); assertTrue(actual.isPresent()); assertThat(actual.get(), instanceOf(ShowConnectionIdExecutor.class)); }
List<OffsetRange> getBundleSizes(int desiredNumBundles, long start, long end) { List<OffsetRange> result = new ArrayList<>(); double[] relativeSizes = getRelativeBundleSizes(desiredNumBundles); // Generate offset ranges proportional to the relative sizes. double s = sum(relativeSizes); long startOffset = start; double sizeSoFar = 0; for (int i = 0; i < relativeSizes.length; ++i) { sizeSoFar += relativeSizes[i]; long endOffset = (i == relativeSizes.length - 1) ? end : (long) (start + sizeSoFar * (end - start) / s); if (startOffset != endOffset) { result.add(new OffsetRange(startOffset, endOffset)); } startOffset = endOffset; } return result; }
@Test public void bundleSizesShouldBeProportionalToTheOneSuggestedInBundleSizeDistribution() { long expectedBundleSize = 4; options.bundleSizeDistribution = fromRealDistribution(new ConstantRealDistribution(2)); options.numRecords = 16; splitter = new BundleSplitter(options); List<OffsetRange> bundleSizes = splitter.getBundleSizes(4, 0, options.numRecords); bundleSizes.stream() .map(range -> range.getTo() - range.getFrom()) .forEach(size -> assertEquals(expectedBundleSize, size.intValue())); }
List<DataflowPackage> stageClasspathElements( Collection<StagedFile> classpathElements, String stagingPath, CreateOptions createOptions) { return stageClasspathElements(classpathElements, stagingPath, DEFAULT_SLEEPER, createOptions); }
@Test public void testPackageUploadIsSkippedWhenFileAlreadyExists() throws Exception { File tmpFile = makeFileWithContents("file.txt", "This is a test!"); when(mockGcsUtil.getObjects(anyListOf(GcsPath.class))) .thenReturn( ImmutableList.of( StorageObjectOrIOException.create( createStorageObject(STAGING_PATH, tmpFile.length())))); defaultPackageUtil.stageClasspathElements( ImmutableList.of(makeStagedFile(tmpFile.getAbsolutePath())), STAGING_PATH, createOptions); verify(mockGcsUtil).getObjects(anyListOf(GcsPath.class)); verifyNoMoreInteractions(mockGcsUtil); }
@Override protected Optional<ErrorResponse> filter(DiscFilterRequest req) { var now = clock.instant(); var bearerToken = requestBearerToken(req).orElse(null); if (bearerToken == null) { log.fine("Missing bearer token"); return Optional.of(new ErrorResponse(Response.Status.UNAUTHORIZED, "Unauthorized")); } var permission = Permission.getRequiredPermission(req).orElse(null); if (permission == null) return Optional.of(new ErrorResponse(Response.Status.FORBIDDEN, "Forbidden")); var requestTokenHash = requestTokenHash(bearerToken); var clientIds = new TreeSet<String>(); var permissions = EnumSet.noneOf(Permission.class); var matchedTokens = new HashSet<TokenVersion>(); for (Client c : allowedClients) { if (!c.permissions().contains(permission)) continue; var matchedToken = c.tokens().get(requestTokenHash); if (matchedToken == null) continue; var expiration = matchedToken.expiration().orElse(null); if (expiration != null && now.isAfter(expiration)) continue; matchedTokens.add(matchedToken); clientIds.add(c.id()); permissions.addAll(c.permissions()); } if (clientIds.isEmpty()) return Optional.of(new ErrorResponse(Response.Status.FORBIDDEN, "Forbidden")); if (matchedTokens.size() > 1) { log.warning("Multiple tokens matched for request %s" .formatted(matchedTokens.stream().map(TokenVersion::id).toList())); return Optional.of(new ErrorResponse(Response.Status.FORBIDDEN, "Forbidden")); } var matchedToken = matchedTokens.stream().findAny().get(); addAccessLogEntry(req, "token.id", matchedToken.id()); addAccessLogEntry(req, "token.hash", matchedToken.fingerprint().toDelimitedHexString()); addAccessLogEntry(req, "token.exp", matchedToken.expiration().map(Instant::toString).orElse("<none>")); ClientPrincipal.attachToRequest(req, clientIds, permissions); return Optional.empty(); }
@Test void fails_for_expired_token() { var entry = new AccessLogEntry(); var req = FilterTestUtils.newRequestBuilder() .withMethod(Method.GET) .withAccessLogEntry(entry) .withHeader("Authorization", "Bearer " + READ_TOKEN.secretTokenString()) .build(); var filter = newFilterWithClientsConfig(); var responseHandler = new MockResponseHandler(); filter.filter(req, responseHandler); assertNull(responseHandler.getResponse()); clock.advance(Duration.ofDays(1)); responseHandler = new MockResponseHandler(); filter.filter(req, responseHandler); assertNull(responseHandler.getResponse()); clock.advance(Duration.ofMillis(1)); responseHandler = new MockResponseHandler(); filter.filter(req, responseHandler); assertNotNull(responseHandler.getResponse()); assertEquals(FORBIDDEN, responseHandler.getResponse().getStatus()); }
public void markAsUnchanged(DefaultInputFile file) { if (isFeatureActive()) { if (file.status() != InputFile.Status.SAME) { LOG.error("File '{}' was marked as unchanged but its status is {}", file.getProjectRelativePath(), file.status()); } else { LOG.debug("File '{}' marked as unchanged", file.getProjectRelativePath()); file.setMarkedAsUnchanged(true); } } }
@Test public void dont_mark_file_if_sensor_is_not_enabled() { executingSensorContext.setSensorExecuting(new SensorId("cpp", "other")); UnchangedFilesHandler handler = new UnchangedFilesHandler(enabledConfig, defaultBranchConfig, executingSensorContext); handler.markAsUnchanged(file); verifyNoInteractions(file); }
@Override public void doInject(RequestResource resource, RamContext context, LoginIdentityContext result) { String accessKey = context.getAccessKey(); String secretKey = context.getSecretKey(); // STS temporary credential authentication takes priority over AK/SK authentication if (StsConfig.getInstance().isStsOn()) { StsCredential stsCredential = StsCredentialHolder.getInstance().getStsCredential(); accessKey = stsCredential.getAccessKeyId(); secretKey = stsCredential.getAccessKeySecret(); result.setParameter(IdentifyConstants.SECURITY_TOKEN_HEADER, stsCredential.getSecurityToken()); } if (StringUtils.isNotEmpty(accessKey) && StringUtils.isNotBlank(secretKey)) { result.setParameter(ACCESS_KEY_HEADER, accessKey); } String signatureKey = secretKey; if (StringUtils.isNotEmpty(context.getRegionId())) { signatureKey = CalculateV4SigningKeyUtil.finalSigningKeyStringWithDefaultInfo(secretKey, context.getRegionId()); result.setParameter(RamConstants.SIGNATURE_VERSION, RamConstants.V4); } Map<String, String> signHeaders = SpasAdapter.getSignHeaders(getResource(resource.getNamespace(), resource.getGroup()), signatureKey); result.setParameters(signHeaders); }
@Test void testDoInjectWithoutResource() throws Exception { resource = new RequestResource(); LoginIdentityContext actual = new LoginIdentityContext(); configResourceInjector.doInject(resource, ramContext, actual); assertEquals(3, actual.getAllKey().size()); assertEquals(PropertyKeyConst.ACCESS_KEY, actual.getParameter("Spas-AccessKey")); assertTrue(actual.getAllKey().contains("Timestamp")); assertTrue(actual.getAllKey().contains("Spas-Signature")); }
@Override public ChannelFuture writeHeaders(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int padding, boolean endStream, ChannelPromise promise) { return writeHeaders0(ctx, streamId, headers, false, 0, (short) 0, false, padding, endStream, promise); }
@Test public void headersWriteShouldHalfCloseAfterOnErrorForPreCreatedStream() throws Exception { final ChannelPromise promise = newPromise(); final Throwable ex = new RuntimeException(); // Fake an encoding error, like HPACK's HeaderListSizeException when(writer.writeHeaders(eq(ctx), eq(STREAM_ID), eq(EmptyHttp2Headers.INSTANCE), eq(0), eq(true), eq(promise))) .thenAnswer(new Answer<ChannelFuture>() { @Override public ChannelFuture answer(InvocationOnMock invocation) { promise.setFailure(ex); return promise; } }); writeAllFlowControlledFrames(); Http2Stream stream = createStream(STREAM_ID, false); encoder.writeHeaders(ctx, STREAM_ID, EmptyHttp2Headers.INSTANCE, 0, true, promise); assertTrue(promise.isDone()); assertFalse(promise.isSuccess()); assertFalse(stream.isHeadersSent()); InOrder inOrder = inOrder(lifecycleManager); inOrder.verify(lifecycleManager).onError(eq(ctx), eq(true), eq(ex)); inOrder.verify(lifecycleManager).closeStreamLocal(eq(stream(STREAM_ID)), eq(promise)); }
public static void checkMock(Class<?> interfaceClass, AbstractInterfaceConfig config) { String mock = config.getMock(); if (ConfigUtils.isEmpty(mock)) { return; } String normalizedMock = MockInvoker.normalizeMock(mock); if (normalizedMock.startsWith(RETURN_PREFIX)) { normalizedMock = normalizedMock.substring(RETURN_PREFIX.length()).trim(); try { // Check whether the mock value is legal, if it is illegal, throw exception MockInvoker.parseMockValue(normalizedMock); } catch (Exception e) { throw new IllegalStateException( "Illegal mock return in <dubbo:service/reference ... " + "mock=\"" + mock + "\" />"); } } else if (normalizedMock.startsWith(THROW_PREFIX)) { normalizedMock = normalizedMock.substring(THROW_PREFIX.length()).trim(); if (ConfigUtils.isNotEmpty(normalizedMock)) { try { // Check whether the mock value is legal MockInvoker.getThrowable(normalizedMock); } catch (Exception e) { throw new IllegalStateException( "Illegal mock throw in <dubbo:service/reference ... " + "mock=\"" + mock + "\" />"); } } } else { // Check whether the mock class is a implementation of the interfaceClass, and if it has a default // constructor MockInvoker.getMockObject(config.getScopeModel().getExtensionDirector(), normalizedMock, interfaceClass); } }
@Test void checkMock1() { Assertions.assertThrows(IllegalStateException.class, () -> { InterfaceConfig interfaceConfig = new InterfaceConfig(); interfaceConfig.setMock("return {a, b}"); ConfigValidationUtils.checkMock(Greeting.class, interfaceConfig); }); }
@Override public SchedulerTypeInfo getSchedulerInfo() { try { long startTime = Time.now(); Collection<SubClusterInfo> subClustersActive = federationFacade.getActiveSubClusters(); Class[] argsClasses = new Class[]{}; Object[] args = new Object[]{}; ClientMethod remoteMethod = new ClientMethod("getSchedulerInfo", argsClasses, args); Map<SubClusterInfo, SchedulerTypeInfo> subClusterInfoMap = invokeConcurrent(subClustersActive, remoteMethod, SchedulerTypeInfo.class); FederationSchedulerTypeInfo federationSchedulerTypeInfo = new FederationSchedulerTypeInfo(); subClusterInfoMap.forEach((subClusterInfo, schedulerTypeInfo) -> { SubClusterId subClusterId = subClusterInfo.getSubClusterId(); schedulerTypeInfo.setSubClusterId(subClusterId.getId()); federationSchedulerTypeInfo.getList().add(schedulerTypeInfo); }); long stopTime = Time.now(); RouterAuditLogger.logSuccess(getUser().getShortUserName(), GET_SCHEDULERINFO, TARGET_WEB_SERVICE); routerMetrics.succeededGetSchedulerInfoRetrieved(stopTime - startTime); return federationSchedulerTypeInfo; } catch (NotFoundException e) { routerMetrics.incrGetSchedulerInfoFailedRetrieved(); RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_SCHEDULERINFO, UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage()); RouterServerUtil.logAndThrowRunTimeException("Get all active sub cluster(s) error.", e); } catch (YarnException | IOException e) { routerMetrics.incrGetSchedulerInfoFailedRetrieved(); RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_SCHEDULERINFO, UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage()); RouterServerUtil.logAndThrowRunTimeException("getSchedulerInfo error.", e); } routerMetrics.incrGetSchedulerInfoFailedRetrieved(); RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_SCHEDULERINFO, UNKNOWN, TARGET_WEB_SERVICE, "getSchedulerInfo error."); throw new RuntimeException("getSchedulerInfo error."); }
@Test public void testGetSchedulerInfo() { // In this test case, we will get the return results of 4 sub-clusters. SchedulerTypeInfo typeInfo = interceptor.getSchedulerInfo(); Assert.assertNotNull(typeInfo); Assert.assertTrue(typeInfo instanceof FederationSchedulerTypeInfo); FederationSchedulerTypeInfo federationSchedulerTypeInfo = (FederationSchedulerTypeInfo) typeInfo; Assert.assertNotNull(federationSchedulerTypeInfo); List<SchedulerTypeInfo> schedulerTypeInfos = federationSchedulerTypeInfo.getList(); Assert.assertNotNull(schedulerTypeInfos); Assert.assertEquals(4, schedulerTypeInfos.size()); List<String> subClusterIds = subClusters.stream().map(SubClusterId::getId). collect(Collectors.toList()); for (SchedulerTypeInfo schedulerTypeInfo : schedulerTypeInfos) { Assert.assertNotNull(schedulerTypeInfo); // 1. Whether the returned subClusterId is in the subCluster list String subClusterId = schedulerTypeInfo.getSubClusterId(); Assert.assertTrue(subClusterIds.contains(subClusterId)); // 2. We test CapacityScheduler, the returned type should be CapacityScheduler. SchedulerInfo schedulerInfo = schedulerTypeInfo.getSchedulerInfo(); Assert.assertNotNull(schedulerInfo); Assert.assertTrue(schedulerInfo instanceof CapacitySchedulerInfo); CapacitySchedulerInfo capacitySchedulerInfo = (CapacitySchedulerInfo) schedulerInfo; Assert.assertNotNull(capacitySchedulerInfo); // 3. The parent queue name should be root String queueName = capacitySchedulerInfo.getQueueName(); Assert.assertEquals("root", queueName); // 4. schedulerType should be CapacityScheduler String schedulerType = capacitySchedulerInfo.getSchedulerType(); Assert.assertEquals("Capacity Scheduler", schedulerType); // 5. queue path should be root String queuePath = capacitySchedulerInfo.getQueuePath(); Assert.assertEquals("root", queuePath); // 6. mockRM has 2 test queues, [root.a, root.b] List<String> queues = Lists.newArrayList("root.a", "root.b"); CapacitySchedulerQueueInfoList csSchedulerQueueInfoList = capacitySchedulerInfo.getQueues(); Assert.assertNotNull(csSchedulerQueueInfoList); List<CapacitySchedulerQueueInfo> csQueueInfoList = csSchedulerQueueInfoList.getQueueInfoList(); Assert.assertEquals(2, csQueueInfoList.size()); for (CapacitySchedulerQueueInfo csQueueInfo : csQueueInfoList) { Assert.assertNotNull(csQueueInfo); Assert.assertTrue(queues.contains(csQueueInfo.getQueuePath())); } } }
@PublicAPI(usage = ACCESS) public Set<Dependency> getDirectDependenciesToSelf() { return reverseDependencies.getDirectDependenciesTo(this); }
@Test public void direct_dependencies_to_self_by_class_type_parameters() { class ClassOtherTypeSignaturesDependOn { } @SuppressWarnings("unused") class FirstDependingOnOtherThroughTypeParameter<T extends ClassOtherTypeSignaturesDependOn> { } @SuppressWarnings("unused") class SecondDependingOnOtherThroughTypeParameter< U extends Map<?, List<? super Set<? extends ClassOtherTypeSignaturesDependOn>>>, V extends Map<ClassOtherTypeSignaturesDependOn, ClassOtherTypeSignaturesDependOn>> { } JavaClass someClass = importClasses(ClassOtherTypeSignaturesDependOn.class, FirstDependingOnOtherThroughTypeParameter.class, SecondDependingOnOtherThroughTypeParameter.class) .get(ClassOtherTypeSignaturesDependOn.class); assertThatDependencies(someClass.getDirectDependenciesToSelf()) .contain(from(FirstDependingOnOtherThroughTypeParameter.class).to(ClassOtherTypeSignaturesDependOn.class).inLocation(getClass(), 0) .withDescriptionContaining("type parameter 'T' depending on") .from(SecondDependingOnOtherThroughTypeParameter.class).to(ClassOtherTypeSignaturesDependOn.class).inLocation(getClass(), 0) .withDescriptionContaining("type parameter 'U' depending on") .from(SecondDependingOnOtherThroughTypeParameter.class).to(ClassOtherTypeSignaturesDependOn.class).inLocation(getClass(), 0) .withDescriptionContaining("type parameter 'V' depending on") ); }
public static EmoteClue forText(String text) { for (EmoteClue clue : CLUES) { if (clue.getText().equalsIgnoreCase(text)) { return clue; } } return null; }
@Test public void itemRequirementsFullBarrowsSetNonDegraged() { EmoteClue barrowsClue = EmoteClue.forText("Do a jig at the barrows chest. Beware of double agents! Equip any full barrows set."); ItemRequirement[] requirements = barrowsClue.getItemRequirements(); assertEquals(1, requirements.length); ItemRequirement fullBarrowsSetRequirement = requirements[0]; assertTrue("Full Ahrim set, nondegraded", fullBarrowsSetRequirement.fulfilledBy(new Item[]{ item(AHRIMS_STAFF), item(AHRIMS_HOOD), item(AHRIMS_ROBETOP), item(AHRIMS_ROBESKIRT) })); assertTrue("Full Dharok set, nondegraded", fullBarrowsSetRequirement.fulfilledBy(new Item[]{ item(DHAROKS_GREATAXE), item(DHAROKS_HELM), item(DHAROKS_PLATEBODY), item(DHAROKS_PLATELEGS) })); assertTrue("Full Guthan set, nondegraded", fullBarrowsSetRequirement.fulfilledBy(new Item[]{ item(GUTHANS_WARSPEAR), item(GUTHANS_HELM), item(GUTHANS_PLATEBODY), item(GUTHANS_CHAINSKIRT) })); assertTrue("Full Karil set, nondegraded", fullBarrowsSetRequirement.fulfilledBy(new Item[]{ item(KARILS_CROSSBOW), item(KARILS_COIF), item(KARILS_LEATHERTOP), item(KARILS_LEATHERSKIRT) })); assertTrue("Full Torag set, nondegraded", fullBarrowsSetRequirement.fulfilledBy(new Item[]{ item(TORAGS_HAMMERS), item(TORAGS_HELM), item(TORAGS_PLATEBODY), item(TORAGS_PLATELEGS) })); assertTrue("Full Verac set, nondegraded", fullBarrowsSetRequirement.fulfilledBy(new Item[]{ item(VERACS_FLAIL), item(VERACS_HELM), item(VERACS_BRASSARD), item(VERACS_PLATESKIRT) })); }
@Override public String buildContext() { final PluginHandleDO after = (PluginHandleDO) getAfter(); if (Objects.isNull(getBefore())) { return String.format("the plugin-handle [%s] is %s", after.getField(), StringUtils.lowerCase(getType().getType().toString())); } return String.format("the plugin-handle [%s] is %s : %s", after.getField(), StringUtils.lowerCase(getType().getType().toString()), contrast()); }
@Test public void deletePluginHandleBuildContextTest() { PluginHandleChangedEvent pluginChangedEvent = new PluginHandleChangedEvent(pluginHandleDO, null, EventTypeEnum.PLUGIN_HANDLE_DELETE, "test-operator"); String context = String.format("the plugin-handle [%s] is %s", pluginHandleDO.getField(), StringUtils.lowerCase(EventTypeEnum.PLUGIN_HANDLE_DELETE.getType().toString())); assertEquals(context, pluginChangedEvent.buildContext()); }
String getNodePath(BaseMetadataIdentifier metadataIdentifier) { return toRootDir() + metadataIdentifier.getUniqueKey(KeyTypeEnum.PATH); }
@Test void testConsumer() throws ClassNotFoundException, InterruptedException { String interfaceName = "org.apache.dubbo.metadata.store.zookeeper.ZookeeperMetadataReport4TstService"; String version = "1.0.0.zk.md"; String group = null; String application = "vic.zk.md"; MetadataIdentifier consumerMetadataIdentifier = storeConsumer(zookeeperMetadataReport, interfaceName, version, group, application); String fileContent = zookeeperMetadataReport.zkClient.getContent( zookeeperMetadataReport.getNodePath(consumerMetadataIdentifier)); fileContent = waitSeconds(fileContent, 3500, zookeeperMetadataReport.getNodePath(consumerMetadataIdentifier)); Assertions.assertNotNull(fileContent); deletePath(consumerMetadataIdentifier, zookeeperMetadataReport); fileContent = zookeeperMetadataReport.zkClient.getContent( zookeeperMetadataReport.getNodePath(consumerMetadataIdentifier)); fileContent = waitSeconds(fileContent, 1000, zookeeperMetadataReport.getNodePath(consumerMetadataIdentifier)); Assertions.assertNull(fileContent); consumerMetadataIdentifier = storeConsumer(zookeeperMetadataReport, interfaceName, version, group, application); fileContent = zookeeperMetadataReport.zkClient.getContent( zookeeperMetadataReport.getNodePath(consumerMetadataIdentifier)); fileContent = waitSeconds(fileContent, 3000, zookeeperMetadataReport.getNodePath(consumerMetadataIdentifier)); Assertions.assertNotNull(fileContent); Assertions.assertEquals(fileContent, "{\"paramConsumerTest\":\"zkCm\"}"); }
@Override public void execute(Object[] args) { invokeMethod(args); }
@Test void can_provide_location_of_step() throws Throwable { Method method = JavaStepDefinitionTest.class.getMethod("method_throws"); JavaStepDefinition definition = new JavaStepDefinition(method, "three (.*) mice", lookup); CucumberInvocationTargetException exception = assertThrows(CucumberInvocationTargetException.class, () -> definition.execute(new Object[0])); Optional<StackTraceElement> match = stream(exception.getInvocationTargetExceptionCause().getStackTrace()) .filter(definition::isDefinedAt).findFirst(); StackTraceElement stackTraceElement = match.get(); assertAll( () -> assertThat(stackTraceElement.getMethodName(), is("method_throws")), () -> assertThat(stackTraceElement.getClassName(), is(JavaStepDefinitionTest.class.getName()))); }
public static String hashKey(long value) { return encodeBase62(value, false); }
@Test public void testHashKey() { Assert.assertEquals("29C4", IdHelper.hashKey(1000000L)); Assert.assertEquals("o9oZ9l1", IdHelper.hashKey(100000000000L)); }
@Override public void setHeaders(URLConnection connection, HTTPSamplerBase sampler) throws IOException { // Get the encoding to use for the request String contentEncoding = sampler.getContentEncoding(); long contentLength = 0L; boolean hasPutBody = false; // Check if the header manager had a content type header // This allows the user to specify their own content-type for a PUT request String contentTypeHeader = connection.getRequestProperty(HTTPConstants.HEADER_CONTENT_TYPE); boolean hasContentTypeHeader = contentTypeHeader != null && contentTypeHeader.length() > 0; HTTPFileArg[] files = sampler.getHTTPFiles(); // If there are no arguments, we can send a file as the body of the request if(sampler.getArguments() != null && sampler.getArguments().getArgumentCount() == 0 && sampler.getSendFileAsPostBody()) { // If getSendFileAsPostBody returned true, it's sure that file is not null HTTPFileArg file = files[0]; hasPutBody = true; if(!hasContentTypeHeader) { // Allow the mimetype of the file to control the content type if(file.getMimeType().length() > 0) { connection.setRequestProperty(HTTPConstants.HEADER_CONTENT_TYPE, file.getMimeType()); } } // Create the content length we are going to write File inputFile = new File(file.getPath()); contentLength = inputFile.length(); } else if(sampler.getSendParameterValuesAsPostBody()) { hasPutBody = true; // Allow the mimetype of the file to control the content type // This is not obvious in GUI if you are not uploading any files, // but just sending the content of nameless parameters if(!hasContentTypeHeader && files.length == 1 && files[0].getMimeType().length() > 0) { connection.setRequestProperty(HTTPConstants.HEADER_CONTENT_TYPE, files[0].getMimeType()); } // We create the post body content now, so we know the size ByteArrayOutputStream bos = new ByteArrayOutputStream(); // Just append all the parameter values, and use that as the put body StringBuilder putBodyBuffer = new StringBuilder(); for (JMeterProperty jMeterProperty : sampler.getArguments()) { HTTPArgument arg = (HTTPArgument) jMeterProperty.getObjectValue(); putBodyBuffer.append(arg.getEncodedValue(contentEncoding)); } bos.write(putBodyBuffer.toString().getBytes(contentEncoding)); bos.flush(); bos.close(); // Keep the content, will be sent later formDataUrlEncoded = bos.toByteArray(); contentLength = bos.toByteArray().length; } if(hasPutBody) { // Set the content length connection.setRequestProperty(HTTPConstants.HEADER_CONTENT_LENGTH, Long.toString(contentLength)); // Make the connection ready for sending post data connection.setDoOutput(true); } }
@Test public void testSetHeadersWithNoParams() throws Exception { URLConnection uc = new NullURLConnection(); HTTPSampler sampler = new HTTPSampler(); sampler.setHTTPFiles(new HTTPFileArg[] { new HTTPFileArg("file1", "", "mime1") }); PutWriter pw = new PutWriter(); pw.setHeaders(uc, sampler); assertEquals("mime1", uc.getRequestProperty(HTTPConstants.HEADER_CONTENT_TYPE)); }
public boolean hasMessagesIndexedUpTo(TimeRange timeRange) { return ProcessingNodesState.SOME_UP_TO_DATE == processingStatusService.calculateProcessingState(timeRange); }
@Test public void hasMessagesIndexedUpTo() { TimeRange any = AbsoluteRange.create("2019-01-01T00:00:00.000Z", "2019-01-01T00:00:30.000Z"); when(dbProcessingStatusService.calculateProcessingState(any)).thenReturn(ProcessingNodesState.SOME_UP_TO_DATE); assertThat(dependencyCheck.hasMessagesIndexedUpTo(any)).isTrue(); }
@Override public boolean edgeExists(String source, String target) { checkId(source); checkId(target); NodeDraftImpl sourceNode = getNode(source); NodeDraftImpl targetNode = getNode(target); if (sourceNode != null && targetNode != null) { boolean undirected = edgeDefault.equals(EdgeDirectionDefault.UNDIRECTED) || (undirectedEdgesCount > 0 && directedEdgesCount == 0); long edgeId = getLongId(sourceNode, targetNode, !undirected); for (Long2ObjectMap l : edgeTypeSets) { if (l != null) { if (l.containsKey(edgeId)) { return true; } } } } return false; }
@Test public void testEdgeExists() { ImportContainerImpl importContainer = new ImportContainerImpl(); generateTinyGraph(importContainer); Assert.assertTrue(importContainer.edgeExists("1")); Assert.assertTrue(importContainer.edgeExists("1", "2")); Assert.assertTrue(importContainer.edgeExists("2", "1")); }
public static ParsedCommand parse( // CHECKSTYLE_RULES.ON: CyclomaticComplexity final String sql, final Map<String, String> variables) { validateSupportedStatementType(sql); final String substituted; try { substituted = VariableSubstitutor.substitute(KSQL_PARSER.parse(sql).get(0), variables); } catch (ParseFailedException e) { throw new MigrationException(String.format( "Failed to parse the statement. Statement: %s. Reason: %s", sql, e.getMessage())); } final SqlBaseParser.SingleStatementContext statementContext = KSQL_PARSER.parse(substituted) .get(0).getStatement(); final boolean isStatement = StatementType.get(statementContext.statement().getClass()) == StatementType.STATEMENT; return new ParsedCommand(substituted, isStatement ? Optional.empty() : Optional.of(new AstBuilder(TypeRegistry.EMPTY) .buildStatement(statementContext))); }
@Test public void shouldParseDropConnectorIfExistsStatement() { // Given: final String dropConnector = "DRoP CONNEcTOR IF EXISTS `jdbc-connector` ;"; // When: List<CommandParser.ParsedCommand> commands = parse(dropConnector); // Then: assertThat(commands.size(), is(1)); assertThat(commands.get(0).getCommand(), is(dropConnector)); assertThat(commands.get(0).getStatement().isPresent(), is (true)); assertThat(commands.get(0).getStatement().get(), instanceOf(DropConnector.class)); assertThat(((DropConnector) commands.get(0).getStatement().get()).getConnectorName(), is("jdbc-connector")); assertThat(((DropConnector) commands.get(0).getStatement().get()).getIfExists(), is(true)); }
public final Sensor storeLevelSensor(final String taskId, final String storeName, final String sensorSuffix, final RecordingLevel recordingLevel, final Sensor... parents) { final String sensorPrefix = storeSensorPrefix(Thread.currentThread().getName(), taskId, storeName); // since the keys in the map storeLevelSensors contain the name of the current thread and threads only // access keys in which their name is contained, the value in the maps do not need to be thread safe // and we can use a LinkedList here. // TODO: In future, we could use thread local maps since each thread will exclusively access the set of keys // that contain its name. Similar is true for the other metric levels. Thread-level metrics need some // special attention, since they are created before the thread is constructed. The creation of those // metrics could be moved into the run() method of the thread. return getSensors(storeLevelSensors, sensorSuffix, sensorPrefix, recordingLevel, parents); }
@Test public void shouldNotUseSameStoreLevelSensorKeyWithDifferentStoreNames() { final Metrics metrics = mock(Metrics.class); final ArgumentCaptor<String> sensorKeys = setUpSensorKeyTests(metrics); final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(metrics, CLIENT_ID, VERSION, time); streamsMetrics.storeLevelSensor(TASK_ID1, STORE_NAME1, SENSOR_NAME_1, INFO_RECORDING_LEVEL); streamsMetrics.storeLevelSensor(TASK_ID1, STORE_NAME2, SENSOR_NAME_1, INFO_RECORDING_LEVEL); assertThat(sensorKeys.getAllValues().get(0), not(sensorKeys.getAllValues().get(1))); }
public static <MSG extends Message> MSG read(File file, Parser<MSG> parser) { InputStream input = null; try { input = new BufferedInputStream(new FileInputStream(file)); return parser.parseFrom(input); } catch (Exception e) { throw ContextException.of("Unable to read message", e).addContext("file", file); } finally { IOUtils.closeQuietly(input); } }
@Test public void read_file_returns_empty_message_if_file_is_empty() throws Exception { File file = temp.newFile(); Fake msg = Protobuf.read(file, Fake.parser()); assertThat(msg).isNotNull(); assertThat(msg.isInitialized()).isTrue(); }
public static URLArgumentPlaceholderType valueOf(final Properties queryProps) { try { return URLArgumentPlaceholderType.valueOf(queryProps.getProperty(KEY, URLArgumentPlaceholderType.NONE.name()).toUpperCase()); } catch (final IllegalArgumentException ex) { return URLArgumentPlaceholderType.NONE; } }
@Test void assertValueOfWithInvalidQueryProperties() { assertThat(URLArgumentPlaceholderTypeFactory.valueOf(PropertiesBuilder.build(new Property("placeholder-type", "invalid"))), is(URLArgumentPlaceholderType.NONE)); }
@SuppressWarnings("StringSplitter") public static List<String> extractImportantUrlData(String text) throws MalformedURLException { final List<String> importantParts = new ArrayList<>(); final URL url = new URL(text); final String[] domain = url.getHost().split("\\."); //add the domain except www and the tld. for (int i = 0; i < domain.length - 1; i++) { final String sub = domain[i]; if (Arrays.binarySearch(IGNORE_LIST, sub.toLowerCase()) < 0) { importantParts.add(sub); } } final String document = url.getPath(); final String[] pathParts = document.split("[\\\\//]"); for (int i = 0; i < pathParts.length - 1; i++) { if (!pathParts[i].isEmpty()) { importantParts.add(pathParts[i]); } } if (pathParts.length > 0 && !pathParts[pathParts.length - 1].isEmpty()) { final String tmp = pathParts[pathParts.length - 1]; final int pos = tmp.lastIndexOf('.'); if (pos > 1) { importantParts.add(tmp.substring(0, pos)); } else if (pos == 0 && tmp.length() > 1) { importantParts.add(tmp.substring(1)); } else { importantParts.add(tmp); } } return importantParts; }
@Test public void testExtractImportantUrlData() throws Exception { String text = "http://github.com/jeremylong/DependencyCheck/.gitignore"; List<String> expResult = Arrays.asList("jeremylong", "DependencyCheck", "gitignore"); List<String> result = UrlStringUtils.extractImportantUrlData(text); assertEquals(expResult, result); text = "http://jeremylong.github.io/DependencyCheck/index.html"; expResult = Arrays.asList("jeremylong", "DependencyCheck", "index"); result = UrlStringUtils.extractImportantUrlData(text); assertEquals(expResult, result); text = "http://example.com/jeremylong/DependencyCheck/something"; expResult = Arrays.asList("example", "jeremylong", "DependencyCheck", "something"); result = UrlStringUtils.extractImportantUrlData(text); assertEquals(expResult, result); }
@Override public ObjectNode encode(LispNatAddress address, CodecContext context) { checkNotNull(address, "LispListAddress cannot be null"); final ObjectNode result = context.mapper().createObjectNode() .put(MS_UDP_PORT_NUMBER, address.getMsUdpPortNumber()) .put(ETR_UDP_PORT_NUMBER, address.getEtrUdpPortNumber()); final JsonCodec<MappingAddress> addressCodec = context.codec(MappingAddress.class); if (address.getGlobalEtrRlocAddress() != null) { ObjectNode globalEtrRlocNode = addressCodec.encode(address.getGlobalEtrRlocAddress(), context); result.set(GLOBAL_ETR_RLOC_ADDRESS, globalEtrRlocNode); } if (address.getMsRlocAddress() != null) { ObjectNode msRlocNode = addressCodec.encode(address.getMsRlocAddress(), context); result.set(MS_RLOC_ADDRESS, msRlocNode); } if (address.getPrivateEtrRlocAddress() != null) { ObjectNode privateEtrRlocNode = addressCodec.encode(address.getPrivateEtrRlocAddress(), context); result.set(PRIVATE_ETR_RLOC_ADDRESS, privateEtrRlocNode); } final ArrayNode jsonRtrRlocNodes = result.putArray(RTR_RLOC_ADDRESSES); if (address.getRtrRlocAddresses() != null) { for (final MappingAddress mappingAddress : address.getRtrRlocAddresses()) { jsonRtrRlocNodes.add(addressCodec.encode(mappingAddress, context)); } } return result; }
@Test public void testLispNatAddressEncode() { List<MappingAddress> rtrRlocs = ImmutableList.of(MappingAddresses.ipv4MappingAddress(GLOBAL_ETR_RLOC_ADDRESS), MappingAddresses.ipv4MappingAddress(MS_RLOC_ADDRESS), MappingAddresses.ipv4MappingAddress(PRIVATE_ETR_RLOC_ADDRESS)); LispNatAddress address = new LispNatAddress.Builder() .withMsUdpPortNumber(MS_UDP_PORT_NUMBER) .withEtrUdpPortNumber(ETR_UDP_PORT_NUMBER) .withGlobalEtrRlocAddress(MappingAddresses.ipv4MappingAddress(GLOBAL_ETR_RLOC_ADDRESS)) .withMsRlocAddress(MappingAddresses.ipv4MappingAddress(MS_RLOC_ADDRESS)) .withPrivateEtrRlocAddress(MappingAddresses.ipv4MappingAddress(PRIVATE_ETR_RLOC_ADDRESS)) .withRtrRlocAddresses(rtrRlocs) .build(); ObjectNode addressJson = natAddressCodec.encode(address, context); assertThat("errors in encoding NAT address JSON", addressJson, LispNatAddressJsonMatcher.matchesNatAddress(address)); }
public static boolean canDrop( FilterPredicate pred, List<ColumnChunkMetaData> columns, DictionaryPageReadStore dictionaries) { Objects.requireNonNull(pred, "pred cannot be null"); Objects.requireNonNull(columns, "columns cannot be null"); return pred.accept(new DictionaryFilter(columns, dictionaries)); }
@Test public void testGtFloat() throws Exception { FloatColumn f = floatColumn("float_field"); float highest = Float.MIN_VALUE; for (int value : intValues) { highest = Math.max(highest, toFloat(value)); } assertTrue("Should drop: > highest value", canDrop(gt(f, highest), ccmd, dictionaries)); assertFalse("Should not drop: > (highest value - 1.0)", canDrop(gt(f, highest - 1.0f), ccmd, dictionaries)); assertFalse("Should not drop: contains matching values", canDrop(gt(f, Float.MIN_VALUE), ccmd, dictionaries)); }
@Override public ObjectNode encode(Criterion criterion, CodecContext context) { EncodeCriterionCodecHelper encoder = new EncodeCriterionCodecHelper(criterion, context); return encoder.encode(); }
@Test public void matchTcpDstTest() { Criterion criterion = Criteria.matchTcpDst(tpPort); ObjectNode result = criterionCodec.encode(criterion, context); assertThat(result, matchesCriterion(criterion)); }
@Nullable @Override public Message decode(@Nonnull RawMessage rawMessage) { final String msg = new String(rawMessage.getPayload(), charset); try (Timer.Context ignored = this.decodeTime.time()) { final ResolvableInetSocketAddress address = rawMessage.getRemoteAddress(); final InetSocketAddress remoteAddress; if (address == null) { remoteAddress = null; } else { remoteAddress = address.getInetSocketAddress(); } return parse(msg, remoteAddress == null ? null : remoteAddress.getAddress(), rawMessage.getTimestamp()); } }
@Test public void testDecodeUnstructured() throws Exception { final Message message = codec.decode(buildRawMessage(UNSTRUCTURED)); assertNotNull(message); assertEquals("c4dc57ba1ebb syslog-ng[7208]: syslog-ng starting up; version='3.5.3'", message.getMessage()); assertEquals(new DateTime(YEAR + "-10-21T12:09:37"), message.getField("timestamp")); assertEquals("c4dc57ba1ebb", message.getField("source")); assertEquals(5, message.getField("level")); assertEquals("syslogd", message.getField("facility")); assertNull(message.getField("full_message")); assertEquals(5, message.getField("facility_num")); }
public void addFilenameChangedListener( FilenameChangedListener listener ) { if ( listener != null ) { filenameChangedListeners.add( listener ); } }
@Test public void testAddFilenameChangedListener() { meta.fireFilenameChangedListeners( "a", "a" ); meta.fireFilenameChangedListeners( "a", "b" ); meta.addFilenameChangedListener( null ); meta.fireFilenameChangedListeners( "a", "b" ); FilenameChangedListener listener = mock( FilenameChangedListener.class ); meta.addFilenameChangedListener( listener ); meta.fireFilenameChangedListeners( "b", "a" ); verify( listener, times( 1 ) ).filenameChanged( meta, "b", "a" ); meta.removeFilenameChangedListener( null ); meta.removeFilenameChangedListener( listener ); meta.fireFilenameChangedListeners( "b", "a" ); verifyNoMoreInteractions( listener ); }
@Override public int count(String term) { MutableInt count = freq.get(term); return count == null ? 0 : count.value; }
@Test public void testGetBigramFrequency() { System.out.println("getBigramFrequency"); Bigram bigram = new Bigram("romantic", "comedy"); assertEquals(9, corpus.count(bigram)); }
@Override public double logp(int k) { if (k < 0) { return Double.NEGATIVE_INFINITY; } else { return lgamma(r + k) - lfactorial(k) - lgamma(r) + r * Math.log(p) + k * Math.log(1 - p); } }
@Test public void testLogP() { System.out.println("logP"); NegativeBinomialDistribution instance = new NegativeBinomialDistribution(3, 0.3); instance.rand(); assertEquals(Math.log(0.027), instance.logp(0), 1E-7); assertEquals(Math.log(0.0567), instance.logp(1), 1E-7); assertEquals(Math.log(0.07938), instance.logp(2), 1E-7); assertEquals(Math.log(0.09261), instance.logp(3), 1E-7); assertEquals(Math.log(0.05033709), instance.logp(10), 1E-7); }
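For reference, the expression evaluated by logp above is the log of the negative-binomial mass function in the "k failures before the r-th success, success probability p" parameterization; this reading is inferred from the code and the test values, not stated in the source.

\log P(K = k) = \log\Gamma(r + k) - \log k! - \log\Gamma(r) + r\log p + k\log(1 - p), \qquad P(K = k) = \binom{k + r - 1}{k}\, p^{r}(1 - p)^{k}

With r = 3 and p = 0.3 this gives P(0) = 0.3^3 = 0.027 and P(1) = 3 * 0.027 * 0.7 = 0.0567, matching the first two assertions in the test.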
@Override public int compare(T o1, T o2) { if (o1 == o2) return 0; if (o1 == null) return -1; if (o2 == null) return 1; return compareNonNull(o1, o2); }
@Test public void should_evaluate_non_null_instance_as_greater_than_null_instance() { // GIVEN Object o1 = "foo"; Object o2 = null; // WHEN int compare = NULL_SAFE_COMPARATOR.compare(o1, o2); // THEN then(compare).isPositive(); }
@Override public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) { super.onDataReceived(device, data); if (data.size() < 7) { onInvalidDataReceived(device, data); return; } // First byte: flags int offset = 0; final int flags = data.getIntValue(Data.FORMAT_UINT8, offset++); // See UNIT_* for unit options final int unit = (flags & 0x01) == UNIT_mmHg ? UNIT_mmHg : UNIT_kPa; final boolean timestampPresent = (flags & 0x02) != 0; final boolean pulseRatePresent = (flags & 0x04) != 0; final boolean userIdPresent = (flags & 0x08) != 0; final boolean measurementStatusPresent = (flags & 0x10) != 0; if (data.size() < 7 + (timestampPresent ? 7 : 0) + (pulseRatePresent ? 2 : 0) + (userIdPresent ? 1 : 0) + (measurementStatusPresent ? 2 : 0)) { onInvalidDataReceived(device, data); return; } // Following bytes - systolic, diastolic and mean arterial pressure final float cuffPressure = data.getFloatValue(Data.FORMAT_SFLOAT, offset); // final float ignored_1 = data.getFloatValue(Data.FORMAT_SFLOAT, offset + 2); // final float ignored_2 = data.getFloatValue(Data.FORMAT_SFLOAT, offset + 4); offset += 6; // Parse timestamp if present Calendar calendar = null; if (timestampPresent) { calendar = DateTimeDataCallback.readDateTime(data, offset); offset += 7; } // Parse pulse rate if present Float pulseRate = null; if (pulseRatePresent) { pulseRate = data.getFloatValue(Data.FORMAT_SFLOAT, offset); offset += 2; } // Read user id if present Integer userId = null; if (userIdPresent) { userId = data.getIntValue(Data.FORMAT_UINT8, offset); offset += 1; } // Read measurement status if present BPMStatus status = null; if (measurementStatusPresent) { final int measurementStatus = data.getIntValue(Data.FORMAT_UINT16_LE, offset); // offset += 2; status = new BPMStatus(measurementStatus); } onIntermediateCuffPressureReceived(device, cuffPressure, unit, pulseRate, userId, status, calendar); }
@Test public void onInvalidDataReceived_noTimestamp() { final DataReceivedCallback callback = new IntermediateCuffPressureDataCallback() { @Override public void onIntermediateCuffPressureReceived(@NonNull final BluetoothDevice device, final float cuffPressure, final int unit, @Nullable final Float pulseRate, @Nullable final Integer userID, @Nullable final BPMStatus status, @Nullable final Calendar calendar) { assertEquals("Invalid data reported as correct", 1, 2); } @Override public void onInvalidDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) { assertEquals("Invalid ICP", 7, data.size()); } }; final Data data = new Data(new byte[] { 3, 2, 3, 4, 5, 6, 7 }); assertArrayEquals( new byte[] { 3, 2, 3, 4, 5, 6, 7 }, data.getValue() ); callback.onDataReceived(null, data); }
@Nonnull @SuppressWarnings("unchecked") public static <T extends Factory> T getFactoryByIdentifier( String identifier, Class<T> factoryClass) { final ServiceLoader<Factory> loader = ServiceLoader.load(Factory.class); final List<Factory> factoryList = new ArrayList<>(); for (Factory factory : loader) { if (factory != null && factory.identifier().equals(identifier) && factoryClass.isAssignableFrom(factory.getClass())) { factoryList.add(factory); } } if (factoryList.isEmpty()) { throw new RuntimeException( String.format( "Cannot find factory with identifier \"%s\" in the classpath.\n\n" + "Available factory classes are:\n\n" + "%s", identifier, StreamSupport.stream(loader.spliterator(), false) .map(f -> f.getClass().getName()) .sorted() .collect(Collectors.joining("\n")))); } if (factoryList.size() > 1) { throw new RuntimeException( String.format( "Multiple factories found in the classpath.\n\n" + "Ambiguous factory classes are:\n\n" + "%s", factoryList.stream() .map(f -> f.getClass().getName()) .sorted() .collect(Collectors.joining("\n")))); } return (T) factoryList.get(0); }
@Test void getFactoryByIdentifier() { assertThat( FactoryDiscoveryUtils.getFactoryByIdentifier( "data-source-factory-1", Factory.class)) .isInstanceOf(DataSourceFactory1.class); assertThat( FactoryDiscoveryUtils.getFactoryByIdentifier( "data-sink-factory-1", Factory.class)) .isInstanceOf(DataSinkFactory1.class); assertThatThrownBy( () -> FactoryDiscoveryUtils.getFactoryByIdentifier( "data-sink-factory-3", Factory.class)) .hasMessageStartingWith( "Cannot find factory with identifier \"data-sink-factory-3\" in the classpath"); }
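For context, getFactoryByIdentifier relies on java.util.ServiceLoader, so a factory only becomes discoverable once it is registered as a service provider. Below is a minimal sketch of such an implementation, assuming Factory only requires identifier() here (the real interface and package names are not shown in the source and may declare more methods); the class name DataSourceFactory1 is borrowed from the test above.

// Hypothetical provider; what matters is that identifier() returns the
// string that getFactoryByIdentifier is asked to look up.
public class DataSourceFactory1 implements Factory {
    @Override
    public String identifier() {
        return "data-source-factory-1";
    }
}

The class is then registered by listing its fully qualified name in a provider-configuration file under META-INF/services/, named after the fully qualified Factory interface; without that file, ServiceLoader.load(Factory.class) never sees the class and the "Cannot find factory" error above is thrown.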
@Override public void metricChange(final KafkaMetric metric) { if (!metric.metricName().group().equals(StreamsMetricsImpl.STATE_STORE_LEVEL_GROUP)) { return; } metricRemoval(metric); final Collection<AggregatedMetric<?>> registered = registeredMetrics.get(metric.metricName().name()); if (registered == null) { return; } registered.forEach(r -> r.add(metric)); }
@Test public void shouldIgnoreMetricsFromWrongGroup() { // When: collector.metricChange(mockMetric( "some-group", RocksDBMetricsCollector.BLOCK_CACHE_USAGE, "a", BigInteger.valueOf(123) )); // Then: final Gauge<?> value = verifyAndGetRegisteredMetric(RocksDBMetricsCollector.BLOCK_CACHE_USAGE + "-total"); assertThat(value.value(null, 0), equalTo(BigInteger.valueOf(0))); }
@Override public GetApplicationsResponse getApplications(GetApplicationsRequest request) throws YarnException, IOException { if (request == null) { routerMetrics.incrMultipleAppsFailedRetrieved(); String msg = "Missing getApplications request."; RouterAuditLogger.logFailure(user.getShortUserName(), GET_APPLICATIONS, UNKNOWN, TARGET_CLIENT_RM_SERVICE, msg); RouterServerUtil.logAndThrowException(msg, null); } long startTime = clock.getTime(); ClientMethod remoteMethod = new ClientMethod("getApplications", new Class[] {GetApplicationsRequest.class}, new Object[] {request}); Collection<GetApplicationsResponse> applications = null; try { applications = invokeConcurrent(remoteMethod, GetApplicationsResponse.class); } catch (Exception ex) { routerMetrics.incrMultipleAppsFailedRetrieved(); String msg = "Unable to get applications due to exception."; RouterAuditLogger.logFailure(user.getShortUserName(), GET_APPLICATIONS, UNKNOWN, TARGET_CLIENT_RM_SERVICE, msg); RouterServerUtil.logAndThrowException(msg, ex); } long stopTime = clock.getTime(); routerMetrics.succeededMultipleAppsRetrieved(stopTime - startTime); RouterAuditLogger.logSuccess(user.getShortUserName(), GET_APPLICATIONS, TARGET_CLIENT_RM_SERVICE); // Merge the Application Reports return RouterYarnClientUtils.mergeApplications(applications, returnPartialReport); }
@Test public void testGetApplicationsNullRequest() throws Exception { LOG.info("Test FederationClientInterceptor: Get Applications request."); LambdaTestUtils.intercept(YarnException.class, "Missing getApplications request.", () -> interceptor.getApplications(null)); }
@Override public NacosUser authenticate(String username, String rawPassword) throws AccessException { if (StringUtils.isBlank(username) || StringUtils.isBlank(rawPassword)) { throw new AccessException("user not found!"); } NacosUserDetails nacosUserDetails = (NacosUserDetails) userDetailsService.loadUserByUsername(username); if (nacosUserDetails == null || !PasswordEncoderUtil.matches(rawPassword, nacosUserDetails.getPassword())) { throw new AccessException("user not found!"); } return new NacosUser(nacosUserDetails.getUsername(), jwtTokenManager.createToken(username)); }
@Test void testAuthenticate9() throws AccessException { NacosUserDetails nacosUserDetails = new NacosUserDetails(user); when(userDetailsService.loadUserByUsername(anyString())).thenReturn(nacosUserDetails); when(jwtTokenManager.createToken(anyString())).thenReturn("token"); MockHttpServletRequest mockHttpServletRequest = new MockHttpServletRequest(); mockHttpServletRequest.addHeader(AuthConstants.AUTHORIZATION_HEADER, "token"); mockHttpServletRequest.addParameter(AuthConstants.PARAM_USERNAME, "nacos"); mockHttpServletRequest.addParameter(AuthConstants.PARAM_PASSWORD, "test"); NacosUser authenticate = abstractAuthenticationManager.authenticate(mockHttpServletRequest); assertEquals("token", authenticate.getToken()); assertEquals(user.getUsername(), authenticate.getUserName()); }
public byte[] readAll() throws IOException { if (pos == 0 && count == buf.length) { pos = count; return buf; } byte[] ret = new byte[count - pos]; super.read(ret); return ret; }
@Test public void testConstructWithEmptyArray() throws IOException { try (ExposedByteArrayInputStream s = new ExposedByteArrayInputStream(new byte[0])) { assertEquals(0, s.available()); byte[] data = s.readAll(); assertEquals(0, data.length); } }
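A short usage sketch of the two paths in readAll above: the fast path hands back the backing array without copying when nothing has been consumed yet, otherwise the remaining bytes are copied into a fresh array. This assumes the class extends ByteArrayInputStream (suggested by its use of buf, pos and count) and uses only the byte[] constructor already shown in the test; the import of ExposedByteArrayInputStream is omitted because its package is not shown in the source.

import java.io.IOException;

class ReadAllSketch {
    public static void main(String[] args) throws IOException {
        byte[] source = {1, 2, 3, 4};

        // Fast path: pos == 0 and count == buf.length, so the backing array itself is returned.
        try (ExposedByteArrayInputStream in = new ExposedByteArrayInputStream(source)) {
            byte[] all = in.readAll();
            System.out.println(all == source); // true - no copy was made
        }

        // Copying path: once a byte has been consumed, readAll() allocates a new array for the rest.
        try (ExposedByteArrayInputStream in = new ExposedByteArrayInputStream(source)) {
            in.read(); // consume the first byte
            byte[] rest = in.readAll();
            System.out.println(rest.length); // 3 -> {2, 3, 4}
        }
    }
}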
static Map<String, Comparable> prepareProperties(Map<String, Comparable> properties, Collection<PropertyDefinition> propertyDefinitions) { Map<String, Comparable> mappedProperties = createHashMap(propertyDefinitions.size()); for (PropertyDefinition propertyDefinition : propertyDefinitions) { String propertyKey = propertyDefinition.key(); if (properties.containsKey(propertyKey.replace("-", ""))) { properties.put(propertyKey, properties.remove(propertyKey.replace("-", ""))); } if (!properties.containsKey(propertyKey)) { if (!propertyDefinition.optional()) { throw new InvalidConfigurationException( String.format("Missing property '%s' on discovery strategy", propertyKey)); } continue; } Comparable value = properties.get(propertyKey); TypeConverter typeConverter = propertyDefinition.typeConverter(); Comparable mappedValue = typeConverter.convert(value); ValueValidator validator = propertyDefinition.validator(); if (validator != null) { validator.validate(mappedValue); } mappedProperties.put(propertyKey, mappedValue); } verifyNoUnknownProperties(mappedProperties, properties); return mappedProperties; }
@Test public void unsatisfiedOptionalProperty() { // given Map<String, Comparable> properties = emptyMap(); Collection<PropertyDefinition> propertyDefinitions = singletonList( new SimplePropertyDefinition(PROPERTY_KEY_1, true, STRING)); // when Map<String, Comparable> result = prepareProperties(properties, propertyDefinitions); // then assertTrue(result.isEmpty()); }
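A hedged sketch of the dash-normalization branch in prepareProperties: a key supplied without dashes is re-keyed to the dashed definition key before conversion and validation. It reuses SimplePropertyDefinition and the STRING converter from the test above, assumes the boolean constructor flag is the optional() flag (as the test's usage suggests), and the "access-key" property name is purely illustrative.

// A mutable map is required because prepareProperties re-keys entries in place.
Map<String, Comparable> supplied = new HashMap<>();
supplied.put("accesskey", "abc"); // dashes omitted by the caller

Collection<PropertyDefinition> definitions = singletonList(
        new SimplePropertyDefinition("access-key", false, STRING)); // false = required (assumed)

Map<String, Comparable> prepared = prepareProperties(supplied, definitions);
// prepared now maps "access-key" -> "abc"; if neither spelling had been supplied,
// the required property would trigger InvalidConfigurationException.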
protected TransactionReceipt executeTransaction(Function function) throws IOException, TransactionException { return executeTransaction(function, BigInteger.ZERO); }
@Test public void testStaticEIP1559GasProvider() throws IOException, TransactionException { StaticEIP1559GasProvider gasProvider = new StaticEIP1559GasProvider(1L, BigInteger.TEN, BigInteger.ZERO, BigInteger.ONE); TransactionManager txManager = mock(TransactionManager.class); when(txManager.executeTransaction( any(BigInteger.class), any(BigInteger.class), anyString(), anyString(), any(BigInteger.class), anyBoolean())) .thenReturn(new TransactionReceipt()); contract = new TestContract(ADDRESS, web3j, txManager, gasProvider); Function func = new Function( "test", Collections.<Type>emptyList(), Collections.<TypeReference<?>>emptyList()); contract.executeTransaction(func); verify(txManager) .executeTransactionEIP1559( eq(1L), eq(BigInteger.ZERO), eq(BigInteger.TEN), eq(BigInteger.ONE), anyString(), anyString(), any(BigInteger.class), anyBoolean()); }
public static String readLine(ByteBuffer buffer, Charset charset) { final int startPosition = buffer.position(); final int endPosition = lineEnd(buffer); if (endPosition > startPosition) { byte[] bs = readBytes(buffer, startPosition, endPosition); return StrUtil.str(bs, charset); } else if (endPosition == startPosition) { return StrUtil.EMPTY; } return null; }
@Test public void readLineTest() { String text = "aa\r\nbbb\ncc"; ByteBuffer buffer = ByteBuffer.wrap(text.getBytes()); // First line String line = BufferUtil.readLine(buffer, CharsetUtil.CHARSET_UTF_8); assertEquals("aa", line); // Second line line = BufferUtil.readLine(buffer, CharsetUtil.CHARSET_UTF_8); assertEquals("bbb", line); // The third call returns null because the last line has no line terminator line = BufferUtil.readLine(buffer, CharsetUtil.CHARSET_UTF_8); assertNull(line); // Read the remaining bytes assertEquals("cc", StrUtil.utf8Str(BufferUtil.readBytes(buffer))); }